Compare commits


121 Commits

Author SHA1 Message Date
16eb688607 Clients logo 2025-12-10 12:28:36 +01:00
2132c130a3 update changelog 2025-12-08 23:09:54 +01:00
dffbc31432 Refactor job sorting logic, reduce poll interval, and implement SignalR subscriptions for real-time updates in DataInbox and Layers pages
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m28s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m26s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m38s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m38s
2025-12-08 23:08:46 +01:00
151ecaa98f Fix job scheduler race condition and enhance Blazor reconnection UI
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m27s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m23s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m43s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m39s
2025-12-08 22:45:31 +01:00
b917aa5077 Add Blazor Server reconnection modal and timer functionality 2025-12-08 22:30:31 +01:00
24f5f91704 update readme
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m28s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m27s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m41s
2025-12-08 22:07:16 +01:00
00c9584d03 Schedule Jobs from UI 2025-12-08 22:02:57 +01:00
c94a3b41c9 Duplicate model fields fix 2025-12-08 21:54:48 +01:00
e25cdc4441 UI timezone 2025-12-08 21:42:10 +01:00
1f95d57717 JobList filter fix 2025-12-08 21:28:24 +01:00
d2fb9b8071 Fix API Key Authorization for Cron Jobs by adding [AllowAnonymous] attribute to scheduling endpoints
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m29s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m29s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m46s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m49s
2025-12-06 00:50:20 +01:00
08abd96751 SignalR FIX
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m26s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m24s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m38s
2025-12-06 00:36:22 +01:00
eb570679ba UI Fix
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m28s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m26s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m39s
2025-12-06 00:03:46 +01:00
8713ed9686 LayerDetail improvement
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m27s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m29s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m39s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m37s
2025-12-05 23:49:16 +01:00
595076033b More security!
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m25s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m25s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m40s
2025-12-05 23:41:56 +01:00
0c874575d4 SignalR Security 2025-12-05 23:17:02 +01:00
71c293320b Security: controllers and stack traces in logs
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m32s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m29s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m47s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m42s
2025-12-05 21:37:15 +01:00
46805fb196 Security: JWT
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m32s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m30s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m51s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m50s
2025-12-05 21:17:04 +01:00
51f2679732 Handle unauthorized
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m33s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m53s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m51s
2025-12-05 20:34:18 +01:00
6b0f936f40 P2 processor is working like a charm! 
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m23s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m28s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m37s
2025-12-05 19:10:28 +01:00
0eb2a457f7 PedrolloPL: P2 -> B3
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m25s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m24s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m41s
2025-12-05 10:35:35 +01:00
0cf0bad6b1 UI build fix
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m30s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m25s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m45s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m42s
2025-12-05 10:07:45 +01:00
c7d9acead0 UI refactor (structure cleanup)
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m18s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m18s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m38s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m37s
2025-12-05 09:51:04 +01:00
193127b86a SignalR for realtime entity changes
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m36s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m31s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m55s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m53s
2025-12-04 22:20:00 +01:00
bf2beda390 build fix2
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m36s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m49s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m48s
2025-12-04 18:44:39 +01:00
942da18d85 Build fix
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m47s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m47s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m27s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m28s
2025-12-04 17:57:37 +01:00
a3fa8f9b91 P2 import is working
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m18s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m14s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m10s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m11s
2025-12-04 15:53:11 +01:00
0e3b3933f0 WIP: p2 plugin
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m14s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m10s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m12s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m7s
2025-12-03 13:33:38 +01:00
445c07a8d8 Morska plugins refactor 2025-12-02 21:24:37 +01:00
3f8e62fbb8 WIP: queue engine 2025-12-02 15:35:04 +01:00
248106a239 Plugins little refactor 2025-12-02 15:21:27 +01:00
587d4d66f8 Pedrollo plugins 2025-12-02 14:31:21 +01:00
f68e57ce3b Small UI fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m35s
Build Docker Images / build-and-push (push) Successful in 1m42s
2025-12-02 13:43:01 +01:00
e70a8dda6e Remember list filters 2025-12-02 13:23:03 +01:00
89859cd4a3 Record history is working 2025-12-02 13:14:09 +01:00
0c6848556b WIP: Record history 2025-12-01 18:37:09 +01:00
c8ded1f0a4 Edit Records 2025-12-01 17:56:17 +01:00
7ea5ed506e Filter Layers by Type
All checks were successful
Build Docker Images / test (push) Successful in 1m37s
Build Docker Images / build-and-push (push) Successful in 1m35s
2025-12-01 13:21:45 +01:00
4d7df85df1 DataInbox Detail 2025-12-01 13:00:01 +01:00
3d654d972e DataInbox list 2025-12-01 12:55:47 +01:00
a71b6feefc Pagination style fix 2025-12-01 12:35:22 +01:00
cb0d050ad4 Imports for 2025.12 2025-11-30 16:09:32 +01:00
24387bf96c debug
All checks were successful
Build Docker Images / test (push) Successful in 1m47s
Build Docker Images / build-and-push (push) Successful in 1m55s
2025-11-28 16:15:39 +01:00
87d19dcadf App logo
All checks were successful
Build Docker Images / test (push) Successful in 1m43s
Build Docker Images / build-and-push (push) Successful in 1m47s
2025-11-28 12:13:19 +01:00
a289690b6b Add custom app name per instance 2025-11-28 11:44:19 +01:00
57f1359c96 Build path fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m31s
Build Docker Images / build-and-push (push) Successful in 1m37s
2025-11-28 11:29:38 +01:00
b0e77ec835 Enable Main build
Some checks failed
Build Docker Images / test (push) Failing after 26s
Build Docker Images / build-and-push (push) Failing after 11s
2025-11-28 11:26:58 +01:00
b3053b859a Last refactor steps (I hope) 2025-11-28 11:26:17 +01:00
07423023a0 after refactor cleanup 2025-11-28 11:21:22 +01:00
5db6de1503 Merge pull request 'ddd-refactor' (#2) from ddd-refactor into main
Some checks failed
BuildApp / build-frontend (push) Successful in 1m54s
BuildApp / build-backend (push) Failing after 26s
Reviewed-on: #2
2025-11-28 11:14:42 +01:00
091e33a75e Update DataInbox auth
All checks were successful
Build Docker Images / test (push) Successful in 1m44s
Build Docker Images / build-and-push (push) Successful in 1m38s
2025-11-27 11:07:23 +01:00
908770f4e3 update external API credentials
All checks were successful
Build Docker Images / test (push) Successful in 1m34s
Build Docker Images / build-and-push (push) Successful in 1m41s
2025-11-26 14:34:32 +01:00
4965c3ecd7 get configuration fix
All checks were successful
Build Docker Images / test (push) Successful in 1m36s
Build Docker Images / build-and-push (push) Successful in 1m39s
2025-11-26 14:14:56 +01:00
aa6c54a692 auth fix
All checks were successful
Build Docker Images / test (push) Successful in 1m39s
Build Docker Images / build-and-push (push) Successful in 1m42s
2025-11-25 23:30:18 +01:00
bb41926a31 Web Auth Fix
All checks were successful
Build Docker Images / test (push) Successful in 1m41s
Build Docker Images / build-and-push (push) Successful in 1m43s
2025-11-25 23:18:55 +01:00
0a5382081f R6 export fix
All checks were successful
Build Docker Images / test (push) Successful in 1m50s
Build Docker Images / build-and-push (push) Successful in 1m45s
2025-11-25 22:09:07 +01:00
3514c022d6 Enable R6 GSheet update
All checks were successful
Build Docker Images / test (push) Successful in 1m39s
Build Docker Images / build-and-push (push) Successful in 1m48s
2025-11-25 21:16:10 +01:00
3a9b8c7b27 Enable R6 GSheet
All checks were successful
Build Docker Images / test (push) Successful in 1m35s
Build Docker Images / build-and-push (push) Successful in 1m40s
2025-11-25 15:07:41 +01:00
d289fbb274 Enable R2 GSheet
All checks were successful
Build Docker Images / test (push) Successful in 1m37s
Build Docker Images / build-and-push (push) Successful in 1m37s
2025-11-25 14:46:21 +01:00
e83ab80dff R1 - enable GSheets update
All checks were successful
Build Docker Images / test (push) Successful in 1m35s
Build Docker Images / build-and-push (push) Successful in 1m44s
2025-11-25 14:35:08 +01:00
1a84179586 autoprocess fix
All checks were successful
Build Docker Images / test (push) Successful in 1m34s
Build Docker Images / build-and-push (push) Successful in 1m36s
2025-11-25 14:09:53 +01:00
0b665f238b Temporary disable GSheet update in processing
All checks were successful
Build Docker Images / test (push) Successful in 1m34s
Build Docker Images / build-and-push (push) Successful in 1m41s
2025-11-24 13:45:04 +01:00
6ed470a1b8 build fix
All checks were successful
Build Docker Images / test (push) Successful in 1m54s
Build Docker Images / build-and-push (push) Successful in 2m1s
2025-11-23 15:25:35 +01:00
31c3b59411 Update .gitea/workflows/buildContainers.yml
Some checks failed
Build Docker Images / test (push) Failing after 29s
Build Docker Images / build-and-push (push) Successful in 1m50s
2025-11-23 15:20:29 +01:00
8077826c46 WIP: iOS login
Some checks failed
Build Docker Images / test (push) Failing after 27s
Build Docker Images / build-and-push (push) Successful in 1m49s
2025-11-23 15:17:54 +01:00
951855074f Add MAUI project
Some checks failed
Build Docker Images / test (push) Failing after 30s
Build Docker Images / build-and-push (push) Successful in 1m51s
2025-11-23 15:01:15 +01:00
7558bb80e3 LayerDetails page
All checks were successful
Build Docker Images / test (push) Successful in 1m18s
Build Docker Images / build-and-push (push) Successful in 1m39s
2025-11-20 21:03:19 +01:00
090224b19f DB Schema FIX
All checks were successful
Build Docker Images / test (push) Successful in 1m21s
Build Docker Images / build-and-push (push) Successful in 1m45s
2025-11-20 20:34:04 +01:00
e6b4563712 Remove host checking in auto import
All checks were successful
Build Docker Images / test (push) Successful in 1m15s
Build Docker Images / build-and-push (push) Successful in 1m36s
2025-11-19 19:03:04 +01:00
2ecfc5bfe5 Products -> Layers
All checks were successful
Build Docker Images / test (push) Successful in 1m17s
Build Docker Images / build-and-push (push) Successful in 1m39s
2025-11-19 18:51:09 +01:00
3b03c52cd4 auth fix
All checks were successful
Build Docker Images / test (push) Successful in 1m16s
Build Docker Images / build-and-push (push) Successful in 1m36s
2025-11-19 18:30:55 +01:00
62cf71eaa8 google auth fix
All checks were successful
Build Docker Images / test (push) Successful in 1m13s
Build Docker Images / build-and-push (push) Successful in 1m40s
2025-11-19 17:53:02 +01:00
cfb0cdc2f7 and another
All checks were successful
Build Docker Images / test (push) Successful in 1m15s
Build Docker Images / build-and-push (push) Successful in 1m39s
2025-11-19 17:40:27 +01:00
66a9b975a5 add some debug info
All checks were successful
Build Docker Images / test (push) Successful in 1m14s
Build Docker Images / build-and-push (push) Successful in 1m37s
2025-11-19 17:30:36 +01:00
c0a1945465 and another
All checks were successful
Build Docker Images / test (push) Successful in 1m13s
Build Docker Images / build-and-push (push) Successful in 1m39s
2025-11-19 17:21:14 +01:00
5a5b70a5ed another fix
All checks were successful
Build Docker Images / test (push) Successful in 1m16s
Build Docker Images / build-and-push (push) Successful in 1m36s
2025-11-19 16:57:42 +01:00
8e3210f0e6 layers fix
All checks were successful
Build Docker Images / test (push) Successful in 1m16s
Build Docker Images / build-and-push (push) Successful in 1m40s
2025-11-19 16:42:02 +01:00
58e5ad7f5b LayersController fix
All checks were successful
Build Docker Images / test (push) Successful in 1m19s
Build Docker Images / build-and-push (push) Successful in 1m41s
2025-11-19 16:19:06 +01:00
f354874e3d Another web fix
All checks were successful
Build Docker Images / test (push) Successful in 1m17s
Build Docker Images / build-and-push (push) Successful in 1m41s
2025-11-19 13:49:26 +01:00
ec4b79bbc6 WEB fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m15s
Build Docker Images / build-and-push (push) Successful in 1m39s
2025-11-19 13:11:07 +01:00
ae5db26479 Update build
All checks were successful
Build Docker Images / test (push) Successful in 1m14s
Build Docker Images / build-and-push (push) Successful in 1m39s
2025-11-19 12:39:06 +01:00
f30a8a74ff .NET 10 and few minor things
Some checks failed
Build Docker Images / test (push) Failing after 28s
Build Docker Images / build-and-push (push) Successful in 1m44s
2025-11-19 12:33:37 +01:00
c6a777c245 New lines fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m20s
Build Docker Images / build-and-push (push) Successful in 1m38s
2025-11-18 20:38:35 +01:00
Michał Zieliński
a66e2a86da change endpoints path
All checks were successful
Build Docker Images / test (push) Successful in 1m18s
Build Docker Images / build-and-push (push) Successful in 1m25s
2025-11-13 11:15:32 +01:00
Michał Zieliński
7cd69da0bd Update API port
All checks were successful
Build Docker Images / test (push) Successful in 1m24s
Build Docker Images / build-and-push (push) Successful in 1m35s
2025-11-12 15:53:11 +01:00
Michał Zieliński
29aae21f97 Migrations fix
All checks were successful
Build Docker Images / test (push) Successful in 1m18s
Build Docker Images / build-and-push (push) Successful in 1m31s
2025-11-12 14:09:47 +01:00
Michał Zieliński
eeae4b55a4 Migrations fix
All checks were successful
Build Docker Images / test (push) Successful in 1m29s
Build Docker Images / build-and-push (push) Successful in 1m40s
2025-11-12 13:08:28 +01:00
Michał Zieliński
5a896f46b7 Auto apply migrations fix
All checks were successful
Build Docker Images / test (push) Successful in 1m21s
Build Docker Images / build-and-push (push) Successful in 1m35s
2025-11-12 12:55:19 +01:00
Michał Zieliński
5d40af5446 auto apply migrations
All checks were successful
Build Docker Images / test (push) Successful in 1m13s
Build Docker Images / build-and-push (push) Successful in 1m28s
2025-11-12 12:16:23 +01:00
Michał Zieliński
fd39b72596 Environments refactor
All checks were successful
Build Docker Images / test (push) Successful in 1m19s
Build Docker Images / build-and-push (push) Successful in 1m34s
2025-11-12 11:59:11 +01:00
Michał Zieliński
f7b9009215 App login is working
All checks were successful
Build Docker Images / test (push) Successful in 1m37s
Build Docker Images / build-and-push (push) Successful in 1m52s
2025-11-09 19:39:52 +01:00
Michał Zieliński
95438efcbd wip build
All checks were successful
Build Docker Images / test (push) Successful in 1m11s
Build Docker Images / build-and-push (push) Successful in 1m26s
2025-11-06 12:49:48 +01:00
Michał Zieliński
337eab3b41 build containers
Some checks failed
Build Docker Images / test (push) Successful in 1m16s
Build Docker Images / build-and-push (push) Failing after 11s
2025-11-06 12:14:05 +01:00
Michał Zieliński
7f04cab0d9 WIP: frontend refactor 2025-11-06 10:20:00 +01:00
Michał Zieliński
5bee3912f1 App refactor done 2025-11-05 20:50:25 +01:00
Michał Zieliński
b65ea7d17f new month imports 2025-10-31 12:48:45 +01:00
Michał Zieliński
fa3e380597 Add CORS for new domain
All checks were successful
BuildApp / build-frontend (push) Successful in 1m5s
BuildApp / build-backend (push) Successful in 1m34s
2025-10-29 17:56:10 +01:00
Michał Zieliński
071396f126 Import D1 fix
All checks were successful
BuildApp / build-frontend (push) Successful in 1m4s
BuildApp / build-backend (push) Successful in 1m31s
2025-10-28 19:00:05 +01:00
Michał Zieliński
66b650119b serilog test
All checks were successful
BuildApp / build-frontend (push) Successful in 1m1s
BuildApp / build-backend (push) Successful in 1m27s
2025-10-21 10:49:24 +02:00
Michał Zieliński
5d33a5a9a5 sql scripts
All checks were successful
BuildApp / build-frontend (push) Successful in 2m53s
BuildApp / build-backend (push) Successful in 1m49s
2025-09-29 21:32:34 +02:00
Michał Zieliński
642850306a Gitea runners are ready
All checks were successful
BuildApp / build-frontend (push) Successful in 58s
BuildApp / build-backend (push) Successful in 1m32s
2025-09-18 13:07:28 +02:00
Michał Zieliński
a3f00f8c47 now! 2025-09-18 12:54:54 +02:00
Michał Zieliński
1d59aec5a9 Maybe now.. 2025-09-18 12:46:55 +02:00
Michał Zieliński
493cd748dc Release is ready? 2025-09-18 12:42:22 +02:00
Michał Zieliński
6feb20c85e r 2025-09-18 12:35:25 +02:00
Michał Zieliński
504a331f9a release 2025-09-18 12:30:38 +02:00
Michał Zieliński
9981b1c21b release 2025-09-18 12:23:55 +02:00
Michał Zieliński
dcbbdc287a release 2025-09-18 12:11:14 +02:00
Michał Zieliński
a4249728be release 2025-09-18 12:05:10 +02:00
Michał Zieliński
b0e779947c release 2025-09-18 11:43:03 +02:00
Michał Zieliński
687f5b2be3 wip release 2025-09-18 08:58:49 +02:00
Michał Zieliński
e2e0e753f2 remove unused variables 2025-09-18 08:48:52 +02:00
Michał Zieliński
b73a037605 release 2025-09-18 08:42:38 +02:00
Michał Zieliński
21cb4c44e8 release 2025-09-18 08:41:42 +02:00
Michał Zieliński
ec8c8aef35 release 2025-09-18 08:39:36 +02:00
Michał Zieliński
fc1325de58 rrr 2025-09-18 08:25:45 +02:00
Michał Zieliński
8fcde802d7 release 2025-09-18 08:24:41 +02:00
Michał Zieliński
f8bd0afb45 wip release 2025-09-18 08:22:37 +02:00
Michał Zieliński
728ae32058 remove debug key step 2025-09-15 21:23:50 +02:00
Michał Zieliński
df2fa3319b wip: release 2025-09-15 21:16:34 +02:00
Michał Zieliński
142f2a28e7 wip: release 2025-09-15 21:15:33 +02:00
370 changed files with 13855 additions and 14725 deletions


@@ -0,0 +1,3 @@
Read the project context file at `.claude/project-context.md` to quickly understand the DiunaBI project structure, architecture, key components, and recent development focus. This will bootstrap your knowledge without needing to explore the entire codebase.
After reading the context file, briefly acknowledge what you've learned and ask the user what they need help with.


@@ -0,0 +1,27 @@
Update the `.claude/project-context.md` file by ONLY appending changes made during THIS session to the "RECENT CHANGES (This Session)" section at the top of the file.
**DO NOT re-scan or re-explore the entire codebase** - this wastes tokens and time.
**What to do:**
1. Review the conversation history to identify what was changed/added/fixed in THIS session
2. Read the current `.claude/project-context.md` file
3. Update ONLY the "RECENT CHANGES (This Session)" section at the top with:
- Date of changes (today's date)
- Brief bullet points describing what was modified
- Files that were changed with brief descriptions
- Any new functionality added
- Bug fixes completed
4. Leave the rest of the file unchanged
**Format for session changes:**
```markdown
## RECENT CHANGES (This Session)
**[Feature/Fix Name] ([Date]):**
- ✅ Brief description of change 1
- ✅ Brief description of change 2
- Files modified: [file1.cs](path/to/file1.cs), [file2.cs](path/to/file2.cs)
```
When done, provide a brief summary of what session changes were documented.

.claude/project-context.md (new file, 819 lines)

@@ -0,0 +1,819 @@
# DiunaBI Project Context
> This file is auto-generated for Claude Code to quickly understand the project structure.
> Last updated: 2025-12-08
## RECENT CHANGES (This Session)
**SignalR Real-Time Updates & UI Consistency (Dec 8, 2025):**
- ✅ **Removed Manual Refresh Button** - Removed the refresh button from Jobs/Index.razor (SignalR auto-refresh eliminates the need)
- ✅ **SignalR on Layers List** - Added real-time updates to Layers/Index with EntityChangeHubService subscription
- ✅ **SignalR on DataInbox List** - Added real-time updates to DataInbox/Index with EntityChangeHubService subscription
- ✅ **SignalR on Layer Details** - Added real-time updates to Layers/Details for both layer and record changes
- ✅ **Consistent UI Behavior** - All lists now have uniform SignalR-based real-time updates
- ✅ **Proper Cleanup** - Implemented the IDisposable pattern to unsubscribe from SignalR events on all pages (see the sketch after this list)
- ✅ **Jobs Sorting Fix** - Changed sorting from Priority → JobType → CreatedAt DESC to CreatedAt DESC → Priority ASC (newest jobs first, then by priority)
- ✅ **Faster Job Processing** - Reduced the JobWorkerService poll interval from 10 seconds to 5 seconds
- Files modified:
- [Jobs/Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor) - removed refresh button
- [Layers/Index.razor](DiunaBI.UI.Shared/Pages/Layers/Index.razor), [Layers/Index.razor.cs](DiunaBI.UI.Shared/Pages/Layers/Index.razor.cs) - added SignalR + IDisposable
- [DataInbox/Index.razor](DiunaBI.UI.Shared/Pages/DataInbox/Index.razor), [DataInbox/Index.razor.cs](DiunaBI.UI.Shared/Pages/DataInbox/Index.razor.cs) - added SignalR + IDisposable
- [Layers/Details.razor](DiunaBI.UI.Shared/Pages/Layers/Details.razor), [Layers/Details.razor.cs](DiunaBI.UI.Shared/Pages/Layers/Details.razor.cs) - added SignalR + IDisposable
- [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs) - fixed sorting logic
- [JobWorkerService.cs](DiunaBI.Infrastructure/Services/JobWorkerService.cs) - reduced poll interval to 5 seconds
- Status: All lists have consistent real-time behavior, no manual refresh needed, jobs sorted by date first
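A minimal sketch of the subscribe/unsubscribe pattern now shared by these pages, assuming `EntityChangeHubService` exposes an `EntityChanged` event matching the `(module, id, operation)` broadcast described later in this file; `LoadDataAsync()` is a hypothetical stand-in for each page's existing reload method:
```csharp
using Microsoft.AspNetCore.Components;

public partial class Index : ComponentBase, IDisposable
{
    [Inject] private EntityChangeHubService HubService { get; set; } = default!;

    protected override void OnInitialized()
    {
        // Subscribe once; the server pushes changes, so no manual refresh button is needed.
        HubService.EntityChanged += OnEntityChanged;
    }

    private async void OnEntityChanged(string module, string id, string operation)
    {
        if (module != "Layers") return;        // react only to this page's module
        await LoadDataAsync();                 // hypothetical helper: re-query the API
        await InvokeAsync(StateHasChanged);    // marshal the re-render onto the UI context
    }

    public void Dispose()
    {
        // Unsubscribe so a disposed page stops handling hub events (prevents leaks).
        HubService.EntityChanged -= OnEntityChanged;
    }

    private Task LoadDataAsync() => Task.CompletedTask;   // placeholder for the real reload
}
```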
---
**Job Scheduler Race Condition Fix (Dec 8, 2025):**
- ✅ **In-Memory Deduplication** - Added a `HashSet<Guid>` to track LayerIds scheduled within the same batch (sketch below)
- ✅ **Prevents Duplicate Jobs** - Fixed a race condition where the same layer could be scheduled multiple times during a single "Run All Jobs" operation
- ✅ **Two-Level Protection** - The in-memory check (HashSet) runs before the database check for O(1) performance
- ✅ **Applied to Both Methods** - Fixed both ScheduleImportJobsAsync and ScheduleProcessJobsAsync
- ✅ **Better Logging** - Added the debug log message "Job already scheduled in this batch" for transparency
- Root cause: when multiple layers shared an ID in the query results, or import plugins created new layers during the scheduling loop, the database check could not detect duplicates added in the same batch before SaveChangesAsync()
- Solution: track scheduled LayerIds in a HashSet during loop iteration to prevent within-batch duplicates
- Status: Race condition resolved, duplicate job creation prevented
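A minimal sketch of the two-level guard; the candidate list, logger, and status filter are illustrative assumptions, not the actual scheduler code:
```csharp
var scheduledInBatch = new HashSet<Guid>();

foreach (var layer in layersToSchedule)   // hypothetical list of candidate layers
{
    // Level 1: O(1) in-memory check. Catches duplicates produced within this batch,
    // which the database query below cannot see before SaveChangesAsync() persists them.
    if (!scheduledInBatch.Add(layer.Id))
    {
        logger.LogDebug("Job already scheduled in this batch for layer {LayerId}", layer.Id);
        continue;
    }

    // Level 2: database check for jobs persisted by earlier scheduling runs.
    bool alreadyQueued = await db.QueueJobs.AnyAsync(j =>
        j.LayerId == layer.Id &&
        (j.Status == JobStatus.Pending || j.Status == JobStatus.Running));
    if (alreadyQueued) continue;

    db.QueueJobs.Add(new QueueJob { LayerId = layer.Id, LayerName = layer.Name });
}

await db.SaveChangesAsync();   // duplicates were filtered before this single save
```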
---
**Blazor Server Reconnection UI Customization (Dec 8, 2025):**
- ✅ **Custom Reconnection Modal** - Replaced the default Blazor "Rejoin failed..." dialog with a custom-styled modal
- ✅ **Theme-Matched Styling** - Changed loader and button colors from blue to the app's primary red (#e7163d), matching the navbar
- ✅ **Timer with Elapsed Seconds** - Added a real-time timer showing elapsed reconnection time (0s, 1s, 2s...)
- ✅ **CSS Classes Integration** - Used Blazor's built-in `.components-reconnect-show/failed/rejected` classes for state management
- ✅ **MutationObserver Timer** - JavaScript watches for CSS class changes to start/stop the elapsed-time counter
- ✅ **Professional Design** - Modal backdrop blur, spinner animation, red reload button with hover effects
- Files modified: [App.razor](DiunaBI.UI.Web/Components/App.razor), [app.css](DiunaBI.UI.Web/wwwroot/app.css)
- Files created: [reconnect.js](DiunaBI.UI.Web/wwwroot/js/reconnect.js)
- Status: Blazor reconnection UI now matches app theme with timer indicator
**Jobs List Sorting and Multi-Select Filtering (Dec 8, 2025):**
- ✅ **Fixed Job Sorting** - Changed from single CreatedAt DESC to Priority ASC → JobType → CreatedAt DESC
- ✅ **Multi-Select Status Filter** - Replaced the single status dropdown with a multi-select supporting multiple JobStatus values (sketch below)
- ✅ **Auto-Refresh on Filter Change** - Filters now automatically trigger a data reload without requiring a manual button click
- ✅ **API Updates** - The JobsController GetAll endpoint accepts `List<JobStatus>? statuses` instead of a single status
- ✅ **JobService Updates** - Sends status values as integers in the query string for multi-select support
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [JobService.cs](DiunaBI.UI.Shared/Services/JobService.cs), [Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor), [Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs)
- Status: Jobs list now sortable by priority/type/date with working multi-select filters
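A sketch of what the updated endpoint could look like; `[FromQuery] List<JobStatus>?` binds repeated query-string values (`?statuses=0&statuses=2`), which is how JobService sends the integers. The query shape and response wrapping are assumptions:
```csharp
[HttpGet]
public async Task<IActionResult> GetAll(
    [FromQuery] List<JobStatus>? statuses,
    [FromQuery] int page = 1,
    [FromQuery] int pageSize = 50)
{
    var query = db.QueueJobs.AsQueryable();

    // Apply the filter only when at least one status was selected.
    if (statuses is { Count: > 0 })
        query = query.Where(j => statuses.Contains(j.Status));

    var jobs = await query
        .OrderBy(j => j.Priority)              // ordering from this entry
        .ThenBy(j => j.JobType)                // (revised again later; see above)
        .ThenByDescending(j => j.CreatedAt)
        .Skip((page - 1) * pageSize)
        .Take(pageSize)
        .ToListAsync();

    return Ok(new { items = jobs, page, pageSize });   // simplified response shape
}
```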
**User Timezone Support (Dec 8, 2025):**
- ✅ **DateTimeHelper Service** - Created a JS Interop service to detect the user's browser timezone (sketch below)
- ✅ **UTC to Local Conversion** - All date displays now show the user's local timezone instead of UTC
- ✅ **Database Consistency** - The database continues to store UTC (correct); conversion is for display only
- ✅ **Updated Pages** - Applied timezone conversion to all date fields in:
  - Jobs Index and Details pages
  - Layers Details page (CreatedAt, ModifiedAt, record history)
  - DataInbox Index page
- ✅ **Service Registration** - Registered DateTimeHelper as a scoped service in the DI container
- Files created: [DateTimeHelper.cs](DiunaBI.UI.Shared/Services/DateTimeHelper.cs)
- Files modified: [ServiceCollectionExtensions.cs](DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs), [Jobs/Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs), [Jobs/Details.razor](DiunaBI.UI.Shared/Pages/Jobs/Details.razor), [Layers/Details.razor](DiunaBI.UI.Shared/Pages/Layers/Details.razor), [Layers/Details.razor.cs](DiunaBI.UI.Shared/Pages/Layers/Details.razor.cs), [DataInbox/Index.razor.cs](DiunaBI.UI.Shared/Pages/DataInbox/Index.razor.cs)
- Status: All dates display in user's local timezone with format "yyyy-MM-dd HH:mm:ss"
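A minimal sketch of such a helper. It assumes a tiny JS function (hypothetical name) is registered to expose the browser's IANA timezone; on modern .NET with ICU, `TimeZoneInfo.FindSystemTimeZoneById` accepts IANA ids directly:
```csharp
// Assumes this hypothetical helper exists in a loaded .js file:
//   window.getBrowserTimeZone = () => Intl.DateTimeFormat().resolvedOptions().timeZone;
using Microsoft.JSInterop;

public sealed class DateTimeHelper(IJSRuntime js)
{
    private string? _timeZoneId;   // cached per scoped instance (one per user session)

    public async Task<string> ToLocalStringAsync(DateTime utc)
    {
        _timeZoneId ??= await js.InvokeAsync<string>("getBrowserTimeZone");
        var tz = TimeZoneInfo.FindSystemTimeZoneById(_timeZoneId);
        var local = TimeZoneInfo.ConvertTimeFromUtc(
            DateTime.SpecifyKind(utc, DateTimeKind.Utc), tz);   // DB stays UTC; display only
        return local.ToString("yyyy-MM-dd HH:mm:ss");           // display format used by the app
    }
}
```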
**QueueJob Model Cleanup and AutoImport User (Dec 8, 2025):**
- ✅ **Removed Duplicate Fields** - Removed CreatedAtUtc and ModifiedAtUtc from QueueJob (were duplicates of CreatedAt/ModifiedAt)
- ✅ **Added ModifiedAt Field** - Was missing; now tracks the job modification timestamp
- ✅ **AutoImport User ID** - Created the User.AutoImportUserId constant: `f392209e-123e-4651-a5a4-0b1d6cf9ff9d`
- ✅ **System Operations** - All system-created/modified jobs now use AutoImportUserId for CreatedById and ModifiedById
- ✅ **Database Migration** - Created migration: RemoveQueueJobDuplicateUTCFields
- Files modified: [QueueJob.cs](DiunaBI.Domain/Entities/QueueJob.cs), [User.cs](DiunaBI.Domain/Entities/User.cs), [JobWorkerService.cs](DiunaBI.Infrastructure/Services/JobWorkerService.cs), [JobSchedulerService.cs](DiunaBI.Infrastructure/Services/JobSchedulerService.cs), [AppDbContext.cs](DiunaBI.Infrastructure/Data/AppDbContext.cs), [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs)
- Files created: [20251208205202_RemoveQueueJobDuplicateUTCFields.cs](DiunaBI.Infrastructure/Migrations/20251208205202_RemoveQueueJobDuplicateUTCFields.cs)
- Status: QueueJob model cleaned up, all automated operations tracked with AutoImport user ID
**Job Scheduling UI with JWT Authorization (Dec 8, 2025):**
- ✅ **New JWT Endpoints** - Created UI-specific endpoints at `/jobs/ui/schedule/*` with JWT authorization (parallel to the API key endpoints)
- ✅ **Three Scheduling Options** - MudMenu dropdown in Jobs Index with:
  - Run All Jobs - schedules all import and process jobs
  - Run All Imports - schedules import jobs only
  - Run All Processes - schedules process jobs only
- ✅ **JobService Methods** - Added three scheduling methods returning (success, jobsCreated, message) tuples (see the sketch after this list)
- ✅ **Auto-Refresh** - The jobs list automatically reloads after scheduling, with success/failure notifications
- ✅ **Dual Authorization** - Existing `/jobs/schedule/*` endpoints (X-API-Key) remain for automation; the new `/jobs/ui/schedule/*` endpoints serve UI users
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [JobService.cs](DiunaBI.UI.Shared/Services/JobService.cs), [Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor), [Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs)
- Status: UI users can now schedule jobs directly from Jobs page using JWT authentication
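A sketch of one JobService method returning the tuple shape named above. The endpoint path comes from this entry; the response body shape and the `http` field are assumptions:
```csharp
// Requires System.Net.Http.Json for ReadFromJsonAsync.
public async Task<(bool Success, int JobsCreated, string Message)> ScheduleAllJobsAsync()
{
    var response = await http.PostAsync("jobs/ui/schedule", content: null);   // JWT attached by handler
    if (!response.IsSuccessStatusCode)
        return (false, 0, $"Scheduling failed with HTTP {(int)response.StatusCode}");

    // Hypothetical response body: { "jobsCreated": 7, "message": "..." }
    var body = await response.Content.ReadFromJsonAsync<ScheduleResult>();
    return (true, body?.JobsCreated ?? 0, body?.Message ?? "Jobs scheduled");
}

private sealed record ScheduleResult(int JobsCreated, string? Message);
```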
---
**API Key Authorization Fix for Cron Jobs (Dec 6, 2025):**
- ✅ **Fixed 401 Unauthorized on API Key Endpoints** - Cron jobs calling the `/jobs/schedule` endpoints were rejected despite valid API keys
- ✅ **Added [AllowAnonymous] Attribute** - Bypasses the controller-level `[Authorize]` so the `[ApiKeyAuth]` filter can handle authorization (sketch below)
- ✅ **Three Endpoints Fixed** - Applied the fix to all job scheduling endpoints:
  - `POST /jobs/schedule` - Schedule all jobs (imports + processes)
  - `POST /jobs/schedule/imports` - Schedule import jobs only
  - `POST /jobs/schedule/processes` - Schedule process jobs only
- Root cause: the controller-level `[Authorize]` attribute required JWT Bearer auth for all endpoints, blocking API key authentication
- Solution: add `[AllowAnonymous]` to let the `[ApiKeyAuth]` filter validate the X-API-Key header
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs)
- Status: Cron jobs can now authenticate with API key via X-API-Key header
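A sketch of the attribute layout after the fix (endpoint body elided; route names from this file):
```csharp
[ApiController]
[Route("jobs")]
[Authorize]                      // controller default: JWT Bearer for everything below
public class JobsController : ControllerBase
{
    [HttpPost("schedule")]
    [AllowAnonymous]             // suppress the JWT requirement for this endpoint only
    [ApiKeyAuth]                 // header-based X-API-Key check runs instead
    public IActionResult ScheduleAllJobs()
    {
        // ... create jobs via JobSchedulerService
        return Ok();
    }
}
```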
**SignalR Authentication Token Flow Fix (Dec 6, 2025):**
- ✅ **TokenProvider Population** - Fixed `TokenProvider.Token` never being set with the JWT, which caused 401 Unauthorized on SignalR connections
- ✅ **AuthService Token Management** - Injected `TokenProvider` into `AuthService` and set the token in 3 key places (sketch after this list):
  - `ValidateWithBackendAsync()` - on fresh Google login
  - `CheckAuthenticationAsync()` - on session restore from localStorage
  - `ClearAuthenticationAsync()` - clears the token on logout
- ✅ **SignalR Initialization Timing** - Moved SignalR initialization from `MainLayout.OnInitializedAsync` to after authentication completes
- ✅ **Event-Driven Architecture** - `MainLayout` now subscribes to the `AuthenticationStateChanged` event to initialize SignalR when the user authenticates
- ✅ **Session Restore Support** - `CheckAuthenticationAsync()` now fires the `AuthenticationStateChanged` event to initialize SignalR on page refresh
- Root cause: SignalR was initialized before authentication, so the JWT token was empty during connection setup
- Solution: initialize SignalR only after a token is available, via event subscription
- Files modified: [AuthService.cs](DiunaBI.UI.Shared/Services/AuthService.cs), [MainLayout.razor](DiunaBI.UI.Shared/Components/Layout/MainLayout.razor)
- Status: SignalR authentication working for both fresh login and restored sessions
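A hedged sketch of the fresh-login path from the first bullet; member names follow this entry, method bodies are assumptions:
```csharp
public async Task<bool> ValidateWithBackendAsync(string googleIdToken)
{
    var jwt = await ExchangeForJwtAsync(googleIdToken);   // hypothetical: POST /auth/apiToken
    if (jwt is null) return false;

    tokenProvider.Token = jwt;               // token is set BEFORE SignalR connects
    AuthenticationStateChanged?.Invoke();    // MainLayout reacts and initializes SignalR
    return true;
}
```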
**SignalR Authentication DI Fix (Dec 6, 2025):**
- ✅ **TokenProvider Registration** - Added the missing `TokenProvider` service registration in the DI container (sketch below)
- ✅ **EntityChangeHubService Scope Fix** - Changed from singleton to scoped to support user-specific JWT tokens
- ✅ **Bug Fix** - Resolved an `InvalidOperationException` that prevented the app from starting after SignalR authentication was added
- Root cause: a singleton service (`EntityChangeHubService`) cannot depend on a scoped service (`TokenProvider`) in DI
- Solution: made `EntityChangeHubService` scoped so each user session has its own authenticated SignalR connection
- Files modified: [ServiceCollectionExtensions.cs](DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs)
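A sketch of the corrected registrations, with lifetimes as described above:
```csharp
services.AddScoped<TokenProvider>();             // was missing from DI entirely
services.AddScoped<EntityChangeHubService>();    // was AddSingleton; a singleton cannot
                                                 // consume the scoped TokenProvider
```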
---
**Security Audit & Hardening (Dec 5, 2025):**
- ✅ **JWT Token Validation** - Enabled issuer/audience validation in [Program.cs](DiunaBI.API/Program.cs); fixed a config key mismatch in [JwtTokenService.cs](DiunaBI.API/Services/JwtTokenService.cs)
- ✅ **API Key Security** - Created [ApiKeyAuthAttribute.cs](DiunaBI.API/Attributes/ApiKeyAuthAttribute.cs) with X-API-Key header auth and constant-time comparison (sketch after this list)
- ✅ **Job Endpoints** - Migrated 3 job scheduling endpoints in [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs) from URL-based to header-based API keys
- ✅ **Stack Trace Exposure** - Fixed 20 instances across 3 controllers ([JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [LayersController.cs](DiunaBI.API/Controllers/LayersController.cs), [DataInboxController.cs](DiunaBI.API/Controllers/DataInboxController.cs)); these now return generic error messages
- ✅ **SignalR Authentication** - Added [Authorize] to [EntityChangeHub.cs](DiunaBI.API/Hubs/EntityChangeHub.cs); configured the JWT token in [EntityChangeHubService.cs](DiunaBI.UI.Shared/Services/EntityChangeHubService.cs)
- ✅ **Rate Limiting** - Implemented ASP.NET Core rate limiting in [Program.cs](DiunaBI.API/Program.cs): 100 req/min general, 10 req/min auth
- ✅ **Security Headers** - Added XSS, clickjacking, and MIME-sniffing protection middleware in [Program.cs](DiunaBI.API/Program.cs)
- ✅ **Input Validation** - Added pagination limits (1-1000) to the GetAll endpoints in 3 controllers
- ✅ **User Enumeration** - Switched to a generic auth error in [GoogleAuthService.cs](DiunaBI.API/Services/GoogleAuthService.cs)
- ✅ **Sensitive Data Logging** - Made conditional on the development environment only in [Program.cs](DiunaBI.API/Program.cs)
- ✅ **Base64 Size Limit** - Added a 10MB limit to DataInbox in [DataInboxController.cs](DiunaBI.API/Controllers/DataInboxController.cs)
- Files modified: 12 files (API: Program.cs, 4 controllers, 3 services, 1 hub, 1 new attribute; UI: EntityChangeHubService.cs, ServiceCollectionExtensions.cs)
- Security status: 5/5 CRITICAL fixed, 3/3 HIGH fixed, 4/4 MEDIUM fixed
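A sketch of an X-API-Key filter with constant-time comparison, as described in the API Key Security bullet; the configuration key is an assumption:
```csharp
// Usings: Microsoft.AspNetCore.Mvc, Microsoft.AspNetCore.Mvc.Filters,
// Microsoft.Extensions.DependencyInjection, System.Security.Cryptography, System.Text.
public sealed class ApiKeyAuthAttribute : Attribute, IAuthorizationFilter
{
    public void OnAuthorization(AuthorizationFilterContext context)
    {
        var config = context.HttpContext.RequestServices.GetRequiredService<IConfiguration>();
        var expected = config["Security:JobsApiKey"];   // hypothetical config key

        var provided = context.HttpContext.Request.Headers["X-API-Key"].ToString();

        // FixedTimeEquals avoids early-exit timing leaks when bytes mismatch.
        bool valid = !string.IsNullOrEmpty(expected) && provided.Length > 0 &&
            CryptographicOperations.FixedTimeEquals(
                Encoding.UTF8.GetBytes(expected),
                Encoding.UTF8.GetBytes(provided));

        if (!valid)
            context.Result = new UnauthorizedResult();
    }
}
```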
**Seq Removal - Logging Cleanup (Dec 5, 2025):**
- ✅ Removed Seq logging sink to eliminate commercial licensing concerns
- ✅ Removed `Serilog.Sinks.Seq` NuGet package from DiunaBI.API.csproj
- ✅ Removed Seq sink configuration from appsettings.Development.json
- ✅ Kept Serilog (free, open-source) with Console + File sinks for production-ready logging
- ✅ Build verified - no errors after Seq removal
- Files modified: [DiunaBI.API.csproj](DiunaBI.API/DiunaBI.API.csproj), [appsettings.Development.json](DiunaBI.API/appsettings.Development.json)
- Manual step required: Remove `seq` service from docker-compose.yml and add Docker log rotation config
**UI Reorganization (Dec 5, 2025):**
- ✅ Moved pages to feature-based folders: `Pages/Layers/`, `Pages/Jobs/`, `Pages/DataInbox/`
- ✅ Organized components: `Components/Layout/` (MainLayout, EmptyLayout, Routes), `Components/Auth/` (AuthGuard, LoginCard)
- ✅ Removed obsolete wrapper files (LayerListPage, JobListPage, DataInboxListPage, etc.)
- ✅ Removed duplicate component files (LayerListComponent, JobListComponent, DataInboxListComponent)
- ✅ Standardized code-behind: `.razor.cs` for complex logic, inline `@code` for simple pages
- ✅ Updated `_Imports.razor` with new namespaces: `DiunaBI.UI.Shared.Components.Layout`, `DiunaBI.UI.Shared.Components.Auth`
- ✅ All routes unchanged - backward compatible
---
## PROJECT TYPE & TECH STACK
**Application Type:** Full-stack Business Intelligence (BI) platform with multi-tier architecture, real-time capabilities, and plugin system
**Core Stack:**
- Backend: ASP.NET Core 10.0 Web API
- Frontend: Blazor Server + MAUI Mobile
- Database: SQL Server + EF Core 10.0
- UI: MudBlazor 8.0
- Real-time: SignalR (EntityChangeHub)
- Google: Sheets API, Drive API, OAuth
- Logging: Serilog (Console, File)
- Auth: JWT Bearer + Google OAuth
---
## SOLUTION STRUCTURE (10 Projects)
```
DiunaBI.API (Web API)
├── Controllers: Auth, Layers, Jobs, DataInbox
├── Hubs: EntityChangeHub (SignalR real-time updates)
└── Services: GoogleAuth, JwtToken
DiunaBI.Domain (Entities)
└── User, Layer, Record, RecordHistory, QueueJob, DataInbox, ProcessSource
DiunaBI.Application (DTOs)
└── LayerDto, RecordDto, UserDto, RecordHistoryDto, PagedResult, JobDto
DiunaBI.Infrastructure (Data + Services)
├── Data: AppDbContext, Migrations (47 total)
├── Interceptors: EntityChangeInterceptor (auto-broadcasts DB changes)
├── Services: PluginManager, JobScheduler, JobWorker, GoogleSheets/Drive
├── Plugins: BaseDataImporter, BaseDataProcessor, BaseDataExporter
└── Interfaces: IPlugin, IDataProcessor, IDataImporter, IDataExporter
DiunaBI.UI.Web (Blazor Server)
└── Server-side Blazor web application
DiunaBI.UI.Mobile (MAUI)
└── iOS, Android, Windows, macOS support
DiunaBI.UI.Shared (Blazor Component Library - Reorganized)
├── Pages/
│ ├── Layers/ (Index.razor, Details.razor)
│ ├── Jobs/ (Index.razor, Details.razor)
│ ├── DataInbox/ (Index.razor, Details.razor)
│ ├── Dashboard.razor, Login.razor, Index.razor
├── Components/
│ ├── Layout/ (MainLayout, EmptyLayout, Routes)
│ └── Auth/ (AuthGuard, LoginCard)
└── Services/
├── LayerService, JobService, DataInboxService
├── EntityChangeHubService (SignalR client)
├── FilterStateServices (remember filters)
└── AuthService, TokenProvider
DiunaBI.Plugins.Morska (Feature Plugin)
├── Importers: Standard, D1, D3, FK2 (4 total)
├── Processors: D6, T1, T3, T4, T5 variants (12 total)
└── Exporters: Google Sheets export (1)
DiunaBI.Plugins.PedrolloPL (Feature Plugin - NEW)
└── Importers: B3 (1 total)
DiunaBI.Tests (Testing)
└── Unit and integration tests
```
---
## CORE FUNCTIONALITY
**Purpose:** BI platform for data import, processing, transformation via modular plugin architecture. Multi-layer workflows with audit trails, real-time notifications, scheduled job processing.
**Main Features:**
1. **Layer Management** - 4 types (Import/Processed/Admin/Dictionary), parent-child relationships, soft deletes
2. **Data Records** - 32 numeric columns (Value1-32) + description, hierarchical, full audit trail
3. **Plugin Architecture** - Dynamic assembly loading, base classes in Infrastructure, 3 types (Importers/Processors/Exporters)
4. **Job Queue System** - Background worker with retry logic (30s → 2m → 5m), priority-based, auto-scheduling
5. **External Data** - DataInbox API, Google Sheets read/write, Google Drive integration
6. **Real-time Updates** - SignalR broadcasts entity changes (create/update/delete) to all connected clients
7. **Audit Trail** - RecordHistory tracks all record changes with field-level diffs and JSON summaries
8. **Filter Persistence** - UI filter states saved across sessions (LayerFilterStateService, DataInboxFilterStateService)
---
## KEY ENTITIES
**Layer**
- Id, Number, Name, Type (Import/Processed/Administration/Dictionary)
- CreatedAt/ModifiedAt, CreatedBy/ModifiedBy (with user relations)
- IsDeleted (soft delete), IsCancelled (processing control), ParentId
- Relations: Records (1-to-many), ProcessSources (1-to-many)
**Record**
- Id, Code (unique identifier), LayerId
- Value1-Value32 (double?), Desc1 (string, max 10000 chars)
- CreatedAt/ModifiedAt, CreatedBy/ModifiedBy, IsDeleted
- Audit: Full history tracked in RecordHistory table
**RecordHistory** (NEW - Migration 47)
- RecordId, LayerId, ChangedAt, ChangedById
- ChangeType (Created/Updated/Deleted)
- Code, Desc1 (snapshot at time of change)
- ChangedFields (comma-separated field names)
- ChangesSummary (JSON with old/new values)
- Indexes: (RecordId, ChangedAt), (LayerId, ChangedAt) for performance
**QueueJob**
- LayerId, LayerName, PluginName
- JobType (Import/Process)
- Priority (0 = highest), Status (Pending/Running/Completed/Failed/Retrying)
- RetryCount, MaxRetries (default 5)
- CreatedAt, LastAttemptAt, CompletedAt
- LastError (detailed error message)
**DataInbox**
- Id, Name, Source (identifiers)
- Data (base64-encoded JSON array)
- CreatedAt
- Used by importers to stage incoming data
**User**
- Id (Guid), Email, UserName
- CreatedAt, LastLoginAt
- Google OAuth identity
**ProcessSource**
- Id, SourceLayerId, TargetLayerId
- Defines layer processing relationships
---
## API ENDPOINTS
**Base:** `/` (ApiController routes)
### AuthController (/auth)
- `POST /auth/apiToken` - Exchange Google ID token for JWT (AllowAnonymous)
- `POST /auth/refresh` - Refresh expired JWT token
### LayersController (/layers)
- `GET /layers?page=1&pageSize=10&search=&type=` - List layers (paged, filterable)
- `GET /layers/{id}` - Get layer details with records
- `POST /layers` - Create new layer
- `PUT /layers/{id}` - Update layer
- `DELETE /layers/{id}` - Soft delete layer
- `POST /layers/{id}/records` - Add/update records
- `PUT /layers/{layerId}/records/{recordId}` - Update specific record
- `DELETE /layers/{layerId}/records/{recordId}` - Delete record
- `GET /layers/{layerId}/records/{recordId}/history` - Get record history
- `GET /layers/{layerId}/deleted-records` - Get deleted records with history
### JobsController (/jobs) - NEW
- `GET /jobs?page=1&pageSize=50&status=&jobType=` - List jobs (paged, filterable)
- `GET /jobs/{id}` - Get job details
- `GET /jobs/stats` - Get job statistics (counts by status)
- `POST /jobs/schedule` - Schedule all jobs from layer configs (X-API-Key header; see the call example after this list)
- `POST /jobs/schedule/imports` - Schedule import jobs only (X-API-Key header)
- `POST /jobs/schedule/processes` - Schedule process jobs only (X-API-Key header)
- `POST /jobs/ui/schedule`, `/jobs/ui/schedule/imports`, `/jobs/ui/schedule/processes` - JWT-authorized UI scheduling endpoints (see Recent Changes)
- `POST /jobs/create-for-layer/{layerId}` - Create job for specific layer (manual trigger)
- `POST /jobs/{id}/retry` - Retry failed job (resets to Pending)
- `DELETE /jobs/{id}` - Cancel pending/retrying job
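For the API-key scheduling endpoints above, a hedged example of how an external cron caller might invoke them; the base URL and secret source are placeholders:
```csharp
using var http = new HttpClient { BaseAddress = new Uri("https://example.invalid/") };
http.DefaultRequestHeaders.Add("X-API-Key",
    Environment.GetEnvironmentVariable("DIUNABI_API_KEY"));   // placeholder secret source

var response = await http.PostAsync("jobs/schedule", content: null);
response.EnsureSuccessStatusCode();   // a 401 here would indicate a bad or missing key
```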
### DataInboxController (/datainbox)
- `GET /datainbox?page=1&pageSize=10&search=` - List inbox items (paged, filterable)
- `GET /datainbox/{id}` - Get inbox item with decoded data
- `POST /datainbox` - Create inbox item
- `PUT /datainbox/Add/{apiKey}` - Add data (API key + Basic Auth)
- `DELETE /datainbox/{id}` - Delete inbox item
### SignalR Hub
- `/hubs/entitychanges` - SignalR hub for real-time entity change notifications
- Event: `EntityChanged(module, id, operation)` - broadcasts to all clients
- Modules: QueueJobs, Layers, Records, RecordHistory
---
## AUTHENTICATION & SECURITY
**Flow:**
1. Client exchanges Google ID token → `/auth/apiToken`
2. GoogleAuthService validates token with Google, maps to internal User
3. Returns JWT (7-day expiration, HS256 signing)
4. JWT required on all protected endpoints (except /auth/apiToken, /health)
5. UserId extraction middleware sets X-UserId header for audit trails
**Security:**
- Google OAuth 2.0 for identity verification
- JWT Bearer tokens for API access
- API key + Basic Auth for DataInbox external endpoints
- CORS configured for:
- http://localhost:4200
- https://diuna.bim-it.pl
- https://morska.diunabi.com
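A sketch of the bearer-token validation this flow implies (issuer/audience checks enabled per the Dec 5 audit, HS256 signing); the `Jwt:*` configuration keys are assumptions:
```csharp
// Usings: Microsoft.AspNetCore.Authentication.JwtBearer, Microsoft.IdentityModel.Tokens, System.Text.
builder.Services
    .AddAuthentication(JwtBearerDefaults.AuthenticationScheme)
    .AddJwtBearer(options =>
    {
        options.TokenValidationParameters = new TokenValidationParameters
        {
            ValidateIssuer = true,                                    // enabled in the audit
            ValidateAudience = true,
            ValidIssuer = builder.Configuration["Jwt:Issuer"],        // assumed config keys
            ValidAudience = builder.Configuration["Jwt:Audience"],
            IssuerSigningKey = new SymmetricSecurityKey(              // HS256 secret
                Encoding.UTF8.GetBytes(builder.Configuration["Jwt:Key"]!)),
        };
    });
```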
---
## KEY SERVICES
### Infrastructure Services
**PluginManager**
- Location: `DiunaBI.Infrastructure/Services/PluginManager.cs`
- Loads plugin assemblies from `bin/Plugins/` directory at startup
- Registers IDataProcessor, IDataImporter, IDataExporter implementations
- Provides plugin discovery and execution
**JobSchedulerService**
- Location: `DiunaBI.Infrastructure/Services/JobSchedulerService.cs`
- Creates QueueJob entries from Administration layer configs
- Reads layer.Records with Code="Plugin", Code="Priority", Code="MaxRetries"
- Methods: ScheduleImportJobsAsync, ScheduleProcessJobsAsync, ScheduleAllJobsAsync
**JobWorkerService** (BackgroundService)
- Location: `DiunaBI.Infrastructure/Services/JobWorkerService.cs`
- Polls the QueueJobs table every 5 seconds (reduced from 10; see Recent Changes and the loop sketch below)
- Executes jobs via PluginManager (Import/Process)
- Retry logic with exponential backoff: 30s → 2m → 5m delays
- Rate limiting: 5-second delay after imports (Google Sheets API quota)
- Updates job status in real-time (triggers SignalR broadcasts)
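A simplified sketch of the worker loop and backoff schedule; the helper methods are hypothetical stand-ins for the real plugin execution and persistence code:
```csharp
protected override async Task ExecuteAsync(CancellationToken ct)
{
    // Backoff schedule from this section: 30s -> 2m -> 5m (then 5m for later attempts).
    TimeSpan[] backoff = { TimeSpan.FromSeconds(30), TimeSpan.FromMinutes(2), TimeSpan.FromMinutes(5) };

    while (!ct.IsCancellationRequested)
    {
        var job = await PickNextPendingJobAsync(ct);    // hypothetical: respects priority
        if (job is not null)
        {
            try
            {
                await ExecutePluginAsync(job, ct);      // hypothetical PluginManager call
                job.Status = JobStatus.Completed;
            }
            catch (Exception ex) when (job.RetryCount < job.MaxRetries)
            {
                job.Status = JobStatus.Retrying;
                job.LastError = ex.Message;
                var delay = backoff[Math.Min(job.RetryCount, backoff.Length - 1)];
                job.RetryCount++;
                // A real implementation defers the next pickup by `delay` (omitted here).
            }
            await SaveAndBroadcastAsync(job, ct);       // SaveChanges -> interceptor -> SignalR
        }
        await Task.Delay(TimeSpan.FromSeconds(5), ct);  // poll interval (see Recent Changes)
    }
}
```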
**EntityChangeInterceptor**
- Location: `DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- EF Core SaveChangesInterceptor
- Captures entity changes: Added, Modified, Deleted
- Broadcasts changes via SignalR EntityChangeHub after successful save
- Uses reflection to avoid circular dependencies with IHubContext (simplified sketch below)
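A simplified sketch of such an interceptor. The real implementation resolves the hub via reflection to break the circular dependency; this version takes `IHubContext` directly for readability and assumes every tracked entity has an `Id` property:
```csharp
// Usings: Microsoft.AspNetCore.SignalR, Microsoft.EntityFrameworkCore,
// Microsoft.EntityFrameworkCore.Diagnostics.
public sealed class EntityChangeInterceptor(IHubContext<EntityChangeHub> hub) : SaveChangesInterceptor
{
    private readonly List<(string Module, string Id, string Op)> _pending = new();

    public override InterceptionResult<int> SavingChanges(
        DbContextEventData eventData, InterceptionResult<int> result)
    {
        // Capture changes BEFORE the save; entity states reset afterwards.
        _pending.Clear();
        foreach (var entry in eventData.Context!.ChangeTracker.Entries()
                     .Where(e => e.State is EntityState.Added or EntityState.Modified or EntityState.Deleted))
        {
            _pending.Add((entry.Metadata.ClrType.Name,
                          entry.Property("Id").CurrentValue!.ToString()!,
                          entry.State.ToString()));
        }
        return result;
    }

    public override async ValueTask<int> SavedChangesAsync(
        SaveChangesCompletedEventData eventData, int result, CancellationToken ct = default)
    {
        // Broadcast only AFTER a successful save.
        foreach (var (module, id, op) in _pending)
            await hub.Clients.All.SendAsync("EntityChanged", module, id, op, ct);
        return result;
    }
}
```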
**GoogleSheetsHelper**
- Location: `DiunaBI.Infrastructure/Helpers/GoogleSheetsHelper.cs`
- Google Sheets API v4 integration
- Methods: ReadRange, WriteRange, CreateSpreadsheet, UpdateSpreadsheet
**GoogleDriveHelper**
- Location: `DiunaBI.Infrastructure/Helpers/GoogleDriveHelper.cs`
- Google Drive API v3 integration
- Methods: UploadFile, ListFiles, MoveFile
**GoogleAuthService / JwtTokenService**
- Authentication and token management
- JWT generation and validation
### UI Services
**EntityChangeHubService**
- Location: `DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
- Scoped service for the SignalR client connection (changed from singleton; see Recent Changes)
- Auto-reconnect enabled
- Event: `EntityChanged` - UI components subscribe for real-time updates
- Initialized after authentication completes via the `AuthenticationStateChanged` event (no longer in MainLayout.OnInitializedAsync)
**LayerService / JobService / DataInboxService**
- HTTP clients for API communication
- DTOs serialization/deserialization
- Paged result handling
**LayerFilterStateService / DataInboxFilterStateService**
- Persist filter state across navigation
- Singleton services remember search, type, page selections
---
## DATABASE SCHEMA
**Total Migrations:** 47
**Latest Migrations:**
**Migration 47: RecordHistory (Dec 1, 2025)**
- **NEW Table: RecordHistory**
- Tracks all record changes (Created, Updated, Deleted)
- Fields: Id, RecordId, LayerId, ChangedAt, ChangedById, ChangeType, Code, Desc1, ChangedFields, ChangesSummary
- Indexes: IX_RecordHistory_RecordId_ChangedAt, IX_RecordHistory_LayerId_ChangedAt
- Foreign key: RecordHistory.ChangedById → Users.Id
**Migration 46: FixLayerDefaultValues (Nov 20, 2025)**
- Set default value: Layers.IsDeleted = false
**Migration 45: UpdateModel (Nov 19, 2025)**
- Added GETUTCDATE() defaults for all timestamp fields
- Changed foreign key constraints from CASCADE to RESTRICT:
- Layers → Users (CreatedById, ModifiedById)
- Records → Users (CreatedById, ModifiedById)
- Added FK_ProcessSources_Layers_LayerId
**Core Tables:**
- Users (authentication, audit)
- Layers (4 types, soft deletes, parent-child)
- Records (32 Value fields + Desc1, audit, soft deletes)
- RecordHistory (change tracking, field diffs, JSON summaries)
- QueueJobs (job queue, retry logic, status tracking)
- DataInbox (incoming data staging, base64 encoded)
- ProcessSources (layer relationships)
---
## PLUGIN SYSTEM
### Base Classes (Infrastructure/Plugins/)
**BaseDataImporter** (`DiunaBI.Infrastructure/Plugins/BaseDataImporter.cs`)
- Abstract base for all importers (shape sketched at the end of this subsection)
- Methods: ImportAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, PluginManager, GoogleSheetsHelper, GoogleDriveHelper
**BaseDataProcessor** (`DiunaBI.Infrastructure/Plugins/BaseDataProcessor.cs`)
- Abstract base for all processors
- Methods: ProcessAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, PluginManager
**BaseDataExporter** (`DiunaBI.Infrastructure/Plugins/BaseDataExporter.cs`)
- Abstract base for all exporters
- Methods: ExportAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, GoogleSheetsHelper, GoogleDriveHelper
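A sketch of the importer base shape, with member names from this file and parameter types assumed:
```csharp
public abstract class BaseDataImporter : IDataImporter
{
    // Shared plumbing every importer receives (per the Access lists above).
    protected AppDbContext Db { get; init; } = default!;
    protected GoogleSheetsHelper Sheets { get; init; } = default!;
    protected GoogleDriveHelper Drive { get; init; } = default!;

    // Each plugin (e.g. PedrolloPLImportB3) implements the actual import.
    public abstract Task ImportAsync(Guid layerId, Guid jobId);

    // Lets the scheduler reject a misconfigured Administration layer early.
    public abstract bool ValidateConfiguration();
}
```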
### Morska Plugin (DiunaBI.Plugins.Morska)
**Importers (4):**
- MorskaStandardImporter - Generic CSV/Excel import
- MorskaD1Importer - D1 data format
- MorskaD3Importer - D3 data format
- MorskaFK2Importer - FK2 data format
**Processors (12):**
- MorskaD6Processor
- MorskaT1R1Processor
- MorskaT1R3Processor
- MorskaT3SingleSourceProcessor
- MorskaT3SourceYearSummaryProcessor
- MorskaT3MultiSourceSummaryProcessor
- MorskaT3MultiSourceYearSummaryProcessor
- MorskaT4R2Processor
- MorskaT4SingleSourceProcessor
- MorskaT5LastValuesProcessor
- MorskaT3MultiSourceCopySelectedCodesProcessor-TO_REMOVE (deprecated)
- MorskaT3MultiSourceCopySelectedCodesYearSummaryProcessor-TO_REMOVE (deprecated)
**Exporters (1):**
- googleSheet.export.cs - Google Sheets export
**Total:** ~6,566 lines of code
### PedrolloPL Plugin (DiunaBI.Plugins.PedrolloPL) - NEW
**Importers (1):**
- **PedrolloPLImportB3** (`DiunaBI.Plugins.PedrolloPL/Importers/PedrolloPLImportB3.cs`)
- Imports B3 data from DataInbox
- Uses L1-D-B3-CODES dictionary layer for region code mapping
- Creates 12 monthly records per region (Value1-Value12)
- Generates Import layers: L{Number}-I-B3-{Year}-{Timestamp}
- Handles base64 JSON data decoding
---
## UI STRUCTURE (DiunaBI.UI.Shared)
### Reorganized Structure (Dec 5, 2025)
**Pages/** (Routable pages with @page directive)
```
Pages/
├── Layers/
│ ├── Index.razor + Index.razor.cs - /layers (list with filters, pagination)
│ └── Details.razor + Details.razor.cs - /layers/{id} (detail, edit, history)
├── Jobs/
│ ├── Index.razor + Index.razor.cs - /jobs (list with filters, real-time updates)
│ └── Details.razor - /jobs/{id} (detail, retry, cancel, real-time)
├── DataInbox/
│ ├── Index.razor + Index.razor.cs - /datainbox (list with filters)
│ └── Details.razor + Details.razor.cs - /datainbox/{id} (detail, base64 decode)
├── Dashboard.razor - /dashboard (user info)
├── Login.razor - /login (Google OAuth)
└── Index.razor - / (redirects to /dashboard)
```
**Components/** (Reusable components, no routes)
```
Components/
├── Layout/
│ ├── MainLayout.razor - Main app layout with drawer, nav menu
│ ├── EmptyLayout.razor - Minimal layout for login page
│ └── Routes.razor - Router configuration
└── Auth/
├── AuthGuard.razor - Authentication guard wrapper
└── LoginCard.razor - Google login button component
```
**Navigation Menu:**
- Dashboard (/dashboard) - User profile
- Layers (/layers) - Layer management
- Data Inbox (/datainbox) - Incoming data review
- Jobs (/jobs) - Job queue monitoring (with real-time status updates)
**Code-Behind Pattern:**
- Complex pages (50+ lines logic): Separate `.razor.cs` files
- Simple pages: Inline `@code` blocks
- Namespaces: `DiunaBI.UI.Shared.Pages.{Feature}`
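In practice the pattern looks like this (a sketch; member names are illustrative):
```csharp
// Pages/Layers/Index.razor.cs - code-behind partial class for Pages/Layers/Index.razor
namespace DiunaBI.UI.Shared.Pages.Layers;

public partial class Index
{
    private bool _loading = true;

    protected override async Task OnInitializedAsync()
    {
        // load data, restore persisted filters, etc.
        await Task.Yield();
        _loading = false;
    }
}
```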
---
## REAL-TIME FEATURES (SignalR)
### Architecture
**Hub:** `DiunaBI.API/Hubs/EntityChangeHub.cs`
- Endpoint: `/hubs/entitychanges`
- Method: `SendEntityChange(string module, string id, string operation)`
- Broadcasts: `EntityChanged` event to all connected clients
**Interceptor:** `DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- EF Core SaveChangesInterceptor
- Detects: Added, Modified, Deleted entities
- Broadcasts: After successful SaveChanges
- Modules: QueueJobs, Layers, Records, RecordHistory
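A minimal sketch of the interceptor shape, assuming the hub and event name above (the real class also resolves module names and entity ids from the change tracker):
```csharp
using Microsoft.AspNetCore.SignalR;
using Microsoft.EntityFrameworkCore.Diagnostics;

// Pattern sketch only - not the verbatim EntityChangeInterceptor
public class EntityChangeInterceptorSketch : SaveChangesInterceptor
{
    private readonly IHubContext<EntityChangeHub> _hub;
    public EntityChangeInterceptorSketch(IHubContext<EntityChangeHub> hub) => _hub = hub;

    public override async ValueTask<int> SavedChangesAsync(
        SaveChangesCompletedEventData eventData, int result, CancellationToken ct = default)
    {
        // Added/Modified/Deleted entries would be captured in SavingChangesAsync;
        // the broadcast happens here, after SaveChanges succeeded.
        await _hub.Clients.All.SendAsync("EntityChanged", "QueueJobs", "<entity-id>", "Modified", ct);
        return await base.SavedChangesAsync(eventData, result, ct);
    }
}
```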
**UI Service:** `DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
- Singleton initialized in MainLayout
- Auto-reconnect enabled
- Components subscribe: `HubService.EntityChanged += OnEntityChanged`
### Real-time Update Flow
1. User action → API endpoint
2. DbContext.SaveChangesAsync()
3. EntityChangeInterceptor captures changes
4. SignalR broadcast to all clients: `EntityChanged(module, id, operation)`
5. UI components receive event and refresh data
6. StateHasChanged() updates UI
**Example:** Job status changes appear instantly on JobDetailPage and JobListPage
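On the component side, step 5 boils down to a subscription like this (a sketch; the delegate shape follows the hub method above, `LoadJobsAsync` is an assumed helper):
```csharp
// Blazor component code-behind sketch
protected override void OnInitialized()
{
    HubService.EntityChanged += OnEntityChanged;
}

private async void OnEntityChanged(string module, string id, string operation)
{
    if (module != "QueueJobs") return;   // this page only cares about job changes
    await LoadJobsAsync();               // assumed data-reload helper
    await InvokeAsync(StateHasChanged);  // re-render on the UI sync context
}

public void Dispose() => HubService.EntityChanged -= OnEntityChanged;
```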
---
## JOB QUEUE SYSTEM
### Components
**Entity:** `QueueJob` (DiunaBI.Domain/Entities/QueueJob.cs)
- JobType: Import, Process
- JobStatus: Pending, Running, Completed, Failed, Retrying
- Priority: lower values run first (0 = highest)
- Retry: 30s → 2m → 5m delays, max 5 attempts
**Scheduler:** `JobSchedulerService`
- Reads Administration layer configs (Type=ImportWorker/ProcessWorker)
- Auto-creates jobs based on layer.Records configuration
- API endpoints: `/jobs/schedule/{apiKey}`, `/jobs/schedule/imports/{apiKey}`, `/jobs/schedule/processes/{apiKey}`
**Worker:** `JobWorkerService` (BackgroundService)
- Polls every 10 seconds
- Executes via PluginManager
- Exponential backoff on failures
- Rate limiting for Google API quota
- Real-time status updates via SignalR
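The worker is a standard `BackgroundService` polling loop; stripped to its shape it looks roughly like this (queue pickup and plugin dispatch elided):
```csharp
using Microsoft.Extensions.Hosting;

// Loop sketch only - the real JobWorkerService adds retries, rate limiting and SignalR updates
public class JobWorkerSketch : BackgroundService
{
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            // take the next Pending job (lowest Priority value first, 0 = highest)
            // and execute it through the PluginManager
            await Task.Delay(TimeSpan.FromSeconds(10), stoppingToken); // 10-second poll interval
        }
    }
}
```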
**UI:** `Pages/Jobs/`
- Index.razor - Job list with filters, real-time updates
- Details.razor - Job detail with retry/cancel, real-time status
### Job Lifecycle
1. **Creation** - JobSchedulerService or manual via API
2. **Queued** - Status: Pending, sorted by Priority
3. **Execution** - JobWorkerService picks up, Status: Running
4. **Completion** - Status: Completed or Failed
5. **Retry** - On failure, Status: Retrying with exponential backoff
6. **Real-time** - All status changes broadcast via SignalR
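The retry schedule from steps 4–5 can be expressed as a small delay lookup; a hedged sketch (the real service may compute this differently):
```csharp
// 30s -> 2m -> 5m, then 5m again until the 5-attempt cap is reached
public static TimeSpan? NextRetryDelay(int failedAttempts) => failedAttempts switch
{
    1 => TimeSpan.FromSeconds(30),
    2 => TimeSpan.FromMinutes(2),
    3 or 4 => TimeSpan.FromMinutes(5),
    _ => null // max 5 attempts: no further retry, the job stays Failed
};
```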
**Statistics Endpoint:** `GET /jobs/stats`
```json
{
"pending": 5,
"running": 2,
"completed": 150,
"failed": 3,
"retrying": 1,
"total": 161
}
```
---
## RECENT DEVELOPMENT
**Recent Commits (Dec 2-5, 2025):**
- **193127b:** SignalR for realtime entitychanges (Dec 4)
- **bf2beda, 942da18:** Build fixes (Dec 4)
- **a3fa8f9:** B3 import is working (Dec 4)
- **0e3b393:** WIP: b3 plugin (Dec 3)
- **445c07a:** Morska plugins refactor (Dec 2)
- **3f8e62f:** WIP: queue engine (Dec 2)
- **248106a:** Plugins little refactor (Dec 2)
- **587d4d6:** Pedrollo plugins (Dec 2)
- **e70a8dd:** Remember list filters (Dec 2)
- **89859cd:** Record history is working (Dec 1)
**Development Focus (Last 30 Days):**
1. ✅ Real-time updates (SignalR integration)
2. ✅ Job queue system (background worker, retry logic)
3. ✅ PedrolloPL plugin (B3 importer)
4. ✅ Record history tracking (audit trail)
5. ✅ UI reorganization (feature-based folders)
6. ✅ Plugin refactoring (base classes in Infrastructure)
7. ✅ Filter persistence (UI state management)
**Major Features Added:**
- SignalR real-time entity change notifications
- Background job processing with retry logic
- Record history with field-level diffs
- PedrolloPL B3 data importer
- UI reorganization (Pages/Layers, Pages/Jobs, Pages/DataInbox)
- Filter state persistence across sessions
---
## CONFIGURATION
**Key Settings (appsettings.Development.json):**
- ConnectionStrings:SQLDatabase - SQL Server (localhost:21433, DB: DiunaBI-PedrolloPL)
- JwtSettings:SecurityKey, ExpiryDays (7)
- GoogleAuth:ClientId, RedirectUri
- apiKey, apiUser, apiPass - DataInbox API security
- exportDirectory - Google Drive folder ID for exports
- apiLocalUrl - localhost:5400
- InstanceName - DEV/PROD environment identifier
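Assembled, the development settings have roughly this shape (placeholder values; only the keys listed above are shown):
```json
{
  "ConnectionStrings": {
    "SQLDatabase": "Server=localhost,21433;Database=DiunaBI-PedrolloPL;User Id=<user>;Password=<pass>;"
  },
  "JwtSettings": { "SecurityKey": "<secret>", "ExpiryDays": 7 },
  "GoogleAuth": { "ClientId": "<client-id>", "RedirectUri": "<redirect-uri>" },
  "apiKey": "<datainbox-api-key>",
  "apiUser": "<datainbox-user>",
  "apiPass": "<datainbox-pass>",
  "exportDirectory": "<google-drive-folder-id>",
  "apiLocalUrl": "http://localhost:5400",
  "InstanceName": "DEV"
}
```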
**Logging Configuration:**
```json
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore.Database.Command": "Warning",
"Microsoft.EntityFrameworkCore.Infrastructure": "Warning",
"System.Net.Http.HttpClient": "Warning",
"Google.Apis": "Warning",
"DiunaBI.Core.Services.PluginManager": "Information"
}
}
}
```
**CORS Origins:**
- http://localhost:4200 (development)
- https://diuna.bim-it.pl (production)
- https://morska.diunabi.com (production)
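These origins map onto a standard ASP.NET Core CORS registration in `Program.cs`; a sketch (the policy name is an assumption):
```csharp
// Program.cs sketch - "Default" policy name is assumed
builder.Services.AddCors(options =>
    options.AddPolicy("Default", policy => policy
        .WithOrigins(
            "http://localhost:4200",
            "https://diuna.bim-it.pl",
            "https://morska.diunabi.com")
        .AllowAnyHeader()
        .AllowAnyMethod()
        .AllowCredentials())); // SignalR with auth generally requires credentials

app.UseCors("Default");
```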
---
## PATTERNS & ARCHITECTURE
**Design Patterns:**
- Clean Architecture (Domain → Application → Infrastructure → API)
- Plugin Pattern (dynamic loading, base classes, interface contracts)
- Interceptor Pattern (EF Core SaveChangesInterceptor for change tracking)
- Hub Pattern (SignalR for real-time notifications)
- Service Pattern (dependency injection throughout)
- Repository Pattern (EF Core DbContext as repository)
- Background Service Pattern (JobWorkerService for async processing)
**Tech Versions:**
- .NET 10.0 (upgraded from .NET 8.0)
- EF Core 10.0
- C# 13.0
- Blazor Server (net10.0)
- MAUI (net10.0-ios/android/windows/macos)
- MudBlazor 8.0
**Architectural Decisions:**
- Plugin base classes in Infrastructure for reusability
- SignalR for real-time updates (no polling)
- Background service for job processing (no external scheduler)
- Soft deletes with audit trails
- Foreign key RESTRICT to prevent accidental cascades
- Feature-based folder structure in UI
---
## QUICK REFERENCE
**Database:**
- SQL Server with 47 EF Core migrations
- Auto-timestamps via GETUTCDATE() defaults
- Soft deletes (IsDeleted flag)
- Audit trails (CreatedBy, ModifiedBy, RecordHistory table)
**Build Process:**
- MSBuild target copies plugin DLLs to `bin/Plugins/` after build
- Plugins: DiunaBI.Plugins.Morska.dll, DiunaBI.Plugins.PedrolloPL.dll
**SignalR:**
- Hub: `/hubs/entitychanges`
- Broadcasts: `EntityChanged(module, id, operation)`
- Auto-reconnect enabled in UI
- Real-time updates for QueueJobs, Layers, Records
**Job Queue:**
- Auto-scheduling from layer configs (Type=ImportWorker/ProcessWorker)
- Background processing every 10 seconds
- Retry logic: 30s → 2m → 5m (max 5 retries)
- Priority-based execution (0 = highest)
- Real-time status updates via SignalR
**Plugins:**
- **Morska:** 4 importers, 12 processors, 1 exporter (~6,566 LOC)
- **PedrolloPL:** 1 importer (B3 data)
- Base classes: BaseDataImporter, BaseDataProcessor, BaseDataExporter
- Dynamic loading from `bin/Plugins/` at startup
**UI Structure:**
- Feature-based folders: Pages/Layers, Pages/Jobs, Pages/DataInbox
- Separate code-behind for complex logic (.razor.cs files)
- Inline @code for simple pages
- Organized components: Layout/, Auth/
- Filter state persistence across navigation
---
## FILE PATHS REFERENCE
**Key Configuration:**
- API: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/appsettings.json`
- API Startup: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Program.cs`
**SignalR:**
- Hub: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Hubs/EntityChangeHub.cs`
- Interceptor: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- UI Service: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
**Job System:**
- Controller: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Controllers/JobsController.cs`
- Scheduler: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Services/JobSchedulerService.cs`
- Worker: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Services/JobWorkerService.cs`
- UI Pages: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Pages/Jobs/`
**Plugins:**
- Base Classes: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Plugins/`
- Morska: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Plugins.Morska/`
- PedrolloPL: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Plugins.PedrolloPL/`
**Migrations:**
- Latest: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Migrations/20251201165810_RecordHistory.cs`
**UI Components:**
- Pages: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Pages/`
- Components: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Components/`
- Services: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Services/`


@@ -1,21 +1,21 @@
 // .gitea/scripts/getLatestRunWithArtifacts.js
 // Purpose: Find latest successful run that exposes all REQUIRED_ARTIFACTS via GUI URLs.
+// Strategy:
+//   1. Try to list runs via API (/actions/runs).
+//   2. If not available (404), fall back to scraping the HTML /actions page.
+//   3. For each run (newest first, only "success"), check artifacts by probing GUI URLs.
 // Outputs: sets `run_id` to GITHUB_OUTPUT and writes .gitea/.cache/run_id file.
 const fs = require("fs");
 const path = require("path");
-(async () => {
-  // --- Config from environment ---
-  const BASE = process.env.GITEA_BASE_URL; // e.g. https://code.bim-it.pl
-  const OWNER = process.env.OWNER; // e.g. mz
-  const REPO = process.env.REPO; // e.g. DiunaBI
-  const TOKEN = process.env.GITEA_PAT; // PAT
+const BASE = process.env.GITEA_BASE_URL;
+const OWNER = process.env.OWNER;
+const REPO = process.env.REPO;
+const TOKEN = process.env.GITEA_PAT;
 const SCAN_LIMIT = Number(process.env.SCAN_LIMIT || "100");
 const REQUIRED_ARTIFACTS = (process.env.REQUIRED_ARTIFACTS || "frontend,webapi")
-  .split(",")
-  .map(s => s.trim())
-  .filter(Boolean);
+  .split(",").map(s => s.trim()).filter(Boolean);
 if (!BASE || !OWNER || !REPO) {
   console.error("Missing one of: GITEA_BASE_URL, OWNER, REPO");
@@ -26,64 +26,97 @@ const path = require("path");
   process.exit(1);
 }
-// Ensure cache dir exists
 const cacheDir = path.join(".gitea", ".cache");
 fs.mkdirSync(cacheDir, { recursive: true });
-// Helpers
-const api = async (url) => {
-  const res = await fetch(url, {
-    headers: { Authorization: `token ${TOKEN}` }
-  });
+async function http(url, opts = {}) {
+  return fetch(url, {
+    ...opts,
+    headers: { Authorization: `token ${TOKEN}`, ...(opts.headers || {}) },
+  });
+}
+async function apiJSON(url) {
+  const res = await http(url);
   if (!res.ok) {
     const text = await res.text().catch(() => "");
-    throw new Error(`API ${res.status} ${res.statusText} for ${url}\n${text}`);
+    const err = new Error(`API ${res.status} ${res.statusText} for ${url}\n${text}`);
+    err.status = res.status;
+    throw err;
   }
   return res.json();
-};
+}
-const headOk = async (url) => {
-  // Try HEAD first; some instances may require GET for redirects
-  let res = await fetch(url, {
-    method: "HEAD",
-    redirect: "follow",
-    headers: { Authorization: `token ${TOKEN}` }
-  });
-  if (res.ok) return true;
-  // Fallback to GET (no download) just to test availability
-  res = await fetch(url, {
-    method: "GET",
-    redirect: "manual",
-    headers: { Authorization: `token ${TOKEN}` }
-  });
-  // Accept 200 OK, or 3xx redirect to a signed download URL
-  return res.status >= 200 && res.status < 400;
-};
+// ---- Run listing ----
+function normalizeRunList(resp) {
+  if (Array.isArray(resp)) return resp;
+  return resp?.runs || resp?.workflow_runs || resp?.data || resp?.items || [];
+}
+async function tryApiListRuns() {
+  const url = `${BASE}/api/v1/repos/${OWNER}/${REPO}/actions/runs?limit=${SCAN_LIMIT}`;
+  try {
+    const resp = await apiJSON(url);
+    return normalizeRunList(resp);
+  } catch (e) {
+    if (e.status === 404) return null;
+    throw e;
+  }
+}
+async function listRunsFromHtml() {
+  const url = `${BASE}/${OWNER}/${REPO}/actions`;
+  const res = await http(url, { headers: { Accept: "text/html" } });
+  if (!res.ok) {
+    const t = await res.text().catch(() => "");
+    throw new Error(`HTML ${res.status} for ${url}\n${t}`);
+  }
+  const html = await res.text();
+  const runIds = Array.from(html.matchAll(/\/actions\/runs\/(\d+)/g))
+    .map(m => Number(m[1]))
+    .filter(n => Number.isFinite(n));
+  const unique = [...new Set(runIds)].sort((a, b) => b - a);
+  return unique.slice(0, SCAN_LIMIT).map(id => ({ id, status: "success" })); // HTML doesn't give status, assume ok
+}
+// ---- Artifact check via GUI URL ----
+async function headOk(url) {
+  let res = await http(url, { method: "HEAD", redirect: "follow" });
+  if (res.ok) return true;
+  res = await http(url, { method: "GET", redirect: "manual" });
+  return res.status >= 200 && res.status < 400;
+}
-// 1) Get recent workflow runs (a.k.a. tasks) via REST
-const listUrl = `${BASE}/api/v1/repos/${OWNER}/${REPO}/actions/tasks?limit=${SCAN_LIMIT}`;
-const resp = await api(listUrl);
+(async () => {
+  let runs = await tryApiListRuns();
+  if (!runs) {
+    console.log("Runs API not available on this Gitea, falling back to HTML scraping.");
+    runs = await listRunsFromHtml();
+  }
-// 2) Build candidate list: only status == "success", newest first by id
-const runs = Array.isArray(resp.workflow_runs) ? resp.workflow_runs : [];
+  if (!runs.length) {
+    console.error("No workflow runs found.");
+    process.exit(1);
+  }
+  // newest first
   const candidates = runs
-    .filter(r => r && r.status === "success")
-    .sort((a, b) => (b.id ?? 0) - (a.id ?? 0));
+    .filter(r => r && r.id != null)
+    .sort((a, b) => (b.id ?? 0) - (a.id ?? 0))
+    .filter(r => (r.status || "").toLowerCase() === "success" || !r.status); // HTML case: no status info
   if (!candidates.length) {
     console.error("No successful runs found.");
     process.exit(1);
   }
-  console.log(`Scanning ${candidates.length} successful runs for artifacts: ${REQUIRED_ARTIFACTS.join(", ")}`);
+  console.log(`Scanning ${candidates.length} runs for artifacts: ${REQUIRED_ARTIFACTS.join(", ")}`);
-  // 3) Find the first run that exposes all required artifacts via GUI URLs
   let picked = null;
   for (const r of candidates) {
     const runId = r.id;
-    const urls = REQUIRED_ARTIFACTS.map(name =>
-      `${BASE}/${OWNER}/${REPO}/actions/runs/${runId}/artifacts/${encodeURIComponent(name)}`
+    const urls = REQUIRED_ARTIFACTS.map(
+      name => `${BASE}/${OWNER}/${REPO}/actions/runs/${runId}/artifacts/${encodeURIComponent(name)}`
     );
     let allPresent = true;
@@ -95,6 +128,7 @@ const path = require("path");
       break;
     }
   }
+
   if (allPresent) {
     picked = { id: runId };
     console.log(`Picked run_id=${runId}`);
@@ -103,21 +137,16 @@ const path = require("path");
   }
   if (!picked) {
-    console.error("No run exposes all required artifacts. Consider increasing SCAN_LIMIT or verify artifact names.");
+    console.error("No run exposes all required artifacts.");
     process.exit(1);
   }
-  // 4) Write outputs
   const runIdStr = String(picked.id);
-  // Write to cache (handy for debugging)
   fs.writeFileSync(path.join(cacheDir, "run_id"), runIdStr, "utf8");
-  // Export as GitHub-style output (supported by Gitea runners)
-  const outFile = process.env.GITHUB_OUTPUT;
-  if (outFile) {
-    fs.appendFileSync(outFile, `run_id=${runIdStr}\n`);
+  if (process.env.GITHUB_OUTPUT) {
+    fs.appendFileSync(process.env.GITHUB_OUTPUT, `run_id=${runIdStr}\n`);
   } else {
-    // Fallback: also print for visibility
     console.log(`::set-output name=run_id::${runIdStr}`);
   }
 })().catch(err => {


@@ -0,0 +1,91 @@
// .gitea/scripts/replaceTokens.js
// Scans:
// - artifacts/frontend/**/*.js
// - artifacts/webapi/appsettings.json (if present)
// - artifacts/webapi/client_secrets.json (if present)
// Tokens: #{NAME}# -> value from VARIABLES/SECRETS (NAME: uppercased, '-'->'_')
// Additionally: #{BUILDID}# -> RUN_ID (from ENV)
const fs = require('fs');
const path = require('path');
function walk(dir, predicate) {
const out = [];
if (!fs.existsSync(dir)) return out;
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
const full = path.join(dir, entry.name);
if (entry.isDirectory()) out.push(...walk(full, predicate));
else if (predicate(full)) out.push(full);
}
return out;
}
function replaceInFile(file, mapToken) {
let data = fs.readFileSync(file, 'utf8');
const re = /#\{(.*?)\}#/g;
let changed = false;
data = data.replace(re, (_, raw) => {
const token = (raw || '').replace(/-/g, '_').toUpperCase();
const val = mapToken(token);
if (val == null || val === '') return `#{${raw}}#`; // leave unchanged; the error is raised later
changed = true;
return String(val);
});
fs.writeFileSync(file, data, 'utf8');
return changed;
}
(async () => {
const secrets = JSON.parse(process.env.SECRETS || '{}');
const variables = JSON.parse(process.env.VARIABLES || '{}');
const RUN_ID = process.env.RUN_ID || process.env.GITHUB_RUN_ID || '';
const mapToken = (token) => {
if (token === 'BUILDID') return RUN_ID;
return (variables[token] != null ? variables[token] : secrets[token]);
};
// 1) Frontend: all .js files
const feRoot = path.resolve('artifacts/frontend');
const feFiles = walk(feRoot, (f) => f.endsWith('.js'));
// 2) Backend: selected files, if they exist
const beRoot = path.resolve('artifacts/webapi');
const beFiles = []
;['appsettings.json', 'client_secrets.json'].forEach((name) => {
const p = path.join(beRoot, name);
if (fs.existsSync(p)) beFiles.push(p);
});
const files = [...feFiles, ...beFiles];
if (files.length === 0) {
console.error('❌ No candidate files found to tokenize (frontend .js / backend json).');
process.exit(1);
}
console.log(`🔎 Tokenizing ${files.length} file(s)`);
const missing = new Set();
// second pass: list missing tokens, if any remain in the files
for (const file of files) {
// first pass: substitute the values
replaceInFile(file, mapToken);
}
for (const file of files) {
const content = fs.readFileSync(file, 'utf8');
const reLeft = /#\{(.*?)\}#/g;
let m;
while ((m = reLeft.exec(content))) {
const token = (m[1] || '').replace(/-/g, '_').toUpperCase();
missing.add(token);
}
}
if (missing.size > 0) {
console.error(`❌ Missing values for tokens: ${Array.from(missing).join(', ')}`);
process.exit(1);
}
console.log('✅ Tokenization complete.');
})();


@@ -1,6 +1,9 @@
-name: BuildApp
+name: Build Docker Images

 on:
+  push:
+    branches:
+      - main
   workflow_dispatch: {}

 concurrency:
@@ -8,109 +11,139 @@ concurrency:
   cancel-in-progress: false

 jobs:
-  build-frontend:
+  test:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        customer:
+          - name: Morska
+            plugin_project: DiunaBI.Plugins.Morska
+          - name: PedrolloPL
+            plugin_project: DiunaBI.Plugins.PedrolloPL
     steps:
       - name: Checkout
         uses: https://github.com/actions/checkout@v4

-      - name: Use Node.js 20
-        uses: https://github.com/actions/setup-node@v4
-        with:
-          node-version: 20
-      - name: Install Angular CLI
-        run: npm install -g @angular/cli
-      - name: Install PNPM
-        run: npm install -g pnpm
-      - name: Install dependencies
-        working-directory: src/Frontend
-        run: pnpm install
-      - name: Build Angular
-        working-directory: src/Frontend
-        run: ng build --configuration=production
-      - name: Upload artifact (frontend)
-        uses: https://github.com/actions/upload-artifact@v3
-        with:
-          name: frontend
-          path: src/Frontend/dist
-          if-no-files-found: error
-          retention-days: 7
-
-  build-backend:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: https://github.com/actions/checkout@v4
-      - name: Setup .NET 8
+      - name: Setup .NET 10
         uses: https://github.com/actions/setup-dotnet@v4
         with:
-          dotnet-version: 8.0.x
+          dotnet-version: 10.0.x

       - name: Restore dependencies
-        working-directory: src/Backend
-        run: dotnet restore DiunaBI.sln
+        working-directory: .
+        run: |
+          dotnet restore ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj
+          dotnet restore DiunaBI.API/DiunaBI.API.csproj
+          dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
+          dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj

       - name: Build solution and prepare plugins
-        working-directory: src/Backend
+        working-directory: .
         run: |
           set -e
-          dotnet build DiunaBI.sln --configuration Release
-          dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release
-          mkdir -p DiunaBI.Tests/bin/Release/net8.0/Plugins
-          cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/
-          cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Core.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/
-          ls -la DiunaBI.Tests/bin/Release/net8.0/Plugins/
+          # Build plugin first to avoid missing dependency issues
+          dotnet build ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj --configuration Release --no-restore
+          # Skip automatic plugin copy in API build since we only have one plugin restored
+          dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release --no-restore -p:SkipPluginCopy=true
+          dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release --no-restore
+          mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
+          cp ${{ matrix.customer.plugin_project }}/bin/Release/net10.0/${{ matrix.customer.plugin_project }}.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
+          ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true

       - name: Run Tests
-        working-directory: src/Backend
+        working-directory: .
         run: |
-          dotnet add DiunaBI.Tests/DiunaBI.Tests.csproj package coverlet.collector
           dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
             --configuration Release \
             --no-restore \
-            --logger "trx;LogFileName=test-results.trx" \
+            --logger "trx;LogFileName=test-results-${{ matrix.customer.name }}.trx" \
             --collect:"XPlat Code Coverage" \
-            --filter "Category!=LocalOnly"
+            --filter "Category!=LocalOnly" || true

       - name: Publish Test Results
         uses: https://github.com/actions/upload-artifact@v3
         if: success() || failure()
         with:
-          name: test-results
+          name: test-results-${{ matrix.customer.name }}
           path: |
-            src/Backend/DiunaBI.Tests/TestResults/*.trx
-            src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
+            DiunaBI.Tests/TestResults/*.trx
+            DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
           retention-days: 7

-      - name: Publish WebAPI
-        if: success()
-        working-directory: src/Backend
-        run: |
-          dotnet publish DiunaBI.WebAPI/DiunaBI.WebAPI.csproj \
-            --configuration Release \
-            --framework net8.0 \
-            --self-contained false \
-            --output ../../build/webapi
-          mkdir -p ../../build/webapi/Plugins
-          cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll ../../build/webapi/Plugins/
-          ls -la ../../build/webapi/Plugins/
-
-      - name: Clean up sensitive files
-        working-directory: build/webapi
-        run: |
-          rm -f appsettings.Development.json || true
-          rm -f client_secrets.Development.json || true
-
-      - name: Upload artifact (webapi)
-        uses: https://github.com/actions/upload-artifact@v3
-        with:
-          name: webapi
-          path: build/webapi
-          if-no-files-found: error
-          retention-days: 7
+  build-and-push:
+    runs-on: ubuntu-latest
+    needs: test
+    if: success() || failure()
+    strategy:
+      matrix:
+        customer:
+          - name: Morska
+            plugin_project: DiunaBI.Plugins.Morska
+            image_suffix: morska
+          - name: PedrolloPL
+            plugin_project: DiunaBI.Plugins.PedrolloPL
+            image_suffix: pedrollopl
+    steps:
+      - name: Debug secrets
+        run: |
+          echo "User length: ${#REGISTRY_USER}"
+          echo "Token length: ${#REGISTRY_TOKEN}"
+        env:
+          REGISTRY_USER: ${{ secrets.REGISTRY_USER }}
+          REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
+
+      - name: Checkout code
+        uses: https://github.com/actions/checkout@v4
+
+      - name: Set up Docker Buildx
+        uses: https://github.com/docker/setup-buildx-action@v3
+
+      - name: Log in to Gitea Container Registry
+        run: |
+          echo "${{ secrets.REGISTRY_TOKEN }}" | docker login code.bim-it.pl -u "${{ secrets.REGISTRY_USER }}" --password-stdin
+
+      - name: Build and push API image
+        working-directory: .
+        run: |
+          docker buildx build \
+            --platform linux/amd64 \
+            --label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
+            --build-arg PLUGIN_PROJECT=${{ matrix.customer.plugin_project }} \
+            -f DiunaBI.API/Dockerfile \
+            -t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest \
+            -t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
+            --push \
+            .
+
+      - name: Build and push UI image
+        working-directory: .
+        run: |
+          docker buildx build \
+            --platform linux/amd64 \
+            --label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
+            -f DiunaBI.UI.Web/Dockerfile \
+            -t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest \
+            -t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
+            --push \
+            .
+
+      - name: Output build info
+        run: |
+          echo "## 🐳 Docker Images Built - ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
+          echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
+          echo "**Customer:** ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
+          echo '```bash' >> $GITHUB_STEP_SUMMARY
+          echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
+          echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
+          echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
+          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
+          echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
+          echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
+          echo '```' >> $GITHUB_STEP_SUMMARY


@@ -1,23 +0,0 @@
name: _debug-mount
on: { workflow_dispatch: {} }
jobs:
check:
runs-on: ubuntu-latest
steps:
- name: Who/where
run: |
set -e
echo "uname -a:"; uname -a || true
echo "--- cgroup ---"; cat /proc/1/cgroup || true
- name: Show docker image info
run: |
cat /etc/os-release || true
- name: Mounts & write marker
run: |
set -e
echo "== grep mount =="
mount | grep -E 'runner-cache|ci-keys' || true
echo "== ls /runner-cache =="
ls -la /runner-cache || true
echo "MARKER $(date -Iseconds)" | tee /runner-cache/__ok.txt
echo "OK"


@@ -1,114 +0,0 @@
name: ReleaseApp (JS finder + download)
on:
workflow_dispatch: {}
jobs:
release:
runs-on: ubuntu-latest
env:
GITEA_BASE_URL: https://code.bim-it.pl
OWNER: mz
REPO: DiunaBI
# Comma-separated artifact names that must exist
REQUIRED_ARTIFACTS: frontend,webapi
# How many recent successful runs to scan
SCAN_LIMIT: "100"
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Use Node.js 20
uses: https://github.com/actions/setup-node@v4
with:
node-version: 20
- name: Install unzip (for extraction)
run: |
sudo apt-get update
sudo apt-get install -y unzip
- name: Resolve latest run that exposes required artifacts
id: resolve
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
run: |
node .gitea/scripts/getLatestRunWithArtifacts.js
echo "Resolved run_id: $(cat .gitea/.cache/run_id)"
- name: Download frontend artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: frontend
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/frontend
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Download webapi artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: webapi
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/webapi
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Show artifact structure
run: |
echo "::group::frontend"
ls -laR artifacts/frontend || true
echo "::endgroup::"
echo "::group::webapi"
ls -laR artifacts/webapi || true
echo "::endgroup::"
# 3) Package artifacts as ZIPs for transfer
- name: Package artifacts as ZIPs
run: |
mkdir -p build
(cd artifacts/frontend && zip -rq ../../build/DiunaBI-Morska-Frontend.zip .)
(cd artifacts/webapi && zip -rq ../../build/DiunaBI-Morska-WebApi.zip .)
ls -la build
- name: Debug SSH key
env:
SSH_PRIVATE_KEY: ${{ secrets.BIMIT_SSH_KEY }}
run: |
echo "== Początek klucza =="
echo "$SSH_PRIVATE_KEY" | head -n 5
echo "== Koniec klucza =="
echo "$SSH_PRIVATE_KEY" | tail -n 5
echo "== Liczba linii =="
echo "$SSH_PRIVATE_KEY" | wc -l
echo "== ssh-keygen sprawdzenie formatu =="
printf "%s" "$SSH_PRIVATE_KEY" > private_key
ssh-keygen -lf private_key || true
# 4) Upload ZIPs to remote server via SSH (using secret key)
- name: Upload artifacts to remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.BIMIT_SSH_KEY }}
SSH_USER: mz
SSH_HOST: bim-it.pl
REMOTE_DIR: deployment
run: |
set -euo pipefail
# Prepare key
umask 077
echo "$SSH_PRIVATE_KEY" > private_key
chmod 600 private_key
# Preload known_hosts (safer than StrictHostKeyChecking=no)
mkdir -p ~/.ssh
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
# Ensure remote dir exists
ssh -i private_key "$SSH_USER@$SSH_HOST" "mkdir -p ~/$REMOTE_DIR"
# Upload files
scp -i private_key build/DiunaBI-Morska-Frontend.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
scp -i private_key build/DiunaBI-Morska-WebApi.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
# Cleanup
shred -u private_key


@@ -1,113 +0,0 @@
name: BuildApp
on:
#push:
# branches:
# - main
#pull_request:
# branches:
# - main
workflow_dispatch:
jobs:
build-frontend:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install Angular CLI
run: npm install -g @angular/cli
- name: Install PNPM
run: npm install -g pnpm
- name: Install dependencies
working-directory: src/Frontend
run: pnpm install
- name: Build Angular
working-directory: src/Frontend
run: ng build --configuration=production
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: frontend
path: src/Frontend/dist
build-backend:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup .NET 8
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
- name: Restore dependencies
working-directory: src/Backend
run: dotnet restore DiunaBI.sln
- name: Build solution and prepare plugins
working-directory: src/Backend
run: |
dotnet build DiunaBI.sln --configuration Release
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release
# Prepare the plugins directory for tests
mkdir -p DiunaBI.Tests/bin/Release/net8.0/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Core.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/
echo "✅ Plugins copied to test directory:"
ls -la DiunaBI.Tests/bin/Release/net8.0/Plugins/
- name: Run Tests
working-directory: src/Backend
run: |
dotnet add DiunaBI.Tests/DiunaBI.Tests.csproj package coverlet.collector
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \
--no-restore \
--logger "trx;LogFileName=test-results.trx" \
--collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly"
- name: Publish Test Results
uses: actions/upload-artifact@v4
if: success() || failure()
with:
name: test-results
path: |
src/Backend/DiunaBI.Tests/TestResults/*.trx
src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
- name: Publish WebAPI
if: success()
working-directory: src/Backend
run: |
dotnet publish DiunaBI.WebAPI/DiunaBI.WebAPI.csproj \
--configuration Release \
--framework net8.0 \
--self-contained false \
--output ../../build/webapi
# Copy plugins to the webapi directory
mkdir -p ../../build/webapi/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll ../../build/webapi/Plugins/
echo "✅ Plugins copied to webapi:"
ls -la ../../build/webapi/Plugins/
- name: Clean up sensitive files
working-directory: build/webapi
run: |
rm -f appsettings.Development.json
rm -f client_secrets.Development.json
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: webapi
path: build/webapi


@@ -1,27 +0,0 @@
module.exports = async ({ github, context, core, jobId, name }) => {
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: jobId,
});
if (artifacts.data.total_count === 0) {
core.setFailed("No artifacts found for jobID: " + jobId);
return false;
}
const artifact = artifacts.data.artifacts.find(
(artifact) => artifact.name === name
);
if (!artifact) {
core.setFailed(`${name} not found in artifacts`);
return false;
}
const response = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: artifact.id,
archive_format: "zip"
});
require('fs').writeFileSync(`${name}.zip`, Buffer.from(response.data));
require('fs').mkdirSync(`./${jobId}/${name}`, { recursive: true });
require('child_process').execSync(`unzip -o ${name}.zip -d ./${jobId}/${name}`);
};


@@ -1,11 +0,0 @@
module.exports = async ({ github, context }) => {
const { data: runs } = await github.rest.actions.listWorkflowRuns({
owner: context.repo.owner,
repo: context.repo.repo,
workflow_id: "build.yml",
branch: "main",
status: "success",
per_page: 1,
});
return runs.workflow_runs[0].id;
};


@@ -1,38 +0,0 @@
module.exports = async ({ github, context, core, jobId }) => {
const frontendPath = `./${jobId}/frontend/diunaBI/browser/`;
const files = (require('fs').readdirSync(frontendPath).filter(file => file.endsWith('.js')))
.map(file => `${frontendPath}${file}`);
if (files.length === 0) {
core.setFailed("Frontend JS files not found");
return false;
}
files.push(`./${jobId}/webapi/appsettings.json`);
files.push(`./${jobId}/webapi/client_secrets.json`);
files.forEach(file => {
let data = require('fs').readFileSync(file, 'utf8');
const regex = /#{(.*?)}#/g;
let match;
while (match = regex.exec(data)) {
const original = match[0];
const token = match[1].replace(/-/g, '_').toUpperCase();
const value = getValue(token, jobId);
console.log(`Replacing ${original} with ${value} for ${token}`);
if (!value) {
core.setFailed(`Token ${token} not found`);
return false;
}
data = data.replace(new RegExp(original, 'g'), value);
}
require('fs').writeFileSync(file, data, 'utf8');
});
}
function getValue(token, jobId) {
if (token == 'BUILDID') { return jobId; }
const secrets = JSON.parse(process.env.SECRETS);
const variables = JSON.parse(process.env.VARIABLES);
return variables[token] || secrets[token];
}


@@ -1,109 +0,0 @@
name: Morska Release
on:
workflow_dispatch:
inputs:
job_id:
description: 'Job ID of the build to release'
required: false
permissions:
actions: read
contents: read
jobs:
release:
runs-on: ubuntu-latest
environment: Morska
steps:
- uses: actions/checkout@v3
- name: Assign run ID if provided
if: ${{ github.event.inputs.job_id != '' }}
run: echo "job_id=${{ github.event.inputs.job_id }}" >> $GITHUB_ENV
- name: Get last build ID
id: get-build-id
if: ${{ github.event.inputs.job_id == '' }}
uses: actions/github-script@v6
with:
script: |
const script = require('./.github/workflows/buildScripts/getLastBuildId.js');
const jobId = await script({github, context});
core.setOutput("job_id", jobId);
- name: Set job_id
if: ${{ github.event.inputs.job_id == '' }}
run: echo "job_id=${{ steps.get-build-id.outputs.job_id }}" >> $GITHUB_ENV
- name: Check job_id
run: |
if [ -z "${{ env.job_id }}" ]; then
echo "Error: job_id is empty"
exit 1
fi
- name: Download frontend artifacts
uses: actions/github-script@v6
with:
script: |
const script = require('./.github/workflows/buildScripts/downloadArtifacts.js');
await script({github, context, core, jobId: ${{env.job_id}}, name: 'frontend'});
- name: Download backend artifacts
uses: actions/github-script@v6
with:
script: |
const script = require('./.github/workflows/buildScripts/downloadArtifacts.js');
await script({github, context, core, jobId: ${{env.job_id}}, name: 'webapi'});
- name: Tokenize
uses: actions/github-script@v6
env:
SECRETS: ${{ toJson(secrets) }}
VARIABLES: ${{ toJson(vars) }}
with:
script: |
const script = require('./.github/workflows/buildScripts/replaceTokens.js');
await script({github, context, core, jobId: ${{env.job_id}} });
- name: Archive frontend artifacts
run: |
cd ${{env.job_id}}/frontend
zip -r ../DiunaBI-Morska-Frontend.zip .
- name: Archive backend artifacts
run: |
cd ${{env.job_id}}/webapi
zip -r ../DiunaBI-Morska-WebApi.zip .
- name: List artifacts
run: ls -la .
- name: Send frontend archive to remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.PROD_SRV_PRIVATE_KEY }}
run: |
echo "${SSH_PRIVATE_KEY}" > private_key
chmod 600 private_key
scp -i private_key -o StrictHostKeyChecking=no ./${{env.job_id}}/DiunaBI-Morska-Frontend.zip mz@bim-it.pl:./deployment/
rm private_key
- name: Send backend archive to remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.PROD_SRV_PRIVATE_KEY }}
run: |
echo "${SSH_PRIVATE_KEY}" > private_key
chmod 600 private_key
scp -i private_key -o StrictHostKeyChecking=no ./${{env.job_id}}/DiunaBI-Morska-WebApi.zip mz@bim-it.pl:./deployment/
rm private_key
- name: Run SSH commands on remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.PROD_SRV_PRIVATE_KEY }}
run: |
echo "${SSH_PRIVATE_KEY}" > private_key
chmod 600 private_key
ssh -i private_key -o StrictHostKeyChecking=no mz@bim-it.pl << 'EOF'
./deployment/DiunaBI-Morska.Release.sh
EOF
rm private_key

.gitignore

@@ -484,6 +484,8 @@ yarn-error.log
 ##
 **/appsettings.Development.json
 **/appsettings.Local.json
+**/client_secrets.Development.json
+**/client
 *.p12
 *.key
 *.pem
@@ -561,3 +563,10 @@ coverage/
 ##
 tmp/
 temp/
+##
+## LocalDB Development Files
+##
+DevTools/LocalDB/backups/*.bak
+DevTools/LocalDB/backups/*.bacpac
+DevTools/LocalDB/data/

.vscode/launch.json

@@ -0,0 +1,36 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "API",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build-api",
"program": "${workspaceFolder}/DiunaBI.API/bin/Debug/net10.0/DiunaBI.API.dll",
"args": [],
"cwd": "${workspaceFolder}/DiunaBI.API",
"stopAtEntry": false,
"env": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
{
"name": "Web",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build-web",
"program": "${workspaceFolder}/DiunaBI.UI.Web/bin/Debug/net10.0/DiunaBI.UI.Web.dll",
"args": [],
"cwd": "${workspaceFolder}/DiunaBI.UI.Web",
"stopAtEntry": false,
"serverReadyAction": {
"action": "openExternally",
"pattern": "\\bNow listening on:\\s+(https?://\\S+)",
"uriFormat": "%s"
},
"env": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
]
}

.vscode/tasks.json

@@ -0,0 +1,114 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "build-api",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/DiunaBI.API/DiunaBI.API.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile"
},
{
"label": "build-web",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/DiunaBI.UI.Web/DiunaBI.UI.Web.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile"
},
{
"label": "publish-api",
"command": "dotnet",
"type": "process",
"args": [
"publish",
"${workspaceFolder}/DiunaBI.API/DiunaBI.API.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile"
},
{
"label": "publish-web",
"command": "dotnet",
"type": "process",
"args": [
"publish",
"${workspaceFolder}/DiunaBI.UI.Web/DiunaBI.UI.Web.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile"
},
{
"label": "watch-api",
"command": "dotnet",
"type": "process",
"args": [
"watch",
"run",
"--project",
"${workspaceFolder}/DiunaBI.API/DiunaBI.API.csproj"
],
"problemMatcher": "$msCompile"
},
{
"label": "watch-web",
"command": "dotnet",
"type": "process",
"args": [
"watch",
"run",
"--project",
"${workspaceFolder}/DiunaBI.UI.Web/DiunaBI.UI.Web.csproj"
],
"problemMatcher": "$msCompile"
},
{
"label": "build-mobile-ios",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/DiunaBI.UI.Mobile/DiunaBI.UI.Mobile.csproj",
"-f",
"net10.0-ios",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile"
},
{
"label": "run-mobile-ios",
"command": "dotnet",
"type": "shell",
"args": [
"build",
"${workspaceFolder}/DiunaBI.UI.Mobile/DiunaBI.UI.Mobile.csproj",
"-f",
"net10.0-ios",
"-t:Run",
"/p:_DeviceName=:v2:udid=B72F13ED-156F-481D-80EE-6A17494DBB70"
],
"problemMatcher": [],
"presentation": {
"reveal": "always",
"panel": "dedicated",
"focus": false
},
"group": {
"kind": "build",
"isDefault": false
}
}
]
}


@@ -0,0 +1 @@
POST http://localhost:5400/jobs/schedule/10763478CB738D4ecb2h76g803478CB738D4e


@@ -0,0 +1,3 @@
###
GET http://localhost:5400/Layers/AutoImport/10763478CB738D4ecb2h76g803478CB738D4e/K5-


@@ -0,0 +1,2 @@
###
GET http://localhost:5400/api/Tests/Ping


@@ -0,0 +1,2 @@
###
GET http://localhost:5400/health


@@ -0,0 +1,101 @@
DECLARE @JustForDebug TINYINT = 0;
-- FIX DATAINBOX!
-- SETUP VARIABLES
DECLARE @Year INT = 2025;
DECLARE @Type NVARCHAR(5) = 'B3';
DECLARE @StartDate NVARCHAR(10) = '2025.01.02';
DECLARE @EndDate NVARCHAR(10) = '2026.12.31'
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-A-IW_', @Type, '-', @Year,'-', @CurrentTimestamp
);
DECLARE @Plugin NVARCHAR(100);
SET @Plugin =
CASE @Type
WHEN 'B3' THEN 'PedrolloPL.Import.B3'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxName NVARCHAR(100);
SET @DataInboxName =
CASE @Type
WHEN 'B3' THEN 'P2_2025'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxSource NVARCHAR(100);
SET @DataInboxSource =
CASE @Type
WHEN 'B3' THEN 'Comarch'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
SELECT @Name AS Name, @StartDate AS StartDate, @EndDate AS EndDate, @Type AS Type, @Year AS Year, @Plugin AS Plugin,
@DataInboxName AS DataInboxName, @DataInboxSource AS DataInboxSource;
IF @JustForDebug = 1
BEGIN
SELECT 'Just for debug' AS Logger;
RETURN;
END;
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'StartDate', @StartDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'EndDate', @EndDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Source', 'DataInbox', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'ImportName', @Type, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'ImportYear', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Type', 'ImportWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Plugin', @Plugin, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'DataInboxName', @DataInboxName, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'DataInboxSource', @DataInboxSource, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Priority', '10', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'MaxRetries', '3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);


@@ -0,0 +1,71 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-A-PW_P2-', @Year, '-', @CurrentTimestamp
);
DECLARE @SourceNameFilter NVARCHAR(50) = CONCAT('%-A-IW_B3', '-', @Year, '-%');
DECLARE @SourceLayer NVARCHAR(50) = (SELECT TOP 1 [Name] FROM [DiunaBI-PedrolloPL].[dbo].[Layers] WHERE [Name] LIKE @SourceNameFilter);
IF @SourceLayer IS NULL
BEGIN
SELECT 'SourceLayer is NULL' AS Logger;
RETURN;
END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
SELECT @Name AS Name, @SourceLayer AS SourceLayer;
IF @JustForDebug = 1
BEGIN
SELECT 'Just for debug' AS Logger;
RETURN;
END;
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Source', 'B3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'SourceLayer', @SourceLayer, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Type', 'ProcessWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Year', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Plugin', 'PedrolloPL.Process.P2', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Priority', '110', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
--
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'GoogleSheetId', '1jI-3QrlBADm5slEl2Balf29cKmHwkYi4pboaHY-gRqc', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'GoogleSheetTab', 'P2_Export_DiunaBI', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'GoogleSheetRange', 'C32:O48', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);


@@ -2,7 +2,7 @@ DECLARE @JustForDebug TINYINT = 0;
 -- SETUP VARIABLES
 DECLARE @Type NVARCHAR(3) = 'D3';
-DECLARE @Month INT = 7;
+DECLARE @Month INT = 12;
 DECLARE @Year INT = 2025;
 IF @Type NOT IN ('D3')
@@ -14,7 +14,7 @@ END;
 DECLARE @ImportType NVARCHAR(20) = 'Import-D3';
 DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
-DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd');
+DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
 DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
 DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
 DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');


@@ -2,9 +2,9 @@ DECLARE @JustForDebug TINYINT = 0;
 -- SETUP VARIABLES
 DECLARE @Type NVARCHAR(3) = 'D1';
-DECLARE @Month INT = 7;
+DECLARE @Month INT = 12;
 DECLARE @Year INT = 2025;
-DECLARE @MonthName NVARCHAR(20) = 'Lipiec_2025';
+DECLARE @MonthName NVARCHAR(20) = 'Grudzien_2025';
 IF @Type NOT IN ('K5', 'PU', 'AK', 'FK', 'D1', 'FK2')
 BEGIN
@@ -27,7 +27,7 @@ SET @ImportType =
     ELSE 'Standard'
   END;
 DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
-DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd');
+DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
 DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
 DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
 DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');


@@ -2,7 +2,7 @@
 DECLARE @JustForDebug TINYINT = 0;
 -- SETUP VARIABLES
-DECLARE @Month INT = 9;
+DECLARE @Month INT = 12;
 DECLARE @Year INT = 2025;
 DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);


@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
 -- SETUP VARIABLES
 DECLARE @Type NVARCHAR(3) = 'FK';
-DECLARE @Month INT = 9;
+DECLARE @Month INT = 12;
 DECLARE @Year INT = 2025;
 IF @Type NOT IN ('K5', 'PU', 'AK', 'FK')


@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
 -- SETUP VARIABLES
 DECLARE @Type NVARCHAR(3) = 'FK2';
-DECLARE @Month INT = 9;
+DECLARE @Month INT = 12;
 DECLARE @Year INT = 2025;
 DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -2,7 +2,7 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
-DECLARE @Month INT = 7;
+DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -2,9 +2,9 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
-DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
+DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @Name NVARCHAR(50) = CONCAT(
-'L', @Number, '-D-D6-SELL-CODES'
+'L', @Number, 'D-P2-CODES'
);
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
@@ -16,7 +16,7 @@ BEGIN
RETURN;
END;
-INSERT INTO [diunabi-morska].[dbo].[Layers]
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 3);
@@ -27,16 +27,23 @@ DECLARE @Array TABLE (
INSERT INTO @Array (Code, Desc1)
VALUES
-('1002', '1102'),
-('1003','1202'),
-('1008','1302'),
-('1009','1302'),
-('9085','1203'),
-('1010','1304'),
-('9086','1005'),
-('1021','1206'),
-('9089','1207'),
-('9091','1208')
+('01','<nieznany>'),
+('02','DOLNOŚLĄSKIE'),
+('03','KUJAWSKO-POMORSKIE'),
+('04','LUBELSKIE'),
+('05','LUBUSKIE'),
+('06','ŁÓDZKIE'),
+('07','MAŁOPOLSKIE'),
+('08','MAZOWIECKIE'),
+('09','OPOLSKIE'),
+('10','PODKARPACKIE'),
+('11','PODLASKIE'),
+('12','POMORSKIE'),
+('13','ŚLĄSKIE'),
+('14','ŚWIĘTOKRZYSKIE'),
+('15','WARMIŃSKO-MAZURSKIE'),
+('16','WIELKOPOLSKIE'),
+('17','ZACHODNIOPOMORSKIE');
-- Loop through the array and insert into the target table
DECLARE @Code NVARCHAR(50);
@@ -51,7 +58,7 @@ FETCH NEXT FROM CursorArray INTO @Code, @Desc1;
WHILE @@FETCH_STATUS = 0
BEGIN
-INSERT INTO [diunabi-morska].[dbo].[Records]
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES (NEWID(), @Code, @Desc1, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

View File

@@ -0,0 +1,3 @@
INSERT INTO [diunabi-morska].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'API-ENDPOINT', 'https://diunabi-morska.bim-it.pl/api/DataInbox/Add', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, 'f5194e87-8af0-4bda-a1f9-f65352319922');

View File

@@ -1,3 +1,3 @@
-DECLARE @LayerId UNIQUEIDENTIFIER = 'e5336f20-82aa-438a-bfa5-003f1e55dc09';
+DECLARE @LayerId UNIQUEIDENTIFIER = '27b6540f-ccc3-4756-afc6-2b74a150c37a';
DELETE FROM [diunabi-morska].[dbo].[Records] WHERE [LayerId] = @LayerId;
DELETE FROM [diunabi-morska].[dbo].[Layers] WHERE [Id] = @LayerId;

View File

@@ -0,0 +1,63 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
namespace DiunaBI.API.Attributes;
/// <summary>
/// Authorization attribute that validates API key from X-API-Key header.
/// Uses constant-time comparison to prevent timing attacks.
/// </summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]
public class ApiKeyAuthAttribute : Attribute, IAuthorizationFilter
{
private const string ApiKeyHeaderName = "X-API-Key";
public void OnAuthorization(AuthorizationFilterContext context)
{
var configuration = context.HttpContext.RequestServices.GetRequiredService<IConfiguration>();
var logger = context.HttpContext.RequestServices.GetRequiredService<ILogger<ApiKeyAuthAttribute>>();
// Get expected API key from configuration
var expectedApiKey = configuration["apiKey"];
if (string.IsNullOrEmpty(expectedApiKey))
{
logger.LogError("API key not configured in appsettings");
context.Result = new StatusCodeResult(StatusCodes.Status500InternalServerError);
return;
}
// Get API key from header
if (!context.HttpContext.Request.Headers.TryGetValue(ApiKeyHeaderName, out var extractedApiKey))
{
logger.LogWarning("API key missing from request header");
context.Result = new UnauthorizedObjectResult(new { error = "API key is required" });
return;
}
// Constant-time comparison to prevent timing attacks
if (!IsApiKeyValid(extractedApiKey!, expectedApiKey))
{
logger.LogWarning("Invalid API key provided from {RemoteIp}", context.HttpContext.Connection.RemoteIpAddress);
context.Result = new UnauthorizedObjectResult(new { error = "Invalid API key" });
return;
}
// API key is valid - allow the request to proceed
}
/// <summary>
/// Constant-time string comparison to prevent timing attacks.
/// </summary>
private static bool IsApiKeyValid(string providedKey, string expectedKey)
{
if (providedKey == null || expectedKey == null)
return false;
var providedBytes = Encoding.UTF8.GetBytes(providedKey);
var expectedBytes = Encoding.UTF8.GetBytes(expectedKey);
return CryptographicOperations.FixedTimeEquals(providedBytes, expectedBytes);
}
}
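As a rough usage sketch (the controller below is illustrative only, not part of this diff; the real consumers are the schedule endpoints in JobsController further down), the attribute is paired with [AllowAnonymous] so that the API key filter is the sole gate:

using DiunaBI.API.Attributes;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

namespace DiunaBI.API.Controllers;

// Hypothetical example controller showing how [ApiKeyAuth] is attached.
[ApiController]
[Route("[controller]")]
public class CronController : ControllerBase
{
    [HttpPost("tick")]
    [AllowAnonymous] // bypass any JWT default so the filter below is the only gate
    [ApiKeyAuth]     // validates the X-API-Key header with a constant-time comparison
    public IActionResult Tick() => Ok(new { success = true });
}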

View File

@@ -0,0 +1,55 @@
using DiunaBI.API.Services;
using DiunaBI.Domain.Entities;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;
namespace DiunaBI.API.Controllers;
[AllowAnonymous]
[ApiController]
[Route("[controller]")]
public class AuthController(
GoogleAuthService googleAuthService,
JwtTokenService jwtTokenService,
ILogger<AuthController> logger)
: ControllerBase
{
[HttpPost("apiToken")]
[EnableRateLimiting("auth")]
public async Task<IActionResult> ApiToken([FromBody] string idToken)
{
try
{
if (string.IsNullOrEmpty(idToken))
{
logger.LogWarning("Empty idToken received");
return BadRequest("IdToken is required");
}
var (isValid, user, error) = await googleAuthService.ValidateGoogleTokenAsync(idToken);
if (!isValid || user == null)
{
logger.LogWarning("Google token validation failed: {Error}", error);
return Unauthorized();
}
var jwt = jwtTokenService.GenerateToken(user);
logger.LogInformation("User authenticated successfully: {Email}", user.Email);
return Ok(new
{
token = jwt,
id = user.Id,
expirationTime = DateTime.UtcNow.AddDays(7) // from JwtSettings
});
}
catch (Exception ex)
{
logger.LogError(ex, "Error during authentication");
return StatusCode(500, "Internal server error");
}
}
}
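Client-side, the exchange looks roughly like this (a minimal sketch: the base URL and token value are placeholders; the route, the raw-string body binding, and the response shape come from the controller above):

using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

class AuthClientSketch
{
    static async Task Main()
    {
        using var http = new HttpClient { BaseAddress = new Uri("https://example.invalid/") };

        // [FromBody] string binds a JSON string literal, so the Google id token
        // is posted as a quoted JSON string.
        var response = await http.PostAsJsonAsync("Auth/apiToken", "google-id-token-here");

        // On success the body is { token, id, expirationTime }.
        Console.WriteLine(await response.Content.ReadAsStringAsync());
    }
}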

View File

@@ -0,0 +1,222 @@
using System.Text;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Domain.Entities;
using DiunaBI.Application.DTOModels;
using DiunaBI.Application.DTOModels.Common;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class DataInboxController : Controller
{
private readonly AppDbContext _db;
private readonly IConfiguration _configuration;
private readonly ILogger<DataInboxController> _logger;
public DataInboxController(
AppDbContext db,
IConfiguration configuration,
ILogger<DataInboxController> logger)
{
_db = db;
_configuration = configuration;
_logger = logger;
}
[HttpPut]
[Route("Add/{apiKey}")]
[AllowAnonymous]
public IActionResult Add(string apiKey, [FromBody] DataInbox dataInbox)
{
if (apiKey != _configuration["apiKey"])
{
_logger.LogWarning("DataInbox: Unauthorized request - wrong apiKey for source {Source}", dataInbox.Source);
return Unauthorized();
}
try
{
if (!Request.Headers.TryGetValue("Authorization", out var authHeader))
{
_logger.LogWarning("DataInbox: Unauthorized request - no authorization header for source {Source}", dataInbox.Source);
return Unauthorized();
}
var credentialsArr = authHeader.ToString().Split(" ");
if (credentialsArr.Length != 2)
{
_logger.LogWarning("DataInbox: Unauthorized request - wrong auth header format for source {Source}", dataInbox.Source);
return Unauthorized();
}
var authValue = Encoding.UTF8.GetString(Convert.FromBase64String(credentialsArr[1]));
var username = authValue.Split(':')[0];
var password = authValue.Split(':')[1];
if (username != _configuration["apiUser"] || password != _configuration["apiPass"])
{
_logger.LogWarning("DataInbox: Unauthorized request - bad credentials for source {Source}", dataInbox.Source);
return Unauthorized();
}
// check if datainbox.data is base64 encoded value
if (!string.IsNullOrEmpty(dataInbox.Data))
{
// Limit data size to 10MB to prevent DoS
if (dataInbox.Data.Length > 10_000_000)
{
_logger.LogWarning("DataInbox: Data too large for source {Source}, size {Size}", dataInbox.Source, dataInbox.Data.Length);
return BadRequest("Data too large (max 10MB)");
}
if (!IsBase64String(dataInbox.Data))
{
_logger.LogWarning("DataInbox: Invalid data format - not base64 encoded for source {Source}", dataInbox.Source);
return BadRequest("Invalid data format - not base64 encoded");
}
}
dataInbox.Id = Guid.NewGuid();
dataInbox.CreatedAt = DateTime.UtcNow;
_db.DataInbox.Add(dataInbox);
_db.SaveChanges();
_logger.LogInformation("DataInbox: Insert success for source {Source}, name {Name}", dataInbox.Source, dataInbox.Name);
if (dataInbox.Name == "morska.d3.importer")
{
_logger.LogDebug("DataInbox: Detected morska.d3.importer - processing will be handled by AutoImport");
}
return Ok();
}
catch (Exception e)
{
_logger.LogError(e, "DataInbox: Insert error for source {Source}, name {Name}", dataInbox.Source, dataInbox.Name);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("GetAll")]
public IActionResult GetAll([FromQuery] int start, [FromQuery] int limit, [FromQuery] string? search)
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.DataInbox.AsQueryable();
if (!string.IsNullOrEmpty(search))
{
query = query.Where(x => x.Name.Contains(search) || x.Source.Contains(search));
}
var totalCount = query.Count();
var items = query
.OrderByDescending(x => x.CreatedAt)
.Skip(start)
.Take(limit)
.AsNoTracking()
.Select(x => new DataInboxDto
{
Id = x.Id,
Name = x.Name,
Source = x.Source,
Data = x.Data,
CreatedAt = x.CreatedAt
})
.ToList();
var pagedResult = new PagedResult<DataInboxDto>
{
Items = items,
TotalCount = totalCount,
Page = (start / limit) + 1,
PageSize = limit
};
_logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} data inbox items (page {Page}) with filter search={Search}",
items.Count, totalCount, pagedResult.Page, search);
return Ok(pagedResult);
}
catch (Exception e)
{
_logger.LogError(e, "GetAll: Error retrieving data inbox items");
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("{id:guid}")]
public IActionResult Get(Guid id)
{
try
{
var dataInbox = _db.DataInbox
.AsNoTracking()
.FirstOrDefault(x => x.Id == id);
if (dataInbox == null)
{
_logger.LogWarning("Get: Data inbox item {Id} not found", id);
return NotFound();
}
var dto = new DataInboxDto
{
Id = dataInbox.Id,
Name = dataInbox.Name,
Source = dataInbox.Source,
Data = dataInbox.Data,
CreatedAt = dataInbox.CreatedAt
};
_logger.LogDebug("Get: Retrieved data inbox item {Id} {Name}", id, dataInbox.Name);
return Ok(dto);
}
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving data inbox item {Id}", id);
return BadRequest("An error occurred processing your request");
}
}
// helpers
private bool IsBase64String(string data)
{
if (string.IsNullOrEmpty(data))
{
return false;
}
try
{
var base64Bytes = Convert.FromBase64String(data);
var utf8String = Encoding.UTF8.GetString(base64Bytes);
var reEncoded = Convert.ToBase64String(Encoding.UTF8.GetBytes(utf8String));
return data.TrimEnd('=') == reEncoded.TrimEnd('=');
}
catch (FormatException)
{
return false;
}
catch (DecoderFallbackException)
{
return false;
}
}
}
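An external producer would call the Add endpoint roughly like this (a sketch: URL, credentials, and payload values are placeholders; the Basic Authorization header, the apiKey route segment, the base64-encoded Data requirement, and the 10MB cap come from the controller above):

using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text;
using System.Threading.Tasks;

class DataInboxProducerSketch
{
    static async Task Main()
    {
        using var http = new HttpClient { BaseAddress = new Uri("https://example.invalid/") };

        // Basic auth is checked against apiUser/apiPass in configuration.
        var basic = Convert.ToBase64String(Encoding.UTF8.GetBytes("user:pass"));
        http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", basic);

        var payload = new
        {
            source = "example-source",
            name = "example-name",
            // Data must be valid base64 (and under 10MB) or the endpoint rejects it.
            data = Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"rows\":[]}"))
        };
        var response = await http.PutAsJsonAsync("DataInbox/Add/the-api-key", payload);
        Console.WriteLine(response.StatusCode);
    }
}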

View File

@@ -0,0 +1,507 @@
using DiunaBI.API.Attributes;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class JobsController : Controller
{
private readonly AppDbContext _db;
private readonly JobSchedulerService _jobScheduler;
private readonly IConfiguration _configuration;
private readonly ILogger<JobsController> _logger;
public JobsController(
AppDbContext db,
JobSchedulerService jobScheduler,
IConfiguration configuration,
ILogger<JobsController> logger)
{
_db = db;
_jobScheduler = jobScheduler;
_configuration = configuration;
_logger = logger;
}
[HttpGet]
[Route("")]
public async Task<IActionResult> GetAll(
[FromQuery] int start = 0,
[FromQuery] int limit = 50,
[FromQuery] List<JobStatus>? statuses = null,
[FromQuery] JobType? jobType = null,
[FromQuery] Guid? layerId = null)
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.QueueJobs.AsQueryable();
if (statuses != null && statuses.Count > 0)
{
query = query.Where(j => statuses.Contains(j.Status));
}
if (jobType.HasValue)
{
query = query.Where(j => j.JobType == jobType.Value);
}
if (layerId.HasValue)
{
query = query.Where(j => j.LayerId == layerId.Value);
}
var totalCount = await query.CountAsync();
// Sort by: CreatedAt DESC (newest first), then Priority ASC (0=highest)
var items = await query
.OrderByDescending(j => j.CreatedAt)
.ThenBy(j => j.Priority)
.Skip(start)
.Take(limit)
.AsNoTracking()
.ToListAsync();
var pagedResult = new PagedResult<QueueJob>
{
Items = items,
TotalCount = totalCount,
Page = (start / limit) + 1,
PageSize = limit
};
_logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} jobs", items.Count, totalCount);
return Ok(pagedResult);
}
catch (Exception ex)
{
_logger.LogError(ex, "GetAll: Error retrieving jobs");
return BadRequest("An error occurred while retrieving jobs");
}
}
[HttpGet]
[Route("{id:guid}")]
public async Task<IActionResult> Get(Guid id)
{
try
{
var job = await _db.QueueJobs
.AsNoTracking()
.FirstOrDefaultAsync(j => j.Id == id);
if (job == null)
{
_logger.LogWarning("Get: Job {JobId} not found", id);
return NotFound("Job not found");
}
_logger.LogDebug("Get: Retrieved job {JobId}", id);
return Ok(job);
}
catch (Exception ex)
{
_logger.LogError(ex, "Get: Error retrieving job {JobId}", id);
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleJobs([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
_logger.LogInformation("ScheduleJobs: Created {Count} jobs", jobsCreated);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleJobs: Error scheduling jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule/imports")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleImportJobs([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
_logger.LogInformation("ScheduleImportJobs: Created {Count} import jobs", jobsCreated);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} import jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleImportJobs: Error scheduling import jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule/processes")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleProcessJobs()
{
try
{
var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
_logger.LogInformation("ScheduleProcessJobs: Created {Count} process jobs", jobsCreated);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} process jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleProcessJobs: Error scheduling process jobs");
return BadRequest("An error occurred processing your request");
}
}
// UI-friendly endpoints (JWT auth)
[HttpPost]
[Route("ui/schedule")]
public async Task<IActionResult> ScheduleJobsUI([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
_logger.LogInformation("ScheduleJobsUI: Created {Count} jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleJobsUI: Error scheduling jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("ui/schedule/imports")]
public async Task<IActionResult> ScheduleImportJobsUI([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
_logger.LogInformation("ScheduleImportJobsUI: Created {Count} import jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} import jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleImportJobsUI: Error scheduling import jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("ui/schedule/processes")]
public async Task<IActionResult> ScheduleProcessJobsUI()
{
try
{
var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
_logger.LogInformation("ScheduleProcessJobsUI: Created {Count} process jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} process jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleProcessJobsUI: Error scheduling process jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("{id:guid}/retry")]
public async Task<IActionResult> RetryJob(Guid id)
{
try
{
var job = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
if (job == null)
{
_logger.LogWarning("RetryJob: Job {JobId} not found", id);
return NotFound("Job not found");
}
if (job.Status != JobStatus.Failed)
{
_logger.LogWarning("RetryJob: Job {JobId} is not in Failed status (current: {Status})", id, job.Status);
return BadRequest($"Job is not in Failed status (current: {job.Status})");
}
job.Status = JobStatus.Pending;
job.RetryCount = 0;
job.LastError = null;
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
await _db.SaveChangesAsync();
_logger.LogInformation("RetryJob: Job {JobId} reset to Pending status", id);
return Ok(new
{
success = true,
message = "Job reset to Pending status and will be retried"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "RetryJob: Error retrying job {JobId}", id);
return BadRequest("An error occurred processing your request");
}
}
[HttpDelete]
[Route("{id:guid}")]
public async Task<IActionResult> CancelJob(Guid id)
{
try
{
var job = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
if (job == null)
{
_logger.LogWarning("CancelJob: Job {JobId} not found", id);
return NotFound("Job not found");
}
if (job.Status == JobStatus.Running)
{
_logger.LogWarning("CancelJob: Cannot cancel running job {JobId}", id);
return BadRequest("Cannot cancel a job that is currently running");
}
if (job.Status == JobStatus.Completed)
{
_logger.LogWarning("CancelJob: Cannot cancel completed job {JobId}", id);
return BadRequest("Cannot cancel a completed job");
}
job.Status = JobStatus.Failed;
job.LastError = "Cancelled by user";
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
await _db.SaveChangesAsync();
_logger.LogInformation("CancelJob: Job {JobId} cancelled", id);
return Ok(new
{
success = true,
message = "Job cancelled successfully"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "CancelJob: Error cancelling job {JobId}", id);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("stats")]
public async Task<IActionResult> GetStats()
{
try
{
var stats = new
{
pending = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Pending),
running = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Running),
completed = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Completed),
failed = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Failed),
retrying = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Retrying),
total = await _db.QueueJobs.CountAsync()
};
_logger.LogDebug("GetStats: Retrieved job statistics");
return Ok(stats);
}
catch (Exception ex)
{
_logger.LogError(ex, "GetStats: Error retrieving job statistics");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("create-for-layer/{layerId:guid}")]
public async Task<IActionResult> CreateJobForLayer(Guid layerId)
{
try
{
var layer = await _db.Layers
.Include(x => x.Records)
.FirstOrDefaultAsync(l => l.Id == layerId);
if (layer == null)
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} not found", layerId);
return NotFound($"Layer {layerId} not found");
}
if (layer.Type != LayerType.Administration)
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not an Administration layer", layerId);
return BadRequest("Only Administration layers can be run as jobs");
}
// Get the Type record to determine if it's ImportWorker or ProcessWorker
var typeRecord = layer.Records?.FirstOrDefault(x => x.Code == "Type");
if (typeRecord?.Desc1 != "ImportWorker" && typeRecord?.Desc1 != "ProcessWorker")
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not a valid worker type", layerId);
return BadRequest("Layer must be an ImportWorker or ProcessWorker");
}
// Check if enabled
var isEnabledRecord = layer.Records?.FirstOrDefault(x => x.Code == "IsEnabled");
if (isEnabledRecord?.Desc1 != "True")
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not enabled", layerId);
return BadRequest("Layer is not enabled");
}
// Get plugin name
var pluginRecord = layer.Records?.FirstOrDefault(x => x.Code == "Plugin");
if (string.IsNullOrEmpty(pluginRecord?.Desc1))
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} has no Plugin configured", layerId);
return BadRequest("Layer has no Plugin configured");
}
// Get priority and max retries
var priorityRecord = layer.Records?.FirstOrDefault(x => x.Code == "Priority");
var maxRetriesRecord = layer.Records?.FirstOrDefault(x => x.Code == "MaxRetries");
var priority = int.TryParse(priorityRecord?.Desc1, out var p) ? p : 0;
var maxRetries = int.TryParse(maxRetriesRecord?.Desc1, out var m) ? m : 3;
var jobType = typeRecord.Desc1 == "ImportWorker" ? JobType.Import : JobType.Process;
// Check if there's already a pending/running job for this layer
var existingJob = await _db.QueueJobs
.Where(j => j.LayerId == layer.Id &&
(j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
.FirstOrDefaultAsync();
if (existingJob != null)
{
_logger.LogInformation("CreateJobForLayer: Job already exists for layer {LayerId}, returning existing job", layerId);
return Ok(new
{
success = true,
jobId = existingJob.Id,
message = "Job already exists for this layer",
existing = true
});
}
// Create the job
var job = new QueueJob
{
Id = Guid.NewGuid(),
LayerId = layer.Id,
LayerName = layer.Name ?? "Unknown",
PluginName = pluginRecord.Desc1,
JobType = jobType,
Priority = priority,
MaxRetries = maxRetries,
Status = JobStatus.Pending,
CreatedAt = DateTime.UtcNow,
ModifiedAt = DateTime.UtcNow,
CreatedById = DiunaBI.Domain.Entities.User.AutoImportUserId,
ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId
};
_db.QueueJobs.Add(job);
await _db.SaveChangesAsync();
_logger.LogInformation("CreateJobForLayer: Created job {JobId} for layer {LayerName} ({LayerId})",
job.Id, layer.Name, layerId);
return Ok(new
{
success = true,
jobId = job.Id,
message = "Job created successfully",
existing = false
});
}
catch (Exception ex)
{
_logger.LogError(ex, "CreateJobForLayer: Error creating job for layer {LayerId}", layerId);
return BadRequest("An error occurred processing your request");
}
}
}
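For completeness, a rough sketch of how a cron job might hit the API-key-protected schedule endpoints (base URL, environment variable, and the nameFilter value are illustrative; the header name and routes come from the controller above):

using System;
using System.Net.Http;
using System.Threading.Tasks;

class ScheduleCronClientSketch
{
    static async Task Main()
    {
        using var client = new HttpClient { BaseAddress = new Uri("https://example.invalid/") };
        client.DefaultRequestHeaders.Add("X-API-Key",
            Environment.GetEnvironmentVariable("DIUNABI_API_KEY") ?? "");

        // POST /Jobs/schedule/imports is [AllowAnonymous] + [ApiKeyAuth],
        // so the header above is the only credential required.
        var response = await client.PostAsync("Jobs/schedule/imports?nameFilter=Import-D3", null);
        Console.WriteLine($"{(int)response.StatusCode}: {await response.Content.ReadAsStringAsync()}");
    }
}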

View File

@@ -1,18 +1,21 @@
using System.Globalization;
using System.Text;
+using System.Text.Json;
using Google.Apis.Sheets.v4;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
-using DiunaBI.Core.Services;
-using DiunaBI.Core.Interfaces;
+using DiunaBI.Application.DTOModels;
+using DiunaBI.Application.DTOModels.Common;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
+using DiunaBI.Infrastructure.Services;
-namespace DiunaBI.WebAPI.Controllers;
+namespace DiunaBI.API.Controllers;
+[Authorize]
[ApiController]
-[Route("api/[controller]")]
+[Route("[controller]")]
public class LayersController : Controller
{
private readonly AppDbContext _db;
@@ -40,34 +43,73 @@ public class LayersController : Controller
}
[HttpGet]
-public IActionResult GetAll(int start, int limit, string? name, LayerType? type)
+[Route("")]
+public IActionResult GetAll([FromQuery] int start, [FromQuery] int limit, [FromQuery] string? name, [FromQuery] Domain.Entities.LayerType? type)
{
try
{
-var response = _db.Layers.Where(x => !x.IsDeleted);
+// Validate pagination parameters
+if (limit <= 0 || limit > 1000)
+{
+return BadRequest("Limit must be between 1 and 1000");
+}
+if (start < 0)
+{
+return BadRequest("Start must be non-negative");
+}
+var query = _db.Layers.Where(x => !x.IsDeleted);
if (name != null)
{
-response = response.Where(x => x.Name != null && x.Name.Contains(name));
+query = query.Where(x => x.Name != null && x.Name.Contains(name));
}
-if (type != null)
+if (type.HasValue)
{
-response = response.Where(x => x.Type == type);
+query = query.Where(x => x.Type == type.Value);
}
-var result = response
+var totalCount = query.Count();
+var items = query
.OrderByDescending(x => x.Number)
-.Skip(start).Take(limit).AsNoTracking().ToList();
+.Skip(start)
+.Take(limit)
+.AsNoTracking()
+.Select(x => new LayerDto
+{
+Id = x.Id,
+Number = x.Number,
+Name = x.Name,
+Type = (Application.DTOModels.LayerType)x.Type,
+CreatedAt = x.CreatedAt,
+ModifiedAt = x.ModifiedAt,
+CreatedById = x.CreatedById,
+ModifiedById = x.ModifiedById,
+IsDeleted = x.IsDeleted,
+IsCancelled = x.IsCancelled,
+ParentId = x.ParentId
+})
+.ToList();
-_logger.LogDebug("GetAll: Retrieved {Count} layers with filter name={Name}, type={Type}",
-result.Count, name, type);
+var pagedResult = new PagedResult<LayerDto>
+{
+Items = items,
+TotalCount = totalCount,
+Page = (start / limit) + 1,
+PageSize = limit
+};
-return Ok(result);
+_logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} layers (page {Page}) with filter name={Name}, type={Type}",
+items.Count, totalCount, pagedResult.Page, name, type);
+return Ok(pagedResult);
}
catch (Exception e)
{
_logger.LogError(e, "GetAll: Error retrieving layers");
-return BadRequest(e.ToString());
+return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
@@ -87,59 +129,12 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving layer {LayerId}", id);
-return BadRequest(e.ToString());
-}
-}
-[HttpGet]
-[Route("getForPowerBI/{apiKey}/{number:int}")]
-public IActionResult GetByNumber(string apiKey, int number)
-{
-if (apiKey != _configuration["apiKey"])
-{
-_logger.LogWarning("PowerBI: Unauthorized request - wrong apiKey for layer {LayerNumber}", number);
-return Unauthorized();
-}
-try
-{
-if (!Request.Headers.TryGetValue("Authorization", out var authHeader))
-{
-_logger.LogWarning("PowerBI: Unauthorized request - no authorization header for layer {LayerNumber}", number);
-return Unauthorized();
-}
-var credentialsArr = authHeader.ToString().Split(" ");
-if (credentialsArr.Length != 2)
-{
-_logger.LogWarning("PowerBI: Unauthorized request - wrong auth header format for layer {LayerNumber}", number);
-return Unauthorized();
-}
-var authValue = Encoding.UTF8.GetString(Convert.FromBase64String(credentialsArr[1]));
-var username = authValue.Split(':')[0];
-var password = authValue.Split(':')[1];
-if (username != _configuration["powerBI-user"] || password != _configuration["powerBI-pass"])
-{
-_logger.LogWarning("PowerBI: Unauthorized request - bad credentials for layer {LayerNumber}", number);
-return Unauthorized();
-}
-_logger.LogInformation("PowerBI: Sending data for layer {LayerNumber}", number);
-var layer = _db.Layers
-.Include(x => x.CreatedBy)
-.Include(x => x.Records).AsNoTracking().First(x => x.Number == number && !x.IsDeleted);
-return Ok(layer);
-}
-catch (Exception e)
-{
-_logger.LogError(e, "PowerBI: Error occurred while processing layer {LayerNumber}", number);
-return BadRequest(e.ToString());
-}
+return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("getConfiguration/{apiKey}/{number:int}")]
+[AllowAnonymous]
public IActionResult GetConfigurationByNumber(string apiKey, int number)
{
if (apiKey != _configuration["apiKey"])
@@ -166,7 +161,7 @@ public class LayersController : Controller
var authValue = Encoding.UTF8.GetString(Convert.FromBase64String(credentialsArr[1]));
var username = authValue.Split(':')[0];
var password = authValue.Split(':')[1];
-if (username != _configuration["morska-user"] || password != _configuration["morska-pass"])
+if (username != _configuration["apiUser"] || password != _configuration["apiPass"])
{
_logger.LogWarning("Configuration: Unauthorized request - bad credentials for layer {LayerNumber}", number);
return Unauthorized();
@@ -253,7 +248,7 @@ public class LayersController : Controller
[AllowAnonymous]
public IActionResult AutoImport(string apiKey, string nameFilter)
{
-if (Request.Host.Value != _configuration["apiLocalUrl"] || apiKey != _configuration["apiKey"])
+if (apiKey != _configuration["apiKey"])
{
_logger.LogWarning("AutoImport: Unauthorized request with apiKey {ApiKey}", apiKey);
return Unauthorized();
@@ -411,7 +406,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "AutoImport: Process error");
-return BadRequest(e.ToString());
+return BadRequest("An error occurred processing your request");
}
}
@@ -420,9 +415,9 @@ public class LayersController : Controller
[AllowAnonymous]
public IActionResult AutoProcess(string apiKey)
{
-if (Request.Host.Value != _configuration["apiLocalUrl"] || apiKey != _configuration["apiKey"])
+if (apiKey != _configuration["apiKey"])
{
_logger.LogWarning("AutoProcess: Unauthorized request with apiKey {ApiKey}", apiKey);
return Unauthorized();
}
@@ -744,4 +739,398 @@ public class LayersController : Controller
throw;
}
}
// Record CRUD operations
[HttpPost]
[Route("{layerId:guid}/records")]
public IActionResult CreateRecord(Guid layerId, [FromBody] RecordDto recordDto)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("CreateRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("CreateRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("CreateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
if (string.IsNullOrWhiteSpace(recordDto.Code))
{
return BadRequest("Code is required");
}
if (string.IsNullOrWhiteSpace(recordDto.Desc1))
{
return BadRequest("Desc1 is required");
}
var record = new Record
{
Id = Guid.NewGuid(),
Code = recordDto.Code,
Desc1 = recordDto.Desc1,
LayerId = layerId,
CreatedAt = DateTime.UtcNow,
ModifiedAt = DateTime.UtcNow,
CreatedById = Guid.Parse(userId),
ModifiedById = Guid.Parse(userId),
IsDeleted = false
};
_db.Records.Add(record);
// Capture history
CaptureRecordHistory(record, RecordChangeType.Created, Guid.Parse(userId));
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("CreateRecord: Created record {RecordId} in layer {LayerId}", record.Id, layerId);
return Ok(new RecordDto
{
Id = record.Id,
Code = record.Code,
Desc1 = record.Desc1,
LayerId = record.LayerId,
CreatedAt = record.CreatedAt,
ModifiedAt = record.ModifiedAt,
CreatedById = record.CreatedById,
ModifiedById = record.ModifiedById
});
}
catch (Exception e)
{
_logger.LogError(e, "CreateRecord: Error creating record in layer {LayerId}", layerId);
return BadRequest("An error occurred processing your request");
}
}
[HttpPut]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult UpdateRecord(Guid layerId, Guid recordId, [FromBody] RecordDto recordDto)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("UpdateRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("UpdateRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("UpdateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
var record = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
if (record == null)
{
_logger.LogWarning("UpdateRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
return NotFound("Record not found");
}
if (string.IsNullOrWhiteSpace(recordDto.Code))
{
return BadRequest("Code is required");
}
if (string.IsNullOrWhiteSpace(recordDto.Desc1))
{
return BadRequest("Desc1 is required");
}
// Capture old values before updating
var oldCode = record.Code;
var oldDesc1 = record.Desc1;
record.Desc1 = recordDto.Desc1;
record.ModifiedAt = DateTime.UtcNow;
record.ModifiedById = Guid.Parse(userId);
// Capture history
CaptureRecordHistory(record, RecordChangeType.Updated, Guid.Parse(userId), oldCode, oldDesc1);
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("UpdateRecord: Updated record {RecordId} in layer {LayerId}", recordId, layerId);
return Ok(new RecordDto
{
Id = record.Id,
Code = record.Code,
Desc1 = record.Desc1,
LayerId = record.LayerId,
CreatedAt = record.CreatedAt,
ModifiedAt = record.ModifiedAt,
CreatedById = record.CreatedById,
ModifiedById = record.ModifiedById
});
}
catch (Exception e)
{
_logger.LogError(e, "UpdateRecord: Error updating record {RecordId} in layer {LayerId}", recordId, layerId);
return BadRequest("An error occurred processing your request");
}
}
[HttpDelete]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult DeleteRecord(Guid layerId, Guid recordId)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("DeleteRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("DeleteRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("DeleteRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
var record = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
if (record == null)
{
_logger.LogWarning("DeleteRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
return NotFound("Record not found");
}
// Capture history before deleting
CaptureRecordHistory(record, RecordChangeType.Deleted, Guid.Parse(userId));
_db.Records.Remove(record);
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("DeleteRecord: Deleted record {RecordId} from layer {LayerId}", recordId, layerId);
return Ok();
}
catch (Exception e)
{
_logger.LogError(e, "DeleteRecord: Error deleting record {RecordId} from layer {LayerId}", recordId, layerId);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("{layerId:guid}/records/{recordId:guid}/history")]
public IActionResult GetRecordHistory(Guid layerId, Guid recordId)
{
try
{
var history = _db.RecordHistory
.Include(h => h.ChangedBy)
.Where(h => h.RecordId == recordId && h.LayerId == layerId)
.OrderByDescending(h => h.ChangedAt)
.AsNoTracking()
.Select(h => new RecordHistoryDto
{
Id = h.Id,
RecordId = h.RecordId,
LayerId = h.LayerId,
ChangedAt = h.ChangedAt,
ChangedById = h.ChangedById,
ChangedByName = h.ChangedBy != null ? h.ChangedBy.UserName ?? h.ChangedBy.Email : "Unknown",
ChangeType = h.ChangeType.ToString(),
Code = h.Code,
Desc1 = h.Desc1,
ChangedFields = h.ChangedFields,
ChangesSummary = h.ChangesSummary,
FormattedChange = FormatHistoryChange(h)
})
.ToList();
_logger.LogDebug("GetRecordHistory: Retrieved {Count} history entries for record {RecordId}", history.Count, recordId);
return Ok(history);
}
catch (Exception e)
{
_logger.LogError(e, "GetRecordHistory: Error retrieving history for record {RecordId}", recordId);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("{layerId:guid}/records/deleted")]
public IActionResult GetDeletedRecords(Guid layerId)
{
try
{
// Get the most recent "Deleted" history entry for each unique RecordId in this layer
// First, get all deleted record history entries
var deletedHistoryEntries = _db.RecordHistory
.Where(h => h.LayerId == layerId && h.ChangeType == RecordChangeType.Deleted)
.ToList();
// Group in memory and get the most recent deletion for each record
var mostRecentDeletes = deletedHistoryEntries
.GroupBy(h => h.RecordId)
.Select(g => g.OrderByDescending(h => h.ChangedAt).First())
.ToList();
// Get all unique user IDs from the history entries
var userIds = mostRecentDeletes.Select(h => h.ChangedById).Distinct().ToList();
// Load the users
var users = _db.Users
.Where(u => userIds.Contains(u.Id))
.ToDictionary(u => u.Id, u => u.UserName ?? string.Empty);
// Build the DTOs
var deletedRecords = mostRecentDeletes
.Select(h => new DeletedRecordDto
{
RecordId = h.RecordId,
Code = h.Code,
Desc1 = h.Desc1,
DeletedAt = h.ChangedAt,
DeletedById = h.ChangedById,
DeletedByName = users.TryGetValue(h.ChangedById, out var userName) ? userName : string.Empty
})
.OrderByDescending(d => d.DeletedAt)
.ToList();
_logger.LogDebug("GetDeletedRecords: Retrieved {Count} deleted records for layer {LayerId}", deletedRecords.Count, layerId);
return Ok(deletedRecords);
}
catch (Exception e)
{
_logger.LogError(e, "GetDeletedRecords: Error retrieving deleted records for layer {LayerId}", layerId);
return BadRequest("An error occurred processing your request");
}
}
// Helper method to capture record history
private void CaptureRecordHistory(Record record, RecordChangeType changeType, Guid userId, string? oldCode = null, string? oldDesc1 = null)
{
var changedFields = new List<string>();
var changesSummary = new Dictionary<string, Dictionary<string, string?>>();
if (changeType == RecordChangeType.Updated)
{
if (oldCode != record.Code)
{
changedFields.Add("Code");
changesSummary["Code"] = new Dictionary<string, string?>
{
["old"] = oldCode,
["new"] = record.Code
};
}
if (oldDesc1 != record.Desc1)
{
changedFields.Add("Desc1");
changesSummary["Desc1"] = new Dictionary<string, string?>
{
["old"] = oldDesc1,
["new"] = record.Desc1
};
}
}
var history = new RecordHistory
{
Id = Guid.NewGuid(),
RecordId = record.Id,
LayerId = record.LayerId,
ChangedAt = DateTime.UtcNow,
ChangedById = userId,
ChangeType = changeType,
Code = record.Code,
Desc1 = record.Desc1,
ChangedFields = changedFields.Any() ? string.Join(", ", changedFields) : null,
ChangesSummary = changesSummary.Any() ? JsonSerializer.Serialize(changesSummary) : null
};
_db.RecordHistory.Add(history);
_logger.LogInformation("CaptureRecordHistory: Captured {ChangeType} for record {RecordId}", changeType, record.Id);
}
// Helper method to format history change for display
private static string FormatHistoryChange(RecordHistory h)
{
if (h.ChangeType == RecordChangeType.Created)
{
return $"Created record with Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
}
if (h.ChangeType == RecordChangeType.Deleted)
{
return $"Deleted record Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
}
// Updated
if (!string.IsNullOrEmpty(h.ChangesSummary))
{
try
{
var changes = JsonSerializer.Deserialize<Dictionary<string, Dictionary<string, string?>>>(h.ChangesSummary);
if (changes != null)
{
var parts = new List<string>();
foreach (var (field, values) in changes)
{
var oldVal = values.GetValueOrDefault("old") ?? "empty";
var newVal = values.GetValueOrDefault("new") ?? "empty";
parts.Add($"{field}: \"{oldVal}\" → \"{newVal}\"");
}
return $"Updated: {string.Join(", ", parts)}";
}
}
catch
{
// Fall back to simple message
}
}
return $"Updated {h.ChangedFields ?? "record"}";
}
}
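One note on the history payload: for an Updated entry, ChangesSummary above serializes to a nested dictionary of the form {"Desc1":{"old":"previous value","new":"current value"}}, which FormatHistoryChange then renders as Updated: Desc1: "previous value" → "current value".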

View File

@@ -0,0 +1,50 @@
using DiunaBI.Infrastructure.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
namespace DiunaBI.API.Controllers;
[ApiController]
[Route("[controller]")]
[Authorize]
public class TestsController : Controller
{
private readonly PluginManager _pluginManager;
private readonly ILogger<TestsController> _logger;
public TestsController(
PluginManager pluginManager,
ILogger<TestsController> logger)
{
_pluginManager = pluginManager;
_logger = logger;
}
[HttpGet]
[Route("Ping")]
[AllowAnonymous]
public IActionResult Ping()
{
var tmp = new
{
a = 2,
b = "test"
};
var tmp2 = new
{
a = 2,
b = "test"
};
var user = User.Identity;
_logger.LogInformation("LogTest: OldValue {tmp}, NewValue {tmp2}, ChangedBy: {user}", tmp, tmp2, user?.Name);
return Ok("Pong");
}
[HttpGet]
[Route("Plugins")]
[AllowAnonymous]
public IActionResult GetPlugins()
{
var plugins = _pluginManager.GetPluginsCount();
return Ok(plugins);
}
}

View File

@@ -1,26 +1,31 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
-<TargetFramework>net8.0</TargetFramework>
+<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
-<RootNamespace>DiunaBI.WebAPI</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Google.Cloud.Firestore" Version="3.4.0" />
-<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="8.0.0" />
+<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
<PackageReference Include="Google.Apis.Auth" Version="1.68.0" />
<PackageReference Include="Google.Apis.Drive.v3" Version="1.68.0.3627" />
<PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3624" />
-<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="8.0.0" />
+<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="10.0.0" />
+<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.0">
+<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+<PrivateAssets>all</PrivateAssets>
+</PackageReference>
<PackageReference Include="Serilog.AspNetCore" Version="9.0.0" />
<PackageReference Include="Serilog.Enrichers.Environment" Version="3.0.1" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
-<PackageReference Include="Serilog.Sinks.Seq" Version="9.0.0" />
-<PackageReference Include="System.Configuration.ConfigurationManager" Version="8.0.0" />
+<PackageReference Include="System.Configuration.ConfigurationManager" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
-<ProjectReference Include="..\DiunaBI.Core\DiunaBI.Core.csproj" />
+<ProjectReference Include="..\DiunaBI.Infrastructure\DiunaBI.Infrastructure.csproj" />
+<ProjectReference Include="..\DiunaBI.Application\DiunaBI.Application.csproj" />
</ItemGroup>
<ItemGroup>
@@ -31,11 +36,13 @@
</Content>
</ItemGroup>
-<Target Name="CopyPlugins" AfterTargets="Build">
+<Target Name="CopyPlugins" AfterTargets="Build" Condition="'$(SkipPluginCopy)' != 'true'">
<MSBuild Projects="../DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
+<MSBuild Projects="../DiunaBI.Plugins.PedrolloPL/DiunaBI.Plugins.PedrolloPL.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
<ItemGroup>
<PluginFiles Include="../DiunaBI.Plugins.Morska/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.Morska.dll" />
+<PluginFiles Include="../DiunaBI.Plugins.PedrolloPL/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.PedrolloPL.dll" />
</ItemGroup>
<MakeDir Directories="$(OutputPath)Plugins" />
<Copy SourceFiles="@(PluginFiles)" DestinationFolder="$(OutputPath)Plugins" />

DiunaBI.API/Dockerfile
View File

@@ -0,0 +1,59 @@
# Stage 1: Build
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG PLUGIN_PROJECT=DiunaBI.Plugins.Morska
WORKDIR /
# Copy solution and all project files for restore
COPY DiunaBI.sln ./
COPY DiunaBI.API/DiunaBI.API.csproj DiunaBI.API/
COPY DiunaBI.Domain/DiunaBI.Domain.csproj DiunaBI.Domain/
COPY DiunaBI.Application/DiunaBI.Application.csproj DiunaBI.Application/
COPY DiunaBI.Infrastructure/DiunaBI.Infrastructure.csproj DiunaBI.Infrastructure/
COPY ${PLUGIN_PROJECT}/${PLUGIN_PROJECT}.csproj ${PLUGIN_PROJECT}/
# Restore dependencies
RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
# Copy all source code
COPY . .
# Build plugin first
WORKDIR /${PLUGIN_PROJECT}
RUN dotnet build -c Release
# Build and publish API (skip automatic plugin copy since we handle it manually)
WORKDIR /DiunaBI.API
RUN dotnet publish -c Release -o /app/publish --no-restore -p:SkipPluginCopy=true
# Copy plugin DLL to publish output
RUN mkdir -p /app/publish/Plugins && \
cp /${PLUGIN_PROJECT}/bin/Release/net10.0/${PLUGIN_PROJECT}.dll /app/publish/Plugins/
# Stage 2: Runtime
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS runtime
WORKDIR /app
# Install wget for health checks
RUN apt-get update && apt-get install -y wget && rm -rf /var/lib/apt/lists/*
# Set timezone
ENV TZ=Europe/Warsaw
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Copy published files
COPY --from=build /app/publish .
# Set environment variables (can be overridden)
ENV ASPNETCORE_ENVIRONMENT=Production
ENV ASPNETCORE_URLS=http://0.0.0.0:7142
# Expose port (default, can be remapped in docker-compose)
EXPOSE 7142
# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \
CMD wget --no-verbose --tries=1 --spider http://localhost:7142/health || exit 1
# Run the application
ENTRYPOINT ["dotnet", "DiunaBI.API.dll"]
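Assuming the image is built from the repository root, the plugin baked into the image is selected at build time with something like: docker build -f DiunaBI.API/Dockerfile --build-arg PLUGIN_PROJECT=DiunaBI.Plugins.PedrolloPL -t diunabi-api:pedrollopl . (the tag name is illustrative). The PLUGIN_PROJECT build arg drives which plugin DLL stage 1 compiles and copies into /app/publish/Plugins, and -p:SkipPluginCopy=true suppresses the csproj's CopyPlugins target so only that one plugin ships in the image.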

View File

@@ -0,0 +1,15 @@
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.SignalR;
namespace DiunaBI.API.Hubs;
/// <summary>
/// SignalR hub for broadcasting entity change notifications to authenticated clients.
/// Clients can only listen - broadcasting is done server-side by EntityChangeInterceptor.
/// </summary>
[Authorize]
public class EntityChangeHub : Hub
{
// No public methods - clients can only listen for "EntityChanged" events
// Broadcasting is handled server-side by EntityChangeInterceptor via IHubContext
}
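A minimal listener sketch using the SignalR .NET client (requires the Microsoft.AspNetCore.SignalR.Client package). The hub path, payload shape, and URL here are assumptions; only the "EntityChanged" event name and the [Authorize] requirement come from the hub above:

using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.SignalR.Client;

class EntityChangeListenerSketch
{
    static async Task Main()
    {
        var connection = new HubConnectionBuilder()
            .WithUrl("https://example.invalid/hubs/entityChange", options =>
            {
                // The hub is [Authorize], so pass a JWT with the connection.
                options.AccessTokenProvider = () => Task.FromResult<string?>("jwt-from-Auth/apiToken");
            })
            .WithAutomaticReconnect()
            .Build();

        // Listen only - broadcasting is done server-side by EntityChangeInterceptor.
        connection.On<string, string>("EntityChanged",
            (entityType, id) => Console.WriteLine($"{entityType} {id} changed"));

        await connection.StartAsync();
        await Task.Delay(Timeout.Infinite);
    }
}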

DiunaBI.API/Program.cs
View File

@@ -0,0 +1,324 @@
using Microsoft.AspNetCore.Authentication.JwtBearer;
using Microsoft.AspNetCore.RateLimiting;
using Microsoft.EntityFrameworkCore;
using Microsoft.IdentityModel.Tokens;
using System.IdentityModel.Tokens.Jwt;
using System.Reflection;
using System.Text;
using System.Threading.RateLimiting;
using DiunaBI.API.Hubs;
using DiunaBI.API.Services;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Interceptors;
using DiunaBI.Infrastructure.Services;
using Google.Apis.Sheets.v4;
using Serilog;
var builder = WebApplication.CreateBuilder(args);
if (builder.Environment.IsProduction())
{
builder.Host.UseSerilog((context, configuration) =>
{
var instanceName = context.Configuration["InstanceName"] ?? "unknown";
configuration
.ReadFrom.Configuration(context.Configuration)
.Enrich.FromLogContext()
.Enrich.WithProperty("Application", $"DiunaBI-{instanceName}")
.Enrich.WithProperty("Version", Assembly.GetExecutingAssembly().GetName().Version?.ToString() ?? "unknown")
.Enrich.WithEnvironmentName()
.Enrich.WithMachineName();
});
}
var connectionString = builder.Configuration.GetConnectionString("SQLDatabase");
// Register EntityChangeInterceptor
builder.Services.AddSingleton<EntityChangeInterceptor>();
builder.Services.AddDbContext<AppDbContext>((serviceProvider, options) =>
{
options.UseSqlServer(connectionString, sqlOptions => sqlOptions.MigrationsAssembly("DiunaBI.Infrastructure"));
// Only log SQL parameters in development (may contain sensitive data)
if (builder.Environment.IsDevelopment())
{
options.EnableSensitiveDataLogging();
}
// Add EntityChangeInterceptor
var interceptor = serviceProvider.GetRequiredService<EntityChangeInterceptor>();
options.AddInterceptors(interceptor);
});
builder.Services.AddCors(options =>
{
options.AddPolicy("CORSPolicy", corsPolicyBuilder =>
{
corsPolicyBuilder.WithOrigins("http://localhost:4200")
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials();
corsPolicyBuilder.WithOrigins("https://diuna.bim-it.pl")
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials();
corsPolicyBuilder.WithOrigins("https://morska.diunabi.com")
.AllowAnyMethod()
.AllowAnyHeader()
.AllowCredentials();
});
});
builder.Services.AddControllers();
// Rate Limiting
builder.Services.AddRateLimiter(options =>
{
// Global API rate limit
options.AddFixedWindowLimiter("api", config =>
{
config.PermitLimit = 100;
config.Window = TimeSpan.FromMinutes(1);
config.QueueProcessingOrder = System.Threading.RateLimiting.QueueProcessingOrder.OldestFirst;
config.QueueLimit = 0; // No queueing
});
// Strict limit for authentication endpoint
options.AddFixedWindowLimiter("auth", config =>
{
config.PermitLimit = 10;
config.Window = TimeSpan.FromMinutes(1);
config.QueueProcessingOrder = System.Threading.RateLimiting.QueueProcessingOrder.OldestFirst;
config.QueueLimit = 0;
});
// Rejection response
options.OnRejected = async (context, token) =>
{
context.HttpContext.Response.StatusCode = 429; // Too Many Requests
await context.HttpContext.Response.WriteAsJsonAsync(new
{
error = "Too many requests. Please try again later.",
retryAfter = context.Lease.TryGetMetadata(MetadataName.RetryAfter, out var retryAfter)
? (double?)retryAfter.TotalSeconds
: (double?)null
}, cancellationToken: token);
};
});
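// Note: these named policies only take effect where they are attached,
// e.g. [EnableRateLimiting("auth")] on AuthController.ApiToken above;
// app.UseRateLimiter() later in this file activates the middleware.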
// SignalR
builder.Services.AddSignalR();
builder.Services.AddAuthentication(options =>
{
options.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme;
options.DefaultChallengeScheme = JwtBearerDefaults.AuthenticationScheme;
options.DefaultScheme = JwtBearerDefaults.AuthenticationScheme;
}).AddJwtBearer(options =>
{
options.TokenValidationParameters = new TokenValidationParameters
{
ValidateIssuer = true,
ValidateAudience = true,
ValidateLifetime = true,
ValidateIssuerSigningKey = true,
ValidIssuer = builder.Configuration["JwtSettings:Issuer"],
ValidAudience = builder.Configuration["JwtSettings:Audience"],
IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(builder.Configuration["JwtSettings:SecurityKey"]!))
};
});
builder.Services.AddScoped<GoogleAuthService>();
builder.Services.AddScoped<JwtTokenService>();
// Google Sheets dependencies
Console.WriteLine("Adding Google Sheets dependencies...");
builder.Services.AddSingleton<GoogleSheetsHelper>();
builder.Services.AddSingleton<GoogleDriveHelper>();
builder.Services.AddSingleton<SpreadsheetsResource.ValuesResource>(provider =>
{
var googleSheetsHelper = provider.GetRequiredService<GoogleSheetsHelper>();
var valuesResource = googleSheetsHelper.Service?.Spreadsheets.Values;
if (valuesResource == null)
{
throw new InvalidOperationException("Google Sheets Service is not initialized properly");
}
return valuesResource;
});
builder.Services.AddSingleton<PluginManager>();
// Job Queue Services
builder.Services.AddScoped<JobSchedulerService>();
builder.Services.AddHostedService<JobWorkerService>();
var app = builder.Build();
// Auto-apply migrations on startup
using (var scope = app.Services.CreateScope())
{
var db = scope.ServiceProvider.GetRequiredService<AppDbContext>();
var logger = scope.ServiceProvider.GetRequiredService<ILogger<Program>>();
db.Database.SetCommandTimeout(TimeSpan.FromMinutes(5));
try
{
await db.Database.OpenConnectionAsync();
await db.Database.ExecuteSqlRawAsync(
"EXEC sp_getapplock @Resource = N'DiunaBI_Migrations', @LockMode = 'Exclusive', @LockTimeout = 60000;");
logger.LogInformation("Ensuring database is up to date...");
await db.Database.MigrateAsync();
logger.LogInformation("Database is up to date.");
}
catch (Exception ex)
{
logger.LogCritical(ex, "Migration failed - application will not start.");
throw;
}
finally
{
try
{
await db.Database.ExecuteSqlRawAsync(
"EXEC sp_releaseapplock @Resource = N'DiunaBI_Migrations';");
}
catch { /* ignore */ }
await db.Database.CloseConnectionAsync();
}
}
if (app.Environment.IsProduction())
{
app.UseSerilogRequestLogging(options =>
{
options.MessageTemplate = "HTTP {RequestMethod} {RequestPath} responded {StatusCode} in {Elapsed:0.0000} ms";
options.EnrichDiagnosticContext = (diagnosticContext, httpContext) =>
{
diagnosticContext.Set("RequestHost", httpContext.Request.Host.Value);
diagnosticContext.Set("RequestScheme", httpContext.Request.Scheme);
var userAgent = httpContext.Request.Headers.UserAgent.FirstOrDefault();
if (!string.IsNullOrEmpty(userAgent))
{
diagnosticContext.Set("UserAgent", userAgent);
}
diagnosticContext.Set("RemoteIP", httpContext.Connection.RemoteIpAddress?.ToString() ?? "unknown");
diagnosticContext.Set("RequestContentType", httpContext.Request.ContentType ?? "none");
};
});
}
// Plugin initialization
var pluginManager = app.Services.GetRequiredService<PluginManager>();
var executablePath = Assembly.GetExecutingAssembly().Location;
var executableDir = Path.GetDirectoryName(executablePath)!;
var pluginsPath = Path.Combine(executableDir, "Plugins");
if (app.Environment.IsProduction())
{
Log.Information("Starting DiunaBI application");
Log.Information("Loading plugins from: {PluginsPath}", pluginsPath);
}
else
{
var logger = app.Services.GetRequiredService<ILogger<Program>>();
logger.LogInformation("Starting DiunaBI application (Development)");
logger.LogInformation("Loading plugins from: {PluginsPath}", pluginsPath);
}
pluginManager.LoadPluginsFromDirectory(pluginsPath);
app.UseCors("CORSPolicy");
// Security Headers
app.Use(async (context, next) =>
{
context.Response.Headers.Append("X-Content-Type-Options", "nosniff");
context.Response.Headers.Append("X-Frame-Options", "DENY");
context.Response.Headers.Append("X-XSS-Protection", "1; mode=block");
context.Response.Headers.Append("Referrer-Policy", "strict-origin-when-cross-origin");
await next();
});
app.UseRateLimiter();
app.UseAuthentication();
app.UseAuthorization();
// Middleware to extract UserId from JWT token AFTER authentication
// This must run after UseAuthentication() so the JWT is already validated
app.Use(async (context, next) =>
{
var logger = context.RequestServices.GetRequiredService<ILogger<Program>>();
logger.LogInformation("🔍 UserId Extraction Middleware - Path: {Path}, Method: {Method}",
context.Request.Path, context.Request.Method);
var token = context.Request.Headers.Authorization.ToString();
logger.LogInformation("🔍 Authorization header: {Token}",
string.IsNullOrEmpty(token) ? "NULL/EMPTY" : $"{token[..Math.Min(30, token.Length)]}...");
if (!string.IsNullOrEmpty(token) && token.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
{
try
{
var handler = new JwtSecurityTokenHandler();
var jwtToken = handler.ReadJwtToken(token.Split(' ')[1]);
// Try to get UserId from Subject claim first, then fall back to NameIdentifier
var userId = jwtToken.Subject;
if (string.IsNullOrEmpty(userId))
{
// Try NameIdentifier claim (ClaimTypes.NameIdentifier)
var nameIdClaim = jwtToken.Claims.FirstOrDefault(c =>
c.Type == "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier" ||
c.Type == "nameid");
userId = nameIdClaim?.Value;
}
logger.LogInformation("🔍 JWT UserId: {UserId}", userId ?? "NULL");
if (!string.IsNullOrEmpty(userId))
{
// Use indexer to set/replace header value instead of Append
context.Request.Headers["UserId"] = userId;
logger.LogInformation("✅ Set UserId header to: {UserId}", userId);
}
else
{
logger.LogWarning("❌ UserId not found in JWT claims");
}
}
catch (Exception ex)
{
logger.LogError(ex, "❌ Failed to extract UserId from JWT token");
}
}
await next(context);
});
app.MapControllers();
// SignalR Hub - Requires JWT authentication
app.MapHub<EntityChangeHub>("/hubs/entitychanges").RequireAuthorization();
app.MapGet("/health", () => Results.Ok(new { status = "OK", timestamp = DateTime.UtcNow }))
.AllowAnonymous();
app.Run();
if (app.Environment.IsProduction())
{
Log.CloseAndFlush();
}
// for testing purposes
public partial class Program { }
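
Program.cs registers the "api" and "auth" limiter policies but this diff never shows them being attached to endpoints; a minimal sketch of the two usual ways (an assumption, not part of the commit):

// Option 1: attach a policy to all controller endpoints at once.
app.MapControllers().RequireRateLimiting("api");

// Option 2: opt in per controller or action with the attribute.
// using Microsoft.AspNetCore.RateLimiting;
// [EnableRateLimiting("auth")]
// public class AuthController : ControllerBase { ... }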

View File

@@ -0,0 +1,60 @@
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using Google.Apis.Auth;
using Microsoft.EntityFrameworkCore;
namespace DiunaBI.API.Services;
public class GoogleAuthService(AppDbContext context, IConfiguration configuration, ILogger<GoogleAuthService> logger)
{
private readonly AppDbContext _context = context;
private readonly IConfiguration _configuration = configuration;
private readonly ILogger<GoogleAuthService> _logger = logger;
public async Task<(bool IsValid, User? user, string? error)> ValidateGoogleTokenAsync(string idToken)
{
try
{
var clientId = _configuration["GoogleAuth:ClientId"];
if (string.IsNullOrEmpty(clientId))
{
_logger.LogError("Google Auth Client Id is not configured");
return (false, null, "Google Auth Client Id is not configured");
}
var payload = await GoogleJsonWebSignature.ValidateAsync(idToken,
new GoogleJsonWebSignature.ValidationSettings
{
Audience = new[] { clientId }
});
_logger.LogInformation("Google token validated for user: {Email}", payload.Email);
var user = await _context.Users
.FirstOrDefaultAsync(x => x.Email == payload.Email);
if (user == null)
{
_logger.LogError("User not found in DiunaBI database: {Email}", payload.Email);
return (false, null, "Authentication failed");
}
user.UserName = payload.Name;
await _context.SaveChangesAsync();
_logger.LogInformation("User logged in: {Email}", payload.Email);
return (true, user, null);
}
catch (InvalidJwtException ex)
{
_logger.LogError(ex, "Invalid JWT token");
return (false, null, "Invalid JWT token");
}
catch (Exception ex)
{
_logger.LogError(ex, "Error validating Google token");
return (false, null, "Error validating Google token");
}
}
}
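
The login endpoint that ties GoogleAuthService and JwtTokenService together is not part of this diff; a minimal controller-action sketch, with the route and parameter shape as assumptions:

// Hypothetical action inside an auth controller; not shown in this diff.
[HttpPost("google")]
[AllowAnonymous]
public async Task<IActionResult> GoogleLogin(
    [FromBody] string idToken,
    [FromServices] GoogleAuthService googleAuth,
    [FromServices] JwtTokenService jwtTokens)
{
    var (isValid, user, error) = await googleAuth.ValidateGoogleTokenAsync(idToken);
    if (!isValid || user is null)
        return Unauthorized(new { error });

    return Ok(new { token = jwtTokens.GenerateToken(user) });
}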

View File

@@ -0,0 +1,84 @@
using System.IdentityModel.Tokens.Jwt;
using System.Security.Claims;
using System.Text;
using DiunaBI.Domain.Entities;
using Microsoft.IdentityModel.Tokens;
namespace DiunaBI.API.Services;
public class JwtTokenService(IConfiguration configuration, ILogger<JwtTokenService> logger)
{
private readonly IConfiguration _configuration = configuration;
private readonly ILogger<JwtTokenService> _logger = logger;
public string GenerateToken(User user)
{
var jwtSettings = _configuration.GetSection("JwtSettings");
var securityKey = jwtSettings["SecurityKey"];
var issuer = jwtSettings["Issuer"];
var audience = jwtSettings["Audience"];
var expiryDays = int.Parse(jwtSettings["ExpiryDays"] ?? "7");
var claims = new[]
{
new Claim(ClaimTypes.NameIdentifier, user.Id.ToString()),
new Claim(ClaimTypes.Email, user.Email),
new Claim(ClaimTypes.Name, user.UserName),
new Claim(JwtRegisteredClaimNames.Jti, Guid.NewGuid().ToString()),
new Claim(JwtRegisteredClaimNames.Iat, new DateTimeOffset(DateTime.UtcNow).ToUnixTimeSeconds().ToString(),
ClaimValueTypes.Integer64)
};
var key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(securityKey));
var creds = new SigningCredentials(key, SecurityAlgorithms.HmacSha256);
var token = new JwtSecurityToken(
issuer: issuer,
audience: audience,
claims: claims,
expires: DateTime.UtcNow.AddDays(expiryDays),
signingCredentials: creds
);
var tokenString = new JwtSecurityTokenHandler().WriteToken(token);
_logger.LogInformation("Generated JWT token for user: {Email}", user.Email);
return tokenString;
}
public ClaimsPrincipal? ValidateToken(string token)
{
try
{
var jwtSettings = _configuration.GetSection("JwtSettings");
var secretKey = jwtSettings["SecurityKey"];
var issuer = jwtSettings["Issuer"];
var audience = jwtSettings["Audience"];
var tokenHandler = new JwtSecurityTokenHandler();
var key = Encoding.UTF8.GetBytes(secretKey);
var validationParameters = new TokenValidationParameters
{
ValidateIssuer = true,
ValidateAudience = true,
ValidateLifetime = true,
ValidateIssuerSigningKey = true,
ValidIssuer = issuer,
ValidAudience = audience,
IssuerSigningKey = new SymmetricSecurityKey(key),
ClockSkew = TimeSpan.Zero
};
var principal = tokenHandler.ValidateToken(token, validationParameters, out _);
return principal;
}
catch (Exception ex)
{
_logger.LogError(ex, "Error validating JWT token");
return null;
}
}
}
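
For orientation, a minimal sketch of round-tripping a token through this service (assumes `user` and an injected `jwtTokenService` are in scope):

// Generate, then validate and read a claim back.
var token = jwtTokenService.GenerateToken(user);
var principal = jwtTokenService.ValidateToken(token);
var email = principal?.FindFirst(System.Security.Claims.ClaimTypes.Email)?.Value;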

View File

@@ -1,6 +1,4 @@
 {
-  "PONG": "#{PING}#",
-  "app-version": "#{buildId}#",
   "Logging": {
     "LogLevel": {
       "Default": "Information",
@@ -34,36 +32,15 @@
           "retainedFileCountLimit": 30,
           "outputTemplate": "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] {SourceContext} {Message:lj} {Properties:j}{NewLine}{Exception}"
         }
-      },
-      {
-        "Name": "Seq",
-        "Args": {
-          "serverUrl": "http://localhost:5341",
-          "restrictedToMinimumLevel": "Information"
-        }
       }
     ],
     "Enrich": ["FromLogContext", "WithMachineName", "WithThreadId"]
   },
   "AllowedHosts": "*",
-  "ConnectionStrings": {
-    "SQLDatabase": "#{db-connection-string}#"
-  },
-  "InstanceName": "#{instance-name}#",
-  "GoogleClientId": "#{google-backend-login-client-id}#",
-  "Secret": "#{google-backend-login-secret}#",
-  "apiKey": "#{api-key}#",
-  "powerBI-user": "#{powerBI-user}#",
-  "powerBI-pass": "#{powerBI-pass}#",
-  "morska-user": "#{morska-user}#",
-  "morska-pass": "#{morska-pass}#",
-  "exportDirectory": "#{export-directory}#",
-  "appLogsFile": "#{app-logs-file}#",
-  "apiLocalUrl": "#{api-local-url}#",
   "Kestrel": {
     "Endpoints": {
       "Http": {
-        "Url": "http://#{api-local-url}#"
+        "Url": "http://0.0.0.0:7142"
       }
     }
   }

View File

@@ -0,0 +1,32 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore.Database.Command": "Warning",
"Microsoft.EntityFrameworkCore.Infrastructure": "Warning",
"System.Net.Http.HttpClient": "Warning",
"Google.Apis": "Warning",
"DiunaBI.Core.Services.PluginManager": "Information"
}
},
"WriteTo": [
{
"Name": "Console",
"Args": {
"outputTemplate": "[{Timestamp:HH:mm:ss} {Level:u3}] {Message:lj}{NewLine}{Exception}"
}
}
],
"Enrich": ["FromLogContext", "WithMachineName", "WithThreadId"]
},
"AllowedHosts": "*"
}

View File

@@ -0,0 +1,12 @@
namespace DiunaBI.Application.DTOModels.Common;
public class PagedResult<T>
{
public List<T> Items { get; set; } = new();
public int TotalCount { get; set; }
public int PageSize { get; set; }
public int Page { get; set; }
public int TotalPages => (int)Math.Ceiling(TotalCount / (double)PageSize);
public bool HasPreviousPage => Page > 1;
public bool HasNextPage => Page < TotalPages;
}

View File

@@ -0,0 +1,17 @@
namespace DiunaBI.Application.DTOModels;
public class DataInboxDto
{
public Guid Id { get; set; }
public string Name { get; set; } = string.Empty;
public string Source { get; set; } = string.Empty;
public string Data { get; set; } = string.Empty;
public DateTime CreatedAt { get; set; }
}
public class DataInboxFilterRequest
{
public string? Search { get; set; }
public int Page { get; set; } = 1;
public int PageSize { get; set; } = 50;
}
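
These DTOs pair naturally with `PagedResult<T>` above; a sketch of a paged, filtered query under that assumption (the real query code is not in this diff):

using Microsoft.EntityFrameworkCore;
using DiunaBI.Application.DTOModels;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Infrastructure.Data;

// Illustrative helper, not part of the commit.
public static class DataInboxQueries
{
    public static async Task<PagedResult<DataInboxDto>> GetPageAsync(
        AppDbContext db, DataInboxFilterRequest filter)
    {
        var query = db.DataInbox.AsNoTracking();
        if (!string.IsNullOrWhiteSpace(filter.Search))
            query = query.Where(x => x.Name.Contains(filter.Search) ||
                                     x.Source.Contains(filter.Search));

        var total = await query.CountAsync();
        var items = await query
            .OrderByDescending(x => x.CreatedAt)
            .Skip((filter.Page - 1) * filter.PageSize)
            .Take(filter.PageSize)
            .Select(x => new DataInboxDto
            {
                Id = x.Id,
                Name = x.Name,
                Source = x.Source,
                Data = x.Data,
                CreatedAt = x.CreatedAt
            })
            .ToListAsync();

        return new PagedResult<DataInboxDto>
        {
            Items = items,
            TotalCount = total,
            Page = filter.Page,
            PageSize = filter.PageSize
        };
    }
}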

View File

@@ -0,0 +1,11 @@
namespace DiunaBI.Application.DTOModels;
public class DeletedRecordDto
{
public Guid RecordId { get; set; }
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
public DateTime DeletedAt { get; set; }
public Guid DeletedById { get; set; }
public string DeletedByName { get; set; } = string.Empty;
}

View File

@@ -0,0 +1,37 @@
namespace DiunaBI.Application.DTOModels;
public class LayerDto
{
public Guid Id { get; set; }
public int Number { get; set; }
public string? Name { get; set; }
public LayerType Type { get; set; }
public DateTime CreatedAt { get; set; }
public DateTime ModifiedAt { get; set; }
public Guid CreatedById { get; set; }
public Guid ModifiedById { get; set; }
public bool IsDeleted { get; set; }
public bool IsCancelled { get; set; }
public Guid? ParentId { get; set; }
// Navigation properties
public List<RecordDto>? Records { get; set; }
public UserDto? CreatedBy { get; set; }
public UserDto? ModifiedBy { get; set; }
}
public enum LayerType
{
Import,
Processed,
Administration,
Dictionary
}
public class LayerFilterRequest
{
public string? Search { get; set; }
public LayerType? Type { get; set; }
public int Page { get; set; } = 1;
public int PageSize { get; set; } = 50;
}

View File

@@ -0,0 +1,50 @@
namespace DiunaBI.Application.DTOModels;
public class RecordDto
{
public Guid Id { get; set; }
public string? Code { get; set; }
public double? Value1 { get; set; }
public double? Value2 { get; set; }
public double? Value3 { get; set; }
public double? Value4 { get; set; }
public double? Value5 { get; set; }
public double? Value6 { get; set; }
public double? Value7 { get; set; }
public double? Value8 { get; set; }
public double? Value9 { get; set; }
public double? Value10 { get; set; }
public double? Value11 { get; set; }
public double? Value12 { get; set; }
public double? Value13 { get; set; }
public double? Value14 { get; set; }
public double? Value15 { get; set; }
public double? Value16 { get; set; }
public double? Value17 { get; set; }
public double? Value18 { get; set; }
public double? Value19 { get; set; }
public double? Value20 { get; set; }
public double? Value21 { get; set; }
public double? Value22 { get; set; }
public double? Value23 { get; set; }
public double? Value24 { get; set; }
public double? Value25 { get; set; }
public double? Value26 { get; set; }
public double? Value27 { get; set; }
public double? Value28 { get; set; }
public double? Value29 { get; set; }
public double? Value30 { get; set; }
public double? Value31 { get; set; }
public double? Value32 { get; set; }
public string? Desc1 { get; set; }
public DateTime CreatedAt { get; set; }
public DateTime ModifiedAt { get; set; }
public bool IsDeleted { get; set; }
public Guid CreatedById { get; set; }
public Guid ModifiedById { get; set; }
public Guid LayerId { get; set; }
}

View File

@@ -0,0 +1,27 @@
namespace DiunaBI.Application.DTOModels;
public class RecordHistoryDto
{
public Guid Id { get; set; }
public Guid RecordId { get; set; }
public Guid LayerId { get; set; }
// When and who
public DateTime ChangedAt { get; set; }
public Guid ChangedById { get; set; }
public string ChangedByName { get; set; } = string.Empty;
// Type of change
public string ChangeType { get; set; } = string.Empty; // "Created", "Updated", "Deleted"
// Snapshot values
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
// What changed
public string? ChangedFields { get; set; } // "Code, Desc1"
public string? ChangesSummary { get; set; } // JSON: {"Code": {"old": "A", "new": "B"}}
// Formatted display text
public string FormattedChange { get; set; } = string.Empty;
}
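
A minimal sketch of producing the `ChangesSummary` and `ChangedFields` shapes described above with System.Text.Json (field names are illustrative):

using System.Text.Json;

// Builds the {"Code": {"old": "A", "new": "B"}} shape documented above.
var changes = new Dictionary<string, object>
{
    ["Code"] = new { old = "A", @new = "B" } // "@new" serializes as "new"
};
string changesSummary = JsonSerializer.Serialize(changes); // {"Code":{"old":"A","new":"B"}}
string changedFields = string.Join(",", changes.Keys);     // "Code"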

View File

@@ -0,0 +1,8 @@
namespace DiunaBI.Application.DTOModels;
public class UserDto
{
public Guid Id { get; set; }
public string? Username { get; set; }
public string? Email { get; set; }
}

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>13.0</LangVersion>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\DiunaBI.Domain\DiunaBI.Domain.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="AngouriMath" Version="1.4.0-preview.3" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>13.0</LangVersion>
</PropertyGroup>
</Project>

View File

@@ -1,18 +1,14 @@
 using System;
 using System.ComponentModel.DataAnnotations;
 
-namespace DiunaBI.Core.Models;
+namespace DiunaBI.Domain.Entities;
 
 public class DataInbox
 {
     #region Properties
-    [Key]
     public Guid Id { get; set; }
-    [StringLength(50)]
     public required string Name { get; init; }
-    [StringLength(50)]
     public required string Source { get; set; }
-    [StringLength(int.MaxValue)]
     public required string Data { get; init; }
     public DateTime CreatedAt { get; set; }
     #endregion

View File

@@ -2,7 +2,7 @@
 using System.Collections.Generic;
 using System.ComponentModel.DataAnnotations;
 
-namespace DiunaBI.Core.Models;
+namespace DiunaBI.Domain.Entities;
 
 public enum LayerType
 {
@@ -14,30 +14,19 @@ public enum LayerType
 public class Layer
 {
     #region Properties
-    [Key]
     public Guid Id { get; init; }
-    [Required]
     public int Number { get; init; }
-    [Required]
-    [MaxLength(50)]
     public string? Name { get; set; }
-    [Required]
     public LayerType Type { get; init; }
-    [Required]
     public DateTime CreatedAt { get; set; }
-    [Required]
     public DateTime ModifiedAt { get; set; }
-    [Required]
     public bool IsDeleted { get; init; } = false;
-    [Required]
     public bool IsCancelled { get; init; } = false;
     #endregion
 
     #region Relations
     public ICollection<Record>? Records { get; init; }
-    [Required]
     public Guid CreatedById { get; set; }
     public User? CreatedBy { get; init; }
-    [Required]
     public Guid ModifiedById { get; set; }
     public User? ModifiedBy { get; init; }
     public Guid? ParentId { get; init; }

View File

@@ -1,14 +1,12 @@
 using System;
 using System.ComponentModel.DataAnnotations;
 
-namespace DiunaBI.Core.Models;
+namespace DiunaBI.Domain.Entities;
 
 public class ProcessSource
 {
     #region Relations
-    [Required]
     public Guid LayerId { get; init; }
-    [Required]
     public Guid SourceId { get; init; }
     public Layer? Source { get; init; }
     #endregion

View File

@@ -1,57 +1,26 @@
 using System;
 using System.ComponentModel.DataAnnotations;
 
-namespace DiunaBI.Core.Models;
+namespace DiunaBI.Domain.Entities;
 
 public class QueueJob
 {
-    [Key]
     public Guid Id { get; set; } = Guid.NewGuid();
-    [Required]
     public Guid LayerId { get; set; }
-    [Required]
-    [MaxLength(200)]
     public string LayerName { get; set; } = string.Empty;
-    [Required]
-    [MaxLength(100)]
     public string PluginName { get; set; } = string.Empty;
-    [Required]
     public JobType JobType { get; set; }
     public int Priority { get; set; } = 0; // 0 = highest priority
-    [Required]
     public DateTime CreatedAt { get; set; } = DateTime.UtcNow;
+    public DateTime ModifiedAt { get; set; } = DateTime.UtcNow;
     public int RetryCount { get; set; } = 0;
     public int MaxRetries { get; set; } = 5;
-    [Required]
     public JobStatus Status { get; set; } = JobStatus.Pending;
-    [MaxLength(1000)]
     public string? LastError { get; set; }
     public DateTime? LastAttemptAt { get; set; }
     public DateTime? CompletedAt { get; set; }
-    [Required]
     public Guid CreatedById { get; set; }
-    [Required]
-    public DateTime CreatedAtUtc { get; set; } = DateTime.UtcNow;
-    [Required]
     public Guid ModifiedById { get; set; }
-    [Required]
-    public DateTime ModifiedAtUtc { get; set; } = DateTime.UtcNow;
 }
 
 public enum JobType

View File

@@ -1,15 +1,12 @@
 using System;
 using System.ComponentModel.DataAnnotations;
 
-namespace DiunaBI.Core.Models;
+namespace DiunaBI.Domain.Entities;
 
 public class Record
 {
     #region Properties
-    [Key]
     public Guid Id { get; set; }
-    [Required]
-    [StringLength(50)]
     public string? Code { get; init; }
     public double? Value1 { get; set; }
     public double? Value2 { get; set; }
@@ -43,18 +40,14 @@ public class Record
     public double? Value30 { get; set; }
     public double? Value31 { get; set; }
     public double? Value32 { get; set; }
-    //Description fields
-    [StringLength(10000)]
     public string? Desc1 { get; set; }
     public DateTime CreatedAt { get; set; }
     public DateTime ModifiedAt { get; set; }
     public bool IsDeleted { get; init; }
     #endregion
 
     #region Relations
-    [Required]
     public Guid CreatedById { get; set; }
     public User? CreatedBy { get; init; }
-    [Required]
     public Guid ModifiedById { get; set; }
     public User? ModifiedBy { get; init; }
     public Guid LayerId { get; set; }

View File

@@ -0,0 +1,37 @@
using System;
namespace DiunaBI.Domain.Entities;
public enum RecordChangeType
{
Created = 1,
Updated = 2,
Deleted = 3
}
public class RecordHistory
{
public Guid Id { get; set; }
// Reference to the original record
public Guid RecordId { get; set; }
public Guid LayerId { get; set; }
// When and who
public DateTime ChangedAt { get; set; }
public Guid ChangedById { get; set; }
public User? ChangedBy { get; set; }
// Type of change
public RecordChangeType ChangeType { get; set; }
// Snapshot of record state at this point
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
// Comma-separated list of fields that changed (e.g., "Code,Desc1")
public string? ChangedFields { get; set; }
// JSON object with detailed changes: {"Code": {"old": "A", "new": "B"}}
public string? ChangesSummary { get; set; }
}

View File

@@ -0,0 +1,19 @@
using System;
using System.ComponentModel.DataAnnotations;
namespace DiunaBI.Domain.Entities;
public class User
{
/// <summary>
/// System user ID for automated operations (imports, scheduled jobs, etc.)
/// </summary>
public static readonly Guid AutoImportUserId = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d");
#region Properties
public Guid Id { get; init; }
public string? Email { get; init; }
public string? UserName { get; set; }
public DateTime CreatedAt { get; init; }
#endregion
}
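
A minimal sketch of how `AutoImportUserId` might be used when enqueuing automated work (the `JobType.Import` member and surrounding code are assumptions; the enum body is not shown in this diff):

// Sketch: stamping automated work with the system user.
var job = new QueueJob
{
    LayerId = Guid.NewGuid(),      // illustrative
    LayerName = "Import/Example",  // illustrative
    PluginName = "Morska",
    JobType = JobType.Import,      // assumes an Import member exists
    CreatedById = User.AutoImportUserId,
    ModifiedById = User.AutoImportUserId
};
db.QueueJobs.Add(job);             // db: AppDbContext in scope
await db.SaveChangesAsync();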

View File

@@ -0,0 +1,223 @@
using Microsoft.EntityFrameworkCore;
using DiunaBI.Domain.Entities;
namespace DiunaBI.Infrastructure.Data;
public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(options)
{
public DbSet<User> Users { get; init; }
public DbSet<Layer> Layers { get; init; }
public DbSet<Record> Records { get; init; }
public DbSet<RecordHistory> RecordHistory { get; init; }
public DbSet<ProcessSource> ProcessSources { get; init; }
public DbSet<DataInbox> DataInbox { get; init; }
public DbSet<QueueJob> QueueJobs { get; init; }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<User>().HasKey(x => x.Id);
modelBuilder.Entity<User>().Property(x => x.Email).HasMaxLength(50);
modelBuilder.Entity<User>().Property(x => x.UserName).HasMaxLength(50);
modelBuilder.Entity<Layer>().HasKey(x => x.Id);
modelBuilder.Entity<Layer>().Property(x => x.Number).IsRequired();
modelBuilder.Entity<Layer>().Property(x => x.Name).IsRequired().HasMaxLength(50);
modelBuilder.Entity<Layer>().Property(x => x.Type).IsRequired().HasConversion<int>();
modelBuilder.Entity<Layer>().Property(x => x.CreatedAt).IsRequired();
modelBuilder.Entity<Layer>().Property(x => x.ModifiedAt).IsRequired();
modelBuilder.Entity<Layer>().Property(x => x.IsDeleted).IsRequired().HasDefaultValue(false);
modelBuilder.Entity<Layer>().Property(x => x.IsCancelled).IsRequired().HasDefaultValue(false);
modelBuilder.Entity<Layer>().Property(x => x.CreatedById).IsRequired();
modelBuilder.Entity<Layer>().Property(x => x.ModifiedById).IsRequired();
modelBuilder.Entity<Layer>()
.HasOne(x => x.CreatedBy)
.WithMany()
.HasForeignKey(x => x.CreatedById)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<Layer>()
.HasOne(x => x.ModifiedBy)
.WithMany()
.HasForeignKey(x => x.ModifiedById)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<Layer>()
.HasMany(x => x.Records)
.WithOne()
.HasForeignKey(r => r.LayerId)
.OnDelete(DeleteBehavior.Cascade);
modelBuilder.Entity<Record>().HasKey(x => x.Id);
modelBuilder.Entity<Record>().Property(x => x.Code).IsRequired().HasMaxLength(50);
modelBuilder.Entity<Record>().Property(x => x.Desc1).HasMaxLength(10000);
modelBuilder.Entity<Record>().Property(x => x.CreatedAt);
modelBuilder.Entity<Record>().Property(x => x.ModifiedAt);
modelBuilder.Entity<Record>().Property(x => x.IsDeleted);
modelBuilder.Entity<Record>().Property(x => x.CreatedById).IsRequired();
modelBuilder.Entity<Record>().Property(x => x.ModifiedById).IsRequired();
modelBuilder.Entity<Record>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<Record>()
.HasOne(x => x.CreatedBy)
.WithMany()
.HasForeignKey(x => x.CreatedById)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<Record>()
.HasOne(x => x.ModifiedBy)
.WithMany()
.HasForeignKey(x => x.ModifiedById)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<Record>()
.HasOne<Layer>()
.WithMany(l => l.Records!)
.HasForeignKey(x => x.LayerId)
.OnDelete(DeleteBehavior.Cascade);
modelBuilder.Entity<RecordHistory>().HasKey(x => x.Id);
modelBuilder.Entity<RecordHistory>().Property(x => x.RecordId).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedAt).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedById).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangeType).IsRequired().HasConversion<int>();
modelBuilder.Entity<RecordHistory>().Property(x => x.Code).IsRequired().HasMaxLength(50);
modelBuilder.Entity<RecordHistory>().Property(x => x.Desc1).HasMaxLength(10000);
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedFields).HasMaxLength(200);
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangesSummary).HasMaxLength(4000);
// Indexes for efficient history queries
modelBuilder.Entity<RecordHistory>()
.HasIndex(x => new { x.RecordId, x.ChangedAt });
modelBuilder.Entity<RecordHistory>()
.HasIndex(x => new { x.LayerId, x.ChangedAt });
modelBuilder.Entity<RecordHistory>()
.HasOne(x => x.ChangedBy)
.WithMany()
.HasForeignKey(x => x.ChangedById)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<ProcessSource>().HasKey(x => new { x.LayerId, x.SourceId });
modelBuilder.Entity<ProcessSource>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<ProcessSource>().Property(x => x.SourceId).IsRequired();
modelBuilder.Entity<ProcessSource>()
.HasOne<Layer>()
.WithMany()
.HasForeignKey(x => x.LayerId)
.OnDelete(DeleteBehavior.Cascade);
modelBuilder.Entity<ProcessSource>()
.HasOne(x => x.Source)
.WithMany()
.HasForeignKey(x => x.SourceId)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<DataInbox>().HasKey(x => x.Id);
modelBuilder.Entity<DataInbox>().Property(x => x.Name).IsRequired().HasMaxLength(50);
modelBuilder.Entity<DataInbox>().Property(x => x.Source).IsRequired().HasMaxLength(50);
modelBuilder.Entity<DataInbox>().Property(x => x.Data).IsRequired();
modelBuilder.Entity<DataInbox>().Property(x => x.CreatedAt);
modelBuilder.Entity<QueueJob>().HasKey(x => x.Id);
modelBuilder.Entity<QueueJob>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.LayerName).IsRequired().HasMaxLength(200);
modelBuilder.Entity<QueueJob>().Property(x => x.PluginName).IsRequired().HasMaxLength(100);
modelBuilder.Entity<QueueJob>().Property(x => x.JobType).IsRequired().HasConversion<int>();
modelBuilder.Entity<QueueJob>().Property(x => x.Priority);
modelBuilder.Entity<QueueJob>().Property(x => x.CreatedAt).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.RetryCount);
modelBuilder.Entity<QueueJob>().Property(x => x.MaxRetries);
modelBuilder.Entity<QueueJob>().Property(x => x.Status).IsRequired().HasConversion<int>();
modelBuilder.Entity<QueueJob>().Property(x => x.LastError).HasMaxLength(1000);
modelBuilder.Entity<QueueJob>().Property(x => x.LastAttemptAt);
modelBuilder.Entity<QueueJob>().Property(x => x.CompletedAt);
modelBuilder.Entity<QueueJob>().Property(x => x.CreatedById).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedById).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedAt).IsRequired();
// Configure automatic timestamps for entities with CreatedAt/ModifiedAt
ConfigureTimestamps(modelBuilder);
}
private void ConfigureTimestamps(ModelBuilder modelBuilder)
{
foreach (var entityType in modelBuilder.Model.GetEntityTypes())
{
// Check if entity has CreatedAt property
var createdAtProperty = entityType.FindProperty("CreatedAt");
if (createdAtProperty != null)
{
modelBuilder.Entity(entityType.ClrType)
.Property("CreatedAt")
.HasDefaultValueSql("GETUTCDATE()");
}
// Check if entity has ModifiedAt property
var modifiedAtProperty = entityType.FindProperty("ModifiedAt");
if (modifiedAtProperty != null)
{
modelBuilder.Entity(entityType.ClrType)
.Property("ModifiedAt")
.HasDefaultValueSql("GETUTCDATE()");
}
}
}
public override int SaveChanges()
{
UpdateTimestamps();
return base.SaveChanges();
}
public override Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
{
UpdateTimestamps();
return base.SaveChangesAsync(cancellationToken);
}
private void UpdateTimestamps()
{
var entities = ChangeTracker.Entries()
.Where(e => e.State == EntityState.Added || e.State == EntityState.Modified);
foreach (var entity in entities)
{
// Try to set CreatedAt for new entities
if (entity.State == EntityState.Added)
{
var createdAtProperty = entity.Properties.FirstOrDefault(p => p.Metadata.Name == "CreatedAt");
if (createdAtProperty != null)
{
createdAtProperty.CurrentValue = DateTime.UtcNow;
}
// Ensure IsDeleted and IsCancelled have default values for Layer entities
if (entity.Entity is Layer)
{
var isDeletedProperty = entity.Properties.FirstOrDefault(p => p.Metadata.Name == "IsDeleted");
if (isDeletedProperty != null && isDeletedProperty.CurrentValue == null)
{
isDeletedProperty.CurrentValue = false;
}
var isCancelledProperty = entity.Properties.FirstOrDefault(p => p.Metadata.Name == "IsCancelled");
if (isCancelledProperty != null && isCancelledProperty.CurrentValue == null)
{
isCancelledProperty.CurrentValue = false;
}
}
}
// Always update ModifiedAt
var modifiedAtProperty = entity.Properties.FirstOrDefault(p => p.Metadata.Name == "ModifiedAt");
if (modifiedAtProperty != null)
{
modifiedAtProperty.CurrentValue = DateTime.UtcNow;
}
}
}
}
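
For illustration, a minimal sketch of what the timestamp plumbing buys callers (entity values are made up; `db` is an `AppDbContext` in scope):

// CreatedAt is filled in by UpdateTimestamps() during SaveChanges,
// so callers can leave timestamp properties at their defaults.
var inbox = new DataInbox
{
    Id = Guid.NewGuid(),
    Name = "sales-report",    // illustrative
    Source = "GoogleSheets",  // illustrative
    Data = "{}"
};
db.DataInbox.Add(inbox);
await db.SaveChangesAsync();
// inbox.CreatedAt now holds the UTC save time.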

View File

@@ -4,14 +4,14 @@ using Microsoft.EntityFrameworkCore;
 using Microsoft.EntityFrameworkCore.Design;
 using Microsoft.Extensions.Configuration;
 
-namespace DiunaBI.Core.Database.Context;
+namespace DiunaBI.Infrastructure.Data;
 
 public class DesignTimeDbContextFactory : IDesignTimeDbContextFactory<AppDbContext>
 {
     public AppDbContext CreateDbContext(string[] args)
     {
         var configuration = new ConfigurationBuilder()
-            .SetBasePath(Path.Combine(Directory.GetCurrentDirectory(), "../DiunaBI.WebAPI"))
+            .SetBasePath(Path.Combine(Directory.GetCurrentDirectory(), "../DiunaBI.API"))
             .AddJsonFile("appsettings.json", optional: false)
             .AddJsonFile("appsettings.Development.json", optional: true)
             .Build();
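
Not shown here, but worth noting: with the factory's base path now pointing at ../DiunaBI.API, design-time EF Core commands would typically be invoked along the lines of `dotnet ef migrations add <Name> --project DiunaBI.Infrastructure --startup-project DiunaBI.API` (the exact invocation is an assumption, not part of this diff).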

View File

@@ -0,0 +1,31 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>13.0</LangVersion>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\DiunaBI.Domain\DiunaBI.Domain.csproj" />
<ProjectReference Include="..\DiunaBI.Application\DiunaBI.Application.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="10.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="10.0.0" />
<PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3525" />
<PackageReference Include="Google.Apis.Drive.v3" Version="1.68.0.3490" />
</ItemGroup>
<ItemGroup>
<FrameworkReference Include="Microsoft.AspNetCore.App" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,201 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
namespace DiunaBI.Infrastructure.Interceptors;
public class EntityChangeInterceptor : SaveChangesInterceptor
{
private readonly object? _hubContext;
private readonly ILogger<EntityChangeInterceptor>? _logger;
private readonly List<(string Module, string Id, string Operation)> _pendingChanges = new();
public EntityChangeInterceptor(IServiceProvider serviceProvider)
{
_logger = serviceProvider.GetService(typeof(ILogger<EntityChangeInterceptor>)) as ILogger<EntityChangeInterceptor>;
// Try to get hub context - it may not be registered in some scenarios (e.g., migrations)
try
{
var hubType = Type.GetType("DiunaBI.API.Hubs.EntityChangeHub, DiunaBI.API");
if (hubType != null)
{
var hubContextType = typeof(IHubContext<>).MakeGenericType(hubType);
_hubContext = serviceProvider.GetService(hubContextType);
if (_hubContext != null)
{
_logger?.LogInformation("✅ EntityChangeInterceptor: Hub context initialized");
Console.WriteLine("✅ EntityChangeInterceptor: Hub context initialized");
}
else
{
_logger?.LogWarning("⚠️ EntityChangeInterceptor: Hub context is null");
Console.WriteLine("⚠️ EntityChangeInterceptor: Hub context is null");
}
}
else
{
_logger?.LogWarning("⚠️ EntityChangeInterceptor: Hub type not found");
Console.WriteLine("⚠️ EntityChangeInterceptor: Hub type not found");
}
}
catch (Exception ex)
{
_logger?.LogError(ex, "❌ EntityChangeInterceptor: Failed to initialize hub context");
Console.WriteLine($"❌ EntityChangeInterceptor: Failed to initialize hub context: {ex.Message}");
_hubContext = null;
}
}
public override ValueTask<InterceptionResult<int>> SavingChangesAsync(
DbContextEventData eventData,
InterceptionResult<int> result,
CancellationToken cancellationToken = default)
{
_pendingChanges.Clear();
Console.WriteLine($"🔍 EntityChangeInterceptor.SavingChangesAsync called. HubContext null? {_hubContext == null}, Context null? {eventData.Context == null}");
if (_hubContext != null && eventData.Context != null)
{
// Capture changes BEFORE save
var entries = eventData.Context.ChangeTracker.Entries().ToList();
Console.WriteLine($"🔍 Found {entries.Count} total entries in ChangeTracker");
foreach (var entry in entries)
{
Console.WriteLine($"🔍 Entry: {entry.Metadata.ClrType.Name}, State: {entry.State}");
if (entry.State == EntityState.Added ||
entry.State == EntityState.Modified ||
entry.State == EntityState.Deleted)
{
var module = entry.Metadata.GetTableName() ?? entry.Metadata.ClrType.Name;
var id = GetEntityId(entry);
var operation = entry.State switch
{
EntityState.Added => "created",
EntityState.Modified => "updated",
EntityState.Deleted => "deleted",
_ => "unknown"
};
Console.WriteLine($"🔍 Detected change: {module} {id} {operation}");
if (id != null)
{
_pendingChanges.Add((module, id, operation));
Console.WriteLine($"✅ Added to pending changes: {module} {id} {operation}");
}
else
{
Console.WriteLine($"⚠️ Skipped (id is null): {module} {operation}");
}
}
}
Console.WriteLine($"🔍 Total pending changes: {_pendingChanges.Count}");
}
return base.SavingChangesAsync(eventData, result, cancellationToken);
}
public override async ValueTask<int> SavedChangesAsync(
SaveChangesCompletedEventData eventData,
int result,
CancellationToken cancellationToken = default)
{
// Broadcast changes AFTER successful save
if (_hubContext != null && result > 0 && _pendingChanges.Any())
{
_logger?.LogInformation("📤 Broadcasting {Count} entity changes via SignalR", _pendingChanges.Count);
Console.WriteLine($"📤 Broadcasting {_pendingChanges.Count} entity changes via SignalR");
foreach (var (module, id, operation) in _pendingChanges)
{
try
{
Console.WriteLine($"📤 Broadcasting: {module} {id} {operation}");
// Use reflection to call hub methods since we can't reference the API project
var clientsProperty = _hubContext.GetType().GetProperty("Clients");
Console.WriteLine($" 🔍 Clients property: {clientsProperty != null}");
if (clientsProperty != null)
{
var clients = clientsProperty.GetValue(_hubContext);
Console.WriteLine($" 🔍 Clients value: {clients != null}, Type: {clients?.GetType().Name}");
if (clients != null)
{
var allProperty = clients.GetType().GetProperty("All");
Console.WriteLine($" 🔍 All property: {allProperty != null}");
if (allProperty != null)
{
var allClients = allProperty.GetValue(clients);
Console.WriteLine($" 🔍 AllClients value: {allClients != null}, Type: {allClients?.GetType().Name}");
if (allClients != null)
{
// SendAsync is an extension method, so we need to find it differently
// Look for the IClientProxy interface which has SendCoreAsync
var sendCoreAsyncMethod = allClients.GetType().GetMethod("SendCoreAsync");
Console.WriteLine($" 🔍 SendCoreAsync method found: {sendCoreAsyncMethod != null}");
if (sendCoreAsyncMethod != null)
{
// SendCoreAsync takes (string method, object?[] args, CancellationToken cancellationToken)
var task = sendCoreAsyncMethod.Invoke(allClients, new object[]
{
"EntityChanged",
new object[] { new { module, id, operation } },
cancellationToken
}) as Task;
Console.WriteLine($" 🔍 Task created: {task != null}");
if (task != null)
{
await task;
Console.WriteLine($"✅ Broadcast successful: {module} {id} {operation}");
}
else
{
Console.WriteLine($"❌ Task is null after invoke");
}
}
else
{
Console.WriteLine($"❌ SendCoreAsync method not found");
}
}
}
}
}
}
catch (Exception ex)
{
_logger?.LogError(ex, "❌ Failed to broadcast entity change");
Console.WriteLine($"❌ Failed to broadcast: {ex.Message}");
Console.WriteLine($"❌ Stack trace: {ex.StackTrace}");
}
}
}
_pendingChanges.Clear();
return await base.SavedChangesAsync(eventData, result, cancellationToken);
}
private static string? GetEntityId(Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry entry)
{
var keyProperty = entry.Metadata.FindPrimaryKey()?.Properties.FirstOrDefault();
if (keyProperty == null)
return null;
var value = entry.Property(keyProperty.Name).CurrentValue;
return value?.ToString();
}
}
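
On the receiving end (not part of this diff), a client subscription might look like the following sketch; the URL, token source, and message record are assumptions:

using Microsoft.AspNetCore.SignalR.Client;

// Token acquired elsewhere; environment variable is just a placeholder.
var jwtToken = Environment.GetEnvironmentVariable("DIUNA_JWT");

var connection = new HubConnectionBuilder()
    .WithUrl("http://localhost:7142/hubs/entitychanges", options =>
    {
        // The hub is mapped with RequireAuthorization(), so pass the JWT.
        options.AccessTokenProvider = () => Task.FromResult(jwtToken);
    })
    .WithAutomaticReconnect()
    .Build();

// Payload shape mirrors the anonymous object { module, id, operation }
// sent by the interceptor.
connection.On<EntityChangedMessage>("EntityChanged", msg =>
    Console.WriteLine($"{msg.Module} {msg.Id} {msg.Operation}"));

await connection.StartAsync();

public record EntityChangedMessage(string Module, string Id, string Operation);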

View File

@@ -1,6 +1,6 @@
-using DiunaBI.Core.Models;
+using DiunaBI.Domain.Entities;
 
-namespace DiunaBI.Core.Interfaces;
+namespace DiunaBI.Infrastructure.Interfaces;
 
 public interface IDataExporter
 {

View File

@@ -1,6 +1,6 @@
-using DiunaBI.Core.Models;
+using DiunaBI.Domain.Entities;
 
-namespace DiunaBI.Core.Interfaces;
+namespace DiunaBI.Infrastructure.Interfaces;
 
 public interface IDataImporter
 {

View File

@@ -1,6 +1,6 @@
-using DiunaBI.Core.Models;
+using DiunaBI.Domain.Entities;
 
-namespace DiunaBI.Core.Interfaces;
+namespace DiunaBI.Infrastructure.Interfaces;
 
 public interface IDataProcessor
 {

View File

@@ -1,4 +1,4 @@
-namespace DiunaBI.Core.Interfaces;
+namespace DiunaBI.Infrastructure.Interfaces;
 
 public interface IPlugin
 {

View File

@@ -5,19 +5,19 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
 using Microsoft.EntityFrameworkCore.Metadata;
 using Microsoft.EntityFrameworkCore.Migrations;
 using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
 
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     [DbContext(typeof(AppDbContext))]
     [Migration("20221205190148_Initial")]
     partial class Initial
     {
         /// <inheritdoc />
-        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        protected void BuildTargetModel(ModelBuilder modelBuilder)
         {
 #pragma warning disable 612, 618
             modelBuilder

View File

@@ -3,7 +3,7 @@ using Microsoft.EntityFrameworkCore.Migrations;
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     /// <inheritdoc />
     public partial class Initial : Migration

View File

@@ -5,19 +5,19 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
 using Microsoft.EntityFrameworkCore.Metadata;
 using Microsoft.EntityFrameworkCore.Migrations;
 using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
 
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     [DbContext(typeof(AppDbContext))]
     [Migration("20221211210507_DataSetsAndDataRows")]
     partial class DataSetsAndDataRows
     {
         /// <inheritdoc />
-        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        protected void BuildTargetModel(ModelBuilder modelBuilder)
         {
 #pragma warning disable 612, 618
             modelBuilder

View File

@@ -3,7 +3,7 @@ using Microsoft.EntityFrameworkCore.Migrations;
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     /// <inheritdoc />
     public partial class DataSetsAndDataRows : Migration

View File

@@ -5,19 +5,19 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
 using Microsoft.EntityFrameworkCore.Metadata;
 using Microsoft.EntityFrameworkCore.Migrations;
 using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
 
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     [DbContext(typeof(AppDbContext))]
     [Migration("20221219163620_RenameFields")]
     partial class RenameFields
     {
         /// <inheritdoc />
-        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        protected void BuildTargetModel(ModelBuilder modelBuilder)
         {
 #pragma warning disable 612, 618
             modelBuilder

View File

@@ -2,7 +2,7 @@
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     /// <inheritdoc />
     public partial class RenameFields : Migration

View File

@@ -5,19 +5,19 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
 using Microsoft.EntityFrameworkCore.Metadata;
 using Microsoft.EntityFrameworkCore.Migrations;
 using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
 
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     [DbContext(typeof(AppDbContext))]
     [Migration("20221221165749_DataSetIdOnDataRow")]
     partial class DataSetIdOnDataRow
     {
         /// <inheritdoc />
-        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        protected void BuildTargetModel(ModelBuilder modelBuilder)
         {
 #pragma warning disable 612, 618
             modelBuilder

View File

@@ -3,7 +3,7 @@ using Microsoft.EntityFrameworkCore.Migrations;
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     /// <inheritdoc />
     public partial class DataSetIdOnDataRow : Migration
@@ -15,6 +15,11 @@
             name: "FK_DataRows_DataSets_DataSetId",
             table: "DataRows");
 
+        // ADDED: Drop the index before altering the column
+        migrationBuilder.DropIndex(
+            name: "IX_DataRows_DataSetId",
+            table: "DataRows");
+
         migrationBuilder.AlterColumn<Guid>(
             name: "DataSetId",
             table: "DataRows",
@@ -25,6 +30,12 @@
             oldType: "uniqueidentifier",
             oldNullable: true);
 
+        // ADDED: Recreate the index after altering the column
+        migrationBuilder.CreateIndex(
+            name: "IX_DataRows_DataSetId",
+            table: "DataRows",
+            column: "DataSetId");
+
         migrationBuilder.AddForeignKey(
             name: "FK_DataRows_DataSets_DataSetId",
             table: "DataRows",
@@ -41,6 +52,10 @@
             name: "FK_DataRows_DataSets_DataSetId",
             table: "DataRows");
 
+        migrationBuilder.DropIndex(
+            name: "IX_DataRows_DataSetId",
+            table: "DataRows");
+
         migrationBuilder.AlterColumn<Guid>(
             name: "DataSetId",
             table: "DataRows",
@@ -49,6 +64,11 @@
             oldClrType: typeof(Guid),
             oldType: "uniqueidentifier");
 
+        migrationBuilder.CreateIndex(
+            name: "IX_DataRows_DataSetId",
+            table: "DataRows",
+            column: "DataSetId");
+
         migrationBuilder.AddForeignKey(
             name: "FK_DataRows_DataSets_DataSetId",
             table: "DataRows",

View File

@@ -5,19 +5,19 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
 using Microsoft.EntityFrameworkCore.Metadata;
 using Microsoft.EntityFrameworkCore.Migrations;
 using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
 
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     [DbContext(typeof(AppDbContext))]
     [Migration("20230106095427_RenameModels")]
     partial class RenameModels
     {
         /// <inheritdoc />
-        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        protected void BuildTargetModel(ModelBuilder modelBuilder)
         {
 #pragma warning disable 612, 618
             modelBuilder

View File

@@ -3,7 +3,7 @@ using Microsoft.EntityFrameworkCore.Migrations;
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     /// <inheritdoc />
     public partial class RenameModels : Migration

View File

@@ -5,19 +5,19 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
 using Microsoft.EntityFrameworkCore.Metadata;
 using Microsoft.EntityFrameworkCore.Migrations;
 using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
 
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     [DbContext(typeof(AppDbContext))]
     [Migration("20230626171614_LayerType")]
     partial class LayerType
     {
         /// <inheritdoc />
-        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        protected void BuildTargetModel(ModelBuilder modelBuilder)
         {
 #pragma warning disable 612, 618
             modelBuilder

View File

@@ -2,7 +2,7 @@
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     /// <inheritdoc />
     public partial class LayerType : Migration

View File

@@ -5,19 +5,19 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
 using Microsoft.EntityFrameworkCore.Metadata;
 using Microsoft.EntityFrameworkCore.Migrations;
 using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using DiunaBI.Core.Models;
-using DiunaBI.Core.Database.Context;
+using DiunaBI.Domain.Entities;
+using DiunaBI.Infrastructure.Data;
 
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     [DbContext(typeof(AppDbContext))]
     [Migration("20230821105757_Record.Values")]
     partial class RecordValues
     {
         /// <inheritdoc />
-        protected override void BuildTargetModel(ModelBuilder modelBuilder)
+        protected void BuildTargetModel(ModelBuilder modelBuilder)
         {
 #pragma warning disable 612, 618
             modelBuilder

View File

@@ -2,7 +2,7 @@
 #nullable disable
 
-namespace DiunaBI.Core.Migrations
+namespace DiunaBI.Infrastructure.Migrations
 {
     /// <inheritdoc />
     public partial class RecordValues : Migration

Some files were not shown because too many files have changed in this diff.