Compare commits

...

45 Commits

Author SHA1 Message Date
24f5f91704 update readme (all checks successful) 2025-12-08 22:07:16 +01:00
00c9584d03 Schedule Jobs from UI 2025-12-08 22:02:57 +01:00
c94a3b41c9 Duplicate models fields fix 2025-12-08 21:54:48 +01:00
e25cdc4441 UI timezone 2025-12-08 21:42:10 +01:00
1f95d57717 JobList filter fix 2025-12-08 21:28:24 +01:00
d2fb9b8071 Fix API Key Authorization for Cron Jobs by adding [AllowAnonymous] attribute to scheduling endpoints (all checks successful) 2025-12-06 00:50:20 +01:00
08abd96751 SignalR FIX (all checks successful) 2025-12-06 00:36:22 +01:00
eb570679ba UI Fix (all checks successful) 2025-12-06 00:03:46 +01:00
8713ed9686 LayerDetail improvement (all checks successful) 2025-12-05 23:49:16 +01:00
595076033b More security! (all checks successful) 2025-12-05 23:41:56 +01:00
0c874575d4 SignalR Security 2025-12-05 23:17:02 +01:00
71c293320b Security: controllers and stack traces in logs (all checks successful) 2025-12-05 21:37:15 +01:00
46805fb196 Security: JWT (all checks successful) 2025-12-05 21:17:04 +01:00
51f2679732 Handle unauthorized (all checks successful) 2025-12-05 20:34:18 +01:00
6b0f936f40 P2 processor is working as a charm! (all checks successful) 2025-12-05 19:10:28 +01:00
0eb2a457f7 PedrolloPL: P2 -> B3 (all checks successful) 2025-12-05 10:35:35 +01:00
0cf0bad6b1 UI build fix (all checks successful) 2025-12-05 10:07:45 +01:00
c7d9acead0 UI refactor (structure cleanup) (all checks failed) 2025-12-05 09:51:04 +01:00
193127b86a SingalR for realtime entitychanges (all checks successful) 2025-12-04 22:20:00 +01:00
bf2beda390 build fix2 (all checks successful) 2025-12-04 18:44:39 +01:00
942da18d85 Build fix (tests passed, image builds failed) 2025-12-04 17:57:37 +01:00
a3fa8f9b91 P2 import is working (all checks failed) 2025-12-04 15:53:11 +01:00
0e3b3933f0 WIP: p2 plugin (all checks failed) 2025-12-03 13:33:38 +01:00
445c07a8d8 Morska plugins refactor 2025-12-02 21:24:37 +01:00
3f8e62fbb8 WIP: queue engine 2025-12-02 15:35:04 +01:00
248106a239 Plugins little refactor 2025-12-02 15:21:27 +01:00
587d4d66f8 Pedrollo plugins 2025-12-02 14:31:21 +01:00
f68e57ce3b Small UI fixes (all checks successful) 2025-12-02 13:43:01 +01:00
e70a8dda6e Remember list filters 2025-12-02 13:23:03 +01:00
89859cd4a3 Record histori is working 2025-12-02 13:14:09 +01:00
0c6848556b WIP: Record history 2025-12-01 18:37:09 +01:00
c8ded1f0a4 Edit Records 2025-12-01 17:56:17 +01:00
7ea5ed506e Filter Layers by Type (all checks successful) 2025-12-01 13:21:45 +01:00
4d7df85df1 DataInbox Detail 2025-12-01 13:00:01 +01:00
3d654d972e DataInbox list 2025-12-01 12:55:47 +01:00
a71b6feefc Pagination style fix 2025-12-01 12:35:22 +01:00
cb0d050ad4 Imports for 2025.12 2025-11-30 16:09:32 +01:00
24387bf96c debug (all checks successful) 2025-11-28 16:15:39 +01:00
87d19dcadf App logo (all checks successful) 2025-11-28 12:13:19 +01:00
a289690b6b Add custom app name per instance 2025-11-28 11:44:19 +01:00
57f1359c96 Bu9ild path fixes (all checks successful) 2025-11-28 11:29:38 +01:00
b0e77ec835 Enable Main build (all checks failed) 2025-11-28 11:26:58 +01:00
b3053b859a Last refactor steps (I hope) 2025-11-28 11:26:17 +01:00
07423023a0 after refactor cleanup 2025-11-28 11:21:22 +01:00
5db6de1503 Merge pull request 'ddd-refactor' (#2) from ddd-refactor into main (Reviewed-on: #2; frontend build passed, backend build failed) 2025-11-28 11:14:42 +01:00
353 changed files with 8259 additions and 14075 deletions


@@ -0,0 +1,3 @@
Read the project context file at `.claude/project-context.md` to quickly understand the DiunaBI project structure, architecture, key components, and recent development focus. This will bootstrap your knowledge without needing to explore the entire codebase.
After reading the context file, briefly acknowledge what you've learned and ask the user what they need help with.


@@ -0,0 +1,27 @@
Update the `.claude/project-context.md` file by ONLY appending changes made during THIS session to the "RECENT CHANGES (This Session)" section at the top of the file.
**DO NOT re-scan or re-explore the entire codebase** - this wastes tokens and time.
**What to do:**
1. Review the conversation history to identify what was changed/added/fixed in THIS session
2. Read the current `.claude/project-context.md` file
3. Update ONLY the "RECENT CHANGES (This Session)" section at the top with:
- Date of changes (today's date)
- Brief bullet points describing what was modified
- Files that were changed with brief descriptions
- Any new functionality added
- Bug fixes completed
4. Leave the rest of the file unchanged
**Format for session changes:**
```markdown
## RECENT CHANGES (This Session)
**[Feature/Fix Name] ([Date]):**
- ✅ Brief description of change 1
- ✅ Brief description of change 2
- Files modified: [file1.cs](path/to/file1.cs), [file2.cs](path/to/file2.cs)
```
When done, provide a brief summary of what session changes were documented.

.claude/project-context.md (new file, 775 lines)

@@ -0,0 +1,775 @@
# DiunaBI Project Context
> This file is auto-generated for Claude Code to quickly understand the project structure.
> Last updated: 2025-12-08
## RECENT CHANGES (This Session)
**Jobs List Sorting and Multi-Select Filtering (Dec 8, 2025):**
- **Fixed Job Sorting** - Changed from single CreatedAt DESC to Priority ASC → JobType → CreatedAt DESC
- **Multi-Select Status Filter** - Replaced single status dropdown with multi-select supporting multiple JobStatus values
- **Auto-Refresh on Filter Change** - Filters now automatically trigger data reload without requiring a manual button click
- **API Updates** - JobsController GetAll endpoint accepts `List<JobStatus>? statuses` instead of a single status
- **JobService Updates** - Sends status values as integers in the query string for multi-select support
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [JobService.cs](DiunaBI.UI.Shared/Services/JobService.cs), [Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor), [Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs)
- Status: Jobs list now sortable by priority/type/date with working multi-select filters
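A minimal sketch of the endpoint shape described above; the parameter binding and method body are assumptions, not the exact JobsController code:
```csharp
// Hypothetical sketch; AppDbContext, QueueJob, JobStatus are the types named in this doc.
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;

[ApiController]
[Route("jobs")]
public class JobsControllerSketch(AppDbContext db) : ControllerBase
{
    [HttpGet]
    public async Task<IActionResult> GetAll(
        [FromQuery] List<JobStatus>? statuses = null, // binds from ?statuses=0&statuses=2
        [FromQuery] int page = 1,
        [FromQuery] int pageSize = 50)
    {
        var query = db.QueueJobs.AsQueryable();
        if (statuses is { Count: > 0 })
            query = query.Where(j => statuses.Contains(j.Status));
        var items = await query
            .OrderBy(j => j.Priority)            // Priority ASC (0 = highest)
            .ThenBy(j => j.JobType)              // then JobType
            .ThenByDescending(j => j.CreatedAt)  // then newest first
            .Skip((page - 1) * pageSize)
            .Take(pageSize)
            .ToListAsync();
        return Ok(items);
    }
}
```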
**User Timezone Support (Dec 8, 2025):**
- **DateTimeHelper Service** - Created JS Interop service to detect user's browser timezone
- **UTC to Local Conversion** - All date displays now show user's local timezone instead of UTC
- **Database Consistency** - Database continues to store UTC (correct), conversion only for display
- **Updated Pages** - Applied timezone conversion to all date fields in:
  - Jobs Index and Details pages
  - Layers Details page (CreatedAt, ModifiedAt, record history)
  - DataInbox Index page
- **Service Registration** - Registered DateTimeHelper as scoped service in DI container
- Files created: [DateTimeHelper.cs](DiunaBI.UI.Shared/Services/DateTimeHelper.cs)
- Files modified: [ServiceCollectionExtensions.cs](DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs), [Jobs/Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs), [Jobs/Details.razor](DiunaBI.UI.Shared/Pages/Jobs/Details.razor), [Layers/Details.razor](DiunaBI.UI.Shared/Pages/Layers/Details.razor), [Layers/Details.razor.cs](DiunaBI.UI.Shared/Pages/Layers/Details.razor.cs), [DataInbox/Index.razor.cs](DiunaBI.UI.Shared/Pages/DataInbox/Index.razor.cs)
- Status: All dates display in user's local timezone with format "yyyy-MM-dd HH:mm:ss"
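A sketch of the JS-interop idea behind DateTimeHelper; reading the zone via `Intl.DateTimeFormat()` and the class shape are assumptions, the real service may differ:
```csharp
// Hypothetical sketch of DateTimeHelper (not the actual implementation).
using Microsoft.JSInterop;

public class DateTimeHelper(IJSRuntime js)
{
    private TimeZoneInfo? _tz;

    public async Task<string> ToLocalStringAsync(DateTime utc)
    {
        // Ask the browser for its IANA zone id once, e.g. "Europe/Warsaw"
        var zoneId = await js.InvokeAsync<string>(
            "eval", "Intl.DateTimeFormat().resolvedOptions().timeZone");
        _tz ??= TimeZoneInfo.FindSystemTimeZoneById(zoneId);
        var local = TimeZoneInfo.ConvertTimeFromUtc(utc, _tz);
        return local.ToString("yyyy-MM-dd HH:mm:ss"); // display format noted above
    }
}
```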
**QueueJob Model Cleanup and AutoImport User (Dec 8, 2025):**
- **Removed Duplicate Fields** - Removed CreatedAtUtc and ModifiedAtUtc from QueueJob (duplicates of CreatedAt/ModifiedAt)
- **Added ModifiedAt Field** - Was missing, now tracks job modification timestamp
- **AutoImport User ID** - Created User.AutoImportUserId constant: `f392209e-123e-4651-a5a4-0b1d6cf9ff9d`
- **System Operations** - All system-created/modified jobs now use AutoImportUserId for CreatedById and ModifiedById
- **Database Migration** - Created migration: RemoveQueueJobDuplicateUTCFields
- Files modified: [QueueJob.cs](DiunaBI.Domain/Entities/QueueJob.cs), [User.cs](DiunaBI.Domain/Entities/User.cs), [JobWorkerService.cs](DiunaBI.Infrastructure/Services/JobWorkerService.cs), [JobSchedulerService.cs](DiunaBI.Infrastructure/Services/JobSchedulerService.cs), [AppDbContext.cs](DiunaBI.Infrastructure/Data/AppDbContext.cs), [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs)
- Files created: [20251208205202_RemoveQueueJobDuplicateUTCFields.cs](DiunaBI.Infrastructure/Migrations/20251208205202_RemoveQueueJobDuplicateUTCFields.cs)
- Status: QueueJob model cleaned up, all automated operations tracked with AutoImport user ID
**Job Scheduling UI with JWT Authorization (Dec 8, 2025):**
- **New JWT Endpoints** - Created UI-specific endpoints at `/jobs/ui/schedule/*` with JWT authorization (parallel to API key endpoints)
- **Three Scheduling Options** - MudMenu dropdown in Jobs Index with:
  - Run All Jobs - schedules all import and process jobs
  - Run All Imports - schedules import jobs only
  - Run All Processes - schedules process jobs only
- **JobService Methods** - Added three scheduling methods returning (success, jobsCreated, message) tuples
- **Auto-Refresh** - Jobs list automatically reloads after scheduling with success/failure notifications
- **Dual Authorization** - Existing `/jobs/schedule/{apiKey}` endpoints for automation, new `/jobs/ui/schedule` endpoints for UI users
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [JobService.cs](DiunaBI.UI.Shared/Services/JobService.cs), [Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor), [Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs)
- Status: UI users can now schedule jobs directly from Jobs page using JWT authentication
---
**API Key Authorization Fix for Cron Jobs (Dec 6, 2025):**
- **Fixed 401 Unauthorized on API Key Endpoints** - Cron jobs calling `/jobs/schedule` endpoints were rejected despite valid API keys
- **Added [AllowAnonymous] Attribute** - Bypasses controller-level `[Authorize]` to allow the `[ApiKeyAuth]` filter to handle authorization
- **Three Endpoints Fixed** - Applied fix to all job scheduling endpoints:
  - `POST /jobs/schedule` - Schedule all jobs (imports + processes)
  - `POST /jobs/schedule/imports` - Schedule import jobs only
  - `POST /jobs/schedule/processes` - Schedule process jobs only
- Root cause: Controller-level `[Authorize]` attribute required JWT Bearer auth for all endpoints, blocking API key authentication
- Solution: Add `[AllowAnonymous]` to allow `[ApiKeyAuth]` filter to validate X-API-Key header
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs)
- Status: Cron jobs can now authenticate with API key via X-API-Key header
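A simplified sketch of the attribute combination described above; `ApiKeyAuth` is the attribute this doc names, the endpoint body is illustrative:
```csharp
// Sketch only: controller-level JWT requirement with an API-key-authorized exception.
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

[Authorize]              // controller-level JWT requirement
[ApiController]
[Route("jobs")]
public class JobsControllerSketch : ControllerBase
{
    [AllowAnonymous]     // opt this endpoint out of the JWT requirement...
    [ApiKeyAuth]         // ...so the X-API-Key filter performs authorization instead
    [HttpPost("schedule")]
    public IActionResult ScheduleAll() => Accepted();
}
```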
**SignalR Authentication Token Flow Fix (Dec 6, 2025):**
- **TokenProvider Population** - Fixed `TokenProvider.Token` never being set with the JWT, causing 401 Unauthorized on SignalR connections
- **AuthService Token Management** - Injected `TokenProvider` into `AuthService` and set the token in 3 key places:
  - `ValidateWithBackendAsync()` - on fresh Google login
  - `CheckAuthenticationAsync()` - on session restore from localStorage
  - `ClearAuthenticationAsync()` - clear token on logout
- **SignalR Initialization Timing** - Moved SignalR initialization from `MainLayout.OnInitializedAsync` to after authentication completes
- **Event-Driven Architecture** - `MainLayout` now subscribes to the `AuthenticationStateChanged` event to initialize SignalR when the user authenticates
- **Session Restore Support** - `CheckAuthenticationAsync()` now fires the `AuthenticationStateChanged` event to initialize SignalR on page refresh
- Root cause: SignalR was initialized before authentication, so JWT token was empty during connection setup
- Solution: Initialize SignalR only after token is available via event subscription
- Files modified: [AuthService.cs](DiunaBI.UI.Shared/Services/AuthService.cs), [MainLayout.razor](DiunaBI.UI.Shared/Components/Layout/MainLayout.razor)
- Status: SignalR authentication working for both fresh login and restored sessions
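A sketch of the client-side wiring this fix implies: the connection is built only once `TokenProvider.Token` is populated. `apiBaseUrl` and `tokenProvider` are assumed locals, not the exact service code:
```csharp
// Sketch: attach the JWT to the SignalR connection after authentication completes.
using Microsoft.AspNetCore.SignalR.Client;

var connection = new HubConnectionBuilder()
    .WithUrl($"{apiBaseUrl}/hubs/entitychanges", options =>
    {
        // Token is set by AuthService on login / session restore (see above)
        options.AccessTokenProvider = () => Task.FromResult<string?>(tokenProvider.Token);
    })
    .WithAutomaticReconnect()
    .Build();
await connection.StartAsync(); // invoked from the AuthenticationStateChanged handler
```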
**SignalR Authentication DI Fix (Dec 6, 2025):**
- **TokenProvider Registration** - Added missing `TokenProvider` service registration in the DI container
- **EntityChangeHubService Scope Fix** - Changed from singleton to scoped to support user-specific JWT tokens
- **Bug Fix** - Resolved `InvalidOperationException` preventing the app from starting after SignalR authentication was added
- Root cause: Singleton service (`EntityChangeHubService`) cannot depend on scoped service (`TokenProvider`) in DI
- Solution: Made `EntityChangeHubService` scoped so each user session has its own authenticated SignalR connection
- Files modified: [ServiceCollectionExtensions.cs](DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs)
---
**Security Audit & Hardening (Dec 5, 2025):**
- **JWT Token Validation** - Enabled issuer/audience validation in [Program.cs](DiunaBI.API/Program.cs), fixed config key mismatch in [JwtTokenService.cs](DiunaBI.API/Services/JwtTokenService.cs)
- **API Key Security** - Created [ApiKeyAuthAttribute.cs](DiunaBI.API/Attributes/ApiKeyAuthAttribute.cs) with X-API-Key header auth and constant-time comparison
- **Job Endpoints** - Migrated 3 job scheduling endpoints in [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs) from URL-based to header-based API keys
- **Stack Trace Exposure** - Fixed 20 instances across 3 controllers ([JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [LayersController.cs](DiunaBI.API/Controllers/LayersController.cs), [DataInboxController.cs](DiunaBI.API/Controllers/DataInboxController.cs)); these now return generic error messages
- **SignalR Authentication** - Added [Authorize] to [EntityChangeHub.cs](DiunaBI.API/Hubs/EntityChangeHub.cs), configured JWT token in [EntityChangeHubService.cs](DiunaBI.UI.Shared/Services/EntityChangeHubService.cs)
- **Rate Limiting** - Implemented ASP.NET Core rate limiting: 100 req/min general, 10 req/min auth in [Program.cs](DiunaBI.API/Program.cs)
- **Security Headers** - Added XSS, clickjacking, and MIME sniffing protection middleware in [Program.cs](DiunaBI.API/Program.cs)
- **Input Validation** - Added pagination limits (1-1000) to GetAll endpoints in 3 controllers
- **User Enumeration** - Return a generic auth error in [GoogleAuthService.cs](DiunaBI.API/Services/GoogleAuthService.cs) to prevent user enumeration
- **Sensitive Data Logging** - Made conditional on development only in [Program.cs](DiunaBI.API/Program.cs)
- **Base64 Size Limit** - Added 10MB limit to DataInbox in [DataInboxController.cs](DiunaBI.API/Controllers/DataInboxController.cs)
- Files modified: 12 files (API: Program.cs, 4 controllers, 3 services, 1 hub, 1 new attribute; UI: EntityChangeHubService.cs, ServiceCollectionExtensions.cs)
- Security status: 5/5 CRITICAL fixed, 3/3 HIGH fixed, 4/4 MEDIUM fixed
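A sketch of the constant-time comparison idea behind ApiKeyAuthAttribute; the helper name and surrounding plumbing are assumptions:
```csharp
// Sketch: compare API keys without early exit on the first mismatched byte.
using System.Security.Cryptography;
using System.Text;

static bool ApiKeysMatch(string provided, string expected) =>
    CryptographicOperations.FixedTimeEquals(
        Encoding.UTF8.GetBytes(provided),
        Encoding.UTF8.GetBytes(expected)); // length mismatch returns false immediately
```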
**Seq Removal - Logging Cleanup (Dec 5, 2025):**
- ✅ Removed Seq logging sink to eliminate commercial licensing concerns
- ✅ Removed `Serilog.Sinks.Seq` NuGet package from DiunaBI.API.csproj
- ✅ Removed Seq sink configuration from appsettings.Development.json
- ✅ Kept Serilog (free, open-source) with Console + File sinks for production-ready logging
- ✅ Build verified - no errors after Seq removal
- Files modified: [DiunaBI.API.csproj](DiunaBI.API/DiunaBI.API.csproj), [appsettings.Development.json](DiunaBI.API/appsettings.Development.json)
- Manual step required: Remove `seq` service from docker-compose.yml and add Docker log rotation config
**UI Reorganization (Dec 5, 2025):**
- ✅ Moved pages to feature-based folders: `Pages/Layers/`, `Pages/Jobs/`, `Pages/DataInbox/`
- ✅ Organized components: `Components/Layout/` (MainLayout, EmptyLayout, Routes), `Components/Auth/` (AuthGuard, LoginCard)
- ✅ Removed obsolete wrapper files (LayerListPage, JobListPage, DataInboxListPage, etc.)
- ✅ Removed duplicate component files (LayerListComponent, JobListComponent, DataInboxListComponent)
- ✅ Standardized code-behind: `.razor.cs` for complex logic, inline `@code` for simple pages
- ✅ Updated `_Imports.razor` with new namespaces: `DiunaBI.UI.Shared.Components.Layout`, `DiunaBI.UI.Shared.Components.Auth`
- ✅ All routes unchanged - backward compatible
---
## PROJECT TYPE & TECH STACK
**Application Type:** Full-stack Business Intelligence (BI) platform with multi-tier architecture, real-time capabilities, and plugin system
**Core Stack:**
- Backend: ASP.NET Core 10.0 Web API
- Frontend: Blazor Server + MAUI Mobile
- Database: SQL Server + EF Core 10.0
- UI: MudBlazor 8.0
- Real-time: SignalR (EntityChangeHub)
- Google: Sheets API, Drive API, OAuth
- Logging: Serilog (Console, File)
- Auth: JWT Bearer + Google OAuth
---
## SOLUTION STRUCTURE (10 Projects)
```
DiunaBI.API (Web API)
├── Controllers: Auth, Layers, Jobs, DataInbox
├── Hubs: EntityChangeHub (SignalR real-time updates)
└── Services: GoogleAuth, JwtToken
DiunaBI.Domain (Entities)
└── User, Layer, Record, RecordHistory, QueueJob, DataInbox, ProcessSource
DiunaBI.Application (DTOs)
└── LayerDto, RecordDto, UserDto, RecordHistoryDto, PagedResult, JobDto
DiunaBI.Infrastructure (Data + Services)
├── Data: AppDbContext, Migrations (47 total)
├── Interceptors: EntityChangeInterceptor (auto-broadcasts DB changes)
├── Services: PluginManager, JobScheduler, JobWorker, GoogleSheets/Drive
├── Plugins: BaseDataImporter, BaseDataProcessor, BaseDataExporter
└── Interfaces: IPlugin, IDataProcessor, IDataImporter, IDataExporter
DiunaBI.UI.Web (Blazor Server)
└── Server-side Blazor web application
DiunaBI.UI.Mobile (MAUI)
└── iOS, Android, Windows, macOS support
DiunaBI.UI.Shared (Blazor Component Library - Reorganized)
├── Pages/
│ ├── Layers/ (Index.razor, Details.razor)
│ ├── Jobs/ (Index.razor, Details.razor)
│ ├── DataInbox/ (Index.razor, Details.razor)
│ ├── Dashboard.razor, Login.razor, Index.razor
├── Components/
│ ├── Layout/ (MainLayout, EmptyLayout, Routes)
│ └── Auth/ (AuthGuard, LoginCard)
└── Services/
├── LayerService, JobService, DataInboxService
├── EntityChangeHubService (SignalR client)
├── FilterStateServices (remember filters)
└── AuthService, TokenProvider
DiunaBI.Plugins.Morska (Feature Plugin)
├── Importers: Standard, D1, D3, FK2 (4 total)
├── Processors: D6, T1, T3, T4, T5 variants (12 total)
└── Exporters: Google Sheets export (1)
DiunaBI.Plugins.PedrolloPL (Feature Plugin - NEW)
└── Importers: B3 (1 total)
DiunaBI.Tests (Testing)
└── Unit and integration tests
```
---
## CORE FUNCTIONALITY
**Purpose:** BI platform for data import, processing, transformation via modular plugin architecture. Multi-layer workflows with audit trails, real-time notifications, scheduled job processing.
**Main Features:**
1. **Layer Management** - 4 types (Import/Processed/Admin/Dictionary), parent-child relationships, soft deletes
2. **Data Records** - 32 numeric columns (Value1-32) + description, hierarchical, full audit trail
3. **Plugin Architecture** - Dynamic assembly loading, base classes in Infrastructure, 3 types (Importers/Processors/Exporters)
4. **Job Queue System** - Background worker with retry logic (30s → 2m → 5m), priority-based, auto-scheduling
5. **External Data** - DataInbox API, Google Sheets read/write, Google Drive integration
6. **Real-time Updates** - SignalR broadcasts entity changes (create/update/delete) to all connected clients
7. **Audit Trail** - RecordHistory tracks all record changes with field-level diffs and JSON summaries
8. **Filter Persistence** - UI filter states saved across sessions (LayerFilterStateService, DataInboxFilterStateService)
---
## KEY ENTITIES
**Layer**
- Id, Number, Name, Type (Import/Processed/Administration/Dictionary)
- CreatedAt/ModifiedAt, CreatedBy/ModifiedBy (with user relations)
- IsDeleted (soft delete), IsCancelled (processing control), ParentId
- Relations: Records (1-to-many), ProcessSources (1-to-many)
**Record**
- Id, Code (unique identifier), LayerId
- Value1-Value32 (double?), Desc1 (string, max 10000 chars)
- CreatedAt/ModifiedAt, CreatedBy/ModifiedBy, IsDeleted
- Audit: Full history tracked in RecordHistory table
**RecordHistory** (NEW - Migration 47)
- RecordId, LayerId, ChangedAt, ChangedById
- ChangeType (Created/Updated/Deleted)
- Code, Desc1 (snapshot at time of change)
- ChangedFields (comma-separated field names)
- ChangesSummary (JSON with old/new values)
- Indexes: (RecordId, ChangedAt), (LayerId, ChangedAt) for performance
**QueueJob**
- LayerId, LayerName, PluginName
- JobType (Import/Process)
- Priority (0 = highest), Status (Pending/Running/Completed/Failed/Retrying)
- RetryCount, MaxRetries (default 5)
- CreatedAt, LastAttemptAt, CompletedAt
- LastError (detailed error message)
**DataInbox**
- Id, Name, Source (identifiers)
- Data (base64-encoded JSON array)
- CreatedAt
- Used by importers to stage incoming data
**User**
- Id (Guid), Email, UserName
- CreatedAt, LastLoginAt
- Google OAuth identity
**ProcessSource**
- Id, SourceLayerId, TargetLayerId
- Defines layer processing relationships
---
## API ENDPOINTS
**Base:** `/` (ApiController routes)
### AuthController (/auth)
- `POST /auth/apiToken` - Exchange Google ID token for JWT (AllowAnonymous)
- `POST /auth/refresh` - Refresh expired JWT token
### LayersController (/layers)
- `GET /layers?page=1&pageSize=10&search=&type=` - List layers (paged, filterable)
- `GET /layers/{id}` - Get layer details with records
- `POST /layers` - Create new layer
- `PUT /layers/{id}` - Update layer
- `DELETE /layers/{id}` - Soft delete layer
- `POST /layers/{id}/records` - Add/update records
- `PUT /layers/{layerId}/records/{recordId}` - Update specific record
- `DELETE /layers/{layerId}/records/{recordId}` - Delete record
- `GET /layers/{layerId}/records/{recordId}/history` - Get record history
- `GET /layers/{layerId}/deleted-records` - Get deleted records with history
### JobsController (/jobs) - NEW
- `GET /jobs?page=1&pageSize=50&status=&jobType=` - List jobs (paged, filterable)
- `GET /jobs/{id}` - Get job details
- `GET /jobs/stats` - Get job statistics (counts by status)
- `POST /jobs/schedule/{apiKey}` - Schedule all jobs from layer configs
- `POST /jobs/schedule/imports/{apiKey}` - Schedule import jobs only
- `POST /jobs/schedule/processes/{apiKey}` - Schedule process jobs only
- `POST /jobs/create-for-layer/{layerId}` - Create job for specific layer (manual trigger)
- `POST /jobs/{id}/retry` - Retry failed job (resets to Pending)
- `DELETE /jobs/{id}` - Cancel pending/retrying job
### DataInboxController (/datainbox)
- `GET /datainbox?page=1&pageSize=10&search=` - List inbox items (paged, filterable)
- `GET /datainbox/{id}` - Get inbox item with decoded data
- `POST /datainbox` - Create inbox item
- `PUT /datainbox/Add/{apiKey}` - Add data (API key + Basic Auth)
- `DELETE /datainbox/{id}` - Delete inbox item
### SignalR Hub
- `/hubs/entitychanges` - SignalR hub for real-time entity change notifications
- Event: `EntityChanged(module, id, operation)` - broadcasts to all clients
- Modules: QueueJobs, Layers, Records, RecordHistory
---
## AUTHENTICATION & SECURITY
**Flow:**
1. Client exchanges Google ID token → `/auth/apiToken`
2. GoogleAuthService validates token with Google, maps to internal User
3. Returns JWT (7-day expiration, HS256 signing)
4. JWT required on all protected endpoints (except /auth/apiToken, /health)
5. UserId extraction middleware sets X-UserId header for audit trails
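An illustrative issuance routine matching the flow above (7-day expiry, HS256); claim names and key handling are assumptions, not the exact JwtTokenService code:
```csharp
// Sketch of JWT creation per the flow above (assumed claim set).
using System.IdentityModel.Tokens.Jwt;
using System.Security.Claims;
using System.Text;
using Microsoft.IdentityModel.Tokens;

static string CreateJwt(Guid userId, string email, string securityKey)
{
    var creds = new SigningCredentials(
        new SymmetricSecurityKey(Encoding.UTF8.GetBytes(securityKey)),
        SecurityAlgorithms.HmacSha256);
    var token = new JwtSecurityToken(
        claims:
        [
            new Claim(JwtRegisteredClaimNames.Sub, userId.ToString()),
            new Claim(JwtRegisteredClaimNames.Email, email),
        ],
        expires: DateTime.UtcNow.AddDays(7), // ExpiryDays (7) from configuration
        signingCredentials: creds);
    return new JwtSecurityTokenHandler().WriteToken(token);
}
```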
**Security:**
- Google OAuth 2.0 for identity verification
- JWT Bearer tokens for API access
- API key + Basic Auth for DataInbox external endpoints
- CORS configured for:
- http://localhost:4200
- https://diuna.bim-it.pl
- https://morska.diunabi.com
---
## KEY SERVICES
### Infrastructure Services
**PluginManager**
- Location: `DiunaBI.Infrastructure/Services/PluginManager.cs`
- Loads plugin assemblies from `bin/Plugins/` directory at startup
- Registers IDataProcessor, IDataImporter, IDataExporter implementations
- Provides plugin discovery and execution
**JobSchedulerService**
- Location: `DiunaBI.Infrastructure/Services/JobSchedulerService.cs`
- Creates QueueJob entries from Administration layer configs
- Reads layer.Records with Code="Plugin", Code="Priority", Code="MaxRetries"
- Methods: ScheduleImportJobsAsync, ScheduleProcessJobsAsync, ScheduleAllJobsAsync
**JobWorkerService** (BackgroundService)
- Location: `DiunaBI.Infrastructure/Services/JobWorkerService.cs`
- Polls QueueJobs table every 10 seconds
- Executes jobs via PluginManager (Import/Process)
- Retry logic with exponential backoff: 30s → 2m → 5m delays
- Rate limiting: 5-second delay after imports (Google Sheets API quota)
- Updates job status in real-time (triggers SignalR broadcasts)
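A sketch of the retry schedule described above (30s → 2m → 5m); the exact mapping from attempt number to delay is an assumption about the real implementation:
```csharp
// Sketch: exponential-style backoff capped at 5 minutes, up to MaxRetries (5).
static TimeSpan RetryDelay(int retryCount) => retryCount switch
{
    0 => TimeSpan.FromSeconds(30), // first retry
    1 => TimeSpan.FromMinutes(2),  // second retry
    _ => TimeSpan.FromMinutes(5),  // third and later retries
};
```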
**EntityChangeInterceptor**
- Location: `DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- EF Core SaveChangesInterceptor
- Captures entity changes: Added, Modified, Deleted
- Broadcasts changes via SignalR EntityChangeHub after successful save
- Uses reflection to avoid circular dependencies with IHubContext
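A simplified sketch of the interceptor pattern described above; the real class also resolves the hub via reflection, which is omitted here:
```csharp
// Sketch: capture changes before save, broadcast only after a successful save.
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;

public class EntityChangeInterceptorSketch : SaveChangesInterceptor
{
    private readonly List<(string Module, string Id, string Operation)> _pending = [];

    public override InterceptionResult<int> SavingChanges(
        DbContextEventData eventData, InterceptionResult<int> result)
    {
        foreach (var entry in eventData.Context!.ChangeTracker.Entries())
            if (entry.State is EntityState.Added or EntityState.Modified or EntityState.Deleted)
                _pending.Add((entry.Metadata.ClrType.Name,
                              // assumes every tracked entity exposes an Id property
                              entry.Property("Id").CurrentValue?.ToString() ?? "",
                              entry.State.ToString()));
        return result;
    }

    public override int SavedChanges(SaveChangesCompletedEventData eventData, int result)
    {
        // After a successful save: broadcast each change via EntityChangeHub (omitted)
        _pending.Clear();
        return result;
    }
}
```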
**GoogleSheetsHelper**
- Location: `DiunaBI.Infrastructure/Helpers/GoogleSheetsHelper.cs`
- Google Sheets API v4 integration
- Methods: ReadRange, WriteRange, CreateSpreadsheet, UpdateSpreadsheet
**GoogleDriveHelper**
- Location: `DiunaBI.Infrastructure/Helpers/GoogleDriveHelper.cs`
- Google Drive API v3 integration
- Methods: UploadFile, ListFiles, MoveFile
**GoogleAuthService / JwtTokenService**
- Authentication and token management
- JWT generation and validation
### UI Services
**EntityChangeHubService**
- Location: `DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
- Singleton service for SignalR client connection
- Auto-reconnect enabled
- Event: `EntityChanged` - UI components subscribe for real-time updates
- Initialized in MainLayout.OnInitializedAsync
**LayerService / JobService / DataInboxService**
- HTTP clients for API communication
- DTOs serialization/deserialization
- Paged result handling
**LayerFilterStateService / DataInboxFilterStateService**
- Persist filter state across navigation
- Singleton services remember search, type, page selections
---
## DATABASE SCHEMA
**Total Migrations:** 47
**Latest Migrations:**
**Migration 47: RecordHistory (Dec 1, 2025)**
- **NEW Table: RecordHistory**
- Tracks all record changes (Created, Updated, Deleted)
- Fields: Id, RecordId, LayerId, ChangedAt, ChangedById, ChangeType, Code, Desc1, ChangedFields, ChangesSummary
- Indexes: IX_RecordHistory_RecordId_ChangedAt, IX_RecordHistory_LayerId_ChangedAt
- Foreign key: RecordHistory.ChangedById → Users.Id
**Migration 46: FixLayerDefaultValues (Nov 20, 2025)**
- Set default value: Layers.IsDeleted = false
**Migration 45: UpdateModel (Nov 19, 2025)**
- Added GETUTCDATE() defaults for all timestamp fields
- Changed foreign key constraints from CASCADE to RESTRICT:
- Layers → Users (CreatedById, ModifiedById)
- Records → Users (CreatedById, ModifiedById)
- Added FK_ProcessSources_Layers_LayerId
**Core Tables:**
- Users (authentication, audit)
- Layers (4 types, soft deletes, parent-child)
- Records (32 Value fields + Desc1, audit, soft deletes)
- RecordHistory (change tracking, field diffs, JSON summaries)
- QueueJobs (job queue, retry logic, status tracking)
- DataInbox (incoming data staging, base64 encoded)
- ProcessSources (layer relationships)
---
## PLUGIN SYSTEM
### Base Classes (Infrastructure/Plugins/)
**BaseDataImporter** (`DiunaBI.Infrastructure/Plugins/BaseDataImporter.cs`)
- Abstract base for all importers
- Methods: ImportAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, PluginManager, GoogleSheetsHelper, GoogleDriveHelper
**BaseDataProcessor** (`DiunaBI.Infrastructure/Plugins/BaseDataProcessor.cs`)
- Abstract base for all processors
- Methods: ProcessAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, PluginManager
**BaseDataExporter** (`DiunaBI.Infrastructure/Plugins/BaseDataExporter.cs`)
- Abstract base for all exporters
- Methods: ExportAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, GoogleSheetsHelper, GoogleDriveHelper
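A hypothetical importer built on the base classes above; member names follow this doc (`ImportAsync(layerId, jobId)`, `ValidateConfiguration()`), parameter types and the body are assumptions:
```csharp
// Sketch of a minimal plugin; real importers read staged data and write Records.
public class ExampleImporter : BaseDataImporter
{
    public override bool ValidateConfiguration() => true; // check required config records here

    public override async Task ImportAsync(Guid layerId, Guid jobId)
    {
        // Read staged rows (e.g. from DataInbox or Google Sheets) and write Records
        // to the target layer via the inherited AppDbContext.
        await Task.CompletedTask;
    }
}
```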
### Morska Plugin (DiunaBI.Plugins.Morska)
**Importers (4):**
- MorskaStandardImporter - Generic CSV/Excel import
- MorskaD1Importer - D1 data format
- MorskaD3Importer - D3 data format
- MorskaFK2Importer - FK2 data format
**Processors (12):**
- MorskaD6Processor
- MorskaT1R1Processor
- MorskaT1R3Processor
- MorskaT3SingleSourceProcessor
- MorskaT3SourceYearSummaryProcessor
- MorskaT3MultiSourceSummaryProcessor
- MorskaT3MultiSourceYearSummaryProcessor
- MorskaT4R2Processor
- MorskaT4SingleSourceProcessor
- MorskaT5LastValuesProcessor
- MorskaT3MultiSourceCopySelectedCodesProcessor-TO_REMOVE (deprecated)
- MorskaT3MultiSourceCopySelectedCodesYearSummaryProcessor-TO_REMOVE (deprecated)
**Exporters (1):**
- googleSheet.export.cs - Google Sheets export
**Total:** ~6,566 lines of code
### PedrolloPL Plugin (DiunaBI.Plugins.PedrolloPL) - NEW
**Importers (1):**
- **PedrolloPLImportB3** (`DiunaBI.Plugins.PedrolloPL/Importers/PedrolloPLImportB3.cs`)
- Imports B3 data from DataInbox
- Uses L1-D-B3-CODES dictionary layer for region code mapping
- Creates 12 monthly records per region (Value1-Value12)
- Generates Import layers: L{Number}-I-B3-{Year}-{Timestamp}
- Handles base64 JSON data decoding
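A sketch of the decoding step described above; `DataInbox.Data` holds a base64-encoded JSON array per this doc, while the row shape and mapping are assumptions:
```csharp
// Sketch: decode a DataInbox payload and walk the JSON array.
using System.Text;
using System.Text.Json;

var json = Encoding.UTF8.GetString(Convert.FromBase64String(inbox.Data));
using var doc = JsonDocument.Parse(json);
foreach (var row in doc.RootElement.EnumerateArray())
{
    // map each row to monthly values (Value1..Value12) per region code
}
```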
---
## UI STRUCTURE (DiunaBI.UI.Shared)
### Reorganized Structure (Dec 5, 2025)
**Pages/** (Routable pages with @page directive)
```
Pages/
├── Layers/
│ ├── Index.razor + Index.razor.cs - /layers (list with filters, pagination)
│ └── Details.razor + Details.razor.cs - /layers/{id} (detail, edit, history)
├── Jobs/
│ ├── Index.razor + Index.razor.cs - /jobs (list with filters, real-time updates)
│ └── Details.razor - /jobs/{id} (detail, retry, cancel, real-time)
├── DataInbox/
│ ├── Index.razor + Index.razor.cs - /datainbox (list with filters)
│ └── Details.razor + Details.razor.cs - /datainbox/{id} (detail, base64 decode)
├── Dashboard.razor - /dashboard (user info)
├── Login.razor - /login (Google OAuth)
└── Index.razor - / (redirects to /dashboard)
```
**Components/** (Reusable components, no routes)
```
Components/
├── Layout/
│ ├── MainLayout.razor - Main app layout with drawer, nav menu
│ ├── EmptyLayout.razor - Minimal layout for login page
│ └── Routes.razor - Router configuration
└── Auth/
├── AuthGuard.razor - Authentication guard wrapper
└── LoginCard.razor - Google login button component
```
**Navigation Menu:**
- Dashboard (/dashboard) - User profile
- Layers (/layers) - Layer management
- Data Inbox (/datainbox) - Incoming data review
- Jobs (/jobs) - Job queue monitoring (with real-time status updates)
**Code-Behind Pattern:**
- Complex pages (50+ lines logic): Separate `.razor.cs` files
- Simple pages: Inline `@code` blocks
- Namespaces: `DiunaBI.UI.Shared.Pages.{Feature}`
---
## REAL-TIME FEATURES (SignalR)
### Architecture
**Hub:** `DiunaBI.API/Hubs/EntityChangeHub.cs`
- Endpoint: `/hubs/entitychanges`
- Method: `SendEntityChange(string module, string id, string operation)`
- Broadcasts: `EntityChanged` event to all connected clients
**Interceptor:** `DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- EF Core SaveChangesInterceptor
- Detects: Added, Modified, Deleted entities
- Broadcasts: After successful SaveChanges
- Modules: QueueJobs, Layers, Records, RecordHistory
**UI Service:** `DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
- Singleton initialized in MainLayout
- Auto-reconnect enabled
- Components subscribe: `HubService.EntityChanged += OnEntityChanged`
### Real-time Update Flow
1. User action → API endpoint
2. DbContext.SaveChangesAsync()
3. EntityChangeInterceptor captures changes
4. SignalR broadcast to all clients: `EntityChanged(module, id, operation)`
5. UI components receive event and refresh data
6. StateHasChanged() updates UI
**Example:** Job status changes appear instantly on JobDetailPage and JobListPage
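A sketch of how a page code-behind (e.g. Jobs/Index.razor.cs) might consume this flow; the event delegate shape is assumed from `EntityChanged(module, id, operation)`, and `LoadJobsAsync` is a hypothetical re-fetch method:
```csharp
// Sketch: subscribe on init, marshal updates to the UI thread, unsubscribe on dispose.
using Microsoft.AspNetCore.Components;

public partial class IndexSketch : ComponentBase, IDisposable
{
    [Inject] private EntityChangeHubService HubService { get; set; } = default!;

    protected override void OnInitialized() => HubService.EntityChanged += OnEntityChanged;

    private void OnEntityChanged(string module, string id, string operation)
    {
        if (module != "QueueJobs") return;   // react only to job changes
        _ = InvokeAsync(async () =>
        {
            await LoadJobsAsync();           // hypothetical re-fetch from the API
            StateHasChanged();               // step 6 of the flow above
        });
    }

    public void Dispose() => HubService.EntityChanged -= OnEntityChanged;
}
```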
---
## JOB QUEUE SYSTEM
### Components
**Entity:** `QueueJob` (DiunaBI.Domain/Entities/QueueJob.cs)
- JobType: Import, Process
- JobStatus: Pending, Running, Completed, Failed, Retrying
- Priority: 0 = highest priority
- Retry: 30s → 2m → 5m delays, max 5 attempts
**Scheduler:** `JobSchedulerService`
- Reads Administration layer configs (Type=ImportWorker/ProcessWorker)
- Auto-creates jobs based on layer.Records configuration
- API endpoints: `/jobs/schedule/{apiKey}`, `/jobs/schedule/imports/{apiKey}`, `/jobs/schedule/processes/{apiKey}`
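A sketch of how the scheduler might turn an Administration layer's config records into a QueueJob; the Code values and QueueJob fields come from this doc, the mapping itself is an assumption:
```csharp
// Sketch: layer.Records with Code "Plugin"/"Priority"/"MaxRetries" become a job.
var config = layer.Records.ToDictionary(r => r.Code, r => r.Desc1);
db.QueueJobs.Add(new QueueJob
{
    LayerId    = layer.Id,
    LayerName  = layer.Name,
    PluginName = config["Plugin"],
    Priority   = int.TryParse(config.GetValueOrDefault("Priority"), out var p) ? p : 0,
    MaxRetries = int.TryParse(config.GetValueOrDefault("MaxRetries"), out var m) ? m : 5,
    Status     = JobStatus.Pending,
    CreatedAt  = DateTime.UtcNow,
});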
**Worker:** `JobWorkerService` (BackgroundService)
- Polls every 10 seconds
- Executes via PluginManager
- Exponential backoff on failures
- Rate limiting for Google API quota
- Real-time status updates via SignalR
**UI:** `Pages/Jobs/`
- Index.razor - Job list with filters, real-time updates
- Details.razor - Job detail with retry/cancel, real-time status
### Job Lifecycle
1. **Creation** - JobSchedulerService or manual via API
2. **Queued** - Status: Pending, sorted by Priority
3. **Execution** - JobWorkerService picks up, Status: Running
4. **Completion** - Status: Completed or Failed
5. **Retry** - On failure, Status: Retrying with exponential backoff
6. **Real-time** - All status changes broadcast via SignalR
**Statistics Endpoint:** `GET /jobs/stats`
```json
{
"pending": 5,
"running": 2,
"completed": 150,
"failed": 3,
"retrying": 1,
"total": 161
}
```
---
## RECENT DEVELOPMENT
**Recent Commits (Dec 2-5, 2025):**
- **193127b:** SignalR for realtime entitychanges (Dec 4)
- **bf2beda, 942da18:** Build fixes (Dec 4)
- **a3fa8f9:** P2 import is working (Dec 4)
- **0e3b393:** WIP: p2 plugin (Dec 3)
- **445c07a:** Morska plugins refactor (Dec 2)
- **3f8e62f:** WIP: queue engine (Dec 2)
- **248106a:** Plugins little refactor (Dec 2)
- **587d4d6:** Pedrollo plugins (Dec 2)
- **e70a8dd:** Remember list filters (Dec 2)
- **89859cd:** Record history is working (Dec 2)
**Development Focus (Last 30 Days):**
1. ✅ Real-time updates (SignalR integration)
2. ✅ Job queue system (background worker, retry logic)
3. ✅ PedrolloPL plugin (B3 importer)
4. ✅ Record history tracking (audit trail)
5. ✅ UI reorganization (feature-based folders)
6. ✅ Plugin refactoring (base classes in Infrastructure)
7. ✅ Filter persistence (UI state management)
**Major Features Added:**
- SignalR real-time entity change notifications
- Background job processing with retry logic
- Record history with field-level diffs
- PedrolloPL B3 data importer
- UI reorganization (Pages/Layers, Pages/Jobs, Pages/DataInbox)
- Filter state persistence across sessions
---
## CONFIGURATION
**Key Settings (appsettings.Development.json):**
- ConnectionStrings:SQLDatabase - SQL Server (localhost:21433, DB: DiunaBI-PedrolloPL)
- JwtSettings:SecurityKey, ExpiryDays (7)
- GoogleAuth:ClientId, RedirectUri
- apiKey, apiUser, apiPass - DataInbox API security
- exportDirectory - Google Drive folder ID for exports
- apiLocalUrl - localhost:5400
- InstanceName - DEV/PROD environment identifier
**Logging Configuration:**
```json
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore.Database.Command": "Warning",
"Microsoft.EntityFrameworkCore.Infrastructure": "Warning",
"System.Net.Http.HttpClient": "Warning",
"Google.Apis": "Warning",
"DiunaBI.Core.Services.PluginManager": "Information"
}
}
}
```
**CORS Origins:**
- http://localhost:4200 (development)
- https://diuna.bim-it.pl (production)
- https://morska.diunabi.com (production)
---
## PATTERNS & ARCHITECTURE
**Design Patterns:**
- Clean Architecture (Domain → Application → Infrastructure → API)
- Plugin Pattern (dynamic loading, base classes, interface contracts)
- Interceptor Pattern (EF Core SaveChangesInterceptor for change tracking)
- Hub Pattern (SignalR for real-time notifications)
- Service Pattern (dependency injection throughout)
- Repository Pattern (EF Core DbContext as repository)
- Background Service Pattern (JobWorkerService for async processing)
**Tech Versions:**
- .NET 10.0 (upgraded from .NET 8.0)
- EF Core 10.0
- C# 13.0
- Blazor Server (net10.0)
- MAUI (net10.0-ios/android/windows/macos)
- MudBlazor 8.0
**Architectural Decisions:**
- Plugin base classes in Infrastructure for reusability
- SignalR for real-time updates (no polling)
- Background service for job processing (no external scheduler)
- Soft deletes with audit trails
- Foreign key RESTRICT to prevent accidental cascades
- Feature-based folder structure in UI
---
## QUICK REFERENCE
**Database:**
- SQL Server with 47 EF Core migrations
- Auto-timestamps via GETUTCDATE() defaults
- Soft deletes (IsDeleted flag)
- Audit trails (CreatedBy, ModifiedBy, RecordHistory table)
**Build Process:**
- MSBuild target copies plugin DLLs to `bin/Plugins/` after build
- Plugins: DiunaBI.Plugins.Morska.dll, DiunaBI.Plugins.PedrolloPL.dll
**SignalR:**
- Hub: `/hubs/entitychanges`
- Broadcasts: `EntityChanged(module, id, operation)`
- Auto-reconnect enabled in UI
- Real-time updates for QueueJobs, Layers, Records
**Job Queue:**
- Auto-scheduling from layer configs (Type=ImportWorker/ProcessWorker)
- Background processing every 10 seconds
- Retry logic: 30s → 2m → 5m (max 5 retries)
- Priority-based execution (0 = highest)
- Real-time status updates via SignalR
**Plugins:**
- **Morska:** 4 importers, 12 processors, 1 exporter (~6,566 LOC)
- **PedrolloPL:** 1 importer (B3 data)
- Base classes: BaseDataImporter, BaseDataProcessor, BaseDataExporter
- Dynamic loading from `bin/Plugins/` at startup
**UI Structure:**
- Feature-based folders: Pages/Layers, Pages/Jobs, Pages/DataInbox
- Separate code-behind for complex logic (.razor.cs files)
- Inline @code for simple pages
- Organized components: Layout/, Auth/
- Filter state persistence across navigation
---
## FILE PATHS REFERENCE
**Key Configuration:**
- API: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/appsettings.json`
- API Startup: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Program.cs`
**SignalR:**
- Hub: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Hubs/EntityChangeHub.cs`
- Interceptor: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- UI Service: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
**Job System:**
- Controller: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Controllers/JobsController.cs`
- Scheduler: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Services/JobSchedulerService.cs`
- Worker: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Services/JobWorkerService.cs`
- UI Pages: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Pages/Jobs/`
**Plugins:**
- Base Classes: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Plugins/`
- Morska: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Plugins.Morska/`
- PedrolloPL: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Plugins.PedrolloPL/`
**Migrations:**
- Latest: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Migrations/20251201165810_RecordHistory.cs`
**UI Components:**
- Pages: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Pages/`
- Components: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Components/`
- Services: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Services/`


@@ -1,119 +1,149 @@
name: BuildApp name: Build Docker Images
on: on:
workflow_dispatch: {}
push: push:
branches: branches:
- main - main
workflow_dispatch: {}
concurrency: concurrency:
group: build-${{ github.ref }} group: build-${{ github.ref }}
cancel-in-progress: false cancel-in-progress: false
jobs: jobs:
build-frontend: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
matrix:
customer:
- name: Morska
plugin_project: DiunaBI.Plugins.Morska
- name: PedrolloPL
plugin_project: DiunaBI.Plugins.PedrolloPL
steps: steps:
- name: Checkout - name: Checkout
uses: https://github.com/actions/checkout@v4 uses: https://github.com/actions/checkout@v4
- name: Use Node.js 20 - name: Setup .NET 10
uses: https://github.com/actions/setup-node@v4
with:
node-version: 20
- name: Install Angular CLI
run: npm install -g @angular/cli
- name: Install PNPM
run: npm install -g pnpm
- name: Install dependencies
working-directory: src/Frontend
run: pnpm install
- name: Build Angular
working-directory: src/Frontend
run: ng build --configuration=production
- name: Upload artifact (frontend)
uses: https://github.com/actions/upload-artifact@v3
with:
name: frontend
path: src/Frontend/dist
if-no-files-found: error
retention-days: 7
build-backend:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Setup .NET 8
uses: https://github.com/actions/setup-dotnet@v4 uses: https://github.com/actions/setup-dotnet@v4
with: with:
dotnet-version: 8.0.x dotnet-version: 10.0.x
- name: Restore dependencies - name: Restore dependencies
working-directory: src/Backend working-directory: .
run: dotnet restore DiunaBI.sln run: |
dotnet restore ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj
dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins - name: Build solution and prepare plugins
working-directory: src/Backend working-directory: .
run: | run: |
set -e set -e
dotnet build DiunaBI.sln --configuration Release # Build plugin first to avoid missing dependency issues
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release dotnet build ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj --configuration Release --no-restore
mkdir -p DiunaBI.Tests/bin/Release/net8.0/Plugins # Skip automatic plugin copy in API build since we only have one plugin restored
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/ dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release --no-restore -p:SkipPluginCopy=true
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Core.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/ dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release --no-restore
ls -la DiunaBI.Tests/bin/Release/net8.0/Plugins/
mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp ${{ matrix.customer.plugin_project }}/bin/Release/net10.0/${{ matrix.customer.plugin_project }}.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
- name: Run Tests - name: Run Tests
working-directory: src/Backend working-directory: .
run: | run: |
dotnet add DiunaBI.Tests/DiunaBI.Tests.csproj package coverlet.collector
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \ dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \ --configuration Release \
--no-restore \ --no-restore \
--logger "trx;LogFileName=test-results.trx" \ --logger "trx;LogFileName=test-results-${{ matrix.customer.name }}.trx" \
--collect:"XPlat Code Coverage" \ --collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly" --filter "Category!=LocalOnly" || true
- name: Publish Test Results - name: Publish Test Results
uses: https://github.com/actions/upload-artifact@v3 uses: https://github.com/actions/upload-artifact@v3
if: success() || failure() if: success() || failure()
with: with:
name: test-results name: test-results-${{ matrix.customer.name }}
path: | path: |
src/Backend/DiunaBI.Tests/TestResults/*.trx DiunaBI.Tests/TestResults/*.trx
src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
retention-days: 7 retention-days: 7
- name: Publish WebAPI build-and-push:
if: success() runs-on: ubuntu-latest
working-directory: src/Backend needs: test
run: | if: success() || failure()
dotnet publish DiunaBI.WebAPI/DiunaBI.WebAPI.csproj \ strategy:
--configuration Release \ matrix:
--framework net8.0 \ customer:
--self-contained false \ - name: Morska
--output ../../build/webapi plugin_project: DiunaBI.Plugins.Morska
mkdir -p ../../build/webapi/Plugins image_suffix: morska
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll ../../build/webapi/Plugins/ - name: PedrolloPL
ls -la ../../build/webapi/Plugins/ plugin_project: DiunaBI.Plugins.PedrolloPL
image_suffix: pedrollopl
- name: Clean up sensitive files steps:
working-directory: build/webapi - name: Debug secrets
run: | run: |
rm -f appsettings.Development.json || true echo "User length: ${#REGISTRY_USER}"
rm -f client_secrets.Development.json || true echo "Token length: ${#REGISTRY_TOKEN}"
env:
REGISTRY_USER: ${{ secrets.REGISTRY_USER }}
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
- name: Upload artifact (webapi) - name: Checkout code
uses: https://github.com/actions/upload-artifact@v3 uses: https://github.com/actions/checkout@v4
with:
name: webapi - name: Set up Docker Buildx
path: build/webapi uses: https://github.com/docker/setup-buildx-action@v3
if-no-files-found: error
retention-days: 7 - name: Log in to Gitea Container Registry
run: |
echo "${{ secrets.REGISTRY_TOKEN }}" | docker login code.bim-it.pl -u "${{ secrets.REGISTRY_USER }}" --password-stdin
- name: Build and push API image
working-directory: .
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
--build-arg PLUGIN_PROJECT=${{ matrix.customer.plugin_project }} \
-f DiunaBI.API/Dockerfile \
-t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest \
-t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
--push \
.
- name: Build and push UI image
working-directory: .
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.UI.Web/Dockerfile \
-t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest \
-t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
--push \
.
- name: Output build info
run: |
echo "## 🐳 Docker Images Built - ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
echo "**Customer:** ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
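Note: the per-customer API images above differ only in the PLUGIN_PROJECT build-arg handed to DiunaBI.API/Dockerfile; the UI build takes no build-arg, so its per-customer images differ only in tag.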

View File

@@ -1,130 +0,0 @@
name: Build Docker Images
on:
push:
branches:
- ddd-refactor
workflow_dispatch: {}
concurrency:
group: build-${{ github.ref }}
cancel-in-progress: false
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Setup .NET 10
uses: https://github.com/actions/setup-dotnet@v4
with:
dotnet-version: 10.0.x
- name: Restore dependencies
working-directory: src/Backend
run: |
dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins
working-directory: src/Backend
run: |
set -e
# Build only required projects — skip DiunaBI.UI.Mobile
dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release
dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release
mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
- name: Run Tests
working-directory: src/Backend
run: |
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \
--no-restore \
--logger "trx;LogFileName=test-results.trx" \
--collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly" || true
- name: Publish Test Results
uses: https://github.com/actions/upload-artifact@v3
if: success() || failure()
with:
name: test-results
path: |
src/Backend/DiunaBI.Tests/TestResults/*.trx
src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
retention-days: 7
build-and-push:
runs-on: ubuntu-latest
needs: test
if: success() || failure()
steps:
- name: Debug secrets
run: |
echo "User length: ${#REGISTRY_USER}"
echo "Token length: ${#REGISTRY_TOKEN}"
env:
REGISTRY_USER: ${{ secrets.REGISTRY_USER }}
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
- name: Checkout code
uses: https://github.com/actions/checkout@v4
- name: Set up Docker Buildx
uses: https://github.com/docker/setup-buildx-action@v3
- name: Log in to Gitea Container Registry
run: |
echo "${{ secrets.REGISTRY_TOKEN }}" | docker login code.bim-it.pl -u "${{ secrets.REGISTRY_USER }}" --password-stdin
- name: Build and push API image
working-directory: src/Backend
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.API/Dockerfile \
-t code.bim-it.pl/mz/diunabi-api:latest \
-t code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }} \
--push \
.
- name: Build and push UI image
working-directory: src/Backend
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.UI.Web/Dockerfile \
-t code.bim-it.pl/mz/diunabi-ui:latest \
-t code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }} \
--push \
.
- name: Output build info
run: |
echo "## 🐳 Docker Images Built" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:latest" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:latest" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY

View File

@@ -1,122 +0,0 @@
name: ReleaseApp (JS finder + download)
on:
workflow_dispatch: {}
jobs:
release:
runs-on: ubuntu-latest
env:
GITEA_BASE_URL: https://code.bim-it.pl
OWNER: mz
REPO: DiunaBI
REQUIRED_ARTIFACTS: frontend,webapi
SCAN_LIMIT: "100"
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Use Node.js 20
uses: https://github.com/actions/setup-node@v4
with:
node-version: 20
- name: Install unzip (for extraction)
run: |
sudo apt-get update
sudo apt-get install -y unzip
- name: Resolve latest run that exposes required artifacts
id: resolve
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
run: |
node .gitea/scripts/getLatestRunWithArtifacts.js
echo "Resolved run_id: $(cat .gitea/.cache/run_id)"
echo "run_id=$(cat .gitea/.cache/run_id)" >> "$GITHUB_OUTPUT"
- name: Download frontend artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: frontend
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/frontend
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Download webapi artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: webapi
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/webapi
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Show artifact structure
run: |
echo "::group::frontend"
ls -laR artifacts/frontend || true
echo "::endgroup::"
echo "::group::webapi"
ls -laR artifacts/webapi || true
echo "::endgroup::"
- name: Tokenize (replace #{...}# from secrets/vars)
env:
SECRETS: ${{ toJson(secrets) }}
VARIABLES: ${{ toJson(vars) }}
RUN_ID: ${{ steps.resolve.outputs.run_id }}
run: |
set -euo pipefail
node .gitea/scripts/replaceTokens.js
- name: Package artifacts as ZIPs
run: |
mkdir -p build
(cd artifacts/frontend && zip -rq ../../build/DiunaBI-Morska-Frontend.zip .)
(cd artifacts/webapi && zip -rq ../../build/DiunaBI-Morska-WebApi.zip .)
ls -la build
- name: Upload artifacts to remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEARUNNER_SSH_KEY }}
SSH_USER: mz
SSH_HOST: bim-it.pl
REMOTE_DIR: deployment
run: |
set -euo pipefail
umask 077
echo "$SSH_PRIVATE_KEY" > private_key
chmod 600 private_key
mkdir -p ~/.ssh
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
ssh -i private_key "$SSH_USER@$SSH_HOST" "mkdir -p ~/$REMOTE_DIR"
scp -i private_key build/DiunaBI-Morska-Frontend.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
scp -i private_key build/DiunaBI-Morska-WebApi.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
shred -u private_key
- name: Run release script on remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEARUNNER_SSH_KEY }}
SSH_USER: mz
SSH_HOST: bim-it.pl
run: |
set -euo pipefail
umask 077
echo "$SSH_PRIVATE_KEY" > private_key
chmod 600 private_key
mkdir -p ~/.ssh
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
ssh -i private_key "$SSH_USER@$SSH_HOST" "./deployment/DiunaBI-Morska.Release.sh"
shred -u private_key

.gitignore
View File

@@ -562,4 +562,11 @@ coverage/
## Temporary folders
##
tmp/
temp/
##
## LocalDB Development Files
##
DevTools/LocalDB/backups/*.bak
DevTools/LocalDB/backups/*.bacpac
DevTools/LocalDB/data/

View File

@@ -30,17 +30,6 @@
},
"env": {
"ASPNETCORE_ENVIRONMENT": "Development"
-},
-"launchBrowser": {
-"enabled": true,
-"args": "${auto-detect-url}",
-"browser": [
-{
-"osx": "Google Chrome",
-"linux": "chrome",
-"windows": "chrome"
-}
-]
}
}
]

View File

@@ -0,0 +1 @@
POST http://localhost:5400/jobs/schedule/10763478CB738D4ecb2h76g803478CB738D4e

View File

@@ -0,0 +1,99 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Year INT = 2024;
DECLARE @Type NVARCHAR(5) = 'B3';
DECLARE @StartDate NVARCHAR(10) = '2025.01.02';
DECLARE @EndDate NVARCHAR(10) = '2026.12.31';
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-A-IW_', @Type, '-', @Year,'-', @CurrentTimestamp
);
DECLARE @Plugin NVARCHAR(100);
SET @Plugin =
CASE @Type
WHEN 'B3' THEN 'PedrolloPL.Import.B3'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxName NVARCHAR(100);
SET @DataInboxName =
CASE @Type
WHEN 'B3' THEN 'B3_2024'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxSource NVARCHAR(100);
SET @DataInboxSource =
CASE @Type
WHEN 'B3' THEN 'Comarch'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
SELECT @Name AS Name, @StartDate AS StartDate, @EndDate AS EndDate, @Type AS Type, @Year AS Year, @Plugin AS Plugin,
@DataInboxName AS DataInboxName, @DataInboxSource AS DataInboxSource;
IF @JustForDebug = 1
BEGIN
SELECT 'Just for debug' AS Logger;
RETURN;
END;
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'StartDate', @StartDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'EndDate', @EndDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Source', 'DataInbox', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'ImportName', @Type, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'ImportYear', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Type', 'ImportWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Plugin', @Plugin, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'DataInboxName', @DataInboxName, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'DataInboxSource', @DataInboxSource, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Priority', '10', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'MaxRetries', '3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
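These Code/Desc1 rows are the worker's configuration: the scheduling code added later in this changeset (see JobsController.CreateJobForLayer below) reads the Type, IsEnabled, Plugin, Priority and MaxRetries records off an Administration layer before it will queue a job for it.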

View File

@@ -0,0 +1,58 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Year INT = 2024;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-A-PW_P2-', @Year, '-', @CurrentTimestamp
);
DECLARE @SourceNameFilter NVARCHAR(50) = CONCAT('%-A-IW_B3', '-', @Year, '-%');
DECLARE @SourceLayer NVARCHAR(50) = (SELECT TOP 1 [Name] FROM [DiunaBI-PedrolloPL].[dbo].[Layers] WHERE [Name] LIKE @SourceNameFilter);
IF @SourceLayer IS NULL
BEGIN
SELECT 'SourceLayer is NULL' AS Logger;
RETURN;
END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
SELECT @Name AS Name, @SourceLayer AS SourceLayer;
IF @JustForDebug = 1
BEGIN
SELECT 'Just for debug' AS Logger;
RETURN;
END;
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Source', 'B3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'SourceLayer', @SourceLayer, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Type', 'ProcessWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Year', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Plugin', 'PedrolloPL.Process.P2', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Priority', '110', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

View File

@@ -2,7 +2,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'D3';
-DECLARE @Month INT = 11;
+DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
IF @Type NOT IN ('D3')
@@ -14,7 +14,7 @@ END;
DECLARE @ImportType NVARCHAR(20) = 'Import-D3';
DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
-DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd');
+DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');
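Worked example of the @EndDate fix: run in 2025 with @Month = 12, the old expression calls DATEFROMPARTS(2025, 13, 5), which raises an error; the CASE version rolls over to DATEFROMPARTS(2026, 1, 5), i.e. '2026.01.05'.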

View File

@@ -2,9 +2,9 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'D1';
-DECLARE @Month INT = 11;
+DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
-DECLARE @MonthName NVARCHAR(20) = 'Pazdziernik_2025';
+DECLARE @MonthName NVARCHAR(20) = 'Grudzien_2025';
IF @Type NOT IN ('K5', 'PU', 'AK', 'FK', 'D1', 'FK2')
BEGIN
@@ -27,7 +27,7 @@ SET @ImportType =
ELSE 'Standard'
END;
DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
-DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd');
+DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');

View File

@@ -2,7 +2,7 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
-DECLARE @Month INT = 11;
+DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'FK';
-DECLARE @Month INT = 11;
+DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
IF @Type NOT IN ('K5', 'PU', 'AK', 'FK')

View File

@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'FK2';
-DECLARE @Month INT = 11;
+DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -2,7 +2,7 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
-DECLARE @Month INT = 11;
+DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -2,9 +2,9 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
-DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
+DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @Name NVARCHAR(50) = CONCAT(
-'L', @Number, '-D-D6-SELL-CODES'
+'L', @Number, 'D-P2-CODES'
);
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
@@ -16,7 +16,7 @@ BEGIN
RETURN;
END;
-INSERT INTO [diunabi-morska].[dbo].[Layers]
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 3);
@@ -27,16 +27,23 @@ DECLARE @Array TABLE (
INSERT INTO @Array (Code, Desc1)
VALUES
-('1002', '1102'),
-('1003','1202'),
-('1008','1302'),
-('1009','1302'),
-('9085','1203'),
-('1010','1304'),
-('9086','1005'),
-('1021','1206'),
-('9089','1207'),
-('9091','1208')
+('01','<nieznany>'),
+('02','DOLNOŚLĄSKIE'),
+('03','KUJAWSKO-POMORSKIE'),
+('04','LUBELSKIE'),
+('05','LUBUSKIE'),
+('06','ŁÓDZKIE'),
+('07','MAŁOPOLSKIE'),
+('08','MAZOWIECKIE'),
+('09','OPOLSKIE'),
+('10','PODKARPACKIE'),
+('11','PODLASKIE'),
+('12','POMORSKIE'),
+('13','ŚLĄSKIE'),
+('14','ŚWIĘTOKRZYSKIE'),
+('15','WARMIŃSKO-MAZURSKIE'),
+('16','WIELKOPOLSKIE'),
+('17','ZACHODNIOPOMORSKIE');
-- Loop through the array and insert into the target table
DECLARE @Code NVARCHAR(50);
@@ -51,7 +58,7 @@ FETCH NEXT FROM CursorArray INTO @Code, @Desc1;
WHILE @@FETCH_STATUS = 0
BEGIN
-INSERT INTO [diunabi-morska].[dbo].[Records]
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES (NEWID(), @Code, @Desc1, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

View File

@@ -0,0 +1,63 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
namespace DiunaBI.API.Attributes;
/// <summary>
/// Authorization attribute that validates API key from X-API-Key header.
/// Uses constant-time comparison to prevent timing attacks.
/// </summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]
public class ApiKeyAuthAttribute : Attribute, IAuthorizationFilter
{
private const string ApiKeyHeaderName = "X-API-Key";
public void OnAuthorization(AuthorizationFilterContext context)
{
var configuration = context.HttpContext.RequestServices.GetRequiredService<IConfiguration>();
var logger = context.HttpContext.RequestServices.GetRequiredService<ILogger<ApiKeyAuthAttribute>>();
// Get expected API key from configuration
var expectedApiKey = configuration["apiKey"];
if (string.IsNullOrEmpty(expectedApiKey))
{
logger.LogError("API key not configured in appsettings");
context.Result = new StatusCodeResult(StatusCodes.Status500InternalServerError);
return;
}
// Get API key from header
if (!context.HttpContext.Request.Headers.TryGetValue(ApiKeyHeaderName, out var extractedApiKey))
{
logger.LogWarning("API key missing from request header");
context.Result = new UnauthorizedObjectResult(new { error = "API key is required" });
return;
}
// Constant-time comparison to prevent timing attacks
if (!IsApiKeyValid(extractedApiKey!, expectedApiKey))
{
logger.LogWarning("Invalid API key provided from {RemoteIp}", context.HttpContext.Connection.RemoteIpAddress);
context.Result = new UnauthorizedObjectResult(new { error = "Invalid API key" });
return;
}
// API key is valid - allow the request to proceed
}
/// <summary>
/// Constant-time string comparison to prevent timing attacks.
/// </summary>
private static bool IsApiKeyValid(string providedKey, string expectedKey)
{
if (providedKey == null || expectedKey == null)
return false;
var providedBytes = Encoding.UTF8.GetBytes(providedKey);
var expectedBytes = Encoding.UTF8.GetBytes(expectedKey);
return CryptographicOperations.FixedTimeEquals(providedBytes, expectedBytes);
}
}
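For orientation, a minimal usage sketch (the controller here is hypothetical, not part of this changeset): because [AllowAnonymous] disables the JWT AuthorizeFilter but not custom IAuthorizationFilter attributes, pairing it with [ApiKeyAuth] swaps the auth scheme for a single action — the same pattern JobsController uses below.

```csharp
using DiunaBI.API.Attributes;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

[Authorize]               // controller default: JWT bearer auth
[ApiController]
[Route("[controller]")]
public class ExampleController : ControllerBase
{
    [HttpPost("run")]
    [AllowAnonymous]      // opt out of the JWT filter for this action...
    [ApiKeyAuth]          // ...and validate the X-API-Key header instead
    public IActionResult Run() => Ok(new { success = true });
}
```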

View File

@@ -1,9 +1,12 @@
using DiunaBI.API.Services;
using DiunaBI.Domain.Entities;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;
namespace DiunaBI.API.Controllers;
[AllowAnonymous]
[ApiController]
[Route("[controller]")]
public class AuthController(
@@ -13,6 +16,7 @@ public class AuthController(
: ControllerBase
{
[HttpPost("apiToken")]
[EnableRateLimiting("auth")]
public async Task<IActionResult> ApiToken([FromBody] string idToken)
{
try

View File

@@ -0,0 +1,222 @@
using System.Text;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Domain.Entities;
using DiunaBI.Application.DTOModels;
using DiunaBI.Application.DTOModels.Common;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class DataInboxController : Controller
{
private readonly AppDbContext _db;
private readonly IConfiguration _configuration;
private readonly ILogger<DataInboxController> _logger;
public DataInboxController(
AppDbContext db,
IConfiguration configuration,
ILogger<DataInboxController> logger)
{
_db = db;
_configuration = configuration;
_logger = logger;
}
[HttpPut]
[Route("Add/{apiKey}")]
[AllowAnonymous]
public IActionResult Add(string apiKey, [FromBody] DataInbox dataInbox)
{
if (apiKey != _configuration["apiKey"])
{
_logger.LogWarning("DataInbox: Unauthorized request - wrong apiKey for source {Source}", dataInbox.Source);
return Unauthorized();
}
try
{
if (!Request.Headers.TryGetValue("Authorization", out var authHeader))
{
_logger.LogWarning("DataInbox: Unauthorized request - no authorization header for source {Source}", dataInbox.Source);
return Unauthorized();
}
var credentialsArr = authHeader.ToString().Split(" ");
if (credentialsArr.Length != 2)
{
_logger.LogWarning("DataInbox: Unauthorized request - wrong auth header format for source {Source}", dataInbox.Source);
return Unauthorized();
}
var authValue = Encoding.UTF8.GetString(Convert.FromBase64String(credentialsArr[1]));
var username = authValue.Split(':')[0];
var password = authValue.Split(':')[1];
if (username != _configuration["apiUser"] || password != _configuration["apiPass"])
{
_logger.LogWarning("DataInbox: Unauthorized request - bad credentials for source {Source}", dataInbox.Source);
return Unauthorized();
}
// check if datainbox.data is base64 encoded value
if (!string.IsNullOrEmpty(dataInbox.Data))
{
// Limit data size to 10MB to prevent DoS
if (dataInbox.Data.Length > 10_000_000)
{
_logger.LogWarning("DataInbox: Data too large for source {Source}, size {Size}", dataInbox.Source, dataInbox.Data.Length);
return BadRequest("Data too large (max 10MB)");
}
if (!IsBase64String(dataInbox.Data))
{
_logger.LogWarning("DataInbox: Invalid data format - not base64 encoded for source {Source}", dataInbox.Source);
return BadRequest("Invalid data format - not base64 encoded");
}
}
dataInbox.Id = Guid.NewGuid();
dataInbox.CreatedAt = DateTime.UtcNow;
_db.DataInbox.Add(dataInbox);
_db.SaveChanges();
_logger.LogInformation("DataInbox: Insert success for source {Source}, name {Name}", dataInbox.Source, dataInbox.Name);
if (dataInbox.Name == "morska.d3.importer")
{
_logger.LogDebug("DataInbox: Detected morska.d3.importer - processing will be handled by AutoImport");
}
return Ok();
}
catch (Exception e)
{
_logger.LogError(e, "DataInbox: Insert error for source {Source}, name {Name}", dataInbox.Source, dataInbox.Name);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("GetAll")]
public IActionResult GetAll([FromQuery] int start, [FromQuery] int limit, [FromQuery] string? search)
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.DataInbox.AsQueryable();
if (!string.IsNullOrEmpty(search))
{
query = query.Where(x => x.Name.Contains(search) || x.Source.Contains(search));
}
var totalCount = query.Count();
var items = query
.OrderByDescending(x => x.CreatedAt)
.Skip(start)
.Take(limit)
.AsNoTracking()
.Select(x => new DataInboxDto
{
Id = x.Id,
Name = x.Name,
Source = x.Source,
Data = x.Data,
CreatedAt = x.CreatedAt
})
.ToList();
var pagedResult = new PagedResult<DataInboxDto>
{
Items = items,
TotalCount = totalCount,
Page = (start / limit) + 1,
PageSize = limit
};
_logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} data inbox items (page {Page}) with filter search={Search}",
items.Count, totalCount, pagedResult.Page, search);
return Ok(pagedResult);
}
catch (Exception e)
{
_logger.LogError(e, "GetAll: Error retrieving data inbox items");
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("{id:guid}")]
public IActionResult Get(Guid id)
{
try
{
var dataInbox = _db.DataInbox
.AsNoTracking()
.FirstOrDefault(x => x.Id == id);
if (dataInbox == null)
{
_logger.LogWarning("Get: Data inbox item {Id} not found", id);
return NotFound();
}
var dto = new DataInboxDto
{
Id = dataInbox.Id,
Name = dataInbox.Name,
Source = dataInbox.Source,
Data = dataInbox.Data,
CreatedAt = dataInbox.CreatedAt
};
_logger.LogDebug("Get: Retrieved data inbox item {Id} {Name}", id, dataInbox.Name);
return Ok(dto);
}
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving data inbox item {Id}", id);
return BadRequest("An error occurred processing your request");
}
}
// helpers
private bool IsBase64String(string data)
{
if (string.IsNullOrEmpty(data))
{
return false;
}
try
{
var base64Bytes = Convert.FromBase64String(data);
var utf8String = Encoding.UTF8.GetString(base64Bytes);
var reEncoded = Convert.ToBase64String(Encoding.UTF8.GetBytes(utf8String));
return data.TrimEnd('=') == reEncoded.TrimEnd('=');
}
catch (FormatException)
{
return false;
}
catch (DecoderFallbackException)
{
return false;
}
}
}
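A minimal client sketch for the Add endpoint, matching the checks above (the host, key, credentials, and inbox name are placeholders): the API key rides in the route, Basic credentials go in the Authorization header, and Data must be base64-encoded UTF-8 so it survives the round-trip check in IsBase64String.

```csharp
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text;

var apiKey = "<apiKey>";  // placeholder — compared against configuration["apiKey"]
var basic = Convert.ToBase64String(Encoding.UTF8.GetBytes("<apiUser>:<apiPass>"));

using var http = new HttpClient { BaseAddress = new Uri("http://localhost:5400") };
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", basic);

var response = await http.PutAsJsonAsync($"DataInbox/Add/{apiKey}", new
{
    name = "example.import",   // hypothetical inbox name
    source = "Example",
    data = Convert.ToBase64String(Encoding.UTF8.GetBytes("payload"))  // base64, max 10MB
});
response.EnsureSuccessStatusCode();
```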

View File

@@ -0,0 +1,508 @@
using DiunaBI.API.Attributes;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class JobsController : Controller
{
private readonly AppDbContext _db;
private readonly JobSchedulerService _jobScheduler;
private readonly IConfiguration _configuration;
private readonly ILogger<JobsController> _logger;
public JobsController(
AppDbContext db,
JobSchedulerService jobScheduler,
IConfiguration configuration,
ILogger<JobsController> logger)
{
_db = db;
_jobScheduler = jobScheduler;
_configuration = configuration;
_logger = logger;
}
[HttpGet]
[Route("")]
public async Task<IActionResult> GetAll(
[FromQuery] int start = 0,
[FromQuery] int limit = 50,
[FromQuery] List<JobStatus>? statuses = null,
[FromQuery] JobType? jobType = null,
[FromQuery] Guid? layerId = null)
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.QueueJobs.AsQueryable();
if (statuses != null && statuses.Count > 0)
{
query = query.Where(j => statuses.Contains(j.Status));
}
if (jobType.HasValue)
{
query = query.Where(j => j.JobType == jobType.Value);
}
if (layerId.HasValue)
{
query = query.Where(j => j.LayerId == layerId.Value);
}
var totalCount = await query.CountAsync();
// Sort by: Priority ASC (0=highest), JobType, then CreatedAt DESC
var items = await query
.OrderBy(j => j.Priority)
.ThenBy(j => j.JobType)
.ThenByDescending(j => j.CreatedAt)
.Skip(start)
.Take(limit)
.AsNoTracking()
.ToListAsync();
var pagedResult = new PagedResult<QueueJob>
{
Items = items,
TotalCount = totalCount,
Page = (start / limit) + 1,
PageSize = limit
};
_logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} jobs", items.Count, totalCount);
return Ok(pagedResult);
}
catch (Exception ex)
{
_logger.LogError(ex, "GetAll: Error retrieving jobs");
return BadRequest("An error occurred while retrieving jobs");
}
}
[HttpGet]
[Route("{id:guid}")]
public async Task<IActionResult> Get(Guid id)
{
try
{
var job = await _db.QueueJobs
.AsNoTracking()
.FirstOrDefaultAsync(j => j.Id == id);
if (job == null)
{
_logger.LogWarning("Get: Job {JobId} not found", id);
return NotFound("Job not found");
}
_logger.LogDebug("Get: Retrieved job {JobId}", id);
return Ok(job);
}
catch (Exception ex)
{
_logger.LogError(ex, "Get: Error retrieving job {JobId}", id);
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleJobs([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
_logger.LogInformation("ScheduleJobs: Created {Count} jobs", jobsCreated);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleJobs: Error scheduling jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule/imports")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleImportJobs([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
_logger.LogInformation("ScheduleImportJobs: Created {Count} import jobs", jobsCreated);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} import jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleImportJobs: Error scheduling import jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule/processes")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleProcessJobs()
{
try
{
var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
_logger.LogInformation("ScheduleProcessJobs: Created {Count} process jobs", jobsCreated);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} process jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleProcessJobs: Error scheduling process jobs");
return BadRequest("An error occurred processing your request");
}
}
// UI-friendly endpoints (JWT auth)
[HttpPost]
[Route("ui/schedule")]
public async Task<IActionResult> ScheduleJobsUI([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
_logger.LogInformation("ScheduleJobsUI: Created {Count} jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleJobsUI: Error scheduling jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("ui/schedule/imports")]
public async Task<IActionResult> ScheduleImportJobsUI([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
_logger.LogInformation("ScheduleImportJobsUI: Created {Count} import jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} import jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleImportJobsUI: Error scheduling import jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("ui/schedule/processes")]
public async Task<IActionResult> ScheduleProcessJobsUI()
{
try
{
var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
_logger.LogInformation("ScheduleProcessJobsUI: Created {Count} process jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} process jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleProcessJobsUI: Error scheduling process jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("{id:guid}/retry")]
public async Task<IActionResult> RetryJob(Guid id)
{
try
{
var job = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
if (job == null)
{
_logger.LogWarning("RetryJob: Job {JobId} not found", id);
return NotFound("Job not found");
}
if (job.Status != JobStatus.Failed)
{
_logger.LogWarning("RetryJob: Job {JobId} is not in Failed status (current: {Status})", id, job.Status);
return BadRequest($"Job is not in Failed status (current: {job.Status})");
}
job.Status = JobStatus.Pending;
job.RetryCount = 0;
job.LastError = null;
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
await _db.SaveChangesAsync();
_logger.LogInformation("RetryJob: Job {JobId} reset to Pending status", id);
return Ok(new
{
success = true,
message = "Job reset to Pending status and will be retried"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "RetryJob: Error retrying job {JobId}", id);
return BadRequest("An error occurred processing your request");
}
}
[HttpDelete]
[Route("{id:guid}")]
public async Task<IActionResult> CancelJob(Guid id)
{
try
{
var job = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
if (job == null)
{
_logger.LogWarning("CancelJob: Job {JobId} not found", id);
return NotFound("Job not found");
}
if (job.Status == JobStatus.Running)
{
_logger.LogWarning("CancelJob: Cannot cancel running job {JobId}", id);
return BadRequest("Cannot cancel a job that is currently running");
}
if (job.Status == JobStatus.Completed)
{
_logger.LogWarning("CancelJob: Cannot cancel completed job {JobId}", id);
return BadRequest("Cannot cancel a completed job");
}
job.Status = JobStatus.Failed;
job.LastError = "Cancelled by user";
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
await _db.SaveChangesAsync();
_logger.LogInformation("CancelJob: Job {JobId} cancelled", id);
return Ok(new
{
success = true,
message = "Job cancelled successfully"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "CancelJob: Error cancelling job {JobId}", id);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("stats")]
public async Task<IActionResult> GetStats()
{
try
{
var stats = new
{
pending = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Pending),
running = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Running),
completed = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Completed),
failed = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Failed),
retrying = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Retrying),
total = await _db.QueueJobs.CountAsync()
};
_logger.LogDebug("GetStats: Retrieved job statistics");
return Ok(stats);
}
catch (Exception ex)
{
_logger.LogError(ex, "GetStats: Error retrieving job statistics");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("create-for-layer/{layerId:guid}")]
public async Task<IActionResult> CreateJobForLayer(Guid layerId)
{
try
{
var layer = await _db.Layers
.Include(x => x.Records)
.FirstOrDefaultAsync(l => l.Id == layerId);
if (layer == null)
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} not found", layerId);
return NotFound($"Layer {layerId} not found");
}
if (layer.Type != LayerType.Administration)
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not an Administration layer", layerId);
return BadRequest("Only Administration layers can be run as jobs");
}
// Get the Type record to determine if it's ImportWorker or ProcessWorker
var typeRecord = layer.Records?.FirstOrDefault(x => x.Code == "Type");
if (typeRecord?.Desc1 != "ImportWorker" && typeRecord?.Desc1 != "ProcessWorker")
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not a valid worker type", layerId);
return BadRequest("Layer must be an ImportWorker or ProcessWorker");
}
// Check if enabled
var isEnabledRecord = layer.Records?.FirstOrDefault(x => x.Code == "IsEnabled");
if (isEnabledRecord?.Desc1 != "True")
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not enabled", layerId);
return BadRequest("Layer is not enabled");
}
// Get plugin name
var pluginRecord = layer.Records?.FirstOrDefault(x => x.Code == "Plugin");
if (string.IsNullOrEmpty(pluginRecord?.Desc1))
{
_logger.LogWarning("CreateJobForLayer: Layer {LayerId} has no Plugin configured", layerId);
return BadRequest("Layer has no Plugin configured");
}
// Get priority and max retries
var priorityRecord = layer.Records?.FirstOrDefault(x => x.Code == "Priority");
var maxRetriesRecord = layer.Records?.FirstOrDefault(x => x.Code == "MaxRetries");
var priority = int.TryParse(priorityRecord?.Desc1, out var p) ? p : 0;
var maxRetries = int.TryParse(maxRetriesRecord?.Desc1, out var m) ? m : 3;
var jobType = typeRecord.Desc1 == "ImportWorker" ? JobType.Import : JobType.Process;
// Check if there's already a pending/running job for this layer
var existingJob = await _db.QueueJobs
.Where(j => j.LayerId == layer.Id &&
(j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
.FirstOrDefaultAsync();
if (existingJob != null)
{
_logger.LogInformation("CreateJobForLayer: Job already exists for layer {LayerId}, returning existing job", layerId);
return Ok(new
{
success = true,
jobId = existingJob.Id,
message = "Job already exists for this layer",
existing = true
});
}
// Create the job
var job = new QueueJob
{
Id = Guid.NewGuid(),
LayerId = layer.Id,
LayerName = layer.Name ?? "Unknown",
PluginName = pluginRecord.Desc1,
JobType = jobType,
Priority = priority,
MaxRetries = maxRetries,
Status = JobStatus.Pending,
CreatedAt = DateTime.UtcNow,
ModifiedAt = DateTime.UtcNow,
CreatedById = DiunaBI.Domain.Entities.User.AutoImportUserId,
ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId
};
_db.QueueJobs.Add(job);
await _db.SaveChangesAsync();
_logger.LogInformation("CreateJobForLayer: Created job {JobId} for layer {LayerName} ({LayerId})",
job.Id, layer.Name, layerId);
return Ok(new
{
success = true,
jobId = job.Id,
message = "Job created successfully",
existing = false
});
}
catch (Exception ex)
{
_logger.LogError(ex, "CreateJobForLayer: Error creating job for layer {LayerId}", layerId);
return BadRequest("An error occurred processing your request");
}
}
}
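For the cron side, a sketch of calling the anonymous, key-protected schedule endpoint (base address and key are placeholders; the X-API-Key header name comes from ApiKeyAuthAttribute above):

```csharp
using var http = new HttpClient { BaseAddress = new Uri("http://localhost:5400") };
http.DefaultRequestHeaders.Add("X-API-Key", "<apiKey>");  // validated by [ApiKeyAuth]

// POST /jobs/schedule — optionally narrowed with ?nameFilter=...
var response = await http.PostAsync("jobs/schedule", content: null);
response.EnsureSuccessStatusCode();
```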

View File

@@ -1,5 +1,6 @@
using System.Globalization;
using System.Text;
using System.Text.Json;
using Google.Apis.Sheets.v4;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
@@ -12,6 +13,7 @@ using DiunaBI.Infrastructure.Services;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class LayersController : Controller
@@ -46,6 +48,16 @@
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.Layers.Where(x => !x.IsDeleted);
if (name != null)
@@ -97,7 +109,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "GetAll: Error retrieving layers");
-return BadRequest(e.ToString());
+return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
@@ -117,7 +129,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving layer {LayerId}", id);
-return BadRequest(e.ToString());
+return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
@@ -394,7 +406,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "AutoImport: Process error");
-return BadRequest(e.ToString());
+return BadRequest("An error occurred processing your request");
}
}
@@ -727,4 +739,398 @@ public class LayersController : Controller
throw;
}
}
// Record CRUD operations
[HttpPost]
[Route("{layerId:guid}/records")]
public IActionResult CreateRecord(Guid layerId, [FromBody] RecordDto recordDto)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("CreateRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("CreateRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("CreateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
if (string.IsNullOrWhiteSpace(recordDto.Code))
{
return BadRequest("Code is required");
}
if (string.IsNullOrWhiteSpace(recordDto.Desc1))
{
return BadRequest("Desc1 is required");
}
var record = new Record
{
Id = Guid.NewGuid(),
Code = recordDto.Code,
Desc1 = recordDto.Desc1,
LayerId = layerId,
CreatedAt = DateTime.UtcNow,
ModifiedAt = DateTime.UtcNow,
CreatedById = Guid.Parse(userId),
ModifiedById = Guid.Parse(userId),
IsDeleted = false
};
_db.Records.Add(record);
// Capture history
CaptureRecordHistory(record, RecordChangeType.Created, Guid.Parse(userId));
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("CreateRecord: Created record {RecordId} in layer {LayerId}", record.Id, layerId);
return Ok(new RecordDto
{
Id = record.Id,
Code = record.Code,
Desc1 = record.Desc1,
LayerId = record.LayerId,
CreatedAt = record.CreatedAt,
ModifiedAt = record.ModifiedAt,
CreatedById = record.CreatedById,
ModifiedById = record.ModifiedById
});
}
catch (Exception e)
{
_logger.LogError(e, "CreateRecord: Error creating record in layer {LayerId}", layerId);
return BadRequest("An error occurred processing your request");
}
}
[HttpPut]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult UpdateRecord(Guid layerId, Guid recordId, [FromBody] RecordDto recordDto)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("UpdateRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("UpdateRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("UpdateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
var record = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
if (record == null)
{
_logger.LogWarning("UpdateRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
return NotFound("Record not found");
}
if (string.IsNullOrWhiteSpace(recordDto.Code))
{
return BadRequest("Code is required");
}
if (string.IsNullOrWhiteSpace(recordDto.Desc1))
{
return BadRequest("Desc1 is required");
}
// Capture old values before updating
var oldCode = record.Code;
var oldDesc1 = record.Desc1;
record.Desc1 = recordDto.Desc1;
record.ModifiedAt = DateTime.UtcNow;
record.ModifiedById = Guid.Parse(userId);
// Capture history
CaptureRecordHistory(record, RecordChangeType.Updated, Guid.Parse(userId), oldCode, oldDesc1);
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("UpdateRecord: Updated record {RecordId} in layer {LayerId}", recordId, layerId);
return Ok(new RecordDto
{
Id = record.Id,
Code = record.Code,
Desc1 = record.Desc1,
LayerId = record.LayerId,
CreatedAt = record.CreatedAt,
ModifiedAt = record.ModifiedAt,
CreatedById = record.CreatedById,
ModifiedById = record.ModifiedById
});
}
catch (Exception e)
{
_logger.LogError(e, "UpdateRecord: Error updating record {RecordId} in layer {LayerId}", recordId, layerId);
return BadRequest("An error occurred processing your request");
}
}
[HttpDelete]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult DeleteRecord(Guid layerId, Guid recordId)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("DeleteRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("DeleteRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("DeleteRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
var record = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
if (record == null)
{
_logger.LogWarning("DeleteRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
return NotFound("Record not found");
}
// Capture history before deleting
CaptureRecordHistory(record, RecordChangeType.Deleted, Guid.Parse(userId));
_db.Records.Remove(record);
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("DeleteRecord: Deleted record {RecordId} from layer {LayerId}", recordId, layerId);
return Ok();
}
catch (Exception e)
{
_logger.LogError(e, "DeleteRecord: Error deleting record {RecordId} from layer {LayerId}", recordId, layerId);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("{layerId:guid}/records/{recordId:guid}/history")]
public IActionResult GetRecordHistory(Guid layerId, Guid recordId)
{
try
{
var history = _db.RecordHistory
.Include(h => h.ChangedBy)
.Where(h => h.RecordId == recordId && h.LayerId == layerId)
.OrderByDescending(h => h.ChangedAt)
.AsNoTracking()
.Select(h => new RecordHistoryDto
{
Id = h.Id,
RecordId = h.RecordId,
LayerId = h.LayerId,
ChangedAt = h.ChangedAt,
ChangedById = h.ChangedById,
ChangedByName = h.ChangedBy != null ? h.ChangedBy.UserName ?? h.ChangedBy.Email : "Unknown",
ChangeType = h.ChangeType.ToString(),
Code = h.Code,
Desc1 = h.Desc1,
ChangedFields = h.ChangedFields,
ChangesSummary = h.ChangesSummary,
FormattedChange = FormatHistoryChange(h)
})
.ToList();
_logger.LogDebug("GetRecordHistory: Retrieved {Count} history entries for record {RecordId}", history.Count, recordId);
return Ok(history);
}
catch (Exception e)
{
_logger.LogError(e, "GetRecordHistory: Error retrieving history for record {RecordId}", recordId);
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
[Route("{layerId:guid}/records/deleted")]
public IActionResult GetDeletedRecords(Guid layerId)
{
try
{
// Get the most recent "Deleted" history entry for each unique RecordId in this layer
// First, get all deleted record history entries
var deletedHistoryEntries = _db.RecordHistory
.Where(h => h.LayerId == layerId && h.ChangeType == RecordChangeType.Deleted)
.ToList();
// Group in memory and get the most recent deletion for each record
var mostRecentDeletes = deletedHistoryEntries
.GroupBy(h => h.RecordId)
.Select(g => g.OrderByDescending(h => h.ChangedAt).First())
.ToList();
// Get all unique user IDs from the history entries
var userIds = mostRecentDeletes.Select(h => h.ChangedById).Distinct().ToList();
// Load the users
var users = _db.Users
.Where(u => userIds.Contains(u.Id))
.ToDictionary(u => u.Id, u => u.UserName ?? string.Empty);
// Build the DTOs
var deletedRecords = mostRecentDeletes
.Select(h => new DeletedRecordDto
{
RecordId = h.RecordId,
Code = h.Code,
Desc1 = h.Desc1,
DeletedAt = h.ChangedAt,
DeletedById = h.ChangedById,
DeletedByName = users.TryGetValue(h.ChangedById, out var userName) ? userName : string.Empty
})
.OrderByDescending(d => d.DeletedAt)
.ToList();
_logger.LogDebug("GetDeletedRecords: Retrieved {Count} deleted records for layer {LayerId}", deletedRecords.Count, layerId);
return Ok(deletedRecords);
}
catch (Exception e)
{
_logger.LogError(e, "GetDeletedRecords: Error retrieving deleted records for layer {LayerId}", layerId);
return BadRequest("An error occurred processing your request");
}
}
// Helper method to capture record history
private void CaptureRecordHistory(Record record, RecordChangeType changeType, Guid userId, string? oldCode = null, string? oldDesc1 = null)
{
var changedFields = new List<string>();
var changesSummary = new Dictionary<string, Dictionary<string, string?>>();
if (changeType == RecordChangeType.Updated)
{
if (oldCode != record.Code)
{
changedFields.Add("Code");
changesSummary["Code"] = new Dictionary<string, string?>
{
["old"] = oldCode,
["new"] = record.Code
};
}
if (oldDesc1 != record.Desc1)
{
changedFields.Add("Desc1");
changesSummary["Desc1"] = new Dictionary<string, string?>
{
["old"] = oldDesc1,
["new"] = record.Desc1
};
}
}
var history = new RecordHistory
{
Id = Guid.NewGuid(),
RecordId = record.Id,
LayerId = record.LayerId,
ChangedAt = DateTime.UtcNow,
ChangedById = userId,
ChangeType = changeType,
Code = record.Code,
Desc1 = record.Desc1,
ChangedFields = changedFields.Any() ? string.Join(", ", changedFields) : null,
ChangesSummary = changesSummary.Any() ? JsonSerializer.Serialize(changesSummary) : null
};
_db.RecordHistory.Add(history);
_logger.LogInformation("CaptureRecordHistory: Captured {ChangeType} for record {RecordId}", changeType, record.Id);
}
// Helper method to format history change for display
private static string FormatHistoryChange(RecordHistory h)
{
if (h.ChangeType == RecordChangeType.Created)
{
return $"Created record with Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
}
if (h.ChangeType == RecordChangeType.Deleted)
{
return $"Deleted record Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
}
// Updated
if (!string.IsNullOrEmpty(h.ChangesSummary))
{
try
{
var changes = JsonSerializer.Deserialize<Dictionary<string, Dictionary<string, string?>>>(h.ChangesSummary);
if (changes != null)
{
var parts = new List<string>();
foreach (var (field, values) in changes)
{
var oldVal = values.GetValueOrDefault("old") ?? "empty";
var newVal = values.GetValueOrDefault("new") ?? "empty";
parts.Add($"{field}: \"{oldVal}\" → \"{newVal}\"");
}
return $"Updated: {string.Join(", ", parts)}";
}
}
catch
{
// Fall back to simple message
}
}
return $"Updated {h.ChangedFields ?? "record"}";
}
    }
}
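For context on how the two helpers above fit together, here is a minimal call-site sketch. It is illustrative only: the surrounding update endpoint, dto, and userId are assumptions, not part of this file.

var oldCode = record.Code;   // e.g. "A"
var oldDesc1 = record.Desc1;
record.Code = dto.Code;      // e.g. "B"; Desc1 left unchanged in this example
CaptureRecordHistory(record, RecordChangeType.Updated, userId, oldCode, oldDesc1);
_db.SaveChanges();
// Resulting ChangesSummary: {"Code":{"old":"A","new":"B"}}
// which FormatHistoryChange later renders as: Updated: Code: "A" → "B"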

View File

@@ -20,7 +20,6 @@
     <PackageReference Include="Serilog.AspNetCore" Version="9.0.0" />
     <PackageReference Include="Serilog.Enrichers.Environment" Version="3.0.1" />
     <PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
-    <PackageReference Include="Serilog.Sinks.Seq" Version="9.0.0" />
     <PackageReference Include="System.Configuration.ConfigurationManager" Version="10.0.0" />
   </ItemGroup>
@@ -37,11 +36,13 @@
     </Content>
   </ItemGroup>
-  <Target Name="CopyPlugins" AfterTargets="Build">
+  <Target Name="CopyPlugins" AfterTargets="Build" Condition="'$(SkipPluginCopy)' != 'true'">
     <MSBuild Projects="../DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
+    <MSBuild Projects="../DiunaBI.Plugins.PedrolloPL/DiunaBI.Plugins.PedrolloPL.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
     <ItemGroup>
       <PluginFiles Include="../DiunaBI.Plugins.Morska/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.Morska.dll" />
+      <PluginFiles Include="../DiunaBI.Plugins.PedrolloPL/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.PedrolloPL.dll" />
     </ItemGroup>
     <MakeDir Directories="$(OutputPath)Plugins" />
     <Copy SourceFiles="@(PluginFiles)" DestinationFolder="$(OutputPath)Plugins" />

View File

@@ -1,7 +1,8 @@
 # Stage 1: Build
 FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
-WORKDIR /src/Backend
+ARG PLUGIN_PROJECT=DiunaBI.Plugins.Morska
+WORKDIR /

 # Copy solution and all project files for restore
 COPY DiunaBI.sln ./
@@ -9,7 +10,7 @@ COPY DiunaBI.API/DiunaBI.API.csproj DiunaBI.API/
 COPY DiunaBI.Domain/DiunaBI.Domain.csproj DiunaBI.Domain/
 COPY DiunaBI.Application/DiunaBI.Application.csproj DiunaBI.Application/
 COPY DiunaBI.Infrastructure/DiunaBI.Infrastructure.csproj DiunaBI.Infrastructure/
-COPY DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj DiunaBI.Plugins.Morska/
+COPY ${PLUGIN_PROJECT}/${PLUGIN_PROJECT}.csproj ${PLUGIN_PROJECT}/

 # Restore dependencies
 RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
@@ -18,16 +19,16 @@ RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
 COPY . .

 # Build plugin first
-WORKDIR /src/Backend/DiunaBI.Plugins.Morska
+WORKDIR /${PLUGIN_PROJECT}
 RUN dotnet build -c Release

-# Build and publish API
-WORKDIR /src/Backend/DiunaBI.API
-RUN dotnet publish -c Release -o /app/publish --no-restore
+# Build and publish API (skip automatic plugin copy since we handle it manually)
+WORKDIR /DiunaBI.API
+RUN dotnet publish -c Release -o /app/publish --no-restore -p:SkipPluginCopy=true

 # Copy plugin DLL to publish output
 RUN mkdir -p /app/publish/Plugins && \
-    cp /src/Backend/DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll /app/publish/Plugins/
+    cp /${PLUGIN_PROJECT}/bin/Release/net10.0/${PLUGIN_PROJECT}.dll /app/publish/Plugins/

 # Stage 2: Runtime
 FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS runtime

View File

@@ -0,0 +1,15 @@
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.SignalR;
namespace DiunaBI.API.Hubs;
/// <summary>
/// SignalR hub for broadcasting entity change notifications to authenticated clients.
/// Clients can only listen - broadcasting is done server-side by EntityChangeInterceptor.
/// </summary>
[Authorize]
public class EntityChangeHub : Hub
{
// No public methods - clients can only listen for "EntityChanged" events
// Broadcasting is handled server-side by EntityChangeInterceptor via IHubContext
}
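
Since the hub exposes no invokable methods, a consumer only subscribes. Below is a minimal client-side sketch using the SignalR .NET client; it is not part of this diff, the base URL and jwtToken value are placeholders, and only the /hubs/entitychanges route and the "EntityChanged" event name come from the changes shown here.

using Microsoft.AspNetCore.SignalR.Client;

var jwtToken = "<jwt-obtained-from-login>"; // placeholder, not a real token
var connection = new HubConnectionBuilder()
    .WithUrl("https://your-host/hubs/entitychanges", options =>
    {
        // The hub is [Authorize]d, so the JWT must accompany the connection
        options.AccessTokenProvider = () => Task.FromResult<string?>(jwtToken);
    })
    .WithAutomaticReconnect()
    .Build();

connection.On<object>("EntityChanged", payload =>
{
    // payload carries { module, id, operation } as broadcast by EntityChangeInterceptor
    Console.WriteLine($"EntityChanged: {payload}");
});

await connection.StartAsync();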

View File

@@ -1,11 +1,15 @@
 using Microsoft.AspNetCore.Authentication.JwtBearer;
+using Microsoft.AspNetCore.RateLimiting;
 using Microsoft.EntityFrameworkCore;
 using Microsoft.IdentityModel.Tokens;
 using System.IdentityModel.Tokens.Jwt;
 using System.Reflection;
 using System.Text;
+using System.Threading.RateLimiting;
+using DiunaBI.API.Hubs;
 using DiunaBI.API.Services;
 using DiunaBI.Infrastructure.Data;
+using DiunaBI.Infrastructure.Interceptors;
 using DiunaBI.Infrastructure.Services;
 using Google.Apis.Sheets.v4;
 using Serilog;
@@ -29,10 +33,22 @@ if (builder.Environment.IsProduction())
 var connectionString = builder.Configuration.GetConnectionString("SQLDatabase");

-builder.Services.AddDbContext<AppDbContext>(x =>
+// Register EntityChangeInterceptor
+builder.Services.AddSingleton<EntityChangeInterceptor>();
+builder.Services.AddDbContext<AppDbContext>((serviceProvider, options) =>
 {
-    x.UseSqlServer(connectionString, sqlOptions => sqlOptions.MigrationsAssembly("DiunaBI.Infrastructure"));
-    x.EnableSensitiveDataLogging();
+    options.UseSqlServer(connectionString, sqlOptions => sqlOptions.MigrationsAssembly("DiunaBI.Infrastructure"));
+
+    // Only log SQL parameters in development (may contain sensitive data)
+    if (builder.Environment.IsDevelopment())
+    {
+        options.EnableSensitiveDataLogging();
+    }
+
+    // Add EntityChangeInterceptor
+    var interceptor = serviceProvider.GetRequiredService<EntityChangeInterceptor>();
+    options.AddInterceptors(interceptor);
 });

 builder.Services.AddCors(options =>
@@ -58,6 +74,44 @@ builder.Services.AddCors(options =>
 builder.Services.AddControllers();

+// Rate Limiting
+builder.Services.AddRateLimiter(options =>
+{
+    // Global API rate limit
+    options.AddFixedWindowLimiter("api", config =>
+    {
+        config.PermitLimit = 100;
+        config.Window = TimeSpan.FromMinutes(1);
+        config.QueueProcessingOrder = System.Threading.RateLimiting.QueueProcessingOrder.OldestFirst;
+        config.QueueLimit = 0; // No queueing
+    });
+
+    // Strict limit for authentication endpoint
+    options.AddFixedWindowLimiter("auth", config =>
+    {
+        config.PermitLimit = 10;
+        config.Window = TimeSpan.FromMinutes(1);
+        config.QueueProcessingOrder = System.Threading.RateLimiting.QueueProcessingOrder.OldestFirst;
+        config.QueueLimit = 0;
+    });
+
+    // Rejection response
+    options.OnRejected = async (context, token) =>
+    {
+        context.HttpContext.Response.StatusCode = 429; // Too Many Requests
+        await context.HttpContext.Response.WriteAsJsonAsync(new
+        {
+            error = "Too many requests. Please try again later.",
+            retryAfter = context.Lease.TryGetMetadata(MetadataName.RetryAfter, out var retryAfter)
+                ? (double?)retryAfter.TotalSeconds
+                : (double?)null
+        }, cancellationToken: token);
+    };
+});
+
+// SignalR
+builder.Services.AddSignalR();
+
 builder.Services.AddAuthentication(options =>
 {
     options.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme;
@@ -67,10 +121,12 @@ builder.Services.AddAuthentication(options =>
 {
     options.TokenValidationParameters = new TokenValidationParameters
     {
-        ValidateIssuer = false,
-        ValidateAudience = false,
+        ValidateIssuer = true,
+        ValidateAudience = true,
         ValidateLifetime = true,
         ValidateIssuerSigningKey = true,
+        ValidIssuer = builder.Configuration["JwtSettings:Issuer"],
+        ValidAudience = builder.Configuration["JwtSettings:Audience"],
         IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(builder.Configuration["JwtSettings:SecurityKey"]!))
     };
 });
@@ -97,6 +153,10 @@ builder.Services.AddSingleton<SpreadsheetsResource.ValuesResource>(provider =>
 builder.Services.AddSingleton<PluginManager>();

+// Job Queue Services
+builder.Services.AddScoped<JobSchedulerService>();
+builder.Services.AddHostedService<JobWorkerService>();
+
 var app = builder.Build();

 // Auto-apply migrations on startup
@@ -177,28 +237,80 @@ else
 pluginManager.LoadPluginsFromDirectory(pluginsPath);

+app.UseCors("CORSPolicy");
+
+// Security Headers
 app.Use(async (context, next) =>
 {
-    var token = context.Request.Headers.Authorization.ToString();
-    if (token.Length > 0
-        && !context.Request.Path.ToString().Contains("getForPowerBI")
-        && !context.Request.Path.ToString().Contains("getConfiguration")
-        && !context.Request.Path.ToString().Contains("DataInbox/Add"))
-    {
-        var handler = new JwtSecurityTokenHandler();
-        var data = handler.ReadJwtToken(token.Split(' ')[1]);
-        context.Request.Headers.Append("UserId", new Microsoft.Extensions.Primitives.StringValues(data.Subject));
-    }
-    await next(context);
+    context.Response.Headers.Append("X-Content-Type-Options", "nosniff");
+    context.Response.Headers.Append("X-Frame-Options", "DENY");
+    context.Response.Headers.Append("X-XSS-Protection", "1; mode=block");
+    context.Response.Headers.Append("Referrer-Policy", "strict-origin-when-cross-origin");
+    await next();
 });

-app.UseCors("CORSPolicy");
+app.UseRateLimiter();
 app.UseAuthentication();
 app.UseAuthorization();

+// Middleware to extract UserId from JWT token AFTER authentication
+// This must run after UseAuthentication() so the JWT is already validated
+app.Use(async (context, next) =>
+{
+    var logger = context.RequestServices.GetRequiredService<ILogger<Program>>();
+    logger.LogInformation("🔍 UserId Extraction Middleware - Path: {Path}, Method: {Method}",
+        context.Request.Path, context.Request.Method);
+    var token = context.Request.Headers.Authorization.ToString();
+    logger.LogInformation("🔍 Authorization header: {Token}",
+        string.IsNullOrEmpty(token) ? "NULL/EMPTY" : $"{token[..Math.Min(30, token.Length)]}...");
+    if (!string.IsNullOrEmpty(token) && token.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
+    {
+        try
+        {
+            var handler = new JwtSecurityTokenHandler();
+            var jwtToken = handler.ReadJwtToken(token.Split(' ')[1]);
+            // Try to get UserId from Subject claim first, then fall back to NameIdentifier
+            var userId = jwtToken.Subject;
+            if (string.IsNullOrEmpty(userId))
+            {
+                // Try NameIdentifier claim (ClaimTypes.NameIdentifier)
+                var nameIdClaim = jwtToken.Claims.FirstOrDefault(c =>
+                    c.Type == "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier" ||
+                    c.Type == "nameid");
+                userId = nameIdClaim?.Value;
+            }
+            logger.LogInformation("🔍 JWT UserId: {UserId}", userId ?? "NULL");
+            if (!string.IsNullOrEmpty(userId))
+            {
+                // Use indexer to set/replace header value instead of Append
+                context.Request.Headers["UserId"] = userId;
+                logger.LogInformation("✅ Set UserId header to: {UserId}", userId);
+            }
+            else
+            {
+                logger.LogWarning("❌ UserId not found in JWT claims");
+            }
+        }
+        catch (Exception ex)
+        {
+            logger.LogError(ex, "❌ Failed to extract UserId from JWT token");
+        }
+    }
+    await next(context);
+});
+
 app.MapControllers();

+// SignalR Hub - Requires JWT authentication
+app.MapHub<EntityChangeHub>("/hubs/entitychanges").RequireAuthorization();
+
 app.MapGet("/health", () => Results.Ok(new { status = "OK", timestamp = DateTime.UtcNow }))
     .AllowAnonymous();
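
Note that the named "api" and "auth" policies only take effect where they are attached, and this part of the diff does not show the attachment sites. A hypothetical controller illustrating how they could be applied (EnableRateLimiting is the standard ASP.NET Core attribute; the controller itself is made up):

using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;

[ApiController]
[Route("api/[controller]")]
[EnableRateLimiting("api")]      // 100 requests/minute per the "api" policy
public class ExampleController : ControllerBase
{
    [HttpPost("login")]
    [EnableRateLimiting("auth")] // stricter 10 requests/minute for auth endpoints
    public IActionResult Login() => Ok();
}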

View File

@@ -36,7 +36,7 @@ public class GoogleAuthService(AppDbContext context, IConfiguration configuratio
 if (user == null)
 {
     _logger.LogError("User not found in DiunaBI database: {Email}", payload.Email);
-    return (false, null, "User not found in DiunaBI database");
+    return (false, null, "Authentication failed");
 }
 user.UserName = payload.Name;

View File

@@ -52,7 +52,7 @@ public class JwtTokenService(IConfiguration configuration, ILogger<JwtTokenServi
 try
 {
     var jwtSettings = _configuration.GetSection("JwtSettings");
-    var secretKey = jwtSettings["SecretKey"];
+    var secretKey = jwtSettings["SecurityKey"];
     var issuer = jwtSettings["Issuer"];
     var audience = jwtSettings["Audience"];
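
This rename matters because Program.cs validates tokens against JwtSettings:SecurityKey, while this service was reading JwtSettings:SecretKey, which comes back null when configuration defines only SecurityKey. A hypothetical appsettings.json fragment consistent with both readers (all values are placeholders, not the project's real configuration):

{
  "JwtSettings": {
    "SecurityKey": "<at-least-32-byte-secret>",
    "Issuer": "<issuer>",
    "Audience": "<audience>"
  }
}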

View File

@@ -0,0 +1,17 @@
namespace DiunaBI.Application.DTOModels;
public class DataInboxDto
{
public Guid Id { get; set; }
public string Name { get; set; } = string.Empty;
public string Source { get; set; } = string.Empty;
public string Data { get; set; } = string.Empty;
public DateTime CreatedAt { get; set; }
}
public class DataInboxFilterRequest
{
public string? Search { get; set; }
public int Page { get; set; } = 1;
public int PageSize { get; set; } = 50;
}

View File

@@ -0,0 +1,11 @@
namespace DiunaBI.Application.DTOModels;
public class DeletedRecordDto
{
public Guid RecordId { get; set; }
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
public DateTime DeletedAt { get; set; }
public Guid DeletedById { get; set; }
public string DeletedByName { get; set; } = string.Empty;
}

View File

@@ -0,0 +1,27 @@
namespace DiunaBI.Application.DTOModels;
public class RecordHistoryDto
{
public Guid Id { get; set; }
public Guid RecordId { get; set; }
public Guid LayerId { get; set; }
// When and who
public DateTime ChangedAt { get; set; }
public Guid ChangedById { get; set; }
public string ChangedByName { get; set; } = string.Empty;
// Type of change
public string ChangeType { get; set; } = string.Empty; // "Created", "Updated", "Deleted"
// Snapshot values
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
// What changed
public string? ChangedFields { get; set; } // "Code, Desc1"
public string? ChangesSummary { get; set; } // JSON: {"Code": {"old": "A", "new": "B"}}
// Formatted display text
public string FormattedChange { get; set; } = string.Empty;
}

View File

@@ -12,6 +12,7 @@ public class QueueJob
 public JobType JobType { get; set; }
 public int Priority { get; set; } = 0; // 0 = highest priority
 public DateTime CreatedAt { get; set; } = DateTime.UtcNow;
+public DateTime ModifiedAt { get; set; } = DateTime.UtcNow;
 public int RetryCount { get; set; } = 0;
 public int MaxRetries { get; set; } = 5;
 public JobStatus Status { get; set; } = JobStatus.Pending;
@@ -19,9 +20,7 @@ public class QueueJob
 public DateTime? LastAttemptAt { get; set; }
 public DateTime? CompletedAt { get; set; }
 public Guid CreatedById { get; set; }
-public DateTime CreatedAtUtc { get; set; } = DateTime.UtcNow;
 public Guid ModifiedById { get; set; }
-public DateTime ModifiedAtUtc { get; set; } = DateTime.UtcNow;
 }

 public enum JobType

View File

@@ -0,0 +1,37 @@
using System;
namespace DiunaBI.Domain.Entities;
public enum RecordChangeType
{
Created = 1,
Updated = 2,
Deleted = 3
}
public class RecordHistory
{
public Guid Id { get; set; }
// Reference to the original record
public Guid RecordId { get; set; }
public Guid LayerId { get; set; }
// When and who
public DateTime ChangedAt { get; set; }
public Guid ChangedById { get; set; }
public User? ChangedBy { get; set; }
// Type of change
public RecordChangeType ChangeType { get; set; }
// Snapshot of record state at this point
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
// Comma-separated list of fields that changed (e.g., "Code,Desc1")
public string? ChangedFields { get; set; }
// JSON object with detailed changes: {"Code": {"old": "A", "new": "B"}}
public string? ChangesSummary { get; set; }
}

View File

@@ -5,6 +5,11 @@ namespace DiunaBI.Domain.Entities;
 public class User
 {
+    /// <summary>
+    /// System user ID for automated operations (imports, scheduled jobs, etc.)
+    /// </summary>
+    public static readonly Guid AutoImportUserId = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d");
+
     #region Properties
     public Guid Id { get; init; }
     public string? Email { get; init; }

View File

@@ -8,6 +8,7 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
 public DbSet<User> Users { get; init; }
 public DbSet<Layer> Layers { get; init; }
 public DbSet<Record> Records { get; init; }
+public DbSet<RecordHistory> RecordHistory { get; init; }
 public DbSet<ProcessSource> ProcessSources { get; init; }
 public DbSet<DataInbox> DataInbox { get; init; }
 public DbSet<QueueJob> QueueJobs { get; init; }
@@ -75,6 +76,30 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
     .HasForeignKey(x => x.LayerId)
     .OnDelete(DeleteBehavior.Cascade);

+modelBuilder.Entity<RecordHistory>().HasKey(x => x.Id);
+modelBuilder.Entity<RecordHistory>().Property(x => x.RecordId).IsRequired();
+modelBuilder.Entity<RecordHistory>().Property(x => x.LayerId).IsRequired();
+modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedAt).IsRequired();
+modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedById).IsRequired();
+modelBuilder.Entity<RecordHistory>().Property(x => x.ChangeType).IsRequired().HasConversion<int>();
+modelBuilder.Entity<RecordHistory>().Property(x => x.Code).IsRequired().HasMaxLength(50);
+modelBuilder.Entity<RecordHistory>().Property(x => x.Desc1).HasMaxLength(10000);
+modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedFields).HasMaxLength(200);
+modelBuilder.Entity<RecordHistory>().Property(x => x.ChangesSummary).HasMaxLength(4000);
+
+// Indexes for efficient history queries
+modelBuilder.Entity<RecordHistory>()
+    .HasIndex(x => new { x.RecordId, x.ChangedAt });
+modelBuilder.Entity<RecordHistory>()
+    .HasIndex(x => new { x.LayerId, x.ChangedAt });
+
+modelBuilder.Entity<RecordHistory>()
+    .HasOne(x => x.ChangedBy)
+    .WithMany()
+    .HasForeignKey(x => x.ChangedById)
+    .OnDelete(DeleteBehavior.Restrict);
+
 modelBuilder.Entity<ProcessSource>().HasKey(x => new { x.LayerId, x.SourceId });
 modelBuilder.Entity<ProcessSource>().Property(x => x.LayerId).IsRequired();
 modelBuilder.Entity<ProcessSource>().Property(x => x.SourceId).IsRequired();
@@ -111,9 +136,8 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
 modelBuilder.Entity<QueueJob>().Property(x => x.LastAttemptAt);
 modelBuilder.Entity<QueueJob>().Property(x => x.CompletedAt);
 modelBuilder.Entity<QueueJob>().Property(x => x.CreatedById).IsRequired();
-modelBuilder.Entity<QueueJob>().Property(x => x.CreatedAtUtc).IsRequired();
 modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedById).IsRequired();
-modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedAtUtc).IsRequired();
+modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedAt).IsRequired();

 // Configure automatic timestamps for entities with CreatedAt/ModifiedAt
 ConfigureTimestamps(modelBuilder);
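
For reference, a sketch of the query shape the new (RecordId, ChangedAt) composite index serves; it mirrors GetRecordHistory in the controller earlier. The db and recordId locals are assumed for illustration.

using Microsoft.EntityFrameworkCore;

// Filter on RecordId, order by ChangedAt: both columns come straight
// from the composite index configured above.
var history = await db.RecordHistory
    .Where(h => h.RecordId == recordId)
    .OrderByDescending(h => h.ChangedAt)
    .AsNoTracking()
    .ToListAsync();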

View File

@@ -22,7 +22,10 @@
     <PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="10.0.0" />
     <PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3525" />
     <PackageReference Include="Google.Apis.Drive.v3" Version="1.68.0.3490" />
-    <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0" />
+  </ItemGroup>
+  <ItemGroup>
+    <FrameworkReference Include="Microsoft.AspNetCore.App" />
   </ItemGroup>
 </Project>

View File

@@ -0,0 +1,201 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
namespace DiunaBI.Infrastructure.Interceptors;
public class EntityChangeInterceptor : SaveChangesInterceptor
{
private readonly object? _hubContext;
private readonly ILogger<EntityChangeInterceptor>? _logger;
private readonly List<(string Module, string Id, string Operation)> _pendingChanges = new();
public EntityChangeInterceptor(IServiceProvider serviceProvider)
{
_logger = serviceProvider.GetService(typeof(ILogger<EntityChangeInterceptor>)) as ILogger<EntityChangeInterceptor>;
// Try to get hub context - it may not be registered in some scenarios (e.g., migrations)
try
{
var hubType = Type.GetType("DiunaBI.API.Hubs.EntityChangeHub, DiunaBI.API");
if (hubType != null)
{
var hubContextType = typeof(IHubContext<>).MakeGenericType(hubType);
_hubContext = serviceProvider.GetService(hubContextType);
if (_hubContext != null)
{
_logger?.LogInformation("✅ EntityChangeInterceptor: Hub context initialized");
Console.WriteLine("✅ EntityChangeInterceptor: Hub context initialized");
}
else
{
_logger?.LogWarning("⚠️ EntityChangeInterceptor: Hub context is null");
Console.WriteLine("⚠️ EntityChangeInterceptor: Hub context is null");
}
}
else
{
_logger?.LogWarning("⚠️ EntityChangeInterceptor: Hub type not found");
Console.WriteLine("⚠️ EntityChangeInterceptor: Hub type not found");
}
}
catch (Exception ex)
{
_logger?.LogError(ex, "❌ EntityChangeInterceptor: Failed to initialize hub context");
Console.WriteLine($"❌ EntityChangeInterceptor: Failed to initialize hub context: {ex.Message}");
_hubContext = null;
}
}
public override ValueTask<InterceptionResult<int>> SavingChangesAsync(
DbContextEventData eventData,
InterceptionResult<int> result,
CancellationToken cancellationToken = default)
{
_pendingChanges.Clear();
Console.WriteLine($"🔍 EntityChangeInterceptor.SavingChangesAsync called. HubContext null? {_hubContext == null}, Context null? {eventData.Context == null}");
if (_hubContext != null && eventData.Context != null)
{
// Capture changes BEFORE save
var entries = eventData.Context.ChangeTracker.Entries().ToList();
Console.WriteLine($"🔍 Found {entries.Count} total entries in ChangeTracker");
foreach (var entry in entries)
{
Console.WriteLine($"🔍 Entry: {entry.Metadata.ClrType.Name}, State: {entry.State}");
if (entry.State == EntityState.Added ||
entry.State == EntityState.Modified ||
entry.State == EntityState.Deleted)
{
var module = entry.Metadata.GetTableName() ?? entry.Metadata.ClrType.Name;
var id = GetEntityId(entry);
var operation = entry.State switch
{
EntityState.Added => "created",
EntityState.Modified => "updated",
EntityState.Deleted => "deleted",
_ => "unknown"
};
Console.WriteLine($"🔍 Detected change: {module} {id} {operation}");
if (id != null)
{
_pendingChanges.Add((module, id, operation));
Console.WriteLine($"✅ Added to pending changes: {module} {id} {operation}");
}
else
{
Console.WriteLine($"⚠️ Skipped (id is null): {module} {operation}");
}
}
}
Console.WriteLine($"🔍 Total pending changes: {_pendingChanges.Count}");
}
return base.SavingChangesAsync(eventData, result, cancellationToken);
}
public override async ValueTask<int> SavedChangesAsync(
SaveChangesCompletedEventData eventData,
int result,
CancellationToken cancellationToken = default)
{
// Broadcast changes AFTER successful save
if (_hubContext != null && result > 0 && _pendingChanges.Any())
{
_logger?.LogInformation("📤 Broadcasting {Count} entity changes via SignalR", _pendingChanges.Count);
Console.WriteLine($"📤 Broadcasting {_pendingChanges.Count} entity changes via SignalR");
foreach (var (module, id, operation) in _pendingChanges)
{
try
{
Console.WriteLine($"📤 Broadcasting: {module} {id} {operation}");
// Use reflection to call hub methods since we can't reference the API project
var clientsProperty = _hubContext.GetType().GetProperty("Clients");
Console.WriteLine($" 🔍 Clients property: {clientsProperty != null}");
if (clientsProperty != null)
{
var clients = clientsProperty.GetValue(_hubContext);
Console.WriteLine($" 🔍 Clients value: {clients != null}, Type: {clients?.GetType().Name}");
if (clients != null)
{
var allProperty = clients.GetType().GetProperty("All");
Console.WriteLine($" 🔍 All property: {allProperty != null}");
if (allProperty != null)
{
var allClients = allProperty.GetValue(clients);
Console.WriteLine($" 🔍 AllClients value: {allClients != null}, Type: {allClients?.GetType().Name}");
if (allClients != null)
{
// SendAsync is an extension method, so we need to find it differently
// Look for the IClientProxy interface which has SendCoreAsync
var sendCoreAsyncMethod = allClients.GetType().GetMethod("SendCoreAsync");
Console.WriteLine($" 🔍 SendCoreAsync method found: {sendCoreAsyncMethod != null}");
if (sendCoreAsyncMethod != null)
{
// SendCoreAsync takes (string method, object?[] args, CancellationToken cancellationToken)
var task = sendCoreAsyncMethod.Invoke(allClients, new object[]
{
"EntityChanged",
new object[] { new { module, id, operation } },
cancellationToken
}) as Task;
Console.WriteLine($" 🔍 Task created: {task != null}");
if (task != null)
{
await task;
Console.WriteLine($"✅ Broadcast successful: {module} {id} {operation}");
}
else
{
Console.WriteLine($"❌ Task is null after invoke");
}
}
else
{
Console.WriteLine($"❌ SendCoreAsync method not found");
}
}
}
}
}
}
catch (Exception ex)
{
_logger?.LogError(ex, "❌ Failed to broadcast entity change");
Console.WriteLine($"❌ Failed to broadcast: {ex.Message}");
Console.WriteLine($"❌ Stack trace: {ex.StackTrace}");
}
}
}
_pendingChanges.Clear();
return await base.SavedChangesAsync(eventData, result, cancellationToken);
}
private static string? GetEntityId(Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry entry)
{
var keyProperty = entry.Metadata.FindPrimaryKey()?.Properties.FirstOrDefault();
if (keyProperty == null)
return null;
var value = entry.Property(keyProperty.Name).CurrentValue;
return value?.ToString();
}
}
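
For readers untangling the reflection in SavedChangesAsync: it amounts to the typed call below, which this project cannot write directly because it would require a reference to the API project where EntityChangeHub lives. The hubContext variable here is an assumed IHubContext<EntityChangeHub>, shown for reference only.

// Equivalent typed broadcast of one pending change:
await hubContext.Clients.All.SendCoreAsync(
    "EntityChanged",
    new object[] { new { module, id, operation } },
    cancellationToken);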

Some files were not shown because too many files have changed in this diff.