Compare commits

...

27 Commits

Author SHA1 Message Date
16eb688607 Clients logo 2025-12-10 12:28:36 +01:00
2132c130a3 update changelog 2025-12-08 23:09:54 +01:00
dffbc31432 Refactor job sorting logic, reduce poll interval, and implement SignalR subscriptions for real-time updates in DataInbox and Layers pages
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m28s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m26s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m38s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m38s
2025-12-08 23:08:46 +01:00
151ecaa98f Fix job scheduler race condition and enhance Blazor reconnection UI
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m27s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m23s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m43s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m39s
2025-12-08 22:45:31 +01:00
b917aa5077 Add Blazor Server reconnection modal and timer functionality 2025-12-08 22:30:31 +01:00
24f5f91704 update readme
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m28s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m27s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m41s
2025-12-08 22:07:16 +01:00
00c9584d03 Schedule Jobs from UI 2025-12-08 22:02:57 +01:00
c94a3b41c9 Duplicate models fields fix 2025-12-08 21:54:48 +01:00
e25cdc4441 UI timezone 2025-12-08 21:42:10 +01:00
1f95d57717 JobList filter fix 2025-12-08 21:28:24 +01:00
d2fb9b8071 Fix API Key Authorization for Cron Jobs by adding [AllowAnonymous] attribute to scheduling endpoints
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m29s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m29s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m46s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m49s
2025-12-06 00:50:20 +01:00
08abd96751 SignalR FIX
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m26s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m24s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m38s
2025-12-06 00:36:22 +01:00
eb570679ba UI Fix
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m28s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m26s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m39s
2025-12-06 00:03:46 +01:00
8713ed9686 LayerDetail improvement
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m27s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m29s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m39s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m37s
2025-12-05 23:49:16 +01:00
595076033b More security!
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m25s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m25s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m40s
2025-12-05 23:41:56 +01:00
0c874575d4 SignalR Security 2025-12-05 23:17:02 +01:00
71c293320b Security: controllers and stack traces in logs
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m32s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m29s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m47s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m42s
2025-12-05 21:37:15 +01:00
46805fb196 Security: JWT
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m32s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m30s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m51s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m50s
2025-12-05 21:17:04 +01:00
51f2679732 Handle unauthorized
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m33s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m53s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m51s
2025-12-05 20:34:18 +01:00
6b0f936f40 P2 processor is working as a charm!
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m23s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m28s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m37s
2025-12-05 19:10:28 +01:00
0eb2a457f7 PedrolloPL: P2 -> B3
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m25s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m24s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m41s
2025-12-05 10:35:35 +01:00
0cf0bad6b1 UI build fix
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m30s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m25s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m45s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m42s
2025-12-05 10:07:45 +01:00
c7d9acead0 UI refactor (structure cleanup)
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m18s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m18s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m38s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m37s
2025-12-05 09:51:04 +01:00
193127b86a SingalR for realtime entitychanges
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m36s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m31s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m55s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m53s
2025-12-04 22:20:00 +01:00
bf2beda390 build fix2
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m41s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m36s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m49s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m48s
2025-12-04 18:44:39 +01:00
942da18d85 Build fix
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m47s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m47s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m27s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m28s
2025-12-04 17:57:37 +01:00
a3fa8f9b91 P2 import is working
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m18s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m14s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m10s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m11s
2025-12-04 15:53:11 +01:00
69 changed files with 3821 additions and 376 deletions

View File

@@ -0,0 +1,3 @@
Read the project context file at `.claude/project-context.md` to quickly understand the DiunaBI project structure, architecture, key components, and recent development focus. This will bootstrap your knowledge without needing to explore the entire codebase.
After reading the context file, briefly acknowledge what you've learned and ask the user what they need help with.

View File

@@ -0,0 +1,27 @@
Update the `.claude/project-context.md` file by ONLY appending changes made during THIS session to the "RECENT CHANGES (This Session)" section at the top of the file.
**DO NOT re-scan or re-explore the entire codebase** - this wastes tokens and time.
**What to do:**
1. Review the conversation history to identify what was changed/added/fixed in THIS session
2. Read the current `.claude/project-context.md` file
3. Update ONLY the "RECENT CHANGES (This Session)" section at the top with:
- Date of changes (today's date)
- Brief bullet points describing what was modified
- Files that were changed with brief descriptions
- Any new functionality added
- Bug fixes completed
4. Leave the rest of the file unchanged
**Format for session changes:**
```markdown
## RECENT CHANGES (This Session)
**[Feature/Fix Name] ([Date]):**
- ✅ Brief description of change 1
- ✅ Brief description of change 2
- Files modified: [file1.cs](path/to/file1.cs), [file2.cs](path/to/file2.cs)
```
When done, provide a brief summary of what session changes were documented.

819
.claude/project-context.md Normal file
View File

@@ -0,0 +1,819 @@
# DiunaBI Project Context
> This file is auto-generated for Claude Code to quickly understand the project structure.
> Last updated: 2025-12-08
## RECENT CHANGES (This Session)
**SignalR Real-Time Updates & UI Consistency (Dec 8, 2025):**
- **Removed Manual Refresh Button** - Removed refresh button from Jobs/Index.razor (SignalR auto-refresh eliminates need)
- **SignalR on Layers List** - Added real-time updates to Layers/Index with EntityChangeHubService subscription
- **SignalR on DataInbox List** - Added real-time updates to DataInbox/Index with EntityChangeHubService subscription
- **SignalR on Layer Details** - Added real-time updates to Layers/Details for both layer and record changes
- **Consistent UI Behavior** - All lists now have uniform SignalR-based real-time updates
- **Proper Cleanup** - Implemented IDisposable pattern to unsubscribe from SignalR events on all pages
- **Jobs Sorting Fix** - Changed sorting from Priority→JobType→CreatedAt DESC to CreatedAt DESC→Priority ASC (newest jobs first, then by priority)
- **Faster Job Processing** - Reduced JobWorkerService poll interval from 10 seconds to 5 seconds
- Files modified:
- [Jobs/Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor) - removed refresh button
- [Layers/Index.razor](DiunaBI.UI.Shared/Pages/Layers/Index.razor), [Layers/Index.razor.cs](DiunaBI.UI.Shared/Pages/Layers/Index.razor.cs) - added SignalR + IDisposable
- [DataInbox/Index.razor](DiunaBI.UI.Shared/Pages/DataInbox/Index.razor), [DataInbox/Index.razor.cs](DiunaBI.UI.Shared/Pages/DataInbox/Index.razor.cs) - added SignalR + IDisposable
- [Layers/Details.razor](DiunaBI.UI.Shared/Pages/Layers/Details.razor), [Layers/Details.razor.cs](DiunaBI.UI.Shared/Pages/Layers/Details.razor.cs) - added SignalR + IDisposable
- [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs) - fixed sorting logic
- [JobWorkerService.cs](DiunaBI.Infrastructure/Services/JobWorkerService.cs) - reduced poll interval to 5 seconds
- Status: All lists have consistent real-time behavior, no manual refresh needed, jobs sorted by date first
---
**Job Scheduler Race Condition Fix (Dec 8, 2025):**
- **In-Memory Deduplication** - Added `HashSet<Guid>` to track LayerIds scheduled within the same batch
- **Prevents Duplicate Jobs** - Fixed race condition where same layer could be scheduled multiple times during single "Run All Jobs" operation
- **Two-Level Protection** - In-memory check (HashSet) runs before database check for O(1) performance
- **Applied to Both Methods** - Fixed both ScheduleImportJobsAsync and ScheduleProcessJobsAsync
- **Better Logging** - Added debug log message "Job already scheduled in this batch" for transparency
- Root cause: When multiple layers had same ID in query results or import plugins created new layers during scheduling loop, database check couldn't detect duplicates added in same batch before SaveChangesAsync()
- Solution: Track scheduled LayerIds in HashSet during loop iteration to prevent within-batch duplicates
- Files modified: [JobSchedulerService.cs](DiunaBI.Infrastructure/Services/JobSchedulerService.cs)
- Status: Race condition resolved, duplicate job creation prevented
---
**Blazor Server Reconnection UI Customization (Dec 8, 2025):**
- **Custom Reconnection Modal** - Replaced default Blazor "Rejoin failed..." dialog with custom-styled modal
- **Theme-Matched Styling** - Changed loader and button colors from blue to app's primary red (#e7163d) matching navbar
- **Timer with Elapsed Seconds** - Added real-time timer showing elapsed reconnection time (0s, 1s, 2s...)
- **CSS Classes Integration** - Used Blazor's built-in `.components-reconnect-show/failed/rejected` classes for state management
- **MutationObserver Timer** - JavaScript watches for CSS class changes to start/stop elapsed time counter
- **Professional Design** - Modal backdrop blur, spinner animation, red reload button with hover effects
- Files modified: [App.razor](DiunaBI.UI.Web/Components/App.razor), [app.css](DiunaBI.UI.Web/wwwroot/app.css)
- Files created: [reconnect.js](DiunaBI.UI.Web/wwwroot/js/reconnect.js)
- Status: Blazor reconnection UI now matches app theme with timer indicator
**Jobs List Sorting and Multi-Select Filtering (Dec 8, 2025):**
- **Fixed Job Sorting** - Changed from single CreatedAt DESC to Priority ASC → JobType → CreatedAt DESC
- **Multi-Select Status Filter** - Replaced single status dropdown with multi-select supporting multiple JobStatus values
- **Auto-Refresh on Filter Change** - Filters now automatically trigger data reload without requiring manual button click
- **API Updates** - JobsController GetAll endpoint accepts `List<JobStatus>? statuses` instead of single status
- **JobService Updates** - Sends status values as integers in query string for multi-select support
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [JobService.cs](DiunaBI.UI.Shared/Services/JobService.cs), [Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor), [Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs)
- Status: Jobs list now sortable by priority/type/date with working multi-select filters
**User Timezone Support (Dec 8, 2025):**
- **DateTimeHelper Service** - Created JS Interop service to detect user's browser timezone
- **UTC to Local Conversion** - All date displays now show user's local timezone instead of UTC
- **Database Consistency** - Database continues to store UTC (correct), conversion only for display
- **Updated Pages** - Applied timezone conversion to all date fields in:
- Jobs Index and Details pages
- Layers Details page (CreatedAt, ModifiedAt, record history)
- DataInbox Index page
- **Service Registration** - Registered DateTimeHelper as scoped service in DI container
- Files created: [DateTimeHelper.cs](DiunaBI.UI.Shared/Services/DateTimeHelper.cs)
- Files modified: [ServiceCollectionExtensions.cs](DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs), [Jobs/Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs), [Jobs/Details.razor](DiunaBI.UI.Shared/Pages/Jobs/Details.razor), [Layers/Details.razor](DiunaBI.UI.Shared/Pages/Layers/Details.razor), [Layers/Details.razor.cs](DiunaBI.UI.Shared/Pages/Layers/Details.razor.cs), [DataInbox/Index.razor.cs](DiunaBI.UI.Shared/Pages/DataInbox/Index.razor.cs)
- Status: All dates display in user's local timezone with format "yyyy-MM-dd HH:mm:ss"
**QueueJob Model Cleanup and AutoImport User (Dec 8, 2025):**
- **Removed Duplicate Fields** - Removed CreatedAtUtc and ModifiedAtUtc from QueueJob (were duplicates of CreatedAt/ModifiedAt)
- **Added ModifiedAt Field** - Was missing, now tracks job modification timestamp
- **AutoImport User ID** - Created User.AutoImportUserId constant: `f392209e-123e-4651-a5a4-0b1d6cf9ff9d`
- **System Operations** - All system-created/modified jobs now use AutoImportUserId for CreatedById and ModifiedById
- **Database Migration** - Created migration: RemoveQueueJobDuplicateUTCFields
- Files modified: [QueueJob.cs](DiunaBI.Domain/Entities/QueueJob.cs), [User.cs](DiunaBI.Domain/Entities/User.cs), [JobWorkerService.cs](DiunaBI.Infrastructure/Services/JobWorkerService.cs), [JobSchedulerService.cs](DiunaBI.Infrastructure/Services/JobSchedulerService.cs), [AppDbContext.cs](DiunaBI.Infrastructure/Data/AppDbContext.cs), [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs)
- Files created: [20251208205202_RemoveQueueJobDuplicateUTCFields.cs](DiunaBI.Infrastructure/Migrations/20251208205202_RemoveQueueJobDuplicateUTCFields.cs)
- Status: QueueJob model cleaned up, all automated operations tracked with AutoImport user ID
**Job Scheduling UI with JWT Authorization (Dec 8, 2025):**
- **New JWT Endpoints** - Created UI-specific endpoints at `/jobs/ui/schedule/*` with JWT authorization (parallel to API key endpoints)
- **Three Scheduling Options** - MudMenu dropdown in Jobs Index with:
- Run All Jobs - schedules all import and process jobs
- Run All Imports - schedules import jobs only
- Run All Processes - schedules process jobs only
- **JobService Methods** - Added three scheduling methods returning (success, jobsCreated, message) tuples
- **Auto-Refresh** - Jobs list automatically reloads after scheduling with success/failure notifications
- **Dual Authorization** - Existing `/jobs/schedule/{apiKey}` endpoints for automation, new `/jobs/ui/schedule` endpoints for UI users
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [JobService.cs](DiunaBI.UI.Shared/Services/JobService.cs), [Index.razor](DiunaBI.UI.Shared/Pages/Jobs/Index.razor), [Index.razor.cs](DiunaBI.UI.Shared/Pages/Jobs/Index.razor.cs)
- Status: UI users can now schedule jobs directly from Jobs page using JWT authentication
---
**API Key Authorization Fix for Cron Jobs (Dec 6, 2025):**
- **Fixed 401 Unauthorized on API Key Endpoints** - Cron jobs calling `/jobs/schedule` endpoints were getting rejected despite valid API keys
- **Added [AllowAnonymous] Attribute** - Bypasses controller-level `[Authorize]` to allow `[ApiKeyAuth]` filter to handle authorization
- **Three Endpoints Fixed** - Applied fix to all job scheduling endpoints:
- `POST /jobs/schedule` - Schedule all jobs (imports + processes)
- `POST /jobs/schedule/imports` - Schedule import jobs only
- `POST /jobs/schedule/processes` - Schedule process jobs only
- Root cause: Controller-level `[Authorize]` attribute required JWT Bearer auth for all endpoints, blocking API key authentication
- Solution: Add `[AllowAnonymous]` to allow `[ApiKeyAuth]` filter to validate X-API-Key header
- Files modified: [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs)
- Status: Cron jobs can now authenticate with API key via X-API-Key header
**SignalR Authentication Token Flow Fix (Dec 6, 2025):**
- **TokenProvider Population** - Fixed `TokenProvider.Token` never being set with JWT, causing 401 Unauthorized on SignalR connections
- **AuthService Token Management** - Injected `TokenProvider` into `AuthService` and set token in 3 key places:
- `ValidateWithBackendAsync()` - on fresh Google login
- `CheckAuthenticationAsync()` - on session restore from localStorage
- `ClearAuthenticationAsync()` - clear token on logout
- **SignalR Initialization Timing** - Moved SignalR initialization from `MainLayout.OnInitializedAsync` to after authentication completes
- **Event-Driven Architecture** - `MainLayout` now subscribes to `AuthenticationStateChanged` event to initialize SignalR when user authenticates
- **Session Restore Support** - `CheckAuthenticationAsync()` now fires `AuthenticationStateChanged` event to initialize SignalR on page refresh
- Root cause: SignalR was initialized before authentication, so JWT token was empty during connection setup
- Solution: Initialize SignalR only after token is available via event subscription
- Files modified: [AuthService.cs](DiunaBI.UI.Shared/Services/AuthService.cs), [MainLayout.razor](DiunaBI.UI.Shared/Components/Layout/MainLayout.razor)
- Status: SignalR authentication working for both fresh login and restored sessions
**SignalR Authentication DI Fix (Dec 6, 2025):**
- **TokenProvider Registration** - Added missing `TokenProvider` service registration in DI container
- **EntityChangeHubService Scope Fix** - Changed from singleton to scoped to support user-specific JWT tokens
- **Bug Fix** - Resolved `InvalidOperationException` preventing app from starting after SignalR authentication was added
- Root cause: Singleton service (`EntityChangeHubService`) cannot depend on scoped service (`TokenProvider`) in DI
- Solution: Made `EntityChangeHubService` scoped so each user session has its own authenticated SignalR connection
- Files modified: [ServiceCollectionExtensions.cs](DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs)
---
**Security Audit & Hardening (Dec 5, 2025):**
- **JWT Token Validation** - Enabled issuer/audience validation in [Program.cs](DiunaBI.API/Program.cs), fixed config key mismatch in [JwtTokenService.cs](DiunaBI.API/Services/JwtTokenService.cs)
- **API Key Security** - Created [ApiKeyAuthAttribute.cs](DiunaBI.API/Attributes/ApiKeyAuthAttribute.cs) with X-API-Key header auth, constant-time comparison
- **Job Endpoints** - Migrated 3 job scheduling endpoints in [JobsController.cs](DiunaBI.API/Controllers/JobsController.cs) from URL-based to header-based API keys
- **Stack Trace Exposure** - Fixed 20 instances across 3 controllers ([JobsController.cs](DiunaBI.API/Controllers/JobsController.cs), [LayersController.cs](DiunaBI.API/Controllers/LayersController.cs), [DataInboxController.cs](DiunaBI.API/Controllers/DataInboxController.cs)) - now returns generic error messages
- **SignalR Authentication** - Added [Authorize] to [EntityChangeHub.cs](DiunaBI.API/Hubs/EntityChangeHub.cs), configured JWT token in [EntityChangeHubService.cs](DiunaBI.UI.Shared/Services/EntityChangeHubService.cs)
- **Rate Limiting** - Implemented ASP.NET Core rate limiting: 100 req/min general, 10 req/min auth in [Program.cs](DiunaBI.API/Program.cs)
- **Security Headers** - Added XSS, clickjacking, MIME sniffing protection middleware in [Program.cs](DiunaBI.API/Program.cs)
- **Input Validation** - Added pagination limits (1-1000) to GetAll endpoints in 3 controllers
- **User Enumeration** - Fixed generic auth error in [GoogleAuthService.cs](DiunaBI.API/Services/GoogleAuthService.cs)
- **Sensitive Data Logging** - Made conditional on development only in [Program.cs](DiunaBI.API/Program.cs)
- **Base64 Size Limit** - Added 10MB limit to DataInbox in [DataInboxController.cs](DiunaBI.API/Controllers/DataInboxController.cs)
- Files modified: 12 files (API: Program.cs, 4 controllers, 3 services, 1 hub, 1 new attribute; UI: EntityChangeHubService.cs, ServiceCollectionExtensions.cs)
- Security status: 5/5 CRITICAL fixed, 3/3 HIGH fixed, 4/4 MEDIUM fixed
**Seq Removal - Logging Cleanup (Dec 5, 2025):**
- ✅ Removed Seq logging sink to eliminate commercial licensing concerns
- ✅ Removed `Serilog.Sinks.Seq` NuGet package from DiunaBI.API.csproj
- ✅ Removed Seq sink configuration from appsettings.Development.json
- ✅ Kept Serilog (free, open-source) with Console + File sinks for production-ready logging
- ✅ Build verified - no errors after Seq removal
- Files modified: [DiunaBI.API.csproj](DiunaBI.API/DiunaBI.API.csproj), [appsettings.Development.json](DiunaBI.API/appsettings.Development.json)
- Manual step required: Remove `seq` service from docker-compose.yml and add Docker log rotation config
**UI Reorganization (Dec 5, 2025):**
- ✅ Moved pages to feature-based folders: `Pages/Layers/`, `Pages/Jobs/`, `Pages/DataInbox/`
- ✅ Organized components: `Components/Layout/` (MainLayout, EmptyLayout, Routes), `Components/Auth/` (AuthGuard, LoginCard)
- ✅ Removed obsolete wrapper files (LayerListPage, JobListPage, DataInboxListPage, etc.)
- ✅ Removed duplicate component files (LayerListComponent, JobListComponent, DataInboxListComponent)
- ✅ Standardized code-behind: `.razor.cs` for complex logic, inline `@code` for simple pages
- ✅ Updated `_Imports.razor` with new namespaces: `DiunaBI.UI.Shared.Components.Layout`, `DiunaBI.UI.Shared.Components.Auth`
- ✅ All routes unchanged - backward compatible
---
## PROJECT TYPE & TECH STACK
**Application Type:** Full-stack Business Intelligence (BI) platform with multi-tier architecture, real-time capabilities, and plugin system
**Core Stack:**
- Backend: ASP.NET Core 10.0 Web API
- Frontend: Blazor Server + MAUI Mobile
- Database: SQL Server + EF Core 10.0
- UI: MudBlazor 8.0
- Real-time: SignalR (EntityChangeHub)
- Google: Sheets API, Drive API, OAuth
- Logging: Serilog (Console, File)
- Auth: JWT Bearer + Google OAuth
---
## SOLUTION STRUCTURE (10 Projects)
```
DiunaBI.API (Web API)
├── Controllers: Auth, Layers, Jobs, DataInbox
├── Hubs: EntityChangeHub (SignalR real-time updates)
└── Services: GoogleAuth, JwtToken
DiunaBI.Domain (Entities)
└── User, Layer, Record, RecordHistory, QueueJob, DataInbox, ProcessSource
DiunaBI.Application (DTOs)
└── LayerDto, RecordDto, UserDto, RecordHistoryDto, PagedResult, JobDto
DiunaBI.Infrastructure (Data + Services)
├── Data: AppDbContext, Migrations (47 total)
├── Interceptors: EntityChangeInterceptor (auto-broadcasts DB changes)
├── Services: PluginManager, JobScheduler, JobWorker, GoogleSheets/Drive
├── Plugins: BaseDataImporter, BaseDataProcessor, BaseDataExporter
└── Interfaces: IPlugin, IDataProcessor, IDataImporter, IDataExporter
DiunaBI.UI.Web (Blazor Server)
└── Server-side Blazor web application
DiunaBI.UI.Mobile (MAUI)
└── iOS, Android, Windows, macOS support
DiunaBI.UI.Shared (Blazor Component Library - Reorganized)
├── Pages/
│ ├── Layers/ (Index.razor, Details.razor)
│ ├── Jobs/ (Index.razor, Details.razor)
│ ├── DataInbox/ (Index.razor, Details.razor)
│ ├── Dashboard.razor, Login.razor, Index.razor
├── Components/
│ ├── Layout/ (MainLayout, EmptyLayout, Routes)
│ └── Auth/ (AuthGuard, LoginCard)
└── Services/
├── LayerService, JobService, DataInboxService
├── EntityChangeHubService (SignalR client)
├── FilterStateServices (remember filters)
└── AuthService, TokenProvider
DiunaBI.Plugins.Morska (Feature Plugin)
├── Importers: Standard, D1, D3, FK2 (4 total)
├── Processors: D6, T1, T3, T4, T5 variants (12 total)
└── Exporters: Google Sheets export (1)
DiunaBI.Plugins.PedrolloPL (Feature Plugin - NEW)
└── Importers: B3 (1 total)
DiunaBI.Tests (Testing)
└── Unit and integration tests
```
---
## CORE FUNCTIONALITY
**Purpose:** BI platform for data import, processing, transformation via modular plugin architecture. Multi-layer workflows with audit trails, real-time notifications, scheduled job processing.
**Main Features:**
1. **Layer Management** - 4 types (Import/Processed/Admin/Dictionary), parent-child relationships, soft deletes
2. **Data Records** - 32 numeric columns (Value1-32) + description, hierarchical, full audit trail
3. **Plugin Architecture** - Dynamic assembly loading, base classes in Infrastructure, 3 types (Importers/Processors/Exporters)
4. **Job Queue System** - Background worker with retry logic (30s → 2m → 5m), priority-based, auto-scheduling
5. **External Data** - DataInbox API, Google Sheets read/write, Google Drive integration
6. **Real-time Updates** - SignalR broadcasts entity changes (create/update/delete) to all connected clients
7. **Audit Trail** - RecordHistory tracks all record changes with field-level diffs and JSON summaries
8. **Filter Persistence** - UI filter states saved across sessions (LayerFilterStateService, DataInboxFilterStateService)
---
## KEY ENTITIES
**Layer**
- Id, Number, Name, Type (Import/Processed/Administration/Dictionary)
- CreatedAt/ModifiedAt, CreatedBy/ModifiedBy (with user relations)
- IsDeleted (soft delete), IsCancelled (processing control), ParentId
- Relations: Records (1-to-many), ProcessSources (1-to-many)
**Record**
- Id, Code (unique identifier), LayerId
- Value1-Value32 (double?), Desc1 (string, max 10000 chars)
- CreatedAt/ModifiedAt, CreatedBy/ModifiedBy, IsDeleted
- Audit: Full history tracked in RecordHistory table
**RecordHistory** (NEW - Migration 47)
- RecordId, LayerId, ChangedAt, ChangedById
- ChangeType (Created/Updated/Deleted)
- Code, Desc1 (snapshot at time of change)
- ChangedFields (comma-separated field names)
- ChangesSummary (JSON with old/new values)
- Indexes: (RecordId, ChangedAt), (LayerId, ChangedAt) for performance
**QueueJob**
- LayerId, LayerName, PluginName
- JobType (Import/Process)
- Priority (0 = highest), Status (Pending/Running/Completed/Failed/Retrying)
- RetryCount, MaxRetries (default 5)
- CreatedAt, LastAttemptAt, CompletedAt
- LastError (detailed error message)
**DataInbox**
- Id, Name, Source (identifiers)
- Data (base64-encoded JSON array)
- CreatedAt
- Used by importers to stage incoming data
**User**
- Id (Guid), Email, UserName
- CreatedAt, LastLoginAt
- Google OAuth identity
**ProcessSource**
- Id, SourceLayerId, TargetLayerId
- Defines layer processing relationships
---
## API ENDPOINTS
**Base:** `/` (ApiController routes)
### AuthController (/auth)
- `POST /auth/apiToken` - Exchange Google ID token for JWT (AllowAnonymous)
- `POST /auth/refresh` - Refresh expired JWT token
### LayersController (/layers)
- `GET /layers?page=1&pageSize=10&search=&type=` - List layers (paged, filterable)
- `GET /layers/{id}` - Get layer details with records
- `POST /layers` - Create new layer
- `PUT /layers/{id}` - Update layer
- `DELETE /layers/{id}` - Soft delete layer
- `POST /layers/{id}/records` - Add/update records
- `PUT /layers/{layerId}/records/{recordId}` - Update specific record
- `DELETE /layers/{layerId}/records/{recordId}` - Delete record
- `GET /layers/{layerId}/records/{recordId}/history` - Get record history
- `GET /layers/{layerId}/deleted-records` - Get deleted records with history
### JobsController (/jobs) - NEW
- `GET /jobs?page=1&pageSize=50&status=&jobType=` - List jobs (paged, filterable)
- `GET /jobs/{id}` - Get job details
- `GET /jobs/stats` - Get job statistics (counts by status)
- `POST /jobs/schedule/{apiKey}` - Schedule all jobs from layer configs
- `POST /jobs/schedule/imports/{apiKey}` - Schedule import jobs only
- `POST /jobs/schedule/processes/{apiKey}` - Schedule process jobs only
- `POST /jobs/create-for-layer/{layerId}` - Create job for specific layer (manual trigger)
- `POST /jobs/{id}/retry` - Retry failed job (resets to Pending)
- `DELETE /jobs/{id}` - Cancel pending/retrying job
### DataInboxController (/datainbox)
- `GET /datainbox?page=1&pageSize=10&search=` - List inbox items (paged, filterable)
- `GET /datainbox/{id}` - Get inbox item with decoded data
- `POST /datainbox` - Create inbox item
- `PUT /datainbox/Add/{apiKey}` - Add data (API key + Basic Auth)
- `DELETE /datainbox/{id}` - Delete inbox item
### SignalR Hub
- `/hubs/entitychanges` - SignalR hub for real-time entity change notifications
- Event: `EntityChanged(module, id, operation)` - broadcasts to all clients
- Modules: QueueJobs, Layers, Records, RecordHistory
---
## AUTHENTICATION & SECURITY
**Flow:**
1. Client exchanges Google ID token → `/auth/apiToken`
2. GoogleAuthService validates token with Google, maps to internal User
3. Returns JWT (7-day expiration, HS256 signing)
4. JWT required on all protected endpoints (except /auth/apiToken, /health)
5. UserId extraction middleware sets X-UserId header for audit trails
**Security:**
- Google OAuth 2.0 for identity verification
- JWT Bearer tokens for API access
- API key + Basic Auth for DataInbox external endpoints
- CORS configured for:
- http://localhost:4200
- https://diuna.bim-it.pl
- https://morska.diunabi.com
---
## KEY SERVICES
### Infrastructure Services
**PluginManager**
- Location: `DiunaBI.Infrastructure/Services/PluginManager.cs`
- Loads plugin assemblies from `bin/Plugins/` directory at startup
- Registers IDataProcessor, IDataImporter, IDataExporter implementations
- Provides plugin discovery and execution
**JobSchedulerService**
- Location: `DiunaBI.Infrastructure/Services/JobSchedulerService.cs`
- Creates QueueJob entries from Administration layer configs
- Reads layer.Records with Code="Plugin", Code="Priority", Code="MaxRetries"
- Methods: ScheduleImportJobsAsync, ScheduleProcessJobsAsync, ScheduleAllJobsAsync
**JobWorkerService** (BackgroundService)
- Location: `DiunaBI.Infrastructure/Services/JobWorkerService.cs`
- Polls QueueJobs table every 10 seconds
- Executes jobs via PluginManager (Import/Process)
- Retry logic with exponential backoff: 30s → 2m → 5m delays
- Rate limiting: 5-second delay after imports (Google Sheets API quota)
- Updates job status in real-time (triggers SignalR broadcasts)
**EntityChangeInterceptor**
- Location: `DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- EF Core SaveChangesInterceptor
- Captures entity changes: Added, Modified, Deleted
- Broadcasts changes via SignalR EntityChangeHub after successful save
- Uses reflection to avoid circular dependencies with IHubContext
**GoogleSheetsHelper**
- Location: `DiunaBI.Infrastructure/Helpers/GoogleSheetsHelper.cs`
- Google Sheets API v4 integration
- Methods: ReadRange, WriteRange, CreateSpreadsheet, UpdateSpreadsheet
**GoogleDriveHelper**
- Location: `DiunaBI.Infrastructure/Helpers/GoogleDriveHelper.cs`
- Google Drive API v3 integration
- Methods: UploadFile, ListFiles, MoveFile
**GoogleAuthService / JwtTokenService**
- Authentication and token management
- JWT generation and validation
### UI Services
**EntityChangeHubService**
- Location: `DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
- Singleton service for SignalR client connection
- Auto-reconnect enabled
- Event: `EntityChanged` - UI components subscribe for real-time updates
- Initialized in MainLayout.OnInitializedAsync
**LayerService / JobService / DataInboxService**
- HTTP clients for API communication
- DTOs serialization/deserialization
- Paged result handling
**LayerFilterStateService / DataInboxFilterStateService**
- Persist filter state across navigation
- Singleton services remember search, type, page selections
---
## DATABASE SCHEMA
**Total Migrations:** 47
**Latest Migrations:**
**Migration 47: RecordHistory (Dec 1, 2025)**
- **NEW Table: RecordHistory**
- Tracks all record changes (Created, Updated, Deleted)
- Fields: Id, RecordId, LayerId, ChangedAt, ChangedById, ChangeType, Code, Desc1, ChangedFields, ChangesSummary
- Indexes: IX_RecordHistory_RecordId_ChangedAt, IX_RecordHistory_LayerId_ChangedAt
- Foreign key: RecordHistory.ChangedById → Users.Id
**Migration 46: FixLayerDefaultValues (Nov 20, 2025)**
- Set default value: Layers.IsDeleted = false
**Migration 45: UpdateModel (Nov 19, 2025)**
- Added GETUTCDATE() defaults for all timestamp fields
- Changed foreign key constraints from CASCADE to RESTRICT:
- Layers → Users (CreatedById, ModifiedById)
- Records → Users (CreatedById, ModifiedById)
- Added FK_ProcessSources_Layers_LayerId
**Core Tables:**
- Users (authentication, audit)
- Layers (4 types, soft deletes, parent-child)
- Records (32 Value fields + Desc1, audit, soft deletes)
- RecordHistory (change tracking, field diffs, JSON summaries)
- QueueJobs (job queue, retry logic, status tracking)
- DataInbox (incoming data staging, base64 encoded)
- ProcessSources (layer relationships)
---
## PLUGIN SYSTEM
### Base Classes (Infrastructure/Plugins/)
**BaseDataImporter** (`DiunaBI.Infrastructure/Plugins/BaseDataImporter.cs`)
- Abstract base for all importers
- Methods: ImportAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, PluginManager, GoogleSheetsHelper, GoogleDriveHelper
**BaseDataProcessor** (`DiunaBI.Infrastructure/Plugins/BaseDataProcessor.cs`)
- Abstract base for all processors
- Methods: ProcessAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, PluginManager
**BaseDataExporter** (`DiunaBI.Infrastructure/Plugins/BaseDataExporter.cs`)
- Abstract base for all exporters
- Methods: ExportAsync(layerId, jobId), ValidateConfiguration()
- Access: AppDbContext, GoogleSheetsHelper, GoogleDriveHelper
### Morska Plugin (DiunaBI.Plugins.Morska)
**Importers (4):**
- MorskaStandardImporter - Generic CSV/Excel import
- MorskaD1Importer - D1 data format
- MorskaD3Importer - D3 data format
- MorskaFK2Importer - FK2 data format
**Processors (12):**
- MorskaD6Processor
- MorskaT1R1Processor
- MorskaT1R3Processor
- MorskaT3SingleSourceProcessor
- MorskaT3SourceYearSummaryProcessor
- MorskaT3MultiSourceSummaryProcessor
- MorskaT3MultiSourceYearSummaryProcessor
- MorskaT4R2Processor
- MorskaT4SingleSourceProcessor
- MorskaT5LastValuesProcessor
- MorskaT3MultiSourceCopySelectedCodesProcessor-TO_REMOVE (deprecated)
- MorskaT3MultiSourceCopySelectedCodesYearSummaryProcessor-TO_REMOVE (deprecated)
**Exporters (1):**
- googleSheet.export.cs - Google Sheets export
**Total:** ~6,566 lines of code
### PedrolloPL Plugin (DiunaBI.Plugins.PedrolloPL) - NEW
**Importers (1):**
- **PedrolloPLImportB3** (`DiunaBI.Plugins.PedrolloPL/Importers/PedrolloPLImportB3.cs`)
- Imports B3 data from DataInbox
- Uses L1-D-B3-CODES dictionary layer for region code mapping
- Creates 12 monthly records per region (Value1-Value12)
- Generates Import layers: L{Number}-I-B3-{Year}-{Timestamp}
- Handles base64 JSON data decoding
---
## UI STRUCTURE (DiunaBI.UI.Shared)
### Reorganized Structure (Dec 5, 2025)
**Pages/** (Routable pages with @page directive)
```
Pages/
├── Layers/
│ ├── Index.razor + Index.razor.cs - /layers (list with filters, pagination)
│ └── Details.razor + Details.razor.cs - /layers/{id} (detail, edit, history)
├── Jobs/
│ ├── Index.razor + Index.razor.cs - /jobs (list with filters, real-time updates)
│ └── Details.razor - /jobs/{id} (detail, retry, cancel, real-time)
├── DataInbox/
│ ├── Index.razor + Index.razor.cs - /datainbox (list with filters)
│ └── Details.razor + Details.razor.cs - /datainbox/{id} (detail, base64 decode)
├── Dashboard.razor - /dashboard (user info)
├── Login.razor - /login (Google OAuth)
└── Index.razor - / (redirects to /dashboard)
```
**Components/** (Reusable components, no routes)
```
Components/
├── Layout/
│ ├── MainLayout.razor - Main app layout with drawer, nav menu
│ ├── EmptyLayout.razor - Minimal layout for login page
│ └── Routes.razor - Router configuration
└── Auth/
├── AuthGuard.razor - Authentication guard wrapper
└── LoginCard.razor - Google login button component
```
**Navigation Menu:**
- Dashboard (/dashboard) - User profile
- Layers (/layers) - Layer management
- Data Inbox (/datainbox) - Incoming data review
- Jobs (/jobs) - Job queue monitoring (with real-time status updates)
**Code-Behind Pattern:**
- Complex pages (50+ lines logic): Separate `.razor.cs` files
- Simple pages: Inline `@code` blocks
- Namespaces: `DiunaBI.UI.Shared.Pages.{Feature}`
---
## REAL-TIME FEATURES (SignalR)
### Architecture
**Hub:** `DiunaBI.API/Hubs/EntityChangeHub.cs`
- Endpoint: `/hubs/entitychanges`
- Method: `SendEntityChange(string module, string id, string operation)`
- Broadcasts: `EntityChanged` event to all connected clients
**Interceptor:** `DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- EF Core SaveChangesInterceptor
- Detects: Added, Modified, Deleted entities
- Broadcasts: After successful SaveChanges
- Modules: QueueJobs, Layers, Records, RecordHistory
**UI Service:** `DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
- Singleton initialized in MainLayout
- Auto-reconnect enabled
- Components subscribe: `HubService.EntityChanged += OnEntityChanged`
### Real-time Update Flow
1. User action → API endpoint
2. DbContext.SaveChangesAsync()
3. EntityChangeInterceptor captures changes
4. SignalR broadcast to all clients: `EntityChanged(module, id, operation)`
5. UI components receive event and refresh data
6. StateHasChanged() updates UI
**Example:** Job status changes appear instantly on JobDetailPage and JobListPage
---
## JOB QUEUE SYSTEM
### Components
**Entity:** `QueueJob` (DiunaBI.Domain/Entities/QueueJob.cs)
- JobType: Import, Process
- JobStatus: Pending, Running, Completed, Failed, Retrying
- Priority: 0 = highest priority
- Retry: 30s → 2m → 5m delays, max 5 attempts
**Scheduler:** `JobSchedulerService`
- Reads Administration layer configs (Type=ImportWorker/ProcessWorker)
- Auto-creates jobs based on layer.Records configuration
- API endpoints: `/jobs/schedule/{apiKey}`, `/jobs/schedule/imports/{apiKey}`, `/jobs/schedule/processes/{apiKey}`
**Worker:** `JobWorkerService` (BackgroundService)
- Polls every 10 seconds
- Executes via PluginManager
- Exponential backoff on failures
- Rate limiting for Google API quota
- Real-time status updates via SignalR
**UI:** `Pages/Jobs/`
- Index.razor - Job list with filters, real-time updates
- Details.razor - Job detail with retry/cancel, real-time status
### Job Lifecycle
1. **Creation** - JobSchedulerService or manual via API
2. **Queued** - Status: Pending, sorted by Priority
3. **Execution** - JobWorkerService picks up, Status: Running
4. **Completion** - Status: Completed or Failed
5. **Retry** - On failure, Status: Retrying with exponential backoff
6. **Real-time** - All status changes broadcast via SignalR
**Statistics Endpoint:** `GET /jobs/stats`
```json
{
"pending": 5,
"running": 2,
"completed": 150,
"failed": 3,
"retrying": 1,
"total": 161
}
```
---
## RECENT DEVELOPMENT
**Recent Commits (Dec 1-5, 2025):**
- **193127b:** SignalR for realtime entitychanges (Dec 4)
- **bf2beda, 942da18:** Build fixes (Dec 4)
- **a3fa8f9:** B3 import is working (Dec 4)
- **0e3b393:** WIP: b3 plugin (Dec 3)
- **445c07a:** Morska plugins refactor (Dec 2)
- **3f8e62f:** WIP: queue engine (Dec 2)
- **248106a:** Plugins little refactor (Dec 2)
- **587d4d6:** Pedrollo plugins (Dec 2)
- **e70a8dd:** Remember list filters (Dec 2)
- **89859cd:** Record history is working (Dec 1)
**Development Focus (Last 30 Days):**
1. ✅ Real-time updates (SignalR integration)
2. ✅ Job queue system (background worker, retry logic)
3. ✅ PedrolloPL plugin (B3 importer)
4. ✅ Record history tracking (audit trail)
5. ✅ UI reorganization (feature-based folders)
6. ✅ Plugin refactoring (base classes in Infrastructure)
7. ✅ Filter persistence (UI state management)
**Major Features Added:**
- SignalR real-time entity change notifications
- Background job processing with retry logic
- Record history with field-level diffs
- PedrolloPL B3 data importer
- UI reorganization (Pages/Layers, Pages/Jobs, Pages/DataInbox)
- Filter state persistence across sessions
---
## CONFIGURATION
**Key Settings (appsettings.Development.json):**
- ConnectionStrings:SQLDatabase - SQL Server (localhost:21433, DB: DiunaBI-PedrolloPL)
- JwtSettings:SecurityKey, ExpiryDays (7)
- GoogleAuth:ClientId, RedirectUri
- apiKey, apiUser, apiPass - DataInbox API security
- exportDirectory - Google Drive folder ID for exports
- apiLocalUrl - localhost:5400
- InstanceName - DEV/PROD environment identifier
**Logging Configuration:**
```json
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft.AspNetCore": "Warning",
"Microsoft.EntityFrameworkCore.Database.Command": "Warning",
"Microsoft.EntityFrameworkCore.Infrastructure": "Warning",
"System.Net.Http.HttpClient": "Warning",
"Google.Apis": "Warning",
"DiunaBI.Core.Services.PluginManager": "Information"
}
}
}
```
**CORS Origins:**
- http://localhost:4200 (development)
- https://diuna.bim-it.pl (production)
- https://morska.diunabi.com (production)
---
## PATTERNS & ARCHITECTURE
**Design Patterns:**
- Clean Architecture (Domain → Application → Infrastructure → API)
- Plugin Pattern (dynamic loading, base classes, interface contracts)
- Interceptor Pattern (EF Core SaveChangesInterceptor for change tracking)
- Hub Pattern (SignalR for real-time notifications)
- Service Pattern (dependency injection throughout)
- Repository Pattern (EF Core DbContext as repository)
- Background Service Pattern (JobWorkerService for async processing)
**Tech Versions:**
- .NET 10.0 (upgraded from .NET 8.0)
- EF Core 10.0
- C# 13.0
- Blazor Server (net10.0)
- MAUI (net10.0-ios/android/windows/macos)
- MudBlazor 8.0
**Architectural Decisions:**
- Plugin base classes in Infrastructure for reusability
- SignalR for real-time updates (no polling)
- Background service for job processing (no external scheduler)
- Soft deletes with audit trails
- Foreign key RESTRICT to prevent accidental cascades
- Feature-based folder structure in UI
---
## QUICK REFERENCE
**Database:**
- SQL Server with 47 EF Core migrations
- Auto-timestamps via GETUTCDATE() defaults
- Soft deletes (IsDeleted flag)
- Audit trails (CreatedBy, ModifiedBy, RecordHistory table)
**Build Process:**
- MSBuild target copies plugin DLLs to `bin/Plugins/` after build
- Plugins: DiunaBI.Plugins.Morska.dll, DiunaBI.Plugins.PedrolloPL.dll
**SignalR:**
- Hub: `/hubs/entitychanges`
- Broadcasts: `EntityChanged(module, id, operation)`
- Auto-reconnect enabled in UI
- Real-time updates for QueueJobs, Layers, Records, RecordHistory
**Job Queue:**
- Auto-scheduling from layer configs (Type=ImportWorker/ProcessWorker)
- Background processing every 10 seconds
- Retry logic: 30s → 2m → 5m (max 5 retries)
- Priority-based execution (0 = highest)
- Real-time status updates via SignalR
**Plugins:**
- **Morska:** 4 importers, 12 processors, 1 exporter (~6,566 LOC)
- **PedrolloPL:** 1 importer (B3 data)
- Base classes: BaseDataImporter, BaseDataProcessor, BaseDataExporter
- Dynamic loading from `bin/Plugins/` at startup
**UI Structure:**
- Feature-based folders: Pages/Layers, Pages/Jobs, Pages/DataInbox
- Separate code-behind for complex logic (.razor.cs files)
- Inline @code for simple pages
- Organized components: Layout/, Auth/
- Filter state persistence across navigation
---
## FILE PATHS REFERENCE
**Key Configuration:**
- API: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/appsettings.json`
- API Startup: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Program.cs`
**SignalR:**
- Hub: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Hubs/EntityChangeHub.cs`
- Interceptor: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Interceptors/EntityChangeInterceptor.cs`
- UI Service: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Services/EntityChangeHubService.cs`
**Job System:**
- Controller: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.API/Controllers/JobsController.cs`
- Scheduler: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Services/JobSchedulerService.cs`
- Worker: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Services/JobWorkerService.cs`
- UI Pages: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Pages/Jobs/`
**Plugins:**
- Base Classes: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Plugins/`
- Morska: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Plugins.Morska/`
- PedrolloPL: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Plugins.PedrolloPL/`
**Migrations:**
- Latest: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.Infrastructure/Migrations/20251201165810_RecordHistory.cs`
**UI Components:**
- Pages: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Pages/`
- Components: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Components/`
- Services: `/Users/mz/Projects/Diuna/DiunaBI/DiunaBI.UI.Shared/Services/`

View File

@@ -32,19 +32,20 @@ jobs:
- name: Restore dependencies
working-directory: .
run: |
dotnet restore ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj
dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins
working-directory: .
run: |
set -e
# Build only required projects — skip DiunaBI.UI.Mobile
dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release
dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release
dotnet build ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj --configuration Release
# Build plugin first to avoid missing dependency issues
dotnet build ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj --configuration Release --no-restore
# Skip automatic plugin copy in API build since we only have one plugin restored
dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release --no-restore -p:SkipPluginCopy=true
dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release --no-restore
mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp ${{ matrix.customer.plugin_project }}/bin/Release/net10.0/${{ matrix.customer.plugin_project }}.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true

9
.gitignore vendored
View File

@@ -562,4 +562,11 @@ coverage/
## Temporary folders
##
tmp/
temp/
temp/
##
## LocalDB Development Files
##
DevTools/LocalDB/backups/*.bak
DevTools/LocalDB/backups/*.bacpac
DevTools/LocalDB/data/

11
.vscode/launch.json vendored
View File

@@ -30,17 +30,6 @@
},
"env": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"launchBrowser": {
"enabled": true,
"args": "${auto-detect-url}",
"browser": [
{
"osx": "Google Chrome",
"linux": "chrome",
"windows": "chrome"
}
]
}
}
]

View File

@@ -1,8 +1,10 @@
DECLARE @JustForDebug TINYINT = 0;
-- FIX DATAINBOX!
-- SETUP VARIABLES
DECLARE @Year INT = 2024;
DECLARE @Type NVARCHAR(5) = 'P2';
DECLARE @Year INT = 2025;
DECLARE @Type NVARCHAR(5) = 'B3';
DECLARE @StartDate NVARCHAR(10) = '2025.01.02';
DECLARE @EndDate NVARCHAR(10) = '2026.12.31'
@@ -15,21 +17,21 @@ DECLARE @Name NVARCHAR(50) = CONCAT(
DECLARE @Plugin NVARCHAR(100);
SET @Plugin =
CASE @Type
WHEN 'P2' THEN 'PedrolloPL.Import.P2'
WHEN 'B3' THEN 'PedrolloPL.Import.B3'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxName NVARCHAR(100);
SET @DataInboxName =
CASE @Type
WHEN 'P2' THEN 'P2_2024'
WHEN 'B3' THEN 'P2_2025'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxSource NVARCHAR(100);
SET @DataInboxSource =
CASE @Type
WHEN 'P2' THEN 'Comarch'
WHEN 'B3' THEN 'Comarch'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;

View File

@@ -0,0 +1,71 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-A-PW_P2-', @Year, '-', @CurrentTimestamp
);
DECLARE @SourceNameFilter NVARCHAR(50) = CONCAT('%-A-IW_B3', '-', @Year, '-%');
DECLARE @SourceLayer NVARCHAR(50) = (SELECT TOP 1 [Name] FROM [DiunaBI-PedrolloPL].[dbo].[Layers] WHERE [Name] LIKE @SourceNameFilter);
IF @SourceLayer IS NULL
BEGIN
SELECT 'SourceLayer is NULL' AS Logger;
RETURN;
END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
SELECT @Name AS Name, @SourceLayer AS SourceLayer;
IF @JustForDebug = 1
BEGIN
SELECT 'Just for debug' AS Logger;
RETURN;
END;
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Source', 'B3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'SourceLayer', @SourceLayer, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Type', 'ProcessWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Year', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Plugin', 'PedrolloPL.Process.P2', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Priority', '110', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
--
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'GoogleSheetId', '1jI-3QrlBADm5slEl2Balf29cKmHwkYi4pboaHY-gRqc', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'GoogleSheetTab', 'P2_Export_DiunaBI', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'GoogleSheetRange', 'C32:O48', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

View File

@@ -0,0 +1,63 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
namespace DiunaBI.API.Attributes;
/// <summary>
/// Authorization attribute that validates API key from X-API-Key header.
/// Uses constant-time comparison to prevent timing attacks.
/// </summary>
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]
public class ApiKeyAuthAttribute : Attribute, IAuthorizationFilter
{
private const string ApiKeyHeaderName = "X-API-Key";
public void OnAuthorization(AuthorizationFilterContext context)
{
var configuration = context.HttpContext.RequestServices.GetRequiredService<IConfiguration>();
var logger = context.HttpContext.RequestServices.GetRequiredService<ILogger<ApiKeyAuthAttribute>>();
// Get expected API key from configuration
var expectedApiKey = configuration["apiKey"];
if (string.IsNullOrEmpty(expectedApiKey))
{
logger.LogError("API key not configured in appsettings");
context.Result = new StatusCodeResult(StatusCodes.Status500InternalServerError);
return;
}
// Get API key from header
if (!context.HttpContext.Request.Headers.TryGetValue(ApiKeyHeaderName, out var extractedApiKey))
{
logger.LogWarning("API key missing from request header");
context.Result = new UnauthorizedObjectResult(new { error = "API key is required" });
return;
}
// Constant-time comparison to prevent timing attacks
if (!IsApiKeyValid(extractedApiKey!, expectedApiKey))
{
logger.LogWarning("Invalid API key provided from {RemoteIp}", context.HttpContext.Connection.RemoteIpAddress);
context.Result = new UnauthorizedObjectResult(new { error = "Invalid API key" });
return;
}
// API key is valid - allow the request to proceed
}
/// <summary>
/// Constant-time string comparison to prevent timing attacks.
/// </summary>
private static bool IsApiKeyValid(string providedKey, string expectedKey)
{
if (providedKey == null || expectedKey == null)
return false;
var providedBytes = Encoding.UTF8.GetBytes(providedKey);
var expectedBytes = Encoding.UTF8.GetBytes(expectedKey);
return CryptographicOperations.FixedTimeEquals(providedBytes, expectedBytes);
}
}

View File

@@ -2,6 +2,7 @@ using DiunaBI.API.Services;
using DiunaBI.Domain.Entities;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;
namespace DiunaBI.API.Controllers;
@@ -15,6 +16,7 @@ public class AuthController(
: ControllerBase
{
[HttpPost("apiToken")]
[EnableRateLimiting("auth")]
public async Task<IActionResult> ApiToken([FromBody] string idToken)
{
try

View File

@@ -64,10 +64,20 @@ public class DataInboxController : Controller
}
// check if datainbox.data is base64 encoded value
if (!string.IsNullOrEmpty(dataInbox.Data) && !IsBase64String(dataInbox.Data))
if (!string.IsNullOrEmpty(dataInbox.Data))
{
_logger.LogWarning("DataInbox: Invalid data format - not base64 encoded for source {Source}", dataInbox.Source);
return BadRequest("Invalid data format - not base64 encoded");
// Limit data size to 10MB to prevent DoS
if (dataInbox.Data.Length > 10_000_000)
{
_logger.LogWarning("DataInbox: Data too large for source {Source}, size {Size}", dataInbox.Source, dataInbox.Data.Length);
return BadRequest("Data too large (max 10MB)");
}
if (!IsBase64String(dataInbox.Data))
{
_logger.LogWarning("DataInbox: Invalid data format - not base64 encoded for source {Source}", dataInbox.Source);
return BadRequest("Invalid data format - not base64 encoded");
}
}
dataInbox.Id = Guid.NewGuid();
@@ -87,7 +97,7 @@ public class DataInboxController : Controller
catch (Exception e)
{
_logger.LogError(e, "DataInbox: Insert error for source {Source}, name {Name}", dataInbox.Source, dataInbox.Name);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -97,6 +107,16 @@ public class DataInboxController : Controller
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.DataInbox.AsQueryable();
if (!string.IsNullOrEmpty(search))
@@ -137,7 +157,7 @@ public class DataInboxController : Controller
catch (Exception e)
{
_logger.LogError(e, "GetAll: Error retrieving data inbox items");
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -172,7 +192,7 @@ public class DataInboxController : Controller
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving data inbox item {Id}", id);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}

View File

@@ -1,3 +1,4 @@
using DiunaBI.API.Attributes;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
@@ -35,17 +36,27 @@ public class JobsController : Controller
public async Task<IActionResult> GetAll(
[FromQuery] int start = 0,
[FromQuery] int limit = 50,
[FromQuery] JobStatus? status = null,
[FromQuery] List<JobStatus>? statuses = null,
[FromQuery] JobType? jobType = null,
[FromQuery] Guid? layerId = null)
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.QueueJobs.AsQueryable();
if (status.HasValue)
if (statuses != null && statuses.Count > 0)
{
query = query.Where(j => j.Status == status.Value);
query = query.Where(j => statuses.Contains(j.Status));
}
if (jobType.HasValue)
@@ -60,8 +71,10 @@ public class JobsController : Controller
var totalCount = await query.CountAsync();
// Sort by: CreatedAt DESC (newest first), then Priority ASC (0=highest)
var items = await query
.OrderByDescending(j => j.CreatedAt)
.ThenBy(j => j.Priority)
.Skip(start)
.Take(limit)
.AsNoTracking()
@@ -82,7 +95,7 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "GetAll: Error retrieving jobs");
return BadRequest(ex.ToString());
return BadRequest("An error occurred while retrieving jobs");
}
}
@@ -108,21 +121,16 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "Get: Error retrieving job {JobId}", id);
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule/{apiKey}")]
[AllowAnonymous]
public async Task<IActionResult> ScheduleJobs(string apiKey, [FromQuery] string? nameFilter = null)
[Route("schedule")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleJobs([FromQuery] string? nameFilter = null)
{
if (apiKey != _configuration["apiKey"])
{
_logger.LogWarning("ScheduleJobs: Unauthorized request with apiKey {ApiKey}", apiKey);
return Unauthorized();
}
try
{
var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
@@ -139,21 +147,16 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleJobs: Error scheduling jobs");
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule/imports/{apiKey}")]
[AllowAnonymous]
public async Task<IActionResult> ScheduleImportJobs(string apiKey, [FromQuery] string? nameFilter = null)
[Route("schedule/imports")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleImportJobs([FromQuery] string? nameFilter = null)
{
if (apiKey != _configuration["apiKey"])
{
_logger.LogWarning("ScheduleImportJobs: Unauthorized request with apiKey {ApiKey}", apiKey);
return Unauthorized();
}
try
{
var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
@@ -170,21 +173,16 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleImportJobs: Error scheduling import jobs");
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("schedule/processes/{apiKey}")]
[AllowAnonymous]
public async Task<IActionResult> ScheduleProcessJobs(string apiKey)
[Route("schedule/processes")]
[AllowAnonymous] // Bypass controller-level [Authorize] to allow API key auth
[ApiKeyAuth]
public async Task<IActionResult> ScheduleProcessJobs()
{
if (apiKey != _configuration["apiKey"])
{
_logger.LogWarning("ScheduleProcessJobs: Unauthorized request with apiKey {ApiKey}", apiKey);
return Unauthorized();
}
try
{
var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
@@ -201,7 +199,80 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleProcessJobs: Error scheduling process jobs");
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
// UI-friendly endpoints (JWT auth)
[HttpPost]
[Route("ui/schedule")]
public async Task<IActionResult> ScheduleJobsUI([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
_logger.LogInformation("ScheduleJobsUI: Created {Count} jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleJobsUI: Error scheduling jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("ui/schedule/imports")]
public async Task<IActionResult> ScheduleImportJobsUI([FromQuery] string? nameFilter = null)
{
try
{
var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
_logger.LogInformation("ScheduleImportJobsUI: Created {Count} import jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} import jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleImportJobsUI: Error scheduling import jobs");
return BadRequest("An error occurred processing your request");
}
}
[HttpPost]
[Route("ui/schedule/processes")]
public async Task<IActionResult> ScheduleProcessJobsUI()
{
try
{
var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
_logger.LogInformation("ScheduleProcessJobsUI: Created {Count} process jobs by user {UserId}", jobsCreated, User.Identity?.Name);
return Ok(new
{
success = true,
jobsCreated,
message = $"Successfully scheduled {jobsCreated} process jobs"
});
}
catch (Exception ex)
{
_logger.LogError(ex, "ScheduleProcessJobsUI: Error scheduling process jobs");
return BadRequest("An error occurred processing your request");
}
}
@@ -228,7 +299,8 @@ public class JobsController : Controller
job.Status = JobStatus.Pending;
job.RetryCount = 0;
job.LastError = null;
job.ModifiedAtUtc = DateTime.UtcNow;
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
await _db.SaveChangesAsync();
@@ -243,7 +315,7 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "RetryJob: Error retrying job {JobId}", id);
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -275,7 +347,8 @@ public class JobsController : Controller
job.Status = JobStatus.Failed;
job.LastError = "Cancelled by user";
job.ModifiedAtUtc = DateTime.UtcNow;
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
await _db.SaveChangesAsync();
@@ -290,7 +363,7 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "CancelJob: Error cancelling job {JobId}", id);
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -317,7 +390,7 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "GetStats: Error retrieving job statistics");
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -406,10 +479,9 @@ public class JobsController : Controller
MaxRetries = maxRetries,
Status = JobStatus.Pending,
CreatedAt = DateTime.UtcNow,
CreatedAtUtc = DateTime.UtcNow,
ModifiedAtUtc = DateTime.UtcNow,
CreatedById = Guid.Empty,
ModifiedById = Guid.Empty
ModifiedAt = DateTime.UtcNow,
CreatedById = DiunaBI.Domain.Entities.User.AutoImportUserId,
ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId
};
_db.QueueJobs.Add(job);
@@ -429,7 +501,7 @@ public class JobsController : Controller
catch (Exception ex)
{
_logger.LogError(ex, "CreateJobForLayer: Error creating job for layer {LayerId}", layerId);
return BadRequest(ex.ToString());
return BadRequest("An error occurred processing your request");
}
}
}

View File

@@ -48,6 +48,16 @@ public class LayersController : Controller
{
try
{
// Validate pagination parameters
if (limit <= 0 || limit > 1000)
{
return BadRequest("Limit must be between 1 and 1000");
}
if (start < 0)
{
return BadRequest("Start must be non-negative");
}
var query = _db.Layers.Where(x => !x.IsDeleted);
if (name != null)
@@ -99,7 +109,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "GetAll: Error retrieving layers");
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
@@ -119,7 +129,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving layer {LayerId}", id);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
[HttpGet]
@@ -396,7 +406,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "AutoImport: Process error");
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -808,7 +818,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "CreateRecord: Error creating record in layer {LayerId}", layerId);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -889,7 +899,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "UpdateRecord: Error updating record {RecordId} in layer {LayerId}", recordId, layerId);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -944,7 +954,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "DeleteRecord: Error deleting record {RecordId} from layer {LayerId}", recordId, layerId);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -983,7 +993,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "GetRecordHistory: Error retrieving history for record {RecordId}", recordId);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}
@@ -1033,7 +1043,7 @@ public class LayersController : Controller
catch (Exception e)
{
_logger.LogError(e, "GetDeletedRecords: Error retrieving deleted records for layer {LayerId}", layerId);
return BadRequest(e.ToString());
return BadRequest("An error occurred processing your request");
}
}

View File

@@ -20,7 +20,6 @@
<PackageReference Include="Serilog.AspNetCore" Version="9.0.0" />
<PackageReference Include="Serilog.Enrichers.Environment" Version="3.0.1" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
<PackageReference Include="Serilog.Sinks.Seq" Version="9.0.0" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="10.0.0" />
</ItemGroup>
@@ -37,7 +36,7 @@
</Content>
</ItemGroup>
<Target Name="CopyPlugins" AfterTargets="Build">
<Target Name="CopyPlugins" AfterTargets="Build" Condition="'$(SkipPluginCopy)' != 'true'">
<MSBuild Projects="../DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
<MSBuild Projects="../DiunaBI.Plugins.PedrolloPL/DiunaBI.Plugins.PedrolloPL.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />

View File

@@ -22,9 +22,9 @@ COPY . .
WORKDIR /${PLUGIN_PROJECT}
RUN dotnet build -c Release
# Build and publish API
# Build and publish API (skip automatic plugin copy since we handle it manually)
WORKDIR /DiunaBI.API
RUN dotnet publish -c Release -o /app/publish --no-restore
RUN dotnet publish -c Release -o /app/publish --no-restore -p:SkipPluginCopy=true
# Copy plugin DLL to publish output
RUN mkdir -p /app/publish/Plugins && \

View File

@@ -0,0 +1,15 @@
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.SignalR;
namespace DiunaBI.API.Hubs;
/// <summary>
/// SignalR hub for broadcasting entity change notifications to authenticated clients.
/// Clients can only listen - broadcasting is done server-side by EntityChangeInterceptor.
/// </summary>
[Authorize]
public class EntityChangeHub : Hub
{
// No public methods - clients can only listen for "EntityChanged" events
// Broadcasting is handled server-side by EntityChangeInterceptor via IHubContext
}

View File

@@ -1,11 +1,15 @@
using Microsoft.AspNetCore.Authentication.JwtBearer;
using Microsoft.AspNetCore.RateLimiting;
using Microsoft.EntityFrameworkCore;
using Microsoft.IdentityModel.Tokens;
using System.IdentityModel.Tokens.Jwt;
using System.Reflection;
using System.Text;
using System.Threading.RateLimiting;
using DiunaBI.API.Hubs;
using DiunaBI.API.Services;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Interceptors;
using DiunaBI.Infrastructure.Services;
using Google.Apis.Sheets.v4;
using Serilog;
@@ -29,10 +33,22 @@ if (builder.Environment.IsProduction())
var connectionString = builder.Configuration.GetConnectionString("SQLDatabase");
builder.Services.AddDbContext<AppDbContext>(x =>
// Register EntityChangeInterceptor
builder.Services.AddSingleton<EntityChangeInterceptor>();
builder.Services.AddDbContext<AppDbContext>((serviceProvider, options) =>
{
x.UseSqlServer(connectionString, sqlOptions => sqlOptions.MigrationsAssembly("DiunaBI.Infrastructure"));
x.EnableSensitiveDataLogging();
options.UseSqlServer(connectionString, sqlOptions => sqlOptions.MigrationsAssembly("DiunaBI.Infrastructure"));
// Only log SQL parameters in development (may contain sensitive data)
if (builder.Environment.IsDevelopment())
{
options.EnableSensitiveDataLogging();
}
// Add EntityChangeInterceptor
var interceptor = serviceProvider.GetRequiredService<EntityChangeInterceptor>();
options.AddInterceptors(interceptor);
});
builder.Services.AddCors(options =>
@@ -58,6 +74,44 @@ builder.Services.AddCors(options =>
builder.Services.AddControllers();
// Rate Limiting
builder.Services.AddRateLimiter(options =>
{
// Global API rate limit
options.AddFixedWindowLimiter("api", config =>
{
config.PermitLimit = 100;
config.Window = TimeSpan.FromMinutes(1);
config.QueueProcessingOrder = System.Threading.RateLimiting.QueueProcessingOrder.OldestFirst;
config.QueueLimit = 0; // No queueing
});
// Strict limit for authentication endpoint
options.AddFixedWindowLimiter("auth", config =>
{
config.PermitLimit = 10;
config.Window = TimeSpan.FromMinutes(1);
config.QueueProcessingOrder = System.Threading.RateLimiting.QueueProcessingOrder.OldestFirst;
config.QueueLimit = 0;
});
// Rejection response
options.OnRejected = async (context, token) =>
{
context.HttpContext.Response.StatusCode = 429; // Too Many Requests
await context.HttpContext.Response.WriteAsJsonAsync(new
{
error = "Too many requests. Please try again later.",
retryAfter = context.Lease.TryGetMetadata(MetadataName.RetryAfter, out var retryAfter)
? (double?)retryAfter.TotalSeconds
: (double?)null
}, cancellationToken: token);
};
});
// SignalR
builder.Services.AddSignalR();
builder.Services.AddAuthentication(options =>
{
options.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme;
@@ -67,10 +121,12 @@ builder.Services.AddAuthentication(options =>
{
options.TokenValidationParameters = new TokenValidationParameters
{
ValidateIssuer = false,
ValidateAudience = false,
ValidateIssuer = true,
ValidateAudience = true,
ValidateLifetime = true,
ValidateIssuerSigningKey = true,
ValidIssuer = builder.Configuration["JwtSettings:Issuer"],
ValidAudience = builder.Configuration["JwtSettings:Audience"],
IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(builder.Configuration["JwtSettings:SecurityKey"]!))
};
});
@@ -183,6 +239,18 @@ pluginManager.LoadPluginsFromDirectory(pluginsPath);
app.UseCors("CORSPolicy");
// Security Headers
app.Use(async (context, next) =>
{
context.Response.Headers.Append("X-Content-Type-Options", "nosniff");
context.Response.Headers.Append("X-Frame-Options", "DENY");
context.Response.Headers.Append("X-XSS-Protection", "1; mode=block");
context.Response.Headers.Append("Referrer-Policy", "strict-origin-when-cross-origin");
await next();
});
app.UseRateLimiter();
app.UseAuthentication();
app.UseAuthorization();
@@ -234,16 +302,15 @@ app.Use(async (context, next) =>
logger.LogError(ex, "❌ Failed to extract UserId from JWT token");
}
}
else
{
logger.LogWarning("❌ No valid Bearer token found");
}
await next(context);
});
app.MapControllers();
// SignalR Hub - Requires JWT authentication
app.MapHub<EntityChangeHub>("/hubs/entitychanges").RequireAuthorization();
app.MapGet("/health", () => Results.Ok(new { status = "OK", timestamp = DateTime.UtcNow }))
.AllowAnonymous();

View File

@@ -36,7 +36,7 @@ public class GoogleAuthService(AppDbContext context, IConfiguration configuratio
if (user == null)
{
_logger.LogError("User not found in DiunaBI database: {Email}", payload.Email);
return (false, null, "User not found in DiunaBI database");
return (false, null, "Authentication failed");
}
user.UserName = payload.Name;

View File

@@ -52,7 +52,7 @@ public class JwtTokenService(IConfiguration configuration, ILogger<JwtTokenServi
try
{
var jwtSettings = _configuration.GetSection("JwtSettings");
var secretKey = jwtSettings["SecretKey"];
var secretKey = jwtSettings["SecurityKey"];
var issuer = jwtSettings["Issuer"];
var audience = jwtSettings["Audience"];

View File

@@ -12,6 +12,7 @@ public class QueueJob
public JobType JobType { get; set; }
public int Priority { get; set; } = 0; // 0 = highest priority
public DateTime CreatedAt { get; set; } = DateTime.UtcNow;
public DateTime ModifiedAt { get; set; } = DateTime.UtcNow;
public int RetryCount { get; set; } = 0;
public int MaxRetries { get; set; } = 5;
public JobStatus Status { get; set; } = JobStatus.Pending;
@@ -19,9 +20,7 @@ public class QueueJob
public DateTime? LastAttemptAt { get; set; }
public DateTime? CompletedAt { get; set; }
public Guid CreatedById { get; set; }
public DateTime CreatedAtUtc { get; set; } = DateTime.UtcNow;
public Guid ModifiedById { get; set; }
public DateTime ModifiedAtUtc { get; set; } = DateTime.UtcNow;
}
public enum JobType

View File

@@ -5,6 +5,11 @@ namespace DiunaBI.Domain.Entities;
public class User
{
/// <summary>
/// System user ID for automated operations (imports, scheduled jobs, etc.)
/// </summary>
public static readonly Guid AutoImportUserId = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d");
#region Properties
public Guid Id { get; init; }
public string? Email { get; init; }

View File

@@ -136,9 +136,8 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
modelBuilder.Entity<QueueJob>().Property(x => x.LastAttemptAt);
modelBuilder.Entity<QueueJob>().Property(x => x.CompletedAt);
modelBuilder.Entity<QueueJob>().Property(x => x.CreatedById).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.CreatedAtUtc).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedById).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedAtUtc).IsRequired();
modelBuilder.Entity<QueueJob>().Property(x => x.ModifiedAt).IsRequired();
// Configure automatic timestamps for entities with CreatedAt/ModifiedAt
ConfigureTimestamps(modelBuilder);

View File

@@ -22,8 +22,10 @@
<PackageReference Include="Microsoft.EntityFrameworkCore.SqlServer" Version="10.0.0" />
<PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3525" />
<PackageReference Include="Google.Apis.Drive.v3" Version="1.68.0.3490" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<FrameworkReference Include="Microsoft.AspNetCore.App" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,201 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
namespace DiunaBI.Infrastructure.Interceptors;
public class EntityChangeInterceptor : SaveChangesInterceptor
{
private readonly object? _hubContext;
private readonly ILogger<EntityChangeInterceptor>? _logger;
private readonly List<(string Module, string Id, string Operation)> _pendingChanges = new();
public EntityChangeInterceptor(IServiceProvider serviceProvider)
{
_logger = serviceProvider.GetService(typeof(ILogger<EntityChangeInterceptor>)) as ILogger<EntityChangeInterceptor>;
// Try to get hub context - it may not be registered in some scenarios (e.g., migrations)
try
{
var hubType = Type.GetType("DiunaBI.API.Hubs.EntityChangeHub, DiunaBI.API");
if (hubType != null)
{
var hubContextType = typeof(IHubContext<>).MakeGenericType(hubType);
_hubContext = serviceProvider.GetService(hubContextType);
if (_hubContext != null)
{
_logger?.LogInformation("✅ EntityChangeInterceptor: Hub context initialized");
Console.WriteLine("✅ EntityChangeInterceptor: Hub context initialized");
}
else
{
_logger?.LogWarning("⚠️ EntityChangeInterceptor: Hub context is null");
Console.WriteLine("⚠️ EntityChangeInterceptor: Hub context is null");
}
}
else
{
_logger?.LogWarning("⚠️ EntityChangeInterceptor: Hub type not found");
Console.WriteLine("⚠️ EntityChangeInterceptor: Hub type not found");
}
}
catch (Exception ex)
{
_logger?.LogError(ex, "❌ EntityChangeInterceptor: Failed to initialize hub context");
Console.WriteLine($"❌ EntityChangeInterceptor: Failed to initialize hub context: {ex.Message}");
_hubContext = null;
}
}
public override ValueTask<InterceptionResult<int>> SavingChangesAsync(
DbContextEventData eventData,
InterceptionResult<int> result,
CancellationToken cancellationToken = default)
{
_pendingChanges.Clear();
Console.WriteLine($"🔍 EntityChangeInterceptor.SavingChangesAsync called. HubContext null? {_hubContext == null}, Context null? {eventData.Context == null}");
if (_hubContext != null && eventData.Context != null)
{
// Capture changes BEFORE save
var entries = eventData.Context.ChangeTracker.Entries().ToList();
Console.WriteLine($"🔍 Found {entries.Count} total entries in ChangeTracker");
foreach (var entry in entries)
{
Console.WriteLine($"🔍 Entry: {entry.Metadata.ClrType.Name}, State: {entry.State}");
if (entry.State == EntityState.Added ||
entry.State == EntityState.Modified ||
entry.State == EntityState.Deleted)
{
var module = entry.Metadata.GetTableName() ?? entry.Metadata.ClrType.Name;
var id = GetEntityId(entry);
var operation = entry.State switch
{
EntityState.Added => "created",
EntityState.Modified => "updated",
EntityState.Deleted => "deleted",
_ => "unknown"
};
Console.WriteLine($"🔍 Detected change: {module} {id} {operation}");
if (id != null)
{
_pendingChanges.Add((module, id, operation));
Console.WriteLine($"✅ Added to pending changes: {module} {id} {operation}");
}
else
{
Console.WriteLine($"⚠️ Skipped (id is null): {module} {operation}");
}
}
}
Console.WriteLine($"🔍 Total pending changes: {_pendingChanges.Count}");
}
return base.SavingChangesAsync(eventData, result, cancellationToken);
}
public override async ValueTask<int> SavedChangesAsync(
SaveChangesCompletedEventData eventData,
int result,
CancellationToken cancellationToken = default)
{
// Broadcast changes AFTER successful save
if (_hubContext != null && result > 0 && _pendingChanges.Any())
{
_logger?.LogInformation("📤 Broadcasting {Count} entity changes via SignalR", _pendingChanges.Count);
Console.WriteLine($"📤 Broadcasting {_pendingChanges.Count} entity changes via SignalR");
foreach (var (module, id, operation) in _pendingChanges)
{
try
{
Console.WriteLine($"📤 Broadcasting: {module} {id} {operation}");
// Use reflection to call hub methods since we can't reference the API project
var clientsProperty = _hubContext.GetType().GetProperty("Clients");
Console.WriteLine($" 🔍 Clients property: {clientsProperty != null}");
if (clientsProperty != null)
{
var clients = clientsProperty.GetValue(_hubContext);
Console.WriteLine($" 🔍 Clients value: {clients != null}, Type: {clients?.GetType().Name}");
if (clients != null)
{
var allProperty = clients.GetType().GetProperty("All");
Console.WriteLine($" 🔍 All property: {allProperty != null}");
if (allProperty != null)
{
var allClients = allProperty.GetValue(clients);
Console.WriteLine($" 🔍 AllClients value: {allClients != null}, Type: {allClients?.GetType().Name}");
if (allClients != null)
{
// SendAsync is an extension method, so we need to find it differently
// Look for the IClientProxy interface which has SendCoreAsync
var sendCoreAsyncMethod = allClients.GetType().GetMethod("SendCoreAsync");
Console.WriteLine($" 🔍 SendCoreAsync method found: {sendCoreAsyncMethod != null}");
if (sendCoreAsyncMethod != null)
{
// SendCoreAsync takes (string method, object?[] args, CancellationToken cancellationToken)
var task = sendCoreAsyncMethod.Invoke(allClients, new object[]
{
"EntityChanged",
new object[] { new { module, id, operation } },
cancellationToken
}) as Task;
Console.WriteLine($" 🔍 Task created: {task != null}");
if (task != null)
{
await task;
Console.WriteLine($"✅ Broadcast successful: {module} {id} {operation}");
}
else
{
Console.WriteLine($"❌ Task is null after invoke");
}
}
else
{
Console.WriteLine($"❌ SendCoreAsync method not found");
}
}
}
}
}
}
catch (Exception ex)
{
_logger?.LogError(ex, "❌ Failed to broadcast entity change");
Console.WriteLine($"❌ Failed to broadcast: {ex.Message}");
Console.WriteLine($"❌ Stack trace: {ex.StackTrace}");
}
}
}
_pendingChanges.Clear();
return await base.SavedChangesAsync(eventData, result, cancellationToken);
}
private static string? GetEntityId(Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry entry)
{
var keyProperty = entry.Metadata.FindPrimaryKey()?.Properties.FirstOrDefault();
if (keyProperty == null)
return null;
var value = entry.Property(keyProperty.Name).CurrentValue;
return value?.ToString();
}
}

View File

@@ -0,0 +1,489 @@
// <auto-generated />
using System;
using DiunaBI.Infrastructure.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
#nullable disable
namespace DiunaBI.Infrastructure.Migrations
{
[DbContext(typeof(AppDbContext))]
[Migration("20251208205202_RemoveQueueJobDuplicateUTCFields")]
partial class RemoveQueueJobDuplicateUTCFields
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "10.0.0")
.HasAnnotation("Relational:MaxIdentifierLength", 128);
SqlServerModelBuilderExtensions.UseIdentityColumns(modelBuilder);
modelBuilder.Entity("DiunaBI.Domain.Entities.DataInbox", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("CreatedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<string>("Data")
.IsRequired()
.HasColumnType("nvarchar(max)");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("nvarchar(50)");
b.Property<string>("Source")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("nvarchar(50)");
b.HasKey("Id");
b.ToTable("DataInbox");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("CreatedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<Guid>("CreatedById")
.HasColumnType("uniqueidentifier");
b.Property<bool>("IsCancelled")
.ValueGeneratedOnAdd()
.HasColumnType("bit")
.HasDefaultValue(false);
b.Property<bool>("IsDeleted")
.ValueGeneratedOnAdd()
.HasColumnType("bit")
.HasDefaultValue(false);
b.Property<DateTime>("ModifiedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<Guid>("ModifiedById")
.HasColumnType("uniqueidentifier");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("nvarchar(50)");
b.Property<int>("Number")
.HasColumnType("int");
b.Property<Guid?>("ParentId")
.HasColumnType("uniqueidentifier");
b.Property<int>("Type")
.HasColumnType("int");
b.HasKey("Id");
b.HasIndex("CreatedById");
b.HasIndex("ModifiedById");
b.ToTable("Layers");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.ProcessSource", b =>
{
b.Property<Guid>("LayerId")
.HasColumnType("uniqueidentifier");
b.Property<Guid>("SourceId")
.HasColumnType("uniqueidentifier");
b.HasKey("LayerId", "SourceId");
b.HasIndex("SourceId");
b.ToTable("ProcessSources");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.QueueJob", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime?>("CompletedAt")
.HasColumnType("datetime2");
b.Property<DateTime>("CreatedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<Guid>("CreatedById")
.HasColumnType("uniqueidentifier");
b.Property<int>("JobType")
.HasColumnType("int");
b.Property<DateTime?>("LastAttemptAt")
.HasColumnType("datetime2");
b.Property<string>("LastError")
.HasMaxLength(1000)
.HasColumnType("nvarchar(1000)");
b.Property<Guid>("LayerId")
.HasColumnType("uniqueidentifier");
b.Property<string>("LayerName")
.IsRequired()
.HasMaxLength(200)
.HasColumnType("nvarchar(200)");
b.Property<int>("MaxRetries")
.HasColumnType("int");
b.Property<DateTime>("ModifiedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<Guid>("ModifiedById")
.HasColumnType("uniqueidentifier");
b.Property<string>("PluginName")
.IsRequired()
.HasMaxLength(100)
.HasColumnType("nvarchar(100)");
b.Property<int>("Priority")
.HasColumnType("int");
b.Property<int>("RetryCount")
.HasColumnType("int");
b.Property<int>("Status")
.HasColumnType("int");
b.HasKey("Id");
b.ToTable("QueueJobs");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Record", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<string>("Code")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("nvarchar(50)");
b.Property<DateTime>("CreatedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<Guid>("CreatedById")
.HasColumnType("uniqueidentifier");
b.Property<string>("Desc1")
.HasMaxLength(10000)
.HasColumnType("nvarchar(max)");
b.Property<bool>("IsDeleted")
.HasColumnType("bit");
b.Property<Guid>("LayerId")
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("ModifiedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<Guid>("ModifiedById")
.HasColumnType("uniqueidentifier");
b.Property<double?>("Value1")
.HasColumnType("float");
b.Property<double?>("Value10")
.HasColumnType("float");
b.Property<double?>("Value11")
.HasColumnType("float");
b.Property<double?>("Value12")
.HasColumnType("float");
b.Property<double?>("Value13")
.HasColumnType("float");
b.Property<double?>("Value14")
.HasColumnType("float");
b.Property<double?>("Value15")
.HasColumnType("float");
b.Property<double?>("Value16")
.HasColumnType("float");
b.Property<double?>("Value17")
.HasColumnType("float");
b.Property<double?>("Value18")
.HasColumnType("float");
b.Property<double?>("Value19")
.HasColumnType("float");
b.Property<double?>("Value2")
.HasColumnType("float");
b.Property<double?>("Value20")
.HasColumnType("float");
b.Property<double?>("Value21")
.HasColumnType("float");
b.Property<double?>("Value22")
.HasColumnType("float");
b.Property<double?>("Value23")
.HasColumnType("float");
b.Property<double?>("Value24")
.HasColumnType("float");
b.Property<double?>("Value25")
.HasColumnType("float");
b.Property<double?>("Value26")
.HasColumnType("float");
b.Property<double?>("Value27")
.HasColumnType("float");
b.Property<double?>("Value28")
.HasColumnType("float");
b.Property<double?>("Value29")
.HasColumnType("float");
b.Property<double?>("Value3")
.HasColumnType("float");
b.Property<double?>("Value30")
.HasColumnType("float");
b.Property<double?>("Value31")
.HasColumnType("float");
b.Property<double?>("Value32")
.HasColumnType("float");
b.Property<double?>("Value4")
.HasColumnType("float");
b.Property<double?>("Value5")
.HasColumnType("float");
b.Property<double?>("Value6")
.HasColumnType("float");
b.Property<double?>("Value7")
.HasColumnType("float");
b.Property<double?>("Value8")
.HasColumnType("float");
b.Property<double?>("Value9")
.HasColumnType("float");
b.HasKey("Id");
b.HasIndex("CreatedById");
b.HasIndex("LayerId");
b.HasIndex("ModifiedById");
b.ToTable("Records");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.RecordHistory", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<int>("ChangeType")
.HasColumnType("int");
b.Property<DateTime>("ChangedAt")
.HasColumnType("datetime2");
b.Property<Guid>("ChangedById")
.HasColumnType("uniqueidentifier");
b.Property<string>("ChangedFields")
.HasMaxLength(200)
.HasColumnType("nvarchar(200)");
b.Property<string>("ChangesSummary")
.HasMaxLength(4000)
.HasColumnType("nvarchar(4000)");
b.Property<string>("Code")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("nvarchar(50)");
b.Property<string>("Desc1")
.HasMaxLength(10000)
.HasColumnType("nvarchar(max)");
b.Property<Guid>("LayerId")
.HasColumnType("uniqueidentifier");
b.Property<Guid>("RecordId")
.HasColumnType("uniqueidentifier");
b.HasKey("Id");
b.HasIndex("ChangedById");
b.HasIndex("LayerId", "ChangedAt");
b.HasIndex("RecordId", "ChangedAt");
b.ToTable("RecordHistory");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.User", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uniqueidentifier");
b.Property<DateTime>("CreatedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<string>("Email")
.HasMaxLength(50)
.HasColumnType("nvarchar(50)");
b.Property<string>("UserName")
.HasMaxLength(50)
.HasColumnType("nvarchar(50)");
b.HasKey("Id");
b.ToTable("Users");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b =>
{
b.HasOne("DiunaBI.Domain.Entities.User", "CreatedBy")
.WithMany()
.HasForeignKey("CreatedById")
.OnDelete(DeleteBehavior.Restrict)
.IsRequired();
b.HasOne("DiunaBI.Domain.Entities.User", "ModifiedBy")
.WithMany()
.HasForeignKey("ModifiedById")
.OnDelete(DeleteBehavior.Restrict)
.IsRequired();
b.Navigation("CreatedBy");
b.Navigation("ModifiedBy");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.ProcessSource", b =>
{
b.HasOne("DiunaBI.Domain.Entities.Layer", null)
.WithMany()
.HasForeignKey("LayerId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DiunaBI.Domain.Entities.Layer", "Source")
.WithMany()
.HasForeignKey("SourceId")
.OnDelete(DeleteBehavior.Restrict)
.IsRequired();
b.Navigation("Source");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Record", b =>
{
b.HasOne("DiunaBI.Domain.Entities.User", "CreatedBy")
.WithMany()
.HasForeignKey("CreatedById")
.OnDelete(DeleteBehavior.Restrict)
.IsRequired();
b.HasOne("DiunaBI.Domain.Entities.Layer", null)
.WithMany("Records")
.HasForeignKey("LayerId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DiunaBI.Domain.Entities.User", "ModifiedBy")
.WithMany()
.HasForeignKey("ModifiedById")
.OnDelete(DeleteBehavior.Restrict)
.IsRequired();
b.Navigation("CreatedBy");
b.Navigation("ModifiedBy");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.RecordHistory", b =>
{
b.HasOne("DiunaBI.Domain.Entities.User", "ChangedBy")
.WithMany()
.HasForeignKey("ChangedById")
.OnDelete(DeleteBehavior.Restrict)
.IsRequired();
b.Navigation("ChangedBy");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b =>
{
b.Navigation("Records");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,52 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DiunaBI.Infrastructure.Migrations
{
/// <inheritdoc />
public partial class RemoveQueueJobDuplicateUTCFields : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "CreatedAtUtc",
table: "QueueJobs");
migrationBuilder.DropColumn(
name: "ModifiedAtUtc",
table: "QueueJobs");
migrationBuilder.AddColumn<DateTime>(
name: "ModifiedAt",
table: "QueueJobs",
type: "datetime2",
nullable: false,
defaultValueSql: "GETUTCDATE()");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "ModifiedAt",
table: "QueueJobs");
migrationBuilder.AddColumn<DateTime>(
name: "CreatedAtUtc",
table: "QueueJobs",
type: "datetime2",
nullable: false,
defaultValue: new DateTime(1, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified));
migrationBuilder.AddColumn<DateTime>(
name: "ModifiedAtUtc",
table: "QueueJobs",
type: "datetime2",
nullable: false,
defaultValue: new DateTime(1, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified));
}
}
}

View File

@@ -49,7 +49,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasKey("Id");
b.ToTable("DataInbox", (string)null);
b.ToTable("DataInbox");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b =>
@@ -104,7 +104,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("ModifiedById");
b.ToTable("Layers", (string)null);
b.ToTable("Layers");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.ProcessSource", b =>
@@ -119,7 +119,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("SourceId");
b.ToTable("ProcessSources", (string)null);
b.ToTable("ProcessSources");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.QueueJob", b =>
@@ -136,9 +136,6 @@ namespace DiunaBI.Infrastructure.Migrations
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<DateTime>("CreatedAtUtc")
.HasColumnType("datetime2");
b.Property<Guid>("CreatedById")
.HasColumnType("uniqueidentifier");
@@ -163,8 +160,10 @@ namespace DiunaBI.Infrastructure.Migrations
b.Property<int>("MaxRetries")
.HasColumnType("int");
b.Property<DateTime>("ModifiedAtUtc")
.HasColumnType("datetime2");
b.Property<DateTime>("ModifiedAt")
.ValueGeneratedOnAdd()
.HasColumnType("datetime2")
.HasDefaultValueSql("GETUTCDATE()");
b.Property<Guid>("ModifiedById")
.HasColumnType("uniqueidentifier");
@@ -185,7 +184,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasKey("Id");
b.ToTable("QueueJobs", (string)null);
b.ToTable("QueueJobs");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Record", b =>
@@ -329,7 +328,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("ModifiedById");
b.ToTable("Records", (string)null);
b.ToTable("Records");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.RecordHistory", b =>
@@ -378,7 +377,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("RecordId", "ChangedAt");
b.ToTable("RecordHistory", (string)null);
b.ToTable("RecordHistory");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.User", b =>
@@ -402,7 +401,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasKey("Id");
b.ToTable("Users", (string)null);
b.ToTable("Users");
});
modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b =>

View File

@@ -40,6 +40,7 @@ public class JobSchedulerService
_logger.LogInformation("JobScheduler: Found {Count} import workers to schedule", importWorkers.Count);
var jobsCreated = 0;
var scheduledLayerIds = new HashSet<Guid>(); // Track LayerIds scheduled in this batch
foreach (var worker in importWorkers)
{
@@ -61,7 +62,15 @@ public class JobSchedulerService
var maxRetriesStr = worker.Records?.FirstOrDefault(r => r.Code == "MaxRetries")?.Desc1;
var maxRetries = int.TryParse(maxRetriesStr, out var mr) ? mr : 3;
// Check if there's already a pending/running job for this layer
// Check in-memory: already scheduled in this batch?
if (scheduledLayerIds.Contains(worker.Id))
{
_logger.LogDebug("JobScheduler: Job already scheduled in this batch for {LayerName} ({LayerId})",
worker.Name, worker.Id);
continue;
}
// Check if there's already a pending/running job for this layer in database
var existingJob = await _db.QueueJobs
.Where(j => j.LayerId == worker.Id &&
(j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
@@ -85,13 +94,13 @@ public class JobSchedulerService
MaxRetries = maxRetries,
Status = JobStatus.Pending,
CreatedAt = DateTime.UtcNow,
CreatedAtUtc = DateTime.UtcNow,
ModifiedAtUtc = DateTime.UtcNow,
CreatedById = Guid.Empty, // System user
ModifiedById = Guid.Empty
ModifiedAt = DateTime.UtcNow,
CreatedById = DiunaBI.Domain.Entities.User.AutoImportUserId,
ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId
};
_db.QueueJobs.Add(job);
scheduledLayerIds.Add(worker.Id); // Track that we've scheduled this layer
jobsCreated++;
_logger.LogInformation("JobScheduler: Created import job for {LayerName} ({LayerId}) with priority {Priority}",
@@ -130,6 +139,7 @@ public class JobSchedulerService
_logger.LogInformation("JobScheduler: Found {Count} process workers to schedule", processWorkers.Count);
var jobsCreated = 0;
var scheduledLayerIds = new HashSet<Guid>(); // Track LayerIds scheduled in this batch
foreach (var worker in processWorkers)
{
@@ -151,7 +161,15 @@ public class JobSchedulerService
var maxRetriesStr = worker.Records?.FirstOrDefault(r => r.Code == "MaxRetries")?.Desc1;
var maxRetries = int.TryParse(maxRetriesStr, out var mr) ? mr : 3;
// Check if there's already a pending/running job for this layer
// Check in-memory: already scheduled in this batch?
if (scheduledLayerIds.Contains(worker.Id))
{
_logger.LogDebug("JobScheduler: Job already scheduled in this batch for {LayerName} ({LayerId})",
worker.Name, worker.Id);
continue;
}
// Check if there's already a pending/running job for this layer in database
var existingJob = await _db.QueueJobs
.Where(j => j.LayerId == worker.Id &&
(j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
@@ -175,13 +193,13 @@ public class JobSchedulerService
MaxRetries = maxRetries,
Status = JobStatus.Pending,
CreatedAt = DateTime.UtcNow,
CreatedAtUtc = DateTime.UtcNow,
ModifiedAtUtc = DateTime.UtcNow,
CreatedById = Guid.Empty,
ModifiedById = Guid.Empty
ModifiedAt = DateTime.UtcNow,
CreatedById = DiunaBI.Domain.Entities.User.AutoImportUserId,
ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId
};
_db.QueueJobs.Add(job);
scheduledLayerIds.Add(worker.Id); // Track that we've scheduled this layer
jobsCreated++;
_logger.LogInformation("JobScheduler: Created process job for {LayerName} ({LayerId}) with priority {Priority}",

View File

@@ -11,7 +11,7 @@ public class JobWorkerService : BackgroundService
{
private readonly IServiceProvider _serviceProvider;
private readonly ILogger<JobWorkerService> _logger;
private readonly TimeSpan _pollInterval = TimeSpan.FromSeconds(10);
private readonly TimeSpan _pollInterval = TimeSpan.FromSeconds(5);
private readonly TimeSpan _rateLimitDelay = TimeSpan.FromSeconds(5);
public JobWorkerService(IServiceProvider serviceProvider, ILogger<JobWorkerService> logger)
@@ -60,13 +60,14 @@ public class JobWorkerService : BackgroundService
return;
}
_logger.LogInformation("JobWorker: Processing job {JobId} - {LayerName} ({JobType})",
job.Id, job.LayerName, job.JobType);
_logger.LogInformation("JobWorker: Processing job {JobId} - {LayerName} ({JobType}) - Current RetryCount: {RetryCount}, MaxRetries: {MaxRetries}, Status: {Status}",
job.Id, job.LayerName, job.JobType, job.RetryCount, job.MaxRetries, job.Status);
// Mark job as running
job.Status = JobStatus.Running;
job.LastAttemptAt = DateTime.UtcNow;
job.ModifiedAtUtc = DateTime.UtcNow;
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
await db.SaveChangesAsync(stoppingToken);
try
@@ -114,7 +115,8 @@ public class JobWorkerService : BackgroundService
job.Status = JobStatus.Completed;
job.CompletedAt = DateTime.UtcNow;
job.LastError = null;
job.ModifiedAtUtc = DateTime.UtcNow;
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
_logger.LogInformation("JobWorker: Job {JobId} completed successfully", job.Id);
@@ -129,29 +131,37 @@ public class JobWorkerService : BackgroundService
{
_logger.LogError(ex, "JobWorker: Job {JobId} failed - {LayerName}", job.Id, job.LayerName);
job.RetryCount++;
job.LastError = ex.Message;
job.ModifiedAtUtc = DateTime.UtcNow;
// Capture full error details including inner exceptions
job.LastError = GetFullErrorMessage(ex);
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
if (job.RetryCount >= job.MaxRetries)
{
job.Status = JobStatus.Failed;
_logger.LogWarning("JobWorker: Job {JobId} marked as Failed after {RetryCount} attempts",
job.Id, job.RetryCount);
_logger.LogWarning("JobWorker: Job {JobId} marked as Failed - no more retries available (RetryCount: {RetryCount}, MaxRetries: {MaxRetries})",
job.Id, job.RetryCount, job.MaxRetries);
}
else
{
job.Status = JobStatus.Retrying;
// Exponential backoff: wait before retrying based on attempt number
var backoffDelay = GetBackoffDelay(job.RetryCount);
_logger.LogInformation("JobWorker: Job {JobId} will retry in {Delay} (attempt {RetryCount}/{MaxRetries})",
job.Id, backoffDelay, job.RetryCount, job.MaxRetries);
// Wait before marking as pending again
await Task.Delay(backoffDelay, stoppingToken);
job.Status = JobStatus.Pending;
// Exponential backoff: wait before retrying
var backoffDelay = GetBackoffDelay(job.RetryCount + 1);
_logger.LogInformation("JobWorker: Job {JobId} will retry in {Delay} (retry {RetryNumber} of {MaxRetries})",
job.Id, backoffDelay, job.RetryCount + 1, job.MaxRetries);
// Save current state with error message
await db.SaveChangesAsync(stoppingToken);
// Wait before next attempt
await Task.Delay(backoffDelay, stoppingToken);
// Increment retry count for next attempt
job.RetryCount++;
job.ModifiedAt = DateTime.UtcNow;
job.ModifiedById = DiunaBI.Domain.Entities.User.AutoImportUserId;
}
}
finally
@@ -175,4 +185,18 @@ public class JobWorkerService : BackgroundService
_ => TimeSpan.FromMinutes(5) // 3rd+ retry: 5 minutes
};
}
/// <summary>
/// Flattens an exception chain into a single readable string by walking
/// InnerException links and joining each message with " → ".
/// </summary>
private static string GetFullErrorMessage(Exception ex)
{
    var parts = new List<string>();
    for (var current = ex; current != null; current = current.InnerException)
    {
        parts.Add(current.Message);
    }
    return string.Join(" → ", parts);
}
}

View File

@@ -7,12 +7,12 @@ using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.PedrolloPL.Importers;
public class PedrolloPLImportP2 : BaseDataImporter
public class PedrolloPLImportB3 : BaseDataImporter
{
public override string ImporterType => "PedrolloPL.Import.P2";
public override string ImporterType => "PedrolloPL.Import.B3";
private readonly AppDbContext _db;
private readonly ILogger<PedrolloPLImportP2> _logger;
private readonly ILogger<PedrolloPLImportB3> _logger;
// Configuration properties
private string? DataInboxName { get; set; }
@@ -27,9 +27,9 @@ public class PedrolloPLImportP2 : BaseDataImporter
private DataInbox? _cachedDataInbox;
private Dictionary<string, string>? _regionCodeMap;
public PedrolloPLImportP2(
public PedrolloPLImportB3(
AppDbContext db,
ILogger<PedrolloPLImportP2> logger)
ILogger<PedrolloPLImportB3> logger)
{
_db = db;
_logger = logger;
@@ -338,12 +338,11 @@ public class PedrolloPLImportP2 : BaseDataImporter
IsCancelled = false,
CreatedAt = now,
ModifiedAt = now,
CreatedById = importWorker.CreatedById,
ModifiedById = importWorker.ModifiedById
CreatedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"), // System user
ModifiedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d") // System user
};
// Format: L{Number}-I-P2-{Year}-{Timestamp}
importLayer.Name = $"L{importLayer.Number}-I-P2-{ImportYear}-{now:yyyyMMddHHmm}";
importLayer.Name = $"L{importLayer.Number}-I-B3-{ImportYear}-{now:yyyyMMddHHmm}";
_logger.LogDebug("{ImporterType}: Creating import layer '{LayerName}' (Number: {Number})",
ImporterType, importLayer.Name, importLayer.Number);
@@ -362,13 +361,24 @@ public class PedrolloPLImportP2 : BaseDataImporter
_logger.LogDebug("{ImporterType}: Saving {RecordCount} records to layer {LayerId}",
ImporterType, records.Count, importLayer.Id);
// Set LayerId for all records
// Delete any existing records for this layer (shouldn't be any, but just in case)
var toDelete = _db.Records.Where(x => x.LayerId == importLayer.Id).ToList();
if (toDelete.Count > 0)
{
_logger.LogWarning("{ImporterType}: Found {ExistingCount} existing records for layer {LayerId}, removing them",
ImporterType, toDelete.Count, importLayer.Id);
_db.Records.RemoveRange(toDelete);
}
// Set all required properties for each record
foreach (var record in records)
{
record.LayerId = importLayer.Id;
record.CreatedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"); // System user
record.ModifiedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"); // System user
_db.Records.Add(record);
}
_db.Records.AddRange(records);
_db.SaveChanges();
_logger.LogInformation("{ImporterType}: Successfully saved {RecordCount} records to layer '{LayerName}'",

View File

@@ -0,0 +1,542 @@
using System.Text;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Google.Apis.Sheets.v4;
using Google.Apis.Sheets.v4.Data;
using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.PedrolloPL.Processors;
/// <summary>
/// Processor "PedrolloPL.Process.P2": aggregates records from the latest B3 import
/// layer of a configured year into one record per region (12 monthly values each),
/// stores them in a processed layer, and optionally writes the result into a
/// configured Google Sheet range.
/// </summary>
public class PedrolloPLProcessP2 : BaseDataProcessor
{
    public override string ProcessorType => "PedrolloPL.Process.P2";

    // Well-known "system user" id stamped on layers/records created by this
    // processor. Extracted to a single constant so the literal appears once.
    private static readonly Guid SystemUserId = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d");

    private readonly AppDbContext _db;
    private readonly ILogger<PedrolloPLProcessP2> _logger;
    private readonly SpreadsheetsResource.ValuesResource _googleSheetValues;

    // Configuration properties, loaded from the process worker's records.
    private string? Year { get; set; }
    private bool IsEnabled { get; set; }
    private string? GoogleSheetId { get; set; }
    private string? GoogleSheetTab { get; set; }
    private string? GoogleSheetRange { get; set; }

    // Cached data, valid only for the duration of a single Process() call.
    private Layer? _sourceLayer;
    private Layer? _processedLayer;
    private Dictionary<string, string>? _codeToRegionMap;

    public PedrolloPLProcessP2(
        AppDbContext db,
        ILogger<PedrolloPLProcessP2> logger,
        SpreadsheetsResource.ValuesResource googleSheetValues)
    {
        _db = db;
        _logger = logger;
        _googleSheetValues = googleSheetValues;
    }

    /// <summary>
    /// Entry point. Loads configuration from <paramref name="processWorker"/>,
    /// transforms the latest B3 import layer for the configured year, saves the
    /// result, and exports to Google Sheets when sheet settings are present.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when configuration is invalid or required layers cannot be found.
    /// </exception>
    public override void Process(Layer processWorker)
    {
        try
        {
            _logger.LogInformation("{ProcessorType}: Starting process for {ProcessWorkerName} ({ProcessWorkerId})",
                ProcessorType, processWorker.Name, processWorker.Id);

            // Clear cache at start so a reused instance never sees stale state.
            _sourceLayer = null;
            _processedLayer = null;
            _codeToRegionMap = null;

            LoadConfiguration(processWorker);
            ValidateConfiguration();

            if (!IsEnabled)
            {
                _logger.LogInformation("{ProcessorType}: Process disabled for {ProcessWorkerName}",
                    ProcessorType, processWorker.Name);
                return;
            }

            // Find latest B3 import layer for the configured year
            FindSourceLayer();

            // Find or create processed layer
            FindOrCreateProcessedLayer(processWorker);

            // Transform data from source to processed layer
            var transformedRecords = TransformData();

            // Save records to processed layer
            SaveRecordsToLayer(_processedLayer!, transformedRecords);

            // Export to Google Sheets only when the full sheet configuration is present.
            if (!string.IsNullOrEmpty(GoogleSheetId) && !string.IsNullOrEmpty(GoogleSheetTab) && !string.IsNullOrEmpty(GoogleSheetRange))
            {
                ExportToGoogleSheet();
            }
            else
            {
                _logger.LogInformation("{ProcessorType}: Google Sheet export skipped - configuration not provided",
                    ProcessorType);
            }

            _logger.LogInformation("{ProcessorType}: Successfully completed process for {ProcessWorkerName} - Processed {RecordCount} records",
                ProcessorType, processWorker.Name, transformedRecords.Count);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to process {ProcessWorkerName} ({ProcessWorkerId})",
                ProcessorType, processWorker.Name, processWorker.Id);
            throw;
        }
        finally
        {
            // Clear cache after process
            _sourceLayer = null;
            _processedLayer = null;
            _codeToRegionMap = null;
        }
    }

    /// <summary>Reads configuration values from the worker layer's records (by Code).</summary>
    private void LoadConfiguration(Layer processWorker)
    {
        if (processWorker.Records == null) return;

        Year = GetRecordValue(processWorker.Records, "Year");
        IsEnabled = GetRecordValue(processWorker.Records, "IsEnabled") == "True";
        GoogleSheetId = GetRecordValue(processWorker.Records, "GoogleSheetId");
        GoogleSheetTab = GetRecordValue(processWorker.Records, "GoogleSheetTab");
        GoogleSheetRange = GetRecordValue(processWorker.Records, "GoogleSheetRange");

        _logger.LogDebug(
            "{ProcessorType}: Configuration loaded - Year: {Year}, Enabled: {IsEnabled}, SheetId: {SheetId}, Tab: {Tab}, Range: {Range}",
            ProcessorType, Year, IsEnabled, GoogleSheetId, GoogleSheetTab, GoogleSheetRange);
    }

    /// <summary>Ensures required configuration is present; only Year is mandatory.</summary>
    private void ValidateConfiguration()
    {
        var errors = new List<string>();

        if (string.IsNullOrEmpty(Year)) errors.Add("Year is required");

        if (errors.Count > 0)
        {
            throw new InvalidOperationException($"Configuration validation failed: {string.Join(", ", errors)}");
        }

        _logger.LogDebug("{ProcessorType}: Configuration validated successfully", ProcessorType);
    }

    /// <summary>Locates the newest B3 import layer for the configured year.</summary>
    private void FindSourceLayer()
    {
        _logger.LogDebug("{ProcessorType}: Searching for latest B3 import layer for year {Year}",
            ProcessorType, Year);

        // Find latest B3 import layer matching pattern: L*-I-B3-{Year}-*
        var layerNamePattern = $"-I-B3-{Year}-";

        _sourceLayer = _db.Layers
            .Where(x => x.Name != null && x.Name.Contains(layerNamePattern) && x.Type == LayerType.Import)
            .OrderByDescending(x => x.CreatedAt)
            .FirstOrDefault();

        if (_sourceLayer == null)
        {
            throw new InvalidOperationException(
                $"Source B3 import layer not found for year {Year} (pattern: *{layerNamePattern}*)");
        }

        _logger.LogInformation("{ProcessorType}: Found source layer - Id: {LayerId}, Name: {LayerName}, CreatedAt: {CreatedAt}",
            ProcessorType, _sourceLayer.Id, _sourceLayer.Name, _sourceLayer.CreatedAt);
    }

    /// <summary>Reuses the processed layer parented to this worker, or creates one.</summary>
    private void FindOrCreateProcessedLayer(Layer processWorker)
    {
        _logger.LogDebug("{ProcessorType}: Looking for existing processed layer with ParentId={ParentId}",
            ProcessorType, processWorker.Id);

        // Check if processed layer already exists with ParentId = ProcessWorker.Id
        _processedLayer = _db.Layers
            .Where(x => x.ParentId == processWorker.Id && x.Type == LayerType.Processed)
            .FirstOrDefault();

        if (_processedLayer != null)
        {
            _logger.LogInformation("{ProcessorType}: Found existing processed layer - Id: {LayerId}, Name: {LayerName}",
                ProcessorType, _processedLayer.Id, _processedLayer.Name);
        }
        else
        {
            _logger.LogInformation("{ProcessorType}: No existing processed layer found, creating new one",
                ProcessorType);
            _processedLayer = CreateProcessedLayer(processWorker);
        }
    }

    /// <summary>Creates and persists a new processed layer owned by the system user.</summary>
    private Layer CreateProcessedLayer(Layer processWorker)
    {
        var now = DateTime.UtcNow;
        var processedLayer = new Layer
        {
            Id = Guid.NewGuid(),
            // NOTE(review): Count()+1 is not safe under concurrent layer creation —
            // two processors could compute the same Number. Confirm whether Number
            // uniqueness matters or should be a DB-generated sequence.
            Number = _db.Layers.Count() + 1,
            ParentId = processWorker.Id,
            Type = LayerType.Processed,
            IsCancelled = false,
            CreatedAt = now,
            ModifiedAt = now,
            CreatedById = SystemUserId,
            ModifiedById = SystemUserId
        };

        processedLayer.Name = $"L{processedLayer.Number}-P-P2-{Year}-{now:yyyyMMddHHmm}";

        _logger.LogDebug("{ProcessorType}: Creating processed layer '{LayerName}' (Number: {Number})",
            ProcessorType, processedLayer.Name, processedLayer.Number);

        _db.Layers.Add(processedLayer);
        _db.SaveChanges();

        _logger.LogInformation("{ProcessorType}: Created processed layer '{LayerName}' with Id: {LayerId}",
            ProcessorType, processedLayer.Name, processedLayer.Id);

        return processedLayer;
    }

    /// <summary>
    /// Groups source records by region (first 2 chars of Code) and pivots the
    /// per-month values (chars 3-4 of Code, 1..12) into Value1..Value12 of one
    /// record per region. Missing months default to 0.
    /// </summary>
    private List<Record> TransformData()
    {
        if (_sourceLayer == null)
        {
            throw new InvalidOperationException("Source layer not loaded. Call FindSourceLayer first.");
        }

        _logger.LogDebug("{ProcessorType}: Loading records from source layer {LayerId}",
            ProcessorType, _sourceLayer.Id);

        // Load all records from source layer
        var sourceRecords = _db.Records
            .Where(x => x.LayerId == _sourceLayer.Id && !x.IsDeleted)
            .ToList();

        _logger.LogInformation("{ProcessorType}: Loaded {RecordCount} records from source layer",
            ProcessorType, sourceRecords.Count);

        // Group records by first 2 digits of Code (region code)
        var groupedByRegion = sourceRecords
            .Where(x => !string.IsNullOrEmpty(x.Code) && x.Code.Length >= 2)
            .GroupBy(x => x.Code!.Substring(0, 2))
            .ToList();

        _logger.LogDebug("{ProcessorType}: Grouped into {GroupCount} regions",
            ProcessorType, groupedByRegion.Count);

        var transformedRecords = new List<Record>();
        var now = DateTime.UtcNow;

        foreach (var regionGroup in groupedByRegion)
        {
            var regionCode = regionGroup.Key;

            // Create array for 12 months (initialize with 0)
            var monthValues = new double?[12];
            for (int i = 0; i < 12; i++)
            {
                monthValues[i] = 0;
            }

            // Fill in values for each month
            foreach (var sourceRecord in regionGroup)
            {
                if (sourceRecord.Code!.Length >= 4)
                {
                    // Extract month from digits 3-4 of code (e.g., "0105" -> month 5)
                    var monthStr = sourceRecord.Code.Substring(2, 2);
                    if (int.TryParse(monthStr, out var month) && month >= 1 && month <= 12)
                    {
                        var monthIndex = month - 1; // Convert to 0-based index
                        monthValues[monthIndex] = sourceRecord.Value1 ?? 0;
                        _logger.LogDebug("{ProcessorType}: Region {RegionCode}, Month {Month}: Value = {Value}",
                            ProcessorType, regionCode, month, sourceRecord.Value1);
                    }
                }
            }

            // Create transformed record with Code = region code and Value1-12 = monthly values
            var record = new Record
            {
                Id = Guid.NewGuid(),
                Code = regionCode,
                Value1 = monthValues[0],
                Value2 = monthValues[1],
                Value3 = monthValues[2],
                Value4 = monthValues[3],
                Value5 = monthValues[4],
                Value6 = monthValues[5],
                Value7 = monthValues[6],
                Value8 = monthValues[7],
                Value9 = monthValues[8],
                Value10 = monthValues[9],
                Value11 = monthValues[10],
                Value12 = monthValues[11],
                CreatedAt = now,
                ModifiedAt = now
            };

            transformedRecords.Add(record);

            _logger.LogDebug("{ProcessorType}: Transformed region '{RegionCode}' - Values: [{Values}]",
                ProcessorType, regionCode,
                string.Join(", ", monthValues.Select(v => v?.ToString() ?? "0")));
        }

        _logger.LogInformation("{ProcessorType}: Successfully transformed {RecordCount} records from {SourceCount} source records",
            ProcessorType, transformedRecords.Count, sourceRecords.Count);

        return transformedRecords;
    }

    /// <summary>
    /// Replaces all records of <paramref name="processedLayer"/> with
    /// <paramref name="records"/>, stamping ownership with the system user.
    /// </summary>
    private void SaveRecordsToLayer(Layer processedLayer, List<Record> records)
    {
        _logger.LogDebug("{ProcessorType}: Saving {RecordCount} records to layer {LayerId}",
            ProcessorType, records.Count, processedLayer.Id);

        // Delete any existing records for this layer
        var toDelete = _db.Records.Where(x => x.LayerId == processedLayer.Id).ToList();
        if (toDelete.Count > 0)
        {
            _logger.LogInformation("{ProcessorType}: Found {ExistingCount} existing records for layer {LayerId}, removing them",
                ProcessorType, toDelete.Count, processedLayer.Id);
            _db.Records.RemoveRange(toDelete);
        }

        // Set all required properties for each record
        foreach (var record in records)
        {
            record.LayerId = processedLayer.Id;
            record.CreatedById = SystemUserId;
            record.ModifiedById = SystemUserId;
            _db.Records.Add(record);
        }

        _db.SaveChanges();

        _logger.LogInformation("{ProcessorType}: Successfully saved {RecordCount} records to layer '{LayerName}'",
            ProcessorType, records.Count, processedLayer.Name);
    }

    /// <summary>Downloads the configured sheet range, merges processed values in, and uploads it back.</summary>
    private void ExportToGoogleSheet()
    {
        try
        {
            _logger.LogInformation("{ProcessorType}: Starting Google Sheet export to {SheetId}, Tab: {Tab}, Range: {Range}",
                ProcessorType, GoogleSheetId, GoogleSheetTab, GoogleSheetRange);

            // Load dictionary for code to region name translation
            LoadCodeToRegionDictionary();

            // Download current sheet data
            var sheetData = DownloadSheetData();

            // Update sheet data with processed layer values
            var updatedData = UpdateSheetDataWithProcessedValues(sheetData);

            // Upload updated data back to sheet
            UploadSheetData(updatedData);

            _logger.LogInformation("{ProcessorType}: Successfully exported data to Google Sheet",
                ProcessorType);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to export to Google Sheet",
                ProcessorType);
            throw;
        }
    }

    /// <summary>Loads the Code → region-name mapping from the L1-D-P2-CODES dictionary layer.</summary>
    private void LoadCodeToRegionDictionary()
    {
        const string dictionaryLayerName = "L1-D-P2-CODES";

        _logger.LogDebug("{ProcessorType}: Loading code to region mapping from dictionary layer '{DictionaryLayerName}'",
            ProcessorType, dictionaryLayerName);

        var dictionaryLayer = _db.Layers
            .Where(x => x.Name == dictionaryLayerName && x.Type == LayerType.Dictionary)
            .FirstOrDefault();

        if (dictionaryLayer == null)
        {
            throw new InvalidOperationException($"Dictionary layer '{dictionaryLayerName}' not found");
        }

        // Load records for the dictionary layer
        var records = _db.Records
            .Where(x => x.LayerId == dictionaryLayer.Id)
            .ToList();

        // Build mapping: Code -> Desc1 (region name)
        _codeToRegionMap = records.ToDictionary(
            r => r.Code ?? string.Empty,
            r => r.Desc1 ?? string.Empty,
            StringComparer.OrdinalIgnoreCase);

        _logger.LogInformation("{ProcessorType}: Loaded {MappingCount} code to region mappings",
            ProcessorType, _codeToRegionMap.Count);
    }

    /// <summary>Fetches the configured range; throws when the range is empty or the call fails.</summary>
    private IList<IList<object>> DownloadSheetData()
    {
        _logger.LogDebug("{ProcessorType}: Downloading sheet data from range {Range}",
            ProcessorType, $"{GoogleSheetTab}!{GoogleSheetRange}");

        var range = $"{GoogleSheetTab}!{GoogleSheetRange}";
        ValueRange? response;
        try
        {
            response = _googleSheetValues.Get(GoogleSheetId, range).Execute();
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to download sheet data from {Range}",
                ProcessorType, range);
            throw new InvalidOperationException($"Failed to download sheet data from {range}", e);
        }

        if (response?.Values == null || response.Values.Count == 0)
        {
            throw new InvalidOperationException($"No data found in sheet range {range}");
        }

        _logger.LogInformation("{ProcessorType}: Downloaded {RowCount} rows from Google Sheet",
            ProcessorType, response.Values.Count);

        return response.Values;
    }

    /// <summary>
    /// Mutates <paramref name="sheetData"/> in place: for each row whose first
    /// column matches a known region name, writes the 12 monthly values into
    /// columns 1-12. Returns the same list.
    /// </summary>
    private IList<IList<object>> UpdateSheetDataWithProcessedValues(IList<IList<object>> sheetData)
    {
        if (_processedLayer == null)
        {
            throw new InvalidOperationException("Processed layer not loaded");
        }
        if (_codeToRegionMap == null)
        {
            throw new InvalidOperationException("Code to region mapping not loaded");
        }

        _logger.LogDebug("{ProcessorType}: Updating sheet data with processed values from layer {LayerId}",
            ProcessorType, _processedLayer.Id);

        // Load all records from processed layer
        var processedRecords = _db.Records
            .Where(x => x.LayerId == _processedLayer.Id && !x.IsDeleted)
            .ToList();

        _logger.LogDebug("{ProcessorType}: Loaded {RecordCount} records from processed layer",
            ProcessorType, processedRecords.Count);

        // Build O(1) lookups once instead of scanning the map and the record list
        // for every sheet row (previously O(rows × entries)). First occurrence
        // wins, preserving the earlier FirstOrDefault semantics.
        var regionNameToCode = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        foreach (var entry in _codeToRegionMap)
        {
            if (!regionNameToCode.ContainsKey(entry.Value))
            {
                regionNameToCode[entry.Value] = entry.Key;
            }
        }

        var recordsByCode = new Dictionary<string, Record>(StringComparer.Ordinal);
        foreach (var candidate in processedRecords)
        {
            if (candidate.Code != null && !recordsByCode.ContainsKey(candidate.Code))
            {
                recordsByCode[candidate.Code] = candidate;
            }
        }

        var updatedRowCount = 0;

        // Iterate through sheet data and update matching rows
        foreach (var row in sheetData)
        {
            if (row.Count == 0) continue;

            // First column (index 0) contains the region name (Kontrola column)
            var regionName = row[0]?.ToString()?.Trim();
            if (string.IsNullOrEmpty(regionName)) continue;

            // Find the code for this region name
            if (!regionNameToCode.TryGetValue(regionName, out var regionCode) || string.IsNullOrEmpty(regionCode))
            {
                _logger.LogWarning("{ProcessorType}: No code found for region '{RegionName}' in dictionary - skipping",
                    ProcessorType, regionName);
                continue;
            }

            // Find the processed record for this code
            if (!recordsByCode.TryGetValue(regionCode, out var processedRecord))
            {
                _logger.LogWarning("{ProcessorType}: No processed record found for code '{RegionCode}' (region: '{RegionName}') - skipping",
                    ProcessorType, regionCode, regionName);
                continue;
            }

            // Ensure row has enough columns (13 total: 1 for region + 12 for months)
            while (row.Count < 13)
            {
                row.Add("");
            }

            // Update monthly values (Value1 through Value12)
            row[1] = processedRecord.Value1 ?? 0;
            row[2] = processedRecord.Value2 ?? 0;
            row[3] = processedRecord.Value3 ?? 0;
            row[4] = processedRecord.Value4 ?? 0;
            row[5] = processedRecord.Value5 ?? 0;
            row[6] = processedRecord.Value6 ?? 0;
            row[7] = processedRecord.Value7 ?? 0;
            row[8] = processedRecord.Value8 ?? 0;
            row[9] = processedRecord.Value9 ?? 0;
            row[10] = processedRecord.Value10 ?? 0;
            row[11] = processedRecord.Value11 ?? 0;
            row[12] = processedRecord.Value12 ?? 0;

            updatedRowCount++;

            _logger.LogDebug("{ProcessorType}: Updated row for region '{RegionName}' (code: {RegionCode}) with 12 monthly values",
                ProcessorType, regionName, regionCode);
        }

        _logger.LogInformation("{ProcessorType}: Updated {UpdatedRowCount} rows with processed data",
            ProcessorType, updatedRowCount);

        return sheetData;
    }

    /// <summary>Writes the merged rows back to the configured range using USERENTERED parsing.</summary>
    private void UploadSheetData(IList<IList<object>> data)
    {
        _logger.LogDebug("{ProcessorType}: Uploading {RowCount} rows to Google Sheet range {Range}",
            ProcessorType, data.Count, $"{GoogleSheetTab}!{GoogleSheetRange}");

        var range = $"{GoogleSheetTab}!{GoogleSheetRange}";
        var valueRange = new ValueRange { Values = data };

        try
        {
            var updateRequest = _googleSheetValues.Update(valueRange, GoogleSheetId, range);
            updateRequest.ValueInputOption = SpreadsheetsResource.ValuesResource.UpdateRequest.ValueInputOptionEnum.USERENTERED;
            var response = updateRequest.Execute();

            _logger.LogInformation("{ProcessorType}: Successfully uploaded data to Google Sheet - Updated {UpdatedCells} cells",
                ProcessorType, response.UpdatedCells);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to upload data to Google Sheet range {Range}",
                ProcessorType, range);
            throw new InvalidOperationException($"Failed to upload data to Google Sheet range {range}", e);
        }
    }

    /// <summary>Returns Desc1 of the first record with the given Code, or null.</summary>
    private string? GetRecordValue(ICollection<Record> records, string code)
    {
        return records.FirstOrDefault(x => x.Code == code)?.Desc1;
    }
}

View File

@@ -37,15 +37,36 @@
@_errorMessage
</MudAlert>
}
@if (_sessionExpired)
{
<MudAlert Severity="Severity.Warning" Class="mt-4" Dense="true">
Your session has expired. Please sign in again.
</MudAlert>
}
</MudCardContent>
</MudCard>
@code {
private bool _isLoading = false;
private string _errorMessage = string.Empty;
private bool _sessionExpired = false;
private static LoginCard? _instance;
private bool _isInitialized = false;
protected override void OnInitialized()
{
    // Show the session-expiry warning when the login page was reached via a
    // redirect carrying ?sessionExpired=true.
    var currentUri = new Uri(NavigationManager.Uri);
    var queryParams = System.Web.HttpUtility.ParseQueryString(currentUri.Query);
    _sessionExpired = string.Equals(queryParams["sessionExpired"], "true", StringComparison.Ordinal);
    if (_sessionExpired)
    {
        Console.WriteLine("⚠️ Session expired - user redirected to login");
    }
}
protected override async Task OnAfterRenderAsync(bool firstRender)
{
if (firstRender)

View File

@@ -1,110 +0,0 @@
using DiunaBI.UI.Shared.Services;
using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.Components.Web;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using MudBlazor;
using Microsoft.JSInterop;
namespace DiunaBI.UI.Shared.Components;
/// <summary>
/// Code-behind for JobListComponent.razor: a paged, filterable list of queue
/// jobs with click-through navigation to job detail pages.
/// </summary>
public partial class JobListComponent : ComponentBase
{
    [Inject] private JobService JobService { get; set; } = default!;
    [Inject] private ISnackbar Snackbar { get; set; } = default!;
    [Inject] private NavigationManager NavigationManager { get; set; } = default!;
    [Inject] private IJSRuntime JSRuntime { get; set; } = default!;

    private PagedResult<QueueJob> jobs = new();
    private bool isLoading = false;
    private int currentPage = 1;
    private int pageSize = 50;
    private JobStatus? selectedStatus = null;
    private JobType? selectedJobType = null;

    protected override async Task OnInitializedAsync()
    {
        await LoadJobs();
    }

    /// <summary>Fetches the current page of jobs with the active filters applied.</summary>
    private async Task LoadJobs()
    {
        isLoading = true;
        try
        {
            jobs = await JobService.GetJobsAsync(currentPage, pageSize, selectedStatus, selectedJobType);
        }
        catch (Exception ex)
        {
            Console.WriteLine($"Loading jobs failed: {ex.Message}");
            Snackbar.Add("Failed to load jobs", Severity.Error);
        }
        finally
        {
            isLoading = false;
        }
    }

    private async Task OnPageChanged(int page)
    {
        currentPage = page;
        await LoadJobs();
    }

    // The three clear handlers shared an identical reset-and-reload tail;
    // factored into one helper so the pagination reset cannot drift apart.
    private async Task ResetToFirstPageAndReload()
    {
        currentPage = 1;
        await LoadJobs();
    }

    /// <summary>Clears both filters and reloads from page 1.</summary>
    private async Task ClearFilters()
    {
        selectedStatus = null;
        selectedJobType = null;
        await ResetToFirstPageAndReload();
    }

    private async Task OnStatusClear()
    {
        selectedStatus = null;
        await ResetToFirstPageAndReload();
    }

    private async Task OnJobTypeClear()
    {
        selectedJobType = null;
        await ResetToFirstPageAndReload();
    }

    /// <summary>Navigates to the detail page of the clicked job.</summary>
    private void OnRowClick(QueueJob job)
    {
        NavigationManager.NavigateTo($"/jobs/{job.Id}");
    }

    /// <summary>Opens the job detail page in a new browser tab.</summary>
    private async Task OnRowRightClick(MouseEventArgs e, QueueJob job)
    {
        var url = NavigationManager.ToAbsoluteUri($"/jobs/{job.Id}").ToString();
        await JSRuntime.InvokeVoidAsync("open", url, "_blank");
    }

    /// <summary>Maps a job status to the chip colour shown in the grid.</summary>
    private Color GetStatusColor(JobStatus status) => status switch
    {
        JobStatus.Pending => Color.Default,
        JobStatus.Running => Color.Info,
        JobStatus.Completed => Color.Success,
        JobStatus.Failed => Color.Error,
        JobStatus.Retrying => Color.Warning,
        _ => Color.Default
    };

    /// <summary>Maps a job type to the chip colour shown in the grid.</summary>
    private Color GetJobTypeColor(JobType jobType) => jobType switch
    {
        JobType.Import => Color.Primary,
        JobType.Process => Color.Secondary,
        _ => Color.Default
    };
}

View File

@@ -1,36 +1,32 @@
@using MudBlazor
@using DiunaBI.UI.Shared.Services
@inject AppConfig AppConfig
@inject EntityChangeHubService HubService
@inject AuthService AuthService
@inherits LayoutComponentBase
@implements IDisposable
<AuthGuard>
<MudThemeProvider Theme="_theme"/>
<MudPopoverProvider/>
<MudDialogProvider/>
<MudSnackbarProvider/>
<MudThemeProvider Theme="_theme" />
<MudPopoverProvider />
<MudDialogProvider />
<MudSnackbarProvider />
<MudLayout>
<MudBreakpointProvider OnBreakpointChanged="OnBreakpointChanged"></MudBreakpointProvider>
<MudAppBar Elevation="0">
<MudIconButton
Icon="@Icons.Material.Filled.Menu"
Color="Color.Inherit"
Edge="Edge.Start"
OnClick="ToggleDrawer"
Class="mud-hidden-md-up"/>
<MudSpacer/>
<MudIconButton Icon="@Icons.Material.Filled.Menu" Color="Color.Inherit" Edge="Edge.Start"
OnClick="ToggleDrawer" Class="mud-hidden-md-up" />
<MudSpacer />
<MudText Typo="Typo.h6">@AppConfig.AppName</MudText>
</MudAppBar>
<MudDrawer @bind-Open="_drawerOpen"
Anchor="Anchor.Start"
Variant="@_drawerVariant"
Elevation="1"
ClipMode="DrawerClipMode.Always"
Class="mud-width-250">
<MudDrawer @bind-Open="_drawerOpen" Anchor="Anchor.Start" Variant="@_drawerVariant" Elevation="1"
ClipMode="DrawerClipMode.Always" Class="mud-width-250">
<div class="nav-logo" style="text-align: center; padding: 20px;">
<a href="https://www.diunabi.com" target="_blank">
<img src="_content/DiunaBI.UI.Shared/images/logo.png" alt="DiunaBI" style="max-width: 180px; height: auto;" />
<img src="_content/DiunaBI.UI.Shared/images/logo.png" alt="DiunaBI"
style="max-width: 180px; height: auto;" />
</a>
</div>
<MudNavMenu>
@@ -39,6 +35,10 @@
<MudNavLink Href="/datainbox" Icon="@Icons.Material.Filled.Inbox">Data Inbox</MudNavLink>
<MudNavLink Href="/jobs" Icon="@Icons.Material.Filled.WorkHistory">Jobs</MudNavLink>
</MudNavMenu>
<div class="nav-logo" style="text-align: center; padding: 20px;">
<img src="_content/DiunaBI.UI.Shared/images/clients/@AppConfig.ClientLogo" alt="DiunaBI"
style="max-width: 180px; height: auto;" />
</div>
</MudDrawer>
<MudMainContent>
@@ -54,6 +54,32 @@
private bool _drawerOpen = true;
private DrawerVariant _drawerVariant = DrawerVariant.Persistent;
protected override void OnInitialized()
{
// Subscribe to authentication state changes
AuthService.AuthenticationStateChanged += OnAuthenticationStateChanged;
// If already authenticated (e.g., from restored session), initialize SignalR
if (AuthService.IsAuthenticated)
{
_ = HubService.InitializeAsync();
}
}
private async void OnAuthenticationStateChanged(bool isAuthenticated)
{
if (isAuthenticated)
{
Console.WriteLine("🔐 MainLayout: User authenticated, initializing SignalR...");
await HubService.InitializeAsync();
}
}
public void Dispose()
{
AuthService.AuthenticationStateChanged -= OnAuthenticationStateChanged;
}
private MudTheme _theme = new MudTheme()
{
PaletteLight = new PaletteLight()

View File

@@ -17,6 +17,7 @@
<PackageReference Include="Microsoft.AspNetCore.WebUtilities" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.AspNetCore.SignalR.Client" Version="10.0.0" />
</ItemGroup>
<ItemGroup>

View File

@@ -1,4 +1,6 @@
using Microsoft.AspNetCore.Components;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using DiunaBI.UI.Shared.Services;
using DiunaBI.UI.Shared.Handlers;
@@ -15,14 +17,16 @@ public static class ServiceCollectionExtensions
Console.WriteLine($"🔧 Configuring HttpClient with BaseAddress: {baseUri}");
services.AddTransient<HttpLoggingHandler>();
services.AddTransient<UnauthorizedResponseHandler>();
// Configure named HttpClient with logging handler
// Configure named HttpClient with logging and 401 handling
// Note: Authentication is handled by AuthService setting DefaultRequestHeaders.Authorization
services.AddHttpClient("DiunaBI", client =>
{
client.BaseAddress = new Uri(baseUri);
Console.WriteLine($"✅ HttpClient BaseAddress set to: {client.BaseAddress}");
})
.AddHttpMessageHandler<UnauthorizedResponseHandler>()
.AddHttpMessageHandler<HttpLoggingHandler>();
// Register a scoped HttpClient factory that services will use
@@ -35,15 +39,25 @@ public static class ServiceCollectionExtensions
});
// Services
services.AddScoped<TokenProvider>();
services.AddScoped<AuthService>();
services.AddScoped<LayerService>();
services.AddScoped<DataInboxService>();
services.AddScoped<JobService>();
services.AddScoped<DateTimeHelper>();
// Filter state services (scoped to maintain state during user session)
services.AddScoped<LayerFilterStateService>();
services.AddScoped<DataInboxFilterStateService>();
// SignalR Hub Service (scoped per user session for authenticated connections)
services.AddScoped(sp =>
{
var logger = sp.GetRequiredService<ILogger<EntityChangeHubService>>();
var tokenProvider = sp.GetRequiredService<TokenProvider>();
return new EntityChangeHubService(apiBaseUrl, sp, logger, tokenProvider);
});
return services;
}
}

View File

@@ -0,0 +1,41 @@
using Microsoft.AspNetCore.Components;
using Microsoft.Extensions.DependencyInjection;
using DiunaBI.UI.Shared.Services;
namespace DiunaBI.UI.Shared.Handlers;
// DelegatingHandler that inspects every HTTP response for 401 Unauthorized
// and, when one is seen, clears stored credentials and force-reloads the
// login page with a "session expired" hint.
public class UnauthorizedResponseHandler : DelegatingHandler
{
private readonly IServiceProvider _serviceProvider;
// The provider is injected (rather than the services themselves) because
// message handlers are created by the IHttpClientFactory pipeline, outside
// the user's scoped service context.
public UnauthorizedResponseHandler(IServiceProvider serviceProvider)
{
_serviceProvider = serviceProvider;
}
// Passes the request through and reacts to a 401 on the response path.
// NOTE(review): CreateScope() below builds a *new* DI scope, so the
// AuthService and NavigationManager resolved here are presumably different
// instances from the ones in the user's Blazor circuit scope — confirm this
// actually clears the active session's state and that a freshly-resolved
// NavigationManager is initialized enough to call NavigateTo.
protected override async Task<HttpResponseMessage> SendAsync(
HttpRequestMessage request,
CancellationToken cancellationToken)
{
var response = await base.SendAsync(request, cancellationToken);
// Check if response is 401 Unauthorized
if (response.StatusCode == System.Net.HttpStatusCode.Unauthorized)
{
Console.WriteLine("⚠️ 401 Unauthorized response detected - clearing credentials and redirecting to login");
// Create a scope to get scoped services
using var scope = _serviceProvider.CreateScope();
var authService = scope.ServiceProvider.GetRequiredService<AuthService>();
var navigationManager = scope.ServiceProvider.GetRequiredService<NavigationManager>();
// Clear authentication
await authService.ClearAuthenticationAsync();
// Navigate to login page with session expired message
navigationManager.NavigateTo("/login?sessionExpired=true", forceLoad: true);
}
return response;
}
}

View File

@@ -2,8 +2,6 @@
@using DiunaBI.UI.Shared.Services
@using DiunaBI.Application.DTOModels
@using MudBlazor
@inject DataInboxService DataInboxService
@inject NavigationManager NavigationManager
<MudCard>
<MudCardHeader>

View File

@@ -1,15 +1,22 @@
using DiunaBI.Application.DTOModels;
using DiunaBI.UI.Shared.Services;
using Microsoft.AspNetCore.Components;
using MudBlazor;
using System.Text;
namespace DiunaBI.UI.Shared.Pages;
namespace DiunaBI.UI.Shared.Pages.DataInbox;
public partial class DataInboxDetailPage : ComponentBase
public partial class Details : ComponentBase
{
[Parameter]
public Guid Id { get; set; }
[Inject]
private DataInboxService DataInboxService { get; set; } = null!;
[Inject]
private NavigationManager NavigationManager { get; set; } = null!;
[Inject]
private ISnackbar Snackbar { get; set; } = null!;

View File

@@ -1,4 +1,11 @@
@page "/datainbox"
@using MudBlazor.Internal
@using DiunaBI.Application.DTOModels
@implements IDisposable
<PageTitle>Data Inbox</PageTitle>
<MudContainer MaxWidth="MaxWidth.ExtraExtraLarge">
<MudExpansionPanels Class="mb-4">
<MudExpansionPanel Icon="@Icons.Material.Filled.FilterList"
Text="Filters"
@@ -46,7 +53,7 @@
<RowTemplate Context="row">
<MudTd DataLabel="Name"><div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">@row.Name</div></MudTd>
<MudTd DataLabel="Source"><div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">@row.Source</div></MudTd>
<MudTd DataLabel="Created At"><div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">@row.CreatedAt.ToString("yyyy-MM-dd HH:mm:ss")</div></MudTd>
<MudTd DataLabel="Created At"><div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">@DateTimeHelper.FormatDateTime(row.CreatedAt)</div></MudTd>
</RowTemplate>
<NoRecordsContent>
<MudText>No data inbox items to display</MudText>
@@ -76,3 +83,4 @@
</MudItem>
</MudGrid>
}
</MudContainer>

View File

@@ -6,15 +6,17 @@ using DiunaBI.Application.DTOModels.Common;
using MudBlazor;
using Microsoft.JSInterop;
namespace DiunaBI.UI.Shared.Components;
namespace DiunaBI.UI.Shared.Pages.DataInbox;
public partial class DataInboxListComponent : ComponentBase
public partial class Index : ComponentBase, IDisposable
{
[Inject] private DataInboxService DataInboxService { get; set; } = default!;
[Inject] private EntityChangeHubService HubService { get; set; } = default!;
[Inject] private ISnackbar Snackbar { get; set; } = default!;
[Inject] private NavigationManager NavigationManager { get; set; } = default!;
[Inject] private DataInboxFilterStateService FilterStateService { get; set; } = default!;
[Inject] private IJSRuntime JSRuntime { get; set; } = default!;
[Inject] private DateTimeHelper DateTimeHelper { get; set; } = default!;
private PagedResult<DataInboxDto> dataInbox = new();
@@ -23,8 +25,25 @@ public partial class DataInboxListComponent : ComponentBase
protected override async Task OnInitializedAsync()
{
await DateTimeHelper.InitializeAsync();
filterRequest = FilterStateService.FilterRequest;
await LoadDataInbox();
// Subscribe to SignalR entity changes
HubService.EntityChanged += OnEntityChanged;
}
private async void OnEntityChanged(string module, string id, string operation)
{
// Only react if it's a DataInbox change
if (module.Equals("DataInbox", StringComparison.OrdinalIgnoreCase))
{
await InvokeAsync(async () =>
{
await LoadDataInbox();
StateHasChanged();
});
}
}
private async Task LoadDataInbox()
@@ -75,4 +94,9 @@ public partial class DataInboxListComponent : ComponentBase
var url = NavigationManager.ToAbsoluteUri($"/datainbox/{dataInboxItem.Id}").ToString();
await JSRuntime.InvokeVoidAsync("open", url, "_blank");
}
public void Dispose()
{
HubService.EntityChanged -= OnEntityChanged;
}
}

View File

@@ -1,8 +0,0 @@
@page "/datainbox"
@using DiunaBI.UI.Shared.Components
<PageTitle>Data Inbox</PageTitle>
<MudContainer MaxWidth="MaxWidth.ExtraExtraLarge">
<DataInboxListComponent />
</MudContainer>

View File

@@ -1,8 +0,0 @@
@page "/jobs"
@using DiunaBI.UI.Shared.Components
<PageTitle>Jobs</PageTitle>
<MudContainer MaxWidth="MaxWidth.ExtraExtraLarge">
<JobListComponent />
</MudContainer>

View File

@@ -3,8 +3,11 @@
@using DiunaBI.Domain.Entities
@using MudBlazor
@inject JobService JobService
@inject EntityChangeHubService HubService
@inject NavigationManager NavigationManager
@inject ISnackbar Snackbar
@inject DateTimeHelper DateTimeHelper
@implements IDisposable
<MudCard>
<MudCardHeader>
@@ -90,14 +93,14 @@
</MudItem>
<MudItem xs="12" md="6">
<MudTextField Value="@job.CreatedAt.ToString("yyyy-MM-dd HH:mm:ss")"
<MudTextField Value="@DateTimeHelper.FormatDateTime(job.CreatedAt)"
Label="Created At"
Variant="Variant.Outlined"
ReadOnly="true"
FullWidth="true"/>
</MudItem>
<MudItem xs="12" md="6">
<MudTextField Value="@(job.LastAttemptAt?.ToString("yyyy-MM-dd HH:mm:ss") ?? "-")"
<MudTextField Value="@DateTimeHelper.FormatDateTime(job.LastAttemptAt)"
Label="Last Attempt At"
Variant="Variant.Outlined"
ReadOnly="true"
@@ -105,7 +108,7 @@
</MudItem>
<MudItem xs="12" md="6">
<MudTextField Value="@(job.CompletedAt?.ToString("yyyy-MM-dd HH:mm:ss") ?? "-")"
<MudTextField Value="@DateTimeHelper.FormatDateTime(job.CompletedAt)"
Label="Completed At"
Variant="Variant.Outlined"
ReadOnly="true"
@@ -159,7 +162,26 @@
protected override async Task OnInitializedAsync()
{
await DateTimeHelper.InitializeAsync();
await LoadJob();
// Subscribe to SignalR entity changes
HubService.EntityChanged += OnEntityChanged;
}
private async void OnEntityChanged(string module, string id, string operation)
{
// Only react if it's a QueueJobs change for this specific job
if (module.Equals("QueueJobs", StringComparison.OrdinalIgnoreCase) &&
Guid.TryParse(id, out var jobId) && jobId == Id)
{
Console.WriteLine($"📨 Job {jobId} changed, refreshing detail page");
await InvokeAsync(async () =>
{
await LoadJob();
StateHasChanged();
});
}
}
private async Task LoadJob()
@@ -242,4 +264,9 @@
_ => Icons.Material.Filled.Help
};
}
public void Dispose()
{
HubService.EntityChanged -= OnEntityChanged;
}
}

View File

@@ -1,30 +1,39 @@
@page "/jobs"
@using MudBlazor.Internal
@using DiunaBI.Domain.Entities
@implements IDisposable
<PageTitle>Jobs</PageTitle>
<MudContainer MaxWidth="MaxWidth.ExtraExtraLarge">
<MudExpansionPanels Class="mb-4">
<MudExpansionPanel Icon="@Icons.Material.Filled.FilterList"
Text="Filters"
Expanded="true">
<MudGrid AlignItems="Center">
<MudItem xs="12" sm="6" md="3">
<MudSelect T="JobStatus?"
@bind-Value="selectedStatus"
<MudSelect T="JobStatus"
SelectedValues="selectedStatuses"
Label="Status"
Placeholder="All statuses"
MultiSelection="true"
Clearable="true"
SelectedValuesChanged="OnStatusFilterChanged"
OnClearButtonClick="OnStatusClear">
@foreach (JobStatus status in Enum.GetValues(typeof(JobStatus)))
{
<MudSelectItem T="JobStatus?" Value="@status">@status.ToString()</MudSelectItem>
<MudSelectItem T="JobStatus" Value="@status">@status.ToString()</MudSelectItem>
}
</MudSelect>
</MudItem>
<MudItem xs="12" sm="6" md="3">
<MudSelect T="JobType?"
@bind-Value="selectedJobType"
Value="selectedJobType"
Label="Job Type"
Placeholder="All types"
Clearable="true"
ValueChanged="OnJobTypeFilterChanged"
OnClearButtonClick="OnJobTypeClear">
@foreach (JobType type in Enum.GetValues(typeof(JobType)))
{
@@ -33,12 +42,33 @@
</MudSelect>
</MudItem>
<MudItem xs="12" sm="12" md="6" Class="d-flex justify-end align-center">
<MudIconButton Icon="@Icons.Material.Filled.Refresh"
OnClick="LoadJobs"
Color="Color.Primary"
Size="Size.Medium"
Title="Refresh"/>
<MudItem xs="12" sm="12" md="6" Class="d-flex justify-end align-center gap-2">
<MudMenu Icon="@Icons.Material.Filled.PlayArrow"
Label="Schedule Jobs"
Variant="Variant.Filled"
Color="Color.Success"
Size="Size.Medium"
EndIcon="@Icons.Material.Filled.KeyboardArrowDown">
<MudMenuItem OnClick="@(() => ScheduleJobs("all"))">
<div class="d-flex align-center">
<MudIcon Icon="@Icons.Material.Filled.PlayCircle" Class="mr-2" />
<span>Run All Jobs</span>
</div>
</MudMenuItem>
<MudMenuItem OnClick="@(() => ScheduleJobs("imports"))">
<div class="d-flex align-center">
<MudIcon Icon="@Icons.Material.Filled.FileDownload" Class="mr-2" />
<span>Run All Imports</span>
</div>
</MudMenuItem>
<MudMenuItem OnClick="@(() => ScheduleJobs("processes"))">
<div class="d-flex align-center">
<MudIcon Icon="@Icons.Material.Filled.Settings" Class="mr-2" />
<span>Run All Processes</span>
</div>
</MudMenuItem>
</MudMenu>
<MudIconButton Icon="@Icons.Material.Filled.Clear"
OnClick="ClearFilters"
Color="Color.Default"
@@ -102,12 +132,12 @@
</MudTd>
<MudTd DataLabel="Created">
<div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
@row.CreatedAt.ToString("yyyy-MM-dd HH:mm")
@DateTimeHelper.FormatDateTime(row.CreatedAt, "yyyy-MM-dd HH:mm")
</div>
</MudTd>
<MudTd DataLabel="Last Attempt">
<div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
@(row.LastAttemptAt?.ToString("yyyy-MM-dd HH:mm") ?? "-")
@DateTimeHelper.FormatDateTime(row.LastAttemptAt, "yyyy-MM-dd HH:mm")
</div>
</MudTd>
</RowTemplate>
@@ -139,3 +169,4 @@
</MudItem>
</MudGrid>
}
</MudContainer>

View File

@@ -0,0 +1,194 @@
using DiunaBI.UI.Shared.Services;
using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.Components.Web;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using MudBlazor;
using Microsoft.JSInterop;
namespace DiunaBI.UI.Shared.Pages.Jobs;
/// <summary>
/// Code-behind for the Jobs list page ("/jobs"). The markup half of this
/// partial class binds to the private members below, so their names are part
/// of the page's internal contract and are kept unchanged. Subscribes to
/// SignalR entity-change notifications and refreshes the list whenever a
/// QueueJobs entity changes.
/// </summary>
public partial class Index : ComponentBase, IDisposable
{
    [Inject] private JobService JobService { get; set; } = default!;
    [Inject] private EntityChangeHubService HubService { get; set; } = default!;
    [Inject] private ISnackbar Snackbar { get; set; } = default!;
    [Inject] private NavigationManager NavigationManager { get; set; } = default!;
    [Inject] private IJSRuntime JSRuntime { get; set; } = default!;
    [Inject] private DateTimeHelper DateTimeHelper { get; set; } = default!;

    // Current page of jobs plus paging/filter state consumed by the markup.
    private PagedResult<QueueJob> jobs = new();
    private bool isLoading = false;
    private int currentPage = 1;
    private int pageSize = 50;
    private IEnumerable<JobStatus> selectedStatuses = new HashSet<JobStatus>();
    private JobType? selectedJobType = null;

    /// <summary>
    /// Initializes timezone-aware formatting, loads the first page, then
    /// subscribes to SignalR changes (after the initial load so the handler
    /// never races a half-initialized component).
    /// </summary>
    protected override async Task OnInitializedAsync()
    {
        await DateTimeHelper.InitializeAsync();
        await LoadJobs();

        // Subscribe to SignalR entity changes
        HubService.EntityChanged += OnEntityChanged;
    }

    /// <summary>
    /// SignalR callback: refreshes the list when a QueueJobs entity changes.
    /// Must stay async void to match the event signature; the body is wrapped
    /// in try/catch because an unhandled exception in an async void handler
    /// cannot be observed by any caller and would tear down the circuit.
    /// </summary>
    private async void OnEntityChanged(string module, string id, string operation)
    {
        try
        {
            Console.WriteLine($"🔔 JobListComponent.OnEntityChanged called: module={module}, id={id}, operation={operation}");

            // Only react if it's a QueueJobs change
            if (!module.Equals("QueueJobs", StringComparison.OrdinalIgnoreCase))
            {
                Console.WriteLine($"⏭️ Skipping - module '{module}' is not QueueJobs");
                return;
            }

            Console.WriteLine($"📨 Job {id} changed, refreshing job list");
            // Marshal back onto the renderer's sync context before touching state.
            await InvokeAsync(async () =>
            {
                await LoadJobs();
                StateHasChanged();
            });
            Console.WriteLine($"✅ Job list refresh complete");
        }
        catch (Exception ex)
        {
            // Contain the failure: a crash here would bring down the circuit.
            Console.WriteLine($"❌ Job list refresh failed: {ex.Message}");
        }
    }

    /// <summary>
    /// Fetches the current page of jobs using the active filters. Failures
    /// are logged and surfaced via snackbar instead of thrown.
    /// </summary>
    private async Task LoadJobs()
    {
        isLoading = true;
        try
        {
            var statusList = selectedStatuses?.ToList();
            jobs = await JobService.GetJobsAsync(currentPage, pageSize, statusList, selectedJobType);
        }
        catch (Exception ex)
        {
            Console.WriteLine($"Loading jobs failed: {ex.Message}");
            Snackbar.Add("Failed to load jobs", Severity.Error);
        }
        finally
        {
            isLoading = false;
        }
    }

    /// <summary>Pager callback: switches to the requested page and reloads.</summary>
    private async Task OnPageChanged(int page)
    {
        currentPage = page;
        await LoadJobs();
    }

    /// <summary>Clears every filter and returns to the first page.</summary>
    private async Task ClearFilters()
    {
        selectedStatuses = new HashSet<JobStatus>();
        selectedJobType = null;
        currentPage = 1;
        await LoadJobs();
    }

    /// <summary>Multi-select status filter changed: apply and reload from page 1.</summary>
    private async Task OnStatusFilterChanged(IEnumerable<JobStatus> values)
    {
        selectedStatuses = values;
        currentPage = 1;
        await LoadJobs();
    }

    /// <summary>Job-type filter changed: apply and reload from page 1.</summary>
    private async Task OnJobTypeFilterChanged(JobType? value)
    {
        selectedJobType = value;
        currentPage = 1;
        await LoadJobs();
    }

    /// <summary>Clear button on the status filter.</summary>
    private async Task OnStatusClear()
    {
        selectedStatuses = new HashSet<JobStatus>();
        currentPage = 1;
        await LoadJobs();
    }

    /// <summary>Clear button on the job-type filter.</summary>
    private async Task OnJobTypeClear()
    {
        selectedJobType = null;
        currentPage = 1;
        await LoadJobs();
    }

    /// <summary>Left click: navigate to the job's detail page in this tab.</summary>
    private void OnRowClick(QueueJob job)
    {
        NavigationManager.NavigateTo($"/jobs/{job.Id}");
    }

    /// <summary>Right click: open the job's detail page in a new browser tab.</summary>
    private async Task OnRowRightClick(MouseEventArgs e, QueueJob job)
    {
        var url = NavigationManager.ToAbsoluteUri($"/jobs/{job.Id}").ToString();
        await JSRuntime.InvokeVoidAsync("open", url, "_blank");
    }

    /// <summary>
    /// Schedules jobs of the requested kind ("all" | "imports" | "processes")
    /// via the API and reloads the list on success.
    /// </summary>
    private async Task ScheduleJobs(string type)
    {
        isLoading = true;
        try
        {
            (bool success, int jobsCreated, string message) result = type switch
            {
                "all" => await JobService.ScheduleAllJobsAsync(),
                "imports" => await JobService.ScheduleImportJobsAsync(),
                "processes" => await JobService.ScheduleProcessJobsAsync(),
                _ => (false, 0, "Unknown job type")
            };

            if (result.success)
            {
                Snackbar.Add($"{result.message} ({result.jobsCreated} jobs created)", Severity.Success);
                await LoadJobs();
            }
            else
            {
                Snackbar.Add(result.message, Severity.Error);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine($"Scheduling jobs failed: {ex.Message}");
            Snackbar.Add($"Failed to schedule jobs: {ex.Message}", Severity.Error);
        }
        finally
        {
            isLoading = false;
        }
    }

    /// <summary>Maps a job status to the chip color shown in the table.</summary>
    private Color GetStatusColor(JobStatus status)
    {
        return status switch
        {
            JobStatus.Pending => Color.Default,
            JobStatus.Running => Color.Info,
            JobStatus.Completed => Color.Success,
            JobStatus.Failed => Color.Error,
            JobStatus.Retrying => Color.Warning,
            _ => Color.Default
        };
    }

    /// <summary>Maps a job type to its chip color.</summary>
    private Color GetJobTypeColor(JobType jobType)
    {
        return jobType switch
        {
            JobType.Import => Color.Primary,
            JobType.Process => Color.Secondary,
            _ => Color.Default
        };
    }

    /// <summary>Unsubscribes from SignalR so the handler cannot outlive the page.</summary>
    public void Dispose()
    {
        HubService.EntityChanged -= OnEntityChanged;
    }
}

View File

@@ -1,8 +0,0 @@
@page "/layers"
@using DiunaBI.UI.Shared.Components
<PageTitle>Layers</PageTitle>
<MudContainer MaxWidth="MaxWidth.ExtraExtraLarge">
<LayerListComponent />
</MudContainer>

View File

@@ -2,10 +2,7 @@
@using DiunaBI.UI.Shared.Services
@using DiunaBI.Application.DTOModels
@using MudBlazor
@inject LayerService LayerService
@inject JobService JobService
@inject NavigationManager NavigationManager
@inject ISnackbar Snackbar
@implements IDisposable
<MudCard>
<MudCardHeader>
@@ -62,7 +59,7 @@
}
</MudItem>
<MudItem xs="12" md="6">
<MudTextField Value="@layer.CreatedAt.ToString("g")"
<MudTextField Value="@DateTimeHelper.FormatDateTime(layer.CreatedAt, "yyyy-MM-dd HH:mm")"
Label="Created"
Variant="Variant.Outlined"
ReadOnly="true"
@@ -71,7 +68,7 @@
AdornmentText="@(layer.CreatedBy?.Username ?? "")"/>
</MudItem>
<MudItem xs="12" md="6">
<MudTextField Value="@layer.ModifiedAt.ToString("g")"
<MudTextField Value="@DateTimeHelper.FormatDateTime(layer.ModifiedAt, "yyyy-MM-dd HH:mm")"
Label="Modified"
Variant="Variant.Outlined"
ReadOnly="true"
@@ -166,22 +163,25 @@
}
</RowTemplate>
<FooterContent>
<MudTd><b>Value1 sum</b></MudTd>
@foreach (var column in displayedColumns)
@if (showSummary)
{
@if (column == "Value1")
<MudTd><b>@totalSum.ToString("N2")</b></MudTd>
@foreach (var column in displayedColumns)
{
<MudTd><b>@valueSum.ToString("N2")</b></MudTd>
@if (column.StartsWith("Value") && columnSums.ContainsKey(column))
{
<MudTd><b>@columnSums[column].ToString("N2")</b></MudTd>
}
else
{
<MudTd></MudTd>
}
}
else
@if (isEditable)
{
<MudTd></MudTd>
}
}
@if (isEditable)
{
<MudTd></MudTd>
}
</FooterContent>
</MudTable>
@@ -234,7 +234,9 @@
}
</MudTabPanel>
<MudTabPanel Text="History" Icon="@Icons.Material.Filled.History">
@if (showHistoryTab)
{
<MudTabPanel Text="History" Icon="@Icons.Material.Filled.History">
@if (isLoadingHistory)
{
<MudProgressLinear Color="Color.Primary" Indeterminate="true" />
@@ -315,7 +317,7 @@
<RowTemplate>
<MudTd DataLabel="Code">@context.Code</MudTd>
<MudTd DataLabel="Description">@context.Desc1</MudTd>
<MudTd DataLabel="Modified">@context.ModifiedAt.ToString("g")</MudTd>
<MudTd DataLabel="Modified">@DateTimeHelper.FormatDateTime(context.ModifiedAt, "yyyy-MM-dd HH:mm")</MudTd>
<MudTd DataLabel="Modified By">@GetModifiedByUsername(context.ModifiedById)</MudTd>
</RowTemplate>
</MudTable>
@@ -358,6 +360,7 @@
}
}
</MudTabPanel>
}
</MudTabs>
}
</MudCardContent>

View File

@@ -1,11 +1,12 @@
using DiunaBI.Application.DTOModels;
using DiunaBI.UI.Shared.Services;
using Microsoft.AspNetCore.Components;
using MudBlazor;
using System.Reflection;
namespace DiunaBI.UI.Shared.Pages;
namespace DiunaBI.UI.Shared.Pages.Layers;
public partial class LayerDetailPage : ComponentBase
public partial class Details : ComponentBase, IDisposable
{
[Parameter]
public Guid Id { get; set; }
@@ -13,16 +14,38 @@ public partial class LayerDetailPage : ComponentBase
[Inject]
private IDialogService DialogService { get; set; } = null!;
[Inject]
private LayerService LayerService { get; set; } = null!;
[Inject]
private JobService JobService { get; set; } = null!;
[Inject]
private EntityChangeHubService HubService { get; set; } = null!;
[Inject]
private NavigationManager NavigationManager { get; set; } = null!;
[Inject]
private ISnackbar Snackbar { get; set; } = null!;
[Inject]
private DateTimeHelper DateTimeHelper { get; set; } = null!;
private LayerDto? layer;
private List<RecordDto> records = new();
private List<string> displayedColumns = new();
private double valueSum = 0;
private Dictionary<string, double> columnSums = new();
private double totalSum = 0;
private bool isLoading = false;
private Guid? editingRecordId = null;
private RecordDto? editingRecord = null;
private bool isAddingNew = false;
private RecordDto newRecord = new();
private bool isEditable => layer?.Type == LayerType.Dictionary || layer?.Type == LayerType.Administration;
private bool showHistoryTab => layer?.Type == LayerType.Administration || layer?.Type == LayerType.Dictionary;
private bool showSummary => layer?.Type == LayerType.Import || layer?.Type == LayerType.Processed;
// History tab state
private bool isLoadingHistory = false;
@@ -35,7 +58,41 @@ public partial class LayerDetailPage : ComponentBase
protected override async Task OnInitializedAsync()
{
await DateTimeHelper.InitializeAsync();
await LoadLayer();
// Subscribe to SignalR entity changes
HubService.EntityChanged += OnEntityChanged;
}
private async void OnEntityChanged(string module, string id, string operation)
{
// React to Layers or Records changes for this layer
if (module.Equals("Layers", StringComparison.OrdinalIgnoreCase) ||
module.Equals("Records", StringComparison.OrdinalIgnoreCase))
{
// Check if it's this layer or its records that changed
if (Guid.TryParse(id, out var changedId))
{
if (module.Equals("Layers", StringComparison.OrdinalIgnoreCase) && changedId == Id)
{
await InvokeAsync(async () =>
{
await LoadLayer();
StateHasChanged();
});
}
else if (module.Equals("Records", StringComparison.OrdinalIgnoreCase))
{
// For records, we reload to get the latest data
await InvokeAsync(async () =>
{
await LoadLayer();
StateHasChanged();
});
}
}
}
}
protected override async Task OnParametersSetAsync()
@@ -64,9 +121,9 @@ public partial class LayerDetailPage : ComponentBase
if (layer != null && layer.Records != null)
{
records = layer.Records;
records = layer.Records.OrderBy(r => r.Code).ToList();
CalculateDisplayedColumns();
CalculateValueSum();
CalculateColumnSums();
BuildUserCache();
}
}
@@ -105,11 +162,25 @@ public partial class LayerDetailPage : ComponentBase
}
}
private void CalculateValueSum()
private void CalculateColumnSums()
{
valueSum = records
.Where(r => r.Value1.HasValue)
.Sum(r => r.Value1!.Value);
columnSums.Clear();
totalSum = 0;
// Calculate sum for each displayed value column
foreach (var columnName in displayedColumns.Where(c => c.StartsWith("Value")))
{
var sum = records
.Select(r => GetRecordValueByName(r, columnName))
.Where(v => v.HasValue)
.Sum(v => v!.Value);
columnSums[columnName] = sum;
totalSum += sum;
}
// Keep valueSum for backward compatibility (Value1 sum)
valueSum = columnSums.ContainsKey("Value1") ? columnSums["Value1"] : 0;
}
private string GetRecordValue(RecordDto record, string columnName)
@@ -233,7 +304,7 @@ public partial class LayerDetailPage : ComponentBase
{
records.Remove(record);
CalculateDisplayedColumns();
CalculateValueSum();
CalculateColumnSums();
Snackbar.Add("Record deleted successfully", Severity.Success);
}
else
@@ -284,7 +355,7 @@ public partial class LayerDetailPage : ComponentBase
{
records.Add(created);
CalculateDisplayedColumns();
CalculateValueSum();
CalculateColumnSums();
isAddingNew = false;
newRecord = new();
Snackbar.Add("Record added successfully", Severity.Success);
@@ -439,10 +510,10 @@ public partial class LayerDetailPage : ComponentBase
}
else
{
Snackbar.Add("Job created successfully!", Severity.Success);
Snackbar.Add("Job created successfully! Watch real-time status updates.", Severity.Success);
}
// Navigate to job detail page
// Navigate to job detail page to see real-time updates
NavigationManager.NavigateTo($"/jobs/{result.JobId}");
}
else
@@ -460,4 +531,9 @@ public partial class LayerDetailPage : ComponentBase
isRunningJob = false;
}
}
public void Dispose()
{
HubService.EntityChanged -= OnEntityChanged;
}
}

View File

@@ -1,5 +1,11 @@
@page "/layers"
@using MudBlazor.Internal
@using DiunaBI.Application.DTOModels
@implements IDisposable
<PageTitle>Layers</PageTitle>
<MudContainer MaxWidth="MaxWidth.ExtraExtraLarge">
<MudExpansionPanels Class="mb-4">
<MudExpansionPanel Icon="@Icons.Material.Filled.FilterList"
Text="Filters"
@@ -42,7 +48,7 @@
</MudExpansionPanels>
<MudDivider Class="my-4"></MudDivider>
<MudTable Items="layers.Items"
Dense="true"
Hover="true"
@@ -86,4 +92,5 @@
/>
</MudItem>
</MudGrid>
}
}
</MudContainer>

View File

@@ -6,11 +6,12 @@ using DiunaBI.Application.DTOModels.Common;
using MudBlazor;
using Microsoft.JSInterop;
namespace DiunaBI.UI.Shared.Components;
namespace DiunaBI.UI.Shared.Pages.Layers;
public partial class LayerListComponent : ComponentBase
public partial class Index : ComponentBase, IDisposable
{
[Inject] private LayerService LayerService { get; set; } = default!;
[Inject] private EntityChangeHubService HubService { get; set; } = default!;
[Inject] private ISnackbar Snackbar { get; set; } = default!;
[Inject] private NavigationManager NavigationManager { get; set; } = default!;
[Inject] private LayerFilterStateService FilterStateService { get; set; } = default!;
@@ -25,6 +26,22 @@ public partial class LayerListComponent : ComponentBase
{
filterRequest = FilterStateService.FilterRequest;
await LoadLayers();
// Subscribe to SignalR entity changes
HubService.EntityChanged += OnEntityChanged;
}
private async void OnEntityChanged(string module, string id, string operation)
{
// Only react if it's a Layers change
if (module.Equals("Layers", StringComparison.OrdinalIgnoreCase))
{
await InvokeAsync(async () =>
{
await LoadLayers();
StateHasChanged();
});
}
}
private async Task LoadLayers()
@@ -51,7 +68,7 @@ public partial class LayerListComponent : ComponentBase
filterRequest.Page = 1;
await LoadLayers();
}
private async Task OnPageChanged(int page)
{
filterRequest.Page = page;
@@ -89,4 +106,9 @@ public partial class LayerListComponent : ComponentBase
var url = NavigationManager.ToAbsoluteUri($"/layers/{layer.Id}").ToString();
await JSRuntime.InvokeVoidAsync("open", url, "_blank");
}
}
public void Dispose()
{
HubService.EntityChanged -= OnEntityChanged;
}
}

View File

@@ -3,4 +3,5 @@ namespace DiunaBI.UI.Shared.Services;
/// <summary>
/// Application-level UI configuration: the name shown in the app bar and the
/// client logo file referenced under _content/.../images/clients/.
/// </summary>
public class AppConfig
{
    /// <summary>Display name shown in the application bar.</summary>
    public string AppName { get; set; } = "DiunaBI";

    /// <summary>File name of the client logo (served from images/clients).</summary>
    public string ClientLogo { get; set; } = "pedrollopl.png";
}

View File

@@ -15,16 +15,18 @@ public class AuthService
{
private readonly HttpClient _httpClient;
private readonly IJSRuntime _jsRuntime;
private readonly TokenProvider _tokenProvider;
private bool? _isAuthenticated;
private UserInfo? _userInfo = null;
private string? _apiToken;
public event Action<bool>? AuthenticationStateChanged;
public AuthService(HttpClient httpClient, IJSRuntime jsRuntime)
public AuthService(HttpClient httpClient, IJSRuntime jsRuntime, TokenProvider tokenProvider)
{
_httpClient = httpClient;
_jsRuntime = jsRuntime;
_tokenProvider = tokenProvider;
}
public bool IsAuthenticated => _isAuthenticated ?? false;
@@ -44,6 +46,7 @@ public class AuthService
if (result != null)
{
_apiToken = result.Token;
_tokenProvider.Token = result.Token; // Set token for SignalR
_userInfo = new UserInfo
{
Id = result.Id,
@@ -104,6 +107,7 @@ public class AuthService
if (_isAuthenticated.Value && !string.IsNullOrEmpty(userInfoJson))
{
_apiToken = token;
_tokenProvider.Token = token; // Set token for SignalR
_userInfo = JsonSerializer.Deserialize<UserInfo>(userInfoJson);
// Restore header
@@ -111,14 +115,17 @@ public class AuthService
new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", _apiToken);
Console.WriteLine($"✅ Session restored: {_userInfo?.Email}");
// Notify that authentication state changed (for SignalR initialization)
AuthenticationStateChanged?.Invoke(true);
}
else
{
Console.WriteLine("❌ No valid session");
}
Console.WriteLine($"=== AuthService.CheckAuthenticationAsync END (authenticated={_isAuthenticated}) ===");
return _isAuthenticated.Value;
}
catch (Exception ex)
@@ -139,11 +146,12 @@ public class AuthService
await _jsRuntime.InvokeVoidAsync("localStorage.removeItem", "user_info");
_apiToken = null;
_tokenProvider.Token = null; // Clear token for SignalR
_isAuthenticated = false;
_userInfo = null;
_httpClient.DefaultRequestHeaders.Authorization = null;
Console.WriteLine("✅ Authentication cleared");
AuthenticationStateChanged?.Invoke(false);
}

View File

@@ -0,0 +1,80 @@
using Microsoft.JSInterop;
namespace DiunaBI.UI.Shared.Services;
/// <summary>
/// Formats UTC timestamps in the browser user's time zone. The zone is
/// resolved once via JS interop (the Intl API); when resolution fails the
/// helper falls back to the server's local zone.
/// </summary>
public class DateTimeHelper
{
    private readonly IJSRuntime _jsRuntime;
    // Resolved user time zone; null until InitializeAsync runs (or on fallback).
    private TimeZoneInfo? _userTimeZone;
    private bool _initialized = false;

    public DateTimeHelper(IJSRuntime jsRuntime)
    {
        _jsRuntime = jsRuntime;
    }

    /// <summary>
    /// Resolves the browser's IANA time-zone id via JS interop and maps it to
    /// a <see cref="TimeZoneInfo"/>. Idempotent: subsequent calls are no-ops.
    /// </summary>
    public async Task InitializeAsync()
    {
        if (_initialized) return;
        try
        {
            // Get the user's timezone from JavaScript
            var timeZoneId = await _jsRuntime.InvokeAsync<string>("eval", "Intl.DateTimeFormat().resolvedOptions().timeZone");
            // Try to find the TimeZoneInfo
            try
            {
                _userTimeZone = TimeZoneInfo.FindSystemTimeZoneById(timeZoneId);
            }
            catch
            {
                // Fallback to local timezone if the IANA timezone ID is not found
                _userTimeZone = TimeZoneInfo.Local;
            }
        }
        catch
        {
            // Fallback to local timezone if JavaScript interop fails
            // (e.g. during prerendering, before interop is available).
            _userTimeZone = TimeZoneInfo.Local;
        }
        _initialized = true;
    }

    /// <summary>
    /// Formats a UTC timestamp in the user's time zone; returns "-" for null.
    /// Before initialization the value is formatted as-is (i.e. still UTC).
    /// </summary>
    public string FormatDateTime(DateTime? dateTime, string format = "yyyy-MM-dd HH:mm:ss")
    {
        if (!dateTime.HasValue)
            return "-";
        if (!_initialized)
        {
            // If not initialized yet, just format as-is (will be UTC)
            return dateTime.Value.ToString(format);
        }
        // ConvertTimeFromUtc throws ArgumentException for DateTimeKind.Local
        // input. Callers are expected to pass UTC, so stamping Utc is a no-op
        // for Utc/Unspecified values and makes an accidental Local value safe
        // instead of crashing the render.
        var utcValue = DateTime.SpecifyKind(dateTime.Value, DateTimeKind.Utc);
        // Convert UTC to user's timezone
        var localDateTime = TimeZoneInfo.ConvertTimeFromUtc(utcValue, _userTimeZone ?? TimeZoneInfo.Local);
        return localDateTime.ToString(format);
    }

    /// <summary>Date-only convenience wrapper around <see cref="FormatDateTime"/>.</summary>
    public string FormatDate(DateTime? dateTime, string format = "yyyy-MM-dd")
    {
        return FormatDateTime(dateTime, format);
    }

    /// <summary>Time-only convenience wrapper around <see cref="FormatDateTime"/>.</summary>
    public string FormatTime(DateTime? dateTime, string format = "HH:mm:ss")
    {
        return FormatDateTime(dateTime, format);
    }

    /// <summary>
    /// Returns the user zone's daylight or standard display name, or "UTC"
    /// before initialization.
    /// </summary>
    public string GetTimeZoneAbbreviation()
    {
        if (!_initialized || _userTimeZone == null)
            return "UTC";
        // DateTime.Now has Kind == Local; per the TimeZoneInfo docs,
        // IsDaylightSavingTime converts Local-kind values into this zone
        // before evaluating, so this reflects the user's zone.
        return _userTimeZone.IsDaylightSavingTime(DateTime.Now)
            ? _userTimeZone.DaylightName
            : _userTimeZone.StandardName;
    }
}

View File

@@ -0,0 +1,208 @@
using Microsoft.AspNetCore.SignalR.Client;
using Microsoft.Extensions.Logging;
namespace DiunaBI.UI.Shared.Services;
/// <summary>
/// Client-side SignalR service that connects to the API's
/// "/hubs/entitychanges" hub and re-publishes server entity-change
/// notifications to UI components via the <see cref="EntityChanged"/> event.
/// Initialization is lazy and guarded so it runs at most once per instance.
/// </summary>
public class EntityChangeHubService : IAsyncDisposable
{
    private readonly string _hubUrl;
    private readonly ILogger<EntityChangeHubService> _logger;
    private readonly TokenProvider _tokenProvider;
    private HubConnection? _hubConnection;
    // Set once InitializeAsync has built (and attempted to start) the
    // connection; protected by _initializationLock (double-checked below).
    private bool _isInitialized;
    private readonly SemaphoreSlim _initializationLock = new SemaphoreSlim(1, 1);
    // Diagnostic counter to spot duplicate instances (e.g. DI scope issues).
    private static int _instanceCounter = 0;
    private readonly int _instanceId;

    // Events that components can subscribe to
    // Arguments: (module, id, operation) as delivered by the server.
    public event Action<string, string, string>? EntityChanged;

    /// <summary>
    /// Derives the hub URL from the API base URL. The connection itself is
    /// created lazily in <see cref="InitializeAsync"/>.
    /// NOTE(review): <paramref name="serviceProvider"/> is accepted but never
    /// stored or used — presumably kept for DI registration compatibility;
    /// confirm before removing.
    /// </summary>
    public EntityChangeHubService(
        string apiBaseUrl,
        IServiceProvider serviceProvider,
        ILogger<EntityChangeHubService> logger,
        TokenProvider tokenProvider)
    {
        _instanceId = Interlocked.Increment(ref _instanceCounter);
        // Convert HTTP URL to SignalR hub URL
        var baseUrl = apiBaseUrl.TrimEnd('/');
        _hubUrl = baseUrl + "/hubs/entitychanges";
        _logger = logger;
        _tokenProvider = tokenProvider;
        _logger.LogInformation("🏗️ EntityChangeHubService instance #{InstanceId} created. Hub URL: {HubUrl}", _instanceId, _hubUrl);
        Console.WriteLine($"🏗️ EntityChangeHubService instance #{_instanceId} created. Hub URL: {_hubUrl}, _isInitialized = {_isInitialized}");
    }

    /// <summary>
    /// Builds the hub connection (attaching the JWT from TokenProvider when
    /// available), wires up message and lifecycle handlers, and starts the
    /// connection. Safe to call concurrently: a semaphore plus double-checked
    /// _isInitialized flag ensure only the first caller does the work.
    /// </summary>
    public async Task InitializeAsync()
    {
        _logger.LogInformation("🔍 Instance #{InstanceId} InitializeAsync called. _isInitialized = {IsInitialized}, _hubConnection null? {IsNull}", _instanceId, _isInitialized, _hubConnection == null);
        Console.WriteLine($"🔍 Instance #{_instanceId} InitializeAsync called. _isInitialized = {_isInitialized}, _hubConnection null? {_hubConnection == null}");
        // Fast path: skip without taking the lock when already initialized.
        if (_isInitialized)
        {
            _logger.LogInformation("⏭️ Instance #{InstanceId} SignalR already initialized, skipping", _instanceId);
            Console.WriteLine($"⏭️ Instance #{_instanceId} SignalR already initialized, skipping");
            return;
        }
        await _initializationLock.WaitAsync();
        try
        {
            // Double-check after acquiring lock
            if (_isInitialized)
            {
                Console.WriteLine($"⏭️ SignalR already initialized (after lock), skipping");
                return;
            }
            _logger.LogInformation("🔌 Initializing SignalR connection to {HubUrl}", _hubUrl);
            Console.WriteLine($"🔌 Initializing SignalR connection to {_hubUrl}");
            _hubConnection = new HubConnectionBuilder()
                .WithUrl(_hubUrl, options =>
                {
                    // Add JWT token to SignalR connection
                    if (!string.IsNullOrEmpty(_tokenProvider.Token))
                    {
                        // NOTE: the provider captures _tokenProvider, so a token
                        // refreshed later is picked up on reconnect attempts.
                        options.AccessTokenProvider = () => Task.FromResult<string?>(_tokenProvider.Token);
                        _logger.LogInformation("✅ JWT token added to SignalR connection");
                        Console.WriteLine($"✅ JWT token added to SignalR connection");
                    }
                    else
                    {
                        _logger.LogWarning("⚠️ No JWT token available for SignalR connection");
                        Console.WriteLine($"⚠️ No JWT token available for SignalR connection");
                    }
                })
                .WithAutomaticReconnect()
                .Build();
            // Subscribe to EntityChanged messages
            // The payload arrives as an anonymous object; round-trip it through
            // JSON to map it onto the strongly-typed EntityChangeMessage.
            _hubConnection.On<object>("EntityChanged", (data) =>
            {
                Console.WriteLine($"🔔 RAW SignalR message received at {DateTime.Now:HH:mm:ss.fff}");
                Console.WriteLine($"🔔 Data type: {data?.GetType().FullName}");
                try
                {
                    // Parse the anonymous object
                    var json = System.Text.Json.JsonSerializer.Serialize(data);
                    Console.WriteLine($"📨 Received SignalR message: {json}");
                    // Use case-insensitive deserialization (backend sends camelCase: module, id, operation)
                    var options = new System.Text.Json.JsonSerializerOptions
                    {
                        PropertyNameCaseInsensitive = true
                    };
                    var change = System.Text.Json.JsonSerializer.Deserialize<EntityChangeMessage>(json, options);
                    if (change != null)
                    {
                        _logger.LogInformation("📨 Received entity change: {Module} {Id} {Operation}",
                            change.Module, change.Id, change.Operation);
                        Console.WriteLine($"📨 Entity change: {change.Module} {change.Id} {change.Operation}");
                        // Notify all subscribers
                        Console.WriteLine($"🔔 Invoking EntityChanged event, subscribers: {EntityChanged?.GetInvocationList().Length ?? 0}");
                        EntityChanged?.Invoke(change.Module, change.Id, change.Operation);
                        Console.WriteLine($"🔔 EntityChanged event invoked successfully");
                    }
                    else
                    {
                        Console.WriteLine($"⚠️ Deserialized change is null");
                    }
                }
                catch (Exception ex)
                {
                    // Swallow after logging: a bad message must not kill the handler.
                    _logger.LogError(ex, "❌ Error processing entity change message");
                    Console.WriteLine($"❌ Error processing message: {ex.Message}");
                    Console.WriteLine($"❌ Stack trace: {ex.StackTrace}");
                }
            });
            // Lifecycle logging; reconnection itself is handled by
            // WithAutomaticReconnect() above.
            _hubConnection.Reconnecting += (error) =>
            {
                _logger.LogWarning("SignalR reconnecting: {Error}", error?.Message);
                Console.WriteLine($"⚠️ SignalR reconnecting: {error?.Message}");
                return Task.CompletedTask;
            };
            _hubConnection.Reconnected += (connectionId) =>
            {
                _logger.LogInformation("✅ SignalR reconnected: {ConnectionId}", connectionId);
                Console.WriteLine($"✅ SignalR reconnected: {connectionId}");
                return Task.CompletedTask;
            };
            _hubConnection.Closed += (error) =>
            {
                // NOTE(review): no restart is attempted here once automatic
                // reconnect gives up, and _isInitialized stays true — confirm
                // this is the intended terminal behavior.
                _logger.LogError(error, "❌ SignalR connection closed");
                Console.WriteLine($"❌ SignalR connection closed: {error?.Message}");
                return Task.CompletedTask;
            };
            await StartConnectionAsync();
            // NOTE(review): StartConnectionAsync catches its own failures, so
            // _isInitialized becomes true even if the connection never opened;
            // a later InitializeAsync call will then skip. Verify intended.
            _isInitialized = true;
            _logger.LogInformation("✅ Instance #{InstanceId} _isInitialized set to true", _instanceId);
            Console.WriteLine($"✅ Instance #{_instanceId} _isInitialized set to true");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "❌ Instance #{InstanceId} Failed to initialize SignalR connection", _instanceId);
            Console.WriteLine($"❌ Instance #{_instanceId} Failed to initialize SignalR: {ex.Message}");
        }
        finally
        {
            _initializationLock.Release();
        }
    }

    /// <summary>
    /// Starts the already-built hub connection; failures are logged and
    /// swallowed (callers are not expected to handle connection errors).
    /// </summary>
    private async Task StartConnectionAsync()
    {
        if (_hubConnection == null)
        {
            _logger.LogWarning("Hub connection is null, cannot start");
            return;
        }
        try
        {
            Console.WriteLine($"🔌 Starting SignalR connection...");
            await _hubConnection.StartAsync();
            _logger.LogInformation("✅ SignalR connected successfully");
            Console.WriteLine($"✅ SignalR connected successfully to {_hubUrl}");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "❌ Failed to start SignalR connection");
            Console.WriteLine($"❌ Failed to start SignalR: {ex.Message}\n{ex.StackTrace}");
        }
    }

    /// <summary>
    /// Stops and disposes the hub connection (best effort) and releases the
    /// initialization semaphore.
    /// </summary>
    public async ValueTask DisposeAsync()
    {
        if (_hubConnection != null)
        {
            try
            {
                await _hubConnection.StopAsync();
                await _hubConnection.DisposeAsync();
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error disposing SignalR connection");
            }
        }
        _initializationLock?.Dispose();
    }
}
/// <summary>
/// Wire-format DTO for hub "EntityChanged" notifications. The backend sends
/// camelCase property names (module, id, operation); deserialization is
/// case-insensitive on the receiving side.
/// </summary>
public class EntityChangeMessage
{
    // Module the change belongs to (e.g. "Layers").
    public string Module { get; set; } = string.Empty;
    // Identifier of the changed entity.
    public string Id { get; set; } = string.Empty;
    // Operation performed; exact values are defined by the server — verify there.
    public string Operation { get; set; } = string.Empty;
}

View File

@@ -19,13 +19,18 @@ public class JobService
PropertyNameCaseInsensitive = true
};
public async Task<PagedResult<QueueJob>> GetJobsAsync(int page = 1, int pageSize = 50, JobStatus? status = null, JobType? jobType = null, Guid? layerId = null)
public async Task<PagedResult<QueueJob>> GetJobsAsync(int page = 1, int pageSize = 50, List<JobStatus>? statuses = null, JobType? jobType = null, Guid? layerId = null)
{
var start = (page - 1) * pageSize;
var query = $"Jobs?start={start}&limit={pageSize}";
if (status.HasValue)
query += $"&status={(int)status.Value}";
if (statuses != null && statuses.Count > 0)
{
foreach (var status in statuses)
{
query += $"&statuses={(int)status}";
}
}
if (jobType.HasValue)
query += $"&jobType={(int)jobType.Value}";
@@ -83,6 +88,89 @@ public class JobService
return await response.Content.ReadFromJsonAsync<CreateJobResult>();
}
/// <summary>
/// Asks the API to schedule all jobs, optionally filtered by name.
/// </summary>
/// <param name="nameFilter">Optional filter forwarded to the API; null/empty schedules everything.</param>
/// <returns>(success flag, number of jobs created, human-readable message).</returns>
public async Task<(bool success, int jobsCreated, string message)> ScheduleAllJobsAsync(string? nameFilter = null)
{
    try
    {
        var query = string.IsNullOrEmpty(nameFilter) ? "" : $"?nameFilter={Uri.EscapeDataString(nameFilter)}";
        var response = await _httpClient.PostAsync($"Jobs/ui/schedule{query}", null);
        if (!response.IsSuccessStatusCode)
        {
            var error = await response.Content.ReadAsStringAsync();
            return (false, 0, $"Failed to schedule jobs: {error}");
        }
        var json = await response.Content.ReadAsStringAsync();
        var result = JsonSerializer.Deserialize<JsonElement>(json, _jsonOptions);
        // TryGetProperty: a success response missing an expected field should
        // degrade to defaults, not throw KeyNotFoundException and be reported
        // as a failure by the catch below.
        var jobsCreated = result.TryGetProperty("jobsCreated", out var createdProp)
            ? createdProp.GetInt32()
            : 0;
        var message = result.TryGetProperty("message", out var messageProp)
            ? messageProp.GetString() ?? "Jobs scheduled"
            : "Jobs scheduled";
        return (true, jobsCreated, message);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Scheduling jobs failed: {ex.Message}");
        return (false, 0, $"Error: {ex.Message}");
    }
}
/// <summary>
/// Asks the API to schedule import jobs only, optionally filtered by name.
/// </summary>
/// <param name="nameFilter">Optional filter forwarded to the API; null/empty schedules all imports.</param>
/// <returns>(success flag, number of jobs created, human-readable message).</returns>
public async Task<(bool success, int jobsCreated, string message)> ScheduleImportJobsAsync(string? nameFilter = null)
{
    try
    {
        var query = string.IsNullOrEmpty(nameFilter) ? "" : $"?nameFilter={Uri.EscapeDataString(nameFilter)}";
        var response = await _httpClient.PostAsync($"Jobs/ui/schedule/imports{query}", null);
        if (!response.IsSuccessStatusCode)
        {
            var error = await response.Content.ReadAsStringAsync();
            return (false, 0, $"Failed to schedule import jobs: {error}");
        }
        var json = await response.Content.ReadAsStringAsync();
        var result = JsonSerializer.Deserialize<JsonElement>(json, _jsonOptions);
        // TryGetProperty: a success response missing an expected field should
        // degrade to defaults, not throw KeyNotFoundException and be reported
        // as a failure by the catch below.
        var jobsCreated = result.TryGetProperty("jobsCreated", out var createdProp)
            ? createdProp.GetInt32()
            : 0;
        var message = result.TryGetProperty("message", out var messageProp)
            ? messageProp.GetString() ?? "Import jobs scheduled"
            : "Import jobs scheduled";
        return (true, jobsCreated, message);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Scheduling import jobs failed: {ex.Message}");
        return (false, 0, $"Error: {ex.Message}");
    }
}
/// <summary>
/// Asks the API to schedule process jobs (no filter supported by this endpoint).
/// </summary>
/// <returns>(success flag, number of jobs created, human-readable message).</returns>
public async Task<(bool success, int jobsCreated, string message)> ScheduleProcessJobsAsync()
{
    try
    {
        var response = await _httpClient.PostAsync("Jobs/ui/schedule/processes", null);
        if (!response.IsSuccessStatusCode)
        {
            var error = await response.Content.ReadAsStringAsync();
            return (false, 0, $"Failed to schedule process jobs: {error}");
        }
        var json = await response.Content.ReadAsStringAsync();
        var result = JsonSerializer.Deserialize<JsonElement>(json, _jsonOptions);
        // TryGetProperty: a success response missing an expected field should
        // degrade to defaults, not throw KeyNotFoundException and be reported
        // as a failure by the catch below.
        var jobsCreated = result.TryGetProperty("jobsCreated", out var createdProp)
            ? createdProp.GetInt32()
            : 0;
        var message = result.TryGetProperty("message", out var messageProp)
            ? messageProp.GetString() ?? "Process jobs scheduled"
            : "Process jobs scheduled";
        return (true, jobsCreated, message);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Scheduling process jobs failed: {ex.Message}");
        return (false, 0, $"Error: {ex.Message}");
    }
}
}
public class JobStats

View File

@@ -8,5 +8,7 @@
@using Microsoft.JSInterop
@using DiunaBI.UI.Shared
@using DiunaBI.UI.Shared.Components
@using DiunaBI.UI.Shared.Components.Layout
@using DiunaBI.UI.Shared.Components.Auth
@using DiunaBI.Application.DTOModels
@using MudBlazor

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

View File

@@ -17,7 +17,7 @@
</head>
<body>
<DiunaBI.UI.Shared.Components.Routes @rendermode="InteractiveServer" />
<DiunaBI.UI.Shared.Components.Layout.Routes @rendermode="InteractiveServer" />
<div id="blazor-error-ui">
@@ -31,9 +31,24 @@
<a class="dismiss">🗙</a>
</div>
<div id="components-reconnect-modal" data-nosnippet>
<div class="reconnect-content">
<div class="reconnect-spinner"></div>
<h5>Connection Lost</h5>
<div class="reconnect-message">
Attempting to reconnect to the server...
</div>
<div class="reconnect-timer">
<span id="reconnect-elapsed-time">0s</span>
</div>
<button onclick="location.reload()">Reload Page</button>
</div>
</div>
<script src="_framework/blazor.web.js"></script>
<script src="_content/MudBlazor/MudBlazor.min.js"></script>
<script src="_content/DiunaBI.UI.Shared/js/auth.js"></script>
<script src="js/reconnect.js"></script>
</body>
</html>

View File

@@ -1,4 +1,5 @@
using DiunaBI.UI.Shared;
using DiunaBI.UI.Shared.Components.Layout;
using DiunaBI.UI.Shared.Extensions;
using DiunaBI.UI.Shared.Services;
using DiunaBI.UI.Web.Components;
@@ -16,9 +17,6 @@ builder.Services.AddSharedServices(apiBaseUrl);
// Configure App settings
var appConfig = builder.Configuration.GetSection("App").Get<AppConfig>() ?? new AppConfig();
Console.WriteLine($"[DEBUG] AppConfig.AppName from config: {appConfig.AppName}");
Console.WriteLine($"[DEBUG] App:AppName from Configuration: {builder.Configuration["App:AppName"]}");
Console.WriteLine($"[DEBUG] App__AppName env var: {Environment.GetEnvironmentVariable("App__AppName")}");
builder.Services.AddSingleton(appConfig);
builder.Services.AddScoped<IGoogleAuthService, WebGoogleAuthService>();

View File

@@ -58,3 +58,93 @@ h1:focus {
/* Hide MudBlazor pagination list bullets.
   `display` is not among the properties that apply to the ::marker
   pseudo-element, so the previous `::marker { display: none; }` had no
   effect; suppress the marker via list-style on the item and content on
   the marker itself. */
.mud-pagination li {
    list-style: none;
}
.mud-pagination li::marker {
    content: none;
}
/* Blazor Server Reconnection UI Customization */

/* Full-screen dimmed overlay. Hidden by default; !important keeps it hidden
   against competing display rules until a reconnect state class is applied. */
#components-reconnect-modal {
    position: fixed;
    top: 0;
    left: 0;
    right: 0;
    bottom: 0;
    background: rgba(0, 0, 0, 0.5);
    backdrop-filter: blur(4px);
    z-index: 9999;
    font-family: 'Roboto', 'Helvetica Neue', Helvetica, Arial, sans-serif;
    display: none !important;
    align-items: center;
    justify-content: center;
}

/* Show modal when Blazor applies these classes */
#components-reconnect-modal.components-reconnect-show,
#components-reconnect-modal.components-reconnect-failed,
#components-reconnect-modal.components-reconnect-rejected {
    display: flex !important;
}

/* White card centered inside the overlay. */
#components-reconnect-modal .reconnect-content {
    background: white;
    border-radius: 8px;
    padding: 32px;
    box-shadow: 0 8px 32px rgba(0, 0, 0, 0.2);
    max-width: 400px;
    text-align: center;
}

#components-reconnect-modal h5 {
    margin: 0 0 16px 0;
    color: #424242;
    font-size: 20px;
    font-weight: 500;
}

#components-reconnect-modal .reconnect-message {
    color: #666;
    margin-bottom: 24px;
    font-size: 14px;
    line-height: 1.5;
}

/* Indeterminate spinner; #e7163d is the accent red reused by the timer
   and button below. */
#components-reconnect-modal .reconnect-spinner {
    width: 48px;
    height: 48px;
    border: 4px solid #f3f3f3;
    border-top: 4px solid #e7163d;
    border-radius: 50%;
    animation: spin 1s linear infinite;
    margin: 0 auto 16px;
}

@keyframes spin {
    0% { transform: rotate(0deg); }
    100% { transform: rotate(360deg); }
}

/* Elapsed-time readout updated by reconnect.js (#reconnect-elapsed-time). */
#components-reconnect-modal .reconnect-timer {
    color: #e7163d;
    font-size: 16px;
    font-weight: 500;
    margin-bottom: 16px;
}

/* "Reload Page" action button. */
#components-reconnect-modal button {
    background-color: #e7163d;
    color: white;
    border: none;
    border-radius: 4px;
    padding: 10px 24px;
    font-size: 14px;
    font-weight: 500;
    cursor: pointer;
    transition: background-color 0.2s;
    text-transform: uppercase;
    letter-spacing: 0.5px;
}

#components-reconnect-modal button:hover {
    background-color: #c01234;
}

#components-reconnect-modal button:active {
    background-color: #a01028;
}

View File

@@ -0,0 +1,82 @@
// Blazor Server Reconnection Timer
//
// Watches #components-reconnect-modal, which Blazor toggles via CSS classes,
// and drives the elapsed-time display inside it:
//   components-reconnect-show     -> reconnect attempt running: tick every second
//   components-reconnect-failed / -rejected
//                                 -> modal still visible (see site CSS): stop
//                                    ticking but KEEP the elapsed time shown
//   (no reconnect class)          -> connected again: reset display to "0s"
//
// Fix over the previous version: on failed/rejected the display was reset to
// "0s" while the modal stayed on screen, hiding how long the attempt ran.
(function () {
  let reconnectTimer = null;
  let startTime = null;

  // Write `text` into the timer element if it exists.
  function setDisplay(text) {
    const timerElement = document.getElementById('reconnect-elapsed-time');
    if (timerElement) {
      timerElement.textContent = text;
    }
  }

  function startTimer() {
    if (reconnectTimer) return; // Already running
    console.log('Blazor reconnection started, timer running...');
    startTime = Date.now();
    reconnectTimer = setInterval(() => {
      const elapsedSeconds = Math.floor((Date.now() - startTime) / 1000);
      setDisplay(`${elapsedSeconds}s`);
    }, 1000);
  }

  // Stop ticking; reset the visible counter only when `reset` is true.
  function stopTimer(reset) {
    if (reconnectTimer) {
      console.log('Blazor reconnection ended, stopping timer');
      clearInterval(reconnectTimer);
      reconnectTimer = null;
    }
    if (reset) {
      setDisplay('0s');
    }
  }

  function checkReconnectionState() {
    const modal = document.getElementById('components-reconnect-modal');
    if (!modal) return;
    if (modal.classList.contains('components-reconnect-show')) {
      startTimer();
    } else if (
      modal.classList.contains('components-reconnect-failed') ||
      modal.classList.contains('components-reconnect-rejected')
    ) {
      // Modal remains visible in these states: freeze the elapsed time.
      stopTimer(false);
    } else {
      // Reconnected (or modal hidden): reset for the next outage.
      stopTimer(true);
    }
  }

  // MutationObserver to watch for class changes on the modal
  const observer = new MutationObserver((mutations) => {
    for (const mutation of mutations) {
      if (mutation.type === 'attributes' && mutation.attributeName === 'class') {
        checkReconnectionState();
      }
    }
  });

  // Start observing once the modal element exists; retry while the DOM is
  // still being set up.
  function init() {
    const modal = document.getElementById('components-reconnect-modal');
    if (modal) {
      observer.observe(modal, {
        attributes: true,
        attributeFilter: ['class'],
      });
      // Check initial state
      checkReconnectionState();
      console.log('Blazor reconnection timer initialized');
    } else {
      console.warn('components-reconnect-modal not found, retrying...');
      setTimeout(init, 100);
    }
  }

  if (document.readyState === 'loading') {
    document.addEventListener('DOMContentLoaded', init);
  } else {
    init();
  }
})();