diff --git a/DevTools/PedrolloImport.sql b/DevTools/PedrolloImport.sql
new file mode 100644
index 0000000..f487c90
--- /dev/null
+++ b/DevTools/PedrolloImport.sql
@@ -0,0 +1,99 @@
+DECLARE @JustForDebug TINYINT = 0;
+
+-- SETUP VARIABLES
+DECLARE @Year INT = 2024;
+DECLARE @Type NVARCHAR(5) = 'P2';
+DECLARE @StartDate NVARCHAR(10) = '2025.01.02';
+DECLARE @EndDate NVARCHAR(10) = '2026.12.31';
+
+
+DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
+DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
+DECLARE @Name NVARCHAR(50) = CONCAT(
+    'L', @Number, '-A-IW_', @Type, '-', @Year, '-', @CurrentTimestamp
+);
+DECLARE @Plugin NVARCHAR(100);
+SET @Plugin =
+    CASE @Type
+        WHEN 'P2' THEN 'PedrolloPL.Import.P2'
+        ELSE NULL -- If @Type doesn't match, set it to NULL
+    END;
+
+DECLARE @DataInboxName NVARCHAR(100);
+SET @DataInboxName =
+    CASE @Type
+        WHEN 'P2' THEN 'P2_2024'
+        ELSE NULL -- If @Type doesn't match, set it to NULL
+    END;
+
+DECLARE @DataInboxSource NVARCHAR(100);
+SET @DataInboxSource =
+    CASE @Type
+        WHEN 'P2' THEN 'Comarch'
+        ELSE NULL -- If @Type doesn't match, set it to NULL
+    END;
+
+
+DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
+
+SELECT @Name AS Name, @StartDate AS StartDate, @EndDate AS EndDate, @Type AS Type, @Year AS Year, @Plugin AS Plugin,
+       @DataInboxName AS DataInboxName, @DataInboxSource AS DataInboxSource;
+
+IF @JustForDebug = 1
+BEGIN
+    SELECT 'Just for debug' AS Logger;
+    RETURN;
+END;
+
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
+([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
+VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'StartDate', @StartDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'EndDate', @EndDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'Source', 'DataInbox', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'ImportName', @Type, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'ImportYear', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'Type', 'ImportWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'Plugin', @Plugin, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'DataInboxName', @DataInboxName, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'DataInboxSource', @DataInboxSource, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'Priority', '10', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
+
+INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
+([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
+VALUES (NEWID(), 'MaxRetries', '3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
\ No newline at end of file
diff --git a/DevTools/RESTRequests/test.rest b/DevTools/RESTRequests/test.rest
index 684230f..7c63780 100644
--- a/DevTools/RESTRequests/test.rest
+++ b/DevTools/RESTRequests/test.rest
@@ -1,9 +1 @@
-PUT https://pedrollopl.diunabi.com/api/DataInbox/Add/8kL2mN4pQ6rojshf8704i34p4eim1hs
-Content-Type: application/json
-Authorization: Basic cGVkcm9sbG9wbDo0MjU4dlc2eFk4TjRwUQ==
-
-{
-    "Source": "morska.import",
-    "Name": "morska.d3.importer",
-    "Data": "eyJrZXkiOiAidmFsdWUifQ=="
-}
\ No newline at end of file
+POST http://localhost:5400/jobs/schedule/10763478CB738D4ecb2h76g803478CB738D4e
\ No newline at end of file
diff --git a/DiunaBI.API/Controllers/JobsController.cs b/DiunaBI.API/Controllers/JobsController.cs
index a4de76d..e95fad0 100644
--- a/DiunaBI.API/Controllers/JobsController.cs
+++ b/DiunaBI.API/Controllers/JobsController.cs
@@ -320,4 +320,116 @@ public class JobsController : Controller
             return BadRequest(ex.ToString());
         }
     }
+
+    [HttpPost]
+    [Route("create-for-layer/{layerId:guid}")]
+    public async Task<IActionResult> CreateJobForLayer(Guid layerId)
+    {
+        try
+        {
+            var layer = await _db.Layers
+                .Include(x => x.Records)
+                .FirstOrDefaultAsync(l => l.Id == layerId);
+
+            if (layer == null)
+            {
+                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} not found", layerId);
+                return NotFound($"Layer {layerId} not found");
+            }
+
+            if (layer.Type != LayerType.Administration)
+            {
+                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not an Administration layer", layerId);
+                return BadRequest("Only Administration layers can be run as jobs");
+            }
+
+            // Get the Type record to determine if it's ImportWorker or ProcessWorker
+            var typeRecord = layer.Records?.FirstOrDefault(x => x.Code == "Type");
+            if (typeRecord?.Desc1 != "ImportWorker" && typeRecord?.Desc1 != "ProcessWorker")
+            {
+                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not a valid worker type", layerId);
+                return BadRequest("Layer must be an ImportWorker or ProcessWorker");
+            }
+
+            // Check if enabled
+            var isEnabledRecord = layer.Records?.FirstOrDefault(x => x.Code == "IsEnabled");
+            if (isEnabledRecord?.Desc1 != "True")
+            {
+                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not enabled", layerId);
+                return BadRequest("Layer is not enabled");
+            }
+
+            // Get plugin name
+            var pluginRecord = layer.Records?.FirstOrDefault(x => x.Code == "Plugin");
+            if (string.IsNullOrEmpty(pluginRecord?.Desc1))
+            {
+                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} has no Plugin configured", layerId);
+                return BadRequest("Layer has no Plugin configured");
+            }
+
+            // Get priority and max retries
+            var priorityRecord = layer.Records?.FirstOrDefault(x => x.Code == "Priority");
+            var maxRetriesRecord = layer.Records?.FirstOrDefault(x => x.Code == "MaxRetries");
+
+            var priority = int.TryParse(priorityRecord?.Desc1, out var p) ? p : 0;
+            var maxRetries = int.TryParse(maxRetriesRecord?.Desc1, out var m) ? m : 3;
+
+            var jobType = typeRecord.Desc1 == "ImportWorker" ? JobType.Import : JobType.Process;
+
+            // Check if there's already a pending/running job for this layer
+            var existingJob = await _db.QueueJobs
+                .Where(j => j.LayerId == layer.Id &&
+                            (j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
+                .FirstOrDefaultAsync();
+
+            if (existingJob != null)
+            {
+                _logger.LogInformation("CreateJobForLayer: Job already exists for layer {LayerId}, returning existing job", layerId);
+                return Ok(new
+                {
+                    success = true,
+                    jobId = existingJob.Id,
+                    message = "Job already exists for this layer",
+                    existing = true
+                });
+            }
+
+            // Create the job
+            var job = new QueueJob
+            {
+                Id = Guid.NewGuid(),
+                LayerId = layer.Id,
+                LayerName = layer.Name ?? "Unknown",
+                PluginName = pluginRecord.Desc1,
+                JobType = jobType,
+                Priority = priority,
+                MaxRetries = maxRetries,
+                Status = JobStatus.Pending,
+                CreatedAt = DateTime.UtcNow,
+                CreatedAtUtc = DateTime.UtcNow,
+                ModifiedAtUtc = DateTime.UtcNow,
+                CreatedById = Guid.Empty,
+                ModifiedById = Guid.Empty
+            };
+
+            _db.QueueJobs.Add(job);
+            await _db.SaveChangesAsync();
+
+            _logger.LogInformation("CreateJobForLayer: Created job {JobId} for layer {LayerName} ({LayerId})",
+                job.Id, layer.Name, layerId);
+
+            return Ok(new
+            {
+                success = true,
+                jobId = job.Id,
+                message = "Job created successfully",
+                existing = false
+            });
+        }
+        catch (Exception ex)
+        {
+            _logger.LogError(ex, "CreateJobForLayer: Error creating job for layer {LayerId}", layerId);
+            return BadRequest(ex.ToString());
+        }
+    }
 }
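A quick way to smoke-test the endpoint above without the UI is a plain HttpClient call; this is a minimal sketch, assuming a local-dev base URL and a placeholder layer id - the relative route matches the one JobService uses further down in this diff:

    // Hedged sketch: POST to the new create-for-layer endpoint and dump the response.
    using var http = new HttpClient { BaseAddress = new Uri("http://localhost:5400/") };
    var layerId = Guid.Parse("00000000-0000-0000-0000-000000000000"); // placeholder id
    var response = await http.PostAsync($"Jobs/create-for-layer/{layerId}", null);
    // On success the controller returns: { "success": true, "jobId": "...", "message": "...", "existing": false }
    Console.WriteLine($"{(int)response.StatusCode}: {await response.Content.ReadAsStringAsync()}");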
"Unknown", + PluginName = pluginRecord.Desc1, + JobType = jobType, + Priority = priority, + MaxRetries = maxRetries, + Status = JobStatus.Pending, + CreatedAt = DateTime.UtcNow, + CreatedAtUtc = DateTime.UtcNow, + ModifiedAtUtc = DateTime.UtcNow, + CreatedById = Guid.Empty, + ModifiedById = Guid.Empty + }; + + _db.QueueJobs.Add(job); + await _db.SaveChangesAsync(); + + _logger.LogInformation("CreateJobForLayer: Created job {JobId} for layer {LayerName} ({LayerId})", + job.Id, layer.Name, layerId); + + return Ok(new + { + success = true, + jobId = job.Id, + message = "Job created successfully", + existing = false + }); + } + catch (Exception ex) + { + _logger.LogError(ex, "CreateJobForLayer: Error creating job for layer {LayerId}", layerId); + return BadRequest(ex.ToString()); + } + } } diff --git a/DiunaBI.API/DiunaBI.API.csproj b/DiunaBI.API/DiunaBI.API.csproj index a33bde2..b5c21b0 100644 --- a/DiunaBI.API/DiunaBI.API.csproj +++ b/DiunaBI.API/DiunaBI.API.csproj @@ -39,9 +39,11 @@ - + + + diff --git a/DiunaBI.Infrastructure/Migrations/AppDbContextModelSnapshot.cs b/DiunaBI.Infrastructure/Migrations/AppDbContextModelSnapshot.cs index 9b50f25..dd69cee 100644 --- a/DiunaBI.Infrastructure/Migrations/AppDbContextModelSnapshot.cs +++ b/DiunaBI.Infrastructure/Migrations/AppDbContextModelSnapshot.cs @@ -49,7 +49,7 @@ namespace DiunaBI.Infrastructure.Migrations b.HasKey("Id"); - b.ToTable("DataInbox"); + b.ToTable("DataInbox", (string)null); }); modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b => @@ -104,7 +104,7 @@ namespace DiunaBI.Infrastructure.Migrations b.HasIndex("ModifiedById"); - b.ToTable("Layers"); + b.ToTable("Layers", (string)null); }); modelBuilder.Entity("DiunaBI.Domain.Entities.ProcessSource", b => @@ -119,7 +119,7 @@ namespace DiunaBI.Infrastructure.Migrations b.HasIndex("SourceId"); - b.ToTable("ProcessSources"); + b.ToTable("ProcessSources", (string)null); }); modelBuilder.Entity("DiunaBI.Domain.Entities.QueueJob", b => @@ -185,7 +185,7 @@ namespace DiunaBI.Infrastructure.Migrations b.HasKey("Id"); - b.ToTable("QueueJobs"); + b.ToTable("QueueJobs", (string)null); }); modelBuilder.Entity("DiunaBI.Domain.Entities.Record", b => @@ -329,7 +329,7 @@ namespace DiunaBI.Infrastructure.Migrations b.HasIndex("ModifiedById"); - b.ToTable("Records"); + b.ToTable("Records", (string)null); }); modelBuilder.Entity("DiunaBI.Domain.Entities.RecordHistory", b => @@ -378,7 +378,7 @@ namespace DiunaBI.Infrastructure.Migrations b.HasIndex("RecordId", "ChangedAt"); - b.ToTable("RecordHistory"); + b.ToTable("RecordHistory", (string)null); }); modelBuilder.Entity("DiunaBI.Domain.Entities.User", b => @@ -402,7 +402,7 @@ namespace DiunaBI.Infrastructure.Migrations b.HasKey("Id"); - b.ToTable("Users"); + b.ToTable("Users", (string)null); }); modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b => diff --git a/DiunaBI.Infrastructure/Services/JobWorkerService.cs b/DiunaBI.Infrastructure/Services/JobWorkerService.cs index 413b11a..acb9a9c 100644 --- a/DiunaBI.Infrastructure/Services/JobWorkerService.cs +++ b/DiunaBI.Infrastructure/Services/JobWorkerService.cs @@ -142,8 +142,16 @@ public class JobWorkerService : BackgroundService else { job.Status = JobStatus.Retrying; - _logger.LogInformation("JobWorker: Job {JobId} will retry (attempt {RetryCount}/{MaxRetries})", - job.Id, job.RetryCount, job.MaxRetries); + + // Exponential backoff: wait before retrying based on attempt number + var backoffDelay = GetBackoffDelay(job.RetryCount); + + _logger.LogInformation("JobWorker: Job {JobId} will retry in 
{Delay} (attempt {RetryCount}/{MaxRetries})", + job.Id, backoffDelay, job.RetryCount, job.MaxRetries); + + // Wait before marking as pending again + await Task.Delay(backoffDelay, stoppingToken); + job.Status = JobStatus.Pending; } } finally @@ -157,4 +165,14 @@ public class JobWorkerService : BackgroundService _logger.LogInformation("JobWorker: Stopping service..."); await base.StopAsync(stoppingToken); } + + private static TimeSpan GetBackoffDelay(int retryCount) + { + return retryCount switch + { + 1 => TimeSpan.FromSeconds(30), // 1st retry: 30 seconds + 2 => TimeSpan.FromMinutes(2), // 2nd retry: 2 minutes + _ => TimeSpan.FromMinutes(5) // 3rd+ retry: 5 minutes + }; + } } diff --git a/DiunaBI.Plugins.PedrolloPL/Importers/PedrolloPLImportP2.cs b/DiunaBI.Plugins.PedrolloPL/Importers/PedrolloPLImportP2.cs new file mode 100644 index 0000000..0bdedae --- /dev/null +++ b/DiunaBI.Plugins.PedrolloPL/Importers/PedrolloPLImportP2.cs @@ -0,0 +1,382 @@ +using System.Text; +using System.Text.Json; +using DiunaBI.Domain.Entities; +using DiunaBI.Infrastructure.Data; +using DiunaBI.Infrastructure.Plugins; +using Microsoft.Extensions.Logging; + +namespace DiunaBI.Plugins.PedrolloPL.Importers; + +public class PedrolloPLImportP2 : BaseDataImporter +{ + public override string ImporterType => "PedrolloPL.Import.P2"; + + private readonly AppDbContext _db; + private readonly ILogger _logger; + + // Configuration properties + private string? DataInboxName { get; set; } + private string? DataInboxSource { get; set; } + private string? StartDate { get; set; } + private string? EndDate { get; set; } + private string? ImportYear { get; set; } + private bool IsEnabled { get; set; } + + // Cached deserialized data + private List>? _cachedRawData; + private DataInbox? _cachedDataInbox; + private Dictionary? 
_regionCodeMap;
+
+    public PedrolloPLImportP2(
+        AppDbContext db,
+        ILogger<PedrolloPLImportP2> logger)
+    {
+        _db = db;
+        _logger = logger;
+    }
+
+    public override void Import(Layer importWorker)
+    {
+        try
+        {
+            _logger.LogInformation("{ImporterType}: Starting import for {ImportWorkerName} ({ImportWorkerId})",
+                ImporterType, importWorker.Name, importWorker.Id);
+
+            // Clear cache at start
+            _cachedRawData = null;
+            _cachedDataInbox = null;
+            _regionCodeMap = null;
+
+            LoadConfiguration(importWorker);
+            ValidateConfiguration();
+
+            if (!IsEnabled)
+            {
+                _logger.LogInformation("{ImporterType}: Import disabled for {ImportWorkerName}",
+                    ImporterType, importWorker.Name);
+                return;
+            }
+
+            // Find and deserialize DataInbox data
+            FindAndDeserializeDataInbox();
+
+            // Load region code mapping from dictionary layer
+            LoadRegionCodeMapping();
+
+            // Map data from DataInbox to Layer records
+            var mappedRecords = MapDataToRecords();
+
+            // Create new Import layer
+            var importLayer = CreateImportLayer(importWorker);
+
+            // Save records to database
+            SaveRecordsToLayer(importLayer, mappedRecords);
+
+            _logger.LogInformation("{ImporterType}: Successfully completed import for {ImportWorkerName} - Created {RecordCount} records",
+                ImporterType, importWorker.Name, mappedRecords.Count);
+        }
+        catch (Exception e)
+        {
+            _logger.LogError(e, "{ImporterType}: Failed to import {ImportWorkerName} ({ImportWorkerId})",
+                ImporterType, importWorker.Name, importWorker.Id);
+            throw;
+        }
+        finally
+        {
+            // Clear cache after import
+            _cachedRawData = null;
+            _cachedDataInbox = null;
+            _regionCodeMap = null;
+        }
+    }
+
+    private void LoadConfiguration(Layer importWorker)
+    {
+        if (importWorker.Records == null) return;
+
+        DataInboxName = GetRecordValue(importWorker.Records, "DataInboxName");
+        DataInboxSource = GetRecordValue(importWorker.Records, "DataInboxSource");
+        StartDate = GetRecordValue(importWorker.Records, "StartDate");
+        EndDate = GetRecordValue(importWorker.Records, "EndDate");
+        ImportYear = GetRecordValue(importWorker.Records, "ImportYear");
+        IsEnabled = GetRecordValue(importWorker.Records, "IsEnabled") == "True";
+
+        _logger.LogDebug(
+            "{ImporterType}: Configuration loaded - DataInboxName: {DataInboxName}, Source: {Source}, Year: {Year}, Period: {StartDate} to {EndDate}, Enabled: {IsEnabled}",
+            ImporterType, DataInboxName, DataInboxSource, ImportYear, StartDate, EndDate, IsEnabled);
+    }
+
+    private void ValidateConfiguration()
+    {
+        var errors = new List<string>();
+
+        if (string.IsNullOrEmpty(DataInboxName)) errors.Add("DataInboxName is required");
+        if (string.IsNullOrEmpty(DataInboxSource)) errors.Add("DataInboxSource is required");
+        if (string.IsNullOrEmpty(StartDate)) errors.Add("StartDate is required");
+        if (string.IsNullOrEmpty(EndDate)) errors.Add("EndDate is required");
+
+        if (errors.Any())
+        {
+            throw new InvalidOperationException($"Configuration validation failed: {string.Join(", ", errors)}");
+        }
+
+        _logger.LogDebug("{ImporterType}: Configuration validated successfully", ImporterType);
+    }
+
+    private void FindAndDeserializeDataInbox()
+    {
+        _logger.LogDebug("{ImporterType}: Searching for DataInbox with Name='{DataInboxName}' and Source='{DataInboxSource}'",
+            ImporterType, DataInboxName, DataInboxSource);
+
+        // Find DataInbox by Name and Source, order by CreatedAt descending to get the latest
+        var dataInbox = _db.DataInbox
+            .Where(x => x.Name == DataInboxName && x.Source == DataInboxSource)
+            .OrderByDescending(x => x.CreatedAt)
+            .FirstOrDefault();
+
+        if (dataInbox == null)
+        {
+            throw new
InvalidOperationException(
+                $"DataInbox not found with Name='{DataInboxName}' and Source='{DataInboxSource}'");
+        }
+
+        _logger.LogInformation("{ImporterType}: Found DataInbox - Id: {DataInboxId}, Name: {Name}, Source: {Source}, CreatedAt: {CreatedAt}",
+            ImporterType, dataInbox.Id, dataInbox.Name, dataInbox.Source, dataInbox.CreatedAt);
+
+        // Deserialize the data
+        try
+        {
+            var data = Convert.FromBase64String(dataInbox.Data);
+            var jsonString = Encoding.UTF8.GetString(data);
+
+            _logger.LogDebug("{ImporterType}: Decoded {DataSize} bytes from base64",
+                ImporterType, data.Length);
+
+            // Deserialize as array of arrays: [["", 1183.15, ...], ["DOLNOŚLĄSKIE", ...]]
+            var rawData = JsonSerializer.Deserialize<List<List<object?>>>(jsonString);
+            if (rawData == null || rawData.Count == 0)
+            {
+                throw new InvalidOperationException($"DataInbox.Data is empty for: {dataInbox.Name}");
+            }
+
+            _logger.LogInformation("{ImporterType}: Successfully deserialized {RowCount} rows from DataInbox",
+                ImporterType, rawData.Count);
+
+            // Log first few rows for debugging
+            if (rawData.Count > 0)
+            {
+                var sampleSize = Math.Min(3, rawData.Count);
+                _logger.LogDebug("{ImporterType}: Sample rows (first {SampleSize}):", ImporterType, sampleSize);
+                for (int i = 0; i < sampleSize; i++)
+                {
+                    var row = rawData[i];
+                    if (row.Count > 0)
+                    {
+                        var regionName = row[0]?.ToString() ?? "null";
+                        var valueCount = row.Count - 1;
+                        _logger.LogDebug("  [{Index}] Region: {Region}, Values: {ValueCount}",
+                            i, regionName, valueCount);
+                    }
+                }
+            }
+
+            // Cache the deserialized data
+            _cachedRawData = rawData;
+            _cachedDataInbox = dataInbox;
+        }
+        catch (FormatException e)
+        {
+            _logger.LogError(e, "{ImporterType}: Invalid base64 data in DataInbox {DataInboxId}",
+                ImporterType, dataInbox.Id);
+            throw new InvalidOperationException($"Invalid base64 data in DataInbox: {dataInbox.Name}", e);
+        }
+        catch (JsonException e)
+        {
+            _logger.LogError(e, "{ImporterType}: Invalid JSON data in DataInbox {DataInboxId}",
+                ImporterType, dataInbox.Id);
+            throw new InvalidOperationException($"Invalid JSON data in DataInbox: {dataInbox.Name}", e);
+        }
+    }
+
+    private void LoadRegionCodeMapping()
+    {
+        const string dictionaryLayerName = "L1-D-P2-CODES";
+
+        _logger.LogDebug("{ImporterType}: Loading region code mapping from dictionary layer '{DictionaryLayerName}'",
+            ImporterType, dictionaryLayerName);
+
+        var dictionaryLayer = _db.Layers
+            .Where(x => x.Name == dictionaryLayerName && x.Type == LayerType.Dictionary)
+            .FirstOrDefault();
+
+        if (dictionaryLayer == null)
+        {
+            throw new InvalidOperationException($"Dictionary layer '{dictionaryLayerName}' not found");
+        }
+
+        // Load records for the dictionary layer
+        var records = _db.Records
+            .Where(x => x.LayerId == dictionaryLayer.Id)
+            .ToList();
+
+        // Build mapping: Desc1 (region name) -> Code
+        _regionCodeMap = records.ToDictionary(
+            r => r.Desc1 ?? string.Empty,
+            r => r.Code ?? string.Empty,
+            StringComparer.OrdinalIgnoreCase);
+
+        _logger.LogInformation("{ImporterType}: Loaded {MappingCount} region code mappings",
+            ImporterType, _regionCodeMap.Count);
+    }
+
+    private List<Record> MapDataToRecords()
+    {
+        if (_cachedRawData == null)
+        {
+            throw new InvalidOperationException("Raw data not loaded. Call FindAndDeserializeDataInbox first.");
+        }
+
+        if (_regionCodeMap == null)
+        {
+            throw new InvalidOperationException("Region code mapping not loaded. Call LoadRegionCodeMapping first.");
+        }
Call LoadRegionCodeMapping first."); + } + + var records = new List(); + var now = DateTime.UtcNow; + + _logger.LogDebug("{ImporterType}: Starting data mapping for {RowCount} rows", + ImporterType, _cachedRawData.Count); + + foreach (var row in _cachedRawData) + { + if (row.Count < 13) + { + _logger.LogWarning("{ImporterType}: Skipping row with insufficient data - expected 13 elements, got {Count}", + ImporterType, row.Count); + continue; + } + + // First element is region name + var regionName = row[0]?.ToString(); + if (string.IsNullOrEmpty(regionName)) + { + _logger.LogWarning("{ImporterType}: Skipping row with empty region name", ImporterType); + continue; + } + + // Find region code from dictionary + if (!_regionCodeMap.TryGetValue(regionName, out var regionCode)) + { + _logger.LogWarning("{ImporterType}: Region code not found for '{RegionName}' - skipping", + ImporterType, regionName); + continue; + } + + // Create 12 records (one per month) + for (int month = 1; month <= 12; month++) + { + var valueIndex = month; // row[1] = January, row[2] = February, etc. + var valueObj = row[valueIndex]; + + // Convert value to double + double? value = null; + if (valueObj != null) + { + // Handle JsonElement if deserialized from JSON + if (valueObj is JsonElement jsonElement) + { + if (jsonElement.ValueKind == JsonValueKind.Number) + { + value = jsonElement.GetDouble(); + } + } + else if (valueObj is double d) + { + value = d; + } + else if (double.TryParse(valueObj.ToString(), out var parsed)) + { + value = parsed; + } + } + + // Create code: {regionCode}{month:00} + var code = $"{regionCode}{month:00}"; + + var record = new Record + { + Id = Guid.NewGuid(), + Code = code, + Value1 = value, + CreatedAt = now, + ModifiedAt = now + }; + + records.Add(record); + } + + _logger.LogDebug("{ImporterType}: Mapped region '{RegionName}' (code: {RegionCode}) to 12 records", + ImporterType, regionName, regionCode); + } + + _logger.LogInformation("{ImporterType}: Successfully mapped {RecordCount} records from {RowCount} rows", + ImporterType, records.Count, _cachedRawData.Count); + + return records; + } + + private Layer CreateImportLayer(Layer importWorker) + { + var now = DateTime.UtcNow; + + var importLayer = new Layer + { + Id = Guid.NewGuid(), + Number = _db.Layers.Count() + 1, + ParentId = importWorker.Id, + Type = LayerType.Import, + IsCancelled = false, + CreatedAt = now, + ModifiedAt = now, + CreatedById = importWorker.CreatedById, + ModifiedById = importWorker.ModifiedById + }; + + // Format: L{Number}-I-P2-{Year}-{Timestamp} + importLayer.Name = $"L{importLayer.Number}-I-P2-{ImportYear}-{now:yyyyMMddHHmm}"; + + _logger.LogDebug("{ImporterType}: Creating import layer '{LayerName}' (Number: {Number})", + ImporterType, importLayer.Name, importLayer.Number); + + _db.Layers.Add(importLayer); + _db.SaveChanges(); + + _logger.LogInformation("{ImporterType}: Created import layer '{LayerName}' with Id: {LayerId}", + ImporterType, importLayer.Name, importLayer.Id); + + return importLayer; + } + + private void SaveRecordsToLayer(Layer importLayer, List records) + { + _logger.LogDebug("{ImporterType}: Saving {RecordCount} records to layer {LayerId}", + ImporterType, records.Count, importLayer.Id); + + // Set LayerId for all records + foreach (var record in records) + { + record.LayerId = importLayer.Id; + } + + _db.Records.AddRange(records); + _db.SaveChanges(); + + _logger.LogInformation("{ImporterType}: Successfully saved {RecordCount} records to layer '{LayerName}'", + ImporterType, records.Count, 
diff --git a/DiunaBI.UI.Shared/Components/JobListComponent.razor b/DiunaBI.UI.Shared/Components/JobListComponent.razor
new file mode 100644
index 0000000..4861380
--- /dev/null
+++ b/DiunaBI.UI.Shared/Components/JobListComponent.razor
@@ -0,0 +1,141 @@
+@using MudBlazor.Internal
+@using DiunaBI.Domain.Entities
+
+@foreach (JobStatus status in Enum.GetValues(typeof(JobStatus)))
+{
+    @status.ToString()
+}
+
+@foreach (JobType type in Enum.GetValues(typeof(JobType)))
+{
+    @type.ToString()
+}
+
+Layer Name
+Plugin
+Type
+Status
+Priority
+Retry
+Created
+Last Attempt
+ @row.LayerName +
+
+ +
+ @row.PluginName +
+
+ +
+ @row.JobType +
+
+ +
+ @row.Status +
+
+ +
+ @row.Priority +
+
+ +
+ @row.RetryCount / @row.MaxRetries +
+
+ +
+ @row.CreatedAt.ToString("yyyy-MM-dd HH:mm") +
+
+ +
+ @(row.LastAttemptAt?.ToString("yyyy-MM-dd HH:mm") ?? "-") +
+
+
+ + No jobs to display + + + Loading... + +
+
+    @if (jobs.TotalCount > 0)
+    {
+        Results @((jobs.Page - 1) * jobs.PageSize + 1) - @Math.Min(jobs.Page * jobs.PageSize, jobs.TotalCount)
+        of @jobs.TotalCount
+    }
diff --git a/DiunaBI.UI.Shared/Components/JobListComponent.razor.cs b/DiunaBI.UI.Shared/Components/JobListComponent.razor.cs
new file mode 100644
index 0000000..a1bb1ea
--- /dev/null
+++ b/DiunaBI.UI.Shared/Components/JobListComponent.razor.cs
@@ -0,0 +1,110 @@
+using DiunaBI.UI.Shared.Services;
+using Microsoft.AspNetCore.Components;
+using Microsoft.AspNetCore.Components.Web;
+using DiunaBI.Application.DTOModels.Common;
+using DiunaBI.Domain.Entities;
+using MudBlazor;
+using Microsoft.JSInterop;
+
+namespace DiunaBI.UI.Shared.Components;
+
+public partial class JobListComponent : ComponentBase
+{
+    [Inject] private JobService JobService { get; set; } = default!;
+    [Inject] private ISnackbar Snackbar { get; set; } = default!;
+    [Inject] private NavigationManager NavigationManager { get; set; } = default!;
+    [Inject] private IJSRuntime JSRuntime { get; set; } = default!;
+
+    private PagedResult<QueueJob> jobs = new();
+    private bool isLoading = false;
+    private int currentPage = 1;
+    private int pageSize = 50;
+    private JobStatus? selectedStatus = null;
+    private JobType? selectedJobType = null;
+
+    protected override async Task OnInitializedAsync()
+    {
+        await LoadJobs();
+    }
+
+    private async Task LoadJobs()
+    {
+        isLoading = true;
+
+        try
+        {
+            jobs = await JobService.GetJobsAsync(currentPage, pageSize, selectedStatus, selectedJobType);
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"Loading jobs failed: {ex.Message}");
+            Snackbar.Add("Failed to load jobs", Severity.Error);
+        }
+        finally
+        {
+            isLoading = false;
+        }
+    }
+
+    private async Task OnPageChanged(int page)
+    {
+        currentPage = page;
+        await LoadJobs();
+    }
+
+    private async Task ClearFilters()
+    {
+        selectedStatus = null;
+        selectedJobType = null;
+        currentPage = 1;
+        await LoadJobs();
+    }
+
+    private async Task OnStatusClear()
+    {
+        selectedStatus = null;
+        currentPage = 1;
+        await LoadJobs();
+    }
+
+    private async Task OnJobTypeClear()
+    {
+        selectedJobType = null;
+        currentPage = 1;
+        await LoadJobs();
+    }
+
+    private void OnRowClick(QueueJob job)
+    {
+        NavigationManager.NavigateTo($"/jobs/{job.Id}");
+    }
+
+    private async Task OnRowRightClick(MouseEventArgs e, QueueJob job)
+    {
+        var url = NavigationManager.ToAbsoluteUri($"/jobs/{job.Id}").ToString();
+        await JSRuntime.InvokeVoidAsync("open", url, "_blank");
+    }
+
+    private Color GetStatusColor(JobStatus status)
+    {
+        return status switch
+        {
+            JobStatus.Pending => Color.Default,
+            JobStatus.Running => Color.Info,
+            JobStatus.Completed => Color.Success,
+            JobStatus.Failed => Color.Error,
+            JobStatus.Retrying => Color.Warning,
+            _ => Color.Default
+        };
+    }
+
+    private Color GetJobTypeColor(JobType jobType)
+    {
+        return jobType switch
+        {
+            JobType.Import => Color.Primary,
+            JobType.Process => Color.Secondary,
+            _ => Color.Default
+        };
+    }
+}
diff --git a/DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs b/DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs
index 9f8fbba..9dd942c 100644
--- a/DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs
+++ b/DiunaBI.UI.Shared/Extensions/ServiceCollectionExtensions.cs
@@ -38,6 +38,7 @@ public static class ServiceCollectionExtensions
         services.AddScoped();
         services.AddScoped();
         services.AddScoped();
+        services.AddScoped<JobService>();
 
         // Filter state services (scoped to maintain state during user session)
         services.AddScoped();
diff --git a/DiunaBI.UI.Shared/MainLayout.razor b/DiunaBI.UI.Shared/MainLayout.razor
index e2d1649..793dacc 100644
--- a/DiunaBI.UI.Shared/MainLayout.razor
+++ b/DiunaBI.UI.Shared/MainLayout.razor
@@ -37,6 +37,7 @@
 Dashboard
 Layers
 Data Inbox
+Jobs
diff --git a/DiunaBI.UI.Shared/Pages/JobDetailPage.razor b/DiunaBI.UI.Shared/Pages/JobDetailPage.razor
new file mode 100644
index 0000000..779c55a
--- /dev/null
+++ b/DiunaBI.UI.Shared/Pages/JobDetailPage.razor
@@ -0,0 +1,245 @@
+@page "/jobs/{id:guid}"
+@using DiunaBI.UI.Shared.Services
+@using DiunaBI.Domain.Entities
+@using MudBlazor
+@inject JobService JobService
+@inject NavigationManager NavigationManager
+@inject ISnackbar Snackbar
+
+Job Details
+
+@if (job != null && job.Status == JobStatus.Failed)
+{
+    Retry
+}
+@if (job != null && (job.Status == JobStatus.Pending || job.Status == JobStatus.Retrying))
+{
+    Cancel
+}
+
+Back to List
+
+@if (isLoading)
+{
+}
+else if (job == null)
+{
+    Job not found
+}
+else
+{
+    @if (!string.IsNullOrEmpty(job.LastError))
+    {
+    }
+
+    View Layer Details
+}
+
+@code {
+    [Parameter]
+    public Guid Id { get; set; }
+
+    private QueueJob? job;
+    private bool isLoading = true;
+
+    protected override async Task OnInitializedAsync()
+    {
+        await LoadJob();
+    }
+
+    private async Task LoadJob()
+    {
+        isLoading = true;
+        try
+        {
+            job = await JobService.GetJobByIdAsync(Id);
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"Loading job failed: {ex.Message}");
+            Snackbar.Add("Failed to load job", Severity.Error);
+        }
+        finally
+        {
+            isLoading = false;
+        }
+    }
+
+    private async Task RetryJob()
+    {
+        if (job == null) return;
+
+        var success = await JobService.RetryJobAsync(job.Id);
+        if (success)
+        {
+            Snackbar.Add("Job reset to Pending status", Severity.Success);
+            await LoadJob();
+        }
+        else
+        {
+            Snackbar.Add("Failed to retry job", Severity.Error);
+        }
+    }
+
+    private async Task CancelJob()
+    {
+        if (job == null) return;
+
+        var success = await JobService.CancelJobAsync(job.Id);
+        if (success)
+        {
+            Snackbar.Add("Job cancelled", Severity.Success);
+            await LoadJob();
+        }
+        else
+        {
+            Snackbar.Add("Failed to cancel job", Severity.Error);
+        }
+    }
+
+    private void GoBack()
+    {
+        NavigationManager.NavigateTo("/jobs");
+    }
+
+    private Color GetStatusColor(JobStatus status)
+    {
+        return status switch
+        {
+            JobStatus.Pending => Color.Default,
+            JobStatus.Running => Color.Info,
+            JobStatus.Completed => Color.Success,
+            JobStatus.Failed => Color.Error,
+            JobStatus.Retrying => Color.Warning,
+            _ => Color.Default
+        };
+    }
+
+    private string GetStatusIcon(JobStatus status)
+    {
+        return status switch
+        {
+            JobStatus.Pending => Icons.Material.Filled.HourglassEmpty,
+            JobStatus.Running => Icons.Material.Filled.PlayArrow,
+            JobStatus.Completed => Icons.Material.Filled.CheckCircle,
+            JobStatus.Failed => Icons.Material.Filled.Error,
+            JobStatus.Retrying => Icons.Material.Filled.Refresh,
+            _ => Icons.Material.Filled.Help
+        };
+    }
+}
diff --git a/DiunaBI.UI.Shared/Pages/JobListPage.razor b/DiunaBI.UI.Shared/Pages/JobListPage.razor
new file mode 100644
index 0000000..9f9bc78
--- /dev/null
+++ b/DiunaBI.UI.Shared/Pages/JobListPage.razor
@@ -0,0 +1,8 @@
+@page "/jobs"
+@using DiunaBI.UI.Shared.Components
+
+Jobs
+
+
+
diff --git a/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor b/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor
index cfb91c9..ad10fd9 100644
--- a/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor
+++ b/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor
@@ -3,7 +3,9 @@
 @using DiunaBI.Application.DTOModels
 @using MudBlazor
 @inject LayerService LayerService
+@inject JobService JobService
 @inject NavigationManager NavigationManager
+@inject ISnackbar Snackbar
@@ -11,18 +13,24 @@
 Layer Details
-
 Back to List
diff --git a/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor.cs b/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor.cs
index 72b9b0c..65abbef 100644
--- a/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor.cs
+++ b/DiunaBI.UI.Shared/Pages/LayerDetailPage.razor.cs
@@ -10,9 +10,6 @@ public partial class LayerDetailPage : ComponentBase
     [Parameter] public Guid Id { get; set; }
 
-    [Inject]
-    private ISnackbar Snackbar { get; set; } = null!;
-
     [Inject]
     private IDialogService DialogService { get; set; } = null!;
@@ -413,4 +410,54 @@ public partial class LayerDetailPage : ComponentBase
     {
         return userCache.TryGetValue(userId, out var username) ? username : string.Empty;
     }
+
+    // Run Now button methods
+    private bool isRunningJob = false;
+
+    private bool IsWorkerLayer()
+    {
+        if (layer?.Records == null) return false;
+
+        var typeRecord = layer.Records.FirstOrDefault(x => x.Code == "Type");
+        return typeRecord?.Desc1 == "ImportWorker" || typeRecord?.Desc1 == "ProcessWorker";
+    }
+
+    private async Task RunNow()
+    {
+        if (layer == null) return;
+
+        isRunningJob = true;
+        try
+        {
+            var result = await JobService.CreateJobForLayerAsync(layer.Id);
+
+            if (result != null && result.Success)
+            {
+                if (result.Existing)
+                {
+                    Snackbar.Add($"Job already exists: {result.Message}", Severity.Info);
+                }
+                else
+                {
+                    Snackbar.Add("Job created successfully!", Severity.Success);
+                }
+
+                // Navigate to job detail page
+                NavigationManager.NavigateTo($"/jobs/{result.JobId}");
+            }
+            else
+            {
+                Snackbar.Add("Failed to create job", Severity.Error);
+            }
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"Error creating job: {ex.Message}");
+            Snackbar.Add($"Error creating job: {ex.Message}", Severity.Error);
+        }
+        finally
+        {
+            isRunningJob = false;
+        }
+    }
 }
diff --git a/DiunaBI.UI.Shared/Services/JobService.cs b/DiunaBI.UI.Shared/Services/JobService.cs
new file mode 100644
index 0000000..61b0723
--- /dev/null
+++ b/DiunaBI.UI.Shared/Services/JobService.cs
@@ -0,0 +1,104 @@
+using System.Net.Http.Json;
+using System.Text.Json;
+using DiunaBI.Application.DTOModels.Common;
+using DiunaBI.Domain.Entities;
+
+namespace DiunaBI.UI.Shared.Services;
+
+public class JobService
+{
+    private readonly HttpClient _httpClient;
+
+    public JobService(HttpClient httpClient)
+    {
+        _httpClient = httpClient;
+    }
+
+    private readonly JsonSerializerOptions _jsonOptions = new()
+    {
+        PropertyNameCaseInsensitive = true
+    };
+
+    public async Task<PagedResult<QueueJob>> GetJobsAsync(int page = 1, int pageSize = 50, JobStatus? status = null, JobType? jobType = null, Guid? layerId = null)
+    {
+        var start = (page - 1) * pageSize;
+        var query = $"Jobs?start={start}&limit={pageSize}";
+
+        if (status.HasValue)
+            query += $"&status={(int)status.Value}";
+
+        if (jobType.HasValue)
+            query += $"&jobType={(int)jobType.Value}";
+
+        if (layerId.HasValue)
+            query += $"&layerId={layerId.Value}";
+
+        var response = await _httpClient.GetAsync(query);
+        response.EnsureSuccessStatusCode();
+
+        var json = await response.Content.ReadAsStringAsync();
+        var result = JsonSerializer.Deserialize<PagedResult<QueueJob>>(json, _jsonOptions);
+
+        return result ?? new PagedResult<QueueJob>();
+    }
+
+    public async Task<QueueJob?> GetJobByIdAsync(Guid id)
+    {
+        var response = await _httpClient.GetAsync($"Jobs/{id}");
+
+        if (!response.IsSuccessStatusCode)
+            return null;
+
+        return await response.Content.ReadFromJsonAsync<QueueJob>();
+    }
+
+    public async Task<bool> RetryJobAsync(Guid id)
+    {
+        var response = await _httpClient.PostAsync($"Jobs/{id}/retry", null);
+        return response.IsSuccessStatusCode;
+    }
+
+    public async Task<bool> CancelJobAsync(Guid id)
+    {
+        var response = await _httpClient.DeleteAsync($"Jobs/{id}");
+        return response.IsSuccessStatusCode;
+    }
+
+    public async Task<JobStats?> GetStatsAsync()
+    {
+        var response = await _httpClient.GetAsync("Jobs/stats");
+
+        if (!response.IsSuccessStatusCode)
+            return null;
+
+        return await response.Content.ReadFromJsonAsync<JobStats>();
+    }
+
+    public async Task<CreateJobResult?> CreateJobForLayerAsync(Guid layerId)
+    {
+        var response = await _httpClient.PostAsync($"Jobs/create-for-layer/{layerId}", null);
+
+        if (!response.IsSuccessStatusCode)
+            return null;
+
+        return await response.Content.ReadFromJsonAsync<CreateJobResult>();
+    }
+}
+
+public class JobStats
+{
+    public int Pending { get; set; }
+    public int Running { get; set; }
+    public int Completed { get; set; }
+    public int Failed { get; set; }
+    public int Retrying { get; set; }
+    public int Total { get; set; }
+}
+
+public class CreateJobResult
+{
+    public bool Success { get; set; }
+    public Guid JobId { get; set; }
+    public string? Message { get; set; }
+    public bool Existing { get; set; }
+}
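Outside the Blazor DI container (for example in a quick console harness) JobService can be exercised directly. This is a minimal sketch: the base address is an assumption, and it only touches members visible above (the JobStats fields and PagedResult<T>.TotalCount, which the list component also uses):

    // Hedged sketch of direct JobService usage; adjust the base URL to the target environment.
    var http = new HttpClient { BaseAddress = new Uri("http://localhost:5400/") };
    var jobService = new JobService(http);

    var stats = await jobService.GetStatsAsync();
    Console.WriteLine($"Pending: {stats?.Pending}, Running: {stats?.Running}, Failed: {stats?.Failed}");

    var failedPage = await jobService.GetJobsAsync(page: 1, pageSize: 10, status: JobStatus.Failed);
    Console.WriteLine($"Showing up to 10 of {failedPage.TotalCount} failed jobs");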