The P2 processor is working like a charm!
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m23s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m28s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m37s
@@ -1,18 +1,27 @@
-Re-explore the DiunaBI codebase comprehensively and update the `.claude/project-context.md` file with the latest:
+Update the `.claude/project-context.md` file by ONLY appending changes made during THIS session to the "RECENT CHANGES (This Session)" section at the top of the file.
 
-1. Project structure changes (new projects, folders, or major reorganization)
-2. New features and functionality added
-3. Database schema changes (new entities, significant field additions)
-4. New API endpoints or controllers
-5. New services or business logic components
-6. Changes to authentication/authorization
-7. New external integrations or plugins
-8. Recent git commits (last 5-10) to understand development direction
-9. Updated configuration or environment changes
-10. New design patterns or architectural changes
+**DO NOT re-scan or re-explore the entire codebase** - this wastes tokens and time.
 
-Use the Task tool with subagent_type='Explore' and thoroughness level 'very thorough' to comprehensively analyze the codebase.
+**What to do:**
 
-After exploration, update the `.claude/project-context.md` file with all relevant changes while maintaining the existing structure and format. Preserve information that hasn't changed.
+1. Review the conversation history to identify what was changed/added/fixed in THIS session
+2. Read the current `.claude/project-context.md` file
+3. Update ONLY the "RECENT CHANGES (This Session)" section at the top with:
+   - Date of changes (today's date)
+   - Brief bullet points describing what was modified
+   - Files that were changed with brief descriptions
+   - Any new functionality added
+   - Bug fixes completed
+4. Leave the rest of the file unchanged
 
-When done, provide a brief summary of what was updated in the context file.
+**Format for session changes:**
+```markdown
+## RECENT CHANGES (This Session)
+
+**[Feature/Fix Name] ([Date]):**
+- ✅ Brief description of change 1
+- ✅ Brief description of change 2
+- Files modified: [file1.cs](path/to/file1.cs), [file2.cs](path/to/file2.cs)
+```
+
+When done, provide a brief summary of what session changes were documented.
11
.vscode/launch.json
vendored
@@ -30,17 +30,6 @@
             },
             "env": {
                 "ASPNETCORE_ENVIRONMENT": "Development"
-            },
-            "launchBrowser": {
-                "enabled": true,
-                "args": "${auto-detect-url}",
-                "browser": [
-                    {
-                        "osx": "Google Chrome",
-                        "linux": "chrome",
-                        "windows": "chrome"
-                    }
-                ]
             }
         }
     ]
58
DevTools/sql-scripts/PedrolloPL/PedrolloProcessP2
Normal file
@@ -0,0 +1,58 @@
DECLARE @JustForDebug TINYINT = 0;

-- SETUP VARIABLES
DECLARE @Year INT = 2024;

DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
    'L', @Number, '-A-PW_P2-', @Year, '-', @CurrentTimestamp
);
DECLARE @SourceNameFilter NVARCHAR(50) = CONCAT('%-A-IW_B3', '-', @Year, '-%');
DECLARE @SourceLayer NVARCHAR(50) = (SELECT TOP 1 [Name] FROM [DiunaBI-PedrolloPL].[dbo].[Layers] WHERE [Name] LIKE @SourceNameFilter);
IF @SourceLayer IS NULL
BEGIN
    SELECT 'SourceLayer is NULL' AS Logger;
    RETURN;
END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();

SELECT @Name AS Name, @SourceLayer AS SourceLayer;

IF @JustForDebug = 1
BEGIN
    SELECT 'Just for debug' AS Logger;
    RETURN;
END;

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
    ([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Source', 'B3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'SourceLayer', @SourceLayer, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Type', 'ProcessWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Year', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Plugin', 'PedrolloPL.Process.P2', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Priority', '110', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
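
The script drives everything through Code/Desc1 key-value pairs on the worker layer's Records; a consumer reads them back by code. A minimal sketch of that lookup, mirroring the `GetRecordValue` helper that appears in `PedrolloPLProcessP2.cs` further down (the standalone shape here is illustrative):

```csharp
using System.Collections.Generic;
using System.Linq;
using DiunaBI.Domain.Entities;

// Reads one configuration value from a worker layer's key/value records,
// matching the lookup the processor performs for "Year", "IsEnabled", etc.
static string? GetRecordValue(ICollection<Record> records, string code)
    => records.FirstOrDefault(x => x.Code == code)?.Desc1;

// e.g. GetRecordValue(worker.Records, "Plugin") returns "PedrolloPL.Process.P2"
// for the layer created by the script above.
```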
@@ -60,8 +60,8 @@ public class JobWorkerService : BackgroundService
                 return;
             }
 
-            _logger.LogInformation("JobWorker: Processing job {JobId} - {LayerName} ({JobType})",
-                job.Id, job.LayerName, job.JobType);
+            _logger.LogInformation("JobWorker: Processing job {JobId} - {LayerName} ({JobType}) - Current RetryCount: {RetryCount}, MaxRetries: {MaxRetries}, Status: {Status}",
+                job.Id, job.LayerName, job.JobType, job.RetryCount, job.MaxRetries, job.Status);
 
             // Mark job as running
             job.Status = JobStatus.Running;
@@ -129,29 +129,35 @@ public class JobWorkerService : BackgroundService
         {
             _logger.LogError(ex, "JobWorker: Job {JobId} failed - {LayerName}", job.Id, job.LayerName);
 
-            job.RetryCount++;
-            job.LastError = ex.Message;
+            // Capture full error details including inner exceptions
+            job.LastError = GetFullErrorMessage(ex);
             job.ModifiedAtUtc = DateTime.UtcNow;
 
             if (job.RetryCount >= job.MaxRetries)
             {
                 job.Status = JobStatus.Failed;
-                _logger.LogWarning("JobWorker: Job {JobId} marked as Failed after {RetryCount} attempts",
-                    job.Id, job.RetryCount);
+                _logger.LogWarning("JobWorker: Job {JobId} marked as Failed - no more retries available (RetryCount: {RetryCount}, MaxRetries: {MaxRetries})",
+                    job.Id, job.RetryCount, job.MaxRetries);
             }
             else
             {
                 job.Status = JobStatus.Retrying;
 
-                // Exponential backoff: wait before retrying based on attempt number
-                var backoffDelay = GetBackoffDelay(job.RetryCount);
-
-                _logger.LogInformation("JobWorker: Job {JobId} will retry in {Delay} (attempt {RetryCount}/{MaxRetries})",
-                    job.Id, backoffDelay, job.RetryCount, job.MaxRetries);
-
-                // Wait before marking as pending again
-                await Task.Delay(backoffDelay, stoppingToken);
-                job.Status = JobStatus.Pending;
+                // Exponential backoff: wait before retrying
+                var backoffDelay = GetBackoffDelay(job.RetryCount + 1);
+
+                _logger.LogInformation("JobWorker: Job {JobId} will retry in {Delay} (retry {RetryNumber} of {MaxRetries})",
+                    job.Id, backoffDelay, job.RetryCount + 1, job.MaxRetries);
+
+                // Save current state with error message
+                await db.SaveChangesAsync(stoppingToken);
+
+                // Wait before next attempt
+                await Task.Delay(backoffDelay, stoppingToken);
+
+                // Increment retry count for next attempt
+                job.RetryCount++;
+                job.ModifiedAtUtc = DateTime.UtcNow;
             }
         }
         finally
@@ -175,4 +181,18 @@ public class JobWorkerService : BackgroundService
             _ => TimeSpan.FromMinutes(5) // 3rd+ retry: 5 minutes
         };
     }
+
+    private static string GetFullErrorMessage(Exception ex)
+    {
+        var messages = new List<string>();
+        var currentException = ex;
+
+        while (currentException != null)
+        {
+            messages.Add(currentException.Message);
+            currentException = currentException.InnerException;
+        }
+
+        return string.Join(" → ", messages);
+    }
 }
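
The last hunk shows only the final arm of `GetBackoffDelay`, the helper the retry path calls with `job.RetryCount + 1`. A sketch of its plausible full shape follows; the 5-minute arm and its comment come from the diff, while the first two delays are assumptions, not the committed values:

```csharp
// Sketch of the backoff helper referenced above. Only the "_ =>" arm is
// visible in this diff; the 1st and 2nd retry delays are assumed.
private static TimeSpan GetBackoffDelay(int retryCount)
{
    return retryCount switch
    {
        1 => TimeSpan.FromSeconds(30), // 1st retry: assumed value
        2 => TimeSpan.FromMinutes(1),  // 2nd retry: assumed value
        _ => TimeSpan.FromMinutes(5)   // 3rd+ retry: 5 minutes (from the diff)
    };
}
```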
@@ -338,8 +338,8 @@ public class PedrolloPLImportB3 : BaseDataImporter
             IsCancelled = false,
             CreatedAt = now,
             ModifiedAt = now,
-            CreatedById = importWorker.CreatedById,
-            ModifiedById = importWorker.ModifiedById
+            CreatedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"), // System user
+            ModifiedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d") // System user
         };
 
         importLayer.Name = $"L{importLayer.Number}-I-B3-{ImportYear}-{now:yyyyMMddHHmm}";
@@ -374,8 +374,8 @@ public class PedrolloPLImportB3 : BaseDataImporter
         foreach (var record in records)
         {
             record.LayerId = importLayer.Id;
-            record.CreatedById = importLayer.CreatedById;
-            record.ModifiedById = importLayer.ModifiedById;
+            record.CreatedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"); // System user
+            record.ModifiedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"); // System user
             _db.Records.Add(record);
         }
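
Both hunks replace worker-derived audit ids with the same hardcoded system-user GUID, which also recurs in the new processor file below. A hypothetical tidy-up (not part of this commit; the name `SystemUserId` is illustrative) would hoist the literal into one constant:

```csharp
// Hypothetical refactor: a single definition for the system-user id that is
// repeated across PedrolloPLImportB3 and PedrolloPLProcessP2 in this commit.
private static readonly Guid SystemUserId = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d");

// record.CreatedById  = SystemUserId;
// record.ModifiedById = SystemUserId;
```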
542
DiunaBI.Plugins.PedrolloPL/Processors/PedrolloPLProcessP2.cs
Normal file
@@ -0,0 +1,542 @@
using System.Text;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Google.Apis.Sheets.v4;
using Google.Apis.Sheets.v4.Data;
using Microsoft.Extensions.Logging;

namespace DiunaBI.Plugins.PedrolloPL.Processors;

public class PedrolloPLProcessP2 : BaseDataProcessor
{
    public override string ProcessorType => "PedrolloPL.Process.P2";

    private readonly AppDbContext _db;
    private readonly ILogger<PedrolloPLProcessP2> _logger;
    private readonly SpreadsheetsResource.ValuesResource _googleSheetValues;

    // Configuration properties
    private string? Year { get; set; }
    private bool IsEnabled { get; set; }
    private string? GoogleSheetId { get; set; }
    private string? GoogleSheetTab { get; set; }
    private string? GoogleSheetRange { get; set; }

    // Cached data
    private Layer? _sourceLayer;
    private Layer? _processedLayer;
    private Dictionary<string, string>? _codeToRegionMap;

    public PedrolloPLProcessP2(
        AppDbContext db,
        ILogger<PedrolloPLProcessP2> logger,
        SpreadsheetsResource.ValuesResource googleSheetValues)
    {
        _db = db;
        _logger = logger;
        _googleSheetValues = googleSheetValues;
    }

    public override void Process(Layer processWorker)
    {
        try
        {
            _logger.LogInformation("{ProcessorType}: Starting process for {ProcessWorkerName} ({ProcessWorkerId})",
                ProcessorType, processWorker.Name, processWorker.Id);

            // Clear cache at start
            _sourceLayer = null;
            _processedLayer = null;
            _codeToRegionMap = null;

            LoadConfiguration(processWorker);
            ValidateConfiguration();

            if (!IsEnabled)
            {
                _logger.LogInformation("{ProcessorType}: Process disabled for {ProcessWorkerName}",
                    ProcessorType, processWorker.Name);
                return;
            }

            // Find latest B3 import layer for the configured year
            FindSourceLayer();

            // Find or create processed layer
            FindOrCreateProcessedLayer(processWorker);

            // Transform data from source to processed layer
            var transformedRecords = TransformData();

            // Save records to processed layer
            SaveRecordsToLayer(_processedLayer!, transformedRecords);

            // Export to Google Sheets if configured
            if (!string.IsNullOrEmpty(GoogleSheetId) && !string.IsNullOrEmpty(GoogleSheetTab) && !string.IsNullOrEmpty(GoogleSheetRange))
            {
                ExportToGoogleSheet();
            }
            else
            {
                _logger.LogInformation("{ProcessorType}: Google Sheet export skipped - configuration not provided",
                    ProcessorType);
            }

            _logger.LogInformation("{ProcessorType}: Successfully completed process for {ProcessWorkerName} - Processed {RecordCount} records",
                ProcessorType, processWorker.Name, transformedRecords.Count);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to process {ProcessWorkerName} ({ProcessWorkerId})",
                ProcessorType, processWorker.Name, processWorker.Id);
            throw;
        }
        finally
        {
            // Clear cache after process
            _sourceLayer = null;
            _processedLayer = null;
            _codeToRegionMap = null;
        }
    }

    private void LoadConfiguration(Layer processWorker)
    {
        if (processWorker.Records == null) return;

        Year = GetRecordValue(processWorker.Records, "Year");
        IsEnabled = GetRecordValue(processWorker.Records, "IsEnabled") == "True";
        GoogleSheetId = GetRecordValue(processWorker.Records, "GoogleSheetId");
        GoogleSheetTab = GetRecordValue(processWorker.Records, "GoogleSheetTab");
        GoogleSheetRange = GetRecordValue(processWorker.Records, "GoogleSheetRange");

        _logger.LogDebug(
            "{ProcessorType}: Configuration loaded - Year: {Year}, Enabled: {IsEnabled}, SheetId: {SheetId}, Tab: {Tab}, Range: {Range}",
            ProcessorType, Year, IsEnabled, GoogleSheetId, GoogleSheetTab, GoogleSheetRange);
    }

    private void ValidateConfiguration()
    {
        var errors = new List<string>();

        if (string.IsNullOrEmpty(Year)) errors.Add("Year is required");

        if (errors.Any())
        {
            throw new InvalidOperationException($"Configuration validation failed: {string.Join(", ", errors)}");
        }

        _logger.LogDebug("{ProcessorType}: Configuration validated successfully", ProcessorType);
    }

    private void FindSourceLayer()
    {
        _logger.LogDebug("{ProcessorType}: Searching for latest B3 import layer for year {Year}",
            ProcessorType, Year);

        // Find latest B3 import layer matching pattern: L*-I-B3-{Year}-*
        var layerNamePattern = $"-I-B3-{Year}-";

        _sourceLayer = _db.Layers
            .Where(x => x.Name != null && x.Name.Contains(layerNamePattern) && x.Type == LayerType.Import)
            .OrderByDescending(x => x.CreatedAt)
            .FirstOrDefault();

        if (_sourceLayer == null)
        {
            throw new InvalidOperationException(
                $"Source B3 import layer not found for year {Year} (pattern: *{layerNamePattern}*)");
        }

        _logger.LogInformation("{ProcessorType}: Found source layer - Id: {LayerId}, Name: {LayerName}, CreatedAt: {CreatedAt}",
            ProcessorType, _sourceLayer.Id, _sourceLayer.Name, _sourceLayer.CreatedAt);
    }

    private void FindOrCreateProcessedLayer(Layer processWorker)
    {
        _logger.LogDebug("{ProcessorType}: Looking for existing processed layer with ParentId={ParentId}",
            ProcessorType, processWorker.Id);

        // Check if processed layer already exists with ParentId = ProcessWorker.Id
        _processedLayer = _db.Layers
            .Where(x => x.ParentId == processWorker.Id && x.Type == LayerType.Processed)
            .FirstOrDefault();

        if (_processedLayer != null)
        {
            _logger.LogInformation("{ProcessorType}: Found existing processed layer - Id: {LayerId}, Name: {LayerName}",
                ProcessorType, _processedLayer.Id, _processedLayer.Name);
        }
        else
        {
            _logger.LogInformation("{ProcessorType}: No existing processed layer found, creating new one",
                ProcessorType);

            _processedLayer = CreateProcessedLayer(processWorker);
        }
    }

    private Layer CreateProcessedLayer(Layer processWorker)
    {
        var now = DateTime.UtcNow;

        var processedLayer = new Layer
        {
            Id = Guid.NewGuid(),
            Number = _db.Layers.Count() + 1,
            ParentId = processWorker.Id,
            Type = LayerType.Processed,
            IsCancelled = false,
            CreatedAt = now,
            ModifiedAt = now,
            CreatedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"), // System user
            ModifiedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d") // System user
        };

        processedLayer.Name = $"L{processedLayer.Number}-P-P2-{Year}-{now:yyyyMMddHHmm}";

        _logger.LogDebug("{ProcessorType}: Creating processed layer '{LayerName}' (Number: {Number})",
            ProcessorType, processedLayer.Name, processedLayer.Number);

        _db.Layers.Add(processedLayer);
        _db.SaveChanges();

        _logger.LogInformation("{ProcessorType}: Created processed layer '{LayerName}' with Id: {LayerId}",
            ProcessorType, processedLayer.Name, processedLayer.Id);

        return processedLayer;
    }

    private List<Record> TransformData()
    {
        if (_sourceLayer == null)
        {
            throw new InvalidOperationException("Source layer not loaded. Call FindSourceLayer first.");
        }

        _logger.LogDebug("{ProcessorType}: Loading records from source layer {LayerId}",
            ProcessorType, _sourceLayer.Id);

        // Load all records from source layer
        var sourceRecords = _db.Records
            .Where(x => x.LayerId == _sourceLayer.Id && !x.IsDeleted)
            .ToList();

        _logger.LogInformation("{ProcessorType}: Loaded {RecordCount} records from source layer",
            ProcessorType, sourceRecords.Count);

        // Group records by first 2 digits of Code (region code)
        var groupedByRegion = sourceRecords
            .Where(x => !string.IsNullOrEmpty(x.Code) && x.Code.Length >= 2)
            .GroupBy(x => x.Code!.Substring(0, 2))
            .ToList();

        _logger.LogDebug("{ProcessorType}: Grouped into {GroupCount} regions",
            ProcessorType, groupedByRegion.Count);

        var transformedRecords = new List<Record>();
        var now = DateTime.UtcNow;

        foreach (var regionGroup in groupedByRegion)
        {
            var regionCode = regionGroup.Key;

            // Create array for 12 months (initialize with 0)
            var monthValues = new double?[12];
            for (int i = 0; i < 12; i++)
            {
                monthValues[i] = 0;
            }

            // Fill in values for each month
            foreach (var sourceRecord in regionGroup)
            {
                if (sourceRecord.Code!.Length >= 4)
                {
                    // Extract month from last 2 digits of code (e.g., "0105" -> month 5)
                    var monthStr = sourceRecord.Code.Substring(2, 2);
                    if (int.TryParse(monthStr, out var month) && month >= 1 && month <= 12)
                    {
                        var monthIndex = month - 1; // Convert to 0-based index
                        monthValues[monthIndex] = sourceRecord.Value1 ?? 0;

                        _logger.LogDebug("{ProcessorType}: Region {RegionCode}, Month {Month}: Value = {Value}",
                            ProcessorType, regionCode, month, sourceRecord.Value1);
                    }
                }
            }

            // Create transformed record with Code = region code and Value1-12 = monthly values
            var record = new Record
            {
                Id = Guid.NewGuid(),
                Code = regionCode,
                Value1 = monthValues[0],
                Value2 = monthValues[1],
                Value3 = monthValues[2],
                Value4 = monthValues[3],
                Value5 = monthValues[4],
                Value6 = monthValues[5],
                Value7 = monthValues[6],
                Value8 = monthValues[7],
                Value9 = monthValues[8],
                Value10 = monthValues[9],
                Value11 = monthValues[10],
                Value12 = monthValues[11],
                CreatedAt = now,
                ModifiedAt = now
            };

            transformedRecords.Add(record);

            _logger.LogDebug("{ProcessorType}: Transformed region '{RegionCode}' - Values: [{Values}]",
                ProcessorType, regionCode,
                string.Join(", ", monthValues.Select(v => v?.ToString() ?? "0")));
        }

        _logger.LogInformation("{ProcessorType}: Successfully transformed {RecordCount} records from {SourceCount} source records",
            ProcessorType, transformedRecords.Count, sourceRecords.Count);

        return transformedRecords;
    }

    private void SaveRecordsToLayer(Layer processedLayer, List<Record> records)
    {
        _logger.LogDebug("{ProcessorType}: Saving {RecordCount} records to layer {LayerId}",
            ProcessorType, records.Count, processedLayer.Id);

        // Delete any existing records for this layer
        var toDelete = _db.Records.Where(x => x.LayerId == processedLayer.Id).ToList();
        if (toDelete.Count > 0)
        {
            _logger.LogInformation("{ProcessorType}: Found {ExistingCount} existing records for layer {LayerId}, removing them",
                ProcessorType, toDelete.Count, processedLayer.Id);
            _db.Records.RemoveRange(toDelete);
        }

        // Set all required properties for each record
        foreach (var record in records)
        {
            record.LayerId = processedLayer.Id;
            record.CreatedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"); // System user
            record.ModifiedById = Guid.Parse("f392209e-123e-4651-a5a4-0b1d6cf9ff9d"); // System user
            _db.Records.Add(record);
        }

        _db.SaveChanges();

        _logger.LogInformation("{ProcessorType}: Successfully saved {RecordCount} records to layer '{LayerName}'",
            ProcessorType, records.Count, processedLayer.Name);
    }

    private void ExportToGoogleSheet()
    {
        try
        {
            _logger.LogInformation("{ProcessorType}: Starting Google Sheet export to {SheetId}, Tab: {Tab}, Range: {Range}",
                ProcessorType, GoogleSheetId, GoogleSheetTab, GoogleSheetRange);

            // Load dictionary for code to region name translation
            LoadCodeToRegionDictionary();

            // Download current sheet data
            var sheetData = DownloadSheetData();

            // Update sheet data with processed layer values
            var updatedData = UpdateSheetDataWithProcessedValues(sheetData);

            // Upload updated data back to sheet
            UploadSheetData(updatedData);

            _logger.LogInformation("{ProcessorType}: Successfully exported data to Google Sheet",
                ProcessorType);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to export to Google Sheet",
                ProcessorType);
            throw;
        }
    }

    private void LoadCodeToRegionDictionary()
    {
        const string dictionaryLayerName = "L1-D-P2-CODES";

        _logger.LogDebug("{ProcessorType}: Loading code to region mapping from dictionary layer '{DictionaryLayerName}'",
            ProcessorType, dictionaryLayerName);

        var dictionaryLayer = _db.Layers
            .Where(x => x.Name == dictionaryLayerName && x.Type == LayerType.Dictionary)
            .FirstOrDefault();

        if (dictionaryLayer == null)
        {
            throw new InvalidOperationException($"Dictionary layer '{dictionaryLayerName}' not found");
        }

        // Load records for the dictionary layer
        var records = _db.Records
            .Where(x => x.LayerId == dictionaryLayer.Id)
            .ToList();

        // Build mapping: Code -> Desc1 (region name)
        _codeToRegionMap = records.ToDictionary(
            r => r.Code ?? string.Empty,
            r => r.Desc1 ?? string.Empty,
            StringComparer.OrdinalIgnoreCase);

        _logger.LogInformation("{ProcessorType}: Loaded {MappingCount} code to region mappings",
            ProcessorType, _codeToRegionMap.Count);
    }

    private IList<IList<object>> DownloadSheetData()
    {
        _logger.LogDebug("{ProcessorType}: Downloading sheet data from range {Range}",
            ProcessorType, $"{GoogleSheetTab}!{GoogleSheetRange}");

        var range = $"{GoogleSheetTab}!{GoogleSheetRange}";
        ValueRange? response;

        try
        {
            response = _googleSheetValues.Get(GoogleSheetId, range).Execute();
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to download sheet data from {Range}",
                ProcessorType, range);
            throw new InvalidOperationException($"Failed to download sheet data from {range}", e);
        }

        if (response?.Values == null || response.Values.Count == 0)
        {
            throw new InvalidOperationException($"No data found in sheet range {range}");
        }

        _logger.LogInformation("{ProcessorType}: Downloaded {RowCount} rows from Google Sheet",
            ProcessorType, response.Values.Count);

        return response.Values;
    }

    private IList<IList<object>> UpdateSheetDataWithProcessedValues(IList<IList<object>> sheetData)
    {
        if (_processedLayer == null)
        {
            throw new InvalidOperationException("Processed layer not loaded");
        }

        if (_codeToRegionMap == null)
        {
            throw new InvalidOperationException("Code to region mapping not loaded");
        }

        _logger.LogDebug("{ProcessorType}: Updating sheet data with processed values from layer {LayerId}",
            ProcessorType, _processedLayer.Id);

        // Load all records from processed layer
        var processedRecords = _db.Records
            .Where(x => x.LayerId == _processedLayer.Id && !x.IsDeleted)
            .ToList();

        _logger.LogDebug("{ProcessorType}: Loaded {RecordCount} records from processed layer",
            ProcessorType, processedRecords.Count);

        var updatedRowCount = 0;

        // Iterate through sheet data and update matching rows
        foreach (var row in sheetData)
        {
            if (row.Count == 0) continue;

            // First column (index 0) contains the region name (Kontrola column)
            var regionName = row[0]?.ToString()?.Trim();
            if (string.IsNullOrEmpty(regionName)) continue;

            // Find the code for this region name
            var regionCode = _codeToRegionMap
                .FirstOrDefault(x => x.Value.Equals(regionName, StringComparison.OrdinalIgnoreCase))
                .Key;

            if (string.IsNullOrEmpty(regionCode))
            {
                _logger.LogWarning("{ProcessorType}: No code found for region '{RegionName}' in dictionary - skipping",
                    ProcessorType, regionName);
                continue;
            }

            // Find the processed record for this code
            var processedRecord = processedRecords.FirstOrDefault(x => x.Code == regionCode);
            if (processedRecord == null)
            {
                _logger.LogWarning("{ProcessorType}: No processed record found for code '{RegionCode}' (region: '{RegionName}') - skipping",
                    ProcessorType, regionCode, regionName);
                continue;
            }

            // Update columns 1-12 (monthly values) in the row
            // Column 0 is Kontrola (region name), columns 1-12 are monthly values
            // Ensure row has enough columns (13 total: 1 for region + 12 for months)
            while (row.Count < 13)
            {
                row.Add("");
            }

            // Update monthly values (Value1 through Value12)
            row[1] = processedRecord.Value1 ?? 0;
            row[2] = processedRecord.Value2 ?? 0;
            row[3] = processedRecord.Value3 ?? 0;
            row[4] = processedRecord.Value4 ?? 0;
            row[5] = processedRecord.Value5 ?? 0;
            row[6] = processedRecord.Value6 ?? 0;
            row[7] = processedRecord.Value7 ?? 0;
            row[8] = processedRecord.Value8 ?? 0;
            row[9] = processedRecord.Value9 ?? 0;
            row[10] = processedRecord.Value10 ?? 0;
            row[11] = processedRecord.Value11 ?? 0;
            row[12] = processedRecord.Value12 ?? 0;

            updatedRowCount++;

            _logger.LogDebug("{ProcessorType}: Updated row for region '{RegionName}' (code: {RegionCode}) with 12 monthly values",
                ProcessorType, regionName, regionCode);
        }

        _logger.LogInformation("{ProcessorType}: Updated {UpdatedRowCount} rows with processed data",
            ProcessorType, updatedRowCount);

        return sheetData;
    }

    private void UploadSheetData(IList<IList<object>> data)
    {
        _logger.LogDebug("{ProcessorType}: Uploading {RowCount} rows to Google Sheet range {Range}",
            ProcessorType, data.Count, $"{GoogleSheetTab}!{GoogleSheetRange}");

        var range = $"{GoogleSheetTab}!{GoogleSheetRange}";
        var valueRange = new ValueRange { Values = data };

        try
        {
            var updateRequest = _googleSheetValues.Update(valueRange, GoogleSheetId, range);
            updateRequest.ValueInputOption = SpreadsheetsResource.ValuesResource.UpdateRequest.ValueInputOptionEnum.USERENTERED;
            var response = updateRequest.Execute();

            _logger.LogInformation("{ProcessorType}: Successfully uploaded data to Google Sheet - Updated {UpdatedCells} cells",
                ProcessorType, response.UpdatedCells);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ProcessorType}: Failed to upload data to Google Sheet range {Range}",
                ProcessorType, range);
            throw new InvalidOperationException($"Failed to upload data to Google Sheet range {range}", e);
        }
    }

    private string? GetRecordValue(ICollection<Record> records, string code)
    {
        return records.FirstOrDefault(x => x.Code == code)?.Desc1;
    }
}
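
The grouping convention in `TransformData` in one worked example: a 4-digit source code carries the region in its first two characters and the month in its last two, so `"0105"` lands in region `01`, month 5 (stored as `Value5` on that region's row). A minimal standalone illustration of that split:

```csharp
// Worked example of the code convention used by TransformData above:
// first two digits = region code, last two digits = month (1-12).
var code = "0105";
var regionCode = code.Substring(0, 2);                             // "01"
var isMonth = int.TryParse(code.Substring(2, 2), out var month);   // month == 5
var monthIndex = month - 1;                                        // 0-based slot -> Value5
```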