WIP: p2 plugin
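Adds PedrolloPLImportP2: it loads the latest matching DataInbox entry, base64-decodes and JSON-deserializes it into rows of [regionName, 12 monthly values], resolves region codes via the L1-D-P2-CODES dictionary layer, and writes one record per month into a new Import layer. Below is a minimal sketch of the expected payload and the derived monthly codes; the region code "02" and the sample values are hypothetical, the real name-to-code mapping lives in the dictionary layer.

using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json;

// Payload shape the importer expects after base64-decoding DataInbox.Data:
// each row is [regionName, value for month 1, ..., value for month 12].
var rows = new List<List<object>>
{
    new() { "DOLNOŚLĄSKIE", 1183.15, 950.0, 1001.2, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
};

// How such a payload would be packed into DataInbox.Data (JSON, then base64).
var packed = Convert.ToBase64String(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(rows)));
Console.WriteLine($"DataInbox.Data ({packed.Length} chars of base64)");

// Record codes derived per month, assuming the dictionary maps "DOLNOŚLĄSKIE" -> "02".
const string regionCode = "02";
for (var month = 1; month <= 12; month++)
{
    Console.WriteLine($"{regionCode}{month:00} -> {rows[0][month]}");
}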
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m14s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m10s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m12s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m7s
382 DiunaBI.Plugins.PedrolloPL/Importers/PedrolloPLImportP2.cs (normal file)
@@ -0,0 +1,382 @@
using System.Text;
using System.Text.Json;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Microsoft.Extensions.Logging;

namespace DiunaBI.Plugins.PedrolloPL.Importers;

public class PedrolloPLImportP2 : BaseDataImporter
{
    public override string ImporterType => "PedrolloPL.Import.P2";

    private readonly AppDbContext _db;
    private readonly ILogger<PedrolloPLImportP2> _logger;

    // Configuration properties
    private string? DataInboxName { get; set; }
    private string? DataInboxSource { get; set; }
    private string? StartDate { get; set; }
    private string? EndDate { get; set; }
    private string? ImportYear { get; set; }
    private bool IsEnabled { get; set; }

    // Cached deserialized data
    private List<List<object>>? _cachedRawData;
    private DataInbox? _cachedDataInbox;
    private Dictionary<string, string>? _regionCodeMap;

    public PedrolloPLImportP2(
        AppDbContext db,
        ILogger<PedrolloPLImportP2> logger)
    {
        _db = db;
        _logger = logger;
    }

    public override void Import(Layer importWorker)
    {
        try
        {
            _logger.LogInformation("{ImporterType}: Starting import for {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);

            // Clear cache at start
            _cachedRawData = null;
            _cachedDataInbox = null;
            _regionCodeMap = null;

            LoadConfiguration(importWorker);
            ValidateConfiguration();

            if (!IsEnabled)
            {
                _logger.LogInformation("{ImporterType}: Import disabled for {ImportWorkerName}",
                    ImporterType, importWorker.Name);
                return;
            }

            // Find and deserialize DataInbox data
            FindAndDeserializeDataInbox();

            // Load region code mapping from dictionary layer
            LoadRegionCodeMapping();

            // Map data from DataInbox to Layer records
            var mappedRecords = MapDataToRecords();

            // Create new Import layer
            var importLayer = CreateImportLayer(importWorker);

            // Save records to database
            SaveRecordsToLayer(importLayer, mappedRecords);

            _logger.LogInformation("{ImporterType}: Successfully completed import for {ImportWorkerName} - Created {RecordCount} records",
                ImporterType, importWorker.Name, mappedRecords.Count);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ImporterType}: Failed to import {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);
            throw;
        }
        finally
        {
            // Clear cache after import
            _cachedRawData = null;
            _cachedDataInbox = null;
            _regionCodeMap = null;
        }
    }

    private void LoadConfiguration(Layer importWorker)
    {
        if (importWorker.Records == null) return;

        DataInboxName = GetRecordValue(importWorker.Records, "DataInboxName");
        DataInboxSource = GetRecordValue(importWorker.Records, "DataInboxSource");
        StartDate = GetRecordValue(importWorker.Records, "StartDate");
        EndDate = GetRecordValue(importWorker.Records, "EndDate");
        ImportYear = GetRecordValue(importWorker.Records, "ImportYear");
        IsEnabled = GetRecordValue(importWorker.Records, "IsEnabled") == "True";

        _logger.LogDebug(
            "{ImporterType}: Configuration loaded - DataInboxName: {DataInboxName}, Source: {Source}, Year: {Year}, Period: {StartDate} to {EndDate}, Enabled: {IsEnabled}",
            ImporterType, DataInboxName, DataInboxSource, ImportYear, StartDate, EndDate, IsEnabled);
    }

    private void ValidateConfiguration()
    {
        var errors = new List<string>();

        if (string.IsNullOrEmpty(DataInboxName)) errors.Add("DataInboxName is required");
        if (string.IsNullOrEmpty(DataInboxSource)) errors.Add("DataInboxSource is required");
        if (string.IsNullOrEmpty(StartDate)) errors.Add("StartDate is required");
        if (string.IsNullOrEmpty(EndDate)) errors.Add("EndDate is required");

        if (errors.Any())
        {
            throw new InvalidOperationException($"Configuration validation failed: {string.Join(", ", errors)}");
        }

        _logger.LogDebug("{ImporterType}: Configuration validated successfully", ImporterType);
    }

    private void FindAndDeserializeDataInbox()
    {
        _logger.LogDebug("{ImporterType}: Searching for DataInbox with Name='{DataInboxName}' and Source='{DataInboxSource}'",
            ImporterType, DataInboxName, DataInboxSource);

        // Find DataInbox by Name and Source, order by CreatedAt descending to get the latest
        var dataInbox = _db.DataInbox
            .Where(x => x.Name == DataInboxName && x.Source == DataInboxSource)
            .OrderByDescending(x => x.CreatedAt)
            .FirstOrDefault();

        if (dataInbox == null)
        {
            throw new InvalidOperationException(
                $"DataInbox not found with Name='{DataInboxName}' and Source='{DataInboxSource}'");
        }

        _logger.LogInformation("{ImporterType}: Found DataInbox - Id: {DataInboxId}, Name: {Name}, Source: {Source}, CreatedAt: {CreatedAt}",
            ImporterType, dataInbox.Id, dataInbox.Name, dataInbox.Source, dataInbox.CreatedAt);

        // Deserialize the data
        try
        {
            var data = Convert.FromBase64String(dataInbox.Data);
            var jsonString = Encoding.UTF8.GetString(data);

            _logger.LogDebug("{ImporterType}: Decoded {DataSize} bytes from base64",
                ImporterType, data.Length);

            // Deserialize as array of arrays: [["<nieznany>", 1183.15, ...], ["DOLNOŚLĄSKIE", ...]]
            var rawData = JsonSerializer.Deserialize<List<List<object>>>(jsonString);
            if (rawData == null || rawData.Count == 0)
            {
                throw new InvalidOperationException($"DataInbox.Data is empty for: {dataInbox.Name}");
            }

            _logger.LogInformation("{ImporterType}: Successfully deserialized {RowCount} rows from DataInbox",
                ImporterType, rawData.Count);

            // Log first few rows for debugging
            if (rawData.Count > 0)
            {
                var sampleSize = Math.Min(3, rawData.Count);
                _logger.LogDebug("{ImporterType}: Sample rows (first {SampleSize}):", ImporterType, sampleSize);
                for (int i = 0; i < sampleSize; i++)
                {
                    var row = rawData[i];
                    if (row.Count > 0)
                    {
                        var regionName = row[0]?.ToString() ?? "null";
                        var valueCount = row.Count - 1;
                        _logger.LogDebug(" [{Index}] Region: {Region}, Values: {ValueCount}",
                            i, regionName, valueCount);
                    }
                }
            }

            // Cache the deserialized data
            _cachedRawData = rawData;
            _cachedDataInbox = dataInbox;
        }
        catch (FormatException e)
        {
            _logger.LogError(e, "{ImporterType}: Invalid base64 data in DataInbox {DataInboxId}",
                ImporterType, dataInbox.Id);
            throw new InvalidOperationException($"Invalid base64 data in DataInbox: {dataInbox.Name}", e);
        }
        catch (JsonException e)
        {
            _logger.LogError(e, "{ImporterType}: Invalid JSON data in DataInbox {DataInboxId}",
                ImporterType, dataInbox.Id);
            throw new InvalidOperationException($"Invalid JSON data in DataInbox: {dataInbox.Name}", e);
        }
    }

    private void LoadRegionCodeMapping()
    {
        const string dictionaryLayerName = "L1-D-P2-CODES";

        _logger.LogDebug("{ImporterType}: Loading region code mapping from dictionary layer '{DictionaryLayerName}'",
            ImporterType, dictionaryLayerName);

        var dictionaryLayer = _db.Layers
            .Where(x => x.Name == dictionaryLayerName && x.Type == LayerType.Dictionary)
            .FirstOrDefault();

        if (dictionaryLayer == null)
        {
            throw new InvalidOperationException($"Dictionary layer '{dictionaryLayerName}' not found");
        }

        // Load records for the dictionary layer
        var records = _db.Records
            .Where(x => x.LayerId == dictionaryLayer.Id)
            .ToList();

        // Build mapping: Desc1 (region name) -> Code
        _regionCodeMap = records.ToDictionary(
            r => r.Desc1 ?? string.Empty,
            r => r.Code ?? string.Empty,
            StringComparer.OrdinalIgnoreCase);

        _logger.LogInformation("{ImporterType}: Loaded {MappingCount} region code mappings",
            ImporterType, _regionCodeMap.Count);
    }

    private List<Record> MapDataToRecords()
    {
        if (_cachedRawData == null)
        {
            throw new InvalidOperationException("Raw data not loaded. Call FindAndDeserializeDataInbox first.");
        }

        if (_regionCodeMap == null)
        {
            throw new InvalidOperationException("Region code mapping not loaded. Call LoadRegionCodeMapping first.");
        }

        var records = new List<Record>();
        var now = DateTime.UtcNow;

        _logger.LogDebug("{ImporterType}: Starting data mapping for {RowCount} rows",
            ImporterType, _cachedRawData.Count);

        foreach (var row in _cachedRawData)
        {
            if (row.Count < 13)
            {
                _logger.LogWarning("{ImporterType}: Skipping row with insufficient data - expected 13 elements, got {Count}",
                    ImporterType, row.Count);
                continue;
            }

            // First element is region name
            var regionName = row[0]?.ToString();
            if (string.IsNullOrEmpty(regionName))
            {
                _logger.LogWarning("{ImporterType}: Skipping row with empty region name", ImporterType);
                continue;
            }

            // Find region code from dictionary
            if (!_regionCodeMap.TryGetValue(regionName, out var regionCode))
            {
                _logger.LogWarning("{ImporterType}: Region code not found for '{RegionName}' - skipping",
                    ImporterType, regionName);
                continue;
            }

            // Create 12 records (one per month)
            for (int month = 1; month <= 12; month++)
            {
                var valueIndex = month; // row[1] = January, row[2] = February, etc.
                var valueObj = row[valueIndex];

                // Convert value to double
                double? value = null;
                if (valueObj != null)
                {
                    // Handle JsonElement if deserialized from JSON
                    if (valueObj is JsonElement jsonElement)
                    {
                        if (jsonElement.ValueKind == JsonValueKind.Number)
                        {
                            value = jsonElement.GetDouble();
                        }
                    }
                    else if (valueObj is double d)
                    {
                        value = d;
                    }
                    else if (double.TryParse(valueObj.ToString(), out var parsed))
                    {
                        value = parsed;
                    }
                }

                // Create code: {regionCode}{month:00}
                var code = $"{regionCode}{month:00}";

                var record = new Record
                {
                    Id = Guid.NewGuid(),
                    Code = code,
                    Value1 = value,
                    CreatedAt = now,
                    ModifiedAt = now
                };

                records.Add(record);
            }

            _logger.LogDebug("{ImporterType}: Mapped region '{RegionName}' (code: {RegionCode}) to 12 records",
                ImporterType, regionName, regionCode);
        }

        _logger.LogInformation("{ImporterType}: Successfully mapped {RecordCount} records from {RowCount} rows",
            ImporterType, records.Count, _cachedRawData.Count);

        return records;
    }

    private Layer CreateImportLayer(Layer importWorker)
    {
        var now = DateTime.UtcNow;

        var importLayer = new Layer
        {
            Id = Guid.NewGuid(),
            Number = _db.Layers.Count() + 1,
            ParentId = importWorker.Id,
            Type = LayerType.Import,
            IsCancelled = false,
            CreatedAt = now,
            ModifiedAt = now,
            CreatedById = importWorker.CreatedById,
            ModifiedById = importWorker.ModifiedById
        };

        // Format: L{Number}-I-P2-{Year}-{Timestamp}
        importLayer.Name = $"L{importLayer.Number}-I-P2-{ImportYear}-{now:yyyyMMddHHmm}";

        _logger.LogDebug("{ImporterType}: Creating import layer '{LayerName}' (Number: {Number})",
            ImporterType, importLayer.Name, importLayer.Number);

        _db.Layers.Add(importLayer);
        _db.SaveChanges();

        _logger.LogInformation("{ImporterType}: Created import layer '{LayerName}' with Id: {LayerId}",
            ImporterType, importLayer.Name, importLayer.Id);

        return importLayer;
    }

    private void SaveRecordsToLayer(Layer importLayer, List<Record> records)
    {
        _logger.LogDebug("{ImporterType}: Saving {RecordCount} records to layer {LayerId}",
            ImporterType, records.Count, importLayer.Id);

        // Set LayerId for all records
        foreach (var record in records)
        {
            record.LayerId = importLayer.Id;
        }

        _db.Records.AddRange(records);
        _db.SaveChanges();

        _logger.LogInformation("{ImporterType}: Successfully saved {RecordCount} records to layer '{LayerName}'",
            ImporterType, records.Count, importLayer.Name);
    }

    private string? GetRecordValue(ICollection<Record> records, string code)
    {
        return records.FirstOrDefault(x => x.Code == code)?.Desc1;
    }
}