using System.Globalization;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore;
using DiunaBI.Core.Models;
using DiunaBI.Core.Database.Context;

namespace DiunaBI.Plugins.Morska.Importers;
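
// Imports the Morska D3 data set: reads the most recent matching DataInbox entry,
// filters its records to the configured year/month period and stores them in a new import layer.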
public class MorskaD3Importer : MorskaBaseImporter
{
    public override string ImporterType => "Morska.Import.D3";

    private readonly AppDbContext _db;
    private readonly ILogger<MorskaD3Importer> _logger;

    // Configuration properties
    private string? ImportYear { get; set; }
    private string? ImportMonth { get; set; }
    private string? ImportName { get; set; }
    private string? ImportType { get; set; }
    private DateTime? StartDate { get; set; }
    private DateTime? EndDate { get; set; }
    private bool IsEnabled { get; set; }

    public MorskaD3Importer(
        AppDbContext db,
        ILogger<MorskaD3Importer> logger)
    {
        _db = db;
        _logger = logger;
    }
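
    // Runs the full import for the given worker layer: loads configuration, checks whether an
    // import is needed, validates the configuration and performs the import; errors are logged and rethrown.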
    public override void Import(Layer importWorker)
    {
        try
        {
            _logger.LogInformation("{ImporterType}: Starting import for {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);

            LoadConfiguration(importWorker);

            if (!ShouldPerformImport(importWorker))
            {
                _logger.LogInformation("{ImporterType}: Import not needed for {ImportWorkerName}",
                    ImporterType, importWorker.Name);
                return;
            }

            ValidateConfiguration();

            PerformImport(importWorker);

            _logger.LogInformation("{ImporterType}: Successfully completed import for {ImportWorkerName}",
                ImporterType, importWorker.Name);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ImporterType}: Failed to import {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);
            throw;
        }
    }
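
    // Loads importer settings (period, name, type, enabled flag and an optional
    // StartDate/EndDate range in yyyy.MM.dd format) from the worker layer's records.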
    private void LoadConfiguration(Layer importWorker)
    {
        if (importWorker.Records == null) return;

        ImportYear = GetRecordValue(importWorker.Records, "ImportYear");
        ImportMonth = GetRecordValue(importWorker.Records, "ImportMonth");
        ImportName = GetRecordValue(importWorker.Records, "ImportName");
        ImportType = GetRecordValue(importWorker.Records, "ImportType");
        IsEnabled = GetRecordValue(importWorker.Records, "IsEnabled") == "True";

        var startDateStr = GetRecordValue(importWorker.Records, "StartDate");
        if (startDateStr != null && DateTime.TryParseExact(startDateStr, "yyyy.MM.dd", null, DateTimeStyles.None, out var startDate))
        {
            StartDate = startDate;
        }

        var endDateStr = GetRecordValue(importWorker.Records, "EndDate");
        if (endDateStr != null && DateTime.TryParseExact(endDateStr, "yyyy.MM.dd", null, DateTimeStyles.None, out var endDate))
        {
            EndDate = endDate;
        }

        _logger.LogDebug("{ImporterType}: Configuration loaded for {ImportWorkerName} - Type: {ImportType}",
            ImporterType, importWorker.Name, ImportType);
    }
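
    // Decides whether an import should run: skips disabled workers; with a configured date range,
    // imports while inside the range and, outside it, only when the imported layer is out of date;
    // without a date range the import always runs.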
    private bool ShouldPerformImport(Layer importWorker)
    {
        if (!IsEnabled)
        {
            _logger.LogDebug("{ImporterType}: Import disabled for {ImportWorkerName}",
                ImporterType, importWorker.Name);
            return false;
        }

        if (StartDate.HasValue && EndDate.HasValue)
        {
            var now = DateTime.UtcNow.Date;
            if (now >= StartDate.Value.Date && now <= EndDate.Value.Date)
            {
                _logger.LogDebug("{ImporterType}: Within date range, import needed for {ImportWorkerName}",
                    ImporterType, importWorker.Name);
                return true;
            }

            if (!IsImportedLayerUpToDate(importWorker))
            {
                _logger.LogDebug("{ImporterType}: Outside date range but layer is out of date, import needed for {ImportWorkerName}",
                    ImporterType, importWorker.Name);
                return true;
            }

            _logger.LogDebug("{ImporterType}: Outside date range and layer is up to date for {ImportWorkerName}",
                ImporterType, importWorker.Name);
            return false;
        }

        return true;
    }
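
    // Throws when any of the required settings (ImportYear, ImportMonth, ImportName, ImportType)
    // is missing, listing all missing values in a single message.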
    private void ValidateConfiguration()
    {
        var errors = new List<string>();

        if (string.IsNullOrEmpty(ImportYear)) errors.Add("ImportYear is required");
        if (string.IsNullOrEmpty(ImportMonth)) errors.Add("ImportMonth is required");
        if (string.IsNullOrEmpty(ImportName)) errors.Add("ImportName is required");
        if (string.IsNullOrEmpty(ImportType)) errors.Add("ImportType is required");

        if (errors.Any())
        {
            throw new InvalidOperationException($"Configuration validation failed: {string.Join(", ", errors)}");
        }
    }
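
    // Checks whether the newest child layer of the worker is at least as new as the latest
    // DataInbox entry for the configured import type; missing child layers or a missing
    // DataInbox entry are treated as up to date.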
    private bool IsImportedLayerUpToDate(Layer importWorker)
    {
        var newestLayer = _db.Layers
            .Include(x => x.Records)
            .Where(x => x.ParentId == importWorker.Id)
            .OrderByDescending(x => x.CreatedAt)
            .AsNoTracking()
            .FirstOrDefault();

        if (newestLayer == null)
        {
            _logger.LogDebug("{ImporterType}: No child layers found for {ImportWorkerName}, treating as up to date",
                ImporterType, importWorker.Name);
            return true;
        }

        try
        {
            var dataInbox = _db.DataInbox.OrderByDescending(x => x.CreatedAt).FirstOrDefault(x => x.Name == ImportType);
            if (dataInbox == null)
            {
                _logger.LogWarning("{ImporterType}: No DataInbox found for type {ImportType}",
                    ImporterType, ImportType);
                return true; // Assume up to date if no data source
            }

            // Compare timestamps - if DataInbox is newer than our layer, we need to import
            var isUpToDate = newestLayer.CreatedAt >= dataInbox.CreatedAt;

            _logger.LogDebug("{ImporterType}: Layer created at {LayerTime}, DataInbox created at {DataTime}, up to date: {IsUpToDate}",
                ImporterType, newestLayer.CreatedAt, dataInbox.CreatedAt, isUpToDate);

            return isUpToDate;
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ImporterType}: Error checking if layer {ImportWorkerName} is up to date",
                ImporterType, importWorker.Name);
            throw;
        }
    }
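
    // Decodes the latest DataInbox payload (base64-encoded JSON), filters the records to the
    // configured {ImportYear}{ImportMonth} prefix, stamps fresh ids and timestamps, and persists
    // them as a new child import layer.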
    private void PerformImport(Layer importWorker)
    {
        _logger.LogDebug("{ImporterType}: Looking for DataInbox with type {ImportType}",
            ImporterType, ImportType);

        var dataInbox = _db.DataInbox.OrderByDescending(x => x.CreatedAt).FirstOrDefault(x => x.Name == ImportType);
        if (dataInbox == null)
        {
            throw new InvalidOperationException($"DataInbox not found for type: {ImportType}");
        }

        _logger.LogDebug("{ImporterType}: Found DataInbox {DataInboxId}, created at {CreatedAt}",
            ImporterType, dataInbox.Id, dataInbox.CreatedAt);

        try
        {
            var data = Convert.FromBase64String(dataInbox.Data);
            var jsonString = Encoding.UTF8.GetString(data);

            _logger.LogDebug("{ImporterType}: Decoded {DataSize} bytes from base64",
                ImporterType, data.Length);

            var records = JsonSerializer.Deserialize<List<Record>>(jsonString);
            if (records == null)
            {
                throw new InvalidOperationException($"DataInbox.Data is empty for: {dataInbox.Name}");
            }

            _logger.LogDebug("{ImporterType}: Deserialized {RecordCount} records from JSON",
                ImporterType, records.Count);

            records = records.Where(x => x.Code!.StartsWith($"{ImportYear}{ImportMonth}")).ToList();
            if (records.Count == 0)
            {
                throw new InvalidOperationException($"No records found for period: {ImportYear}{ImportMonth}");
            }

            _logger.LogDebug("{ImporterType}: Filtered to {FilteredCount} records for period {Year}{Month}",
                ImporterType, records.Count, ImportYear, ImportMonth);

            records = records.Select(x =>
            {
                x.Id = Guid.NewGuid();
                x.CreatedAt = DateTime.UtcNow;
                x.ModifiedAt = DateTime.UtcNow;
                return x;
            }).ToList();

            var layer = new Layer
            {
                Id = Guid.NewGuid(),
                Number = _db.Layers.Count() + 1,
                ParentId = importWorker.Id,
                Type = LayerType.Import,
                CreatedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D"),
                ModifiedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D"),
                CreatedAt = DateTime.UtcNow,
                ModifiedAt = DateTime.UtcNow
            };
            layer.Name = $"L{layer.Number}-I-{ImportName}-{ImportYear}/{ImportMonth}-{DateTime.Now:yyyyMMddHHmm}";

            _db.Layers.Add(layer);
            SaveRecords(layer.Id, records);
            _db.SaveChanges();

            _logger.LogInformation("{ImporterType}: Successfully imported {RecordCount} records for layer {LayerName} ({LayerId})",
                ImporterType, records.Count, layer.Name, layer.Id);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ImporterType}: Error processing DataInbox {DataInboxId}",
                ImporterType, dataInbox.Id);
            throw;
        }
    }
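
    // Returns Desc1 of the first record with the given Code, or null when no such record exists.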
    private string? GetRecordValue(ICollection<Record> records, string code)
    {
        return records.FirstOrDefault(x => x.Code == code)?.Desc1;
    }
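
    // Removes any records already attached to the layer, stamps audit fields on the new records
    // and adds them to the context; SaveChanges is left to the caller.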
    private void SaveRecords(Guid layerId, ICollection<Record> records)
    {
        var toDelete = _db.Records.Where(x => x.LayerId == layerId).ToList();
        if (toDelete.Count > 0)
        {
            _db.Records.RemoveRange(toDelete);
        }

        foreach (var record in records)
        {
            record.CreatedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D");
            record.CreatedAt = DateTime.UtcNow;
            record.ModifiedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D");
            record.ModifiedAt = DateTime.UtcNow;
            record.LayerId = layerId;
            _db.Records.Add(record);
        }

        _logger.LogDebug("{ImporterType}: Saved {RecordCount} records for layer {LayerId}",
            ImporterType, records.Count, layerId);
    }
}