D3 Importer: import DataInbox records and export the values to Google Sheets
@@ -5,14 +5,18 @@ using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore;
using DiunaBI.Core.Models;
using DiunaBI.Core.Database.Context;
using DiunaBI.Core.Services;
using Google.Apis.Sheets.v4;
using Google.Apis.Sheets.v4.Data;

namespace DiunaBI.Plugins.Morska.Importers;

public class MorskaD3Importer : MorskaBaseImporter
{
    public override string ImporterType => "Morska.Import.D3";

    private readonly AppDbContext _db;
    private readonly SpreadsheetsResource.ValuesResource _googleSheetValues;
    private readonly ILogger<MorskaD3Importer> _logger;

    // Configuration properties
@@ -20,15 +24,20 @@ public class MorskaD3Importer : MorskaBaseImporter
    private string? ImportMonth { get; set; }
    private string? ImportName { get; set; }
    private string? ImportType { get; set; }
    private DateTime? StartDate { get; set; }
    private DateTime? EndDate { get; set; }
    private string? SheetId { get; set; }
    private bool IsEnabled { get; set; }

    // Cached deserialized data
    private List<Record>? _cachedRecords;
    private string? _cachedDataInboxId;

    public MorskaD3Importer(
        AppDbContext db,
        SpreadsheetsResource.ValuesResource googleSheetValues,
        ILogger<MorskaD3Importer> logger)
    {
        _db = db;
        _googleSheetValues = googleSheetValues;
        _logger = logger;
    }
@@ -36,14 +45,21 @@ public class MorskaD3Importer : MorskaBaseImporter
    {
        try
        {
            _logger.LogInformation("{ImporterType}: Starting import for {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);

+            // Clear cache at start
+            _cachedRecords = null;
+            _cachedDataInboxId = null;

            LoadConfiguration(importWorker);

+            // Deserialize data early - right after LoadConfiguration
+            DeserializeDataInboxData();

            if (!ShouldPerformImport(importWorker))
            {
                _logger.LogInformation("{ImporterType}: Import not needed for {ImportWorkerName}",
                    ImporterType, importWorker.Name);
                return;
            }
@@ -51,16 +67,25 @@ public class MorskaD3Importer : MorskaBaseImporter
            ValidateConfiguration();

            PerformImport(importWorker);

+            // Export to Google Sheets after successful import
+            ExportToGoogleSheets();

            _logger.LogInformation("{ImporterType}: Successfully completed import for {ImportWorkerName}",
                ImporterType, importWorker.Name);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ImporterType}: Failed to import {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);
            throw;
        }
+        finally
+        {
+            // Clear cache after import
+            _cachedRecords = null;
+            _cachedDataInboxId = null;
+        }
    }

    private void LoadConfiguration(Layer importWorker)
@@ -71,56 +96,107 @@ public class MorskaD3Importer : MorskaBaseImporter
        ImportMonth = GetRecordValue(importWorker.Records, "ImportMonth");
        ImportName = GetRecordValue(importWorker.Records, "ImportName");
        ImportType = GetRecordValue(importWorker.Records, "ImportType");
        SheetId = GetRecordValue(importWorker.Records, "SheetId");
        IsEnabled = GetRecordValue(importWorker.Records, "IsEnabled") == "True";

-        var startDateStr = GetRecordValue(importWorker.Records, "StartDate");
-        if (startDateStr != null && DateTime.TryParseExact(startDateStr, "yyyy.MM.dd", null, DateTimeStyles.None, out var startDate))
-        {
-            StartDate = startDate;
-        }
-
-        var endDateStr = GetRecordValue(importWorker.Records, "EndDate");
-        if (endDateStr != null && DateTime.TryParseExact(endDateStr, "yyyy.MM.dd", null, DateTimeStyles.None, out var endDate))
-        {
-            EndDate = endDate;
-        }
-
-        _logger.LogDebug("{ImporterType}: Configuration loaded for {ImportWorkerName} - Type: {ImportType}",
-            ImporterType, importWorker.Name, ImportType);
+        _logger.LogDebug(
+            "{ImporterType}: Configuration loaded for {ImportWorkerName} - Type: {ImportType}, SheetId: {SheetId}",
+            ImporterType, importWorker.Name, ImportType, SheetId);
    }

+    private void DeserializeDataInboxData()
+    {
+        var dataInbox = _db.DataInbox.OrderByDescending(x => x.CreatedAt).FirstOrDefault(x => x.Name == ImportType);
+        if (dataInbox == null)
+        {
+            throw new InvalidOperationException($"DataInbox not found for type: {ImportType}");
+        }
+
+        _logger.LogDebug("{ImporterType}: Found DataInbox {DataInboxId}, created at {CreatedAt}",
+            ImporterType, dataInbox.Id, dataInbox.CreatedAt);
+
+        try
+        {
+            var data = Convert.FromBase64String(dataInbox.Data);
+            var jsonString = Encoding.UTF8.GetString(data);
+
+            _logger.LogDebug("{ImporterType}: Decoded {DataSize} bytes from base64",
+                ImporterType, data.Length);
+
+            var records = JsonSerializer.Deserialize<List<Record>>(jsonString);
+            if (records == null)
+            {
+                throw new InvalidOperationException($"DataInbox.Data is empty for: {dataInbox.Name}");
+            }
+
+            _logger.LogDebug("{ImporterType}: Deserialized {RecordCount} records from JSON",
+                ImporterType, records.Count);
+
+            // Cache the deserialized data
+            _cachedRecords = records;
+            _cachedDataInboxId = dataInbox.Id.ToString();
+        }
+        catch (FormatException e)
+        {
+            _logger.LogError(e, "{ImporterType}: Invalid base64 data in DataInbox {DataInboxName}",
+                ImporterType, ImportType);
+            throw new InvalidOperationException($"Invalid base64 data in DataInbox: {ImportType}", e);
+        }
+        catch (JsonException e)
+        {
+            _logger.LogError(e, "{ImporterType}: Invalid JSON data in DataInbox {DataInboxName}",
+                ImporterType, ImportType);
+            throw new InvalidOperationException($"Invalid JSON data in DataInbox: {ImportType}", e);
+        }
+    }
+
+    private List<Record> GetFilteredRecords()
+    {
+        if (_cachedRecords == null)
+        {
+            throw new InvalidOperationException("Data has not been deserialized yet");
+        }
+
+        var filteredRecords = _cachedRecords.Where(x => x.Code!.StartsWith($"{ImportYear}{ImportMonth}")).ToList();
+        if (filteredRecords.Count == 0)
+        {
+            throw new InvalidOperationException($"No records found for period: {ImportYear}{ImportMonth}");
+        }
+
+        _logger.LogDebug("{ImporterType}: Filtered to {FilteredCount} records for period {Year}{Month}",
+            ImporterType, filteredRecords.Count, ImportYear, ImportMonth);
+
+        return filteredRecords;
+    }
+
    private bool ShouldPerformImport(Layer importWorker)
    {
        if (!IsEnabled)
        {
            _logger.LogDebug("{ImporterType}: Import disabled for {ImportWorkerName}",
                ImporterType, importWorker.Name);
            return false;
        }

-        if (StartDate.HasValue && EndDate.HasValue)
-        {
-            var now = DateTime.UtcNow.Date;
-            if (now >= StartDate.Value.Date && now <= EndDate.Value.Date)
-            {
-                _logger.LogDebug("{ImporterType}: Within date range, import needed for {ImportWorkerName}",
-                    ImporterType, importWorker.Name);
-                return true;
-            }
-
-            if (!IsImportedLayerUpToDate(importWorker))
-            {
-                _logger.LogDebug("{ImporterType}: Outside date range but layer is out of date, import needed for {ImportWorkerName}",
-                    ImporterType, importWorker.Name);
-                return true;
-            }
-
-            _logger.LogDebug("{ImporterType}: Outside date range and layer is up to date for {ImportWorkerName}",
-                ImporterType, importWorker.Name);
-            return false;
-        }
-
-        // Check if imported layer is up to date
+        var dataInbox = _db.DataInbox.OrderByDescending(x => x.CreatedAt).FirstOrDefault(x => x.Name == ImportType);
+        if (dataInbox == null)
+        {
+            _logger.LogDebug("{ImporterType}: No DataInbox found for type {ImportType}",
+                ImporterType, ImportType);
+            return false;
+        }
+
        if (!IsImportedLayerUpToDate(importWorker))
        {
            _logger.LogDebug("{ImporterType}: Layer is out of date, import needed for {ImportWorkerName}",
                ImporterType, importWorker.Name);
            return true;
        }

-        return true;
+        _logger.LogDebug("{ImporterType}: Layer is up to date for {ImportWorkerName}",
+            ImporterType, importWorker.Name);
+        return false;
    }

    private void ValidateConfiguration()
@@ -131,6 +207,7 @@ public class MorskaD3Importer : MorskaBaseImporter
        if (string.IsNullOrEmpty(ImportMonth)) errors.Add("ImportMonth is required");
        if (string.IsNullOrEmpty(ImportName)) errors.Add("ImportName is required");
        if (string.IsNullOrEmpty(ImportType)) errors.Add("ImportType is required");
+        if (string.IsNullOrEmpty(SheetId)) errors.Add("SheetId is required");

        if (errors.Any())
        {
@@ -142,116 +219,182 @@ public class MorskaD3Importer : MorskaBaseImporter
    {
        var newestLayer = _db.Layers
            .Include(x => x.Records)
-            .Where(x => x.ParentId == importWorker.Id)
+            .Where(x =>
+                x.ParentId == importWorker.Id
+                && x.Name!.Contains($"I-{ImportName}-{ImportYear}/{ImportMonth}"))
            .OrderByDescending(x => x.CreatedAt)
            .AsNoTracking()
            .FirstOrDefault();

        if (newestLayer == null)
        {
-            _logger.LogDebug("{ImporterType}: No child layers found for {ImportWorkerName}, treating as up to date",
+            _logger.LogDebug("{ImporterType}: No child layers found for {ImportWorkerName}, import needed",
                ImporterType, importWorker.Name);
-            return true;
+            return false;
        }

        try
        {
-            var dataInbox = _db.DataInbox.OrderByDescending(x => x.CreatedAt).FirstOrDefault(x => x.Name == ImportType);
-            if (dataInbox == null)
-            {
-                _logger.LogWarning("{ImporterType}: No DataInbox found for type {ImportType}",
-                    ImporterType, ImportType);
-                return true; // Assume up to date if no data source
-            }
-
-            // Compare timestamps - if DataInbox is newer than our layer, we need to import
-            var isUpToDate = newestLayer.CreatedAt >= dataInbox.CreatedAt;
-
-            _logger.LogDebug("{ImporterType}: Layer created at {LayerTime}, DataInbox created at {DataTime}, up to date: {IsUpToDate}",
-                ImporterType, newestLayer.CreatedAt, dataInbox.CreatedAt, isUpToDate);
-
-            return isUpToDate;
+            var currentRecords = GetFilteredRecords();
+
+            // Compare record counts first
+            if (newestLayer.Records?.Count != currentRecords.Count)
+            {
+                _logger.LogDebug("{ImporterType}: Record count mismatch - DB: {DbCount}, DataInbox: {DataCount}",
+                    ImporterType, newestLayer.Records?.Count ?? 0, currentRecords.Count);
+                return false;
+            }
+
+            // Compare individual records
+            foreach (var currentRecord in currentRecords)
+            {
+                var existingRecord = newestLayer.Records?.FirstOrDefault(x => x.Code == currentRecord.Code);
+                if (existingRecord == null)
+                {
+                    _logger.LogDebug("{ImporterType}: Record with code {Code} not found in existing layer",
+                        ImporterType, currentRecord.Code);
+                    return false;
+                }
+
+                // Compare all relevant fields
+                if (Math.Abs((existingRecord.Value1 ?? 0) - (currentRecord.Value1 ?? 0)) > 0.001 ||
+                    existingRecord.Desc1 != currentRecord.Desc1)
+                {
+                    _logger.LogDebug("{ImporterType}: Data difference found for code {Code}",
+                        ImporterType, currentRecord.Code);
+                    return false;
+                }
+            }
+
+            _logger.LogDebug("{ImporterType}: All records match, layer is up to date for {ImportWorkerName}",
+                ImporterType, importWorker.Name);
+            return true;
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ImporterType}: Error checking if layer {ImportWorkerName} is up to date",
                ImporterType, importWorker.Name);
-            throw;
+            return false;
        }
    }

    private void PerformImport(Layer importWorker)
    {
-        _logger.LogDebug("{ImporterType}: Looking for DataInbox with type {ImportType}",
+        _logger.LogDebug("{ImporterType}: Starting import for DataInbox type {ImportType}",
            ImporterType, ImportType);

-        var dataInbox = _db.DataInbox.OrderByDescending(x => x.CreatedAt).FirstOrDefault(x => x.Name == ImportType);
-        if (dataInbox == null)
-        {
-            throw new InvalidOperationException($"DataInbox not found for type: {ImportType}");
-        }
-
-        _logger.LogDebug("{ImporterType}: Found DataInbox {DataInboxId}, created at {CreatedAt}",
-            ImporterType, dataInbox.Id, dataInbox.CreatedAt);
+        var filteredRecords = GetFilteredRecords();
+
+        // Prepare records for saving
+        var recordsToSave = filteredRecords.Select(x =>
+        {
+            x.Id = Guid.NewGuid();
+            x.CreatedAt = DateTime.UtcNow;
+            x.ModifiedAt = DateTime.UtcNow;
+            return x;
+        }).ToList();
+
+        var layer = new Layer
+        {
+            Id = Guid.NewGuid(),
+            Number = _db.Layers.Count() + 1,
+            ParentId = importWorker.Id,
+            Type = LayerType.Import,
+            CreatedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D"),
+            ModifiedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D"),
+            CreatedAt = DateTime.UtcNow,
+            ModifiedAt = DateTime.UtcNow
+        };
+        layer.Name = $"L{layer.Number}-I-{ImportName}-{ImportYear}/{ImportMonth}-{DateTime.Now:yyyyMMddHHmm}";
+
+        _db.Layers.Add(layer);
+        SaveRecords(layer.Id, recordsToSave);
+        _db.SaveChanges();
+
+        _logger.LogInformation(
+            "{ImporterType}: Successfully imported {RecordCount} records for layer {LayerName} ({LayerId})",
+            ImporterType, recordsToSave.Count, layer.Name, layer.Id);
+    }
+
+    private void ExportToGoogleSheets()
+    {
+        if (string.IsNullOrEmpty(SheetId))
+        {
+            _logger.LogWarning("{ImporterType}: SheetId not configured, skipping Google Sheets export",
+                ImporterType);
+            return;
+        }

        try
        {
-            var data = Convert.FromBase64String(dataInbox.Data);
-            var jsonString = Encoding.UTF8.GetString(data);
-
-            _logger.LogDebug("{ImporterType}: Decoded {DataSize} bytes from base64",
-                ImporterType, data.Length);
-
-            var records = JsonSerializer.Deserialize<List<Record>>(jsonString);
-            if (records == null)
-            {
-                throw new InvalidOperationException($"DataInbox.Data is empty for: {dataInbox.Name}");
-            }
-
-            _logger.LogDebug("{ImporterType}: Deserialized {RecordCount} records from JSON",
-                ImporterType, records.Count);
-
-            records = records.Where(x => x.Code!.StartsWith($"{ImportYear}{ImportMonth}")).ToList();
-            if (records.Count == 0)
-            {
-                throw new InvalidOperationException($"No records found for period: {ImportYear}{ImportMonth}");
-            }
-
-            _logger.LogDebug("{ImporterType}: Filtered to {FilteredCount} records for period {Year}{Month}",
-                ImporterType, records.Count, ImportYear, ImportMonth);
-
-            records = records.Select(x =>
-            {
-                x.Id = Guid.NewGuid();
-                x.CreatedAt = DateTime.UtcNow;
-                x.ModifiedAt = DateTime.UtcNow;
-                return x;
-            }).ToList();
-
-            var layer = new Layer
-            {
-                Id = Guid.NewGuid(),
-                Number = _db.Layers.Count() + 1,
-                ParentId = importWorker.Id,
-                Type = LayerType.Import,
-                CreatedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D"),
-                ModifiedById = Guid.Parse("F392209E-123E-4651-A5A4-0B1D6CF9FF9D"),
-                CreatedAt = DateTime.UtcNow,
-                ModifiedAt = DateTime.UtcNow
-            };
-            layer.Name = $"L{layer.Number}-I-{ImportName}-{ImportYear}/{ImportMonth}-{DateTime.Now:yyyyMMddHHmm}";
-
-            _db.Layers.Add(layer);
-            SaveRecords(layer.Id, records);
-            _db.SaveChanges();
-
-            _logger.LogInformation("{ImporterType}: Successfully imported {RecordCount} records for layer {LayerName} ({LayerId})",
-                ImporterType, records.Count, layer.Name, layer.Id);
+            _logger.LogDebug("{ImporterType}: Starting Google Sheets export to {SheetId}",
+                ImporterType, SheetId);
+
+            var filteredRecords = GetFilteredRecords();
+            var sheetTabName = ProcessHelper.GetSheetName(int.Parse(ImportMonth!), int.Parse(ImportYear!));
+
+            _logger.LogDebug("{ImporterType}: Using sheet tab name: {SheetTabName}",
+                ImporterType, sheetTabName);
+
+            // Get current sheet data
+            var currentSheetData = _googleSheetValues.Get(SheetId!, $"{sheetTabName}!C7:D200").Execute();
+
+            var updateRequests = new List<ValueRange>();
+
+            for (var rowIndex = 0; rowIndex < 194; rowIndex++)
+            {
+                if (currentSheetData.Values == null || rowIndex >= currentSheetData.Values.Count)
+                    continue;
+
+                var existingRow = currentSheetData.Values[rowIndex];
+                if (existingRow.Count == 0 || string.IsNullOrEmpty(existingRow[0]?.ToString()))
+                    continue;
+
+                var accountCode = existingRow[0].ToString()!;
+                var matchingRecord = filteredRecords.FirstOrDefault(x => x.Desc1 == accountCode);
+                if (matchingRecord == null)
+                    continue;
+
+                var newValue = matchingRecord.Value1?.ToString(CultureInfo.GetCultureInfo("pl-PL")) ?? "0";
+                var existingValue = (existingRow.Count > 1 ? existingRow[1] : "")?.ToString();
+
+                if (existingValue != newValue)
+                {
+                    var range = $"{sheetTabName}!D{rowIndex + 7}";
+                    updateRequests.Add(new ValueRange
+                    {
+                        Range = range,
+                        Values = new List<IList<object>> { new List<object> { newValue } }
+                    });
+
+                    _logger.LogDebug("{ImporterType}: Scheduled update for {AccountCode} at {Range} with value {Value}",
+                        ImporterType, accountCode, range, newValue);
+                }
+            }
+
+            if (updateRequests.Count > 0)
+            {
+                var batchUpdate = new BatchUpdateValuesRequest
+                {
+                    Data = updateRequests,
+                    ValueInputOption = "USER_ENTERED"
+                };
+
+                _googleSheetValues.BatchUpdate(batchUpdate, SheetId!).Execute();
+
+                _logger.LogInformation("{ImporterType}: Updated {Count} cells in Google Sheet {SheetId}",
+                    ImporterType, updateRequests.Count, SheetId);
+            }
+            else
+            {
+                _logger.LogInformation("{ImporterType}: No changes to export to Google Sheet {SheetId}",
+                    ImporterType, SheetId);
+            }
        }
        catch (Exception e)
        {
-            _logger.LogError(e, "{ImporterType}: Error processing DataInbox {DataInboxId}",
-                ImporterType, dataInbox.Id);
+            _logger.LogError(e, "{ImporterType}: Failed to export to Google Sheets {SheetId}",
+                ImporterType, SheetId);
            throw;
        }
    }
@@ -279,7 +422,7 @@ public class MorskaD3Importer : MorskaBaseImporter
            _db.Records.Add(record);
        }

        _logger.LogDebug("{ImporterType}: Saved {RecordCount} records for layer {LayerId}",
            ImporterType, records.Count, layerId);
    }
}