Compare commits

...

5 Commits

Author SHA1 Message Date
0e3b3933f0 WIP: p2 plugin
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m14s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m10s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m12s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m7s
2025-12-03 13:33:38 +01:00
445c07a8d8 Morska plugins refactor 2025-12-02 21:24:37 +01:00
3f8e62fbb8 WIP: queue engine 2025-12-02 15:35:04 +01:00
248106a239 Plugins little refactor 2025-12-02 15:21:27 +01:00
587d4d66f8 Pedrollo plugins 2025-12-02 14:31:21 +01:00
44 changed files with 2194 additions and 129 deletions

View File

@@ -13,6 +13,13 @@ concurrency:
jobs: jobs:
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
matrix:
customer:
- name: Morska
plugin_project: DiunaBI.Plugins.Morska
- name: PedrolloPL
plugin_project: DiunaBI.Plugins.PedrolloPL
steps: steps:
- name: Checkout - name: Checkout
uses: https://github.com/actions/checkout@v4 uses: https://github.com/actions/checkout@v4
@@ -27,7 +34,7 @@ jobs:
run: | run: |
dotnet restore DiunaBI.API/DiunaBI.API.csproj dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj dotnet restore ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins - name: Build solution and prepare plugins
@@ -37,10 +44,10 @@ jobs:
# Build only required projects — skip DiunaBI.UI.Mobile # Build only required projects — skip DiunaBI.UI.Mobile
dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release
dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release dotnet build ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj --configuration Release
mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true cp ${{ matrix.customer.plugin_project }}/bin/Release/net10.0/${{ matrix.customer.plugin_project }}.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
- name: Run Tests - name: Run Tests
@@ -49,7 +56,7 @@ jobs:
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \ dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \ --configuration Release \
--no-restore \ --no-restore \
--logger "trx;LogFileName=test-results.trx" \ --logger "trx;LogFileName=test-results-${{ matrix.customer.name }}.trx" \
--collect:"XPlat Code Coverage" \ --collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly" || true --filter "Category!=LocalOnly" || true
@@ -57,7 +64,7 @@ jobs:
uses: https://github.com/actions/upload-artifact@v3 uses: https://github.com/actions/upload-artifact@v3
if: success() || failure() if: success() || failure()
with: with:
name: test-results name: test-results-${{ matrix.customer.name }}
path: | path: |
DiunaBI.Tests/TestResults/*.trx DiunaBI.Tests/TestResults/*.trx
DiunaBI.Tests/TestResults/**/coverage.cobertura.xml DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
@@ -67,6 +74,15 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: test needs: test
if: success() || failure() if: success() || failure()
strategy:
matrix:
customer:
- name: Morska
plugin_project: DiunaBI.Plugins.Morska
image_suffix: morska
- name: PedrolloPL
plugin_project: DiunaBI.Plugins.PedrolloPL
image_suffix: pedrollopl
steps: steps:
- name: Debug secrets - name: Debug secrets
@@ -93,9 +109,10 @@ jobs:
docker buildx build \ docker buildx build \
--platform linux/amd64 \ --platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \ --label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
--build-arg PLUGIN_PROJECT=${{ matrix.customer.plugin_project }} \
-f DiunaBI.API/Dockerfile \ -f DiunaBI.API/Dockerfile \
-t code.bim-it.pl/mz/diunabi-api:latest \ -t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest \
-t code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }} \ -t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
--push \ --push \
. .
@@ -106,25 +123,26 @@ jobs:
--platform linux/amd64 \ --platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \ --label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.UI.Web/Dockerfile \ -f DiunaBI.UI.Web/Dockerfile \
-t code.bim-it.pl/mz/diunabi-ui:latest \ -t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest \
-t code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }} \ -t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
--push \ --push \
. .
- name: Output build info - name: Output build info
run: | run: |
echo "## 🐳 Docker Images Built" >> $GITHUB_STEP_SUMMARY echo "## 🐳 Docker Images Built - ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY
echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
echo "**Customer:** ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY
echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:latest" >> $GITHUB_STEP_SUMMARY echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:latest" >> $GITHUB_STEP_SUMMARY echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY echo '```' >> $GITHUB_STEP_SUMMARY

View File

@@ -0,0 +1,99 @@
-- Creates an Administration layer (Type = 2) that configures a PedrolloPL
-- import worker for @Type ('P2'), together with its parameter Records.
-- Set @JustForDebug = 1 to preview the resolved values without writing anything.
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Year INT = 2024;
DECLARE @Type NVARCHAR(5) = 'P2';
-- NOTE(review): @Year is 2024 but the date window below spans 2025-2026 — confirm intended.
DECLARE @StartDate NVARCHAR(10) = '2025.01.02';
DECLARE @EndDate NVARCHAR(10) = '2026.12.31';
-- NOTE(review): COUNT(id)+1 is not concurrency-safe; two parallel runs can pick the same number.
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
    'L', @Number, '-A-IW_', @Type, '-', @Year,'-', @CurrentTimestamp
);
-- Audit user stamped on every row (was repeated inline 24 times below).
DECLARE @UserId UNIQUEIDENTIFIER = '117be4f0-b5d1-41a1-a962-39dc30cce368';
-- Map @Type to its plugin / data-inbox configuration; falls through to NULL
-- for an unknown @Type (CASE without ELSE yields NULL).
DECLARE @Plugin NVARCHAR(100) =
    CASE @Type
        WHEN 'P2' THEN 'PedrolloPL.Import.P2'
    END;
DECLARE @DataInboxName NVARCHAR(100) =
    CASE @Type
        WHEN 'P2' THEN 'P2_2024'
    END;
DECLARE @DataInboxSource NVARCHAR(100) =
    CASE @Type
        WHEN 'P2' THEN 'Comarch'
    END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
-- Echo the resolved configuration so the operator can eyeball it before commit.
SELECT @Name AS Name, @StartDate AS StartDate, @EndDate AS EndDate, @Type AS Type, @Year AS Year, @Plugin AS Plugin,
@DataInboxName AS DataInboxName, @DataInboxSource AS DataInboxSource;
IF @JustForDebug = 1
BEGIN
    SELECT 'Just for debug' AS Logger;
    RETURN;
END;
-- Atomic write: either the layer and all of its parameter records are
-- created, or nothing is (the original script could leave a partial layer
-- if any one of the twelve inserts failed).
BEGIN TRY
    BEGIN TRANSACTION;

    INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
    ([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
    VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, @UserId, @UserId, 2);

    -- One multi-row insert replaces twelve identical single-row inserts.
    INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
    ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
    VALUES
    (NEWID(), 'StartDate',       @StartDate,       GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'EndDate',         @EndDate,         GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'Source',          'DataInbox',      GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'ImportName',      @Type,            GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'ImportYear',      @Year,            GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'Type',            'ImportWorker',   GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'Plugin',          @Plugin,          GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'IsEnabled',       'True',           GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'DataInboxName',   @DataInboxName,   GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'DataInboxSource', @DataInboxSource, GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'Priority',        '10',             GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId),
    (NEWID(), 'MaxRetries',      '3',              GETDATE(), GETDATE(), @UserId, @UserId, 0, @LayerId);

    COMMIT TRANSACTION;
END TRY
BEGIN CATCH
    IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION;
    THROW;
END CATCH;

View File

@@ -1,9 +1 @@
PUT https://pedrollopl.diunabi.com/api/DataInbox/Add/8kL2mN4pQ6rojshf8704i34p4eim1hs POST http://localhost:5400/jobs/schedule/10763478CB738D4ecb2h76g803478CB738D4e
Content-Type: application/json
Authorization: Basic cGVkcm9sbG9wbDo0MjU4dlc2eFk4TjRwUQ==
{
"Source": "morska.import",
"Name": "morska.d3.importer",
"Data": "eyJrZXkiOiAidmFsdWUifQ=="
}

View File

@@ -2,9 +2,9 @@
DECLARE @JustForDebug TINYINT = 0; DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]); DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @Name NVARCHAR(50) = CONCAT( DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-D-D6-SELL-CODES' 'L', @Number, 'D-P2-CODES'
); );
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID(); DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
@@ -16,7 +16,7 @@ BEGIN
RETURN; RETURN;
END; END;
INSERT INTO [diunabi-morska].[dbo].[Layers] INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [CreatedById], [ModifiedById], [Type]) ([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 3); VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 3);
@@ -27,16 +27,23 @@ DECLARE @Array TABLE (
INSERT INTO @Array (Code, Desc1) INSERT INTO @Array (Code, Desc1)
VALUES VALUES
('1002', '1102'), ('01','<nieznany>'),
('1003','1202'), ('02','DOLNOŚLĄSKIE'),
('1008','1302'), ('03','KUJAWSKO-POMORSKIE'),
('1009','1302'), ('04','LUBELSKIE'),
('9085','1203'), ('05','LUBUSKIE'),
('1010','1304'), ('06','ŁÓDZKIE'),
('9086','1005'), ('07','MAŁOPOLSKIE'),
('1021','1206'), ('08','MAZOWIECKIE'),
('9089','1207'), ('09','OPOLSKIE'),
('9091','1208') ('10','PODKARPACKIE'),
('11','PODLASKIE'),
('12','POMORSKIE'),
('13','ŚLĄSKIE'),
('14','ŚWIĘTOKRZYSKIE'),
('15','WARMIŃSKO-MAZURSKIE'),
('16','WIELKOPOLSKIE'),
('17','ZACHODNIOPOMORSKIE');
-- Loop through the array and insert into the target table -- Loop through the array and insert into the target table
DECLARE @Code NVARCHAR(50); DECLARE @Code NVARCHAR(50);
@@ -51,7 +58,7 @@ FETCH NEXT FROM CursorArray INTO @Code, @Desc1;
WHILE @@FETCH_STATUS = 0 WHILE @@FETCH_STATUS = 0
BEGIN BEGIN
INSERT INTO [diunabi-morska].[dbo].[Records] INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId]) ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES (NEWID(), @Code, @Desc1, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId); VALUES (NEWID(), @Code, @Desc1, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

View File

@@ -0,0 +1,435 @@
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
// NOTE(review): every catch block returns BadRequest(ex.ToString()), which leaks
// stack traces and internals to clients. Kept for backward compatibility with
// existing consumers, but consider returning Problem() with a generic message.
public class JobsController : Controller
{
    private readonly AppDbContext _db;
    private readonly JobSchedulerService _jobScheduler;
    private readonly IConfiguration _configuration;
    private readonly ILogger<JobsController> _logger;

    public JobsController(
        AppDbContext db,
        JobSchedulerService jobScheduler,
        IConfiguration configuration,
        ILogger<JobsController> logger)
    {
        _db = db;
        _jobScheduler = jobScheduler;
        _configuration = configuration;
        _logger = logger;
    }

    /// <summary>
    /// Compares a caller-supplied API key against the configured "apiKey" value.
    /// Uses a fixed-time comparison so the check does not leak key contents
    /// through timing, and rejects when no key is configured.
    /// </summary>
    private bool IsValidApiKey(string apiKey)
    {
        var expected = _configuration["apiKey"];
        if (string.IsNullOrEmpty(expected) || string.IsNullOrEmpty(apiKey))
        {
            return false;
        }
        return System.Security.Cryptography.CryptographicOperations.FixedTimeEquals(
            System.Text.Encoding.UTF8.GetBytes(apiKey),
            System.Text.Encoding.UTF8.GetBytes(expected));
    }

    /// <summary>
    /// Returns a page of queue jobs, newest first, optionally filtered by
    /// status, job type and/or layer id.
    /// </summary>
    /// <param name="start">Zero-based offset of the first item (clamped to >= 0).</param>
    /// <param name="limit">Page size (values &lt;= 0 fall back to the default of 50).</param>
    [HttpGet]
    [Route("")]
    public async Task<IActionResult> GetAll(
        [FromQuery] int start = 0,
        [FromQuery] int limit = 50,
        [FromQuery] JobStatus? status = null,
        [FromQuery] JobType? jobType = null,
        [FromQuery] Guid? layerId = null)
    {
        try
        {
            // Sanitize paging inputs: the original code divided by `limit` below,
            // so limit=0 threw DivideByZeroException, and negative values broke
            // Skip/Take.
            if (limit <= 0) limit = 50;
            if (start < 0) start = 0;

            var query = _db.QueueJobs.AsQueryable();

            if (status.HasValue)
            {
                query = query.Where(j => j.Status == status.Value);
            }
            if (jobType.HasValue)
            {
                query = query.Where(j => j.JobType == jobType.Value);
            }
            if (layerId.HasValue)
            {
                query = query.Where(j => j.LayerId == layerId.Value);
            }

            var totalCount = await query.CountAsync();
            var items = await query
                .OrderByDescending(j => j.CreatedAt)
                .Skip(start)
                .Take(limit)
                .AsNoTracking()
                .ToListAsync();

            var pagedResult = new PagedResult<QueueJob>
            {
                Items = items,
                TotalCount = totalCount,
                Page = (start / limit) + 1,
                PageSize = limit
            };

            _logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} jobs", items.Count, totalCount);
            return Ok(pagedResult);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "GetAll: Error retrieving jobs");
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>Returns a single queue job by id, or 404 when it does not exist.</summary>
    [HttpGet]
    [Route("{id:guid}")]
    public async Task<IActionResult> Get(Guid id)
    {
        try
        {
            var job = await _db.QueueJobs
                .AsNoTracking()
                .FirstOrDefaultAsync(j => j.Id == id);

            if (job == null)
            {
                _logger.LogWarning("Get: Job {JobId} not found", id);
                return NotFound("Job not found");
            }

            _logger.LogDebug("Get: Retrieved job {JobId}", id);
            return Ok(job);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Get: Error retrieving job {JobId}", id);
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>
    /// Schedules all jobs (imports and processes). Anonymous endpoint guarded by
    /// the route API key; intended to be called by an external cron trigger.
    /// </summary>
    [HttpPost]
    [Route("schedule/{apiKey}")]
    [AllowAnonymous]
    public async Task<IActionResult> ScheduleJobs(string apiKey, [FromQuery] string? nameFilter = null)
    {
        if (!IsValidApiKey(apiKey))
        {
            // Do not log the supplied key: failed attempts may contain a
            // mistyped real secret, and logs should never hold secret material.
            _logger.LogWarning("ScheduleJobs: Unauthorized request");
            return Unauthorized();
        }

        try
        {
            var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
            _logger.LogInformation("ScheduleJobs: Created {Count} jobs", jobsCreated);
            return Ok(new
            {
                success = true,
                jobsCreated,
                message = $"Successfully scheduled {jobsCreated} jobs"
            });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "ScheduleJobs: Error scheduling jobs");
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>Schedules import jobs only. See <see cref="ScheduleJobs"/> for the auth model.</summary>
    [HttpPost]
    [Route("schedule/imports/{apiKey}")]
    [AllowAnonymous]
    public async Task<IActionResult> ScheduleImportJobs(string apiKey, [FromQuery] string? nameFilter = null)
    {
        if (!IsValidApiKey(apiKey))
        {
            _logger.LogWarning("ScheduleImportJobs: Unauthorized request");
            return Unauthorized();
        }

        try
        {
            var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
            _logger.LogInformation("ScheduleImportJobs: Created {Count} import jobs", jobsCreated);
            return Ok(new
            {
                success = true,
                jobsCreated,
                message = $"Successfully scheduled {jobsCreated} import jobs"
            });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "ScheduleImportJobs: Error scheduling import jobs");
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>Schedules process jobs only. See <see cref="ScheduleJobs"/> for the auth model.</summary>
    [HttpPost]
    [Route("schedule/processes/{apiKey}")]
    [AllowAnonymous]
    public async Task<IActionResult> ScheduleProcessJobs(string apiKey)
    {
        if (!IsValidApiKey(apiKey))
        {
            _logger.LogWarning("ScheduleProcessJobs: Unauthorized request");
            return Unauthorized();
        }

        try
        {
            var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
            _logger.LogInformation("ScheduleProcessJobs: Created {Count} process jobs", jobsCreated);
            return Ok(new
            {
                success = true,
                jobsCreated,
                message = $"Successfully scheduled {jobsCreated} process jobs"
            });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "ScheduleProcessJobs: Error scheduling process jobs");
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>
    /// Resets a Failed job back to Pending (clearing retry count and last error)
    /// so the worker picks it up again. Only Failed jobs can be retried.
    /// </summary>
    [HttpPost]
    [Route("{id:guid}/retry")]
    public async Task<IActionResult> RetryJob(Guid id)
    {
        try
        {
            var job = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
            if (job == null)
            {
                _logger.LogWarning("RetryJob: Job {JobId} not found", id);
                return NotFound("Job not found");
            }

            if (job.Status != JobStatus.Failed)
            {
                _logger.LogWarning("RetryJob: Job {JobId} is not in Failed status (current: {Status})", id, job.Status);
                return BadRequest($"Job is not in Failed status (current: {job.Status})");
            }

            job.Status = JobStatus.Pending;
            job.RetryCount = 0;
            job.LastError = null;
            job.ModifiedAtUtc = DateTime.UtcNow;
            await _db.SaveChangesAsync();

            _logger.LogInformation("RetryJob: Job {JobId} reset to Pending status", id);
            return Ok(new
            {
                success = true,
                message = "Job reset to Pending status and will be retried"
            });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "RetryJob: Error retrying job {JobId}", id);
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>
    /// Cancels a job by marking it Failed with "Cancelled by user".
    /// Running and Completed jobs cannot be cancelled.
    /// </summary>
    [HttpDelete]
    [Route("{id:guid}")]
    public async Task<IActionResult> CancelJob(Guid id)
    {
        try
        {
            var job = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
            if (job == null)
            {
                _logger.LogWarning("CancelJob: Job {JobId} not found", id);
                return NotFound("Job not found");
            }

            if (job.Status == JobStatus.Running)
            {
                _logger.LogWarning("CancelJob: Cannot cancel running job {JobId}", id);
                return BadRequest("Cannot cancel a job that is currently running");
            }
            if (job.Status == JobStatus.Completed)
            {
                _logger.LogWarning("CancelJob: Cannot cancel completed job {JobId}", id);
                return BadRequest("Cannot cancel a completed job");
            }

            // Cancellation is modelled as a Failed status with a marker error text.
            job.Status = JobStatus.Failed;
            job.LastError = "Cancelled by user";
            job.ModifiedAtUtc = DateTime.UtcNow;
            await _db.SaveChangesAsync();

            _logger.LogInformation("CancelJob: Job {JobId} cancelled", id);
            return Ok(new
            {
                success = true,
                message = "Job cancelled successfully"
            });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "CancelJob: Error cancelling job {JobId}", id);
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>Returns job counts per status plus the overall total.</summary>
    [HttpGet]
    [Route("stats")]
    public async Task<IActionResult> GetStats()
    {
        try
        {
            var stats = new
            {
                pending = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Pending),
                running = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Running),
                completed = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Completed),
                failed = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Failed),
                retrying = await _db.QueueJobs.CountAsync(j => j.Status == JobStatus.Retrying),
                total = await _db.QueueJobs.CountAsync()
            };

            _logger.LogDebug("GetStats: Retrieved job statistics");
            return Ok(stats);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "GetStats: Error retrieving job statistics");
            return BadRequest(ex.ToString());
        }
    }

    /// <summary>
    /// Creates a queue job for an Administration layer that is configured as an
    /// enabled ImportWorker or ProcessWorker. Idempotent: if a Pending/Running
    /// job already exists for the layer, that job is returned instead.
    /// </summary>
    [HttpPost]
    [Route("create-for-layer/{layerId:guid}")]
    public async Task<IActionResult> CreateJobForLayer(Guid layerId)
    {
        try
        {
            var layer = await _db.Layers
                .Include(x => x.Records)
                .FirstOrDefaultAsync(l => l.Id == layerId);

            if (layer == null)
            {
                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} not found", layerId);
                return NotFound($"Layer {layerId} not found");
            }
            if (layer.Type != LayerType.Administration)
            {
                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not an Administration layer", layerId);
                return BadRequest("Only Administration layers can be run as jobs");
            }

            // Get the Type record to determine if it's ImportWorker or ProcessWorker.
            // A missing Type record also lands here (null matches neither value).
            var typeRecord = layer.Records?.FirstOrDefault(x => x.Code == "Type");
            if (typeRecord?.Desc1 != "ImportWorker" && typeRecord?.Desc1 != "ProcessWorker")
            {
                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not a valid worker type", layerId);
                return BadRequest("Layer must be an ImportWorker or ProcessWorker");
            }

            // Check if enabled
            var isEnabledRecord = layer.Records?.FirstOrDefault(x => x.Code == "IsEnabled");
            if (isEnabledRecord?.Desc1 != "True")
            {
                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not enabled", layerId);
                return BadRequest("Layer is not enabled");
            }

            // Get plugin name
            var pluginRecord = layer.Records?.FirstOrDefault(x => x.Code == "Plugin");
            if (string.IsNullOrEmpty(pluginRecord?.Desc1))
            {
                _logger.LogWarning("CreateJobForLayer: Layer {LayerId} has no Plugin configured", layerId);
                return BadRequest("Layer has no Plugin configured");
            }

            // Get priority and max retries; fall back to 0 / 3 on missing or
            // non-numeric record values.
            var priorityRecord = layer.Records?.FirstOrDefault(x => x.Code == "Priority");
            var maxRetriesRecord = layer.Records?.FirstOrDefault(x => x.Code == "MaxRetries");
            var priority = int.TryParse(priorityRecord?.Desc1, out var p) ? p : 0;
            var maxRetries = int.TryParse(maxRetriesRecord?.Desc1, out var m) ? m : 3;

            var jobType = typeRecord.Desc1 == "ImportWorker" ? JobType.Import : JobType.Process;

            // Check if there's already a pending/running job for this layer
            var existingJob = await _db.QueueJobs
                .Where(j => j.LayerId == layer.Id &&
                            (j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
                .FirstOrDefaultAsync();

            if (existingJob != null)
            {
                _logger.LogInformation("CreateJobForLayer: Job already exists for layer {LayerId}, returning existing job", layerId);
                return Ok(new
                {
                    success = true,
                    jobId = existingJob.Id,
                    message = "Job already exists for this layer",
                    existing = true
                });
            }

            // Create the job
            var job = new QueueJob
            {
                Id = Guid.NewGuid(),
                LayerId = layer.Id,
                LayerName = layer.Name ?? "Unknown",
                PluginName = pluginRecord.Desc1,
                JobType = jobType,
                Priority = priority,
                MaxRetries = maxRetries,
                Status = JobStatus.Pending,
                CreatedAt = DateTime.UtcNow,
                CreatedAtUtc = DateTime.UtcNow,
                ModifiedAtUtc = DateTime.UtcNow,
                CreatedById = Guid.Empty,
                ModifiedById = Guid.Empty
            };

            _db.QueueJobs.Add(job);
            await _db.SaveChangesAsync();

            _logger.LogInformation("CreateJobForLayer: Created job {JobId} for layer {LayerName} ({LayerId})",
                job.Id, layer.Name, layerId);

            return Ok(new
            {
                success = true,
                jobId = job.Id,
                message = "Job created successfully",
                existing = false
            });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "CreateJobForLayer: Error creating job for layer {LayerId}", layerId);
            return BadRequest(ex.ToString());
        }
    }
}

View File

@@ -39,9 +39,11 @@
<Target Name="CopyPlugins" AfterTargets="Build"> <Target Name="CopyPlugins" AfterTargets="Build">
<MSBuild Projects="../DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" /> <MSBuild Projects="../DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
<MSBuild Projects="../DiunaBI.Plugins.PedrolloPL/DiunaBI.Plugins.PedrolloPL.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
<ItemGroup> <ItemGroup>
<PluginFiles Include="../DiunaBI.Plugins.Morska/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.Morska.dll" /> <PluginFiles Include="../DiunaBI.Plugins.Morska/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.Morska.dll" />
<PluginFiles Include="../DiunaBI.Plugins.PedrolloPL/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.PedrolloPL.dll" />
</ItemGroup> </ItemGroup>
<MakeDir Directories="$(OutputPath)Plugins" /> <MakeDir Directories="$(OutputPath)Plugins" />
<Copy SourceFiles="@(PluginFiles)" DestinationFolder="$(OutputPath)Plugins" /> <Copy SourceFiles="@(PluginFiles)" DestinationFolder="$(OutputPath)Plugins" />

View File

@@ -1,6 +1,7 @@
# Stage 1: Build # Stage 1: Build
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG PLUGIN_PROJECT=DiunaBI.Plugins.Morska
WORKDIR / WORKDIR /
# Copy solution and all project files for restore # Copy solution and all project files for restore
@@ -9,7 +10,7 @@ COPY DiunaBI.API/DiunaBI.API.csproj DiunaBI.API/
COPY DiunaBI.Domain/DiunaBI.Domain.csproj DiunaBI.Domain/ COPY DiunaBI.Domain/DiunaBI.Domain.csproj DiunaBI.Domain/
COPY DiunaBI.Application/DiunaBI.Application.csproj DiunaBI.Application/ COPY DiunaBI.Application/DiunaBI.Application.csproj DiunaBI.Application/
COPY DiunaBI.Infrastructure/DiunaBI.Infrastructure.csproj DiunaBI.Infrastructure/ COPY DiunaBI.Infrastructure/DiunaBI.Infrastructure.csproj DiunaBI.Infrastructure/
COPY DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj DiunaBI.Plugins.Morska/ COPY ${PLUGIN_PROJECT}/${PLUGIN_PROJECT}.csproj ${PLUGIN_PROJECT}/
# Restore dependencies # Restore dependencies
RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
@@ -18,7 +19,7 @@ RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
COPY . . COPY . .
# Build plugin first # Build plugin first
WORKDIR /DiunaBI.Plugins.Morska WORKDIR /${PLUGIN_PROJECT}
RUN dotnet build -c Release RUN dotnet build -c Release
# Build and publish API # Build and publish API
@@ -27,7 +28,7 @@ RUN dotnet publish -c Release -o /app/publish --no-restore
# Copy plugin DLL to publish output # Copy plugin DLL to publish output
RUN mkdir -p /app/publish/Plugins && \ RUN mkdir -p /app/publish/Plugins && \
cp /DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll /app/publish/Plugins/ cp /${PLUGIN_PROJECT}/bin/Release/net10.0/${PLUGIN_PROJECT}.dll /app/publish/Plugins/
# Stage 2: Runtime # Stage 2: Runtime
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS runtime FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS runtime

View File

@@ -97,6 +97,10 @@ builder.Services.AddSingleton<SpreadsheetsResource.ValuesResource>(provider =>
builder.Services.AddSingleton<PluginManager>(); builder.Services.AddSingleton<PluginManager>();
// Job Queue Services
builder.Services.AddScoped<JobSchedulerService>();
builder.Services.AddHostedService<JobWorkerService>();
var app = builder.Build(); var app = builder.Build();
// Auto-apply migrations on startup // Auto-apply migrations on startup

View File

@@ -23,6 +23,7 @@
<PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3525" /> <PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3525" />
<PackageReference Include="Google.Apis.Drive.v3" Version="1.68.0.3490" /> <PackageReference Include="Google.Apis.Drive.v3" Version="1.68.0.3490" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0" /> <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
</ItemGroup> </ItemGroup>
</Project> </Project>

View File

@@ -49,7 +49,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasKey("Id"); b.HasKey("Id");
b.ToTable("DataInbox"); b.ToTable("DataInbox", (string)null);
}); });
modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b => modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b =>
@@ -104,7 +104,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("ModifiedById"); b.HasIndex("ModifiedById");
b.ToTable("Layers"); b.ToTable("Layers", (string)null);
}); });
modelBuilder.Entity("DiunaBI.Domain.Entities.ProcessSource", b => modelBuilder.Entity("DiunaBI.Domain.Entities.ProcessSource", b =>
@@ -119,7 +119,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("SourceId"); b.HasIndex("SourceId");
b.ToTable("ProcessSources"); b.ToTable("ProcessSources", (string)null);
}); });
modelBuilder.Entity("DiunaBI.Domain.Entities.QueueJob", b => modelBuilder.Entity("DiunaBI.Domain.Entities.QueueJob", b =>
@@ -185,7 +185,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasKey("Id"); b.HasKey("Id");
b.ToTable("QueueJobs"); b.ToTable("QueueJobs", (string)null);
}); });
modelBuilder.Entity("DiunaBI.Domain.Entities.Record", b => modelBuilder.Entity("DiunaBI.Domain.Entities.Record", b =>
@@ -329,7 +329,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("ModifiedById"); b.HasIndex("ModifiedById");
b.ToTable("Records"); b.ToTable("Records", (string)null);
}); });
modelBuilder.Entity("DiunaBI.Domain.Entities.RecordHistory", b => modelBuilder.Entity("DiunaBI.Domain.Entities.RecordHistory", b =>
@@ -378,7 +378,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasIndex("RecordId", "ChangedAt"); b.HasIndex("RecordId", "ChangedAt");
b.ToTable("RecordHistory"); b.ToTable("RecordHistory", (string)null);
}); });
modelBuilder.Entity("DiunaBI.Domain.Entities.User", b => modelBuilder.Entity("DiunaBI.Domain.Entities.User", b =>
@@ -402,7 +402,7 @@ namespace DiunaBI.Infrastructure.Migrations
b.HasKey("Id"); b.HasKey("Id");
b.ToTable("Users"); b.ToTable("Users", (string)null);
}); });
modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b => modelBuilder.Entity("DiunaBI.Domain.Entities.Layer", b =>

View File

@@ -1,11 +1,13 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Interfaces; using DiunaBI.Infrastructure.Interfaces;
namespace DiunaBI.Plugins.Morska.Exporters; namespace DiunaBI.Infrastructure.Plugins;
public abstract class MorskaBaseExporter : IDataExporter public abstract class BaseDataExporter : IDataExporter
{ {
public abstract string ExporterType { get; } public abstract string ExporterType { get; }
public virtual bool CanExport(string exporterType) => ExporterType == exporterType; public virtual bool CanExport(string exporterType) => ExporterType == exporterType;
public abstract void Export(Layer layer); public abstract void Export(Layer layer);
} }

View File

@@ -1,9 +1,9 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Interfaces; using DiunaBI.Infrastructure.Interfaces;
namespace DiunaBI.Plugins.Morska.Importers; namespace DiunaBI.Infrastructure.Plugins;
public abstract class MorskaBaseImporter : IDataImporter public abstract class BaseDataImporter : IDataImporter
{ {
public abstract string ImporterType { get; } public abstract string ImporterType { get; }

View File

@@ -1,9 +1,9 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Interfaces; using DiunaBI.Infrastructure.Interfaces;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Infrastructure.Plugins;
public abstract class MorskaBaseProcessor : IDataProcessor public abstract class BaseDataProcessor : IDataProcessor
{ {
public abstract string ProcessorType { get; } public abstract string ProcessorType { get; }

View File

@@ -0,0 +1,216 @@
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace DiunaBI.Infrastructure.Services;
public class JobSchedulerService
{
    private readonly AppDbContext _db;
    private readonly ILogger<JobSchedulerService> _logger;

    public JobSchedulerService(AppDbContext db, ILogger<JobSchedulerService> logger)
    {
        _db = db;
        _logger = logger;
    }

    /// <summary>
    /// Scans Layers for enabled ImportWorker configurations (records Type=ImportWorker,
    /// IsEnabled=True) and enqueues one pending import job per worker, skipping workers
    /// that already have a pending or running job.
    /// </summary>
    /// <param name="nameFilter">Optional substring filter on the layer name.</param>
    /// <returns>Number of jobs created.</returns>
    public async Task<int> ScheduleImportJobsAsync(string? nameFilter = null)
    {
        _logger.LogInformation("JobScheduler: Starting import job scheduling with filter: {NameFilter}", nameFilter ?? "none");

        var query = _db.Layers
            .Include(x => x.Records)
            .Where(x =>
                x.Records!.Any(r => r.Code == "Type" && r.Desc1 == "ImportWorker") &&
                x.Records!.Any(r => r.Code == "IsEnabled" && r.Desc1 == "True")
            );

        if (!string.IsNullOrEmpty(nameFilter))
        {
            query = query.Where(x => x.Name != null && x.Name.Contains(nameFilter));
        }

        var importWorkers = await query
            .OrderBy(x => x.CreatedAt)
            .AsNoTracking()
            .ToListAsync();

        _logger.LogInformation("JobScheduler: Found {Count} import workers to schedule", importWorkers.Count);

        // Imports default to priority 50 (lower value = picked up first by the worker).
        return await CreateJobsAsync(importWorkers, JobType.Import, defaultPriority: 50);
    }

    /// <summary>
    /// Scans Layers for enabled ProcessWorker configurations and enqueues one pending
    /// process job per worker, skipping workers with a pending or running job.
    /// </summary>
    /// <returns>Number of jobs created.</returns>
    public async Task<int> ScheduleProcessJobsAsync()
    {
        _logger.LogInformation("JobScheduler: Starting process job scheduling");

        var processWorkers = await _db.Layers
            .Include(x => x.Records)
            .Where(x =>
                x.Records!.Any(r => r.Code == "Type" && r.Desc1 == "ProcessWorker") &&
                x.Records!.Any(r => r.Code == "IsEnabled" && r.Desc1 == "True")
            )
            .OrderBy(x => x.CreatedAt)
            .AsNoTracking()
            .ToListAsync();

        _logger.LogInformation("JobScheduler: Found {Count} process workers to schedule", processWorkers.Count);

        // Processes default to priority 100 so they run after imports (default 50).
        return await CreateJobsAsync(processWorkers, JobType.Process, defaultPriority: 100);
    }

    /// <summary>
    /// Schedules both import and process jobs; imports are scheduled first.
    /// </summary>
    /// <param name="nameFilter">Optional substring filter applied to import workers only.</param>
    /// <returns>Total number of jobs created.</returns>
    public async Task<int> ScheduleAllJobsAsync(string? nameFilter = null)
    {
        var importCount = await ScheduleImportJobsAsync(nameFilter);
        var processCount = await ScheduleProcessJobsAsync();

        _logger.LogInformation("JobScheduler: Scheduled {ImportCount} import jobs and {ProcessCount} process jobs",
            importCount, processCount);

        return importCount + processCount;
    }

    // Shared job-creation loop for both worker kinds. Reads per-worker config records
    // (Plugin, Priority, MaxRetries), deduplicates against pending/running jobs, and
    // persists all new jobs with a single SaveChanges call.
    private async Task<int> CreateJobsAsync(List<Layer> workers, JobType jobType, int defaultPriority)
    {
        var jobsCreated = 0;

        foreach (var worker in workers)
        {
            try
            {
                var plugin = GetConfig(worker, "Plugin");
                if (string.IsNullOrEmpty(plugin))
                {
                    _logger.LogWarning("JobScheduler: Worker {LayerName} ({LayerId}) has no Plugin configured, skipping",
                        worker.Name, worker.Id);
                    continue;
                }

                var priority = GetIntConfig(worker, "Priority", defaultPriority);
                var maxRetries = GetIntConfig(worker, "MaxRetries", 3);

                // Skip when a job for this layer is already queued or in flight.
                var existingJob = await _db.QueueJobs
                    .Where(j => j.LayerId == worker.Id &&
                               (j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
                    .FirstOrDefaultAsync();

                if (existingJob != null)
                {
                    _logger.LogDebug("JobScheduler: Job already exists for {LayerName} ({LayerId}), status: {Status}",
                        worker.Name, worker.Id, existingJob.Status);
                    continue;
                }

                var now = DateTime.UtcNow;
                _db.QueueJobs.Add(new QueueJob
                {
                    Id = Guid.NewGuid(),
                    LayerId = worker.Id,
                    LayerName = worker.Name ?? "Unknown",
                    PluginName = plugin,
                    JobType = jobType,
                    Priority = priority,
                    MaxRetries = maxRetries,
                    Status = JobStatus.Pending,
                    CreatedAt = now,
                    CreatedAtUtc = now,
                    ModifiedAtUtc = now,
                    CreatedById = Guid.Empty, // System user
                    ModifiedById = Guid.Empty
                });
                jobsCreated++;

                _logger.LogInformation("JobScheduler: Created {JobType} job for {LayerName} ({LayerId}) with priority {Priority}",
                    jobType, worker.Name, worker.Id, priority);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "JobScheduler: Failed to create job for {LayerName} ({LayerId})",
                    worker.Name, worker.Id);
            }
        }

        if (jobsCreated > 0)
        {
            await _db.SaveChangesAsync();
            _logger.LogInformation("JobScheduler: Successfully created {Count} {JobType} jobs", jobsCreated, jobType);
        }

        return jobsCreated;
    }

    // Reads the Desc1 value of the worker's config record with the given code, or null.
    private static string? GetConfig(Layer worker, string code) =>
        worker.Records?.FirstOrDefault(r => r.Code == code)?.Desc1;

    // Reads an integer config value, falling back to the default when missing/unparsable.
    private static int GetIntConfig(Layer worker, string code, int fallback) =>
        int.TryParse(GetConfig(worker, code), out var value) ? value : fallback;
}

View File

@@ -0,0 +1,178 @@
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace DiunaBI.Infrastructure.Services;
public class JobWorkerService : BackgroundService
{
    private readonly IServiceProvider _serviceProvider;
    private readonly ILogger<JobWorkerService> _logger;

    // How often the queue is polled for the next job.
    private readonly TimeSpan _pollInterval = TimeSpan.FromSeconds(10);

    // Delay applied after successful imports (Google Sheets API quota protection).
    private readonly TimeSpan _rateLimitDelay = TimeSpan.FromSeconds(5);

    public JobWorkerService(IServiceProvider serviceProvider, ILogger<JobWorkerService> logger)
    {
        _serviceProvider = serviceProvider;
        _logger = logger;
    }

    /// <summary>
    /// Main worker loop: processes at most one job per poll interval until shutdown.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("JobWorker: Service started");

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await ProcessNextJobAsync(stoppingToken);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break; // normal shutdown mid-job
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "JobWorker: Unexpected error in main loop");
            }

            try
            {
                await Task.Delay(_pollInterval, stoppingToken);
            }
            catch (OperationCanceledException)
            {
                break; // normal shutdown while idle — don't surface as a fault
            }
        }

        _logger.LogInformation("JobWorker: Service stopped");
    }

    // Picks the highest-priority eligible job and executes it. Jobs in Retrying state
    // become eligible only after their backoff window has elapsed, so a failing job
    // no longer blocks the worker loop (the original awaited the backoff delay inline,
    // stalling all other jobs for up to 5 minutes and holding the DbContext scope open).
    private async Task ProcessNextJobAsync(CancellationToken stoppingToken)
    {
        using var scope = _serviceProvider.CreateScope();
        var db = scope.ServiceProvider.GetRequiredService<AppDbContext>();
        var pluginManager = scope.ServiceProvider.GetRequiredService<PluginManager>();

        var now = DateTime.UtcNow;

        // Fetch a small ordered window of candidates; backoff eligibility is evaluated
        // client-side because GetBackoffDelay cannot be translated to SQL.
        var candidates = await db.QueueJobs
            .Where(j => j.Status == JobStatus.Pending || j.Status == JobStatus.Retrying)
            .OrderBy(j => j.Priority)
            .ThenBy(j => j.CreatedAt)
            .Take(25)
            .ToListAsync(stoppingToken);

        var job = candidates.FirstOrDefault(j => IsEligible(j, now));
        if (job == null)
        {
            // No jobs ready to process
            return;
        }

        _logger.LogInformation("JobWorker: Processing job {JobId} - {LayerName} ({JobType})",
            job.Id, job.LayerName, job.JobType);

        // Mark job as running before executing so it is not picked up twice.
        job.Status = JobStatus.Running;
        job.LastAttemptAt = DateTime.UtcNow;
        job.ModifiedAtUtc = DateTime.UtcNow;
        await db.SaveChangesAsync(stoppingToken);

        try
        {
            // Load the layer with its configuration records.
            var layer = await db.Layers
                .Include(l => l.Records)
                .AsNoTracking()
                .FirstOrDefaultAsync(l => l.Id == job.LayerId, stoppingToken);

            if (layer == null)
            {
                throw new Exception($"Layer {job.LayerId} not found");
            }

            ExecuteJob(pluginManager, job, layer);

            // Job completed successfully.
            job.Status = JobStatus.Completed;
            job.CompletedAt = DateTime.UtcNow;
            job.LastError = null;
            job.ModifiedAtUtc = DateTime.UtcNow;

            _logger.LogInformation("JobWorker: Job {JobId} completed successfully", job.Id);

            // Rate limiting delay (for Google Sheets API quota).
            if (job.JobType == JobType.Import)
            {
                _logger.LogDebug("JobWorker: Applying rate limit delay of {Delay} seconds", _rateLimitDelay.TotalSeconds);
                await Task.Delay(_rateLimitDelay, stoppingToken);
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "JobWorker: Job {JobId} failed - {LayerName}", job.Id, job.LayerName);

            job.RetryCount++;
            job.LastError = ex.Message;
            job.ModifiedAtUtc = DateTime.UtcNow;

            if (job.RetryCount >= job.MaxRetries)
            {
                job.Status = JobStatus.Failed;
                _logger.LogWarning("JobWorker: Job {JobId} marked as Failed after {RetryCount} attempts",
                    job.Id, job.RetryCount);
            }
            else
            {
                // Leave the job in Retrying; it becomes eligible again once the backoff
                // window (measured from LastAttemptAt) has passed. No blocking delay —
                // other queued jobs keep flowing in the meantime.
                job.Status = JobStatus.Retrying;
                var backoffDelay = GetBackoffDelay(job.RetryCount);
                _logger.LogInformation("JobWorker: Job {JobId} will retry in {Delay} (attempt {RetryCount}/{MaxRetries})",
                    job.Id, backoffDelay, job.RetryCount, job.MaxRetries);
            }
        }
        finally
        {
            await db.SaveChangesAsync(stoppingToken);
        }
    }

    // Dispatches the job to the matching importer or processor plugin.
    private void ExecuteJob(PluginManager pluginManager, QueueJob job, Layer layer)
    {
        if (job.JobType == JobType.Import)
        {
            var importer = pluginManager.GetImporter(job.PluginName)
                ?? throw new Exception($"Importer '{job.PluginName}' not found");

            _logger.LogInformation("JobWorker: Executing import for {LayerName} using {PluginName}",
                job.LayerName, job.PluginName);
            importer.Import(layer);
        }
        else if (job.JobType == JobType.Process)
        {
            var processor = pluginManager.GetProcessor(job.PluginName)
                ?? throw new Exception($"Processor '{job.PluginName}' not found");

            _logger.LogInformation("JobWorker: Executing process for {LayerName} using {PluginName}",
                job.LayerName, job.PluginName);
            processor.Process(layer);
        }
    }

    // A Pending job is always eligible; a Retrying job only after its backoff elapsed.
    // Pattern match works whether LastAttemptAt is declared DateTime or DateTime?.
    private static bool IsEligible(QueueJob job, DateTime now)
    {
        if (job.Status != JobStatus.Retrying)
        {
            return true;
        }
        return job.LastAttemptAt is not DateTime lastAttempt
            || lastAttempt + GetBackoffDelay(job.RetryCount) <= now;
    }

    public override async Task StopAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("JobWorker: Stopping service...");
        await base.StopAsync(stoppingToken);
    }

    // Exponential backoff schedule for retries.
    private static TimeSpan GetBackoffDelay(int retryCount)
    {
        return retryCount switch
        {
            1 => TimeSpan.FromSeconds(30), // 1st retry: 30 seconds
            2 => TimeSpan.FromMinutes(2),  // 2nd retry: 2 minutes
            _ => TimeSpan.FromMinutes(5)   // 3rd+ retry: 5 minutes
        };
    }
}

View File

@@ -11,7 +11,7 @@ public class PluginManager
private readonly IServiceProvider _serviceProvider; private readonly IServiceProvider _serviceProvider;
private readonly List<Type> _processorTypes = new(); private readonly List<Type> _processorTypes = new();
private readonly List<Type> _importerTypes = new(); private readonly List<Type> _importerTypes = new();
private readonly List<IDataExporter> _exporters = new(); private readonly List<Type> _exporterTypes = new();
private readonly List<IPlugin> _plugins = new(); private readonly List<IPlugin> _plugins = new();
public PluginManager(ILogger<PluginManager> logger, IServiceProvider serviceProvider) public PluginManager(ILogger<PluginManager> logger, IServiceProvider serviceProvider)
@@ -42,10 +42,11 @@ public class PluginManager
} }
} }
_logger.LogInformation("Loaded {ProcessorCount} processors and {ImporterCount} importers from {AssemblyCount} assemblies", _logger.LogInformation("Loaded {ProcessorCount} processors, {ImporterCount} importers, and {ExporterCount} exporters from {AssemblyCount} assemblies",
_processorTypes.Count, _processorTypes.Count,
_importerTypes.Count, _importerTypes.Count,
dllFiles.Length); // Zmień z _plugins.Count na assemblyFiles.Length _exporterTypes.Count,
dllFiles.Length);
} }
private void LoadPluginFromAssembly(string assemblyPath) private void LoadPluginFromAssembly(string assemblyPath)
@@ -70,6 +71,12 @@ public class PluginManager
_importerTypes.Add(type); _importerTypes.Add(type);
_logger.LogDebug("Registered importer: {Type}", type.Name); // Information -> Debug _logger.LogDebug("Registered importer: {Type}", type.Name); // Information -> Debug
} }
if (typeof(IDataExporter).IsAssignableFrom(type) && !type.IsInterface && !type.IsAbstract)
{
_exporterTypes.Add(type);
_logger.LogDebug("Registered exporter: {Type}", type.Name);
}
} }
} }
catch (Exception ex) catch (Exception ex)
@@ -84,14 +91,15 @@ public class PluginManager
{ {
try try
{ {
using var scope = _serviceProvider.CreateScope(); var scope = _serviceProvider.CreateScope();
var instance = (IDataProcessor)ActivatorUtilities.CreateInstance(scope.ServiceProvider, type); var instance = (IDataProcessor)ActivatorUtilities.CreateInstance(scope.ServiceProvider, type);
if (instance.CanProcess(processorType)) if (instance.CanProcess(processorType))
{ {
var scopedProvider = _serviceProvider.CreateScope().ServiceProvider; return instance;
return (IDataProcessor)ActivatorUtilities.CreateInstance(scopedProvider, type);
} }
scope.Dispose();
} }
catch (Exception ex) catch (Exception ex)
{ {
@@ -107,14 +115,15 @@ public class PluginManager
{ {
try try
{ {
using var scope = _serviceProvider.CreateScope(); var scope = _serviceProvider.CreateScope();
var instance = (IDataImporter)ActivatorUtilities.CreateInstance(scope.ServiceProvider, type); var instance = (IDataImporter)ActivatorUtilities.CreateInstance(scope.ServiceProvider, type);
if (instance.CanImport(importerType)) if (instance.CanImport(importerType))
{ {
var scopedProvider = _serviceProvider.CreateScope().ServiceProvider; return instance;
return (IDataImporter)ActivatorUtilities.CreateInstance(scopedProvider, type);
} }
scope.Dispose();
} }
catch (Exception ex) catch (Exception ex)
{ {
@@ -126,7 +135,27 @@ public class PluginManager
public IDataExporter? GetExporter(string exporterType) public IDataExporter? GetExporter(string exporterType)
{ {
return _exporters.FirstOrDefault(e => e.CanExport(exporterType)); foreach (var type in _exporterTypes)
{
try
{
var scope = _serviceProvider.CreateScope();
var instance = (IDataExporter)ActivatorUtilities.CreateInstance(scope.ServiceProvider, type);
if (instance.CanExport(exporterType))
{
return instance;
} }
public int GetPluginsCount() => _processorTypes.Count + _importerTypes.Count + _exporters.Count;
scope.Dispose();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create exporter instance of type {Type}", type.Name);
}
}
return null;
}
public int GetPluginsCount() => _processorTypes.Count + _importerTypes.Count + _exporterTypes.Count;
} }

View File

@@ -1,5 +1,6 @@
using System.Globalization; using System.Globalization;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Google.Apis.Sheets.v4.Data; using Google.Apis.Sheets.v4.Data;
@@ -7,7 +8,7 @@ using Microsoft.Extensions.Configuration;
namespace DiunaBI.Plugins.Morska.Exporters; namespace DiunaBI.Plugins.Morska.Exporters;
public class GoogleSheetExport : MorskaBaseExporter public class GoogleSheetExport : BaseDataExporter
{ {
public override string ExporterType => "GoogleSheet"; public override string ExporterType => "GoogleSheet";
private readonly GoogleDriveHelper _googleDriveHelper; private readonly GoogleDriveHelper _googleDriveHelper;

View File

@@ -1,6 +1,7 @@
using System.Globalization; using System.Globalization;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
@@ -8,7 +9,7 @@ using Microsoft.EntityFrameworkCore;
namespace DiunaBI.Plugins.Morska.Importers; namespace DiunaBI.Plugins.Morska.Importers;
public class MorskaD1Importer : MorskaBaseImporter public class MorskaD1Importer : BaseDataImporter
{ {
public override string ImporterType => "Morska.Import.D1"; public override string ImporterType => "Morska.Import.D1";

View File

@@ -3,6 +3,7 @@ using System.Text;
using System.Text.Json; using System.Text.Json;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
@@ -11,7 +12,7 @@ using Google.Apis.Sheets.v4.Data;
namespace DiunaBI.Plugins.Morska.Importers; namespace DiunaBI.Plugins.Morska.Importers;
public class MorskaD3Importer : MorskaBaseImporter public class MorskaD3Importer : BaseDataImporter
{ {
public override string ImporterType => "Morska.Import.D3"; public override string ImporterType => "Morska.Import.D3";

View File

@@ -1,13 +1,14 @@
using System.Globalization; using System.Globalization;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace DiunaBI.Plugins.Morska.Importers; namespace DiunaBI.Plugins.Morska.Importers;
public class MorskaFk2Importer : MorskaBaseImporter public class MorskaFk2Importer : BaseDataImporter
{ {
public override string ImporterType => "Morska.Import.FK2"; public override string ImporterType => "Morska.Import.FK2";

View File

@@ -1,6 +1,7 @@
using System.Globalization; using System.Globalization;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
@@ -8,7 +9,7 @@ using Microsoft.EntityFrameworkCore;
namespace DiunaBI.Plugins.Morska.Importers; namespace DiunaBI.Plugins.Morska.Importers;
public class MorskaStandardImporter : MorskaBaseImporter public class MorskaStandardImporter : BaseDataImporter
{ {
public override string ImporterType => "Morska.Import.Standard"; public override string ImporterType => "Morska.Import.Standard";

View File

@@ -2,6 +2,7 @@
using System.Text.RegularExpressions; using System.Text.RegularExpressions;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services.Calculations; using DiunaBI.Infrastructure.Services.Calculations;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Google.Apis.Sheets.v4.Data; using Google.Apis.Sheets.v4.Data;
@@ -10,7 +11,7 @@ using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaD6Processor : MorskaBaseProcessor public class MorskaD6Processor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.D6"; public override string ProcessorType => "Morska.Process.D6";

View File

@@ -1,6 +1,7 @@
using System.Globalization; using System.Globalization;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using DiunaBI.Infrastructure.Services.Calculations; using DiunaBI.Infrastructure.Services.Calculations;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
@@ -10,7 +11,7 @@ using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT1R1Processor : MorskaBaseProcessor public class MorskaT1R1Processor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T1.R1"; public override string ProcessorType => "Morska.Process.T1.R1";

View File

@@ -2,6 +2,7 @@
using System.Text.RegularExpressions; using System.Text.RegularExpressions;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Google.Apis.Sheets.v4.Data; using Google.Apis.Sheets.v4.Data;
@@ -10,7 +11,7 @@ using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT1R3Processor : MorskaBaseProcessor public class MorskaT1R3Processor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T1.R3"; public override string ProcessorType => "Morska.Process.T1.R3";

View File

@@ -1,12 +1,13 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT3MultiSourceCopySelectedCodesProcessor : MorskaBaseProcessor public class MorskaT3MultiSourceCopySelectedCodesProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "T3.MultiSourceCopySelectedCodes"; public override string ProcessorType => "T3.MultiSourceCopySelectedCodes";

View File

@@ -1,12 +1,13 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT3MultiSourceCopySelectedCodesYearSummaryProcessor : MorskaBaseProcessor public class MorskaT3MultiSourceCopySelectedCodesYearSummaryProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "T3.MultiSourceCopySelectedCodesYearSummary"; public override string ProcessorType => "T3.MultiSourceCopySelectedCodesYearSummary";

View File

@@ -1,5 +1,6 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using DiunaBI.Infrastructure.Services.Calculations; using DiunaBI.Infrastructure.Services.Calculations;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
@@ -7,7 +8,7 @@ using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT3MultiSourceSummaryProcessor : MorskaBaseProcessor public class MorskaT3MultiSourceSummaryProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T3.MultiSourceSummary"; public override string ProcessorType => "Morska.Process.T3.MultiSourceSummary";

View File

@@ -1,5 +1,6 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using DiunaBI.Infrastructure.Services.Calculations; using DiunaBI.Infrastructure.Services.Calculations;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
@@ -7,7 +8,7 @@ using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT3MultiSourceYearSummaryProcessor : MorskaBaseProcessor public class MorskaT3MultiSourceYearSummaryProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T3.MultiSourceYearSummary"; public override string ProcessorType => "Morska.Process.T3.MultiSourceYearSummary";

View File

@@ -1,5 +1,6 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
@@ -7,7 +8,7 @@ using Google.Apis.Sheets.v4;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT3SingleSourceProcessor : MorskaBaseProcessor public class MorskaT3SingleSourceProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T3.SingleSource"; public override string ProcessorType => "Morska.Process.T3.SingleSource";

View File

@@ -1,5 +1,6 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
@@ -7,7 +8,7 @@ using Google.Apis.Sheets.v4;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT3SourceYearSummaryProcessor : MorskaBaseProcessor public class MorskaT3SourceYearSummaryProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T3.SourceYearSummary"; public override string ProcessorType => "Morska.Process.T3.SourceYearSummary";

View File

@@ -1,6 +1,7 @@
using System.Globalization; using System.Globalization;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Google.Apis.Sheets.v4.Data; using Google.Apis.Sheets.v4.Data;
@@ -9,7 +10,7 @@ using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT4R2Processor : MorskaBaseProcessor public class MorskaT4R2Processor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T4.R2"; public override string ProcessorType => "Morska.Process.T4.R2";

View File

@@ -1,12 +1,13 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT4SingleSourceProcessor : MorskaBaseProcessor public class MorskaT4SingleSourceProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T4.SingleSource"; public override string ProcessorType => "Morska.Process.T4.SingleSource";

View File

@@ -1,12 +1,13 @@
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using DiunaBI.Infrastructure.Services; using DiunaBI.Infrastructure.Services;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.Morska.Processors; namespace DiunaBI.Plugins.Morska.Processors;
public class MorskaT5LastValuesProcessor : MorskaBaseProcessor public class MorskaT5LastValuesProcessor : BaseDataProcessor
{ {
public override string ProcessorType => "Morska.Process.T5.LastValues"; public override string ProcessorType => "Morska.Process.T5.LastValues";

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">

  <!-- Presumably the DiunaBI.Plugins.PedrolloPL plugin project (new file in this commit) — confirm filename. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <!-- Logging abstraction and Google Sheets client used by the plugin's importers/processors. -->
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging" Version="10.0.0" />
    <PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3525" />
  </ItemGroup>

  <!-- Domain entities (Layer, Record, DataInbox) and infrastructure base types (BaseDataImporter, AppDbContext). -->
  <ItemGroup>
    <ProjectReference Include="..\DiunaBI.Domain\DiunaBI.Domain.csproj" />
    <ProjectReference Include="..\DiunaBI.Infrastructure\DiunaBI.Infrastructure.csproj" />
  </ItemGroup>

</Project>

View File

@@ -0,0 +1,382 @@
using System.Globalization;
using System.Text;
using System.Text.Json;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Plugins;
using Microsoft.Extensions.Logging;
namespace DiunaBI.Plugins.PedrolloPL.Importers;
/// <summary>
/// Imports "P2" monthly-per-region data for the PedrolloPL customer.
/// Reads the latest matching <see cref="DataInbox"/> payload (base64-encoded JSON array of rows,
/// each row = [regionName, janValue, ..., decValue]), maps region names to codes via the
/// "L1-D-P2-CODES" dictionary layer, and writes one Record per region-month into a new Import layer.
/// </summary>
public class PedrolloPLImportP2 : BaseDataImporter
{
    public override string ImporterType => "PedrolloPL.Import.P2";

    private readonly AppDbContext _db;
    private readonly ILogger<PedrolloPLImportP2> _logger;

    // Configuration properties — populated from the import-worker layer's records in LoadConfiguration.
    private string? DataInboxName { get; set; }
    private string? DataInboxSource { get; set; }
    private string? StartDate { get; set; }
    private string? EndDate { get; set; }
    private string? ImportYear { get; set; }
    private bool IsEnabled { get; set; }

    // Cached deserialized data — valid only for the duration of a single Import() call.
    private List<List<object>>? _cachedRawData;
    private DataInbox? _cachedDataInbox;
    private Dictionary<string, string>? _regionCodeMap;

    public PedrolloPLImportP2(
        AppDbContext db,
        ILogger<PedrolloPLImportP2> logger)
    {
        _db = db;
        _logger = logger;
    }

    /// <summary>
    /// Runs the full import pipeline for the given import-worker layer:
    /// load/validate configuration, fetch and decode the DataInbox payload, map rows to records,
    /// create a new Import layer and persist the records. Rethrows on failure after logging.
    /// </summary>
    /// <param name="importWorker">Worker layer whose records carry this importer's configuration.</param>
    public override void Import(Layer importWorker)
    {
        try
        {
            _logger.LogInformation("{ImporterType}: Starting import for {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);

            // Clear cache at start so a reused instance never sees a previous run's data.
            _cachedRawData = null;
            _cachedDataInbox = null;
            _regionCodeMap = null;

            LoadConfiguration(importWorker);
            ValidateConfiguration();

            if (!IsEnabled)
            {
                _logger.LogInformation("{ImporterType}: Import disabled for {ImportWorkerName}",
                    ImporterType, importWorker.Name);
                return;
            }

            // Find and deserialize DataInbox data
            FindAndDeserializeDataInbox();

            // Load region code mapping from dictionary layer
            LoadRegionCodeMapping();

            // Map data from DataInbox to Layer records
            var mappedRecords = MapDataToRecords();

            // Create new Import layer
            var importLayer = CreateImportLayer(importWorker);

            // Save records to database
            SaveRecordsToLayer(importLayer, mappedRecords);

            _logger.LogInformation("{ImporterType}: Successfully completed import for {ImportWorkerName} - Created {RecordCount} records",
                ImporterType, importWorker.Name, mappedRecords.Count);
        }
        catch (Exception e)
        {
            _logger.LogError(e, "{ImporterType}: Failed to import {ImportWorkerName} ({ImportWorkerId})",
                ImporterType, importWorker.Name, importWorker.Id);
            throw;
        }
        finally
        {
            // Clear cache after import
            _cachedRawData = null;
            _cachedDataInbox = null;
            _regionCodeMap = null;
        }
    }

    /// <summary>Reads importer configuration values out of the worker layer's records.</summary>
    private void LoadConfiguration(Layer importWorker)
    {
        if (importWorker.Records == null) return;

        DataInboxName = GetRecordValue(importWorker.Records, "DataInboxName");
        DataInboxSource = GetRecordValue(importWorker.Records, "DataInboxSource");
        StartDate = GetRecordValue(importWorker.Records, "StartDate");
        EndDate = GetRecordValue(importWorker.Records, "EndDate");
        ImportYear = GetRecordValue(importWorker.Records, "ImportYear");
        IsEnabled = GetRecordValue(importWorker.Records, "IsEnabled") == "True";

        _logger.LogDebug(
            "{ImporterType}: Configuration loaded - DataInboxName: {DataInboxName}, Source: {Source}, Year: {Year}, Period: {StartDate} to {EndDate}, Enabled: {IsEnabled}",
            ImporterType, DataInboxName, DataInboxSource, ImportYear, StartDate, EndDate, IsEnabled);
    }

    /// <summary>
    /// Validates required configuration; throws <see cref="InvalidOperationException"/> listing all missing keys.
    /// NOTE(review): ImportYear is not validated here but is interpolated into the layer name — confirm whether it is optional.
    /// </summary>
    private void ValidateConfiguration()
    {
        var errors = new List<string>();

        if (string.IsNullOrEmpty(DataInboxName)) errors.Add("DataInboxName is required");
        if (string.IsNullOrEmpty(DataInboxSource)) errors.Add("DataInboxSource is required");
        if (string.IsNullOrEmpty(StartDate)) errors.Add("StartDate is required");
        if (string.IsNullOrEmpty(EndDate)) errors.Add("EndDate is required");

        if (errors.Any())
        {
            throw new InvalidOperationException($"Configuration validation failed: {string.Join(", ", errors)}");
        }

        _logger.LogDebug("{ImporterType}: Configuration validated successfully", ImporterType);
    }

    /// <summary>
    /// Locates the newest DataInbox row matching the configured Name/Source, base64-decodes its
    /// payload and deserializes it as a list of rows. Caches the result in instance fields.
    /// </summary>
    /// <exception cref="InvalidOperationException">No matching DataInbox, empty payload, bad base64 or bad JSON.</exception>
    private void FindAndDeserializeDataInbox()
    {
        _logger.LogDebug("{ImporterType}: Searching for DataInbox with Name='{DataInboxName}' and Source='{DataInboxSource}'",
            ImporterType, DataInboxName, DataInboxSource);

        // Find DataInbox by Name and Source, order by CreatedAt descending to get the latest
        var dataInbox = _db.DataInbox
            .Where(x => x.Name == DataInboxName && x.Source == DataInboxSource)
            .OrderByDescending(x => x.CreatedAt)
            .FirstOrDefault();

        if (dataInbox == null)
        {
            throw new InvalidOperationException(
                $"DataInbox not found with Name='{DataInboxName}' and Source='{DataInboxSource}'");
        }

        _logger.LogInformation("{ImporterType}: Found DataInbox - Id: {DataInboxId}, Name: {Name}, Source: {Source}, CreatedAt: {CreatedAt}",
            ImporterType, dataInbox.Id, dataInbox.Name, dataInbox.Source, dataInbox.CreatedAt);

        // Deserialize the data
        try
        {
            var data = Convert.FromBase64String(dataInbox.Data);
            var jsonString = Encoding.UTF8.GetString(data);

            _logger.LogDebug("{ImporterType}: Decoded {DataSize} bytes from base64",
                ImporterType, data.Length);

            // Deserialize as array of arrays: [["<nieznany>", 1183.15, ...], ["DOLNOŚLĄSKIE", ...]]
            var rawData = JsonSerializer.Deserialize<List<List<object>>>(jsonString);

            if (rawData == null || rawData.Count == 0)
            {
                throw new InvalidOperationException($"DataInbox.Data is empty for: {dataInbox.Name}");
            }

            _logger.LogInformation("{ImporterType}: Successfully deserialized {RowCount} rows from DataInbox",
                ImporterType, rawData.Count);

            // Log first few rows for debugging
            if (rawData.Count > 0)
            {
                var sampleSize = Math.Min(3, rawData.Count);
                _logger.LogDebug("{ImporterType}: Sample rows (first {SampleSize}):", ImporterType, sampleSize);
                for (int i = 0; i < sampleSize; i++)
                {
                    var row = rawData[i];
                    if (row.Count > 0)
                    {
                        var regionName = row[0]?.ToString() ?? "null";
                        var valueCount = row.Count - 1;
                        _logger.LogDebug("  [{Index}] Region: {Region}, Values: {ValueCount}",
                            i, regionName, valueCount);
                    }
                }
            }

            // Cache the deserialized data
            _cachedRawData = rawData;
            _cachedDataInbox = dataInbox;
        }
        catch (FormatException e)
        {
            _logger.LogError(e, "{ImporterType}: Invalid base64 data in DataInbox {DataInboxId}",
                ImporterType, dataInbox.Id);
            throw new InvalidOperationException($"Invalid base64 data in DataInbox: {dataInbox.Name}", e);
        }
        catch (JsonException e)
        {
            _logger.LogError(e, "{ImporterType}: Invalid JSON data in DataInbox {DataInboxId}",
                ImporterType, dataInbox.Id);
            throw new InvalidOperationException($"Invalid JSON data in DataInbox: {dataInbox.Name}", e);
        }
    }

    /// <summary>
    /// Builds the region-name → region-code map from the "L1-D-P2-CODES" dictionary layer
    /// (Record.Desc1 = region name, Record.Code = code). Lookup is case-insensitive.
    /// </summary>
    private void LoadRegionCodeMapping()
    {
        const string dictionaryLayerName = "L1-D-P2-CODES";

        _logger.LogDebug("{ImporterType}: Loading region code mapping from dictionary layer '{DictionaryLayerName}'",
            ImporterType, dictionaryLayerName);

        var dictionaryLayer = _db.Layers
            .Where(x => x.Name == dictionaryLayerName && x.Type == LayerType.Dictionary)
            .FirstOrDefault();

        if (dictionaryLayer == null)
        {
            throw new InvalidOperationException($"Dictionary layer '{dictionaryLayerName}' not found");
        }

        // Load records for the dictionary layer
        var records = _db.Records
            .Where(x => x.LayerId == dictionaryLayer.Id)
            .ToList();

        // Build mapping: Desc1 (region name) -> Code
        _regionCodeMap = records.ToDictionary(
            r => r.Desc1 ?? string.Empty,
            r => r.Code ?? string.Empty,
            StringComparer.OrdinalIgnoreCase);

        _logger.LogInformation("{ImporterType}: Loaded {MappingCount} region code mappings",
            ImporterType, _regionCodeMap.Count);
    }

    /// <summary>
    /// Maps each cached raw row ([regionName, 12 monthly values]) to 12 Records with
    /// Code = "{regionCode}{month:00}" and Value1 = the month's numeric value.
    /// Rows that are too short, have no region name, or an unmapped region are skipped with a warning.
    /// </summary>
    private List<Record> MapDataToRecords()
    {
        if (_cachedRawData == null)
        {
            throw new InvalidOperationException("Raw data not loaded. Call FindAndDeserializeDataInbox first.");
        }

        if (_regionCodeMap == null)
        {
            throw new InvalidOperationException("Region code mapping not loaded. Call LoadRegionCodeMapping first.");
        }

        var records = new List<Record>();
        var now = DateTime.UtcNow;

        _logger.LogDebug("{ImporterType}: Starting data mapping for {RowCount} rows",
            ImporterType, _cachedRawData.Count);

        foreach (var row in _cachedRawData)
        {
            if (row.Count < 13)
            {
                _logger.LogWarning("{ImporterType}: Skipping row with insufficient data - expected 13 elements, got {Count}",
                    ImporterType, row.Count);
                continue;
            }

            // First element is region name
            var regionName = row[0]?.ToString();
            if (string.IsNullOrEmpty(regionName))
            {
                _logger.LogWarning("{ImporterType}: Skipping row with empty region name", ImporterType);
                continue;
            }

            // Find region code from dictionary
            if (!_regionCodeMap.TryGetValue(regionName, out var regionCode))
            {
                _logger.LogWarning("{ImporterType}: Region code not found for '{RegionName}' - skipping",
                    ImporterType, regionName);
                continue;
            }

            // Create 12 records (one per month)
            for (int month = 1; month <= 12; month++)
            {
                var valueIndex = month; // row[1] = January, row[2] = February, etc.
                var value = ConvertToDouble(row[valueIndex]);

                // Create code: {regionCode}{month:00}
                var code = $"{regionCode}{month:00}";

                var record = new Record
                {
                    Id = Guid.NewGuid(),
                    Code = code,
                    Value1 = value,
                    CreatedAt = now,
                    ModifiedAt = now
                };

                records.Add(record);
            }

            _logger.LogDebug("{ImporterType}: Mapped region '{RegionName}' (code: {RegionCode}) to 12 records",
                ImporterType, regionName, regionCode);
        }

        _logger.LogInformation("{ImporterType}: Successfully mapped {RecordCount} records from {RowCount} rows",
            ImporterType, records.Count, _cachedRawData.Count);

        return records;
    }

    /// <summary>
    /// Converts a raw deserialized cell to a nullable double.
    /// Fixes two defects of the original inline conversion: (1) numbers serialized as JSON
    /// strings (JsonValueKind.String) were silently dropped; (2) string parsing used the current
    /// culture, so dot-decimal payloads (e.g. "1183.15") failed on locales with a comma decimal
    /// separator. Parsing is now invariant-culture. Returns null when no numeric value can be read.
    /// </summary>
    private static double? ConvertToDouble(object? valueObj)
    {
        switch (valueObj)
        {
            case null:
                return null;

            case JsonElement jsonElement:
                if (jsonElement.ValueKind == JsonValueKind.Number)
                {
                    return jsonElement.GetDouble();
                }
                if (jsonElement.ValueKind == JsonValueKind.String &&
                    double.TryParse(jsonElement.GetString(), NumberStyles.Float, CultureInfo.InvariantCulture, out var fromString))
                {
                    return fromString;
                }
                return null;

            case double d:
                return d;

            default:
                return double.TryParse(valueObj.ToString(), NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed)
                    ? parsed
                    : null;
        }
    }

    /// <summary>
    /// Creates and persists the new Import layer named "L{Number}-I-P2-{Year}-{Timestamp}".
    /// NOTE(review): Number = Layers.Count() + 1 is racy under concurrent imports — confirm single-writer assumption.
    /// </summary>
    private Layer CreateImportLayer(Layer importWorker)
    {
        var now = DateTime.UtcNow;
        var importLayer = new Layer
        {
            Id = Guid.NewGuid(),
            Number = _db.Layers.Count() + 1,
            ParentId = importWorker.Id,
            Type = LayerType.Import,
            IsCancelled = false,
            CreatedAt = now,
            ModifiedAt = now,
            CreatedById = importWorker.CreatedById,
            ModifiedById = importWorker.ModifiedById
        };

        // Format: L{Number}-I-P2-{Year}-{Timestamp}
        importLayer.Name = $"L{importLayer.Number}-I-P2-{ImportYear}-{now:yyyyMMddHHmm}";

        _logger.LogDebug("{ImporterType}: Creating import layer '{LayerName}' (Number: {Number})",
            ImporterType, importLayer.Name, importLayer.Number);

        _db.Layers.Add(importLayer);
        _db.SaveChanges();

        _logger.LogInformation("{ImporterType}: Created import layer '{LayerName}' with Id: {LayerId}",
            ImporterType, importLayer.Name, importLayer.Id);

        return importLayer;
    }

    /// <summary>Assigns all records to the given layer and persists them in one batch.</summary>
    private void SaveRecordsToLayer(Layer importLayer, List<Record> records)
    {
        _logger.LogDebug("{ImporterType}: Saving {RecordCount} records to layer {LayerId}",
            ImporterType, records.Count, importLayer.Id);

        // Set LayerId for all records
        foreach (var record in records)
        {
            record.LayerId = importLayer.Id;
        }

        _db.Records.AddRange(records);
        _db.SaveChanges();

        _logger.LogInformation("{ImporterType}: Successfully saved {RecordCount} records to layer '{LayerName}'",
            ImporterType, records.Count, importLayer.Name);
    }

    /// <summary>Returns Desc1 of the first record whose Code matches, or null when absent.</summary>
    private string? GetRecordValue(ICollection<Record> records, string code)
    {
        return records.FirstOrDefault(x => x.Code == code)?.Desc1;
    }
}

View File

@@ -0,0 +1,141 @@
@using MudBlazor.Internal
@using DiunaBI.Domain.Entities
@* Job queue list: filter panel (status / job type), clickable table of QueueJob rows, and pagination.
   State (jobs, isLoading, selectedStatus, selectedJobType) and handlers live in JobListComponent.razor.cs. *@
<MudExpansionPanels Class="mb-4">
    <MudExpansionPanel Icon="@Icons.Material.Filled.FilterList"
                       Text="Filters"
                       Expanded="true">
        @* NOTE(review): MudGrid may not define an AlignItems parameter; "Center" here (and
           "Center.Center" on the pagination grid below) looks ineffective — verify against MudBlazor docs. *@
        <MudGrid AlignItems="Center">
            <MudItem xs="12" sm="6" md="3">
                <MudSelect T="JobStatus?"
                           @bind-Value="selectedStatus"
                           Label="Status"
                           Placeholder="All statuses"
                           Clearable="true"
                           OnClearButtonClick="OnStatusClear">
                    @foreach (JobStatus status in Enum.GetValues(typeof(JobStatus)))
                    {
                        <MudSelectItem T="JobStatus?" Value="@status">@status.ToString()</MudSelectItem>
                    }
                </MudSelect>
            </MudItem>
            <MudItem xs="12" sm="6" md="3">
                <MudSelect T="JobType?"
                           @bind-Value="selectedJobType"
                           Label="Job Type"
                           Placeholder="All types"
                           Clearable="true"
                           OnClearButtonClick="OnJobTypeClear">
                    @foreach (JobType type in Enum.GetValues(typeof(JobType)))
                    {
                        <MudSelectItem T="JobType?" Value="@type">@type.ToString()</MudSelectItem>
                    }
                </MudSelect>
            </MudItem>
            <MudItem xs="12" sm="12" md="6" Class="d-flex justify-end align-center">
                <MudIconButton Icon="@Icons.Material.Filled.Refresh"
                               OnClick="LoadJobs"
                               Color="Color.Primary"
                               Size="Size.Medium"
                               Title="Refresh"/>
                <MudIconButton Icon="@Icons.Material.Filled.Clear"
                               OnClick="ClearFilters"
                               Color="Color.Default"
                               Size="Size.Medium"
                               Title="Clear filters"/>
            </MudItem>
        </MudGrid>
    </MudExpansionPanel>
</MudExpansionPanels>

<MudDivider Class="my-4"></MudDivider>

@* Left-click navigates to the job detail page; right-click (per-cell wrapper divs) opens it in a new tab. *@
<MudTable Items="jobs.Items"
          Dense="true"
          Hover="true"
          Loading="isLoading"
          LoadingProgressColor="Color.Primary"
          OnRowClick="@((TableRowClickEventArgs<QueueJob> args) => OnRowClick(args.Item))"
          T="QueueJob"
          Style="cursor: pointer;">
    <HeaderContent>
        <MudTh>Layer Name</MudTh>
        <MudTh>Plugin</MudTh>
        <MudTh>Type</MudTh>
        <MudTh>Status</MudTh>
        <MudTh>Priority</MudTh>
        <MudTh>Retry</MudTh>
        <MudTh>Created</MudTh>
        <MudTh>Last Attempt</MudTh>
    </HeaderContent>
    <RowTemplate Context="row">
        <MudTd DataLabel="Layer Name">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                @row.LayerName
            </div>
        </MudTd>
        <MudTd DataLabel="Plugin">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                @row.PluginName
            </div>
        </MudTd>
        <MudTd DataLabel="Type">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                <MudChip T="string" Size="Size.Small" Color="@GetJobTypeColor(row.JobType)">@row.JobType</MudChip>
            </div>
        </MudTd>
        <MudTd DataLabel="Status">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                <MudChip T="string" Size="Size.Small" Color="@GetStatusColor(row.Status)">@row.Status</MudChip>
            </div>
        </MudTd>
        <MudTd DataLabel="Priority">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                @row.Priority
            </div>
        </MudTd>
        <MudTd DataLabel="Retry">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                @row.RetryCount / @row.MaxRetries
            </div>
        </MudTd>
        <MudTd DataLabel="Created">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                @row.CreatedAt.ToString("yyyy-MM-dd HH:mm")
            </div>
        </MudTd>
        <MudTd DataLabel="Last Attempt">
            <div @oncontextmenu="@(async (e) => await OnRowRightClick(e, row))" @oncontextmenu:preventDefault="true">
                @(row.LastAttemptAt?.ToString("yyyy-MM-dd HH:mm") ?? "-")
            </div>
        </MudTd>
    </RowTemplate>
    <NoRecordsContent>
        <MudText>No jobs to display</MudText>
    </NoRecordsContent>
    <LoadingContent>
        Loading...
    </LoadingContent>
</MudTable>

@* Server-side pagination summary and pager; hidden when the result set is empty. *@
@if (jobs.TotalCount > 0)
{
    <MudGrid Class="mt-4" AlignItems="Center.Center">
        <MudItem xs="12" sm="6">
            <MudText Typo="Typo.body2">
                Results @((jobs.Page - 1) * jobs.PageSize + 1) - @Math.Min(jobs.Page * jobs.PageSize, jobs.TotalCount)
                of @jobs.TotalCount
            </MudText>
        </MudItem>
        <MudItem xs="12" sm="6" Class="d-flex justify-end">
            <MudPagination Count="jobs.TotalPages"
                           Selected="jobs.Page"
                           SelectedChanged="OnPageChanged"
                           ShowFirstButton="true"
                           ShowLastButton="true"
                           Variant="Variant.Outlined"
            />
        </MudItem>
    </MudGrid>
}

View File

@@ -0,0 +1,110 @@
using DiunaBI.UI.Shared.Services;
using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.Components.Web;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using MudBlazor;
using Microsoft.JSInterop;
namespace DiunaBI.UI.Shared.Components;
/// <summary>
/// Code-behind for JobListComponent.razor: loads a paged, filterable list of queue jobs
/// and handles navigation to the job detail page. Fields below are bound from the razor markup,
/// so their names are part of the component's contract with the template.
/// </summary>
public partial class JobListComponent : ComponentBase
{
    [Inject] private JobService JobService { get; set; } = default!;
    [Inject] private ISnackbar Snackbar { get; set; } = default!;
    [Inject] private NavigationManager NavigationManager { get; set; } = default!;
    [Inject] private IJSRuntime JSRuntime { get; set; } = default!;

    // Current page of jobs as returned by the API.
    private PagedResult<QueueJob> jobs = new();
    // True while a request to JobService is in flight (drives the table's loading indicator).
    private bool isLoading = false;
    // Paging state; page size is fixed at 50.
    private int currentPage = 1;
    private int pageSize = 50;
    // Active filters; null means "no filter".
    private JobStatus? selectedStatus = null;
    private JobType? selectedJobType = null;

    protected override async Task OnInitializedAsync()
    {
        await LoadJobs();
    }

    /// <summary>Fetches the current page with the active filters; shows a snackbar on failure.</summary>
    private async Task LoadJobs()
    {
        isLoading = true;
        try
        {
            jobs = await JobService.GetJobsAsync(currentPage, pageSize, selectedStatus, selectedJobType);
        }
        catch (Exception ex)
        {
            Console.WriteLine($"Loading jobs failed: {ex.Message}");
            Snackbar.Add("Failed to load jobs", Severity.Error);
        }
        finally
        {
            isLoading = false;
        }
    }

    private async Task OnPageChanged(int page)
    {
        currentPage = page;
        await LoadJobs();
    }

    /// <summary>Resets both filters and returns to page 1.</summary>
    private async Task ClearFilters()
    {
        selectedStatus = null;
        selectedJobType = null;
        currentPage = 1;
        await LoadJobs();
    }

    private async Task OnStatusClear()
    {
        selectedStatus = null;
        currentPage = 1;
        await LoadJobs();
    }

    private async Task OnJobTypeClear()
    {
        selectedJobType = null;
        currentPage = 1;
        await LoadJobs();
    }

    /// <summary>Left-click: navigate to the job's detail page in this tab.</summary>
    private void OnRowClick(QueueJob job)
    {
        NavigationManager.NavigateTo($"/jobs/{job.Id}");
    }

    /// <summary>Right-click: open the job's detail page in a new browser tab. (MouseEventArgs is unused.)</summary>
    private async Task OnRowRightClick(MouseEventArgs e, QueueJob job)
    {
        var url = NavigationManager.ToAbsoluteUri($"/jobs/{job.Id}").ToString();
        await JSRuntime.InvokeVoidAsync("open", url, "_blank");
    }

    /// <summary>Maps a job status to the chip color used in the table.</summary>
    private Color GetStatusColor(JobStatus status)
    {
        return status switch
        {
            JobStatus.Pending => Color.Default,
            JobStatus.Running => Color.Info,
            JobStatus.Completed => Color.Success,
            JobStatus.Failed => Color.Error,
            JobStatus.Retrying => Color.Warning,
            _ => Color.Default
        };
    }

    /// <summary>Maps a job type to the chip color used in the table.</summary>
    private Color GetJobTypeColor(JobType jobType)
    {
        return jobType switch
        {
            JobType.Import => Color.Primary,
            JobType.Process => Color.Secondary,
            _ => Color.Default
        };
    }
}

View File

@@ -38,6 +38,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<AuthService>(); services.AddScoped<AuthService>();
services.AddScoped<LayerService>(); services.AddScoped<LayerService>();
services.AddScoped<DataInboxService>(); services.AddScoped<DataInboxService>();
services.AddScoped<JobService>();
// Filter state services (scoped to maintain state during user session) // Filter state services (scoped to maintain state during user session)
services.AddScoped<LayerFilterStateService>(); services.AddScoped<LayerFilterStateService>();

View File

@@ -37,6 +37,7 @@
<MudNavLink Href="/dashboard" Icon="@Icons.Material.Filled.Dashboard">Dashboard</MudNavLink> <MudNavLink Href="/dashboard" Icon="@Icons.Material.Filled.Dashboard">Dashboard</MudNavLink>
<MudNavLink Href="/layers" Icon="@Icons.Material.Filled.Inventory">Layers</MudNavLink> <MudNavLink Href="/layers" Icon="@Icons.Material.Filled.Inventory">Layers</MudNavLink>
<MudNavLink Href="/datainbox" Icon="@Icons.Material.Filled.Inbox">Data Inbox</MudNavLink> <MudNavLink Href="/datainbox" Icon="@Icons.Material.Filled.Inbox">Data Inbox</MudNavLink>
<MudNavLink Href="/jobs" Icon="@Icons.Material.Filled.WorkHistory">Jobs</MudNavLink>
</MudNavMenu> </MudNavMenu>
</MudDrawer> </MudDrawer>

View File

@@ -0,0 +1,245 @@
@page "/jobs/{id:guid}"
@using DiunaBI.UI.Shared.Services
@using DiunaBI.Domain.Entities
@using MudBlazor
@inject JobService JobService
@inject NavigationManager NavigationManager
@inject ISnackbar Snackbar

@* Read-only detail view for a single QueueJob. Retry is offered only for Failed jobs,
   Cancel only for Pending/Retrying ones; both reload the job after the API call. *@
<MudCard>
    <MudCardHeader>
        <CardHeaderContent>
            <MudText Typo="Typo.h5">Job Details</MudText>
        </CardHeaderContent>
        <CardHeaderActions>
            @if (job != null && job.Status == JobStatus.Failed)
            {
                <MudButton Variant="Variant.Filled"
                           Color="Color.Warning"
                           OnClick="RetryJob"
                           StartIcon="@Icons.Material.Filled.Refresh">
                    Retry
                </MudButton>
            }
            @if (job != null && (job.Status == JobStatus.Pending || job.Status == JobStatus.Retrying))
            {
                <MudButton Variant="Variant.Filled"
                           Color="Color.Error"
                           OnClick="CancelJob"
                           StartIcon="@Icons.Material.Filled.Cancel">
                    Cancel
                </MudButton>
            }
            <MudButton Variant="Variant.Text"
                       OnClick="GoBack"
                       StartIcon="@Icons.Material.Filled.ArrowBack">
                Back to List
            </MudButton>
        </CardHeaderActions>
    </MudCardHeader>
    <MudCardContent>
        @if (isLoading)
        {
            <MudProgressLinear Color="Color.Primary" Indeterminate="true" />
        }
        else if (job == null)
        {
            <MudAlert Severity="Severity.Error">Job not found</MudAlert>
        }
        else
        {
            <MudGrid>
                <MudItem xs="12" md="6">
                    <MudTextField Value="@job.LayerName"
                                  Label="Layer Name"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                <MudItem xs="12" md="6">
                    <MudTextField Value="@job.PluginName"
                                  Label="Plugin Name"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                <MudItem xs="12" md="4">
                    <MudTextField Value="@job.JobType.ToString()"
                                  Label="Job Type"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                <MudItem xs="12" md="4">
                    <MudTextField Value="@job.Status.ToString()"
                                  Label="Status"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"
                                  Adornment="Adornment.Start"
                                  AdornmentIcon="@GetStatusIcon(job.Status)"
                                  AdornmentColor="@GetStatusColor(job.Status)"/>
                </MudItem>
                <MudItem xs="12" md="4">
                    <MudTextField Value="@job.Priority.ToString()"
                                  Label="Priority"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                <MudItem xs="12" md="6">
                    <MudTextField Value="@job.CreatedAt.ToString("yyyy-MM-dd HH:mm:ss")"
                                  Label="Created At"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                <MudItem xs="12" md="6">
                    <MudTextField Value="@(job.LastAttemptAt?.ToString("yyyy-MM-dd HH:mm:ss") ?? "-")"
                                  Label="Last Attempt At"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                <MudItem xs="12" md="6">
                    <MudTextField Value="@(job.CompletedAt?.ToString("yyyy-MM-dd HH:mm:ss") ?? "-")"
                                  Label="Completed At"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                <MudItem xs="12" md="6">
                    <MudTextField Value="@($"{job.RetryCount} / {job.MaxRetries}")"
                                  Label="Retry Count / Max Retries"
                                  Variant="Variant.Outlined"
                                  ReadOnly="true"
                                  FullWidth="true"/>
                </MudItem>
                @if (!string.IsNullOrEmpty(job.LastError))
                {
                    <MudItem xs="12">
                        <MudTextField Value="@job.LastError"
                                      Label="Last Error"
                                      Variant="Variant.Outlined"
                                      ReadOnly="true"
                                      FullWidth="true"
                                      Lines="5"
                                      AdornmentIcon="@Icons.Material.Filled.Error"
                                      AdornmentColor="Color.Error"/>
                    </MudItem>
                }
                <MudItem xs="12">
                    <MudDivider Class="my-4"/>
                </MudItem>
                <MudItem xs="12">
                    <MudButton Variant="Variant.Outlined"
                               Color="Color.Primary"
                               OnClick="@(() => NavigationManager.NavigateTo($"/layers/{job.LayerId}"))"
                               StartIcon="@Icons.Material.Filled.Layers">
                        View Layer Details
                    </MudButton>
                </MudItem>
            </MudGrid>
        }
    </MudCardContent>
</MudCard>

@code {
    // Job id from the route (/jobs/{id:guid}).
    [Parameter]
    public Guid Id { get; set; }

    // Loaded job; null when not found or the request failed.
    private QueueJob? job;
    private bool isLoading = true;

    protected override async Task OnInitializedAsync()
    {
        await LoadJob();
    }

    // Fetches the job by route id; shows a snackbar on failure.
    private async Task LoadJob()
    {
        isLoading = true;
        try
        {
            job = await JobService.GetJobByIdAsync(Id);
        }
        catch (Exception ex)
        {
            Console.WriteLine($"Loading job failed: {ex.Message}");
            Snackbar.Add("Failed to load job", Severity.Error);
        }
        finally
        {
            isLoading = false;
        }
    }

    // Asks the API to reset the job to Pending, then reloads to reflect the new status.
    private async Task RetryJob()
    {
        if (job == null) return;

        var success = await JobService.RetryJobAsync(job.Id);
        if (success)
        {
            Snackbar.Add("Job reset to Pending status", Severity.Success);
            await LoadJob();
        }
        else
        {
            Snackbar.Add("Failed to retry job", Severity.Error);
        }
    }

    // Asks the API to cancel the job, then reloads to reflect the new status.
    private async Task CancelJob()
    {
        if (job == null) return;

        var success = await JobService.CancelJobAsync(job.Id);
        if (success)
        {
            Snackbar.Add("Job cancelled", Severity.Success);
            await LoadJob();
        }
        else
        {
            Snackbar.Add("Failed to cancel job", Severity.Error);
        }
    }

    private void GoBack()
    {
        NavigationManager.NavigateTo("/jobs");
    }

    // Status → adornment color (mirrors JobListComponent's mapping).
    private Color GetStatusColor(JobStatus status)
    {
        return status switch
        {
            JobStatus.Pending => Color.Default,
            JobStatus.Running => Color.Info,
            JobStatus.Completed => Color.Success,
            JobStatus.Failed => Color.Error,
            JobStatus.Retrying => Color.Warning,
            _ => Color.Default
        };
    }

    // Status → adornment icon.
    private string GetStatusIcon(JobStatus status)
    {
        return status switch
        {
            JobStatus.Pending => Icons.Material.Filled.HourglassEmpty,
            JobStatus.Running => Icons.Material.Filled.PlayArrow,
            JobStatus.Completed => Icons.Material.Filled.CheckCircle,
            JobStatus.Failed => Icons.Material.Filled.Error,
            JobStatus.Retrying => Icons.Material.Filled.Refresh,
            _ => Icons.Material.Filled.Help
        };
    }
}

View File

@@ -0,0 +1,8 @@
@page "/jobs"
@using DiunaBI.UI.Shared.Components

@* Jobs list page: thin route wrapper that hosts JobListComponent. *@
<PageTitle>Jobs</PageTitle>

<MudContainer MaxWidth="MaxWidth.ExtraExtraLarge">
    <JobListComponent />
</MudContainer>

View File

@@ -3,7 +3,9 @@
@using DiunaBI.Application.DTOModels @using DiunaBI.Application.DTOModels
@using MudBlazor @using MudBlazor
@inject LayerService LayerService @inject LayerService LayerService
@inject JobService JobService
@inject NavigationManager NavigationManager @inject NavigationManager NavigationManager
@inject ISnackbar Snackbar
<MudCard> <MudCard>
<MudCardHeader> <MudCardHeader>
@@ -11,18 +13,24 @@
<MudText Typo="Typo.h5">Layer Details</MudText> <MudText Typo="Typo.h5">Layer Details</MudText>
</CardHeaderContent> </CardHeaderContent>
<CardHeaderActions> <CardHeaderActions>
<!-- @if (layer != null && layer.Type == LayerType.Administration && IsWorkerLayer())
<MudButton Variant="Variant.Text" OnClick="Export">Export</MudButton>
@if (layer != null && layer.Type == LayerType.Administration)
{ {
<MudButton Variant="Variant.Text" Href="@($"/layers/edit/{layer.Id}/duplicate")">Duplicate</MudButton> <MudButton Variant="Variant.Filled"
<MudButton Variant="Variant.Text" Href="@($"/layers/edit/{layer.Id}")">Edit</MudButton> Color="Color.Primary"
} OnClick="RunNow"
@if (layer != null && layer.Type == LayerType.Processed) Disabled="isRunningJob"
StartIcon="@Icons.Material.Filled.PlayArrow">
@if (isRunningJob)
{ {
<MudButton Variant="Variant.Text" OnClick="ProcessLayer">Process Layer</MudButton> <MudProgressCircular Size="Size.Small" Indeterminate="true"/>
<span style="margin-left: 8px;">Creating Job...</span>
}
else
{
<span>Run Now</span>
}
</MudButton>
} }
-->
<MudButton Variant="Variant.Text" OnClick="GoBack" StartIcon="@Icons.Material.Filled.ArrowBack">Back to List</MudButton> <MudButton Variant="Variant.Text" OnClick="GoBack" StartIcon="@Icons.Material.Filled.ArrowBack">Back to List</MudButton>
</CardHeaderActions> </CardHeaderActions>
</MudCardHeader> </MudCardHeader>

View File

@@ -10,9 +10,6 @@ public partial class LayerDetailPage : ComponentBase
[Parameter] [Parameter]
public Guid Id { get; set; } public Guid Id { get; set; }
[Inject]
private ISnackbar Snackbar { get; set; } = null!;
[Inject] [Inject]
private IDialogService DialogService { get; set; } = null!; private IDialogService DialogService { get; set; } = null!;
@@ -413,4 +410,54 @@ public partial class LayerDetailPage : ComponentBase
{ {
return userCache.TryGetValue(userId, out var username) ? username : string.Empty; return userCache.TryGetValue(userId, out var username) ? username : string.Empty;
} }
// Run Now button methods
private bool isRunningJob = false;
private bool IsWorkerLayer()
{
if (layer?.Records == null) return false;
var typeRecord = layer.Records.FirstOrDefault(x => x.Code == "Type");
return typeRecord?.Desc1 == "ImportWorker" || typeRecord?.Desc1 == "ProcessWorker";
}
private async Task RunNow()
{
if (layer == null) return;
isRunningJob = true;
try
{
var result = await JobService.CreateJobForLayerAsync(layer.Id);
if (result != null && result.Success)
{
if (result.Existing)
{
Snackbar.Add($"Job already exists: {result.Message}", Severity.Info);
}
else
{
Snackbar.Add("Job created successfully!", Severity.Success);
}
// Navigate to job detail page
NavigationManager.NavigateTo($"/jobs/{result.JobId}");
}
else
{
Snackbar.Add("Failed to create job", Severity.Error);
}
}
catch (Exception ex)
{
Console.WriteLine($"Error creating job: {ex.Message}");
Snackbar.Add($"Error creating job: {ex.Message}", Severity.Error);
}
finally
{
isRunningJob = false;
}
}
} }

View File

@@ -0,0 +1,104 @@
using System.Net.Http.Json;
using System.Text.Json;
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
namespace DiunaBI.UI.Shared.Services;
/// <summary>
/// Typed HTTP client for the Jobs API: paged listing with optional filters, single-job lookup,
/// retry/cancel actions, queue statistics, and ad-hoc job creation for a layer.
/// </summary>
public class JobService
{
    private readonly HttpClient _httpClient;

    // Case-insensitive property matching so camelCase JSON binds to PascalCase DTO properties.
    private readonly JsonSerializerOptions _jsonOptions = new()
    {
        PropertyNameCaseInsensitive = true
    };

    public JobService(HttpClient httpClient)
    {
        _httpClient = httpClient;
    }

    /// <summary>
    /// Fetches one page of jobs. Enum filters are sent as their integer values; omitted filters
    /// are left out of the query string entirely. Throws on a non-success HTTP status.
    /// </summary>
    public async Task<PagedResult<QueueJob>> GetJobsAsync(int page = 1, int pageSize = 50, JobStatus? status = null, JobType? jobType = null, Guid? layerId = null)
    {
        // Translate 1-based page into the API's start/limit offsets.
        var parameters = new List<string>
        {
            $"start={(page - 1) * pageSize}",
            $"limit={pageSize}"
        };
        if (status.HasValue)
        {
            parameters.Add($"status={(int)status.Value}");
        }
        if (jobType.HasValue)
        {
            parameters.Add($"jobType={(int)jobType.Value}");
        }
        if (layerId.HasValue)
        {
            parameters.Add($"layerId={layerId.Value}");
        }

        var response = await _httpClient.GetAsync($"Jobs?{string.Join("&", parameters)}");
        response.EnsureSuccessStatusCode();

        var payload = await response.Content.ReadAsStringAsync();
        var page_result = JsonSerializer.Deserialize<PagedResult<QueueJob>>(payload, _jsonOptions);
        return page_result ?? new PagedResult<QueueJob>();
    }

    /// <summary>Returns the job with the given id, or null when the API responds with an error status.</summary>
    public async Task<QueueJob?> GetJobByIdAsync(Guid id)
    {
        var response = await _httpClient.GetAsync($"Jobs/{id}");
        return response.IsSuccessStatusCode
            ? await response.Content.ReadFromJsonAsync<QueueJob>()
            : null;
    }

    /// <summary>Requests a retry of the job; true when the API accepted the request.</summary>
    public async Task<bool> RetryJobAsync(Guid id)
    {
        var response = await _httpClient.PostAsync($"Jobs/{id}/retry", null);
        return response.IsSuccessStatusCode;
    }

    /// <summary>Cancels the job via HTTP DELETE; true when the API accepted the request.</summary>
    public async Task<bool> CancelJobAsync(Guid id)
    {
        var response = await _httpClient.DeleteAsync($"Jobs/{id}");
        return response.IsSuccessStatusCode;
    }

    /// <summary>Returns aggregate queue counters, or null when the API responds with an error status.</summary>
    public async Task<JobStats?> GetStatsAsync()
    {
        var response = await _httpClient.GetAsync("Jobs/stats");
        return response.IsSuccessStatusCode
            ? await response.Content.ReadFromJsonAsync<JobStats>()
            : null;
    }

    /// <summary>Creates (or finds an existing) job for the given layer, or returns null on an error status.</summary>
    public async Task<CreateJobResult?> CreateJobForLayerAsync(Guid layerId)
    {
        var response = await _httpClient.PostAsync($"Jobs/create-for-layer/{layerId}", null);
        return response.IsSuccessStatusCode
            ? await response.Content.ReadFromJsonAsync<CreateJobResult>()
            : null;
    }
}
/// <summary>
/// Aggregate queue counters as returned by the "Jobs/stats" endpoint
/// (consumed by <see cref="JobService.GetStatsAsync"/>). One counter per job status, plus a total.
/// </summary>
public class JobStats
{
    public int Pending { get; set; }
    public int Running { get; set; }
    public int Completed { get; set; }
    public int Failed { get; set; }
    public int Retrying { get; set; }
    public int Total { get; set; }
}
/// <summary>
/// Response of the "Jobs/create-for-layer/{layerId}" endpoint
/// (consumed by <see cref="JobService.CreateJobForLayerAsync"/>).
/// </summary>
public class CreateJobResult
{
    public bool Success { get; set; }
    // Id of the created — or, when Existing is true, the already-queued — job.
    public Guid JobId { get; set; }
    public string? Message { get; set; }
    // True when a matching job already existed and no new one was created.
    public bool Existing { get; set; }
}