Compare commits

...

23 Commits

Author SHA1 Message Date
0e3b3933f0 WIP: p2 plugin
Some checks failed
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m14s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m10s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Failing after 1m12s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Failing after 1m7s
2025-12-03 13:33:38 +01:00
445c07a8d8 Morska plugins refactor 2025-12-02 21:24:37 +01:00
3f8e62fbb8 WIP: queue engine 2025-12-02 15:35:04 +01:00
248106a239 Plugins little refactor 2025-12-02 15:21:27 +01:00
587d4d66f8 Pedrollo plugins 2025-12-02 14:31:21 +01:00
f68e57ce3b Small UI fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m35s
Build Docker Images / build-and-push (push) Successful in 1m42s
2025-12-02 13:43:01 +01:00
e70a8dda6e Remember list filters 2025-12-02 13:23:03 +01:00
89859cd4a3 Record histori is working 2025-12-02 13:14:09 +01:00
0c6848556b WIP: Record history 2025-12-01 18:37:09 +01:00
c8ded1f0a4 Edit Records 2025-12-01 17:56:17 +01:00
7ea5ed506e Filter Layers by Type
All checks were successful
Build Docker Images / test (push) Successful in 1m37s
Build Docker Images / build-and-push (push) Successful in 1m35s
2025-12-01 13:21:45 +01:00
4d7df85df1 DataInbox Detail 2025-12-01 13:00:01 +01:00
3d654d972e DataInbox list 2025-12-01 12:55:47 +01:00
a71b6feefc Pagination style fix 2025-12-01 12:35:22 +01:00
cb0d050ad4 Imports for 2025.12 2025-11-30 16:09:32 +01:00
24387bf96c debug
All checks were successful
Build Docker Images / test (push) Successful in 1m47s
Build Docker Images / build-and-push (push) Successful in 1m55s
2025-11-28 16:15:39 +01:00
87d19dcadf App logo
All checks were successful
Build Docker Images / test (push) Successful in 1m43s
Build Docker Images / build-and-push (push) Successful in 1m47s
2025-11-28 12:13:19 +01:00
a289690b6b Add custom app name per instance 2025-11-28 11:44:19 +01:00
57f1359c96 Bu9ild path fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m31s
Build Docker Images / build-and-push (push) Successful in 1m37s
2025-11-28 11:29:38 +01:00
b0e77ec835 Enable Main build
Some checks failed
Build Docker Images / test (push) Failing after 26s
Build Docker Images / build-and-push (push) Failing after 11s
2025-11-28 11:26:58 +01:00
b3053b859a Last refactor steps (I hope) 2025-11-28 11:26:17 +01:00
07423023a0 after refactor cleanup 2025-11-28 11:21:22 +01:00
5db6de1503 Merge pull request 'ddd-refactor' (#2) from ddd-refactor into main
Some checks failed
BuildApp / build-frontend (push) Successful in 1m54s
BuildApp / build-backend (push) Failing after 26s
Reviewed-on: #2
2025-11-28 11:14:42 +01:00
340 changed files with 4987 additions and 13905 deletions

View File

@@ -1,119 +1,148 @@
name: BuildApp name: Build Docker Images
on: on:
workflow_dispatch: {}
push: push:
branches: branches:
- main - main
workflow_dispatch: {}
concurrency: concurrency:
group: build-${{ github.ref }} group: build-${{ github.ref }}
cancel-in-progress: false cancel-in-progress: false
jobs: jobs:
build-frontend: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
matrix:
customer:
- name: Morska
plugin_project: DiunaBI.Plugins.Morska
- name: PedrolloPL
plugin_project: DiunaBI.Plugins.PedrolloPL
steps: steps:
- name: Checkout - name: Checkout
uses: https://github.com/actions/checkout@v4 uses: https://github.com/actions/checkout@v4
- name: Use Node.js 20 - name: Setup .NET 10
uses: https://github.com/actions/setup-node@v4
with:
node-version: 20
- name: Install Angular CLI
run: npm install -g @angular/cli
- name: Install PNPM
run: npm install -g pnpm
- name: Install dependencies
working-directory: src/Frontend
run: pnpm install
- name: Build Angular
working-directory: src/Frontend
run: ng build --configuration=production
- name: Upload artifact (frontend)
uses: https://github.com/actions/upload-artifact@v3
with:
name: frontend
path: src/Frontend/dist
if-no-files-found: error
retention-days: 7
build-backend:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Setup .NET 8
uses: https://github.com/actions/setup-dotnet@v4 uses: https://github.com/actions/setup-dotnet@v4
with: with:
dotnet-version: 8.0.x dotnet-version: 10.0.x
- name: Restore dependencies - name: Restore dependencies
working-directory: src/Backend working-directory: .
run: dotnet restore DiunaBI.sln run: |
dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins - name: Build solution and prepare plugins
working-directory: src/Backend working-directory: .
run: | run: |
set -e set -e
dotnet build DiunaBI.sln --configuration Release # Build only required projects — skip DiunaBI.UI.Mobile
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release
mkdir -p DiunaBI.Tests/bin/Release/net8.0/Plugins dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/ dotnet build ${{ matrix.customer.plugin_project }}/${{ matrix.customer.plugin_project }}.csproj --configuration Release
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Core.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/
ls -la DiunaBI.Tests/bin/Release/net8.0/Plugins/ mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp ${{ matrix.customer.plugin_project }}/bin/Release/net10.0/${{ matrix.customer.plugin_project }}.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
- name: Run Tests - name: Run Tests
working-directory: src/Backend working-directory: .
run: | run: |
dotnet add DiunaBI.Tests/DiunaBI.Tests.csproj package coverlet.collector
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \ dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \ --configuration Release \
--no-restore \ --no-restore \
--logger "trx;LogFileName=test-results.trx" \ --logger "trx;LogFileName=test-results-${{ matrix.customer.name }}.trx" \
--collect:"XPlat Code Coverage" \ --collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly" --filter "Category!=LocalOnly" || true
- name: Publish Test Results - name: Publish Test Results
uses: https://github.com/actions/upload-artifact@v3 uses: https://github.com/actions/upload-artifact@v3
if: success() || failure() if: success() || failure()
with: with:
name: test-results name: test-results-${{ matrix.customer.name }}
path: | path: |
src/Backend/DiunaBI.Tests/TestResults/*.trx DiunaBI.Tests/TestResults/*.trx
src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
retention-days: 7 retention-days: 7
- name: Publish WebAPI build-and-push:
if: success() runs-on: ubuntu-latest
working-directory: src/Backend needs: test
run: | if: success() || failure()
dotnet publish DiunaBI.WebAPI/DiunaBI.WebAPI.csproj \ strategy:
--configuration Release \ matrix:
--framework net8.0 \ customer:
--self-contained false \ - name: Morska
--output ../../build/webapi plugin_project: DiunaBI.Plugins.Morska
mkdir -p ../../build/webapi/Plugins image_suffix: morska
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll ../../build/webapi/Plugins/ - name: PedrolloPL
ls -la ../../build/webapi/Plugins/ plugin_project: DiunaBI.Plugins.PedrolloPL
image_suffix: pedrollopl
- name: Clean up sensitive files steps:
working-directory: build/webapi - name: Debug secrets
run: | run: |
rm -f appsettings.Development.json || true echo "User length: ${#REGISTRY_USER}"
rm -f client_secrets.Development.json || true echo "Token length: ${#REGISTRY_TOKEN}"
env:
REGISTRY_USER: ${{ secrets.REGISTRY_USER }}
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
- name: Upload artifact (webapi) - name: Checkout code
uses: https://github.com/actions/upload-artifact@v3 uses: https://github.com/actions/checkout@v4
with:
name: webapi - name: Set up Docker Buildx
path: build/webapi uses: https://github.com/docker/setup-buildx-action@v3
if-no-files-found: error
retention-days: 7 - name: Log in to Gitea Container Registry
run: |
echo "${{ secrets.REGISTRY_TOKEN }}" | docker login code.bim-it.pl -u "${{ secrets.REGISTRY_USER }}" --password-stdin
- name: Build and push API image
working-directory: .
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
--build-arg PLUGIN_PROJECT=${{ matrix.customer.plugin_project }} \
-f DiunaBI.API/Dockerfile \
-t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest \
-t code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
--push \
.
- name: Build and push UI image
working-directory: .
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.UI.Web/Dockerfile \
-t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest \
-t code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }} \
--push \
.
- name: Output build info
run: |
echo "## 🐳 Docker Images Built - ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
echo "**Customer:** ${{ matrix.customer.name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:latest" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui-${{ matrix.customer.image_suffix }}:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY

View File

@@ -1,130 +0,0 @@
name: Build Docker Images
on:
push:
branches:
- ddd-refactor
workflow_dispatch: {}
concurrency:
group: build-${{ github.ref }}
cancel-in-progress: false
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Setup .NET 10
uses: https://github.com/actions/setup-dotnet@v4
with:
dotnet-version: 10.0.x
- name: Restore dependencies
working-directory: src/Backend
run: |
dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins
working-directory: src/Backend
run: |
set -e
# Build only required projects — skip DiunaBI.UI.Mobile
dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release
dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release
mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
- name: Run Tests
working-directory: src/Backend
run: |
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \
--no-restore \
--logger "trx;LogFileName=test-results.trx" \
--collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly" || true
- name: Publish Test Results
uses: https://github.com/actions/upload-artifact@v3
if: success() || failure()
with:
name: test-results
path: |
src/Backend/DiunaBI.Tests/TestResults/*.trx
src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
retention-days: 7
build-and-push:
runs-on: ubuntu-latest
needs: test
if: success() || failure()
steps:
- name: Debug secrets
run: |
echo "User length: ${#REGISTRY_USER}"
echo "Token length: ${#REGISTRY_TOKEN}"
env:
REGISTRY_USER: ${{ secrets.REGISTRY_USER }}
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
- name: Checkout code
uses: https://github.com/actions/checkout@v4
- name: Set up Docker Buildx
uses: https://github.com/docker/setup-buildx-action@v3
- name: Log in to Gitea Container Registry
run: |
echo "${{ secrets.REGISTRY_TOKEN }}" | docker login code.bim-it.pl -u "${{ secrets.REGISTRY_USER }}" --password-stdin
- name: Build and push API image
working-directory: src/Backend
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.API/Dockerfile \
-t code.bim-it.pl/mz/diunabi-api:latest \
-t code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }} \
--push \
.
- name: Build and push UI image
working-directory: src/Backend
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.UI.Web/Dockerfile \
-t code.bim-it.pl/mz/diunabi-ui:latest \
-t code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }} \
--push \
.
- name: Output build info
run: |
echo "## 🐳 Docker Images Built" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:latest" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:latest" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY

View File

@@ -1,122 +0,0 @@
name: ReleaseApp (JS finder + download)
on:
workflow_dispatch: {}
jobs:
release:
runs-on: ubuntu-latest
env:
GITEA_BASE_URL: https://code.bim-it.pl
OWNER: mz
REPO: DiunaBI
REQUIRED_ARTIFACTS: frontend,webapi
SCAN_LIMIT: "100"
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Use Node.js 20
uses: https://github.com/actions/setup-node@v4
with:
node-version: 20
- name: Install unzip (for extraction)
run: |
sudo apt-get update
sudo apt-get install -y unzip
- name: Resolve latest run that exposes required artifacts
id: resolve
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
run: |
node .gitea/scripts/getLatestRunWithArtifacts.js
echo "Resolved run_id: $(cat .gitea/.cache/run_id)"
echo "run_id=$(cat .gitea/.cache/run_id)" >> "$GITHUB_OUTPUT"
- name: Download frontend artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: frontend
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/frontend
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Download webapi artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: webapi
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/webapi
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Show artifact structure
run: |
echo "::group::frontend"
ls -laR artifacts/frontend || true
echo "::endgroup::"
echo "::group::webapi"
ls -laR artifacts/webapi || true
echo "::endgroup::"
- name: Tokenize (replace #{...}# from secrets/vars)
env:
SECRETS: ${{ toJson(secrets) }}
VARIABLES: ${{ toJson(vars) }}
RUN_ID: ${{ steps.resolve.outputs.run_id }}
run: |
set -euo pipefail
node .gitea/scripts/replaceTokens.js
- name: Package artifacts as ZIPs
run: |
mkdir -p build
(cd artifacts/frontend && zip -rq ../../build/DiunaBI-Morska-Frontend.zip .)
(cd artifacts/webapi && zip -rq ../../build/DiunaBI-Morska-WebApi.zip .)
ls -la build
- name: Upload artifacts to remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEARUNNER_SSH_KEY }}
SSH_USER: mz
SSH_HOST: bim-it.pl
REMOTE_DIR: deployment
run: |
set -euo pipefail
umask 077
echo "$SSH_PRIVATE_KEY" > private_key
chmod 600 private_key
mkdir -p ~/.ssh
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
ssh -i private_key "$SSH_USER@$SSH_HOST" "mkdir -p ~/$REMOTE_DIR"
scp -i private_key build/DiunaBI-Morska-Frontend.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
scp -i private_key build/DiunaBI-Morska-WebApi.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
shred -u private_key
- name: Run release script on remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEARUNNER_SSH_KEY }}
SSH_USER: mz
SSH_HOST: bim-it.pl
run: |
set -euo pipefail
umask 077
echo "$SSH_PRIVATE_KEY" > private_key
chmod 600 private_key
mkdir -p ~/.ssh
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
ssh -i private_key "$SSH_USER@$SSH_HOST" "./deployment/DiunaBI-Morska.Release.sh"
shred -u private_key

View File

@@ -0,0 +1,99 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Year INT = 2024;
DECLARE @Type NVARCHAR(5) = 'P2';
DECLARE @StartDate NVARCHAR(10) = '2025.01.02';
DECLARE @EndDate NVARCHAR(10) = '2026.12.31'
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-A-IW_', @Type, '-', @Year,'-', @CurrentTimestamp
);
DECLARE @Plugin NVARCHAR(100);
SET @Plugin =
CASE @Type
WHEN 'P2' THEN 'PedrolloPL.Import.P2'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxName NVARCHAR(100);
SET @DataInboxName =
CASE @Type
WHEN 'P2' THEN 'P2_2024'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @DataInboxSource NVARCHAR(100);
SET @DataInboxSource =
CASE @Type
WHEN 'P2' THEN 'Comarch'
ELSE NULL -- If @Type doesn't match, set it to NULL
END;
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
SELECT @Name AS Name, @StartDate AS StartDate, @EndDate AS EndDate, @Type AS Type, @Year AS Year, @Plugin AS Plugin,
@DataInboxName AS DataInboxName, @DataInboxSource AS DataInboxSource;
IF @JustForDebug = 1
BEGIN
SELECT 'Just for debug' AS Logger;
RETURN;
END;
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [IsCancelled], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 2);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'StartDate', @StartDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'EndDate', @EndDate, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Source', 'DataInbox', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'ImportName', @Type, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'ImportYear', @Year, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Type', 'ImportWorker', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Plugin', @Plugin, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'IsEnabled', 'True', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'DataInboxName', @DataInboxName, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'DataInboxSource', @DataInboxSource, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'Priority', '10', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);
INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES ((SELECT NEWID()), 'MaxRetries', '3', GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

View File

@@ -0,0 +1 @@
POST http://localhost:5400/jobs/schedule/10763478CB738D4ecb2h76g803478CB738D4e

View File

@@ -2,7 +2,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'D3'; DECLARE @Type NVARCHAR(3) = 'D3';
DECLARE @Month INT = 11; DECLARE @Month INT = 12;
DECLARE @Year INT = 2025; DECLARE @Year INT = 2025;
IF @Type NOT IN ('D3') IF @Type NOT IN ('D3')
@@ -14,7 +14,7 @@ END;
DECLARE @ImportType NVARCHAR(20) = 'Import-D3'; DECLARE @ImportType NVARCHAR(20) = 'Import-D3';
DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd'); DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd'); DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]); DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm'); DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00'); DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');

View File

@@ -2,9 +2,9 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'D1'; DECLARE @Type NVARCHAR(3) = 'D1';
DECLARE @Month INT = 11; DECLARE @Month INT = 12;
DECLARE @Year INT = 2025; DECLARE @Year INT = 2025;
DECLARE @MonthName NVARCHAR(20) = 'Pazdziernik_2025'; DECLARE @MonthName NVARCHAR(20) = 'Grudzien_2025';
IF @Type NOT IN ('K5', 'PU', 'AK', 'FK', 'D1', 'FK2') IF @Type NOT IN ('K5', 'PU', 'AK', 'FK', 'D1', 'FK2')
BEGIN BEGIN
@@ -27,7 +27,7 @@ SET @ImportType =
ELSE 'Standard' ELSE 'Standard'
END; END;
DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd'); DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd'); DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]); DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm'); DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00'); DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');

View File

@@ -2,7 +2,7 @@
DECLARE @JustForDebug TINYINT = 0; DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Month INT = 11; DECLARE @Month INT = 12;
DECLARE @Year INT = 2025; DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]); DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'FK'; DECLARE @Type NVARCHAR(3) = 'FK';
DECLARE @Month INT = 11; DECLARE @Month INT = 12;
DECLARE @Year INT = 2025; DECLARE @Year INT = 2025;
IF @Type NOT IN ('K5', 'PU', 'AK', 'FK') IF @Type NOT IN ('K5', 'PU', 'AK', 'FK')

View File

@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'FK2'; DECLARE @Type NVARCHAR(3) = 'FK2';
DECLARE @Month INT = 11; DECLARE @Month INT = 12;
DECLARE @Year INT = 2025; DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]); DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -2,7 +2,7 @@
DECLARE @JustForDebug TINYINT = 0; DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Month INT = 11; DECLARE @Month INT = 12;
DECLARE @Year INT = 2025; DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]); DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -2,9 +2,9 @@
DECLARE @JustForDebug TINYINT = 0; DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES -- SETUP VARIABLES
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]); DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [DiunaBI-PedrolloPL].[dbo].[Layers]);
DECLARE @Name NVARCHAR(50) = CONCAT( DECLARE @Name NVARCHAR(50) = CONCAT(
'L', @Number, '-D-D6-SELL-CODES' 'L', @Number, 'D-P2-CODES'
); );
DECLARE @LayerId UNIQUEIDENTIFIER = NEWID(); DECLARE @LayerId UNIQUEIDENTIFIER = NEWID();
@@ -16,7 +16,7 @@ BEGIN
RETURN; RETURN;
END; END;
INSERT INTO [diunabi-morska].[dbo].[Layers] INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Layers]
([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [CreatedById], [ModifiedById], [Type]) ([Id], [Number], [Name], [CreatedAt], [ModifiedAt], [IsDeleted], [CreatedById], [ModifiedById], [Type])
VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 3); VALUES (@LayerId, @Number, @Name, GETDATE(), GETDATE(), 0, '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 3);
@@ -27,16 +27,23 @@ DECLARE @Array TABLE (
INSERT INTO @Array (Code, Desc1) INSERT INTO @Array (Code, Desc1)
VALUES VALUES
('1002', '1102'), ('01','<nieznany>'),
('1003','1202'), ('02','DOLNOŚLĄSKIE'),
('1008','1302'), ('03','KUJAWSKO-POMORSKIE'),
('1009','1302'), ('04','LUBELSKIE'),
('9085','1203'), ('05','LUBUSKIE'),
('1010','1304'), ('06','ŁÓDZKIE'),
('9086','1005'), ('07','MAŁOPOLSKIE'),
('1021','1206'), ('08','MAZOWIECKIE'),
('9089','1207'), ('09','OPOLSKIE'),
('9091','1208') ('10','PODKARPACKIE'),
('11','PODLASKIE'),
('12','POMORSKIE'),
('13','ŚLĄSKIE'),
('14','ŚWIĘTOKRZYSKIE'),
('15','WARMIŃSKO-MAZURSKIE'),
('16','WIELKOPOLSKIE'),
('17','ZACHODNIOPOMORSKIE');
-- Loop through the array and insert into the target table -- Loop through the array and insert into the target table
DECLARE @Code NVARCHAR(50); DECLARE @Code NVARCHAR(50);
@@ -51,7 +58,7 @@ FETCH NEXT FROM CursorArray INTO @Code, @Desc1;
WHILE @@FETCH_STATUS = 0 WHILE @@FETCH_STATUS = 0
BEGIN BEGIN
INSERT INTO [diunabi-morska].[dbo].[Records] INSERT INTO [DiunaBI-PedrolloPL].[dbo].[Records]
([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId]) ([Id], [Code], [Desc1], [CreatedAt], [ModifiedAt], [CreatedById], [ModifiedById], [IsDeleted], [LayerId])
VALUES (NEWID(), @Code, @Desc1, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId); VALUES (NEWID(), @Code, @Desc1, GETDATE(), GETDATE(), '117be4f0-b5d1-41a1-a962-39dc30cce368', '117be4f0-b5d1-41a1-a962-39dc30cce368', 0, @LayerId);

View File

@@ -1,9 +1,11 @@
using DiunaBI.API.Services; using DiunaBI.API.Services;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
namespace DiunaBI.API.Controllers; namespace DiunaBI.API.Controllers;
[AllowAnonymous]
[ApiController] [ApiController]
[Route("[controller]")] [Route("[controller]")]
public class AuthController( public class AuthController(

View File

@@ -4,9 +4,12 @@ using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using DiunaBI.Infrastructure.Data; using DiunaBI.Infrastructure.Data;
using DiunaBI.Domain.Entities; using DiunaBI.Domain.Entities;
using DiunaBI.Application.DTOModels;
using DiunaBI.Application.DTOModels.Common;
namespace DiunaBI.API.Controllers; namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController] [ApiController]
[Route("[controller]")] [Route("[controller]")]
public class DataInboxController : Controller public class DataInboxController : Controller
@@ -89,17 +92,86 @@ public class DataInboxController : Controller
} }
[HttpGet] [HttpGet]
public IActionResult GetAll() [Route("GetAll")]
public IActionResult GetAll([FromQuery] int start, [FromQuery] int limit, [FromQuery] string? search)
{ {
try try
{ {
var dataInbox = _db.DataInbox.AsNoTracking().ToList(); var query = _db.DataInbox.AsQueryable();
_logger.LogDebug("DataInbox: Retrieved {Count} records", dataInbox.Count);
return Ok(dataInbox); if (!string.IsNullOrEmpty(search))
{
query = query.Where(x => x.Name.Contains(search) || x.Source.Contains(search));
}
var totalCount = query.Count();
var items = query
.OrderByDescending(x => x.CreatedAt)
.Skip(start)
.Take(limit)
.AsNoTracking()
.Select(x => new DataInboxDto
{
Id = x.Id,
Name = x.Name,
Source = x.Source,
Data = x.Data,
CreatedAt = x.CreatedAt
})
.ToList();
var pagedResult = new PagedResult<DataInboxDto>
{
Items = items,
TotalCount = totalCount,
Page = (start / limit) + 1,
PageSize = limit
};
_logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} data inbox items (page {Page}) with filter search={Search}",
items.Count, totalCount, pagedResult.Page, search);
return Ok(pagedResult);
} }
catch (Exception e) catch (Exception e)
{ {
_logger.LogError(e, "DataInbox: Error retrieving records"); _logger.LogError(e, "GetAll: Error retrieving data inbox items");
return BadRequest(e.ToString());
}
}
[HttpGet]
[Route("{id:guid}")]
public IActionResult Get(Guid id)
{
try
{
var dataInbox = _db.DataInbox
.AsNoTracking()
.FirstOrDefault(x => x.Id == id);
if (dataInbox == null)
{
_logger.LogWarning("Get: Data inbox item {Id} not found", id);
return NotFound();
}
var dto = new DataInboxDto
{
Id = dataInbox.Id,
Name = dataInbox.Name,
Source = dataInbox.Source,
Data = dataInbox.Data,
CreatedAt = dataInbox.CreatedAt
};
_logger.LogDebug("Get: Retrieved data inbox item {Id} {Name}", id, dataInbox.Name);
return Ok(dto);
}
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving data inbox item {Id}", id);
return BadRequest(e.ToString()); return BadRequest(e.ToString());
} }
} }

View File

@@ -0,0 +1,435 @@
using DiunaBI.Application.DTOModels.Common;
using DiunaBI.Domain.Entities;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Infrastructure.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class JobsController : Controller
{
private readonly AppDbContext _db;
private readonly JobSchedulerService _jobScheduler;
private readonly IConfiguration _configuration;
private readonly ILogger<JobsController> _logger;
public JobsController(
AppDbContext db,
JobSchedulerService jobScheduler,
IConfiguration configuration,
ILogger<JobsController> logger)
{
_db = db;
_jobScheduler = jobScheduler;
_configuration = configuration;
_logger = logger;
}
/// <summary>
/// Paged, filterable listing of queue jobs, newest first.
/// start/limit are offset-based paging inputs; status/jobType/layerId are
/// optional filters combined with AND.
/// </summary>
[HttpGet]
[Route("")]
public async Task<IActionResult> GetAll(
    [FromQuery] int start = 0,
    [FromQuery] int limit = 50,
    [FromQuery] JobStatus? status = null,
    [FromQuery] JobType? jobType = null,
    [FromQuery] Guid? layerId = null)
{
    try
    {
        // Normalize paging input: a caller-supplied limit of 0 would divide by
        // zero in the Page computation below, and a negative start would fail
        // when translated to a SQL OFFSET.
        var skip = Math.Max(0, start);
        var pageSize = limit > 0 ? limit : 50;

        var query = _db.QueueJobs.AsQueryable();

        if (status.HasValue)
        {
            query = query.Where(j => j.Status == status.Value);
        }
        if (jobType.HasValue)
        {
            query = query.Where(j => j.JobType == jobType.Value);
        }
        if (layerId.HasValue)
        {
            query = query.Where(j => j.LayerId == layerId.Value);
        }

        var totalCount = await query.CountAsync();
        var items = await query
            .OrderByDescending(j => j.CreatedAt)
            .Skip(skip)
            .Take(pageSize)
            .AsNoTracking()
            .ToListAsync();

        var pagedResult = new PagedResult<QueueJob>
        {
            Items = items,
            TotalCount = totalCount,
            Page = (skip / pageSize) + 1,
            PageSize = pageSize
        };
        _logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} jobs", items.Count, totalCount);
        return Ok(pagedResult);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "GetAll: Error retrieving jobs");
        return BadRequest(ex.ToString());
    }
}
/// <summary>Returns a single queue job by id, or 404 when it does not exist.</summary>
[HttpGet]
[Route("{id:guid}")]
public async Task<IActionResult> Get(Guid id)
{
    try
    {
        // Read-only lookup; AsNoTracking skips change-tracker overhead.
        var match = await _db.QueueJobs
            .AsNoTracking()
            .FirstOrDefaultAsync(j => j.Id == id);

        if (match is null)
        {
            _logger.LogWarning("Get: Job {JobId} not found", id);
            return NotFound("Job not found");
        }

        _logger.LogDebug("Get: Retrieved job {JobId}", id);
        return Ok(match);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "Get: Error retrieving job {JobId}", id);
        return BadRequest(ex.ToString());
    }
}
/// <summary>
/// Anonymous trigger endpoint guarded by a shared API key; schedules all
/// configured jobs, optionally filtered by name.
/// </summary>
[HttpPost]
[Route("schedule/{apiKey}")]
[AllowAnonymous]
public async Task<IActionResult> ScheduleJobs(string apiKey, [FromQuery] string? nameFilter = null)
{
    if (apiKey != _configuration["apiKey"])
    {
        // Security: never write the presented key to the log — a mistyped or
        // probed key is still a secret-shaped value that must stay out of logs.
        _logger.LogWarning("ScheduleJobs: Unauthorized request - invalid API key");
        return Unauthorized();
    }
    try
    {
        var jobsCreated = await _jobScheduler.ScheduleAllJobsAsync(nameFilter);
        _logger.LogInformation("ScheduleJobs: Created {Count} jobs", jobsCreated);
        return Ok(new
        {
            success = true,
            jobsCreated,
            message = $"Successfully scheduled {jobsCreated} jobs"
        });
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "ScheduleJobs: Error scheduling jobs");
        return BadRequest(ex.ToString());
    }
}
/// <summary>
/// Anonymous trigger endpoint guarded by a shared API key; schedules only
/// import jobs, optionally filtered by name.
/// </summary>
[HttpPost]
[Route("schedule/imports/{apiKey}")]
[AllowAnonymous]
public async Task<IActionResult> ScheduleImportJobs(string apiKey, [FromQuery] string? nameFilter = null)
{
    if (apiKey != _configuration["apiKey"])
    {
        // Security: do not log the presented key (see ScheduleJobs).
        _logger.LogWarning("ScheduleImportJobs: Unauthorized request - invalid API key");
        return Unauthorized();
    }
    try
    {
        var jobsCreated = await _jobScheduler.ScheduleImportJobsAsync(nameFilter);
        _logger.LogInformation("ScheduleImportJobs: Created {Count} import jobs", jobsCreated);
        return Ok(new
        {
            success = true,
            jobsCreated,
            message = $"Successfully scheduled {jobsCreated} import jobs"
        });
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "ScheduleImportJobs: Error scheduling import jobs");
        return BadRequest(ex.ToString());
    }
}
/// <summary>
/// Anonymous trigger endpoint guarded by a shared API key; schedules only
/// process jobs.
/// </summary>
[HttpPost]
[Route("schedule/processes/{apiKey}")]
[AllowAnonymous]
public async Task<IActionResult> ScheduleProcessJobs(string apiKey)
{
    if (apiKey != _configuration["apiKey"])
    {
        // Security: do not log the presented key (see ScheduleJobs).
        _logger.LogWarning("ScheduleProcessJobs: Unauthorized request - invalid API key");
        return Unauthorized();
    }
    try
    {
        var jobsCreated = await _jobScheduler.ScheduleProcessJobsAsync();
        _logger.LogInformation("ScheduleProcessJobs: Created {Count} process jobs", jobsCreated);
        return Ok(new
        {
            success = true,
            jobsCreated,
            message = $"Successfully scheduled {jobsCreated} process jobs"
        });
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "ScheduleProcessJobs: Error scheduling process jobs");
        return BadRequest(ex.ToString());
    }
}
/// <summary>Resets a Failed job back to Pending so the worker retries it.</summary>
[HttpPost]
[Route("{id:guid}/retry")]
public async Task<IActionResult> RetryJob(Guid id)
{
    try
    {
        var target = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
        if (target is null)
        {
            _logger.LogWarning("RetryJob: Job {JobId} not found", id);
            return NotFound("Job not found");
        }

        if (target.Status is not JobStatus.Failed)
        {
            _logger.LogWarning("RetryJob: Job {JobId} is not in Failed status (current: {Status})", id, target.Status);
            return BadRequest($"Job is not in Failed status (current: {target.Status})");
        }

        // Reset the job so the worker picks it up again from scratch.
        target.Status = JobStatus.Pending;
        target.RetryCount = 0;
        target.LastError = null;
        target.ModifiedAtUtc = DateTime.UtcNow;
        await _db.SaveChangesAsync();

        _logger.LogInformation("RetryJob: Job {JobId} reset to Pending status", id);
        return Ok(new
        {
            success = true,
            message = "Job reset to Pending status and will be retried"
        });
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "RetryJob: Error retrying job {JobId}", id);
        return BadRequest(ex.ToString());
    }
}
/// <summary>
/// Cancels a job by marking it Failed with a "Cancelled by user" error.
/// Running and Completed jobs cannot be cancelled.
/// </summary>
[HttpDelete]
[Route("{id:guid}")]
public async Task<IActionResult> CancelJob(Guid id)
{
    try
    {
        var found = await _db.QueueJobs.FirstOrDefaultAsync(j => j.Id == id);
        if (found is null)
        {
            _logger.LogWarning("CancelJob: Job {JobId} not found", id);
            return NotFound("Job not found");
        }

        switch (found.Status)
        {
            case JobStatus.Running:
                _logger.LogWarning("CancelJob: Cannot cancel running job {JobId}", id);
                return BadRequest("Cannot cancel a job that is currently running");
            case JobStatus.Completed:
                _logger.LogWarning("CancelJob: Cannot cancel completed job {JobId}", id);
                return BadRequest("Cannot cancel a completed job");
        }

        // Cancellation is modeled as a Failed status with an explanatory error.
        found.Status = JobStatus.Failed;
        found.LastError = "Cancelled by user";
        found.ModifiedAtUtc = DateTime.UtcNow;
        await _db.SaveChangesAsync();

        _logger.LogInformation("CancelJob: Job {JobId} cancelled", id);
        return Ok(new
        {
            success = true,
            message = "Job cancelled successfully"
        });
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "CancelJob: Error cancelling job {JobId}", id);
        return BadRequest(ex.ToString());
    }
}
/// <summary>
/// Returns job counts per status plus the total. Uses a single grouped query
/// instead of six separate COUNT round trips to the database.
/// </summary>
[HttpGet]
[Route("stats")]
public async Task<IActionResult> GetStats()
{
    try
    {
        // One GROUP BY query replaces the previous six sequential CountAsync calls.
        var grouped = await _db.QueueJobs
            .GroupBy(j => j.Status)
            .Select(g => new { Status = g.Key, Count = g.Count() })
            .ToListAsync();
        var byStatus = grouped.ToDictionary(g => g.Status, g => g.Count);

        // Missing statuses simply have zero jobs.
        int CountOf(JobStatus s) => byStatus.TryGetValue(s, out var c) ? c : 0;

        var stats = new
        {
            pending = CountOf(JobStatus.Pending),
            running = CountOf(JobStatus.Running),
            completed = CountOf(JobStatus.Completed),
            failed = CountOf(JobStatus.Failed),
            retrying = CountOf(JobStatus.Retrying),
            // Sum over all groups equals the full table count.
            total = grouped.Sum(g => g.Count)
        };
        _logger.LogDebug("GetStats: Retrieved job statistics");
        return Ok(stats);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "GetStats: Error retrieving job statistics");
        return BadRequest(ex.ToString());
    }
}
/// <summary>
/// Creates a queue job for an Administration layer configured as an
/// ImportWorker or ProcessWorker. The layer's Records act as a key/value
/// configuration store (Type, IsEnabled, Plugin, Priority, MaxRetries).
/// Returns the existing job instead of creating a duplicate when a
/// Pending/Running job for the layer already exists.
/// </summary>
[HttpPost]
[Route("create-for-layer/{layerId:guid}")]
public async Task<IActionResult> CreateJobForLayer(Guid layerId)
{
    try
    {
        // Records are eagerly loaded because the configuration checks below
        // read them in memory.
        var layer = await _db.Layers
            .Include(x => x.Records)
            .FirstOrDefaultAsync(l => l.Id == layerId);

        if (layer == null)
        {
            _logger.LogWarning("CreateJobForLayer: Layer {LayerId} not found", layerId);
            return NotFound($"Layer {layerId} not found");
        }

        if (layer.Type != LayerType.Administration)
        {
            _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not an Administration layer", layerId);
            return BadRequest("Only Administration layers can be run as jobs");
        }

        // Get the Type record to determine if it's ImportWorker or ProcessWorker
        // (a null typeRecord also fails this check, since null != both values).
        var typeRecord = layer.Records?.FirstOrDefault(x => x.Code == "Type");
        if (typeRecord?.Desc1 != "ImportWorker" && typeRecord?.Desc1 != "ProcessWorker")
        {
            _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not a valid worker type", layerId);
            return BadRequest("Layer must be an ImportWorker or ProcessWorker");
        }

        // Check if enabled — the flag is stored as the literal string "True".
        var isEnabledRecord = layer.Records?.FirstOrDefault(x => x.Code == "IsEnabled");
        if (isEnabledRecord?.Desc1 != "True")
        {
            _logger.LogWarning("CreateJobForLayer: Layer {LayerId} is not enabled", layerId);
            return BadRequest("Layer is not enabled");
        }

        // Get plugin name
        var pluginRecord = layer.Records?.FirstOrDefault(x => x.Code == "Plugin");
        if (string.IsNullOrEmpty(pluginRecord?.Desc1))
        {
            _logger.LogWarning("CreateJobForLayer: Layer {LayerId} has no Plugin configured", layerId);
            return BadRequest("Layer has no Plugin configured");
        }

        // Get priority and max retries; unparseable or missing values fall
        // back to priority 0 and 3 retries.
        var priorityRecord = layer.Records?.FirstOrDefault(x => x.Code == "Priority");
        var maxRetriesRecord = layer.Records?.FirstOrDefault(x => x.Code == "MaxRetries");
        var priority = int.TryParse(priorityRecord?.Desc1, out var p) ? p : 0;
        var maxRetries = int.TryParse(maxRetriesRecord?.Desc1, out var m) ? m : 3;

        // Safe to dereference: the Type check above guarantees typeRecord is non-null here.
        var jobType = typeRecord.Desc1 == "ImportWorker" ? JobType.Import : JobType.Process;

        // Check if there's already a pending/running job for this layer
        // (idempotency guard — avoids enqueueing the same layer twice).
        var existingJob = await _db.QueueJobs
            .Where(j => j.LayerId == layer.Id &&
                        (j.Status == JobStatus.Pending || j.Status == JobStatus.Running))
            .FirstOrDefaultAsync();

        if (existingJob != null)
        {
            _logger.LogInformation("CreateJobForLayer: Job already exists for layer {LayerId}, returning existing job", layerId);
            return Ok(new
            {
                success = true,
                jobId = existingJob.Id,
                message = "Job already exists for this layer",
                existing = true
            });
        }

        // Create the job. NOTE(review): CreatedById/ModifiedById are
        // Guid.Empty — presumably "system"; confirm this is intentional.
        var job = new QueueJob
        {
            Id = Guid.NewGuid(),
            LayerId = layer.Id,
            LayerName = layer.Name ?? "Unknown",
            PluginName = pluginRecord.Desc1,
            JobType = jobType,
            Priority = priority,
            MaxRetries = maxRetries,
            Status = JobStatus.Pending,
            CreatedAt = DateTime.UtcNow,
            CreatedAtUtc = DateTime.UtcNow,
            ModifiedAtUtc = DateTime.UtcNow,
            CreatedById = Guid.Empty,
            ModifiedById = Guid.Empty
        };
        _db.QueueJobs.Add(job);
        await _db.SaveChangesAsync();

        _logger.LogInformation("CreateJobForLayer: Created job {JobId} for layer {LayerName} ({LayerId})",
            job.Id, layer.Name, layerId);

        return Ok(new
        {
            success = true,
            jobId = job.Id,
            message = "Job created successfully",
            existing = false
        });
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "CreateJobForLayer: Error creating job for layer {LayerId}", layerId);
        return BadRequest(ex.ToString());
    }
}
}

View File

@@ -1,5 +1,6 @@
using System.Globalization; using System.Globalization;
using System.Text; using System.Text;
using System.Text.Json;
using Google.Apis.Sheets.v4; using Google.Apis.Sheets.v4;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
@@ -12,6 +13,7 @@ using DiunaBI.Infrastructure.Services;
namespace DiunaBI.API.Controllers; namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController] [ApiController]
[Route("[controller]")] [Route("[controller]")]
public class LayersController : Controller public class LayersController : Controller
@@ -727,4 +729,398 @@ public class LayersController : Controller
throw; throw;
} }
} }
// Record CRUD operations
/// <summary>
/// Creates a record in an editable (Dictionary/Administration) layer and
/// writes a Created entry to the record history audit log.
/// </summary>
[HttpPost]
[Route("{layerId:guid}/records")]
public IActionResult CreateRecord(Guid layerId, [FromBody] RecordDto recordDto)
{
    try
    {
        var userId = Request.Headers["UserId"].ToString();
        if (string.IsNullOrEmpty(userId))
        {
            _logger.LogWarning("CreateRecord: No UserId in request headers");
            return Unauthorized();
        }
        // Parse once up front: a malformed header should read as "not
        // authenticated" rather than surface as a FormatException dumped
        // into a 400 response body.
        if (!Guid.TryParse(userId, out var actorId))
        {
            _logger.LogWarning("CreateRecord: UserId header is not a valid GUID");
            return Unauthorized();
        }

        var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
        if (layer == null)
        {
            _logger.LogWarning("CreateRecord: Layer {LayerId} not found", layerId);
            return NotFound("Layer not found");
        }

        if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
        {
            _logger.LogWarning("CreateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
            return BadRequest("Only Dictionary and Administration layers can be edited");
        }

        if (string.IsNullOrWhiteSpace(recordDto.Code))
        {
            return BadRequest("Code is required");
        }
        if (string.IsNullOrWhiteSpace(recordDto.Desc1))
        {
            return BadRequest("Desc1 is required");
        }

        var record = new Record
        {
            Id = Guid.NewGuid(),
            Code = recordDto.Code,
            Desc1 = recordDto.Desc1,
            LayerId = layerId,
            CreatedAt = DateTime.UtcNow,
            ModifiedAt = DateTime.UtcNow,
            CreatedById = actorId,
            ModifiedById = actorId,
            IsDeleted = false
        };
        _db.Records.Add(record);

        // Audit trail: snapshot the freshly-created record.
        CaptureRecordHistory(record, RecordChangeType.Created, actorId);

        // Keep the parent layer's audit fields in sync.
        layer.ModifiedAt = DateTime.UtcNow;
        layer.ModifiedById = actorId;
        _db.SaveChanges();

        _logger.LogInformation("CreateRecord: Created record {RecordId} in layer {LayerId}", record.Id, layerId);
        return Ok(new RecordDto
        {
            Id = record.Id,
            Code = record.Code,
            Desc1 = record.Desc1,
            LayerId = record.LayerId,
            CreatedAt = record.CreatedAt,
            ModifiedAt = record.ModifiedAt,
            CreatedById = record.CreatedById,
            ModifiedById = record.ModifiedById
        });
    }
    catch (Exception e)
    {
        _logger.LogError(e, "CreateRecord: Error creating record in layer {LayerId}", layerId);
        return BadRequest(e.ToString());
    }
}
/// <summary>
/// Updates Code/Desc1 of a record in an editable layer and writes an
/// Updated entry (with old/new values) to the record history audit log.
/// </summary>
[HttpPut]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult UpdateRecord(Guid layerId, Guid recordId, [FromBody] RecordDto recordDto)
{
    try
    {
        var userId = Request.Headers["UserId"].ToString();
        if (string.IsNullOrEmpty(userId))
        {
            _logger.LogWarning("UpdateRecord: No UserId in request headers");
            return Unauthorized();
        }
        // Parse once; a malformed header should not become a FormatException
        // dumped into a 400 response body.
        if (!Guid.TryParse(userId, out var actorId))
        {
            _logger.LogWarning("UpdateRecord: UserId header is not a valid GUID");
            return Unauthorized();
        }

        var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
        if (layer == null)
        {
            _logger.LogWarning("UpdateRecord: Layer {LayerId} not found", layerId);
            return NotFound("Layer not found");
        }

        if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
        {
            _logger.LogWarning("UpdateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
            return BadRequest("Only Dictionary and Administration layers can be edited");
        }

        var record = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
        if (record == null)
        {
            _logger.LogWarning("UpdateRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
            return NotFound("Record not found");
        }

        if (string.IsNullOrWhiteSpace(recordDto.Code))
        {
            return BadRequest("Code is required");
        }
        if (string.IsNullOrWhiteSpace(recordDto.Desc1))
        {
            return BadRequest("Desc1 is required");
        }

        // Capture old values before updating so history can diff them.
        var oldCode = record.Code;
        var oldDesc1 = record.Desc1;

        // BUGFIX: Code was validated above and diffed by CaptureRecordHistory,
        // but was never written back — code edits were silently dropped.
        record.Code = recordDto.Code;
        record.Desc1 = recordDto.Desc1;
        record.ModifiedAt = DateTime.UtcNow;
        record.ModifiedById = actorId;

        // Audit trail: records which fields changed and their old/new values.
        CaptureRecordHistory(record, RecordChangeType.Updated, actorId, oldCode, oldDesc1);

        // Keep the parent layer's audit fields in sync.
        layer.ModifiedAt = DateTime.UtcNow;
        layer.ModifiedById = actorId;
        _db.SaveChanges();

        _logger.LogInformation("UpdateRecord: Updated record {RecordId} in layer {LayerId}", recordId, layerId);
        return Ok(new RecordDto
        {
            Id = record.Id,
            Code = record.Code,
            Desc1 = record.Desc1,
            LayerId = record.LayerId,
            CreatedAt = record.CreatedAt,
            ModifiedAt = record.ModifiedAt,
            CreatedById = record.CreatedById,
            ModifiedById = record.ModifiedById
        });
    }
    catch (Exception e)
    {
        _logger.LogError(e, "UpdateRecord: Error updating record {RecordId} in layer {LayerId}", recordId, layerId);
        return BadRequest(e.ToString());
    }
}
/// <summary>
/// Hard-deletes a record from an editable layer; the record's final state is
/// preserved in the history audit log before removal.
/// </summary>
[HttpDelete]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult DeleteRecord(Guid layerId, Guid recordId)
{
    try
    {
        var requestUserId = Request.Headers["UserId"].ToString();
        if (string.IsNullOrEmpty(requestUserId))
        {
            _logger.LogWarning("DeleteRecord: No UserId in request headers");
            return Unauthorized();
        }

        var parentLayer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
        if (parentLayer is null)
        {
            _logger.LogWarning("DeleteRecord: Layer {LayerId} not found", layerId);
            return NotFound("Layer not found");
        }

        var editable = parentLayer.Type == Domain.Entities.LayerType.Dictionary
                       || parentLayer.Type == Domain.Entities.LayerType.Administration;
        if (!editable)
        {
            _logger.LogWarning("DeleteRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, parentLayer.Type);
            return BadRequest("Only Dictionary and Administration layers can be edited");
        }

        var target = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
        if (target is null)
        {
            _logger.LogWarning("DeleteRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
            return NotFound("Record not found");
        }

        var actorId = Guid.Parse(requestUserId);

        // Snapshot the record into history before it is physically removed.
        CaptureRecordHistory(target, RecordChangeType.Deleted, actorId);
        _db.Records.Remove(target);

        // Bump the layer's audit fields so list views reflect the change.
        parentLayer.ModifiedAt = DateTime.UtcNow;
        parentLayer.ModifiedById = actorId;
        _db.SaveChanges();

        _logger.LogInformation("DeleteRecord: Deleted record {RecordId} from layer {LayerId}", recordId, layerId);
        return Ok();
    }
    catch (Exception e)
    {
        _logger.LogError(e, "DeleteRecord: Error deleting record {RecordId} from layer {LayerId}", recordId, layerId);
        return BadRequest(e.ToString());
    }
}
/// <summary>
/// Returns the full audit history for one record, newest change first,
/// with a human-readable FormattedChange per entry.
/// </summary>
[HttpGet]
[Route("{layerId:guid}/records/{recordId:guid}/history")]
public IActionResult GetRecordHistory(Guid layerId, Guid recordId)
{
    try
    {
        // BUGFIX: materialize the query first. FormatHistoryChange is a local
        // C# helper and cannot be translated by EF Core inside an IQueryable
        // projection — the previous single-expression query threw at runtime.
        var entries = _db.RecordHistory
            .Include(h => h.ChangedBy)
            .Where(h => h.RecordId == recordId && h.LayerId == layerId)
            .OrderByDescending(h => h.ChangedAt)
            .AsNoTracking()
            .ToList();

        // Project in memory, where arbitrary C# is allowed.
        var history = entries
            .Select(h => new RecordHistoryDto
            {
                Id = h.Id,
                RecordId = h.RecordId,
                LayerId = h.LayerId,
                ChangedAt = h.ChangedAt,
                ChangedById = h.ChangedById,
                ChangedByName = h.ChangedBy?.UserName ?? h.ChangedBy?.Email ?? "Unknown",
                ChangeType = h.ChangeType.ToString(),
                Code = h.Code,
                Desc1 = h.Desc1,
                ChangedFields = h.ChangedFields,
                ChangesSummary = h.ChangesSummary,
                FormattedChange = FormatHistoryChange(h)
            })
            .ToList();

        _logger.LogDebug("GetRecordHistory: Retrieved {Count} history entries for record {RecordId}", history.Count, recordId);
        return Ok(history);
    }
    catch (Exception e)
    {
        _logger.LogError(e, "GetRecordHistory: Error retrieving history for record {RecordId}", recordId);
        return BadRequest(e.ToString());
    }
}
/// <summary>
/// Lists records deleted from a layer, based on the most recent "Deleted"
/// history entry per record, including who deleted each one and when.
/// </summary>
[HttpGet]
[Route("{layerId:guid}/records/deleted")]
public IActionResult GetDeletedRecords(Guid layerId)
{
    try
    {
        // Pull every "Deleted" history row for this layer, then reduce to the
        // newest deletion per record. Grouping happens in memory.
        var latestDeletionPerRecord = _db.RecordHistory
            .Where(h => h.LayerId == layerId && h.ChangeType == RecordChangeType.Deleted)
            .ToList()
            .GroupBy(h => h.RecordId)
            .Select(g => g.OrderByDescending(h => h.ChangedAt).First())
            .ToList();

        // Resolve display names for everyone who performed a deletion.
        var deleterIds = latestDeletionPerRecord.Select(h => h.ChangedById).Distinct().ToList();
        var nameById = _db.Users
            .Where(u => deleterIds.Contains(u.Id))
            .ToDictionary(u => u.Id, u => u.UserName ?? string.Empty);

        var result = latestDeletionPerRecord
            .Select(h => new DeletedRecordDto
            {
                RecordId = h.RecordId,
                Code = h.Code,
                Desc1 = h.Desc1,
                DeletedAt = h.ChangedAt,
                DeletedById = h.ChangedById,
                DeletedByName = nameById.TryGetValue(h.ChangedById, out var displayName) ? displayName : string.Empty
            })
            .OrderByDescending(d => d.DeletedAt)
            .ToList();

        _logger.LogDebug("GetDeletedRecords: Retrieved {Count} deleted records for layer {LayerId}", result.Count, layerId);
        return Ok(result);
    }
    catch (Exception e)
    {
        _logger.LogError(e, "GetDeletedRecords: Error retrieving deleted records for layer {LayerId}", layerId);
        return BadRequest(e.ToString());
    }
}
// Writes one audit row describing a create/update/delete of a record.
// For updates, oldCode/oldDesc1 are compared against the record's current
// values to build the changed-field list and a JSON old/new summary.
// Note: only adds to the context — the caller is responsible for SaveChanges.
private void CaptureRecordHistory(Record record, RecordChangeType changeType, Guid userId, string? oldCode = null, string? oldDesc1 = null)
{
    var touchedFields = new List<string>();
    var details = new Dictionary<string, Dictionary<string, string?>>();

    if (changeType == RecordChangeType.Updated)
    {
        // Local helper: record a field only when its value actually changed.
        void Track(string field, string? before, string? after)
        {
            if (before == after) return;
            touchedFields.Add(field);
            details[field] = new Dictionary<string, string?>
            {
                ["old"] = before,
                ["new"] = after
            };
        }

        Track("Code", oldCode, record.Code);
        Track("Desc1", oldDesc1, record.Desc1);
    }

    _db.RecordHistory.Add(new RecordHistory
    {
        Id = Guid.NewGuid(),
        RecordId = record.Id,
        LayerId = record.LayerId,
        ChangedAt = DateTime.UtcNow,
        ChangedById = userId,
        ChangeType = changeType,
        Code = record.Code,
        Desc1 = record.Desc1,
        ChangedFields = touchedFields.Count > 0 ? string.Join(", ", touchedFields) : null,
        ChangesSummary = details.Count > 0 ? JsonSerializer.Serialize(details) : null
    });
    _logger.LogInformation("CaptureRecordHistory: Captured {ChangeType} for record {RecordId}", changeType, record.Id);
}
// Renders a history row as a one-line, human-readable description.
// Created/Deleted use the snapshot values; Updated tries to parse the JSON
// ChangesSummary into per-field old → new fragments, falling back to a
// generic message when the JSON is missing or malformed.
private static string FormatHistoryChange(RecordHistory h)
{
    switch (h.ChangeType)
    {
        case RecordChangeType.Created:
            return $"Created record with Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
        case RecordChangeType.Deleted:
            return $"Deleted record Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
    }

    // Updated
    if (!string.IsNullOrEmpty(h.ChangesSummary))
    {
        try
        {
            var parsed = JsonSerializer.Deserialize<Dictionary<string, Dictionary<string, string?>>>(h.ChangesSummary);
            if (parsed != null)
            {
                var fragments = parsed.Select(kv =>
                {
                    var before = kv.Value.GetValueOrDefault("old") ?? "empty";
                    var after = kv.Value.GetValueOrDefault("new") ?? "empty";
                    return $"{kv.Key}: \"{before}\" → \"{after}\"";
                });
                return $"Updated: {string.Join(", ", fragments)}";
            }
        }
        catch
        {
            // Malformed JSON — fall through to the generic message below.
        }
    }

    return $"Updated {h.ChangedFields ?? "record"}";
}
} }

View File

@@ -39,9 +39,11 @@
<Target Name="CopyPlugins" AfterTargets="Build"> <Target Name="CopyPlugins" AfterTargets="Build">
<MSBuild Projects="../DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" /> <MSBuild Projects="../DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
<MSBuild Projects="../DiunaBI.Plugins.PedrolloPL/DiunaBI.Plugins.PedrolloPL.csproj" Properties="Configuration=$(Configuration);TargetFramework=$(TargetFramework)" />
<ItemGroup> <ItemGroup>
<PluginFiles Include="../DiunaBI.Plugins.Morska/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.Morska.dll" /> <PluginFiles Include="../DiunaBI.Plugins.Morska/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.Morska.dll" />
<PluginFiles Include="../DiunaBI.Plugins.PedrolloPL/bin/$(Configuration)/$(TargetFramework)/DiunaBI.Plugins.PedrolloPL.dll" />
</ItemGroup> </ItemGroup>
<MakeDir Directories="$(OutputPath)Plugins" /> <MakeDir Directories="$(OutputPath)Plugins" />
<Copy SourceFiles="@(PluginFiles)" DestinationFolder="$(OutputPath)Plugins" /> <Copy SourceFiles="@(PluginFiles)" DestinationFolder="$(OutputPath)Plugins" />

View File

@@ -1,7 +1,8 @@
# Stage 1: Build # Stage 1: Build
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
WORKDIR /src/Backend ARG PLUGIN_PROJECT=DiunaBI.Plugins.Morska
WORKDIR /
# Copy solution and all project files for restore # Copy solution and all project files for restore
COPY DiunaBI.sln ./ COPY DiunaBI.sln ./
@@ -9,7 +10,7 @@ COPY DiunaBI.API/DiunaBI.API.csproj DiunaBI.API/
COPY DiunaBI.Domain/DiunaBI.Domain.csproj DiunaBI.Domain/ COPY DiunaBI.Domain/DiunaBI.Domain.csproj DiunaBI.Domain/
COPY DiunaBI.Application/DiunaBI.Application.csproj DiunaBI.Application/ COPY DiunaBI.Application/DiunaBI.Application.csproj DiunaBI.Application/
COPY DiunaBI.Infrastructure/DiunaBI.Infrastructure.csproj DiunaBI.Infrastructure/ COPY DiunaBI.Infrastructure/DiunaBI.Infrastructure.csproj DiunaBI.Infrastructure/
COPY DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj DiunaBI.Plugins.Morska/ COPY ${PLUGIN_PROJECT}/${PLUGIN_PROJECT}.csproj ${PLUGIN_PROJECT}/
# Restore dependencies # Restore dependencies
RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
@@ -18,16 +19,16 @@ RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
COPY . . COPY . .
# Build plugin first # Build plugin first
WORKDIR /src/Backend/DiunaBI.Plugins.Morska WORKDIR /${PLUGIN_PROJECT}
RUN dotnet build -c Release RUN dotnet build -c Release
# Build and publish API # Build and publish API
WORKDIR /src/Backend/DiunaBI.API WORKDIR /DiunaBI.API
RUN dotnet publish -c Release -o /app/publish --no-restore RUN dotnet publish -c Release -o /app/publish --no-restore
# Copy plugin DLL to publish output # Copy plugin DLL to publish output
RUN mkdir -p /app/publish/Plugins && \ RUN mkdir -p /app/publish/Plugins && \
cp /src/Backend/DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll /app/publish/Plugins/ cp /${PLUGIN_PROJECT}/bin/Release/net10.0/${PLUGIN_PROJECT}.dll /app/publish/Plugins/
# Stage 2: Runtime # Stage 2: Runtime
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS runtime FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS runtime

View File

@@ -97,6 +97,10 @@ builder.Services.AddSingleton<SpreadsheetsResource.ValuesResource>(provider =>
builder.Services.AddSingleton<PluginManager>(); builder.Services.AddSingleton<PluginManager>();
// Job Queue Services
builder.Services.AddScoped<JobSchedulerService>();
builder.Services.AddHostedService<JobWorkerService>();
var app = builder.Build(); var app = builder.Build();
// Auto-apply migrations on startup // Auto-apply migrations on startup
@@ -177,26 +181,67 @@ else
pluginManager.LoadPluginsFromDirectory(pluginsPath); pluginManager.LoadPluginsFromDirectory(pluginsPath);
app.Use(async (context, next) =>
{
var token = context.Request.Headers.Authorization.ToString();
if (token.Length > 0
&& !context.Request.Path.ToString().Contains("getForPowerBI")
&& !context.Request.Path.ToString().Contains("getConfiguration")
&& !context.Request.Path.ToString().Contains("DataInbox/Add"))
{
var handler = new JwtSecurityTokenHandler();
var data = handler.ReadJwtToken(token.Split(' ')[1]);
context.Request.Headers.Append("UserId", new Microsoft.Extensions.Primitives.StringValues(data.Subject));
}
await next(context);
});
app.UseCors("CORSPolicy"); app.UseCors("CORSPolicy");
app.UseAuthentication(); app.UseAuthentication();
app.UseAuthorization(); app.UseAuthorization();
// Middleware to extract UserId from JWT token AFTER authentication
// This must run after UseAuthentication() so the JWT is already validated
// NOTE(review): the LogInformation calls below (including a 30-char preview of
// the Authorization header) fire on EVERY request — presumably leftover debug
// output; consider downgrading to LogDebug and dropping the header preview
// before production.
app.Use(async (context, next) =>
{
    var logger = context.RequestServices.GetRequiredService<ILogger<Program>>();
    logger.LogInformation("🔍 UserId Extraction Middleware - Path: {Path}, Method: {Method}",
        context.Request.Path, context.Request.Method);

    var token = context.Request.Headers.Authorization.ToString();
    logger.LogInformation("🔍 Authorization header: {Token}",
        string.IsNullOrEmpty(token) ? "NULL/EMPTY" : $"{token[..Math.Min(30, token.Length)]}...");

    if (!string.IsNullOrEmpty(token) && token.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
    {
        try
        {
            // Decode only — signature validation already happened in
            // UseAuthentication(); this just surfaces the user id as a header.
            var handler = new JwtSecurityTokenHandler();
            var jwtToken = handler.ReadJwtToken(token.Split(' ')[1]);

            // Try to get UserId from Subject claim first, then fall back to NameIdentifier
            var userId = jwtToken.Subject;
            if (string.IsNullOrEmpty(userId))
            {
                // Try NameIdentifier claim (ClaimTypes.NameIdentifier)
                var nameIdClaim = jwtToken.Claims.FirstOrDefault(c =>
                    c.Type == "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier" ||
                    c.Type == "nameid");
                userId = nameIdClaim?.Value;
            }

            logger.LogInformation("🔍 JWT UserId: {UserId}", userId ?? "NULL");

            if (!string.IsNullOrEmpty(userId))
            {
                // Use indexer to set/replace header value instead of Append
                // (prevents a client-supplied UserId header from surviving).
                context.Request.Headers["UserId"] = userId;
                logger.LogInformation("✅ Set UserId header to: {UserId}", userId);
            }
            else
            {
                logger.LogWarning("❌ UserId not found in JWT claims");
            }
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "❌ Failed to extract UserId from JWT token");
        }
    }
    else
    {
        logger.LogWarning("❌ No valid Bearer token found");
    }

    await next(context);
});
app.MapControllers(); app.MapControllers();
app.MapGet("/health", () => Results.Ok(new { status = "OK", timestamp = DateTime.UtcNow })) app.MapGet("/health", () => Results.Ok(new { status = "OK", timestamp = DateTime.UtcNow }))

View File

@@ -0,0 +1,17 @@
namespace DiunaBI.Application.DTOModels;
/// <summary>
/// Read model returned by DataInboxController for a single inbox entry.
/// </summary>
public class DataInboxDto
{
    /// <summary>Primary key of the inbox entry.</summary>
    public Guid Id { get; set; }
    /// <summary>Display name of the entry.</summary>
    public string Name { get; set; } = string.Empty;
    /// <summary>Origin of the entry (searchable alongside Name in GetAll).</summary>
    public string Source { get; set; } = string.Empty;
    /// <summary>Raw payload — format not constrained here; assumed opaque text.</summary>
    public string Data { get; set; } = string.Empty;
    /// <summary>Creation timestamp; GetAll sorts descending by this value.</summary>
    public DateTime CreatedAt { get; set; }
}
/// <summary>
/// Filter/paging parameters for data-inbox list requests.
/// NOTE(review): DataInboxController.GetAll currently binds start/limit/search
/// query parameters directly — confirm this type is actually used by a caller.
/// </summary>
public class DataInboxFilterRequest
{
    /// <summary>Optional free-text filter; null means no filtering.</summary>
    public string? Search { get; set; }
    /// <summary>1-based page number.</summary>
    public int Page { get; set; } = 1;
    /// <summary>Number of items per page.</summary>
    public int PageSize { get; set; } = 50;
}

View File

@@ -0,0 +1,11 @@
namespace DiunaBI.Application.DTOModels;
/// <summary>
/// Read model for a deleted record, reconstructed from its most recent
/// "Deleted" RecordHistory entry (the record row itself no longer exists).
/// </summary>
public class DeletedRecordDto
{
    /// <summary>Id of the (now removed) record.</summary>
    public Guid RecordId { get; set; }
    /// <summary>Code snapshot taken at deletion time.</summary>
    public string Code { get; set; } = string.Empty;
    /// <summary>Description snapshot taken at deletion time.</summary>
    public string? Desc1 { get; set; }
    /// <summary>When the deletion was recorded.</summary>
    public DateTime DeletedAt { get; set; }
    /// <summary>User who performed the deletion.</summary>
    public Guid DeletedById { get; set; }
    /// <summary>Display name of the deleting user; empty when unresolved.</summary>
    public string DeletedByName { get; set; } = string.Empty;
}

View File

@@ -0,0 +1,27 @@
namespace DiunaBI.Application.DTOModels;
/// <summary>
/// Read model for one entry in a record's audit history, including a
/// pre-rendered human-readable description of the change.
/// </summary>
public class RecordHistoryDto
{
    /// <summary>Primary key of the history entry.</summary>
    public Guid Id { get; set; }
    /// <summary>Record the change applies to.</summary>
    public Guid RecordId { get; set; }
    /// <summary>Layer the record belongs to.</summary>
    public Guid LayerId { get; set; }

    // When and who
    public DateTime ChangedAt { get; set; }
    public Guid ChangedById { get; set; }
    /// <summary>Resolved display name of the changing user; "Unknown" when unresolved.</summary>
    public string ChangedByName { get; set; } = string.Empty;

    // Type of change
    public string ChangeType { get; set; } = string.Empty; // "Created", "Updated", "Deleted"

    // Snapshot values (record state at the time of the change)
    public string Code { get; set; } = string.Empty;
    public string? Desc1 { get; set; }

    // What changed
    public string? ChangedFields { get; set; } // "Code, Desc1"
    public string? ChangesSummary { get; set; } // JSON: {"Code": {"old": "A", "new": "B"}}

    // Formatted display text, built server-side by FormatHistoryChange
    public string FormattedChange { get; set; } = string.Empty;
}

View File

@@ -0,0 +1,37 @@
using System;
namespace DiunaBI.Domain.Entities;
/// <summary>
/// Kind of change captured by a record-history entry.
/// Values are explicit because they are persisted as integers.
/// </summary>
public enum RecordChangeType
{
    /// <summary>The record was created.</summary>
    Created = 1,

    /// <summary>The record was modified.</summary>
    Updated = 2,

    /// <summary>The record was deleted.</summary>
    Deleted = 3
}
/// <summary>
/// Audit entity storing one change made to a record: the change type,
/// who made it and when, and a snapshot of the record's values at that point.
/// </summary>
public class RecordHistory
{
    /// <summary>Primary key of the history entry.</summary>
    public Guid Id { get; set; }

    /// <summary>Identifier of the record this entry refers to.</summary>
    public Guid RecordId { get; set; }

    /// <summary>Identifier of the layer containing the record.</summary>
    public Guid LayerId { get; set; }

    /// <summary>Timestamp of the change.</summary>
    public DateTime ChangedAt { get; set; }

    /// <summary>Identifier of the user who made the change.</summary>
    public Guid ChangedById { get; set; }

    /// <summary>Navigation property to the user who made the change, if loaded.</summary>
    public User? ChangedBy { get; set; }

    /// <summary>Kind of change (created / updated / deleted).</summary>
    public RecordChangeType ChangeType { get; set; }

    /// <summary>Snapshot of the record's Code at this point; never null, defaults to empty.</summary>
    public string Code { get; set; } = string.Empty;

    /// <summary>Snapshot of the record's Desc1 at this point, if any.</summary>
    public string? Desc1 { get; set; }

    /// <summary>Comma-separated names of the fields that changed, e.g. "Code,Desc1".</summary>
    public string? ChangedFields { get; set; }

    /// <summary>JSON object with per-field old/new values, e.g. {"Code": {"old": "A", "new": "B"}}.</summary>
    public string? ChangesSummary { get; set; }
}

View File

@@ -8,6 +8,7 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
public DbSet<User> Users { get; init; }
public DbSet<Layer> Layers { get; init; }
public DbSet<Record> Records { get; init; }
public DbSet<RecordHistory> RecordHistory { get; init; }
public DbSet<ProcessSource> ProcessSources { get; init; }
public DbSet<DataInbox> DataInbox { get; init; }
public DbSet<QueueJob> QueueJobs { get; init; }
@@ -75,6 +76,30 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
.HasForeignKey(x => x.LayerId)
.OnDelete(DeleteBehavior.Cascade);
// EF Core model configuration for the RecordHistory audit entity.
modelBuilder.Entity<RecordHistory>().HasKey(x => x.Id);
modelBuilder.Entity<RecordHistory>().Property(x => x.RecordId).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedAt).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedById).IsRequired();
// ChangeType enum is stored as its underlying int value.
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangeType).IsRequired().HasConversion<int>();
modelBuilder.Entity<RecordHistory>().Property(x => x.Code).IsRequired().HasMaxLength(50);
modelBuilder.Entity<RecordHistory>().Property(x => x.Desc1).HasMaxLength(10000);
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedFields).HasMaxLength(200);
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangesSummary).HasMaxLength(4000);
// Indexes for efficient history queries
modelBuilder.Entity<RecordHistory>()
    .HasIndex(x => new { x.RecordId, x.ChangedAt });
modelBuilder.Entity<RecordHistory>()
    .HasIndex(x => new { x.LayerId, x.ChangedAt });
// Restrict: deleting a user must not cascade-delete the audit trail.
modelBuilder.Entity<RecordHistory>()
    .HasOne(x => x.ChangedBy)
    .WithMany()
    .HasForeignKey(x => x.ChangedById)
    .OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<ProcessSource>().HasKey(x => new { x.LayerId, x.SourceId });
modelBuilder.Entity<ProcessSource>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<ProcessSource>().Property(x => x.SourceId).IsRequired();

View File

@@ -23,6 +23,7 @@
<PackageReference Include="Google.Apis.Sheets.v4" Version="1.68.0.3525" />
<PackageReference Include="Google.Apis.Drive.v3" Version="1.68.0.3490" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
</ItemGroup>
</Project>

Some files were not shown because too many files have changed in this diff Show More