Compare commits

...

18 Commits

Author SHA1 Message Date
f68e57ce3b Small UI fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m35s
Build Docker Images / build-and-push (push) Successful in 1m42s
2025-12-02 13:43:01 +01:00
e70a8dda6e Remember list filters 2025-12-02 13:23:03 +01:00
89859cd4a3 Record history is working 2025-12-02 13:14:09 +01:00
0c6848556b WIP: Record history 2025-12-01 18:37:09 +01:00
c8ded1f0a4 Edit Records 2025-12-01 17:56:17 +01:00
7ea5ed506e Filter Layers by Type
All checks were successful
Build Docker Images / test (push) Successful in 1m37s
Build Docker Images / build-and-push (push) Successful in 1m35s
2025-12-01 13:21:45 +01:00
4d7df85df1 DataInbox Detail 2025-12-01 13:00:01 +01:00
3d654d972e DataInbox list 2025-12-01 12:55:47 +01:00
a71b6feefc Pagination style fix 2025-12-01 12:35:22 +01:00
cb0d050ad4 Imports for 2025.12 2025-11-30 16:09:32 +01:00
24387bf96c debug
All checks were successful
Build Docker Images / test (push) Successful in 1m47s
Build Docker Images / build-and-push (push) Successful in 1m55s
2025-11-28 16:15:39 +01:00
87d19dcadf App logo
All checks were successful
Build Docker Images / test (push) Successful in 1m43s
Build Docker Images / build-and-push (push) Successful in 1m47s
2025-11-28 12:13:19 +01:00
a289690b6b Add custom app name per instance 2025-11-28 11:44:19 +01:00
57f1359c96 Build path fixes
All checks were successful
Build Docker Images / test (push) Successful in 1m31s
Build Docker Images / build-and-push (push) Successful in 1m37s
2025-11-28 11:29:38 +01:00
b0e77ec835 Enable Main build
Some checks failed
Build Docker Images / test (push) Failing after 26s
Build Docker Images / build-and-push (push) Failing after 11s
2025-11-28 11:26:58 +01:00
b3053b859a Last refactor steps (I hope) 2025-11-28 11:26:17 +01:00
07423023a0 after refactor cleanup 2025-11-28 11:21:22 +01:00
5db6de1503 Merge pull request 'ddd-refactor' (#2) from ddd-refactor into main
Some checks failed
BuildApp / build-frontend (push) Successful in 1m54s
BuildApp / build-backend (push) Failing after 26s
Reviewed-on: #2
2025-11-28 11:14:42 +01:00
329 changed files with 2830 additions and 13813 deletions

View File

@@ -1,85 +1,57 @@
name: BuildApp
name: Build Docker Images
on:
workflow_dispatch: {}
push:
branches:
- main
workflow_dispatch: {}
concurrency:
group: build-${{ github.ref }}
cancel-in-progress: false
jobs:
build-frontend:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Use Node.js 20
uses: https://github.com/actions/setup-node@v4
with:
node-version: 20
- name: Install Angular CLI
run: npm install -g @angular/cli
- name: Install PNPM
run: npm install -g pnpm
- name: Install dependencies
working-directory: src/Frontend
run: pnpm install
- name: Build Angular
working-directory: src/Frontend
run: ng build --configuration=production
- name: Upload artifact (frontend)
uses: https://github.com/actions/upload-artifact@v3
with:
name: frontend
path: src/Frontend/dist
if-no-files-found: error
retention-days: 7
build-backend:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Setup .NET 8
- name: Setup .NET 10
uses: https://github.com/actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
dotnet-version: 10.0.x
- name: Restore dependencies
working-directory: src/Backend
run: dotnet restore DiunaBI.sln
working-directory: .
run: |
dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins
working-directory: src/Backend
working-directory: .
run: |
set -e
dotnet build DiunaBI.sln --configuration Release
# Build only required projects — skip DiunaBI.UI.Mobile
dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release
dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release
mkdir -p DiunaBI.Tests/bin/Release/net8.0/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Core.dll DiunaBI.Tests/bin/Release/net8.0/Plugins/
ls -la DiunaBI.Tests/bin/Release/net8.0/Plugins/
mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
- name: Run Tests
working-directory: src/Backend
working-directory: .
run: |
dotnet add DiunaBI.Tests/DiunaBI.Tests.csproj package coverlet.collector
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \
--no-restore \
--logger "trx;LogFileName=test-results.trx" \
--collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly"
--filter "Category!=LocalOnly" || true
- name: Publish Test Results
uses: https://github.com/actions/upload-artifact@v3
@@ -87,33 +59,72 @@ jobs:
with:
name: test-results
path: |
src/Backend/DiunaBI.Tests/TestResults/*.trx
src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
DiunaBI.Tests/TestResults/*.trx
DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
retention-days: 7
- name: Publish WebAPI
if: success()
working-directory: src/Backend
run: |
dotnet publish DiunaBI.WebAPI/DiunaBI.WebAPI.csproj \
--configuration Release \
--framework net8.0 \
--self-contained false \
--output ../../build/webapi
mkdir -p ../../build/webapi/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net8.0/DiunaBI.Plugins.Morska.dll ../../build/webapi/Plugins/
ls -la ../../build/webapi/Plugins/
build-and-push:
runs-on: ubuntu-latest
needs: test
if: success() || failure()
- name: Clean up sensitive files
working-directory: build/webapi
steps:
- name: Debug secrets
run: |
rm -f appsettings.Development.json || true
rm -f client_secrets.Development.json || true
echo "User length: ${#REGISTRY_USER}"
echo "Token length: ${#REGISTRY_TOKEN}"
env:
REGISTRY_USER: ${{ secrets.REGISTRY_USER }}
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
- name: Upload artifact (webapi)
uses: https://github.com/actions/upload-artifact@v3
with:
name: webapi
path: build/webapi
if-no-files-found: error
retention-days: 7
- name: Checkout code
uses: https://github.com/actions/checkout@v4
- name: Set up Docker Buildx
uses: https://github.com/docker/setup-buildx-action@v3
- name: Log in to Gitea Container Registry
run: |
echo "${{ secrets.REGISTRY_TOKEN }}" | docker login code.bim-it.pl -u "${{ secrets.REGISTRY_USER }}" --password-stdin
- name: Build and push API image
working-directory: .
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.API/Dockerfile \
-t code.bim-it.pl/mz/diunabi-api:latest \
-t code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }} \
--push \
.
- name: Build and push UI image
working-directory: .
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.UI.Web/Dockerfile \
-t code.bim-it.pl/mz/diunabi-ui:latest \
-t code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }} \
--push \
.
- name: Output build info
run: |
echo "## 🐳 Docker Images Built" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:latest" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:latest" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY

View File

@@ -1,130 +0,0 @@
name: Build Docker Images
on:
push:
branches:
- ddd-refactor
workflow_dispatch: {}
concurrency:
group: build-${{ github.ref }}
cancel-in-progress: false
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Setup .NET 10
uses: https://github.com/actions/setup-dotnet@v4
with:
dotnet-version: 10.0.x
- name: Restore dependencies
working-directory: src/Backend
run: |
dotnet restore DiunaBI.API/DiunaBI.API.csproj
dotnet restore DiunaBI.UI.Web/DiunaBI.UI.Web.csproj
dotnet restore DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj
dotnet restore DiunaBI.Tests/DiunaBI.Tests.csproj
- name: Build solution and prepare plugins
working-directory: src/Backend
run: |
set -e
# Build only required projects — skip DiunaBI.UI.Mobile
dotnet build DiunaBI.API/DiunaBI.API.csproj --configuration Release
dotnet build DiunaBI.UI.Web/DiunaBI.UI.Web.csproj --configuration Release
dotnet build DiunaBI.Plugins.Morska/DiunaBI.Plugins.Morska.csproj --configuration Release
mkdir -p DiunaBI.Tests/bin/Release/net10.0/Plugins
cp DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
ls -la DiunaBI.Tests/bin/Release/net10.0/Plugins/ || true
- name: Run Tests
working-directory: src/Backend
run: |
dotnet test DiunaBI.Tests/DiunaBI.Tests.csproj \
--configuration Release \
--no-restore \
--logger "trx;LogFileName=test-results.trx" \
--collect:"XPlat Code Coverage" \
--filter "Category!=LocalOnly" || true
- name: Publish Test Results
uses: https://github.com/actions/upload-artifact@v3
if: success() || failure()
with:
name: test-results
path: |
src/Backend/DiunaBI.Tests/TestResults/*.trx
src/Backend/DiunaBI.Tests/TestResults/**/coverage.cobertura.xml
retention-days: 7
build-and-push:
runs-on: ubuntu-latest
needs: test
if: success() || failure()
steps:
- name: Debug secrets
run: |
echo "User length: ${#REGISTRY_USER}"
echo "Token length: ${#REGISTRY_TOKEN}"
env:
REGISTRY_USER: ${{ secrets.REGISTRY_USER }}
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
- name: Checkout code
uses: https://github.com/actions/checkout@v4
- name: Set up Docker Buildx
uses: https://github.com/docker/setup-buildx-action@v3
- name: Log in to Gitea Container Registry
run: |
echo "${{ secrets.REGISTRY_TOKEN }}" | docker login code.bim-it.pl -u "${{ secrets.REGISTRY_USER }}" --password-stdin
- name: Build and push API image
working-directory: src/Backend
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.API/Dockerfile \
-t code.bim-it.pl/mz/diunabi-api:latest \
-t code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }} \
--push \
.
- name: Build and push UI image
working-directory: src/Backend
run: |
docker buildx build \
--platform linux/amd64 \
--label "org.opencontainers.image.source=https://code.bim-it.pl/mz/DiunaBI" \
-f DiunaBI.UI.Web/Dockerfile \
-t code.bim-it.pl/mz/diunabi-ui:latest \
-t code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }} \
--push \
.
- name: Output build info
run: |
echo "## 🐳 Docker Images Built" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Build ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** ${{ github.sha }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Images pushed:" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "# Latest (for release)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:latest" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:latest" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific build (for rollback)" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-api:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "docker pull code.bim-it.pl/mz/diunabi-ui:build-${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY

View File

@@ -1,122 +0,0 @@
name: ReleaseApp (JS finder + download)
on:
workflow_dispatch: {}
jobs:
release:
runs-on: ubuntu-latest
env:
GITEA_BASE_URL: https://code.bim-it.pl
OWNER: mz
REPO: DiunaBI
REQUIRED_ARTIFACTS: frontend,webapi
SCAN_LIMIT: "100"
steps:
- name: Checkout
uses: https://github.com/actions/checkout@v4
- name: Use Node.js 20
uses: https://github.com/actions/setup-node@v4
with:
node-version: 20
- name: Install unzip (for extraction)
run: |
sudo apt-get update
sudo apt-get install -y unzip
- name: Resolve latest run that exposes required artifacts
id: resolve
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
run: |
node .gitea/scripts/getLatestRunWithArtifacts.js
echo "Resolved run_id: $(cat .gitea/.cache/run_id)"
echo "run_id=$(cat .gitea/.cache/run_id)" >> "$GITHUB_OUTPUT"
- name: Download frontend artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: frontend
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/frontend
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Download webapi artifact
env:
GITEA_PAT: ${{ secrets.GITEATOKEN }}
ARTIFACT_NAME: webapi
RUN_ID: ${{ steps.resolve.outputs.run_id }}
OUTPUT_DIR: artifacts/webapi
run: |
node .gitea/scripts/downloadArtifactByName.js
- name: Show artifact structure
run: |
echo "::group::frontend"
ls -laR artifacts/frontend || true
echo "::endgroup::"
echo "::group::webapi"
ls -laR artifacts/webapi || true
echo "::endgroup::"
- name: Tokenize (replace #{...}# from secrets/vars)
env:
SECRETS: ${{ toJson(secrets) }}
VARIABLES: ${{ toJson(vars) }}
RUN_ID: ${{ steps.resolve.outputs.run_id }}
run: |
set -euo pipefail
node .gitea/scripts/replaceTokens.js
- name: Package artifacts as ZIPs
run: |
mkdir -p build
(cd artifacts/frontend && zip -rq ../../build/DiunaBI-Morska-Frontend.zip .)
(cd artifacts/webapi && zip -rq ../../build/DiunaBI-Morska-WebApi.zip .)
ls -la build
- name: Upload artifacts to remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEARUNNER_SSH_KEY }}
SSH_USER: mz
SSH_HOST: bim-it.pl
REMOTE_DIR: deployment
run: |
set -euo pipefail
umask 077
echo "$SSH_PRIVATE_KEY" > private_key
chmod 600 private_key
mkdir -p ~/.ssh
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
ssh -i private_key "$SSH_USER@$SSH_HOST" "mkdir -p ~/$REMOTE_DIR"
scp -i private_key build/DiunaBI-Morska-Frontend.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
scp -i private_key build/DiunaBI-Morska-WebApi.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
shred -u private_key
- name: Run release script on remote server
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEARUNNER_SSH_KEY }}
SSH_USER: mz
SSH_HOST: bim-it.pl
run: |
set -euo pipefail
umask 077
echo "$SSH_PRIVATE_KEY" > private_key
chmod 600 private_key
mkdir -p ~/.ssh
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
ssh -i private_key "$SSH_USER@$SSH_HOST" "./deployment/DiunaBI-Morska.Release.sh"
shred -u private_key

View File

@@ -0,0 +1,9 @@
PUT https://pedrollopl.diunabi.com/api/DataInbox/Add/8kL2mN4pQ6rojshf8704i34p4eim1hs
Content-Type: application/json
Authorization: Basic cGVkcm9sbG9wbDo0MjU4dlc2eFk4TjRwUQ==
{
"Source": "morska.import",
"Name": "morska.d3.importer",
"Data": "eyJrZXkiOiAidmFsdWUifQ=="
}
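
A minimal C# client sketch for the request above, assuming an HttpClient-based caller; the inbox key in the URL and the Basic credential are shown as placeholders here, and the illustrative payload {"key": "value"} is exactly what the Data value in the file decodes to.

using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;

// Build the body: Data carries the actual payload as Base64-encoded JSON.
var payload = Encoding.UTF8.GetBytes("""{"key": "value"}""");
var body = new
{
    Source = "morska.import",
    Name = "morska.d3.importer",
    Data = Convert.ToBase64String(payload)   // -> "eyJrZXkiOiAidmFsdWUifQ=="
};

using var client = new HttpClient();
// Placeholder credential; substitute whatever Basic token the inbox expects.
client.DefaultRequestHeaders.Authorization =
    new AuthenticationHeaderValue("Basic", "<base64 user:password>");

var response = await client.PutAsync(
    "https://pedrollopl.diunabi.com/api/DataInbox/Add/<inbox-key>",   // key as in the .http file above
    new StringContent(JsonSerializer.Serialize(body), Encoding.UTF8, "application/json"));
response.EnsureSuccessStatusCode();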

View File

@@ -2,7 +2,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'D3';
DECLARE @Month INT = 11;
DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
IF @Type NOT IN ('D3')
@@ -14,7 +14,7 @@ END;
DECLARE @ImportType NVARCHAR(20) = 'Import-D3';
DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd');
DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');
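
The @EndDate change above handles the year rollover for December imports; a brief C# equivalent of the new expression, included only to illustrate the edge case (DATEFROMPARTS would otherwise be asked for month 13):

static DateTime EndDate(int year, int month) =>
    month == 12
        ? new DateTime(year + 1, 1, 5)   // December: 5th of January of the following year
        : new DateTime(year, month + 1, 5);

// EndDate(2025, 12) -> 2026-01-05; EndDate(2025, 11) -> 2025-12-05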

View File

@@ -2,9 +2,9 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'D1';
DECLARE @Month INT = 11;
DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @MonthName NVARCHAR(20) = 'Pazdziernik_2025';
DECLARE @MonthName NVARCHAR(20) = 'Grudzien_2025';
IF @Type NOT IN ('K5', 'PU', 'AK', 'FK', 'D1', 'FK2')
BEGIN
@@ -27,7 +27,7 @@ SET @ImportType =
ELSE 'Standard'
END;
DECLARE @StartDate NVARCHAR(10) = FORMAT(DATEADD(DAY, 24, DATEADD(MONTH, @Month - 2, DATEFROMPARTS(YEAR(GETDATE()), 1, 1))), 'yyyy.MM.dd');
DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(YEAR(GETDATE()), @Month + 1, 5), 'yyyy.MM.dd');
DECLARE @EndDate NVARCHAR(10) = FORMAT(DATEFROMPARTS(CASE WHEN @Month = 12 THEN @Year + 1 ELSE @Year END, CASE WHEN @Month = 12 THEN 1 ELSE @Month + 1 END, 5), 'yyyy.MM.dd');
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);
DECLARE @CurrentTimestamp NVARCHAR(14) = FORMAT(GETDATE(), 'yyyyMMddHHmm');
DECLARE @FormattedMonth NVARCHAR(2) = FORMAT(@Month, '00');

View File

@@ -2,7 +2,7 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Month INT = 11;
DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'FK';
DECLARE @Month INT = 11;
DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
IF @Type NOT IN ('K5', 'PU', 'AK', 'FK')

View File

@@ -4,7 +4,7 @@ DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Type NVARCHAR(3) = 'FK2';
DECLARE @Month INT = 11;
DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -2,7 +2,7 @@
DECLARE @JustForDebug TINYINT = 0;
-- SETUP VARIABLES
DECLARE @Month INT = 11;
DECLARE @Month INT = 12;
DECLARE @Year INT = 2025;
DECLARE @Number INT = (SELECT COUNT(id) + 1 FROM [diunabi-morska].[dbo].[Layers]);

View File

@@ -1,9 +1,11 @@
using DiunaBI.API.Services;
using DiunaBI.Domain.Entities;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
namespace DiunaBI.API.Controllers;
[AllowAnonymous]
[ApiController]
[Route("[controller]")]
public class AuthController(

View File

@@ -4,9 +4,12 @@ using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using DiunaBI.Infrastructure.Data;
using DiunaBI.Domain.Entities;
using DiunaBI.Application.DTOModels;
using DiunaBI.Application.DTOModels.Common;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class DataInboxController : Controller
@@ -89,17 +92,86 @@ public class DataInboxController : Controller
}
[HttpGet]
public IActionResult GetAll()
[Route("GetAll")]
public IActionResult GetAll([FromQuery] int start, [FromQuery] int limit, [FromQuery] string? search)
{
try
{
var dataInbox = _db.DataInbox.AsNoTracking().ToList();
_logger.LogDebug("DataInbox: Retrieved {Count} records", dataInbox.Count);
return Ok(dataInbox);
var query = _db.DataInbox.AsQueryable();
if (!string.IsNullOrEmpty(search))
{
query = query.Where(x => x.Name.Contains(search) || x.Source.Contains(search));
}
var totalCount = query.Count();
var items = query
.OrderByDescending(x => x.CreatedAt)
.Skip(start)
.Take(limit)
.AsNoTracking()
.Select(x => new DataInboxDto
{
Id = x.Id,
Name = x.Name,
Source = x.Source,
Data = x.Data,
CreatedAt = x.CreatedAt
})
.ToList();
var pagedResult = new PagedResult<DataInboxDto>
{
Items = items,
TotalCount = totalCount,
Page = (start / limit) + 1,
PageSize = limit
};
_logger.LogDebug("GetAll: Retrieved {Count} of {TotalCount} data inbox items (page {Page}) with filter search={Search}",
items.Count, totalCount, pagedResult.Page, search);
return Ok(pagedResult);
}
catch (Exception e)
{
_logger.LogError(e, "DataInbox: Error retrieving records");
_logger.LogError(e, "GetAll: Error retrieving data inbox items");
return BadRequest(e.ToString());
}
}
[HttpGet]
[Route("{id:guid}")]
public IActionResult Get(Guid id)
{
try
{
var dataInbox = _db.DataInbox
.AsNoTracking()
.FirstOrDefault(x => x.Id == id);
if (dataInbox == null)
{
_logger.LogWarning("Get: Data inbox item {Id} not found", id);
return NotFound();
}
var dto = new DataInboxDto
{
Id = dataInbox.Id,
Name = dataInbox.Name,
Source = dataInbox.Source,
Data = dataInbox.Data,
CreatedAt = dataInbox.CreatedAt
};
_logger.LogDebug("Get: Retrieved data inbox item {Id} {Name}", id, dataInbox.Name);
return Ok(dto);
}
catch (Exception e)
{
_logger.LogError(e, "Get: Error retrieving data inbox item {Id}", id);
return BadRequest(e.ToString());
}
}
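
A minimal client-side sketch for the new paged GetAll endpoint, assuming HttpClient with System.Net.Http.Json; the DataInboxDto and PagedResult<T> shapes are copied from the diff above, while the base address and search value are placeholders.

using System.Net.Http;
using System.Net.Http.Json;

// GET /DataInbox/GetAll?start=0&limit=50&search=morska
var client = new HttpClient { BaseAddress = new Uri("https://example-host/") };   // placeholder host
var page = await client.GetFromJsonAsync<PagedResult<DataInboxDto>>(
    "DataInbox/GetAll?start=0&limit=50&search=morska");
Console.WriteLine($"{page!.Items.Count} of {page.TotalCount} items, page {page.Page}");

public class DataInboxDto
{
    public Guid Id { get; set; }
    public string Name { get; set; } = string.Empty;
    public string Source { get; set; } = string.Empty;
    public string Data { get; set; } = string.Empty;
    public DateTime CreatedAt { get; set; }
}

public class PagedResult<T>
{
    public List<T> Items { get; set; } = new();
    public int TotalCount { get; set; }
    public int Page { get; set; }
    public int PageSize { get; set; }
}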

View File

@@ -1,5 +1,6 @@
using System.Globalization;
using System.Text;
using System.Text.Json;
using Google.Apis.Sheets.v4;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
@@ -12,6 +13,7 @@ using DiunaBI.Infrastructure.Services;
namespace DiunaBI.API.Controllers;
[Authorize]
[ApiController]
[Route("[controller]")]
public class LayersController : Controller
@@ -727,4 +729,398 @@ public class LayersController : Controller
throw;
}
}
// Record CRUD operations
[HttpPost]
[Route("{layerId:guid}/records")]
public IActionResult CreateRecord(Guid layerId, [FromBody] RecordDto recordDto)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("CreateRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("CreateRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("CreateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
if (string.IsNullOrWhiteSpace(recordDto.Code))
{
return BadRequest("Code is required");
}
if (string.IsNullOrWhiteSpace(recordDto.Desc1))
{
return BadRequest("Desc1 is required");
}
var record = new Record
{
Id = Guid.NewGuid(),
Code = recordDto.Code,
Desc1 = recordDto.Desc1,
LayerId = layerId,
CreatedAt = DateTime.UtcNow,
ModifiedAt = DateTime.UtcNow,
CreatedById = Guid.Parse(userId),
ModifiedById = Guid.Parse(userId),
IsDeleted = false
};
_db.Records.Add(record);
// Capture history
CaptureRecordHistory(record, RecordChangeType.Created, Guid.Parse(userId));
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("CreateRecord: Created record {RecordId} in layer {LayerId}", record.Id, layerId);
return Ok(new RecordDto
{
Id = record.Id,
Code = record.Code,
Desc1 = record.Desc1,
LayerId = record.LayerId,
CreatedAt = record.CreatedAt,
ModifiedAt = record.ModifiedAt,
CreatedById = record.CreatedById,
ModifiedById = record.ModifiedById
});
}
catch (Exception e)
{
_logger.LogError(e, "CreateRecord: Error creating record in layer {LayerId}", layerId);
return BadRequest(e.ToString());
}
}
[HttpPut]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult UpdateRecord(Guid layerId, Guid recordId, [FromBody] RecordDto recordDto)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("UpdateRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("UpdateRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("UpdateRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
var record = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
if (record == null)
{
_logger.LogWarning("UpdateRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
return NotFound("Record not found");
}
if (string.IsNullOrWhiteSpace(recordDto.Code))
{
return BadRequest("Code is required");
}
if (string.IsNullOrWhiteSpace(recordDto.Desc1))
{
return BadRequest("Desc1 is required");
}
// Capture old values before updating
var oldCode = record.Code;
var oldDesc1 = record.Desc1;
record.Desc1 = recordDto.Desc1;
record.ModifiedAt = DateTime.UtcNow;
record.ModifiedById = Guid.Parse(userId);
// Capture history
CaptureRecordHistory(record, RecordChangeType.Updated, Guid.Parse(userId), oldCode, oldDesc1);
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("UpdateRecord: Updated record {RecordId} in layer {LayerId}", recordId, layerId);
return Ok(new RecordDto
{
Id = record.Id,
Code = record.Code,
Desc1 = record.Desc1,
LayerId = record.LayerId,
CreatedAt = record.CreatedAt,
ModifiedAt = record.ModifiedAt,
CreatedById = record.CreatedById,
ModifiedById = record.ModifiedById
});
}
catch (Exception e)
{
_logger.LogError(e, "UpdateRecord: Error updating record {RecordId} in layer {LayerId}", recordId, layerId);
return BadRequest(e.ToString());
}
}
[HttpDelete]
[Route("{layerId:guid}/records/{recordId:guid}")]
public IActionResult DeleteRecord(Guid layerId, Guid recordId)
{
try
{
var userId = Request.Headers["UserId"].ToString();
if (string.IsNullOrEmpty(userId))
{
_logger.LogWarning("DeleteRecord: No UserId in request headers");
return Unauthorized();
}
var layer = _db.Layers.FirstOrDefault(x => x.Id == layerId && !x.IsDeleted);
if (layer == null)
{
_logger.LogWarning("DeleteRecord: Layer {LayerId} not found", layerId);
return NotFound("Layer not found");
}
if (layer.Type != Domain.Entities.LayerType.Dictionary && layer.Type != Domain.Entities.LayerType.Administration)
{
_logger.LogWarning("DeleteRecord: Layer {LayerId} is not editable (type: {LayerType})", layerId, layer.Type);
return BadRequest("Only Dictionary and Administration layers can be edited");
}
var record = _db.Records.FirstOrDefault(x => x.Id == recordId && x.LayerId == layerId);
if (record == null)
{
_logger.LogWarning("DeleteRecord: Record {RecordId} not found in layer {LayerId}", recordId, layerId);
return NotFound("Record not found");
}
// Capture history before deleting
CaptureRecordHistory(record, RecordChangeType.Deleted, Guid.Parse(userId));
_db.Records.Remove(record);
// Update layer modified info
layer.ModifiedAt = DateTime.UtcNow;
layer.ModifiedById = Guid.Parse(userId);
_db.SaveChanges();
_logger.LogInformation("DeleteRecord: Deleted record {RecordId} from layer {LayerId}", recordId, layerId);
return Ok();
}
catch (Exception e)
{
_logger.LogError(e, "DeleteRecord: Error deleting record {RecordId} from layer {LayerId}", recordId, layerId);
return BadRequest(e.ToString());
}
}
[HttpGet]
[Route("{layerId:guid}/records/{recordId:guid}/history")]
public IActionResult GetRecordHistory(Guid layerId, Guid recordId)
{
try
{
var history = _db.RecordHistory
.Include(h => h.ChangedBy)
.Where(h => h.RecordId == recordId && h.LayerId == layerId)
.OrderByDescending(h => h.ChangedAt)
.AsNoTracking()
.Select(h => new RecordHistoryDto
{
Id = h.Id,
RecordId = h.RecordId,
LayerId = h.LayerId,
ChangedAt = h.ChangedAt,
ChangedById = h.ChangedById,
ChangedByName = h.ChangedBy != null ? h.ChangedBy.UserName ?? h.ChangedBy.Email : "Unknown",
ChangeType = h.ChangeType.ToString(),
Code = h.Code,
Desc1 = h.Desc1,
ChangedFields = h.ChangedFields,
ChangesSummary = h.ChangesSummary,
FormattedChange = FormatHistoryChange(h)
})
.ToList();
_logger.LogDebug("GetRecordHistory: Retrieved {Count} history entries for record {RecordId}", history.Count, recordId);
return Ok(history);
}
catch (Exception e)
{
_logger.LogError(e, "GetRecordHistory: Error retrieving history for record {RecordId}", recordId);
return BadRequest(e.ToString());
}
}
[HttpGet]
[Route("{layerId:guid}/records/deleted")]
public IActionResult GetDeletedRecords(Guid layerId)
{
try
{
// Get the most recent "Deleted" history entry for each unique RecordId in this layer
// First, get all deleted record history entries
var deletedHistoryEntries = _db.RecordHistory
.Where(h => h.LayerId == layerId && h.ChangeType == RecordChangeType.Deleted)
.ToList();
// Group in memory and get the most recent deletion for each record
var mostRecentDeletes = deletedHistoryEntries
.GroupBy(h => h.RecordId)
.Select(g => g.OrderByDescending(h => h.ChangedAt).First())
.ToList();
// Get all unique user IDs from the history entries
var userIds = mostRecentDeletes.Select(h => h.ChangedById).Distinct().ToList();
// Load the users
var users = _db.Users
.Where(u => userIds.Contains(u.Id))
.ToDictionary(u => u.Id, u => u.UserName ?? string.Empty);
// Build the DTOs
var deletedRecords = mostRecentDeletes
.Select(h => new DeletedRecordDto
{
RecordId = h.RecordId,
Code = h.Code,
Desc1 = h.Desc1,
DeletedAt = h.ChangedAt,
DeletedById = h.ChangedById,
DeletedByName = users.TryGetValue(h.ChangedById, out var userName) ? userName : string.Empty
})
.OrderByDescending(d => d.DeletedAt)
.ToList();
_logger.LogDebug("GetDeletedRecords: Retrieved {Count} deleted records for layer {LayerId}", deletedRecords.Count, layerId);
return Ok(deletedRecords);
}
catch (Exception e)
{
_logger.LogError(e, "GetDeletedRecords: Error retrieving deleted records for layer {LayerId}", layerId);
return BadRequest(e.ToString());
}
}
// Helper method to capture record history
private void CaptureRecordHistory(Record record, RecordChangeType changeType, Guid userId, string? oldCode = null, string? oldDesc1 = null)
{
var changedFields = new List<string>();
var changesSummary = new Dictionary<string, Dictionary<string, string?>>();
if (changeType == RecordChangeType.Updated)
{
if (oldCode != record.Code)
{
changedFields.Add("Code");
changesSummary["Code"] = new Dictionary<string, string?>
{
["old"] = oldCode,
["new"] = record.Code
};
}
if (oldDesc1 != record.Desc1)
{
changedFields.Add("Desc1");
changesSummary["Desc1"] = new Dictionary<string, string?>
{
["old"] = oldDesc1,
["new"] = record.Desc1
};
}
}
var history = new RecordHistory
{
Id = Guid.NewGuid(),
RecordId = record.Id,
LayerId = record.LayerId,
ChangedAt = DateTime.UtcNow,
ChangedById = userId,
ChangeType = changeType,
Code = record.Code,
Desc1 = record.Desc1,
ChangedFields = changedFields.Any() ? string.Join(", ", changedFields) : null,
ChangesSummary = changesSummary.Any() ? JsonSerializer.Serialize(changesSummary) : null
};
_db.RecordHistory.Add(history);
_logger.LogInformation("CaptureRecordHistory: Captured {ChangeType} for record {RecordId}", changeType, record.Id);
}
// Helper method to format history change for display
private static string FormatHistoryChange(RecordHistory h)
{
if (h.ChangeType == RecordChangeType.Created)
{
return $"Created record with Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
}
if (h.ChangeType == RecordChangeType.Deleted)
{
return $"Deleted record Code: \"{h.Code}\", Description: \"{h.Desc1}\"";
}
// Updated
if (!string.IsNullOrEmpty(h.ChangesSummary))
{
try
{
var changes = JsonSerializer.Deserialize<Dictionary<string, Dictionary<string, string?>>>(h.ChangesSummary);
if (changes != null)
{
var parts = new List<string>();
foreach (var (field, values) in changes)
{
var oldVal = values.GetValueOrDefault("old") ?? "empty";
var newVal = values.GetValueOrDefault("new") ?? "empty";
parts.Add($"{field}: \"{oldVal}\" → \"{newVal}\"");
}
return $"Updated: {string.Join(", ", parts)}";
}
}
catch
{
// Fall back to simple message
}
}
return $"Updated {h.ChangedFields ?? "record"}";
}
}
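
For reference, a small sketch of the ChangesSummary payload that CaptureRecordHistory serializes and the string FormatHistoryChange produces from it; the field values are illustrative.

using System.Text.Json;

var changes = new Dictionary<string, Dictionary<string, string?>>
{
    ["Code"]  = new() { ["old"] = "A001", ["new"] = "A002" },
    ["Desc1"] = new() { ["old"] = "Old description", ["new"] = "New description" }
};
var changesSummary = JsonSerializer.Serialize(changes);
// Stored as: {"Code":{"old":"A001","new":"A002"},"Desc1":{"old":"Old description","new":"New description"}}
// FormatHistoryChange renders it as:
// Updated: Code: "A001" → "A002", Desc1: "Old description" → "New description"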

View File

@@ -1,7 +1,7 @@
# Stage 1: Build
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
WORKDIR /src/Backend
WORKDIR /
# Copy solution and all project files for restore
COPY DiunaBI.sln ./
@@ -18,16 +18,16 @@ RUN dotnet restore DiunaBI.API/DiunaBI.API.csproj
COPY . .
# Build plugin first
WORKDIR /src/Backend/DiunaBI.Plugins.Morska
WORKDIR /DiunaBI.Plugins.Morska
RUN dotnet build -c Release
# Build and publish API
WORKDIR /src/Backend/DiunaBI.API
WORKDIR /DiunaBI.API
RUN dotnet publish -c Release -o /app/publish --no-restore
# Copy plugin DLL to publish output
RUN mkdir -p /app/publish/Plugins && \
cp /src/Backend/DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll /app/publish/Plugins/
cp /DiunaBI.Plugins.Morska/bin/Release/net10.0/DiunaBI.Plugins.Morska.dll /app/publish/Plugins/
# Stage 2: Runtime
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS runtime

View File

@@ -177,26 +177,67 @@ else
pluginManager.LoadPluginsFromDirectory(pluginsPath);
app.Use(async (context, next) =>
{
var token = context.Request.Headers.Authorization.ToString();
if (token.Length > 0
&& !context.Request.Path.ToString().Contains("getForPowerBI")
&& !context.Request.Path.ToString().Contains("getConfiguration")
&& !context.Request.Path.ToString().Contains("DataInbox/Add"))
{
var handler = new JwtSecurityTokenHandler();
var data = handler.ReadJwtToken(token.Split(' ')[1]);
context.Request.Headers.Append("UserId", new Microsoft.Extensions.Primitives.StringValues(data.Subject));
}
await next(context);
});
app.UseCors("CORSPolicy");
app.UseAuthentication();
app.UseAuthorization();
// Middleware to extract UserId from JWT token AFTER authentication
// This must run after UseAuthentication() so the JWT is already validated
app.Use(async (context, next) =>
{
var logger = context.RequestServices.GetRequiredService<ILogger<Program>>();
logger.LogInformation("🔍 UserId Extraction Middleware - Path: {Path}, Method: {Method}",
context.Request.Path, context.Request.Method);
var token = context.Request.Headers.Authorization.ToString();
logger.LogInformation("🔍 Authorization header: {Token}",
string.IsNullOrEmpty(token) ? "NULL/EMPTY" : $"{token[..Math.Min(30, token.Length)]}...");
if (!string.IsNullOrEmpty(token) && token.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
{
try
{
var handler = new JwtSecurityTokenHandler();
var jwtToken = handler.ReadJwtToken(token.Split(' ')[1]);
// Try to get UserId from Subject claim first, then fall back to NameIdentifier
var userId = jwtToken.Subject;
if (string.IsNullOrEmpty(userId))
{
// Try NameIdentifier claim (ClaimTypes.NameIdentifier)
var nameIdClaim = jwtToken.Claims.FirstOrDefault(c =>
c.Type == "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier" ||
c.Type == "nameid");
userId = nameIdClaim?.Value;
}
logger.LogInformation("🔍 JWT UserId: {UserId}", userId ?? "NULL");
if (!string.IsNullOrEmpty(userId))
{
// Use indexer to set/replace header value instead of Append
context.Request.Headers["UserId"] = userId;
logger.LogInformation("✅ Set UserId header to: {UserId}", userId);
}
else
{
logger.LogWarning("❌ UserId not found in JWT claims");
}
}
catch (Exception ex)
{
logger.LogError(ex, "❌ Failed to extract UserId from JWT token");
}
}
else
{
logger.LogWarning("❌ No valid Bearer token found");
}
await next(context);
});
app.MapControllers();
app.MapGet("/health", () => Results.Ok(new { status = "OK", timestamp = DateTime.UtcNow }))

View File

@@ -0,0 +1,17 @@
namespace DiunaBI.Application.DTOModels;
public class DataInboxDto
{
public Guid Id { get; set; }
public string Name { get; set; } = string.Empty;
public string Source { get; set; } = string.Empty;
public string Data { get; set; } = string.Empty;
public DateTime CreatedAt { get; set; }
}
public class DataInboxFilterRequest
{
public string? Search { get; set; }
public int Page { get; set; } = 1;
public int PageSize { get; set; } = 50;
}
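
The controller's GetAll takes start/limit rather than Page/PageSize, so a caller using DataInboxFilterRequest presumably converts between the two; a one-line sketch of that mapping (the helper name is hypothetical), consistent with the controller reporting Page = (start / limit) + 1.

static (int start, int limit) ToStartLimit(DataInboxFilterRequest request) =>
    ((request.Page - 1) * request.PageSize, request.PageSize);

// Page 1, PageSize 50 -> start = 0, limit = 50; Page 3, PageSize 50 -> start = 100, limit = 50.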

View File

@@ -0,0 +1,11 @@
namespace DiunaBI.Application.DTOModels;
public class DeletedRecordDto
{
public Guid RecordId { get; set; }
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
public DateTime DeletedAt { get; set; }
public Guid DeletedById { get; set; }
public string DeletedByName { get; set; } = string.Empty;
}

View File

@@ -0,0 +1,27 @@
namespace DiunaBI.Application.DTOModels;
public class RecordHistoryDto
{
public Guid Id { get; set; }
public Guid RecordId { get; set; }
public Guid LayerId { get; set; }
// When and who
public DateTime ChangedAt { get; set; }
public Guid ChangedById { get; set; }
public string ChangedByName { get; set; } = string.Empty;
// Type of change
public string ChangeType { get; set; } = string.Empty; // "Created", "Updated", "Deleted"
// Snapshot values
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
// What changed
public string? ChangedFields { get; set; } // "Code, Desc1"
public string? ChangesSummary { get; set; } // JSON: {"Code": {"old": "A", "new": "B"}}
// Formatted display text
public string FormattedChange { get; set; } = string.Empty;
}

View File

@@ -0,0 +1,37 @@
using System;
namespace DiunaBI.Domain.Entities;
public enum RecordChangeType
{
Created = 1,
Updated = 2,
Deleted = 3
}
public class RecordHistory
{
public Guid Id { get; set; }
// Reference to the original record
public Guid RecordId { get; set; }
public Guid LayerId { get; set; }
// When and who
public DateTime ChangedAt { get; set; }
public Guid ChangedById { get; set; }
public User? ChangedBy { get; set; }
// Type of change
public RecordChangeType ChangeType { get; set; }
// Snapshot of record state at this point
public string Code { get; set; } = string.Empty;
public string? Desc1 { get; set; }
// Comma-separated list of fields that changed (e.g., "Code,Desc1")
public string? ChangedFields { get; set; }
// JSON object with detailed changes: {"Code": {"old": "A", "new": "B"}}
public string? ChangesSummary { get; set; }
}

View File

@@ -8,6 +8,7 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
public DbSet<User> Users { get; init; }
public DbSet<Layer> Layers { get; init; }
public DbSet<Record> Records { get; init; }
public DbSet<RecordHistory> RecordHistory { get; init; }
public DbSet<ProcessSource> ProcessSources { get; init; }
public DbSet<DataInbox> DataInbox { get; init; }
public DbSet<QueueJob> QueueJobs { get; init; }
@@ -75,6 +76,30 @@ public class AppDbContext(DbContextOptions<AppDbContext> options) : DbContext(op
.HasForeignKey(x => x.LayerId)
.OnDelete(DeleteBehavior.Cascade);
modelBuilder.Entity<RecordHistory>().HasKey(x => x.Id);
modelBuilder.Entity<RecordHistory>().Property(x => x.RecordId).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedAt).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedById).IsRequired();
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangeType).IsRequired().HasConversion<int>();
modelBuilder.Entity<RecordHistory>().Property(x => x.Code).IsRequired().HasMaxLength(50);
modelBuilder.Entity<RecordHistory>().Property(x => x.Desc1).HasMaxLength(10000);
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangedFields).HasMaxLength(200);
modelBuilder.Entity<RecordHistory>().Property(x => x.ChangesSummary).HasMaxLength(4000);
// Indexes for efficient history queries
modelBuilder.Entity<RecordHistory>()
.HasIndex(x => new { x.RecordId, x.ChangedAt });
modelBuilder.Entity<RecordHistory>()
.HasIndex(x => new { x.LayerId, x.ChangedAt });
modelBuilder.Entity<RecordHistory>()
.HasOne(x => x.ChangedBy)
.WithMany()
.HasForeignKey(x => x.ChangedById)
.OnDelete(DeleteBehavior.Restrict);
modelBuilder.Entity<ProcessSource>().HasKey(x => new { x.LayerId, x.SourceId });
modelBuilder.Entity<ProcessSource>().Property(x => x.LayerId).IsRequired();
modelBuilder.Entity<ProcessSource>().Property(x => x.SourceId).IsRequired();

Some files were not shown because too many files have changed in this diff Show More