P2 processor is working like a charm!
All checks were successful
Build Docker Images / test (map[name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m23s
Build Docker Images / test (map[name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m28s
Build Docker Images / build-and-push (map[image_suffix:morska name:Morska plugin_project:DiunaBI.Plugins.Morska]) (push) Successful in 1m40s
Build Docker Images / build-and-push (map[image_suffix:pedrollopl name:PedrolloPL plugin_project:DiunaBI.Plugins.PedrolloPL]) (push) Successful in 1m37s

This commit is contained in:
2025-12-05 19:10:28 +01:00
parent 0eb2a457f7
commit 6b0f936f40
6 changed files with 663 additions and 45 deletions

View File

@@ -60,8 +60,8 @@ public class JobWorkerService : BackgroundService
return;
}
_logger.LogInformation("JobWorker: Processing job {JobId} - {LayerName} ({JobType})",
job.Id, job.LayerName, job.JobType);
_logger.LogInformation("JobWorker: Processing job {JobId} - {LayerName} ({JobType}) - Current RetryCount: {RetryCount}, MaxRetries: {MaxRetries}, Status: {Status}",
job.Id, job.LayerName, job.JobType, job.RetryCount, job.MaxRetries, job.Status);
// Mark job as running
job.Status = JobStatus.Running;
@@ -129,29 +129,35 @@ public class JobWorkerService : BackgroundService
{
_logger.LogError(ex, "JobWorker: Job {JobId} failed - {LayerName}", job.Id, job.LayerName);
job.RetryCount++;
job.LastError = ex.Message;
// Capture full error details including inner exceptions
job.LastError = GetFullErrorMessage(ex);
job.ModifiedAtUtc = DateTime.UtcNow;
if (job.RetryCount >= job.MaxRetries)
{
job.Status = JobStatus.Failed;
_logger.LogWarning("JobWorker: Job {JobId} marked as Failed after {RetryCount} attempts",
job.Id, job.RetryCount);
_logger.LogWarning("JobWorker: Job {JobId} marked as Failed - no more retries available (RetryCount: {RetryCount}, MaxRetries: {MaxRetries})",
job.Id, job.RetryCount, job.MaxRetries);
}
else
{
job.Status = JobStatus.Retrying;
// Exponential backoff: wait before retrying based on attempt number
var backoffDelay = GetBackoffDelay(job.RetryCount);
_logger.LogInformation("JobWorker: Job {JobId} will retry in {Delay} (attempt {RetryCount}/{MaxRetries})",
job.Id, backoffDelay, job.RetryCount, job.MaxRetries);
// Wait before marking as pending again
await Task.Delay(backoffDelay, stoppingToken);
job.Status = JobStatus.Pending;
// Exponential backoff: wait before retrying
var backoffDelay = GetBackoffDelay(job.RetryCount + 1);
_logger.LogInformation("JobWorker: Job {JobId} will retry in {Delay} (retry {RetryNumber} of {MaxRetries})",
job.Id, backoffDelay, job.RetryCount + 1, job.MaxRetries);
// Save current state with error message
await db.SaveChangesAsync(stoppingToken);
// Wait before next attempt
await Task.Delay(backoffDelay, stoppingToken);
// Increment retry count for next attempt
job.RetryCount++;
job.ModifiedAtUtc = DateTime.UtcNow;
}
}
finally
@@ -175,4 +181,18 @@ public class JobWorkerService : BackgroundService
_ => TimeSpan.FromMinutes(5) // 3rd+ retry: 5 minutes
};
}
/// <summary>
/// Flattens an exception and its entire <see cref="Exception.InnerException"/>
/// chain into a single string, with the messages joined by " → "
/// (outermost first).
/// </summary>
/// <param name="ex">The exception whose message chain is collected.</param>
/// <returns>All messages in the chain, outermost to innermost.</returns>
private static string GetFullErrorMessage(Exception ex)
{
    var parts = new List<string>();
    // Walk from the outermost exception down to the root cause.
    for (var current = ex; current is not null; current = current.InnerException)
    {
        parts.Add(current.Message);
    }
    return string.Join(" → ", parts);
}
}