// .gitea/scripts/replaceTokens.js
// Scans:
// - artifacts/api/appsettings.Production.json (if present)
// - artifacts/ui/appsettings.Production.json (if present)
// Tokens: #{NAME}# -> value from VARIABLES/SECRETS (NAME: uppercased, '-' -> '_')
// Additionally: #{BUILDID}# -> RUN_ID (from ENV)
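//
// Example (hypothetical names/values, shown only to illustrate the mapping):
//   "ConnectionString": "#{db-conn}#"  -> looked up as DB_CONN, first in
//                                         VARIABLES, then in SECRETS
//   "Build": "#{BUILDID}#"             -> replaced with RUN_ID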

const fs = require('fs');
const path = require('path');

// Replace every #{token}# occurrence in `file` using mapToken(TOKEN).
// Returns true if at least one substitution was made.
function replaceInFile(file, mapToken) {
  let data = fs.readFileSync(file, 'utf8');
  const re = /#\{(.*?)\}#/g;
  let changed = false;
  data = data.replace(re, (_, raw) => {
    const token = (raw || '').replace(/-/g, '_').toUpperCase();
    const val = mapToken(token);
    if (val == null || val === '') return `#{${raw}}#`; // leave as-is; we fail on leftovers later
    changed = true;
    return String(val);
  });
  fs.writeFileSync(file, data, 'utf8');
  return changed;
}
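
// For instance (hypothetical map, illustration only):
//   replaceInFile('artifacts/api/appsettings.Production.json',
//                 (t) => ({ API_KEY: 'abc' }[t]));
// rewrites the file in place; tokens without a value are left untouched.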

(async () => {
  const secrets = JSON.parse(process.env.SECRETS || '{}');
  const variables = JSON.parse(process.env.VARIABLES || '{}');
  const RUN_ID = process.env.RUN_ID || process.env.GITHUB_RUN_ID || '';
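
  // SECRETS and VARIABLES above are assumed to arrive as flat JSON maps
  // handed in by the workflow, e.g. SECRETS='{"DB_PASSWORD":"..."}'
  // (illustrative key; real names come from the repo's configured
  // secrets and variables).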

  const mapToken = (token) => {
    if (token === 'BUILDID') return RUN_ID;
    // Variables take precedence; fall back to secrets.
    return (variables[token] != null ? variables[token] : secrets[token]);
  };

  const beRoot = path.resolve('artifacts');
  const beFiles = [];
  ['api/appsettings.Production.json', 'ui/appsettings.Production.json'].forEach((name) => {
    const p = path.join(beRoot, name);
    if (fs.existsSync(p)) beFiles.push(p);
  });

  const files = beFiles;
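  // e.g. files === ['artifacts/api/appsettings.Production.json'] when only
  // the API artifact was produced (illustrative; depends on the build output).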

  if (files.length === 0) {
    console.error('❌ No candidate files found to tokenize (artifacts/api or artifacts/ui appsettings.Production.json).');
    process.exit(1);
  }

  console.log(`🔎 Tokenizing ${files.length} file(s)`);
  const missing = new Set();

  // First pass: substitute every token we have a value for.
  for (const file of files) {
    replaceInFile(file, mapToken);
  }
  // Second pass: re-scan for leftover #{...}# tokens so that all missing
  // names are reported together.
  for (const file of files) {
    const content = fs.readFileSync(file, 'utf8');
    const reLeft = /#\{(.*?)\}#/g;
    let m;
    while ((m = reLeft.exec(content))) {
      const token = (m[1] || '').replace(/-/g, '_').toUpperCase();
      missing.add(token);
    }
  }

  if (missing.size > 0) {
    console.error(`❌ Missing values for tokens: ${Array.from(missing).join(', ')}`);
    process.exit(1);
  }

  console.log('✅ Tokenization complete.');
})();
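
// A minimal sketch of how this script might be wired into a Gitea Actions
// workflow step. The step name and layout are assumptions; toJSON(secrets),
// toJSON(vars) and github.run_id are standard Actions expressions, but
// confirm your runner version exposes the `vars` context:
//
//   - name: Replace tokens
//     run: node .gitea/scripts/replaceTokens.js
//     env:
//       SECRETS: ${{ toJSON(secrets) }}
//       VARIABLES: ${{ toJSON(vars) }}
//       RUN_ID: ${{ github.run_id }}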