release
This commit is contained in:
@@ -1,38 +1,91 @@
|
|||||||
// .gitea/scripts/replaceTokens.js
//
// Scans:
//  - artifacts/frontend/**/*.js
//  - artifacts/webapi/appsettings.json (if present)
//  - artifacts/webapi/client_secrets.json (if present)
//
// Tokens: #{NAME}# -> value from VARIABLES/SECRETS (NAME: uppercased, '-' -> '_')
// Additionally: #{BUILDID}# -> RUN_ID (from ENV)

const fs = require('fs');
const path = require('path');
|
/**
 * Recursively collect files under `dir` that satisfy `predicate`.
 *
 * @param {string} dir - Root directory to scan.
 * @param {(fullPath: string) => boolean} predicate - Receives the full path of
 *   each regular file; return true to include it in the result.
 * @returns {string[]} Full paths of matching files; empty array when `dir`
 *   does not exist.
 */
function walk(dir, predicate) {
  const out = [];
  // Missing directory is not an error here — callers decide whether an
  // empty result is fatal.
  if (!fs.existsSync(dir)) return out;
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) out.push(...walk(full, predicate));
    else if (predicate(full)) out.push(full);
  }
  return out;
}
|
||||||
|
|
||||||
/**
 * Replace every #{TOKEN}# occurrence in `file`, rewriting the file in place.
 *
 * Token names are normalised before lookup: '-' becomes '_' and the name is
 * uppercased. Tokens that `mapToken` resolves to null/undefined/'' are left
 * untouched so a later pass can report them as missing.
 *
 * @param {string} file - Path of the file to tokenize.
 * @param {(token: string) => any} mapToken - Maps a normalised token name to
 *   its replacement value.
 * @returns {boolean} true when at least one token was substituted.
 */
function replaceInFile(file, mapToken) {
  let data = fs.readFileSync(file, 'utf8');
  const re = /#\{(.*?)\}#/g;
  let changed = false;
  data = data.replace(re, (_, raw) => {
    const token = (raw || '').replace(/-/g, '_').toUpperCase();
    const val = mapToken(token);
    // Leave unresolved tokens as-is; the caller reports the error later.
    if (val == null || val === '') return `#{${raw}}#`;
    changed = true;
    return String(val);
  });
  fs.writeFileSync(file, data, 'utf8');
  return changed;
}
|
||||||
|
|
||||||
|
// Entry point: gather candidate files, substitute tokens, then fail the job
// if any token could not be resolved.
(async () => {
  // Token sources come from the workflow environment; default to empty maps
  // so a missing env var does not crash JSON.parse.
  const secrets = JSON.parse(process.env.SECRETS || '{}');
  const variables = JSON.parse(process.env.VARIABLES || '{}');
  const RUN_ID = process.env.RUN_ID || process.env.GITHUB_RUN_ID || '';

  // VARIABLES take precedence over SECRETS; BUILDID always maps to RUN_ID.
  const mapToken = (token) => {
    if (token === 'BUILDID') return RUN_ID;
    return (variables[token] != null ? variables[token] : secrets[token]);
  };

  // 1) Frontend: every .js file
  const feRoot = path.resolve('artifacts/frontend');
  const feFiles = walk(feRoot, (f) => f.endsWith('.js'));

  // 2) Backend: selected files, only if they exist
  const beRoot = path.resolve('artifacts/webapi');
  const beFiles = [];
  for (const name of ['appsettings.json', 'client_secrets.json']) {
    const p = path.join(beRoot, name);
    if (fs.existsSync(p)) beFiles.push(p);
  }

  const files = [...feFiles, ...beFiles];

  if (files.length === 0) {
    console.error('❌ No candidate files found to tokenize (frontend .js / backend json).');
    process.exit(1);
  }

  console.log(`🔎 Tokenizing ${files.length} file(s)`);
  const missing = new Set();

  // First pass: substitute values.
  for (const file of files) {
    replaceInFile(file, mapToken);
  }

  // Second pass: collect tokens still left in the files (i.e. unresolved).
  for (const file of files) {
    const content = fs.readFileSync(file, 'utf8');
    const reLeft = /#\{(.*?)\}#/g;
    let m;
    while ((m = reLeft.exec(content))) {
      const token = (m[1] || '').replace(/-/g, '_').toUpperCase();
      missing.add(token);
    }
  }

  if (missing.size > 0) {
    console.error(`❌ Missing values for tokens: ${Array.from(missing).join(', ')}`);
    process.exit(1);
  }

  console.log('✅ Tokenization complete.');
})();
|
||||||
@@ -62,13 +62,13 @@ jobs:
|
|||||||
ls -laR artifacts/webapi || true
|
ls -laR artifacts/webapi || true
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
|
||||||
- name: Tokenize (replace #{...}# from secrets/vars)
|
- name: Resolve latest run that exposes required artifacts
|
||||||
|
id: resolve
|
||||||
env:
|
env:
|
||||||
SECRETS: ${{ toJson(secrets) }}
|
GITEA_PAT: ${{ secrets.GITEATOKEN }}
|
||||||
VARIABLES: ${{ toJson(vars) }}
|
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
node .gitea/scripts/getLatestRunWithArtifacts.js
|
||||||
node -e "require('./.gitea/scripts/replaceTokens.js')({ github: {}, context: {}, core: {} });"
|
echo "run_id=$(cat .gitea/.cache/run_id)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
- name: Package artifacts as ZIPs
|
- name: Package artifacts as ZIPs
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
Reference in New Issue
Block a user