Compare commits
12 Commits
d323989c0f
...
8dc8f11f56
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8dc8f11f56 | ||
|
|
8ef949d0fc | ||
|
|
0fe2a555b4 | ||
|
|
ac9d35de76 | ||
|
|
77587c064d | ||
|
|
73836dd9fc | ||
|
|
e0da58fb20 | ||
|
|
da33ccfa49 | ||
|
|
4923f0e3c7 | ||
|
|
256d1a59c6 | ||
|
|
ab368ae706 | ||
|
|
fd4e854e7c |
67
.gitea/scripts/downloadArtifactByName.js
Normal file
67
.gitea/scripts/downloadArtifactByName.js
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
// .gitea/scripts/downloadArtifactByName.js
// Purpose: Download and extract a single artifact by name from a given run.
// Env inputs:
//   GITEA_BASE_URL, OWNER, REPO, GITEA_PAT
//   RUN_ID        -> numeric/string
//   ARTIFACT_NAME -> e.g. "frontend" or "webapi"
//   OUTPUT_DIR    -> e.g. "artifacts/frontend"
// Exits non-zero on any missing config, failed download, or extraction error.

const fs = require("fs");
const path = require("path");
const { execFileSync } = require("child_process");

(async () => {
  const BASE = process.env.GITEA_BASE_URL;
  const OWNER = process.env.OWNER;
  const REPO = process.env.REPO;
  const TOKEN = process.env.GITEA_PAT;
  const RUN_ID = process.env.RUN_ID;
  const NAME = process.env.ARTIFACT_NAME;
  const OUT_DIR = process.env.OUTPUT_DIR || path.join("artifacts", NAME || "");

  if (!BASE || !OWNER || !REPO) {
    console.error("Missing one of: GITEA_BASE_URL, OWNER, REPO");
    process.exit(1);
  }
  if (!TOKEN) {
    console.error("Missing GITEA_PAT");
    process.exit(1);
  }
  if (!RUN_ID || !NAME) {
    console.error("Missing RUN_ID or ARTIFACT_NAME");
    process.exit(1);
  }

  // GUI artifact URL (not the REST API); the server redirects to a signed
  // download link, which `redirect: "follow"` transparently chases.
  const url = `${BASE}/${OWNER}/${REPO}/actions/runs/${RUN_ID}/artifacts/${encodeURIComponent(NAME)}`;
  const zipPath = path.join(process.cwd(), `${NAME}.zip`);
  fs.mkdirSync(OUT_DIR, { recursive: true });

  console.log(`Downloading artifact "${NAME}" from run ${RUN_ID}`);
  console.log(`GET ${url}`);

  const res = await fetch(url, {
    method: "GET",
    redirect: "follow",
    headers: { Authorization: `token ${TOKEN}` }
  });

  if (!res.ok) {
    const text = await res.text().catch(() => "");
    console.error(`Download failed: ${res.status} ${res.statusText}\n${text}`);
    process.exit(1);
  }

  // Buffer the whole ZIP in memory; artifacts here are build outputs, assumed
  // small enough for that — NOTE(review): switch to streaming if they grow.
  const buf = Buffer.from(await res.arrayBuffer());
  fs.writeFileSync(zipPath, buf);

  console.log(`Saved ZIP -> ${zipPath}`);
  console.log(`Extracting to -> ${OUT_DIR}`);

  // execFileSync with an argv array avoids shell interpolation of NAME /
  // OUT_DIR (the previous execSync + quoted template string was injectable
  // via quotes or metacharacters in the artifact name).
  execFileSync("unzip", ["-o", zipPath, "-d", OUT_DIR], { stdio: "inherit" });
  fs.unlinkSync(zipPath);

  console.log("Done.");
})().catch(err => {
  console.error(err.stack || err.message || String(err));
  process.exit(1);
});
|
||||||
126
.gitea/scripts/getLatestRunWithArtifacts.js
Normal file
126
.gitea/scripts/getLatestRunWithArtifacts.js
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
// .gitea/scripts/getLatestRunWithArtifacts.js
// Purpose: Find latest successful run that exposes all REQUIRED_ARTIFACTS via GUI URLs.
// Outputs: sets `run_id` to GITHUB_OUTPUT and writes .gitea/.cache/run_id file.
// Exits non-zero when config is missing or no qualifying run is found.

const fs = require("fs");
const path = require("path");

(async () => {
  // --- Config from environment ---
  const BASE = process.env.GITEA_BASE_URL; // e.g. https://code.bim-it.pl
  const OWNER = process.env.OWNER; // e.g. mz
  const REPO = process.env.REPO; // e.g. DiunaBI
  const TOKEN = process.env.GITEA_PAT; // PAT
  const SCAN_LIMIT = Number(process.env.SCAN_LIMIT || "100");
  const REQUIRED_ARTIFACTS = (process.env.REQUIRED_ARTIFACTS || "frontend,webapi")
    .split(",")
    .map(s => s.trim())
    .filter(Boolean);

  if (!BASE || !OWNER || !REPO) {
    console.error("Missing one of: GITEA_BASE_URL, OWNER, REPO");
    process.exit(1);
  }
  if (!TOKEN) {
    console.error("Missing GITEA_PAT");
    process.exit(1);
  }

  // Ensure cache dir exists
  const cacheDir = path.join(".gitea", ".cache");
  fs.mkdirSync(cacheDir, { recursive: true });

  // Helpers

  // Authenticated JSON GET against the Gitea REST API; throws on non-2xx.
  const api = async (url) => {
    const res = await fetch(url, {
      headers: { Authorization: `token ${TOKEN}` }
    });
    if (!res.ok) {
      const text = await res.text().catch(() => "");
      throw new Error(`API ${res.status} ${res.statusText} for ${url}\n${text}`);
    }
    return res.json();
  };

  // Probe whether an artifact URL is reachable without downloading it.
  const headOk = async (url) => {
    // Try HEAD first; some instances may require GET for redirects
    let res = await fetch(url, {
      method: "HEAD",
      redirect: "follow",
      headers: { Authorization: `token ${TOKEN}` }
    });
    if (res.ok) return true;

    // Fallback to GET (no download) just to test availability
    res = await fetch(url, {
      method: "GET",
      redirect: "manual",
      headers: { Authorization: `token ${TOKEN}` }
    });
    // Cancel the body: on a 200 the server starts streaming the actual ZIP,
    // and an unconsumed fetch body keeps the undici connection occupied.
    if (res.body) {
      await res.body.cancel().catch(() => {});
    }
    // Accept 200 OK, or 3xx redirect to a signed download URL
    return res.status >= 200 && res.status < 400;
  };

  // 1) Get recent workflow runs (a.k.a. tasks) via REST
  const listUrl = `${BASE}/api/v1/repos/${OWNER}/${REPO}/actions/tasks?limit=${SCAN_LIMIT}`;
  const resp = await api(listUrl);

  // 2) Build candidate list: only status == "success", newest first by id
  const runs = Array.isArray(resp.workflow_runs) ? resp.workflow_runs : [];
  const candidates = runs
    .filter(r => r && r.status === "success")
    .sort((a, b) => (b.id ?? 0) - (a.id ?? 0));

  if (!candidates.length) {
    console.error("No successful runs found.");
    process.exit(1);
  }

  console.log(`Scanning ${candidates.length} successful runs for artifacts: ${REQUIRED_ARTIFACTS.join(", ")}`);

  // 3) Find the first run that exposes all required artifacts via GUI URLs
  let picked = null;
  for (const r of candidates) {
    const runId = r.id;
    const urls = REQUIRED_ARTIFACTS.map(name =>
      `${BASE}/${OWNER}/${REPO}/actions/runs/${runId}/artifacts/${encodeURIComponent(name)}`
    );

    // Probe sequentially so we can short-circuit on the first missing one.
    let allPresent = true;
    for (const u of urls) {
      const ok = await headOk(u).catch(() => false);
      if (!ok) {
        allPresent = false;
        console.log(`Run ${runId}: artifact not accessible -> ${u}`);
        break;
      }
    }
    if (allPresent) {
      picked = { id: runId };
      console.log(`Picked run_id=${runId}`);
      break;
    }
  }

  if (!picked) {
    console.error("No run exposes all required artifacts. Consider increasing SCAN_LIMIT or verify artifact names.");
    process.exit(1);
  }

  // 4) Write outputs
  const runIdStr = String(picked.id);
  // Write to cache (handy for debugging)
  fs.writeFileSync(path.join(cacheDir, "run_id"), runIdStr, "utf8");

  // Export as GitHub-style output (supported by Gitea runners)
  const outFile = process.env.GITHUB_OUTPUT;
  if (outFile) {
    fs.appendFileSync(outFile, `run_id=${runIdStr}\n`);
  } else {
    // Fallback: also print for visibility
    console.log(`::set-output name=run_id::${runIdStr}`);
  }
})().catch(err => {
  console.error(err.stack || err.message || String(err));
  process.exit(1);
});
|
||||||
@@ -3,7 +3,6 @@ name: BuildApp
|
|||||||
on:
|
on:
|
||||||
workflow_dispatch: {}
|
workflow_dispatch: {}
|
||||||
|
|
||||||
# (opcjonalnie; jeśli coś krzyczy w Twojej wersji Gitei, usuń całą sekcję concurrency)
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: build-${{ github.ref }}
|
group: build-${{ github.ref }}
|
||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
@@ -114,48 +113,4 @@ jobs:
|
|||||||
name: webapi
|
name: webapi
|
||||||
path: build/webapi
|
path: build/webapi
|
||||||
if-no-files-found: error
|
if-no-files-found: error
|
||||||
retention-days: 7
|
retention-days: 7
|
||||||
- name: Diagnose mounts
|
|
||||||
run: |
|
|
||||||
set -e
|
|
||||||
mount | grep runner-cache || true
|
|
||||||
ls -la /runner-cache || true
|
|
||||||
echo "MARKER $(date -Iseconds)" | tee /runner-cache/__ok.txt
|
|
||||||
store-artifacts:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [build-frontend, build-backend]
|
|
||||||
if: ${{ success() }}
|
|
||||||
steps:
|
|
||||||
- name: Download frontend artifacts
|
|
||||||
uses: https://github.com/actions/download-artifact@v3
|
|
||||||
with:
|
|
||||||
name: frontend
|
|
||||||
path: frontend
|
|
||||||
|
|
||||||
- name: Download webapi artifacts
|
|
||||||
uses: https://github.com/actions/download-artifact@v3
|
|
||||||
with:
|
|
||||||
name: webapi
|
|
||||||
path: webapi
|
|
||||||
|
|
||||||
- name: Store artifacts locally on runner
|
|
||||||
env:
|
|
||||||
BUILD_DIR: /runner-cache/builds/${{ github.run_id }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
mkdir -p "$BUILD_DIR"
|
|
||||||
cp -r webapi "$BUILD_DIR/"
|
|
||||||
cp -r frontend "$BUILD_DIR/"
|
|
||||||
mkdir -p /runner-cache/builds
|
|
||||||
ln -sfn "$BUILD_DIR" /runner-cache/builds/latest
|
|
||||||
|
|
||||||
{
|
|
||||||
echo "BUILD_TIME=$(date -Iseconds)"
|
|
||||||
echo "COMMIT_SHA=${GITHUB_SHA}"
|
|
||||||
echo "BRANCH=${GITHUB_REF_NAME}"
|
|
||||||
echo "BUILD_ID=${GITHUB_RUN_ID}"
|
|
||||||
} > "$BUILD_DIR/build-info.txt"
|
|
||||||
|
|
||||||
echo "Build artifacts stored in: $BUILD_DIR"
|
|
||||||
ls -la "$BUILD_DIR/"
|
|
||||||
echo "Symlink 'latest' -> $(readlink -f /runner-cache/builds/latest)"
|
|
||||||
@@ -1,9 +1,23 @@
|
|||||||
name: Hello
|
name: _debug-mount
|
||||||
on:
|
on: { workflow_dispatch: {} }
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
check:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- run: echo "Hello from stack-runner-01!"
|
- name: Who/where
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
echo "uname -a:"; uname -a || true
|
||||||
|
echo "--- cgroup ---"; cat /proc/1/cgroup || true
|
||||||
|
- name: Show docker image info
|
||||||
|
run: |
|
||||||
|
cat /etc/os-release || true
|
||||||
|
- name: Mounts & write marker
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
echo "== grep mount =="
|
||||||
|
mount | grep -E 'runner-cache|ci-keys' || true
|
||||||
|
echo "== ls /runner-cache =="
|
||||||
|
ls -la /runner-cache || true
|
||||||
|
echo "MARKER $(date -Iseconds)" | tee /runner-cache/__ok.txt
|
||||||
|
echo "OK"
|
||||||
@@ -1,67 +1,102 @@
|
|||||||
name: Release Morska (from latest build cache)
|
name: ReleaseApp (JS finder + download)
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch: {}
|
workflow_dispatch: {}
|
||||||
|
|
||||||
env:
|
|
||||||
DEPLOY_HOST: "bim-it.pl"
|
|
||||||
DEPLOY_USER: "mz"
|
|
||||||
DEPLOY_PATH: "./deployment/"
|
|
||||||
SSH_KEYFILE: "/ci-keys/morska"
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
release:
|
release:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
GITEA_BASE_URL: https://code.bim-it.pl
|
||||||
|
OWNER: mz
|
||||||
|
REPO: DiunaBI
|
||||||
|
# Comma-separated artifact names that must exist
|
||||||
|
REQUIRED_ARTIFACTS: frontend,webapi
|
||||||
|
# How many recent successful runs to scan
|
||||||
|
SCAN_LIMIT: "100"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout (for completeness)
|
- name: Checkout
|
||||||
uses: https://github.com/actions/checkout@v4
|
uses: https://github.com/actions/checkout@v4
|
||||||
|
|
||||||
- name: Tools
|
- name: Use Node.js 20
|
||||||
run: |
|
uses: https://github.com/actions/setup-node@v4
|
||||||
set -euo pipefail
|
with:
|
||||||
apt-get update -y
|
node-version: 20
|
||||||
apt-get install -y zip openssh-client
|
|
||||||
|
|
||||||
- name: Verify runner cache
|
- name: Install unzip (for extraction)
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y unzip
|
||||||
|
|
||||||
|
- name: Resolve latest run that exposes required artifacts
|
||||||
|
id: resolve
|
||||||
env:
|
env:
|
||||||
SRC: /runner-cache/builds/latest
|
GITEA_PAT: ${{ secrets.GITEATOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
node .gitea/scripts/getLatestRunWithArtifacts.js
|
||||||
echo "Expecting artifacts in: $SRC"
|
echo "Resolved run_id: $(cat .gitea/.cache/run_id)"
|
||||||
test -d "$SRC/frontend" || { echo "Missing $SRC/frontend"; exit 1; }
|
|
||||||
test -d "$SRC/webapi" || { echo "Missing $SRC/webapi"; exit 1; }
|
|
||||||
ls -la "$SRC"
|
|
||||||
|
|
||||||
- name: Create archives
|
- name: Download frontend artifact
|
||||||
env:
|
env:
|
||||||
SRC: /runner-cache/builds/latest
|
GITEA_PAT: ${{ secrets.GITEATOKEN }}
|
||||||
|
ARTIFACT_NAME: frontend
|
||||||
|
RUN_ID: ${{ steps.resolve.outputs.run_id }}
|
||||||
|
OUTPUT_DIR: artifacts/frontend
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
node .gitea/scripts/downloadArtifactByName.js
|
||||||
mkdir -p release
|
|
||||||
(cd "$SRC/frontend" && zip -r "$GITHUB_WORKSPACE/release/DiunaBI-Morska-Frontend.zip" .)
|
|
||||||
(cd "$SRC/webapi" && zip -r "$GITHUB_WORKSPACE/release/DiunaBI-Morska-WebApi.zip" .)
|
|
||||||
ls -la release
|
|
||||||
|
|
||||||
- name: Prepare known_hosts
|
- name: Download webapi artifact
|
||||||
|
env:
|
||||||
|
GITEA_PAT: ${{ secrets.GITEATOKEN }}
|
||||||
|
ARTIFACT_NAME: webapi
|
||||||
|
RUN_ID: ${{ steps.resolve.outputs.run_id }}
|
||||||
|
OUTPUT_DIR: artifacts/webapi
|
||||||
|
run: |
|
||||||
|
node .gitea/scripts/downloadArtifactByName.js
|
||||||
|
|
||||||
|
- name: Show artifact structure
|
||||||
|
run: |
|
||||||
|
echo "::group::frontend"
|
||||||
|
ls -laR artifacts/frontend || true
|
||||||
|
echo "::endgroup::"
|
||||||
|
echo "::group::webapi"
|
||||||
|
ls -laR artifacts/webapi || true
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
# 3) Package artifacts as ZIPs for transfer
|
||||||
|
- name: Package artifacts as ZIPs
|
||||||
|
run: |
|
||||||
|
mkdir -p build
|
||||||
|
(cd artifacts/frontend && zip -rq ../../build/DiunaBI-Morska-Frontend.zip .)
|
||||||
|
(cd artifacts/webapi && zip -rq ../../build/DiunaBI-Morska-WebApi.zip .)
|
||||||
|
ls -la build
|
||||||
|
|
||||||
|
# 4) Upload ZIPs to remote server via SSH (using secret key)
|
||||||
|
- name: Upload artifacts to remote server
|
||||||
|
env:
|
||||||
|
SSH_PRIVATE_KEY: ${{ secrets.BIMIT_SSH_KEY }}
|
||||||
|
SSH_USER: mz
|
||||||
|
SSH_HOST: bim-it.pl
|
||||||
|
REMOTE_DIR: deployment
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Prepare key
|
||||||
|
umask 077
|
||||||
|
echo "$SSH_PRIVATE_KEY" > private_key
|
||||||
|
chmod 600 private_key
|
||||||
|
|
||||||
|
# Preload known_hosts (safer than StrictHostKeyChecking=no)
|
||||||
mkdir -p ~/.ssh
|
mkdir -p ~/.ssh
|
||||||
ssh-keyscan -H "${{ env.DEPLOY_HOST }}" >> ~/.ssh/known_hosts 2>/dev/null || true
|
ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts
|
||||||
|
|
||||||
- name: Upload via SCP
|
# Ensure remote dir exists
|
||||||
run: |
|
ssh -i private_key "$SSH_USER@$SSH_HOST" "mkdir -p ~/$REMOTE_DIR"
|
||||||
set -euo pipefail
|
|
||||||
scp -i "${{ env.SSH_KEYFILE }}" -o IdentitiesOnly=yes -o StrictHostKeyChecking=yes \
|
# Upload files
|
||||||
./release/DiunaBI-Morska-Frontend.zip \
|
scp -i private_key build/DiunaBI-Morska-Frontend.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
|
||||||
"${{ env.DEPLOY_USER }}@${{ env.DEPLOY_HOST }}:${{ env.DEPLOY_PATH }}"
|
scp -i private_key build/DiunaBI-Morska-WebApi.zip "$SSH_USER@$SSH_HOST:~/$REMOTE_DIR/"
|
||||||
scp -i "${{ env.SSH_KEYFILE }}" -o IdentitiesOnly=yes -o StrictHostKeyChecking=yes \
|
|
||||||
./release/DiunaBI-Morska-WebApi.zip \
|
# Cleanup
|
||||||
"${{ env.DEPLOY_USER }}@${{ env.DEPLOY_HOST }}:${{ env.DEPLOY_PATH }}"
|
shred -u private_key
|
||||||
|
|
||||||
- name: Remote deploy
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
ssh -i "${{ env.SSH_KEYFILE }}" -o IdentitiesOnly=yes -o StrictHostKeyChecking=yes \
|
|
||||||
"${{ env.DEPLOY_USER }}@${{ env.DEPLOY_HOST }}" << 'EOF'
|
|
||||||
./deployment/DiunaBI-Morska.Release.sh
|
|
||||||
EOF
|
|
||||||
12
.github/workflows/build.yml
vendored
12
.github/workflows/build.yml
vendored
@@ -1,11 +1,11 @@
|
|||||||
name: BuildApp
|
name: BuildApp
|
||||||
on:
|
on:
|
||||||
push:
|
#push:
|
||||||
branches:
|
# branches:
|
||||||
- main
|
# - main
|
||||||
pull_request:
|
#pull_request:
|
||||||
branches:
|
# branches:
|
||||||
- main
|
# - main
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
|||||||
Reference in New Issue
Block a user