Mirror of https://github.com/vcmi/vcmi.git (synced 2025-10-08 23:22:25 +02:00)

Commit: Add files via upload
@@ -1,13 +1,27 @@
-#!/bin/sh
+#!/usr/bin/env bash
 
-sudo apt-get update
+set -euo pipefail
+export DEBIAN_FRONTEND=noninteractive
 
-# Dependencies
-# In case of change in dependencies list please also update:
-# - developer docs at docs/developer/Building_Linux.md
-# - debian build settings at debian/control
-sudo apt-get install libboost-dev libboost-filesystem-dev libboost-system-dev libboost-thread-dev libboost-program-options-dev libboost-locale-dev libboost-iostreams-dev \
-	libsdl2-dev libsdl2-image-dev libsdl2-mixer-dev libsdl2-ttf-dev \
-	qtbase5-dev qttools5-dev libqt5svg5-dev \
-	ninja-build zlib1g-dev libavformat-dev libswscale-dev libtbb-dev libluajit-5.1-dev \
-	libminizip-dev libfuzzylite-dev libsqlite3-dev # Optional dependencies
+APT_CACHE="${APT_CACHE:-${RUNNER_TEMP:-/tmp}/apt-cache}"
+sudo mkdir -p "$APT_CACHE"
+
+sudo apt -yq -o Acquire::Retries=3 update
+sudo apt -yq install eatmydata
+
+sudo eatmydata apt -yq --no-install-recommends \
+	-o Dir::Cache::archives="$APT_CACHE" \
+	-o APT::Keep-Downloaded-Packages=true \
+	-o Acquire::Retries=3 -o Dpkg::Use-Pty=0 \
+	install \
+	libboost-dev libboost-filesystem-dev libboost-system-dev libboost-thread-dev \
+	libboost-program-options-dev libboost-locale-dev libboost-iostreams-dev \
+	libsdl2-dev libsdl2-image-dev libsdl2-mixer-dev libsdl2-ttf-dev \
+	qtbase5-dev qtbase5-dev-tools qttools5-dev qttools5-dev-tools \
+	libqt5svg5-dev \
+	ninja-build zlib1g-dev libavformat-dev libswscale-dev libtbb-dev \
+	libluajit-5.1-dev libminizip-dev libfuzzylite-dev libsqlite3-dev
+
+sudo rm -f "$APT_CACHE/lock" || true
+sudo rm -rf "$APT_CACHE/partial" || true
+sudo chown -R "$USER:$USER" "$APT_CACHE"
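The point of routing apt's archive directory to $APT_CACHE is that a CI cache step can persist the downloaded .deb files between runs, while eatmydata skips fsync calls during installation. A minimal sketch of the intended round-trip, assuming a generic restore/save cache step around the script (the tar archive here is an illustration, not part of the commit):

    # restore step (placeholder for e.g. a CI cache action): unpack last run's .debs
    export APT_CACHE="${RUNNER_TEMP:-/tmp}/apt-cache"
    mkdir -p "$APT_CACHE"
    [ -f apt-cache.tgz ] && tar -xzf apt-cache.tgz -C "$APT_CACHE"

    # ... run the dependency script above ...

    # save step: the .debs survive thanks to APT::Keep-Downloaded-Packages=true
    tar -czf apt-cache.tgz -C "$APT_CACHE" .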
@@ -1,13 +1,27 @@
-#!/bin/sh
+#!/usr/bin/env bash
 
-sudo apt-get update
+set -euo pipefail
+export DEBIAN_FRONTEND=noninteractive
 
-# Dependencies
-# In case of change in dependencies list please also update:
-# - developer docs at docs/developer/Building_Linux.md
-# - debian build settings at debian/control
-sudo apt-get install libboost-dev libboost-filesystem-dev libboost-system-dev libboost-thread-dev libboost-program-options-dev libboost-locale-dev libboost-iostreams-dev \
-	libsdl2-dev libsdl2-image-dev libsdl2-mixer-dev libsdl2-ttf-dev \
-	qt6-base-dev qt6-base-dev-tools qt6-tools-dev qt6-tools-dev-tools qt6-l10n-tools qt6-svg-dev libqt6svg6-dev \
-	ninja-build zlib1g-dev libavformat-dev libswscale-dev libtbb-dev libluajit-5.1-dev \
-	libminizip-dev libfuzzylite-dev libsqlite3-dev # Optional dependencies
+APT_CACHE="${APT_CACHE:-${RUNNER_TEMP:-/tmp}/apt-cache}"
+sudo mkdir -p "$APT_CACHE"
+
+sudo apt -yq -o Acquire::Retries=3 update
+sudo apt -yq install eatmydata
+
+sudo eatmydata apt -yq --no-install-recommends \
+	-o Dir::Cache::archives="$APT_CACHE" \
+	-o APT::Keep-Downloaded-Packages=true \
+	-o Acquire::Retries=3 -o Dpkg::Use-Pty=0 \
+	install \
+	libboost-dev libboost-filesystem-dev libboost-system-dev libboost-thread-dev \
+	libboost-program-options-dev libboost-locale-dev libboost-iostreams-dev \
+	libsdl2-dev libsdl2-image-dev libsdl2-mixer-dev libsdl2-ttf-dev \
+	qt6-base-dev qt6-base-dev-tools qt6-tools-dev qt6-tools-dev-tools \
+	qt6-l10n-tools qt6-svg-dev \
+	ninja-build zlib1g-dev libavformat-dev libswscale-dev libtbb-dev \
+	libluajit-5.1-dev libminizip-dev libfuzzylite-dev libsqlite3-dev
+
+sudo rm -f "$APT_CACHE/lock" || true
+sudo rm -rf "$APT_CACHE/partial" || true
+sudo chown -R "$USER:$USER" "$APT_CACHE"
@@ -1,7 +1,35 @@
 #!/usr/bin/env bash
 
-sudo apt-get update
-sudo apt-get install ninja-build mingw-w64 nsis
+set -euo pipefail
+export DEBIAN_FRONTEND=noninteractive
 
-sudo update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix
-sudo update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix
+ARCH="${1:-x86_64}"
+case "$ARCH" in
+	x86) triplet=i686-w64-mingw32 ;;
+	x86_64) triplet=x86_64-w64-mingw32 ;;
+	*) echo "Unsupported ARCH '$ARCH' (use: x86 | x86_64)"; exit 2 ;;
+esac
+
+APT_CACHE="${APT_CACHE:-${RUNNER_TEMP:-/tmp}/apt-cache}"
+sudo mkdir -p "$APT_CACHE"
+
+sudo apt -yq -o Acquire::Retries=3 update
+sudo apt -yq install eatmydata
+
+sudo eatmydata apt -yq --no-install-recommends \
+	-o Dir::Cache::archives="$APT_CACHE" \
+	-o APT::Keep-Downloaded-Packages=true \
+	-o Acquire::Retries=3 -o Dpkg::Use-Pty=0 \
+	install \
+	ninja-build nsis mingw-w64 g++-mingw-w64
+
+if [[ -x "/usr/bin/${triplet}-g++-posix" ]]; then
+	sudo update-alternatives --set "${triplet}-g++" "/usr/bin/${triplet}-g++-posix"
+fi
+if [[ -x "/usr/bin/${triplet}-gcc-posix" ]]; then
+	sudo update-alternatives --set "${triplet}-gcc" "/usr/bin/${triplet}-gcc-posix"
+fi
+
+sudo rm -f "$APT_CACHE/lock" || true
+sudo rm -rf "$APT_CACHE/partial" || true
+sudo chown -R "$USER:$USER" "$APT_CACHE"
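The rewritten script takes the target architecture as its first argument and switches both g++ and gcc to the POSIX-threads variants for the selected triplet only. A hypothetical invocation (the diff does not show the script's file name, so the path below is an assumption):

    ./mingw_before_install.sh x86    # hypothetical name; selects the i686-w64-mingw32 triplet
    ./mingw_before_install.sh        # defaults to x86_64 -> x86_64-w64-mingw32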
CI/emit_partial.py (new file, 139 lines)
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+
+import json
+import os
+import pathlib
+import re
+import subprocess
+import sys
+from typing import Tuple, Optional, List
+
+
+def run(cmd: List[str]) -> str:
+    """Run a command and return stdout as text; return empty string on any error."""
+    try:
+        return subprocess.check_output(cmd, text=True, stderr=subprocess.STDOUT)
+    except Exception:
+        return ""
+
+
+def detect(platform: str) -> Tuple[str, List[str], str]:
+    """Detect cache tool, command to print stats, and a family label from platform."""
+    if platform.startswith("msvc"):
+        return ("sccache", ["sccache", "--show-stats"], "windows-msvc")
+    if platform.startswith("mingw_"):
+        return ("ccache", ["ccache", "-s"], "windows-mingw")
+    if platform.startswith("mac"):
+        return ("ccache", ["ccache", "-s"], "macos")
+    if platform == "ios":
+        return ("ccache", ["ccache", "-s"], "ios")
+    if platform.startswith("android"):
+        return ("ccache", ["ccache", "-s"], "android")
+    return ("ccache", ["ccache", "-s"], "other")
+
+
+def parse_ccache(text: str) -> Tuple[int, int]:
+    """
+    Parse ccache stats. Supports:
+      - Legacy lines: "Hits: 123" / "Misses: 45"
+      - Modern lines: "cache hit (direct) 10"
+                      "cache hit (preprocessed) 5"
+                      "cache hit (remote) 2" (optional)
+                      "cache miss 12"
+    Returns (hits, misses).
+    """
+    # Legacy format
+    m_hits = re.search(r"^\s*Hits:\s*(\d+)\b", text, re.M)
+    m_miss = re.search(r"^\s*Misses:\s*(\d+)\b", text, re.M)
+    if m_hits and m_miss:
+        return int(m_hits.group(1)), int(m_miss.group(1))
+
+    # Modern format: sum all hit buckets
+    def pick(pattern: str) -> int:
+        m = re.search(pattern, text, re.M | re.I)
+        return int(m.group(1)) if m else 0
+
+    hits_direct = pick(r"^cache hit\s*\(direct\)\s+(\d+)\b")
+    hits_pre = pick(r"^cache hit\s*\(preprocessed\)\s+(\d+)\b")
+    hits_remote = pick(r"^cache hit\s*\(remote\)\s+(\d+)\b")  # may be absent
+    misses = pick(r"^cache miss\s+(\d+)\b")
+    hits_total = hits_direct + hits_pre + hits_remote
+    return hits_total, misses
+
+
+def parse_sccache(text: str) -> Tuple[int, int]:
+    """
+    Parse sccache --show-stats lines:
+      "Cache hits 123"
+      "Cache misses 45"
+    Returns (hits, misses).
+    """
+    def pick(label: str) -> int:
+        m = re.search(rf"^{re.escape(label)}\s+(\d+)\b", text, re.M | re.I)
+        return int(m.group(1)) if m else 0
+
+    hits = pick("Cache hits")
+    misses = pick("Cache misses")
+    return hits, misses
+
+
+def arch_label(platform: str, arch_env: Optional[str]) -> str:
+    """Produce a nice arch label; prefer ARCH env when present."""
+    if arch_env:
+        return arch_env
+    mapping = {
+        "mac-intel": "Intel",
+        "mac-arm": "ARM64",
+        "ios": "ARM64",
+        "msvc-x64": "x64",
+        "msvc-x86": "x86",
+        "msvc-arm64": "ARM64",
+        "mingw_x86": "x86",
+        "mingw_x86_64": "x64",
+        "android-32": "ARMv7",
+        "android-64": "ARM64",
+    }
+    return mapping.get(platform, platform)
+
+
+def main() -> int:
+    # Prefer our explicit PLATFORM env; fall back to VS's "Platform" on Windows if needed.
+    platform = os.getenv("PLATFORM") or os.getenv("Platform") or "unknown"
+    arch = arch_label(platform, os.getenv("ARCH"))
+    tool, cmd, family = detect(platform)
+
+    stats_raw = run(cmd)
+    if tool == "sccache":
+        hits, misses = parse_sccache(stats_raw)
+    else:
+        hits, misses = parse_ccache(stats_raw)
+
+    total = hits + misses
+    rate = f"{(100.0 * hits / total):.2f}%" if total else "n/a"
+
+    payload = {
+        "platform": platform,
+        "family": family,
+        "arch": arch,
+        "tool": tool,
+        "hits": hits,
+        "misses": misses,
+        "total": total,
+        "rate": rate,
+        "artifact_url": os.getenv("ARTIFACT_URL", ""),
+        "debug_symbols_url": os.getenv("DEBUG_SYMBOLS_URL", ""),
+        "aab_url": os.getenv("AAB_URL", ""),
+        "stats_cmd": " ".join(cmd),
+        "stats_raw": stats_raw,
+    }
+
+    outdir = pathlib.Path(".summary")
+    outdir.mkdir(parents=True, exist_ok=True)
+    outpath = outdir / f"{platform}.json"
+    outpath.write_text(json.dumps(payload, ensure_ascii=False, indent=2))
+    print(f"Wrote {outpath}")
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
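Because everything is read from the environment, the script can be exercised locally. A sketch of a dry run, assuming ccache is installed (PLATFORM and ARCH are the variables the script actually reads; the artifact URL fields simply come out empty):

    PLATFORM=mac-arm ARCH=ARM64 python3 CI/emit_partial.py
    cat .summary/mac-arm.json    # hits/misses/rate plus empty artifact URLs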
CI/final_summary.py (new file, 408 lines)
@@ -0,0 +1,408 @@
+#!/usr/bin/env python3
+
+from __future__ import annotations
+
+import json
+import os
+import glob
+import pathlib
+import urllib.error
+import urllib.parse
+import urllib.request
+import datetime
+from typing import Any, Dict, Iterable, List, Tuple
+
+# ----------------------- Constants -----------------------
+
+ICON_WIN = "https://raw.githubusercontent.com/EgoistDeveloper/operating-system-logos/master/src/32x32/WIN.png"
+ICON_MAC = "https://raw.githubusercontent.com/EgoistDeveloper/operating-system-logos/master/src/32x32/MAC.png"
+ICON_IOS = "https://raw.githubusercontent.com/EgoistDeveloper/operating-system-logos/master/src/32x32/IOS.png"
+ICON_AND = "https://raw.githubusercontent.com/EgoistDeveloper/operating-system-logos/master/src/32x32/AND.png"
+ICON_CPP = "https://raw.githubusercontent.com/isocpp/logos/master/cpp_logo.png"
+ICON_PM = "https://avatars.githubusercontent.com/u/96267164?s=32"
+
+ALIGN_4_COLS = "|:--|:--:|:--:|:--:|\n"  # reused in Validation/Tests/Build matrix sections
+
+FAMILIES = ("windows-msvc", "windows-mingw", "macos", "ios", "android")
+
+VALIDATION_ORDER = {"LF line endings": 0, "JSON": 1, "Markdown": 2}
+TESTS_ORDER = {"Clang Latest": 0, "GCC Latest": 1, "Clang Oldest": 2, "GCC Oldest": 3}
+
+# ----------------------- Helpers -----------------------
+
+def env(name: str, default: str = "") -> str:
+    v = os.getenv(name)
+    return v if v is not None else default
+
+def now_utc() -> datetime.datetime:
+    # Sonar: avoid utcnow(); use tz-aware now()
+    return datetime.datetime.now(datetime.timezone.utc)
+
+def hms_from_ms(ms: int) -> str:
+    s = max(0, round(ms / 1000))
+    hh = f"{s // 3600:02d}"
+    mm = f"{(s % 3600) // 60:02d}"
+    ss = f"{s % 60:02d}"
+    return f"{hh}:{mm}:{ss}"
+
+def parse_iso8601(s: str | None) -> datetime.datetime | None:
+    if not s:
+        return None
+    try:
+        return datetime.datetime.fromisoformat(s.replace("Z", "+00:00"))
+    except Exception:
+        return None
+
+def gh_api(path: str, method: str = "GET", data: Any = None, token: str | None = None,
+           params: Dict[str, Any] | None = None) -> Any:
+    base = "https://api.github.com"
+    url = f"{base}{path}"
+    if params:
+        url = f"{url}?{urllib.parse.urlencode(params)}"
+
+    headers = {
+        "Accept": "application/vnd.github+json",
+        "User-Agent": "final-summary-script",
+    }
+    if token:
+        headers["Authorization"] = f"Bearer {token}"
+
+    req = urllib.request.Request(url, method=method, headers=headers)
+    payload = None
+    if data is not None:
+        payload = json.dumps(data).encode("utf-8")
+        req.add_header("Content-Type", "application/json")
+
+    try:
+        with urllib.request.urlopen(req, payload, timeout=60) as r:
+            raw = r.read()
+            return {} if not raw else json.loads(raw.decode("utf-8"))
+    except urllib.error.HTTPError as e:
+        msg = e.read().decode("utf-8", errors="replace")
+        print(f"[WARN] GitHub API {method} {url} -> {e.code}: {msg}")
+        raise
+    except Exception as e:
+        print(f"[WARN] GitHub API {method} {url} -> {e}")
+        raise
+
+def read_json_file(p: str) -> Any:
+    try:
+        with open(p, "r", encoding="utf-8") as f:
+            return json.load(f)
+    except FileNotFoundError:
+        return None
+    except Exception as e:
+        print(f"[WARN] Cannot read JSON '{p}': {e}")
+        return None
+
+def write_json_file(p: str, data: Any) -> None:
+    pathlib.Path(p).parent.mkdir(parents=True, exist_ok=True)
+    with open(p, "w", encoding="utf-8") as f:
+        json.dump(data, f, ensure_ascii=False, indent=2)
+
+def append_summary(md: str) -> None:
+    summary_path = env("GITHUB_STEP_SUMMARY") or "SUMMARY.md"
+    with open(summary_path, "a", encoding="utf-8") as f:
+        f.write(md)
+
+def status_icon(status: str) -> str:
+    return {
+        "success": "✅",
+        "failure": "❌",
+        "cancelled": "🚫",
+        "timed_out": "⌛",
+        "skipped": "⏭",
+        "neutral": "⚠️",
+        "action_required": "⚠️",
+    }.get(status, "❓")
+
+def family_title_and_icon(fam: str) -> Tuple[str, str]:
+    match fam:
+        case "windows-msvc": return "Windows (MSVC)", ICON_WIN
+        case "windows-mingw": return "Windows (MinGW)", ICON_WIN
+        case "macos": return "macOS", ICON_MAC
+        case "ios": return "iOS", ICON_IOS
+        case "android": return "Android", ICON_AND
+        case _: return fam, ICON_PM
+
+# ----------------------- 1) Collect validation & tests -----------------------
+
+def _job_duration(job: Dict[str, Any]) -> str:
+    st = parse_iso8601(job.get("started_at"))
+    en = parse_iso8601(job.get("completed_at"))
+    if st and en:
+        return hms_from_ms(int((en - st).total_seconds() * 1000))
+    return ""
+
+def _test_pretty_name(name: str) -> str:
+    pretty = name.replace("Test (", "").removesuffix(")")
+    low = pretty.lower()
+    if "gcc-latest" in low: return "GCC Latest"
+    if "gcc-oldest" in low: return "GCC Oldest"
+    if "clang-latest" in low: return "Clang Latest"
+    if "clang-oldest" in low: return "Clang Oldest"
+    return pretty
+
+def _rows_for_job(j: Dict[str, Any]) -> List[Dict[str, Any]]:
+    rows: List[Dict[str, Any]] = []
+    dur = _job_duration(j)
+    name = j.get("name") or ""
+
+    # Build matrix
+    if name.startswith("Build "):
+        pretty = name.replace("Build (", "").removesuffix(")")
+        rows.append({
+            "group": "builds",
+            "name": pretty,
+            "status": j.get("conclusion") or "neutral",
+            "duration": dur,
+            "url": j.get("html_url"),
+        })
+
+    # Code validation
+    if name == "Validate Code":
+        mapping = {
+            "Validate JSON": "JSON",
+            "Validate Markdown": "Markdown",
+            "Ensure LF line endings": "LF line endings",
+        }
+        for st in (j.get("steps") or []):
+            stname = st.get("name")
+            if stname in mapping:
+                rows.append({
+                    "group": "validation",
+                    "name": mapping[stname],
+                    "status": st.get("conclusion") or "skipped",
+                    "duration": dur,
+                    "url": j.get("html_url"),
+                })
+
+    # Tests matrix
+    if name.startswith("Test "):
+        pretty = _test_pretty_name(name)
+        steps = j.get("steps") or []
+        test_step = next((s for s in steps if s.get("name") == "Test"), None)
+        status = (test_step.get("conclusion") if test_step else None) or j.get("conclusion") or "neutral"
+        rows.append({
+            "group": "tests",
+            "name": pretty,
+            "status": status,
+            "duration": dur,
+            "url": j.get("html_url"),
+        })
+
+    return rows
+
+def collect_validation_and_tests() -> None:
+    token = env("GITHUB_TOKEN")
+    repo_full = env("GITHUB_REPOSITORY")  # "owner/repo"
+    run_id = env("GITHUB_RUN_ID")
+
+    if not (token and repo_full and run_id):
+        print("[INFO] Missing GITHUB_TOKEN / GITHUB_REPOSITORY / GITHUB_RUN_ID; skipping GH API collect.")
+        return
+
+    owner, repo = repo_full.split("/", 1)
+    r = gh_api(f"/repos/{owner}/{repo}/actions/runs/{run_id}/jobs",
+               method="GET", token=token, params={"per_page": 100})
+    jobs = r.get("jobs") or []
+
+    rows: List[Dict[str, Any]] = []
+    for j in jobs:
+        rows.extend(_rows_for_job(j))
+
+    pathlib.Path("partials").mkdir(parents=True, exist_ok=True)
+    write_json_file("partials/validation.json", rows)
+
+# ----------------------- 2) Compose Summary -----------------------
+
+def _load_primary_items() -> List[Dict[str, Any]]:
+    files = [p for p in glob.glob("partials/**/*.json", recursive=True)
+             if not any(p.endswith(x) for x in ("validation.json", "source.json"))
+             and not pathlib.Path(p).name.startswith("installer-")]
+    items: List[Dict[str, Any]] = []
+    for p in files:
+        data = read_json_file(p)
+        if isinstance(data, dict) and "family" in data:
+            items.append(data)
+    return items
+
+def _load_installer_map() -> Dict[str, str]:
+    inst_map: Dict[str, str] = {}
+    for p in glob.glob("partials/installer-*.json"):
+        obj = read_json_file(p) or {}
+        plat, url = obj.get("platform"), obj.get("installer_url")
+        if plat and url:
+            inst_map[plat] = url
+    return inst_map
+
+def _durations_map(val_rows: List[Dict[str, Any]]) -> Dict[str, str]:
+    return {r["name"]: r.get("duration", "-") for r in val_rows if r.get("group") == "builds"}
+
+def _render_family_table(fam: str, rows: List[Dict[str, Any]],
+                         inst_map: Dict[str, str], dur_map: Dict[str, str]) -> None:
+    title, icon = family_title_and_icon(fam)
+    append_summary(f'### <img src="{icon}" width="22"/> {title}\n\n')
+
+    cols_arch = ["| Architecture |"]
+    cols_stats = ["| Cache statistic |"]
+    cols_time = ["| Build time |"]
+    cols_down = ["| Download |"]
+
+    for it in rows:
+        plat = it.get("platform", "")
+        arch = it.get("arch", "")
+        hits = it.get("hits", "")
+        total = it.get("total", "")
+        rate = it.get("rate", "")
+        hms = dur_map.get(plat, "-")
+
+        main = it.get("artifact_url") or ""
+        dbg = it.get("debug_symbols_url") or ""
+        aab = it.get("aab_url") or ""
+        inst = inst_map.get(plat, "")
+
+        cols_arch.append(f" {arch} |")
+        cols_stats.append(f" {rate} ({hits} / {total}) |")
+        cols_time.append(f" {hms} |")
+
+        dl_parts = []
+        if inst: dl_parts.append(f"[Installer]({inst})")
+        if dbg: dl_parts.append(f"[Debug symbols]({dbg})")
+        if main: dl_parts.append(f"[Archive]({main})")
+        if aab: dl_parts.append(f"[AAB]({aab})")
+        dl = "<br/>".join(dl_parts) if dl_parts else "—"
+        cols_down.append(f" {dl} |")
+
+    count = len(rows)
+    align = "|:--|" + ":--:|" * count
+
+    append_summary("".join(cols_arch) + "\n")
+    append_summary(align + "\n")
+    append_summary("".join(cols_stats) + "\n")
+    append_summary("".join(cols_time) + "\n")
+    append_summary("".join(cols_down) + "\n\n")
+
+def _render_validation_section(val_rows: List[Dict[str, Any]]) -> None:
+    rows = sorted((r for r in val_rows if r.get("group") == "validation"),
+                  key=lambda r: (VALIDATION_ORDER.get(r.get("name"), 999), r.get("name", "")))
+    if not rows:
+        return
+
+    append_summary("### 🔍 Validation\n")
+    append_summary("| Check | Status | Time | Logs |\n")
+    append_summary(ALIGN_4_COLS)
+
+    for r in rows:
+        icon = status_icon(r.get("status", ""))
+        dur = r.get("duration") or "-"
+        url = r.get("url") or ""
+        logs = f"[Logs]({url})" if url else "—"
+        append_summary(f"| {r.get('name','')} | {icon} | {dur} | {logs} |\n")
+    append_summary("\n")
+
+def _render_tests_section(val_rows: List[Dict[str, Any]]) -> None:
+    rows = sorted((r for r in val_rows if r.get("group") == "tests"),
+                  key=lambda r: (TESTS_ORDER.get(r.get("name"), 999), r.get("name", "")))
+    if not rows:
+        return
+
+    append_summary("### 🧪 Tests\n")
+    append_summary("| Matrix | Status | Time | Logs |\n")
+    append_summary(ALIGN_4_COLS)
+
+    for r in rows:
+        icon = status_icon(r.get("status", ""))
+        dur = r.get("duration") or "-"
+        url = r.get("url") or ""
+        logs = f"[Logs]({url})" if url else "—"
+        append_summary(f"| {r.get('name','')} | {icon} | {dur} | {logs} |\n")
+    append_summary("\n")
+
+def _render_build_matrix_section(val_rows: List[Dict[str, Any]]) -> None:
+    rows = sorted((r for r in val_rows if r.get("group") == "builds"),
+                  key=lambda r: r.get("name", ""))
+    if not rows:
+        return
+
+    append_summary("### 🚦 Build matrix\n")
+    append_summary("| Platform | Status | Time | Logs |\n")
+    append_summary(ALIGN_4_COLS)
+
+    for r in rows:
+        icon = status_icon(r.get("status", ""))
+        dur = r.get("duration") or "-"
+        url = r.get("url") or ""
+        logs = f"[Logs]({url})" if url else "—"
+        append_summary(f"| {r.get('name','')} | {icon} | {dur} | {logs} |\n")
+    append_summary("\n")
+
+def compose_summary() -> None:
+    # Source code section
+    src_json = read_json_file("partials/source.json")
+    if src_json and src_json.get("source_url"):
+        append_summary("\n\n")
+        append_summary(
+            f'### <img src="{ICON_CPP}" width="20"/> Source code - [Download]({src_json["source_url"]})\n\n\n'
+        )
+
+    items = _load_primary_items()
+    inst_map = _load_installer_map()
+    val_rows: List[Dict[str, Any]] = read_json_file("partials/validation.json") or []
+    dur_map = _durations_map(val_rows)
+
+    # Family tables
+    for fam in FAMILIES:
+        fam_rows = [x for x in items if x.get("family") == fam]
+        if fam_rows:
+            _render_family_table(fam, fam_rows, inst_map, dur_map)
+
+    # Validation, Tests, Build matrix
+    _render_validation_section(val_rows)
+    _render_tests_section(val_rows)
+    _render_build_matrix_section(val_rows)
+
+# ----------------------- 3) Delete partial artifacts -----------------------
+
+def delete_partial_artifacts() -> None:
+    token = env("GITHUB_TOKEN")
+    repo_full = env("GITHUB_REPOSITORY")
+    run_id = env("GITHUB_RUN_ID")
+    if not (token and repo_full and run_id):
+        print("[INFO] Missing env for deleting artifacts; skipping.")
+        return
+
+    owner, repo = repo_full.split("/", 1)
+    r = gh_api(f"/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts",
+               token=token, params={"per_page": 100})
+    arts = r.get("artifacts") or []
+    for a in arts:
+        name = a.get("name", "")
+        if name.startswith("partial-json-"):
+            aid = a.get("id")
+            print(f"Deleting artifact {name} (id={aid})")
+            gh_api(f"/repos/{owner}/{repo}/actions/artifacts/{aid}",
+                   method="DELETE", token=token)
+
+# ----------------------- Main -----------------------
+
+def main() -> None:
+    pathlib.Path("partials").mkdir(parents=True, exist_ok=True)
+
+    try:
+        collect_validation_and_tests()
+    except Exception as e:
+        print(f"[WARN] collect_validation_and_tests failed: {e}")
+
+    try:
+        compose_summary()
+    except Exception as e:
+        print(f"[WARN] compose_summary failed: {e}")
+
+    try:
+        delete_partial_artifacts()
+    except Exception as e:
+        print(f"[WARN] delete_partial_artifacts failed: {e}")
+
+if __name__ == "__main__":
+    main()
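The three phases run independently and failures only produce warnings, so a partially failed run still yields a summary. A sketch of the invocation CI would use (values illustrative; the environment variable names are the ones the script reads):

    GITHUB_REPOSITORY=vcmi/vcmi GITHUB_RUN_ID=1234567890 GITHUB_TOKEN="$TOKEN" \
        python3 CI/final_summary.py
    # with GITHUB_STEP_SUMMARY unset, the markdown is appended to ./SUMMARY.md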
@@ -7,13 +7,20 @@ ACCOUNT="vcmi"
 
 RELEASE_TAG="v1.9"
 
-DEP_FILENAME="dependencies-$1"
-DEP_URL="https://github.com/$ACCOUNT/vcmi-deps-windows/releases/download/$RELEASE_TAG/$DEP_FILENAME.txz"
+# 2. parameter: all | vcpkg | ucrt (default: all)
+PART="${2:-all}"
 
-curl -L "$DEP_URL" | tar -xf - --xz
+# --- VCPKG ---
+if [[ "$PART" == "all" || "$PART" == "vcpkg" ]]; then
+	DEP_FILENAME="dependencies-$1"
+	DEP_URL="https://github.com/$ACCOUNT/vcmi-deps-windows/releases/download/$RELEASE_TAG/$DEP_FILENAME.txz"
+	curl -L "$DEP_URL" | tar -xf - --xz
+fi
 
-UCRT_FILENAME="ucrtRedist-$1"
-UCRT_URL="https://github.com/$ACCOUNT/vcmi-deps-windows/releases/download/$RELEASE_TAG/$UCRT_FILENAME.txz"
-
-mkdir -p ucrt
-curl -L "$UCRT_URL" | tar -xf - --xz -C ucrt
+# --- UCRT ---
+if [[ "$PART" == "all" || "$PART" == "ucrt" ]]; then
+	UCRT_FILENAME="ucrtRedist-$1"
+	UCRT_URL="https://github.com/$ACCOUNT/vcmi-deps-windows/releases/download/$RELEASE_TAG/$UCRT_FILENAME.txz"
+	mkdir -p ucrt
+	curl -L "$UCRT_URL" | tar -xf - --xz -C ucrt
+fi
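With the new second parameter, the vcpkg dependencies and the UCRT redistributable can be fetched independently. Hypothetical invocations (the diff does not show the script's file name, so the path is an assumption; $1 is the arch suffix used in the release file names):

    ./msvc_dependencies.sh x64          # PART defaults to "all"
    ./msvc_dependencies.sh x64 vcpkg    # only dependencies-x64.txz
    ./msvc_dependencies.sh x64 ucrt     # only ucrtRedist-x64.txz, unpacked into ./ucrt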
@@ -128,14 +128,31 @@ if(ENABLE_COLORIZED_COMPILER_OUTPUT)
 	endif()
 endif()
 
-if(ENABLE_CCACHE)
-	find_program(CCACHE ccache REQUIRED)
-endif()
-
-# The XCode and MSVC builds each require some more configuration further down.
+# Prefer sccache on Windows/MSVC; fall back to ccache elsewhere
 if(ENABLE_CCACHE)
-	set(CMAKE_C_COMPILER_LAUNCHER "ccache")
-	set(CMAKE_CXX_COMPILER_LAUNCHER "ccache")
+	if(MSVC)
+		find_program(SCCACHE sccache)
+		if(SCCACHE)
+			message(STATUS "Enabling compiler launcher: sccache")
+			set(CMAKE_C_COMPILER_LAUNCHER "${SCCACHE}")
+			set(CMAKE_CXX_COMPILER_LAUNCHER "${SCCACHE}")
+		else()
+			# Optional fallback (only if ccache is present)
+			find_program(CCACHE ccache)
+			if(CCACHE)
+				message(STATUS "Enabling compiler launcher: ccache (fallback)")
+				set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE}")
+				set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE}")
+			endif()
+		endif()
+	else()
+		find_program(CCACHE ccache REQUIRED)
+		if(CCACHE)
+			message(STATUS "Enabling compiler launcher: ccache")
+			set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE}")
+			set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE}")
+		endif()
+	endif()
 endif()
 
 if(ENABLE_CCACHE AND (CMAKE_GENERATOR STREQUAL "Xcode"))
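From the command line the launcher selection is transparent; configuring through the presets added later in this commit picks it up automatically, and the resulting counters are what CI/emit_partial.py parses. For example:

    cmake --preset windows-msvc-ninja-release    # Ninja preset sets sccache as the compiler launcher
    sccache --show-stats                         # hit/miss counters consumed by CI/emit_partial.py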
@@ -302,18 +319,22 @@ if(MINGW OR MSVC)
 	set(CMAKE_SHARED_LIBRARY_PREFIX "")
 
 	if(MSVC)
-		if(ENABLE_CCACHE)
-			# https://github.com/ccache/ccache/discussions/1154#discussioncomment-3611387
-			file(COPY_FILE
-				${CCACHE} ${CMAKE_BINARY_DIR}/cl.exe
-				ONLY_IF_DIFFERENT)
+		# Use ccache with MSVC + Visual Studio generator by shimming cl.exe.
+		if(ENABLE_CCACHE AND (CMAKE_GENERATOR MATCHES "Visual Studio"))
+			# Wrapper only for the VS generator; Ninja is handled via the CMAKE_<LANG>_COMPILER_LAUNCHER variables
+			if(CCACHE)
+				set(_shim "${CMAKE_BINARY_DIR}/cl.exe")
+				# Prefer a symlink; if it fails (e.g. Windows without Developer Mode),
+				# CMake will fall back to copying the file.
+				file(CREATE_LINK "${CCACHE}" "${_shim}" SYMBOLIC COPY_ON_ERROR)
 
-			set(CMAKE_VS_GLOBALS
-				"CLToolExe=cl.exe"
-				"CLToolPath=${CMAKE_BINARY_DIR}"
-				"TrackFileAccess=false"
-				"UseMultiToolTask=true"
-			)
+				set(CMAKE_VS_GLOBALS
+					"CLToolExe=cl.exe"
+					"CLToolPath=${CMAKE_BINARY_DIR}"
+					"TrackFileAccess=false"
+					"UseMultiToolTask=true"
+				)
+			endif()
 		endif()
 
 	add_definitions(-DBOOST_ALL_NO_LIB)
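The shim works because MSBuild resolves cl.exe through CLToolPath before the toolset directory. A quick sanity check after configuring with the Visual Studio generator (generator string and build directory are illustrative):

    cmake -G "Visual Studio 17 2022" -DENABLE_CCACHE=ON -B build
    ls -l build/cl.exe    # symlink (or copy) pointing at ccache, found via CLToolPath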
@@ -810,6 +831,7 @@ endif(WIN32)
 # Packaging section #
 #######################################
 
+# This needs to be removed in the future, once an OpenSign CI step for binary signing is in place, to avoid re-signing MS Redist / UCRT
 if(MSVC)
 	SET(CMAKE_INSTALL_SYSTEM_RUNTIME_DESTINATION ${BIN_DIR})
 	Include(InstallRequiredSystemLibraries)
@@ -838,6 +860,7 @@ endif()
 set(CPACK_PACKAGE_VENDOR "VCMI team")
 
 if(WIN32)
+	# This needs to be removed in the future, as we have already migrated to Inno Setup and CI artifacts are always packed as ZIP
 	# Note: due to NSI script generation process all of the backward slashes here are useful
 	set(CPACK_GENERATOR ZIP)
 	set(CPACK_MONOLITHIC_INSTALL 1)
@@ -203,9 +203,46 @@
       "inherits": "windows-msvc-release",
       "cacheVariables": {
         "ENABLE_CCACHE": "ON"
-
       }
     },
     {
+      "name": "windows-msvc-ninja-release",
+      "displayName": "Windows x64 RelWithDebInfo (Ninja)",
+      "description": "VCMI RelWithDebInfo build using Ninja + sccache",
+      "inherits": "default-release",
+      "generator": "Ninja",
+      "cacheVariables": {
+        "CMAKE_TOOLCHAIN_FILE": "${sourceDir}/vcpkg/scripts/buildsystems/vcpkg.cmake",
+        "CMAKE_POLICY_DEFAULT_CMP0091": "NEW",
+        "FORCE_BUNDLED_MINIZIP": "ON",
+        "ENABLE_CCACHE": "ON",
+        "CMAKE_C_COMPILER_LAUNCHER": "sccache",
+        "CMAKE_CXX_COMPILER_LAUNCHER": "sccache",
+        "CMAKE_MSVC_DEBUG_INFORMATION_FORMAT": "Embedded",
+        "ENABLE_MULTI_PROCESS_BUILDS": "OFF",
+        "CMAKE_BUILD_TYPE": "RelWithDebInfo",
+        "CMAKE_C_FLAGS_RELWITHDEBINFO": "/O2 /Ob1 /DNDEBUG /Z7",
+        "CMAKE_CXX_FLAGS_RELWITHDEBINFO": "/O2 /Ob1 /DNDEBUG /Z7"
+      }
+    },
+    {
+      "name": "windows-msvc-ninja-release-x86",
+      "displayName": "Windows x86 RelWithDebInfo (Ninja)",
+      "description": "VCMI RelWithDebInfo build using Ninja + sccache (x86)",
+      "inherits": "windows-msvc-ninja-release",
+      "cacheVariables": {
+        "VCPKG_TARGET_TRIPLET": "x86-windows"
+      }
+    },
+    {
+      "name": "windows-msvc-ninja-release-arm64",
+      "displayName": "Windows ARM64 RelWithDebInfo (Ninja)",
+      "description": "VCMI RelWithDebInfo build using Ninja + sccache (ARM64)",
+      "inherits": "windows-msvc-ninja-release",
+      "cacheVariables": {
+        "VCPKG_TARGET_TRIPLET": "arm64-windows"
+      }
+    },
+    {
       "name": "windows-mingw-conan-linux",
       "displayName": "Ninja+Conan release",
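Thanks to preset inheritance, the x86 and ARM64 variants only override the vcpkg triplet; everything else (Ninja generator, sccache launchers, /Z7 debug info) comes from the base preset. The new presets can be listed and used directly (the matching build presets follow in the next hunk):

    cmake --list-presets                             # includes the windows-msvc-ninja-* entries
    cmake --preset windows-msvc-ninja-release-x86    # inherits Ninja + sccache, x86-windows triplet
    cmake --build --preset windows-msvc-ninja-release-x86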
@@ -470,6 +507,21 @@
       "configurePreset": "windows-msvc-release",
       "inherits": "default-release"
     },
     {
+      "name": "windows-msvc-ninja-release",
+      "configurePreset": "windows-msvc-ninja-release",
+      "inherits": "default-release"
+    },
+    {
+      "name": "windows-msvc-ninja-release-x86",
+      "configurePreset": "windows-msvc-ninja-release-x86",
+      "inherits": "default-release"
+    },
+    {
+      "name": "windows-msvc-ninja-release-arm64",
+      "configurePreset": "windows-msvc-ninja-release-arm64",
+      "inherits": "default-release"
+    },
+    {
       "name": "windows-mingw-conan-linux",
       "configurePreset": "windows-mingw-conan-linux",