mirror of https://github.com/daylinmorgan/oizys.git
synced 2024-12-22 01:50:44 -06:00

cli: add support for GHA based update action

This commit is contained in:
  parent 3e0609d1f8
  commit c450271582

7 changed files with 202 additions and 34 deletions
@@ -1,5 +1,5 @@
## nix begat oizys
import std/[os, tables, sequtils, strformat,]
import std/[os, tables, sequtils, strformat,strutils]

import cligen, bbansi
import oizys/[context, github, nix, overlay, logging]
@@ -13,6 +13,17 @@ addHandler(
  )
)

proc confirm(q: string): bool =
  stderr.write $(q & bb"[yellow] (Y/n) ")
  while true:
    let ans = readLine(stdin)
    case ans.strip().toLowerAscii():
    of "y","yes": return true
    of "n","no": return false
    else:
      stderr.write($bb("[red]Please answer Yes/no\nexpected one of [b]Y,yes,N,no "))
      stderr.write "\n"

overlay:
  proc pre(
    flake: string = "",
@@ -32,9 +43,20 @@ overlay:
    ## output
    echo nixosConfigAttrs().join(" ")

  proc update(yes: bool = false) =
    ## *TBI* update and run nixos-rebuild
    fatal "not implemented"
  proc update(
    yes: bool = false,
    preview: bool = false
  ) =
    ## update and run nixos-rebuild
    let hosts = getHosts()
    if hosts.len > 1: fatalQuit "operation only supports one host"
    let run = getLastUpdateRun()
    echo fmt"run created at: {run.created_at}"
    echo "nvd diff:\n", getUpdateSummary(run.id, hosts[0])
    if preview: quit 0
    if yes or confirm("Proceed with system update?"):
      updateRepo()
      nixosRebuild("switch")

  proc build(minimal: bool = false) =
    ## nix build
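The new update flow prints run.created_at exactly as the API returns it; the GhWorkflowRun type added further down even carries a "# use datetime?" note. If a real timestamp is ever wanted, std/times can parse the ISO-8601 value, as in this small sketch (the timestamp is made up):

import std/times

# hypothetical created_at value, in the format the GitHub API returns
let createdAt = "2024-10-01T12:30:45Z"
let created = parse(createdAt, "yyyy-MM-dd'T'HH:mm:ss'Z'", utc())
echo created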
@@ -1,11 +1,11 @@
import std/[logging, os, strformat, strutils]
from std/nativesockets import getHostname

import bbansi
import ./logging

type
  OizysContext* = object
    flake, host: string
    flake: string
    hosts: seq[string]
    debug: bool
    ci: bool

@@ -39,13 +39,12 @@ proc updateContext*(
) =
  oc.debug = debug
  oc.resetCache = resetCache
  if host.len > 0:
    oc.hosts = host
  if flake != "":
    oc.flake =
      if flake.startsWith("github") or flake.startsWith("git+"): flake
      else: checkPath(flake.normalizedPath().absolutePath())
  debug oc

  debug bb(fmt"""[b]flake[/]: {oc.flake}, [b]hosts[/]: {oc.hosts.join(" ")}""")

proc getHosts*(): seq[string] = return oc.hosts
proc getFlake*(): string = return oc.flake
@@ -23,7 +23,7 @@ type

proc runCmdCapt*(
  cmd: string,
  capture: set[CaptureGrp],
  capture: set[CaptureGrp] = {CaptStdout},
): tuple[stdout, stderr: string, exitCode: int] =
  debug fmt"running cmd: {cmd}"
  let args = cmd.splitWhitespace()
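The only change to runCmdCapt is a default for capture, so call sites that only need stdout can drop the explicit set. A stubbed sketch of the calling convention (CaptureGrp and the proc body here are stand-ins; only the signature mirrors the diff):

type CaptureGrp = enum CaptStdout, CaptStderr

proc runCmdCapt(
  cmd: string,
  capture: set[CaptureGrp] = {CaptStdout}
): tuple[stdout, stderr: string, exitCode: int] =
  # stub body for illustration only
  discard

discard runCmdCapt("git status --porcelain")                            # stdout only (default)
discard runCmdCapt("git status --porcelain", {CaptStdout, CaptStderr})  # both streams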
@@ -1,21 +1,43 @@
import std/[httpclient,logging, os, strformat, strutils, json]
import ./logging
import std/[httpclient,logging, os, strformat, strutils, json, tables, tempfiles]
import jsony, bbansi, zippy/ziparchives
import ./[logging, exec, context]

# localPassC is used by zippy but the additional
# module mangling on nixos somehow breaks localPassC
when defined(amd64) and (defined(gcc) or defined(clang)):
  {.passC: "-msse4.1 -mpclmul".}

template withTmpDir(body: untyped): untyped =
  let tmpDir {.inject.} = createTempDir("oizys","")
  body
  removeDir(tmpDir)

var ghToken = getEnv("GITHUB_TOKEN")

proc checkToken() {.inline.} =
  if ghToken == "": fatalQuit "GITHUB_TOKEN not set"

#[curl -L \
  -X POST \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer <YOUR-TOKEN>" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  https://api.github.com/repos/OWNER/REPO/actions/workflows/WORKFLOW_ID/dispatches \
  -d '{"ref":"topic-branch","inputs":{"name":"Mona the Octocat","home":"San Francisco, CA"}}'
]#
proc ghClient(
  maxRedirects = 5
): HttpClient =
  checkToken()
  result = newHttpClient(maxRedirects = maxRedirects)
  result.headers = newHttpHeaders({
    "Accept" : "application/vnd.github+json",
    "Authorization" : fmt"Bearer {ghToken}",
    "X-GitHub-Api-Version": "2022-11-28",
  })


proc getGhApi(url: string): Response =
  let client = ghClient()
  try:
    result = client.get(url)
  except:
    error fmt"github api request failed: {url}"
    error fmt"response: {result.body}"
    quit QuitFailure

proc postGhApi(url: string, body: JsonNode) =
  checkToken()
  let client = newHttpClient()
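The commented curl invocation documents the workflow-dispatch endpoint that createDispatch drives through postGhApi. Expressed directly with std/httpclient and std/json, the same request looks roughly like this (OWNER, REPO, WORKFLOW_ID, and the ref/inputs are placeholders, just as in the comment):

import std/[httpclient, json, os]

let token = getEnv("GITHUB_TOKEN")
let client = newHttpClient()
client.headers = newHttpHeaders({
  "Accept": "application/vnd.github+json",
  "Authorization": "Bearer " & token,
  "X-GitHub-Api-Version": "2022-11-28",
})
let body = %*{"ref": "topic-branch", "inputs": {"name": "Mona the Octocat"}}
let resp = client.post(
  "https://api.github.com/repos/OWNER/REPO/actions/workflows/WORKFLOW_ID/dispatches",
  body = $body
)
echo resp.status   # GitHub answers 204 No Content on success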
@@ -43,4 +65,129 @@ proc createDispatch*(workflowFileName: string, `ref`: string) =
    }
  )

type
  GhArtifact = object
    id: int
    name: string
    url: string
    archive_download_url*: string

  GhWorkflowRun = object
    id*: int
    node_id: string
    run_number: int
    event: string
    status: string
    conclusion: string
    html_url: string
    workflow_id: int
    created_at*: string # use datetime?
    updated_at: string # use datetime?

  ListGhArtifactResponse = object
    total_count: int
    artifacts: seq[GhArtifact]

  ListGhWorkflowResponse = object
    total_count: int
    workflow_runs: seq[GhWorkflowRun]


proc listUpdateRuns(): seq[GhWorkflowRun] =
  ## get update.yml runs
  ## endpoint https://api.github.com/repos/OWNER/REPO/actions/workflows/WORKFLOW_ID/runs
  debug "listing update workflows"
  let response = getGhApi("https://api.github.com/repos/daylinmorgan/oizys/actions/workflows/update.yml/runs")
  fromJson(response.body, ListGhWorkflowResponse).workflow_runs
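These response objects only model the fields oizys cares about; jsony skips JSON keys that have no matching field, so the full GitHub payload does not need to be mirrored. A self-contained sketch of that behavior (the Run type and JSON snippet are made up):

import jsony

type Run = object
  id: int
  status: string

# extra keys such as "event" are simply skipped by jsony
let run = """{"id": 123, "status": "completed", "event": "schedule"}""".fromJson(Run)
assert run.id == 123 and run.status == "completed"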
proc getLastUpdateRun*(): GhWorkflowRun =
  let runs = listUpdateRuns()
  let run = runs[0]
  if run.conclusion == "failure":
    fatalQuit bb(fmt("Most recent run was not successful\n[b]runID[/]: {run.id}\n[b]conclusion[/]: {run.conclusion}"))
  if run.status in ["in_progress", "queued"]:
    fatalQuit bb(fmt("Most recent run is not finished\nview workflow run at: {run.html_url}"))
  result = run


proc getArtifacts(runId: int): seq[GhArtifact] =
  ## get workflow artifacts
  ## https://api.github.com/repos/OWNER/REPO/actions/runs/RUN_ID/artifacts
  let response = getGhApi(fmt"https://api.github.com/repos/daylinmorgan/oizys/actions/runs/{runId}/artifacts")
  fromJson(response.body, ListGhArtifactResponse).artifacts

proc getUpdateSummaryArtifact(runId: int, host: string): GhArtifact =
  let name = fmt"{host}-summary"
  let artifacts = getArtifacts(runId)
  for artifact in artifacts:
    if artifact.name == name:
      return artifact
  fatalQuit fmt"failed to find summary for run id: {runID}"
proc getUpdateSummaryUrl(runID: int, host: string): string =
  ## https://api.github.com/repos/OWNER/REPO/actions/artifacts/ARTIFACT_ID/ARCHIVE_FORMAT
  let artifact = getUpdateSummaryArtifact(runID, host)
  # httpclient was forwarding the Authorization headers,
  # which confused Azure where the archive lives...
  var response: Response
  try:
    let client = ghClient(maxRedirects = 0)
    response = client.get(artifact.archive_download_url)
  except:
    errorQuit fmt("fetching summary failed:\n\n{response.headers}\n\n{response.body}")

  if "location" notin response.headers.table:
    errorQuit fmt("fetching summary failed:\n\n{response.headers}\n\n{response.body}")

  let location = response.headers.table.getOrDefault("location", @[])
  if location.len == 0: errorQuit fmt("location header missing url?")
  return location[0]
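The maxRedirects = 0 client is the interesting part here: std/httpclient then hands back the 302 itself rather than following it, so the pre-signed artifact URL can be read from the Location header without re-sending the Authorization header to the redirect target. A standalone sketch of the same pattern (httpbin.org is only a stand-in redirecting endpoint):

import std/httpclient

let client = newHttpClient(maxRedirects = 0)
# the 302 is returned as-is instead of being followed
let resp = client.get("https://httpbin.org/redirect-to?url=https://example.com")
let location: string = resp.headers.getOrDefault("location")
echo resp.status, " -> ", location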
proc fetchUpdateSummaryFromUrl(url: string): string =
  withTmpDir:
    let client = newHttpClient()
    client.downloadFile(url, tmpDir / "summary.zip")
    let reader = openZipArchive(tmpDir / "summary.zip")
    try:
      result = reader.extractFile("summary.md")
    finally:
      reader.close()

proc getUpdateSummary*(runId: int, host: string): string =
  let url = getUpdateSummaryUrl(runId, host)
  result = fetchUpdateSummaryFromUrl(url)
type
  GitRepo = object
    path: string

proc git(r: GitRepo, rest: varargs[string]): string =
  result = "git"
  result.addArgs ["-C", r.path]
  result.addArgs rest

proc checkGit(code: int) =
  if code != 0: fatalQuit "git had a non-zero exit status"

proc fetch(r: GitRepo) =
  let code = runCmd r.git("fetch", "origin")
  checkGit code

proc status(r: GitRepo) =
  let (output, _, code) = runCmdCapt(r.git("status", "--porcelain"))
  checkGit code
  if output.len > 0:
    info "unstaged commits, cowardly exiting..."
    quit QuitFailure

proc rebase(r: GitRepo, `ref`: string) =
  r.status()
  let code = runCmd r.git("rebase", `ref`)
  checkGit code

proc updateRepo*() =
  let repo = GitRepo(path: getFlake())
  fetch repo
  rebase repo, "origin/flake-lock"
@@ -20,3 +20,4 @@ gitea
lock
code
comma-with-db
nix-index-with-db
@@ -94,9 +94,10 @@ proc trunc(s: string, limit: int): string =

proc display(msg: string, drvs: seq[Derivation]) =
  echo fmt"{msg}: [bold cyan]{drvs.len()}[/]".bb
  let maxLen = min(max drvs.mapIt(it.name.len), 40)
  for drv in drvs:
    echo " ", drv.name.trunc(maxLen).alignLeft(maxLen), " ", drv.hash.bb("faint")
  if drvs.len > 0:
    let maxLen = min(max drvs.mapIt(it.name.len), 40)
    for drv in drvs:
      echo " ", drv.name.trunc(maxLen).alignLeft(maxLen), " ", drv.hash.bb("faint")

proc display(output: DryRunOutput) =
  if isDebug():
@@ -130,7 +131,7 @@ proc evaluateDerivations(drvs: seq[string]): Table[string, NixDerivation] =
  fromJson(output, Table[string,NixDerivation])


# TODO: replace asserts in this proc
# TODO: replace asserts in this proc, would be easier with results type
proc findSystemPaths(drvs: Table[string, NixDerivation]): seq[string] =
  let hosts = getHosts()
  let systemDrvs = collect(
@@ -148,7 +149,8 @@ proc findSystemPaths(drvs: Table[string, NixDerivation]): seq[string] =

func isIgnored(drv: string): bool =
  const ignoredPackages = (slurp "ignored.txt").splitLines()
  drv.split("-", 1)[1].replace(".drv","") in ignoredPackages
  let name = drv.split("-", 1)[1].replace(".drv","")
  name in ignoredPackages

proc systemPathDrvsToBuild(): seq[string] =
  let toBuild = toBuildNixosConfiguration()
@@ -157,11 +159,12 @@ proc systemPathDrvsToBuild(): seq[string] =
  var inputDrvs: seq[string]
  for p in systemPaths:
    inputDrvs &= drvs[p].inputDrvs.keys().toSeq()
  result = collect(
    for drv in inputDrvs:
      if (drv in toBuild) and (not drv.isIgnored()):
        drv & "^*"
  )
  result = inputDrvs.filterIt(it in toBuild)
  let nToBuild = result.len
  result = result.filterIt(not it.isIgnored)
  let nIgnored = result.len - nToBuild
  debug fmt"ignored {nIgnored} derivations"
  result = result.mapIt(it & "^*")

func splitDrv(drv: string): tuple[name, hash:string] =
  let s = drv.split("-", 1)
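The isIgnored helper above embeds ignored.txt (extended earlier in this diff) at compile time via slurp and compares the derivation name with the /nix/store/<hash>- prefix and .drv suffix stripped. A self-contained sketch of that matching step, with the list inlined so it runs without the file (the store path is made up):

import std/strutils

# inlined stand-in for: const ignoredPackages = (slurp "ignored.txt").splitLines()
const ignoredPackages = @["lock", "code", "comma-with-db", "nix-index-with-db"]

let drv = "/nix/store/abc123xyz-nix-index-with-db.drv"   # hypothetical store path
# splitting at the first "-" drops the /nix/store/<hash> prefix
let name = drv.split("-", 1)[1].replace(".drv", "")
echo name, " ignored: ", name in ignoredPackages          # -> true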
@@ -1,8 +1,4 @@
# oizys-nim todo's

- [x] nix commands including dry runs
- [ ] gh api commands
- [x] ci <- start with the easier one
- [ ] update

<!-- generated with <3 by daylinmorgan/todo -->