diff --git a/share/wake/lib/system/job.wake b/share/wake/lib/system/job.wake
index 51e52b0ac..9b0d80e4c 100644
--- a/share/wake/lib/system/job.wake
+++ b/share/wake/lib/system/job.wake
@@ -92,6 +92,13 @@ export def primJobLaunch (job: Job) (jobKey: JobKey) (usage: Usage): Unit =
 export def primJobFailLaunch (job: Job) (error: Error): Unit =
     (\_ \_ prim "job_fail_launch") job error
 
+# Wrap `primJobFailLaunch` in the type signature required to satisfy `rmapFail`, for cases where
+# a runner fails *before* delegating to `localRunner` or anything else which calls `primJobLaunch`.
+export def markJobSetupFailure job err =
+    def Unit = primJobFailLaunch job err
+
+    Fail err
+
 # Complete a job after launch with userland defined failure
 export def primJobFailFinish (job: Job) (error: Error): Unit =
     (\_ \_ prim "job_fail_finish") job error
diff --git a/share/wake/lib/system/job_cache_runner.wake b/share/wake/lib/system/job_cache_runner.wake
index fb3a51da5..621a376a7 100644
--- a/share/wake/lib/system/job_cache_runner.wake
+++ b/share/wake/lib/system/job_cache_runner.wake
@@ -70,7 +70,9 @@ export def mkJobCacheRunner (hashFn: RunnerInput => Result String Error) (wakero
 
         def jobCacheVisible = JArray (map mkVisJson vis)
 
-        require Pass hashKey = hashFn input
+        require Pass hashKey =
+            hashFn input
+            | rmapFail (markJobSetupFailure job)
 
         def jobCacheJsonIn =
             prettyJSON
@@ -86,14 +88,21 @@ export def mkJobCacheRunner (hashFn: RunnerInput => Result String Error) (wakero
                 "dir_redirects" :-> JObject (wakeroot :-> JString "./",),
             )
 
-        require Pass cacheResult =
-            job_cache_read jobCacheJsonIn
-            | rmapFail failWithError
+        def jobCacheJsonOutResult =
+            require Pass cacheResult =
+                job_cache_read jobCacheJsonIn
+                | rmapFail failWithError
 
-        require Pass jobCacheJsonOut = parseJSONBody cacheResult
+            require Pass jobCacheJsonOut = parseJSONBody cacheResult
 
-        require Pass (JBoolean cacheHit) = jField jobCacheJsonOut "found"
-        else failWithError "job-cache returned unexpected json schema"
+            require Pass (JBoolean cacheHit) = jField jobCacheJsonOut "found"
+            else failWithError "job-cache returned unexpected json schema"
+
+            Pass (jobCacheJsonOut; cacheHit)
+
+        require Pass (jobCacheJsonOut; cacheHit) =
+            jobCacheJsonOutResult
+            | rmapFail (markJobSetupFailure job)
 
         def isDebugOn =
             require Some value = getenv "DEBUG_WAKE_SHARED_CACHE"
@@ -107,63 +116,16 @@ export def mkJobCacheRunner (hashFn: RunnerInput => Result String Error) (wakero
             require True = isDebugOn
 
             def _ = write ".cache-hit/read.{prefix}.json" "//{label}\n{jobCacheJsonIn}"
 
-            def _ = write ".cache-hit/out.{prefix}.json" "//{label}\n{cacheResult}"
-
-            True
-
-        require Pass match_info = jField jobCacheJsonOut "match"
-        require Pass output_info = jField match_info "output_info"
-
-        require Pass status =
-            jField output_info "status"
-            | jInteger
-
-        require Pass runtime =
-            jField output_info "runtime"
-            | jDouble
-
-        require Pass cputime =
-            jField output_info "cputime"
-            | jDouble
-        require Pass mem =
-            jField output_info "mem"
-            | jInteger
+            def _ =
+                write ".cache-hit/out.{prefix}.json" "//{label}\n{prettyJSON jobCacheJsonOut}"
 
-        require Pass ibytes =
-            jField output_info "ibytes"
-            | jInteger
-
-        require Pass obytes =
-            jField output_info "obytes"
-            | jInteger
-
-        require Pass inputs =
-            jField match_info "input_files"
-            | jArray jString
-
-        require Pass output_files =
-            jField match_info "output_files"
-            | jArray getPath
-
-        require Pass output_dirs =
-            jField match_info "output_dirs"
-            | jArray getPath
-
-        require Pass output_symlinks =
-            jField match_info "output_symlinks"
-            | jArray getPath
-
-        require Pass stdout =
-            jField output_info "stdout"
-            | jString
+            True
 
-        require Pass stderr =
-            jField output_info "stderr"
-            | jString
+        require Pass (JobCacheMatch inputs outputs stdout stderr predict) =
+            parseJobCacheMatch job jobCacheJsonOut
+            | rmapFail (markJobSetupFailure job)
 
-        def outputs = output_files ++ output_dirs ++ output_symlinks
-        def predict = Usage status runtime cputime mem ibytes obytes
         def _ = primJobVirtual job stdout stderr predict
 
         Pass (RunnerOutput inputs outputs Nil predict)
@@ -246,3 +208,67 @@ export def mkJobCacheRunner (hashFn: RunnerInput => Result String Error) (wakero
         Pass (RunnerOutput (map getPathName vis) outputs cleanable usage)
 
     makeRunner "job-cache: {name}" run
+
+tuple JobCacheMatch =
+    Inputs: List String
+    Outputs: List String
+    Stdout: String
+    Stderr: String
+    Predict: Usage
+
+def parseJobCacheMatch (job: Job) (jobCacheJsonOut: JValue): Result JobCacheMatch Error =
+    require Pass match_info = jField jobCacheJsonOut "match"
+    require Pass output_info = jField match_info "output_info"
+
+    require Pass status =
+        jField output_info "status"
+        | jInteger
+
+    require Pass runtime =
+        jField output_info "runtime"
+        | jDouble
+
+    require Pass cputime =
+        jField output_info "cputime"
+        | jDouble
+
+    require Pass mem =
+        jField output_info "mem"
+        | jInteger
+
+    require Pass ibytes =
+        jField output_info "ibytes"
+        | jInteger
+
+    require Pass obytes =
+        jField output_info "obytes"
+        | jInteger
+
+    require Pass inputs =
+        jField match_info "input_files"
+        | jArray jString
+
+    require Pass output_files =
+        jField match_info "output_files"
+        | jArray getPath
+
+    require Pass output_dirs =
+        jField match_info "output_dirs"
+        | jArray getPath
+
+    require Pass output_symlinks =
+        jField match_info "output_symlinks"
+        | jArray getPath
+
+    require Pass stdout =
+        jField output_info "stdout"
+        | jString
+
+    require Pass stderr =
+        jField output_info "stderr"
+        | jString
+
+    def outputs = output_files ++ output_dirs ++ output_symlinks
+    def predict = Usage status runtime cputime mem ibytes obytes
+
+    Pass (JobCacheMatch inputs outputs stdout stderr predict)
diff --git a/share/wake/lib/system/remote_cache_runner.wake b/share/wake/lib/system/remote_cache_runner.wake
index f159eb0f5..282600423 100644
--- a/share/wake/lib/system/remote_cache_runner.wake
+++ b/share/wake/lib/system/remote_cache_runner.wake
@@ -249,7 +249,9 @@ export def mkRemoteCacheRunner (rscApi: RemoteCacheApi) (hashFn: RunnerInput =>
     def run (job: Job) (input: RunnerInput): Result RunnerOutput Error =
         def label = input.getRunnerInputLabel
 
-        require Pass hashKey = hashFn input
+        require Pass hashKey =
+            hashFn input
+            | rmapFail (markJobSetupFailure job)
 
         # If pulling from the cache is not enabled don't bother searching.
         require True = rscApi.getRemoteCacheApiCanPull
diff --git a/share/wake/lib/system/runner.wake b/share/wake/lib/system/runner.wake
index 611fa9b1e..8e0d6e588 100644
--- a/share/wake/lib/system/runner.wake
+++ b/share/wake/lib/system/runner.wake
@@ -234,7 +234,7 @@ export def makeJSONRunner ((JSONRunnerPlan rawScript extraArgs extraEnv estimate
 
     def run (job: Job) ((RunnerInput label command visible environment directory stdin res prefix record isatty fnInputs fnOutputs): RunnerInput): Result RunnerOutput Error =
         require True = executeOk
-        else failWithError "Runner {script} is not executable"
+        else markJobSetupFailure job "Runner {script} is not executable".makeError
 
         def Usage status runtime cputime membytes inbytes outbytes = record
 
@@ -263,18 +263,14 @@ export def makeJSONRunner ((JSONRunnerPlan rawScript extraArgs extraEnv estimate
                 Nil
             )
 
-        require Pass build =
-            mkdir ".build"
-            | addErrorContext "Failed to 'mkdir .build'."
-            |< getPathName
-
-        def specFile = "{build}/spec-{prefix}.json"
-        def resultFile = "{build}/result-{prefix}.json"
+        def buildDirName = ".build"
+        def specFile = "{buildDirName}/spec-{prefix}.json"
+        def resultFile = "{buildDirName}/result-{prefix}.json"
 
         require Pass _ =
             write specFile (prettyJSON json)
            | addErrorContext "Failed to 'write {specFile}: '"
-            |< getPathName
+            | rmapFail (markJobSetupFailure job)
 
         def cmd = script, "-I", "-p", specFile, "-o", resultFile, extraArgs
 
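Usage note (sketch only, not part of the patch): every runner touched above applies the same pattern, piping any Result computed before the job is actually launched through `rmapFail (markJobSetupFailure job)`, so a pre-launch failure is recorded against the job instead of surfacing only as a failed Result. A hypothetical third-party runner could adopt it the same way; `findToolHome`, `EXAMPLE_TOOL_HOME`, `exampleRun`, and `exampleRunner` below are illustrative names, not identifiers from this change.

# Hypothetical setup step that can fail before any job has been launched.
def findToolHome (label: String): Result String Error =
    require Some home = getenv "EXAMPLE_TOOL_HOME"
    else failWithError "EXAMPLE_TOOL_HOME is not set (needed by {label})"

    Pass home

# Route the setup failure through markJobSetupFailure so the job is marked as
# failed-at-launch instead of being left without a recorded outcome.
def exampleRun (job: Job) (input: RunnerInput): Result RunnerOutput Error =
    require Pass _ =
        findToolHome input.getRunnerInputLabel
        | rmapFail (markJobSetupFailure job)

    # A real runner would launch the job here; this sketch stops short of that.
    markJobSetupFailure job "exampleRun is a sketch and does not launch jobs".makeError

def exampleRunner =
    makeRunner "example-setup-check" exampleRun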