Fix potential "infinite recursion" panics in runners (#1670)
* Fix potential "infinite recursion" panics in runners

* Fix wake-format
ag-eitilt authored Dec 9, 2024
1 parent a50abc6 commit 72ff132
Showing 4 changed files with 101 additions and 70 deletions.
7 changes: 7 additions & 0 deletions share/wake/lib/system/job.wake
@@ -92,6 +92,13 @@ export def primJobLaunch (job: Job) (jobKey: JobKey) (usage: Usage): Unit =
export def primJobFailLaunch (job: Job) (error: Error): Unit =
    (\_ \_ prim "job_fail_launch") job error

# Wrap `primJobFailLaunch` in the type signature required to satisfy `rmapFail`, for cases where
# a runner fails *before* delegating to `localRunner` or anything else which calls `primJobLaunch`.
export def markJobSetupFailure job err =
    def Unit = primJobFailLaunch job err

    Fail err

# Complete a job after launch with userland defined failure
export def primJobFailFinish (job: Job) (error: Error): Unit =
    (\_ \_ prim "job_fail_finish") job error
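For context, a minimal usage sketch (illustrative only, not part of this commit): any fallible step a runner performs before the job is actually launched can be piped through `rmapFail (markJobSetupFailure job)`, so the failure is recorded on the job instead of leaving it un-launched. `computeSetupValue` below is a hypothetical pre-launch step; the real call sites added by this commit in job_cache_runner.wake, remote_cache_runner.wake, and runner.wake follow the same pattern.

# Sketch only; computeSetupValue is a hypothetical fallible pre-launch step.
def exampleRun (job: Job) (input: RunnerInput): Result RunnerOutput Error =
    require Pass _ =
        computeSetupValue input
        | rmapFail (markJobSetupFailure job)

    # On the Fail path above, markJobSetupFailure has already marked the job
    # as failed at launch, so the error simply propagates to the caller.
    Fail "sketch: remainder of the runner elided".makeError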
146 changes: 86 additions & 60 deletions share/wake/lib/system/job_cache_runner.wake
@@ -70,7 +70,9 @@ export def mkJobCacheRunner (hashFn: RunnerInput => Result String Error) (wakero

def jobCacheVisible = JArray (map mkVisJson vis)

require Pass hashKey = hashFn input
require Pass hashKey =
    hashFn input
    | rmapFail (markJobSetupFailure job)

def jobCacheJsonIn =
    prettyJSON
@@ -86,14 +88,21 @@
"dir_redirects" :-> JObject (wakeroot :-> JString "./",),
)

require Pass cacheResult =
    job_cache_read jobCacheJsonIn
    | rmapFail failWithError
def jobCacheJsonOutResult =
    require Pass cacheResult =
        job_cache_read jobCacheJsonIn
        | rmapFail failWithError

require Pass jobCacheJsonOut = parseJSONBody cacheResult
    require Pass jobCacheJsonOut = parseJSONBody cacheResult

require Pass (JBoolean cacheHit) = jField jobCacheJsonOut "found"
else failWithError "job-cache returned unexpected json schema"
    require Pass (JBoolean cacheHit) = jField jobCacheJsonOut "found"
    else failWithError "job-cache returned unexpected json schema"

    Pass (jobCacheJsonOut; cacheHit)

require Pass (jobCacheJsonOut; cacheHit) =
    jobCacheJsonOutResult
    | rmapFail (markJobSetupFailure job)

def isDebugOn =
    require Some value = getenv "DEBUG_WAKE_SHARED_CACHE"
@@ -107,63 +116,16 @@ export def mkJobCacheRunner (hashFn: RunnerInput => Result String Error) (wakero
require True = isDebugOn

def _ = write ".cache-hit/read.{prefix}.json" "//{label}\n{jobCacheJsonIn}"
def _ = write ".cache-hit/out.{prefix}.json" "//{label}\n{cacheResult}"

True

require Pass match_info = jField jobCacheJsonOut "match"
require Pass output_info = jField match_info "output_info"

require Pass status =
    jField output_info "status"
    | jInteger

require Pass runtime =
    jField output_info "runtime"
    | jDouble

require Pass cputime =
    jField output_info "cputime"
    | jDouble

require Pass mem =
    jField output_info "mem"
    | jInteger
def _ =
    write ".cache-hit/out.{prefix}.json" "//{label}\n{prettyJSON jobCacheJsonOut}"

require Pass ibytes =
    jField output_info "ibytes"
    | jInteger

require Pass obytes =
    jField output_info "obytes"
    | jInteger

require Pass inputs =
    jField match_info "input_files"
    | jArray jString

require Pass output_files =
    jField match_info "output_files"
    | jArray getPath

require Pass output_dirs =
    jField match_info "output_dirs"
    | jArray getPath

require Pass output_symlinks =
    jField match_info "output_symlinks"
    | jArray getPath

require Pass stdout =
    jField output_info "stdout"
    | jString
True

require Pass stderr =
    jField output_info "stderr"
    | jString
require Pass (JobCacheMatch inputs outputs stdout stderr predict) =
    parseJobCacheMatch job jobCacheJsonOut
    | rmapFail (markJobSetupFailure job)

def outputs = output_files ++ output_dirs ++ output_symlinks
def predict = Usage status runtime cputime mem ibytes obytes
def _ = primJobVirtual job stdout stderr predict

Pass (RunnerOutput inputs outputs Nil predict)
@@ -246,3 +208,67 @@ export def mkJobCacheRunner (hashFn: RunnerInput => Result String Error) (wakero
Pass (RunnerOutput (map getPathName vis) outputs cleanable usage)

makeRunner "job-cache: {name}" run

tuple JobCacheMatch =
    Inputs: List String
    Outputs: List String
    Stdout: String
    Stderr: String
    Predict: Usage

def parseJobCacheMatch (job: Job) (jobCacheJsonOut: JValue): Result JobCacheMatch Error =
    require Pass match_info = jField jobCacheJsonOut "match"
    require Pass output_info = jField match_info "output_info"

    require Pass status =
        jField output_info "status"
        | jInteger

    require Pass runtime =
        jField output_info "runtime"
        | jDouble

    require Pass cputime =
        jField output_info "cputime"
        | jDouble

    require Pass mem =
        jField output_info "mem"
        | jInteger

    require Pass ibytes =
        jField output_info "ibytes"
        | jInteger

    require Pass obytes =
        jField output_info "obytes"
        | jInteger

    require Pass inputs =
        jField match_info "input_files"
        | jArray jString

    require Pass output_files =
        jField match_info "output_files"
        | jArray getPath

    require Pass output_dirs =
        jField match_info "output_dirs"
        | jArray getPath

    require Pass output_symlinks =
        jField match_info "output_symlinks"
        | jArray getPath

    require Pass stdout =
        jField output_info "stdout"
        | jString

    require Pass stderr =
        jField output_info "stderr"
        | jString

    def outputs = output_files ++ output_dirs ++ output_symlinks
    def predict = Usage status runtime cputime mem ibytes obytes

    Pass (JobCacheMatch inputs outputs stdout stderr predict)
4 changes: 3 additions & 1 deletion share/wake/lib/system/remote_cache_runner.wake
@@ -249,7 +249,9 @@ export def mkRemoteCacheRunner (rscApi: RemoteCacheApi) (hashFn: RunnerInput =>
def run (job: Job) (input: RunnerInput): Result RunnerOutput Error =
    def label = input.getRunnerInputLabel

    require Pass hashKey = hashFn input
    require Pass hashKey =
        hashFn input
        | rmapFail (markJobSetupFailure job)

    # If pulling from the cache is not enabled don't bother searching.
    require True = rscApi.getRemoteCacheApiCanPull
14 changes: 5 additions & 9 deletions share/wake/lib/system/runner.wake
@@ -234,7 +234,7 @@ export def makeJSONRunner ((JSONRunnerPlan rawScript extraArgs extraEnv estimate

def run (job: Job) ((RunnerInput label command visible environment directory stdin res prefix record isatty fnInputs fnOutputs): RunnerInput): Result RunnerOutput Error =
    require True = executeOk
    else failWithError "Runner {script} is not executable"
    else markJobSetupFailure job "Runner {script} is not executable".makeError

    def Usage status runtime cputime membytes inbytes outbytes = record

@@ -263,18 +263,14 @@
Nil
)

require Pass build =
    mkdir ".build"
    | addErrorContext "Failed to 'mkdir .build'."
    |< getPathName

def specFile = "{build}/spec-{prefix}.json"
def resultFile = "{build}/result-{prefix}.json"
def buildDirName = ".build"
def specFile = "{buildDirName}/spec-{prefix}.json"
def resultFile = "{buildDirName}/result-{prefix}.json"

require Pass _ =
    write specFile (prettyJSON json)
    | addErrorContext "Failed to 'write {specFile}: '"
    |< getPathName
    | rmapFail (markJobSetupFailure job)

def cmd = script, "-I", "-p", specFile, "-o", resultFile, extraArgs

