diff --git a/.github/workflows/install_and_test.yml b/.github/workflows/install_and_test.yml index 67a34828c..267d21bff 100644 --- a/.github/workflows/install_and_test.yml +++ b/.github/workflows/install_and_test.yml @@ -40,6 +40,10 @@ jobs: run: | nix build .#check-examples -L + - name: Try to extract Rust By Examples + run: | + nix build .#rust-by-example-hax-extraction -L + - name: Checkout specifications uses: actions/checkout@v3 with: diff --git a/.github/workflows/licenses.yml b/.github/workflows/licenses.yml new file mode 100644 index 000000000..914ab0ab7 --- /dev/null +++ b/.github/workflows/licenses.yml @@ -0,0 +1,27 @@ +name: Check licenses + +on: + pull_request: + merge_group: + workflow_dispatch: + push: + branches: [main] + +jobs: + tests: + name: nix-action + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: extractions/setup-just@v1 + - name: Set-up OCaml + uses: ocaml/setup-ocaml@v3 + with: + ocaml-compiler: 5 + - name: Install cargo-deny + run: cargo install cargo-deny + - name: Install toml2json + run: cargo install toml2json + - name: Check the licenses + run: just check-licenses + diff --git a/.github/workflows/mlkem.yml b/.github/workflows/mlkem.yml index 267e1e9e5..0d59ce119 100644 --- a/.github/workflows/mlkem.yml +++ b/.github/workflows/mlkem.yml @@ -40,61 +40,15 @@ jobs: with: repository: hacl-star/hacl-star path: hacl-star - - - name: 🏃 Extract the Kyber reference code - run: | - eval $(opam env) - (cd proofs/fstar/extraction/ && ./clean.sh) - rm -f sys/platform/proofs/fstar/extraction/*.fst* - ./hax-driver.py --kyber-reference - - - name: 🏃 Regenerate `extraction-*` folders - run: ./proofs/fstar/patches.sh apply - - - name: 🏃 Make sure snapshots are up-to-date - run: git diff --exit-code - - - name: 🏃 Verify the Kyber reference code + - name: 🏃 Extract ML-KEM crate + working-directory: libcrux-ml-kem + run: ./hax.py extract + + - name: 🏃 Lax ML-KEM crate + working-directory: libcrux-ml-kem + run: | + env FSTAR_HOME=${{ 
github.workspace }}/fstar \ HACL_HOME=${{ github.workspace }}/hacl-star \ HAX_HOME=${{ github.workspace }}/hax \ PATH="${PATH}:${{ github.workspace }}/fstar/bin" \ - ./hax-driver.py --verify-extraction - - - name: 🏃 Verify Kyber `extraction-edited` F* code - run: | - env FSTAR_HOME=${{ github.workspace }}/fstar \ - HACL_HOME=${{ github.workspace }}/hacl-star \ - HAX_HOME=${{ github.workspace }}/hax \ - PATH="${PATH}:${{ github.workspace }}/fstar/bin" \ - make -C proofs/fstar/extraction-edited - - - name: 🏃 Verify Kyber `extraction-secret-independent` F* code - run: | - env FSTAR_HOME=${{ github.workspace }}/fstar \ - HACL_HOME=${{ github.workspace }}/hacl-star \ - HAX_HOME=${{ github.workspace }}/hax \ - PATH="${PATH}:${{ github.workspace }}/fstar/bin" \ - make -C proofs/fstar/extraction-secret-independent - - - name: 🏃 Extract the Kyber specification - run: | - eval $(opam env) - # Extract the functions in the compress module individually to test - # the function-extraction code. - # Extract functions from the remaining modules to test the - # module-extraction code. - ./hax-driver.py --crate-path specs/kyber \ - --functions hacspec_kyber::compress::compress \ - hacspec_kyber::compress::decompress \ - hacspec_kyber::compress::compress_d \ - hacspec::kyber::compress::decompress_d \ - --modules ind_cpa \ - hacspec_kyber \ - matrix \ - ntt \ - parameters \ - sampling \ - serialize \ - --exclude-modules libcrux::hacl::sha3 libcrux::digest + ./hax.py prove --admit diff --git a/.utils/expand.sh b/.utils/expand.sh deleted file mode 100755 index 6f0e5ea96..000000000 --- a/.utils/expand.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -# This script expands a crate so that one can inspect macro expansion -# by hax. It is a wrapper around `cargo expand` that inject the -# required rustc flags. 
- -RUSTFLAGS='-Zcrate-attr=register_tool(_hax) -Zcrate-attr=feature(register_tool) --cfg hax_compilation --cfg _hax --cfg hax --cfg hax_backend_fstar --cfg hax' cargo expand "$@" - diff --git a/.utils/list-names.sh b/.utils/list-names.sh deleted file mode 100755 index e0c99c586..000000000 --- a/.utils/list-names.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -function pager() { - if command -v bat &> /dev/null; then - bat -l ml - else - less - fi -} - -hax-engine-names-extract | sed '/include .val/,$d' | pager diff --git a/.utils/rebuild.sh b/.utils/rebuild.sh index adadc61ce..150da64f0 100755 --- a/.utils/rebuild.sh +++ b/.utils/rebuild.sh @@ -22,7 +22,7 @@ DUNEJOBS=${DUNEJOBS:-} # required since `set -u` YELLOW=43 GREEN=42 RED=41 -BLACK=40 +BLACK=90 status () { echo -e "\033[1m[rebuild script] \033[30m\033[$1m$2\033[0m"; } cd_rootwise () { diff --git a/.utils/rust-by-example.js b/.utils/rust-by-example.js new file mode 100644 index 000000000..1dd0c6f70 --- /dev/null +++ b/.utils/rust-by-example.js @@ -0,0 +1,143 @@ +// This script expects Rust By Example to be in current directory +// (clone the repo https://github.com/rust-lang/rust-by-example, `cd` into it, and run `node rust-by-examples.js`) + +const fs = require('fs'); +const SRC_DIR = 'src'; + +// Lists all markdown files under `SRC_DIR` +function getMarkdownFiles() { + return fs.readdirSync(SRC_DIR, { recursive: true }) + .filter(path => path.endsWith('.md')); +} + +// Code blocks from a file of given path +function extractCodeBlocks(path) { + let contents = fs.readFileSync(SRC_DIR + '/' + path).toString(); + let blocks = contents + .split(/^```/m) + .filter((_, i) => i % 2 == 1) + .map(s => { + let lines = s.split('\n'); + let modifiers = lines[0].split(',').map(x => x.trim()).filter(x => x); + let contents = lines.slice(1).join('\n'); + return {modifiers, contents}; + }) + .filter(x => x.modifiers.includes('rust')); + let name = path.replace(/[.]md$/, '').split('/').join('_'); + return {name, 
blocks}; +} + +let code = getMarkdownFiles() + .map(extractCodeBlocks) + .filter(({blocks}) => blocks.length); + +// Strips the comments of a rust snippet +let stripComments = rust_snippet => rust_snippet.replace(/[/][/]+.*/mg, ''); + +// Given a Rust snippet, returns `true` whenever we detect a top-level +// `let` binding: this means we need to wrap the snippet in a function. +let isDirectLet = rust_snippet => stripComments(rust_snippet).trim().startsWith('let '); + +// Wraps a Rust snippet inside a function +let protectSnippet = rust_snippet => `fn wrapper_fn() { let _ = {${rust_snippet}}; }`; + +function codeBlocksToModules(code_blocks) { + let denylist = [ + /unsafe_asm \d+/ + ]; + let modules = {}; + + for(let {name, blocks} of code_blocks) { + let mod_section = `section_${name}`; + modules[mod_section] = {}; + let nth = 0; + for(let {modifiers, contents} of blocks) { + nth += 1; + if(['edition2015', 'compile_fail', 'ignore'].some(m => modifiers.includes(m))) { + continue; + } + let id = `section_${name} ${nth}`; + // Remove top-level assertions + contents = contents.replace(/^# assert.*\n?/mg, ''); + // Strip `# ` (the mdbook marker to hide a line) + contents = contents.replace(/^# /mg, ''); + // Whenever we detect a `let` + if(isDirectLet(contents)) + contents = protectSnippet(contents); + if(denylist.some(re => id.match(re))) + continue; + let mod_snippet = `snippet_${nth}`; + // Replace `crate::` by a full path to the current module + contents = contents.replace(/crate::/g, 'crate::' + mod_section + '::' + mod_snippet + '::'); + modules[mod_section][mod_snippet] = `// modifiers: ${modifiers.join(', ')}\n` + contents; + } + } + + return modules; +} + +let modules = codeBlocksToModules(code); + +let OUTPUT_CRATE = 'rust-by-examples-crate'; +fs.rmSync(OUTPUT_CRATE, { recursive: true, force: true }); +fs.mkdirSync(OUTPUT_CRATE, { recursive: true }); +const { execSync } = require('child_process'); +execSync("cargo init --lib", { cwd: OUTPUT_CRATE }); + +let 
OUTPUT_CRATE_SRC = OUTPUT_CRATE + '/src/'; +fs.rmSync(OUTPUT_CRATE_SRC, { recursive: true, force: true }); +let root_mod = '#![allow(unused)]'; +for(let mod_name in modules) { + let submodules = modules[mod_name]; + fs.mkdirSync(OUTPUT_CRATE_SRC + mod_name, { recursive: true }); + let mod_contents = ''; + for (let submod_name in submodules) { + let contents = submodules[submod_name]; + fs.writeFileSync(OUTPUT_CRATE_SRC + mod_name + '/' + submod_name + '.rs', contents); + mod_contents += 'pub mod ' + submod_name + ';\n'; + } +fs.writeFileSync(OUTPUT_CRATE_SRC + mod_name + '.rs', mod_contents); + root_mod += 'pub mod ' + mod_name + ';\n'; +} +fs.writeFileSync(OUTPUT_CRATE_SRC + '/lib.rs', root_mod); + + +// A list of [, []] that are known not to be processed by hax +let cargo_hax_denylist = [ + ['error_iter_result', [3]], + ['error_option_unwrap_defaults', [3,4]], + ['flow_control_for', [1,2,3,5]], + ['flow_control_if_let', [3]], + ['flow_control_loop_nested', [1]], + ['flow_control_loop_return', [1]], + ['flow_control_loop', [1]], + ['flow_control_match_binding', [1,2]], + ['flow_control_match_destructuring_destructure_pointers', [1]], + ['flow_control_match_destructuring_destructure_slice', [1]], + ['flow_control_match', [1]], + ['flow_control_while_let', [1,2]], + ['fn_closures_capture', [1]], + ['fn_closures_input_parameters', [1]], + ['fn', [1]], + ['hello_print_fmt', [1]], + ['macros_dry', [1]], + ['scope_borrow_alias', [1]], + ['scope_borrow_ref', [1]], + ['scope_move_mut', [1]], + ['scope_raii', [1]], + ['std_arc', [1]], + ['std_hash', [1]], + ['std_misc_arg_matching', [1]], + ['std_misc_channels', [1]], + ['std_misc_file_read_lines', [3]], + ['std_misc_threads', [1]], + ['std_misc_threads_testcase_mapreduce', [1]], + ['std_str', [1]], + ['trait_iter', [1]], + ['trait', [1]], + ['unsafe', [1,2]], +].map(([module, snippets]) => snippets.map(n => `section_${module}::snippet_${n}`)).flat(); + +let include_clause = cargo_hax_denylist.map(path => 
`-*::${path}::**`).join(' '); + +execSync(`cargo hax into -i '${include_clause}' fstar`, { cwd: OUTPUT_CRATE, stdio: 'inherit' }); diff --git a/Cargo.lock b/Cargo.lock index 8462089a3..5daeaf35d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "aho-corasick" @@ -98,25 +98,6 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" -[[package]] -name = "bincode" -version = "2.0.0-rc.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f11ea1a0346b94ef188834a65c068a03aec181c94896d481d7a0a40d85b0ce95" -dependencies = [ - "bincode_derive", - "serde", -] - -[[package]] -name = "bincode_derive" -version = "2.0.0-rc.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e30759b3b99a1b802a7a3aa21c85c3ded5c28e1c83170d82d70f08bbf7f3e4c" -dependencies = [ - "virtue", -] - [[package]] name = "bitflags" version = "1.3.2" @@ -512,7 +493,6 @@ dependencies = [ name = "hax-engine-names-extract" version = "0.1.0-alpha.1" dependencies = [ - "bincode", "hax-adt-into", "hax-engine-names", "serde", @@ -524,7 +504,6 @@ dependencies = [ name = "hax-frontend-exporter" version = "0.1.0-alpha.1" dependencies = [ - "bincode", "extension-traits", "hax-adt-into", "hax-frontend-exporter-options", @@ -541,7 +520,6 @@ dependencies = [ name = "hax-frontend-exporter-options" version = "0.1.0-alpha.1" dependencies = [ - "bincode", "hax-adt-into", "schemars", "serde", @@ -620,17 +598,19 @@ name = "hax-types" version = "0.1.0-alpha.1" dependencies = [ "annotate-snippets", - "bincode", "clap", "colored", "hax-adt-into", "hax-frontend-exporter", "hax-frontend-exporter-options", "itertools", + "miette", "path-clean", "schemars", "serde", + "serde-brief", "serde_json", + "tracing", "zstd", ] @@ -872,6 
+852,29 @@ version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +[[package]] +name = "miette" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4edc8853320c2a0dab800fbda86253c8938f6ea88510dc92c5f1ed20e794afc1" +dependencies = [ + "cfg-if", + "miette-derive", + "thiserror", + "unicode-width", +] + +[[package]] +name = "miette-derive" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "mio" version = "0.8.11" @@ -1256,6 +1259,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-brief" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d3ebe2a279beb0833037245a36adbbf71239fdaefc0f670122aff9922bcaf0b" +dependencies = [ + "serde", + "tracing", +] + [[package]] name = "serde-jsonlines" version = "0.5.0" @@ -1645,12 +1658,6 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" -[[package]] -name = "virtue" -version = "0.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dcc60c0624df774c82a0ef104151231d37da4962957d691c011c852b2473314" - [[package]] name = "wait-timeout" version = "0.2.0" diff --git a/Cargo.toml b/Cargo.toml index 427fd8221..52243d79a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,7 +44,7 @@ readme = "README.md" itertools = "0.11.0" schemars = "0.8" which = "4.4" -serde = { version = "1.0", features = ["derive"] } +serde = { version = "1.0", features = ["derive", "rc"] } serde_json = "1.0" clap = { version = "4.0", features = ["derive"] } syn = { version = "1.0.107", features = [ @@ -68,7 +68,6 @@ quote 
= "1.0.32" proc-macro2 = "1.0.66" cargo_metadata = "0.15" colored = "2" -bincode = "2.0.0-rc.3" annotate-snippets = "0.11" # Crates in this repository diff --git a/README.md b/README.md index 65eb569f5..3adf2858d 100644 --- a/README.md +++ b/README.md @@ -138,10 +138,17 @@ You can also just use [direnv](https://github.com/nix-community/nix-direnv), wit programs from the Rust language to various backends (see `engine/backends/`). - `cli/`: the `hax` subcommand for Cargo. -### Recompiling -You can use the [`.utils/rebuild.sh`](./.utils/rebuild.sh) script (which is available automatically as the command `rebuild` when using the Nix devshell): - - `rebuild`: rebuild the Rust then the OCaml part; - - `rebuild TARGET`: rebuild the `TARGET` part (`TARGET` is either `rust` or `ocaml`). +### Compiling, formatting, and more +We use the [`just` command runner](https://just.systems/). If you use +Nix, the dev shell provides it automatically, if you don't use Nix, +please [install `just`](https://just.systems/man/en/packages.html) on +your system. + +Anywhere within the repository, you can build and install in PATH (1) +the Rust parts with `just rust`, (2) the OCaml parts with `just ocaml` +or (3) both with `just build`. More commands (e.g. `just fmt` to +format) are available, please run `just` or `just --list` to get all +the commands. 
## Publications & Other material diff --git a/book/book.toml b/book/book.toml index c7377ded6..19d6bf585 100644 --- a/book/book.toml +++ b/book/book.toml @@ -7,4 +7,9 @@ title = "hax" [output.html] mathjax-support = true +additional-css = ["static/custom.css"] +additional-js = ["theme/fstar.js", "theme/lz-string.js", "theme/ansi_up.js"] +[output.html.playground] +runnable = true +editable = true diff --git a/book/default.nix b/book/default.nix index 98580862c..a7358b93f 100644 --- a/book/default.nix +++ b/book/default.nix @@ -9,7 +9,6 @@ stdenv.mkDerivation { buildPhase = '' mdbook build mdbook build archive -d ../book/archive - bash ./postprocess.sh ''; installPhase = "mv book $out"; } diff --git a/book/postprocess.sh b/book/postprocess.sh deleted file mode 100755 index 79d4a55ae..000000000 --- a/book/postprocess.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env sh - -# This file replaces `user-checkable` with actual -# checkboxes and adds CSS to the generated HTML. - -for file in $(find . -name '*.html'); do - sed -i 's|user-checkable||g' "$file" -done - -for css in $(find . 
-name 'general.css'); do - cat >> "$css" <<-EOF -input.user-checkable { - transform: scale(1.5); - margin-right: 8px; - margin-left: 8px; -} - -ul:has(> li > .user-checkable) { - list-style-type: none; - padding: 0; - margin: 0; -} -li:has(> .user-checkable) { - list-style-type: none; - padding: 0; - margin: 0; -} -EOF -done diff --git a/book/src/SUMMARY.md b/book/src/SUMMARY.md index 7da2c743f..e5f094882 100644 --- a/book/src/SUMMARY.md +++ b/book/src/SUMMARY.md @@ -1,17 +1,21 @@ # Summary - [Introduction](./readme.md) -- [Examples]() - [Quick start](quick_start/intro.md) - [Tutorial](tutorial/readme.md) - [Panic freedom](tutorial/panic-freedom.md) - [Properties on functions](tutorial/properties.md) - [Data invariants](tutorial/data-invariants.md) +- [Examples](examples/intro.md) + - [Rust By Example](examples/rust-by-examples/intro.md) + - [Using the F* backend](examples/fstar/intro.md) + - [Using the Coq backend](examples/coq/intro.md) + - [Using the ProVerif backend](examples/coq/intro.md) - [Proofs]() - [F*]() - [Coq]() - [`libcore`]() -- [Troubleshooting/FAQ](faq/into.md) +- [Troubleshooting/FAQ](faq/intro.md) - [Command line usage]() - [The include flag: which items should be extracted, and how?](faq/include-flags.md) - [Contributing]() diff --git a/book/src/examples/coq/intro.md b/book/src/examples/coq/intro.md new file mode 100644 index 000000000..84914c08f --- /dev/null +++ b/book/src/examples/coq/intro.md @@ -0,0 +1 @@ +# Using the Coq backend diff --git a/book/src/examples/fstar/intro.md b/book/src/examples/fstar/intro.md new file mode 100644 index 000000000..9b80bca00 --- /dev/null +++ b/book/src/examples/fstar/intro.md @@ -0,0 +1 @@ +# Using the F* backend diff --git a/book/src/examples/intro.md b/book/src/examples/intro.md new file mode 100644 index 000000000..0ff72fdfa --- /dev/null +++ b/book/src/examples/intro.md @@ -0,0 +1,12 @@ +# Examples + +This chapter contains various examples that demonstrate how hax can +be used to prove properties 
about programs. Each example is +self-contained. hax being a tool that can extract Rust to various +backends, this section provides examples for each backend. + +The first subsection takes some examples from [Rust by +Example](https://doc.rust-lang.org/rust-by-example/), and shows how to +prove properties on them. + +The other sections present backend-specific examples. diff --git a/book/src/examples/readme.md b/book/src/examples/readme.md deleted file mode 100644 index e69de29bb..000000000 diff --git a/book/src/examples/rust-by-examples/intro.md b/book/src/examples/rust-by-examples/intro.md new file mode 100644 index 000000000..d8d487984 --- /dev/null +++ b/book/src/examples/rust-by-examples/intro.md @@ -0,0 +1 @@ +# Rust By Example diff --git a/book/src/faq/into.md b/book/src/faq/into.md index cefa77ebd..856d84b65 100644 --- a/book/src/faq/into.md +++ b/book/src/faq/into.md @@ -1,3 +1 @@ # Troubleshooting/FAQ - -This chapter captures a list of common questions or issues and how to resolve them. If you happen to run into an issue that is not documented here, please consider submitting a pull request! diff --git a/book/src/faq/intro.md b/book/src/faq/intro.md new file mode 100644 index 000000000..cefa77ebd --- /dev/null +++ b/book/src/faq/intro.md @@ -0,0 +1,3 @@ +# Troubleshooting/FAQ + +This chapter captures a list of common questions or issues and how to resolve them. If you happen to run into an issue that is not documented here, please consider submitting a pull request! diff --git a/book/src/quick_start/intro.md b/book/src/quick_start/intro.md index 15d42ba26..a5dd82a12 100644 --- a/book/src/quick_start/intro.md +++ b/book/src/quick_start/intro.md @@ -5,9 +5,9 @@ what you are looking for! ## Setup the tools - - **user-checkable** [Install the hax toolchain](https://github.com/hacspec/hax?tab=readme-ov-file#installation). + - [Install the hax toolchain](https://github.com/hacspec/hax?tab=readme-ov-file#installation). 
🪄 Running `cargo hax --version` should print some version info. - - **user-checkable** [Install F*](https://github.com/FStarLang/FStar/blob/master/INSTALL.md) *(optional: only if want to run F\*)* + - [Install F*](https://github.com/FStarLang/FStar/blob/master/INSTALL.md) *(optional: only if you want to run F\*)* ## Setup the crate you want to verify @@ -16,11 +16,11 @@ what you are looking for! *Note: this part is useful only if you want to run F\*.* - - **user-checkable** Create the folder `proofs/fstar/extraction` folder, right next to the `Cargo.toml` of the crate you want to verify. + - Create the `proofs/fstar/extraction` folder, right next to the `Cargo.toml` of the crate you want to verify. 🪄 `mkdir -p proofs/fstar/extraction` - - **user-checkable** Copy [this makefile](https://gist.github.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3) to `proofs/fstar/extraction/Makefile`. + - Copy [this makefile](https://gist.github.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3) to `proofs/fstar/extraction/Makefile`. 🪄 `curl -o proofs/fstar/extraction/Makefile https://gist.githubusercontent.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3/raw/Makefile` - - **user-checkable** Add `hax-lib` as a dependency to your crate, enabled only when using hax. + - Add `hax-lib` as a dependency to your crate, enabled only when using hax. 🪄 `cargo add --target 'cfg(hax)' --git https://github.com/hacspec/hax hax-lib` 🪄 *(`hax-lib` is not mandatory, but this guide assumes it is present)* diff --git a/book/src/tutorial/Cargo.lock b/book/src/tutorial/Cargo.lock deleted file mode 100644 index 9675766ca..000000000 --- a/book/src/tutorial/Cargo.lock +++ /dev/null @@ -1,239 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "dyn-clone" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" - -[[package]] -name = "getrandom" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "hax-lib" -version = "0.1.0-pre.1" -source = "git+https://github.com/hacspec/hax#d668de4d17e5ddee3a613068dc30b71353a9db4f" - -[[package]] -name = "hax-lib-macros" -version = "0.1.0-pre.1" -source = "git+https://github.com/hacspec/hax#d668de4d17e5ddee3a613068dc30b71353a9db4f" -dependencies = [ - "hax-lib-macros-types", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "hax-lib-macros-types" -version = "0.1.0-pre.1" -source = "git+https://github.com/hacspec/hax#d668de4d17e5ddee3a613068dc30b71353a9db4f" -dependencies = [ - "proc-macro2", - "quote", - "schemars", - "serde", - "serde_json", - "uuid", -] - -[[package]] -name = "itoa" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" - -[[package]] -name = "libc" -version = "0.2.153" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - 
"quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "ryu" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" - -[[package]] -name = "schemars" -version = "0.8.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29" -dependencies = [ - "dyn-clone", - "schemars_derive", - "serde", - "serde_json", -] - -[[package]] -name = "schemars_derive" -version = "0.8.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967" -dependencies = [ - "proc-macro2", - "quote", - "serde_derive_internals", - "syn 1.0.109", -] - -[[package]] -name = "serde" -version = "1.0.197" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.197" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.52", -] - -[[package]] -name = "serde_derive_internals" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "serde_json" -version = "1.0.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.52" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "tutorial-src" -version = "0.1.0" -dependencies = [ - "hax-lib", - "hax-lib-macros", -] - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "uuid" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" -dependencies = [ - "getrandom", -] - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wasi" -version = 
"0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" diff --git a/book/src/tutorial/Cargo.toml b/book/src/tutorial/Cargo.toml deleted file mode 100644 index de5569b89..000000000 --- a/book/src/tutorial/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "tutorial-src" -version = "0.1.0" -edition = "2021" - -[lib] -path = "sources.rs" - -[dependencies] -hax-lib = { git = "https://github.com/hacspec/hax", version = "0.1.0-pre.1" } diff --git a/book/src/tutorial/Sources.fst b/book/src/tutorial/Sources.fst deleted file mode 100644 index cedf378a5..000000000 --- a/book/src/tutorial/Sources.fst +++ /dev/null @@ -1,110 +0,0 @@ -module Tutorial_src -#set-options "--fuel 0 --ifuel 1 --z3rlimit 150" -open Core -open FStar.Mul - -// ANCHOR: F3 -type t_F3 = - | F3_E1 : t_F3 - | F3_E2 : t_F3 - | F3_E3 : t_F3 - -let t_F3_cast_to_repr (x: t_F3) : isize = - match x with - | F3_E1 -> isz 0 - | F3_E2 -> isz 1 - | F3_E3 -> isz 3 -// ANCHOR_END: F3 - -// ANCHOR: barrett -unfold -let t_FieldElement = i32 - -let v_BARRETT_MULTIPLIER: i64 = 20159L - -let v_BARRETT_R: i64 = 67108864L - -let v_BARRETT_SHIFT: i64 = 26L - -let v_FIELD_MODULUS: i32 = 3329l - -let barrett_reduce (value: i32) - : Pure i32 - (requires - (Core.Convert.f_from value <: i64) >=. (Core.Ops.Arith.Neg.neg v_BARRETT_R <: i64) && - (Core.Convert.f_from value <: i64) <=. v_BARRETT_R) - (ensures - fun result -> - let result:i32 = result in - result >. (Core.Ops.Arith.Neg.neg v_FIELD_MODULUS <: i32) && result <. v_FIELD_MODULUS && - (result %! v_FIELD_MODULUS <: i32) =. (value %! v_FIELD_MODULUS <: i32)) = - let t:i64 = (Core.Convert.f_from value <: i64) *! v_BARRETT_MULTIPLIER in - let t:i64 = t +! (v_BARRETT_R >>! 1l <: i64) in - let quotient:i64 = t >>! v_BARRETT_SHIFT in - let quotient:i32 = cast (quotient <: i64) <: i32 in - let sub:i32 = quotient *! 
v_FIELD_MODULUS in - let _:Prims.unit = Tutorial_src.Math.Lemmas.cancel_mul_mod quotient 3329l in - value -! sub -// ANCHOR_END: barrett - -// ANCHOR: encrypt_decrypt -let decrypt (ciphertext key: u32) : u32 = ciphertext ^. key - -let encrypt (plaintext key: u32) : u32 = plaintext ^. key -// ANCHOR_END: encrypt_decrypt - - - - - - - -// ANCHOR: encrypt_decrypt_identity -let encrypt_decrypt_identity (key plaintext: u32) - : Lemma (requires true) - (ensures (decrypt (encrypt plaintext key <: u32) key <: u32) =. plaintext) = () -// ANCHOR_END: encrypt_decrypt_identity - -// ANCHOR: square -let square (x: u8) : u8 = x *! x -// ANCHOR_END: square - -// ANCHOR: square_ensures -let square_ensures (x: u8) - : Pure u8 - (requires x <. 16uy) - (ensures fun result -> result >=. x) - = x *! x -// ANCHOR_END: square_ensures - -// ANCHOR: square_option -let square_option (x: u8) : Core.Option.t_Option u8 = - if x >=. 16uy - then Core.Option.Option_None <: Core.Option.t_Option u8 - else Core.Option.Option_Some (x *! x) <: Core.Option.t_Option u8 -// ANCHOR_END: square_option - -// ANCHOR: square_requires -let square_requires (x: u8) - : Pure u8 (requires x <. 16uy) (requires fun _ -> True) - = x *! x -// ANCHOR_END: square_requires - -// ANCHOR: F -let v_Q: u16 = 2347us - -type t_F = { f_v:f_v: u16{f_v <. v_Q} } -// ANCHOR_END: F - -// ANCHOR: AddF -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl: Core.Ops.Arith.t_Add t_F t_F = - { - f_Output = t_F; - f_add_pre = (fun (self: t_F) (rhs: t_F) -> true); - f_add_post = (fun (self: t_F) (rhs: t_F) (out: t_F) -> true); - f_add = fun (self: t_F) (rhs: t_F) -> { - f_v = (self.f_v +! rhs.f_v <: u16) %! 
v_Q - } <: t_F - } -// ANCHOR_END: AddF diff --git a/book/src/tutorial/data-invariants.md b/book/src/tutorial/data-invariants.md index a6777795f..285413bb2 100644 --- a/book/src/tutorial/data-invariants.md +++ b/book/src/tutorial/data-invariants.md @@ -25,11 +25,12 @@ Rust alone already can solve our representation issues with define the `enum` type `F3` which has only three constructor: `F3` represent exactly the elements of `F₃`, not more, not less. -```rust,noplaypen -{{#include sources.rs:F3}} -``` -```ocaml -{{#include Sources.fst:F3}} +```rust,editable +enum F3 { + E1, + E2, + E3, +} ``` With `F3`, there doesn't exist illegal values at all: we can now @@ -53,11 +54,14 @@ one `u16` field `v`. Notice the refinment annotation on `v`: the extraction of this type `F` via hax will result in a type enforcing `v` small enough. -```rust,noplaypen -{{#include sources.rs:F}} -``` -```ocaml -{{#include Sources.fst:F}} +```rust,editable +pub const Q: u16 = 2347; + +#[hax_lib::attributes] +pub struct F { + #[hax_lib::refine(v < Q)] + pub v: u16, +} ``` In Rust, we can now define functions that operates on type `F`, @@ -65,11 +69,25 @@ assuming they are in bounds with respect to `F₂₃₄₇`: every such assumption will be checked and enforced by the proof assistant. As an example, below is the implementation of the addition for type `F`. 
-```rust,noplaypen -{{#include sources.rs:AddF}} -``` -```ocaml -{{#include Sources.fst:AddF}} +```rust,editable +# pub const Q: u16 = 2347; +# +# #[hax_lib::attributes] +# pub struct F { +# #[hax_lib::refine(v < Q)] +# pub v: u16, +# } + +use core::ops::Add; + +impl Add for F { + type Output = Self; + fn add(self, rhs: Self) -> Self { + Self { + v: (self.v + rhs.v) % Q, + } + } +} ``` Here, F* is able to prove automatically that (1) the addition doesn't diff --git a/book/src/tutorial/panic-freedom.md b/book/src/tutorial/panic-freedom.md index c0a6dd707..0ec0d5284 100644 --- a/book/src/tutorial/panic-freedom.md +++ b/book/src/tutorial/panic-freedom.md @@ -5,15 +5,13 @@ integer. To extract this function to F* using hax, we simply need to run the command `cargo hax into fstar` in the directory of the crate in which the function `square` is defined. -*Note: throughout this tutorial, you can inspect the hax extraction to -F\* for each code Rust snippets, by clicking on the "F\* extraction" -tab.* +*Note: throughout this tutorial, you can edit the snippets of code and +extract to F\* by clicking the play button ( ), or even typecheck it with the button ( ).* -```rust,noplaypen -{{#include sources.rs:square}} -``` -```ocaml -{{#include Sources.fst:square}} +```rust,editable +fn square(x: u8) -> u8 { + x * x +} ``` Though, if we try to verify this function, F* is complaining about a @@ -58,11 +56,14 @@ its input is within `0` and `15`. 
### Solution A: reflect the partialness of the function in Rust A first solution is to make `square` return an `Option` instead of a `u8`: -```rust,noplaypen -{{#include sources.rs:square_option}} -``` -```ocaml -{{#include Sources.fst:square_option}} +```rust,editable +fn square_option(x: u8) -> Option { + if x >= 16 { + None + } else { + Some(x * x) + } +} ``` Here, F* is able to prove panic-freedom: calling `square` with any @@ -90,11 +91,11 @@ provdes the `requires` [proc-macro](https://doc.rust-lang.org/reference/procedural-macros.html) which lets user writting pre-conditions directly in Rust. -```rust,noplaypen -{{#include sources.rs:square_requires}} -``` -```ocaml -{{#include Sources.fst:square_requires}} +```rust,editable +#[hax_lib::requires(x < 16)] +fn square_requires(x: u8) -> u8 { + x * x +} ``` With this precondition, F* is able to prove panic freedom. From now diff --git a/book/src/tutorial/properties.md b/book/src/tutorial/properties.md index 5e1a438ab..fb411243e 100644 --- a/book/src/tutorial/properties.md +++ b/book/src/tutorial/properties.md @@ -9,12 +9,12 @@ _return a value_: it will not panic or diverge. We could enrich the contract of `square` with a post-condition about the fact it is a increasing function: -```rust,noplaypen -{{#include sources.rs:square_ensures}} -``` - -```ocaml -{{#include Sources.fst:square_ensures}} +```rust,editable +#[hax_lib::requires(x < 16)] +#[hax_lib::ensures(|result| result >= x)] +fn square_ensures(x: u8) -> u8 { + x * x +} ``` Such a simple post-condition is automatically proven by F\*. The @@ -37,12 +37,30 @@ Given `value` a field element (a `i32` whose absolute value is at most It is easy to write this contract directly as `hax::requires` and `hax::ensures` annotations, as shown in the snippet below. 
-```rust,noplaypen -{{#include sources.rs:barrett}} -``` +```rust,editable +type FieldElement = i32; +const FIELD_MODULUS: i32 = 3329; +const BARRETT_SHIFT: i64 = 26; +const BARRETT_R: i64 = 0x4000000; // 2^26 +const BARRETT_MULTIPLIER: i64 = 20159; // ⌊(BARRETT_R / FIELD_MODULUS) + 1/2⌋ + +#[hax_lib::requires((i64::from(value) >= -BARRETT_R && i64::from(value) <= BARRETT_R))] +#[hax_lib::ensures(|result| result > -FIELD_MODULUS && result < FIELD_MODULUS + && result % FIELD_MODULUS == value % FIELD_MODULUS)] +fn barrett_reduce(value: i32) -> i32 { + let t = i64::from(value) * BARRETT_MULTIPLIER; + let t = t + (BARRETT_R >> 1); + + let quotient = t >> BARRETT_SHIFT; + let quotient = quotient as i32; -```ocaml -{{#include Sources.fst:barrett}} + let sub = quotient * FIELD_MODULUS; + + // Here a lemma to prove that `(quotient * 3329) % 3329 = 0` + // may have to be called in F*. + + value - sub +} ``` diff --git a/book/theme/fonts/OPEN-SANS-LICENSE.txt b/book/theme/fonts/OPEN-SANS-LICENSE.txt new file mode 100644 index 000000000..d64569567 --- /dev/null +++ b/book/theme/fonts/OPEN-SANS-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/book/theme/fonts/SOURCE-CODE-PRO-LICENSE.txt b/book/theme/fonts/SOURCE-CODE-PRO-LICENSE.txt new file mode 100644 index 000000000..366206f54 --- /dev/null +++ b/book/theme/fonts/SOURCE-CODE-PRO-LICENSE.txt @@ -0,0 +1,93 @@ +Copyright 2010, 2012 Adobe Systems Incorporated (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe Systems Incorporated in the United States and/or other countries. + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. 
The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. 
These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
diff --git a/book/theme/fonts/fonts.css b/book/theme/fonts/fonts.css new file mode 100644 index 000000000..858efa598 --- /dev/null +++ b/book/theme/fonts/fonts.css @@ -0,0 +1,100 @@ +/* Open Sans is licensed under the Apache License, Version 2.0. See http://www.apache.org/licenses/LICENSE-2.0 */ +/* Source Code Pro is under the Open Font License. See https://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&id=OFL */ + +/* open-sans-300 - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 300; + src: local('Open Sans Light'), local('OpenSans-Light'), + url('open-sans-v17-all-charsets-300.woff2') format('woff2'); +} + +/* open-sans-300italic - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: italic; + font-weight: 300; + src: local('Open Sans Light Italic'), local('OpenSans-LightItalic'), + url('open-sans-v17-all-charsets-300italic.woff2') format('woff2'); +} + +/* open-sans-regular - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 400; + src: local('Open Sans Regular'), local('OpenSans-Regular'), + url('open-sans-v17-all-charsets-regular.woff2') format('woff2'); +} + +/* open-sans-italic - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: italic; + font-weight: 400; + src: local('Open Sans Italic'), local('OpenSans-Italic'), + url('open-sans-v17-all-charsets-italic.woff2') format('woff2'); +} + +/* open-sans-600 - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 600; + src: local('Open Sans SemiBold'), local('OpenSans-SemiBold'), + url('open-sans-v17-all-charsets-600.woff2') format('woff2'); +} + +/* open-sans-600italic - 
latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: italic; + font-weight: 600; + src: local('Open Sans SemiBold Italic'), local('OpenSans-SemiBoldItalic'), + url('open-sans-v17-all-charsets-600italic.woff2') format('woff2'); +} + +/* open-sans-700 - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 700; + src: local('Open Sans Bold'), local('OpenSans-Bold'), + url('open-sans-v17-all-charsets-700.woff2') format('woff2'); +} + +/* open-sans-700italic - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: italic; + font-weight: 700; + src: local('Open Sans Bold Italic'), local('OpenSans-BoldItalic'), + url('open-sans-v17-all-charsets-700italic.woff2') format('woff2'); +} + +/* open-sans-800 - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 800; + src: local('Open Sans ExtraBold'), local('OpenSans-ExtraBold'), + url('open-sans-v17-all-charsets-800.woff2') format('woff2'); +} + +/* open-sans-800italic - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Open Sans'; + font-style: italic; + font-weight: 800; + src: local('Open Sans ExtraBold Italic'), local('OpenSans-ExtraBoldItalic'), + url('open-sans-v17-all-charsets-800italic.woff2') format('woff2'); +} + +/* source-code-pro-500 - latin_vietnamese_latin-ext_greek_cyrillic-ext_cyrillic */ +@font-face { + font-family: 'Source Code Pro'; + font-style: normal; + font-weight: 500; + src: url('source-code-pro-v11-all-charsets-500.woff2') format('woff2'); +} diff --git a/book/theme/fonts/open-sans-v17-all-charsets-300.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-300.woff2 new file mode 100644 index 000000000..9f51be370 Binary files 
/dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-300.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-300italic.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-300italic.woff2 new file mode 100644 index 000000000..2f5454484 Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-300italic.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-600.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-600.woff2 new file mode 100644 index 000000000..f503d558d Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-600.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-600italic.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-600italic.woff2 new file mode 100644 index 000000000..c99aabe80 Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-600italic.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-700.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-700.woff2 new file mode 100644 index 000000000..421a1ab25 Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-700.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-700italic.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-700italic.woff2 new file mode 100644 index 000000000..12ce3d20d Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-700italic.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-800.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-800.woff2 new file mode 100644 index 000000000..c94a223b0 Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-800.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-800italic.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-800italic.woff2 new file mode 100644 index 000000000..eed7d3c63 Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-800italic.woff2 differ diff --git 
a/book/theme/fonts/open-sans-v17-all-charsets-italic.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-italic.woff2 new file mode 100644 index 000000000..398b68a08 Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-italic.woff2 differ diff --git a/book/theme/fonts/open-sans-v17-all-charsets-regular.woff2 b/book/theme/fonts/open-sans-v17-all-charsets-regular.woff2 new file mode 100644 index 000000000..8383e94c6 Binary files /dev/null and b/book/theme/fonts/open-sans-v17-all-charsets-regular.woff2 differ diff --git a/book/theme/fonts/source-code-pro-v11-all-charsets-500.woff2 b/book/theme/fonts/source-code-pro-v11-all-charsets-500.woff2 new file mode 100644 index 000000000..722245682 Binary files /dev/null and b/book/theme/fonts/source-code-pro-v11-all-charsets-500.woff2 differ diff --git a/book/theme/fstar.js b/book/theme/fstar.js new file mode 100644 index 000000000..5b1c6ba37 --- /dev/null +++ b/book/theme/fstar.js @@ -0,0 +1,82 @@ +/*! `ocaml` grammar compiled for Highlight.js 11.10.0 */ + (function(){ + var hljsGrammar = (function () { + 'use strict'; + + function fstar(hljs) { + /* missing support for heredoc-like string (OCaml 4.0.2+) */ + return { + name: 'FStar', + aliases: [ 'fstar', 'fst', 'fsti' ], + keywords: { + $pattern: '[a-z_]\\w*!?', + keyword: 'attributes noeq unopteq and assert assume begin by calc class default decreases effect eliminate else end ensures exception exists false friend forall fun λ function if in include inline inline_for_extraction instance introduce irreducible let logic match returns as module new new_effect layered_effect polymonadic_bind polymonadic_subcomp noextract of open opaque private quote range_of rec reifiable reify reflectable requires set_range_of sub_effect synth then total true try type unfold unfoldable val when with string', + built_in: 'unit', + literal: 'true false' + }, + // illegal: /\/\/|>>/, + contains: [ + { + className: 'literal', + begin: '\\[(\\|\\|)?\\]|\\(\\)', + relevance: 0 + 
}, + hljs.COMMENT( + '\\(\\*', + '\\*\\)', + { contains: [ 'self' ] } + ), + // hljs.inherit( + // hljs.COMMENT(), + // { + // match: [ + // /(^|\s)/, + // /\/\/.*$/ + // ], + // scope: { + // 2: 'comment' + // } + // } + // ), + { /* type variable */ + className: 'symbol', + begin: '\'[A-Za-z_](?!\')[\\w\']*' + /* the grammar is ambiguous on how 'a'b should be interpreted but not the compiler */ + }, + { /* module or constructor */ + className: 'type', + begin: '\\b[A-Z][\\w\']*', + relevance: 0 + }, + { /* don't color identifiers, but safely catch all identifiers with ' */ + begin: '[a-z_]\\w*\'[\\w\']*', + relevance: 0 + }, + hljs.inherit(hljs.APOS_STRING_MODE, { + className: 'string', + relevance: 0 + }), + hljs.inherit(hljs.QUOTE_STRING_MODE, { illegal: null }), + { + className: 'number', + begin: + '\\b(0[xX][a-fA-F0-9_]+[Lln]?|' + + '0[oO][0-7_]+[Lln]?|' + + '0[bB][01_]+[Lln]?|' + + '[0-9][0-9_]*([Lln]|(\\.[0-9_]*)?([eE][-+]?[0-9_]+)?)?)', + relevance: 0 + }, + { begin: /->/ // relevance booster + } + ] + }; + } + + return fstar; + +})(); + hljs.registerLanguage('fstar', hljsGrammar); + })(); + +// hljs.initHighlightingOnLoad(); + diff --git a/book/theme/highlight.css b/book/theme/highlight.css new file mode 100644 index 000000000..ba57b82b2 --- /dev/null +++ b/book/theme/highlight.css @@ -0,0 +1,82 @@ +/* + * An increased contrast highlighting scheme loosely based on the + * "Base16 Atelier Dune Light" theme by Bram de Haan + * (http://atelierbram.github.io/syntax-highlighting/atelier-schemes/dune) + * Original Base16 color scheme by Chris Kempson + * (https://github.com/chriskempson/base16) + */ + +/* Comment */ +.hljs-comment, +.hljs-quote { + color: #575757; +} + +/* Red */ +.hljs-variable, +.hljs-template-variable, +.hljs-attribute, +.hljs-tag, +.hljs-name, +.hljs-regexp, +.hljs-link, +.hljs-name, +.hljs-selector-id, +.hljs-selector-class { + color: #d70025; +} + +/* Orange */ +.hljs-number, +.hljs-meta, +.hljs-built_in, +.hljs-builtin-name, 
+.hljs-literal, +.hljs-type, +.hljs-params { + color: #b21e00; +} + +/* Green */ +.hljs-string, +.hljs-symbol, +.hljs-bullet { + color: #008200; +} + +/* Blue */ +.hljs-title, +.hljs-section { + color: #0030f2; +} + +/* Purple */ +.hljs-keyword, +.hljs-selector-tag { + color: #9d00ec; +} + +.hljs { + display: block; + overflow-x: auto; + background: #f6f7f6; + color: #000; +} + +.hljs-emphasis { + font-style: italic; +} + +.hljs-strong { + font-weight: bold; +} + +.hljs-addition { + color: #22863a; + background-color: #f0fff4; +} + +.hljs-deletion { + color: #b31d28; + background-color: #ffeef0; +} diff --git a/book/theme/highlight.css.old b/book/theme/highlight.css.old new file mode 100644 index 000000000..ba57b82b2 --- /dev/null +++ b/book/theme/highlight.css.old @@ -0,0 +1,82 @@ +/* + * An increased contrast highlighting scheme loosely based on the + * "Base16 Atelier Dune Light" theme by Bram de Haan + * (http://atelierbram.github.io/syntax-highlighting/atelier-schemes/dune) + * Original Base16 color scheme by Chris Kempson + * (https://github.com/chriskempson/base16) + */ + +/* Comment */ +.hljs-comment, +.hljs-quote { + color: #575757; +} + +/* Red */ +.hljs-variable, +.hljs-template-variable, +.hljs-attribute, +.hljs-tag, +.hljs-name, +.hljs-regexp, +.hljs-link, +.hljs-name, +.hljs-selector-id, +.hljs-selector-class { + color: #d70025; +} + +/* Orange */ +.hljs-number, +.hljs-meta, +.hljs-built_in, +.hljs-builtin-name, +.hljs-literal, +.hljs-type, +.hljs-params { + color: #b21e00; +} + +/* Green */ +.hljs-string, +.hljs-symbol, +.hljs-bullet { + color: #008200; +} + +/* Blue */ +.hljs-title, +.hljs-section { + color: #0030f2; +} + +/* Purple */ +.hljs-keyword, +.hljs-selector-tag { + color: #9d00ec; +} + +.hljs { + display: block; + overflow-x: auto; + background: #f6f7f6; + color: #000; +} + +.hljs-emphasis { + font-style: italic; +} + +.hljs-strong { + font-weight: bold; +} + +.hljs-addition { + color: #22863a; + background-color: #f0fff4; +} + 
+.hljs-deletion { + color: #b31d28; + background-color: #ffeef0; +} diff --git a/book/theme/highlight.js b/book/theme/highlight.js new file mode 100644 index 000000000..3256c00ed --- /dev/null +++ b/book/theme/highlight.js @@ -0,0 +1,53 @@ +/* + Highlight.js 10.1.1 (93fd0d73) + License: BSD-3-Clause + Copyright (c) 2006-2020, Ivan Sagalaev +*/ +var hljs=function(){"use strict";function e(n){Object.freeze(n);var t="function"==typeof n;return Object.getOwnPropertyNames(n).forEach((function(r){!Object.hasOwnProperty.call(n,r)||null===n[r]||"object"!=typeof n[r]&&"function"!=typeof n[r]||t&&("caller"===r||"callee"===r||"arguments"===r)||Object.isFrozen(n[r])||e(n[r])})),n}class n{constructor(e){void 0===e.data&&(e.data={}),this.data=e.data}ignoreMatch(){this.ignore=!0}}function t(e){return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'")}function r(e,...n){var t={};for(const n in e)t[n]=e[n];return n.forEach((function(e){for(const n in e)t[n]=e[n]})),t}function a(e){return e.nodeName.toLowerCase()}var i=Object.freeze({__proto__:null,escapeHTML:t,inherit:r,nodeStream:function(e){var n=[];return function e(t,r){for(var i=t.firstChild;i;i=i.nextSibling)3===i.nodeType?r+=i.nodeValue.length:1===i.nodeType&&(n.push({event:"start",offset:r,node:i}),r=e(i,r),a(i).match(/br|hr|img|input/)||n.push({event:"stop",offset:r,node:i}));return r}(e,0),n},mergeStreams:function(e,n,r){var i=0,s="",o=[];function l(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset"}function u(e){s+=""}function d(e){("start"===e.event?c:u)(e.node)}for(;e.length||n.length;){var g=l();if(s+=t(r.substring(i,g[0].offset)),i=g[0].offset,g===e){o.reverse().forEach(u);do{d(g.splice(0,1)[0]),g=l()}while(g===e&&g.length&&g[0].offset===i);o.reverse().forEach(c)}else"start"===g[0].event?o.push(g[0].node):o.pop(),d(g.splice(0,1)[0])}return s+t(r.substr(i))}});const s="",o=e=>!!e.kind;class 
l{constructor(e,n){this.buffer="",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){this.buffer+=t(e)}openNode(e){if(!o(e))return;let n=e.kind;e.sublanguage||(n=`${this.classPrefix}${n}`),this.span(n)}closeNode(e){o(e)&&(this.buffer+=s)}value(){return this.buffer}span(e){this.buffer+=``}}class c{constructor(){this.rootNode={children:[]},this.stack=[this.rootNode]}get top(){return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){this.top.children.push(e)}openNode(e){const n={kind:e,children:[]};this.add(n),this.stack.push(n)}closeNode(){if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){return"string"==typeof n?e.addText(n):n.children&&(e.openNode(n),n.children.forEach(n=>this._walk(e,n)),e.closeNode(n)),e}static _collapse(e){"string"!=typeof e&&e.children&&(e.children.every(e=>"string"==typeof e)?e.children=[e.children.join("")]:e.children.forEach(e=>{c._collapse(e)}))}}class u extends c{constructor(e){super(),this.options=e}addKeyword(e,n){""!==e&&(this.openNode(n),this.addText(e),this.closeNode())}addText(e){""!==e&&this.add(e)}addSublanguage(e,n){const t=e.root;t.kind=n,t.sublanguage=!0,this.add(t)}toHTML(){return new l(this,this.options).value()}finalize(){return!0}}function d(e){return e?"string"==typeof e?e:e.source:null}const g="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",h={begin:"\\\\[\\s\\S]",relevance:0},f={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[h]},p={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[h]},b={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},m=function(e,n,t={}){var a=r({className:"comment",begin:e,end:n,contains:[]},t);return 
a.contains.push(b),a.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):",relevance:0}),a},v=m("//","$"),x=m("/\\*","\\*/"),E=m("#","$");var _=Object.freeze({__proto__:null,IDENT_RE:"[a-zA-Z]\\w*",UNDERSCORE_IDENT_RE:"[a-zA-Z_]\\w*",NUMBER_RE:"\\b\\d+(\\.\\d+)?",C_NUMBER_RE:g,BINARY_NUMBER_RE:"\\b(0b[01]+)",RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",SHEBANG:(e={})=>{const n=/^#![ ]*\//;return e.binary&&(e.begin=function(...e){return e.map(e=>d(e)).join("")}(n,/.*\b/,e.binary,/\b.*/)),r({className:"meta",begin:n,end:/$/,relevance:0,"on:begin":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)},BACKSLASH_ESCAPE:h,APOS_STRING_MODE:f,QUOTE_STRING_MODE:p,PHRASAL_WORDS_MODE:b,COMMENT:m,C_LINE_COMMENT_MODE:v,C_BLOCK_COMMENT_MODE:x,HASH_COMMENT_MODE:E,NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?",relevance:0},C_NUMBER_MODE:{className:"number",begin:g,relevance:0},BINARY_NUMBER_MODE:{className:"number",begin:"\\b(0b[01]+)",relevance:0},CSS_NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},REGEXP_MODE:{begin:/(?=\/[^/\n]*\/)/,contains:[{className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[h,{begin:/\[/,end:/\]/,relevance:0,contains:[h]}]}]},TITLE_MODE:{className:"title",begin:"[a-zA-Z]\\w*",relevance:0},UNDERSCORE_TITLE_MODE:{className:"title",begin:"[a-zA-Z_]\\w*",relevance:0},METHOD_GUARD:{begin:"\\.\\s*[a-zA-Z_]\\w*",relevance:0},END_SAME_AS_BEGIN:function(e){return Object.assign(e,{"on:begin":(e,n)=>{n.data._beginMatch=e[1]},"on:end":(e,n)=>{n.data._beginMatch!==e[1]&&n.ignoreMatch()}})}}),N="of and for in not or if then".split(" ");function w(e,n){return n?+n:function(e){return N.includes(e.toLowerCase())}(e)?0:1}const R=t,y=r,{nodeStream:k,mergeStreams:O}=i,M=Symbol("nomatch");return 
function(t){var a=[],i={},s={},o=[],l=!0,c=/(^(<[^>]+>|\t|)+|\n)/gm,g="Could not find the language '{}', did you forget to load/include a language module?";const h={disableAutodetect:!0,name:"Plain text",contains:[]};var f={noHighlightRe:/^(no-?highlight)$/i,languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:null,__emitter:u};function p(e){return f.noHighlightRe.test(e)}function b(e,n,t,r){var a={code:n,language:e};S("before:highlight",a);var i=a.result?a.result:m(a.language,a.code,t,r);return i.code=a.code,S("after:highlight",i),i}function m(e,t,a,s){var o=t;function c(e,n){var t=E.case_insensitive?n[0].toLowerCase():n[0];return Object.prototype.hasOwnProperty.call(e.keywords,t)&&e.keywords[t]}function u(){null!=y.subLanguage?function(){if(""!==A){var e=null;if("string"==typeof y.subLanguage){if(!i[y.subLanguage])return void O.addText(A);e=m(y.subLanguage,A,!0,k[y.subLanguage]),k[y.subLanguage]=e.top}else e=v(A,y.subLanguage.length?y.subLanguage:null);y.relevance>0&&(I+=e.relevance),O.addSublanguage(e.emitter,e.language)}}():function(){if(!y.keywords)return void O.addText(A);let e=0;y.keywordPatternRe.lastIndex=0;let n=y.keywordPatternRe.exec(A),t="";for(;n;){t+=A.substring(e,n.index);const r=c(y,n);if(r){const[e,a]=r;O.addText(t),t="",I+=a,O.addKeyword(n[0],e)}else t+=n[0];e=y.keywordPatternRe.lastIndex,n=y.keywordPatternRe.exec(A)}t+=A.substr(e),O.addText(t)}(),A=""}function h(e){return e.className&&O.openNode(e.className),y=Object.create(e,{parent:{value:y}})}function p(e){return 0===y.matcher.regexIndex?(A+=e[0],1):(L=!0,0)}var b={};function x(t,r){var i=r&&r[0];if(A+=t,null==i)return u(),0;if("begin"===b.type&&"end"===r.type&&b.index===r.index&&""===i){if(A+=o.slice(r.index,r.index+1),!l){const n=Error("0 width match regex");throw n.languageName=e,n.badRule=b.rule,n}return 1}if(b=r,"begin"===r.type)return function(e){var t=e[0],r=e.rule;const a=new n(r),i=[r.__beforeBegin,r["on:begin"]];for(const n of 
i)if(n&&(n(e,a),a.ignore))return p(t);return r&&r.endSameAsBegin&&(r.endRe=RegExp(t.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")),r.skip?A+=t:(r.excludeBegin&&(A+=t),u(),r.returnBegin||r.excludeBegin||(A=t)),h(r),r.returnBegin?0:t.length}(r);if("illegal"===r.type&&!a){const e=Error('Illegal lexeme "'+i+'" for mode "'+(y.className||"")+'"');throw e.mode=y,e}if("end"===r.type){var s=function(e){var t=e[0],r=o.substr(e.index),a=function e(t,r,a){let i=function(e,n){var t=e&&e.exec(n);return t&&0===t.index}(t.endRe,a);if(i){if(t["on:end"]){const e=new n(t);t["on:end"](r,e),e.ignore&&(i=!1)}if(i){for(;t.endsParent&&t.parent;)t=t.parent;return t}}if(t.endsWithParent)return e(t.parent,r,a)}(y,e,r);if(!a)return M;var i=y;i.skip?A+=t:(i.returnEnd||i.excludeEnd||(A+=t),u(),i.excludeEnd&&(A=t));do{y.className&&O.closeNode(),y.skip||y.subLanguage||(I+=y.relevance),y=y.parent}while(y!==a.parent);return a.starts&&(a.endSameAsBegin&&(a.starts.endRe=a.endRe),h(a.starts)),i.returnEnd?0:t.length}(r);if(s!==M)return s}if("illegal"===r.type&&""===i)return 1;if(B>1e5&&B>3*r.index)throw Error("potential infinite loop, way more iterations than matches");return A+=i,i.length}var E=T(e);if(!E)throw console.error(g.replace("{}",e)),Error('Unknown language: "'+e+'"');var _=function(e){function n(n,t){return RegExp(d(n),"m"+(e.case_insensitive?"i":"")+(t?"g":""))}class t{constructor(){this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}addRule(e,n){n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]),this.matchAt+=function(e){return RegExp(e.toString()+"|").exec("").length-1}(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null);const e=this.regexes.map(e=>e[1]);this.matcherRe=n(function(e,n="|"){for(var t=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./,r=0,a="",i=0;i0&&(a+=n),a+="(";o.length>0;){var 
l=t.exec(o);if(null==l){a+=o;break}a+=o.substring(0,l.index),o=o.substring(l.index+l[0].length),"\\"===l[0][0]&&l[1]?a+="\\"+(+l[1]+s):(a+=l[0],"("===l[0]&&r++)}a+=")"}return a}(e),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex;const n=this.matcherRe.exec(e);if(!n)return null;const t=n.findIndex((e,n)=>n>0&&void 0!==e),r=this.matchIndexes[t];return n.splice(0,t),Object.assign(n,r)}}class a{constructor(){this.rules=[],this.multiRegexes=[],this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t;return this.rules.slice(e).forEach(([e,t])=>n.addRule(e,t)),n.compile(),this.multiRegexes[e]=n,n}considerAll(){this.regexIndex=0}addRule(e,n){this.rules.push([e,n]),"begin"===n.type&&this.count++}exec(e){const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex;const t=n.exec(e);return t&&(this.regexIndex+=t.position+1,this.regexIndex===this.count&&(this.regexIndex=0)),t}}function i(e,n){const t=e.input[e.index-1],r=e.input[e.index+e[0].length];"."!==t&&"."!==r||n.ignoreMatch()}if(e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.");return function t(s,o){const l=s;if(s.compiled)return l;s.compiled=!0,s.__beforeBegin=null,s.keywords=s.keywords||s.beginKeywords;let c=null;if("object"==typeof s.keywords&&(c=s.keywords.$pattern,delete s.keywords.$pattern),s.keywords&&(s.keywords=function(e,n){var t={};return"string"==typeof e?r("keyword",e):Object.keys(e).forEach((function(n){r(n,e[n])})),t;function r(e,r){n&&(r=r.toLowerCase()),r.split(" ").forEach((function(n){var r=n.split("|");t[r[0]]=[e,w(r[0],r[1])]}))}}(s.keywords,e.case_insensitive)),s.lexemes&&c)throw Error("ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. 
(see mode reference) ");return l.keywordPatternRe=n(s.lexemes||c||/\w+/,!0),o&&(s.beginKeywords&&(s.begin="\\b("+s.beginKeywords.split(" ").join("|")+")(?=\\b|\\s)",s.__beforeBegin=i),s.begin||(s.begin=/\B|\b/),l.beginRe=n(s.begin),s.endSameAsBegin&&(s.end=s.begin),s.end||s.endsWithParent||(s.end=/\B|\b/),s.end&&(l.endRe=n(s.end)),l.terminator_end=d(s.end)||"",s.endsWithParent&&o.terminator_end&&(l.terminator_end+=(s.end?"|":"")+o.terminator_end)),s.illegal&&(l.illegalRe=n(s.illegal)),void 0===s.relevance&&(s.relevance=1),s.contains||(s.contains=[]),s.contains=[].concat(...s.contains.map((function(e){return function(e){return e.variants&&!e.cached_variants&&(e.cached_variants=e.variants.map((function(n){return r(e,{variants:null},n)}))),e.cached_variants?e.cached_variants:function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(e)?r(e,{starts:e.starts?r(e.starts):null}):Object.isFrozen(e)?r(e):e}("self"===e?s:e)}))),s.contains.forEach((function(e){t(e,l)})),s.starts&&t(s.starts,o),l.matcher=function(e){const n=new a;return e.contains.forEach(e=>n.addRule(e.begin,{rule:e,type:"begin"})),e.terminator_end&&n.addRule(e.terminator_end,{type:"end"}),e.illegal&&n.addRule(e.illegal,{type:"illegal"}),n}(l),l}(e)}(E),N="",y=s||_,k={},O=new f.__emitter(f);!function(){for(var e=[],n=y;n!==E;n=n.parent)n.className&&e.unshift(n.className);e.forEach(e=>O.openNode(e))}();var A="",I=0,S=0,B=0,L=!1;try{for(y.matcher.considerAll();;){B++,L?L=!1:(y.matcher.lastIndex=S,y.matcher.considerAll());const e=y.matcher.exec(o);if(!e)break;const n=x(o.substring(S,e.index),e);S=e.index+n}return x(o.substr(S)),O.closeAllNodes(),O.finalize(),N=O.toHTML(),{relevance:I,value:N,language:e,illegal:!1,emitter:O,top:y}}catch(n){if(n.message&&n.message.includes("Illegal"))return{illegal:!0,illegalBy:{msg:n.message,context:o.slice(S-100,S+100),mode:n.mode},sofar:N,relevance:0,value:R(o),emitter:O};if(l)return{illegal:!1,relevance:0,value:R(o),emitter:O,language:e,top:y,errorRaised:n};throw n}}function 
v(e,n){n=n||f.languages||Object.keys(i);var t=function(e){const n={relevance:0,emitter:new f.__emitter(f),value:R(e),illegal:!1,top:h};return n.emitter.addText(e),n}(e),r=t;return n.filter(T).filter(I).forEach((function(n){var a=m(n,e,!1);a.language=n,a.relevance>r.relevance&&(r=a),a.relevance>t.relevance&&(r=t,t=a)})),r.language&&(t.second_best=r),t}function x(e){return f.tabReplace||f.useBR?e.replace(c,e=>"\n"===e?f.useBR?"
":e:f.tabReplace?e.replace(/\t/g,f.tabReplace):e):e}function E(e){let n=null;const t=function(e){var n=e.className+" ";n+=e.parentNode?e.parentNode.className:"";const t=f.languageDetectRe.exec(n);if(t){var r=T(t[1]);return r||(console.warn(g.replace("{}",t[1])),console.warn("Falling back to no-highlight mode for this block.",e)),r?t[1]:"no-highlight"}return n.split(/\s+/).find(e=>p(e)||T(e))}(e);if(p(t))return;S("before:highlightBlock",{block:e,language:t}),f.useBR?(n=document.createElement("div")).innerHTML=e.innerHTML.replace(/\n/g,"").replace(//g,"\n"):n=e;const r=n.textContent,a=t?b(t,r,!0):v(r),i=k(n);if(i.length){const e=document.createElement("div");e.innerHTML=a.value,a.value=O(i,k(e),r)}a.value=x(a.value),S("after:highlightBlock",{block:e,result:a}),e.innerHTML=a.value,e.className=function(e,n,t){var r=n?s[n]:t,a=[e.trim()];return e.match(/\bhljs\b/)||a.push("hljs"),e.includes(r)||a.push(r),a.join(" ").trim()}(e.className,t,a.language),e.result={language:a.language,re:a.relevance,relavance:a.relevance},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.relevance,relavance:a.second_best.relevance})}const N=()=>{if(!N.called){N.called=!0;var e=document.querySelectorAll("pre code");a.forEach.call(e,E)}};function T(e){return e=(e||"").toLowerCase(),i[e]||i[s[e]]}function A(e,{languageName:n}){"string"==typeof e&&(e=[e]),e.forEach(e=>{s[e]=n})}function I(e){var n=T(e);return n&&!n.disableAutodetect}function S(e,n){var t=e;o.forEach((function(e){e[t]&&e[t](n)}))}Object.assign(t,{highlight:b,highlightAuto:v,fixMarkup:x,highlightBlock:E,configure:function(e){f=y(f,e)},initHighlighting:N,initHighlightingOnLoad:function(){window.addEventListener("DOMContentLoaded",N,!1)},registerLanguage:function(e,n){var r=null;try{r=n(t)}catch(n){if(console.error("Language definition for '{}' could not be registered.".replace("{}",e)),!l)throw 
n;console.error(n),r=h}r.name||(r.name=e),i[e]=r,r.rawDefinition=n.bind(null,t),r.aliases&&A(r.aliases,{languageName:e})},listLanguages:function(){return Object.keys(i)},getLanguage:T,registerAliases:A,requireLanguage:function(e){var n=T(e);if(n)return n;throw Error("The '{}' language is required, but not loaded.".replace("{}",e))},autoDetection:I,inherit:y,addPlugin:function(e){o.push(e)}}),t.debugMode=function(){l=!1},t.safeMode=function(){l=!0},t.versionString="10.1.1";for(const n in _)"object"==typeof _[n]&&e(_[n]);return Object.assign(t,_),t}({})}();"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs); +hljs.registerLanguage("apache",function(){"use strict";return function(e){var n={className:"number",begin:"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?"};return{name:"Apache config",aliases:["apacheconf"],case_insensitive:!0,contains:[e.HASH_COMMENT_MODE,{className:"section",begin:"",contains:[n,{className:"number",begin:":\\d{1,5}"},e.inherit(e.QUOTE_STRING_MODE,{relevance:0})]},{className:"attribute",begin:/\w+/,relevance:0,keywords:{nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername"},starts:{end:/$/,relevance:0,keywords:{literal:"on off all deny allow"},contains:[{className:"meta",begin:"\\s\\[",end:"\\]$"},{className:"variable",begin:"[\\$%]\\{",end:"\\}",contains:["self",{className:"number",begin:"[\\$%]\\d+"}]},n,{className:"number",begin:"\\d+"},e.QUOTE_STRING_MODE]}}],illegal:/\S/}}}()); +hljs.registerLanguage("bash",function(){"use strict";return function(e){const s={};Object.assign(s,{className:"variable",variants:[{begin:/\$[\w\d#@][\w\d_]*/},{begin:/\$\{/,end:/\}/,contains:[{begin:/:-/,contains:[s]}]}]});const t={className:"subst",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]},n={className:"string",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,s,t]};t.contains.push(n);const 
a={begin:/\$\(\(/,end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},e.NUMBER_MODE,s]},i=e.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10}),c={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{name:"Bash",aliases:["sh","zsh"],keywords:{$pattern:/\b-?[a-z\._]+\b/,keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},contains:[i,e.SHEBANG(),c,a,e.HASH_COMMENT_MODE,n,{className:"",begin:/\\"/},{className:"string",begin:/'/,end:/'/},s]}}}()); +hljs.registerLanguage("c-like",function(){"use strict";return function(e){function t(e){return"(?:"+e+")?"}var n="(decltype\\(auto\\)|"+t("[a-zA-Z_]\\w*::")+"[a-zA-Z_]\\w*"+t("<.*?>")+")",r={className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},a={className:"string",variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)",end:"'",illegal:"."},e.END_SAME_AS_BEGIN({begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ 
]{0,16})"/})]},i={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include"},contains:[{begin:/\\\n/,relevance:0},e.inherit(a,{className:"meta-string"}),{className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},o={className:"title",begin:t("[a-zA-Z_]\\w*::")+e.IDENT_RE,relevance:0},c=t("[a-zA-Z_]\\w*::")+e.IDENT_RE+"\\s*\\(",l={keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_t short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq",built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum 
isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary",literal:"true false nullptr NULL"},d=[r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,i,a],_={variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{beginKeywords:"new throw return else",end:/;/}],keywords:l,contains:d.concat([{begin:/\(/,end:/\)/,keywords:l,contains:d.concat(["self"]),relevance:0}]),relevance:0},u={className:"function",begin:"("+n+"[\\*&\\s]+)+"+c,returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:l,illegal:/[^\w\s\*&:<>]/,contains:[{begin:"decltype\\(auto\\)",keywords:l,relevance:0},{begin:c,returnBegin:!0,contains:[o],relevance:0},{className:"params",begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r,{begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:["self",e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r]}]},r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s]};return{aliases:["c","cc","h","c++","h++","hpp","hh","hxx","cxx"],keywords:l,disableAutodetect:!0,illegal:"",keywords:l,contains:["self",r]},{begin:e.IDENT_RE+"::",keywords:l},{className:"class",beginKeywords:"class struct",end:/[{;:]/,contains:[{begin://,contains:["self"]},e.TITLE_MODE]}]),exports:{preprocessor:s,strings:a,keywords:l}}}}()); +hljs.registerLanguage("c",function(){"use strict";return function(e){var n=e.getLanguage("c-like").rawDefinition();return n.name="C",n.aliases=["c","h"],n}}()); +hljs.registerLanguage("coffeescript",function(){"use strict";const 
e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={keyword:e.concat(["then","unless","until","loop","by","when","and","or","is","isnt","not"]).filter((e=>n=>!e.includes(n))(["var","const","let","function","static"])).join(" "),literal:n.concat(["yes","no","on","off"]).join(" "),built_in:a.concat(["npm","print"]).join(" ")},i="[A-Za-z$_][0-9A-Za-z$_]*",s={className:"subst",begin:/#\{/,end:/}/,keywords:t},o=[r.BINARY_NUMBER_MODE,r.inherit(r.C_NUMBER_MODE,{starts:{end:"(\\s*/)?",relevance:0}}),{className:"string",variants:[{begin:/'''/,end:/'''/,contains:[r.BACKSLASH_ESCAPE]},{begin:/'/,end:/'/,contains:[r.BACKSLASH_ESCAPE]},{begin:/"""/,end:/"""/,contains:[r.BACKSLASH_ESCAPE,s]},{begin:/"/,end:/"/,contains:[r.BACKSLASH_ESCAPE,s]}]},{className:"regexp",variants:[{begin:"///",end:"///",contains:[s,r.HASH_COMMENT_MODE]},{begin:"//[gim]{0,3}(?=\\W)",relevance:0},{begin:/\/(?![ 
*]).*?(?![\\]).\/[gim]{0,3}(?=\W)/}]},{begin:"@"+i},{subLanguage:"javascript",excludeBegin:!0,excludeEnd:!0,variants:[{begin:"```",end:"```"},{begin:"`",end:"`"}]}];s.contains=o;var c=r.inherit(r.TITLE_MODE,{begin:i}),l={className:"params",begin:"\\([^\\(]",returnBegin:!0,contains:[{begin:/\(/,end:/\)/,keywords:t,contains:["self"].concat(o)}]};return{name:"CoffeeScript",aliases:["coffee","cson","iced"],keywords:t,illegal:/\/\*/,contains:o.concat([r.COMMENT("###","###"),r.HASH_COMMENT_MODE,{className:"function",begin:"^\\s*"+i+"\\s*=\\s*(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[c,l]},{begin:/[:\(,=]\s*/,relevance:0,contains:[{className:"function",begin:"(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[l]}]},{className:"class",beginKeywords:"class",end:"$",illegal:/[:="\[\]]/,contains:[{beginKeywords:"extends",endsWithParent:!0,illegal:/[:="\[\]]/,contains:[c]},c]},{begin:i+":",end:":",returnBegin:!0,returnEnd:!0,relevance:0}])}}}()); +hljs.registerLanguage("cpp",function(){"use strict";return function(e){var t=e.getLanguage("c-like").rawDefinition();return t.disableAutodetect=!1,t.name="C++",t.aliases=["cc","c++","h++","hpp","hh","hxx","cxx"],t}}()); +hljs.registerLanguage("csharp",function(){"use strict";return function(e){var n={keyword:"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock long object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let nameof on orderby partial remove select set value var when where yield",literal:"null false 
true"},i=e.inherit(e.TITLE_MODE,{begin:"[a-zA-Z](\\.?\\w)*"}),a={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}]},t=e.inherit(s,{illegal:/\n/}),l={className:"subst",begin:"{",end:"}",keywords:n},r=e.inherit(l,{illegal:/\n/}),c={className:"string",begin:/\$"/,end:'"',illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},e.BACKSLASH_ESCAPE,r]},o={className:"string",begin:/\$@"/,end:'"',contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},l]},g=e.inherit(o,{illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},r]});l.contains=[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE],r.contains=[g,c,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{illegal:/\n/})];var d={variants:[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},E={begin:"<",end:">",contains:[{beginKeywords:"in out"},i]},_=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?",b={begin:"@"+e.IDENT_RE,relevance:0};return{name:"C#",aliases:["cs","c#"],keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0,contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{begin:"\x3c!--|--\x3e"},{begin:""}]}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#",end:"$",keywords:{"meta-keyword":"if else elif endif define undef warning error line region endregion pragma checksum"}},d,a,{beginKeywords:"class interface",end:/[{;=]/,illegal:/[^\s:,]/,contains:[{beginKeywords:"where 
class"},i,E,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace",end:/[{;=]/,illegal:/[^\s:]/,contains:[i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta",begin:"^\\s*\\[",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{className:"meta-string",begin:/"/,end:/"/}]},{beginKeywords:"new return throw await else",relevance:0},{className:"function",begin:"("+_+"\\s+)+"+e.IDENT_RE+"\\s*(\\<.+\\>)?\\s*\\(",returnBegin:!0,end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.IDENT_RE+"\\s*(\\<.+\\>)?\\s*\\(",returnBegin:!0,contains:[e.TITLE_MODE,E],relevance:0},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0,contains:[d,a,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},b]}}}()); +hljs.registerLanguage("css",function(){"use strict";return function(e){var n={begin:/(?:[A-Z\_\.\-]+|--[a-zA-Z0-9_-]+)\s*:/,returnBegin:!0,end:";",endsWithParent:!0,contains:[{className:"attribute",begin:/\S/,end:":",excludeEnd:!0,starts:{endsWithParent:!0,excludeEnd:!0,contains:[{begin:/[\w-]+\(/,returnBegin:!0,contains:[{className:"built_in",begin:/[\w-]+/},{begin:/\(/,end:/\)/,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{className:"number",begin:"#[0-9A-Fa-f]+"},{className:"meta",begin:"!important"}]}}]};return{name:"CSS",case_insensitive:!0,illegal:/[=\/|'\$]/,contains:[e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/},{className:"selector-class",begin:/\.[A-Za-z0-9_-]+/},{className:"selector-attr",begin:/\[/,end:/\]/,illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page 
@font-face"},{begin:"@",end:"[{;]",illegal:/:/,returnBegin:!0,contains:[{className:"keyword",begin:/@\-?\w[\w]*(\-\w+)*/},{begin:/\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:"and or not only",contains:[{begin:/[a-z-]+:/,className:"attribute"},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},{className:"selector-tag",begin:"[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0},{begin:"{",end:"}",illegal:/\S/,contains:[e.C_BLOCK_COMMENT_MODE,n]}]}}}()); +hljs.registerLanguage("diff",function(){"use strict";return function(e){return{name:"Diff",aliases:["patch"],contains:[{className:"meta",relevance:10,variants:[{begin:/^@@ +\-\d+,\d+ +\+\d+,\d+ +@@$/},{begin:/^\*\*\* +\d+,\d+ +\*\*\*\*$/},{begin:/^\-\-\- +\d+,\d+ +\-\-\-\-$/}]},{className:"comment",variants:[{begin:/Index: /,end:/$/},{begin:/={3,}/,end:/$/},{begin:/^\-{3}/,end:/$/},{begin:/^\*{3} /,end:/$/},{begin:/^\+{3}/,end:/$/},{begin:/^\*{15}$/}]},{className:"addition",begin:"^\\+",end:"$"},{className:"deletion",begin:"^\\-",end:"$"},{className:"addition",begin:"^\\!",end:"$"}]}}}()); +hljs.registerLanguage("go",function(){"use strict";return function(e){var n={keyword:"break default func interface select case map struct chan else goto package switch const fallthrough if range type continue for import return var go defer bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 uint16 uint32 uint64 int uint uintptr rune",literal:"true false iota nil",built_in:"append cap close complex copy imag len make new panic print println real recover delete"};return{name:"Go",aliases:["golang"],keywords:n,illegal:"e(n)).join("")}return function(a){var s={className:"number",relevance:0,variants:[{begin:/([\+\-]+)?[\d]+_[\d_]+/},{begin:a.NUMBER_RE}]},i=a.COMMENT();i.variants=[{begin:/;/,end:/$/},{begin:/#/,end:/$/}];var 
t={className:"variable",variants:[{begin:/\$[\w\d"][\w\d_]*/},{begin:/\$\{(.*?)}/}]},r={className:"literal",begin:/\bon|off|true|false|yes|no\b/},l={className:"string",contains:[a.BACKSLASH_ESCAPE],variants:[{begin:"'''",end:"'''",relevance:10},{begin:'"""',end:'"""',relevance:10},{begin:'"',end:'"'},{begin:"'",end:"'"}]},c={begin:/\[/,end:/\]/,contains:[i,r,t,l,s,"self"],relevance:0},g="("+[/[A-Za-z0-9_-]+/,/"(\\"|[^"])*"/,/'[^']*'/].map(n=>e(n)).join("|")+")";return{name:"TOML, also INI",aliases:["toml"],case_insensitive:!0,illegal:/\S/,contains:[i,{className:"section",begin:/\[+/,end:/\]+/},{begin:n(g,"(\\s*\\.\\s*",g,")*",n("(?=",/\s*=\s*[^#\s]/,")")),className:"attr",starts:{end:/$/,contains:[i,c,r,t,l,s]}}]}}}()); +hljs.registerLanguage("java",function(){"use strict";function e(e){return e?"string"==typeof e?e:e.source:null}function n(e){return a("(",e,")?")}function a(...n){return n.map(n=>e(n)).join("")}function s(...n){return"("+n.map(n=>e(n)).join("|")+")"}return function(e){var t="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports 
do",i={className:"meta",begin:"@[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*",contains:[{begin:/\(/,end:/\)/,contains:["self"]}]},r=e=>a("[",e,"]+([",e,"_]*[",e,"]+)?"),c={className:"number",variants:[{begin:`\\b(0[bB]${r("01")})[lL]?`},{begin:`\\b(0${r("0-7")})[dDfFlL]?`},{begin:a(/\b0[xX]/,s(a(r("a-fA-F0-9"),/\./,r("a-fA-F0-9")),a(r("a-fA-F0-9"),/\.?/),a(/\./,r("a-fA-F0-9"))),/([pP][+-]?(\d+))?/,/[fFdDlL]?/)},{begin:a(/\b/,s(a(/\d*\./,r("\\d")),r("\\d")),/[eE][+-]?[\d]+[dDfF]?/)},{begin:a(/\b/,r(/\d/),n(/\.?/),n(r(/\d/)),/[dDfFlL]?/)}],relevance:0};return{name:"Java",aliases:["jsp"],keywords:t,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"class",beginKeywords:"class interface",end:/[{;=]/,excludeEnd:!0,keywords:"class interface",illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"new throw return else",relevance:0},{className:"function",begin:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:t,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"params",begin:/\(/,end:/\)/,keywords:t,relevance:0,contains:[i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},c,i]}}}()); +hljs.registerLanguage("javascript",function(){"use strict";const 
e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);function s(e){return r("(?=",e,")")}function r(...e){return e.map(e=>(function(e){return e?"string"==typeof e?e:e.source:null})(e)).join("")}return function(t){var i="[A-Za-z$_][0-9A-Za-z$_]*",c={begin:/<[A-Za-z0-9\\._:-]+/,end:/\/[A-Za-z0-9\\._:-]+>|\/>/},o={$pattern:"[A-Za-z$_][0-9A-Za-z$_]*",keyword:e.join(" "),literal:n.join(" "),built_in:a.join(" 
")},l={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:t.C_NUMBER_RE+"n?"}],relevance:0},E={className:"subst",begin:"\\$\\{",end:"\\}",keywords:o,contains:[]},d={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:"xml"}},g={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:"css"}},u={className:"string",begin:"`",end:"`",contains:[t.BACKSLASH_ESCAPE,E]};E.contains=[t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,l,t.REGEXP_MODE];var b=E.contains.concat([{begin:/\(/,end:/\)/,contains:["self"].concat(E.contains,[t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE])},t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE]),_={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:b};return{name:"JavaScript",aliases:["js","jsx","mjs","cjs"],keywords:o,contains:[t.SHEBANG({binary:"node",relevance:5}),{className:"meta",relevance:10,begin:/^\s*['"]use (strict|asm)['"]/},t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,t.C_LINE_COMMENT_MODE,t.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+",contains:[{className:"type",begin:"\\{",end:"\\}",relevance:0},{className:"variable",begin:i+"(?=\\s*(-)|$)",endsParent:!0,relevance:0},{begin:/(?=[^\n])\s/,relevance:0}]}]}),t.C_BLOCK_COMMENT_MODE,l,{begin:r(/[{,\n]\s*/,s(r(/(((\/\/.*)|(\/\*(.|\n)*\*\/))\s*)*/,i+"\\s*:"))),relevance:0,contains:[{className:"attr",begin:i+s("\\s*:"),relevance:0}]},{begin:"("+t.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw 
case",contains:[t.C_LINE_COMMENT_MODE,t.C_BLOCK_COMMENT_MODE,t.REGEXP_MODE,{className:"function",begin:"(\\([^(]*(\\([^(]*(\\([^(]*\\))?\\))?\\)|"+t.UNDERSCORE_IDENT_RE+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:t.UNDERSCORE_IDENT_RE},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:o,contains:b}]}]},{begin:/,/,relevance:0},{className:"",begin:/\s/,end:/\s*/,skip:!0},{variants:[{begin:"<>",end:""},{begin:c.begin,end:c.end}],subLanguage:"xml",contains:[{begin:c.begin,end:c.end,skip:!0,contains:["self"]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/\{/,excludeEnd:!0,contains:[t.inherit(t.TITLE_MODE,{begin:i}),_],illegal:/\[|%/},{begin:/\$[(.]/},t.METHOD_GUARD,{className:"class",beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"\[\]]/,contains:[{beginKeywords:"extends"},t.UNDERSCORE_TITLE_MODE]},{beginKeywords:"constructor",end:/\{/,excludeEnd:!0},{begin:"(get|set)\\s+(?="+i+"\\()",end:/{/,keywords:"get set",contains:[t.inherit(t.TITLE_MODE,{begin:i}),{begin:/\(\)/},_]}],illegal:/#(?!!)/}}}()); +hljs.registerLanguage("json",function(){"use strict";return function(n){var e={literal:"true false null"},i=[n.C_LINE_COMMENT_MODE,n.C_BLOCK_COMMENT_MODE],t=[n.QUOTE_STRING_MODE,n.C_NUMBER_MODE],a={end:",",endsWithParent:!0,excludeEnd:!0,contains:t,keywords:e},l={begin:"{",end:"}",contains:[{className:"attr",begin:/"/,end:/"/,contains:[n.BACKSLASH_ESCAPE],illegal:"\\n"},n.inherit(a,{begin:/:/})].concat(i),illegal:"\\S"},s={begin:"\\[",end:"\\]",contains:[n.inherit(a)],illegal:"\\S"};return t.push(l,s),i.forEach((function(n){t.push(n)})),{name:"JSON",contains:t,keywords:e,illegal:"\\S"}}}()); +hljs.registerLanguage("kotlin",function(){"use strict";return function(e){var n={keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import 
package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual trait volatile transient native default",built_in:"Byte Short Char Int Long Boolean Float Double Void Unit Nothing",literal:"true false null"},a={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@"},i={className:"subst",begin:"\\${",end:"}",contains:[e.C_NUMBER_MODE]},s={className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},t={className:"string",variants:[{begin:'"""',end:'"""(?=[^"])',contains:[s,i]},{begin:"'",end:"'",illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/,contains:[e.BACKSLASH_ESCAPE,s,i]}]};i.contains.push(t);var r={className:"meta",begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?"},l={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/,end:/\)/,contains:[e.inherit(t,{className:"meta-string"})]}]},c=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),o={variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/,contains:[]}]},d=o;return 
d.variants[1].contains=[o],o.variants[1].contains=[d],{name:"Kotlin",aliases:["kt"],keywords:n,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,c,{className:"keyword",begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol",begin:/@\w+/}]}},a,r,l,{className:"function",beginKeywords:"fun",end:"[(]|$",returnBegin:!0,excludeEnd:!0,keywords:n,illegal:/fun\s+(<.*>)?[^\s\(]+(\s+[^\s\(]+)\s*=/,relevance:5,contains:[{begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://,keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/,endsWithParent:!0,contains:[o,e.C_LINE_COMMENT_MODE,c],relevance:0},e.C_LINE_COMMENT_MODE,c,r,l,t,e.C_NUMBER_MODE]},c]},{className:"class",beginKeywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0,illegal:"extends implements",contains:[{beginKeywords:"public protected internal private constructor"},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,]|$/,excludeBegin:!0,returnEnd:!0},r,l]},t,{className:"meta",begin:"^#!/usr/bin/env",end:"$",illegal:"\n"},{className:"number",begin:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",relevance:0}]}}}()); +hljs.registerLanguage("less",function(){"use strict";return function(e){var 
n="([\\w-]+|@{[\\w-]+})",a=[],s=[],t=function(e){return{className:"string",begin:"~?"+e+".*?"+e}},r=function(e,n,a){return{className:e,begin:n,relevance:a}},i={begin:"\\(",end:"\\)",contains:s,relevance:0};s.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,t("'"),t('"'),e.CSS_NUMBER_MODE,{begin:"(url|data-uri)\\(",starts:{className:"string",end:"[\\)\\n]",excludeEnd:!0}},r("number","#[0-9A-Fa-f]+\\b"),i,r("variable","@@?[\\w-]+",10),r("variable","@{[\\w-]+}"),r("built_in","~?`[^`]*?`"),{className:"attribute",begin:"[\\w-]+\\s*:",end:":",returnBegin:!0,excludeEnd:!0},{className:"meta",begin:"!important"});var c=s.concat({begin:"{",end:"}",contains:a}),l={beginKeywords:"when",endsWithParent:!0,contains:[{beginKeywords:"and not"}].concat(s)},o={begin:n+"\\s*:",returnBegin:!0,end:"[;}]",relevance:0,contains:[{className:"attribute",begin:n,end:":",excludeEnd:!0,starts:{endsWithParent:!0,illegal:"[<=$]",relevance:0,contains:s}}]},g={className:"keyword",begin:"@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\b",starts:{end:"[;{}]",returnEnd:!0,contains:s,relevance:0}},d={className:"variable",variants:[{begin:"@[\\w-]+\\s*:",relevance:15},{begin:"@[\\w-]+"}],starts:{end:"[;}]",returnEnd:!0,contains:c}},b={variants:[{begin:"[\\.#:&\\[>]",end:"[;{}]"},{begin:n,end:"{"}],returnBegin:!0,returnEnd:!0,illegal:"[<='$\"]",relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,l,r("keyword","all\\b"),r("variable","@{[\\w-]+}"),r("selector-tag",n+"%?",0),r("selector-id","#"+n),r("selector-class","\\."+n,0),r("selector-tag","&",0),{className:"selector-attr",begin:"\\[",end:"\\]"},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"\\(",end:"\\)",contains:c},{begin:"!important"}]};return a.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,g,d,o,b),{name:"Less",case_insensitive:!0,illegal:"[=>'/<($\"]",contains:a}}}()); +hljs.registerLanguage("lua",function(){"use strict";return function(e){var 
t={begin:"\\[=*\\[",end:"\\]=*\\]",contains:["self"]},a=[e.COMMENT("--(?!\\[=*\\[)","$"),e.COMMENT("--\\[=*\\[","\\]=*\\]",{contains:[t],relevance:10})];return{name:"Lua",keywords:{$pattern:e.UNDERSCORE_IDENT_RE,literal:"true false nil",keyword:"and break do else elseif end for goto if in local not or repeat return then until while",built_in:"_G _ENV _VERSION __index __newindex __mode __call __metatable __tostring __len __gc __add __sub __mul __div __mod __pow __concat __unm __eq __lt __le assert collectgarbage dofile error getfenv getmetatable ipairs load loadfile loadstring module next pairs pcall print rawequal rawget rawset require select setfenv setmetatable tonumber tostring type unpack xpcall arg self coroutine resume yield status wrap create running debug getupvalue debug sethook getmetatable gethook setmetatable setlocal traceback setfenv getinfo setupvalue getlocal getregistry getfenv io lines write close flush open output type read stderr stdin input stdout popen tmpfile math log max acos huge ldexp pi cos tanh pow deg tan cosh sinh random randomseed frexp ceil floor rad abs sqrt modf asin min mod fmod log10 atan2 exp sin atan os exit setlocale date getenv difftime remove time clock tmpname rename execute package preload loadlib loaded loaders cpath config path seeall string sub upper len gfind rep find match char dump gmatch reverse byte format gsub lower table setn insert getn foreachi maxn foreach concat sort remove"},contains:a.concat([{className:"function",beginKeywords:"function",end:"\\)",contains:[e.inherit(e.TITLE_MODE,{begin:"([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*"}),{className:"params",begin:"\\(",endsWithParent:!0,contains:a}].concat(a)},e.C_NUMBER_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"string",begin:"\\[=*\\[",end:"\\]=*\\]",contains:[t],relevance:5}])}}}()); +hljs.registerLanguage("makefile",function(){"use strict";return function(e){var 
i={className:"variable",variants:[{begin:"\\$\\("+e.UNDERSCORE_IDENT_RE+"\\)",contains:[e.BACKSLASH_ESCAPE]},{begin:/\$[@%`]+/}]}]}]};return{name:"HTML, XML",aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],case_insensitive:!0,contains:[{className:"meta",begin:"",relevance:10,contains:[a,i,t,s,{begin:"\\[",end:"\\]",contains:[{className:"meta",begin:"",contains:[a,s,i,t]}]}]},e.COMMENT("\x3c!--","--\x3e",{relevance:10}),{begin:"<\\!\\[CDATA\\[",end:"\\]\\]>",relevance:10},n,{className:"meta",begin:/<\?xml/,end:/\?>/,relevance:10},{className:"tag",begin:")",end:">",keywords:{name:"style"},contains:[c],starts:{end:"",returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag",begin:")",end:">",keywords:{name:"script"},contains:[c],starts:{end:"<\/script>",returnEnd:!0,subLanguage:["javascript","handlebars","xml"]}},{className:"tag",begin:"",contains:[{className:"name",begin:/[^\/><\s]+/,relevance:0},c]}]}}}()); +hljs.registerLanguage("markdown",function(){"use strict";return function(n){const e={begin:"<",end:">",subLanguage:"xml",relevance:0},a={begin:"\\[.+?\\][\\(\\[].*?[\\)\\]]",returnBegin:!0,contains:[{className:"string",begin:"\\[",end:"\\]",excludeBegin:!0,returnEnd:!0,relevance:0},{className:"link",begin:"\\]\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0},{className:"symbol",begin:"\\]\\[",end:"\\]",excludeBegin:!0,excludeEnd:!0}],relevance:10},i={className:"strong",contains:[],variants:[{begin:/_{2}/,end:/_{2}/},{begin:/\*{2}/,end:/\*{2}/}]},s={className:"emphasis",contains:[],variants:[{begin:/\*(?!\*)/,end:/\*/},{begin:/_(?!_)/,end:/_/,relevance:0}]};i.contains.push(s),s.contains.push(i);var c=[e,a];return i.contains=i.contains.concat(c),s.contains=s.contains.concat(c),{name:"Markdown",aliases:["md","mkdown","mkd"],contains:[{className:"section",variants:[{begin:"^#{1,6}",end:"$",contains:c=c.concat(i,s)},{begin:"(?=^.+?\\n[=-]{2,}$)",contains:[{begin:"^[=-]*$"},{begin:"^",end:"\\n",contains:c}]}]},e,{className:"bullet",begin:"^[ 
\t]*([*+-]|(\\d+\\.))(?=\\s+)",end:"\\s+",excludeEnd:!0},i,s,{className:"quote",begin:"^>\\s+",contains:c,end:"$"},{className:"code",variants:[{begin:"(`{3,})(.|\\n)*?\\1`*[ ]*"},{begin:"(~{3,})(.|\\n)*?\\1~*[ ]*"},{begin:"```",end:"```+[ ]*$"},{begin:"~~~",end:"~~~+[ ]*$"},{begin:"`.+?`"},{begin:"(?=^( {4}|\\t))",contains:[{begin:"^( {4}|\\t)",end:"(\\n)$"}],relevance:0}]},{begin:"^[-\\*]{3,}",end:"$"},a,{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}}}()); +hljs.registerLanguage("nginx",function(){"use strict";return function(e){var n={className:"variable",variants:[{begin:/\$\d+/},{begin:/\$\{/,end:/}/},{begin:"[\\$\\@]"+e.UNDERSCORE_IDENT_RE}]},a={endsWithParent:!0,keywords:{$pattern:"[a-z/_]+",literal:"on off yes no true false none blocked debug info notice warn error crit select break last permanent redirect kqueue rtsig epoll poll /dev/poll"},relevance:0,illegal:"=>",contains:[e.HASH_COMMENT_MODE,{className:"string",contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:/"/,end:/"/},{begin:/'/,end:/'/}]},{begin:"([a-z]+):/",end:"\\s",endsWithParent:!0,excludeEnd:!0,contains:[n]},{className:"regexp",contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:"\\s\\^",end:"\\s|{|;",returnEnd:!0},{begin:"~\\*?\\s+",end:"\\s|{|;",returnEnd:!0},{begin:"\\*(\\.[a-z\\-]+)+"},{begin:"([a-z\\-]+\\.)+\\*"}]},{className:"number",begin:"\\b\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?\\b"},{className:"number",begin:"\\b\\d+[kKmMgGdshdwy]*\\b",relevance:0},n]};return{name:"Nginx config",aliases:["nginxconf"],contains:[e.HASH_COMMENT_MODE,{begin:e.UNDERSCORE_IDENT_RE+"\\s+{",returnBegin:!0,end:"{",contains:[{className:"section",begin:e.UNDERSCORE_IDENT_RE}],relevance:0},{begin:e.UNDERSCORE_IDENT_RE+"\\s",end:";|{",returnBegin:!0,contains:[{className:"attribute",begin:e.UNDERSCORE_IDENT_RE,starts:a}],relevance:0}],illegal:"[^\\s\\}]"}}}()); 
+hljs.registerLanguage("objectivec",function(){"use strict";return function(e){var n=/[a-zA-Z@][a-zA-Z0-9_]*/,_={$pattern:n,keyword:"@interface @class @protocol @implementation"};return{name:"Objective-C",aliases:["mm","objc","obj-c"],keywords:{$pattern:n,keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN",literal:"false true FALSE TRUE nil YES NO NULL",built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once"},illegal:"/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"class",begin:"("+_.keyword.split(" ").join("|")+")\\b",end:"({|$)",excludeEnd:!0,keywords:_,contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"\\."+e.UNDERSCORE_IDENT_RE,relevance:0}]}}}()); +hljs.registerLanguage("perl",function(){"use strict";return 
function(e){var n={$pattern:/[\w.]+/,keyword:"getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qq fileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmget sub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedir ioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state 
when"},t={className:"subst",begin:"[$@]\\{",end:"\\}",keywords:n},s={begin:"->{",end:"}"},r={variants:[{begin:/\$\d/},{begin:/[\$%@](\^\w\b|#\w+(::\w+)*|{\w+}|\w+(::\w*)*)/},{begin:/[\$%@][^\s\w{]/,relevance:0}]},i=[e.BACKSLASH_ESCAPE,t,r],a=[r,e.HASH_COMMENT_MODE,e.COMMENT("^\\=\\w","\\=cut",{endsWithParent:!0}),s,{className:"string",contains:i,variants:[{begin:"q[qwxr]?\\s*\\(",end:"\\)",relevance:5},{begin:"q[qwxr]?\\s*\\[",end:"\\]",relevance:5},{begin:"q[qwxr]?\\s*\\{",end:"\\}",relevance:5},{begin:"q[qwxr]?\\s*\\|",end:"\\|",relevance:5},{begin:"q[qwxr]?\\s*\\<",end:"\\>",relevance:5},{begin:"qw\\s+q",end:"q",relevance:5},{begin:"'",end:"'",contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"'},{begin:"`",end:"`",contains:[e.BACKSLASH_ESCAPE]},{begin:"{\\w+}",contains:[],relevance:0},{begin:"-?\\w+\\s*\\=\\>",contains:[],relevance:0}]},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\/\\/|"+e.RE_STARTERS_RE+"|\\b(split|return|print|reverse|grep)\\b)\\s*",keywords:"split return print reverse grep",relevance:0,contains:[e.HASH_COMMENT_MODE,{className:"regexp",begin:"(s|tr|y)/(\\\\.|[^/])*/(\\\\.|[^/])*/[a-z]*",relevance:10},{className:"regexp",begin:"(m|qr)?/",end:"/[a-z]*",contains:[e.BACKSLASH_ESCAPE],relevance:0}]},{className:"function",beginKeywords:"sub",end:"(\\s*\\(.*?\\))?[;{]",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{begin:"-\\w\\b",relevance:0},{begin:"^__DATA__$",end:"^__END__$",subLanguage:"mojolicious",contains:[{begin:"^@@.*",end:"$",className:"comment"}]}];return t.contains=a,s.contains=a,{name:"Perl",aliases:["pl","pm"],keywords:n,contains:a}}}()); +hljs.registerLanguage("php",function(){"use strict";return function(e){var 
r={begin:"\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*"},t={className:"meta",variants:[{begin:/<\?php/,relevance:10},{begin:/<\?[=]?/},{begin:/\?>/}]},a={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:'b"',end:'"'},{begin:"b'",end:"'"},e.inherit(e.APOS_STRING_MODE,{illegal:null}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null})]},n={variants:[e.BINARY_NUMBER_MODE,e.C_NUMBER_MODE]},i={keyword:"__CLASS__ __DIR__ __FILE__ __FUNCTION__ __LINE__ __METHOD__ __NAMESPACE__ __TRAIT__ die echo exit include include_once print require require_once array abstract and as binary bool boolean break callable case catch class clone const continue declare default do double else elseif empty enddeclare endfor endforeach endif endswitch endwhile eval extends final finally float for foreach from global goto if implements instanceof insteadof int integer interface isset iterable list new object or private protected public real return string switch throw trait try unset use var void while xor yield",literal:"false null true",built_in:"Error|0 AppendIterator ArgumentCountError ArithmeticError ArrayIterator ArrayObject AssertionError BadFunctionCallException BadMethodCallException CachingIterator CallbackFilterIterator CompileError Countable DirectoryIterator DivisionByZeroError DomainException EmptyIterator ErrorException Exception FilesystemIterator FilterIterator GlobIterator InfiniteIterator InvalidArgumentException IteratorIterator LengthException LimitIterator LogicException MultipleIterator NoRewindIterator OutOfBoundsException OutOfRangeException OuterIterator OverflowException ParentIterator ParseError RangeException RecursiveArrayIterator RecursiveCachingIterator RecursiveCallbackFilterIterator RecursiveDirectoryIterator RecursiveFilterIterator RecursiveIterator RecursiveIteratorIterator RecursiveRegexIterator RecursiveTreeIterator RegexIterator RuntimeException SeekableIterator SplDoublyLinkedList SplFileInfo SplFileObject SplFixedArray SplHeap SplMaxHeap SplMinHeap 
SplObjectStorage SplObserver SplObserver SplPriorityQueue SplQueue SplStack SplSubject SplSubject SplTempFileObject TypeError UnderflowException UnexpectedValueException ArrayAccess Closure Generator Iterator IteratorAggregate Serializable Throwable Traversable WeakReference Directory __PHP_Incomplete_Class parent php_user_filter self static stdClass"};return{aliases:["php","php3","php4","php5","php6","php7"],case_insensitive:!0,keywords:i,contains:[e.HASH_COMMENT_MODE,e.COMMENT("//","$",{contains:[t]}),e.COMMENT("/\\*","\\*/",{contains:[{className:"doctag",begin:"@[A-Za-z]+"}]}),e.COMMENT("__halt_compiler.+?;",!1,{endsWithParent:!0,keywords:"__halt_compiler"}),{className:"string",begin:/<<<['"]?\w+['"]?$/,end:/^\w+;?$/,contains:[e.BACKSLASH_ESCAPE,{className:"subst",variants:[{begin:/\$\w+/},{begin:/\{\$/,end:/\}/}]}]},t,{className:"keyword",begin:/\$this\b/},r,{begin:/(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/},{className:"function",beginKeywords:"fn function",end:/[;{]/,excludeEnd:!0,illegal:"[$%\\[]",contains:[e.UNDERSCORE_TITLE_MODE,{className:"params",begin:"\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0,keywords:i,contains:["self",r,e.C_BLOCK_COMMENT_MODE,a,n]}]},{className:"class",beginKeywords:"class interface",end:"{",excludeEnd:!0,illegal:/[:\(\$"]/,contains:[{beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"namespace",end:";",illegal:/[\.']/,contains:[e.UNDERSCORE_TITLE_MODE]},{beginKeywords:"use",end:";",contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"=>"},a,n]}}}()); +hljs.registerLanguage("php-template",function(){"use strict";return function(n){return{name:"PHP template",subLanguage:"xml",contains:[{begin:/<\?(php|=)?/,end:/\?>/,subLanguage:"php",contains:[{begin:"/\\*",end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{begin:"b'",end:"'",skip:!0},n.inherit(n.APOS_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0}),n.inherit(n.QUOTE_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0})]}]}}}()); 
+hljs.registerLanguage("plaintext",function(){"use strict";return function(t){return{name:"Plain text",aliases:["text","txt"],disableAutodetect:!0}}}()); +hljs.registerLanguage("properties",function(){"use strict";return function(e){var n="[ \\t\\f]*",t="("+n+"[:=]"+n+"|[ \\t\\f]+)",a="([^\\\\:= \\t\\f\\n]|\\\\.)+",s={end:t,relevance:0,starts:{className:"string",end:/$/,relevance:0,contains:[{begin:"\\\\\\n"}]}};return{name:".properties",case_insensitive:!0,illegal:/\S/,contains:[e.COMMENT("^\\s*[!#]","$"),{begin:"([^\\\\\\W:= \\t\\f\\n]|\\\\.)+"+t,returnBegin:!0,contains:[{className:"attr",begin:"([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",endsParent:!0,relevance:0}],starts:s},{begin:a+t,returnBegin:!0,relevance:0,contains:[{className:"meta",begin:a,endsParent:!0,relevance:0}],starts:s},{className:"attr",relevance:0,begin:a+n+"$"}]}}}()); +hljs.registerLanguage("python",function(){"use strict";return function(e){var n={keyword:"and elif is global as in if from raise for except finally print import pass return exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10",built_in:"Ellipsis NotImplemented",literal:"False None True"},a={className:"meta",begin:/^(>>>|\.\.\.) 
/},i={className:"subst",begin:/\{/,end:/\}/,keywords:n,illegal:/#/},s={begin:/\{\{/,relevance:0},r={className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:/(u|b)?r?'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(u|b)?r?"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(fr|rf|f)'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(fr|rf|f)"""/,end:/"""/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(u|r|ur)'/,end:/'/,relevance:10},{begin:/(u|r|ur)"/,end:/"/,relevance:10},{begin:/(b|br)'/,end:/'/},{begin:/(b|br)"/,end:/"/},{begin:/(fr|rf|f)'/,end:/'/,contains:[e.BACKSLASH_ESCAPE,s,i]},{begin:/(fr|rf|f)"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,s,i]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},l={className:"number",relevance:0,variants:[{begin:e.BINARY_NUMBER_RE+"[lLjJ]?"},{begin:"\\b(0o[0-7]+)[lLjJ]?"},{begin:e.C_NUMBER_RE+"[lLjJ]?"}]},t={className:"params",variants:[{begin:/\(\s*\)/,skip:!0,className:null},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,contains:["self",a,l,r,e.HASH_COMMENT_MODE]}]};return i.contains=[r,l,a],{name:"Python",aliases:["py","gyp","ipython"],keywords:n,illegal:/(<\/|->|\?)|=>/,contains:[a,l,{beginKeywords:"if",relevance:0},r,e.HASH_COMMENT_MODE,{variants:[{className:"function",beginKeywords:"def"},{className:"class",beginKeywords:"class"}],end:/:/,illegal:/[${=;\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,t,{begin:/->/,endsWithParent:!0,keywords:"None"}]},{className:"meta",begin:/^[\t ]*@/,end:/$/},{begin:/\b(print|exec)\(/}]}}}()); +hljs.registerLanguage("python-repl",function(){"use strict";return function(n){return{aliases:["pycon"],contains:[{className:"meta",starts:{end:/ |$/,starts:{end:"$",subLanguage:"python"}},variants:[{begin:/^>>>(?=[ ]|$)/},{begin:/^\.\.\.(?=[ ]|$)/}]}]}}}()); +hljs.registerLanguage("ruby",function(){"use strict";return function(e){var n="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",a={keyword:"and then defined module in 
return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor",literal:"true false nil"},s={className:"doctag",begin:"@[A-Za-z]+"},i={begin:"#<",end:">"},r=[e.COMMENT("#","$",{contains:[s]}),e.COMMENT("^\\=begin","^\\=end",{contains:[s],relevance:10}),e.COMMENT("^__END__","\\n$")],c={className:"subst",begin:"#\\{",end:"}",keywords:a},t={className:"string",contains:[e.BACKSLASH_ESCAPE,c],variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{begin:"%[qQwWx]?\\(",end:"\\)"},{begin:"%[qQwWx]?\\[",end:"\\]"},{begin:"%[qQwWx]?{",end:"}"},{begin:"%[qQwWx]?<",end:">"},{begin:"%[qQwWx]?/",end:"/"},{begin:"%[qQwWx]?%",end:"%"},{begin:"%[qQwWx]?-",end:"-"},{begin:"%[qQwWx]?\\|",end:"\\|"},{begin:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{begin:/<<[-~]?'?(\w+)(?:.|\n)*?\n\s*\1\b/,returnBegin:!0,contains:[{begin:/<<[-~]?'?/},e.END_SAME_AS_BEGIN({begin:/(\w+)/,end:/(\w+)/,contains:[e.BACKSLASH_ESCAPE,c]})]}]},b={className:"params",begin:"\\(",end:"\\)",endsParent:!0,keywords:a},d=[t,i,{className:"class",beginKeywords:"class 
module",end:"$|;",illegal:/=/,contains:[e.inherit(e.TITLE_MODE,{begin:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{begin:"<\\s*",contains:[{begin:"("+e.IDENT_RE+"::)?"+e.IDENT_RE}]}].concat(r)},{className:"function",beginKeywords:"def",end:"$|;",contains:[e.inherit(e.TITLE_MODE,{begin:n}),b].concat(r)},{begin:e.IDENT_RE+"::"},{className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"(\\!|\\?)?:",relevance:0},{className:"symbol",begin:":(?!\\s)",contains:[t,{begin:n}],relevance:0},{className:"number",begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",relevance:0},{begin:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{className:"params",begin:/\|/,end:/\|/,keywords:a},{begin:"("+e.RE_STARTERS_RE+"|unless)\\s*",keywords:"unless",contains:[i,{className:"regexp",contains:[e.BACKSLASH_ESCAPE,c],illegal:/\n/,variants:[{begin:"/",end:"/[a-z]*"},{begin:"%r{",end:"}[a-z]*"},{begin:"%r\\(",end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[",end:"\\][a-z]*"}]}].concat(r),relevance:0}].concat(r);c.contains=d,b.contains=d;var g=[{begin:/^\s*=>/,starts:{end:"$",contains:d}},{className:"meta",begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>)",starts:{end:"$",contains:d}}];return{name:"Ruby",aliases:["rb","gemspec","podspec","thor","irb"],keywords:a,illegal:/\/\*/,contains:r.concat(g).concat(d)}}}()); +hljs.registerLanguage("rust",function(){"use strict";return function(e){var n="([ui](8|16|32|64|128|size)|f(32|64))?",t="drop i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize f32 f64 str char bool Box Option Result String Vec Copy Send Sized Sync Drop Fn FnMut FnOnce ToOwned Clone Debug PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator Extend IntoIterator DoubleEndedIterator ExactSizeIterator SliceConcatExt ToString assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! debug_assert! debug_assert_eq! env! panic! file! format! format_args! include_bin! include_str! line! local_data_key! module_path! 
option_env! print! println! select! stringify! try! unimplemented! unreachable! vec! write! writeln! macro_rules! assert_ne! debug_assert_ne!";return{name:"Rust",aliases:["rs"],keywords:{$pattern:e.IDENT_RE+"!?",keyword:"abstract as async await become box break const continue crate do dyn else enum extern false final fn for if impl in let loop macro match mod move mut override priv pub ref return self Self static struct super trait true try type typeof unsafe unsized use virtual where while yield",literal:"true false Some None Ok Err",built_in:t},illegal:""}]}}}()); +hljs.registerLanguage("scss",function(){"use strict";return function(e){var t={className:"variable",begin:"(\\$[a-zA-Z-][a-zA-Z0-9_-]*)\\b"},i={className:"number",begin:"#[0-9A-Fa-f]+"};return e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{name:"SCSS",case_insensitive:!0,illegal:"[=/|']",contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:"\\#[A-Za-z0-9_-]+",relevance:0},{className:"selector-class",begin:"\\.[A-Za-z0-9_-]+",relevance:0},{className:"selector-attr",begin:"\\[",end:"\\]",illegal:"$"},{className:"selector-tag",begin:"\\b(a|abbr|acronym|address|area|article|aside|audio|b|base|big|blockquote|body|br|button|canvas|caption|cite|code|col|colgroup|command|datalist|dd|del|details|dfn|div|dl|dt|em|embed|fieldset|figcaption|figure|footer|form|frame|frameset|(h[1-6])|head|header|hgroup|hr|html|i|iframe|img|input|ins|kbd|keygen|label|legend|li|link|map|mark|meta|meter|nav|noframes|noscript|object|ol|optgroup|option|output|p|param|pre|progress|q|rp|rt|ruby|samp|script|section|select|small|span|strike|strong|style|sub|sup|table|tbody|td|textarea|tfoot|th|thead|time|title|tr|tt|ul|var|video)\\b",relevance:0},{className:"selector-pseudo",begin:":(visited|valid|root|right|required|read-write|read-only|out-range|optional|only-of-type|only-child|nth-of-type|nth-last-of-type|nth-last-child|nth-child|not|link|left|last-of-type|last-child|lang|
invalid|indeterminate|in-range|hover|focus|first-of-type|first-line|first-letter|first-child|first|enabled|empty|disabled|default|checked|before|after|active)"},{className:"selector-pseudo",begin:"::(after|before|choices|first-letter|first-line|repeat-index|repeat-item|selection|value)"},t,{className:"attribute",begin:"\\b(src|z-index|word-wrap|word-spacing|word-break|width|widows|white-space|visibility|vertical-align|unicode-bidi|transition-timing-function|transition-property|transition-duration|transition-delay|transition|transform-style|transform-origin|transform|top|text-underline-position|text-transform|text-shadow|text-rendering|text-overflow|text-indent|text-decoration-style|text-decoration-line|text-decoration-color|text-decoration|text-align-last|text-align|tab-size|table-layout|right|resize|quotes|position|pointer-events|perspective-origin|perspective|page-break-inside|page-break-before|page-break-after|padding-top|padding-right|padding-left|padding-bottom|padding|overflow-y|overflow-x|overflow-wrap|overflow|outline-width|outline-style|outline-offset|outline-color|outline|orphans|order|opacity|object-position|object-fit|normal|none|nav-up|nav-right|nav-left|nav-index|nav-down|min-width|min-height|max-width|max-height|mask|marks|margin-top|margin-right|margin-left|margin-bottom|margin|list-style-type|list-style-position|list-style-image|list-style|line-height|letter-spacing|left|justify-content|initial|inherit|ime-mode|image-orientation|image-resolution|image-rendering|icon|hyphens|height|font-weight|font-variant-ligatures|font-variant|font-style|font-stretch|font-size-adjust|font-size|font-language-override|font-kerning|font-feature-settings|font-family|font|float|flex-wrap|flex-shrink|flex-grow|flex-flow|flex-direction|flex-basis|flex|filter|empty-cells|display|direction|cursor|counter-reset|counter-increment|content|column-width|column-span|column-rule-width|column-rule-style|column-rule-color|column-rule|column-gap|column-fill|column-count|columns|color
|clip-path|clip|clear|caption-side|break-inside|break-before|break-after|box-sizing|box-shadow|box-decoration-break|bottom|border-width|border-top-width|border-top-style|border-top-right-radius|border-top-left-radius|border-top-color|border-top|border-style|border-spacing|border-right-width|border-right-style|border-right-color|border-right|border-radius|border-left-width|border-left-style|border-left-color|border-left|border-image-width|border-image-source|border-image-slice|border-image-repeat|border-image-outset|border-image|border-color|border-collapse|border-bottom-width|border-bottom-style|border-bottom-right-radius|border-bottom-left-radius|border-bottom-color|border-bottom|border|background-size|background-repeat|background-position|background-origin|background-image|background-color|background-clip|background-attachment|background-blend-mode|background|backface-visibility|auto|animation-timing-function|animation-play-state|animation-name|animation-iteration-count|animation-fill-mode|animation-duration|animation-direction|animation-delay|animation|align-self|align-items|align-content)\\b",illegal:"[^\\s]"},{begin:"\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|d
otted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|bold|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\b"},{begin:":",end:";",contains:[t,i,e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,{className:"meta",begin:"!important"}]},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page @font-face"},{begin:"@",end:"[{;]",returnBegin:!0,keywords:"and or not only",contains:[{begin:"@[a-z-]+",className:"keyword"},t,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,i,e.CSS_NUMBER_MODE]}]}}}()); +hljs.registerLanguage("shell",function(){"use strict";return function(s){return{name:"Shell Session",aliases:["console"],contains:[{className:"meta",begin:"^\\s{0,3}[/\\w\\d\\[\\]()@-]*[>%$#]",starts:{end:"$",subLanguage:"bash"}}]}}}()); +hljs.registerLanguage("sql",function(){"use strict";return function(e){var t=e.COMMENT("--","$");return{name:"SQL",case_insensitive:!0,illegal:/[<>{}*]/,contains:[{beginKeywords:"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke comment values with",end:/;/,endsWithParent:!0,keywords:{$pattern:/[\w\.]+/,keyword:"as abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias all allocate allow alter always analyze ancillary and anti any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib 
attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound bucket buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees 
delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain explode export export_set extended extent external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force foreign form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour hours http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect 
into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lateral lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minutes minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notnull notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio 
orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second seconds section securefile security seed segment select self semi sequence sequential serializable server servererror session session_user sessions_per_user set sets 
settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tablesample tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unnest unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date 
utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace window with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek",literal:"true false null unknown",built_in:"array bigint binary bit blob bool boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text time timestamp tinyint varchar varchar2 varying void"},contains:[{className:"string",begin:"'",end:"'",contains:[{begin:"''"}]},{className:"string",begin:'"',end:'"',contains:[{begin:'""'}]},{className:"string",begin:"`",end:"`"},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]},e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]}}}()); +hljs.registerLanguage("swift",function(){"use strict";return function(e){var i={keyword:"#available #colorLiteral #column #else #elseif #endif #file #fileLiteral #function #if #imageLiteral #line #selector #sourceLocation _ __COLUMN__ __FILE__ __FUNCTION__ __LINE__ Any as as! as? associatedtype associativity break case catch class continue convenience default defer deinit didSet do dynamic dynamicType else enum extension fallthrough false fileprivate final for func get guard if import in indirect infix init inout internal is lazy left let mutating nil none nonmutating open operator optional override postfix precedence prefix private protocol Protocol public repeat required rethrows return right self Self set static struct subscript super switch throw throws true try try! try? 
Type typealias unowned var weak where while willSet",literal:"true false nil",built_in:"abs advance alignof alignofValue anyGenerator assert assertionFailure bridgeFromObjectiveC bridgeFromObjectiveCUnconditional bridgeToObjectiveC bridgeToObjectiveCUnconditional c compactMap contains count countElements countLeadingZeros debugPrint debugPrintln distance dropFirst dropLast dump encodeBitsAsWords enumerate equal fatalError filter find getBridgedObjectiveCType getVaList indices insertionSort isBridgedToObjectiveC isBridgedVerbatimToObjectiveC isUniquelyReferenced isUniquelyReferencedNonObjC join lazy lexicographicalCompare map max maxElement min minElement numericCast overlaps partition posix precondition preconditionFailure print println quickSort readLine reduce reflect reinterpretCast reverse roundUpToAlignment sizeof sizeofValue sort split startsWith stride strideof strideofValue swap toString transcode underestimateCount unsafeAddressOf unsafeBitCast unsafeDowncast unsafeUnwrap unsafeReflect withExtendedLifetime withObjectAtPlusZero withUnsafePointer withUnsafePointerToObject withUnsafeMutablePointer withUnsafeMutablePointers withUnsafePointer withUnsafePointers withVaList zip"},n=e.COMMENT("/\\*","\\*/",{contains:["self"]}),t={className:"subst",begin:/\\\(/,end:"\\)",keywords:i,contains:[]},a={className:"string",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:/"""/,end:/"""/},{begin:/"/,end:/"/}]},r={className:"number",begin:"\\b([\\d_]+(\\.[\\deE_]+)?|0x[a-fA-F0-9_]+(\\.[a-fA-F0-9p_]+)?|0b[01_]+|0o[0-7_]+)\\b",relevance:0};return 
t.contains=[r],{name:"Swift",keywords:i,contains:[a,e.C_LINE_COMMENT_MODE,n,{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*[!?]"},{className:"type",begin:"\\b[A-Z][\\wÀ-ʸ']*",relevance:0},r,{className:"function",beginKeywords:"func",end:"{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/}),{begin://},{className:"params",begin:/\(/,end:/\)/,endsParent:!0,keywords:i,contains:["self",r,a,e.C_BLOCK_COMMENT_MODE,{begin:":"}],illegal:/["']/}],illegal:/\[|%/},{className:"class",beginKeywords:"struct protocol class extension enum",keywords:i,end:"\\{",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/})]},{className:"meta",begin:"(@discardableResult|@warn_unused_result|@exported|@lazy|@noescape|@NSCopying|@NSManaged|@objc|@objcMembers|@convention|@required|@noreturn|@IBAction|@IBDesignable|@IBInspectable|@IBOutlet|@infix|@prefix|@postfix|@autoclosure|@testable|@available|@nonobjc|@NSApplicationMain|@UIApplicationMain|@dynamicMemberLookup|@propertyWrapper)\\b"},{beginKeywords:"import",end:/$/,contains:[e.C_LINE_COMMENT_MODE,n]}]}}}()); +hljs.registerLanguage("typescript",function(){"use strict";const 
e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={$pattern:"[A-Za-z$_][0-9A-Za-z$_]*",keyword:e.concat(["type","namespace","typedef","interface","public","private","protected","implements","declare","abstract","readonly"]).join(" "),literal:n.join(" "),built_in:a.concat(["any","void","number","boolean","string","object","never","enum"]).join(" 
")},s={className:"meta",begin:"@[A-Za-z$_][0-9A-Za-z$_]*"},i={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:r.C_NUMBER_RE+"n?"}],relevance:0},o={className:"subst",begin:"\\$\\{",end:"\\}",keywords:t,contains:[]},c={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"xml"}},l={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"css"}},E={className:"string",begin:"`",end:"`",contains:[r.BACKSLASH_ESCAPE,o]};o.contains=[r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,i,r.REGEXP_MODE];var d={begin:"\\(",end:/\)/,keywords:t,contains:["self",r.QUOTE_STRING_MODE,r.APOS_STRING_MODE,r.NUMBER_MODE]},u={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,s,d]};return{name:"TypeScript",aliases:["ts"],keywords:t,contains:[r.SHEBANG(),{className:"meta",begin:/^\s*['"]use strict['"]/},r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,i,{begin:"("+r.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw 
case",contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,r.REGEXP_MODE,{className:"function",begin:"(\\([^(]*(\\([^(]*(\\([^(]*\\))?\\))?\\)|"+r.UNDERSCORE_IDENT_RE+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:r.UNDERSCORE_IDENT_RE},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:d.contains}]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/[\{;]/,excludeEnd:!0,keywords:t,contains:["self",r.inherit(r.TITLE_MODE,{begin:"[A-Za-z$_][0-9A-Za-z$_]*"}),u],illegal:/%/,relevance:0},{beginKeywords:"constructor",end:/[\{;]/,excludeEnd:!0,contains:["self",u]},{begin:/module\./,keywords:{built_in:"module"},relevance:0},{beginKeywords:"module",end:/\{/,excludeEnd:!0},{beginKeywords:"interface",end:/\{/,excludeEnd:!0,keywords:"interface extends"},{begin:/\$[(.]/},{begin:"\\."+r.IDENT_RE,relevance:0},s,d]}}}()); +hljs.registerLanguage("yaml",function(){"use strict";return function(e){var n="true false yes no null",a="[\\w#;/?:@&=+$,.~*\\'()[\\]]+",s={className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable",variants:[{begin:"{{",end:"}}"},{begin:"%{",end:"}"}]}]},i=e.inherit(s,{variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),l={end:",",endsWithParent:!0,excludeEnd:!0,contains:[],keywords:n,relevance:0},t={begin:"{",end:"}",contains:[l],illegal:"\\n",relevance:0},g={begin:"\\[",end:"\\]",contains:[l],illegal:"\\n",relevance:0},b=[{className:"attr",variants:[{begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---s*$",relevance:10},{className:"string",begin:"[\\|>]([0-9]?[+-])?[ ]*\\n( *)[\\S ]+\\n(\\2[\\S 
]+\\n?)*"},{begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:"!\\w+!"+a},{className:"type",begin:"!<"+a+">"},{className:"type",begin:"!"+a},{className:"type",begin:"!!"+a},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta",begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"\\-(?=[ ]|$)",relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{className:"number",begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b"},{className:"number",begin:e.C_NUMBER_RE+"\\b"},t,g,s],c=[...b];return c.pop(),c.push(i),l.contains=c,{name:"YAML",case_insensitive:!0,aliases:["yml","YAML"],contains:b}}}()); +hljs.registerLanguage("armasm",function(){"use strict";return function(s){const e={variants:[s.COMMENT("^[ \\t]*(?=#)","$",{relevance:0,excludeBegin:!0}),s.COMMENT("[;@]","$",{relevance:0}),s.C_LINE_COMMENT_MODE,s.C_BLOCK_COMMENT_MODE]};return{name:"ARM Assembly",case_insensitive:!0,aliases:["arm"],keywords:{$pattern:"\\.?"+s.IDENT_RE,meta:".2byte .4byte .align .ascii .asciz .balign .byte .code .data .else .end .endif .endm .endr .equ .err .exitm .extern .global .hword .if .ifdef .ifndef .include .irp .long .macro .rept .req .section .set .skip .space .text .word .arm .thumb .code16 .code32 .force_thumb .thumb_func .ltorg ALIAS ALIGN ARM AREA ASSERT ATTR CN CODE CODE16 CODE32 COMMON CP DATA DCB DCD DCDU DCDO DCFD DCFDU DCI DCQ DCQU DCW DCWU DN ELIF ELSE END ENDFUNC ENDIF ENDP ENTRY EQU EXPORT EXPORTAS EXTERN FIELD FILL FUNCTION GBLA GBLL GBLS GET GLOBAL IF IMPORT INCBIN INCLUDE INFO KEEP LCLA LCLL LCLS LTORG MACRO MAP MEND MEXIT NOFP OPT PRESERVE8 PROC QN READONLY RELOC REQUIRE REQUIRE8 RLIST FN ROUT SETA SETL SETS SN SPACE SUBT THUMB THUMBX TTL WHILE WEND ",built_in:"r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 r13 r14 r15 pc lr sp ip sl sb fp a1 a2 a3 a4 v1 v2 v3 v4 v5 v6 v7 v8 f0 f1 
f2 f3 f4 f5 f6 f7 p0 p1 p2 p3 p4 p5 p6 p7 p8 p9 p10 p11 p12 p13 p14 p15 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 q0 q1 q2 q3 q4 q5 q6 q7 q8 q9 q10 q11 q12 q13 q14 q15 cpsr_c cpsr_x cpsr_s cpsr_f cpsr_cx cpsr_cxs cpsr_xs cpsr_xsf cpsr_sf cpsr_cxsf spsr_c spsr_x spsr_s spsr_f spsr_cx spsr_cxs spsr_xs spsr_xsf spsr_sf spsr_cxsf s0 s1 s2 s3 s4 s5 s6 s7 s8 s9 s10 s11 s12 s13 s14 s15 s16 s17 s18 s19 s20 s21 s22 s23 s24 s25 s26 s27 s28 s29 s30 s31 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 d10 d11 d12 d13 d14 d15 d16 d17 d18 d19 d20 d21 d22 d23 d24 d25 d26 d27 d28 d29 d30 d31 {PC} {VAR} {TRUE} {FALSE} {OPT} {CONFIG} {ENDIAN} {CODESIZE} {CPU} {FPU} {ARCHITECTURE} {PCSTOREOFFSET} {ARMASM_VERSION} {INTER} {ROPI} {RWPI} {SWST} {NOSWST} . @"},contains:[{className:"keyword",begin:"\\b(adc|(qd?|sh?|u[qh]?)?add(8|16)?|usada?8|(q|sh?|u[qh]?)?(as|sa)x|and|adrl?|sbc|rs[bc]|asr|b[lx]?|blx|bxj|cbn?z|tb[bh]|bic|bfc|bfi|[su]bfx|bkpt|cdp2?|clz|clrex|cmp|cmn|cpsi[ed]|cps|setend|dbg|dmb|dsb|eor|isb|it[te]{0,3}|lsl|lsr|ror|rrx|ldm(([id][ab])|f[ds])?|ldr((s|ex)?[bhd])?|movt?|mvn|mra|mar|mul|[us]mull|smul[bwt][bt]|smu[as]d|smmul|smmla|mla|umlaal|smlal?([wbt][bt]|d)|mls|smlsl?[ds]|smc|svc|sev|mia([bt]{2}|ph)?|mrr?c2?|mcrr2?|mrs|msr|orr|orn|pkh(tb|bt)|rbit|rev(16|sh)?|sel|[su]sat(16)?|nop|pop|push|rfe([id][ab])?|stm([id][ab])?|str(ex)?[bhd]?|(qd?)?sub|(sh?|q|u[qh]?)?sub(8|16)|[su]xt(a?h|a?b(16)?)|srs([id][ab])?|swpb?|swi|smi|tst|teq|wfe|wfi|yield)(eq|ne|cs|cc|mi|pl|vs|vc|hi|ls|ge|lt|gt|le|al|hs|lo)?[sptrx]?(?=\\s)"},e,s.QUOTE_STRING_MODE,{className:"string",begin:"'",end:"[^\\\\]'",relevance:0},{className:"title",begin:"\\|",end:"\\|",illegal:"\\n",relevance:0},{className:"number",variants:[{begin:"[#$=]?0x[0-9a-f]+"},{begin:"[#$=]?0b[01]+"},{begin:"[#$=]\\d+"},{begin:"\\b\\d+"}],relevance:0},{className:"symbol",variants:[{begin:"^[ \\t]*[a-z_\\.\\$][a-z0-9_\\.\\$]+:"},{begin:"^[a-z_\\.\\$][a-z0-9_\\.\\$]+"},{begin:"[=#]\\w+"}],relevance:0}]}}}()); +hljs.registerLanguage("d",function(){"use 
strict";return function(e){var a={$pattern:e.UNDERSCORE_IDENT_RE,keyword:"abstract alias align asm assert auto body break byte case cast catch class const continue debug default delete deprecated do else enum export extern final finally for foreach foreach_reverse|10 goto if immutable import in inout int interface invariant is lazy macro mixin module new nothrow out override package pragma private protected public pure ref return scope shared static struct super switch synchronized template this throw try typedef typeid typeof union unittest version void volatile while with __FILE__ __LINE__ __gshared|10 __thread __traits __DATE__ __EOF__ __TIME__ __TIMESTAMP__ __VENDOR__ __VERSION__",built_in:"bool cdouble cent cfloat char creal dchar delegate double dstring float function idouble ifloat ireal long real short string ubyte ucent uint ulong ushort wchar wstring",literal:"false null true"},d="((0|[1-9][\\d_]*)|0[bB][01_]+|0[xX]([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*))",n="\\\\(['\"\\?\\\\abfnrtv]|u[\\dA-Fa-f]{4}|[0-7]{1,3}|x[\\dA-Fa-f]{2}|U[\\dA-Fa-f]{8})|&[a-zA-Z\\d]{2,};",t={className:"number",begin:"\\b"+d+"(L|u|U|Lu|LU|uL|UL)?",relevance:0},_={className:"number",begin:"\\b(((0[xX](([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*)\\.([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*)|\\.?([\\da-fA-F][\\da-fA-F_]*|_[\\da-fA-F][\\da-fA-F_]*))[pP][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d))|((0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)(\\.\\d*|([eE][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)))|\\d+\\.(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d)|\\.(0|[1-9][\\d_]*)([eE][+-]?(0|[1-9][\\d_]*|\\d[\\d_]*|[\\d_]+?\\d))?))([fF]|L|i|[fF]i|Li)?|"+d+"(i|[fF]i|Li))",relevance:0},r={className:"string",begin:"'("+n+"|.)",end:"'",illegal:"."},i={className:"string",begin:'"',contains:[{begin:n,relevance:0}],end:'"[cwd]?'},s=e.COMMENT("\\/\\+","\\+\\/",{contains:["self"],relevance:10});return{name:"D",keywords:a,contains:[e.C_LINE_COMMEN
T_MODE,e.C_BLOCK_COMMENT_MODE,s,{className:"string",begin:'x"[\\da-fA-F\\s\\n\\r]*"[cwd]?',relevance:10},i,{className:"string",begin:'[rq]"',end:'"[cwd]?',relevance:5},{className:"string",begin:"`",end:"`[cwd]?"},{className:"string",begin:'q"\\{',end:'\\}"'},_,t,r,{className:"meta",begin:"^#!",end:"$",relevance:5},{className:"meta",begin:"#(line)",end:"$",relevance:5},{className:"keyword",begin:"@[a-zA-Z_][a-zA-Z_\\d]*"}]}}}()); +hljs.registerLanguage("handlebars",function(){"use strict";function e(...e){return e.map(e=>(function(e){return e?"string"==typeof e?e:e.source:null})(e)).join("")}return function(n){const a={"builtin-name":"action bindattr collection component concat debugger each each-in get hash if in input link-to loc log lookup mut outlet partial query-params render template textarea unbound unless view with yield"},t=/\[.*?\]/,s=/[^\s!"#%&'()*+,.\/;<=>@\[\\\]^`{|}~]+/,i=e("(",/'.*?'/,"|",/".*?"/,"|",t,"|",s,"|",/\.|\//,")+"),r=e("(",t,"|",s,")(?==)"),l={begin:i,lexemes:/[\w.\/]+/},c=n.inherit(l,{keywords:{literal:"true false undefined null"}}),o={begin:/\(/,end:/\)/},m={className:"attr",begin:r,relevance:0,starts:{begin:/=/,end:/=/,starts:{contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,c,o]}}},d={contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,{begin:/as\s+\|/,keywords:{keyword:"as"},end:/\|/,contains:[{begin:/\w+/}]},m,c,o],returnEnd:!0},g=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/\)/})});o.contains=[g];const 
u=n.inherit(l,{keywords:a,className:"name",starts:n.inherit(d,{end:/}}/})}),b=n.inherit(l,{keywords:a,className:"name"}),h=n.inherit(l,{className:"name",keywords:a,starts:n.inherit(d,{end:/}}/})});return{name:"Handlebars",aliases:["hbs","html.hbs","html.handlebars","htmlbars"],case_insensitive:!0,subLanguage:"xml",contains:[{begin:/\\\{\{/,skip:!0},{begin:/\\\\(?=\{\{)/,skip:!0},n.COMMENT(/\{\{!--/,/--\}\}/),n.COMMENT(/\{\{!/,/\}\}/),{className:"template-tag",begin:/\{\{\{\{(?!\/)/,end:/\}\}\}\}/,contains:[u],starts:{end:/\{\{\{\{\//,returnEnd:!0,subLanguage:"xml"}},{className:"template-tag",begin:/\{\{\{\{\//,end:/\}\}\}\}/,contains:[b]},{className:"template-tag",begin:/\{\{#/,end:/\}\}/,contains:[u]},{className:"template-tag",begin:/\{\{(?=else\}\})/,end:/\}\}/,keywords:"else"},{className:"template-tag",begin:/\{\{\//,end:/\}\}/,contains:[b]},{className:"template-variable",begin:/\{\{\{/,end:/\}\}\}/,contains:[h]},{className:"template-variable",begin:/\{\{/,end:/\}\}/,contains:[h]}]}}}()); +hljs.registerLanguage("haskell",function(){"use strict";return function(e){var n={variants:[e.COMMENT("--","$"),e.COMMENT("{-","-}",{contains:["self"]})]},i={className:"meta",begin:"{-#",end:"#-}"},a={className:"meta",begin:"^#",end:"$"},s={className:"type",begin:"\\b[A-Z][\\w']*",relevance:0},l={begin:"\\(",end:"\\)",illegal:'"',contains:[i,a,{className:"type",begin:"\\b[A-Z][\\w]*(\\((\\.\\.|,|\\w+)\\))?"},e.inherit(e.TITLE_MODE,{begin:"[_a-z][\\w']*"}),n]};return{name:"Haskell",aliases:["hs"],keywords:"let in if then else case of where do module import hiding qualified type data newtype deriving class instance as default infix infixl infixr foreign export ccall stdcall cplusplus jvm dotnet safe unsafe family forall mdo proc rec",contains:[{beginKeywords:"module",end:"where",keywords:"module where",contains:[l,n],illegal:"\\W\\.|;"},{begin:"\\bimport\\b",end:"$",keywords:"import qualified as 
hiding",contains:[l,n],illegal:"\\W\\.|;"},{className:"class",begin:"^(\\s*)?(class|instance)\\b",end:"where",keywords:"class family instance where",contains:[s,l,n]},{className:"class",begin:"\\b(data|(new)?type)\\b",end:"$",keywords:"data family type newtype deriving",contains:[i,s,l,{begin:"{",end:"}",contains:l.contains},n]},{beginKeywords:"default",end:"$",contains:[s,l,n]},{beginKeywords:"infix infixl infixr",end:"$",contains:[e.C_NUMBER_MODE,n]},{begin:"\\bforeign\\b",end:"$",keywords:"foreign import export ccall stdcall cplusplus jvm dotnet safe unsafe",contains:[s,e.QUOTE_STRING_MODE,n]},{className:"meta",begin:"#!\\/usr\\/bin\\/env runhaskell",end:"$"},i,a,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,s,e.inherit(e.TITLE_MODE,{begin:"^[_a-z][\\w']*"}),n,{begin:"->|<-"}]}}}()); +hljs.registerLanguage("julia",function(){"use strict";return function(e){var r="[A-Za-z_\\u00A1-\\uFFFF][A-Za-z_0-9\\u00A1-\\uFFFF]*",t={$pattern:r,keyword:"in isa where baremodule begin break catch ccall const continue do else elseif end export false finally for function global if import importall let local macro module quote return true try using while type immutable abstract bitstype typealias ",literal:"true false ARGS C_NULL DevNull ENDIAN_BOM ENV I Inf Inf16 Inf32 Inf64 InsertionSort JULIA_HOME LOAD_PATH MergeSort NaN NaN16 NaN32 NaN64 PROGRAM_FILE QuickSort RoundDown RoundFromZero RoundNearest RoundNearestTiesAway RoundNearestTiesUp RoundToZero RoundUp STDERR STDIN STDOUT VERSION catalan e|0 eu|0 eulergamma golden im nothing pi γ π φ ",built_in:"ANY AbstractArray AbstractChannel AbstractFloat AbstractMatrix AbstractRNG AbstractSerializer AbstractSet AbstractSparseArray AbstractSparseMatrix AbstractSparseVector AbstractString AbstractUnitRange AbstractVecOrMat AbstractVector Any ArgumentError Array AssertionError Associative Base64DecodePipe Base64EncodePipe Bidiagonal BigFloat BigInt BitArray BitMatrix BitVector Bool BoundsError BufferStream CachingPool CapturedException 
CartesianIndex CartesianRange Cchar Cdouble Cfloat Channel Char Cint Cintmax_t Clong Clonglong ClusterManager Cmd CodeInfo Colon Complex Complex128 Complex32 Complex64 CompositeException Condition ConjArray ConjMatrix ConjVector Cptrdiff_t Cshort Csize_t Cssize_t Cstring Cuchar Cuint Cuintmax_t Culong Culonglong Cushort Cwchar_t Cwstring DataType Date DateFormat DateTime DenseArray DenseMatrix DenseVecOrMat DenseVector Diagonal Dict DimensionMismatch Dims DirectIndexString Display DivideError DomainError EOFError EachLine Enum Enumerate ErrorException Exception ExponentialBackOff Expr Factorization FileMonitor Float16 Float32 Float64 Function Future GlobalRef GotoNode HTML Hermitian IO IOBuffer IOContext IOStream IPAddr IPv4 IPv6 IndexCartesian IndexLinear IndexStyle InexactError InitError Int Int128 Int16 Int32 Int64 Int8 IntSet Integer InterruptException InvalidStateException Irrational KeyError LabelNode LinSpace LineNumberNode LoadError LowerTriangular MIME Matrix MersenneTwister Method MethodError MethodTable Module NTuple NewvarNode NullException Nullable Number ObjectIdDict OrdinalRange OutOfMemoryError OverflowError Pair ParseError PartialQuickSort PermutedDimsArray Pipe PollingFileWatcher ProcessExitedException Ptr QuoteNode RandomDevice Range RangeIndex Rational RawFD ReadOnlyMemoryError Real ReentrantLock Ref Regex RegexMatch RemoteChannel RemoteException RevString RoundingMode RowVector SSAValue SegmentationFault SerializationState Set SharedArray SharedMatrix SharedVector Signed SimpleVector Slot SlotNumber SparseMatrixCSC SparseVector StackFrame StackOverflowError StackTrace StepRange StepRangeLen StridedArray StridedMatrix StridedVecOrMat StridedVector String SubArray SubString SymTridiagonal Symbol Symmetric SystemError TCPSocket Task Text TextDisplay Timer Tridiagonal Tuple Type TypeError TypeMapEntry TypeMapLevel TypeName TypeVar TypedSlot UDPSocket UInt UInt128 UInt16 UInt32 UInt64 UInt8 UndefRefError UndefVarError UnicodeError UniformScaling 
Union UnionAll UnitRange Unsigned UpperTriangular Val Vararg VecElement VecOrMat Vector VersionNumber Void WeakKeyDict WeakRef WorkerConfig WorkerPool "},a={keywords:t,illegal:/<\//},n={className:"subst",begin:/\$\(/,end:/\)/,keywords:t},o={className:"variable",begin:"\\$"+r},i={className:"string",contains:[e.BACKSLASH_ESCAPE,n,o],variants:[{begin:/\w*"""/,end:/"""\w*/,relevance:10},{begin:/\w*"/,end:/"\w*/}]},l={className:"string",contains:[e.BACKSLASH_ESCAPE,n,o],begin:"`",end:"`"},s={className:"meta",begin:"@"+r};return a.name="Julia",a.contains=[{className:"number",begin:/(\b0x[\d_]*(\.[\d_]*)?|0x\.\d[\d_]*)p[-+]?\d+|\b0[box][a-fA-F0-9][a-fA-F0-9_]*|(\b\d[\d_]*(\.[\d_]*)?|\.\d[\d_]*)([eEfF][-+]?\d+)?/,relevance:0},{className:"string",begin:/'(.|\\[xXuU][a-zA-Z0-9]+)'/},i,l,s,{className:"comment",variants:[{begin:"#=",end:"=#",relevance:10},{begin:"#",end:"$"}]},e.HASH_COMMENT_MODE,{className:"keyword",begin:"\\b(((abstract|primitive)\\s+)type|(mutable\\s+)?struct)\\b"},{begin:/<:/}],n.contains=a.contains,a}}()); +hljs.registerLanguage("nim",function(){"use strict";return function(e){return{name:"Nim",aliases:["nim"],keywords:{keyword:"addr and as asm bind block break case cast const continue converter discard distinct div do elif else end enum except export finally for from func generic if import in include interface is isnot iterator let macro method mixin mod nil not notin object of or out proc ptr raise ref return shl shr static template try tuple type using var when while with without xor yield",literal:"shared guarded stdin stdout stderr result true false",built_in:"int int8 int16 int32 int64 uint uint8 uint16 uint32 uint64 float float32 float64 bool char string cstring pointer expr stmt void auto any range array openarray varargs seq set clong culong cchar cschar cshort cint csize clonglong cfloat cdouble clongdouble cuchar cushort cuint culonglong cstringarray 
semistatic"},contains:[{className:"meta",begin:/{\./,end:/\.}/,relevance:10},{className:"string",begin:/[a-zA-Z]\w*"/,end:/"/,contains:[{begin:/""/}]},{className:"string",begin:/([a-zA-Z]\w*)?"""/,end:/"""/},e.QUOTE_STRING_MODE,{className:"type",begin:/\b[A-Z]\w+\b/,relevance:0},{className:"number",relevance:0,variants:[{begin:/\b(0[xX][0-9a-fA-F][_0-9a-fA-F]*)('?[iIuU](8|16|32|64))?/},{begin:/\b(0o[0-7][_0-7]*)('?[iIuUfF](8|16|32|64))?/},{begin:/\b(0(b|B)[01][_01]*)('?[iIuUfF](8|16|32|64))?/},{begin:/\b(\d[_\d]*)('?[iIuUfF](8|16|32|64))?/}]},e.HASH_COMMENT_MODE]}}}()); +hljs.registerLanguage("r",function(){"use strict";return function(e){var n="([a-zA-Z]|\\.[a-zA-Z.])[a-zA-Z0-9._]*";return{name:"R",contains:[e.HASH_COMMENT_MODE,{begin:n,keywords:{$pattern:n,keyword:"function if in break next repeat else for return switch while try tryCatch stop warning require library attach detach source setMethod setGeneric setGroupGeneric setClass ...",literal:"NULL NA TRUE FALSE T F Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 NA_complex_|10"},relevance:0},{className:"number",begin:"0[xX][0-9a-fA-F]+[Li]?\\b",relevance:0},{className:"number",begin:"\\d+(?:[eE][+\\-]?\\d*)?L\\b",relevance:0},{className:"number",begin:"\\d+\\.(?!\\d)(?:i\\b)?",relevance:0},{className:"number",begin:"\\d+(?:\\.\\d*)?(?:[eE][+\\-]?\\d*)?i?\\b",relevance:0},{className:"number",begin:"\\.\\d+(?:[eE][+\\-]?\\d*)?i?\\b",relevance:0},{begin:"`",end:"`",relevance:0},{className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:'"',end:'"'},{begin:"'",end:"'"}]}]}}}()); +hljs.registerLanguage("scala",function(){"use strict";return function(e){var 
n={className:"subst",variants:[{begin:"\\$[A-Za-z0-9_]+"},{begin:"\\${",end:"}"}]},a={className:"string",variants:[{begin:'"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:'"""',end:'"""',relevance:10},{begin:'[a-z]+"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE,n]},{className:"string",begin:'[a-z]+"""',end:'"""',contains:[n],relevance:10}]},s={className:"type",begin:"\\b[A-Z][A-Za-z0-9_]*",relevance:0},t={className:"title",begin:/[^0-9\n\t "'(),.`{}\[\]:;][^\n\t "'(),.`{}\[\]:;]+|[^0-9\n\t "'(),.`{}\[\]:;=]/,relevance:0},i={className:"class",beginKeywords:"class object trait type",end:/[:={\[\n;]/,excludeEnd:!0,contains:[{beginKeywords:"extends with",relevance:10},{begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},t]},l={className:"function",beginKeywords:"def",end:/[:={\[(\n;]/,excludeEnd:!0,contains:[t]};return{name:"Scala",keywords:{literal:"true false null",keyword:"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit"},contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,{className:"symbol",begin:"'\\w[\\w\\d_]*(?!')"},s,l,i,e.C_NUMBER_MODE,{className:"meta",begin:"@[A-Za-z]+"}]}}}()); +hljs.registerLanguage("x86asm",function(){"use strict";return function(s){return{name:"Intel x86 Assembly",case_insensitive:!0,keywords:{$pattern:"[.%]?"+s.IDENT_RE,keyword:"lock rep repe repz repne repnz xaquire xrelease bnd nobnd aaa aad aam aas adc add and arpl bb0_reset bb1_reset bound bsf bsr bswap bt btc btr bts call cbw cdq cdqe clc cld cli clts cmc cmp cmpsb cmpsd cmpsq cmpsw cmpxchg cmpxchg486 cmpxchg8b cmpxchg16b cpuid cpu_read cpu_write cqo cwd cwde daa das dec div dmint emms enter equ f2xm1 fabs fadd faddp fbld fbstp fchs fclex 
fcmovb fcmovbe fcmove fcmovnb fcmovnbe fcmovne fcmovnu fcmovu fcom fcomi fcomip fcomp fcompp fcos fdecstp fdisi fdiv fdivp fdivr fdivrp femms feni ffree ffreep fiadd ficom ficomp fidiv fidivr fild fimul fincstp finit fist fistp fisttp fisub fisubr fld fld1 fldcw fldenv fldl2e fldl2t fldlg2 fldln2 fldpi fldz fmul fmulp fnclex fndisi fneni fninit fnop fnsave fnstcw fnstenv fnstsw fpatan fprem fprem1 fptan frndint frstor fsave fscale fsetpm fsin fsincos fsqrt fst fstcw fstenv fstp fstsw fsub fsubp fsubr fsubrp ftst fucom fucomi fucomip fucomp fucompp fxam fxch fxtract fyl2x fyl2xp1 hlt ibts icebp idiv imul in inc incbin insb insd insw int int01 int1 int03 int3 into invd invpcid invlpg invlpga iret iretd iretq iretw jcxz jecxz jrcxz jmp jmpe lahf lar lds lea leave les lfence lfs lgdt lgs lidt lldt lmsw loadall loadall286 lodsb lodsd lodsq lodsw loop loope loopne loopnz loopz lsl lss ltr mfence monitor mov movd movq movsb movsd movsq movsw movsx movsxd movzx mul mwait neg nop not or out outsb outsd outsw packssdw packsswb packuswb paddb paddd paddsb paddsiw paddsw paddusb paddusw paddw pand pandn pause paveb pavgusb pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pdistib pf2id pfacc pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp pfrcpit1 pfrcpit2 pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmachriw pmaddwd pmagw pmulhriw pmulhrwa pmulhrwc pmulhw pmullw pmvgezb pmvlzb pmvnzb pmvzb pop popa popad popaw popf popfd popfq popfw por prefetch prefetchw pslld psllq psllw psrad psraw psrld psrlq psrlw psubb psubd psubsb psubsiw psubsw psubusb psubusw psubw punpckhbw punpckhdq punpckhwd punpcklbw punpckldq punpcklwd push pusha pushad pushaw pushf pushfd pushfq pushfw pxor rcl rcr rdshr rdmsr rdpmc rdtsc rdtscp ret retf retn rol ror rdm rsdc rsldt rsm rsts sahf sal salc sar sbb scasb scasd scasq scasw sfence sgdt shl shld shr shrd sidt sldt skinit smi smint smintold smsw stc std sti stosb stosd stosq stosw str sub svdc svldt svts swapgs syscall sysenter sysexit sysret test ud0 ud1 
ud2b ud2 ud2a umov verr verw fwait wbinvd wrshr wrmsr xadd xbts xchg xlatb xlat xor cmove cmovz cmovne cmovnz cmova cmovnbe cmovae cmovnb cmovb cmovnae cmovbe cmovna cmovg cmovnle cmovge cmovnl cmovl cmovnge cmovle cmovng cmovc cmovnc cmovo cmovno cmovs cmovns cmovp cmovpe cmovnp cmovpo je jz jne jnz ja jnbe jae jnb jb jnae jbe jna jg jnle jge jnl jl jnge jle jng jc jnc jo jno js jns jpo jnp jpe jp sete setz setne setnz seta setnbe setae setnb setnc setb setnae setcset setbe setna setg setnle setge setnl setl setnge setle setng sets setns seto setno setpe setp setpo setnp addps addss andnps andps cmpeqps cmpeqss cmpleps cmpless cmpltps cmpltss cmpneqps cmpneqss cmpnleps cmpnless cmpnltps cmpnltss cmpordps cmpordss cmpunordps cmpunordss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhps movlhps movlps movhlps movmskps movntps movss movups mulps mulss orps rcpps rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss unpckhps unpcklps xorps fxrstor fxrstor64 fxsave fxsave64 xgetbv xsetbv xsave xsave64 xsaveopt xsaveopt64 xrstor xrstor64 prefetchnta prefetcht0 prefetcht1 prefetcht2 maskmovq movntq pavgb pavgw pextrw pinsrw pmaxsw pmaxub pminsw pminub pmovmskb pmulhuw psadbw pshufw pf2iw pfnacc pfpnacc pi2fw pswapd maskmovdqu clflush movntdq movnti movntpd movdqa movdqu movdq2q movq2dq paddq pmuludq pshufd pshufhw pshuflw pslldq psrldq psubq punpckhqdq punpcklqdq addpd addsd andnpd andpd cmpeqpd cmpeqsd cmplepd cmplesd cmpltpd cmpltsd cmpneqpd cmpneqsd cmpnlepd cmpnlesd cmpnltpd cmpnltsd cmpordpd cmpordsd cmpunordpd cmpunordsd cmppd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi cvtpd2ps cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd cvttpd2pi cvttpd2dq cvttps2dq cvttsd2si divpd divsd maxpd maxsd minpd minsd movapd movhpd movlpd movmskpd movupd mulpd mulsd orpd shufpd sqrtpd sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd addsubpd addsubps haddpd haddps 
hsubpd hsubps lddqu movddup movshdup movsldup clgi stgi vmcall vmclear vmfunc vmlaunch vmload vmmcall vmptrld vmptrst vmread vmresume vmrun vmsave vmwrite vmxoff vmxon invept invvpid pabsb pabsw pabsd palignr phaddw phaddd phaddsw phsubw phsubd phsubsw pmaddubsw pmulhrsw pshufb psignb psignw psignd extrq insertq movntsd movntss lzcnt blendpd blendps blendvpd blendvps dppd dpps extractps insertps movntdqa mpsadbw packusdw pblendvb pblendw pcmpeqq pextrb pextrd pextrq phminposuw pinsrb pinsrd pinsrq pmaxsb pmaxsd pmaxud pmaxuw pminsb pminsd pminud pminuw pmovsxbw pmovsxbd pmovsxbq pmovsxwd pmovsxwq pmovsxdq pmovzxbw pmovzxbd pmovzxbq pmovzxwd pmovzxwq pmovzxdq pmuldq pmulld ptest roundpd roundps roundsd roundss crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq popcnt getsec pfrcpv pfrsqrtv movbe aesenc aesenclast aesdec aesdeclast aesimc aeskeygenassist vaesenc vaesenclast vaesdec vaesdeclast vaesimc vaeskeygenassist vaddpd vaddps vaddsd vaddss vaddsubpd vaddsubps vandpd vandps vandnpd vandnps vblendpd vblendps vblendvpd vblendvps vbroadcastss vbroadcastsd vbroadcastf128 vcmpeq_ospd vcmpeqpd vcmplt_ospd vcmpltpd vcmple_ospd vcmplepd vcmpunord_qpd vcmpunordpd vcmpneq_uqpd vcmpneqpd vcmpnlt_uspd vcmpnltpd vcmpnle_uspd vcmpnlepd vcmpord_qpd vcmpordpd vcmpeq_uqpd vcmpnge_uspd vcmpngepd vcmpngt_uspd vcmpngtpd vcmpfalse_oqpd vcmpfalsepd vcmpneq_oqpd vcmpge_ospd vcmpgepd vcmpgt_ospd vcmpgtpd vcmptrue_uqpd vcmptruepd vcmplt_oqpd vcmple_oqpd vcmpunord_spd vcmpneq_uspd vcmpnlt_uqpd vcmpnle_uqpd vcmpord_spd vcmpeq_uspd vcmpnge_uqpd vcmpngt_uqpd vcmpfalse_ospd vcmpneq_ospd vcmpge_oqpd vcmpgt_oqpd vcmptrue_uspd vcmppd vcmpeq_osps vcmpeqps vcmplt_osps vcmpltps vcmple_osps vcmpleps vcmpunord_qps vcmpunordps vcmpneq_uqps vcmpneqps vcmpnlt_usps vcmpnltps vcmpnle_usps vcmpnleps vcmpord_qps vcmpordps vcmpeq_uqps vcmpnge_usps vcmpngeps vcmpngt_usps vcmpngtps vcmpfalse_oqps vcmpfalseps vcmpneq_oqps vcmpge_osps vcmpgeps vcmpgt_osps vcmpgtps vcmptrue_uqps vcmptrueps vcmplt_oqps 
vcmple_oqps vcmpunord_sps vcmpneq_usps vcmpnlt_uqps vcmpnle_uqps vcmpord_sps vcmpeq_usps vcmpnge_uqps vcmpngt_uqps vcmpfalse_osps vcmpneq_osps vcmpge_oqps vcmpgt_oqps vcmptrue_usps vcmpps vcmpeq_ossd vcmpeqsd vcmplt_ossd vcmpltsd vcmple_ossd vcmplesd vcmpunord_qsd vcmpunordsd vcmpneq_uqsd vcmpneqsd vcmpnlt_ussd vcmpnltsd vcmpnle_ussd vcmpnlesd vcmpord_qsd vcmpordsd vcmpeq_uqsd vcmpnge_ussd vcmpngesd vcmpngt_ussd vcmpngtsd vcmpfalse_oqsd vcmpfalsesd vcmpneq_oqsd vcmpge_ossd vcmpgesd vcmpgt_ossd vcmpgtsd vcmptrue_uqsd vcmptruesd vcmplt_oqsd vcmple_oqsd vcmpunord_ssd vcmpneq_ussd vcmpnlt_uqsd vcmpnle_uqsd vcmpord_ssd vcmpeq_ussd vcmpnge_uqsd vcmpngt_uqsd vcmpfalse_ossd vcmpneq_ossd vcmpge_oqsd vcmpgt_oqsd vcmptrue_ussd vcmpsd vcmpeq_osss vcmpeqss vcmplt_osss vcmpltss vcmple_osss vcmpless vcmpunord_qss vcmpunordss vcmpneq_uqss vcmpneqss vcmpnlt_usss vcmpnltss vcmpnle_usss vcmpnless vcmpord_qss vcmpordss vcmpeq_uqss vcmpnge_usss vcmpngess vcmpngt_usss vcmpngtss vcmpfalse_oqss vcmpfalsess vcmpneq_oqss vcmpge_osss vcmpgess vcmpgt_osss vcmpgtss vcmptrue_uqss vcmptruess vcmplt_oqss vcmple_oqss vcmpunord_sss vcmpneq_usss vcmpnlt_uqss vcmpnle_uqss vcmpord_sss vcmpeq_usss vcmpnge_uqss vcmpngt_uqss vcmpfalse_osss vcmpneq_osss vcmpge_oqss vcmpgt_oqss vcmptrue_usss vcmpss vcomisd vcomiss vcvtdq2pd vcvtdq2ps vcvtpd2dq vcvtpd2ps vcvtps2dq vcvtps2pd vcvtsd2si vcvtsd2ss vcvtsi2sd vcvtsi2ss vcvtss2sd vcvtss2si vcvttpd2dq vcvttps2dq vcvttsd2si vcvttss2si vdivpd vdivps vdivsd vdivss vdppd vdpps vextractf128 vextractps vhaddpd vhaddps vhsubpd vhsubps vinsertf128 vinsertps vlddqu vldqqu vldmxcsr vmaskmovdqu vmaskmovps vmaskmovpd vmaxpd vmaxps vmaxsd vmaxss vminpd vminps vminsd vminss vmovapd vmovaps vmovd vmovq vmovddup vmovdqa vmovqqa vmovdqu vmovqqu vmovhlps vmovhpd vmovhps vmovlhps vmovlpd vmovlps vmovmskpd vmovmskps vmovntdq vmovntqq vmovntdqa vmovntpd vmovntps vmovsd vmovshdup vmovsldup vmovss vmovupd vmovups vmpsadbw vmulpd vmulps vmulsd vmulss vorpd vorps vpabsb vpabsw vpabsd 
vpacksswb vpackssdw vpackuswb vpackusdw vpaddb vpaddw vpaddd vpaddq vpaddsb vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn vpavgb vpavgw vpblendvb vpblendw vpcmpestri vpcmpestrm vpcmpistri vpcmpistrm vpcmpeqb vpcmpeqw vpcmpeqd vpcmpeqq vpcmpgtb vpcmpgtw vpcmpgtd vpcmpgtq vpermilpd vpermilps vperm2f128 vpextrb vpextrw vpextrd vpextrq vphaddw vphaddd vphaddsw vphminposuw vphsubw vphsubd vphsubsw vpinsrb vpinsrw vpinsrd vpinsrq vpmaddwd vpmaddubsw vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb vpminsw vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd vpmovsxbq vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq vpmovzxwd vpmovzxwq vpmovzxdq vpmulhuw vpmulhrsw vpmulhw vpmullw vpmulld vpmuludq vpmuldq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb vpsignw vpsignd vpslldq vpsrldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrlw vpsrld vpsrlq vptest vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw vpunpckhbw vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq vpunpcklqdq vpxor vrcpps vrcpss vrsqrtps vrsqrtss vroundpd vroundps vroundsd vroundss vshufpd vshufps vsqrtpd vsqrtps vsqrtsd vsqrtss vstmxcsr vsubpd vsubps vsubsd vsubss vtestps vtestpd vucomisd vucomiss vunpckhpd vunpckhps vunpcklpd vunpcklps vxorpd vxorps vzeroall vzeroupper pclmullqlqdq pclmulhqlqdq pclmullqhqdq pclmulhqhqdq pclmulqdq vpclmullqlqdq vpclmulhqlqdq vpclmullqhqdq vpclmulhqhqdq vpclmulqdq vfmadd132ps vfmadd132pd vfmadd312ps vfmadd312pd vfmadd213ps vfmadd213pd vfmadd123ps vfmadd123pd vfmadd231ps vfmadd231pd vfmadd321ps vfmadd321pd vfmaddsub132ps vfmaddsub132pd vfmaddsub312ps vfmaddsub312pd vfmaddsub213ps vfmaddsub213pd vfmaddsub123ps vfmaddsub123pd vfmaddsub231ps vfmaddsub231pd vfmaddsub321ps vfmaddsub321pd vfmsub132ps vfmsub132pd vfmsub312ps vfmsub312pd vfmsub213ps vfmsub213pd vfmsub123ps vfmsub123pd vfmsub231ps vfmsub231pd vfmsub321ps vfmsub321pd vfmsubadd132ps vfmsubadd132pd vfmsubadd312ps vfmsubadd312pd vfmsubadd213ps vfmsubadd213pd 
vfmsubadd123ps vfmsubadd123pd vfmsubadd231ps vfmsubadd231pd vfmsubadd321ps vfmsubadd321pd vfnmadd132ps vfnmadd132pd vfnmadd312ps vfnmadd312pd vfnmadd213ps vfnmadd213pd vfnmadd123ps vfnmadd123pd vfnmadd231ps vfnmadd231pd vfnmadd321ps vfnmadd321pd vfnmsub132ps vfnmsub132pd vfnmsub312ps vfnmsub312pd vfnmsub213ps vfnmsub213pd vfnmsub123ps vfnmsub123pd vfnmsub231ps vfnmsub231pd vfnmsub321ps vfnmsub321pd vfmadd132ss vfmadd132sd vfmadd312ss vfmadd312sd vfmadd213ss vfmadd213sd vfmadd123ss vfmadd123sd vfmadd231ss vfmadd231sd vfmadd321ss vfmadd321sd vfmsub132ss vfmsub132sd vfmsub312ss vfmsub312sd vfmsub213ss vfmsub213sd vfmsub123ss vfmsub123sd vfmsub231ss vfmsub231sd vfmsub321ss vfmsub321sd vfnmadd132ss vfnmadd132sd vfnmadd312ss vfnmadd312sd vfnmadd213ss vfnmadd213sd vfnmadd123ss vfnmadd123sd vfnmadd231ss vfnmadd231sd vfnmadd321ss vfnmadd321sd vfnmsub132ss vfnmsub132sd vfnmsub312ss vfnmsub312sd vfnmsub213ss vfnmsub213sd vfnmsub123ss vfnmsub123sd vfnmsub231ss vfnmsub231sd vfnmsub321ss vfnmsub321sd rdfsbase rdgsbase rdrand wrfsbase wrgsbase vcvtph2ps vcvtps2ph adcx adox rdseed clac stac xstore xcryptecb xcryptcbc xcryptctr xcryptcfb xcryptofb montmul xsha1 xsha256 llwpcb slwpcb lwpval lwpins vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps vfmsubaddpd vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd vfnmaddss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss vfrczpd vfrczps vfrczsd vfrczss vpcmov vpcomb vpcomd vpcomq vpcomub vpcomud vpcomuq vpcomuw vpcomw vphaddbd vphaddbq vphaddbw vphadddq vphaddubd vphaddubq vphaddubw vphaddudq vphadduwd vphadduwq vphaddwd vphaddwq vphsubbw vphsubdq vphsubwd vpmacsdd vpmacsdqh vpmacsdql vpmacssdd vpmacssdqh vpmacssdql vpmacsswd vpmacssww vpmacswd vpmacsww vpmadcsswd vpmadcswd vpperm vprotb vprotd vprotq vprotw vpshab vpshad vpshaq vpshaw vpshlb vpshld vpshlq vpshlw vbroadcasti128 vpblendd vpbroadcastb vpbroadcastw vpbroadcastd vpbroadcastq vpermd vpermpd vpermps vpermq vperm2i128 vextracti128 vinserti128 
vpmaskmovd vpmaskmovq vpsllvd vpsllvq vpsravd vpsrlvd vpsrlvq vgatherdpd vgatherqpd vgatherdps vgatherqps vpgatherdd vpgatherqd vpgatherdq vpgatherqq xabort xbegin xend xtest andn bextr blci blcic blsi blsic blcfill blsfill blcmsk blsmsk blsr blcs bzhi mulx pdep pext rorx sarx shlx shrx tzcnt tzmsk t1mskc valignd valignq vblendmpd vblendmps vbroadcastf32x4 vbroadcastf64x4 vbroadcasti32x4 vbroadcasti64x4 vcompresspd vcompressps vcvtpd2udq vcvtps2udq vcvtsd2usi vcvtss2usi vcvttpd2udq vcvttps2udq vcvttsd2usi vcvttss2usi vcvtudq2pd vcvtudq2ps vcvtusi2sd vcvtusi2ss vexpandpd vexpandps vextractf32x4 vextractf64x4 vextracti32x4 vextracti64x4 vfixupimmpd vfixupimmps vfixupimmsd vfixupimmss vgetexppd vgetexpps vgetexpsd vgetexpss vgetmantpd vgetmantps vgetmantsd vgetmantss vinsertf32x4 vinsertf64x4 vinserti32x4 vinserti64x4 vmovdqa32 vmovdqa64 vmovdqu32 vmovdqu64 vpabsq vpandd vpandnd vpandnq vpandq vpblendmd vpblendmq vpcmpltd vpcmpled vpcmpneqd vpcmpnltd vpcmpnled vpcmpd vpcmpltq vpcmpleq vpcmpneqq vpcmpnltq vpcmpnleq vpcmpq vpcmpequd vpcmpltud vpcmpleud vpcmpnequd vpcmpnltud vpcmpnleud vpcmpud vpcmpequq vpcmpltuq vpcmpleuq vpcmpnequq vpcmpnltuq vpcmpnleuq vpcmpuq vpcompressd vpcompressq vpermi2d vpermi2pd vpermi2ps vpermi2q vpermt2d vpermt2pd vpermt2ps vpermt2q vpexpandd vpexpandq vpmaxsq vpmaxuq vpminsq vpminuq vpmovdb vpmovdw vpmovqb vpmovqd vpmovqw vpmovsdb vpmovsdw vpmovsqb vpmovsqd vpmovsqw vpmovusdb vpmovusdw vpmovusqb vpmovusqd vpmovusqw vpord vporq vprold vprolq vprolvd vprolvq vprord vprorq vprorvd vprorvq vpscatterdd vpscatterdq vpscatterqd vpscatterqq vpsraq vpsravq vpternlogd vpternlogq vptestmd vptestmq vptestnmd vptestnmq vpxord vpxorq vrcp14pd vrcp14ps vrcp14sd vrcp14ss vrndscalepd vrndscaleps vrndscalesd vrndscaless vrsqrt14pd vrsqrt14ps vrsqrt14sd vrsqrt14ss vscalefpd vscalefps vscalefsd vscalefss vscatterdpd vscatterdps vscatterqpd vscatterqps vshuff32x4 vshuff64x2 vshufi32x4 vshufi64x2 kandnw kandw kmovw knotw kortestw korw kshiftlw kshiftrw kunpckbw 
kxnorw kxorw vpbroadcastmb2q vpbroadcastmw2d vpconflictd vpconflictq vplzcntd vplzcntq vexp2pd vexp2ps vrcp28pd vrcp28ps vrcp28sd vrcp28ss vrsqrt28pd vrsqrt28ps vrsqrt28sd vrsqrt28ss vgatherpf0dpd vgatherpf0dps vgatherpf0qpd vgatherpf0qps vgatherpf1dpd vgatherpf1dps vgatherpf1qpd vgatherpf1qps vscatterpf0dpd vscatterpf0dps vscatterpf0qpd vscatterpf0qps vscatterpf1dpd vscatterpf1dps vscatterpf1qpd vscatterpf1qps prefetchwt1 bndmk bndcl bndcu bndcn bndmov bndldx bndstx sha1rnds4 sha1nexte sha1msg1 sha1msg2 sha256rnds2 sha256msg1 sha256msg2 hint_nop0 hint_nop1 hint_nop2 hint_nop3 hint_nop4 hint_nop5 hint_nop6 hint_nop7 hint_nop8 hint_nop9 hint_nop10 hint_nop11 hint_nop12 hint_nop13 hint_nop14 hint_nop15 hint_nop16 hint_nop17 hint_nop18 hint_nop19 hint_nop20 hint_nop21 hint_nop22 hint_nop23 hint_nop24 hint_nop25 hint_nop26 hint_nop27 hint_nop28 hint_nop29 hint_nop30 hint_nop31 hint_nop32 hint_nop33 hint_nop34 hint_nop35 hint_nop36 hint_nop37 hint_nop38 hint_nop39 hint_nop40 hint_nop41 hint_nop42 hint_nop43 hint_nop44 hint_nop45 hint_nop46 hint_nop47 hint_nop48 hint_nop49 hint_nop50 hint_nop51 hint_nop52 hint_nop53 hint_nop54 hint_nop55 hint_nop56 hint_nop57 hint_nop58 hint_nop59 hint_nop60 hint_nop61 hint_nop62 hint_nop63",built_in:"ip eip rip al ah bl bh cl ch dl dh sil dil bpl spl r8b r9b r10b r11b r12b r13b r14b r15b ax bx cx dx si di bp sp r8w r9w r10w r11w r12w r13w r14w r15w eax ebx ecx edx esi edi ebp esp eip r8d r9d r10d r11d r12d r13d r14d r15d rax rbx rcx rdx rsi rdi rbp rsp r8 r9 r10 r11 r12 r13 r14 r15 cs ds es fs gs ss st st0 st1 st2 st3 st4 st5 st6 st7 mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 xmm0 xmm1 xmm2 xmm3 xmm4 xmm5 xmm6 xmm7 xmm8 xmm9 xmm10 xmm11 xmm12 xmm13 xmm14 xmm15 xmm16 xmm17 xmm18 xmm19 xmm20 xmm21 xmm22 xmm23 xmm24 xmm25 xmm26 xmm27 xmm28 xmm29 xmm30 xmm31 ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 ymm7 ymm8 ymm9 ymm10 ymm11 ymm12 ymm13 ymm14 ymm15 ymm16 ymm17 ymm18 ymm19 ymm20 ymm21 ymm22 ymm23 ymm24 ymm25 ymm26 ymm27 ymm28 ymm29 ymm30 ymm31 zmm0 zmm1 zmm2 
zmm3 zmm4 zmm5 zmm6 zmm7 zmm8 zmm9 zmm10 zmm11 zmm12 zmm13 zmm14 zmm15 zmm16 zmm17 zmm18 zmm19 zmm20 zmm21 zmm22 zmm23 zmm24 zmm25 zmm26 zmm27 zmm28 zmm29 zmm30 zmm31 k0 k1 k2 k3 k4 k5 k6 k7 bnd0 bnd1 bnd2 bnd3 cr0 cr1 cr2 cr3 cr4 cr8 dr0 dr1 dr2 dr3 dr8 tr3 tr4 tr5 tr6 tr7 r0 r1 r2 r3 r4 r5 r6 r7 r0b r1b r2b r3b r4b r5b r6b r7b r0w r1w r2w r3w r4w r5w r6w r7w r0d r1d r2d r3d r4d r5d r6d r7d r0h r1h r2h r3h r0l r1l r2l r3l r4l r5l r6l r7l r8l r9l r10l r11l r12l r13l r14l r15l db dw dd dq dt ddq do dy dz resb resw resd resq rest resdq reso resy resz incbin equ times byte word dword qword nosplit rel abs seg wrt strict near far a32 ptr",meta:"%define %xdefine %+ %undef %defstr %deftok %assign %strcat %strlen %substr %rotate %elif %else %endif %if %ifmacro %ifctx %ifidn %ifidni %ifid %ifnum %ifstr %iftoken %ifempty %ifenv %error %warning %fatal %rep %endrep %include %push %pop %repl %pathsearch %depend %use %arg %stacksize %local %line %comment %endcomment .nolist __FILE__ __LINE__ __SECT__ __BITS__ __OUTPUT_FORMAT__ __DATE__ __TIME__ __DATE_NUM__ __TIME_NUM__ __UTC_DATE__ __UTC_TIME__ __UTC_DATE_NUM__ __UTC_TIME_NUM__ __PASS__ struc endstruc istruc at iend align alignb sectalign daz nodaz up down zero default option assume public bits use16 use32 use64 default section segment absolute extern global common cpu float __utf16__ __utf16le__ __utf16be__ __utf32__ __utf32le__ __utf32be__ __float8__ __float16__ __float32__ __float64__ __float80m__ __float80e__ __float128l__ __float128h__ __Infinity__ __QNaN__ __SNaN__ Inf NaN QNaN SNaN float8 float16 float32 float64 float80m float80e float128l float128h __FLOAT_DAZ__ __FLOAT_ROUND__ 
__FLOAT__"},contains:[s.COMMENT(";","$",{relevance:0}),{className:"number",variants:[{begin:"\\b(?:([0-9][0-9_]*)?\\.[0-9_]*(?:[eE][+-]?[0-9_]+)?|(0[Xx])?[0-9][0-9_]*\\.?[0-9_]*(?:[pP](?:[+-]?[0-9_]+)?)?)\\b",relevance:0},{begin:"\\$[0-9][0-9A-Fa-f]*",relevance:0},{begin:"\\b(?:[0-9A-Fa-f][0-9A-Fa-f_]*[Hh]|[0-9][0-9_]*[DdTt]?|[0-7][0-7_]*[QqOo]|[0-1][0-1_]*[BbYy])\\b"},{begin:"\\b(?:0[Xx][0-9A-Fa-f_]+|0[DdTt][0-9_]+|0[QqOo][0-7_]+|0[BbYy][0-1_]+)\\b"}]},s.QUOTE_STRING_MODE,{className:"string",variants:[{begin:"'",end:"[^\\\\]'"},{begin:"`",end:"[^\\\\]`"}],relevance:0},{className:"symbol",variants:[{begin:"^\\s*[A-Za-z._?][A-Za-z0-9_$#@~.?]*(:|\\s+label)"},{begin:"^\\s*%%[A-Za-z0-9_$#@~.?]*:"}],relevance:0},{className:"subst",begin:"%[0-9]+",relevance:0},{className:"subst",begin:"%!S+",relevance:0},{className:"meta",begin:/^\s*\.[\w_-]+/}]}}}()); \ No newline at end of file diff --git a/book/theme/index.hbs b/book/theme/index.hbs new file mode 100644 index 000000000..080b78516 --- /dev/null +++ b/book/theme/index.hbs @@ -0,0 +1,346 @@ + + + + + + {{ title }} + {{#if is_print }} + + {{/if}} + {{#if base_url}} + + {{/if}} + + + + {{> head}} + + + + + + {{#if favicon_svg}} + + {{/if}} + {{#if favicon_png}} + + {{/if}} + + + + {{#if print_enable}} + + {{/if}} + + + + {{#if copy_fonts}} + + {{/if}} + + + + + + + + {{#each additional_css}} + + {{/each}} + + {{#if mathjax_support}} + + + {{/if}} + + +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ {{> header}} + + + + {{#if search_enabled}} + + {{/if}} + + + + +
+
+ {{{ content }}} +
+ + +
+
+ + + +
+ + {{#if live_reload_endpoint}} + + + {{/if}} + + {{#if google_analytics}} + + + {{/if}} + + {{#if playground_line_numbers}} + + {{/if}} + + {{#if playground_copyable}} + + {{/if}} + + {{#if playground_js}} + + + + + + {{/if}} + + {{#if search_js}} + + + + {{/if}} + + + + + + + {{#each additional_js}} + + {{/each}} + + {{#if is_print}} + {{#if mathjax_support}} + + {{else}} + + {{/if}} + {{/if}} + +
+ + diff --git a/book/theme/lz-string.js b/book/theme/lz-string.js new file mode 100644 index 000000000..534b61ff6 --- /dev/null +++ b/book/theme/lz-string.js @@ -0,0 +1 @@ +var LZString=function(){var r=String.fromCharCode,o="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-$",e={};function t(r,o){if(!e[r]){e[r]={};for(var n=0;n>>8,n[2*e+1]=s%256}return n},decompressFromUint8Array:function(o){if(null==o)return i.decompress(o);for(var n=new Array(o.length/2),e=0,t=n.length;e>=1}else{for(t=1,e=0;e>=1}0==--l&&(l=Math.pow(2,h),h++),delete u[c]}else for(t=s[c],e=0;e>=1;0==--l&&(l=Math.pow(2,h),h++),s[p]=f++,c=String(a)}if(""!==c){if(Object.prototype.hasOwnProperty.call(u,c)){if(c.charCodeAt(0)<256){for(e=0;e>=1}else{for(t=1,e=0;e>=1}0==--l&&(l=Math.pow(2,h),h++),delete u[c]}else for(t=s[c],e=0;e>=1;0==--l&&(l=Math.pow(2,h),h++)}for(t=2,e=0;e>=1;for(;;){if(m<<=1,v==o-1){d.push(n(m));break}v++}return d.join("")},decompress:function(r){return null==r?"":""==r?null:i._decompress(r.length,32768,function(o){return r.charCodeAt(o)})},_decompress:function(o,n,e){var t,i,s,u,a,p,c,l=[],f=4,h=4,d=3,m="",v=[],g={val:e(0),position:n,index:1};for(t=0;t<3;t+=1)l[t]=t;for(s=0,a=Math.pow(2,2),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;switch(s){case 0:for(s=0,a=Math.pow(2,8),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;c=r(s);break;case 1:for(s=0,a=Math.pow(2,16),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;c=r(s);break;case 2:return""}for(l[3]=c,i=c,v.push(c);;){if(g.index>o)return"";for(s=0,a=Math.pow(2,d),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;switch(c=s){case 
0:for(s=0,a=Math.pow(2,8),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;l[h++]=r(s),c=h-1,f--;break;case 1:for(s=0,a=Math.pow(2,16),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;l[h++]=r(s),c=h-1,f--;break;case 2:return v.join("")}if(0==f&&(f=Math.pow(2,d),d++),l[c])m=l[c];else{if(c!==h)return null;m=i+i.charAt(0)}v.push(m),l[h++]=i+m.charAt(0),i=m,0==--f&&(f=Math.pow(2,d),d++)}}};return i}();"function"==typeof define&&define.amd?define(function(){return LZString}):"undefined"!=typeof module&&null!=module?module.exports=LZString:"undefined"!=typeof angular&&null!=angular&&angular.module("LZString",[]).factory("LZString",function(){return LZString}); diff --git a/cli/driver/src/callbacks_wrapper.rs b/cli/driver/src/callbacks_wrapper.rs index 5a5bc28a1..1ba359288 100644 --- a/cli/driver/src/callbacks_wrapper.rs +++ b/cli/driver/src/callbacks_wrapper.rs @@ -14,10 +14,18 @@ impl<'a> Callbacks for CallbacksWrapper<'a> { fn config(&mut self, config: &mut interface::Config) { let options = self.options.clone(); config.psess_created = Some(Box::new(move |parse_sess| { - parse_sess.env_depinfo.get_mut().insert(( + let depinfo = parse_sess.env_depinfo.get_mut(); + depinfo.insert(( Symbol::intern(ENV_VAR_OPTIONS_FRONTEND), Some(Symbol::intern(&serde_json::to_string(&options).unwrap())), )); + depinfo.insert(( + Symbol::intern("HAX_CARGO_CACHE_KEY"), + std::env::var("HAX_CARGO_CACHE_KEY") + .ok() + .as_deref() + .map(Symbol::intern), + )); })); self.sub.config(config) } diff --git a/cli/driver/src/driver.rs b/cli/driver/src/driver.rs index bccfe3a96..3a6a92823 100644 --- a/cli/driver/src/driver.rs +++ b/cli/driver/src/driver.rs @@ -60,11 +60,6 @@ fn setup_logging() { }; let subscriber = tracing_subscriber::Registry::default() .with(tracing_subscriber::EnvFilter::from_default_env()) - .with( - tracing_subscriber::fmt::layer() - .with_file(true) 
- .with_line_number(true), - ) .with( tracing_tree::HierarchicalLayer::new(2) .with_ansi(enable_colors) diff --git a/cli/driver/src/exporter.rs b/cli/driver/src/exporter.rs index 91151aa5d..96adfabb2 100644 --- a/cli/driver/src/exporter.rs +++ b/cli/driver/src/exporter.rs @@ -1,4 +1,4 @@ -use hax_frontend_exporter::state::{ExportedSpans, LocalContextS}; +use hax_frontend_exporter::state::LocalContextS; use hax_frontend_exporter::SInto; use hax_types::cli_options::{Backend, PathOrDash, ENV_VAR_OPTIONS_FRONTEND}; use rustc_driver::{Callbacks, Compilation}; @@ -46,8 +46,7 @@ fn dummy_thir_body( /// stealing issues (theoretically...) fn precompute_local_thir_bodies( tcx: TyCtxt<'_>, -) -> std::collections::HashMap> { - let hir = tcx.hir(); +) -> impl Iterator)> { use rustc_hir::def::DefKind::*; use rustc_hir::*; @@ -74,7 +73,7 @@ fn precompute_local_thir_bodies( } use itertools::Itertools; - hir.body_owners() + tcx.hir().body_owners() .filter(|ldid| { match tcx.def_kind(ldid.to_def_id()) { InlineConst | AnonConst => { @@ -91,10 +90,10 @@ fn precompute_local_thir_bodies( } }) .sorted_by_key(|ldid| const_level_of(tcx, *ldid)) - .filter(|ldid| hir.maybe_body_owned_by(*ldid).is_some()) - .map(|ldid| { + .filter(move |ldid| tcx.hir().maybe_body_owned_by(*ldid).is_some()) + .map(move |ldid| { tracing::debug!("⏳ Type-checking THIR body for {:#?}", ldid); - let span = hir.span(tcx.local_def_id_to_hir_id(ldid)); + let span = tcx.hir().span(tcx.local_def_id_to_hir_id(ldid)); let (thir, expr) = match tcx.thir_body(ldid) { Ok(x) => x, Err(e) => { @@ -117,7 +116,7 @@ fn precompute_local_thir_bodies( tracing::debug!("✅ Type-checked THIR body for {:#?}", ldid); (ldid, (Rc::new(thir), expr)) }) - .collect() + .map(|(ldid, bundle)| (ldid.to_def_id(), bundle)) } /// Browse a crate and translate every item from HIR+THIR to "THIR'" @@ -135,27 +134,35 @@ fn convert_thir<'tcx, Body: hax_frontend_exporter::IsBody>( hax_frontend_exporter::ImplInfos, )>, Vec>, + 
hax_frontend_exporter::id_table::Table, ) { + use hax_frontend_exporter::WithGlobalCacheExt; let mut state = hax_frontend_exporter::state::State::new(tcx, options.clone()); state.base.macro_infos = Rc::new(macro_calls); - state.base.cached_thirs = Rc::new(precompute_local_thir_bodies(tcx)); + for (def_id, thir) in precompute_local_thir_bodies(tcx) { + state.with_item_cache(def_id, |caches| caches.thir = Some(thir)); + } let result = hax_frontend_exporter::inline_macro_invocations(tcx.hir().items(), &state); let impl_infos = hax_frontend_exporter::impl_def_ids_to_impled_types_and_bounds(&state) .into_iter() .collect(); - let exported_spans = state.base.exported_spans.borrow().clone(); + let exported_spans = state.with_global_cache(|cache| cache.spans.keys().copied().collect()); + let exported_def_ids = state.with_global_cache(|cache| { + cache + .per_item + .values() + .filter_map(|per_item_cache| per_item_cache.def_id.clone()) + .collect() + }); + let cache_map = state.with_global_cache(|cache| cache.id_table_session.table().clone()); - let exported_def_ids = { - let def_ids = state.base.exported_def_ids.borrow(); - let state = hax_frontend_exporter::state::State::new(tcx, options.clone()); - def_ids.iter().map(|did| did.sinto(&state)).collect() - }; ( - exported_spans.into_iter().collect(), + exported_spans, exported_def_ids, impl_infos, result, + cache_map, ) } @@ -261,7 +268,7 @@ impl Callbacks for ExtractionCallbacks { with_kind_type!( self.body_types.clone(), || { - let (spans, def_ids, impl_infos, items) = + let (spans, def_ids, impl_infos, items, cache_map) = convert_thir(&self.clone().into(), self.macro_calls.clone(), tcx); let files: HashSet = HashSet::from_iter( items @@ -280,7 +287,7 @@ impl Callbacks for ExtractionCallbacks { .collect(), def_ids, }; - haxmeta.write(&mut file); + haxmeta.write(&mut file, cache_map); } ); diff --git a/cli/subcommands/build.rs b/cli/subcommands/build.rs index 224b4037c..b1a0402c2 100644 --- a/cli/subcommands/build.rs +++ 
b/cli/subcommands/build.rs @@ -33,7 +33,24 @@ fn json_schema_static_asset() { .unwrap(); } +fn git_dirty_env_var() { + println!("cargo:rurun-if-env-changed=HAX_GIT_IS_DIRTY"); + let dirty = { + use std::process::Command; + let _ = Command::new("git") + .args(["update-index", "-q", "--refresh"]) + .status(); + !Command::new("git") + .args(["diff-index", "--quiet", "HEAD", "--"]) + .status() + .map(|status| status.success()) + .unwrap_or(true) + }; + println!("cargo:rustc-env=HAX_GIT_IS_DIRTY={}", dirty); +} + fn main() { rustc_version_env_var(); json_schema_static_asset(); + git_dirty_env_var(); } diff --git a/cli/subcommands/src/cargo_hax.rs b/cli/subcommands/src/cargo_hax.rs index 36d70d9a9..a1952dc70 100644 --- a/cli/subcommands/src/cargo_hax.rs +++ b/cli/subcommands/src/cargo_hax.rs @@ -1,3 +1,4 @@ +#![feature(rustc_private)] use annotate_snippets::{Level, Renderer}; use clap::Parser; use colored::Colorize; @@ -13,6 +14,7 @@ use std::path::PathBuf; use std::process; mod engine_debug_webapp; +use hax_frontend_exporter::id_table; /// Return a toolchain argument to pass to `cargo`: when the correct nightly is /// already present, this is None, otherwise we (1) ensure `rustup` is available @@ -180,6 +182,26 @@ impl HaxMessage { ); eprintln!("{}", renderer.render(Level::Error.title(&title))); } + Self::ProfilingData(data) => { + fn format_with_dot(shift: u32, n: u64) -> String { + let factor = 10u64.pow(shift); + format!("{}.{}", n / factor, n % factor) + } + let title = format!( + "[profiling] {}: {}ms, memory={}, {} item{}{}", + data.context, + format_with_dot(6, data.time_ns), + data.memory, + data.quantity, + if data.quantity > 1 { "s" } else { "" }, + if data.errored { + " (note: this failed!)" + } else { + "" + } + ); + eprintln!("{}", renderer.render(Level::Info.title(&title))); + } Self::CargoBuildFailure => { let title = "hax: running `cargo build` was not successful, continuing anyway.".to_string(); @@ -199,6 +221,7 @@ impl HaxMessage { /// Runs 
`hax-engine` fn run_engine( haxmeta: HaxMeta, + id_table: id_table::Table, working_dir: PathBuf, manifest_dir: PathBuf, backend: &BackendOptions<()>, @@ -246,7 +269,9 @@ fn run_engine( }; } - send!(&engine_options); + id_table::WithTable::run(id_table, engine_options, |with_table| { + send!(with_table); + }); let out_dir = backend.output_dir.clone().unwrap_or({ let relative_path: PathBuf = [ @@ -311,6 +336,9 @@ fn run_engine( }; send!(&ToEngine::PrettyPrintedRust(code)); } + FromEngine::ProfilingData(profiling_data) => { + HaxMessage::ProfilingData(profiling_data).report(message_format, None) + } FromEngine::Ping => { send!(&ToEngine::Pong); } @@ -362,6 +390,29 @@ fn target_dir(suffix: &str) -> PathBuf { dir.into() } +/// Gets hax version: if hax is being compiled from a dirty git repo, +/// then this function taints the hax version with the hash of the +/// current executable. This makes sure cargo doesn't cache across +/// different versions of hax, for more information see +/// https://github.com/hacspec/hax/issues/801. +fn get_hax_version() -> String { + let mut version = hax_types::HAX_VERSION.to_string(); + if env!("HAX_GIT_IS_DIRTY") == "true" { + version += &std::env::current_exe() + .ok() + .and_then(|exe_path| std::fs::read(exe_path).ok()) + .map(|contents| { + use std::hash::{DefaultHasher, Hash, Hasher}; + let mut s = DefaultHasher::new(); + contents.hash(&mut s); + format!("hash-exe-{}", s.finish()) + }) + .expect("Expect read path") + } + + version +} + /// Calls `cargo` with a custom driver which computes `haxmeta` files /// in `TARGET`. One `haxmeta` file is produced by crate. Each /// `haxmeta` file contains the full AST of one crate. 
@@ -393,6 +444,7 @@ fn compute_haxmeta_files(options: &Options) -> (Vec, i32) { ) .env(RUST_LOG_STYLE, rust_log_style()) .env(RUSTFLAGS, rustflags()) + .env("HAX_CARGO_CACHE_KEY", get_hax_version()) .env( ENV_VAR_OPTIONS_FRONTEND, serde_json::to_string(&options) @@ -442,26 +494,39 @@ fn run_command(options: &Options, haxmeta_files: Vec) -> boo output_file, kind, include_extra, + use_ids, .. } => { with_kind_type!(kind, || { for EmitHaxMetaMessage { path, .. } in haxmeta_files { - let haxmeta: HaxMeta = HaxMeta::read(fs::File::open(&path).unwrap()); + let (haxmeta, id_table): (HaxMeta, _) = HaxMeta::read(fs::File::open(&path).unwrap()); let dest = output_file.open_or_stdout(); + (if include_extra { - serde_json::to_writer( - dest, - &WithDefIds { - def_ids: haxmeta.def_ids, - impl_infos: haxmeta.impl_infos, - items: haxmeta.items, - comments: haxmeta.comments, - }, - ) + let data = WithDefIds { + def_ids: haxmeta.def_ids, + impl_infos: haxmeta.impl_infos, + items: haxmeta.items, + comments: haxmeta.comments, + }; + if use_ids { + id_table::WithTable::run(id_table, data, |with_table| { + serde_json::to_writer(dest, with_table) + }) + } else { + serde_json::to_writer(dest, &data) + } } else { - serde_json::to_writer(dest, &haxmeta.items) + if use_ids { + id_table::WithTable::run(id_table, haxmeta.items, |with_table| { + serde_json::to_writer(dest, with_table) + }) + } else { + serde_json::to_writer(dest, &haxmeta.items) + } }) .unwrap() + } }); false @@ -484,11 +549,13 @@ fn run_command(options: &Options, haxmeta_files: Vec) -> boo path, } in haxmeta_files { - let haxmeta: HaxMeta = HaxMeta::read(fs::File::open(&path).unwrap()); + let (haxmeta, id_table): (HaxMeta, _) = + HaxMeta::read(fs::File::open(&path).unwrap()); error = error || run_engine( haxmeta, + id_table, working_dir, manifest_dir, &backend, diff --git a/deny.toml b/deny.toml new file mode 100644 index 000000000..cae10ee5b --- /dev/null +++ b/deny.toml @@ -0,0 +1,13 @@ +[licenses] +unused-allowed-license = 
"allow" +allow = [ + "Apache-2.0", + "MIT", + "Unicode-DFS-2016", + "MPL-2.0", + # Licences used in the OCaml dependencies in the engine + "BSD-3-Clause", + "LGPL-2.1", + "LGPL-2.0", + "ISC", +] diff --git a/engine/DEV.md b/engine/DEV.md index a0d2ef381..37f96217a 100644 --- a/engine/DEV.md +++ b/engine/DEV.md @@ -52,7 +52,8 @@ To see the implementation of the `Ast_visitors` module, run `dune describe pp li You can enable a debug mode that prints a Rustish AST at each phase, that you can browse interactively along with the actual AST. -Just add the flag `--debug-engine` (or `-d`) to the `into` subcommand. -At the end of the translation, `cargo hax` will spawn a webserver that -lets you browse the debug information. Note, you can change to port by -setting the environment variable `HAX_DEBUGGER_PORT`. +Just add the flag `--debug-engine i` (or `-d i`, `i` stands for +**i**nteractive) to the `into` subcommand. At the end of the +translation, `cargo hax` will spawn a webserver that lets you browse +the debug information. Note, you can change to port by setting the +environment variable `HAX_DEBUGGER_PORT`. 
diff --git a/engine/backends/coq/coq/coq_backend.ml b/engine/backends/coq/coq/coq_backend.ml index 34338363f..2db7c6cee 100644 --- a/engine/backends/coq/coq/coq_backend.ml +++ b/engine/backends/coq/coq/coq_backend.ml @@ -34,17 +34,18 @@ module SubtypeToInputLanguage and type monadic_action = Features.Off.monadic_action and type arbitrary_lhs = Features.Off.arbitrary_lhs and type nontrivial_lhs = Features.Off.nontrivial_lhs - and type loop = Features.Off.loop and type block = Features.Off.block - and type for_loop = Features.Off.for_loop - and type while_loop = Features.Off.while_loop - and type for_index_loop = Features.Off.for_index_loop and type quote = Features.Off.quote - and type state_passing_loop = Features.Off.state_passing_loop and type dyn = Features.Off.dyn and type match_guard = Features.Off.match_guard and type trait_item_default = Features.Off.trait_item_default - and type unsafe = Features.Off.unsafe) = + and type unsafe = Features.Off.unsafe + and type loop = Features.Off.loop + and type for_loop = Features.Off.for_loop + and type while_loop = Features.Off.while_loop + and type for_index_loop = Features.Off.for_index_loop + and type state_passing_loop = Features.Off.state_passing_loop + and type fold_like_loop = Features.Off.fold_like_loop) = struct module FB = InputLanguage @@ -70,615 +71,692 @@ module CoqNamePolicy = Concrete_ident.DefaultNamePolicy module U = Ast_utils.MakeWithNamePolicy (InputLanguage) (CoqNamePolicy) open AST -module CoqLibrary : Library = struct - module Notation = struct - let int_repr (x : string) (i : string) : string = - "(@repr" ^ " " ^ "WORDSIZE" ^ x ^ " " ^ i ^ ")" - - let type_str : string = "Type" - let bool_str : string = "bool" - let unit_str : string = "unit" +let hardcoded_coq_headers = + "(* File automatically generated by Hacspec *)\n\ + From Coq Require Import ZArith.\n\ + Require Import List.\n\ + Import List.ListNotations.\n\ + Open Scope Z_scope.\n\ + Open Scope bool_scope.\n\ + Require Import Ascii.\n\ + 
Require Import String.\n\ + Require Import Coq.Floats.Floats.\n\ + From RecordUpdate Require Import RecordSet.\n\ + Import RecordSetNotations.\n" + +module BasePrinter = Generic_printer.Make (InputLanguage) + +module Make (Default : sig + val default : string -> string +end) +(Attrs : Attrs.WITH_ITEMS) = +struct + open PPrint + + let default_string_for s = "TODO: please implement the method `" ^ s ^ "`" + let default_document_for = default_string_for >> string + + module CoqNotation = struct + let definition_struct keyword n name generics params typ body = + keyword ^^ space ^^ name ^^ generics + ^^ concat_map (fun x -> space ^^ x) params + ^^ space ^^ colon ^^ space ^^ typ ^^ space ^^ string ":=" + ^^ nest n (break 1 ^^ body) + ^^ dot + + let proof_struct keyword name generics params statement = + keyword ^^ space ^^ name ^^ generics + ^^ concat_map (fun x -> space ^^ x) params + ^^ space ^^ colon + ^^ nest 2 (break 1 ^^ statement ^^ dot) + ^^ break 1 ^^ string "Proof" ^^ dot ^^ space ^^ string "Admitted" ^^ dot + + let definition = definition_struct (string "Definition") 2 + let fixpoint = definition_struct (string "Fixpoint") 2 + let inductive = definition_struct (string "Inductive") 0 + let record = definition_struct (string "Record") 2 + let instance = definition_struct (string "Instance") 2 + let class_ = definition_struct (string "Class") 2 + let lemma = proof_struct (string "Lemma") end -end -module C = Coq (CoqLibrary) + type ('get_span_data, 'a) object_type = + ('get_span_data, 'a) BasePrinter.Gen.object_type -module Context = struct - type t = { current_namespace : string * string list } -end + class printer = + object (self) + inherit BasePrinter.base -let primitive_to_string (id : primitive_ident) : string = - match id with - | Deref -> "(TODO: Deref)" (* failwith "Deref" *) - | Cast -> "cast" (* failwith "Cast" *) - | LogicalOp op -> ( match op with And -> "andb" | Or -> "orb") + method private primitive_to_string (id : primitive_ident) : document = + 
match id with + | Deref -> default_document_for "(TODO: Deref)" + | Cast -> string "cast" + | LogicalOp op -> ( + match op with And -> string "andb" | Or -> string "orb") -module Make (Ctx : sig - val ctx : Context.t -end) = -struct - open Ctx - - let pconcrete_ident (id : concrete_ident) : string = - let id = U.Concrete_ident_view.to_view id in - let crate, path = ctx.current_namespace in - if String.(crate = id.crate) && [%eq: string list] id.path path then - id.definition - else - (* id.crate ^ "_" ^ *) - (* List.fold_left ~init:"" ~f:(fun x y -> x ^ "_" ^ y) *) - id.definition - - let pglobal_ident (id : global_ident) : string = - match id with - | `Projector (`Concrete cid) | `Concrete cid -> pconcrete_ident cid - | `Primitive p_id -> primitive_to_string p_id - | `TupleType _i -> "TODO (global ident) tuple type" - | `TupleCons _i -> "TODO (global ident) tuple cons" - | `Projector (`TupleField _) | `TupleField _ -> - "TODO (global ident) tuple field" - | _ -> . - - module TODOs_debug = struct - let __TODO_pat__ _ s = C.AST.Ident (s ^ " todo(pat)") - let __TODO_ty__ _ s : C.AST.ty = C.AST.NameTy (s ^ " todo(ty)") - let __TODO_item__ _ s = C.AST.Unimplemented (s ^ " todo(item)") - let __TODO_term__ _ s = C.AST.Const (C.AST.Const_string (s ^ " todo(term)")) - end + method arm ~arm ~span:_ = arm#p - module TODOs = struct - let __TODO_ty__ span s : C.AST.ty = - Error.unimplemented ~details:("[ty] node " ^ s) span + method arm' ~super:_ ~arm_pat ~body ~guard:_ = + arm_pat#p ^^ space ^^ string "=>" ^^ nest 2 (break 1 ^^ body#p) - let __TODO_pat__ span s = - Error.unimplemented ~details:("[pat] node " ^ s) span + method attrs x1 = default_document_for "attrs" - let __TODO_term__ span s = - Error.unimplemented ~details:("[expr] node " ^ s) span + method binding_mode_ByRef _x1 _x2 = + default_document_for "binding_mode_ByRef" - let __TODO_item__ _span s = C.AST.Unimplemented (s ^ " todo(item)") - end + method binding_mode_ByValue = default_document_for 
"binding_mode_ByValue" + method borrow_kind_Mut _x1 = default_document_for "borrow_kind_Mut" + method borrow_kind_Shared = default_document_for "borrow_kind_Shared" + method borrow_kind_Unique = default_document_for "borrow_kind_Unique" + method common_array x1 = brackets (separate (semi ^^ space) x1) + + method dyn_trait_goal ~trait:_ ~non_self_args:_ = + default_document_for "dyn_trait_goal" + + method error_expr x1 = parens (string x1 ^^ string "(* ERROR_EXPR *)") + method error_item x1 = parens (string x1 ^^ string "(* ERROR_ITEM *)") + method error_pat x1 = parens (string x1 ^^ string "(* ERROR_PAT *)") + method expr ~e ~span:_ ~typ = e#p + + method expr'_AddressOf ~super:_ ~mut:_ ~e:_ ~witness = + match witness with _ -> . + + method expr'_App_application ~super:_ ~f ~args ~generics:_ = + f#p ^^ concat_map (fun x -> space ^^ parens x#p) args - open TODOs - - let pint_kind (k : int_kind) : C.AST.int_type = - { - size = - (match k.size with - | S8 -> U8 - | S16 -> U16 - | S32 -> U32 - | S64 -> U64 - | S128 -> U128 - | SSize -> USize); - signed = (match k.signedness with Signed -> true | _ -> false); - } - - let pliteral span (e : literal) = - match e with - | String s -> C.AST.Const_string s - | Char c -> C.AST.Const_char (Char.to_int c) - | Int { value; kind; _ } -> C.AST.Const_int (value, pint_kind kind) - | Float _ -> Error.unimplemented ~details:"pliteral: Float" span - | Bool b -> C.AST.Const_bool b - - let rec pty span (t : ty) : C.AST.ty = - match t with - | TBool -> C.AST.Bool - | TChar -> __TODO_ty__ span "char" - | TInt k -> C.AST.Int (pint_kind k) - | TStr -> __TODO_ty__ span "str" - | TApp { ident = `TupleType 0; args = [] } -> C.AST.Unit - | TApp { ident = `TupleType 1; args = [ GType ty ] } -> pty span ty - | TApp { ident = `TupleType n; args } when n >= 2 -> - C.AST.Product (args_ty span args) - | TApp { ident; args } -> - C.AST.AppTy - (C.AST.NameTy (pglobal_ident ident ^ "_t"), args_ty span args) - | TArrow (inputs, output) -> - List.fold_right 
~init:(pty span output) - ~f:(fun x y -> C.AST.Arrow (x, y)) - (List.map ~f:(pty span) inputs) - | TFloat _ -> __TODO_ty__ span "pty: Float" - | TArray { typ; _ } -> - C.AST.ArrayTy (pty span typ, "TODO: Int.to_string length") - | TSlice { ty; _ } -> C.AST.SliceTy (pty span ty) - | TParam i -> C.AST.NameTy i.name - | TAssociatedType _ -> C.AST.WildTy - | TOpaque _ -> __TODO_ty__ span "pty: TAssociatedType/TOpaque" - | _ -> . - - and args_ty span (args : generic_value list) : C.AST.ty list = - (* List.map ~f:pty *) - match args with - | arg :: xs -> - (match arg with - | GLifetime _ -> __TODO_ty__ span "lifetime" - | GType x -> pty span x - | GConst _ -> __TODO_ty__ span "const") - :: args_ty span xs - | [] -> [] - - let rec ppat (p : pat) : C.AST.pat = - match p.p with - | PWild -> C.AST.WildPat - | PAscription { typ; pat; _ } -> - C.AST.AscriptionPat (ppat pat, pty p.span typ) - | PBinding - { - mut = Immutable; - mode = _; - subpat = None; - var; - typ = _ (* we skip type annot here *); - } -> - C.AST.Ident var.name - | POr { subpats } -> C.AST.DisjunctivePat (List.map ~f:ppat subpats) - | PArray _ -> __TODO_pat__ p.span "Parray?" - | PConstruct { name = `TupleCons 0; args = []; _ } -> C.AST.UnitPat - | PConstruct { name = `TupleCons 1; args = [ _ ]; _ } -> - __TODO_pat__ p.span "tuple 1" - | PConstruct { name = `TupleCons _n; args; _ } -> - C.AST.TuplePat (List.map ~f:(fun { pat; _ } -> ppat pat) args) - | PConstruct { name; args; is_record = true; _ } -> - C.AST.RecordPat (pglobal_ident name, pfield_pats args) - | PConstruct { name; args; is_record = false; _ } -> - C.AST.ConstructorPat - (pglobal_ident name, List.map ~f:(fun p -> ppat p.pat) args) - | PConstant { lit } -> C.AST.Lit (pliteral p.span lit) - | _ -> . 
- - and pfield_pats (args : field_pat list) : (string * C.AST.pat) list = - match args with - | { field; pat } :: xs -> (pglobal_ident field, ppat pat) :: pfield_pats xs - | _ -> [] - - (* TODO: I guess this should be named `notations` rather than `operators`, for the Coq backend, right? *) - let operators = - let c = Global_ident.of_name Value in - [ - (c Rust_primitives__hax__array_of_list, (3, [ ""; ".["; "]<-"; "" ])); - (c Core__ops__index__Index__index, (2, [ ""; ".["; "]" ])); - (c Core__ops__bit__BitXor__bitxor, (2, [ ""; ".^"; "" ])); - (c Core__ops__bit__BitAnd__bitand, (2, [ ""; ".&"; "" ])); - (c Core__ops__bit__BitOr__bitor, (2, [ ""; ".|"; "" ])); - (c Core__ops__arith__Add__add, (2, [ ""; ".+"; "" ])); - (c Core__ops__arith__Sub__sub, (2, [ ""; ".-"; "" ])); - (c Core__ops__arith__Mul__mul, (2, [ ""; ".*"; "" ])); - (c Core__ops__arith__Div__div, (2, [ ""; "./"; "" ])); - (c Core__cmp__PartialEq__eq, (2, [ ""; "=.?"; "" ])); - (c Core__cmp__PartialOrd__lt, (2, [ ""; "<.?"; "" ])); - (c Core__cmp__PartialOrd__le, (2, [ ""; "<=.?"; "" ])); - (c Core__cmp__PartialOrd__ge, (2, [ ""; ">=.?"; "" ])); - (c Core__cmp__PartialOrd__gt, (2, [ ""; ">.?"; "" ])); - (c Core__cmp__PartialEq__ne, (2, [ ""; "<>"; "" ])); - (c Core__ops__arith__Rem__rem, (2, [ ""; ".%"; "" ])); - (c Core__ops__bit__Shl__shl, (2, [ ""; " shift_left "; "" ])); - (c Core__ops__bit__Shr__shr, (2, [ ""; " shift_right "; "" ])); - (* TODO: those two are not notations/operators at all, right? 
*) - (* (c "secret_integers::rotate_left", (2, [ "rol "; " "; "" ])); *) - (* (c "hacspec::lib::foldi", (4, [ "foldi "; " "; " "; " "; "" ])); *) - - (* (c "secret_integers::u8", (0, ["U8"])); *) - (* (c "secret_integers::u16", (0, ["U16"])); *) - (* (c "secret_integers::u32", (0, ["U32"])); *) - (* (c "secret_integers::u64", (0, ["U64"])); *) - (* (c "secret_integers::u128", (0, ["U128"])); *) - ] - |> Map.of_alist_exn (module Global_ident) - - let rec pexpr (e : expr) = - try pexpr_unwrapped e - with Diagnostics.SpanFreeError.Exn _ -> - TODOs_debug.__TODO_term__ e.span "failure" - - and pexpr_unwrapped (e : expr) : C.AST.term = - let span = e.span in - match e.e with - | Literal l -> C.AST.Const (pliteral e.span l) - | LocalVar local_ident -> C.AST.NameTerm local_ident.name - | GlobalVar (`TupleCons 0) - | Construct { constructor = `TupleCons 0; fields = []; _ } -> - C.AST.UnitTerm - | GlobalVar global_ident -> C.AST.Var (pglobal_ident global_ident) - | App - { - f = { e = GlobalVar (`Projector (`TupleField _)); _ }; - args = [ _ ]; - _; - } -> - __TODO_term__ span "app global vcar projector tuple" - | App { f = { e = GlobalVar x; _ }; args; _ } when Map.mem operators x -> - let arity, op = Map.find_exn operators x in - if List.length args <> arity then - Error.assertion_failure span "expr: function application: bad arity"; - let args = List.map ~f:(fun x -> C.AST.Value (pexpr x, true, 0)) args in - C.AST.AppFormat (op, args) - (* | App { f = { e = GlobalVar x }; args } -> *) - (* __TODO_term__ span "GLOBAL APP?" 
*) - | App { f; args; _ } -> - let base = pexpr f in - let args = List.map ~f:pexpr args in - C.AST.App (base, args) - | If { cond; then_; else_ } -> - C.AST.If - ( pexpr cond, - pexpr then_, - Option.value_map else_ ~default:(C.AST.Literal "()") ~f:pexpr ) - | Array l -> C.AST.Array (List.map ~f:pexpr l) - | Let { lhs; rhs; body; monadic } -> - C.AST.Let - { - pattern = ppat lhs; - mut = - (match lhs.p with - | PBinding { mut = Mutable _; _ } -> true - | _ -> false); - value = pexpr rhs; - body = pexpr body; - value_typ = pty span lhs.typ; - monad_typ = - Option.map - ~f:(fun (m, _) -> - match m with - | MException typ -> C.AST.Exception (pty span typ) - | MResult typ -> C.AST.Result (pty span typ) - | MOption -> C.AST.Option) - monadic; - } - | Match { scrutinee; arms } -> - C.AST.Match - ( pexpr scrutinee, - List.map - ~f:(fun { arm = { arm_pat; body; _ }; _ } -> - (ppat arm_pat, pexpr body)) - arms ) - | Ascription _ -> __TODO_term__ span "asciption" - | Construct { constructor = `TupleCons 1; fields = [ (_, e) ]; _ } -> - pexpr e - | Construct { constructor = `TupleCons _n; fields; _ } -> - C.AST.Tuple (List.map ~f:(snd >> pexpr) fields) - | Construct { is_record = true; constructor; fields; _ } -> - (* TODO: handle base *) - C.AST.RecordConstructor - ( pglobal_ident constructor, - List.map ~f:(fun (f, e) -> (pglobal_ident f, pexpr e)) fields ) - | Construct { is_record = false; constructor; fields = [ (_f, e) ]; _ } -> - C.AST.App (C.AST.Var (pglobal_ident constructor), [ pexpr e ]) - | Construct { constructor; _ } -> - (* __TODO_term__ span "constructor" *) - C.AST.Var - (pglobal_ident constructor - ^ C.ty_to_string_without_paren (pty span e.typ)) - | Closure { params; body; _ } -> - C.AST.Lambda (List.map ~f:ppat params, pexpr body) - | MacroInvokation { macro; _ } -> - Error.raise - @@ { - kind = UnsupportedMacro { id = [%show: global_ident] macro }; - span = e.span; - } - | _ -> . 
- - let pgeneric_param_as_argument span : generic_param -> C.AST.argument = - function - | { ident; kind = GPType; _ } -> - C.AST.Explicit (C.AST.Ident ident.name, C.AST.WildTy) - | _ -> Error.unimplemented ~details:"Coq: TODO: generic_params" span - - let rec pitem (e : item) : C.AST.decl list = - try pitem_unwrapped e - with Diagnostics.SpanFreeError.Exn _ -> - [ C.AST.Unimplemented "item error backend" ] - - and pitem_unwrapped (e : item) : C.AST.decl list = - let span = e.span in - match e.v with - | Fn { name; body; params; _ } -> - [ - C.AST.Definition - ( pconcrete_ident name, - List.map - ~f:(fun { pat; typ; _ } -> - C.AST.Explicit (ppat pat, pty span typ)) - params, - pexpr body, - pty span body.typ ); - ] - | TyAlias { name; ty; _ } -> - [ - C.AST.Notation - ( "'" ^ pconcrete_ident name ^ "_t" ^ "'", - C.AST.Type (pty span ty), - None ); - ] - (* record *) - | Type { name; generics; variants = [ v ]; is_struct = true } -> - [ - (* TODO: generics *) - C.AST.Record - ( U.Concrete_ident_view.to_definition_name name, - List.map ~f:(pgeneric_param_as_argument span) generics.params, - List.map - ~f:(fun (x, y) -> C.AST.Named (x, y)) - (p_record_record span v.arguments) ); - ] - (* enum *) - | Type { name; generics; variants; _ } -> - [ - C.AST.Inductive - ( U.Concrete_ident_view.to_definition_name name, - List.map ~f:(pgeneric_param_as_argument span) generics.params, - p_inductive span variants name ); - ] - (* TODO: this is never matched, now *) - (* | Type { name; generics; variants } -> *) - (* [ *) - (* C.AST.Notation *) - (* ( U.Concrete_ident_view.to_definition_name name, *) - (* C.AST.Product (List.map ~f:snd (p_record span variants name)) ); *) - (* C.AST.Definition *) - (* ( U.Concrete_ident_view.to_definition_name name, *) - (* [], *) - (* C.AST.Var "id", *) - (* C.AST.Arrow *) - (* ( C.AST.Name (U.Concrete_ident_view.to_definition_name name), *) - (* C.AST.Name (U.Concrete_ident_view.to_definition_name name) ) ); *) - (* ] *) - | IMacroInvokation { 
macro; argument; _ } -> ( - let unsupported () = - let id = [%show: concrete_ident] macro in - Error.raise { kind = UnsupportedMacro { id }; span = e.span } + method expr'_App_constant ~super:_ ~constant ~generics:_ = constant#p + + method expr'_App_field_projection ~super:_ ~field ~e = + field#p ^^ space ^^ e#p + + method expr'_App_tuple_projection ~super:_ ~size:_ ~nth:_ ~e:_ = + default_document_for "expr'_App_tuple_projection" + + method expr'_Ascription ~super:_ ~e ~typ = + e#p ^^ space ^^ colon ^^ space ^^ typ#p + + method expr'_Assign ~super:_ ~lhs:_ ~e:_ ~witness = + match witness with _ -> . + + method expr'_Block ~super:_ ~e:_ ~safety_mode:_ ~witness = + match witness with _ -> . + + method expr'_Borrow ~super:_ ~kind:_ ~e:_ ~witness = + match witness with _ -> . + + method expr'_Break ~super:_ ~e:_ ~acc:_ ~label:_ ~witness = + match witness with _ -> . + + method expr'_Closure ~super:_ ~params ~body ~captures:_ = + !^"fun" + ^^ concat_map (fun x -> space ^^ x#p) params + ^^ space ^^ !^"=>" ^^ space + ^^ nest 2 (break 1 ^^ body#p) + + method expr'_Construct_inductive ~super:_ ~constructor ~is_record + ~is_struct ~fields ~base = + let fields_or_empty add_space = + if List.is_empty fields then empty + else + add_space + ^^ parens + (separate_map (comma ^^ space) (fun x -> (snd x)#p) fields) in - match U.Concrete_ident_view.to_view macro with - | { crate = "hacspec_lib"; path = _; definition = name } -> ( - match name with - | "public_nat_mod" -> - let open Hacspeclib_macro_parser in - let o : PublicNatMod.t = - PublicNatMod.parse argument |> Result.ok_or_failwith - in - [ - C.AST.Notation - ( "'" ^ o.type_name ^ "_t" ^ "'", - C.AST.Type - (C.AST.NatMod - ( o.type_of_canvas, - o.bit_size_of_field, - o.modulo_value )), - None ); - C.AST.Definition - ( o.type_name, - [], - C.AST.Var "id", - C.AST.Arrow - ( C.AST.NameTy (o.type_name ^ "_t"), - C.AST.NameTy (o.type_name ^ "_t") ) ); - ] - | "bytes" -> - let open Hacspeclib_macro_parser in - let o : Bytes.t = - 
Bytes.parse argument |> Result.ok_or_failwith - in - [ - C.AST.Notation - ( "'" ^ o.bytes_name ^ "_t" ^ "'", - C.AST.Type - (C.AST.ArrayTy - ( C.AST.Int { size = C.AST.U8; signed = false }, - (* int_of_string *) o.size )), - None ); - C.AST.Definition - ( o.bytes_name, - [], - C.AST.Var "id", - C.AST.Arrow - ( C.AST.NameTy (o.bytes_name ^ "_t"), - C.AST.NameTy (o.bytes_name ^ "_t") ) ); - ] - | "unsigned_public_integer" -> - let open Hacspeclib_macro_parser in - let o = - UnsignedPublicInteger.parse argument |> Result.ok_or_failwith - in - [ - C.AST.Notation - ( "'" ^ o.integer_name ^ "_t" ^ "'", - C.AST.Type - (C.AST.ArrayTy - ( C.AST.Int { size = C.AST.U8; signed = false }, - Int.to_string ((o.bits + 7) / 8) )), - None ); - C.AST.Definition - ( o.integer_name, - [], - C.AST.Var "id", - C.AST.Arrow - ( C.AST.NameTy (o.integer_name ^ "_t"), - C.AST.NameTy (o.integer_name ^ "_t") ) ); - ] - | "public_bytes" -> - let open Hacspeclib_macro_parser in - let o : Bytes.t = - Bytes.parse argument |> Result.ok_or_failwith - in - let typ = - C.AST.ArrayTy - ( C.AST.Int { size = C.AST.U8; signed = false }, - (* int_of_string *) o.size ) - in - [ - C.AST.Notation - ("'" ^ o.bytes_name ^ "_t" ^ "'", C.AST.Type typ, None); - C.AST.Definition - ( o.bytes_name, - [], - C.AST.Var "id", - C.AST.Arrow - ( C.AST.NameTy (o.bytes_name ^ "_t"), - C.AST.NameTy (o.bytes_name ^ "_t") ) ); - ] - | "array" -> - let open Hacspeclib_macro_parser in - let o : Array.t = - Array.parse argument |> Result.ok_or_failwith - in - let typ = - match o.typ with - (* Some *) - | "U128" -> C.AST.U128 - (* Some *) - | "U64" -> C.AST.U64 - (* Some *) - | "U32" -> C.AST.U32 - (* Some *) - | "U16" -> C.AST.U16 - (* Some *) - | "U8" -> C.AST.U8 - | _usize -> C.AST.U32 (* TODO: usize? 
*) - in - [ - C.AST.Notation - ( "'" ^ o.array_name ^ "_t" ^ "'", - C.AST.Type - (C.AST.ArrayTy - ( C.AST.Int { size = typ; signed = false }, - (* int_of_string *) o.size )), - None ); - C.AST.Definition - ( o.array_name, - [], - C.AST.Var "id", - C.AST.Arrow - ( C.AST.NameTy (o.array_name ^ "_t"), - C.AST.NameTy (o.array_name ^ "_t") ) ); - ] - | _ -> unsupported ()) - | _ -> unsupported ()) - | Use { path; is_external; rename } -> - if is_external then [] else [ C.AST.Require (None, path, rename) ] - | HaxError s -> [ __TODO_item__ span s ] - | NotImplementedYet -> [ __TODO_item__ span "Not implemented yet?" ] - | Alias _ -> [ __TODO_item__ span "Not implemented yet? alias" ] - | Trait { name; generics; items } -> - [ - C.AST.Class - ( U.Concrete_ident_view.to_definition_name name, - List.map - ~f:(pgeneric_param_as_argument span) - (match List.rev generics.params with - | _ :: xs -> List.rev xs - | _ -> []), - List.map - ~f:(fun x -> - C.AST.Named - ( U.Concrete_ident_view.to_definition_name x.ti_ident, - match x.ti_v with - | TIFn fn_ty -> pty span fn_ty - | TIDefault _ -> . 
- | _ -> __TODO_ty__ span "field_ty" )) - items ); - ] - | Impl { generics; self_ty; of_trait = name, gen_vals; items } -> - [ - C.AST.Instance - ( pglobal_ident name, - List.map ~f:(pgeneric_param_as_argument span) generics.params, - pty span self_ty, - args_ty span gen_vals, - List.map - ~f:(fun x -> - match x.ii_v with - | IIFn { body; params } -> - ( U.Concrete_ident_view.to_definition_name x.ii_ident, - List.map - ~f:(fun { pat; typ; _ } -> - C.AST.Explicit (ppat pat, pty span typ)) - params, - pexpr body, - pty span body.typ ) - | _ -> - ( "todo_name", - [], - __TODO_term__ span "body", - __TODO_ty__ span "typ" )) - items ); - ] - - and p_inductive span variants _parrent_name : C.AST.inductive_case list = - List.map variants ~f:(fun { name; arguments; is_record; _ } -> + if is_record && is_struct then + match base with + | Some x -> string "Build_" ^^ x#p ^^ fields_or_empty space + | None -> string "Build_t_" ^^ constructor#p ^^ fields_or_empty space + else if not is_record then + if is_struct then + string "Build_t_" ^^ constructor#p ^^ fields_or_empty space + else constructor#p ^^ fields_or_empty space + else + default_document_for + "expr'_Construct_inductive [is_record=true, is_struct = false] \ + todo record" + + method expr'_Construct_tuple ~super:_ ~components = + if List.length components == 0 then !^"tt" + else parens (separate_map comma (fun x -> x#p) components) + + method expr'_Continue ~super:_ ~acc:_ ~label:_ ~witness = + match witness with _ -> . 
+ + method expr'_EffectAction ~super:_ ~action:_ ~argument:_ = + default_document_for "expr'_EffectAction" + + method expr'_GlobalVar_concrete ~super:_ x2 = x2#p + method expr'_GlobalVar_primitive ~super:_ x2 = self#primitive_to_string x2 + + method expr'_If ~super:_ ~cond ~then_ ~else_ = + string "if" + ^^ nest 2 (break 1 ^^ cond#p) + ^^ break 1 ^^ string "then" + ^^ nest 2 (break 1 ^^ then_#p) + ^^ break 1 ^^ string "else" + ^^ nest 2 + (break 1 ^^ match else_ with Some x -> x#p | None -> string "tt") + + method expr'_Let ~super:_ ~monadic:_ ~lhs ~rhs ~body = + string "let" ^^ space ^^ lhs#p ^^ space ^^ string ":=" ^^ space ^^ rhs#p + ^^ space ^^ string "in" ^^ break 1 ^^ body#p + + method expr'_Literal ~super:_ x2 = x2#p + method expr'_LocalVar ~super:_ x2 = x2#p + + method expr'_Loop ~super:_ ~body ~kind ~state ~control_flow ~label:_ + ~witness:_ = + kind#p ^^ space + ^^ brackets + (Option.value ~default:(string "is_none") + (Option.map ~f:(fun x -> x#p) control_flow)) + ^^ Option.value ~default:(string "default") + (Option.map ~f:(fun x -> x#p) state) + ^^ space ^^ string "of" ^^ space + ^^ parens (nest 2 (break 1 ^^ body#p)) + + method expr'_MacroInvokation ~super:_ ~macro:_ ~args:_ ~witness:_ = + default_document_for "expr'_MacroInvokation" + + method expr'_Match ~super:_ ~scrutinee ~arms = + string "match" ^^ space ^^ scrutinee#p ^^ space ^^ string "with" + ^^ break 1 + ^^ concat_map (fun x -> string "|" ^^ space ^^ x#p ^^ break 1) arms + ^^ string "end" + + method expr'_QuestionMark ~super:_ ~e:_ ~return_typ:_ ~witness = + match witness with _ -> . + + method expr'_Quote ~super:_ _x2 = default_document_for "expr'_Quote" + method expr'_Return ~super:_ ~e:_ ~witness = match witness with _ -> . 
+ + method cf_kind_BreakOrReturn = + default_document_for "cf_kind_BreakOrReturn" + + method cf_kind_BreakOnly = default_document_for "cf_kind_BreakOnly" + method field_pat ~field ~pat = pat#p + + method generic_constraint_GCLifetime _x1 _x2 = + default_document_for "generic_constraint_GCLifetime" + + method generic_constraint_GCProjection x1 = string "`" ^^ braces x1#p + method generic_constraint_GCType x1 = string "`" ^^ braces x1#p + + method generic_param ~ident ~span:_ ~attrs:_ ~kind = + string "`" ^^ braces (ident#p ^^ space ^^ colon ^^ space ^^ kind#p) + + method generic_param_kind_GPConst ~typ = typ#p + + method generic_param_kind_GPLifetime ~witness = + match witness with _ -> . + + method generic_param_kind_GPType = string "Type" + method generic_value_GConst x1 = x1#p + + method generic_value_GLifetime ~lt:_ ~witness = + match witness with _ -> . + + method generic_value_GType x1 = parens x1#p + + method generics ~params ~constraints = + let params_document = concat_map (fun x -> space ^^ x#p) params in + let constraints_document = + concat_map (fun x -> space ^^ x#p) constraints + in + params_document ^^ constraints_document + + method guard ~guard:_ ~span:_ = default_document_for "guard" + + method guard'_IfLet ~super:_ ~lhs:_ ~rhs:_ ~witness = + match witness with _ -> . 
+ + method impl_expr ~kind:_ ~goal = goal#p + + method impl_expr_kind_Builtin _x1 = + default_document_for "impl_expr_kind_Builtin" + + method impl_expr_kind_Concrete _x1 = + default_document_for "impl_expr_kind_Concrete" + + method impl_expr_kind_Dyn = default_document_for "impl_expr_kind_Dyn" + + method impl_expr_kind_ImplApp ~impl:_ ~args:_ = + default_document_for "impl_expr_kind_ImplApp" + + method impl_expr_kind_LocalBound ~id:_ = + default_document_for "impl_expr_kind_LocalBound" + + method impl_expr_kind_Parent ~impl:_ ~ident:_ = + default_document_for "impl_expr_kind_Parent" + + method impl_expr_kind_Projection ~impl:_ ~item:_ ~ident:_ = + default_document_for "impl_expr_kind_Projection" + + method impl_expr_kind_Self = default_document_for "impl_expr_kind_Self" + method impl_ident ~goal ~name:_ = goal#p + + method impl_item ~ii_span:_ ~ii_generics:_ ~ii_v ~ii_ident ~ii_attrs:_ = + ii_ident#p ^^ space ^^ string ":=" ^^ space ^^ ii_v#p ^^ semi + + method impl_item'_IIFn ~body ~params = + if List.length params == 0 then body#p + else + string "fun" ^^ space + ^^ concat_map (fun x -> x#p ^^ space) params + ^^ string "=>" + ^^ nest 2 (break 1 ^^ body#p) + + method impl_item'_IIType ~typ ~parent_bounds:_ = typ#p + method item ~v ~span:_ ~ident:_ ~attrs:_ = v#p ^^ break 1 + + method item'_Alias ~super:_ ~name ~item = + string "Notation" ^^ space ^^ string "\"'" ^^ name#p ^^ string "'\"" + ^^ space ^^ string ":=" ^^ space ^^ parens item#p ^^ dot + + method item'_Fn ~super ~name ~generics ~body ~params ~safety:_ = + (* TODO: Why is type not available here ? 
*) + let is_rec = + Set.mem + (U.Reducers.collect_concrete_idents#visit_expr () body#v) + name#v + in + let typ = + self#_do_not_override_lazy_of_ty AstPos_item'_Fn_body body#v.typ + in + + let get_expr_of kind f : document option = + Attrs.associated_expr kind super.attrs + |> Option.map ~f:(self#entrypoint_expr >> f) + in + let requires = + get_expr_of Requires (fun x -> + x ^^ space ^^ string "=" ^^ space ^^ string "true") + in + let ensures = + get_expr_of Ensures (fun x -> + x ^^ space ^^ string "=" ^^ space ^^ string "true") + in + + let is_lemma = Attrs.lemma super.attrs in + if is_lemma then + CoqNotation.lemma name#p generics#p + (List.map ~f:(fun x -> x#p) params) + (Option.value ~default:empty requires + ^^ space ^^ !^"->" ^^ break 1 + ^^ Option.value ~default:empty ensures) + else if is_rec then + CoqNotation.fixpoint name#p generics#p + (List.map ~f:(fun x -> x#p) params + @ Option.value ~default:[] + (Option.map ~f:(fun x -> [ string "`" ^^ braces x ]) requires)) + typ#p body#p (* ^^ TODO: ensures? *) + else + CoqNotation.definition name#p generics#p + (List.map ~f:(fun x -> x#p) params + @ Option.value ~default:[] + (Option.map ~f:(fun x -> [ string "`" ^^ braces x ]) requires)) + typ#p body#p (* ^^ TODO: ensures? 
*) + + method item'_HaxError ~super:_ _x2 = default_document_for "item'_HaxError" + + method item'_IMacroInvokation ~super:_ ~macro:_ ~argument:_ ~span:_ + ~witness:_ = + default_document_for "item'_IMacroInvokation" + + method item'_Impl ~super ~generics ~self_ty ~of_trait ~items + ~parent_bounds:_ ~safety:_ = + let name, args = of_trait#v in + CoqNotation.instance + (name#p ^^ string "_" ^^ string (Int.to_string ([%hash: item] super))) + generics#p [] + (name#p ^^ concat_map (fun x -> space ^^ parens x#p) args) + (braces + (nest 2 + (concat_map (fun x -> break 1 ^^ name#p ^^ !^"_" ^^ x#p) items) + ^^ break 1)) + + method item'_NotImplementedYet = string "(* NotImplementedYet *)" + + method item'_Quote ~super:_ ~quote:_ ~origin:_ = + default_document_for "item'_Quote" + + method item'_Trait ~super:_ ~name ~generics ~items ~safety:_ = + let _, params, constraints = generics#v in + CoqNotation.class_ name#p generics#p [] !^"Type" + (braces + (nest 2 (concat_map (fun x -> break 1 ^^ x#p) items) ^^ break 1)) + ^^ break 1 ^^ !^"Arguments" ^^ space ^^ name#p ^^ colon + ^^ !^"clear implicits" ^^ dot ^^ break 1 ^^ !^"Arguments" ^^ space + ^^ name#p + ^^ concat_map (fun _ -> space ^^ !^"(_)") params + ^^ concat_map (fun _ -> space ^^ !^"{_}") constraints + ^^ dot + + method item'_TyAlias ~super:_ ~name ~generics:_ ~ty = + string "Notation" ^^ space ^^ string "\"'" ^^ name#p ^^ string "'\"" + ^^ space ^^ string ":=" ^^ space ^^ ty#p ^^ dot + + method item'_Type_struct ~super:_ ~name ~generics ~tuple_struct:_ + ~arguments = + CoqNotation.record name#p generics#p [] (string "Type") + (braces + (nest 2 + (concat_map + (fun (ident, typ, attr) -> + break 1 ^^ ident#p ^^ space ^^ colon ^^ space ^^ typ#p + ^^ semi) + arguments) + ^^ break 1)) + ^^ break 1 ^^ !^"Arguments" ^^ space ^^ name#p ^^ colon + ^^ !^"clear implicits" ^^ dot ^^ break 1 ^^ !^"Arguments" ^^ space + ^^ name#p + ^^ concat_map (fun _ -> space ^^ !^"(_)") generics#v.params + ^^ concat_map (fun _ -> space ^^ 
!^"{_}") generics#v.constraints + ^^ dot ^^ break 1 ^^ !^"Arguments" ^^ space ^^ !^"Build_" ^^ name#p + ^^ concat_map (fun _ -> space ^^ !^"{_}") generics#v.params + ^^ concat_map (fun _ -> space ^^ !^"{_}") generics#v.constraints + ^^ dot ^^ break 1 ^^ !^"#[export]" ^^ space + ^^ CoqNotation.instance + (string "settable" ^^ string "_" ^^ name#p) + generics#p [] + (!^"Settable" ^^ space ^^ !^"_") + (string "settable!" ^^ space + ^^ parens (!^"@" ^^ !^"Build_" ^^ name#p ^^ generics#p) + ^^ space ^^ string "<" + ^^ separate_map (semi ^^ space) + (fun (ident, typ, attr) -> ident#p) + arguments + ^^ string ">") + + method item'_Type_enum ~super:_ ~name ~generics ~variants = + CoqNotation.inductive name#p generics#p [] (string "Type") + (separate_map (break 1) + (fun x -> string "|" ^^ space ^^ x#p) + variants) + ^^ break 1 ^^ !^"Arguments" ^^ space ^^ name#p ^^ colon + ^^ !^"clear implicits" ^^ dot ^^ break 1 ^^ !^"Arguments" ^^ space + ^^ name#p + ^^ concat_map (fun _ -> space ^^ !^"(_)") generics#v.params + ^^ concat_map (fun _ -> space ^^ !^"{_}") generics#v.constraints + ^^ dot + + method item'_Use ~super:_ ~path ~is_external ~rename:_ = + if List.length path == 0 || is_external then empty + else + let crate = + String.capitalize + (Option.value ~default:"(TODO CRATE)" + (Option.map ~f:fst current_namespace)) + in + let concat_capitalize l = + String.concat ~sep:"_" (List.map ~f:String.capitalize l) + in + let concat_capitalize_include l = + concat_capitalize (List.drop_last_exn l) + ^ " (t_" ^ List.last_exn l ^ ")" + in + let path_string = + match path with + | "crate" :: xs -> concat_capitalize_include (crate :: xs) + | "super" :: xs -> + concat_capitalize + (crate + :: List.drop_last_exn + (Option.value ~default:[] + (Option.map ~f:snd current_namespace)) + @ xs) + | [ a ] -> a + | xs -> concat_capitalize_include xs + in + if String.is_empty path_string then empty + else + string "From" ^^ space ^^ string crate ^^ space + ^^ string "Require Import" ^^ space ^^ 
string path_string ^^ dot + ^^ break 1 ^^ string "Export" ^^ space ^^ string path_string ^^ dot + + method lhs_LhsArbitraryExpr ~e:_ ~witness = match witness with _ -> . + + method lhs_LhsArrayAccessor ~e:_ ~typ:_ ~index:_ ~witness = + match witness with _ -> . + + method lhs_LhsFieldAccessor_field ~e:_ ~typ:_ ~field:_ ~witness = + match witness with _ -> . + + method lhs_LhsFieldAccessor_tuple ~e:_ ~typ:_ ~nth:_ ~size:_ ~witness = + match witness with _ -> . + + method lhs_LhsLocalVar ~var:_ ~typ:_ = + default_document_for "lhs_LhsLocalVar" + + method literal_Bool x1 = string (if x1 then "true" else "false") + + method literal_Char x1 = + string "\"" ^^ string (Char.escaped x1) ^^ string "\"" ^^ string "%char" + + method literal_Float ~value ~negative ~kind:_ = + (if negative then !^"-" else empty) ^^ string value ^^ string "%float" + + method literal_Int ~value ~negative ~kind:_ = + (if negative then !^"-" else empty) ^^ string value + + method literal_String x1 = string "\"" ^^ string x1 ^^ string "\"%string" + + method loop_kind_ForIndexLoop ~start:_ ~end_:_ ~var:_ ~var_typ:_ ~witness + = + default_document_for "loop_kind_ForIndexLoop" + + method loop_kind_ForLoop ~pat ~it ~witness = + braces it#p ^^ space ^^ string "inP?" 
^^ space ^^ brackets pat#p + + method loop_kind_UnconditionalLoop = + default_document_for "loop_kind_UnconditionalLoop" + + method loop_kind_WhileLoop ~condition:_ ~witness:_ = + default_document_for "loop_kind_WhileLoop" + + method loop_state ~init ~bpat ~witness:_ = + parens (init#p ^^ space ^^ !^"state" ^^ space ^^ bpat#p) + + method modul x1 = separate_map (break 1) (fun x -> x#p) x1 + + method param ~pat ~typ ~typ_span:_ ~attrs:_ = + parens (pat#p ^^ space ^^ colon ^^ space ^^ typ#p) + + method pat ~p ~span:_ ~typ:_ = p#p + + method pat'_PAscription ~super:_ ~typ ~typ_span:_ ~pat = + pat#p ^^ space ^^ colon ^^ space ^^ typ#p + + method pat'_PBinding ~super:_ ~mut:_ ~mode:_ ~var ~typ:_ ~subpat:_ = var#p + method pat'_PConstant ~super:_ ~lit = lit#p + + method pat'_PConstruct_inductive ~super:_ ~constructor ~is_record + ~is_struct ~fields = if is_record then - C.AST.InductiveCase - ( U.Concrete_ident_view.to_definition_name name, - C.AST.RecordTy - (pconcrete_ident name, p_record_record span arguments) ) + constructor#p ^^ space + ^^ parens + (separate_map (comma ^^ space) + (fun field_pat -> (snd field_pat)#p) + fields) else - let name = U.Concrete_ident_view.to_definition_name name in - match arguments with - | [] -> C.AST.BaseCase name - | [ (_arg_name, arg_ty, _arg_attrs) ] -> - C.AST.InductiveCase (name, pty span arg_ty) - | _ -> - C.AST.InductiveCase - (name, C.AST.Product (List.map ~f:(snd3 >> pty span) arguments))) - (* match variants with _ -> [] *) - (* TODO: I don't get this pattern maching below. Variant with more than one payloads are rejected implicitely? 
*) - (* | { name; arguments = [ (arg_name, arg_ty) ] } :: xs -> *) - (* if (index_of_field >> Option.is_some) arg_name then *) - (* C.AST.InductiveCase (U.Concrete_ident_view.to_definition_name name, pty span arg_ty) *) - (* :: p_inductive span xs parrent_name *) - (* else *) - (* C.AST.InductiveCase (U.Concrete_ident_view.to_definition_name arg_name, pty span arg_ty) *) - (* :: p_inductive span xs parrent_name *) - (* | { name; arguments = [] } :: xs -> *) - (* C.AST.BaseCase (U.Concrete_ident_view.to_definition_name name) *) - (* :: p_inductive span xs parrent_name *) - (* | { name; arguments } :: xs -> *) - (* C.AST.InductiveCase *) - (* ( U.Concrete_ident_view.to_definition_name name, *) - (* C.AST.RecordTy (pglobal_ident name, p_record_record span arguments) *) - (* ) *) - (* :: p_inductive span xs parrent_name *) - (* | _ -> [] *) - - and p_record_record span arguments : (string * C.AST.ty) list = - List.map - ~f:(function - | arg_name, arg_ty, _arg_attrs -> - (U.Concrete_ident_view.to_definition_name arg_name, pty span arg_ty)) - arguments + (if is_struct then string "Build_t_" else empty) + ^^ constructor#p + ^^ concat_map (fun (ident, exp) -> space ^^ parens exp#p) fields + + method pat'_PConstruct_tuple ~super:_ ~components = + (* TODO: Only add `'` if you are a top-level pattern *) + (* string "'" ^^ *) + parens (separate_map comma (fun x -> x#p) components) + + method pat'_PDeref ~super:_ ~subpat:_ ~witness:_ = + default_document_for "pat'_PDeref" + + method pat'_PWild = string "_" + method printer_name = "Coq printer" + + method projection_predicate ~impl:_ ~assoc_item ~typ = + string "_" (* TODO: name of impl#p *) ^^ dot + ^^ parens assoc_item#p ^^ space ^^ string "=" ^^ space ^^ typ#p + + method safety_kind_Safe = default_document_for "safety_kind_Safe" + method safety_kind_Unsafe _x1 = default_document_for "safety_kind_Unsafe" + + method supported_monads_MException _x1 = + default_document_for "supported_monads_MException" + + method 
supported_monads_MOption = + default_document_for "supported_monads_MOption" + + method supported_monads_MResult _x1 = + default_document_for "supported_monads_MResult" + + method trait_goal ~trait ~args = + trait#p ^^ concat_map (fun x -> space ^^ x#p) args + + method trait_item ~ti_span:_ ~ti_generics ~ti_v ~ti_ident ~ti_attrs:_ = + let _, params, constraints = ti_generics#v in + let generic_params = concat_map (fun x -> space ^^ x#p) params in + let filter_constraints = function + | GCProjection { impl = { goal = { trait; _ }; _ }; _ } -> true + | GCType + { + goal = { trait; args = [ GType (TAssociatedType { item; _ }) ] }; + _; + } -> + Concrete_ident.(item == ti_ident#v) + | _ -> true + in + let generic_constraints_other = + concat_map + (fun x -> space ^^ self#entrypoint_generic_constraint x) + (List.filter ~f:filter_constraints + (List.map ~f:(fun x -> x#v) constraints)) + in + let generic_constraints_self = + concat_map + (fun x -> + break 1 ^^ string "_" ^^ space ^^ string "::" ^^ space + ^^ self#entrypoint_generic_constraint x + ^^ semi) + (List.filter + ~f:(fun x -> not (filter_constraints x)) + (List.map ~f:(fun x -> x#v) constraints)) + in + ti_ident#p ^^ generic_params ^^ generic_constraints_other ^^ space + ^^ (match ti_v#v with TIDefault _ -> string ":=" | _ -> colon) + ^^ space ^^ ti_v#p ^^ semi ^^ generic_constraints_self + + method trait_item'_TIDefault ~params ~body ~witness:_ = + (if List.is_empty params then empty + else + string "fun" ^^ space + ^^ separate_map space (fun x -> x#p) params + ^^ space ^^ string "=>") + ^^ nest 2 (break 1 ^^ body#p) + + method trait_item'_TIFn x1 = x1#p + method trait_item'_TIType x1 = string "Type" + + method ty_TApp_application ~typ ~generics = + typ#p ^^ concat_map (fun x -> space ^^ parens x#p) generics + + method ty_TApp_tuple ~types = + if List.length types == 0 then string "unit" + else parens (separate_map star (fun x -> self#entrypoint_ty x) types) + + method ty_TArray ~typ ~length = + string "t_Array" 
^^ space ^^ parens typ#p ^^ space ^^ parens length#p + + method ty_TArrow x1 x2 = + concat_map (fun x -> x#p ^^ space ^^ string "->" ^^ space) x1 ^^ x2#p + + method ty_TAssociatedType ~impl:_ ~item = item#p + method ty_TBool = string "bool" + method ty_TChar = string "ascii" + method ty_TDyn ~witness:_ ~goals:_ = default_document_for "ty_TDyn" + method ty_TFloat _x1 = string "float" + + method ty_TInt x1 = + string "t_" + ^^ + match x1 with + | { size; signedness } -> ( + (match signedness with + | Unsigned -> string "u" + | Signed -> string "i") + ^^ + match size with + | S8 -> string "8" + | S16 -> string "16" + | S32 -> string "32" + | S64 -> string "64" + | S128 -> string "128" + | SSize -> string "size") + + method ty_TOpaque x1 = x1#p + method ty_TParam x1 = x1#p + method ty_TRawPointer ~witness:_ = default_document_for "ty_TRawPointer" + + method ty_TRef ~witness:_ ~region:_ ~typ:_ ~mut:_ = + default_document_for "ty_TRef" + + method ty_TSlice ~witness:_ ~ty = !^"t_Slice" ^^ space ^^ ty#p + method ty_TStr = string "string" + + method item'_Enum_Variant ~name ~arguments ~is_record ~attrs:_ = + if is_record then + concat_map + (fun (ident, typ, attr) -> + ident#p ^^ space ^^ colon ^^ space ^^ typ#p) + arguments + ^^ semi + else if List.length arguments == 0 then name#p + else + name#p ^^ space ^^ colon ^^ space + ^^ separate_map + (space ^^ string "->" ^^ space) + (fun (ident, typ, attr) -> typ#p) + arguments + ^^ space ^^ string "->" ^^ space ^^ string "_" + + method module_path_separator = "." 
+ + method concrete_ident ~local:_ id : document = + string + (match id.definition with + | "not" -> "negb" + | "eq" -> "t_PartialEq_f_eq" + | "lt" -> "t_PartialOrd_f_lt" + | "gt" -> "t_PartialOrd_f_gt" + | "le" -> "t_PartialOrd_f_le" + | "ge" -> "t_PartialOrd_f_ge" + | "rem" -> "t_Rem_f_rem" + | "add" -> "t_Add_f_add" + | "mul" -> "t_Mul_f_mul" + | "div" -> "t_Div_f_div" + | x -> x) + end + + let new_printer : BasePrinter.finalized_printer = + BasePrinter.finalize (fun () -> (new printer :> BasePrinter.printer)) end module type S = sig - val pitem : item -> C.AST.decl list + val new_printer : BasePrinter.finalized_printer end -let make ctx = - (module Make (struct - let ctx = ctx - end) : S) - -let string_of_item (item : item) : string = - let (module Print) = - make { current_namespace = U.Concrete_ident_view.to_namespace item.ident } - in - List.map ~f:C.decl_to_string @@ Print.pitem item |> String.concat ~sep:"\n" - -let string_of_items : AST.item list -> string = - List.map ~f:string_of_item >> List.map ~f:String.strip - >> List.filter ~f:(String.is_empty >> not) - >> String.concat ~sep:"\n\n" - -let hardcoded_coq_headers = - "(* File automatically generated by Hacspec *)\n\ - From Hacspec Require Import Hacspec_Lib MachineIntegers.\n\ - From Coq Require Import ZArith.\n\ - Import List.ListNotations.\n\ - Open Scope Z_scope.\n\ - Open Scope bool_scope.\n" +let make (module M : Attrs.WITH_ITEMS) = + let open ( + Make + (struct + let default x = x + end) + (M) : + S) in + new_printer -let translate _ (_bo : BackendOptions.t) ~(bundles : AST.item list list) - (items : AST.item list) : Types.file list = +let translate m _ ~bundles:_ (items : AST.item list) : Types.file list = + let my_printer = make m in U.group_items_by_namespace items |> Map.to_alist |> List.map ~f:(fun (ns, items) -> @@ -688,12 +766,12 @@ let translate _ (_bo : BackendOptions.t) ~(bundles : AST.item list list) ~f:(map_first_letter String.uppercase) (fst ns :: snd ns)) in - + let contents, 
_annotations = my_printer#entrypoint_modul items in Types. { path = mod_name ^ ".v"; - contents = - hardcoded_coq_headers ^ "\n" ^ string_of_items items ^ "\n"; + contents = hardcoded_coq_headers ^ "\n" ^ contents; + sourcemap = None; }) open Phase_utils @@ -722,6 +800,7 @@ module TransformToInputLanguage = |> Phases.Reject.As_pattern |> Phases.Reject.Dyn |> Phases.Reject.Trait_item_default + |> Phases.Bundle_cycles |> SubtypeToInputLanguage |> Identity ] diff --git a/engine/backends/coq/ssprove/ssprove_backend.ml b/engine/backends/coq/ssprove/ssprove_backend.ml index 6db7387c9..54393f0dd 100644 --- a/engine/backends/coq/ssprove/ssprove_backend.ml +++ b/engine/backends/coq/ssprove/ssprove_backend.ml @@ -18,6 +18,7 @@ include include On.While_loop include On.For_index_loop include On.State_passing_loop + include On.Fold_like_loop end) (struct let backend = Diagnostics.Backend.SSProve @@ -63,6 +64,7 @@ struct include Features.SUBTYPE.On.While_loop include Features.SUBTYPE.On.For_index_loop include Features.SUBTYPE.On.State_passing_loop + include Features.SUBTYPE.On.Fold_like_loop end) let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend)) @@ -589,6 +591,7 @@ module TransformToInputLanguage (* : PHASE *) = |> Phases.Reject.As_pattern |> Phases.Reject.Dyn |> Phases.Reject.Trait_item_default + |> Phases.Bundle_cycles |> SubtypeToInputLanguage |> Identity ] @@ -792,25 +795,26 @@ struct SSP.AST.Ident (plocal_ident var) (* TODO Mutable binding ! *) | POr { subpats } -> SSP.AST.DisjunctivePat (List.map ~f:ppat subpats) | PArray _ -> __TODO_pat__ p.span "Parray?" 
- | PConstruct { name = `TupleCons 0; args = []; _ } -> + | PConstruct { constructor = `TupleCons 0; fields = []; _ } -> SSP.AST.WildPat (* UnitPat *) - | PConstruct { name = `TupleCons 1; args = [ _ ]; _ } -> + | PConstruct { constructor = `TupleCons 1; fields = [ _ ]; _ } -> __TODO_pat__ p.span "tuple 1" - | PConstruct { name = `TupleCons _n; args; _ } -> - SSP.AST.TuplePat (List.map ~f:(fun { pat; _ } -> ppat pat) args) + | PConstruct { constructor = `TupleCons _n; fields; _ } -> + SSP.AST.TuplePat (List.map ~f:(fun { pat; _ } -> ppat pat) fields) (* Record *) | PConstruct { is_record = true; _ } -> __TODO_pat__ p.span "record pattern" - (* (\* SSP.AST.Ident (pglobal_ident name) *\) *) - (* SSP.AST.RecordPat (pglobal_ident name, List.map ~f:(fun {field; pat} -> (pglobal_ident field, ppat pat)) args) *) - (* (\* SSP.AST.ConstructorPat (pglobal_ident name ^ "_case", [SSP.AST.Ident "temp"]) *\) *) - (* (\* List.map ~f:(fun {field; pat} -> (pat, SSP.AST.App (SSP.AST.Var (pglobal_ident field), [SSP.AST.Var "temp"]))) args *\) *) + (* (\* SSP.AST.Ident (pglobal_ident constructor) *\) *) + (* SSP.AST.RecordPat (pglobal_ident constructor, List.map ~f:(fun {field; pat} -> (pglobal_ident field, ppat pat)) fields) *) + (* (\* SSP.AST.ConstructorPat (pglobal_ident constructor ^ "_case", [SSP.AST.Ident "temp"]) *\) *) + (* (\* List.map ~f:(fun {field; pat} -> (pat, SSP.AST.App (SSP.AST.Var (pglobal_ident field), [SSP.AST.Var "temp"]))) fields *\) *) (* Enum *) - | PConstruct { name; args; is_record = false; _ } -> + | PConstruct { constructor; fields; is_record = false; _ } -> SSP.AST.ConstructorPat - ( pglobal_ident name, - match args with + ( pglobal_ident constructor, + match fields with | [] -> [] - | _ -> [ SSP.AST.TuplePat (List.map ~f:(fun p -> ppat p.pat) args) ] + | _ -> + [ SSP.AST.TuplePat (List.map ~f:(fun p -> ppat p.pat) fields) ] ) | PConstant { lit } -> SSP.AST.Lit (pliteral lit) | _ -> . 
@@ -959,7 +963,7 @@ struct p = PConstruct { - args = [ { pat; _ } ]; + fields = [ { pat; _ } ]; is_record = false; is_struct = true; _; @@ -990,16 +994,21 @@ struct ~f:(fun { arm = { arm_pat; body }; _ } -> match arm_pat.p with | PConstruct - { name; args; is_record = false; is_struct = false } -> ( + { + constructor; + fields; + is_record = false; + is_struct = false; + } -> ( let arg_tuple = SSP.AST.TuplePat - (List.map ~f:(fun p -> ppat p.pat) args) + (List.map ~f:(fun p -> ppat p.pat) fields) in ( SSP.AST.ConstructorPat - ( pglobal_ident name ^ "_case", - match args with [] -> [] | _ -> [ arg_tuple ] ), + ( pglobal_ident constructor ^ "_case", + match fields with [] -> [] | _ -> [ arg_tuple ] ), match - (args, SSPExtraDefinitions.pat_as_expr arg_tuple) + (fields, SSPExtraDefinitions.pat_as_expr arg_tuple) with | _ :: _, Some (redefine_pat, redefine_expr) -> SSPExtraDefinitions.letb @@ -1016,14 +1025,14 @@ struct (List.map ~f:(fun x -> pty arm_pat.span x.pat.typ) - args) ); + fields) ); ] ); body = (pexpr env true) body; value_typ = SSP.AST.Product (List.map ~f:(fun x -> pty arm_pat.span x.pat.typ) - args); + fields); monad_typ = None; } | _, _ -> (pexpr env true) body )) @@ -1103,8 +1112,8 @@ struct p = PConstruct { - name = `TupleCons 0; - args = []; + constructor = `TupleCons 0; + fields = []; is_record = false; is_struct = false; }; @@ -1117,6 +1126,7 @@ struct }; label; witness; + control_flow = None; }; typ = e.typ; span = e.span; @@ -1299,9 +1309,9 @@ struct match pat.p with | PWild -> false | PAscription { pat; _ } -> is_mutable_pat pat - | PConstruct { name = `TupleCons _; args; _ } -> + | PConstruct { constructor = `TupleCons _; fields; _ } -> List.fold ~init:false ~f:( || ) - (List.map ~f:(fun p -> is_mutable_pat p.pat) args) + (List.map ~f:(fun p -> is_mutable_pat p.pat) fields) | PConstruct _ -> false | PArray _ -> (* List.fold ~init:false ~f:(||) (List.map ~f:(fun p -> is_mutable_pat p) args) *) @@ -1793,7 +1803,7 @@ struct items @ [ 
SSP.AST.ProgramInstance - ( pglobal_ident name, + ( pconcrete_ident name, pgeneric span generics, pty span self_ty, args_ty span gen_vals, @@ -1859,7 +1869,9 @@ struct ]) items) ); ] - @ [ SSP.AST.HintUnfold (pglobal_ident name, Some (pty span self_ty)) ] + @ [ + SSP.AST.HintUnfold (pconcrete_ident name, Some (pty span self_ty)); + ] in decls_from_item @@ -2422,7 +2434,8 @@ let translate _ (_bo : BackendOptions.t) ~(bundles : AST.item list list) ^ "\n" in - Types.{ path = mod_name ^ ".v"; contents = file_content }) + Types. + { path = mod_name ^ ".v"; contents = file_content; sourcemap = None }) let apply_phases (_bo : BackendOptions.t) (i : Ast.Rust.item list) : AST.item list = diff --git a/engine/backends/easycrypt/easycrypt_backend.ml b/engine/backends/easycrypt/easycrypt_backend.ml index ac3c74f50..c0739d2b7 100644 --- a/engine/backends/easycrypt/easycrypt_backend.ml +++ b/engine/backends/easycrypt/easycrypt_backend.ml @@ -53,6 +53,7 @@ module RejectNotEC (FA : Features.T) = struct let monadic_binding = reject let arbitrary_lhs = reject let state_passing_loop = reject + let fold_like_loop = reject let nontrivial_lhs = reject let block = reject let for_loop = reject @@ -351,7 +352,8 @@ module TransformToInputLanguage = Phases.Reject.RawOrMutPointer Features.Rust |> Phases.Reject.Unsafe |> Phases.And_mut_defsite |> Phases.Reconstruct_asserts |> Phases.Reconstruct_for_loops |> Phases.Direct_and_mut |> Phases.Drop_blocks -|> Phases.Reject.Continue |> Phases.Drop_references |> RejectNotEC] +|> Phases.Reject.Continue |> Phases.Drop_references |> Phases.Bundle_cycles +|> RejectNotEC] let apply_phases (_bo : BackendOptions.t) (items : Ast.Rust.item list) : AST.item list = diff --git a/engine/backends/fstar/fstar_backend.ml b/engine/backends/fstar/fstar_backend.ml index 2ae37c6dd..845a90854 100644 --- a/engine/backends/fstar/fstar_backend.ml +++ b/engine/backends/fstar/fstar_backend.ml @@ -42,6 +42,7 @@ module SubtypeToInputLanguage and type while_loop = 
Features.Off.while_loop and type for_index_loop = Features.Off.for_index_loop and type state_passing_loop = Features.Off.state_passing_loop + and type fold_like_loop = Features.Off.fold_like_loop and type match_guard = Features.Off.match_guard and type trait_item_default = Features.Off.trait_item_default) = struct @@ -163,7 +164,8 @@ struct ( (match signedness with Signed -> Signed | Unsigned -> Unsigned), size ) ) | Float _ -> - Error.unimplemented ~issue_id:230 ~details:"pliteral: Float" span + Error.unimplemented ~issue_id:464 + ~details:"Matching on f32 or f64 literals is not yet supported." span | Bool b -> F.Const.Const_bool b let pliteral_as_expr span (e : literal) = @@ -178,6 +180,13 @@ struct | Int { value; kind = { size = S128; signedness = sn }; negative } -> let prefix = match sn with Signed -> "i" | Unsigned -> "u" in wrap_app ("pub_" ^ prefix ^ "128") value negative + | Float { value; negative; _ } -> + F.mk_e_app + (F.term_of_lid [ "mk_float" ]) + [ + mk_const + (F.Const.Const_string (pnegative negative ^ value, F.dummyRange)); + ] | _ -> mk_const @@ pliteral span e let pconcrete_ident (id : concrete_ident) = @@ -309,7 +318,7 @@ struct F.mk_e_app base args | TArrow (inputs, output) -> F.mk_e_arrow (List.map ~f:(pty span) inputs) (pty span output) - | TFloat _ -> Error.unimplemented ~issue_id:230 ~details:"pty: Float" span + | TFloat _ -> F.term_of_lid [ "float" ] | TArray { typ; length } -> F.mk_e_app (F.term_of_lid [ "t_Array" ]) [ pty span typ; pexpr length ] | TParam i -> F.term @@ F.AST.Var (F.lid_of_id @@ plocal_ident i) @@ -409,23 +418,26 @@ struct "Nested disjuntive patterns should have been eliminated by phase \ `HoistDisjunctions` (see PR #830)." 
| PArray { args } -> F.pat @@ F.AST.PatList (List.map ~f:ppat args) - | PConstruct { name = `TupleCons 0; args = [] } -> + | PConstruct { constructor = `TupleCons 0; fields = [] } -> F.pat @@ F.AST.PatConst F.Const.Const_unit - | PConstruct { name = `TupleCons 1; args = [ { pat } ] } -> ppat pat - | PConstruct { name = `TupleCons n; args } -> + | PConstruct { constructor = `TupleCons 1; fields = [ { pat } ] } -> + ppat pat + | PConstruct { constructor = `TupleCons n; fields } -> F.pat - @@ F.AST.PatTuple (List.map ~f:(fun { pat } -> ppat pat) args, false) - | PConstruct { name; args; is_record; is_struct } -> + @@ F.AST.PatTuple (List.map ~f:(fun { pat } -> ppat pat) fields, false) + | PConstruct { constructor; fields; is_record; is_struct } -> let pat_rec () = - F.pat @@ F.AST.PatRecord (List.map ~f:pfield_pat args) + F.pat @@ F.AST.PatRecord (List.map ~f:pfield_pat fields) in if is_struct && is_record then pat_rec () else - let pat_name = F.pat @@ F.AST.PatName (pglobal_ident p.span name) in + let pat_name = + F.pat @@ F.AST.PatName (pglobal_ident p.span constructor) + in F.pat_app pat_name @@ if is_record then [ pat_rec () ] - else List.map ~f:(fun { field; pat } -> ppat pat) args + else List.map ~f:(fun { field; pat } -> ppat pat) fields | PConstant { lit } -> F.pat @@ F.AST.PatConst (pliteral p.span lit) | _ -> . 
@@ -1416,7 +1428,7 @@ struct in let typ = F.mk_e_app - (F.term @@ F.AST.Name (pglobal_ident e.span trait)) + (F.term @@ F.AST.Name (pconcrete_ident trait)) (List.map ~f:(pgeneric_value e.span) generic_args) in let pat = F.pat @@ F.AST.PatAscribed (pat, (typ, None)) in @@ -1462,7 +1474,7 @@ struct let tcinst = F.term @@ F.AST.Var FStar_Parser_Const.tcinstance_lid in F.decls ~fsti:ctx.interface_mode ~attrs:[ tcinst ] @@ F.AST.TopLevelLet (NoLetQualifier, [ (pat, body) ]) - | Quote quote -> + | Quote { quote; _ } -> let fstar_opts = Attrs.find_unique_attr e.attrs ~f:(function | ItemQuote q -> Some q.fstar_options @@ -1678,7 +1690,8 @@ let fstar_headers (bo : BackendOptions.t) = in [ opts; "open Core"; "open FStar.Mul" ] |> String.concat ~sep:"\n" -let translate m (bo : BackendOptions.t) ~(bundles : AST.item list list) +(** Translate as F* (the "legacy" printer) *) +let translate_as_fstar m (bo : BackendOptions.t) ~(bundles : AST.item list list) (items : AST.item list) : Types.file list = let show_view Concrete_ident.{ crate; path; definition } = crate :: (path @ [ definition ]) |> String.concat ~sep:"::" @@ -1710,11 +1723,19 @@ let translate m (bo : BackendOptions.t) ~(bundles : AST.item list list) contents = "module " ^ mod_name ^ "\n" ^ fstar_headers bo ^ "\n\n" ^ body ^ "\n"; + sourcemap = None; } in List.filter_map ~f:Fn.id [ make ~ext:"fst" impl; make ~ext:"fsti" intf ]) +let translate = + if + Sys.getenv "HAX_ENGINE_EXPERIMENTAL_RUST_PRINTER_INSTEAD_OF_FSTAR" + |> Option.is_some + then failwith "todo" + else translate_as_fstar + open Phase_utils module DepGraphR = Dependencies.Make (Features.Rust) @@ -1738,18 +1759,17 @@ module TransformToInputLanguage = |> Side_effect_utils.Hoist |> Phases.Hoist_disjunctive_patterns |> Phases.Simplify_match_return - |> Phases.Rewrite_control_flow - |> Phases.Drop_needless_returns |> Phases.Local_mutation - |> Phases.Reject.Continue - |> Phases.Cf_into_monads - |> Phases.Reject.EarlyExit + |> Phases.Rewrite_control_flow + |> 
Phases.Drop_return_break_continue |> Phases.Functionalize_loops + |> Phases.Reject.Question_mark |> Phases.Reject.As_pattern |> Phases.Traits_specs |> Phases.Simplify_hoisting |> Phases.Newtype_as_refinement |> Phases.Reject.Trait_item_default + |> Phases.Bundle_cycles |> SubtypeToInputLanguage |> Identity ] diff --git a/engine/backends/proverif/proverif_backend.ml b/engine/backends/proverif/proverif_backend.ml index f66a1ea14..2be7b4348 100644 --- a/engine/backends/proverif/proverif_backend.ml +++ b/engine/backends/proverif/proverif_backend.ml @@ -75,6 +75,7 @@ struct let lifetime = reject let monadic_action = reject let monadic_binding = reject + let fold_like_loop = reject let block = reject let dyn = reject let match_guard = reject @@ -129,22 +130,14 @@ module type MAKE = sig module Letfuns : sig val print : item list -> string end - - module Processes : sig - val print : item list -> string - end - - module Toplevel : sig - val print : item list -> string - end end module Make (Options : OPTS) : MAKE = struct module Print = struct module GenericPrint = - Generic_printer.Make (InputLanguage) (U.Concrete_ident_view) + Deprecated_generic_printer.Make (InputLanguage) (U.Concrete_ident_view) - open Generic_printer_base.Make (InputLanguage) + open Deprecated_generic_printer_base.Make (InputLanguage) open PPrint let iblock f = group >> jump 2 0 >> terminate (break 0) >> f >> group @@ -242,8 +235,8 @@ module Make (Options : OPTS) : MAKE = struct let body = print#expr_at Arm_body body in match arm_pat with | { p = PWild; _ } -> body - | { p = PConstruct { name; _ } } - when Global_ident.eq_name Core__result__Result__Err name -> + | { p = PConstruct { constructor; _ } } + when Global_ident.eq_name Core__result__Result__Err constructor -> print#pv_letfun_call (print#error_letfun_name body_typ) [] | _ -> let pat = @@ -264,7 +257,8 @@ module Make (Options : OPTS) : MAKE = struct method typed_wildcard = print#wildcard ^^ string ": bitstring" - method tuple_elem_pat' : 
Generic_printer_base.par_state -> pat' fn = + method tuple_elem_pat' + : Deprecated_generic_printer_base.par_state -> pat' fn = fun ctx -> let wrap_parens = group @@ -277,14 +271,15 @@ module Make (Options : OPTS) : MAKE = struct p ^^ colon ^^ space ^^ print#ty ctx typ | p -> print#pat' ctx p - method tuple_elem_pat : Generic_printer_base.par_state -> pat fn = + method tuple_elem_pat + : Deprecated_generic_printer_base.par_state -> pat fn = fun ctx { p; span; _ } -> print#with_span ~span (fun _ -> print#tuple_elem_pat' ctx p) method tuple_elem_pat_at = print#par_state >> print#tuple_elem_pat (* Overridden methods *) - method! pat' : Generic_printer_base.par_state -> pat' fn = + method! pat' : Deprecated_generic_printer_base.par_state -> pat' fn = fun ctx -> let wrap_parens = group @@ -294,16 +289,18 @@ module Make (Options : OPTS) : MAKE = struct fun pat -> match pat with | PConstant { lit } -> string "=" ^^ print#literal Pat lit - | PConstruct { name; args } - when Global_ident.eq_name Core__option__Option__None name -> + | PConstruct { constructor; fields } + when Global_ident.eq_name Core__option__Option__None constructor + -> string "None()" - | PConstruct { name; args } + | PConstruct { constructor; fields } (* The `Some` constructor in ProVerif expects a bitstring argument, so we use the appropriate `_to_bitstring` type converter on the inner expression. *) - when Global_ident.eq_name Core__option__Option__Some name -> - let inner_field = List.hd_exn args in + when Global_ident.eq_name Core__option__Option__Some constructor + -> + let inner_field = List.hd_exn fields in let inner_field_type_doc = print#ty AlreadyPar inner_field.pat.typ in @@ -320,21 +317,23 @@ module Make (Options : OPTS) : MAKE = struct ^^ iblock parens inner_field_doc) in string "Some" ^^ inner_block - | PConstruct { name; args } + | PConstruct { constructor; fields } (* We replace applications of the `Ok` constructor with their contents. 
*) - when Global_ident.eq_name Core__result__Result__Ok name -> - let inner_field = List.hd_exn args in + when Global_ident.eq_name Core__result__Result__Ok constructor + -> + let inner_field = List.hd_exn fields in let inner_field_type_doc = print#ty AlreadyPar inner_field.pat.typ in let inner_field_doc = print#pat ctx inner_field.pat in inner_field_doc - | PConstruct { name; args } -> ( + | PConstruct { constructor; fields } -> ( match - translate_known_name name ~dict:library_constructor_patterns + translate_known_name constructor + ~dict:library_constructor_patterns with - | Some (_, translation) -> translation args + | Some (_, translation) -> translation fields | None -> super#pat' ctx pat) | PWild -> print#typed_wildcard @@ -344,7 +343,8 @@ module Make (Options : OPTS) : MAKE = struct method! ty_bool = string "bool" method! ty_int _ = string "nat" - method! pat_at : Generic_printer_base.ast_position -> pat fn = + method! pat_at : Deprecated_generic_printer_base.ast_position -> pat fn + = fun pos pat -> match pat with | { p = PWild } -> ( @@ -374,7 +374,7 @@ module Make (Options : OPTS) : MAKE = struct in f ^^ iblock parens args - method! expr' : Generic_printer_base.par_state -> expr' fn = + method! 
expr' : Deprecated_generic_printer_base.par_state -> expr' fn = fun ctx e -> let wrap_parens = group @@ -611,24 +611,31 @@ module Make (Options : OPTS) : MAKE = struct (string "let x = construct_fail() in " ^^ print#default_value type_name_doc) in - - if is_struct then - let struct_constructor = List.hd variants in - match struct_constructor with - | None -> empty - | Some constructor -> - type_line ^^ hardline ^^ to_bitstring_converter_line - ^^ hardline ^^ from_bitstring_converter_line ^^ hardline - ^^ default_line ^^ hardline ^^ err_line ^^ hardline - ^^ fun_and_reduc name constructor - else + let default_lines = type_line ^^ hardline ^^ to_bitstring_converter_line ^^ hardline ^^ from_bitstring_converter_line ^^ hardline ^^ default_line ^^ hardline ^^ err_line ^^ hardline - ^^ separate_map hardline - (fun variant -> fun_and_reduc name variant) - variants - | Quote quote -> print#quote quote + in + let destructor_lines = + if is_struct then + let struct_constructor = List.hd variants in + match struct_constructor with + | None -> empty + | Some constructor -> fun_and_reduc name constructor + else + separate_map hardline + (fun variant -> fun_and_reduc name variant) + variants + in + if + Attrs.find_unique_attr item.attrs + ~f: + ([%eq: Types.ha_payload] OpaqueType + >> Fn.flip Option.some_if ()) + |> Option.is_some + then default_lines + else default_lines ^^ destructor_lines + | Quote { quote; _ } -> print#quote quote | _ -> empty method! expr_let : lhs:pat -> rhs:expr -> expr fn = @@ -720,7 +727,7 @@ module Make (Options : OPTS) : MAKE = struct | _ -> super#expr ctx e (*This cannot happen*)) | _ -> super#expr ctx e) - method! ty : Generic_printer_base.par_state -> ty fn = + method! ty : Deprecated_generic_printer_base.par_state -> ty fn = fun ctx ty -> match ty with | TBool -> print#ty_bool @@ -759,7 +766,11 @@ module Make (Options : OPTS) : MAKE = struct end let filter_crate_functions (items : AST.item list) = - List.filter ~f:(fun item -> [%matches? 
Fn _] item.v) items + List.filter + ~f:(fun item -> + [%matches? Fn _] item.v + || [%matches? Quote { origin = { item_kind = `Fn; _ }; _ }] item.v) + items let is_process_read : attrs -> bool = Attr_payloads.payloads @@ -811,7 +822,8 @@ module Make (Options : OPTS) : MAKE = struct letfun bitstring_err() = let x = construct_fail() in \ bitstring_default().\n\n\ letfun nat_default() = 0.\n\ - fun nat_to_bitstring(nat): bitstring.\n\n\ + fun nat_to_bitstring(nat): bitstring.\n\ + letfun nat_err() = let x = construct_fail() in nat_default().\n\n\ letfun bool_default() = false.\n" let contents items = "" @@ -822,7 +834,11 @@ module Make (Options : OPTS) : MAKE = struct let preamble items = "" let filter_data_types items = - List.filter ~f:(fun item -> [%matches? Type _] item.v) items + List.filter + ~f:(fun item -> + [%matches? Type _] item.v + || [%matches? Quote { origin = { item_kind = `Type; _ }; _ }] item.v) + items let contents items = let contents, _ = Print.items NoAuxInfo (filter_data_types items) in @@ -845,24 +861,6 @@ module Make (Options : OPTS) : MAKE = struct in pure_letfuns_print ^ process_letfuns_print end) - - module Processes = MkSubprinter (struct - let banner = "Processes" - let preamble items = "" - let process_filter item = [%matches? 
Fn _] item.v && is_process item - - let contents items = - let contents, _ = - Print.items NoAuxInfo (List.filter ~f:process_filter items) - in - contents - end) - - module Toplevel = MkSubprinter (struct - let banner = "Top-level process" - let preamble items = "process\n 0\n" - let contents items = "" - end) end let translate m (bo : BackendOptions.t) ~(bundles : AST.item list list) @@ -874,14 +872,11 @@ let translate m (bo : BackendOptions.t) ~(bundles : AST.item list list) in let lib_contents = M.Preamble.print items ^ M.DataTypes.print items ^ M.Letfuns.print items - ^ M.Processes.print items in - let analysis_contents = M.Toplevel.print items in - let lib_file = Types.{ path = "lib.pvl"; contents = lib_contents } in - let analysis_file = - Types.{ path = "analysis.pv"; contents = analysis_contents } + let lib_file = + Types.{ path = "lib.pvl"; contents = lib_contents; sourcemap = None } in - [ lib_file; analysis_file ] + [ lib_file ] open Phase_utils module DepGraph = Dependencies.Make (InputLanguage) @@ -902,10 +897,10 @@ module TransformToInputLanguage = |> Phases.Trivialize_assign_lhs |> Side_effect_utils.Hoist |> Phases.Simplify_match_return - |> Phases.Drop_needless_returns |> Phases.Local_mutation |> Phases.Reject.Continue |> Phases.Reject.Dyn + |> Phases.Bundle_cycles |> SubtypeToInputLanguage |> Identity ] diff --git a/engine/bin/lib.ml b/engine/bin/lib.ml index efa2d60ed..e75c294d2 100644 --- a/engine/bin/lib.ml +++ b/engine/bin/lib.ml @@ -99,7 +99,10 @@ let run (options : Types.engine_options) : Types.output = let include_clauses = options.backend.translation_options.include_namespaces in - let items = import_thir_items include_clauses options.input in + let items = + Profiling.profile ThirImport (List.length options.input) (fun _ -> + import_thir_items include_clauses options.input) + in let items = if options.backend.extract_type_aliases then items else @@ -112,9 +115,10 @@ let run (options : Types.engine_options) : Types.output = ([%show: 
Diagnostics.Backend.t] M.backend)); let items = apply_phases backend_options items in let with_items = Attrs.with_items items in - let module DepGraph = Dependencies.Make (InputLanguage) in - let items = DepGraph.bundle_cyclic_modules items in - let bundles, _ = DepGraph.recursive_bundles items in + let bundles, _ = + let module DepGraph = Dependencies.Make (InputLanguage) in + DepGraph.recursive_bundles items + in let items = List.filter items ~f:(fun (i : AST.item) -> Attrs.late_skip i.attrs |> not) @@ -122,7 +126,10 @@ let run (options : Types.engine_options) : Types.output = Logs.info (fun m -> m "Translating items with backend %s" ([%show: Diagnostics.Backend.t] M.backend)); - let items = translate with_items backend_options items ~bundles in + let items = + Profiling.profile (Backend M.backend) (List.length items) (fun _ -> + translate with_items backend_options items ~bundles) + in items in let diagnostics, files = @@ -140,11 +147,53 @@ let run (options : Types.engine_options) : Types.output = debug_json = None; } +(** Shallow parses a `id_table::Node` (or a raw `T`) JSON *) +let parse_id_table_node (json : Yojson.Safe.t) : + (int64 * Yojson.Safe.t) list * Yojson.Safe.t = + let expect_uint64 = function + | `Intlit str -> Some (Int64.of_string str) + | `Int id -> Some (Int.to_int64 id) + | _ -> None + in + let table, value = + match json with + | `List [ table; value ] -> (table, value) + | _ -> failwith "parse_id_table_node: expected a tuple at top-level" + in + let table = + match table with + | `List json_list -> json_list + | _ -> failwith "parse_id_table_node: `map` is supposed to be a list" + in + let table = + List.map + ~f:(function + | `List [ id; `Assoc [ (_, contents) ] ] -> + let id = + expect_uint64 id + |> Option.value_exn + ~message:"parse_id_table_node: id: expected int64" + in + (id, contents) + | _ -> failwith "parse_id_table_node: expected a list of size two") + table + in + (table, value) + +let parse_options () = + let table, json = + 
Hax_io.read_json () |> Option.value_exn |> parse_id_table_node + in + table + |> List.iter ~f:(fun (id, json) -> + Hashtbl.add_exn Types.cache_map ~key:id ~data:(`JSON json)); + let options = Types.parse_engine_options json in + Profiling.enabled := options.backend.profile; + options + (** Entrypoint of the engine. Assumes `Hax_io.init` was called. *) let main () = - let options = - Hax_io.read_json () |> Option.value_exn |> Types.parse_engine_options - in + let options = Profiling.profile (Other "parse_options") 1 parse_options in Printexc.record_backtrace true; let result = try Ok (run options) with diff --git a/engine/lib/ast.ml b/engine/lib/ast.ml index 684001400..7b9564869 100644 --- a/engine/lib/ast.ml +++ b/engine/lib/ast.ml @@ -99,6 +99,29 @@ type literal = type 'mut_witness mutability = Mutable of 'mut_witness | Immutable [@@deriving show, yojson, hash, compare, sexp, hash, eq] +type item_kind = + [ `Fn + | `TyAlias + | `Type + | `IMacroInvokation + | `Trait + | `Impl + | `Alias + | `Use + | `Quote + | `HaxError + | `NotImplementedYet ] +[@@deriving show, yojson, hash, compare, sexp, hash, eq] +(** Describes the (shallow) kind of an item. *) + +type item_quote_origin = { + item_kind : item_kind; + item_ident : concrete_ident; + position : [ `Before | `After | `Replace ]; +} +[@@deriving show, yojson, hash, compare, sexp, hash, eq] +(** From where does a quote item comes from? *) + module Make = functor (F : Features.T) @@ -196,10 +219,10 @@ functor | PWild | PAscription of { typ : ty; typ_span : span; pat : pat } | PConstruct of { - name : global_ident; - args : field_pat list; + constructor : global_ident; is_record : bool; (* are fields named? *) is_struct : bool; (* a struct has one constructor *) + fields : field_pat list; } (* An or-pattern, e.g. `p | q`. Invariant: `List.length subpats >= 2`. 
*) @@ -218,6 +241,13 @@ functor and pat = { p : pat'; span : span; typ : ty } and field_pat = { field : global_ident; pat : pat } + (* This marker describes what control flow is present in a loop. + It is added by phase `DropReturnBreakContinue` and the + information is used in `FunctionalizeLoops`. We need it because + we replace the control flow nodes of the AST by some encoding + in the `ControlFlow` enum. *) + and cf_kind = BreakOnly | BreakOrReturn + and expr' = (* pure fragment *) | If of { cond : expr; then_ : expr; else_ : expr option } @@ -262,18 +292,24 @@ functor body : expr; kind : loop_kind; state : loop_state option; + control_flow : (cf_kind * F.fold_like_loop) option; label : string option; witness : F.loop; } (* ControlFlow *) - | Break of { e : expr; label : string option; witness : F.break * F.loop } + | Break of { + e : expr; + acc : (expr * F.state_passing_loop) option; + label : string option; + witness : F.break * F.loop; + } | Return of { e : expr; witness : F.early_exit } | QuestionMark of { e : expr; return_typ : ty; witness : F.question_mark } (** The expression `e?`. In opposition to Rust, no implicit coercion is applied on the (potential) error payload of `e`. Coercion should be made explicit within `e`. 
*) | Continue of { - e : (F.state_passing_loop * expr) option; + acc : (expr * F.state_passing_loop) option; label : string option; witness : F.continue * F.loop; } @@ -421,7 +457,7 @@ functor | Impl of { generics : generics; self_ty : ty; - of_trait : global_ident * generic_value list; + of_trait : concrete_ident * generic_value list; items : impl_item list; parent_bounds : (impl_expr * impl_ident) list; safety : safety_kind; @@ -434,7 +470,7 @@ functor is_external : bool; rename : string option; } - | Quote of quote + | Quote of { quote : quote; origin : item_quote_origin } | HaxError of string | NotImplementedYet diff --git a/engine/lib/ast_builder.ml b/engine/lib/ast_builder.ml index 851477048..a092f9225 100644 --- a/engine/lib/ast_builder.ml +++ b/engine/lib/ast_builder.ml @@ -22,6 +22,27 @@ module Make (F : Features.T) = struct let ty_tuple_or_id : ty list -> ty = function | [ ty ] -> ty | types -> ty_tuple types + + (** This gives the type of a value in the `ControlFlow` enum *) + let ty_cf ~(continue_type : ty) ~(break_type : ty) : ty = + TApp + { + ident = Global_ident.of_name Type Core__ops__control_flow__ControlFlow; + args = [ GType break_type; GType continue_type ]; + } + + (** This gives the type of a value encoded in the `ControlFlow` enum. 
+ In case a `return_type` is provided the encoding is nested: + `return v` is `Break (Break v)` + `break v` is `Break (Continue (v, acc))` *) + let ty_cf_return ~(acc_type : ty) ~(break_type : ty) + ~(return_type : ty option) : ty = + let break_type = ty_tuple [ break_type; acc_type ] in + match return_type with + | Some ret_ty -> + let break_type = ty_cf ~break_type:ret_ty ~continue_type:break_type in + ty_cf ~break_type ~continue_type:acc_type + | None -> ty_cf ~break_type ~continue_type:acc_type end include NoSpan @@ -29,10 +50,147 @@ module Make (F : Features.T) = struct module Explicit = struct let ty_unit : ty = TApp { ident = `TupleType 0; args = [] } let expr_unit = expr_GlobalVar (`TupleCons 0) ~typ:ty_unit + + let expr_tuple ~(span : span) (tuple : expr list) = + let len = List.length tuple in + let fields = List.mapi ~f:(fun i x -> (`TupleField (i, len), x)) tuple in + let typ = NoSpan.ty_tuple @@ List.map ~f:(fun { typ; _ } -> typ) tuple in + expr_Construct ~span ~typ ~constructor:(`TupleCons len) ~is_record:false + ~is_struct:true ~fields ~base:None + let pat_PBinding ~typ = pat_PBinding ~inner_typ:typ ~typ let arm ~span arm_pat ?(guard = None) body = { arm = { arm_pat; body; guard }; span } + + let pat_Constructor_CF ~(span : span) ~(typ : ty) + (cf : [ `Break | `Continue ]) (pat : pat) = + match cf with + | `Break -> + { + p = + PConstruct + { + constructor = + Global_ident.of_name + (Constructor { is_struct = false }) + Core__ops__control_flow__ControlFlow__Break; + fields = + [ + { + field = + Global_ident.of_name Field + Core__ops__control_flow__ControlFlow__Break__0; + pat; + }; + ]; + is_record = false; + is_struct = false; + }; + typ; + span; + } + | `Continue -> + { + p = + PConstruct + { + constructor = + Global_ident.of_name + (Constructor { is_struct = false }) + Core__ops__control_flow__ControlFlow__Continue; + fields = + [ + { + field = + Global_ident.of_name Field + Core__ops__control_flow__ControlFlow__Continue__0; + pat; + }; + ]; + 
is_record = false; + is_struct = false; + }; + typ; + span; + } + + let call_Constructor' (constructor : global_ident) is_struct + (args : expr list) span ret_typ = + let mk_field = + let len = List.length args in + fun n -> `TupleField (len, n) + in + let fields = List.mapi ~f:(fun i arg -> (mk_field i, arg)) args in + { + e = + Construct + { constructor; is_record = false; is_struct; fields; base = None }; + typ = ret_typ; + span; + } + + let call_Constructor (constructor_name : Concrete_ident.name) + (is_struct : bool) (args : expr list) span ret_typ = + call_Constructor' + (`Concrete + (Concrete_ident.of_name (Constructor { is_struct }) constructor_name)) + is_struct args span ret_typ + + let expr'_Constructor_CF ~(span : span) ~(break_type : ty) + ?(continue_type : ty = ty_unit) (cf : [ `Break | `Continue ]) (e : expr) + = + let typ = NoSpan.ty_cf ~continue_type ~break_type in + match cf with + | `Break -> + call_Constructor Core__ops__control_flow__ControlFlow__Break false + [ e ] span typ + | `Continue -> + call_Constructor Core__ops__control_flow__ControlFlow__Continue false + [ e ] span typ + + (** We use the following encoding of return, break and continue in the `ControlFlow` enum: + Return e -> Break (Break e) + Break e -> Break ((Continue(e, acc))) + Continue -> Continue(acc) + + In case there is no return we simplify to: + Break e -> (Break (e, acc)) + Continue -> (continue (acc)) + *) + let expr_Constructor_CF ~(span : span) ~(break_type : ty option) + ~(return_type : ty option) ~(acc : expr) ?(e : expr = expr_unit ~span) + (cf : [ `Return | `Break | `Continue ]) = + let break_type = Option.value ~default:ty_unit break_type in + match cf with + | `Return -> + let continue_type = NoSpan.ty_tuple [ break_type; acc.typ ] in + let inner = + expr'_Constructor_CF ~break_type:e.typ ~continue_type ~span `Break e + in + expr'_Constructor_CF ~span ~break_type:inner.typ + ~continue_type:acc.typ `Break inner + | `Break -> + let tuple = expr_tuple ~span [ e; acc 
] in + let inner = + match return_type with + | Some ret_typ -> + expr'_Constructor_CF ~span ~break_type:ret_typ + ~continue_type:tuple.typ `Continue tuple + | None -> tuple + in + expr'_Constructor_CF ~span ~break_type:inner.typ + ~continue_type:acc.typ `Break inner + | `Continue -> + let break_type = + let tuple_type = NoSpan.ty_tuple [ break_type; acc.typ ] in + match return_type with + | Some ret_typ -> + NoSpan.ty_cf ~break_type:ret_typ ~continue_type:tuple_type + | None -> tuple_type + in + expr'_Constructor_CF ~span ~break_type ~continue_type:acc.typ + `Continue acc end include Explicit @@ -44,6 +202,10 @@ module Make (F : Features.T) = struct let pat_PBinding = Explicit.pat_PBinding ~span let expr_unit = expr_unit ~span + let expr_tuple = expr_tuple ~span + let pat_Constructor_CF = pat_Constructor_CF ~span + let expr'_Constructor_CF = expr'_Constructor_CF ~span + let expr_Constructor_CF = expr_Constructor_CF ~span let arm ?(guard = None) = arm ~span ?guard end diff --git a/engine/lib/ast_utils.ml b/engine/lib/ast_utils.ml index 7c3a8f412..1c149404f 100644 --- a/engine/lib/ast_utils.ml +++ b/engine/lib/ast_utils.ml @@ -306,6 +306,7 @@ module Make (F : Features.T) = struct method! visit_global_ident lvl (x : Global_ident.t) = match x with | `Concrete x -> `Concrete (f lvl x) + | `Projector (`Concrete x) -> `Projector (`Concrete (f lvl x)) | _ -> super#visit_global_ident lvl x method! visit_ty _ t = super#visit_ty TypeLevel t @@ -706,7 +707,7 @@ module Make (F : Features.T) = struct (* TODO: Those tuple1 things are wrong! Tuples of size one exists in Rust! e.g. 
`(123,)` *) let rec remove_tuple1_pat (p : pat) : pat = match p.p with - | PConstruct { name = `TupleType 1; args = [ { pat; _ } ]; _ } -> + | PConstruct { constructor = `TupleType 1; fields = [ { pat; _ } ]; _ } -> remove_tuple1_pat pat | _ -> p @@ -747,7 +748,7 @@ module Make (F : Features.T) = struct pat_is_expr p e | PBinding { subpat = None; var = pv; _ }, LocalVar ev -> [%eq: local_ident] pv ev - | ( PConstruct { name = pn; args = pargs; _ }, + | ( PConstruct { constructor = pn; fields = pargs; _ }, Construct { constructor = en; fields = eargs; base = None; _ } ) when [%eq: global_ident] pn en -> ( match List.zip pargs eargs with @@ -823,10 +824,10 @@ module Make (F : Features.T) = struct p = PConstruct { - name = `TupleCons len; - args = tuple; + constructor = `TupleCons len; is_record = false; is_struct = true; + fields = tuple; }; typ = make_tuple_typ @@ List.map ~f:(fun { pat; _ } -> pat.typ) tuple; span; @@ -889,7 +890,8 @@ module Make (F : Features.T) = struct (Concrete_ident.of_name (Constructor { is_struct }) constructor_name)) is_struct args span ret_typ - let call' ?impl f (args : expr list) span ret_typ = + let call' ?impl f ?(generic_args = []) ?(impl_generic_args = []) + (args : expr list) span ret_typ = let typ = TArrow (List.map ~f:(fun arg -> arg.typ) args, ret_typ) in let e = GlobalVar f in { @@ -898,17 +900,18 @@ module Make (F : Features.T) = struct { f = { e; typ; span }; args; - generic_args = []; + generic_args; bounds_impls = []; - trait = Option.map ~f:(fun impl -> (impl, [])) impl; + trait = Option.map ~f:(fun impl -> (impl, impl_generic_args)) impl; }; typ = ret_typ; span; } - let call ?(kind : Concrete_ident.Kind.t = Value) ?impl - (f_name : Concrete_ident.name) (args : expr list) span ret_typ = - call' ?impl + let call ?(kind : Concrete_ident.Kind.t = Value) ?(generic_args = []) + ?(impl_generic_args = []) ?impl (f_name : Concrete_ident.name) + (args : expr list) span ret_typ = + call' ?impl ~generic_args ~impl_generic_args 
(`Concrete (Concrete_ident.of_name kind f_name)) args span ret_typ @@ -998,6 +1001,20 @@ module Make (F : Features.T) = struct in Some { pat; typ; typ_span = Some span; attrs = [] } + let kind_of_item (item : item) : item_kind = + match item.v with + | Fn _ -> `Fn + | TyAlias _ -> `TyAlias + | Type _ -> `Type + | IMacroInvokation _ -> `IMacroInvokation + | Trait _ -> `Trait + | Impl _ -> `Impl + | Alias _ -> `Alias + | Use _ -> `Use + | Quote _ -> `Quote + | HaxError _ -> `HaxError + | NotImplementedYet -> `NotImplementedYet + let rec expr_of_lhs (span : span) (lhs : lhs) : expr = match lhs with | LhsLocalVar { var; typ } -> { e = LocalVar var; typ; span } diff --git a/engine/lib/backend.ml b/engine/lib/backend.ml index 0dfb6ea26..9836708bf 100644 --- a/engine/lib/backend.ml +++ b/engine/lib/backend.ml @@ -71,6 +71,9 @@ module Make (InputLanguage : Features.T) (M : BackendMetadata) = struct end module Attrs = Attr_payloads.Make (InputLanguage) (Error) + [@@ocaml.deprecated + "Use more precise errors: Error.unimplemented, Error.assertion_failure or \ + a raw Error.t (with Error.raise)"] let failwith ?(span = Span.default) msg = Error.unimplemented @@ -78,7 +81,4 @@ module Make (InputLanguage : Features.T) (M : BackendMetadata) = struct ("[TODO: this error uses failwith, and thus leads to bad error \ messages, please update it using [Diagnostics.*] helpers] " ^ msg) span - [@@ocaml.deprecated - "Use more precise errors: Error.unimplemented, Error.assertion_failure \ - or a raw Error.t (with Error.raise)"] end diff --git a/engine/lib/concrete_ident/concrete_ident.ml b/engine/lib/concrete_ident/concrete_ident.ml index c8a1a49ec..b0d0cc160 100644 --- a/engine/lib/concrete_ident/concrete_ident.ml +++ b/engine/lib/concrete_ident/concrete_ident.ml @@ -27,7 +27,7 @@ module Imported = struct [@@deriving show, yojson, compare, sexp, eq, hash] let of_def_path_item : Types.def_path_item -> def_path_item = function - | CrateRoot -> CrateRoot + | CrateRoot _ -> CrateRoot | Impl 
-> Impl | ForeignMod -> ForeignMod | Use -> Use @@ -50,7 +50,8 @@ module Imported = struct disambiguator = MyInt64.to_int_exn disambiguator; } - let of_def_id Types.{ krate; path; _ } = + let of_def_id + ({ contents = { value = { krate; path; _ }; _ } } : Types.def_id) = { krate; path = List.map ~f:of_disambiguated_def_path_item path } let parent { krate; path; _ } = { krate; path = List.drop_last_exn path } @@ -266,7 +267,7 @@ module View = struct open Utils let simple_ty_to_string ~(namespace : Imported.def_id) : - Types.ty -> string option = + Types.node_for__ty_kind -> string option = let escape = let re = Re.Pcre.regexp "_((?:e_)*)of_" in let f group = "_e_" ^ Re.Group.get group 1 ^ "of_" in @@ -279,14 +280,14 @@ module View = struct namespace |> some_if_true in - let* last = List.last def_id.path in + let* last = List.last def_id.contents.value.path in let* () = some_if_true Int64.(last.disambiguator = zero) in last.data |> Imported.of_def_path_item |> string_of_def_path_item |> Option.map ~f:escape in - let arity0 = - Option.map ~f:escape << function - | Types.Bool -> Some "bool" + let arity0 (ty : Types.node_for__ty_kind) = + match ty.Types.value with + | Bool -> Some "bool" | Char -> Some "char" | Str -> Some "str" | Never -> Some "never" @@ -305,12 +306,14 @@ module View = struct | Float F32 -> Some "f32" | Float F64 -> Some "f64" | Tuple [] -> Some "unit" - | Adt { def_id; generic_args = []; _ } -> adt def_id + | Adt { def_id; generic_args = []; _ } -> + Option.map ~f:escape (adt def_id) | _ -> None in let apply left right = left ^ "_of_" ^ right in - let rec arity1 = function - | Types.Slice sub -> arity1 sub |> Option.map ~f:(apply "slice") + let rec arity1 (ty : Types.node_for__ty_kind) = + match ty.value with + | Slice sub -> arity1 sub |> Option.map ~f:(apply "slice") | Ref (_, sub, _) -> arity1 sub |> Option.map ~f:(apply "ref") | Adt { def_id; generic_args = [ Type arg ]; _ } -> let* adt = adt def_id in @@ -319,7 +322,7 @@ module View = struct | 
 Tuple l -> let* l = List.map ~f:arity0 l |> Option.all in Some ("tuple_" ^ String.concat ~sep:"_" l) - | otherwise -> arity0 otherwise + | _ -> arity0 ty in arity1 @@ -385,7 +388,7 @@ module View = struct namespace in let* typ = simple_ty_to_string ~namespace typ in - let* trait = List.last trait.path in + let* trait = List.last trait.contents.value.path in let* trait = Imported.of_def_path_item trait.data |> string_of_def_path_item in @@ -628,6 +631,10 @@ module Create = struct in let path = List.drop_last_exn old.def_id.path @ [ last ] in { old with def_id = { old.def_id with path } } + + let constructor name = + let path = name.def_id.path @ [ { data = Ctor; disambiguator = 0 } ] in + { name with def_id = { name.def_id with path } } end let lookup_raw_impl_info (impl : t) : Types.impl_infos option = diff --git a/engine/lib/concrete_ident/concrete_ident.mli b/engine/lib/concrete_ident/concrete_ident.mli index 8a5e413eb..e87f71b22 100644 --- a/engine/lib/concrete_ident/concrete_ident.mli +++ b/engine/lib/concrete_ident/concrete_ident.mli @@ -30,6 +30,10 @@ module Create : sig val fresh_module : from:t list -> t val move_under : new_parent:t -> t -> t + val constructor : t -> t + (** [constructor ident] adds a [Ctor] to [ident]; + this allows building a constructor from a variant name. 
*) + val map_last : f:(string -> string) -> t -> t (** [map_last f ident] applies [f] on the last chunk of [ident]'s path if it holds a string *) diff --git a/engine/lib/dependencies.ml b/engine/lib/dependencies.ml index 667426d50..eb8819ac8 100644 --- a/engine/lib/dependencies.ml +++ b/engine/lib/dependencies.ml @@ -71,7 +71,7 @@ module Make (F : Features.T) = struct -> v#visit_generics () generics @ v#visit_ty () self_ty - @ v#visit_global_ident () (fst of_trait) + @ v#visit_concrete_ident () (fst of_trait) @ concat_map (v#visit_generic_value ()) (snd of_trait) @ concat_map (v#visit_impl_item ()) items @ concat_map @@ -155,13 +155,6 @@ module Make (F : Features.T) = struct end module CyclicDep = struct - (* We are looking for dependencies between items that give a cyclic dependency at the module level - (but not necessarily at the item level). All the items belonging to such a cycle should be bundled - together. *) - (* The algorithm is to take the transitive closure of the items dependency graph and look - for paths of length 3 that in terms of modules have the form A -> B -> A (A != B) *) - (* To compute the bundles, we keep a second (undirected graph) where an edge between two items - means they should be in the same bundle. The bundles are the connected components of this graph. 
*) module Bundle = struct type t = Concrete_ident.t list @@ -171,57 +164,22 @@ module Make (F : Features.T) = struct let cycles g = CC.components_list g end - let of_graph' (g : G.t) (mod_graph_cycles : Namespace.Set.t list) : - Bundle.t list = - let closure = Oper.transitive_closure g in - - let bundles_graph = - G.fold_vertex - (fun start (bundles_graph : Bundle.G.t) -> - let start_mod = Namespace.of_concrete_ident start in - let cycle_modules = - List.filter mod_graph_cycles ~f:(fun cycle -> - Set.mem cycle start_mod) - |> List.reduce ~f:Set.union - in - match cycle_modules with - | Some cycle_modules -> - let bundles_graph = - G.fold_succ - (fun interm bundles_graph -> - let interm_mod = Namespace.of_concrete_ident interm in - if - (not ([%eq: Namespace.t] interm_mod start_mod)) - && Set.mem cycle_modules interm_mod - then - G.fold_succ - (fun dest bundles_graph -> - let dest_mod = Namespace.of_concrete_ident dest in - if [%eq: Namespace.t] interm_mod dest_mod then - let g = - Bundle.G.add_edge bundles_graph start interm - in - let g = Bundle.G.add_edge g interm dest in - g - else bundles_graph) - g start bundles_graph - else bundles_graph) - g start bundles_graph - in - - bundles_graph - | None -> bundles_graph) - closure Bundle.G.empty + (* This is a solution that bundles together everything that belongs to the same module SCC. 
+ It results in bundles that are much bigger than they could be but is a simple solution + to the problem described in https://github.com/hacspec/hax/issues/995#issuecomment-2411114404 *) + let of_mod_sccs (items : item list) + (mod_graph_cycles : Namespace.Set.t list) : Bundle.t list = + let item_names = List.map items ~f:(fun x -> x.ident) in + let cycles = + List.filter mod_graph_cycles ~f:(fun set -> + Prelude.Set.length set > 1) + in + let bundles = + List.map cycles ~f:(fun set -> + List.filter item_names ~f:(fun item -> + Prelude.Set.mem set (Namespace.of_concrete_ident item))) in - - let bundles = Bundle.cycles bundles_graph in bundles - - let of_graph (g : G.t) (mod_graph_cycles : Namespace.Set.t list) : - Bundle.t list = - match mod_graph_cycles with - | [] -> [] - | _ -> of_graph' g mod_graph_cycles end open Graph.Graphviz.Dot (struct @@ -425,18 +383,31 @@ module Make (F : Features.T) = struct let aliases = List.map (old_new :: variants_renamings old_new) ~f:(fun (old_ident, new_ident) -> - { item with v = Alias { name = old_ident; item = new_ident } }) + let attrs = + List.filter ~f:(fun att -> Attrs.late_skip [ att ]) item.attrs + in + + { item with v = Alias { name = old_ident; item = new_ident }; attrs }) in item' :: aliases let bundle_cyclic_modules (items : item list) : item list = - let g = ItemGraph.of_items ~original_items:items items in let from_ident ident : item option = List.find ~f:(fun i -> [%equal: Concrete_ident.t] i.ident ident) items in let mut_rec_bundles = let mod_graph_cycles = ModGraph.of_items items |> ModGraph.cycles in - let bundles = ItemGraph.CyclicDep.of_graph g mod_graph_cycles in + (* `Use` items shouldn't be bundled as they have no dependencies + and they have dummy names. 
*) + let non_use_items = + List.filter + ~f:(fun item -> + match item.v with Use _ | NotImplementedYet -> false | _ -> true) + items + in + let bundles = + ItemGraph.CyclicDep.of_mod_sccs non_use_items mod_graph_cycles + in let f = List.filter_map ~f:from_ident in List.map ~f bundles in @@ -476,13 +447,27 @@ module Make (F : Features.T) = struct List.map variants ~f:(fun { name; _ } -> ( name, Concrete_ident.Create.move_under ~new_parent:new_name name )) + | Some { v = Type { variants; is_struct = true; _ }; _ } -> + List.concat_map variants ~f:(fun { arguments; _ } -> + List.map arguments ~f:(fun (name, _, _) -> + ( name, + Concrete_ident.Create.move_under ~new_parent:new_name name + ))) | _ -> [] in - + let variant_and_constructors_renamings = + List.concat_map ~f:variants_renamings renamings + |> List.concat_map ~f:(fun (old_name, new_name) -> + [ + (old_name, new_name); + ( Concrete_ident.Create.constructor old_name, + Concrete_ident.Create.constructor new_name ); + ]) + in let renamings = Map.of_alist_exn (module Concrete_ident) - (renamings @ List.concat_map ~f:variants_renamings renamings) + (renamings @ variant_and_constructors_renamings) in let rename = let renamer _lvl i = Map.find renamings i |> Option.value ~default:i in diff --git a/engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml b/engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml new file mode 100644 index 000000000..3b1c190aa --- /dev/null +++ b/engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml @@ -0,0 +1,463 @@ +open! Prelude +open! Ast + +module Make (F : Features.T) (View : Concrete_ident.VIEW_API) = struct + open Deprecated_generic_printer_base + open Deprecated_generic_printer_base.Make (F) + + module Class = struct + module U = Ast_utils.Make (F) + open! 
AST + open PPrint + + let iblock f = group >> jump 2 0 >> terminate (break 0) >> f >> group + + class print = + object (print) + inherit print_base as super + method printer_name = "Generic" + + method par_state : ast_position -> par_state = + function + | Lhs_LhsArrayAccessor | Ty_Tuple | Ty_TSlice | Ty_TArray_length + | Expr_If_cond | Expr_If_then | Expr_If_else | Expr_Array + | Expr_Assign | Expr_Closure_param | Expr_Closure_body + | Expr_Ascription_e | Expr_Let_lhs | Expr_Let_rhs | Expr_Let_body + | Expr_App_arg | Expr_ConstructTuple | Pat_ConstructTuple | Pat_PArray + | Pat_Ascription_pat | Param_pat | Item_Fn_body | GenericParam_GPConst + -> + AlreadyPar + | _ -> NeedsPar + + method namespace_of_concrete_ident + : concrete_ident -> string * string list = + fun i -> View.to_namespace i + + method concrete_ident' ~(under_current_ns : bool) : concrete_ident fn = + fun id -> + let id = View.to_view id in + let chunks = + if under_current_ns then [ id.definition ] + else id.crate :: (id.path @ [ id.definition ]) + in + separate_map (colon ^^ colon) utf8string chunks + + method name_of_concrete_ident : concrete_ident fn = + View.to_definition_name >> utf8string + + method mutability : 'a. 
'a mutability fn = fun _ -> empty + + method primitive_ident : primitive_ident fn = + function + | Deref -> string "deref" + | Cast -> string "cast" + | LogicalOp And -> string "and" + | LogicalOp Or -> string "or" + + method local_ident : local_ident fn = View.local_ident >> utf8string + + method literal : literal_ctx -> literal fn = + (* TODO : escape *) + fun _ctx -> function + | String s -> utf8string s |> dquotes + | Char c -> char c |> bquotes + | Int { value; negative; _ } -> + string value |> precede (if negative then minus else empty) + | Float { value; kind; negative } -> + string value + |> precede (if negative then minus else empty) + |> terminate (string (show_float_kind kind)) + | Bool b -> OCaml.bool b + + method generic_value : generic_value fn = + function + | GLifetime _ -> string "Lifetime" + | GType ty -> print#ty_at GenericValue_GType ty + | GConst expr -> print#expr_at GenericValue_GConst expr + + method lhs : lhs fn = + function + | LhsLocalVar { var; _ } -> print#local_ident var + | LhsArbitraryExpr { e; _ } -> print#expr_at Lhs_LhsArbitraryExpr e + | LhsFieldAccessor { e; field; _ } -> + print#lhs e |> parens + |> terminate (dot ^^ print#global_ident_projector field) + | LhsArrayAccessor { e; index; _ } -> + print#lhs e |> parens + |> terminate (print#expr_at Lhs_LhsArrayAccessor index |> brackets) + + method ty_bool : document = string "bool" + method ty_char : document = string "char" + method ty_str : document = string "str" + + method ty_int : int_kind fn = + fun { size; signedness } -> + let signedness = match signedness with Signed -> "i" | _ -> "u" in + let size = + match int_of_size size with + | Some n -> OCaml.int n + | None -> string "size" + in + string signedness ^^ size + + method ty_float : float_kind fn = show_float_kind >> string + + method generic_values : generic_value list fn = + function + | [] -> empty + | values -> separate_map comma print#generic_value values |> angles + + method ty_app : concrete_ident -> 
generic_value list fn = + fun f args -> print#concrete_ident f ^^ print#generic_values args + + method ty_tuple : int -> ty list fn = + fun _n -> + separate_map (comma ^^ break 1) (print#ty_at Ty_Tuple) + >> iblock parens + + method! ty : par_state -> ty fn = + fun ctx ty -> + match ty with + | TBool -> string "bool" + | TChar -> string "char" + | TInt kind -> print#ty_int kind + | TFloat kind -> print#ty_float kind + | TStr -> string "String" + | TArrow (inputs, output) -> + separate_map (string "->") (print#ty_at Ty_TArrow) + (inputs @ [ output ]) + |> parens + |> precede (string "arrow!") + | TRef { typ; mut; _ } -> + ampersand ^^ print#mutability mut ^^ print#ty_at Ty_TRef typ + | TParam i -> print#local_ident i + | TSlice { ty; _ } -> print#ty_at Ty_TSlice ty |> brackets + | TRawPointer _ -> string "raw_pointer!()" + | TArray { typ; length } -> + print#ty_at Ty_TArray_length typ + ^/^ semi + ^/^ print#expr_at Ty_TArray_length length + |> brackets + | TAssociatedType _ -> string "assoc_type!()" + | TOpaque _ -> string "opaque_type!()" + | TApp _ -> super#ty ctx ty + | TDyn _ -> empty (* TODO *) + + method! 
expr' : par_state -> expr' fn = + fun ctx e -> + let wrap_parens = + group + >> + match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces + in + match e with + | If { cond; then_; else_ } -> + let if_then = + (string "if" ^//^ nest 2 (print#expr_at Expr_If_cond cond)) + ^/^ string "then" + ^//^ (print#expr_at Expr_If_then then_ |> braces |> nest 1) + in + (match else_ with + | None -> if_then + | Some else_ -> + if_then ^^ break 1 ^^ string "else" ^^ space + ^^ (print#expr_at Expr_If_else else_ |> iblock braces)) + |> wrap_parens + | Match { scrutinee; arms } -> + let header = + string "match" ^^ space + ^^ (print#expr_at Expr_Match_scrutinee scrutinee + |> terminate space |> iblock Fn.id) + |> group + in + let arms = + separate_map hardline + (print#arm >> group >> nest 2 + >> precede (bar ^^ space) + >> group) + arms + in + header ^^ iblock braces arms + | Let { monadic; lhs; rhs; body } -> + (Option.map + ~f:(fun monad -> print#expr_monadic_let ~monad) + monadic + |> Option.value ~default:print#expr_let) + ~lhs ~rhs body + |> wrap_parens + | Literal l -> print#literal Expr l + | Block { e; safety_mode; _ } -> ( + let e = lbrace ^/^ nest 2 (print#expr ctx e) ^/^ rbrace in + match safety_mode with + | Safe -> e + | Unsafe _ -> !^"unsafe " ^^ e) + | Array l -> + separate_map comma (print#expr_at Expr_Array) l + |> group |> brackets + | LocalVar i -> print#local_ident i + | GlobalVar (`Concrete i) -> print#concrete_ident i + | GlobalVar (`Primitive p) -> print#primitive_ident p + | GlobalVar (`TupleCons 0) -> print#expr_construct_tuple [] + | GlobalVar + (`TupleType _ | `TupleField _ | `Projector _ | `TupleCons _) -> + print#assertion_failure "GlobalVar" + | Assign { lhs; e; _ } -> + group (print#lhs lhs) + ^^ space ^^ equals + ^/^ group (print#expr_at Expr_Assign e) + ^^ semi + | Loop _ -> string "todo loop;" + | Break _ -> string "todo break;" + | Return _ -> string "todo return;" + | Continue _ -> string "todo continue;" + | QuestionMark { e; _ } -> + 
print#expr_at Expr_QuestionMark e |> terminate qmark + | Borrow { kind; e; _ } -> + string (match kind with Mut _ -> "&mut " | _ -> "&") + ^^ print#expr_at Expr_Borrow e + | AddressOf _ -> string "todo address of;" + | Closure { params; body; _ } -> + separate_map comma (print#pat_at Expr_Closure_param) params + |> group |> enclose bar bar + |> terminate (print#expr_at Expr_Closure_body body |> group) + |> wrap_parens + | Ascription { e; typ } -> + print#expr_at Expr_Ascription_e e + ^^ string "as" + ^/^ print#ty_at Expr_Ascription_typ typ + |> wrap_parens + | MacroInvokation _ -> print#assertion_failure "MacroInvokation" + | EffectAction _ -> print#assertion_failure "EffectAction" + | Quote quote -> print#quote quote + | App _ | Construct _ -> super#expr' ctx e + + method quote { contents; _ } = + List.map + ~f:(function + | `Verbatim code -> string code + | `Expr e -> print#expr_at Expr_Quote e + | `Pat p -> print#pat_at Expr_Quote p + | `Typ p -> print#ty_at Expr_Quote p) + contents + |> concat + + method expr_monadic_let + : monad:supported_monads * F.monadic_binding -> + lhs:pat -> + rhs:expr -> + expr fn = + fun ~monad:_ ~lhs ~rhs body -> print#expr_let ~lhs ~rhs body + + method expr_let : lhs:pat -> rhs:expr -> expr fn = + fun ~lhs ~rhs body -> + string "let" + ^/^ iblock Fn.id (print#pat_at Expr_Let_lhs lhs) + ^/^ equals + ^/^ iblock Fn.id (print#expr_at Expr_Let_rhs rhs) + ^^ semi + ^/^ (print#expr_at Expr_Let_body body |> group) + + method tuple_projection : size:int -> nth:int -> expr fn = + fun ~size:_ ~nth e -> + print#expr_at Expr_TupleProjection e + |> terminate (dot ^^ OCaml.int nth) + + method field_projection : concrete_ident -> expr fn = + fun i e -> + print#expr_at Expr_FieldProjection e + |> terminate (dot ^^ print#name_of_concrete_ident i) + + method expr_app : expr -> expr list -> generic_value list fn = + fun f args _generic_args -> + let args = + separate_map + (comma ^^ break 1) + (print#expr_at Expr_App_arg >> group) + args + in + let f = 
print#expr_at Expr_App_f f |> group in + f ^^ iblock parens args + + method doc_construct_tuple : document list fn = + separate (comma ^^ break 1) >> iblock parens + + method expr_construct_tuple : expr list fn = + List.map ~f:(print#expr_at Expr_ConstructTuple) + >> print#doc_construct_tuple + + method pat_construct_tuple : pat list fn = + List.map ~f:(print#pat_at Pat_ConstructTuple) + >> print#doc_construct_tuple + + method global_ident_projector : global_ident fn = + function + | `Projector (`Concrete i) | `Concrete i -> print#concrete_ident i + | _ -> + print#assertion_failure "global_ident_projector: not a projector" + + method doc_construct_inductive + : is_record:bool -> + is_struct:bool -> + constructor:concrete_ident -> + base:document option -> + (global_ident * document) list fn = + fun ~is_record ~is_struct:_ ~constructor ~base:_ args -> + if is_record then + print#concrete_ident constructor + ^^ space + ^^ iblock parens + (separate_map (break 0) + (fun (field, body) -> + (print#global_ident_projector field + |> terminate comma |> group) + ^^ colon ^^ space ^^ iblock Fn.id body) + args) + else + print#concrete_ident constructor + ^^ space + ^^ iblock parens (separate_map (break 0) snd args) + + method expr_construct_inductive + : is_record:bool -> + is_struct:bool -> + constructor:concrete_ident -> + base:(expr * F.construct_base) option -> + (global_ident * expr) list fn = + fun ~is_record ~is_struct ~constructor ~base -> + let base = + Option.map + ~f:(fst >> print#expr_at Expr_ConcreteInductive_base) + base + in + List.map ~f:(print#expr_at Expr_ConcreteInductive_field |> map_snd) + >> print#doc_construct_inductive ~is_record ~is_struct ~constructor + ~base + + method attr : attr fn = fun _ -> empty + + method! 
pat' : par_state -> pat' fn = + fun ctx -> + let wrap_parens = + group + >> + match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces + in + function + | PWild -> underscore + | PAscription { typ; typ_span; pat } -> + print#pat_ascription ~typ ~typ_span pat |> wrap_parens + | PBinding { mut; mode; var; typ = _; subpat } -> ( + let p = + (match mode with ByRef _ -> string "&" | _ -> empty) + ^^ (match mut with Mutable _ -> string "mut " | _ -> empty) + ^^ print#local_ident var + in + match subpat with + | Some (subpat, _) -> + p ^^ space ^^ at ^^ space + ^^ print#pat_at Pat_PBinding_subpat subpat + |> wrap_parens + | None -> p) + | PArray { args } -> + separate_map (break 0) + (print#pat_at Pat_PArray >> terminate comma >> group) + args + |> iblock brackets + | PDeref { subpat; _ } -> + ampersand ^^ print#pat_at Pat_PDeref subpat + | (PConstruct _ | PConstant _) as pat -> super#pat' ctx pat + | POr { subpats } -> + separate_map (bar ^^ break 1) (print#pat_at Pat_Or) subpats + + method pat_ascription : typ:ty -> typ_span:span -> pat fn = + fun ~typ ~typ_span pat -> + print#pat_at Pat_Ascription_pat pat + ^^ colon + ^^ print#with_span ~span:typ_span (fun () -> + print#ty_at Pat_Ascription_typ typ) + + method expr_unwrapped : par_state -> expr fn = + fun ctx { e; _ } -> print#expr' ctx e + + method param : param fn = + fun { pat; typ; typ_span; attrs } -> + let typ = + match typ_span with + | Some span -> + print#with_span ~span (fun _ -> print#ty_at Param_typ typ) + | None -> print#ty_at Param_typ typ + in + print#attrs attrs ^^ print#pat_at Param_pat pat ^^ space ^^ colon + ^^ space ^^ typ + + method item' : item' fn = + function + | Fn { name; generics; body; params; safety } -> + let params = + iblock parens + (separate_map (comma ^^ break 1) print#param params) + in + let generics = print#generic_params generics.params in + let safety = + optional Base.Fn.id + (match safety with + | Safe -> None + | Unsafe _ -> Some !^"unsafe ") + in + safety ^^ !^"fn" ^^ 
space ^^ print#concrete_ident name ^^ generics + ^^ params + ^^ iblock braces (print#expr_at Item_Fn_body body) + | Quote { quote; _ } -> print#quote quote + | _ -> string "item not implemented" + + method generic_param' : generic_param fn = + fun { ident; attrs; kind; _ } -> + let suffix = + match kind with + | GPLifetime _ -> space ^^ colon ^^ space ^^ string "'unk" + | GPType -> empty + | GPConst { typ } -> + space ^^ colon ^^ space + ^^ print#ty_at GenericParam_GPConst typ + in + let prefix = + match kind with + | GPConst _ -> string "const" ^^ space + | _ -> empty + in + let ident = + let name = + if String.(ident.name = "_") then "Anonymous" else ident.name + in + { ident with name } + in + prefix ^^ print#attrs attrs ^^ print#local_ident ident ^^ suffix + + method generic_params : generic_param list fn = + separate_map comma print#generic_param >> group >> angles + + (*Option.map ~f:(...) guard |> Option.value ~default:empty*) + method arm' : arm' fn = + fun { arm_pat; body; guard } -> + let pat = print#pat_at Arm_pat arm_pat |> group in + let body = print#expr_at Arm_body body in + let guard = + Option.map + ~f:(fun { guard = IfLet { lhs; rhs; _ }; _ } -> + string " if let " ^^ print#pat_at Arm_pat lhs ^^ string " = " + ^^ print#expr_at Arm_body rhs) + guard + |> Option.value ~default:empty + in + pat ^^ guard ^^ string " => " ^^ body ^^ comma + end + end + + include Class + + include Api (struct + type aux_info = unit + + let new_print () = (new Class.print :> print_object) + end) +end diff --git a/engine/lib/generic_printer/generic_printer.mli b/engine/lib/deprecated_generic_printer/deprecated_generic_printer.mli similarity index 70% rename from engine/lib/generic_printer/generic_printer.mli rename to engine/lib/deprecated_generic_printer/deprecated_generic_printer.mli index ccd471cc3..3eb3904f6 100644 --- a/engine/lib/generic_printer/generic_printer.mli +++ b/engine/lib/deprecated_generic_printer/deprecated_generic_printer.mli @@ -1,5 +1,5 @@ module Make 
(F : Features.T) (View : Concrete_ident.VIEW_API) : sig - open Generic_printer_base.Make(F) + open Deprecated_generic_printer_base.Make(F) include API class print : print_class diff --git a/engine/lib/generic_printer/generic_printer_base.ml b/engine/lib/deprecated_generic_printer/deprecated_generic_printer_base.ml similarity index 98% rename from engine/lib/generic_printer/generic_printer_base.ml rename to engine/lib/deprecated_generic_printer/deprecated_generic_printer_base.ml index 3e61a44fd..c887ecdf1 100644 --- a/engine/lib/generic_printer/generic_printer_base.ml +++ b/engine/lib/deprecated_generic_printer/deprecated_generic_printer_base.ml @@ -216,17 +216,17 @@ module Make (F : Features.T) = struct method pat' : par_state -> pat' fn = fun _ -> function | PConstant { lit } -> print#literal Pat lit - | PConstruct { name; args; is_record; is_struct } -> ( - match name with + | PConstruct { constructor; is_record; is_struct; fields } -> ( + match constructor with | `Concrete constructor -> print#doc_construct_inductive ~is_record ~is_struct ~constructor ~base:None (List.map ~f:(fun fp -> (fp.field, print#pat_at Pat_ConcreteInductive fp.pat)) - args) + fields) | `TupleCons _ -> - List.map ~f:(fun fp -> fp.pat) args + List.map ~f:(fun fp -> fp.pat) fields |> print#pat_construct_tuple | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ -> print#assertion_failure "todo err") diff --git a/engine/lib/diagnostics.ml b/engine/lib/diagnostics.ml index b04bd5868..e5f8ed5b3 100644 --- a/engine/lib/diagnostics.ml +++ b/engine/lib/diagnostics.ml @@ -12,6 +12,8 @@ module Phase = struct | NotInBackendLang of Backend.t | ArbitraryLhs | Continue + | Break + | QuestionMark | RawOrMutPointer | EarlyExit | AsPattern @@ -25,39 +27,8 @@ module Phase = struct | x -> [%show: t] x end - type t = - | DirectAndMut - | AndMutDefSite - | Identity - | DropReferences - | DropBlocks - | DropSizedTrait - | DropMatchGuards - | RefMut - | ResugarAsserts - | ResugarForLoops - | 
ResugarWhileLoops - | ResugarForIndexLoops - | ResugarQuestionMarks - | RewriteControlFlow - | SimplifyQuestionMarks - | Specialize - | HoistSideEffects - | HoistDisjunctions - | LocalMutation - | TrivializeAssignLhs - | CfIntoMonads - | FunctionalizeLoops - | TraitsSpecs - | SimplifyMatchReturn - | SimplifyHoisting - | DropNeedlessReturns - | TransformHaxLibInline - | NewtypeAsRefinement - | DummyA - | DummyB - | DummyC - | Reject of Rejection.t + (** All names for phases defined in `lib/phases_*` are generated automatically *) + type%add_phase_names t = Identity | HoistSideEffects | Reject of Rejection.t [@@deriving show { with_path = false }, eq, yojson, compare, hash, sexp] let display = function diff --git a/engine/lib/dune b/engine/lib/dune index 0134495ea..17a3db14d 100644 --- a/engine/lib/dune +++ b/engine/lib/dune @@ -12,6 +12,7 @@ core logs re + sourcemaps ocamlgraph) (preprocessor_deps ; `ppx_inline` is used on the `Subtype` module, thus we need it at PPX time @@ -57,6 +58,17 @@ %{ast} (run generate_from_ast visitors))))) +(rule + (target generated_generic_printer_base.ml) + (deps + (:ast ast.ml)) + (action + (with-stdout-to + generated_generic_printer_base.ml + (with-stdin-from + %{ast} + (run generate_from_ast printer))))) + (rule (target ast_destruct_generated.ml) (deps diff --git a/engine/lib/features.ml b/engine/lib/features.ml index 22a4a3909..9040d8192 100644 --- a/engine/lib/features.ml +++ b/engine/lib/features.ml @@ -4,6 +4,7 @@ loop, for_index_loop, while_loop, state_passing_loop, + fold_like_loop, continue, break, mutable_variable, @@ -40,6 +41,7 @@ module Rust = struct include Off.Monadic_action include Off.Monadic_binding include Off.State_passing_loop + include Off.Fold_like_loop include Off.Quote end diff --git a/engine/lib/generic_printer/generic_printer.ml b/engine/lib/generic_printer/generic_printer.ml index c18ef0a6a..181aef257 100644 --- a/engine/lib/generic_printer/generic_printer.ml +++ 
b/engine/lib/generic_printer/generic_printer.ml @@ -1,463 +1,652 @@ open! Prelude open! Ast +open! PPrint +module LazyDoc = Generated_generic_printer_base.LazyDoc +open LazyDoc + +module Annotation = struct + type loc = { line : int; col : int } [@@deriving show, yojson, eq] + type t = loc * span [@@deriving show, yojson, eq] + + let compare ((a, _) : t) ((b, _) : t) : int = + let line = Int.compare a.line b.line in + if Int.equal line 0 then Int.compare a.col b.col else line + + (** Converts a list of annotation and a string to a list of annotated string *) + let split_with_string (s : string) (annots : t list) = + let lines_position = + String.to_list s + |> List.filter_mapi ~f:(fun i ch -> + match ch with '\n' -> Some i | _ -> None) + |> List.to_array |> Array.get + in + let annots = List.sort ~compare annots in + let init = ({ line = 0; col = 0 }, None) in + let slices = + List.folding_map + ~f:(fun (start, start_span) (end_, end_span) -> + let span = Option.value ~default:end_span start_span in + ((end_, Some end_span), (span, start, end_))) + ~init annots + in + List.map slices ~f:(fun (span, start, end_) -> + let pos = lines_position start.line + start.col in + let len = lines_position end_.line + end_.col - pos in + (span, String.sub s ~pos ~len)) + + let to_mapping ((loc, span) : t) : Sourcemaps.Source_maps.mapping option = + let real_path (x : Types.file_name) = + match x with + | Real (LocalPath p) | Real (Remapped { local_path = Some p; _ }) -> + Some p + | _ -> None + in + let loc_to_loc ({ line; col } : loc) : Sourcemaps.Location.t = + { line; col } + in + let to_loc ({ col; line } : Types.loc) : loc = + { col = Int.of_string col; line = Int.of_string line - 1 } + in + let* span = + Span.to_thir span + |> List.find ~f:(fun (s : Types.span) -> + real_path s.filename |> Option.is_some) + in + let* src_filename = real_path span.filename in + let src_start = to_loc span.lo |> loc_to_loc in + let src_end = to_loc span.hi |> loc_to_loc in + let dst_start = 
loc_to_loc loc in + Some + Sourcemaps.Source_maps. + { + src = { start = src_start; end_ = Some src_end }; + gen = { start = dst_start; end_ = None }; + source = src_filename; + name = None; + } +end -module Make (F : Features.T) (View : Concrete_ident.VIEW_API) = struct - open Generic_printer_base - open Generic_printer_base.Make (F) - - module Class = struct - module U = Ast_utils.Make (F) - open! AST - open PPrint - - let iblock f = group >> jump 2 0 >> terminate (break 0) >> f >> group - - class print = - object (print) - inherit print_base as super - method printer_name = "Generic" - - method par_state : ast_position -> par_state = - function - | Lhs_LhsArrayAccessor | Ty_Tuple | Ty_TSlice | Ty_TArray_length - | Expr_If_cond | Expr_If_then | Expr_If_else | Expr_Array - | Expr_Assign | Expr_Closure_param | Expr_Closure_body - | Expr_Ascription_e | Expr_Let_lhs | Expr_Let_rhs | Expr_Let_body - | Expr_App_arg | Expr_ConstructTuple | Pat_ConstructTuple | Pat_PArray - | Pat_Ascription_pat | Param_pat | Item_Fn_body | GenericParam_GPConst - -> - AlreadyPar - | _ -> NeedsPar - - method namespace_of_concrete_ident - : concrete_ident -> string * string list = - fun i -> View.to_namespace i - - method concrete_ident' ~(under_current_ns : bool) : concrete_ident fn = - fun id -> - let id = View.to_view id in - let chunks = - if under_current_ns then [ id.definition ] - else id.crate :: (id.path @ [ id.definition ]) - in - separate_map (colon ^^ colon) utf8string chunks - - method name_of_concrete_ident : concrete_ident fn = - View.to_definition_name >> utf8string - - method mutability : 'a. 
'a mutability fn = fun _ -> empty - - method primitive_ident : primitive_ident fn = - function - | Deref -> string "deref" - | Cast -> string "cast" - | LogicalOp And -> string "and" - | LogicalOp Or -> string "or" - - method local_ident : local_ident fn = View.local_ident >> utf8string - - method literal : literal_ctx -> literal fn = - (* TODO : escape *) - fun _ctx -> function - | String s -> utf8string s |> dquotes - | Char c -> char c |> bquotes - | Int { value; negative; _ } -> - string value |> precede (if negative then minus else empty) - | Float { value; kind; negative } -> - string value - |> precede (if negative then minus else empty) - |> terminate (string (show_float_kind kind)) - | Bool b -> OCaml.bool b - - method generic_value : generic_value fn = - function - | GLifetime _ -> string "Lifetime" - | GType ty -> print#ty_at GenericValue_GType ty - | GConst expr -> print#expr_at GenericValue_GConst expr - - method lhs : lhs fn = - function - | LhsLocalVar { var; _ } -> print#local_ident var - | LhsArbitraryExpr { e; _ } -> print#expr_at Lhs_LhsArbitraryExpr e - | LhsFieldAccessor { e; field; _ } -> - print#lhs e |> parens - |> terminate (dot ^^ print#global_ident_projector field) - | LhsArrayAccessor { e; index; _ } -> - print#lhs e |> parens - |> terminate (print#expr_at Lhs_LhsArrayAccessor index |> brackets) - - method ty_bool : document = string "bool" - method ty_char : document = string "char" - method ty_str : document = string "str" - - method ty_int : int_kind fn = - fun { size; signedness } -> - let signedness = match signedness with Signed -> "i" | _ -> "u" in - let size = - match int_of_size size with - | Some n -> OCaml.int n - | None -> string "size" - in - string signedness ^^ size - - method ty_float : float_kind fn = show_float_kind >> string - - method generic_values : generic_value list fn = - function - | [] -> empty - | values -> separate_map comma print#generic_value values |> angles - - method ty_app : concrete_ident -> 
generic_value list fn = - fun f args -> print#concrete_ident f ^^ print#generic_values args - - method ty_tuple : int -> ty list fn = - fun _n -> - separate_map (comma ^^ break 1) (print#ty_at Ty_Tuple) - >> iblock parens - - method! ty : par_state -> ty fn = - fun ctx ty -> - match ty with - | TBool -> string "bool" - | TChar -> string "char" - | TInt kind -> print#ty_int kind - | TFloat kind -> print#ty_float kind - | TStr -> string "String" - | TArrow (inputs, output) -> - separate_map (string "->") (print#ty_at Ty_TArrow) - (inputs @ [ output ]) - |> parens - |> precede (string "arrow!") - | TRef { typ; mut; _ } -> - ampersand ^^ print#mutability mut ^^ print#ty_at Ty_TRef typ - | TParam i -> print#local_ident i - | TSlice { ty; _ } -> print#ty_at Ty_TSlice ty |> brackets - | TRawPointer _ -> string "raw_pointer!()" - | TArray { typ; length } -> - print#ty_at Ty_TArray_length typ - ^/^ semi - ^/^ print#expr_at Ty_TArray_length length - |> brackets - | TAssociatedType _ -> string "assoc_type!()" - | TOpaque _ -> string "opaque_type!()" - | TApp _ -> super#ty ctx ty - | TDyn _ -> empty (* TODO *) - - method! 
expr' : par_state -> expr' fn = - fun ctx e -> - let wrap_parens = - group - >> - match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces - in - match e with - | If { cond; then_; else_ } -> - let if_then = - (string "if" ^//^ nest 2 (print#expr_at Expr_If_cond cond)) - ^/^ string "then" - ^//^ (print#expr_at Expr_If_then then_ |> braces |> nest 1) - in - (match else_ with - | None -> if_then - | Some else_ -> - if_then ^^ break 1 ^^ string "else" ^^ space - ^^ (print#expr_at Expr_If_else else_ |> iblock braces)) - |> wrap_parens - | Match { scrutinee; arms } -> - let header = - string "match" ^^ space - ^^ (print#expr_at Expr_Match_scrutinee scrutinee - |> terminate space |> iblock Fn.id) - |> group - in - let arms = - separate_map hardline - (print#arm >> group >> nest 2 - >> precede (bar ^^ space) - >> group) - arms - in - header ^^ iblock braces arms - | Let { monadic; lhs; rhs; body } -> - (Option.map - ~f:(fun monad -> print#expr_monadic_let ~monad) - monadic - |> Option.value ~default:print#expr_let) - ~lhs ~rhs body - |> wrap_parens - | Literal l -> print#literal Expr l - | Block { e; safety_mode; _ } -> ( - let e = lbrace ^/^ nest 2 (print#expr ctx e) ^/^ rbrace in - match safety_mode with - | Safe -> e - | Unsafe _ -> !^"unsafe " ^^ e) - | Array l -> - separate_map comma (print#expr_at Expr_Array) l - |> group |> brackets - | LocalVar i -> print#local_ident i - | GlobalVar (`Concrete i) -> print#concrete_ident i - | GlobalVar (`Primitive p) -> print#primitive_ident p - | GlobalVar (`TupleCons 0) -> print#expr_construct_tuple [] - | GlobalVar - (`TupleType _ | `TupleField _ | `Projector _ | `TupleCons _) -> - print#assertion_failure "GlobalVar" - | Assign { lhs; e; _ } -> - group (print#lhs lhs) - ^^ space ^^ equals - ^/^ group (print#expr_at Expr_Assign e) - ^^ semi - | Loop _ -> string "todo loop;" - | Break _ -> string "todo break;" - | Return _ -> string "todo return;" - | Continue _ -> string "todo continue;" - | QuestionMark { e; _ } -> - 
print#expr_at Expr_QuestionMark e |> terminate qmark - | Borrow { kind; e; _ } -> - string (match kind with Mut _ -> "&mut " | _ -> "&") - ^^ print#expr_at Expr_Borrow e - | AddressOf _ -> string "todo address of;" - | Closure { params; body; _ } -> - separate_map comma (print#pat_at Expr_Closure_param) params - |> group |> enclose bar bar - |> terminate (print#expr_at Expr_Closure_body body |> group) - |> wrap_parens - | Ascription { e; typ } -> - print#expr_at Expr_Ascription_e e - ^^ string "as" - ^/^ print#ty_at Expr_Ascription_typ typ - |> wrap_parens - | MacroInvokation _ -> print#assertion_failure "MacroInvokation" - | EffectAction _ -> print#assertion_failure "EffectAction" - | Quote quote -> print#quote quote - | App _ | Construct _ -> super#expr' ctx e - - method quote { contents; _ } = - List.map - ~f:(function - | `Verbatim code -> string code - | `Expr e -> print#expr_at Expr_Quote e - | `Pat p -> print#pat_at Expr_Quote p - | `Typ p -> print#ty_at Expr_Quote p) - contents - |> concat - - method expr_monadic_let - : monad:supported_monads * F.monadic_binding -> - lhs:pat -> - rhs:expr -> - expr fn = - fun ~monad:_ ~lhs ~rhs body -> print#expr_let ~lhs ~rhs body - - method expr_let : lhs:pat -> rhs:expr -> expr fn = - fun ~lhs ~rhs body -> - string "let" - ^/^ iblock Fn.id (print#pat_at Expr_Let_lhs lhs) - ^/^ equals - ^/^ iblock Fn.id (print#expr_at Expr_Let_rhs rhs) - ^^ semi - ^/^ (print#expr_at Expr_Let_body body |> group) - - method tuple_projection : size:int -> nth:int -> expr fn = - fun ~size:_ ~nth e -> - print#expr_at Expr_TupleProjection e - |> terminate (dot ^^ OCaml.int nth) - - method field_projection : concrete_ident -> expr fn = - fun i e -> - print#expr_at Expr_FieldProjection e - |> terminate (dot ^^ print#name_of_concrete_ident i) - - method expr_app : expr -> expr list -> generic_value list fn = - fun f args _generic_args -> - let args = - separate_map - (comma ^^ break 1) - (print#expr_at Expr_App_arg >> group) - args - in - let f = 
print#expr_at Expr_App_f f |> group in - f ^^ iblock parens args - - method doc_construct_tuple : document list fn = - separate (comma ^^ break 1) >> iblock parens - - method expr_construct_tuple : expr list fn = - List.map ~f:(print#expr_at Expr_ConstructTuple) - >> print#doc_construct_tuple +module AnnotatedString = struct + type t = string * Annotation.t list [@@deriving show, yojson, eq] + + let to_string = fst + + let to_spanned_strings ((s, annots) : t) : (Ast.span * string) list = + Annotation.split_with_string s annots + + let to_sourcemap : t -> Types.source_map = + snd >> List.filter_map ~f:Annotation.to_mapping >> Sourcemaps.Source_maps.mk + >> fun ({ + mappings; + sourceRoot; + sources; + sourcesContent; + names; + version; + file; + } : + Sourcemaps.Source_maps.t) -> + Types. + { mappings; sourceRoot; sources; sourcesContent; names; version; file } +end - method pat_construct_tuple : pat list fn = - List.map ~f:(print#pat_at Pat_ConstructTuple) - >> print#doc_construct_tuple +(** Helper class that brings imperative span *) +class span_helper : + object + method span_data : Annotation.t list + (** Get the span annotation accumulated while printing *) + + method with_span : span:span -> (unit -> document) -> document + (** Runs the printer `f` under a node of span `span` *) + + method current_span : span + (** Get the current span *) + end = + object (self) + val mutable current_span = Span.default + val mutable span_data : Annotation.t list = [] + method span_data = span_data + method current_span = current_span + + method with_span ~(span : span) (f : unit -> document) : document = + let prev_span = current_span in + current_span <- span; + let doc = f () |> self#spanned_doc |> custom in + current_span <- prev_span; + doc + + method private spanned_doc (doc : document) : custom = + let span = current_span in + object + method requirement : requirement = requirement doc + + method pretty : output -> state -> int -> bool -> unit = + fun o s i b -> + 
span_data <- ({ line = s.line; col = s.column }, span) :: span_data; + pretty o s i b doc + + method compact : output -> unit = fun o -> compact o doc + end + end - method global_ident_projector : global_ident fn = - function - | `Projector (`Concrete i) | `Concrete i -> print#concrete_ident i +module Make (F : Features.T) = struct + module AST = Ast.Make (F) + open Ast.Make (F) + module Gen = Generated_generic_printer_base.Make (F) + + type printer = (Annotation.t list, PPrint.document) Gen.object_type + type finalized_printer = (unit, string * Annotation.t list) Gen.object_type + + let finalize (new_printer : unit -> printer) : finalized_printer = + Gen.map (fun apply -> + let printer = new_printer () in + let doc = apply printer in + let buf = Buffer.create 0 in + PPrint.ToBuffer.pretty 1.0 80 buf doc; + (Buffer.contents buf, printer#span_data)) + + class virtual base = + object (self) + inherit Gen.base as super + inherit span_helper + val mutable current_namespace : (string * string list) option = None + + method private catch_exn (handle : string -> document) + (f : unit -> document) : document = + self#catch_exn' + (fun context kind -> + Diagnostics.pretty_print_context_kind context kind |> handle) + f + + method private catch_exn' + (handle : Diagnostics.Context.t -> Diagnostics.kind -> document) + (f : unit -> document) : document = + try f () + with Diagnostics.SpanFreeError.Exn (Data (context, kind)) -> + handle context kind + + (** {2:specialize-expr Printer settings} *) + + method virtual printer_name : string + (** Mark a path as unreachable *) + + val concrete_ident_view : (module Concrete_ident.VIEW_API) = + (module Concrete_ident.DefaultViewAPI) + (** The concrete ident view to be used *) + + (** {2:specialize-expr Utility functions} *) + + method assertion_failure : 'any. 
string -> 'any = + fun details -> + let span = Span.to_thir self#current_span in + let kind = Types.AssertionFailure { details } in + let ctx = Diagnostics.Context.GenericPrinter self#printer_name in + Diagnostics.SpanFreeError.raise ~span ctx kind + (** An assertion failed *) + + method unreachable : 'any. unit -> 'any = + self#assertion_failure "Unreachable" + (** Mark a path as unreachable *) + + method local_ident (id : local_ident) : document = + let module View = (val concrete_ident_view) in + View.local_ident + (match String.chop_prefix ~prefix:"impl " id.name with + | Some _ -> + let name = "impl_" ^ Int.to_string ([%hash: string] id.name) in + { id with name } + | _ -> id) + |> string + (** {2:specialize-expr Printers for special types} *) + + method concrete_ident ~local (id : Concrete_ident.view) : document = + string + (if local then id.definition + else + String.concat ~sep:self#module_path_separator + (id.crate :: (id.path @ [ id.definition ]))) + (** [concrete_ident ~local id] prints a name without path if + [local] is true, otherwise it prints the full path, separated by + `module_path_separator`. *) + + method quote (quote : quote) : document = + List.map + ~f:(function + | `Verbatim code -> string code + | `Expr e -> self#print_expr AstPosition_Quote e + | `Pat p -> self#print_pat AstPosition_Quote p + | `Typ p -> self#print_ty AstPosition_Quote p) + quote.contents + |> concat + + (** {2:specialize-expr Specialized printers for [expr]} *) + + method virtual expr'_App_constant + : super:expr -> + constant:concrete_ident lazy_doc -> + generics:generic_value lazy_doc list -> + document + (** [expr'_App_constant ~super ~constant ~generics] prints the + constant [e] with generics [generics]. [super] is the + unspecialized [expr]. 
*) + + method virtual expr'_App_application + : super:expr -> + f:expr lazy_doc -> + args:expr lazy_doc list -> + generics:generic_value lazy_doc list -> + document + (** [expr'_App_application ~super ~f ~args ~generics] prints the + function application [e<...generics>(...args)]. [super] is the + unspecialized [expr]. *) + + method virtual expr'_App_tuple_projection + : super:expr -> size:int -> nth:int -> e:expr lazy_doc -> document + (** [expr'_App_tuple_projection ~super ~size ~nth ~e] prints + the projection of the [nth] component of the tuple [e] of + size [size]. [super] is the unspecialized [expr]. *) + + method virtual expr'_App_field_projection + : super:expr -> + field:concrete_ident lazy_doc -> + e:expr lazy_doc -> + document + (** [expr'_App_field_projection ~super ~field ~e] prints the + projection of the field [field] in the expression [e]. [super] + is the unspecialized [expr]. *) + + method virtual expr'_Construct_inductive + : super:expr -> + constructor:concrete_ident lazy_doc -> + is_record:bool -> + is_struct:bool -> + fields:(global_ident lazy_doc * expr lazy_doc) list -> + base:(expr lazy_doc * F.construct_base) lazy_doc option -> + document + (** [expr'_Construct_inductive ~super ~is_record ~is_struct + ~constructor ~base ~fields] prints the construction of an + inductive with base [base] and fields [fields]. [super] is the + unspecialized [expr]. 
TODO doc is_record is_struct *) + + method virtual expr'_Construct_tuple + : super:expr -> components:expr lazy_doc list -> document + + method virtual expr'_GlobalVar_concrete + : super:expr -> concrete_ident lazy_doc -> document + + method virtual expr'_GlobalVar_primitive + : super:expr -> primitive_ident -> document + + (** {2:specialize-pat Specialized printers for [pat]} *) + + method virtual pat'_PConstruct_inductive + : super:pat -> + constructor:concrete_ident lazy_doc -> + is_record:bool -> + is_struct:bool -> + fields:(global_ident lazy_doc * pat lazy_doc) list -> + document + + method virtual pat'_PConstruct_tuple + : super:pat -> components:pat lazy_doc list -> document + + (** {2:specialize-lhs Specialized printers for [lhs]} *) + + method virtual lhs_LhsFieldAccessor_field + : e:lhs lazy_doc -> + typ:ty lazy_doc -> + field:concrete_ident lazy_doc -> + witness:F.nontrivial_lhs -> + document + + method virtual lhs_LhsFieldAccessor_tuple + : e:lhs lazy_doc -> + typ:ty lazy_doc -> + nth:int -> + size:int -> + witness:F.nontrivial_lhs -> + document + + (** {2:specialize-ty Specialized printers for [ty]} *) + + method virtual ty_TApp_tuple : types:ty list -> document + (** [ty_TApp_tuple ~types] prints a tuple type with + compounds types [types]. *) + + method virtual ty_TApp_application + : typ:concrete_ident lazy_doc -> + generics:generic_value lazy_doc list -> + document + (** [ty_TApp_application ~typ ~generics] prints the type + [typ<...generics>]. *) + + (** {2:specialize-ty Specialized printers for [item]} *) + + method virtual item'_Type_struct + : super:item -> + name:concrete_ident lazy_doc -> + generics:generics lazy_doc -> + tuple_struct:bool -> + arguments: + (concrete_ident lazy_doc * ty lazy_doc * attr list lazy_doc) list -> + document + (** [item'_Type_struct ~super ~name ~generics ~tuple_struct ~arguments] prints the struct definition [struct name arguments]. `tuple_struct` says whether we are dealing with a tuple struct + (e.g. 
[struct Foo(T1, T2)]) or a named struct + (e.g. [struct Foo {field: T1, other: T2}])? *) + + method virtual item'_Type_enum + : super:item -> + name:concrete_ident lazy_doc -> + generics:generics lazy_doc -> + variants:variant lazy_doc list -> + document + (** [item'_Type_enum ~super ~name ~generics ~variants] prints + the enum type [enum name { ... }]. *) + + method virtual item'_Enum_Variant + : name:concrete_ident lazy_doc -> + arguments: + (concrete_ident lazy_doc * ty lazy_doc * attrs lazy_doc) list -> + is_record:bool -> + attrs:attrs lazy_doc -> + document + (** [item'_Enum_Variant] prints a variant of an enum. *) + + (** {2:common-nodes Printers for common nodes} *) + + method virtual common_array : document list -> document + (** [common_array values] is a default for printing array-like nodes: array patterns, array expressions. *) + + (** {2:defaults Default printers} **) + + method module_path_separator = "::" + (** [module_path_separator] is the default separator for + paths. `::` by default *) + + method pat'_PArray ~super:_ ~args = + List.map ~f:(fun arg -> arg#p) args |> self#common_array + + method expr'_Array ~super:_ args = + List.map ~f:(fun arg -> arg#p) args |> self#common_array + + method pat'_POr ~super:_ ~subpats = + List.map ~f:(fun subpat -> subpat#p) subpats + |> separate (break 1 ^^ char '|' ^^ space) + + (**/**) + (* This section is about defining or overriding + `_do_not_override_` methods. 
This is internal logic, whence this + is excluded from documentation (with the nice and user friendly + `(**/**)` ocamldoc syntax) *) + + method _do_not_override_lhs_LhsFieldAccessor ~e ~typ ~field ~witness = + let field = + match field with + | `Projector field -> field | _ -> - print#assertion_failure "global_ident_projector: not a projector" - - method doc_construct_inductive - : is_record:bool -> - is_struct:bool -> - constructor:concrete_ident -> - base:document option -> - (global_ident * document) list fn = - fun ~is_record ~is_struct:_ ~constructor ~base:_ args -> - if is_record then - print#concrete_ident constructor - ^^ space - ^^ iblock parens - (separate_map (break 0) - (fun (field, body) -> - (print#global_ident_projector field - |> terminate comma |> group) - ^^ colon ^^ space ^^ iblock Fn.id body) - args) - else - print#concrete_ident constructor - ^^ space - ^^ iblock parens (separate_map (break 0) snd args) - - method expr_construct_inductive - : is_record:bool -> - is_struct:bool -> - constructor:concrete_ident -> - base:(expr * F.construct_base) option -> - (global_ident * expr) list fn = - fun ~is_record ~is_struct ~constructor ~base -> - let base = - Option.map - ~f:(fst >> print#expr_at Expr_ConcreteInductive_base) - base + self#assertion_failure + @@ "LhsFieldAccessor: field not a [`Projector] " + in + match field with + | `TupleField (nth, size) -> + self#lhs_LhsFieldAccessor_tuple ~e ~typ ~nth ~size ~witness + | `Concrete field -> + let field : concrete_ident lazy_doc = + self#_do_not_override_lazy_of_concrete_ident + AstPos_lhs_LhsFieldAccessor_field field in - List.map ~f:(print#expr_at Expr_ConcreteInductive_field |> map_snd) - >> print#doc_construct_inductive ~is_record ~is_struct ~constructor - ~base - - method attr : attr fn = fun _ -> empty - - method! 
pat' : par_state -> pat' fn = - fun ctx -> - let wrap_parens = - group - >> - match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces + self#lhs_LhsFieldAccessor_field ~e ~typ ~field ~witness + + method _do_not_override_expr'_App ~super ~f ~args ~generic_args + ~bounds_impls ~trait = + let _ = (super, f, args, generic_args, bounds_impls, trait) in + match f#v with + | { e = GlobalVar i; _ } -> ( + let expect_one_arg where = + match args with + | [ arg ] -> arg + | _ -> self#assertion_failure @@ "Expected one arg at " ^ where in - function - | PWild -> underscore - | PAscription { typ; typ_span; pat } -> - print#pat_ascription ~typ ~typ_span pat |> wrap_parens - | PBinding { mut; mode; var; typ = _; subpat } -> ( - let p = - (match mode with ByRef _ -> string "&" | _ -> empty) - ^^ (match mut with Mutable _ -> string "mut " | _ -> empty) - ^^ print#local_ident var + match i with + | `Concrete _ | `Primitive _ -> ( + match (args, i) with + | [], `Concrete i -> + let constant = + self#_do_not_override_lazy_of_concrete_ident + AstPos_expr'_App_f i + in + self#expr'_App_constant ~super ~constant + ~generics:generic_args + | [], _ -> self#assertion_failure "Primitive app of arity 0" + | _ -> + self#expr'_App_application ~super ~f ~args + ~generics:generic_args) + | `TupleType _ | `TupleCons _ | `TupleField _ -> + self#assertion_failure "App: unexpected tuple" + | `Projector (`TupleField (nth, size)) -> + let e = expect_one_arg "projector tuple field" in + self#expr'_App_tuple_projection ~super ~size ~nth ~e + | `Projector (`Concrete field) -> + let e = expect_one_arg "projector concrete" in + let field = + self#_do_not_override_lazy_of_concrete_ident + AstPos_expr'_App_f field in - match subpat with - | Some (subpat, _) -> - p ^^ space ^^ at ^^ space - ^^ print#pat_at Pat_PBinding_subpat subpat - |> wrap_parens - | None -> p) - | PArray { args } -> - separate_map (break 0) - (print#pat_at Pat_PArray >> terminate comma >> group) - args - |> iblock brackets - | 
PDeref { subpat; _ } -> - ampersand ^^ print#pat_at Pat_PDeref subpat - | (PConstruct _ | PConstant _) as pat -> super#pat' ctx pat - | POr { subpats } -> - separate_map (bar ^^ break 1) (print#pat_at Pat_Or) subpats - - method pat_ascription : typ:ty -> typ_span:span -> pat fn = - fun ~typ ~typ_span pat -> - print#pat_at Pat_Ascription_pat pat - ^^ colon - ^^ print#with_span ~span:typ_span (fun () -> - print#ty_at Pat_Ascription_typ typ) - - method expr_unwrapped : par_state -> expr fn = - fun ctx { e; _ } -> print#expr' ctx e - - method param : param fn = - fun { pat; typ; typ_span; attrs } -> - let typ = - match typ_span with - | Some span -> - print#with_span ~span (fun _ -> print#ty_at Param_typ typ) - | None -> print#ty_at Param_typ typ + self#expr'_App_field_projection ~super ~field ~e) + | _ -> self#assertion_failure "Primitive app of arity 0" + + method _do_not_override_expr'_Construct ~super ~constructor ~is_record + ~is_struct ~fields ~base = + match constructor with + | `Concrete constructor -> + let constructor = + self#_do_not_override_lazy_of_concrete_ident + AstPos_expr'_Construct_constructor constructor in - print#attrs attrs ^^ print#pat_at Param_pat pat ^^ space ^^ colon - ^^ space ^^ typ - - method item' : item' fn = - function - | Fn { name; generics; body; params; safety } -> - let params = - iblock parens - (separate_map (comma ^^ break 1) print#param params) - in - let generics = print#generic_params generics.params in - let safety = - optional Base.Fn.id - (match safety with - | Safe -> None - | Unsafe _ -> Some !^"unsafe ") - in - safety ^^ !^"fn" ^^ space ^^ print#concrete_ident name ^^ generics - ^^ params - ^^ iblock braces (print#expr_at Item_Fn_body body) - | Quote quote -> print#quote quote - | _ -> string "item not implemented" - - method generic_param' : generic_param fn = - fun { ident; attrs; kind; _ } -> - let suffix = - match kind with - | GPLifetime _ -> space ^^ colon ^^ space ^^ string "'unk" - | GPType -> empty - | GPConst 
{ typ } -> - space ^^ colon ^^ space - ^^ print#ty_at GenericParam_GPConst typ + let fields = + List.map + ~f:(fun field -> + let name, expr = field#v in + ( self#_do_not_override_lazy_of_global_ident + Generated_generic_printer_base + .AstPos_pat'_PConstruct_constructor name, + expr )) + fields in - let prefix = - match kind with - | GPConst _ -> string "const" ^^ space - | _ -> empty + self#expr'_Construct_inductive ~super ~constructor ~is_record + ~is_struct ~fields ~base + | `TupleCons _ -> + let components = List.map ~f:(fun field -> snd field#v) fields in + self#expr'_Construct_tuple ~super ~components + | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ -> + self#assertion_failure "Construct unexpected constructors" + + method _do_not_override_expr'_GlobalVar ~super global_ident = + match global_ident with + | `Concrete concrete -> + let concrete = + self#_do_not_override_lazy_of_concrete_ident + AstPos_expr'_GlobalVar_x0 concrete in - let ident = - let name = - if String.(ident.name = "_") then "Anonymous" else ident.name - in - { ident with name } + self#expr'_GlobalVar_concrete ~super concrete + | `Primitive primitive -> + self#expr'_GlobalVar_primitive ~super primitive + | `TupleCons 0 -> + self#_do_not_override_expr'_Construct ~super + ~constructor:global_ident ~is_record:false ~is_struct:false + ~fields:[] ~base:None + | _ -> + self#assertion_failure + @@ "GlobalVar: expected a concrete or primitive global ident, got:" + ^ [%show: global_ident] global_ident + + method _do_not_override_pat'_PConstruct ~super ~constructor ~is_record + ~is_struct ~fields = + match constructor with + | `Concrete constructor -> + let constructor = + self#_do_not_override_lazy_of_concrete_ident + AstPos_pat'_PConstruct_constructor constructor in - prefix ^^ print#attrs attrs ^^ print#local_ident ident ^^ suffix - - method generic_params : generic_param list fn = - separate_map comma print#generic_param >> group >> angles - - (*Option.map ~f:(...) 
guard |> Option.value ~default:empty*) - method arm' : arm' fn = - fun { arm_pat; body; guard } -> - let pat = print#pat_at Arm_pat arm_pat |> group in - let body = print#expr_at Arm_body body in - let guard = - Option.map - ~f:(fun { guard = IfLet { lhs; rhs; _ }; _ } -> - string " if let " ^^ print#pat_at Arm_pat lhs ^^ string " = " - ^^ print#expr_at Arm_body rhs) - guard - |> Option.value ~default:empty + let fields = + List.map + ~f:(fun field -> + let { field; pat } = field#v in + let field = + self#_do_not_override_lazy_of_global_ident + Generated_generic_printer_base + .AstPos_pat'_PConstruct_fields field + in + let pat = + self#_do_not_override_lazy_of_pat + Generated_generic_printer_base + .AstPos_pat'_PConstruct_fields pat + in + (field, pat)) + fields in - pat ^^ guard ^^ string " => " ^^ body ^^ comma - end - end - - include Class - - include Api (struct - type aux_info = unit + self#pat'_PConstruct_inductive ~super ~constructor ~is_record + ~is_struct ~fields + | `TupleCons _ -> + let components = + List.map + ~f:(fun field -> + self#_do_not_override_lazy_of_pat AstPos_field_pat__pat + field#v.pat) + fields + in + self#pat'_PConstruct_tuple ~super ~components + | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ -> + self#assertion_failure "Construct unexpected constructors" - let new_print () = (new Class.print :> print_object) - end) + method _do_not_override_ty_TApp ~ident ~args = + match ident with + | `Concrete ident -> + let typ = + self#_do_not_override_lazy_of_concrete_ident AstPos_ty_TApp_args + ident + in + self#ty_TApp_application ~typ ~generics:args |> group + | `Primitive _ | `TupleCons _ | `TupleField _ | `Projector _ -> + self#assertion_failure "TApp not concrete" + | `TupleType size -> + let types = + List.filter_map + ~f:(fun garg -> + match garg#v with GType t -> Some t | _ -> None) + args + in + if [%equal: int] (List.length args) size |> not then + self#assertion_failure "malformed [ty.TApp] tuple"; + self#ty_TApp_tuple 
~types + + method _do_not_override_item'_Type ~super ~name ~generics ~variants + ~is_struct = + let generics, _, _ = generics#v in + if is_struct then + match variants with + | [ variant ] -> + let variant_arguments = + List.map + ~f:(fun (ident, typ, attrs) -> + ( self#_do_not_override_lazy_of_concrete_ident + AstPos_variant__arguments ident, + self#_do_not_override_lazy_of_ty AstPos_variant__arguments + typ, + self#_do_not_override_lazy_of_attrs AstPos_variant__attrs + attrs )) + variant#v.arguments + in + self#item'_Type_struct ~super ~name ~generics + ~tuple_struct:(not variant#v.is_record) + ~arguments:variant_arguments + | _ -> self#unreachable () + else self#item'_Type_enum ~super ~name ~generics ~variants + + method _do_not_override_variant + : name:concrete_ident lazy_doc -> + arguments: + (concrete_ident lazy_doc * ty lazy_doc * attrs lazy_doc) list -> + is_record:bool -> + attrs:attrs lazy_doc -> + document = + self#item'_Enum_Variant + + method _do_not_override_lazy_of_local_ident ast_position + (id : local_ident) = + lazy_doc (fun (id : local_ident) -> self#local_ident id) ast_position id + + method _do_not_override_lazy_of_concrete_ident ast_position + (id : concrete_ident) : concrete_ident lazy_doc = + lazy_doc + (fun (id : concrete_ident) -> + let module View = (val concrete_ident_view) in + let id = View.to_view id in + let ns_crate, ns_path = + Option.value ~default:("", []) current_namespace + in + let local = + String.(ns_crate = id.crate) && [%eq: string list] ns_path id.path + in + self#concrete_ident ~local id) + ast_position id + + method _do_not_override_lazy_of_global_ident ast_position + (id : global_ident) : global_ident lazy_doc = + lazy_doc + (fun (id : global_ident) -> + match id with + | `Concrete cid -> + (self#_do_not_override_lazy_of_concrete_ident ast_position cid) + #p + | _ -> + self#assertion_failure + ("_do_not_override_lazy_of_global_ident: expected [`Concrete \ + _] got [" + ^ [%show: global_ident] id + ^ "]")) + 
ast_position id + + method _do_not_override_lazy_of_quote ast_position (value : quote) + : quote lazy_doc = + lazy_doc (fun (value : quote) -> self#quote value) ast_position value + + method! _do_not_override_lazy_of_item ast_position (value : item) + : item lazy_doc = + let module View = (val concrete_ident_view) in + current_namespace <- View.to_namespace value.ident |> Option.some; + super#_do_not_override_lazy_of_item ast_position value + + method _do_not_override_lazy_of_generics ast_position (value : generics) + : (generics lazy_doc + * generic_param lazy_doc list + * generic_constraint lazy_doc list) + lazy_doc = + let params = + List.map + ~f:(fun x -> + self#_do_not_override_lazy_of_generic_param + AstPos_generics__params x) + value.params + in + let constraints = + List.map + ~f:(fun x -> + self#_do_not_override_lazy_of_generic_constraint + AstPos_generics__constraints x) + value.constraints + in + lazy_doc + (fun (lazy_doc, _, _) -> lazy_doc#p) + ast_position + ( lazy_doc + (fun (value : generics) -> + self#wrap_generics ast_position value + (self#generics ~params ~constraints)) + ast_position value, + params, + constraints ) + + (**/**) + end end diff --git a/engine/lib/generic_printer/generic_printer_template.generate.js b/engine/lib/generic_printer/generic_printer_template.generate.js new file mode 100755 index 000000000..66ec63b8c --- /dev/null +++ b/engine/lib/generic_printer/generic_printer_template.generate.js @@ -0,0 +1,49 @@ +#!/usr/bin/env node + +// This script regenerates `generic_printer_template.ml` + +const {readFileSync, writeFileSync} = require('fs'); +const {execSync} = require('child_process'); + +const GENERIC_PRINTER_DIR = `lib/generic_printer`; +const GENERIC_PRINTER = `${GENERIC_PRINTER_DIR}/generic_printer.ml`; +const TEMPLATE = `${GENERIC_PRINTER_DIR}/generic_printer_template.ml`; + +// Utility function to format an OCaml module +let fmt = path => execSync(`ocamlformat -i ${path}`); + +// Go to the root of the engine 
+require('process').chdir(`${execSync('git rev-parse --show-toplevel').toString().trim()}/engine`); + + +// Prints the signature of module `Generic_printer` (using `ocaml-print-intf`) +let mli = execSync(`dune exec -- ocaml-print-intf ${GENERIC_PRINTER}`).toString().split('class virtual base')[2]; + +writeFileSync('/tmp/exported.mli', mli); + +// Parses all +let virtual_methods = [...mli.matchAll(/^( +)method (private )?virtual +(?.*) +:(?.*(\n \1.*)*)/gm)]; + +let output = []; +for(let v of virtual_methods) { + let {name, sig} = v.groups; + let out = sig.trim().split('->').slice(-1)[0].trim().split('.').slice(-1)[0]; + let args = sig.trim().split('->').map((s, i) => { + let chunks = s.trim().split(':').reverse(); + if(chunks.length > 2 || chunks.length == 0) { + throw "Chunks: bad length"; + } + let [type, name] = chunks; + name = name ? '~'+name+':_' : '_x'+(i + 1); + return {type, name}; + }).map(n => n.name).slice(0, -1).join(' '); + + output.push(`method ${name} ${args} = default_${out}_for "${name}"`); +} + +{ + let [before, _, after] = readFileSync(TEMPLATE).toString().split(/(?=\(\* (?:BEGIN|END) GENERATED \*\))/); + writeFileSync(TEMPLATE, before + '\n(* BEGIN GENERATED *)\n' + output.join('\n') + '\n' + after); +} + +fmt(TEMPLATE); diff --git a/engine/lib/generic_printer/generic_printer_template.ml b/engine/lib/generic_printer/generic_printer_template.ml new file mode 100644 index 000000000..9105966aa --- /dev/null +++ b/engine/lib/generic_printer/generic_printer_template.ml @@ -0,0 +1,370 @@ +open! Prelude +open! Ast +open! 
PPrint + +module Make + (F : Features.T) (Default : sig + val default : string -> string + end) = +struct + module AST = Ast.Make (F) + open Ast.Make (F) + module Base = Generic_printer.Make (F) + open PPrint + + let default_string_for s = "TODO: please implement the method `" ^ s ^ "`" + let default_document_for = default_string_for >> string + + class printer = + object + inherit Base.base + + (* BEGIN GENERATED *) + method arm ~arm:_ ~span:_ = default_document_for "arm" + + method arm' ~super:_ ~arm_pat:_ ~body:_ ~guard:_ = + default_document_for "arm'" + + method attrs _x1 = default_document_for "attrs" + + method binding_mode_ByRef _x1 _x2 = + default_document_for "binding_mode_ByRef" + + method binding_mode_ByValue = default_document_for "binding_mode_ByValue" + method borrow_kind_Mut _x1 = default_document_for "borrow_kind_Mut" + method borrow_kind_Shared = default_document_for "borrow_kind_Shared" + method borrow_kind_Unique = default_document_for "borrow_kind_Unique" + method common_array _x1 = default_document_for "common_array" + + method dyn_trait_goal ~trait:_ ~non_self_args:_ = + default_document_for "dyn_trait_goal" + + method error_expr _x1 = default_document_for "error_expr" + method error_item _x1 = default_document_for "error_item" + method error_pat _x1 = default_document_for "error_pat" + method expr ~e:_ ~span:_ ~typ:_ = default_document_for "expr" + + method expr'_AddressOf ~super:_ ~mut:_ ~e:_ ~witness:_ = + default_document_for "expr'_AddressOf" + + method expr'_App_application ~super:_ ~f:_ ~args:_ ~generics:_ = + default_document_for "expr'_App_application" + + method expr'_App_constant ~super:_ ~constant:_ ~generics:_ = + default_document_for "expr'_App_constant" + + method expr'_App_field_projection ~super:_ ~field:_ ~e:_ = + default_document_for "expr'_App_field_projection" + + method expr'_App_tuple_projection ~super:_ ~size:_ ~nth:_ ~e:_ = + default_document_for "expr'_App_tuple_projection" + + method expr'_Ascription ~super:_ ~e:_ 
~typ:_ = + default_document_for "expr'_Ascription" + + method expr'_Assign ~super:_ ~lhs:_ ~e:_ ~witness:_ = + default_document_for "expr'_Assign" + + method expr'_Block ~super:_ ~e:_ ~safety_mode:_ ~witness:_ = + default_document_for "expr'_Block" + + method expr'_Borrow ~super:_ ~kind:_ ~e:_ ~witness:_ = + default_document_for "expr'_Borrow" + + method expr'_Break ~super:_ ~e:_ ~acc:_ ~label:_ ~witness:_ = + default_document_for "expr'_Break" + + method expr'_Closure ~super:_ ~params:_ ~body:_ ~captures:_ = + default_document_for "expr'_Closure" + + method expr'_Construct_inductive ~super:_ ~constructor:_ ~is_record:_ + ~is_struct:_ ~fields:_ ~base:_ = + default_document_for "expr'_Construct_inductive" + + method expr'_Construct_tuple ~super:_ ~components:_ = + default_document_for "expr'_Construct_tuple" + + method expr'_Continue ~super:_ ~acc:_ ~label:_ ~witness:_ = + default_document_for "expr'_Continue" + + method expr'_EffectAction ~super:_ ~action:_ ~argument:_ = + default_document_for "expr'_EffectAction" + + method expr'_GlobalVar_concrete ~super:_ _x2 = + default_document_for "expr'_GlobalVar_concrete" + + method expr'_GlobalVar_primitive ~super:_ _x2 = + default_document_for "expr'_GlobalVar_primitive" + + method expr'_If ~super:_ ~cond:_ ~then_:_ ~else_:_ = + default_document_for "expr'_If" + + method expr'_Let ~super:_ ~monadic:_ ~lhs:_ ~rhs:_ ~body:_ = + default_document_for "expr'_Let" + + method expr'_Literal ~super:_ _x2 = default_document_for "expr'_Literal" + method expr'_LocalVar ~super:_ _x2 = default_document_for "expr'_LocalVar" + + method expr'_Loop ~super:_ ~body:_ ~kind:_ ~state:_ ~control_flow:_ + ~label:_ ~witness:_ = + default_document_for "expr'_Loop" + + method expr'_MacroInvokation ~super:_ ~macro:_ ~args:_ ~witness:_ = + default_document_for "expr'_MacroInvokation" + + method expr'_Match ~super:_ ~scrutinee:_ ~arms:_ = + default_document_for "expr'_Match" + + method expr'_QuestionMark ~super:_ ~e:_ ~return_typ:_ ~witness:_ = + 
default_document_for "expr'_QuestionMark" + + method expr'_Quote ~super:_ _x2 = default_document_for "expr'_Quote" + + method expr'_Return ~super:_ ~e:_ ~witness:_ = + default_document_for "expr'_Return" + + method cf_kind_BreakOrReturn = + default_document_for "cf_kind_BreakOrReturn" + + method cf_kind_BreakOnly = default_document_for "cf_kind_BreakOnly" + method field_pat ~field:_ ~pat:_ = default_document_for "field_pat" + + method generic_constraint_GCLifetime _x1 _x2 = + default_document_for "generic_constraint_GCLifetime" + + method generic_constraint_GCProjection _x1 = + default_document_for "generic_constraint_GCProjection" + + method generic_constraint_GCType _x1 = + default_document_for "generic_constraint_GCType" + + method generic_param ~ident:_ ~span:_ ~attrs:_ ~kind:_ = + default_document_for "generic_param" + + method generic_param_kind_GPConst ~typ:_ = + default_document_for "generic_param_kind_GPConst" + + method generic_param_kind_GPLifetime ~witness:_ = + default_document_for "generic_param_kind_GPLifetime" + + method generic_param_kind_GPType = + default_document_for "generic_param_kind_GPType" + + method generic_value_GConst _x1 = + default_document_for "generic_value_GConst" + + method generic_value_GLifetime ~lt:_ ~witness:_ = + default_document_for "generic_value_GLifetime" + + method generic_value_GType _x1 = + default_document_for "generic_value_GType" + + method generics ~params:_ ~constraints:_ = default_document_for "generics" + method guard ~guard:_ ~span:_ = default_document_for "guard" + + method guard'_IfLet ~super:_ ~lhs:_ ~rhs:_ ~witness:_ = + default_document_for "guard'_IfLet" + + method impl_expr ~kind:_ ~goal:_ = default_document_for "impl_expr" + + method impl_expr_kind_Builtin _x1 = + default_document_for "impl_expr_kind_Builtin" + + method impl_expr_kind_Concrete _x1 = + default_document_for "impl_expr_kind_Concrete" + + method impl_expr_kind_Dyn = default_document_for "impl_expr_kind_Dyn" + + method impl_expr_kind_ImplApp 
~impl:_ ~args:_ = + default_document_for "impl_expr_kind_ImplApp" + + method impl_expr_kind_LocalBound ~id:_ = + default_document_for "impl_expr_kind_LocalBound" + + method impl_expr_kind_Parent ~impl:_ ~ident:_ = + default_document_for "impl_expr_kind_Parent" + + method impl_expr_kind_Projection ~impl:_ ~item:_ ~ident:_ = + default_document_for "impl_expr_kind_Projection" + + method impl_expr_kind_Self = default_document_for "impl_expr_kind_Self" + method impl_ident ~goal:_ ~name:_ = default_document_for "impl_ident" + + method impl_item ~ii_span:_ ~ii_generics:_ ~ii_v:_ ~ii_ident:_ ~ii_attrs:_ + = + default_document_for "impl_item" + + method impl_item'_IIFn ~body:_ ~params:_ = + default_document_for "impl_item'_IIFn" + + method impl_item'_IIType ~typ:_ ~parent_bounds:_ = + default_document_for "impl_item'_IIType" + + method item ~v:_ ~span:_ ~ident:_ ~attrs:_ = default_document_for "item" + + method item'_Alias ~super:_ ~name:_ ~item:_ = + default_document_for "item'_Alias" + + method item'_Enum_Variant ~name:_ ~arguments:_ ~is_record:_ ~attrs:_ = + default_document_for "item'_Enum_Variant" + + method item'_Fn ~super:_ ~name:_ ~generics:_ ~body:_ ~params:_ ~safety:_ = + default_document_for "item'_Fn" + + method item'_HaxError ~super:_ _x2 = default_document_for "item'_HaxError" + + method item'_IMacroInvokation ~super:_ ~macro:_ ~argument:_ ~span:_ + ~witness:_ = + default_document_for "item'_IMacroInvokation" + + method item'_Impl ~super:_ ~generics:_ ~self_ty:_ ~of_trait:_ ~items:_ + ~parent_bounds:_ ~safety:_ = + default_document_for "item'_Impl" + + method item'_NotImplementedYet = + default_document_for "item'_NotImplementedYet" + + method item'_Quote ~super:_ ~quote:_ ~origin:_ = + default_document_for "item'_Quote" + + method item'_Trait ~super:_ ~name:_ ~generics:_ ~items:_ ~safety:_ = + default_document_for "item'_Trait" + + method item'_TyAlias ~super:_ ~name:_ ~generics:_ ~ty:_ = + default_document_for "item'_TyAlias" + + method item'_Type_enum 
~super:_ ~name:_ ~generics:_ ~variants:_ = + default_document_for "item'_Type_enum" + + method item'_Type_struct ~super:_ ~name:_ ~generics:_ ~tuple_struct:_ + ~arguments:_ = + default_document_for "item'_Type_struct" + + method item'_Use ~super:_ ~path:_ ~is_external:_ ~rename:_ = + default_document_for "item'_Use" + + method lhs_LhsArbitraryExpr ~e:_ ~witness:_ = + default_document_for "lhs_LhsArbitraryExpr" + + method lhs_LhsArrayAccessor ~e:_ ~typ:_ ~index:_ ~witness:_ = + default_document_for "lhs_LhsArrayAccessor" + + method lhs_LhsFieldAccessor_field ~e:_ ~typ:_ ~field:_ ~witness:_ = + default_document_for "lhs_LhsFieldAccessor_field" + + method lhs_LhsFieldAccessor_tuple ~e:_ ~typ:_ ~nth:_ ~size:_ ~witness:_ = + default_document_for "lhs_LhsFieldAccessor_tuple" + + method lhs_LhsLocalVar ~var:_ ~typ:_ = + default_document_for "lhs_LhsLocalVar" + + method literal_Bool _x1 = default_document_for "literal_Bool" + method literal_Char _x1 = default_document_for "literal_Char" + + method literal_Float ~value:_ ~negative:_ ~kind:_ = + default_document_for "literal_Float" + + method literal_Int ~value:_ ~negative:_ ~kind:_ = + default_document_for "literal_Int" + + method literal_String _x1 = default_document_for "literal_String" + + method loop_kind_ForIndexLoop ~start:_ ~end_:_ ~var:_ ~var_typ:_ + ~witness:_ = + default_document_for "loop_kind_ForIndexLoop" + + method loop_kind_ForLoop ~pat:_ ~it:_ ~witness:_ = + default_document_for "loop_kind_ForLoop" + + method loop_kind_UnconditionalLoop = + default_document_for "loop_kind_UnconditionalLoop" + + method loop_kind_WhileLoop ~condition:_ ~witness:_ = + default_document_for "loop_kind_WhileLoop" + + method loop_state ~init:_ ~bpat:_ ~witness:_ = + default_document_for "loop_state" + + method modul _x1 = default_document_for "modul" + + method param ~pat:_ ~typ:_ ~typ_span:_ ~attrs:_ = + default_document_for "param" + + method pat ~p:_ ~span:_ ~typ:_ = default_document_for "pat" + + method pat'_PAscription 
~super:_ ~typ:_ ~typ_span:_ ~pat:_ = + default_document_for "pat'_PAscription" + + method pat'_PBinding ~super:_ ~mut:_ ~mode:_ ~var:_ ~typ:_ ~subpat:_ = + default_document_for "pat'_PBinding" + + method pat'_PConstant ~super:_ ~lit:_ = + default_document_for "pat'_PConstant" + + method pat'_PConstruct_inductive ~super:_ ~constructor:_ ~is_record:_ + ~is_struct:_ ~fields:_ = + default_document_for "pat'_PConstruct_inductive" + + method pat'_PConstruct_tuple ~super:_ ~components:_ = + default_document_for "pat'_PConstruct_tuple" + + method pat'_PDeref ~super:_ ~subpat:_ ~witness:_ = + default_document_for "pat'_PDeref" + + method pat'_PWild = default_document_for "pat'_PWild" + method printer_name = default_string_for "printer_name" + + method projection_predicate ~impl:_ ~assoc_item:_ ~typ:_ = + default_document_for "projection_predicate" + + method safety_kind_Safe = default_document_for "safety_kind_Safe" + method safety_kind_Unsafe _x1 = default_document_for "safety_kind_Unsafe" + + method supported_monads_MException _x1 = + default_document_for "supported_monads_MException" + + method supported_monads_MOption = + default_document_for "supported_monads_MOption" + + method supported_monads_MResult _x1 = + default_document_for "supported_monads_MResult" + + method trait_goal ~trait:_ ~args:_ = default_document_for "trait_goal" + + method trait_item ~ti_span:_ ~ti_generics:_ ~ti_v:_ ~ti_ident:_ + ~ti_attrs:_ = + default_document_for "trait_item" + + method trait_item'_TIDefault ~params:_ ~body:_ ~witness:_ = + default_document_for "trait_item'_TIDefault" + + method trait_item'_TIFn _x1 = default_document_for "trait_item'_TIFn" + method trait_item'_TIType _x1 = default_document_for "trait_item'_TIType" + + method ty_TApp_application ~typ:_ ~generics:_ = + default_document_for "ty_TApp_application" + + method ty_TApp_tuple ~types:_ = default_document_for "ty_TApp_tuple" + method ty_TArray ~typ:_ ~length:_ = default_document_for "ty_TArray" + method ty_TArrow _x1 _x2 
= default_document_for "ty_TArrow" + + method ty_TAssociatedType ~impl:_ ~item:_ = + default_document_for "ty_TAssociatedType" + + method ty_TBool = default_document_for "ty_TBool" + method ty_TChar = default_document_for "ty_TChar" + method ty_TDyn ~witness:_ ~goals:_ = default_document_for "ty_TDyn" + method ty_TFloat _x1 = default_document_for "ty_TFloat" + method ty_TInt _x1 = default_document_for "ty_TInt" + method ty_TOpaque _x1 = default_document_for "ty_TOpaque" + method ty_TParam _x1 = default_document_for "ty_TParam" + method ty_TRawPointer ~witness:_ = default_document_for "ty_TRawPointer" + + method ty_TRef ~witness:_ ~region:_ ~typ:_ ~mut:_ = + default_document_for "ty_TRef" + + method ty_TSlice ~witness:_ ~ty:_ = default_document_for "ty_TSlice" + method ty_TStr = default_document_for "ty_TStr" + (* END GENERATED *) + end +end diff --git a/engine/lib/hax_io.ml b/engine/lib/hax_io.ml index af50a0615..0038375be 100644 --- a/engine/lib/hax_io.ml +++ b/engine/lib/hax_io.ml @@ -1,3 +1,7 @@ +(** +This module helps communicating with `cargo-hax`. +*) + open Prelude module type S = sig diff --git a/engine/lib/import_thir.ml b/engine/lib/import_thir.ml index 7c3f35c95..c9dc1a8a4 100644 --- a/engine/lib/import_thir.ml +++ b/engine/lib/import_thir.ml @@ -10,6 +10,7 @@ module Thir = struct type generic_param = generic_param_for__decorated_for__expr_kind type generic_param_kind = generic_param_kind_for__decorated_for__expr_kind type trait_item = trait_item_for__decorated_for__expr_kind + type ty = node_for__ty_kind end open! 
Prelude @@ -385,7 +386,7 @@ end) : EXPR = struct in (* if there is no expression & the last expression is ⊥, just use that *) let lift_last_statement_as_expr_if_possible expr stmts (ty : Thir.ty) = - match (ty, expr, List.drop_last stmts, List.last stmts) with + match (ty.value, expr, List.drop_last stmts, List.last stmts) with | ( Thir.Never, None, Some stmts, @@ -517,7 +518,7 @@ end) : EXPR = struct let f = let f = c_expr fun' in match (trait, fun'.contents) with - | Some _, GlobalName { id } -> + | Some _, GlobalName { id; _ } -> { f with e = GlobalVar (def_id (AssociatedItem Value) id) } | _ -> f in @@ -585,6 +586,7 @@ end) : EXPR = struct state = None; label = None; witness = W.loop; + control_flow = None; } | Match { scrutinee; arms } -> let scrutinee = c_expr scrutinee in @@ -636,13 +638,21 @@ end) : EXPR = struct trait = None (* TODO: see issue #328 *); bounds_impls = []; } - | GlobalName { id } -> GlobalVar (def_id Value id) + | GlobalName { id; constructor } -> + let kind = + match constructor with + | Some { kind = Struct _; _ } -> + Concrete_ident.Kind.Constructor { is_struct = true } + | Some _ -> Concrete_ident.Kind.Constructor { is_struct = false } + | None -> Concrete_ident.Kind.Value + in + GlobalVar (def_id kind id) | UpvarRef { var_hir_id = id; _ } -> LocalVar (local_ident Expr id) | Borrow { arg; borrow_kind = kind } -> let e' = c_expr arg in let kind = c_borrow_kind e.span kind in Borrow { kind; e = e'; witness = W.reference } - | AddressOf { arg; mutability = mut } -> + | RawBorrow { arg; mutability = mut } -> let e = c_expr arg in AddressOf { @@ -654,9 +664,9 @@ end) : EXPR = struct (* TODO: labels! 
*) let e = Option.map ~f:c_expr value in let e = Option.value ~default:(unit_expr span) e in - Break { e; label = None; witness = (W.break, W.loop) } + Break { e; acc = None; label = None; witness = (W.break, W.loop) } | Continue _ -> - Continue { e = None; label = None; witness = (W.continue, W.loop) } + Continue { acc = None; label = None; witness = (W.continue, W.loop) } | Return { value } -> let e = Option.map ~f:c_expr value in let e = Option.value ~default:(unit_expr span) e in @@ -679,9 +689,15 @@ end) : EXPR = struct (U.make_tuple_expr' ~span @@ List.map ~f:c_expr fields).e | Array { fields } -> Array (List.map ~f:c_expr fields) | Adt { info; base; fields; _ } -> - let constructor = - def_id (Constructor { is_struct = info.typ_is_struct }) info.variant + let is_struct, is_record = + match info.kind with + | Struct { named } -> (true, named) + | Enum { named; _ } -> (false, named) + | Union -> + unimplemented ~issue_id:998 [ e.span ] + "Construct union types: not supported" in + let constructor = def_id (Constructor { is_struct }) info.variant in let base = Option.map ~f:(fun base -> (c_expr base.base, W.construct_base)) @@ -695,14 +711,7 @@ end) : EXPR = struct (field, value)) fields in - Construct - { - is_record = info.variant_is_record; - is_struct = info.typ_is_struct; - constructor; - fields; - base; - } + Construct { is_record; is_struct; constructor; fields; base } | Literal { lit; neg; _ } -> ( match c_lit e.span neg lit typ with | EL_Lit lit -> Literal lit @@ -716,11 +725,17 @@ end) : EXPR = struct typ = TInt { size = S8; signedness = Unsigned }; }) l)) - | NamedConst { def_id = id; impl; _ } -> + | NamedConst { def_id = id; impl; _ } -> ( let kind : Concrete_ident.Kind.t = match impl with Some _ -> AssociatedItem Value | _ -> Value in - GlobalVar (def_id kind id) + let f = GlobalVar (def_id kind id) in + match impl with + | Some impl -> + let trait = Some (c_impl_expr e.span impl, []) in + let f = { e = f; span; typ = TArrow ([], typ) } in + App { 
f; trait; args = []; generic_args = []; bounds_impls = [] } + | _ -> f) | Closure { body; params; upvars; _ } -> let params = List.filter_map ~f:(fun p -> Option.map ~f:c_pat p.pat) params @@ -817,11 +832,12 @@ end) : EXPR = struct Array { fields = List.map ~f:constant_expr_to_expr fields } | Tuple { fields } -> Tuple { fields = List.map ~f:constant_expr_to_expr fields } - | GlobalName { id; _ } -> GlobalName { id } + | GlobalName { id; variant_information; _ } -> + GlobalName { id; constructor = variant_information } | Borrow arg -> Borrow { arg = constant_expr_to_expr arg; borrow_kind = Thir.Shared } | ConstRef { id } -> ConstRef { id } - | MutPtr _ | TraitConst _ | FnPtr _ -> + | Cast _ | RawBorrow _ | TraitConst _ | FnPtr _ -> assertion_failure [ span ] "constant_lit_to_lit: TraitConst | FnPtr | MutPtr" | Todo _ -> assertion_failure [ span ] "ConstantExpr::Todo" @@ -867,17 +883,17 @@ end) : EXPR = struct let var = local_ident Expr var in PBinding { mut; mode; var; typ; subpat } | Variant { info; subpatterns; _ } -> - let name = - def_id (Constructor { is_struct = info.typ_is_struct }) info.variant + let is_struct, is_record = + match info.kind with + | Struct { named } -> (true, named) + | Enum { named; _ } -> (false, named) + | Union -> + unimplemented ~issue_id:998 [ pat.span ] + "Pattern match on union types: not supported" in - let args = List.map ~f:(c_field_pat info) subpatterns in - PConstruct - { - name; - args; - is_record = info.variant_is_record; - is_struct = info.typ_is_struct; - } + let constructor = def_id (Constructor { is_struct }) info.variant in + let fields = List.map ~f:(c_field_pat info) subpatterns in + PConstruct { constructor; fields; is_record; is_struct } | Tuple { subpatterns } -> (List.map ~f:c_pat subpatterns |> U.make_tuple_pat').p | Deref { subpattern } -> @@ -975,7 +991,7 @@ end) : EXPR = struct ("Pointer, with [cast] being " ^ [%show: Thir.pointer_coercion] cast) and c_ty (span : Thir.span) (ty : Thir.ty) : ty = - match ty with 
+ match ty.value with | Bool -> TBool | Char -> TChar | Int k -> TInt (c_int_ty k) @@ -1014,7 +1030,7 @@ end) : EXPR = struct let impl = c_impl_expr span impl_expr in let item = Concrete_ident.of_def_id (AssociatedItem Type) def_id in TAssociatedType { impl; item } - | Alias { kind = Opaque; def_id; _ } -> + | Alias { kind = Opaque _; def_id; _ } -> TOpaque (Concrete_ident.of_def_id Type def_id) | Alias { kind = Inherent; _ } -> assertion_failure [ span ] "Ty::Alias with AliasTyKind::Inherent" @@ -1273,7 +1289,7 @@ include struct let is_core_item = false end) - let import_ty : Types.span -> Types.ty -> Ast.Rust.ty = c_ty + let import_ty : Types.span -> Types.node_for__ty_kind -> Ast.Rust.ty = c_ty let import_trait_ref : Types.span -> Types.trait_ref -> Ast.Rust.trait_goal = c_trait_ref @@ -1294,7 +1310,7 @@ let make ~krate : (module EXPR) = (module M) let c_trait_item (item : Thir.trait_item) : trait_item = - let open (val make ~krate:item.owner_id.krate : EXPR) in + let open (val make ~krate:item.owner_id.contents.value.krate : EXPR) in let { params; constraints } = c_generics item.generics in (* TODO: see TODO in impl items *) let ti_ident = Concrete_ident.of_def_id Field item.owner_id in @@ -1374,12 +1390,12 @@ let cast_of_enum typ_name generics typ thir_span { is_record = variant.is_record; is_struct = false; - args = + fields = List.map ~f:(fun (cid, typ, _) -> { field = `Concrete cid; pat = { p = PWild; typ; span } }) variant.arguments; - name = `Concrete variant.name; + constructor = `Concrete variant.name; } in let pat = { p = pat; typ = self; span } in @@ -1433,7 +1449,7 @@ let rec c_item ~ident ~drop_body (item : Thir.item) : item list = [ make_hax_error_item span ident error ] and c_item_unwrapped ~ident ~drop_body (item : Thir.item) : item list = - let open (val make ~krate:item.owner_id.krate : EXPR) in + let open (val make ~krate:item.owner_id.contents.value.krate : EXPR) in if should_skip item.attributes then [] else let span = Span.of_thir item.span 
in @@ -1457,7 +1473,7 @@ and c_item_unwrapped ~ident ~drop_body (item : Thir.item) : item list = name = Concrete_ident.of_def_id Value (Option.value_exn item.def_id); generics = c_generics generics; - body = c_body body; + body = c_expr body; params = []; safety = Safe; } @@ -1513,7 +1529,8 @@ and c_item_unwrapped ~ident ~drop_body (item : Thir.item) : item list = ~f: (fun ({ data; def_id = variant_id; attributes; _ } as original) -> let is_record = - [%matches? Types.Struct { fields = _ :: _; _ }] data + [%matches? (Struct { fields = _ :: _; _ } : Types.variant_data)] + data in let name = Concrete_ident.of_def_id kind variant_id in let arguments = @@ -1549,6 +1566,7 @@ and c_item_unwrapped ~ident ~drop_body (item : Thir.item) : item list = let v = let kind = Concrete_ident.Kind.Constructor { is_struct } in let name = Concrete_ident.of_def_id kind def_id in + let name = Concrete_ident.Create.move_under name ~new_parent:name in let mk fields is_record = let arguments = List.map @@ -1667,7 +1685,7 @@ and c_item_unwrapped ~ident ~drop_body (item : Thir.item) : item list = generics = c_generics generics; self_ty = c_ty item.span self_ty; of_trait = - ( def_id Trait of_trait.def_id, + ( Concrete_ident.of_def_id Trait of_trait.def_id, List.map ~f:(c_generic_value item.span) of_trait.generic_args ); items = @@ -1740,19 +1758,27 @@ and c_item_unwrapped ~ident ~drop_body (item : Thir.item) : item list = (* TODO: is this DUMMY thing really needed? there's a `Use` segment (see #272) *) let def_id = item.owner_id in let def_id : Types.def_id = - { - def_id with - path = - def_id.path - @ [ - Types. - { data = ValueNs "DUMMY"; disambiguator = MyInt64.of_int 0 }; - ]; - } + let value = + { + def_id.contents.value with + path = + def_id.contents.value.path + @ [ + Types. 
+ { + data = ValueNs "DUMMY"; + disambiguator = MyInt64.of_int 0; + }; + ]; + } + in + { contents = { def_id.contents with value } } in [ { span; v; ident = Concrete_ident.of_def_id Value def_id; attrs } ] + | Union _ -> + unimplemented ~issue_id:998 [ item.span ] "Union types: not supported" | ExternCrate _ | Static _ | Macro _ | Mod _ | ForeignMod _ | GlobalAsm _ - | OpaqueTy _ | Union _ | TraitAlias _ -> + | TraitAlias _ -> mk NotImplementedYet let import_item ~drop_body (item : Thir.item) : diff --git a/engine/lib/import_thir.mli b/engine/lib/import_thir.mli index 70bff0423..42a5f2184 100644 --- a/engine/lib/import_thir.mli +++ b/engine/lib/import_thir.mli @@ -1,4 +1,4 @@ -val import_ty : Types.span -> Types.ty -> Ast.Rust.ty +val import_ty : Types.span -> Types.node_for__ty_kind -> Ast.Rust.ty val import_trait_ref : Types.span -> Types.trait_ref -> Ast.Rust.trait_goal val import_clause : diff --git a/engine/lib/phase_utils.ml b/engine/lib/phase_utils.ml index 80eb807df..1085c86d4 100644 --- a/engine/lib/phase_utils.ml +++ b/engine/lib/phase_utils.ml @@ -248,10 +248,12 @@ module TracePhase (P : PHASE) = struct include P let name = [%show: Diagnostics.Phase.t] P.metadata.current_phase - let enable = Option.is_some P.metadata.previous_phase + (* We distinguish between composite phases (i.e. `BindPhase(_)(_)`) versus non-composite ones. *) + + let composite_phase = Option.is_some P.metadata.previous_phase let ditems = - if enable then P.ditems + if composite_phase then P.ditems else fun items -> Logs.info (fun m -> m "Entering phase [%s]" name); let items = P.ditems items in @@ -259,12 +261,25 @@ module TracePhase (P : PHASE) = struct items end +module ProfilePhase (P : PHASE) = struct + include P + + (* We distinguish between composite phases (i.e. `BindPhase(_)(_)`) versus non-composite ones. 
*) + let composite_phase = Option.is_some P.metadata.previous_phase + + let ditems items = + if composite_phase then P.ditems items + else + let ctx = Diagnostics.Context.Phase P.metadata.current_phase in + Profiling.profile ctx (List.length items) (fun () -> P.ditems items) +end + module BindPhase (D1 : PHASE) (D2 : PHASE with module FA = D1.FB and module A = D1.B) = struct - module D1' = TracePhase (D1) - module D2' = TracePhase (D2) + module D1' = ProfilePhase (TracePhase (D1)) + module D2' = ProfilePhase (TracePhase (D2)) module FA = D1.FA module FB = D2.FB module A = D1.A diff --git a/engine/lib/phases/phase_and_mut_defsite.ml b/engine/lib/phases/phase_and_mut_defsite.ml index 88cfb8486..d640ef92e 100644 --- a/engine/lib/phases/phase_and_mut_defsite.ml +++ b/engine/lib/phases/phase_and_mut_defsite.ml @@ -14,7 +14,7 @@ struct include Phase_utils.MakeBase (FA) (FB) (struct - let phase_id = Diagnostics.Phase.AndMutDefSite + let phase_id = [%auto_phase_name auto] end) module A = Ast.Make (FA) diff --git a/engine/lib/phases/phase_bundle_cycles.ml b/engine/lib/phases/phase_bundle_cycles.ml new file mode 100644 index 000000000..b26891f9b --- /dev/null +++ b/engine/lib/phases/phase_bundle_cycles.ml @@ -0,0 +1,20 @@ +open! Prelude + +module Make (F : Features.T) = + Phase_utils.MakeMonomorphicPhase + (F) + (struct + let phase_id = [%auto_phase_name auto] + + module A = Ast.Make (F) + + module Error = Phase_utils.MakeError (struct + let ctx = Diagnostics.Context.Phase phase_id + end) + + module Attrs = Attr_payloads.MakeBase (Error) + + let ditems items = + let module DepGraph = Dependencies.Make (F) in + DepGraph.bundle_cyclic_modules items + end) diff --git a/engine/lib/phases/phase_bundle_cycles.mli b/engine/lib/phases/phase_bundle_cycles.mli new file mode 100644 index 000000000..2a8c3f80d --- /dev/null +++ b/engine/lib/phases/phase_bundle_cycles.mli @@ -0,0 +1,5 @@ +(** This phase makes sure the items don't yield any cycle, +namespace-wise. 
It does so by creating namespaces we call bundles, in +which we regroup definitions that would otherwise yield cycles. *) + +module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE diff --git a/engine/lib/phases/phase_cf_into_monads.ml b/engine/lib/phases/phase_cf_into_monads.ml index 9cc170da0..54f8d035e 100644 --- a/engine/lib/phases/phase_cf_into_monads.ml +++ b/engine/lib/phases/phase_cf_into_monads.ml @@ -20,7 +20,7 @@ struct include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.CfIntoMonads + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct diff --git a/engine/lib/phases/phase_direct_and_mut.ml b/engine/lib/phases/phase_direct_and_mut.ml index 4d3ae8493..c33d18096 100644 --- a/engine/lib/phases/phase_direct_and_mut.ml +++ b/engine/lib/phases/phase_direct_and_mut.ml @@ -18,7 +18,7 @@ struct include Phase_utils.MakeBase (FA) (FB) (struct - let phase_id = Diagnostics.Phase.RefMut + let phase_id = [%auto_phase_name auto] end) (** Reference to a fresh local ident (item-wise) *) diff --git a/engine/lib/phases/phase_drop_blocks.ml b/engine/lib/phases/phase_drop_blocks.ml index f92633194..45844e7eb 100644 --- a/engine/lib/phases/phase_drop_blocks.ml +++ b/engine/lib/phases/phase_drop_blocks.ml @@ -12,7 +12,7 @@ module%inlined_contents Make (F : Features.T) = struct include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.DropReferences + let phase_id = [%auto_phase_name auto] end) module UA = Ast_utils.Make (F) diff --git a/engine/lib/phases/phase_drop_match_guards.ml b/engine/lib/phases/phase_drop_match_guards.ml index ff5304d9a..b4cf7524d 100644 --- a/engine/lib/phases/phase_drop_match_guards.ml +++ b/engine/lib/phases/phase_drop_match_guards.ml @@ -45,7 +45,7 @@ module%inlined_contents Make (F : Features.T) = struct include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.DropMatchGuards + let phase_id = [%auto_phase_name auto] end) module UA = 
Ast_utils.Make (F) @@ -120,7 +120,7 @@ module%inlined_contents Make (F : Features.T) = struct in let mk_opt_pattern (binding : B.pat option) : B.pat = - let (name : Concrete_ident.name), (args : B.field_pat list) = + let (name : Concrete_ident.name), (fields : B.field_pat list) = match binding with | Some b -> ( Core__option__Option__Some, @@ -128,9 +128,9 @@ module%inlined_contents Make (F : Features.T) = struct | None -> (Core__option__Option__None, []) in MS.pat_PConstruct - ~name: + ~constructor: (Global_ident.of_name (Constructor { is_struct = false }) name) - ~args ~is_record:false ~is_struct:false ~typ:opt_result_typ + ~fields ~is_record:false ~is_struct:false ~typ:opt_result_typ in let expr_none = mk_opt_expr None in diff --git a/engine/lib/phases/phase_drop_needless_returns.ml b/engine/lib/phases/phase_drop_needless_returns.ml deleted file mode 100644 index e4a59355e..000000000 --- a/engine/lib/phases/phase_drop_needless_returns.ml +++ /dev/null @@ -1,48 +0,0 @@ -open! Prelude - -module Make (F : Features.T) = - Phase_utils.MakeMonomorphicPhase - (F) - (struct - let phase_id = Diagnostics.Phase.DropNeedlessReturns - - open Ast.Make (F) - module U = Ast_utils.Make (F) - module Visitors = Ast_visitors.Make (F) - - module Error = Phase_utils.MakeError (struct - let ctx = Diagnostics.Context.Phase phase_id - end) - - let visitor = - object (self) - inherit [_] Visitors.map as _super - - method! 
visit_expr () e = - match e with - | { e = Return { e; _ }; _ } -> e - (* we know [e] is on an exit position: the return is - thus useless, we can skip it *) - | { e = Let { monadic = None; lhs; rhs; body }; _ } -> - let body = self#visit_expr () body in - { e with e = Let { monadic = None; lhs; rhs; body } } - (* If a let expression is an exit node, then it's body - is as well *) - | { e = Match { scrutinee; arms }; _ } -> - let arms = List.map ~f:(self#visit_arm ()) arms in - { e with e = Match { scrutinee; arms } } - | { e = If { cond; then_; else_ }; _ } -> - let then_ = self#visit_expr () then_ in - let else_ = Option.map ~f:(self#visit_expr ()) else_ in - { e with e = If { cond; then_; else_ } } - | _ -> e - (** The invariant here is that [visit_expr] is called only - on expressions that are on exit positions. [visit_expr] - is first called on root expressions, which are (by - definition) exit nodes. Then, [visit_expr] itself makes - recursive calls to sub expressions that are themselves - in exit nodes. **) - end - - let ditems = List.map ~f:(visitor#visit_item ()) - end) diff --git a/engine/lib/phases/phase_drop_needless_returns.mli b/engine/lib/phases/phase_drop_needless_returns.mli deleted file mode 100644 index b53603f1e..000000000 --- a/engine/lib/phases/phase_drop_needless_returns.mli +++ /dev/null @@ -1,4 +0,0 @@ -(** This phase transforms `return e` expressions into `e` when `return -e` is on an exit position. 
*) - -module Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE diff --git a/engine/lib/phases/phase_drop_references.ml b/engine/lib/phases/phase_drop_references.ml index ec21704e2..dcd7f36ad 100644 --- a/engine/lib/phases/phase_drop_references.ml +++ b/engine/lib/phases/phase_drop_references.ml @@ -18,7 +18,7 @@ struct include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.DropReferences + let phase_id = [%auto_phase_name auto] end) module UA = Ast_utils.Make (F) diff --git a/engine/lib/phases/phase_drop_return_break_continue.ml b/engine/lib/phases/phase_drop_return_break_continue.ml new file mode 100644 index 000000000..937bfeb69 --- /dev/null +++ b/engine/lib/phases/phase_drop_return_break_continue.ml @@ -0,0 +1,178 @@ +(** This phase removes `return`s in exit position. Inside loops, + it replaces `return`, `break` and `continue` (in exit position) + by their encoding in the `ControlFlow` enum. It replaces another + expression in exit position by an equivalent `continue`. + This phase should come after `RewriteControlFlow` to ensure all + control flow is in exit position. *) + +open! 
Prelude + +module%inlined_contents Make (F : Features.T) = struct + open Ast + module FA = F + + module FB = struct + include F + include Features.On.Fold_like_loop + include Features.Off.Early_exit + include Features.Off.Break + include Features.Off.Continue + end + + include + Phase_utils.MakeBase (F) (FB) + (struct + let phase_id = [%auto_phase_name auto] + end) + + module Implem : ImplemT.T = struct + let metadata = metadata + + module UA = Ast_utils.Make (F) + module UB = Ast_utils.Make (FB) + + module S = struct + include Features.SUBTYPE.Id + end + + (* break_type is "by default" unit since there always is a (possibly implicit) break type *) + type loop_info = { return_type : A.ty option; break_type : A.ty option } + + let has_return = + let module Visitors = Ast_visitors.Make (F) in + object (self) + inherit [_] Visitors.reduce as super + method zero = { return_type = None; break_type = None } + + method plus li1 li2 = + { + return_type = Option.first_some li1.return_type li2.return_type; + break_type = Option.first_some li1.break_type li2.break_type; + } + + method! visit_expr' () e = + match e with + | Return { e; _ } -> { return_type = Some e.typ; break_type = None } + | Break { e; _ } -> { return_type = None; break_type = Some e.typ } + (* We should avoid catching breaks of a nested + loops as they could have different types. *) + | Loop { body; _ } -> + { + return_type = (self#visit_expr () body).return_type; + break_type = None; + } + | _ -> super#visit_expr' () e + end + + let visitor = + let module Visitors = Ast_visitors.Make (F) in + object (self) + inherit [_] Visitors.map as _super + + method! 
visit_expr (in_loop : (loop_info * A.ty) option) e = + let span = e.span in + match (e.e, in_loop) with + | Return { e; _ }, None -> e + (* we know [e] is on an exit position: the return is + thus useless, we can skip it *) + | Let { monadic = None; lhs; rhs; body }, _ -> + let body = self#visit_expr in_loop body in + { + e with + e = Let { monadic = None; lhs; rhs; body }; + typ = body.typ; + } + (* If a let expression is an exit node, then it's body + is as well *) + | Match { scrutinee; arms }, _ -> + let arms = List.map ~f:(self#visit_arm in_loop) arms in + let typ = + match arms with { arm; _ } :: _ -> arm.body.typ | [] -> e.typ + in + { e with e = Match { scrutinee; arms }; typ } + | If { cond; then_; else_ }, _ -> + let then_ = self#visit_expr in_loop then_ in + let else_ = Option.map ~f:(self#visit_expr in_loop) else_ in + { e with e = If { cond; then_; else_ }; typ = then_.typ } + | Return { e; _ }, Some ({ return_type; break_type }, acc_type) -> + UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~e + ~acc:{ e with typ = acc_type } `Return + | ( Break { e; acc = Some (acc, _); _ }, + Some ({ return_type; break_type }, _) ) -> + UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~e ~acc + `Break + | ( Continue { acc = Some (acc, _); _ }, + Some ({ return_type; break_type }, _) ) -> + UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~acc + `Continue + | _, Some ({ return_type; break_type }, _) + when Option.is_some return_type || Option.is_some break_type -> + UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~acc:e + `Continue + | _ -> e + (** The invariant here is that [visit_expr] is called only + on expressions that are on exit positions. [visit_expr] + is first called on root expressions, which are (by + definition) exit nodes. Then, [visit_expr] itself makes + recursive calls to sub expressions that are themselves + in exit nodes. 
**) + end + + let closure_visitor = + let module Visitors = Ast_visitors.Make (F) in + object + inherit [_] Visitors.map as super + + method! visit_expr' () e = + match e with + | Closure ({ body; _ } as closure) -> + Closure { closure with body = visitor#visit_expr None body } + | _ -> super#visit_expr' () e + end + + [%%inline_defs dmutability + dsafety_kind] + + let rec dexpr' (span : span) (expr : A.expr') : B.expr' = + match expr with + | [%inline_arms "dexpr'.*" - Return - Break - Continue - Loop] -> auto + | Return _ | Break _ | Continue _ -> + Error.assertion_failure span + "Return/Break/Continue are expected to be gone as this point" + | Loop { body; kind; state; label; witness; _ } -> + let control_flow_type = has_return#visit_expr () body in + let control_flow = + match control_flow_type with + | { return_type = Some _; _ } -> + Some (B.BreakOrReturn, Features.On.fold_like_loop) + | { break_type = Some _; _ } -> + Some (BreakOnly, Features.On.fold_like_loop) + | _ -> None + in + let acc_type = + match body.typ with + | TApp { ident; args = [ GType _; GType continue_type ] } + when Ast.Global_ident.equal ident + (Ast.Global_ident.of_name Type + Core__ops__control_flow__ControlFlow) -> + continue_type + | _ -> body.typ + in + let body = + visitor#visit_expr (Some (control_flow_type, acc_type)) body + |> dexpr + in + let kind = dloop_kind span kind in + let state = Option.map ~f:(dloop_state span) state in + Loop { body; control_flow; kind; state; label; witness } + [@@inline_ands bindings_of dexpr - dexpr'] + + [%%inline_defs "Item.*" - ditems] + + let ditems (items : A.item list) : B.item list = + List.concat_map items + ~f:(visitor#visit_item None >> closure_visitor#visit_item () >> ditem) + end + + include Implem +end +[@@add "subtype.ml"] diff --git a/engine/lib/phases/phase_drop_return_break_continue.mli b/engine/lib/phases/phase_drop_return_break_continue.mli new file mode 100644 index 000000000..b4e2d6d39 --- /dev/null +++ 
b/engine/lib/phases/phase_drop_return_break_continue.mli @@ -0,0 +1,26 @@ +(** This phase transforms `return e` expressions into `e` when `return +e` is on an exit position. It should come after phase `RewriteControlFlow` +and thus eliminate all `return`s. Inside loops it rewrites `return`, +`break` and `continue` as their equivalent in terms of the `ControlFlow` +wrapper that will be handled by the specific fold operators introduced by +phase `FunctionalizeLoops`. *) + +module Make (F : Features.T) : sig + include module type of struct + module FA = F + + module FB = struct + include F + include Features.On.Fold_like_loop + include Features.Off.Early_exit + include Features.Off.Break + include Features.Off.Continue + end + + module A = Ast.Make (F) + module B = Ast.Make (FB) + module ImplemT = Phase_utils.MakePhaseImplemT (A) (B) + end + + include ImplemT.T +end diff --git a/engine/lib/phases/phase_drop_sized_trait.ml b/engine/lib/phases/phase_drop_sized_trait.ml index 569e0ce70..00df13185 100644 --- a/engine/lib/phases/phase_drop_sized_trait.ml +++ b/engine/lib/phases/phase_drop_sized_trait.ml @@ -4,7 +4,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.DropSizedTrait + let phase_id = [%auto_phase_name auto] open Ast.Make (F) module U = Ast_utils.Make (F) diff --git a/engine/lib/phases/phase_functionalize_loops.ml b/engine/lib/phases/phase_functionalize_loops.ml index b101a8ad7..956952368 100644 --- a/engine/lib/phases/phase_functionalize_loops.ml +++ b/engine/lib/phases/phase_functionalize_loops.ml @@ -3,7 +3,8 @@ open! 
Prelude module%inlined_contents Make (F : Features.T with type continue = Features.Off.continue - and type early_exit = Features.Off.early_exit) = + and type early_exit = Features.Off.early_exit + and type break = Features.Off.break) = struct open Ast module FA = F @@ -15,12 +16,16 @@ struct include Features.Off.While_loop include Features.Off.For_index_loop include Features.Off.State_passing_loop + include Features.Off.Fold_like_loop + include Features.Off.Continue + include Features.Off.Early_exit + include Features.Off.Break end include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.FunctionalizeLoops + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct @@ -28,6 +33,7 @@ struct module UA = Ast_utils.Make (F) module UB = Ast_utils.Make (FB) + module Visitors = Ast_visitors.Make (F) module S = struct include Features.SUBTYPE.Id @@ -76,7 +82,7 @@ struct | StepBy of { n : B.expr; it : iterator } [@@deriving show] - let rec as_iterator' (e : B.expr) : iterator option = + let rec as_iterator (e : B.expr) : iterator option = match e.e with | Construct { @@ -93,12 +99,6 @@ struct Some (Range { start; end_ }) | _ -> meth_as_iterator e - and as_iterator (e : B.expr) : iterator option = - let result = as_iterator' e in - (* UB.Debug.expr ~label:"as_iterator" e; *) - (* " = " ^ [%show: iterator option] result |> Stdio.prerr_endline; *) - result - and meth_as_iterator (e : B.expr) : iterator option = let* f, args = match e.e with @@ -129,29 +129,50 @@ struct Some (ChunksExact { size; slice }) else None - let fn_args_of_iterator (it : iterator) : + let fn_args_of_iterator (cf : A.cf_kind option) (it : iterator) : (Concrete_ident.name * B.expr list * B.ty) option = let open Concrete_ident_generated in let usize = B.TInt { size = SSize; signedness = Unsigned } in match it with | Enumerate (ChunksExact { size; slice }) -> - Some - ( Rust_primitives__hax__folds__fold_enumerated_chunked_slice, - [ size; slice ], - usize ) + let 
fold_op = + match cf with + | Some BreakOrReturn -> + Rust_primitives__hax__folds__fold_enumerated_chunked_slice_return + | Some BreakOnly -> + Rust_primitives__hax__folds__fold_enumerated_chunked_slice_cf + | None -> Rust_primitives__hax__folds__fold_enumerated_chunked_slice + in + Some (fold_op, [ size; slice ], usize) | Enumerate (Slice slice) -> - Some - ( Rust_primitives__hax__folds__fold_enumerated_slice, - [ slice ], - usize ) + let fold_op = + match cf with + | Some BreakOrReturn -> + Rust_primitives__hax__folds__fold_enumerated_slice_return + | Some BreakOnly -> + Rust_primitives__hax__folds__fold_enumerated_slice_cf + | None -> Rust_primitives__hax__folds__fold_enumerated_slice + in + Some (fold_op, [ slice ], usize) | StepBy { n; it = Range { start; end_ } } -> - Some - ( Rust_primitives__hax__folds__fold_range_step_by, - [ start; end_; n ], - start.typ ) + let fold_op = + match cf with + | Some BreakOrReturn -> + Rust_primitives__hax__folds__fold_range_step_by_return + | Some BreakOnly -> + Rust_primitives__hax__folds__fold_range_step_by_cf + | None -> Rust_primitives__hax__folds__fold_range_step_by + in + Some (fold_op, [ start; end_; n ], start.typ) | Range { start; end_ } -> - Some - (Rust_primitives__hax__folds__fold_range, [ start; end_ ], start.typ) + let fold_op = + match cf with + | Some BreakOrReturn -> + Rust_primitives__hax__folds__fold_range_return + | Some BreakOnly -> Rust_primitives__hax__folds__fold_range_cf + | None -> Rust_primitives__hax__folds__fold_range + in + Some (fold_op, [ start; end_ ], start.typ) | _ -> None [%%inline_defs dmutability + dsafety_kind] @@ -165,19 +186,35 @@ struct { body; kind = ForLoop { it; pat; _ }; - state = Some { init; bpat; _ }; + state = Some _ as state; + control_flow; + _; + } + | Loop + { + body; + kind = ForLoop { it; pat; _ }; + state; + control_flow = Some (BreakOrReturn, _) as control_flow; _; } -> + let bpat, init = + match state with + | Some { bpat; init; _ } -> (dpat bpat, dexpr init) + | 
None -> + let unit = UB.unit_expr span in + (M.pat_PWild ~span ~typ:unit.typ, unit) + in let body = dexpr body in let { body; invariant } = extract_loop_invariant body in let it = dexpr it in let pat = dpat pat in - let bpat = dpat bpat in let fn : B.expr = UB.make_closure [ bpat; pat ] body body.span in - let init = dexpr init in + let cf = Option.map ~f:fst control_flow in let f, kind, args = - match as_iterator it |> Option.bind ~f:fn_args_of_iterator with + match as_iterator it |> Option.bind ~f:(fn_args_of_iterator cf) with | Some (f, args, typ) -> + (* TODO what happens if there is control flow? *) let invariant : B.expr = let default = let pat = MS.pat_PWild ~typ in @@ -188,22 +225,41 @@ struct in (f, Concrete_ident.Kind.Value, args @ [ invariant; init; fn ]) | None -> - ( Core__iter__traits__iterator__Iterator__fold, - AssociatedItem Value, - [ it; init; fn ] ) + let fold : Concrete_ident.name = + match cf with + | Some BreakOrReturn -> + Rust_primitives__hax__folds__fold_return + | Some BreakOnly -> Rust_primitives__hax__folds__fold_cf + | None -> Core__iter__traits__iterator__Iterator__fold + in + (fold, AssociatedItem Value, [ it; init; fn ]) in UB.call ~kind f args span (dty span expr.typ) | Loop { body; kind = WhileLoop { condition; _ }; - state = Some { init; bpat; _ }; + state = Some _ as state; + control_flow; + _; + } + | Loop + { + body; + kind = WhileLoop { condition; _ }; + state; + control_flow = Some (BreakOrReturn, _) as control_flow; _; } -> + let bpat, init = + match state with + | Some { bpat; init; _ } -> (dpat bpat, dexpr init) + | None -> + let unit = UB.unit_expr span in + (M.pat_PWild ~span ~typ:unit.typ, unit) + in let body = dexpr body in let condition = dexpr condition in - let bpat = dpat bpat in - let init = dexpr init in let condition : B.expr = M.expr_Closure ~params:[ bpat ] ~body:condition ~captures:[] ~span:condition.span @@ -214,19 +270,19 @@ struct ~typ:(TArrow ([ bpat.typ ], body.typ)) ~span:body.span in - UB.call 
~kind:(AssociatedItem Value) Rust_primitives__hax__while_loop + let fold_operator : Concrete_ident.name = + match control_flow with + | Some (BreakOrReturn, _) -> Rust_primitives__hax__while_loop_return + | Some (BreakOnly, _) -> Rust_primitives__hax__while_loop_cf + | None -> Rust_primitives__hax__while_loop + in + UB.call ~kind:(AssociatedItem Value) fold_operator [ condition; init; body ] span (dty span expr.typ) | Loop { state = None; _ } -> Error.unimplemented ~issue_id:405 ~details:"Loop without mutation" span | Loop _ -> Error.unimplemented ~issue_id:933 ~details:"Unhandled loop kind" span - | Break _ -> - Error.unimplemented ~issue_id:15 - ~details: - "For now, the AST node [Break] is feature gated only by [loop], \ - there is nothing for having loops but no breaks." - span | [%inline_arms "dexpr'.*" - Loop - Break - Continue - Return] -> map (fun e -> B.{ e; typ = dty expr.span expr.typ; span = expr.span }) | _ -> . diff --git a/engine/lib/phases/phase_functionalize_loops.mli b/engine/lib/phases/phase_functionalize_loops.mli index d8e3c77ed..a27824364 100644 --- a/engine/lib/phases/phase_functionalize_loops.mli +++ b/engine/lib/phases/phase_functionalize_loops.mli @@ -3,7 +3,8 @@ open! 
Prelude module Make (F : Features.T with type continue = Features.Off.continue - and type early_exit = Features.Off.early_exit) : sig + and type early_exit = Features.Off.early_exit + and type break = Features.Off.break) : sig include module type of struct module FA = F @@ -14,6 +15,7 @@ module Make include Features.Off.For_loop include Features.Off.For_index_loop include Features.Off.State_passing_loop + include Features.Off.Fold_like_loop end module A = Ast.Make (F) diff --git a/engine/lib/phases/phase_hoist_disjunctive_patterns.ml b/engine/lib/phases/phase_hoist_disjunctive_patterns.ml index 332a094d8..70e92d163 100644 --- a/engine/lib/phases/phase_hoist_disjunctive_patterns.ml +++ b/engine/lib/phases/phase_hoist_disjunctive_patterns.ml @@ -7,7 +7,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.HoistDisjunctions + let phase_id = [%auto_phase_name auto] open Ast.Make (F) module U = Ast_utils.Make (F) @@ -64,18 +64,18 @@ module Make (F : Features.T) = in match p.p with - | PConstruct { name; args; is_record; is_struct } -> - let args_as_pat = - List.rev_map args ~f:(fun arg -> self#visit_pat () arg.pat) + | PConstruct { constructor; fields; is_record; is_struct } -> + let fields_as_pat = + List.rev_map fields ~f:(fun arg -> self#visit_pat () arg.pat) in let subpats = - List.map (treat_args [ [] ] args_as_pat) - ~f:(fun args_as_pat -> - let args = - List.map2_exn args_as_pat args + List.map (treat_args [ [] ] fields_as_pat) + ~f:(fun fields_as_pat -> + let fields = + List.map2_exn fields_as_pat fields ~f:(fun pat { field; _ } -> { field; pat }) in - PConstruct { name; args; is_record; is_struct } + PConstruct { constructor; fields; is_record; is_struct } |> return_pat) in diff --git a/engine/lib/phases/phase_local_mutation.ml b/engine/lib/phases/phase_local_mutation.ml index 9ccd6f869..705c6624a 100644 --- a/engine/lib/phases/phase_local_mutation.ml +++ b/engine/lib/phases/phase_local_mutation.ml 
@@ -25,7 +25,7 @@ struct include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.LocalMutation + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct @@ -78,6 +78,14 @@ struct let rec dexpr e = dexpr_s { s with expr_level = []; drop_expr = false } e and dloop_state = [%inline_body dloop_state] in let span = expr.span in + let local_vars_expr = + let vars = + List.map + ~f:(fun (i, typ) : B.expr -> { e = LocalVar i; typ; span }) + s.loop_level + in + match vars with [ v ] -> v | _ -> UB.make_tuple_expr ~span vars + in match expr.e with | Let { @@ -229,7 +237,35 @@ struct dexpr_s { s with expr_level = []; drop_expr = false } scrutinee in { e = Match { scrutinee; arms }; typ; span = expr.span } - | Loop { body; kind; state; label; witness } -> + | Break { e; label; witness; _ } -> + let w = Features.On.state_passing_loop in + { + e = + Break + { + e = dexpr_same e; + acc = Some (local_vars_expr, w); + label; + witness; + }; + span = expr.span; + typ = local_vars_expr.typ; + } + | Return { e; witness } -> + { + e = Return { e = dexpr e; witness }; + span = expr.span; + typ = dty expr.span expr.typ; + } + | Continue { acc = None; label; witness; _ } -> + let w = Features.On.state_passing_loop in + let e = local_vars_expr in + { + e = Continue { acc = Some (e, w); label; witness }; + span = expr.span; + typ = e.typ; + } + | Loop { body; kind; state; label; witness; _ } -> let variables_to_output = s.expr_level in (* [adapt]: should we reorder shadowings? *) let observable_mutations, adapt = @@ -295,7 +331,12 @@ struct (* we deal with a for loop: this is always a unit expression (i.e. 
no [break foo] with [foo] non-unit allowed) *) let typ = List.map ~f:snd observable_mutations |> UB.make_tuple_typ in let loop : B.expr = - { e = Loop { body; kind; state; label; witness }; typ; span } + { + e = + Loop { body; kind; state; label; witness; control_flow = None }; + typ; + span; + } in if adapt && not (List.is_empty variables_to_output) then (* here, we need to introduce the shadowings as bindings *) @@ -318,8 +359,9 @@ struct typ = out.typ; } else loop - | [%inline_arms "dexpr'.*" - Let - Assign - Closure - Loop - If - Match] - -> + | [%inline_arms + "dexpr'.*" - Let - Assign - Closure - Loop - If - Match - Break + - Return] -> map (fun e -> let e' = B.{ e; typ = dty expr.span expr.typ; span = expr.span } diff --git a/engine/lib/phases/phase_newtype_as_refinement.ml b/engine/lib/phases/phase_newtype_as_refinement.ml index e19ec748b..86a29ab6a 100644 --- a/engine/lib/phases/phase_newtype_as_refinement.ml +++ b/engine/lib/phases/phase_newtype_as_refinement.ml @@ -4,7 +4,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.NewtypeAsRefinement + let phase_id = [%auto_phase_name auto] module A = Ast.Make (F) module Visitors = Ast_visitors.Make (F) diff --git a/engine/lib/phases/phase_reconstruct_asserts.ml b/engine/lib/phases/phase_reconstruct_asserts.ml index 43164106b..0dfacd364 100644 --- a/engine/lib/phases/phase_reconstruct_asserts.ml +++ b/engine/lib/phases/phase_reconstruct_asserts.ml @@ -4,7 +4,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.ResugarAsserts + let phase_id = [%auto_phase_name auto] open Ast.Make (F) module U = Ast_utils.Make (F) diff --git a/engine/lib/phases/phase_reconstruct_for_index_loops.ml b/engine/lib/phases/phase_reconstruct_for_index_loops.ml index 4ecdfceca..15333d8aa 100644 --- a/engine/lib/phases/phase_reconstruct_for_index_loops.ml +++ b/engine/lib/phases/phase_reconstruct_for_index_loops.ml 
@@ -11,7 +11,7 @@ module%inlined_contents Make (FA : Features.T) = struct include Phase_utils.MakeBase (FA) (FB) (struct - let phase_id = Diagnostics.Phase.ResugarForIndexLoops + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct diff --git a/engine/lib/phases/phase_reconstruct_for_loops.ml b/engine/lib/phases/phase_reconstruct_for_loops.ml index a6b3c48b5..2c7df3b8f 100644 --- a/engine/lib/phases/phase_reconstruct_for_loops.ml +++ b/engine/lib/phases/phase_reconstruct_for_loops.ml @@ -15,7 +15,7 @@ struct include Phase_utils.MakeBase (FA) (FB) (struct - let phase_id = Diagnostics.Phase.ResugarForLoops + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct @@ -131,10 +131,10 @@ struct p = PConstruct { - name = + constructor = `Concrete none_ctor; - args = + fields = []; _; }; @@ -180,10 +180,10 @@ struct p = PConstruct { - name = + constructor = `Concrete some_ctor; - args = + fields = [ { pat; @@ -255,6 +255,7 @@ struct state = Option.map ~f:(dloop_state expr.span) state; label; witness = S.loop expr.span witness; + control_flow = None; }; span = expr.span; typ = UB.unit_typ; diff --git a/engine/lib/phases/phase_reconstruct_question_marks.ml b/engine/lib/phases/phase_reconstruct_question_marks.ml index 1a4735b90..c1faf65c8 100644 --- a/engine/lib/phases/phase_reconstruct_question_marks.ml +++ b/engine/lib/phases/phase_reconstruct_question_marks.ml @@ -11,7 +11,7 @@ module%inlined_contents Make (FA : Features.T) = struct include Phase_utils.MakeBase (FA) (FB) (struct - let phase_id = Diagnostics.Phase.ResugarQuestionMarks + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct @@ -130,8 +130,8 @@ module%inlined_contents Make (FA : Features.T) = struct match p.p with | PConstruct { - name; - args = + constructor; + fields = [ { pat = @@ -145,7 +145,7 @@ module%inlined_contents Make (FA : Features.T) = struct ]; _; } -> - Some (name, var) + Some (constructor, var) | _ -> None in match e.e 
with diff --git a/engine/lib/phases/phase_reconstruct_while_loops.ml b/engine/lib/phases/phase_reconstruct_while_loops.ml index 11e955c4a..53062c7ec 100644 --- a/engine/lib/phases/phase_reconstruct_while_loops.ml +++ b/engine/lib/phases/phase_reconstruct_while_loops.ml @@ -11,7 +11,7 @@ module%inlined_contents Make (FA : Features.T) = struct include Phase_utils.MakeBase (FA) (FB) (struct - let phase_id = Diagnostics.Phase.ResugarWhileLoops + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct @@ -95,6 +95,7 @@ module%inlined_contents Make (FA : Features.T) = struct state = Option.map ~f:(dloop_state expr.span) state; label; witness = S.loop expr.span witness; + control_flow = None; }; span = expr.span; typ = UB.unit_typ; diff --git a/engine/lib/phases/phase_reject.ml b/engine/lib/phases/phase_reject.ml index 484dda0b3..4e314e915 100644 --- a/engine/lib/phases/phase_reject.ml +++ b/engine/lib/phases/phase_reject.ml @@ -41,6 +41,26 @@ end module _ (FA : Features.T) : Phase_utils.PHASE = Continue (FA) +module Question_mark (FA : Features.T) = struct + module FB = struct + include FA + include Features.Off.Question_mark + end + + include + Feature_gate.Make (FA) (FB) + (struct + module A = FA + module B = FB + include Feature_gate.DefaultSubtype + + let question_mark = reject + let metadata = make_metadata QuestionMark + end) +end + +module _ (FA : Features.T) : Phase_utils.PHASE = Question_mark (FA) + module RawOrMutPointer (FA : Features.T) = struct module FB = struct include FA diff --git a/engine/lib/phases/phase_rewrite_control_flow.ml b/engine/lib/phases/phase_rewrite_control_flow.ml index 7531f8584..d97f0b275 100644 --- a/engine/lib/phases/phase_rewrite_control_flow.ml +++ b/engine/lib/phases/phase_rewrite_control_flow.ml @@ -1,5 +1,8 @@ (* This phase rewrites: `if c {return a}; b` as `if c {return a; b} else {b}` - and does the equivalent transformation for pattern matchings. 
*) + and does the equivalent transformation for pattern matchings. + It rewrites the body of loops considering `break` and `continue` + as `return` to place them in return position. If a loop contains + a `return` it places it is rewritten inside a pattern matching over the result. *) open! Prelude @@ -7,35 +10,100 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.RewriteControlFlow + let phase_id = [%auto_phase_name auto] open Ast.Make (F) module U = Ast_utils.Make (F) + module M = Ast_builder.Make (F) module Visitors = Ast_visitors.Make (F) module Error = Phase_utils.MakeError (struct let ctx = Diagnostics.Context.Phase phase_id end) - let has_return = + let has_cf = object (_self) inherit [_] Visitors.reduce as super method zero = false method plus = ( || ) + method! visit_expr' break_continue e = + match e with + | Return _ -> true + | (Break _ | Continue _) when break_continue -> true + | _ -> super#visit_expr' break_continue e + end + + let loop_return_type = + object (_self) + inherit [_] Visitors.reduce as super + method zero = (U.unit_typ, None) + method plus l r = if [%eq: ty] (fst l) U.unit_typ then r else l + method! visit_expr' () e = - match e with Return _ -> true | _ -> super#visit_expr' () e + match e with + | Return { e; witness; _ } -> (e.typ, Some witness) + | _ -> super#visit_expr' () e end let rewrite_control_flow = object (self) inherit [_] Visitors.map as super - method! visit_expr () e = + method! 
visit_expr in_loop e = + let loop_with_return (loop : expr) stmts_after final pat = + let return_type, witness = loop_return_type#visit_expr () loop in + + let typ = + U.M.ty_cf ~continue_type:loop.typ ~break_type:return_type + in + let loop = { loop with typ } in + let span = loop.span in + let id = U.fresh_local_ident_in [] "ret" in + let module MS = (val U.M.make span) in + let mk_cf_pat = U.M.pat_Constructor_CF ~span ~typ in + let return_expr = + let inner_e = MS.expr_LocalVar ~typ:return_type id in + match witness with + | Some witness -> + MS.expr_Return ~typ:return_type ~witness ~inner_e + | None -> inner_e + in + let arms = + [ + MS.arm + (mk_cf_pat `Break (U.make_var_pat id typ span)) + return_expr; + MS.arm (mk_cf_pat `Continue pat) + (U.make_lets stmts_after final |> self#visit_expr in_loop); + ] + in + MS.expr_Match ~scrutinee:loop ~arms ~typ:return_type + in match e.e with - | _ when not (has_return#visit_expr () e) -> e - (* Returns in loops will be handled by issue #196 *) - | Loop _ -> e + (* This is supposed to improve performance but it might actually make it worse in some cases *) + | _ when not (has_cf#visit_expr true e) -> e + | Loop loop -> + let return_inside = has_cf#visit_expr false loop.body in + let new_body = self#visit_expr true loop.body in + let loop_expr = + { + e with + e = + Loop + { + loop with + body = { new_body with typ = loop.body.typ }; + }; + } + in + if return_inside then + let id = U.fresh_local_ident_in [] "loop_res" in + let pat = U.make_var_pat id loop_expr.typ loop_expr.span in + let module MS = (val U.M.make loop_expr.span) in + let final = MS.expr_LocalVar ~typ:loop_expr.typ id in + loop_with_return loop_expr [] final pat + else loop_expr | Let _ -> ( (* Collect let bindings to get the sequence of "statements", find the first "statement" that is a @@ -51,6 +119,10 @@ module Make (F : Features.T) = (* This avoids adding `let _ = ()` *) | { p = PWild; _ }, { e = GlobalVar (`TupleCons 0); _ } -> stmts_after + (* This 
avoids adding `let x = x` *) + | { p = PBinding { var; _ }; _ }, { e = LocalVar evar; _ } + when Local_ident.equal var evar -> + stmts_after | stmt -> stmt :: stmts_after in U.make_lets (branch_stmts @ stmts_to_add) final @@ -58,12 +130,28 @@ module Make (F : Features.T) = let stmts_before, stmt_and_stmts_after = List.split_while stmts ~f:(fun (_, e) -> match e.e with - | (If _ | Match _) when has_return#visit_expr () e -> + | (If _ | Match _) when has_cf#visit_expr in_loop e -> false - | Return _ -> false + | Loop _ when has_cf#visit_expr false e -> false + | Return _ | Break _ | Continue _ -> false | _ -> true) in match stmt_and_stmts_after with + | (p, ({ e = Loop loop; _ } as rhs)) :: stmts_after -> + let new_body = self#visit_expr true loop.body in + let loop_expr = + { + rhs with + e = + Loop + { + loop with + body = { new_body with typ = loop.body.typ }; + }; + } + in + U.make_lets stmts_before + (loop_with_return loop_expr stmts_after final p) | (p, ({ e = If { cond; then_; else_ }; _ } as rhs)) :: stmts_after -> (* We know there is no "return" in the condition @@ -78,7 +166,7 @@ module Make (F : Features.T) = in U.make_lets stmts_before { rhs with e = If { cond; then_; else_ } } - |> self#visit_expr () + |> self#visit_expr in_loop | (p, ({ e = Match { scrutinee; arms }; _ } as rhs)) :: stmts_after -> let arms = @@ -90,18 +178,19 @@ module Make (F : Features.T) = in U.make_lets stmts_before { rhs with e = Match { scrutinee; arms } } - |> self#visit_expr () + |> self#visit_expr in_loop (* The statements coming after a "return" are useless. 
*) - | (_, ({ e = Return _; _ } as rhs)) :: _ -> - U.make_lets stmts_before rhs |> self#visit_expr () + | (_, ({ e = Return _ | Break _ | Continue _; _ } as rhs)) :: _ + -> + U.make_lets stmts_before rhs |> self#visit_expr in_loop | _ -> let stmts = List.map stmts ~f:(fun (p, e) -> - (p, self#visit_expr () e)) + (p, self#visit_expr in_loop e)) in - U.make_lets stmts (self#visit_expr () final)) - | _ -> super#visit_expr () e + U.make_lets stmts (self#visit_expr in_loop final)) + | _ -> super#visit_expr in_loop e end - let ditems = List.map ~f:(rewrite_control_flow#visit_item ()) + let ditems = List.map ~f:(rewrite_control_flow#visit_item false) end) diff --git a/engine/lib/phases/phase_simplify_hoisting.ml b/engine/lib/phases/phase_simplify_hoisting.ml index 008f152fa..d0ab09bfc 100644 --- a/engine/lib/phases/phase_simplify_hoisting.ml +++ b/engine/lib/phases/phase_simplify_hoisting.ml @@ -4,7 +4,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.SimplifyHoisting + let phase_id = [%auto_phase_name auto] open Ast.Make (F) module U = Ast_utils.Make (F) diff --git a/engine/lib/phases/phase_simplify_match_return.ml b/engine/lib/phases/phase_simplify_match_return.ml index 378aa7744..9fa762997 100644 --- a/engine/lib/phases/phase_simplify_match_return.ml +++ b/engine/lib/phases/phase_simplify_match_return.ml @@ -4,7 +4,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.SimplifyMatchReturn + let phase_id = [%auto_phase_name auto] open Ast.Make (F) module U = Ast_utils.Make (F) diff --git a/engine/lib/phases/phase_simplify_question_marks.ml b/engine/lib/phases/phase_simplify_question_marks.ml index 4567ec48a..3cda9ad7f 100644 --- a/engine/lib/phases/phase_simplify_question_marks.ml +++ b/engine/lib/phases/phase_simplify_question_marks.ml @@ -11,7 +11,7 @@ module%inlined_contents Make (FA : Features.T) = struct include Phase_utils.MakeBase (FA) 
(FB) (struct - let phase_id = Diagnostics.Phase.ResugarQuestionMarks + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct @@ -87,9 +87,10 @@ module%inlined_contents Make (FA : Features.T) = struct let* impl = expect_residual_impl_result impl in let*? _ = [%eq: ty] error_src error_dest |> not in let from_typ = TArrow ([ error_src ], error_dest) in + let impl_generic_args = [ GType error_dest; GType error_src ] in Some - (UA.call ~kind:(AssociatedItem Value) ~impl Core__convert__From__from - [ e ] e.span from_typ) + (UA.call ~impl_generic_args ~kind:(AssociatedItem Value) ~impl + Core__convert__From__from [ e ] e.span from_typ) (** [map_err e error_dest impl] creates the expression [e.map_err(from)] with the proper types and impl @@ -111,17 +112,17 @@ module%inlined_contents Make (FA : Features.T) = struct let mk_pconstruct ~is_struct ~is_record ~span ~typ (constructor : Concrete_ident_generated.t) (fields : (Concrete_ident_generated.t * pat) list) = - let name = + let constructor = Global_ident.of_name (Constructor { is_struct }) constructor in - let args = + let fields = List.map ~f:(fun (field, pat) -> let field = Global_ident.of_name Field field in { field; pat }) fields in - let p = PConstruct { name; args; is_record; is_struct } in + let p = PConstruct { constructor; fields; is_record; is_struct } in { p; span; typ } (** [extract e] returns [Some (x, ty)] if [e] was a `y?` @@ -153,8 +154,8 @@ module%inlined_contents Make (FA : Features.T) = struct match p.p with | PConstruct { - name; - args = + constructor = name; + fields = [ { pat = diff --git a/engine/lib/phases/phase_specialize.ml b/engine/lib/phases/phase_specialize.ml index 17ed60370..5b5f0a6b2 100644 --- a/engine/lib/phases/phase_specialize.ml +++ b/engine/lib/phases/phase_specialize.ml @@ -4,7 +4,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.Specialize + let phase_id = [%auto_phase_name auto] module A = 
Ast.Make (F) module FB = F diff --git a/engine/lib/phases/phase_traits_specs.ml b/engine/lib/phases/phase_traits_specs.ml index 2d2aa087f..6020266a4 100644 --- a/engine/lib/phases/phase_traits_specs.ml +++ b/engine/lib/phases/phase_traits_specs.ml @@ -4,7 +4,7 @@ module Make (F : Features.T) = Phase_utils.MakeMonomorphicPhase (F) (struct - let phase_id = Diagnostics.Phase.TraitsSpecs + let phase_id = [%auto_phase_name auto] module A = Ast.Make (F) module FB = F diff --git a/engine/lib/phases/phase_transform_hax_lib_inline.ml b/engine/lib/phases/phase_transform_hax_lib_inline.ml index d2b2a4527..7122047aa 100644 --- a/engine/lib/phases/phase_transform_hax_lib_inline.ml +++ b/engine/lib/phases/phase_transform_hax_lib_inline.ml @@ -13,7 +13,7 @@ module%inlined_contents Make (F : Features.T) = struct include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.TransformHaxLibInline + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct @@ -52,7 +52,7 @@ module%inlined_contents Make (F : Features.T) = struct arm = { arm_pat = - { p = PConstruct { args = [ arg ]; _ }; _ }; + { p = PConstruct { fields = [ arg ]; _ }; _ }; _; }; _; @@ -166,6 +166,7 @@ module%inlined_contents Make (F : Features.T) = struct let before, after = let map_fst = List.map ~f:fst in try + let replace = Attrs.late_skip item.attrs in Attrs.associated_items Attr_payloads.AssocRole.ItemQuote item.attrs |> List.map ~f:(fun assoc_item -> let e : A.expr = @@ -181,7 +182,6 @@ module%inlined_contents Make (F : Features.T) = struct (* ^ (UA.LiftToFullAst.expr e |> Print_rust.pexpr_str) *) ^ [%show: A.expr] e) in - let v : B.item' = Quote quote in let span = e.span in let position, attr = Attrs.find_unique_attr assoc_item.attrs ~f:(function @@ -192,6 +192,21 @@ module%inlined_contents Make (F : Features.T) = struct "Malformed `Quote` item: could not find a \ ItemQuote payload") in + let v : B.item' = + let origin : item_quote_origin = + { + item_kind = 
UA.kind_of_item item; + item_ident = item.ident; + position = + (if replace then `Replace + else + match position with + | After -> `After + | Before -> `Before); + } + in + Quote { quote; origin } + in let attrs = [ Attr_payloads.to_attr attr assoc_item.span ] in (B.{ v; span; ident = item.ident; attrs }, position)) |> List.partition_tf ~f:(snd >> [%matches? Types.Before]) diff --git a/engine/lib/phases/phase_trivialize_assign_lhs.ml b/engine/lib/phases/phase_trivialize_assign_lhs.ml index e64a7baab..030b7528f 100644 --- a/engine/lib/phases/phase_trivialize_assign_lhs.ml +++ b/engine/lib/phases/phase_trivialize_assign_lhs.ml @@ -13,7 +13,7 @@ module%inlined_contents Make (F : Features.T) = struct include Phase_utils.MakeBase (F) (FB) (struct - let phase_id = Diagnostics.Phase.TrivializeAssignLhs + let phase_id = [%auto_phase_name auto] end) module Implem : ImplemT.T = struct diff --git a/engine/lib/print_rust.ml b/engine/lib/print_rust.ml index bea5c641a..d88d32614 100644 --- a/engine/lib/print_rust.ml +++ b/engine/lib/print_rust.ml @@ -195,21 +195,21 @@ module Raw = struct | PWild -> !"_" | PAscription { typ; pat; _ } -> !"pat_ascription!(" & ppat pat & !" as " & pty e.span typ & !")" - | PConstruct { name; args; is_record; _ } -> - pglobal_ident e.span name + | PConstruct { constructor; fields; is_record; _ } -> + pglobal_ident e.span constructor & - if List.is_empty args then !"" + if List.is_empty fields then !"" else if is_record then !"{" & concat ~sep:!", " (List.map ~f:(fun { field; pat } -> !(last_of_global_ident field e.span) & !":" & ppat pat) - args) + fields) & !"}" else !"(" - & concat ~sep:!", " (List.map ~f:(fun { pat; _ } -> ppat pat) args) + & concat ~sep:!", " (List.map ~f:(fun { pat; _ } -> ppat pat) fields) & !")" | POr { subpats } -> concat ~sep:!" 
| " (List.map ~f:ppat subpats) | PArray { args } -> !"[" & concat ~sep:!"," (List.map ~f:ppat args) & !"]" @@ -343,8 +343,8 @@ module Raw = struct | Some { init; _ } -> !"(" & main & !")(" & pexpr init & !")" | None -> main) | Break { e; _ } -> !"(break (" & pexpr e & !"))" - | Continue { e = None; _ } -> !"continue" - | Continue { e = Some (_, e); _ } -> + | Continue { acc = None; _ } -> !"continue" + | Continue { acc = Some (e, _); _ } -> !"state_passing_continue!(" & pexpr e & !")" | Return { e; _ } -> !"(return " & pexpr e & !")" | QuestionMark { e; _ } -> !"(" & pexpr e & !")?" @@ -576,7 +576,7 @@ module Raw = struct | Impl { generics; self_ty; of_trait; items; parent_bounds = _; safety } -> let trait = - pglobal_ident e.span (fst of_trait) + pglobal_ident e.span (`Concrete (fst of_trait)) & !"<" & concat ~sep:!"," (List.map ~f:(pgeneric_value e.span) (snd of_trait)) @@ -592,7 +592,7 @@ module Raw = struct & !"{" & List.map ~f:pimpl_item items |> concat ~sep:!"\n" & !"}" - | Quote quote -> pquote e.span quote & !";" + | Quote { quote; _ } -> pquote e.span quote & !";" | _ -> raise NotImplemented in pattrs e.attrs & pi @@ -661,37 +661,82 @@ let rustfmt_annotated (x : AnnotatedString.t) : AnnotatedString.t = if String.equal rf "no" then x else try rustfmt_annotated' x with RetokenizationFailure -> x -let pitem : item -> AnnotatedString.Output.t = - Raw.pitem >> rustfmt_annotated >> AnnotatedString.Output.convert - -let pitems : item list -> AnnotatedString.Output.t = - List.concat_map ~f:Raw.pitem - >> rustfmt_annotated >> AnnotatedString.Output.convert - -let pitem_str : item -> string = pitem >> AnnotatedString.Output.raw_string - -let pty_str (e : ty) : string = - let e = Raw.pty (Span.dummy ()) e in - let ( ! 
) = AnnotatedString.pure @@ Span.dummy () in - let ( & ) = AnnotatedString.( & ) in - let prefix = "type TypeWrapper = " in - let suffix = ";" in - let item = !prefix & e & !suffix in - rustfmt_annotated item |> AnnotatedString.Output.convert - |> AnnotatedString.Output.raw_string |> Stdlib.String.trim - |> String.chop_suffix_if_exists ~suffix - |> String.chop_prefix_if_exists ~prefix - |> Stdlib.String.trim - -let pexpr_str (e : expr) : string = - let e = Raw.pexpr e in - let ( ! ) = AnnotatedString.pure @@ Span.dummy () in - let ( & ) = AnnotatedString.( & ) in - let prefix = "fn expr_wrapper() {" in - let suffix = "}" in - let item = !prefix & e & !suffix in - rustfmt_annotated item |> AnnotatedString.Output.convert - |> AnnotatedString.Output.raw_string |> Stdlib.String.trim - |> String.chop_suffix_if_exists ~suffix - |> String.chop_prefix_if_exists ~prefix - |> Stdlib.String.trim +module type T = sig + val pitem : item -> AnnotatedString.Output.t + val pitems : item list -> AnnotatedString.Output.t + val pitem_str : item -> string + val pexpr_str : expr -> string + val pty_str : ty -> string +end + +module Traditional : T = struct + let pitem : item -> AnnotatedString.Output.t = + Raw.pitem >> rustfmt_annotated >> AnnotatedString.Output.convert + + let pitems : item list -> AnnotatedString.Output.t = + List.concat_map ~f:Raw.pitem + >> rustfmt_annotated >> AnnotatedString.Output.convert + + let pitem_str : item -> string = pitem >> AnnotatedString.Output.raw_string + + let pexpr_str (e : expr) : string = + let e = Raw.pexpr e in + let ( ! 
) = AnnotatedString.pure @@ Span.dummy () in + let ( & ) = AnnotatedString.( & ) in + let prefix = "fn expr_wrapper() {" in + let suffix = "}" in + let item = !prefix & e & !suffix in + rustfmt_annotated item |> AnnotatedString.Output.convert + |> AnnotatedString.Output.raw_string |> Stdlib.String.trim + |> String.chop_suffix_if_exists ~suffix + |> String.chop_prefix_if_exists ~prefix + |> Stdlib.String.trim + + let pty_str (e : ty) : string = + let e = Raw.pty (Span.dummy ()) e in + let ( ! ) = AnnotatedString.pure @@ Span.dummy () in + let ( & ) = AnnotatedString.( & ) in + let prefix = "type TypeWrapper = " in + let suffix = ";" in + let item = !prefix & e & !suffix in + rustfmt_annotated item |> AnnotatedString.Output.convert + |> AnnotatedString.Output.raw_string |> Stdlib.String.trim + |> String.chop_suffix_if_exists ~suffix + |> String.chop_prefix_if_exists ~prefix + |> Stdlib.String.trim +end + +(* module Experimental : T = struct *) +(* module GenericRustPrinter = Generic_rust_printer.Make (Features.Full) *) + +(* let pitem : item -> AnnotatedString.Output.t = *) +(* GenericRustPrinter.item () *) +(* >> Generic_printer_api.AnnotatedString.to_spanned_strings *) +(* >> AnnotatedString.Output.convert *) + +(* let pitems : item list -> AnnotatedString.Output.t = *) +(* GenericRustPrinter.items () *) +(* >> Generic_printer_api.AnnotatedString.to_spanned_strings *) +(* >> AnnotatedString.Output.convert *) + +(* let pexpr : expr -> AnnotatedString.Output.t = *) +(* GenericRustPrinter.expr () *) +(* >> Generic_printer_api.AnnotatedString.to_spanned_strings *) +(* >> AnnotatedString.Output.convert *) + +(* let pitem_str : item -> string = *) +(* GenericRustPrinter.item () >> Generic_printer_api.AnnotatedString.to_string *) + +(* let pexpr_str : expr -> string = *) +(* GenericRustPrinter.expr () >> Generic_printer_api.AnnotatedString.to_string *) + +(* let pty_str : ty -> string = *) +(* GenericRustPrinter.ty () >> Generic_printer_api.AnnotatedString.to_string *) 
+(* end *) + +let experimental = + Sys.getenv "HAX_ENGINE_EXPERIMENTAL_RUST_PRINTER" |> Option.is_some + +include + (val if experimental then failwith "todo" (*module Experimental : T*) + else (module Traditional : T)) diff --git a/engine/lib/profiling.ml b/engine/lib/profiling.ml new file mode 100644 index 000000000..819b3de35 --- /dev/null +++ b/engine/lib/profiling.ml @@ -0,0 +1,37 @@ +open Prelude + +(** Is profiling enabled? *) +let enabled = ref true + +(** Profiles the function `f`, that operates in a given context over a given quantity of things it is processing. *) +let profile (type b) (context : Diagnostics.Context.t) (quantity : int) + (f : unit -> b) : b = + if !enabled (* `!` derefs, it's not a negation *) then ( + let time0 = Core.Time_ns.now () in + let mem0 = Core.Gc.minor_words () in + let finalize errored = + if !enabled (* `!` derefs, it's not a negation *) then + let time1 = Core.Time_ns.now () in + let mem1 = Core.Gc.minor_words () in + let time_ns = Core.Time_ns.diff time1 time0 in + let memory = mem1 - mem0 in + Hax_io.write + (Types.ProfilingData + { + context = Diagnostics.Context.display context; + time_ns = + Core.Time_ns.Span.to_int63_ns time_ns |> Int63.to_string; + memory = Int.to_string memory; + quantity = Int.to_int64 quantity; + errored; + }) + else () + in + try + let result = f () in + finalize false; + result + with e -> + finalize true; + raise e) + else f () diff --git a/engine/lib/side_effect_utils.ml b/engine/lib/side_effect_utils.ml index 79f66fb0e..8526de37a 100644 --- a/engine/lib/side_effect_utils.ml +++ b/engine/lib/side_effect_utils.ml @@ -261,30 +261,31 @@ struct ( { e with e = Return { e = e'; witness } }, m#plus effects (no_lbs { SideEffects.zero with return = Some e'.typ }) )) - | Break { e = e'; label; witness } -> + | Break { e = e'; label; acc; witness } -> HoistSeq.one env (self#visit_expr env e') (fun e' effects -> - ( { e with e = Break { e = e'; label; witness } }, + ( { e with e = Break { e = e'; acc; 
label; witness } }, m#plus effects (no_lbs { SideEffects.zero with break = Some e'.typ }) )) - | Continue { e = e'; label; witness } -> ( + | Continue { acc = e'; label; witness } -> ( let ceffect = no_lbs { SideEffects.zero with - continue = Some (Option.map ~f:(fun (_, e) -> e.typ) e'); + continue = Some (Option.map ~f:(fun (e, _) -> e.typ) e'); } in match e' with - | Some (witness', e') -> + | Some (e', witness') -> HoistSeq.one env (self#visit_expr env e') (fun e' effects -> ( { e with e = - Continue { e = Some (witness', e'); label; witness }; + Continue + { acc = Some (e', witness'); label; witness }; }, m#plus ceffect effects )) | None -> (e, ceffect)) - | Loop { body; kind; state; label; witness } -> + | Loop { body; kind; state; label; witness; control_flow } -> let kind' = match kind with | UnconditionalLoop -> [] @@ -329,7 +330,11 @@ struct in let effects = m#plus effects body_effects in let body = lets_of_bindings lbs body in - ( { e with e = Loop { body; kind; state; label; witness } }, + ( { + e with + e = + Loop { body; kind; state; label; witness; control_flow }; + }, effects )) | If { cond; then_; else_ } -> HoistSeq.one env (self#visit_expr env cond) (fun cond effects -> diff --git a/engine/lib/subtype.ml b/engine/lib/subtype.ml index 5c4656e5b..1f9528d49 100644 --- a/engine/lib/subtype.ml +++ b/engine/lib/subtype.ml @@ -121,13 +121,13 @@ struct | PWild -> PWild | PAscription { typ; typ_span; pat } -> PAscription { typ = dty span typ; pat = dpat pat; typ_span } - | PConstruct { name; args; is_record; is_struct } -> + | PConstruct { constructor; is_record; is_struct; fields } -> PConstruct { - name; - args = List.map ~f:(dfield_pat span) args; + constructor; is_record; is_struct; + fields = List.map ~f:(dfield_pat span) fields; } | POr { subpats } -> POr { subpats = List.map ~f:dpat subpats } | PArray { args } -> PArray { args = List.map ~f:dpat args } @@ -236,19 +236,28 @@ struct e = dexpr e; witness = S.mutable_variable span witness; } - | Loop { 
body; kind; state; label; witness } -> + | Loop { body; kind; state; label; witness; control_flow } -> Loop { body = dexpr body; kind = dloop_kind span kind; state = Option.map ~f:(dloop_state span) state; label; + control_flow = + Option.map + ~f: + ((function + | A.BreakOnly -> B.BreakOnly + | A.BreakOrReturn -> B.BreakOrReturn) + *** S.fold_like_loop span) + control_flow; witness = S.loop span witness; } - | Break { e; label; witness } -> + | Break { e; acc; label; witness } -> Break { e = dexpr e; + acc = Option.map ~f:(dexpr *** S.state_passing_loop span) acc; label; witness = (S.break span *** S.loop span) witness; } @@ -261,10 +270,10 @@ struct return_typ = dty span return_typ; witness = S.question_mark span witness; } - | Continue { e; label; witness = w1, w2 } -> + | Continue { acc; label; witness = w1, w2 } -> Continue { - e = Option.map ~f:(S.state_passing_loop span *** dexpr) e; + acc = Option.map ~f:(dexpr *** S.state_passing_loop span) acc; label; witness = (S.continue span w1, S.loop span w2); } @@ -532,7 +541,7 @@ struct } | Alias { name; item } -> B.Alias { name; item } | Use { path; is_external; rename } -> B.Use { path; is_external; rename } - | Quote quote -> Quote (dquote span quote) + | Quote { quote; origin } -> Quote { quote = dquote span quote; origin } | HaxError e -> B.HaxError e | NotImplementedYet -> B.NotImplementedYet diff --git a/engine/names/extract/Cargo.toml b/engine/names/extract/Cargo.toml index c6f9dacb6..22604c874 100644 --- a/engine/names/extract/Cargo.toml +++ b/engine/names/extract/Cargo.toml @@ -16,7 +16,6 @@ serde_json.workspace = true hax-engine-names.workspace = true hax-adt-into.workspace = true tempfile.version = "3.9" -bincode.workspace = true [features] default = ["extract_names_mode"] diff --git a/engine/names/extract/build.rs b/engine/names/extract/build.rs index 47e7fbc51..aeff4063b 100644 --- a/engine/names/extract/build.rs +++ b/engine/names/extract/build.rs @@ -10,6 +10,25 @@ use std::process::{Command, 
Stdio}; mod hax_frontend_exporter_def_id; use hax_frontend_exporter_def_id::*; +mod id_table { + //! Shim to make `def_id.rs` build. Replaces the `id_table` interner with a plain `Arc`. + use serde::{Deserialize, Serialize}; + use std::sync::Arc; + + #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] + pub struct Node { + value: Arc, + cache_id: u32, + } + + impl std::ops::Deref for Node { + type Target = T; + fn deref(&self) -> &Self::Target { + self.value.as_ref() + } + } +} + /// Name of the current crate const HAX_ENGINE_NAMES_CRATE: &str = "hax_engine_names"; /// Path `a::b` needs to be compiled to a OCaml variant name, `::` is @@ -40,7 +59,7 @@ fn def_path_item_to_str(path_item: DefPathItem) -> String { | DefPathItem::ValueNs(s) | DefPathItem::MacroNs(s) | DefPathItem::LifetimeNs(s) => s, - DefPathItem::CrateRoot => "CrateRoot".into(), + DefPathItem::CrateRoot { name } => uppercase_first_letter(&name), DefPathItem::Impl => "Impl".into(), DefPathItem::ForeignMod => "ForeignMod".into(), DefPathItem::Use => "Use".into(), @@ -53,23 +72,29 @@ fn def_path_item_to_str(path_item: DefPathItem) -> String { } } -fn disambiguated_def_path_item_to_str(defpath: DisambiguatedDefPathItem) -> String { - let data = def_path_item_to_str(defpath.data); +fn disambiguated_def_path_item_to_str(defpath: &DisambiguatedDefPathItem) -> String { + let data = def_path_item_to_str(defpath.data.clone()); let disambiguator = disambiguator_to_str(defpath.disambiguator); format!("{data}{disambiguator}") } -fn def_id_to_str(DefId { krate, path, .. }: &mut DefId) -> String { - if krate == HAX_ENGINE_NAMES_CRATE { - *krate = "rust_primitives".into(); +fn def_id_to_str(def_id: &DefId) -> (Value, String) { + let crate_name = if def_id.krate == HAX_ENGINE_NAMES_CRATE { + "rust_primitives" + } else { + &def_id.krate }; - let path = path - .clone() + // Update the crate name in the json output as well. 
+ let mut json = serde_json::to_value(def_id).unwrap(); + json["contents"]["value"]["krate"] = Value::String(crate_name.to_owned()); + + let crate_name = uppercase_first_letter(crate_name); + let path = [crate_name] .into_iter() - .map(disambiguated_def_path_item_to_str) + .chain(def_id.path.iter().map(disambiguated_def_path_item_to_str)) .collect::>() .join(SEPARATOR); - format!("{}{SEPARATOR}{path}", uppercase_first_letter(&krate)) + (json, path) } fn reader_to_str(s: String) -> String { @@ -82,9 +107,9 @@ fn reader_to_str(s: String) -> String { let def_ids = def_ids .into_iter() - .map(|mut did| { - let krate_name = def_id_to_str(&mut did); - (serde_json::to_string(&did).unwrap(), krate_name) + .map(|did| { + let (json, krate_name) = def_id_to_str(&did); + (serde_json::to_string(&json).unwrap(), krate_name) }) .collect::>(); diff --git a/engine/names/src/lib.rs b/engine/names/src/lib.rs index 438dd41f0..9df5efea9 100644 --- a/engine/names/src/lib.rs +++ b/engine/names/src/lib.rs @@ -38,6 +38,11 @@ fn dummy_hax_concrete_ident_wrapper>(x: I, mu let _ = ..; let _ = ..1; + let _ = [ + std::ops::ControlFlow::Break(()), + std::ops::ControlFlow::Continue(()), + ]; + fn iterator_functions(it: It) { let _ = it.clone().step_by(2); let _ = it.clone().enumerate(); @@ -159,6 +164,8 @@ mod hax { enum MutRef {} fn while_loop() {} + fn while_loop_cf() {} + fn while_loop_return() {} fn repeat() {} fn update_at() {} mod monomorphized_update_at { @@ -174,9 +181,19 @@ mod hax { mod folds { fn fold_range() {} + fn fold_range_cf() {} + fn fold_range_return() {} fn fold_range_step_by() {} + fn fold_range_step_by_cf() {} + fn fold_range_step_by_return() {} fn fold_enumerated_slice() {} + fn fold_enumerated_slice_cf() {} + fn fold_enumerated_slice_return() {} fn fold_enumerated_chunked_slice() {} + fn fold_enumerated_chunked_slice_cf() {} + fn fold_enumerated_chunked_slice_return() {} + fn fold_cf() {} + fn fold_return() {} } /// The engine uses this `dropped_body` symbol as a marker 
value diff --git a/engine/utils/generate_from_ast/codegen_printer.ml b/engine/utils/generate_from_ast/codegen_printer.ml new file mode 100644 index 000000000..3e764c892 --- /dev/null +++ b/engine/utils/generate_from_ast/codegen_printer.ml @@ -0,0 +1,411 @@ +open Base +open Utils +open Types + +type state = { names_with_doc : string list } + +let ( let* ) x f = Option.bind ~f x +let super_types_list = [ "expr"; "pat"; "guard"; "arm"; "item" ] + +let get_super_type ty = + List.find ~f:(fun s -> String.equal (s ^ "'") ty) super_types_list + +let get_child_type ty = + if List.mem ~equal:String.equal super_types_list ty then Some (ty ^ "'") + else None + +let do_not_override_prefix = "_do_not_override_" + +let is_hidden_method = + let list = + [ + "expr'_App"; + "expr'_Construct"; + "ty_TApp"; + "lhs_LhsFieldAccessor"; + "local_ident"; + "pat'_PConstruct"; + "expr'_GlobalVar"; + "variant"; + "item'_Type"; + ] + in + List.mem ~equal:[%eq: string] list + +let lazy_doc_manual_definitions = [ "_do_not_override_lazy_of_generics" ] + +let rec of_ty (state : state) (call_method : string -> ty:string -> string) + (t : Type.t) : ((unit -> string) -> string -> string) option = + let* args = + List.fold t.args ~init:(Some []) ~f:(fun acc x -> + let* acc = acc in + let* x = of_ty state call_method x in + Some (x :: acc)) + |> Option.map ~f:List.rev + in + match (t.typ, args) with + | "option", [ inner ] -> + Some + (fun pos value -> + "(match " ^ value ^ " with | None -> None | Some value -> Some (" + ^ inner pos "value" ^ "))") + | "list", [ inner ] -> + Some + (fun pos value -> + "(List.map ~f:(fun x -> " ^ inner pos "x" ^ ") " ^ value ^ ")") + | "prim___tuple_2", [ fst; snd ] -> + Some + (fun pos value -> + let base = + "(" + ^ fst pos ("(fst " ^ value ^ ")") + ^ "," + ^ snd pos ("(snd " ^ value ^ ")") + ^ ")" + in + let mk proj = + "(let x = " ^ base ^ "in lazy_doc (fun tuple -> (" ^ proj + ^ " tuple)#p) " ^ pos () ^ " x)" + in + match List.map ~f:(is_lazy_doc_typ state) t.args 
with + | [ false; true ] -> mk "snd" + | [ true; false ] -> mk "fst" + | _ -> base) + (* if String.is_prefix ~prefix:"F." (List.nth t.args 1 |> Option.value ~default:"") then "(let x = " ^ base ^ "in lazy_doc x)" else base) *) + | "prim___tuple_3", [ fst; snd; thd ] -> + Some + (fun pos value -> + "(let (value1, value2, value3) = " ^ value ^ " in (" + ^ fst pos "value1" ^ "," ^ snd pos "value2" ^ "," ^ thd pos "value3" + ^ "))") + | _ when List.mem ~equal:[%eq: string] state.names_with_doc t.typ -> + Some + (fun pos value -> + "(print#" ^ do_not_override_prefix ^ "lazy_of_" ^ t.typ + ^ (if Option.is_some (get_super_type t.typ) then " ~super" else "") + ^ " " ^ pos () ^ " " ^ value ^ ")") + | _ -> Some (fun pos value -> "(" ^ value ^ ")") + +and string_ty_of_ty' (state : state) (t : Type.t) = + if String.is_prefix t.typ ~prefix:"prim___tuple_" then + let args = List.map t.args ~f:(string_ty_of_ty' state) in + let n = List.count args ~f:(String.is_suffix ~suffix:"lazy_doc)") in + let base = + "(" + ^ String.concat ~sep:" * " (List.map t.args ~f:(string_ty_of_ty' state)) + ^ ")" + in + if [%eq: int] n 1 then "(" ^ base ^ " lazy_doc)" else base + else + "(" + ^ (if List.is_empty t.args then "" + else + "(" + ^ String.concat ~sep:", " (List.map t.args ~f:(string_ty_of_ty' state)) + ^ ") ") + ^ t.typ + ^ (if List.mem ~equal:[%eq: string] state.names_with_doc t.typ then + " lazy_doc" + else "") + ^ ")" + +and is_lazy_doc_typ (state : state) = string_ty_of_ty' state >> is_lazy_doc_typ' +and is_lazy_doc_typ' = String.is_suffix ~suffix:"lazy_doc)" + +let string_ty_of_ty (state : state) (t : Type.t) = + let s = string_ty_of_ty' state t in + match s with + | "(generics lazy_doc)" -> + "((generics lazy_doc * generic_param lazy_doc list * generic_constraint \ + lazy_doc list) lazy_doc)" + | _ -> s + +let meth_name' typ_name variant_name = + typ_name ^ if String.is_empty variant_name then "" else "_" ^ variant_name + +let meth_name typ_name variant_name = + let meth = meth_name' 
typ_name variant_name in + (if is_hidden_method meth then do_not_override_prefix else "") ^ meth + +let print_variant state (call_method : string -> ty:string -> string) + (register_position : string option -> string) (super_type : string option) + (register_signature : string -> unit) (t_name : string) (v : Variant.t) : + string = + let meth_name = meth_name t_name v.name in + let meth = "print#" ^ meth_name in + let mk named fields = + let head = + v.name + ^ (if named then " { " else " ( ") + ^ String.concat ~sep:(if named then ";" else ",") (List.map ~f:fst fields) + ^ (if named then " } " else ")") + ^ " -> " + in + let args = + List.map + ~f:(fun (field_name, ty) -> + let value = + match of_ty state call_method ty with + | Some f -> + let pos = register_position (Some field_name) in + f (fun _ -> pos) field_name + | None -> field_name + in + let name = "~" ^ field_name ^ ":" in + (if named then name else "") ^ "(" ^ value ^ ")") + fields + in + let call = + String.concat ~sep:" " + (meth + :: ((if Option.is_some super_type then [ "~super" ] else []) @ args)) + in + let signature = + let ty = + List.map + ~f:(fun (name, ty) -> + let name = if named then name ^ ":" else "" in + name ^ string_ty_of_ty state ty) + fields + |> String.concat ~sep:" -> " + in + let super = + match super_type with + | Some super_type -> " super:(" ^ super_type ^ ") -> " + | None -> "" + in + register_signature + ("method virtual " ^ meth_name ^ " : " ^ super ^ ty ^ " -> document") + in + head ^ call + in + "\n | " + ^ + match v.payload with + | Record fields -> mk true fields + | None -> v.name ^ " -> " ^ meth + | Tuple types -> + mk false (List.mapi ~f:(fun i ty -> ("x" ^ Int.to_string i, ty)) types) + +let catch_errors_for = [ "expr"; "item"; "pat" ] + +let print_datatype state (dt : Datatype.t) + (register_entrypoint : string -> unit) + (register_position : string -> string -> string option -> string) = + let super_type = get_super_type dt.name in + let sigs = ref [] in + let 
method_name = do_not_override_prefix ^ "lazy_of_" ^ dt.name in + let print_variants variants wrapper = + let head = + "(**/**) method " ^ method_name + ^ (match super_type with Some t -> " ~(super: " ^ t ^ ")" | _ -> "") + ^ " ast_position (value: " ^ dt.name ^ "): " ^ dt.name ^ " lazy_doc =" + in + let body = + (if Option.is_some (get_child_type dt.name) then + "\n let super = value in" + else "") + ^ "\n match value with" + ^ String.concat ~sep:"" + (List.map + ~f:(fun variant -> + print_variant state + (fun name ~ty:_ -> name) + (register_position dt.name variant.Variant.name) + super_type + (fun s -> sigs := s :: !sigs) + dt.name variant) + variants) + in + let body = + "(print#wrap_" ^ dt.name ^ " ast_position value (" ^ body ^ "))" + in + let body = wrapper body in + sigs := + ("method wrap_" ^ dt.name ^ " (_pos: ast_position) (_value: " ^ dt.name + ^ ") (doc: document): document = doc") + :: !sigs; + let def = + head ^ "lazy_doc (fun (value: " ^ dt.name ^ ") -> " ^ body + ^ ") ast_position value" + in + if List.mem ~equal:[%eq: string] lazy_doc_manual_definitions method_name + then "(* skipping " ^ method_name ^ " *) (**/**)" + else def ^ "(**/**)" + in + let main = + match dt.kind with + | Variant variants -> print_variants variants Fn.id + | Record record -> + let wrapper = + if List.exists ~f:(fst >> [%eq: string] "span") record then + fun body -> + "print#with_span ~span:value.span (fun _ -> " ^ body ^ ")" + else Fn.id + in + let wrapper = + if List.mem ~equal:[%eq: string] catch_errors_for dt.name then + fun body -> + "print#catch_exn print#error_" ^ dt.name ^ " (fun () -> " + ^ wrapper body ^ ")" + else wrapper + in + print_variants [ { name = ""; payload = Record record } ] wrapper + | TypeSynonym ty -> + print_variants [ { name = ""; payload = Tuple [ ty ] } ] (fun x -> x) + | _ -> "(* Not translating " ^ dt.name ^ " *)" + in + let print = + let name = "print_" ^ dt.name in + let ty = "ast_position -> " ^ dt.name ^ " -> " in + let body = + "fun 
ast_position x -> (print#" ^ method_name ^ " ast_position x)#p" + in + if Option.is_none super_type then + "method " ^ name ^ ": " ^ ty ^ " document = " ^ body + else "" + in + let entrypoint = + let name = "entrypoint_" ^ dt.name in + let ty = dt.name ^ " -> " in + let body = "print#print_" ^ dt.name ^ " AstPos_Entrypoint" in + if Option.is_none super_type then ( + register_entrypoint (name ^ " : " ^ ty ^ " 'a"); + "method " ^ name ^ ": " ^ ty ^ " document = " ^ body) + else "" + in + String.concat ~sep:"\n\n" (main :: print :: entrypoint :: !sigs) + +let hardcoded = + {| +module LazyDoc = struct + type 'a lazy_doc = + < compact : output -> unit + ; pretty : output -> state -> int -> bool -> unit + ; requirement : int + ; p : document + ; v : 'a + ; ast_position : ast_position > + let lazy_doc : 'a. ('a -> document) -> ast_position -> 'a -> 'a lazy_doc = + fun to_document pos value -> + let lazy_doc = ref None in + let doc () = + match !lazy_doc with + | None -> + let doc = to_document value in + lazy_doc := Some doc; + doc + | Some doc -> doc + in + object (self) + method requirement : requirement = requirement (doc ()) + method pretty : output -> state -> int -> bool -> unit = + fun o s i b -> pretty o s i b (doc ()) + method compact : output -> unit = fun o -> compact o (doc ()) + method p = custom (self :> custom) + method v = value + method ast_position = pos + end +end +open LazyDoc +|} + +let class_prelude = + {| + method virtual with_span: span:span -> (unit -> document) -> document + method virtual catch_exn : (string -> document) -> (unit -> document) -> document + + method virtual _do_not_override_lazy_of_local_ident: _ + method virtual _do_not_override_lazy_of_concrete_ident: _ +|} + +let mk datatypes = + let datatypes = + List.filter + ~f:(fun dt -> not ([%eq: string] dt.Datatype.name "mutability")) + datatypes + in + let state = + let names_with_doc = List.map ~f:(fun dt -> dt.name) datatypes in + let names_with_doc = + "quote" :: "concrete_ident" :: 
"local_ident" :: names_with_doc + in + { names_with_doc } + in + let positions = ref [ "AstPos_Entrypoint"; "AstPos_NotApplicable" ] in + let entrypoint_types = ref [] in + let class_body = + List.map + ~f:(fun dt -> + print_datatype state dt + (fun x -> entrypoint_types := x :: !entrypoint_types) + (fun ty variant field -> + let pos = + "AstPos_" ^ ty ^ "_" ^ variant + ^ match field with Some field -> "_" ^ field | _ -> "" + in + positions := pos :: !positions; + pos)) + datatypes + |> String.concat ~sep:"\n\n" + in + let object_poly = String.concat ~sep:";\n " !entrypoint_types in + let object_span_data_map = + String.concat ~sep:"\n" + (List.map + ~f:(fun s -> + let n = fst (String.lsplit2_exn ~on:':' s) in + "method " ^ n ^ " = obj#" ^ n) + !entrypoint_types) + in + let object_map = + String.concat ~sep:"\n" + (List.map + ~f:(fun s -> + let n = fst (String.lsplit2_exn ~on:':' s) in + "method " ^ n ^ " x = f (fun obj -> obj#" ^ n ^ " x)") + !entrypoint_types) + in + Printf.sprintf + {| +open! Prelude +open! 
Ast +open PPrint +type ast_position = %s | AstPosition_Quote + +%s + +module Make (F : Features.T) = struct + module AST = Ast.Make (F) + open Ast.Make (F) + + class virtual base = object (print) + %s + end + + type ('span_data, 'a) object_type = < + span_data : 'span_data; + %s + > + + let map (type span_data) (type a) (type b) + (f: ((span_data, a) object_type -> a) -> b) + : (unit, b) object_type = object + method span_data: unit = () + %s + end + + let map_span_data (type a) (type b) (type t) + (obj: (a, t) object_type) + (span_data: b) + : (b, t) object_type = object + method span_data: b = span_data + %s + end +end +|} + (String.concat ~sep:" | " + (List.dedup_and_sort ~compare:String.compare !positions)) + hardcoded + (class_prelude ^ class_body) + object_poly object_map object_span_data_map diff --git a/engine/utils/generate_from_ast/codegen_visitor.ml b/engine/utils/generate_from_ast/codegen_visitor.ml index 9a8b62725..e9e65ad24 100644 --- a/engine/utils/generate_from_ast/codegen_visitor.ml +++ b/engine/utils/generate_from_ast/codegen_visitor.ml @@ -233,6 +233,7 @@ let is_allowed_opaque name = "quote"; "float_kind"; "int_kind"; + "item_quote_origin"; ] in List.mem ~equal:String.equal allowlist name diff --git a/engine/utils/generate_from_ast/generate_from_ast.ml b/engine/utils/generate_from_ast/generate_from_ast.ml index 9bc114e5c..7526afa76 100644 --- a/engine/utils/generate_from_ast/generate_from_ast.ml +++ b/engine/utils/generate_from_ast/generate_from_ast.ml @@ -33,6 +33,7 @@ let _main = |> match Sys.get_argv () with | [| _; "visitors" |] -> Codegen_visitor.mk + | [| _; "printer" |] -> Codegen_printer.mk | [| _; "ast_builder" |] -> Codegen_ast_builder.mk | [| _; "ast_destruct" |] -> Codegen_ast_destruct.mk | [| _; "json" |] -> diff --git a/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js b/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js index 0f9082802..12ff7487d 100644 --- a/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js 
+++ b/engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js @@ -537,9 +537,12 @@ function run(str){ let impl = `include struct [@@@warning "-A"]`; - let items = Object.entries(definitions).map( - ([name, def]) => export_definition(name, def) - ).filter(x => x instanceof Object); + let items = Object.entries(definitions) + .map(([name, def]) => ['Node_for_TyKind' == name ? 'node_for_ty_kind_generated' : name, def]) + .map(([name, def]) => ['Node_for_DefIdContents' == name ? 'node_for_def_id_contents_generated' : name, def]) + .map( + ([name, def]) => export_definition(name, def) + ).filter(x => x instanceof Object); let derive_items = ['show', 'eq']; @@ -568,14 +571,73 @@ open ParseError ).join('\nand ') + derive_clause ); + impl += ` +and node_for__ty_kind = node_for_ty_kind_generated +and node_for__def_id_contents = node_for_def_id_contents_generated + +type map_types = ${"[`TyKind of ty_kind | `DefIdContents of def_id_contents]"} +let cache_map: (int64, ${"[ `Value of map_types | `JSON of Yojson.Safe.t ]"}) Base.Hashtbl.t = Base.Hashtbl.create (module Base.Int64) + +let parse_table_id_node (type t) (name: string) (encode: t -> map_types) (decode: map_types -> t option) (parse: Yojson.Safe.t -> t) (o: Yojson.Safe.t): (t * int64) = + let label = "parse_table_id_node:" ^ name ^ ": " in + match o with + | \`Assoc alist -> begin + let id = match List.assoc_opt "cache_id" alist with + | Some (\`Int id) -> Base.Int.to_int64 id + | Some (\`Intlit lit) -> (try Base.Int64.of_string lit with | _ -> failwith (label ^ "Base.Int64.of_string failed for " ^ lit)) + | Some bad_json -> failwith (label ^ "id was expected to be an int, got: " ^ Yojson.Safe.pretty_to_string bad_json ^ "\n\n\nfull json: " ^ Yojson.Safe.pretty_to_string o) + | None -> failwith (label ^ " could not find the key 'cache_id' in the following json: " ^ Yojson.Safe.pretty_to_string o) + in + let decode v = decode v |> Base.Option.value_exn ~message:(label ^ "could not decode value (wrong type)") in + 
match List.assoc_opt "value" alist with + | Some json when (match json with \`Null -> false | _ -> true) -> + (parse json, id) + | _ -> + let value = match Base.Hashtbl.find cache_map id with + | None -> failwith (label ^ "failed to lookup id " ^ Base.Int64.to_string id) + | Some (\`Value v) -> decode v + | Some (\`JSON json) -> + let value = parse json in + Base.Hashtbl.set cache_map ~key:id ~data:(\`Value (encode value)); + value + in (value, id) + end + | _ -> failwith (label ^ "expected Assoc") + +`; impl += (''); impl += ('let rec ' + items.map(({name, type, parse}) => `parse_${name} (o: Yojson.Safe.t): ${name} = ${parse}` ).join('\nand ')); + impl += ` +and parse_node_for__ty_kind (o: Yojson.Safe.t): node_for__ty_kind = + let (value, id) = + parse_table_id_node "TyKind" + (fun value -> \`TyKind value) + (function | \`TyKind value -> Some value | _ -> None) + parse_ty_kind + o + in + {value; id} +and parse_node_for__def_id_contents (o: Yojson.Safe.t): node_for__def_id_contents = + let (value, id) = + parse_table_id_node "DefIdContents" + (fun value -> \`DefIdContents value) + (function | \`DefIdContents value -> Some value | _ -> None) + parse_def_id_contents + o + in + {value; id} +`; impl += (''); impl += ('let rec ' + items.map(({name, type, parse, to_json}) => `to_json_${name} (o: ${name}): Yojson.Safe.t = ${to_json}` ).join('\nand ')); + impl += ` +and to_json_node_for__ty_kind {value; id} = to_json_node_for_ty_kind_generated {value; id} +and to_json_node_for__def_id_contents {value; id} = to_json_node_for_def_id_contents_generated {value; id} +`; + return impl + ' \n end'; } diff --git a/engine/utils/ppx_phases_index/ppx_phases_index.ml b/engine/utils/ppx_phases_index/ppx_phases_index.ml index 48d1e58e0..5e4c25b75 100644 --- a/engine/utils/ppx_phases_index/ppx_phases_index.ml +++ b/engine/utils/ppx_phases_index/ppx_phases_index.ml @@ -1,7 +1,6 @@ open Base open Ppxlib -let name = "phases_index" let ( let* ) x f = Option.bind ~f x let map_first_letter (f 
: string -> string) (s : string) = @@ -25,7 +24,7 @@ let locate_phases_directory () : string = |> Option.value_exn ~message:"ppx_phases_index: could not locate folder [phases]" -let list_phases loc : (string * string * _ option) list = +let list_phases loc : (string * string * string * _ option) list = let dir = locate_phases_directory () in Stdlib.Sys.readdir dir |> Array.to_list |> List.filter_map ~f:(fun filename -> @@ -50,9 +49,9 @@ let list_phases loc : (string * string * _ option) list = str in match str with - | [ _ ] -> (module_name, phase_name, None) + | [ _ ] -> (filename, module_name, phase_name, None) | [ { psig_desc = Psig_attribute attr; _ }; _ ] -> - (module_name, phase_name, Some attr) + (filename, module_name, phase_name, Some attr) | [] -> failwith ("Empty phase" ^ filename) | _ -> failwith @@ -80,13 +79,13 @@ let rename (l : (string * string) list) = r end -let expand ~(ctxt : Expansion_context.Extension.t) (str : structure_item) : - structure_item = +let expand_phases_index ~(ctxt : Expansion_context.Extension.t) + (str : structure_item) : structure_item = let loc = Expansion_context.Extension.extension_point_loc ctxt in let (module S) = Ppxlib.Ast_builder.make loc in let modules = list_phases loc - |> List.map ~f:(fun (module_name, phase_name, attrs) -> + |> List.map ~f:(fun (_, module_name, phase_name, attrs) -> let h x = { txt = Lident x; loc } in let original = S.pmod_ident { txt = Ldot (Lident module_name, "Make"); loc } @@ -111,10 +110,85 @@ let expand ~(ctxt : Expansion_context.Extension.t) (str : structure_item) : in S.pstr_include (S.include_infos (S.pmod_structure modules)) -let ext = - Extension.V3.declare name Extension.Context.structure_item - Ast_pattern.(pstr (__ ^:: nil)) - expand +let chop_ml_or_mli str = + match String.chop_suffix ~suffix:".ml" str with + | Some result -> Some result + | None -> String.chop_suffix ~suffix:".mli" str -let rule = Ppxlib.Context_free.Rule.extension ext -let () = 
Ppxlib.Driver.register_transformation ~rules:[ rule ] name +let filename_to_phase_constructor file_name = + let phase_name = + file_name |> String.rsplit2 ~on:'/' |> Option.map ~f:snd + |> Option.value ~default:file_name + |> String.chop_prefix ~prefix:"phase_" + |> Option.value_exn + ~message: + ("`[%auto_phase_name]` can only be used in a phase, whose filename \ + starts with `phase_`. Current file is: [" ^ file_name ^ "]") + |> chop_ml_or_mli + |> Option.value_exn + ~message: + ("File name [" ^ file_name + ^ "] was expected to end with a `.ml` or `.mli`") + in + phase_name |> String.split ~on:'_' + |> List.map ~f:uppercase_first_char + |> String.concat + +let expand_add_phase_names ~(ctxt : Expansion_context.Extension.t) + (typ : type_declaration) : structure_item = + let loc = Expansion_context.Extension.extension_point_loc ctxt in + let (module S) = Ppxlib.Ast_builder.make loc in + let ptype_kind = + match typ.ptype_kind with + | Ptype_variant ctors -> + let phases = list_phases loc in + let extra = + List.map + ~f:(fun (filename, _, _, _) -> + let name = filename_to_phase_constructor filename in + let name = { txt = name; loc = S.loc } in + let args = Pcstr_tuple [] in + S.constructor_declaration ~name ~args ~res:None) + phases + in + Ptype_variant (ctors @ extra) + | _ -> failwith "expected variants" + in + let typ = { typ with ptype_kind } in + S.pstr_type Recursive [ typ ] + +let expand_auto_phase_name ~(ctxt : Expansion_context.Extension.t) + (str : structure_item) : expression = + let file_name = Expansion_context.Extension.input_name ctxt in + let constructor = filename_to_phase_constructor file_name in + let loc = Expansion_context.Extension.extension_point_loc ctxt in + let (module S) = Ppxlib.Ast_builder.make loc in + let txt = Astlib.Longident.parse ("Diagnostics.Phase." 
^ constructor) in + S.pexp_construct { txt; loc = S.loc } None + +let () = + let rule_phases_index = + let name = "phases_index" in + Ppxlib.Context_free.Rule.extension + (Extension.V3.declare name Extension.Context.structure_item + Ast_pattern.(pstr (__ ^:: nil)) + expand_phases_index) + in + let rule_auto_phase_name = + let name = "auto_phase_name" in + Ppxlib.Context_free.Rule.extension + (Extension.V3.declare name Extension.Context.expression + Ast_pattern.(pstr (__ ^:: nil)) + expand_auto_phase_name) + in + let rule_expand_add_phase_names = + let name = "add_phase_names" in + Ppxlib.Context_free.Rule.extension + (Extension.V3.declare name Extension.Context.structure_item + Ast_pattern.(pstr (pstr_type drop (__ ^:: nil) ^:: nil)) + expand_add_phase_names) + in + Ppxlib.Driver.register_transformation + ~rules: + [ rule_phases_index; rule_auto_phase_name; rule_expand_add_phase_names ] + "ppx_phases_index" diff --git a/engine/utils/sourcemaps/base64.ml b/engine/utils/sourcemaps/base64.ml new file mode 100644 index 000000000..064b85a61 --- /dev/null +++ b/engine/utils/sourcemaps/base64.ml @@ -0,0 +1,10 @@ +open Prelude + +let alphabet = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" + +let encode (n : int) : char = + assert (n >= 0 && n < 64); + String.get alphabet n + +let decode (c : char) : int = String.index alphabet c |> Option.value_exn diff --git a/engine/utils/sourcemaps/dune b/engine/utils/sourcemaps/dune new file mode 100644 index 000000000..cf4e7dc83 --- /dev/null +++ b/engine/utils/sourcemaps/dune @@ -0,0 +1,9 @@ +(library + (name sourcemaps) + (package hax-engine) + (inline_tests) + (preprocess + (pps ppx_inline_test ppx_yojson_conv ppx_deriving.show ppx_deriving.eq)) + (libraries base)) + +(include_subdirs unqualified) diff --git a/engine/utils/sourcemaps/location.ml b/engine/utils/sourcemaps/location.ml new file mode 100644 index 000000000..cf2bda904 --- /dev/null +++ b/engine/utils/sourcemaps/location.ml @@ -0,0 +1,22 @@ +open 
Prelude + +type t = { line : int; col : int } [@@deriving eq] + +let show { line; col } = + "(" ^ Int.to_string line ^ ":" ^ Int.to_string col ^ ")" + +let pp (fmt : Stdlib.Format.formatter) (s : t) : unit = + Stdlib.Format.pp_print_string fmt @@ show s + +let default = { line = 0; col = 0 } +let plus_cols x cols = { x with col = x.col + cols } +let op ( + ) x y = { line = x.line + y.line; col = x.col + y.col } +let ( + ) = op ( + ) +let ( - ) = op ( - ) + +let compare (x : t) (y : t) : int = + let open Int in + if x.line > y.line then 1 + else if x.line = y.line then + if x.col > y.col then 1 else if x.col = y.col then 0 else -1 + else -1 diff --git a/engine/utils/sourcemaps/mappings/dual.ml b/engine/utils/sourcemaps/mappings/dual.ml new file mode 100644 index 000000000..09548fd9f --- /dev/null +++ b/engine/utils/sourcemaps/mappings/dual.ml @@ -0,0 +1,10 @@ +type 'a t = { gen : 'a; src : 'a } [@@deriving show, eq] + +let transpose ~(default : 'a t) ({ gen; src } : 'a option t) : 'a t option = + match (gen, src) with + | Some gen, None -> Some { gen; src = default.src } + | None, Some src -> Some { gen = default.gen; src } + | Some gen, Some src -> Some { gen; src } + | _ -> None + +let default (type a) (default : a) : a t = { gen = default; src = default } diff --git a/engine/utils/sourcemaps/mappings/instruction.ml b/engine/utils/sourcemaps/mappings/instruction.ml new file mode 100644 index 000000000..966e4cba3 --- /dev/null +++ b/engine/utils/sourcemaps/mappings/instruction.ml @@ -0,0 +1,104 @@ +open Prelude +open Types + +type t = + | ShiftGenLinesResetGenCols of { lines : int } + | ShiftGenCols of int + | Full of { shift_gen_col : int; shift_src : Location.t; meta : meta } +[@@deriving show { with_path = false }, eq] + +let encode_one : t -> string * [ `Sep | `NeedsSep ] = function + | ShiftGenLinesResetGenCols { lines } -> + Stdlib.prerr_endline ("lines:::" ^ Int.to_string lines); + (String.make lines ';', `Sep) + | ShiftGenCols n -> (Vql.encode_base64 [ n ], 
`NeedsSep) + | Full { shift_gen_col; shift_src; meta = { file_offset; name } } -> + ( Vql.encode_base64 + ([ shift_gen_col; file_offset; shift_src.line; shift_src.col ] + @ match name with Some name -> [ name ] | None -> []), + `NeedsSep ) + +let encode : t list -> string = + List.map ~f:encode_one + >> List.fold_left + ~f:(fun (acc, sep) (str, sep') -> + let acc = + acc + ^ + match (sep, sep') with `NeedsSep, `NeedsSep -> "," ^ str | _ -> str + in + (acc, sep')) + ~init:("", `Sep) + >> fst + +let decode_one (s : string) : t = + match Vql.decode_base64 s with + | [ cols ] -> ShiftGenCols cols + | shift_gen_col :: file_offset :: line :: col :: rest -> + let name = match rest with [ name ] -> Some name | _ -> None in + let meta = { file_offset; name } in + let shift_src : Location.t = { line; col } in + Full { shift_gen_col; shift_src; meta } + | _ -> failwith "??" + +let rec decode' (s : string) : t option list = + if String.is_empty s then [] + else + let n = + String.lfindi ~f:(fun _ -> function ';' | ',' -> true | _ -> false) s + |> Option.value ~default:(String.length s) + in + (if n > 0 then Some (decode_one (String.prefix s n)) + else + match String.get s 0 with + | ';' -> Some (ShiftGenLinesResetGenCols { lines = 1 }) + | ',' -> None + | _ -> failwith "should not be possible") + :: decode' (String.drop_prefix s (Int.max 1 n)) + +let decode : string -> t list = decode' >> List.filter_map ~f:Fn.id + +let eval_one (s : Location.t Dual.t) (i : t) : Location.t Dual.t * meta option = + match i with + | ShiftGenLinesResetGenCols { lines } -> + ({ s with gen = { line = s.gen.line + lines; col = 0 } }, None) + | ShiftGenCols i -> ({ s with gen = Location.plus_cols s.gen i }, None) + | Full { shift_gen_col; shift_src; meta } -> + let gen = Location.plus_cols s.gen shift_gen_col in + let src = Location.(s.src + shift_src) in + ({ gen; src }, Some meta) + +let to_points ?(init = Dual.default Location.default) : t list -> point list = + List.fold_left ~init:(init, []) 
~f:(fun (s, acc) i -> + let s, r = eval_one s i in + (s, (s, r) :: acc)) + >> snd >> List.rev + +let from_points : point list -> t list = + List.folding_map ~init:(Dual.default Location.default) + ~f:(fun { src; gen } (x, m) -> + let d = + Location.(Dual.{ Dual.src = x.src - src; Dual.gen = x.gen - gen }) + in + let shift_gen_col = (if Int.(d.gen.line = 0) then d else x).gen.col in + let output = + (if Int.(d.gen.line = 0) then [] + else [ ShiftGenLinesResetGenCols { lines = d.gen.line } ]) + @ + match m with + | Some meta -> [ Full { shift_gen_col; shift_src = d.src; meta } ] + | None when Int.(shift_gen_col = 0) -> [] + | _ -> [ ShiftGenCols shift_gen_col ] + in + let x = match m with Some _ -> x | None -> { x with src } in + (x, output)) + >> List.concat + +let%test _ = + let f = decode >> to_points >> from_points >> encode in + [ + ";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU,UAAAC,SAAc;;;ACApC,SAAS,KAAAC,GAAG,aAAAC,SAAiB;AAC7B,SAAS,YAAAC,SAAgB;AAWlB,IAAMC,IAAN,cAA2BF,EAAsC;AAAA,EAGtE,YAAYG,GAAqB;AAC/B,UAAMA,CAAK;AAIb,SAAAC,IAAa,MAAM,KAAK,SAAS,EAAEC,GAAQ,KAAK,MAAMA,IAAS,EAAE,CAAC;AAClE,SAAAC,IAAa,MAAM,KAAK,SAAS,EAAED,GAAQ,KAAK,MAAMA,IAAS,EAAE,CAAC;AAJhE,SAAK,MAAMA,IAASF,EAAMI;AAAA,EAC5B;AAAA,EAKA,SAAS;AACP,WAAOR,EAAC;AAAA,MAAI,OAAM;AAAA,OAChBA,EAAC,YAAI,KAAK,MAAM,KAAM,GACtBA,EAAC,WACCA,EAAC;AAAA,MAAO,SAAS,KAAKO;AAAA,OAAY,GAAC,GAClC,KACA,KAAK,MAAMD,GACX,KACDN,EAAC;AAAA,MAAO,SAAS,KAAKK;AAAA,OAAY,GAAC,CACrC,CACF;AAAA,EACF;AACF,GAEWI,IAAkB,CAACL,MAAwB;AACpD,MAAI,CAACM,GAAOC,CAAQ,IAAIT,EAASE,EAAMI,CAAa;AACpD,SAAOR,EAAC;AAAA,IAAI,OAAM;AAAA,KAChBA,EAAC,YAAII,EAAMQ,CAAO,GAClBZ,EAAC,WACCA,EAAC;AAAA,IAAO,SAAS,MAAMW,EAASD,IAAQ,CAAC;AAAA,KAAG,GAAC,GAC5C,KACAA,GACA,KACDV,EAAC;AAAA,IAAO,SAAS,MAAMW,EAASD,IAAQ,CAAC;AAAA,KAAG,GAAC,CAC/C,CACF;AACF;;;AD9CAG;AAAA,EACEC,EAAAC,GAAA,MACED,EAACE,GAAA;AAAA,IAAaC,GAAO;AAAA,IAAYC,GAAe;AAAA,GAAK,GACrDJ,EAACK,GAAA;AAAA,IAAgBF,GAAO;AAAA,IAAYC,GAAe;AAAA,GAAK,CAC1D;AAAA,EACA,SAAS,eAAe,MAAM;AAChC;"; + ] + |> List.for_all ~f:(fun s -> String.equal s (f 
s)) + +let from_spanned : Spanned.t list -> t list = Spanned.to_points >> from_points diff --git a/engine/utils/sourcemaps/mappings/mappings.ml b/engine/utils/sourcemaps/mappings/mappings.ml new file mode 100644 index 000000000..67fb40347 --- /dev/null +++ b/engine/utils/sourcemaps/mappings/mappings.ml @@ -0,0 +1,41 @@ +open Prelude +include Types + +type range = { start : Location.t; end_ : Location.t option } +[@@deriving show, eq] + +module Chunk = struct + type t = { gen : range; src : range; meta : meta } [@@deriving show, eq] + + let compare (x : t) (y : t) = Location.compare x.gen.start y.gen.start + + let from_spanned ((start, end_, meta) : Spanned.t) : t = + let gen = { start = start.gen; end_ = end_.gen } in + let src = { start = start.src; end_ = end_.src } in + { gen; src; meta } + + let to_spanned ({ gen; src; meta } : t) : Spanned.t = + ( { gen = gen.start; src = src.start }, + { gen = gen.end_; src = src.end_ }, + meta ) + + let%test _ = + let x = ";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU" in + let s = Instruction.(decode x |> to_points) |> Spanned.from_points in + [%eq: Spanned.t list] (List.map ~f:(from_spanned >> to_spanned) s) s + + let decode : string -> t list = + Instruction.(decode >> to_points >> Spanned.from_points) + >> List.map ~f:from_spanned + + let encode : t list -> string = + List.map ~f:to_spanned >> Instruction.from_spanned >> Instruction.encode + + let%test _ = + let x = + ";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU,UAAAC,SAAc;;;ACApC,SAAS,KAAAC,GAAG,aAAAC,SAAiB;AAC7B,SAAS,YAAAC,SAAgB;AAWlB,IAAMC,IAAN,cAA2BF,EAAsC" + in + decode x |> encode |> [%eq: string] x +end + +include Chunk diff --git a/engine/utils/sourcemaps/mappings/mappings.mli b/engine/utils/sourcemaps/mappings/mappings.mli new file mode 100644 index 000000000..7bc0e9d55 --- /dev/null +++ b/engine/utils/sourcemaps/mappings/mappings.mli @@ -0,0 +1,13 @@ +type meta = { file_offset : int; name : int option } [@@deriving show, eq] +type range = { start : Location.t; end_ : Location.t option 
} + +module Chunk : sig + type t = { gen : range; src : range; meta : meta } [@@deriving show, eq] + + val compare : t -> t -> int +end + +open Chunk + +val decode : string -> t list +val encode : t list -> string diff --git a/engine/utils/sourcemaps/mappings/spanned.ml b/engine/utils/sourcemaps/mappings/spanned.ml new file mode 100644 index 000000000..965485025 --- /dev/null +++ b/engine/utils/sourcemaps/mappings/spanned.ml @@ -0,0 +1,44 @@ +open Prelude +open Types + +type t = Location.t Dual.t * Location.t option Dual.t * meta +[@@deriving show, eq] + +let to_points (pts : t list) : point list = + List.map pts ~f:Option.some + |> Fn.flip List.append [ None ] + |> List.folding_map ~init:None ~f:(fun acc x -> + let prev_end = + match (acc, x) with + | Some end_, Some (start, _, _) + when [%eq: Location.t] start.Dual.gen end_.Dual.gen |> not -> + Some end_ + | Some end_, None -> Some end_ + | _ -> None + in + let out, end_ = + match x with + | Some (start, end_, meta) -> + ([ (start, Some meta) ], Dual.transpose ~default:start end_) + | None -> ([], None) + in + ( end_, + (prev_end |> Option.map ~f:(fun e -> (e, None)) |> Option.to_list) + @ out )) + |> List.concat + +let from_points : point list -> t list = + List.rev + >> List.folding_map + ~init:(None, Map.empty (module Int)) + ~f:(fun (gen_loc_0, src_locs) ((loc_start : _ Dual.t), meta) -> + match meta with + | Some meta -> + let src_loc_0 = Map.find src_locs meta.file_offset in + let src_locs = + Map.set src_locs ~key:meta.file_offset ~data:loc_start.src + in + let loc_end = Dual.{ gen = gen_loc_0; src = src_loc_0 } in + ((Some loc_start.gen, src_locs), Some (loc_start, loc_end, meta)) + | None -> ((Some loc_start.gen, src_locs), None)) + >> List.filter_map ~f:Fn.id >> List.rev diff --git a/engine/utils/sourcemaps/mappings/types.ml b/engine/utils/sourcemaps/mappings/types.ml new file mode 100644 index 000000000..be2cd146e --- /dev/null +++ b/engine/utils/sourcemaps/mappings/types.ml @@ -0,0 +1,4 @@ +open 
Prelude + +type meta = { file_offset : int; name : int option } [@@deriving show, eq] +type point = Location.t Dual.t * meta option [@@deriving show, eq] diff --git a/engine/utils/sourcemaps/prelude.ml b/engine/utils/sourcemaps/prelude.ml new file mode 100644 index 000000000..e4d6ca4bd --- /dev/null +++ b/engine/utils/sourcemaps/prelude.ml @@ -0,0 +1,5 @@ +include Base +include Ppx_yojson_conv_lib.Yojson_conv.Primitives + +let ( << ) f g x = f (g x) +let ( >> ) f g x = g (f x) diff --git a/engine/utils/sourcemaps/source_maps.ml b/engine/utils/sourcemaps/source_maps.ml new file mode 100644 index 000000000..6da383baa --- /dev/null +++ b/engine/utils/sourcemaps/source_maps.ml @@ -0,0 +1,53 @@ +open Prelude +module Location = Location +include Mappings + +type mapping = { + gen : range; + src : range; + source : string; + name : string option; +} + +type t = { + mappings : string; + sourceRoot : string; + sources : string list; + sourcesContent : string option list; + names : string list; + version : int; + file : string; +} +[@@deriving yojson] + +let dedup_freq (l : string list) : string list = + let hashtbl : (string, int) Hashtbl.t = Hashtbl.create (module String) in + List.iter ~f:(Hashtbl.incr hashtbl) l; + Hashtbl.to_alist hashtbl + |> List.sort ~compare:(fun (_, x) (_, y) -> Int.(y - x)) + |> List.map ~f:fst + +let mk ?(file = "") ?(sourceRoot = "") ?(sourcesContent = fun _ -> None) + (mappings : mapping list) : t = + let sources = List.map ~f:(fun x -> x.source) mappings |> dedup_freq in + let names = List.filter_map ~f:(fun x -> x.name) mappings |> dedup_freq in + let f { gen; src; source; name } = + let file_offset, _ = + List.findi_exn ~f:(fun _ -> String.equal source) sources + in + let name = + Option.map + ~f:(fun name -> + List.findi_exn ~f:(fun _ -> String.equal name) names |> fst) + name + in + let meta = { file_offset; name } in + Chunk.{ gen; src; meta } + in + let mappings = List.map mappings ~f |> List.sort ~compare:Chunk.compare in + 
Stdlib.prerr_endline @@ [%show: Chunk.t list] mappings; + let mappings = Mappings.encode mappings in + let sourcesContent = List.map ~f:sourcesContent sources in + { mappings; sourceRoot; sourcesContent; sources; names; version = 3; file } + +let to_json = [%yojson_of: t] >> Yojson.Safe.pretty_to_string diff --git a/engine/utils/sourcemaps/source_maps.mli b/engine/utils/sourcemaps/source_maps.mli new file mode 100644 index 000000000..73105053b --- /dev/null +++ b/engine/utils/sourcemaps/source_maps.mli @@ -0,0 +1,33 @@ +type range = { start : Location.t; end_ : Location.t option } + +module Location : sig + type t = { line : int; col : int } [@@deriving eq] +end + +type mapping = { + gen : range; + src : range; + source : string; + name : string option; +} +(** A source file to generated file mapping *) + +type t = { + mappings : string; + sourceRoot : string; + sources : string list; + sourcesContent : string option list; + names : string list; + version : int; + file : string; +} +[@@deriving yojson] + +val mk : + ?file:string -> + ?sourceRoot:string -> + ?sourcesContent:(string -> string option) -> + mapping list -> + t + +val to_json : t -> string diff --git a/engine/utils/sourcemaps/vql.ml b/engine/utils/sourcemaps/vql.ml new file mode 100644 index 000000000..50bc07a45 --- /dev/null +++ b/engine/utils/sourcemaps/vql.ml @@ -0,0 +1,48 @@ +open Prelude + +let rec encode_one ?(first = true) (n : int) : int list = + let n = if first then (Int.abs n lsl 1) + if n < 0 then 1 else 0 else n in + let lhs, rhs = (n lsr 5, n land 0b11111) in + let last = Int.equal lhs 0 in + let output = (if last then 0b000000 else 0b100000) lor rhs in + output :: (if last then [] else encode_one ~first:false lhs) + +let encode : int list -> int list = List.concat_map ~f:encode_one + +let encode_base64 : int list -> string = + encode >> List.map ~f:Base64.encode >> String.of_char_list + +let rec decode_one' (first : bool) (l : int list) : int * int list = + match l with + | [] -> (0, []) 
+ | hd :: tl -> + assert (hd < 64); + let c = Int.shift_right hd 5 |> Int.bit_and 0b1 in + let last = Int.equal c 0 in + if first then + let sign = match Int.bit_and hd 0b1 with 1 -> -1 | _ -> 1 in + let hd = Int.shift_right hd 1 |> Int.bit_and 0b1111 in + if last then (sign * hd, tl) + else + let next, tl = decode_one' false tl in + let value = hd + Int.shift_left next 4 in + (sign * value, tl) + else + let hd = Int.bit_and hd 0b11111 in + if last then (hd, tl) + else + let next, tl = decode_one' false tl in + (hd + Int.shift_left next 5, tl) + +let rec decode (l : int list) : int list = + match decode_one' true l with n, [] -> [ n ] | n, tl -> n :: decode tl + +let decode_base64 : string -> int list = + String.to_list >> List.map ~f:Base64.decode >> decode + +let%test _ = + let tests = + [ [ 132; 6; 2323; 64; 32; 63; 31; 65; 33 ]; [ 133123232 ]; [ 0; 0; 0 ] ] + in + let tests = tests @ List.map ~f:(List.map ~f:(fun x -> -x)) tests in + List.for_all ~f:(fun x -> [%eq: int list] x (encode x |> decode)) tests diff --git a/examples/.envrc b/examples/.envrc index 7045e0610..42800cde0 100644 --- a/examples/.envrc +++ b/examples/.envrc @@ -1 +1 @@ -use flake .#fstar +use flake .#examples diff --git a/examples/Cargo.lock b/examples/Cargo.lock index 61ed48fed..af90ddc48 100644 --- a/examples/Cargo.lock +++ b/examples/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "anyhow" @@ -59,6 +59,12 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + [[package]] name = "byteorder" version = "1.5.0" @@ -110,6 +116,7 @@ version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ + "jobserver", "libc", ] @@ -137,6 +144,13 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "coq-example" +version = "0.1.0" +dependencies = [ + "hax-lib", +] + [[package]] name = "cpufeatures" version = "0.2.11" @@ -181,6 +195,16 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "duplicate" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de78e66ac9061e030587b2a2e75cc88f22304913c907b11307bca737141230cb" +dependencies = [ + "heck", + "proc-macro-error", +] + [[package]] name = "either" version = "1.9.0" @@ -210,8 +234,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi", + "wasm-bindgen", ] [[package]] @@ -222,14 +248,16 @@ checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" [[package]] name = "hax-bounded-integers" -version = "0.1.0-pre.1" +version = "0.1.0-alpha.1" dependencies = [ + "duplicate", "hax-lib", + "paste", ] [[package]] name = "hax-lib" -version = "0.1.0-pre.1" +version = "0.1.0-alpha.1" dependencies = [ "hax-lib-macros", "num-bigint", @@ -238,9 +266,10 @@ dependencies = [ [[package]] name = "hax-lib-macros" -version = "0.1.0-pre.1" +version = "0.1.0-alpha.1" dependencies = [ "hax-lib-macros-types", + "paste", "proc-macro-error", "proc-macro2", "quote", @@ -249,7 
+278,7 @@ dependencies = [ [[package]] name = "hax-lib-macros-types" -version = "0.1.0-pre.1" +version = "0.1.0-alpha.1" dependencies = [ "proc-macro2", "quote", @@ -258,6 +287,12 @@ dependencies = [ "uuid", ] +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + [[package]] name = "hex" version = "0.4.3" @@ -327,6 +362,24 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "kyber_compress" version = "0.1.0" @@ -353,6 +406,48 @@ version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +[[package]] +name = "libcrux" +version = "0.0.2-pre.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31d9dcd435758db03438089760c55a45e6bcab7e4e299ee261f75225ab29d482" +dependencies = [ + "getrandom", + "libcrux-hacl", + "libcrux-platform", + "libjade-sys", + "rand", +] + +[[package]] +name = "libcrux-hacl" +version = "0.0.2-pre.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52b2581ce493c5c22700077b5552b47be69b67b8176716572b02856218db0b68" +dependencies = [ + "cc", + "libcrux-platform", +] + +[[package]] +name = "libcrux-platform" +version = "0.0.2-pre.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "647e39666194b11df17c19451d1154b9be79df98b9821532560c2ecad0cf3410" +dependencies = [ + "libc", +] + +[[package]] +name = "libjade-sys" +version = "0.0.2-pre.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec4d22bba476bf8f5aebe36ccfd0e56dba8707e0c3b5c76996576028f48ffb8e" +dependencies = [ + "cc", + "libcrux-platform", +] + [[package]] name = "lob_backend" version = "0.1.0" @@ -364,6 +459,12 @@ dependencies = [ "serde", ] +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + [[package]] name = "memchr" version = "2.6.4" @@ -430,9 +531,18 @@ checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "pretty" @@ -488,6 +598,15 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "proverif-psk" +version = "0.1.0" +dependencies = [ + "hax-lib", + "libcrux", + "rand", +] + [[package]] name = "psm" version = "0.1.21" @@ -506,6 +625,36 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + [[package]] name = "rustversion" version = "1.0.14" @@ -713,6 +862,61 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasm-bindgen" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.39", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.95" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" + [[package]] name = "winapi" version = "0.3.9" @@ -752,3 +956,24 @@ checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b" dependencies = [ "memchr", ] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] diff --git a/examples/Cargo.toml b/examples/Cargo.toml index bcdf6b58c..9b7eceb41 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -5,7 +5,8 @@ members = [ "sha256", "barrett", "kyber_compress", -] + "proverif-psk", + "coq-example"] resolver = "2" [workspace.dependencies] diff --git a/examples/Makefile b/examples/Makefile index 69cb20696..583193f08 100644 --- a/examples/Makefile +++ b/examples/Makefile @@ -5,6 +5,7 @@ default: OTHERFLAGS="--lax" make -C sha256 make -C barrett make -C kyber_compress + make -C proverif-psk clean: make -C limited-order-book clean @@ -12,3 +13,4 @@ clean: make -C sha256 clean make -C barrett clean make -C kyber_compress clean + make -C proverif-psk clean diff --git a/examples/README.md b/examples/README.md index 462d02101..03aff939f 100644 --- a/examples/README.md +++ b/examples/README.md @@ -15,7 +15,7 @@ First, make sure to have hax installed in PATH. Then: - * With Nix, `nix develop .#fstar` setups a shell automatically for you. + * With Nix, `nix develop .#examples` setups a shell automatically for you. * Without Nix: 1. 
install F* `v2024.01.13` manually (see https://github.com/FStarLang/FStar/blob/master/INSTALL.md); diff --git a/examples/coq-example/Cargo.toml b/examples/coq-example/Cargo.toml new file mode 100644 index 000000000..45c3973f8 --- /dev/null +++ b/examples/coq-example/Cargo.toml @@ -0,0 +1,7 @@ +[package] +name = "coq-example" +version = "0.1.0" +edition = "2021" + +[dependencies] +hax-lib = {path = "../../hax-lib" } diff --git a/examples/coq-example/README.md b/examples/coq-example/README.md new file mode 100644 index 000000000..3a1fbc7db --- /dev/null +++ b/examples/coq-example/README.md @@ -0,0 +1,49 @@ +# Stack example +This example is a simple interpreter for a stack. + +## How to build +```sh +cargo hax into coq +``` + +## Coq +Now we have the file `proofs/coq/extraction/Coq_example.v`. +To run the files we first need to install some dependencies. + +### Dependencies for Coq +The coq backend depends on `coq-record-update` to implement Rust record updates. +This can be installed by +```sh +opam install coq-record-update +``` +or alternatively the import lines +```coq +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. +``` +can be commented out. + +## Library required to run +As Rust implicitly imports the `Core` library for a lot of the basic functionality, we will also require a core library for Coq. For this small example, we build a dummy library with the required definitions, to run the example. As a hack to get this to run we add +``` +mod dummy_core_lib; +use dummy_core_lib::*; +``` +to the Rust example file `src/lib.rs`. The definitions of the library are put into `proofs/coq/extraction/dummy_core_lib.v` to match this import. + +## Running the code and doing proofs +We can set up a Coq project by making a `_CoqProject` file in `proofs/coq/extraction/`. 
+``` +-R ./ Coq_example +-arg -w +-arg all + +./dummy_core_lib.v +./Coq_example.v +./Coq_proofs.v +``` +We then build a makefile from the project file by +```sh +coq_makefile -f _CoqProject -o Makefile +``` +and run `make` to build. Any tests and proofs, we put into a seperate file `proofs/coq/extraction/Coq_proofs.v`. which imports the generated file, such that we can update and regenerate the file, without overwriting the proofs. diff --git a/examples/coq-example/proofs/coq/extraction/Coq_example.v b/examples/coq-example/proofs/coq/extraction/Coq_example.v new file mode 100644 index 000000000..9bcf0a4d4 --- /dev/null +++ b/examples/coq-example/proofs/coq/extraction/Coq_example.v @@ -0,0 +1,118 @@ +(* File automatically generated by Hacspec *) +From Coq Require Import ZArith. +Require Import List. +Import List.ListNotations. +Open Scope Z_scope. +Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. + + + +From Coq_example Require Import dummy_core_lib. +Export dummy_core_lib. + +Inductive t_Instruction : Type := +| Instruction_Push : t_isize -> _ +| Instruction_Pop +| Instruction_Add +| Instruction_Sub +| Instruction_Mul +| Instruction_Not +| Instruction_Dup. +Arguments t_Instruction:clear implicits. +Arguments t_Instruction. 
+ +(* NotImplementedYet *) + +(* NotImplementedYet *) + +Definition impl__Instruction__interpret (self : t_Instruction) (stack : t_Vec ((t_isize)) ((t_Global))) : t_Vec ((t_isize)) ((t_Global)) := + let (stack,hax_temp_output) := match self with + | Instruction_Push (v) => + (impl_1__push (stack) (v),tt) + | Instruction_Pop => + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let _ := out in + (stack,tt) + | Instruction_Add => + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist2 := out in + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist1 := out in + let hoist3 := (hoist2,hoist1) in + match hoist3 with + | (Option_Some (a),Option_Some (b)) => + (impl_1__push (stack) (t_Add_f_add (b) (a)),tt) + | _ => + (stack,tt) + end + | Instruction_Sub => + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist5 := out in + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist4 := out in + let hoist6 := (hoist5,hoist4) in + match hoist6 with + | (Option_Some (a),Option_Some (b)) => + (impl_1__push (stack) (sub (b) (a)),tt) + | _ => + (stack,tt) + end + | Instruction_Mul => + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist8 := out in + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist7 := out in + let hoist9 := (hoist8,hoist7) in + match hoist9 with + | (Option_Some (a),Option_Some (b)) => + (impl_1__push (stack) (t_Mul_f_mul (b) (a)),tt) + | _ => + (stack,tt) + end + | Instruction_Not => + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist10 := out in + match hoist10 with + | Option_Some (a) => + (impl_1__push (stack) (if + t_PartialEq_f_eq (a) (0) + then + 1 + else + 0),tt) + | _ => + (stack,tt) + end + | Instruction_Dup => + let (tmp0,out) := impl_1__pop (stack) in + let stack := tmp0 in + let hoist11 := out in + match hoist11 with + | Option_Some (a) => + let stack := impl_1__push 
(stack) (a) in + let stack := impl_1__push (stack) (a) in + (stack,tt) + | _ => + (stack,tt) + end + end in + stack. + +Definition example (_ : unit) : t_Vec ((t_isize)) ((t_Global)) := + let stk := impl__new (tt) in + let stk := f_fold (f_into_iter ([Instruction_Push (1); Instruction_Push (1); Instruction_Add; Instruction_Push (1); Instruction_Push (1); Instruction_Push (1); Instruction_Add; Instruction_Add; Instruction_Dup; Instruction_Mul; Instruction_Sub])) (stk) (fun stk cmd => + impl__Instruction__interpret (cmd) (stk)) in + stk. diff --git a/examples/coq-example/proofs/coq/extraction/Coq_proofs.v b/examples/coq-example/proofs/coq/extraction/Coq_proofs.v new file mode 100644 index 000000000..219ce53cc --- /dev/null +++ b/examples/coq-example/proofs/coq/extraction/Coq_proofs.v @@ -0,0 +1,33 @@ +(* Handwritten Proofs *) + +From Coq Require Import ZArith. +Require Import List. +Import List.ListNotations. + +From Coq_example Require Import Coq_example. + + +(* Check example *) +Example is_example_correct : example tt = [-7]. Proof. reflexivity. Qed. + +(* Proof composite operations *) +Theorem dup_mul_is_square : forall x, + impl__Instruction__interpret Instruction_Mul ( + impl__Instruction__interpret Instruction_Dup [x]) + = [Z.pow x 2]. +Proof. + intros. + cbn. + rewrite Z.mul_1_r. + reflexivity. +Qed. + +Theorem push_pop_cancel : forall l x, + impl__Instruction__interpret Instruction_Pop ( + impl__Instruction__interpret (Instruction_Push x) l) + = l. +Proof. + intros. + cbn. + reflexivity. +Qed. 
diff --git a/examples/coq-example/proofs/coq/extraction/Makefile b/examples/coq-example/proofs/coq/extraction/Makefile new file mode 100644 index 000000000..42626b668 --- /dev/null +++ b/examples/coq-example/proofs/coq/extraction/Makefile @@ -0,0 +1,989 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # Copyright INRIA, CNRS and contributors ## +## /dev/null 2>/dev/null; echo $$?)) +STDTIME?=command time -f $(TIMEFMT) +else +ifeq (0,$(shell gtime -f "" true >/dev/null 2>/dev/null; echo $$?)) +STDTIME?=gtime -f $(TIMEFMT) +else +STDTIME?=command time +endif +endif +else +STDTIME?=command time -f $(TIMEFMT) +endif + +COQBIN?= +ifneq (,$(COQBIN)) +# add an ending / +COQBIN:=$(COQBIN)/ +endif + +# Coq binaries +COQC ?= "$(COQBIN)coqc" +COQTOP ?= "$(COQBIN)coqtop" +COQCHK ?= "$(COQBIN)coqchk" +COQNATIVE ?= "$(COQBIN)coqnative" +COQDEP ?= "$(COQBIN)coqdep" +COQDOC ?= "$(COQBIN)coqdoc" +COQPP ?= "$(COQBIN)coqpp" +COQMKFILE ?= "$(COQBIN)coq_makefile" +OCAMLLIBDEP ?= "$(COQBIN)ocamllibdep" + +# Timing scripts +COQMAKE_ONE_TIME_FILE ?= "$(COQCORELIB)/tools/make-one-time-file.py" +COQMAKE_BOTH_TIME_FILES ?= "$(COQCORELIB)/tools/make-both-time-files.py" +COQMAKE_BOTH_SINGLE_TIMING_FILES ?= "$(COQCORELIB)/tools/make-both-single-timing-files.py" +BEFORE ?= +AFTER ?= + +# OCaml binaries +CAMLC ?= "$(OCAMLFIND)" ocamlc -c +CAMLOPTC ?= "$(OCAMLFIND)" opt -c +CAMLLINK ?= "$(OCAMLFIND)" ocamlc -linkall +CAMLOPTLINK ?= "$(OCAMLFIND)" opt -linkall +CAMLDOC ?= "$(OCAMLFIND)" ocamldoc +CAMLDEP ?= "$(OCAMLFIND)" ocamldep -slash -ml-synonym .mlpack + +# DESTDIR is prepended to all installation paths +DESTDIR ?= + +# Debug builds, typically -g to OCaml, -debug to Coq. 
+CAMLDEBUG ?= +COQDEBUG ?= + +# Extra packages to be linked in (as in findlib -package) +CAMLPKGS ?= +FINDLIBPKGS = -package coq-core.plugins.ltac $(CAMLPKGS) + +# Option for making timing files +TIMING?= +# Option for changing sorting of timing output file +TIMING_SORT_BY ?= auto +# Option for changing the fuzz parameter on the output file +TIMING_FUZZ ?= 0 +# Option for changing whether to use real or user time for timing tables +TIMING_REAL?= +# Option for including the memory column(s) +TIMING_INCLUDE_MEM?= +# Option for sorting by the memory column +TIMING_SORT_BY_MEM?= +# Output file names for timed builds +TIME_OF_BUILD_FILE ?= time-of-build.log +TIME_OF_BUILD_BEFORE_FILE ?= time-of-build-before.log +TIME_OF_BUILD_AFTER_FILE ?= time-of-build-after.log +TIME_OF_PRETTY_BUILD_FILE ?= time-of-build-pretty.log +TIME_OF_PRETTY_BOTH_BUILD_FILE ?= time-of-build-both.log +TIME_OF_PRETTY_BUILD_EXTRA_FILES ?= - # also output to the command line + +TGTS ?= + +# Retro compatibility (DESTDIR is standard on Unix, DSTROOT is not) +ifdef DSTROOT +DESTDIR := $(DSTROOT) +endif + +# Substitution of the path by appending $(DESTDIR) if needed. +# The variable $(COQMF_WINDRIVE) can be needed for Cygwin environments. +windrive_path = $(if $(COQMF_WINDRIVE),$(subst $(COQMF_WINDRIVE),/,$(1)),$(1)) +destination_path = $(if $(DESTDIR),$(DESTDIR)/$(call windrive_path,$(1)),$(1)) + +# Installation paths of libraries and documentation. +COQLIBINSTALL ?= $(call destination_path,$(COQLIB)/user-contrib) +COQDOCINSTALL ?= $(call destination_path,$(DOCDIR)/coq/user-contrib) +COQPLUGININSTALL ?= $(call destination_path,$(COQCORELIB)/..) +COQTOPINSTALL ?= $(call destination_path,$(COQLIB)/toploop) # FIXME: Unused variable? 
+ +# findlib files installation +FINDLIBPREINST= mkdir -p "$(COQPLUGININSTALL)/" +FINDLIBDESTDIR= -destdir "$(COQPLUGININSTALL)/" + +# we need to move out of sight $(METAFILE) otherwise findlib thinks the +# package is already installed +findlib_install = \ + $(HIDE)if [ "$(METAFILE)" ]; then \ + $(FINDLIBPREINST) && \ + mv "$(METAFILE)" "$(METAFILE).skip" ; \ + "$(OCAMLFIND)" install $(2) $(FINDLIBDESTDIR) $(FINDLIBPACKAGE) $(1); \ + rc=$$?; \ + mv "$(METAFILE).skip" "$(METAFILE)"; \ + exit $$rc; \ + fi +findlib_remove = \ + $(HIDE)if [ ! -z "$(METAFILE)" ]; then\ + "$(OCAMLFIND)" remove $(FINDLIBDESTDIR) $(FINDLIBPACKAGE); \ + fi + + +########## End of parameters ################################################## +# What follows may be relevant to you only if you need to +# extend this Makefile. If so, look for 'Extension point' here and +# put in Makefile.local double colon rules accordingly. +# E.g. to perform some work after the all target completes you can write +# +# post-all:: +# echo "All done!" +# +# in Makefile.local +# +############################################################################### + + + + +# Flags ####################################################################### +# +# We define a bunch of variables combining the parameters. +# To add additional flags to coq, coqchk or coqdoc, set the +# {COQ,COQCHK,COQDOC}EXTRAFLAGS variable to whatever you want to add. +# To overwrite the default choice and set your own flags entirely, set the +# {COQ,COQCHK,COQDOC}FLAGS variable. 
+ +SHOW := $(if $(VERBOSE),@true "",@echo "") +HIDE := $(if $(VERBOSE),,@) + +TIMER=$(if $(TIMED), $(STDTIME), $(TIMECMD)) + +OPT?= + +# The DYNOBJ and DYNLIB variables are used by "coqdep -dyndep var" in .v.d +ifeq '$(OPT)' '-byte' +USEBYTE:=true +DYNOBJ:=.cma +DYNLIB:=.cma +else +USEBYTE:= +DYNOBJ:=.cmxs +DYNLIB:=.cmxs +endif + +# these variables are meant to be overridden if you want to add *extra* flags +COQEXTRAFLAGS?= +COQCHKEXTRAFLAGS?= +COQDOCEXTRAFLAGS?= + +# Find the last argument of the form "-native-compiler FLAG" +COQUSERNATIVEFLAG:=$(strip \ +$(subst -native-compiler-,,\ +$(lastword \ +$(filter -native-compiler-%,\ +$(subst -native-compiler ,-native-compiler-,\ +$(strip $(COQEXTRAFLAGS))))))) + +COQFILTEREDEXTRAFLAGS:=$(strip \ +$(filter-out -native-compiler-%,\ +$(subst -native-compiler ,-native-compiler-,\ +$(strip $(COQEXTRAFLAGS))))) + +COQACTUALNATIVEFLAG:=$(lastword $(COQMF_COQ_NATIVE_COMPILER_DEFAULT) $(COQMF_COQPROJECTNATIVEFLAG) $(COQUSERNATIVEFLAG)) + +ifeq '$(COQACTUALNATIVEFLAG)' 'yes' + COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "ondemand" + COQDONATIVE="yes" +else +ifeq '$(COQACTUALNATIVEFLAG)' 'ondemand' + COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "ondemand" + COQDONATIVE="no" +else + COQNATIVEFLAG="-w" "-deprecated-native-compiler-option" "-native-compiler" "no" + COQDONATIVE="no" +endif +endif + +# these flags do NOT contain the libraries, to make them easier to overwrite +COQFLAGS?=-q $(OTHERFLAGS) $(COQFILTEREDEXTRAFLAGS) $(COQNATIVEFLAG) +COQCHKFLAGS?=-silent -o $(COQCHKEXTRAFLAGS) +COQDOCFLAGS?=-interpolate -utf8 $(COQDOCEXTRAFLAGS) + +COQDOCLIBS?=$(COQLIBS_NOML) + +# The version of Coq being run and the version of coq_makefile that +# generated this makefile +COQ_VERSION:=$(shell $(COQC) --print-version | cut -d " " -f 1) +COQMAKEFILE_VERSION:=8.18.0 + +# COQ_SRC_SUBDIRS is for user-overriding, usually to add +# `user-contrib/Foo` to the includes, we keep 
COQCORE_SRC_SUBDIRS for +# Coq's own core libraries, which should be replaced by ocamlfind +# options at some point. +COQ_SRC_SUBDIRS?= +COQSRCLIBS?= $(foreach d,$(COQ_SRC_SUBDIRS), -I "$(COQLIB)/$(d)") + +CAMLFLAGS+=$(OCAMLLIBS) $(COQSRCLIBS) +# ocamldoc fails with unknown argument otherwise +CAMLDOCFLAGS:=$(filter-out -annot, $(filter-out -bin-annot, $(CAMLFLAGS))) +CAMLFLAGS+=$(OCAMLWARN) + +ifneq (,$(TIMING)) + ifeq (after,$(TIMING)) + TIMING_EXT=after-timing + else + ifeq (before,$(TIMING)) + TIMING_EXT=before-timing + else + TIMING_EXT=timing + endif + endif + TIMING_ARG=-time-file $<.$(TIMING_EXT) +else + TIMING_ARG= +endif + +# Files ####################################################################### +# +# We here define a bunch of variables about the files being part of the +# Coq project in order to ease the writing of build target and build rules + +VDFILE := .Makefile.d + +ALLSRCFILES := \ + $(MLGFILES) \ + $(MLFILES) \ + $(MLPACKFILES) \ + $(MLLIBFILES) \ + $(MLIFILES) + +# helpers +vo_to_obj = $(addsuffix .o,\ + $(filter-out Warning: Error:,\ + $(shell $(COQTOP) -q -noinit -batch -quiet -print-mod-uid $(1)))) +strip_dotslash = $(patsubst ./%,%,$(1)) + +# without this we get undefined variables in the expansion for the +# targets of the [deprecated,use-mllib-or-mlpack] rule +with_undef = $(if $(filter-out undefined, $(origin $(1))),$($(1))) + +VO = vo +VOS = vos + +VOFILES = $(VFILES:.v=.$(VO)) +GLOBFILES = $(VFILES:.v=.glob) +HTMLFILES = $(VFILES:.v=.html) +GHTMLFILES = $(VFILES:.v=.g.html) +BEAUTYFILES = $(addsuffix .beautified,$(VFILES)) +TEXFILES = $(VFILES:.v=.tex) +GTEXFILES = $(VFILES:.v=.g.tex) +CMOFILES = \ + $(MLGFILES:.mlg=.cmo) \ + $(MLFILES:.ml=.cmo) \ + $(MLPACKFILES:.mlpack=.cmo) +CMXFILES = $(CMOFILES:.cmo=.cmx) +OFILES = $(CMXFILES:.cmx=.o) +CMAFILES = $(MLLIBFILES:.mllib=.cma) $(MLPACKFILES:.mlpack=.cma) +CMXAFILES = $(CMAFILES:.cma=.cmxa) +CMIFILES = \ + $(CMOFILES:.cmo=.cmi) \ + $(MLIFILES:.mli=.cmi) +# the /if/ is because old 
_CoqProject did not list a .ml(pack|lib) but just +# a .mlg file +CMXSFILES = \ + $(MLPACKFILES:.mlpack=.cmxs) \ + $(CMXAFILES:.cmxa=.cmxs) \ + $(if $(MLPACKFILES)$(CMXAFILES),,\ + $(MLGFILES:.mlg=.cmxs) $(MLFILES:.ml=.cmxs)) + +# files that are packed into a plugin (no extension) +PACKEDFILES = \ + $(call strip_dotslash, \ + $(foreach lib, \ + $(call strip_dotslash, \ + $(MLPACKFILES:.mlpack=_MLPACK_DEPENDENCIES)),$(call with_undef,$(lib)))) +# files that are archived into a .cma (mllib) +LIBEDFILES = \ + $(call strip_dotslash, \ + $(foreach lib, \ + $(call strip_dotslash, \ + $(MLLIBFILES:.mllib=_MLLIB_DEPENDENCIES)),$(call with_undef,$(lib)))) +CMIFILESTOINSTALL = $(filter-out $(addsuffix .cmi,$(PACKEDFILES)),$(CMIFILES)) +CMOFILESTOINSTALL = $(filter-out $(addsuffix .cmo,$(PACKEDFILES)),$(CMOFILES)) +OBJFILES = $(call vo_to_obj,$(VOFILES)) +ALLNATIVEFILES = \ + $(OBJFILES:.o=.cmi) \ + $(OBJFILES:.o=.cmx) \ + $(OBJFILES:.o=.cmxs) +FINDLIBPACKAGE=$(patsubst .%,%,$(suffix $(METAFILE))) + +# trick: wildcard filters out non-existing files, so that `install` doesn't show +# warnings and `clean` doesn't pass to rm a list of files that is too long for +# the shell. 
+NATIVEFILES = $(wildcard $(ALLNATIVEFILES)) +FILESTOINSTALL = \ + $(VOFILES) \ + $(VFILES) \ + $(GLOBFILES) \ + $(NATIVEFILES) \ + $(CMXSFILES) # to be removed when we remove legacy loading +FINDLIBFILESTOINSTALL = \ + $(CMIFILESTOINSTALL) +ifeq '$(HASNATDYNLINK)' 'true' +DO_NATDYNLINK = yes +FINDLIBFILESTOINSTALL += $(CMXSFILES) $(CMXAFILES) $(CMOFILESTOINSTALL:.cmo=.cmx) +else +DO_NATDYNLINK = +endif + +ALLDFILES = $(addsuffix .d,$(ALLSRCFILES)) $(VDFILE) + +# Compilation targets ######################################################### + +all: + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all +.PHONY: all + +all.timing.diff: + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all.timing.diff TIME_OF_PRETTY_BUILD_EXTRA_FILES="" + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all +.PHONY: all.timing.diff + +ifeq (0,$(TIMING_REAL)) +TIMING_REAL_ARG := +TIMING_USER_ARG := --user +else +ifeq (1,$(TIMING_REAL)) +TIMING_REAL_ARG := --real +TIMING_USER_ARG := +else +TIMING_REAL_ARG := +TIMING_USER_ARG := +endif +endif + +ifeq (0,$(TIMING_INCLUDE_MEM)) +TIMING_INCLUDE_MEM_ARG := --no-include-mem +else +TIMING_INCLUDE_MEM_ARG := +endif + +ifeq (1,$(TIMING_SORT_BY_MEM)) +TIMING_SORT_BY_MEM_ARG := --sort-by-mem +else +TIMING_SORT_BY_MEM_ARG := +endif + +make-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE) +make-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE) +make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: + $(HIDE)rm -f pretty-timed-success.ok + $(HIDE)($(MAKE) --no-print-directory -f "$(PARENT)" $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a $(TIME_OF_BUILD_FILE) + $(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch failed +print-pretty-timed:: + 
$(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) +print-pretty-timed-diff:: + $(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) +ifeq (,$(BEFORE)) +print-pretty-single-time-diff:: + @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' + $(HIDE)false +else +ifeq (,$(AFTER)) +print-pretty-single-time-diff:: + @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' + $(HIDE)false +else +print-pretty-single-time-diff:: + $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --fuzz=$(TIMING_FUZZ) --sort-by=$(TIMING_SORT_BY) $(TIMING_USER_ARG) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) +endif +endif +pretty-timed: + $(HIDE)$(MAKE) --no-print-directory -f "$(PARENT)" make-pretty-timed + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-timed +.PHONY: pretty-timed make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff + +# Extension points for actions to be performed before/after the all target +pre-all:: + @# Extension point + $(HIDE)if [ "$(COQMAKEFILE_VERSION)" != "$(COQ_VERSION)" ]; then\ + echo "W: This Makefile was generated by Coq $(COQMAKEFILE_VERSION)";\ + echo "W: while the current Coq version is $(COQ_VERSION)";\ + fi +.PHONY: pre-all + +post-all:: + @# Extension point +.PHONY: post-all + +real-all: $(VOFILES) $(if $(USEBYTE),bytefiles,optfiles) +.PHONY: real-all + +real-all.timing.diff: $(VOFILES:.vo=.v.timing.diff) +.PHONY: 
real-all.timing.diff + +bytefiles: $(CMOFILES) $(CMAFILES) +.PHONY: bytefiles + +optfiles: $(if $(DO_NATDYNLINK),$(CMXSFILES)) +.PHONY: optfiles + +# FIXME, see Ralf's bugreport +# quick is deprecated, now renamed vio +vio: $(VOFILES:.vo=.vio) +.PHONY: vio +quick: vio + $(warning "'make quick' is deprecated, use 'make vio' or consider using 'vos' files") +.PHONY: quick + +vio2vo: + $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \ + -schedule-vio2vo $(J) $(VOFILES:%.vo=%.vio) +.PHONY: vio2vo + +# quick2vo is undocumented +quick2vo: + $(HIDE)make -j $(J) vio + $(HIDE)VIOFILES=$$(for vofile in $(VOFILES); do \ + viofile="$$(echo "$$vofile" | sed "s/\.vo$$/.vio/")"; \ + if [ "$$vofile" -ot "$$viofile" -o ! -e "$$vofile" ]; then printf "$$viofile "; fi; \ + done); \ + echo "VIO2VO: $$VIOFILES"; \ + if [ -n "$$VIOFILES" ]; then \ + $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -schedule-vio2vo $(J) $$VIOFILES; \ + fi +.PHONY: quick2vo + +checkproofs: + $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \ + -schedule-vio-checking $(J) $(VOFILES:%.vo=%.vio) +.PHONY: checkproofs + +vos: $(VOFILES:%.vo=%.vos) +.PHONY: vos + +vok: $(VOFILES:%.vo=%.vok) +.PHONY: vok + +validate: $(VOFILES) + $(TIMER) $(COQCHK) $(COQCHKFLAGS) $(COQLIBS_NOML) $^ +.PHONY: validate + +only: $(TGTS) +.PHONY: only + +# Documentation targets ####################################################### + +html: $(GLOBFILES) $(VFILES) + $(SHOW)'COQDOC -d html $(GAL)' + $(HIDE)mkdir -p html + $(HIDE)$(COQDOC) \ + -toc $(COQDOCFLAGS) -html $(GAL) $(COQDOCLIBS) -d html $(VFILES) + +mlihtml: $(MLIFILES:.mli=.cmi) + $(SHOW)'CAMLDOC -d $@' + $(HIDE)mkdir $@ || rm -rf $@/* + $(HIDE)$(CAMLDOC) -html \ + -d $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) $(FINDLIBPKGS) + +all-mli.tex: $(MLIFILES:.mli=.cmi) + $(SHOW)'CAMLDOC -latex $@' + $(HIDE)$(CAMLDOC) -latex \ + -o $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) $(FINDLIBPKGS) + +all.ps: $(VFILES) + $(SHOW)'COQDOC -ps $(GAL)' + $(HIDE)$(COQDOC) \ + 
-toc $(COQDOCFLAGS) -ps $(GAL) $(COQDOCLIBS) \ + -o $@ `$(COQDEP) -sort $(VFILES)` + +all.pdf: $(VFILES) + $(SHOW)'COQDOC -pdf $(GAL)' + $(HIDE)$(COQDOC) \ + -toc $(COQDOCFLAGS) -pdf $(GAL) $(COQDOCLIBS) \ + -o $@ `$(COQDEP) -sort $(VFILES)` + +# FIXME: not quite right, since the output name is different +gallinahtml: GAL=-g +gallinahtml: html + +all-gal.ps: GAL=-g +all-gal.ps: all.ps + +all-gal.pdf: GAL=-g +all-gal.pdf: all.pdf + +# ? +beautify: $(BEAUTYFILES) + for file in $^; do mv $${file%.beautified} $${file%beautified}old && mv $${file} $${file%.beautified}; done + @echo 'Do not do "make clean" until you are sure that everything went well!' + @echo 'If there were a problem, execute "for file in $$(find . -name \*.v.old -print); do mv $${file} $${file%.old}; done" in your shell/' +.PHONY: beautify + +# Installation targets ######################################################## +# +# There rules can be extended in Makefile.local +# Extensions can't assume when they run. + +# We use $(file) to avoid generating a very long command string to pass to the shell +# (cf https://coq.zulipchat.com/#narrow/stream/250632-Coq-Platform-devs-.26-users/topic/Strange.20command.20length.20limit.20on.20Linux) +# However Apple ships old make which doesn't have $(file) so we need a fallback +$(file >.hasfile,1) +HASFILE:=$(shell if [ -e .hasfile ]; then echo 1; rm .hasfile; fi) + +MKFILESTOINSTALL= $(if $(HASFILE),$(file >.filestoinstall,$(FILESTOINSTALL)),\ + $(shell rm -f .filestoinstall) \ + $(foreach x,$(FILESTOINSTALL),$(shell printf '%s\n' "$x" >> .filestoinstall))) + +# findlib needs the package to not be installed, so we remove it before +# installing it (see the call to findlib_remove) +install: META + @$(MKFILESTOINSTALL) + $(HIDE)code=0; for f in $$(cat .filestoinstall); do\ + if ! 
[ -f "$$f" ]; then >&2 echo $$f does not exist; code=1; fi \ + done; exit $$code + $(HIDE)for f in $$(cat .filestoinstall); do\ + df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`";\ + if [ "$$?" != "0" -o -z "$$df" ]; then\ + echo SKIP "$$f" since it has no logical path;\ + else\ + install -d "$(COQLIBINSTALL)/$$df" &&\ + install -m 0644 "$$f" "$(COQLIBINSTALL)/$$df" &&\ + echo INSTALL "$$f" "$(COQLIBINSTALL)/$$df";\ + fi;\ + done + $(call findlib_remove) + $(call findlib_install, META $(FINDLIBFILESTOINSTALL)) + $(HIDE)$(MAKE) install-extra -f "$(SELF)" + @rm -f .filestoinstall +install-extra:: + @# Extension point +.PHONY: install install-extra + +META: $(METAFILE) + $(HIDE)if [ "$(METAFILE)" ]; then \ + cat "$(METAFILE)" | grep -v 'directory.*=.*' > META; \ + fi + +install-byte: + $(call findlib_install, $(CMAFILES) $(CMOFILESTOINSTALL), -add) + +install-doc:: html mlihtml + @# Extension point + $(HIDE)install -d "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html" + $(HIDE)for i in html/*; do \ + dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\ + install -m 0644 "$$i" "$$dest";\ + echo INSTALL "$$i" "$$dest";\ + done + $(HIDE)install -d \ + "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml" + $(HIDE)for i in mlihtml/*; do \ + dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\ + install -m 0644 "$$i" "$$dest";\ + echo INSTALL "$$i" "$$dest";\ + done +.PHONY: install-doc + +uninstall:: + @# Extension point + @$(MKFILESTOINSTALL) + $(call findlib_remove) + $(HIDE)for f in $$(cat .filestoinstall); do \ + df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`" &&\ + instf="$(COQLIBINSTALL)/$$df/`basename $$f`" &&\ + rm -f "$$instf" &&\ + echo RM "$$instf" ;\ + done + $(HIDE)for f in $$(cat .filestoinstall); do \ + df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`" &&\ + echo RMDIR "$(COQLIBINSTALL)/$$df/" &&\ + (rmdir "$(COQLIBINSTALL)/$$df/" 2>/dev/null || true); \ + done + @rm -f .filestoinstall + +.PHONY: uninstall + +uninstall-doc:: + @# Extension point + $(SHOW)'RM 
$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html' + $(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html" + $(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml' + $(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml" + $(HIDE) rmdir "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/" || true +.PHONY: uninstall-doc + +# Cleaning #################################################################### +# +# There rules can be extended in Makefile.local +# Extensions can't assume when they run. + +clean:: + @# Extension point + $(SHOW)'CLEAN' + $(HIDE)rm -f $(CMOFILES) + $(HIDE)rm -f $(CMIFILES) + $(HIDE)rm -f $(CMAFILES) + $(HIDE)rm -f $(CMXFILES) + $(HIDE)rm -f $(CMXAFILES) + $(HIDE)rm -f $(CMXSFILES) + $(HIDE)rm -f $(OFILES) + $(HIDE)rm -f $(CMXAFILES:.cmxa=.a) + $(HIDE)rm -f $(MLGFILES:.mlg=.ml) + $(HIDE)rm -f $(CMXFILES:.cmx=.cmt) + $(HIDE)rm -f $(MLIFILES:.mli=.cmti) + $(HIDE)rm -f $(ALLDFILES) + $(HIDE)rm -f $(NATIVEFILES) + $(HIDE)find . -name .coq-native -type d -empty -delete + $(HIDE)rm -f $(VOFILES) + $(HIDE)rm -f $(VOFILES:.vo=.vio) + $(HIDE)rm -f $(VOFILES:.vo=.vos) + $(HIDE)rm -f $(VOFILES:.vo=.vok) + $(HIDE)rm -f $(BEAUTYFILES) $(VFILES:=.old) + $(HIDE)rm -f all.ps all-gal.ps all.pdf all-gal.pdf all.glob all-mli.tex + $(HIDE)rm -f $(VFILES:.v=.glob) + $(HIDE)rm -f $(VFILES:.v=.tex) + $(HIDE)rm -f $(VFILES:.v=.g.tex) + $(HIDE)rm -f pretty-timed-success.ok + $(HIDE)rm -f META + $(HIDE)rm -rf html mlihtml +.PHONY: clean + +cleanall:: clean + @# Extension point + $(SHOW)'CLEAN *.aux *.timing' + $(HIDE)rm -f $(foreach f,$(VFILES:.v=),$(dir $(f)).$(notdir $(f)).aux) + $(HIDE)rm -f $(TIME_OF_BUILD_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) + $(HIDE)rm -f $(VOFILES:.vo=.v.timing) + $(HIDE)rm -f $(VOFILES:.vo=.v.before-timing) + $(HIDE)rm -f $(VOFILES:.vo=.v.after-timing) + $(HIDE)rm -f $(VOFILES:.vo=.v.timing.diff) + $(HIDE)rm -f .lia.cache .nia.cache +.PHONY: cleanall + 
+archclean:: + @# Extension point + $(SHOW)'CLEAN *.cmx *.o' + $(HIDE)rm -f $(NATIVEFILES) + $(HIDE)rm -f $(CMOFILES:%.cmo=%.cmx) +.PHONY: archclean + + +# Compilation rules ########################################################### + +$(MLIFILES:.mli=.cmi): %.cmi: %.mli + $(SHOW)'CAMLC -c $<' + $(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $< + +$(MLGFILES:.mlg=.ml): %.ml: %.mlg + $(SHOW)'COQPP $<' + $(HIDE)$(COQPP) $< + +# Stupid hack around a deficient syntax: we cannot concatenate two expansions +$(filter %.cmo, $(MLFILES:.ml=.cmo) $(MLGFILES:.mlg=.cmo)): %.cmo: %.ml + $(SHOW)'CAMLC -c $<' + $(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $< + +# Same hack +$(filter %.cmx, $(MLFILES:.ml=.cmx) $(MLGFILES:.mlg=.cmx)): %.cmx: %.ml + $(SHOW)'CAMLOPT -c $(FOR_PACK) $<' + $(HIDE)$(TIMER) $(CAMLOPTC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $(FOR_PACK) $< + + +$(MLLIBFILES:.mllib=.cmxs): %.cmxs: %.cmxa + $(SHOW)'CAMLOPT -shared -o $@' + $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \ + -shared -o $@ $< + +$(MLLIBFILES:.mllib=.cma): %.cma: | %.mllib + $(SHOW)'CAMLC -a -o $@' + $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^ + +$(MLLIBFILES:.mllib=.cmxa): %.cmxa: | %.mllib + $(SHOW)'CAMLOPT -a -o $@' + $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^ + + +$(MLPACKFILES:.mlpack=.cmxs): %.cmxs: %.cmxa + $(SHOW)'CAMLOPT -shared -o $@' + $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \ + -shared -o $@ $< + +$(MLPACKFILES:.mlpack=.cmxa): %.cmxa: %.cmx | %.mlpack + $(SHOW)'CAMLOPT -a -o $@' + $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $< + +$(MLPACKFILES:.mlpack=.cma): %.cma: %.cmo | %.mlpack + $(SHOW)'CAMLC -a -o $@' + $(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^ + +$(MLPACKFILES:.mlpack=.cmo): %.cmo: | %.mlpack + $(SHOW)'CAMLC -pack -o $@' + 
$(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -pack -o $@ $^ + +$(MLPACKFILES:.mlpack=.cmx): %.cmx: | %.mlpack + $(SHOW)'CAMLOPT -pack -o $@' + $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -pack -o $@ $^ + +# This rule is for _CoqProject with no .mllib nor .mlpack +$(filter-out $(MLLIBFILES:.mllib=.cmxs) $(MLPACKFILES:.mlpack=.cmxs) $(addsuffix .cmxs,$(PACKEDFILES)) $(addsuffix .cmxs,$(LIBEDFILES)),$(MLFILES:.ml=.cmxs) $(MLGFILES:.mlg=.cmxs)): %.cmxs: %.cmx + $(SHOW)'[deprecated,use-mllib-or-mlpack] CAMLOPT -shared -o $@' + $(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \ + -shared -o $@ $< + +# can't make +# https://www.gnu.org/software/make/manual/make.html#Static-Pattern +# work with multiple target rules +# so use eval in a loop instead +# with grouped targets https://www.gnu.org/software/make/manual/make.html#Multiple-Targets +# if available (GNU Make >= 4.3) +ifneq (,$(filter grouped-target,$(.FEATURES))) +define globvorule= + +# take care to $$ variables using $< etc + $(1).vo $(1).glob &: $(1).v | $(VDFILE) + $(SHOW)COQC $(1).v + $(HIDE)$$(TIMER) $(COQC) $(COQDEBUG) $$(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $(1).v +ifeq ($(COQDONATIVE), "yes") + $(SHOW)COQNATIVE $(1).vo + $(HIDE)$(call TIMER,$(1).vo.native) $(COQNATIVE) $(COQLIBS) $(1).vo +endif + +endef +else + +$(VOFILES): %.vo: %.v | $(VDFILE) + $(SHOW)COQC $< + $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $< +ifeq ($(COQDONATIVE), "yes") + $(SHOW)COQNATIVE $@ + $(HIDE)$(call TIMER,$@.native) $(COQNATIVE) $(COQLIBS) $@ +endif + +# this is broken :( todo fix if we ever find a solution that doesn't need grouped targets +$(GLOBFILES): %.glob: %.v + $(SHOW)'COQC $< (for .glob)' + $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< + +endif + +$(foreach vfile,$(VFILES:.v=),$(eval $(call globvorule,$(vfile)))) + +$(VFILES:.v=.vio): %.vio: %.v + $(SHOW)COQC -vio $< + $(HIDE)$(TIMER) $(COQC) -vio 
$(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< + +$(VFILES:.v=.vos): %.vos: %.v + $(SHOW)COQC -vos $< + $(HIDE)$(TIMER) $(COQC) -vos $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< + +$(VFILES:.v=.vok): %.vok: %.v + $(SHOW)COQC -vok $< + $(HIDE)$(TIMER) $(COQC) -vok $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< + +$(addsuffix .timing.diff,$(VFILES)): %.timing.diff : %.before-timing %.after-timing + $(SHOW)PYTHON TIMING-DIFF $*.{before,after}-timing + $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-single-time-diff BEFORE=$*.before-timing AFTER=$*.after-timing TIME_OF_PRETTY_BUILD_FILE="$@" + +$(BEAUTYFILES): %.v.beautified: %.v + $(SHOW)'BEAUTIFY $<' + $(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -beautify $< + +$(TEXFILES): %.tex: %.v + $(SHOW)'COQDOC -latex $<' + $(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex $< -o $@ + +$(GTEXFILES): %.g.tex: %.v + $(SHOW)'COQDOC -latex -g $<' + $(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex -g $< -o $@ + +$(HTMLFILES): %.html: %.v %.glob + $(SHOW)'COQDOC -html $<' + $(HIDE)$(COQDOC) $(COQDOCFLAGS) -html $< -o $@ + +$(GHTMLFILES): %.g.html: %.v %.glob + $(SHOW)'COQDOC -html -g $<' + $(HIDE)$(COQDOC) $(COQDOCFLAGS) -html -g $< -o $@ + +# Dependency files ############################################################ + +ifndef MAKECMDGOALS + -include $(ALLDFILES) +else + ifneq ($(filter-out archclean clean cleanall printenv make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff,$(MAKECMDGOALS)),) + -include $(ALLDFILES) + endif +endif + +.SECONDARY: $(ALLDFILES) + +redir_if_ok = > "$@" || ( RV=$$?; rm -f "$@"; exit $$RV ) + +GENMLFILES:=$(MLGFILES:.mlg=.ml) +$(addsuffix .d,$(ALLSRCFILES)): $(GENMLFILES) + +$(addsuffix .d,$(MLIFILES)): %.mli.d: %.mli + $(SHOW)'CAMLDEP $<' + $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) + +$(addsuffix .d,$(MLGFILES)): %.mlg.d: %.ml + $(SHOW)'CAMLDEP $<' + $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) + +$(addsuffix 
.d,$(MLFILES)): %.ml.d: %.ml + $(SHOW)'CAMLDEP $<' + $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok) + +$(addsuffix .d,$(MLLIBFILES)): %.mllib.d: %.mllib + $(SHOW)'OCAMLLIBDEP $<' + $(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) "$<" $(redir_if_ok) + +$(addsuffix .d,$(MLPACKFILES)): %.mlpack.d: %.mlpack + $(SHOW)'OCAMLLIBDEP $<' + $(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) "$<" $(redir_if_ok) + +# If this makefile is created using a _CoqProject we have coqdep get +# options from it. This avoids argument length limits for pathological +# projects. Note that extra options might be on the command line. +VDFILE_FLAGS:=$(if _CoqProject,-f _CoqProject,) $(CMDLINE_COQLIBS) $(CMDLINE_VFILES) + +$(VDFILE): _CoqProject $(VFILES) + $(SHOW)'COQDEP VFILES' + $(HIDE)$(COQDEP) $(if $(strip $(METAFILE)),-m "$(METAFILE)") -vos -dyndep var $(VDFILE_FLAGS) $(redir_if_ok) + +# Misc ######################################################################## + +byte: + $(HIDE)$(MAKE) all "OPT:=-byte" -f "$(SELF)" +.PHONY: byte + +opt: + $(HIDE)$(MAKE) all "OPT:=-opt" -f "$(SELF)" +.PHONY: opt + +# This is deprecated. To extend this makefile use +# extension points and Makefile.local +printenv:: + $(warning printenv is deprecated) + $(warning write extensions in Makefile.local or include Makefile.conf) + @echo 'COQLIB = $(COQLIB)' + @echo 'COQCORELIB = $(COQCORELIB)' + @echo 'DOCDIR = $(DOCDIR)' + @echo 'OCAMLFIND = $(OCAMLFIND)' + @echo 'HASNATDYNLINK = $(HASNATDYNLINK)' + @echo 'SRC_SUBDIRS = $(SRC_SUBDIRS)' + @echo 'COQ_SRC_SUBDIRS = $(COQ_SRC_SUBDIRS)' + @echo 'COQCORE_SRC_SUBDIRS = $(COQCORE_SRC_SUBDIRS)' + @echo 'OCAMLFIND = $(OCAMLFIND)' + @echo 'PP = $(PP)' + @echo 'COQFLAGS = $(COQFLAGS)' + @echo 'COQLIB = $(COQLIBS)' + @echo 'COQLIBINSTALL = $(COQLIBINSTALL)' + @echo 'COQDOCINSTALL = $(COQDOCINSTALL)' +.PHONY: printenv + +# Generate a .merlin file. 
If you need to append directives to this +# file you can extend the merlin-hook target in Makefile.local +.merlin: + $(SHOW)'FILL .merlin' + $(HIDE)echo 'FLG $(COQMF_CAMLFLAGS)' > .merlin + $(HIDE)echo 'B $(COQCORELIB)' >> .merlin + $(HIDE)echo 'S $(COQCORELIB)' >> .merlin + $(HIDE)$(foreach d,$(COQCORE_SRC_SUBDIRS), \ + echo 'B $(COQCORELIB)$(d)' >> .merlin;) + $(HIDE)$(foreach d,$(COQ_SRC_SUBDIRS), \ + echo 'S $(COQLIB)$(d)' >> .merlin;) + $(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'B $(d)' >> .merlin;) + $(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'S $(d)' >> .merlin;) + $(HIDE)$(MAKE) merlin-hook -f "$(SELF)" +.PHONY: merlin + +merlin-hook:: + @# Extension point +.PHONY: merlin-hook + +# prints all variables +debug: + $(foreach v,\ + $(sort $(filter-out $(INITIAL_VARS) INITIAL_VARS,\ + $(.VARIABLES))),\ + $(info $(v) = $($(v)))) +.PHONY: debug + +.DEFAULT_GOAL := all + +# Users can create Makefile.local-late to hook into double-colon rules +# or add other needed Makefile code, using defined +# variables if necessary. +-include Makefile.local-late + +# Local Variables: +# mode: makefile-gmake +# End: diff --git a/examples/coq-example/proofs/coq/extraction/dummy_core_lib.v b/examples/coq-example/proofs/coq/extraction/dummy_core_lib.v new file mode 100644 index 000000000..e95813f30 --- /dev/null +++ b/examples/coq-example/proofs/coq/extraction/dummy_core_lib.v @@ -0,0 +1,34 @@ +From Coq Require Import ZArith. +Require Import List. +Import List.ListNotations. +Open Scope Z_scope. + +(* LIBRARY CODE *) +Definition t_isize := Z. +Notation "'t_Vec' T '((t_Global))'" := (list T). +Definition impl_1__push {A} (l : list A) (a : A) : list A := cons a l. +Definition impl_1__pop {A} (l : list A) : list A * option A := + match l with + | [] => ([], None) + | (x :: xs) => (xs, Some x) + end. +Definition impl__unwrap {A} (x : option A) `{H : x <> None} : A := + match x as k return k <> None -> _ with + | None => fun H => False_rect _ (H eq_refl) + | Some a => fun _ => a + end H. 
+Definition t_Add_f_add := (fun x y => x + y). +Definition t_Mul_f_mul := (fun x y => x * y). +Definition t_PartialEq_f_eq := (fun x y => x =? y). +Definition impl__isize__rem_euclid := fun x y => x mod y. +Definition cast := fun (x : Z) => x. +Definition ne := fun x y => negb (x =? y). +Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). +Definition t_PartialOrd_f_lt := fun x y => x x - y. +Definition impl__new {A} (tt : unit) : list A := []. +Definition f_fold {A B} (l : list A) (i : B) (f : B -> A -> B) : B := List.fold_left f l i. +Definition f_into_iter {A} := @id A. +(* /LIBRARY CODE *) diff --git a/examples/coq-example/src/dummy_core_lib.rs b/examples/coq-example/src/dummy_core_lib.rs new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/examples/coq-example/src/dummy_core_lib.rs @@ -0,0 +1 @@ + diff --git a/examples/coq-example/src/lib.rs b/examples/coq-example/src/lib.rs new file mode 100644 index 000000000..1b1fdaa41 --- /dev/null +++ b/examples/coq-example/src/lib.rs @@ -0,0 +1,77 @@ +mod dummy_core_lib; +use dummy_core_lib::*; + +enum Instruction { + Push(isize), + Pop, + Add, + Sub, + Mul, + Not, + Dup, +} + +impl Instruction { + pub fn interpret(self, stack: &mut Vec) { + match self { + Instruction::Push(v) => stack.push(v), + Instruction::Pop => { + stack.pop(); + } + Instruction::Add => match (stack.pop(), stack.pop()) { + (Some(a), Some(b)) => stack.push(b + a), + _ => (), + }, + Instruction::Sub => match (stack.pop(), stack.pop()) { + (Some(a), Some(b)) => stack.push(b - a), + _ => (), + }, + Instruction::Mul => match (stack.pop(), stack.pop()) { + (Some(a), Some(b)) => stack.push(b * a), + _ => (), + }, + Instruction::Not => match stack.pop() { + Some(a) => stack.push(if a == 0 { 1 } else { 0 }), + _ => (), + }, + Instruction::Dup => match stack.pop() { + Some(a) => { + stack.push(a); + stack.push(a); + } + _ => (), + }, + } + } +} + +fn example() -> Vec { + let mut stk = Vec::new(); + for cmd in [ + 
Instruction::Push(1), + Instruction::Push(1), + Instruction::Add, + Instruction::Push(1), + Instruction::Push(1), + Instruction::Push(1), + Instruction::Add, + Instruction::Add, + Instruction::Dup, + Instruction::Mul, + Instruction::Sub, + ] { + cmd.interpret(&mut stk) + } + stk +} +// Push 1: 1 +// Push 1: 1, 1 +// Add: 2 +// Push 1: 2, 1 +// Push 1: 2, 1, 1 +// Push 1: 2, 1, 1, 1 +// Add: 2, 1, 2 +// Add: 2, 3 +// Dup: 2, 3, 3 +// Mul: 2, 9 +// Sub: -7 diff --git a/examples/default.nix b/examples/default.nix index 74aba3dcd..bd602e515 100644 --- a/examples/default.nix +++ b/examples/default.nix @@ -7,6 +7,7 @@ hacl-star, hax-env, jq, + proverif, }: let matches = re: path: !builtins.isNull (builtins.match re path); commonArgs = { @@ -15,8 +16,8 @@ src = craneLib.path ./..; filter = path: type: # We include only certain files. FStar files under the example - # directory are listed out. - ( matches ".*(Makefile|.*[.](rs|toml|lock|diff|fsti?))$" path + # directory are listed out. Same for proverif (*.pvl) files. 
+ ( matches ".*(Makefile|.*[.](rs|toml|lock|diff|fsti?|pv))$" path && !matches ".*examples/.*[.]fsti?$" path ) || ("directory" == type); }; @@ -46,5 +47,10 @@ in sed -i "s/make -C limited-order-book/HAX_VANILLA_RUSTC=never make -C limited-order-book/g" Makefile make ''; - buildInputs = [hax hax-env fstar jq]; + buildInputs = [ + hax hax-env fstar jq + (proverif.overrideDerivation (_: { + patches = [ ./proverif-psk/pv_div_by_zero_fix.diff ]; + })) + ]; }) diff --git a/examples/proverif-psk/Cargo.toml b/examples/proverif-psk/Cargo.toml new file mode 100644 index 000000000..03103d4b0 --- /dev/null +++ b/examples/proverif-psk/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "proverif-psk" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +hax-lib.workspace = true +libcrux = "=0.0.2-pre.2" + +[dev-dependencies] +rand = { version = "0.8" } diff --git a/examples/proverif-psk/Makefile b/examples/proverif-psk/Makefile new file mode 100644 index 000000000..8e337d064 --- /dev/null +++ b/examples/proverif-psk/Makefile @@ -0,0 +1,8 @@ +check: ./proofs/proverif/extraction/lib.pvl + timeout 30 proverif -lib ./proofs/proverif/extraction/lib.pvl ./proofs/proverif/extraction/analysis.pv + +proofs/proverif/extraction/lib.pvl: + cargo hax into pro-verif + +clean: + rm -f proofs/proverif/extraction/lib.pvl diff --git a/examples/proverif-psk/Readme.md b/examples/proverif-psk/Readme.md new file mode 100644 index 000000000..ebfba4f08 --- /dev/null +++ b/examples/proverif-psk/Readme.md @@ -0,0 +1,77 @@ +# A hax ProVerif example + +This crate demonstrates an example of ProVerif extraction using hax. 
+ +The crate provides functions for implementing a simplistic pre-shared-key (PSK) based protocol +between an initiator and receiver, which is defined as follows: +``` +Initiator(psk: AEADKey): + let response_key = AEAD.KeyGen() + let message = AEAD.Encrypt(psk, response_key) + +Initiator -> Responder: message + +Responder(psk: AEADKey, payload: &[u8]): + let response_key = AEAD.Decrypt(psk, message) + let response = AEAD.Encrypt(response_key, payload) + +Responder -> Initiator: response + +Initiator(response_key, response): + let output = AEAD.Decrypt(response_key, response) + return output +``` + +The crate does not implement message transport, only the initiator and +responder protocol logic. + +A handwritten ProVerif model of this protocol is included in `psk.pv` for comparison. + +### On the use of `proverif::replace()` +Since ProVerif operates in a symbolic world, certain operations have +to be represented abstractly, in in symbolic terms. In this case, we +give symbolic replacements for serialization and deserialization, as +well as cryptographic operations such as encryption and +decryption. They are thus treated as ideal implementations of their +respective functionality in ProVerif's analysis of the protocol. To +obtain assurance that these operations are correct and implemented +securely, one of hax' other backends can be used. + + +## Extracting into ProVerif +To obtain a ProVerif model of the protocol logic functions, run +``` +cargo hax into pro-verif +``` +This will generate a file `./proofs/proverif/extraction/lib.pvl`. 
+ +## Running a Basic Analysis on the Model +We have provided a handwritten file +`./proofs/proverif/extraction/analysis.pv`, which models the protocol +using the extracted functions in `lib.pvl` and uses ProVerif to verify + +- that initiator and receiver can both complete the protocol, as well as +- confidentiality of the pre-shared key and the protocol payload + +To let ProVerif perform the analysis, from the crate root, run: + +``` +proverif -lib ./proofs/proverif/extraction/lib.pvl ./proofs/proverif/extraction/analysis.pv +``` + +The expected final output is +``` +-------------------------------------------------------------- +Verification summary: + +Query not event(InitiatorFinished(initiator_result)) is false. + +Query not event(ResponderFinished(responder_result)) is false. + +Query not attacker(PSK[]) is true. + +Query not attacker(SECRET_PAYLOAD[]) is true. + +-------------------------------------------------------------- +``` + diff --git a/examples/proverif-psk/proofs/proverif/extraction/analysis.pv b/examples/proverif-psk/proofs/proverif/extraction/analysis.pv new file mode 100644 index 000000000..96b4d3ccd --- /dev/null +++ b/examples/proverif-psk/proofs/proverif/extraction/analysis.pv @@ -0,0 +1,38 @@ +(*****************************************) +(* Top-level processes *) +(*****************************************) + +event InitiatorFinished(bitstring). +event ResponderFinished(bitstring). + +free PSK: proverif_psk__t_KeyIv [private]. +free SECRET_PAYLOAD: bitstring [private]. + +query initiator_result: bitstring; event(InitiatorFinished(initiator_result)). +query responder_result: bitstring; event(ResponderFinished(responder_result)). + +query attacker(PSK). +query attacker(SECRET_PAYLOAD). 
+ +let Initiator(psk: proverif_psk__t_KeyIv) = + new ikm: bitstring; + let (initiator_message: proverif_psk__t_Message, response_key: proverif_psk__t_KeyIv) = proverif_psk__initiate(ikm, psk) in + out(c, initiator_message); + in(c, response_message: proverif_psk__t_Message); + let response = proverif_psk__finish(response_message, response_key) in + event InitiatorFinished(response). + +let Responder(psk: proverif_psk__t_KeyIv, payload: bitstring) = + in(c, initiator_message: proverif_psk__t_Message); + let response_message = proverif_psk__respond( + psk, + payload, + initiator_message + ) in + event ResponderFinished(payload); + out(c, response_message). + +process + Initiator(PSK) | Responder(PSK, SECRET_PAYLOAD) + + diff --git a/examples/proverif-psk/psk.pv b/examples/proverif-psk/psk.pv new file mode 100644 index 000000000..c93327114 --- /dev/null +++ b/examples/proverif-psk/psk.pv @@ -0,0 +1,39 @@ +free c: channel. + +type key. + +fun senc(bitstring, key): bitstring. +reduc forall m: bitstring, k: key; sdec(senc(m,k), k) = m. + +fun key_to_bitstring(key): bitstring. +reduc forall k: key; bitstring_to_key(key_to_bitstring(k)) = k. + +event InitiatorFinished(bitstring). +event ResponderFinished(bitstring). + +free PSK: key [private]. +free SECRET_PAYLOAD: bitstring [private]. + +query initiator_result: bitstring; event(InitiatorFinished(initiator_result)). +query responder_result: bitstring; event(ResponderFinished(responder_result)). + +query attacker(PSK). +query attacker(SECRET_PAYLOAD). + +let Initiator(psk: key) = + new response_key: key; + let initiator_message = senc(key_to_bitstring(response_key), psk) in + out(c, initiator_message); + in(c, response_message: bitstring); + let response = sdec(response_message, response_key) in + event InitiatorFinished(response). 
+ +let Responder(psk: key, payload: bitstring) = + in(c, initiator_message: bitstring); + let response_key = sdec(initiator_message, psk) in + let response_message = senc(payload, bitstring_to_key(response_key)) in + event ResponderFinished(payload); + out(c, response_message). + +process + Initiator(PSK) | Responder(PSK, SECRET_PAYLOAD) diff --git a/examples/proverif-psk/pv_div_by_zero_fix.diff b/examples/proverif-psk/pv_div_by_zero_fix.diff new file mode 100644 index 000000000..fec2cf726 --- /dev/null +++ b/examples/proverif-psk/pv_div_by_zero_fix.diff @@ -0,0 +1,13 @@ +diff proverif2.05/src/display.ml proverif2.05/src/display.ml +index c43785ec..2763d907 100644 +--- proverif/src/display.ml ++++ proverif/src/display.ml +@@ -49,7 +49,7 @@ let dynamic_display str = + then display_whitespace (!record_cursor_line - size); + (* If we cannot determine the number of columns, we just assume that the statistics + will fit on one line (the statistics will not be active by default) *) +- let lines = if columns = -1 then 0 else ((max (!record_cursor_line) size) - 1) / columns in ++ let lines = if columns <= 0 then 0 else ((max (!record_cursor_line) size) - 1) / columns in + (* Go to the beginning of the line *) + print_string "\r"; + if lines > 0 then diff --git a/examples/proverif-psk/src/lib.rs b/examples/proverif-psk/src/lib.rs new file mode 100644 index 000000000..7ce191a69 --- /dev/null +++ b/examples/proverif-psk/src/lib.rs @@ -0,0 +1,153 @@ +use hax_lib as hax; +use libcrux::aead::{self, Algorithm}; + +const AEAD_KEY_NONCE: usize = Algorithm::key_size(Algorithm::Chacha20Poly1305) + + Algorithm::nonce_size(Algorithm::Chacha20Poly1305); + +const AEAD_KEY_LENGTH: usize = Algorithm::key_size(Algorithm::Chacha20Poly1305); + +const EMPTY_AAD: &[u8; 0] = b""; +const RESPONSE_KEY_CONTEXT: &[u8; 12] = b"response-key"; + +/* Type definitions */ +#[derive(Debug)] +pub enum Error { + CryptoError, + OtherError, +} + +impl From for Error { + fn from(_value: libcrux::aead::Error) -> 
Error { + Error::CryptoError + } +} + +impl From for Error { + fn from(_value: libcrux::hkdf::Error) -> Error { + Error::CryptoError + } +} + +impl From for Error { + fn from(_value: std::array::TryFromSliceError) -> Error { + Error::OtherError + } +} + +#[hax::opaque_type] +pub struct Message(aead::Tag, Vec); + +#[hax::opaque_type] +pub struct KeyIv(libcrux::aead::Key, libcrux::aead::Iv); + +/* Wire formats */ +#[hax::pv_constructor] +fn serialize_key_iv(key_iv: &KeyIv) -> Vec { + let mut result = Vec::new(); + result.extend_from_slice(key_iv.1 .0.as_ref()); + match &key_iv.0 { + aead::Key::Chacha20Poly1305(k) => result.extend_from_slice(k.0.as_ref()), + _ => unimplemented!(), + } + result +} + +#[hax::proverif::replace( + "reduc forall k: $:{KeyIv}; ${deserialize_key_iv}(${serialize_key_iv}(k)) = k." +)] +fn deserialize_key_iv(bytes: &[u8]) -> Result { + let iv = aead::Iv::new(&bytes[..12])?; + let key = aead::Key::from_slice(Algorithm::Chacha20Poly1305, &bytes[12..])?; + Ok(KeyIv(key, iv)) +} + +/* Cryptographic functions */ +#[hax::pv_constructor] +fn derive_key_iv(ikm: &[u8], info: &[u8]) -> Result { + let key_iv_bytes = + libcrux::hkdf::expand(libcrux::hkdf::Algorithm::Sha256, ikm, info, AEAD_KEY_NONCE)?; + + let (key_bytes, iv_bytes) = key_iv_bytes.split_at(AEAD_KEY_LENGTH); + let key = + libcrux::aead::Key::from_slice(libcrux::aead::Algorithm::Chacha20Poly1305, key_bytes)?; + + let iv = libcrux::aead::Iv(iv_bytes.try_into()?); + Ok(KeyIv(key, iv)) +} + +#[hax::proverif::replace("fun ${encrypt} ($:{KeyIv}, bitstring): $:{Message}.")] +pub fn encrypt(key_iv: &KeyIv, message: &[u8]) -> Result { + let (tag, ctxt) = + libcrux::aead::encrypt_detached(&key_iv.0, message, aead::Iv(key_iv.1 .0), EMPTY_AAD)?; + Ok(Message(tag, ctxt)) +} + +#[hax::proverif::replace( + "reduc forall m: bitstring, k: $:{KeyIv}; ${decrypt}(k, ${encrypt}(k, m)) = m." 
+)] +fn decrypt(key_iv: &KeyIv, message: Message) -> Result, Error> { + libcrux::aead::decrypt_detached( + &key_iv.0, + message.1, + aead::Iv(key_iv.1 .0), + EMPTY_AAD, + &message.0, + ) + .map_err(|_| Error::CryptoError) +} + +/* Protocol */ +pub fn initiate(ikm: &[u8], psk: &KeyIv) -> Result<(Message, KeyIv), Error> { + let response_key_iv = derive_key_iv(ikm, RESPONSE_KEY_CONTEXT)?; + + let serialized_responder_key = serialize_key_iv(&response_key_iv); + + let initiator_message = encrypt(psk, &serialized_responder_key)?; + + Ok((initiator_message, response_key_iv)) +} + +pub fn respond(psk: &KeyIv, payload: &[u8], message: Message) -> Result { + let response_key_bytes = decrypt(psk, message)?; + + let response_key_iv = deserialize_key_iv(&response_key_bytes)?; + + let responder_message = encrypt(&response_key_iv, payload)?; + + Ok(responder_message) +} + +pub fn finish(message: Message, response_key_iv: &KeyIv) -> Result, Error> { + let response_bytes = decrypt(response_key_iv, message)?; + + Ok(response_bytes) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() { + use rand::{rngs::OsRng, RngCore}; + + fn random_array() -> [u8; L] { + let mut rng = OsRng; + let mut seed = [0; L]; + rng.try_fill_bytes(&mut seed).unwrap(); + seed + } + let payload = b"SECRET"; + let ikm_psk = random_array::<32>(); + let ikm_responder_key = random_array::<32>(); + + let psk = derive_key_iv(&ikm_psk, b"pre-shared-key") + .map_err(|_| Error::CryptoError) + .unwrap(); + + let (initiator_message, response_key) = initiate(&ikm_responder_key, &psk).unwrap(); + let responder_message = respond(&psk, payload, initiator_message).unwrap(); + let initiator_finish = finish(responder_message, &response_key).unwrap(); + assert_eq!(payload.to_vec(), initiator_finish); + } +} diff --git a/flake.lock b/flake.lock index ba2824fa7..13161cb34 100644 --- a/flake.lock +++ b/flake.lock @@ -100,9 +100,26 @@ "fstar": "fstar", "hacl-star": "hacl-star", "nixpkgs": "nixpkgs", + 
"rust-by-examples": "rust-by-examples", "rust-overlay": "rust-overlay" } }, + "rust-by-examples": { + "flake": false, + "locked": { + "lastModified": 1729958822, + "narHash": "sha256-/X1dI2MPYSfOdqOOxpNFCykoJtVetQCOo7WfBa7XAyU=", + "owner": "rust-lang", + "repo": "rust-by-example", + "rev": "4e3881e0cb1b690158acca3c16e271fdadf736da", + "type": "github" + }, + "original": { + "owner": "rust-lang", + "repo": "rust-by-example", + "type": "github" + } + }, "rust-overlay": { "inputs": { "nixpkgs": [ @@ -110,11 +127,11 @@ ] }, "locked": { - "lastModified": 1723429325, - "narHash": "sha256-4x/32xTCd+xCwFoI/kKSiCr5LQA2ZlyTRYXKEni5HR8=", + "lastModified": 1729736953, + "narHash": "sha256-Rb6JUop7NRklg0uzcre+A+Ebrn/ZiQPkm4QdKg6/3pw=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "65e3dc0fe079fe8df087cd38f1fe6836a0373aad", + "rev": "29b1275740d9283467b8117499ec8cbb35250584", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 4fd03ee02..736de0705 100644 --- a/flake.nix +++ b/flake.nix @@ -23,6 +23,10 @@ url = "github:hacl-star/hacl-star"; flake = false; }; + rust-by-examples = { + url = "github:rust-lang/rust-by-example"; + flake = false; + }; }; outputs = { @@ -56,6 +60,7 @@ cat "${hax-env-file}" | xargs -I{} echo "export {}" fi ''; + ocamlPackages = pkgs.ocamlPackages; in rec { packages = { inherit rustc ocamlformat rustfmt fstar hax-env; @@ -73,7 +78,7 @@ #!${pkgs.stdenv.shell} ${packages.hax-rust-frontend.hax-engine-names-extract}/bin/hax-engine-names-extract | sed 's|/nix/store/\(.\{6\}\)|/nix_store/\1-|g' ''; - inherit rustc; + inherit rustc ocamlPackages; }; hax-rust-frontend = pkgs.callPackage ./cli { inherit rustc craneLib; @@ -86,6 +91,18 @@ check-examples = checks.examples; check-readme-coherency = checks.readme-coherency; + rust-by-example-hax-extraction = pkgs.stdenv.mkDerivation { + name = "rust-by-example-hax-extraction"; + phases = ["installPhase"]; + buildInputs = [packages.hax pkgs.cargo]; + installPhase = '' + cp 
--no-preserve=mode -rf ${inputs.rust-by-examples} workdir + cd workdir + ${pkgs.nodejs}/bin/node ${./.utils/rust-by-example.js} + mv rust-by-examples-crate/proofs $out + ''; + }; + # The commit that corresponds to our nightly pin, helpful when updating rusrc. toolchain_commit = pkgs.runCommand "hax-toolchain-commit" { } '' # This is sad but I don't know a better way. @@ -131,24 +148,6 @@ ${pkgs.python3}/bin/python -m http.server "$@" ''}"; }; - # Check the coherency between issues labeled - # `marked-unimplemented` on GitHub and issues mentionned in - # the engine in the `Unimplemented {issue_id: ...}` errors. - check-unimlemented-issue-coherency = { - type = "app"; - program = "${pkgs.writeScript "check-unimlemented-issue-coherency" '' - RG=${pkgs.ripgrep}/bin/rg - SD=${pkgs.sd}/bin/sd - - diff -U0 \ - <(${pkgs.gh}/bin/gh issue -R hacspec/hax list --label 'marked-unimplemented' --json number,closed -L 200 \ - | ${pkgs.jq}/bin/jq '.[] | select(.closed | not) | .number' | sort -u) \ - <($RG 'issue_id:(\d+)' -Ior '$1' | sort -u) \ - | $RG '^[+-]\d' \ - | $SD '[-](\d+)' '#$1\t is labeled `marked-unimplemented`, but was not found in the code' \ - | $SD '[+](\d+)' '#$1\t is *not* labeled `marked-unimplemented` or is closed' - ''}"; - }; serve-book = { type = "app"; program = "${pkgs.writeScript "serve-book" '' @@ -176,24 +175,25 @@ installPhase = '' mkdir -p $out/bin cp ${./.utils/rebuild.sh} $out/bin/rebuild - cp ${./.utils/list-names.sh} $out/bin/list-names - cp ${./.utils/expand.sh} $out/bin/expand-hax-macros ''; }; packages = [ ocamlformat - pkgs.ocamlPackages.ocaml-lsp - pkgs.ocamlPackages.ocamlformat-rpc-lib - pkgs.ocamlPackages.ocaml-print-intf - pkgs.ocamlPackages.odoc - pkgs.ocamlPackages.utop + ocamlPackages.ocaml-lsp + ocamlPackages.ocamlformat-rpc-lib + ocamlPackages.ocaml-print-intf + ocamlPackages.odoc + ocamlPackages.utop + pkgs.just pkgs.cargo-expand pkgs.cargo-release pkgs.cargo-insta pkgs.openssl.dev pkgs.pkg-config pkgs.rust-analyzer + pkgs.toml2json 
+ pkgs.mdbook rustfmt rustc @@ -201,7 +201,7 @@ ]; LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib"; in { - fstar = pkgs.mkShell { + examples = pkgs.mkShell { inherit inputsFrom LIBCLANG_PATH; HACL_HOME = "${hacl-star}"; shellHook = '' @@ -209,11 +209,11 @@ export HAX_PROOF_LIBS_HOME="$HAX_ROOT/proof-libs/fstar" export HAX_LIBS_HOME="$HAX_ROOT/hax-lib" ''; - packages = packages ++ [fstar]; + packages = packages ++ [fstar pkgs.proverif]; }; default = pkgs.mkShell { inherit packages inputsFrom LIBCLANG_PATH; - shellHook = ''echo "Commands available: $(ls ${utils}/bin | tr '\n' ' ')"''; + shellHook = ''echo "Commands available: $(ls ${utils}/bin | tr '\n' ' ')" 1>&2''; }; }; } diff --git a/frontend/exporter/Cargo.toml b/frontend/exporter/Cargo.toml index a9ed2b247..35ed4c09a 100644 --- a/frontend/exporter/Cargo.toml +++ b/frontend/exporter/Cargo.toml @@ -23,7 +23,6 @@ tracing.workspace = true paste = "1.0.11" extension-traits = "1.0.1" lazy_static = "1.4.0" -bincode.workspace = true [features] default = ["rustc"] diff --git a/frontend/exporter/adt-into/src/lib.rs b/frontend/exporter/adt-into/src/lib.rs index 2d0a115f5..98c062316 100644 --- a/frontend/exporter/adt-into/src/lib.rs +++ b/frontend/exporter/adt-into/src/lib.rs @@ -18,6 +18,7 @@ struct Options { from: syn::TypePath, state: syn::Ident, state_type: syn::Type, + where_clause: Option, } mod option_parse { use super::*; @@ -29,20 +30,30 @@ mod option_parse { fn parse(input: ParseStream) -> syn::Result { let generics = input.parse()?; input.parse::()?; + input.parse::()?; input.parse::()?; let from = input.parse()?; input.parse::()?; + input.parse::()?; input.parse::()?; let state_type = input.parse()?; input.parse::()?; let state = input.parse()?; + + let mut where_clause = None; + if input.peek(Token![,]) && input.peek2(Token![where]) { + input.parse::()?; + where_clause = Some(input.parse()?); + } + Ok(Options { generics, from, state, state_type, + where_clause, }) } } @@ -291,6 +302,7 @@ pub fn 
adt_into(input: proc_macro::TokenStream) -> proc_macro::TokenStream { from: from_with_generics, state, state_type, + where_clause, } = parse_attr("args", attrs).expect("An [args] attribute was expected"); let generics = { @@ -388,7 +400,7 @@ pub fn adt_into(input: proc_macro::TokenStream) -> proc_macro::TokenStream { const _ : () = { use #from as FROM_TYPE; use #to as TO_TYPE; - impl #generics SInto<#state_type, #to #to_generics> for #from_with_generics { + impl #generics SInto<#state_type, #to #to_generics> for #from_with_generics #where_clause { #[tracing::instrument(level = "trace", skip(#state))] fn sinto(&self, #state: &#state_type) -> #to #to_generics { tracing::trace!("Enters sinto ({})", stringify!(#from_with_generics)); @@ -443,10 +455,7 @@ fn drop_generics(type_path: syn::TypePath) -> syn::TypePath { /// and we don't want a whole crate only for that helper. /// /// This proc macro defines some groups of derive clauses that -/// we reuse all the time. This is particularly interesting for -/// serializers and deserializers: today we use `bincode` and -/// `serde`, but maybe we will want to move to something else -/// in the future. +/// we reuse all the time. #[proc_macro_attribute] pub fn derive_group( attr: proc_macro::TokenStream, @@ -460,7 +469,6 @@ pub fn derive_group( .map(|group| match group { "Serializers" => quote! { #[derive(::serde::Serialize, ::serde::Deserialize)] - #[derive(::bincode::Encode, ::bincode::Decode)] }, _ => { errors.push(quote! 
{ diff --git a/frontend/exporter/options/Cargo.toml b/frontend/exporter/options/Cargo.toml index a8f04c553..c8c5a8b1e 100644 --- a/frontend/exporter/options/Cargo.toml +++ b/frontend/exporter/options/Cargo.toml @@ -14,4 +14,3 @@ serde.workspace = true serde_json.workspace = true schemars.workspace = true hax-adt-into.workspace = true -bincode.workspace = true diff --git a/frontend/exporter/src/body.rs b/frontend/exporter/src/body.rs index 8d47409cd..77c27e0c1 100644 --- a/frontend/exporter/src/body.rs +++ b/frontend/exporter/src/body.rs @@ -21,9 +21,11 @@ mod module { Rc>, rustc_middle::thir::ExprId, ) { - let base = s.base(); - let msg = || fatal!(s[base.tcx.def_span(did)], "THIR not found for {:?}", did); - base.cached_thirs.get(&did).unwrap_or_else(msg).clone() + let tcx = s.base().tcx; + s.with_item_cache(did.to_def_id(), |caches| { + let msg = || fatal!(s[tcx.def_span(did)], "THIR not found for {:?}", did); + caches.thir.as_ref().unwrap_or_else(msg).clone() + }) } pub trait IsBody: Sized + Clone + 'static { diff --git a/frontend/exporter/src/constant_utils.rs b/frontend/exporter/src/constant_utils.rs index cf91501dd..e35b3e986 100644 --- a/frontend/exporter/src/constant_utils.rs +++ b/frontend/exporter/src/constant_utils.rs @@ -22,9 +22,7 @@ pub enum ConstantInt { pub enum ConstantLiteral { Bool(bool), Char(char), - // Rust floats do not have the Eq or Ord traits due to the presence of NaN - // We instead store their bit representation, which always fits in a u128 - Float(u128, FloatTy), + Float(String, FloatTy), Int(ConstantInt), Str(String, StrStyle), ByteStr(Vec, StrStyle), @@ -62,6 +60,7 @@ pub enum ConstantExprKind { id: GlobalIdent, generics: Vec, trait_refs: Vec, + variant_information: Option, }, /// A trait constant /// @@ -77,8 +76,18 @@ pub enum ConstantExprKind { }, /// A shared reference to a static variable. Borrow(ConstantExpr), - /// A `*mut` pointer to a static mutable variable. - MutPtr(ConstantExpr), + /// A raw borrow (`*const` or `*mut`). 
+ RawBorrow { + mutability: Mutability, + arg: ConstantExpr, + }, + /// A cast ` as `, `` is stored as the type of + /// the current constant expression. Currently, this is only used + /// to represent `lit as *mut T` or `lit as *const T`, where `lit` + /// is a `usize` literal. + Cast { + source: ConstantExpr, + }, ConstRef { id: ParamConst, }, @@ -111,6 +120,9 @@ pub struct ConstantFieldExpr { /// two construct to one same `ConstantExpr` type. pub type ConstantExpr = Decorated; +// For ConstantKind we merge all the cases (Ty, Val, Unevaluated) into one +pub type ConstantKind = ConstantExpr; + #[cfg(feature = "rustc")] pub use self::rustc::*; #[cfg(feature = "rustc")] @@ -159,7 +171,7 @@ mod rustc { } } } - Float(_bits, _ty) => todo!("Converting float literals back to AST"), + Float(f, ty) => LitKind::Float(f, LitFloatType::Suffixed(ty)), ByteStr(raw, str_style) => LitKind::ByteStr(raw, str_style), Str(raw, str_style) => LitKind::Str(raw, str_style), }; @@ -178,14 +190,18 @@ mod rustc { id, generics: _, trait_refs: _, - } => ExprKind::GlobalName { id }, + variant_information, + } => ExprKind::GlobalName { + id, + constructor: variant_information, + }, Borrow(e) => ExprKind::Borrow { borrow_kind: BorrowKind::Shared, arg: e.into(), }, - MutPtr(e) => ExprKind::AddressOf { - mutability: true, - arg: e.into(), + RawBorrow { mutability, arg } => ExprKind::RawBorrow { + mutability, + arg: arg.into(), }, ConstRef { id } => ExprKind::ConstRef { id }, Array { fields } => ExprKind::Array { @@ -194,6 +210,9 @@ mod rustc { Tuple { fields } => ExprKind::Tuple { fields: fields.into_iter().map(|field| field.into()).collect(), }, + Cast { source } => ExprKind::Cast { + source: source.into(), + }, kind @ (FnPtr { .. } | TraitConst { .. }) => { // SH: I see the `Closure` kind, but it's not the same as function pointer? ExprKind::Todo(format!("FnPtr or TraitConst. 
kind={:#?}", kind)) @@ -207,10 +226,11 @@ mod rustc { } } + #[tracing::instrument(level = "trace", skip(s))] pub(crate) fn scalar_int_to_constant_literal<'tcx, S: UnderOwnerState<'tcx>>( s: &S, x: rustc_middle::ty::ScalarInt, - ty: rustc_middle::ty::Ty, + ty: rustc_middle::ty::Ty<'tcx>, ) -> ConstantLiteral { match ty.kind() { ty::Char => ConstantLiteral::Char( @@ -228,14 +248,34 @@ mod rustc { let v = x.to_uint(x.size()); ConstantLiteral::Int(ConstantInt::Uint(v, kind.sinto(s))) } - _ => fatal!( - s, - "scalar_int_to_constant_literal: the type {:?} is not a literal", - ty - ), + ty::Float(kind) => { + let v = x.to_bits_unchecked(); + bits_and_type_to_float_constant_literal(v, kind.sinto(s)) + } + _ => { + let ty_sinto: Ty = ty.sinto(s); + supposely_unreachable_fatal!( + s, + "scalar_int_to_constant_literal_ExpectedLiteralType"; + { ty, ty_sinto, x } + ) + } } } + /// Converts a bit-representation of a float of type `ty` to a constant literal + fn bits_and_type_to_float_constant_literal(bits: u128, ty: FloatTy) -> ConstantLiteral { + use rustc_apfloat::{ieee, Float}; + let string = match &ty { + FloatTy::F16 => ieee::Half::from_bits(bits).to_string(), + FloatTy::F32 => ieee::Single::from_bits(bits).to_string(), + FloatTy::F64 => ieee::Double::from_bits(bits).to_string(), + FloatTy::F128 => ieee::Quad::from_bits(bits).to_string(), + }; + ConstantLiteral::Float(string, ty) + } + + #[tracing::instrument(level = "trace", skip(s))] pub(crate) fn scalar_to_constant_expr<'tcx, S: UnderOwnerState<'tcx>>( s: &S, ty: rustc_middle::ty::Ty<'tcx>, @@ -265,10 +305,9 @@ mod rustc { scalar ) }); - ConstantExprKind::Literal(ConstantLiteral::Float( - scalar_int.to_bits_unchecked(), - float_type.sinto(s), - )) + let data = scalar_int.to_bits_unchecked(); + let lit = bits_and_type_to_float_constant_literal(data, float_type.sinto(s)); + ConstantExprKind::Literal(lit) } ty::Ref(_, inner_ty, Mutability::Not) | ty::RawPtr(inner_ty, Mutability::Mut) => { let tcx = s.base().tcx; @@ -285,6 
+324,7 @@ mod rustc { id: did.sinto(s), generics: Vec::new(), trait_refs: Vec::new(), + variant_information: None, }, GlobalAlloc::Memory(alloc) => { let values = alloc.inner().get_bytes_unchecked( @@ -308,7 +348,10 @@ mod rustc { let contents = contents.decorate(inner_ty.sinto(s), cspan.clone()); match ty.kind() { ty::Ref(..) => ConstantExprKind::Borrow(contents), - ty::RawPtr(..) => ConstantExprKind::MutPtr(contents), + ty::RawPtr(_, mutability) => ConstantExprKind::RawBorrow { + arg: contents, + mutability: mutability.sinto(s), + }, _ => unreachable!(), } } @@ -364,6 +407,7 @@ mod rustc { ) } + #[tracing::instrument(level = "trace", skip(s))] fn trait_const_to_constant_expr_kind<'tcx, S: BaseState<'tcx> + HasOwnerId>( s: &S, _const_def_id: rustc_hir::def_id::DefId, @@ -430,7 +474,7 @@ mod rustc { // Solve the trait obligations let parent_def_id = tcx.parent(ucv.def); - let trait_refs = solve_item_traits(s, parent_def_id, ucv.args, None); + let trait_refs = solve_item_required_traits(s, parent_def_id, ucv.args); // Convert let id = ucv.def.sinto(s); @@ -439,6 +483,7 @@ mod rustc { id, generics, trait_refs, + variant_information: None, } } } else { @@ -449,6 +494,7 @@ mod rustc { id, generics: vec![], trait_refs: vec![], + variant_information: None, } }; let cv = kind.decorate(ty.sinto(s), span.sinto(s)); @@ -459,7 +505,7 @@ mod rustc { impl<'tcx> ConstantExt<'tcx> for ty::Const<'tcx> { fn eval_constant>(&self, s: &S) -> Option { let (ty, evaluated) = self - .eval(s.base().tcx, s.param_env(), rustc_span::DUMMY_SP) + .eval_valtree(s.base().tcx, s.param_env(), rustc_span::DUMMY_SP) .ok()?; let evaluated = ty::Const::new(s.base().tcx, ty::ConstKind::Value(ty, evaluated)); (&evaluated != self).then_some(evaluated) @@ -475,6 +521,7 @@ mod rustc { } } impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Const<'tcx> { + #[tracing::instrument(level = "trace", skip(s))] fn sinto(&self, s: &S) -> ConstantExpr { use rustc_middle::query::Key; let span = 
self.default_span(s.base().tcx); @@ -504,7 +551,7 @@ mod rustc { } } - // #[tracing::instrument(skip(s))] + #[tracing::instrument(level = "trace", skip(s))] pub(crate) fn valtree_to_constant_expr<'tcx, S: UnderOwnerState<'tcx>>( s: &S, valtree: rustc_middle::ty::ValTree<'tcx>, @@ -512,59 +559,69 @@ mod rustc { span: rustc_span::Span, ) -> ConstantExpr { let kind = match (valtree, ty.kind()) { - (_, ty::Ref(_, inner_ty, _)) => { - ConstantExprKind::Borrow(valtree_to_constant_expr(s, valtree, *inner_ty, span)) - } - (ty::ValTree::Branch(valtrees), ty::Str) => ConstantExprKind::Literal( - ConstantLiteral::byte_str(valtrees.iter().map(|x| match x { - ty::ValTree::Leaf(leaf) => leaf.to_u8(), - _ => fatal!(s[span], "Expected a flat list of leaves while translating a str literal, got a arbitrary valtree.") - }).collect(), StrStyle::Cooked)) - , - (ty::ValTree::Branch(_), ty::Array(..) | ty::Tuple(..) | ty::Adt(..)) => { - let contents: rustc_middle::ty::DestructuredConst = s - .base().tcx - .destructure_const(ty::Const::new_value(s.base().tcx, valtree, ty)); - let fields = contents.fields.iter().copied(); - match ty.kind() { - ty::Array(_, _) => ConstantExprKind::Array { - fields: fields - .map(|field| field.sinto(s)) - .collect(), - }, - ty::Tuple(_) => ConstantExprKind::Tuple { - fields: fields - .map(|field| field.sinto(s)) - .collect(), - }, - ty::Adt(def, _) => { - let variant_idx = contents - .variant - .s_expect(s, "destructed const of adt without variant idx"); - let variant_def = &def.variant(variant_idx); - - ConstantExprKind::Adt{ - info: get_variant_information(def, variant_idx, s), - fields: fields.into_iter() - .zip(&variant_def.fields) - .map(|(value, field)| ConstantFieldExpr { - field: field.did.sinto(s), - value: value.sinto(s), - }) - .collect(), + (_, ty::Ref(_, inner_ty, _)) => { + ConstantExprKind::Borrow(valtree_to_constant_expr(s, valtree, *inner_ty, span)) + } + (ty::ValTree::Branch(valtrees), ty::Str) => ConstantExprKind::Literal( + 
ConstantLiteral::byte_str(valtrees.iter().map(|x| match x { + ty::ValTree::Leaf(leaf) => leaf.to_u8(), + _ => fatal!(s[span], "Expected a flat list of leaves while translating a str literal, got a arbitrary valtree.") + }).collect(), StrStyle::Cooked)) + , + (ty::ValTree::Branch(_), ty::Array(..) | ty::Tuple(..) | ty::Adt(..)) => { + let contents: rustc_middle::ty::DestructuredConst = s + .base().tcx + .destructure_const(ty::Const::new_value(s.base().tcx, valtree, ty)); + let fields = contents.fields.iter().copied(); + match ty.kind() { + ty::Array(_, _) => ConstantExprKind::Array { + fields: fields + .map(|field| field.sinto(s)) + .collect(), + }, + ty::Tuple(_) => ConstantExprKind::Tuple { + fields: fields + .map(|field| field.sinto(s)) + .collect(), + }, + ty::Adt(def, _) => { + let variant_idx = contents + .variant + .s_expect(s, "destructed const of adt without variant idx"); + let variant_def = &def.variant(variant_idx); + + ConstantExprKind::Adt{ + info: get_variant_information(def, variant_idx, s), + fields: fields.into_iter() + .zip(&variant_def.fields) + .map(|(value, field)| ConstantFieldExpr { + field: field.did.sinto(s), + value: value.sinto(s), + }) + .collect(), + } } + _ => unreachable!(), } - _ => unreachable!(), } - } - (ty::ValTree::Leaf(x), _) => ConstantExprKind::Literal ( - scalar_int_to_constant_literal(s, x, ty) - ), - _ => supposely_unreachable_fatal!( - s[span], "valtree_to_expr"; - {valtree, ty} - ), - }; + (ty::ValTree::Leaf(x), ty::RawPtr(_, _)) => { + use crate::rustc_type_ir::inherent::Ty; + let raw_address = x.to_bits_unchecked(); + let uint_ty = UintTy::Usize; + let usize_ty = rustc_middle::ty::Ty::new_usize(s.base().tcx).sinto(s); + let lit = ConstantLiteral::Int(ConstantInt::Uint(raw_address, uint_ty)); + ConstantExprKind::Cast { + source: ConstantExprKind::Literal(lit).decorate(usize_ty, span.sinto(s)) + } + } + (ty::ValTree::Leaf(x), _) => ConstantExprKind::Literal ( + scalar_int_to_constant_literal(s, x, ty) + ), + _ => 
supposely_unreachable_fatal!( + s[span], "valtree_to_expr"; + {valtree, ty} + ), + }; kind.decorate(ty.sinto(s), span.sinto(s)) } @@ -581,27 +638,43 @@ mod rustc { .s_unwrap(s); // Iterate over the fields, which should be values - assert!(dc.variant.is_none()); - - // The type should be tuple - let hax_ty = ty.sinto(s); - match &hax_ty { - Ty::Tuple(_) => (), - _ => { - fatal!(s[span], "Expected the type to be tuple: {:?}", val) - } - }; - // Below: we are mutually recursive with [const_value_to_constant_expr], // which takes a [Const] as input, but it should be // ok because we call it on a strictly smaller value. - let fields: Vec = dc + let fields = dc .fields .iter() .copied() - .map(|(val, ty)| const_value_to_constant_expr(s, ty, val, span)) - .collect(); - (ConstantExprKind::Tuple { fields }).decorate(hax_ty, span.sinto(s)) + .map(|(val, ty)| const_value_to_constant_expr(s, ty, val, span)); + + // The type should be tuple + let hax_ty: Ty = ty.sinto(s); + match ty.kind() { + ty::TyKind::Tuple(_) => { + assert!(dc.variant.is_none()); + let fields = fields.collect(); + ConstantExprKind::Tuple { fields } + } + ty::TyKind::Adt(adt_def, ..) => { + let variant = dc.variant.unwrap_or(rustc_target::abi::FIRST_VARIANT); + let variants_info = get_variant_information(adt_def, variant, s); + let fields = fields + .zip(&adt_def.variant(variant).fields) + .map(|(value, field)| ConstantFieldExpr { + field: field.did.sinto(s), + value, + }) + .collect(); + ConstantExprKind::Adt { + info: variants_info, + fields, + } + } + _ => { + fatal!(s[span], "Expected the type to be tuple or adt: {:?}", val) + } + } + .decorate(hax_ty, span.sinto(s)) } pub fn const_value_to_constant_expr<'tcx, S: UnderOwnerState<'tcx>>( @@ -632,27 +705,31 @@ mod rustc { } ConstValue::ZeroSized { .. 
} => { // Should be unit - let hty = ty.sinto(s); - let cv = match &hty { - Ty::Tuple(tys) if tys.is_empty() => { + let hty: Ty = ty.sinto(s); + let cv = match ty.kind() { + ty::TyKind::Tuple(tys) if tys.is_empty() => { ConstantExprKind::Tuple { fields: Vec::new() } } - Ty::Arrow(_) => match ty.kind() { - rustc_middle::ty::TyKind::FnDef(def_id, args) => { - let (def_id, generics, generics_impls, method_impl) = - get_function_from_def_id_and_generics(s, *def_id, args); - - ConstantExprKind::FnPtr { - def_id, - generics, - generics_impls, - method_impl, - } + ty::TyKind::FnDef(def_id, args) => { + let (def_id, generics, generics_impls, method_impl) = + get_function_from_def_id_and_generics(s, *def_id, args); + + ConstantExprKind::FnPtr { + def_id, + generics, + generics_impls, + method_impl, } - kind => { - fatal!(s[span], "Unexpected:"; {kind}) + } + ty::TyKind::Adt(adt_def, ..) => { + assert_eq!(adt_def.variants().len(), 1); + let variant = rustc_target::abi::FIRST_VARIANT; + let variants_info = get_variant_information(adt_def, variant, s); + ConstantExprKind::Adt { + info: variants_info, + fields: vec![], } - }, + } _ => { fatal!( s[span], diff --git a/frontend/exporter/src/id_table.rs b/frontend/exporter/src/id_table.rs new file mode 100644 index 000000000..569199eec --- /dev/null +++ b/frontend/exporter/src/id_table.rs @@ -0,0 +1,358 @@ +/// This module provides a notion of table, identifiers and nodes. A +/// `Node` is a `Arc` bundled with a unique identifier such that +/// there exists an entry in a table for that identifier. +/// +/// The type `WithTable` bundles a table with a value of type +/// `T`. That value of type `T` may hold an arbitrary number of +/// `Node<_>`s. In the context of a `WithTable`, the type `Node<_>` +/// serializes and deserializes using a table as a state. In this +/// case, serializing a `Node` produces only an identifier, without +/// any data of type `U`. 
Deserializing a `Node` under a +/// `WithTable` will recover `U` data from the table held by +/// `WithTable`. +/// +/// Serde is not designed for stateful (de)serialization. There is no +/// way of deriving `serde::de::DeserializeSeed` systematically. This +/// module thus makes use of global state to achieve serialization and +/// deserialization. This modules provides an API that hides this +/// global state. +use crate::prelude::*; +use std::{ + hash::{Hash, Hasher}, + sync::{atomic::Ordering, Arc, LazyLock, Mutex, MutexGuard}, +}; + +/// Unique IDs in a ID table. +#[derive_group(Serializers)] +#[derive(Default, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[serde(transparent)] +pub struct Id { + id: u32, +} + +/// A session providing fresh IDs for ID table. +#[derive(Default, Debug)] +pub struct Session { + next_id: Id, + table: Table, +} + +impl Session { + pub fn table(&self) -> &Table { + &self.table + } +} + +/// The different types of values one can store in an ID table. +#[derive(Debug, Clone, Deserialize, Serialize)] +pub enum Value { + Ty(Arc), + DefId(Arc), +} + +impl SupportedType for TyKind { + fn to_types(value: Arc) -> Value { + Value::Ty(value) + } + fn from_types(t: &Value) -> Option> { + match t { + Value::Ty(value) => Some(value.clone()), + _ => None, + } + } +} + +impl SupportedType for DefIdContents { + fn to_types(value: Arc) -> Value { + Value::DefId(value) + } + fn from_types(t: &Value) -> Option> { + match t { + Value::DefId(value) => Some(value.clone()), + _ => None, + } + } +} + +/// A node is a bundle of an ID with a value. 
+#[derive(Deserialize, Serialize, Debug, JsonSchema, PartialEq, Eq, PartialOrd, Ord)] +#[serde(into = "serde_repr::NodeRepr")] +#[serde(try_from = "serde_repr::NodeRepr")] +pub struct Node> { + id: Id, + value: Arc, +} + +impl> std::ops::Deref for Node { + type Target = T; + fn deref(&self) -> &Self::Target { + self.value.as_ref() + } +} + +/// Hax relies on hashes being deterministic for predicates +/// ids. Identifiers are not deterministic: we implement hash for +/// `Node` manually, discarding the field `id`. +impl + Hash> Hash for Node { + fn hash(&self, state: &mut H) { + self.value.as_ref().hash(state); + } +} + +/// Manual implementation of `Clone` that doesn't require a `Clone` +/// bound on `T`. +impl> Clone for Node { + fn clone(&self) -> Self { + Self { + id: self.id.clone(), + value: self.value.clone(), + } + } +} + +/// A table is a map from IDs to `Value`s. When serialized, we +/// represent a table as a *sorted* vector. Indeed, the values stored +/// in the table might reference each other, without cycle, so the +/// order matters. +#[derive(Default, Debug, Clone, Deserialize, Serialize)] +#[serde(into = "serde_repr::SortedIdValuePairs")] +#[serde(from = "serde_repr::SortedIdValuePairs")] +pub struct Table(HeterogeneousMap); + +mod heterogeneous_map { + //! This module provides an heterogenous map that can store types + //! that implement the trait `SupportedType`. + + use std::collections::HashMap; + use std::hash::Hash; + use std::sync::Arc; + #[derive(Clone, Debug)] + /// An heterogenous map is a map from `Key` to `Value`. It provide + /// the methods `insert` and `get` for any type `T` that + /// implements `SupportedType`. 
+ pub struct HeterogeneousMap(HashMap); + + impl Default for HeterogeneousMap { + fn default() -> Self { + Self(HashMap::default()) + } + } + + impl HeterogeneousMap { + pub(super) fn insert(&mut self, key: Key, value: Arc) + where + T: SupportedType, + { + self.insert_raw_value(key, T::to_types(value)); + } + pub(super) fn insert_raw_value(&mut self, key: Key, value: Value) { + self.0.insert(key, value); + } + pub(super) fn from_iter(it: impl Iterator) -> Self { + Self(HashMap::from_iter(it)) + } + pub(super) fn into_iter(self) -> impl Iterator { + self.0.into_iter() + } + pub(super) fn get(&self, key: &Key) -> Option>> + where + T: SupportedType, + { + self.0.get(key).map(T::from_types) + } + } + + /// A type that can be mapped to `Value` and optionally + /// reconstructed back. + pub trait SupportedType: std::fmt::Debug { + fn to_types(value: Arc) -> Value; + fn from_types(t: &Value) -> Option>; + } +} +use heterogeneous_map::*; + +impl Session { + fn fresh_id(&mut self) -> Id { + let id = self.next_id.id; + self.next_id.id += 1; + Id { id } + } +} + +impl> Node { + pub fn new(value: T, session: &mut Session) -> Self { + let id = session.fresh_id(); + let value = Arc::new(value); + session.table.0.insert(id.clone(), value.clone()); + Self { id, value } + } + + pub fn inner(&self) -> &Arc { + &self.value + } +} + +/// Wrapper for a type `T` that creates a bundle containing both a ID +/// table and a value `T`. That value may contains `Node` values +/// inside it. Serializing `WithTable` will serialize IDs only, +/// skipping values. Deserialization of a `WithTable` will +/// automatically use the table and IDs to reconstruct skipped values. +#[derive(Debug)] +pub struct WithTable { + table: Table, + value: T, +} + +/// The state used for deserialization: a table. 
+static DESERIALIZATION_STATE: LazyLock> = + LazyLock::new(|| Mutex::new(Table::default())); +static DESERIALIZATION_STATE_LOCK: LazyLock> = LazyLock::new(|| Mutex::new(())); + +/// The mode of serialization: should `Node` ship values of type `T` or not? +static SERIALIZATION_MODE_USE_IDS: std::sync::atomic::AtomicBool = + std::sync::atomic::AtomicBool::new(false); + +fn serialize_use_id() -> bool { + SERIALIZATION_MODE_USE_IDS.load(Ordering::Relaxed) +} + +impl WithTable { + /// Runs `f` with a `WithTable` created out of `map` and + /// `value`. Any serialization of values of type `Node<_>` will + /// skip the field `value`. + pub fn run(map: Table, value: T, f: impl FnOnce(&Self) -> R) -> R { + if serialize_use_id() { + panic!("CACHE_MAP_LOCK: only one WithTable serialization can occur at a time (nesting is forbidden)") + } + SERIALIZATION_MODE_USE_IDS.store(true, Ordering::Relaxed); + let result = f(&Self { table: map, value }); + SERIALIZATION_MODE_USE_IDS.store(false, Ordering::Relaxed); + result + } + pub fn destruct(self) -> (T, Table) { + let Self { value, table: map } = self; + (value, map) + } +} + +impl Serialize for WithTable { + fn serialize(&self, serializer: S) -> Result { + let mut ts = serializer.serialize_tuple_struct("WithTable", 2)?; + use serde::ser::SerializeTupleStruct; + ts.serialize_field(&self.table)?; + ts.serialize_field(&self.value)?; + ts.end() + } +} + +/// The deserializer of `WithTable` is special. We first decode the +/// table in order: each `(Id, Value)` pair of the table populates the +/// global table state found in `DESERIALIZATION_STATE`. Only then we +/// can decode the value itself, knowing `DESERIALIZATION_STATE` is +/// complete. 
+impl<'de, T: Deserialize<'de>> serde::Deserialize<'de> for WithTable { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let _lock: MutexGuard<_> = DESERIALIZATION_STATE_LOCK.try_lock().expect("CACHE_MAP_LOCK: only one WithTable deserialization can occur at a time (nesting is forbidden)"); + use serde_repr::WithTableRepr; + let previous = std::mem::take(&mut *DESERIALIZATION_STATE.lock().unwrap()); + let with_table_repr = WithTableRepr::deserialize(deserializer); + *DESERIALIZATION_STATE.lock().unwrap() = previous; + let WithTableRepr(table, value) = with_table_repr?; + Ok(Self { table, value }) + } +} + +/// Defines representations for various types when serializing or/and +/// deserializing via serde +mod serde_repr { + use super::*; + + #[derive(Serialize, Deserialize, JsonSchema, Debug)] + pub(super) struct NodeRepr { + cache_id: Id, + value: Option>, + } + + #[derive(Serialize)] + pub(super) struct Pair(Id, Value); + pub(super) type SortedIdValuePairs = Vec; + + #[derive(Serialize, Deserialize)] + pub(super) struct WithTableRepr(pub(super) Table, pub(super) T); + + impl> Into> for Node { + fn into(self) -> NodeRepr { + let value = if serialize_use_id() { + None + } else { + Some(self.value.clone()) + }; + let cache_id = self.id; + NodeRepr { value, cache_id } + } + } + + impl> TryFrom> for Node { + type Error = serde::de::value::Error; + + fn try_from(cached: NodeRepr) -> Result { + use serde::de::Error; + let table = DESERIALIZATION_STATE.lock().unwrap(); + let id = cached.cache_id; + let kind = if let Some(kind) = cached.value { + kind + } else { + table + .0 + .get(&id) + .ok_or_else(|| { + Self::Error::custom(&format!( + "Stateful deserialization failed for id {:?}: not found in cache", + id + )) + })? + .ok_or_else(|| { + Self::Error::custom(&format!( + "Stateful deserialization failed for id {:?}: wrong type", + id + )) + })? 
+ }; + Ok(Self { value: kind, id }) + } + } + + impl<'de> serde::Deserialize<'de> for Pair { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let (id, v) = <(Id, Value)>::deserialize(deserializer)?; + DESERIALIZATION_STATE + .lock() + .unwrap() + .0 + .insert_raw_value(id.clone(), v.clone()); + Ok(Pair(id, v)) + } + } + + impl Into for Table { + fn into(self) -> SortedIdValuePairs { + let mut vec: Vec<_> = self.0.into_iter().map(|(x, y)| Pair(x, y)).collect(); + vec.sort_by_key(|o| o.0.clone()); + vec + } + } + + impl From for Table { + fn from(t: SortedIdValuePairs) -> Self { + Self(HeterogeneousMap::from_iter( + t.into_iter().map(|Pair(x, y)| (x, y)), + )) + } + } +} diff --git a/frontend/exporter/src/lib.rs b/frontend/exporter/src/lib.rs index 35aefe101..49afafd0d 100644 --- a/frontend/exporter/src/lib.rs +++ b/frontend/exporter/src/lib.rs @@ -23,6 +23,7 @@ cfg_feature_rustc! { extern crate rustc_ast; extern crate rustc_ast_pretty; extern crate rustc_attr; + extern crate rustc_apfloat; extern crate rustc_data_structures; extern crate rustc_driver; extern crate rustc_errors; @@ -49,6 +50,7 @@ cfg_feature_rustc! 
{ mod body; mod constant_utils; +pub mod id_table; mod types; mod index_vec; diff --git a/frontend/exporter/src/prelude.rs b/frontend/exporter/src/prelude.rs index c7fd69104..f9be0d4e3 100644 --- a/frontend/exporter/src/prelude.rs +++ b/frontend/exporter/src/prelude.rs @@ -7,6 +7,7 @@ pub use std::rc::Rc; pub use crate::body::*; pub use crate::constant_utils::*; +pub use crate::id_table; pub use crate::index_vec::*; pub use crate::traits::*; pub use crate::types::*; diff --git a/frontend/exporter/src/rustc_utils.rs b/frontend/exporter/src/rustc_utils.rs index 2e8601b86..1fcb671d9 100644 --- a/frontend/exporter/src/rustc_utils.rs +++ b/frontend/exporter/src/rustc_utils.rs @@ -12,47 +12,43 @@ impl<'tcx, T: ty::TypeFoldable>> ty::Binder<'tcx, T> { } } -#[tracing::instrument(skip(s))] -pub(crate) fn arrow_of_sig<'tcx, S: UnderOwnerState<'tcx>>(sig: &ty::PolyFnSig<'tcx>, s: &S) -> Ty { - Ty::Arrow(Box::new(sig.sinto(s))) -} - #[tracing::instrument(skip(s))] pub(crate) fn get_variant_information<'s, S: UnderOwnerState<'s>>( adt_def: &ty::AdtDef<'s>, variant_index: rustc_target::abi::VariantIdx, s: &S, ) -> VariantInformations { - s_assert!(s, !adt_def.is_union() || *CORE_EXTRACTION_MODE); - fn is_record<'s, I: std::iter::Iterator + Clone>(it: I) -> bool { + fn is_named<'s, I: std::iter::Iterator + Clone>(it: I) -> bool { it.clone() .any(|field| field.name.to_ident_string().parse::().is_err()) } let variant_def = adt_def.variant(variant_index); let variant = variant_def.def_id; let constructs_type: DefId = adt_def.did().sinto(s); + let kind = if adt_def.is_struct() { + let named = is_named(adt_def.all_fields()); + VariantKind::Struct { named } + } else if adt_def.is_union() { + VariantKind::Union + } else { + let named = is_named(variant_def.fields.iter()); + let index = variant_index.into(); + VariantKind::Enum { index, named } + }; VariantInformations { typ: constructs_type.clone(), variant: variant.sinto(s), - variant_index: variant_index.into(), - - typ_is_record: 
adt_def.is_struct() && is_record(adt_def.all_fields()), - variant_is_record: is_record(variant_def.fields.iter()), - typ_is_struct: adt_def.is_struct(), - - type_namespace: DefId { - path: match constructs_type.path.as_slice() { - [init @ .., _] => init.to_vec(), - _ => { - let span = s.base().tcx.def_span(variant); - fatal!( - s[span], - "Type {:#?} appears to have no path", - constructs_type - ) - } - }, - ..constructs_type.clone() + kind, + type_namespace: match &constructs_type.parent { + Some(parent) => parent.clone(), + None => { + let span = s.base().tcx.def_span(variant); + fatal!( + s[span], + "Type {:#?} appears to have no parent", + constructs_type + ) + } }, } } @@ -258,3 +254,16 @@ pub fn inline_macro_invocations<'t, S: BaseState<'t>, Body: IsBody>( }) .collect() } + +/// Gets the closest ancestor of `id` that is the id of a type. +pub fn get_closest_parent_type( + tcx: &ty::TyCtxt, + id: rustc_span::def_id::DefId, +) -> rustc_span::def_id::DefId { + match tcx.def_kind(id) { + rustc_hir::def::DefKind::Union + | rustc_hir::def::DefKind::Struct + | rustc_hir::def::DefKind::Enum => id, + _ => get_closest_parent_type(tcx, tcx.parent(id)), + } +} diff --git a/frontend/exporter/src/state.rs b/frontend/exporter/src/state.rs index f94d04fe1..810560e12 100644 --- a/frontend/exporter/src/state.rs +++ b/frontend/exporter/src/state.rs @@ -98,8 +98,8 @@ macro_rules! mk { mod types { use crate::prelude::*; - use std::cell::RefCell; - use std::collections::HashSet; + use rustc_middle::ty; + use std::{cell::RefCell, sync::Arc}; pub struct LocalContextS { pub vars: HashMap, @@ -119,24 +119,51 @@ mod types { } } + /// Global caches + #[derive(Default)] + pub struct GlobalCache<'tcx> { + /// Cache the `Span` translations. + pub spans: HashMap, + /// Per-item cache. + pub per_item: HashMap>, + /// A ID table session, providing fresh IDs. + pub id_table_session: id_table::Session, + } + + /// Defines a mapping from types to types, for use with `TypeMap`. 
+ pub struct FullDefMapper {} + impl TypeMapper for FullDefMapper { + type Value = Arc>; + } + + /// Per-item cache + #[derive(Default)] + pub struct ItemCache<'tcx> { + /// The translated `DefId`. + pub def_id: Option, + /// The translated definitions, generic in the body. + pub full_def: TypeMap, + /// Cache the `Ty` translations. + pub tys: HashMap, Ty>, + /// Cache the trait resolution engine for each item. + pub predicate_searcher: Option>, + /// Cache of trait refs to resolved impl expressions. + pub impl_exprs: HashMap, crate::traits::ImplExpr>, + /// Cache thir bodies. + pub thir: Option<( + Rc>, + rustc_middle::thir::ExprId, + )>, + } + #[derive(Clone)] pub struct Base<'tcx> { pub options: Rc, pub macro_infos: MacroCalls, pub local_ctx: Rc>, pub opt_def_id: Option, - pub exported_spans: ExportedSpans, - pub exported_def_ids: ExportedDefIds, - pub cached_thirs: Rc< - HashMap< - rustc_span::def_id::LocalDefId, - ( - Rc>, - rustc_middle::thir::ExprId, - ), - >, - >, - pub tcx: rustc_middle::ty::TyCtxt<'tcx>, + pub cache: Rc>>, + pub tcx: ty::TyCtxt<'tcx>, /// Rust doesn't enforce bounds on generic parameters in type /// aliases. Thus, when translating type aliases, we need to /// disable the resolution of implementation expressions. For @@ -153,25 +180,21 @@ mod types { Self { tcx, macro_infos: Rc::new(HashMap::new()), - cached_thirs: Rc::new(HashMap::new()), + cache: Default::default(), options: Rc::new(options), // Always prefer `s.owner_id()` to `s.base().opt_def_id`. 
// `opt_def_id` is used in `utils` for error reporting opt_def_id: None, local_ctx: Rc::new(RefCell::new(LocalContextS::new())), - exported_spans: Rc::new(RefCell::new(HashSet::new())), - exported_def_ids: Rc::new(RefCell::new(HashSet::new())), ty_alias_mode: false, } } } pub type MacroCalls = Rc>; - pub type ExportedSpans = Rc>>; - pub type ExportedDefIds = Rc>>; pub type RcThir<'tcx> = Rc>; pub type RcMir<'tcx> = Rc>; - pub type Binder<'tcx> = rustc_middle::ty::Binder<'tcx, ()>; + pub type UnitBinder<'tcx> = rustc_middle::ty::Binder<'tcx, ()>; } mk!( @@ -180,7 +203,7 @@ mk!( thir: {'tcx} types::RcThir, mir: {'tcx} types::RcMir, owner_id: {} rustc_hir::def_id::DefId, - binder: {'tcx} types::Binder, + binder: {'tcx} types::UnitBinder, } ); @@ -189,7 +212,7 @@ pub use self::types::*; pub type StateWithBase<'tcx> = State, (), (), (), ()>; pub type StateWithOwner<'tcx> = State, (), (), rustc_hir::def_id::DefId, ()>; pub type StateWithBinder<'tcx> = - State, (), (), rustc_hir::def_id::DefId, types::Binder<'tcx>>; + State, (), (), rustc_hir::def_id::DefId, types::UnitBinder<'tcx>>; pub type StateWithThir<'tcx> = State, types::RcThir<'tcx>, (), rustc_hir::def_id::DefId, ()>; pub type StateWithMir<'tcx> = @@ -294,6 +317,31 @@ pub trait UnderBinderState<'tcx> = UnderOwnerState<'tcx> + HasBinder<'tcx>; /// body and an `owner_id` in the state pub trait ExprState<'tcx> = UnderOwnerState<'tcx> + HasThir<'tcx>; +pub trait WithGlobalCacheExt<'tcx>: BaseState<'tcx> { + /// Access the global cache. You must not call `sinto` within this function as this will likely + /// result in `BorrowMut` panics. + fn with_global_cache(&self, f: impl FnOnce(&mut GlobalCache<'tcx>) -> T) -> T { + let base = self.base(); + let mut cache = base.cache.borrow_mut(); + f(&mut *cache) + } + /// Access the cache for a given item. You must not call `sinto` within this function as this + /// will likely result in `BorrowMut` panics. 
+ fn with_item_cache(&self, def_id: RDefId, f: impl FnOnce(&mut ItemCache<'tcx>) -> T) -> T { + self.with_global_cache(|cache| f(cache.per_item.entry(def_id).or_default())) + } +} +impl<'tcx, S: BaseState<'tcx>> WithGlobalCacheExt<'tcx> for S {} + +pub trait WithItemCacheExt<'tcx>: UnderOwnerState<'tcx> { + /// Access the cache for the current item. You must not call `sinto` within this function as + /// this will likely result in `BorrowMut` panics. + fn with_cache(&self, f: impl FnOnce(&mut ItemCache<'tcx>) -> T) -> T { + self.with_item_cache(self.owner_id(), f) + } +} +impl<'tcx, S: UnderOwnerState<'tcx>> WithItemCacheExt<'tcx> for S {} + impl ImplInfos { fn from(base: Base<'_>, did: rustc_hir::def_id::DefId) -> Self { let tcx = base.tcx; @@ -302,8 +350,11 @@ impl ImplInfos { Self { generics: tcx.generics_of(did).sinto(s), typ: tcx.type_of(did).instantiate_identity().sinto(s), - trait_ref: tcx.impl_trait_ref(did).sinto(s), - clauses: tcx.predicates_defined_on(did).predicates.sinto(s), + trait_ref: tcx + .impl_trait_ref(did) + .map(|trait_ref| trait_ref.instantiate_identity()) + .sinto(s), + clauses: predicates_defined_on(tcx, did).predicates.sinto(s), } } } @@ -313,13 +364,9 @@ impl ImplInfos { pub fn impl_def_ids_to_impled_types_and_bounds<'tcx, S: BaseState<'tcx>>( s: &S, ) -> HashMap { - let Base { - tcx, - exported_def_ids, - .. 
- } = s.base(); + let tcx = s.base().tcx; - let def_ids = exported_def_ids.as_ref().borrow().clone(); + let def_ids: Vec<_> = s.with_global_cache(|cache| cache.per_item.keys().copied().collect()); let with_parents = |mut did: rustc_hir::def_id::DefId| { let mut acc = vec![did]; while let Some(parent) = tcx.opt_parent(did) { @@ -330,8 +377,7 @@ pub fn impl_def_ids_to_impled_types_and_bounds<'tcx, S: BaseState<'tcx>>( }; use itertools::Itertools; def_ids - .iter() - .cloned() + .into_iter() .flat_map(with_parents) .unique() .filter(|&did| { diff --git a/frontend/exporter/src/traits.rs b/frontend/exporter/src/traits.rs index a04b6fafa..1ba78515d 100644 --- a/frontend/exporter/src/traits.rs +++ b/frontend/exporter/src/traits.rs @@ -1,23 +1,51 @@ use crate::prelude::*; +#[cfg(feature = "rustc")] +mod resolution; +#[cfg(feature = "rustc")] +mod utils; +#[cfg(feature = "rustc")] +pub use utils::{ + erase_and_norm, implied_predicates, predicates_defined_on, required_predicates, self_predicate, +}; + +#[cfg(feature = "rustc")] +pub use resolution::PredicateSearcher; +#[cfg(feature = "rustc")] +use rustc_middle::ty; +#[cfg(feature = "rustc")] +use rustc_span::def_id::DefId as RDefId; + #[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc::PathChunk<'tcx>, state: S as s)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::PathChunk<'tcx>, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)] pub enum ImplExprPathChunk { AssocItem { item: AssocItem, + /// The arguments provided to the item (for GATs). + generic_args: Vec, + /// The impl exprs that must be satisfied to apply the given arguments to the item. E.g. + /// `T: Clone` in the following example: + /// ```ignore + /// trait Foo { + /// type Type: Debug; + /// } + /// ``` + impl_exprs: Vec, + /// The implemented predicate. 
predicate: Binder, #[value(<_ as SInto<_, Clause>>::sinto(predicate, s).id)] predicate_id: PredicateId, - /// The nth predicate returned by `tcx.item_bounds`. + /// The index of this predicate in the list returned by `implied_predicates`. index: usize, }, Parent { + /// The implemented predicate. predicate: Binder, #[value(<_ as SInto<_, Clause>>::sinto(predicate, s).id)] predicate_id: PredicateId, - /// The nth predicate returned by `tcx.predicates_of`. + /// The index of this predicate in the list returned by `implied_predicates`. index: usize, }, } @@ -25,7 +53,7 @@ pub enum ImplExprPathChunk { /// The source of a particular trait implementation. Most often this is either `Concrete` for a /// concrete `impl Trait for Type {}` item, or `LocalBound` for a context-bound `where T: Trait`. #[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc::ImplExprAtom<'tcx>, state: S as s)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::ImplExprAtom<'tcx>, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)] pub enum ImplExprAtom { @@ -44,8 +72,7 @@ pub enum ImplExprAtom { })] predicate_id: PredicateId, /// The nth (non-self) predicate found for this item. We use predicates from - /// `tcx.predicates_defined_on` starting from the parentmost item. If the item is an opaque - /// type, we also append the predicates from `explicit_item_bounds` to this list. + /// `required_predicates` starting from the parentmost item. index: usize, r#trait: Binder, path: Vec, @@ -75,7 +102,7 @@ pub enum ImplExprAtom { /// concrete implementations for `u8` and `&str`, represented as a tree. 
#[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema, AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc::ImplExpr<'tcx>, state: S as s)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::ImplExpr<'tcx>, state: S as s)] pub struct ImplExpr { /// The trait this is an impl for. pub r#trait: Binder, @@ -85,616 +112,6 @@ pub struct ImplExpr { pub args: Vec, } -#[cfg(feature = "rustc")] -pub mod rustc { - use rustc_hir::def::DefKind; - use rustc_hir::def_id::DefId; - use rustc_middle::ty::*; - - /// Items have various predicates in scope. `path_to` uses them as a starting point for trait - /// resolution. This tracks where each of them comes from. - #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] - pub enum BoundPredicateOrigin { - /// The `Self: Trait` predicate implicitly present within trait declarations (note: we - /// don't add it for trait implementations, should we?). - SelfPred, - /// The nth (non-self) predicate found for this item. We use predicates from - /// `tcx.predicates_defined_on` starting from the parentmost item. If the item is an opaque - /// type, we also append the predicates from `explicit_item_bounds` to this list. - Item(usize), - } - - #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] - pub struct AnnotatedTraitPred<'tcx> { - pub origin: BoundPredicateOrigin, - pub clause: PolyTraitPredicate<'tcx>, - } - - /// Just like `TyCtxt::predicates_of`, but in the case of a trait or impl item or closures, - /// also includes the predicates defined on the parents. Also this returns the special - /// `Self` clause separately. - fn predicates_of_or_above<'tcx>( - tcx: TyCtxt<'tcx>, - did: rustc_span::def_id::DefId, - ) -> ( - Vec>, - Option>, - ) { - use DefKind::*; - let def_kind = tcx.def_kind(did); - - let (mut predicates, mut self_pred) = match def_kind { - // These inherit some predicates from their parent. 
- AssocTy | AssocFn | AssocConst | Closure => { - let parent = tcx.parent(did); - predicates_of_or_above(tcx, parent) - } - _ => (vec![], None), - }; - - match def_kind { - // Don't list the predicates of traits, we already list the `Self` clause from - // which we can resolve anything needed. - Trait => {} - AssocConst - | AssocFn - | AssocTy - | Const - | Enum - | Fn - | ForeignTy - | Impl { .. } - | OpaqueTy - | Static { .. } - | Struct - | TraitAlias - | TyAlias - | Union => { - // Only these kinds may reasonably have predicates; we have to filter - // otherwise calling `predicates_defined_on` may ICE. - predicates.extend( - tcx.predicates_defined_on(did) - .predicates - .iter() - .filter_map(|(clause, _span)| clause.as_trait_clause()), - ); - } - _ => {} - } - - // Add some extra predicates that aren't in `predicates_defined_on`. - match def_kind { - OpaqueTy => { - // An opaque type (e.g. `impl Trait`) provides predicates by itself: we need to - // account for them. - // TODO: is this still useful? The test that used to require this doesn't anymore. - predicates.extend( - tcx.explicit_item_bounds(did) - .skip_binder() // Skips an `EarlyBinder`, likely for GATs - .iter() - .filter_map(|(clause, _span)| clause.as_trait_clause()), - ) - } - Trait => { - // Add the special `Self: Trait` clause. - // Copied from the code of `tcx.predicates_of()`. - let self_clause: Clause<'_> = TraitRef::identity(tcx, did).upcast(tcx); - self_pred = Some(self_clause.as_trait_clause().unwrap()); - } - _ => {} - } - - (predicates, self_pred) - } - - /// The predicates to use as a starting point for resolving trait references within this - /// item. This is just like `TyCtxt::predicates_of`, but in the case of a trait or impl - /// item or closures, also includes the predicates defined on the parents. 
- fn initial_search_predicates<'tcx>( - tcx: TyCtxt<'tcx>, - did: rustc_span::def_id::DefId, - ) -> Vec> { - let (predicates, self_pred) = predicates_of_or_above(tcx, did); - let predicates = predicates - .into_iter() - .enumerate() - .map(|(i, clause)| AnnotatedTraitPred { - origin: BoundPredicateOrigin::Item(i), - clause, - }); - let self_pred = self_pred.map(|clause| AnnotatedTraitPred { - origin: BoundPredicateOrigin::SelfPred, - clause, - }); - - self_pred.into_iter().chain(predicates).collect() - } - - // FIXME: this has visibility `pub(crate)` only because of https://github.com/rust-lang/rust/issues/83049 - pub(crate) mod search_clause { - use super::{AnnotatedTraitPred, Path, PathChunk}; - use itertools::Itertools; - use rustc_hir::def_id::DefId; - use rustc_middle::ty::*; - use std::collections::{hash_map::Entry, HashMap}; - - /// Erase all regions. Largely copied from `tcx.erase_regions`. - fn erase_all_regions<'tcx, T>(tcx: TyCtxt<'tcx>, value: T) -> T - where - T: TypeFoldable>, - { - use rustc_middle::ty; - struct RegionEraserVisitor<'tcx> { - tcx: TyCtxt<'tcx>, - } - - impl<'tcx> TypeFolder> for RegionEraserVisitor<'tcx> { - fn cx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - ty.super_fold_with(self) - } - - fn fold_binder(&mut self, t: ty::Binder<'tcx, T>) -> ty::Binder<'tcx, T> - where - T: TypeFoldable>, - { - // Empty the binder - Binder::dummy(t.skip_binder().fold_with(self)) - } - - fn fold_region(&mut self, _r: ty::Region<'tcx>) -> ty::Region<'tcx> { - // We erase bound regions despite it being possibly incorrect. `for<'a> fn(&'a - // ())` and `fn(&'free ())` are different types: they may implement different - // traits and have a different `TypeId`. It's unclear whether this can cause us - // to select the wrong trait reference. - self.tcx.lifetimes.re_erased - } - } - value.fold_with(&mut RegionEraserVisitor { tcx }) - } - - // Lifetimes are irrelevant when resolving instances. 
- pub(super) fn erase_and_norm<'tcx, T>( - tcx: TyCtxt<'tcx>, - param_env: ParamEnv<'tcx>, - x: T, - ) -> T - where - T: TypeFoldable> + Copy, - { - erase_all_regions( - tcx, - tcx.try_normalize_erasing_regions(param_env, x).unwrap_or(x), - ) - } - - #[tracing::instrument(level = "trace", skip(tcx))] - fn parents_trait_predicates<'tcx>( - tcx: TyCtxt<'tcx>, - pred: PolyTraitPredicate<'tcx>, - ) -> Vec> { - let self_trait_ref = pred.to_poly_trait_ref(); - tcx.predicates_of(pred.def_id()) - .predicates - .iter() - // Substitute with the `self` args so that the clause makes sense in the - // outside context. - .map(|(clause, _span)| clause.instantiate_supertrait(tcx, self_trait_ref)) - .filter_map(|pred| pred.as_trait_clause()) - .collect() - } - - /// A candidate projects `self` along a path reaching some predicate. A candidate is - /// selected when its predicate is the one expected, aka `target`. - #[derive(Debug, Clone)] - struct Candidate<'tcx> { - path: Path<'tcx>, - pred: PolyTraitPredicate<'tcx>, - origin: AnnotatedTraitPred<'tcx>, - } - - /// Stores a set of predicates along with where they came from. - struct PredicateSearcher<'tcx> { - tcx: TyCtxt<'tcx>, - param_env: rustc_middle::ty::ParamEnv<'tcx>, - candidates: HashMap, Candidate<'tcx>>, - } - - impl<'tcx> PredicateSearcher<'tcx> { - /// Initialize the elaborator with the predicates accessible within this item. - fn new_for_owner( - tcx: TyCtxt<'tcx>, - param_env: rustc_middle::ty::ParamEnv<'tcx>, - owner_id: DefId, - ) -> Self { - let mut out = Self { - tcx, - param_env, - candidates: Default::default(), - }; - out.extend( - super::initial_search_predicates(tcx, owner_id) - .into_iter() - .map(|clause| Candidate { - path: vec![], - pred: clause.clause, - origin: clause, - }), - ); - out - } - - /// Insert new candidates and all their parent predicates. This deduplicates predicates - /// to avoid divergence. 
- fn extend(&mut self, candidates: impl IntoIterator>) { - let tcx = self.tcx; - // Filter out duplicated candidates. - let mut new_candidates = Vec::new(); - for mut candidate in candidates { - // Normalize and erase all lifetimes. - candidate.pred = erase_and_norm(tcx, self.param_env, candidate.pred); - if let Entry::Vacant(entry) = self.candidates.entry(candidate.pred) { - entry.insert(candidate.clone()); - new_candidates.push(candidate); - } - } - if !new_candidates.is_empty() { - self.extend_parents(new_candidates); - } - } - - /// Add the parents of these candidates. This is a separate function to avoid - /// polymorphic recursion due to the closures capturing the type parameters of this - /// function. - fn extend_parents(&mut self, new_candidates: Vec>) { - let tcx = self.tcx; - // Then recursively add their parents. This way ensures a breadth-first order, - // which means we select the shortest path when looking up predicates. - self.extend(new_candidates.into_iter().flat_map(|candidate| { - parents_trait_predicates(tcx, candidate.pred) - .into_iter() - .enumerate() - .map(move |(index, parent_pred)| { - let mut parent_candidate = Candidate { - pred: parent_pred, - path: candidate.path.clone(), - origin: candidate.origin, - }; - parent_candidate.path.push(PathChunk::Parent { - predicate: parent_pred, - index, - }); - parent_candidate - }) - })); - } - - /// Lookup a predicate in this set. If the predicate applies to an associated type, we - /// add the relevant implied associated type bounds to the set as well. - fn lookup(&mut self, target: PolyTraitRef<'tcx>) -> Option<&Candidate<'tcx>> { - let tcx = self.tcx; - let target: PolyTraitPredicate = - erase_and_norm(tcx, self.param_env, target).upcast(tcx); - - // The predicate is `::Type: OtherTrait`. We look up `T as Trait` in - // the current context and add all the bounds on `Trait::Type` to our context. - // Note: We skip a binder but rebind it just after. 
- if let TyKind::Alias(AliasTyKind::Projection, alias_ty) = - target.self_ty().skip_binder().kind() - { - let trait_ref = target.rebind(alias_ty.trait_ref(tcx)); - // Recursively look up the trait ref inside `self`. - let trait_candidate = self.lookup(trait_ref)?.clone(); - let item_bounds = tcx - // TODO: `item_bounds` can contain parent traits, we don't want them - .item_bounds(alias_ty.def_id) - .instantiate(tcx, alias_ty.args) - .iter() - .filter_map(|pred| pred.as_trait_clause()) - .enumerate(); - // Add all the bounds on the corresponding associated item. - self.extend(item_bounds.map(|(index, pred)| { - let mut candidate = Candidate { - path: trait_candidate.path.clone(), - pred, - origin: trait_candidate.origin, - }; - candidate.path.push(PathChunk::AssocItem { - item: tcx.associated_item(alias_ty.def_id), - predicate: pred, - index, - }); - candidate - })); - } - - tracing::trace!("Looking for {target:?}"); - let ret = self.candidates.get(&target); - if ret.is_none() { - tracing::trace!( - "Couldn't find {target:?} in: [\n{}]", - self.candidates - .iter() - .map(|(_, c)| format!(" - {:?}\n", c.pred)) - .join("") - ); - } - ret - } - } - - #[tracing::instrument(level = "trace", skip(tcx, param_env))] - pub(super) fn path_to<'tcx>( - tcx: TyCtxt<'tcx>, - owner_id: DefId, - param_env: rustc_middle::ty::ParamEnv<'tcx>, - target: PolyTraitRef<'tcx>, - ) -> Option<(Path<'tcx>, AnnotatedTraitPred<'tcx>)> { - let mut searcher = PredicateSearcher::new_for_owner(tcx, param_env, owner_id); - let candidate = searcher.lookup(target)?; - Some((candidate.path.clone(), candidate.origin)) - } - } - - #[derive(Debug, Clone)] - pub enum PathChunk<'tcx> { - AssocItem { - item: AssocItem, - predicate: PolyTraitPredicate<'tcx>, - /// The nth predicate returned by `tcx.item_bounds`. - index: usize, - }, - Parent { - predicate: PolyTraitPredicate<'tcx>, - /// The nth predicate returned by `tcx.predicates_of`. 
- index: usize, - }, - } - pub type Path<'tcx> = Vec>; - - #[derive(Debug, Clone)] - pub enum ImplExprAtom<'tcx> { - /// A concrete `impl Trait for Type {}` item. - Concrete { - def_id: DefId, - generics: GenericArgsRef<'tcx>, - }, - /// A context-bound clause like `where T: Trait`. - LocalBound { - predicate: Predicate<'tcx>, - /// The nth (non-self) predicate found for this item. We use predicates from - /// `tcx.predicates_defined_on` starting from the parentmost item. If the item is an - /// opaque type, we also append the predicates from `explicit_item_bounds` to this - /// list. - index: usize, - r#trait: PolyTraitRef<'tcx>, - path: Path<'tcx>, - }, - /// The automatic clause `Self: Trait` present inside a `impl Trait for Type {}` item. - SelfImpl { - r#trait: PolyTraitRef<'tcx>, - path: Path<'tcx>, - }, - /// `dyn Trait` is a wrapped value with a virtual table for trait - /// `Trait`. In other words, a value `dyn Trait` is a dependent - /// triple that gathers a type τ, a value of type τ and an - /// instance of type `Trait`. - /// `dyn Trait` implements `Trait` using a built-in implementation; this refers to that - /// built-in implementation. - Dyn, - /// A built-in trait whose implementation is computed by the compiler, such as `Sync`. - Builtin { r#trait: PolyTraitRef<'tcx> }, - /// An error happened while resolving traits. - Error(String), - } - - #[derive(Clone, Debug)] - pub struct ImplExpr<'tcx> { - /// The trait this is an impl for. - pub r#trait: PolyTraitRef<'tcx>, - /// The kind of implemention of the root of the tree. - pub r#impl: ImplExprAtom<'tcx>, - /// A list of `ImplExpr`s required to fully specify the trait references in `impl`. 
- pub args: Vec, - } - - #[tracing::instrument(level = "trace", skip(tcx, warn))] - fn impl_exprs<'tcx>( - tcx: TyCtxt<'tcx>, - owner_id: DefId, - obligations: &[rustc_trait_selection::traits::Obligation< - 'tcx, - rustc_middle::ty::Predicate<'tcx>, - >], - warn: &impl Fn(&str), - ) -> Result>, String> { - obligations - .iter() - // Only keep depth-1 obligations to avoid duplicate impl exprs. - .filter(|obligation| obligation.recursion_depth == 1) - .filter_map(|obligation| { - obligation.predicate.as_trait_clause().map(|trait_ref| { - impl_expr( - tcx, - owner_id, - obligation.param_env, - &trait_ref.map_bound(|p| p.trait_ref), - warn, - ) - }) - }) - .collect() - } - - #[tracing::instrument(level = "trace", skip(tcx, param_env, warn))] - pub(super) fn impl_expr<'tcx>( - tcx: TyCtxt<'tcx>, - owner_id: DefId, - param_env: rustc_middle::ty::ParamEnv<'tcx>, - tref: &rustc_middle::ty::PolyTraitRef<'tcx>, - // Call back into hax-related code to display a nice warning. - warn: &impl Fn(&str), - ) -> Result, String> { - use rustc_trait_selection::traits::{ - BuiltinImplSource, ImplSource, ImplSourceUserDefinedData, - }; - - let impl_source = copy_paste_from_rustc::codegen_select_candidate(tcx, (param_env, *tref)); - let atom = match impl_source { - Ok(ImplSource::UserDefined(ImplSourceUserDefinedData { - impl_def_id, - args: generics, - .. 
- })) => ImplExprAtom::Concrete { - def_id: impl_def_id, - generics, - }, - Ok(ImplSource::Param(_)) => { - match search_clause::path_to(tcx, owner_id, param_env, *tref) { - Some((path, apred)) => { - let r#trait = apred.clause.to_poly_trait_ref(); - match apred.origin { - BoundPredicateOrigin::SelfPred => { - ImplExprAtom::SelfImpl { r#trait, path } - } - BoundPredicateOrigin::Item(index) => ImplExprAtom::LocalBound { - predicate: apred.clause.upcast(tcx), - index, - r#trait, - path, - }, - } - } - None => { - let msg = format!( - "Could not find a clause for `{tref:?}` in the item parameters" - ); - warn(&msg); - ImplExprAtom::Error(msg) - } - } - } - Ok(ImplSource::Builtin(BuiltinImplSource::Object { .. }, _)) => ImplExprAtom::Dyn, - Ok(ImplSource::Builtin(_, _)) => ImplExprAtom::Builtin { r#trait: *tref }, - Err(e) => { - let msg = format!( - "Could not find a clause for `{tref:?}` in the current context: `{e:?}`" - ); - warn(&msg); - ImplExprAtom::Error(msg) - } - }; - - let nested = match &impl_source { - Ok(ImplSource::UserDefined(ImplSourceUserDefinedData { nested, .. })) => { - nested.as_slice() - } - Ok(ImplSource::Param(nested)) => nested.as_slice(), - // We ignore the contained obligations here. For example for `(): Send`, the - // obligations contained would be `[(): Send]`, which leads to an infinite loop. There - // might be important obligations here in other cases; we'll have to see if that comes - // up. 
- Ok(ImplSource::Builtin(_, _ignored)) => &[], - Err(_) => &[], - }; - let nested = impl_exprs(tcx, owner_id, nested, warn)?; - - Ok(ImplExpr { - r#impl: atom, - args: nested, - r#trait: *tref, - }) - } - - mod copy_paste_from_rustc { - use rustc_infer::infer::TyCtxtInferExt; - use rustc_middle::traits::CodegenObligationError; - use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt}; - use rustc_trait_selection::error_reporting::InferCtxtErrorExt; - use rustc_trait_selection::traits::{ - Obligation, ObligationCause, ObligationCtxt, ScrubbedTraitError, SelectionContext, - Unimplemented, - }; - - /// Attempts to resolve an obligation to an `ImplSource`. The result is - /// a shallow `ImplSource` resolution, meaning that we do not - /// (necessarily) resolve all nested obligations on the impl. Note - /// that type check should guarantee to us that all nested - /// obligations *could be* resolved if we wanted to. - /// - /// This also expects that `trait_ref` is fully normalized. - pub fn codegen_select_candidate<'tcx>( - tcx: TyCtxt<'tcx>, - (param_env, trait_ref): (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>), - ) -> Result, CodegenObligationError> - { - let trait_ref = super::search_clause::erase_and_norm(tcx, param_env, trait_ref); - - // Do the initial selection for the obligation. This yields the - // shallow result we are looking for -- that is, what specific impl. 
- let infcx = tcx.infer_ctxt().ignoring_regions().build(); - let mut selcx = SelectionContext::new(&infcx); - - let obligation_cause = ObligationCause::dummy(); - let obligation = Obligation::new(tcx, obligation_cause, param_env, trait_ref); - - let selection = match selcx.poly_select(&obligation) { - Ok(Some(selection)) => selection, - Ok(None) => return Err(CodegenObligationError::Ambiguity), - Err(Unimplemented) => return Err(CodegenObligationError::Unimplemented), - Err(e) => { - panic!( - "Encountered error `{:?}` selecting `{:?}` during codegen", - e, trait_ref - ) - } - }; - - // Currently, we use a fulfillment context to completely resolve - // all nested obligations. This is because they can inform the - // inference of the impl's type parameters. - // FIXME(-Znext-solver): Doesn't need diagnostics if new solver. - let ocx = ObligationCtxt::new(&infcx); - let impl_source = selection.map(|obligation| { - ocx.register_obligation(obligation.clone()); - obligation - }); - - // In principle, we only need to do this so long as `impl_source` - // contains unbound type parameters. It could be a slight - // optimization to stop iterating early. - let errors = ocx.select_all_or_error(); - if !errors.is_empty() { - // `rustc_monomorphize::collector` assumes there are no type errors. - // Cycle errors are the only post-monomorphization errors possible; emit them now so - // `rustc_ty_utils::resolve_associated_item` doesn't return `None` post-monomorphization. - for err in errors { - if let ScrubbedTraitError::Cycle(cycle) = err { - infcx.err_ctxt().report_overflow_obligation_cycle(&cycle); - } - } - return Err(CodegenObligationError::FulfillmentError); - } - - let impl_source = infcx.resolve_vars_if_possible(impl_source); - let impl_source = infcx.tcx.erase_regions(impl_source); - - if impl_source.has_infer() { - // Unused lifetimes on an impl get replaced with inference vars, but never resolved, - // causing the return value of a query to contain inference vars. 
We do not have a concept - // for this and will in fact ICE in stable hashing of the return value. So bail out instead. - infcx.tcx.dcx().has_errors().unwrap(); - return Err(CodegenObligationError::FulfillmentError); - } - - Ok(impl_source) - } - } -} - /// Given a clause `clause` in the context of some impl block `impl_did`, susbts correctly `Self` /// from `clause` and (1) derive a `Clause` and (2) resolve an `ImplExpr`. #[cfg(feature = "rustc")] @@ -735,60 +152,79 @@ pub fn solve_trait<'tcx, S: BaseState<'tcx> + HasOwnerId>( s: &S, trait_ref: rustc_middle::ty::PolyTraitRef<'tcx>, ) -> ImplExpr { - use crate::ParamEnv; let warn = |msg: &str| { if !s.base().ty_alias_mode { crate::warning!(s, "{}", msg) } }; - match rustc::impl_expr(s.base().tcx, s.owner_id(), s.param_env(), &trait_ref, &warn) { + if let Some(impl_expr) = s.with_cache(|cache| cache.impl_exprs.get(&trait_ref).cloned()) { + return impl_expr; + } + let resolved = s.with_cache(|cache| { + cache + .predicate_searcher + .get_or_insert_with(|| PredicateSearcher::new_for_owner(s.base().tcx, s.owner_id())) + .resolve(&trait_ref, &warn) + }); + let impl_expr = match resolved { Ok(x) => x.sinto(s), Err(e) => crate::fatal!(s, "{}", e), - } + }; + s.with_cache(|cache| cache.impl_exprs.insert(trait_ref, impl_expr.clone())); + impl_expr } -/// Solve the trait obligations for a specific item use (for example, a method call, an ADT, etc.). -/// -/// [predicates]: optional predicates, in case we want to solve custom predicates (instead of the -/// ones returned by [TyCtxt::predicates_defined_on]. +/// Solve the trait obligations for a specific item use (for example, a method call, an ADT, etc.) +/// in the current context. 
#[cfg(feature = "rustc")] #[tracing::instrument(level = "trace", skip(s), ret)] -pub fn solve_item_traits<'tcx, S: UnderOwnerState<'tcx>>( +pub fn solve_item_required_traits<'tcx, S: UnderOwnerState<'tcx>>( s: &S, - def_id: rustc_hir::def_id::DefId, - generics: rustc_middle::ty::GenericArgsRef<'tcx>, - predicates: Option>, + def_id: RDefId, + generics: ty::GenericArgsRef<'tcx>, +) -> Vec { + let predicates = required_predicates(s.base().tcx, def_id); + solve_item_traits_inner(s, generics, predicates) +} + +/// Solve the trait obligations for implementing a trait (or for trait associated type bounds) in +/// the current context. +#[cfg(feature = "rustc")] +#[tracing::instrument(level = "trace", skip(s), ret)] +pub fn solve_item_implied_traits<'tcx, S: UnderOwnerState<'tcx>>( + s: &S, + def_id: RDefId, + generics: ty::GenericArgsRef<'tcx>, ) -> Vec { + let predicates = implied_predicates(s.base().tcx, def_id); + solve_item_traits_inner(s, generics, predicates) +} + +/// Apply the given generics to the provided clauses and resolve the trait references in the +/// current context. +#[cfg(feature = "rustc")] +fn solve_item_traits_inner<'tcx, S: UnderOwnerState<'tcx>>( + s: &S, + generics: ty::GenericArgsRef<'tcx>, + predicates: impl Iterator>, +) -> Vec { + use crate::rustc_middle::ty::ToPolyTraitRef; let tcx = s.base().tcx; let param_env = s.param_env(); - let mut impl_exprs = Vec::new(); - - // Lookup the predicates and iter through them: we want to solve all the - // trait requirements. 
- // IMPORTANT: we use [TyCtxt::predicates_defined_on] and not [TyCtxt::predicated_of] - let predicates = match predicates { - None => tcx.predicates_defined_on(def_id), - Some(preds) => preds, - }; - for (pred, _) in predicates.predicates { - // Explore only the trait predicates - if let Some(trait_clause) = pred.as_trait_clause() { - let poly_trait_ref = trait_clause.map_bound(|clause| clause.trait_ref); - // Apply the substitution - let poly_trait_ref = - rustc_middle::ty::EarlyBinder::bind(poly_trait_ref).instantiate(tcx, generics); - // Warning: this erases regions. We don't really have a way to normalize without - // erasing regions, but this may cause problems in trait solving if there are trait - // impls that include `'static` lifetimes. - let poly_trait_ref = tcx - .try_normalize_erasing_regions(param_env, poly_trait_ref) - .unwrap_or(poly_trait_ref); - let impl_expr = solve_trait(s, poly_trait_ref); - impl_exprs.push(impl_expr); - } - } - impl_exprs + predicates + .filter_map(|clause| clause.as_trait_clause()) + .map(|clause| clause.to_poly_trait_ref()) + // Substitute the item generics + .map(|trait_ref| ty::EarlyBinder::bind(trait_ref).instantiate(tcx, generics)) + // We unfortunately don't have a way to normalize without erasing regions. + .map(|trait_ref| { + tcx.try_normalize_erasing_regions(param_env, trait_ref) + .unwrap_or(trait_ref) + }) + // Resolve + .map(|trait_ref| solve_trait(s, trait_ref)) + .collect() } /// Retrieve the `Self: Trait` clause for a trait associated item. 
@@ -798,18 +234,16 @@ pub fn self_clause_for_item<'tcx, S: UnderOwnerState<'tcx>>( assoc: &rustc_middle::ty::AssocItem, generics: rustc_middle::ty::GenericArgsRef<'tcx>, ) -> Option { + use rustc_middle::ty::EarlyBinder; let tcx = s.base().tcx; - // Retrieve the trait let tr_def_id = tcx.trait_of_item(assoc.def_id)?; - - // Create the reference to the trait - use rustc_middle::ty::TraitRef; - let tr_generics = tcx.generics_of(tr_def_id); - let generics = generics.truncate_to(tcx, tr_generics); - let tr_ref = TraitRef::new(tcx, tr_def_id, generics); - let tr_ref = rustc_middle::ty::Binder::dummy(tr_ref); - - // Solve - Some(solve_trait(s, tr_ref)) + // The "self" predicate in the context of the trait. + let self_pred = self_predicate(tcx, tr_def_id).unwrap(); + // Substitute to be in the context of the current item. + let generics = generics.truncate_to(tcx, tcx.generics_of(tr_def_id)); + let self_pred = EarlyBinder::bind(self_pred).instantiate(tcx, generics); + + // Resolve + Some(solve_trait(s, self_pred)) } diff --git a/frontend/exporter/src/traits/resolution.rs b/frontend/exporter/src/traits/resolution.rs new file mode 100644 index 000000000..f7c67a86b --- /dev/null +++ b/frontend/exporter/src/traits/resolution.rs @@ -0,0 +1,495 @@ +//! Trait resolution: given a trait reference, we track which local clause caused it to be true. +//! This module is independent from the rest of hax, in particular it doesn't use its +//! state-tracking machinery. 
+ +use itertools::Itertools; +use std::collections::{hash_map::Entry, HashMap}; + +use rustc_hir::def::DefKind; +use rustc_hir::def_id::DefId; +use rustc_middle::traits::CodegenObligationError; +use rustc_middle::ty::*; +use rustc_trait_selection::traits::ImplSource; + +use crate::{self_predicate, traits::utils::erase_and_norm}; + +use super::utils::{implied_predicates, required_predicates}; + +#[derive(Debug, Clone)] +pub enum PathChunk<'tcx> { + AssocItem { + item: AssocItem, + /// The arguments provided to the item (for GATs). + generic_args: &'tcx [GenericArg<'tcx>], + /// The impl exprs that must be satisfied to apply the given arguments to the item. E.g. + /// `T: Clone` in the following example: + /// ```ignore + /// trait Foo { + /// type Type: Debug; + /// } + /// ``` + impl_exprs: Vec>, + /// The implemented predicate. + predicate: PolyTraitPredicate<'tcx>, + /// The index of this predicate in the list returned by `implied_predicates`. + index: usize, + }, + Parent { + /// The implemented predicate. + predicate: PolyTraitPredicate<'tcx>, + /// The index of this predicate in the list returned by `implied_predicates`. + index: usize, + }, +} +pub type Path<'tcx> = Vec>; + +#[derive(Debug, Clone)] +pub enum ImplExprAtom<'tcx> { + /// A concrete `impl Trait for Type {}` item. + Concrete { + def_id: DefId, + generics: GenericArgsRef<'tcx>, + }, + /// A context-bound clause like `where T: Trait`. + LocalBound { + predicate: Predicate<'tcx>, + /// The nth (non-self) predicate found for this item. We use predicates from + /// `required_predicates` starting from the parentmost item. + index: usize, + r#trait: PolyTraitRef<'tcx>, + path: Path<'tcx>, + }, + /// The automatic clause `Self: Trait` present inside a `impl Trait for Type {}` item. + SelfImpl { + r#trait: PolyTraitRef<'tcx>, + path: Path<'tcx>, + }, + /// `dyn Trait` is a wrapped value with a virtual table for trait + /// `Trait`. 
In other words, a value `dyn Trait` is a dependent + /// triple that gathers a type τ, a value of type τ and an + /// instance of type `Trait`. + /// `dyn Trait` implements `Trait` using a built-in implementation; this refers to that + /// built-in implementation. + Dyn, + /// A built-in trait whose implementation is computed by the compiler, such as `Sync`. + Builtin { r#trait: PolyTraitRef<'tcx> }, + /// An error happened while resolving traits. + Error(String), +} + +#[derive(Clone, Debug)] +pub struct ImplExpr<'tcx> { + /// The trait this is an impl for. + pub r#trait: PolyTraitRef<'tcx>, + /// The kind of implementation of the root of the tree. + pub r#impl: ImplExprAtom<'tcx>, + /// A list of `ImplExpr`s required to fully specify the trait references in `impl`. + pub args: Vec, +} + +/// Items have various predicates in scope. `path_to` uses them as a starting point for trait +/// resolution. This tracks where each of them comes from. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub enum BoundPredicateOrigin { + /// The `Self: Trait` predicate implicitly present within trait declarations (note: we + /// don't add it for trait implementations, should we?). + SelfPred, + /// The nth (non-self) predicate found for this item. We use predicates from + /// `required_predicates` starting from the parentmost item. + Item(usize), +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct AnnotatedTraitPred<'tcx> { + pub origin: BoundPredicateOrigin, + pub clause: PolyTraitPredicate<'tcx>, +} + +/// The predicates to use as a starting point for resolving trait references within this item. This +/// includes the "self" predicate if applicable and the `required_predicates` of this item and all +/// its parents, numbered starting from the parents. 
+fn initial_search_predicates<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: rustc_span::def_id::DefId, +) -> Vec> { + fn acc_predicates<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: rustc_span::def_id::DefId, + predicates: &mut Vec>, + pred_id: &mut usize, + ) { + use DefKind::*; + match tcx.def_kind(def_id) { + // These inherit some predicates from their parent. + AssocTy | AssocFn | AssocConst | Closure => { + let parent = tcx.parent(def_id); + acc_predicates(tcx, parent, predicates, pred_id); + } + Trait => { + let self_pred = self_predicate(tcx, def_id).unwrap().upcast(tcx); + predicates.push(AnnotatedTraitPred { + origin: BoundPredicateOrigin::SelfPred, + clause: self_pred, + }) + } + _ => {} + } + predicates.extend(required_predicates(tcx, def_id).filter_map(|clause| { + clause.as_trait_clause().map(|clause| { + let id = *pred_id; + *pred_id += 1; + AnnotatedTraitPred { + origin: BoundPredicateOrigin::Item(id), + clause, + } + }) + })); + } + + let mut predicates = vec![]; + acc_predicates(tcx, def_id, &mut predicates, &mut 0); + predicates +} + +#[tracing::instrument(level = "trace", skip(tcx))] +fn parents_trait_predicates<'tcx>( + tcx: TyCtxt<'tcx>, + pred: PolyTraitPredicate<'tcx>, +) -> Vec> { + let self_trait_ref = pred.to_poly_trait_ref(); + implied_predicates(tcx, pred.def_id()) + // Substitute with the `self` args so that the clause makes sense in the + // outside context. + .map(|clause| clause.instantiate_supertrait(tcx, self_trait_ref)) + .filter_map(|pred| pred.as_trait_clause()) + .collect() +} + +/// A candidate projects `self` along a path reaching some predicate. A candidate is +/// selected when its predicate is the one expected, aka `target`. +#[derive(Debug, Clone)] +struct Candidate<'tcx> { + path: Path<'tcx>, + pred: PolyTraitPredicate<'tcx>, + origin: AnnotatedTraitPred<'tcx>, +} + +/// Stores a set of predicates along with where they came from. 
+pub struct PredicateSearcher<'tcx> { + tcx: TyCtxt<'tcx>, + param_env: rustc_middle::ty::ParamEnv<'tcx>, + /// Local clauses available in the current context. + candidates: HashMap, Candidate<'tcx>>, +} + +impl<'tcx> PredicateSearcher<'tcx> { + /// Initialize the elaborator with the predicates accessible within this item. + pub fn new_for_owner(tcx: TyCtxt<'tcx>, owner_id: DefId) -> Self { + let mut out = Self { + tcx, + param_env: tcx.param_env(owner_id).with_reveal_all_normalized(tcx), + candidates: Default::default(), + }; + out.extend( + initial_search_predicates(tcx, owner_id) + .into_iter() + .map(|clause| Candidate { + path: vec![], + pred: clause.clause, + origin: clause, + }), + ); + out + } + + /// Insert new candidates and all their parent predicates. This deduplicates predicates + /// to avoid divergence. + fn extend(&mut self, candidates: impl IntoIterator>) { + let tcx = self.tcx; + // Filter out duplicated candidates. + let mut new_candidates = Vec::new(); + for mut candidate in candidates { + // Normalize and erase all lifetimes. + candidate.pred = erase_and_norm(tcx, self.param_env, candidate.pred); + if let Entry::Vacant(entry) = self.candidates.entry(candidate.pred) { + entry.insert(candidate.clone()); + new_candidates.push(candidate); + } + } + if !new_candidates.is_empty() { + self.extend_parents(new_candidates); + } + } + + /// Add the parents of these candidates. This is a separate function to avoid + /// polymorphic recursion due to the closures capturing the type parameters of this + /// function. + fn extend_parents(&mut self, new_candidates: Vec>) { + let tcx = self.tcx; + // Then recursively add their parents. This way ensures a breadth-first order, + // which means we select the shortest path when looking up predicates. 
+ self.extend(new_candidates.into_iter().flat_map(|candidate| { + parents_trait_predicates(tcx, candidate.pred) + .into_iter() + .enumerate() + .map(move |(index, parent_pred)| { + let mut parent_candidate = Candidate { + pred: parent_pred, + path: candidate.path.clone(), + origin: candidate.origin, + }; + parent_candidate.path.push(PathChunk::Parent { + predicate: parent_pred, + index, + }); + parent_candidate + }) + })); + } + + /// If the type is a trait associated type, we add any relevant bounds to our context. + fn add_associated_type_refs( + &mut self, + ty: Binder<'tcx, Ty<'tcx>>, + // Call back into hax-related code to display a nice warning. + warn: &impl Fn(&str), + ) -> Result<(), String> { + let tcx = self.tcx; + // Note: We skip a binder but rebind it just after. + let TyKind::Alias(AliasTyKind::Projection, alias_ty) = ty.skip_binder().kind() else { + return Ok(()); + }; + let (trait_ref, item_args) = alias_ty.trait_ref_and_own_args(tcx); + let trait_ref = ty.rebind(trait_ref).upcast(tcx); + + // The predicate we're looking for is `::Type: OtherTrait`. We look up `T as + // Trait` in the current context and add all the bounds on `Trait::Type` to our context. + let Some(trait_candidate) = self.resolve_local(trait_ref, warn)? else { + return Ok(()); + }; + + // The bounds that hold on the associated type. + let item_bounds = implied_predicates(tcx, alias_ty.def_id) + .filter_map(|pred| pred.as_trait_clause()) + // Substitute the item generics + .map(|pred| EarlyBinder::bind(pred).instantiate(tcx, alias_ty.args)) + .enumerate(); + + // Resolve predicates required to mention the item. + let nested_impl_exprs = + self.resolve_item_predicates(alias_ty.def_id, alias_ty.args, warn)?; + + // Add all the bounds on the corresponding associated item. 
+ self.extend(item_bounds.map(|(index, pred)| { + let mut candidate = Candidate { + path: trait_candidate.path.clone(), + pred, + origin: trait_candidate.origin, + }; + candidate.path.push(PathChunk::AssocItem { + item: tcx.associated_item(alias_ty.def_id), + generic_args: item_args, + impl_exprs: nested_impl_exprs.clone(), + predicate: pred, + index, + }); + candidate + })); + + Ok(()) + } + + /// Resolve a local clause by looking it up in this set. If the predicate applies to an + /// associated type, we add the relevant implied associated type bounds to the set as well. + fn resolve_local( + &mut self, + target: PolyTraitPredicate<'tcx>, + // Call back into hax-related code to display a nice warning. + warn: &impl Fn(&str), + ) -> Result>, String> { + tracing::trace!("Looking for {target:?}"); + + // Look up the predicate + let ret = self.candidates.get(&target).cloned(); + if ret.is_some() { + return Ok(ret); + } + + // Add clauses related to associated type in the `Self` type of the predicate. + self.add_associated_type_refs(target.self_ty(), warn)?; + + let ret = self.candidates.get(&target).cloned(); + if ret.is_none() { + tracing::trace!( + "Couldn't find {target:?} in: [\n{}]", + self.candidates + .iter() + .map(|(_, c)| format!(" - {:?}\n", c.pred)) + .join("") + ); + } + Ok(ret) + } + + /// Resolve the given trait reference in the local context. + #[tracing::instrument(level = "trace", skip(self, warn))] + pub fn resolve( + &mut self, + tref: &PolyTraitRef<'tcx>, + // Call back into hax-related code to display a nice warning. 
+ warn: &impl Fn(&str), + ) -> Result, String> { + use rustc_trait_selection::traits::{ + BuiltinImplSource, ImplSource, ImplSourceUserDefinedData, + }; + + let erased_tref = erase_and_norm(self.tcx, self.param_env, *tref); + + let tcx = self.tcx; + let impl_source = shallow_resolve_trait_ref(tcx, self.param_env, erased_tref); + let nested; + let atom = match impl_source { + Ok(ImplSource::UserDefined(ImplSourceUserDefinedData { + impl_def_id, + args: generics, + .. + })) => { + // Resolve the predicates required by the impl. + nested = self.resolve_item_predicates(impl_def_id, generics, warn)?; + ImplExprAtom::Concrete { + def_id: impl_def_id, + generics, + } + } + Ok(ImplSource::Param(_)) => { + // Mentioning a local clause requires no extra predicates to hold. + nested = vec![]; + match self.resolve_local(erased_tref.upcast(self.tcx), warn)? { + Some(candidate) => { + let path = candidate.path; + let r#trait = candidate.origin.clause.to_poly_trait_ref(); + match candidate.origin.origin { + BoundPredicateOrigin::SelfPred => { + ImplExprAtom::SelfImpl { r#trait, path } + } + BoundPredicateOrigin::Item(index) => ImplExprAtom::LocalBound { + predicate: candidate.origin.clause.upcast(tcx), + index, + r#trait, + path, + }, + } + } + None => { + let msg = format!( + "Could not find a clause for `{tref:?}` in the item parameters" + ); + warn(&msg); + ImplExprAtom::Error(msg) + } + } + } + Ok(ImplSource::Builtin(BuiltinImplSource::Object { .. }, _)) => { + nested = vec![]; + ImplExprAtom::Dyn + } + Ok(ImplSource::Builtin(_, _)) => { + // Builtin impls currently don't need nested predicates. + nested = vec![]; + ImplExprAtom::Builtin { r#trait: *tref } + } + Err(e) => { + nested = vec![]; + let msg = format!( + "Could not find a clause for `{tref:?}` in the current context: `{e:?}`" + ); + warn(&msg); + ImplExprAtom::Error(msg) + } + }; + + Ok(ImplExpr { + r#impl: atom, + args: nested, + r#trait: *tref, + }) + } + + /// Resolve the predicates required by the given item. 
+ pub fn resolve_item_predicates( + &mut self, + def_id: DefId, + generics: GenericArgsRef<'tcx>, + // Call back into hax-related code to display a nice warning. + warn: &impl Fn(&str), + ) -> Result>, String> { + let tcx = self.tcx; + required_predicates(tcx, def_id) + .filter_map(|clause| clause.as_trait_clause()) + .map(|trait_pred| trait_pred.map_bound(|p| p.trait_ref)) + // Substitute the item generics + .map(|trait_ref| EarlyBinder::bind(trait_ref).instantiate(tcx, generics)) + // Resolve + .map(|trait_ref| self.resolve(&trait_ref, warn)) + .collect() + } +} + +/// Attempts to resolve an obligation to an `ImplSource`. The result is a shallow `ImplSource` +/// resolution, meaning that we do not resolve all nested obligations on the impl. Note that type +/// check should guarantee to us that all nested obligations *could be* resolved if we wanted to. +/// +/// This expects that `trait_ref` is fully normalized. +/// +/// This is based on `rustc_traits::codegen::codegen_select_candidate` in rustc. +pub fn shallow_resolve_trait_ref<'tcx>( + tcx: TyCtxt<'tcx>, + param_env: ParamEnv<'tcx>, + trait_ref: PolyTraitRef<'tcx>, +) -> Result, CodegenObligationError> { + use rustc_infer::infer::TyCtxtInferExt; + use rustc_middle::traits::CodegenObligationError; + use rustc_middle::ty::TypeVisitableExt; + use rustc_trait_selection::traits::{ + Obligation, ObligationCause, ObligationCtxt, SelectionContext, Unimplemented, + }; + // Do the initial selection for the obligation. This yields the + // shallow result we are looking for -- that is, what specific impl. 
+ let infcx = tcx.infer_ctxt().ignoring_regions().build(); + let mut selcx = SelectionContext::new(&infcx); + + let obligation_cause = ObligationCause::dummy(); + let obligation = Obligation::new(tcx, obligation_cause, param_env, trait_ref); + + let selection = match selcx.poly_select(&obligation) { + Ok(Some(selection)) => selection, + Ok(None) => return Err(CodegenObligationError::Ambiguity), + Err(Unimplemented) => return Err(CodegenObligationError::Unimplemented), + Err(_) => return Err(CodegenObligationError::FulfillmentError), + }; + + // Currently, we use a fulfillment context to completely resolve + // all nested obligations. This is because they can inform the + // inference of the impl's type parameters. + // FIXME(-Znext-solver): Doesn't need diagnostics if new solver. + let ocx = ObligationCtxt::new(&infcx); + let impl_source = selection.map(|obligation| { + ocx.register_obligation(obligation.clone()); + () + }); + + let errors = ocx.select_all_or_error(); + if !errors.is_empty() { + return Err(CodegenObligationError::FulfillmentError); + } + + let impl_source = infcx.resolve_vars_if_possible(impl_source); + let impl_source = tcx.erase_regions(impl_source); + + if impl_source.has_infer() { + // Unused lifetimes on an impl get replaced with inference vars, but never resolved. + return Err(CodegenObligationError::FulfillmentError); + } + + Ok(impl_source) +} diff --git a/frontend/exporter/src/traits/utils.rs b/frontend/exporter/src/traits/utils.rs new file mode 100644 index 000000000..cc06e573e --- /dev/null +++ b/frontend/exporter/src/traits/utils.rs @@ -0,0 +1,171 @@ +//! Each item can involve three kinds of predicates: +//! - input aka required predicates: the predicates required to mention the item. These are usually `where` +//! clauses (or equivalent) on the item: +//! ```ignore +//! struct Foo { ... } +//! trait Foo where T: Clone { ... } +//! fn function() where I: Iterator, I::Item: Clone { ... } +//! ``` +//! 
- output aka implied predicates: the predicates that are implied by the presence of this item in a +//! signature. This is mostly trait parent predicates: +//! ```ignore +//! trait Foo: Clone { ... } +//! fn bar() { +//! // from `T: Foo` we can deduce `T: Clone` +//! } +//! ``` +//! This could also include implied predicates such as `&'a T` implying `T: 'a` but we don't +//! consider these. +//! - "self" predicate: that's the special `Self: Trait` predicate in scope within a trait +//! declaration or implementation for trait `Trait`. +//! +//! Note that within a given item the polarity is reversed: input predicates are the ones that can +//! be assumed to hold and output predicates must be proven to hold. The "self" predicate is both +//! assumed and proven within an impl block, and just assumed within a trait declaration block. +//! +//! The current implementation considers all predicates on traits to be outputs, which has the +//! benefit of reducing the size of signatures. Moreover, the rules on which bounds are required vs +//! implied are subtle. We may change this if this proves to be a problem. +use rustc_hir::def::DefKind; +use rustc_middle::ty::*; +use rustc_span::def_id::DefId; + +/// Returns a list of type predicates for the definition with ID `def_id`, including inferred +/// lifetime constraints. This is the basic list of predicates we use for essentially all items. +pub fn predicates_defined_on(tcx: TyCtxt<'_>, def_id: DefId) -> GenericPredicates<'_> { + let mut result = tcx.explicit_predicates_of(def_id); + let inferred_outlives = tcx.inferred_outlives_of(def_id); + if !inferred_outlives.is_empty() { + let inferred_outlives_iter = inferred_outlives + .iter() + .map(|(clause, span)| ((*clause).upcast(tcx), *span)); + result.predicates = tcx.arena.alloc_from_iter( + result + .predicates + .into_iter() + .copied() + .chain(inferred_outlives_iter), + ); + } + result +} + +/// The predicates that must hold to mention this item. 
+pub fn required_predicates<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, +) -> impl Iterator> + DoubleEndedIterator { + use DefKind::*; + match tcx.def_kind(def_id) { + AssocConst + | AssocFn + | AssocTy + | Const + | Enum + | Fn + | ForeignTy + | Impl { .. } + | OpaqueTy + | Static { .. } + | Struct + | TraitAlias + | TyAlias + | Union => Some( + predicates_defined_on(tcx, def_id) + .predicates + .iter() + .map(|(clause, _span)| *clause), + ), + // We consider all predicates on traits to be outputs + Trait => None, + // `predicates_defined_on` ICEs on other def kinds. + _ => None, + } + .into_iter() + .flatten() +} + +/// The special "self" predicate on a trait. +pub fn self_predicate<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option> { + use DefKind::*; + match tcx.def_kind(def_id) { + // Copied from the code of `tcx.predicates_of()`. + Trait => Some(Binder::dummy(TraitRef::identity(tcx, def_id))), + _ => None, + } +} + +/// The predicates that can be deduced from the presence of this item in a signature. We only +/// consider predicates implied by traits here, not implied bounds such as `&'a T` implying `T: +/// 'a`. +pub fn implied_predicates<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, +) -> impl Iterator> + DoubleEndedIterator { + use DefKind::*; + match tcx.def_kind(def_id) { + // We consider all predicates on traits to be outputs + Trait => predicates_defined_on(tcx, def_id) + .predicates + .iter() + .map(|(clause, _span)| *clause) + .collect::>(), + AssocTy => tcx + // TODO: `item_bounds` contains parent traits, use `explicit_item_bounds` instead. + .item_bounds(def_id) + .instantiate_identity() + .iter() + .collect(), + _ => vec![], + } + .into_iter() +} + +/// Erase all regions. Largely copied from `tcx.erase_regions`. 
+pub fn erase_all_regions<'tcx, T>(tcx: TyCtxt<'tcx>, value: T) -> T +where + T: TypeFoldable>, +{ + use rustc_middle::ty; + struct RegionEraserVisitor<'tcx> { + tcx: TyCtxt<'tcx>, + } + + impl<'tcx> TypeFolder> for RegionEraserVisitor<'tcx> { + fn cx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { + ty.super_fold_with(self) + } + + fn fold_binder(&mut self, t: ty::Binder<'tcx, T>) -> ty::Binder<'tcx, T> + where + T: TypeFoldable>, + { + // Empty the binder + Binder::dummy(t.skip_binder().fold_with(self)) + } + + fn fold_region(&mut self, _r: ty::Region<'tcx>) -> ty::Region<'tcx> { + // We erase bound regions despite it being possibly incorrect. `for<'a> fn(&'a + // ())` and `fn(&'free ())` are different types: they may implement different + // traits and have a different `TypeId`. It's unclear whether this can cause us + // to select the wrong trait reference. + self.tcx.lifetimes.re_erased + } + } + value.fold_with(&mut RegionEraserVisitor { tcx }) +} + +// Lifetimes are irrelevant when resolving instances. 
+pub fn erase_and_norm<'tcx, T>(tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, x: T) -> T +where + T: TypeFoldable> + Copy, +{ + erase_all_regions( + tcx, + tcx.try_normalize_erasing_regions(param_env, x).unwrap_or(x), + ) +} diff --git a/frontend/exporter/src/types/copied.rs b/frontend/exporter/src/types/copied.rs deleted file mode 100644 index 80a5359be..000000000 --- a/frontend/exporter/src/types/copied.rs +++ /dev/null @@ -1,4098 +0,0 @@ -use crate::prelude::*; - -#[cfg(feature = "rustc")] -use rustc_middle::ty; -#[cfg(feature = "rustc")] -use rustc_span::def_id::DefId as RDefId; - -impl std::hash::Hash for DefId { - fn hash(&self, state: &mut H) { - let DefId { - krate, - path, - index: _, // intentionally discarding index - is_local: _, // intentionally discarding is_local - } = self; - krate.hash(state); - path.hash(state); - } -} - -#[cfg(feature = "rustc")] -impl<'s, S: BaseState<'s>> SInto for rustc_hir::def_id::DefId { - fn sinto(&self, s: &S) -> DefId { - s.base().exported_def_ids.borrow_mut().insert(*self); - let tcx = s.base().tcx; - let def_path = tcx.def_path(*self); - let krate = tcx.crate_name(def_path.krate); - DefId { - path: def_path.data.iter().map(|x| x.sinto(s)).collect(), - krate: format!("{}", krate), - index: ( - rustc_hir::def_id::CrateNum::as_u32(self.krate), - rustc_hir::def_id::DefIndex::as_u32(self.index), - ), - is_local: self.is_local(), - } - } -} - -#[cfg(feature = "rustc")] -impl From<&DefId> for rustc_span::def_id::DefId { - fn from<'tcx>(def_id: &DefId) -> Self { - let (krate, index) = def_id.index; - Self { - krate: rustc_hir::def_id::CrateNum::from_u32(krate), - index: rustc_hir::def_id::DefIndex::from_u32(index), - } - } -} - -// Impl to be able to use hax's `DefId` for many rustc queries. 
-#[cfg(feature = "rustc")] -impl rustc_middle::query::IntoQueryParam for &DefId { - fn into_query_param(self) -> RDefId { - self.into() - } -} - -#[cfg(feature = "rustc")] -impl std::convert::From for Path { - fn from(v: DefId) -> Vec { - std::iter::once(v.krate) - .chain(v.path.into_iter().filter_map(|item| match item.data { - DefPathItem::TypeNs(s) - | DefPathItem::ValueNs(s) - | DefPathItem::MacroNs(s) - | DefPathItem::LifetimeNs(s) => Some(s), - _ => None, - })) - .collect() - } -} - -pub type GlobalIdent = DefId; -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_hir::def_id::LocalDefId { - fn sinto(&self, st: &S) -> DefId { - self.to_def_id().sinto(st) - } -} - -/// Reflects [`rustc_middle::thir::LogicalOp`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'a, S>, from: rustc_middle::thir::LogicalOp, state: S as _s)] -pub enum LogicalOp { - And, - Or, -} - -/// Reflects [`rustc_middle::thir::LintLevel`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'slt, S: UnderOwnerState<'slt> + HasThir<'slt>>, from: rustc_middle::thir::LintLevel, state: S as gstate)] -pub enum LintLevel { - Inherited, - Explicit(HirId), -} - -/// Reflects [`rustc_ast::ast::AttrStyle`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(, from: rustc_ast::ast::AttrStyle, state: S as _s)] -pub enum AttrStyle { - Outer, - Inner, -} - -/// Reflects [`rustc_ast::ast::Attribute`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::Attribute, state: S as gstate)] -pub struct Attribute { - pub kind: AttrKind, - #[map(x.as_usize())] - pub id: usize, - pub style: AttrStyle, - pub span: Span, -} - -/// Reflects 
[`rustc_attr::InlineAttr`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_attr::InlineAttr, state: S as _s)] -pub enum InlineAttr { - None, - Hint, - Always, - Never, -} - -/// Generic container for decorating items with a type, a span, -/// attributes and other meta-data. -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Decorated { - pub ty: Ty, - pub span: Span, - pub contents: Box, - pub hir_id: Option<(usize, usize)>, - pub attributes: Vec, -} - -/// Reflects [`rustc_middle::mir::UnOp`] -#[derive_group(Serializers)] -#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)] -#[args(<'slt, S: UnderOwnerState<'slt>>, from: rustc_middle::mir::UnOp, state: S as _s)] -pub enum UnOp { - Not, - Neg, - PtrMetadata, -} - -/// Reflects [`rustc_middle::mir::BinOp`] -#[derive_group(Serializers)] -#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)] -#[args(<'slt, S: UnderOwnerState<'slt>>, from: rustc_middle::mir::BinOp, state: S as _s)] -pub enum BinOp { - // We merge the checked and unchecked variants because in either case overflow is failure. 
- #[custom_arm( - rustc_middle::mir::BinOp::Add | rustc_middle::mir::BinOp::AddUnchecked => BinOp::Add, - )] - Add, - #[custom_arm( - rustc_middle::mir::BinOp::Sub | rustc_middle::mir::BinOp::SubUnchecked => BinOp::Sub, - )] - Sub, - #[custom_arm( - rustc_middle::mir::BinOp::Mul | rustc_middle::mir::BinOp::MulUnchecked => BinOp::Mul, - )] - Mul, - AddWithOverflow, - SubWithOverflow, - MulWithOverflow, - Div, - Rem, - BitXor, - BitAnd, - BitOr, - #[custom_arm( - rustc_middle::mir::BinOp::Shl | rustc_middle::mir::BinOp::ShlUnchecked => BinOp::Shl, - )] - Shl, - #[custom_arm( - rustc_middle::mir::BinOp::Shr | rustc_middle::mir::BinOp::ShrUnchecked => BinOp::Shr, - )] - Shr, - Eq, - Lt, - Le, - Ne, - Ge, - Gt, - Cmp, - Offset, -} - -pub type Pat = Decorated; -pub type Expr = Decorated; - -/// Reflects [`rustc_middle::mir::BinOp`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::ScopeData, state: S as gstate)] -pub enum ScopeData { - Node, - CallSite, - Arguments, - Destruction, - IfThen, - Remainder(FirstStatementIndex), -} - -/// Reflects [`rustc_middle::mir::BinOp`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::Scope, state: S as gstate)] -pub struct Scope { - pub id: ItemLocalId, - pub data: ScopeData, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::mir::Const<'tcx> { - fn sinto(&self, s: &S) -> ConstantExpr { - use rustc_middle::mir::Const; - let tcx = s.base().tcx; - match self { - Const::Val(const_value, ty) => { - const_value_to_constant_expr(s, *ty, *const_value, rustc_span::DUMMY_SP) - } - Const::Ty(_ty, c) => c.sinto(s), - Const::Unevaluated(ucv, _ty) => { - use crate::rustc_middle::query::Key; - let span = tcx - .def_ident_span(ucv.def) - .unwrap_or_else(|| 
ucv.def.default_span(tcx)); - if ucv.promoted.is_some() { - self.eval_constant(s) - .unwrap_or_else(|| { - supposely_unreachable_fatal!(s, "UnevalPromotedConstant"; {self, ucv}); - }) - .sinto(s) - } else { - match self.translate_uneval(s, ucv.shrink(), span) { - TranslateUnevalRes::EvaluatedConstant(c) => c.sinto(s), - TranslateUnevalRes::GlobalName(c) => c, - } - } - } - } - } -} - -// For ConstantKind we merge all the cases (Ty, Val, Unevaluated) into one -pub type ConstantKind = ConstantExpr; - -#[cfg(feature = "rustc")] -impl SInto for rustc_middle::mir::interpret::AllocId { - fn sinto(&self, _: &S) -> u64 { - self.0.get() - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto> for rustc_middle::ty::Ty<'tcx> { - fn sinto(&self, s: &S) -> Box { - Box::new(self.sinto(s)) - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::Ty<'tcx> { - fn sinto(&self, s: &S) -> Ty { - self.kind().sinto(s) - } -} - -/// Reflects [`rustc_hir::hir_id::HirId`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::hir_id::HirId, state: S as gstate)] -pub struct HirId { - owner: DefId, - local_id: usize, - // attrs: String -} -// TODO: If not working: See original - -#[cfg(feature = "rustc")] -impl<'tcx, S: BaseState<'tcx>> SInto for rustc_hir::hir_id::OwnerId { - fn sinto(&self, s: &S) -> DefId { - self.to_def_id().sinto(s) - } -} - -/// Reflects [`rustc_ast::ast::LitFloatType`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitFloatType, state: S as gstate)] -pub enum LitFloatType { - Suffixed(FloatTy), - Unsuffixed, -} -/// Reflects [`rustc_hir::Movability`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] 
-#[args(<'tcx, S>, from: rustc_hir::Movability, state: S as _s)] -pub enum Movability { - Static, - Movable, -} - -/// Reflects [`rustc_middle::infer::canonical::CanonicalTyVarKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::infer::canonical::CanonicalTyVarKind, state: S as gstate)] -pub enum CanonicalTyVarKind { - General(UniverseIndex), - Int, - Float, -} - -/// Reflects [`rustc_middle::ty::ParamTy`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::ParamTy, state: S as gstate)] -pub struct ParamTy { - pub index: u32, - pub name: Symbol, -} - -/// Reflects [`rustc_middle::ty::ParamConst`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(, from: rustc_middle::ty::ParamConst, state: S as gstate)] -pub struct ParamConst { - pub index: u32, - pub name: Symbol, -} - -/// A predicate without `Self`, for use in `dyn Trait`. -/// -/// Reflects [`rustc_middle::ty::ExistentialPredicate`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::ExistentialPredicate<'tcx>, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum ExistentialPredicate { - /// E.g. `From`. Note that this isn't `T: From` with a given `T`, this is just - /// `From`. Could be written `?: From`. - Trait(ExistentialTraitRef), - /// E.g. `Iterator::Item = u64`. Could be written `::Item = u64`. - Projection(ExistentialProjection), - /// E.g. `Send`. 
- AutoTrait(DefId), -} - -/// Reflects [`rustc_type_ir::ExistentialTraitRef`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialTraitRef>, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct ExistentialTraitRef { - pub def_id: DefId, - pub args: Vec, -} - -/// Reflects [`rustc_type_ir::ExistentialProjection`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialProjection>, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct ExistentialProjection { - pub def_id: DefId, - pub args: Vec, - pub term: Term, -} - -/// Reflects [`rustc_middle::ty::DynKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(, from: rustc_middle::ty::DynKind, state: S as _s)] -pub enum DynKind { - Dyn, - DynStar, -} - -/// Reflects [`rustc_middle::ty::BoundTyKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::BoundTyKind, state: S as gstate)] -pub enum BoundTyKind { - Anon, - Param(DefId, Symbol), -} - -/// Reflects [`rustc_middle::ty::BoundTy`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::BoundTy, state: S as gstate)] -pub struct BoundTy { - pub var: BoundVar, - pub kind: BoundTyKind, -} - -/// Reflects [`rustc_middle::ty::BoundRegionKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::BoundRegionKind, state: S as gstate)] -pub enum BoundRegionKind { - BrAnon, - 
BrNamed(DefId, Symbol), - BrEnv, -} - -/// Reflects [`rustc_middle::ty::BoundRegion`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::BoundRegion, state: S as gstate)] -pub struct BoundRegion { - pub var: BoundVar, - pub kind: BoundRegionKind, -} - -/// Reflects [`rustc_middle::ty::PlaceholderRegion`] -pub type PlaceholderRegion = Placeholder; -/// Reflects [`rustc_middle::ty::PlaceholderConst`] -pub type PlaceholderConst = Placeholder; -/// Reflects [`rustc_middle::ty::PlaceholderType`] -pub type PlaceholderType = Placeholder; - -/// Reflects [`rustc_middle::ty::Placeholder`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Placeholder { - pub universe: UniverseIndex, - pub bound: T, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>, T: SInto, U> SInto> - for rustc_middle::ty::Placeholder -{ - fn sinto(&self, s: &S) -> Placeholder { - Placeholder { - universe: self.universe.sinto(s), - bound: self.bound.sinto(s), - } - } -} - -/// Reflects [`rustc_middle::infer::canonical::Canonical`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Canonical { - pub max_universe: UniverseIndex, - pub variables: Vec, - pub value: T, -} -/// Reflects [`rustc_middle::ty::CanonicalUserType`] -pub type CanonicalUserType = Canonical; - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>, T: SInto, U> SInto> - for rustc_middle::infer::canonical::Canonical<'tcx, T> -{ - fn sinto(&self, s: &S) -> Canonical { - Canonical { - max_universe: self.max_universe.sinto(s), - variables: self.variables.iter().map(|v| v.kind.sinto(s)).collect(), - value: self.value.sinto(s), - } - } -} - -/// Reflects [`rustc_middle::infer::canonical::CanonicalVarKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, 
S: UnderOwnerState<'tcx>>, from: rustc_middle::infer::canonical::CanonicalVarKind>, state: S as gstate)] -pub enum CanonicalVarInfo { - Ty(CanonicalTyVarKind), - PlaceholderTy(PlaceholderType), - Region(UniverseIndex), - PlaceholderRegion(PlaceholderRegion), - Const(UniverseIndex), - PlaceholderConst(PlaceholderConst), - Effect, -} - -/// Reflects [`rustc_middle::ty::UserSelfTy`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::UserSelfTy<'tcx>, state: S as gstate)] -pub struct UserSelfTy { - pub impl_def_id: DefId, - pub self_ty: Ty, -} - -/// Reflects [`rustc_middle::ty::UserArgs`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::UserArgs<'tcx>, state: S as gstate)] -pub struct UserArgs { - pub args: Vec, - pub user_self_ty: Option, -} - -/// Reflects [`rustc_middle::ty::UserType`]: this is currently -/// disabled, and everything is printed as debug in the -/// [`UserType::Todo`] variant. -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::UserType<'tcx>, state: S as _s)] -pub enum UserType { - // TODO: for now, we don't use user types at all. - // We disable it for now, since it cause the following to fail: - // - // pub const MY_VAL: u16 = 5; - // pub type Alias = MyStruct; // Using the literal 5, it goes through - // - // pub struct MyStruct {} - // - // impl MyStruct { - // pub const MY_CONST: u16 = VAL; - // } - // - // pub fn do_something() -> u32 { - // u32::from(Alias::MY_CONST) - // } - // - // In this case, we get a [rustc_middle::ty::ConstKind::Bound] in - // [do_something], which we are not able to translate. 
- // See: https://github.com/hacspec/hax/pull/209 - - // Ty(Ty), - // TypeOf(DefId, UserArgs), - #[todo] - Todo(String), -} - -/// Reflects [`rustc_hir::def::CtorKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: rustc_hir::def::CtorKind, state: S as _s)] -pub enum CtorKind { - Fn, - Const, -} - -/// Reflects [`rustc_hir::def::CtorOf`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: rustc_hir::def::CtorOf, state: S as _s)] -pub enum CtorOf { - Struct, - Variant, -} - -/// Reflects [`rustc_middle::ty::VariantDiscr`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::VariantDiscr, state: S as gstate)] -pub enum DiscriminantDefinition { - Explicit(DefId), - Relative(u32), -} - -/// Reflects [`rustc_middle::ty::util::Discr`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::util::Discr<'tcx>, state: S as gstate)] -pub struct DiscriminantValue { - pub val: u128, - pub ty: Ty, -} - -/// Reflects [`rustc_middle::ty::Visibility`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum Visibility { - Public, - Restricted(Id), -} - -#[cfg(feature = "rustc")] -impl, U> SInto> for rustc_middle::ty::Visibility { - fn sinto(&self, s: &S) -> Visibility { - use rustc_middle::ty::Visibility as T; - match self { - T::Public => Visibility::Public, - T::Restricted(id) => Visibility::Restricted(id.sinto(s)), - } - } -} - -/// Reflects [`rustc_middle::ty::FieldDef`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct FieldDef { - pub did: DefId, - /// Field definition of [tuple - /// structs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types) - /// are anonymous, in that case `name` is 
[`None`]. - pub name: Option, - pub vis: Visibility, - pub ty: Ty, - pub span: Span, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::FieldDef { - fn sinto(&self, s: &S) -> FieldDef { - let tcx = s.base().tcx; - let ty = { - let generics = rustc_middle::ty::GenericArgs::identity_for_item(tcx, self.did); - self.ty(tcx, generics).sinto(s) - }; - let name = { - let name = self.name.sinto(s); - let is_user_provided = { - // SH: Note that the only way I found of checking if the user wrote the name or if it - // is just an integer generated by rustc is by checking if it is just made of - // numerals... - name.parse::().is_err() - }; - is_user_provided.then_some(name) - }; - - FieldDef { - did: self.did.sinto(s), - name, - vis: self.vis.sinto(s), - ty, - span: tcx.def_span(self.did).sinto(s), - } - } -} - -/// Reflects [`rustc_middle::ty::VariantDef`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct VariantDef { - pub def_id: DefId, - pub ctor: Option<(CtorKind, DefId)>, - pub name: Symbol, - pub discr_def: DiscriminantDefinition, - pub discr_val: DiscriminantValue, - /// The definitions of the fields on this variant. In case of - /// [tuple - /// structs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types), - /// the fields are anonymous, otherwise fields are named. 
- pub fields: Vec, - /// Span of the definition of the variant - pub span: Span, -} - -#[cfg(feature = "rustc")] -impl VariantDef { - fn sfrom<'tcx, S: UnderOwnerState<'tcx>>( - s: &S, - def: &ty::VariantDef, - discr_val: ty::util::Discr<'tcx>, - ) -> Self { - VariantDef { - def_id: def.def_id.sinto(s), - ctor: def.ctor.sinto(s), - name: def.name.sinto(s), - discr_def: def.discr.sinto(s), - discr_val: discr_val.sinto(s), - fields: def.fields.raw.sinto(s), - span: s.base().tcx.def_span(def.def_id).sinto(s), - } - } -} - -/// Reflects [`rustc_middle::ty::EarlyParamRegion`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::EarlyParamRegion, state: S as gstate)] -pub struct EarlyParamRegion { - pub index: u32, - pub name: Symbol, -} - -/// Reflects [`rustc_middle::ty::LateParamRegion`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::LateParamRegion, state: S as gstate)] -pub struct LateParamRegion { - pub scope: DefId, - pub bound_region: BoundRegionKind, -} - -/// Reflects [`rustc_middle::ty::RegionKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::RegionKind<'tcx>, state: S as gstate)] -pub enum RegionKind { - ReEarlyParam(EarlyParamRegion), - ReBound(DebruijnIndex, BoundRegion), - ReLateParam(LateParamRegion), - ReStatic, - ReVar(RegionVid), - RePlaceholder(PlaceholderRegion), - ReErased, - ReError(ErrorGuaranteed), -} - -/// Reflects [`rustc_middle::ty::Region`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::Region<'tcx>, state: S as s)] -pub 
struct Region { - #[value(self.kind().sinto(s))] - pub kind: RegionKind, -} - -/// Reflects both [`rustc_middle::ty::GenericArg`] and [`rustc_middle::ty::GenericArgKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::GenericArgKind<'tcx>, state: S as s)] -pub enum GenericArg { - Lifetime(Region), - Type(Ty), - Const(ConstantExpr), -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::GenericArg<'tcx> { - fn sinto(&self, s: &S) -> GenericArg { - self.unpack().sinto(s) - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto> - for rustc_middle::ty::GenericArgsRef<'tcx> -{ - fn sinto(&self, s: &S) -> Vec { - self.iter().map(|v| v.unpack().sinto(s)).collect() - } -} - -/// Reflects both [`rustc_middle::ty::GenericArg`] and [`rustc_middle::ty::GenericArgKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitIntType, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum LitIntType { - Signed(IntTy), - Unsigned(UintTy), - Unsuffixed, -} - -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::FruInfo<'tcx>, state: S as gstate)] -/// Field Record Update (FRU) informations, this reflects [`rustc_middle::thir::FruInfo`] -pub struct FruInfo { - /// The base, e.g. `Foo {x: 1, .. 
base}` - pub base: Expr, - pub field_types: Vec, -} - -/// A field expression: a field name along with a value -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct FieldExpr { - pub field: DefId, - pub value: Expr, -} - -/// A field pattern: a field name along with a pattern -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct FieldPat { - pub field: DefId, - pub pattern: Pat, -} - -/// Reflects [`rustc_middle::thir::AdtExpr`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct AdtExpr { - pub info: VariantInformations, - pub user_ty: Option, - pub fields: Vec, - pub base: Option, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: ExprState<'tcx>> SInto for rustc_middle::thir::AdtExpr<'tcx> { - fn sinto(&self, s: &S) -> AdtExpr { - let variants = self.adt_def.variants(); - let variant: &rustc_middle::ty::VariantDef = &variants[self.variant_index]; - AdtExpr { - info: get_variant_information(&self.adt_def, self.variant_index, s), - fields: self - .fields - .iter() - .map(|f| FieldExpr { - field: variant.fields[f.name].did.sinto(s), - value: f.expr.sinto(s), - }) - .collect(), - base: self.base.sinto(s), - user_ty: self.user_ty.sinto(s), - } - } -} - -/// Reflects [`rustc_span::Loc`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct Loc { - pub line: usize, - pub col: usize, -} - -/// Reflects [`rustc_span::hygiene::DesugaringKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: rustc_span::hygiene::DesugaringKind, state: S as _s)] -pub enum DesugaringKind { - CondTemporary, - QuestionMark, - TryBlock, - YeetExpr, - OpaqueTy, - Async, - Await, - ForLoop, - WhileLoop, - BoundModifier, -} - -/// Reflects [`rustc_span::hygiene::AstPass`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: rustc_span::hygiene::AstPass, state: S as _s)] -pub enum 
AstPass { - StdImports, - TestHarness, - ProcMacroHarness, -} - -/// Reflects [`rustc_span::hygiene::MacroKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(, from: rustc_span::hygiene::MacroKind, state: S as _s)] -pub enum MacroKind { - Bang, - Attr, - Derive, -} - -/// Reflects [`rustc_span::hygiene::ExpnKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_span::hygiene::ExpnKind, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum ExpnKind { - Root, - Macro(MacroKind, Symbol), - AstPass(AstPass), - Desugaring(DesugaringKind), -} - -/// Reflects [`rustc_span::edition::Edition`] -#[derive(AdtInto)] -#[args(, from: rustc_span::edition::Edition, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum Edition { - Edition2015, - Edition2018, - Edition2021, - Edition2024, -} - -/// Reflects [`rustc_span::hygiene::ExpnData`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_span::hygiene::ExpnData, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct ExpnData { - pub kind: ExpnKind, - // pub parent: Box, - pub call_site: Span, - pub def_site: Span, - #[map(x.as_ref().map(|x| x.clone().iter().map(|x|x.sinto(state)).collect()))] - pub allow_internal_unstable: Option>, - pub edition: Edition, - pub macro_def_id: Option, - pub parent_module: Option, - pub local_inner_macros: bool, -} - -/// Reflects [`rustc_span::Span`] -#[derive(::serde::Serialize, ::serde::Deserialize, Clone, Debug, JsonSchema, Eq, Ord)] -pub struct Span { - pub lo: Loc, - pub hi: Loc, - pub filename: FileName, - /// Original rustc span; can be useful for reporting rustc - /// diagnostics (this is used in Charon) - #[cfg(feature = "rustc")] - #[serde(skip)] - pub rust_span_data: Option, - #[cfg(not(feature = "rustc"))] - #[serde(skip)] - pub rust_span_data: Option<()>, - // expn_backtrace: Vec, -} - 
-/// We need to define manual `impl`s of `Span`: we want to skip the -/// field `rust_span_data`. The derive macros from `bincode` don't -/// allow that, see https://github.com/bincode-org/bincode/issues/452. -const _: () = { - impl bincode::Encode for Span { - fn encode( - &self, - encoder: &mut E, - ) -> core::result::Result<(), bincode::error::EncodeError> { - bincode::Encode::encode(&self.lo, encoder)?; - bincode::Encode::encode(&self.hi, encoder)?; - bincode::Encode::encode(&self.filename, encoder)?; - Ok(()) - } - } - - impl bincode::Decode for Span { - fn decode( - decoder: &mut D, - ) -> core::result::Result { - Ok(Self { - lo: bincode::Decode::decode(decoder)?, - hi: bincode::Decode::decode(decoder)?, - filename: bincode::Decode::decode(decoder)?, - rust_span_data: None, - }) - } - } - - impl<'de> bincode::BorrowDecode<'de> for Span { - fn borrow_decode>( - decoder: &mut D, - ) -> core::result::Result { - Ok(Self { - lo: bincode::BorrowDecode::borrow_decode(decoder)?, - hi: bincode::BorrowDecode::borrow_decode(decoder)?, - filename: bincode::BorrowDecode::borrow_decode(decoder)?, - rust_span_data: None, - }) - } - } -}; - -const _: () = { - // `rust_span_data` is a metadata that should *not* be taken into - // account while hashing or comparing - - impl std::hash::Hash for Span { - fn hash(&self, state: &mut H) { - self.lo.hash(state); - self.hi.hash(state); - self.filename.hash(state); - } - } - impl PartialEq for Span { - fn eq(&self, other: &Self) -> bool { - self.lo == other.lo && self.hi == other.hi && self.filename == other.filename - } - } - - impl PartialOrd for Span { - fn partial_cmp(&self, other: &Self) -> Option { - Some( - self.lo.partial_cmp(&other.lo)?.then( - self.hi - .partial_cmp(&other.hi)? 
- .then(self.filename.partial_cmp(&other.filename)?), - ), - ) - } - } -}; - -#[cfg(feature = "rustc")] -impl From for Loc { - fn from(val: rustc_span::Loc) -> Self { - Loc { - line: val.line, - col: val.col_display, - } - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: BaseState<'tcx>> SInto for rustc_span::Span { - fn sinto(&self, s: &S) -> Span { - let set: crate::state::ExportedSpans = s.base().exported_spans; - set.borrow_mut().insert(*self); - translate_span(*self, s.base().tcx.sess) - } -} - -/// Reflects [`rustc_middle::thir::LocalVarId`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct LocalIdent { - pub name: String, - pub id: HirId, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::thir::LocalVarId { - fn sinto(&self, s: &S) -> LocalIdent { - LocalIdent { - name: s - .base() - .local_ctx - .borrow() - .vars - .get(self) - .s_unwrap(s) - .to_string(), - id: self.0.sinto(s), - } - } -} - -/// Reflects [`rustc_span::source_map::Spanned`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Spanned { - pub node: T, - pub span: Span, -} -#[cfg(feature = "rustc")] -impl<'s, S: UnderOwnerState<'s>, T: SInto, U> SInto> - for rustc_span::source_map::Spanned -{ - fn sinto<'a>(&self, s: &S) -> Spanned { - Spanned { - node: self.node.sinto(s), - span: self.span.sinto(s), - } - } -} - -impl<'tcx, S> SInto for PathBuf { - fn sinto(&self, _: &S) -> PathBuf { - self.clone() - } -} - -/// Reflects [`rustc_span::RealFileName`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] -#[args(, from: rustc_span::RealFileName, state: S as _s)] -pub enum RealFileName { - LocalPath(PathBuf), - Remapped { - local_path: Option, - virtual_name: PathBuf, - }, -} - -#[cfg(feature = "rustc")] -impl SInto for rustc_data_structures::stable_hasher::Hash64 { - fn sinto(&self, _: &S) -> u64 { - self.as_u64() - } -} - -/// Reflects 
[`rustc_span::FileName`] -#[derive(AdtInto)] -#[args(, from: rustc_span::FileName, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum FileName { - Real(RealFileName), - QuoteExpansion(u64), - Anon(u64), - MacroExpansion(u64), - ProcMacroSourceCode(u64), - CliCrateAttr(u64), - Custom(String), - // #[map(FileName::DocTest(x.0.to_str().unwrap().into()))] - #[custom_arm(FROM_TYPE::DocTest(x, _) => TO_TYPE::DocTest(x.to_str().unwrap().into()),)] - DocTest(String), - InlineAsm(u64), -} - -impl FileName { - pub fn to_string(&self) -> String { - match self { - Self::Real(RealFileName::LocalPath(path)) - | Self::Real(RealFileName::Remapped { - local_path: Some(path), - .. - }) - | Self::Real(RealFileName::Remapped { - virtual_name: path, .. - }) => format!("{}", path.display()), - _ => format!("{:?}", self), - } - } - pub fn to_path(&self) -> Option<&std::path::Path> { - match self { - Self::Real(RealFileName::LocalPath(path)) - | Self::Real(RealFileName::Remapped { - local_path: Some(path), - .. - }) - | Self::Real(RealFileName::Remapped { - virtual_name: path, .. - }) => Some(path), - _ => None, - } - } -} - -/// Reflects partially [`rustc_middle::ty::InferTy`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S>, from: rustc_middle::ty::InferTy, state: S as gstate)] -pub enum InferTy { - #[custom_arm(FROM_TYPE::TyVar(..) => TO_TYPE::TyVar,)] - TyVar, /*TODO?*/ - #[custom_arm(FROM_TYPE::IntVar(..) => TO_TYPE::IntVar,)] - IntVar, /*TODO?*/ - #[custom_arm(FROM_TYPE::FloatVar(..) 
=> TO_TYPE::FloatVar,)] - FloatVar, /*TODO?*/ - FreshTy(u32), - FreshIntTy(u32), - FreshFloatTy(u32), -} - -/// Reflects [`rustc_middle::thir::BlockSafety`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S>, from: rustc_middle::thir::BlockSafety, state: S as _s)] -pub enum BlockSafety { - Safe, - BuiltinUnsafe, - #[custom_arm(FROM_TYPE::ExplicitUnsafe{..} => BlockSafety::ExplicitUnsafe,)] - ExplicitUnsafe, -} - -/// Reflects [`rustc_middle::thir::Block`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::Block, state: S as gstate)] -pub struct Block { - pub targeted_by_break: bool, - pub region_scope: Scope, - pub span: Span, - pub stmts: Vec, - pub expr: Option, - pub safety_mode: BlockSafety, -} - -/// Reflects [`rustc_ast::ast::BindingMode`] -#[derive(AdtInto)] -#[args(, from: rustc_ast::ast::BindingMode, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct BindingMode { - #[value(self.0.sinto(s))] - pub by_ref: ByRef, - #[value(self.1.sinto(s))] - pub mutability: Mutability, -} - -/// Reflects [`rustc_ast::ast::ByRef`] -#[derive(AdtInto)] -#[args(, from: rustc_ast::ast::ByRef, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum ByRef { - Yes(Mutability), - No, -} - -/// Reflects [`rustc_middle::thir::Stmt`] -#[derive(AdtInto)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::Stmt<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Stmt { - pub kind: StmtKind, -} - -/// Reflects [`rustc_ast::token::Delimiter`] -#[derive(AdtInto)] -#[args(, from: rustc_ast::token::Delimiter, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum Delimiter { - Parenthesis, - Brace, - Bracket, - Invisible, -} - -/// Reflects 
[`rustc_ast::tokenstream::TokenTree`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_ast::tokenstream::TokenTree, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum TokenTree { - Token(Token, Spacing), - Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream), -} - -/// Reflects [`rustc_ast::tokenstream::Spacing`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_ast::tokenstream::Spacing, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum Spacing { - Alone, - Joint, - JointHidden, -} - -/// Reflects [`rustc_ast::token::BinOpToken`] -#[derive(AdtInto)] -#[args(, from: rustc_ast::token::BinOpToken, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum BinOpToken { - Plus, - Minus, - Star, - Slash, - Percent, - Caret, - And, - Or, - Shl, - Shr, -} - -/// Reflects [`rustc_ast::token::TokenKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_ast::token::TokenKind, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum TokenKind { - Eq, - Lt, - Le, - EqEq, - Ne, - Ge, - Gt, - AndAnd, - OrOr, - Not, - Tilde, - BinOp(BinOpToken), - BinOpEq(BinOpToken), - At, - Dot, - DotDot, - DotDotDot, - DotDotEq, - Comma, - Semi, - Colon, - RArrow, - LArrow, - FatArrow, - Pound, - Dollar, - Question, - SingleQuote, - OpenDelim(Delimiter), - CloseDelim(Delimiter), - // Literal(l: Lit), - Ident(Symbol, bool), - Lifetime(Symbol), - // Interpolated(n: Nonterminal), - // DocComment(k: CommentKind, ats: AttrStyle, s: Symbol), - Eof, - #[todo] - Todo(String), -} - -#[cfg(feature = "rustc")] -impl SInto for rustc_ast::token::IdentIsRaw { - fn sinto(&self, _s: &S) -> bool { - match self { - Self::Yes 
=> true, - Self::No => false, - } - } -} - -/// Reflects [`rustc_ast::token::Token`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_ast::token::Token, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Token { - pub kind: TokenKind, - pub span: Span, -} - -/// Reflects [`rustc_ast::ast::DelimArgs`] -#[derive(AdtInto)] -#[args(, from: rustc_ast::ast::DelimArgs, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct DelimArgs { - pub dspan: DelimSpan, - pub delim: Delimiter, - pub tokens: TokenStream, -} - -/// Reflects [`rustc_ast::ast::MacCall`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_ast::ast::MacCall, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct MacCall { - #[map(x.segments.iter().map(|rustc_ast::ast::PathSegment{ident, ..}| ident.as_str().into()).collect())] - pub path: Path, - pub args: DelimArgs, -} - -/// Reflects [`rustc_ast::tokenstream::TokenStream`] as a plain -/// string. If you need to reshape that into Rust tokens or construct, -/// please use, e.g., `syn`. 
-pub type TokenStream = String; -#[cfg(feature = "rustc")] -impl<'t, S> SInto for rustc_ast::tokenstream::TokenStream { - fn sinto(&self, _: &S) -> String { - rustc_ast_pretty::pprust::tts_to_string(self) - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: ExprState<'tcx>> SInto for rustc_middle::thir::BlockId { - fn sinto(&self, s: &S) -> Block { - s.thir().blocks[*self].sinto(s) - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: ExprState<'tcx>> SInto for rustc_middle::thir::StmtId { - fn sinto(&self, s: &S) -> Stmt { - s.thir().stmts[*self].sinto(s) - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: ExprState<'tcx>> SInto for rustc_middle::thir::Expr<'tcx> { - fn sinto(&self, s: &S) -> Expr { - let (hir_id, attributes) = self.hir_id_and_attributes(s); - let hir_id = hir_id.map(|hir_id| hir_id.index()); - let unrolled = self.unroll_scope(s); - let rustc_middle::thir::Expr { span, kind, ty, .. } = unrolled; - let contents = match macro_invocation_of_span(span, s).map(ExprKind::MacroInvokation) { - Some(contents) => contents, - None => match kind { - // Introduce intermediate `Cast` from `T` to `U` when casting from a `#[repr(T)]` enum to `U` - rustc_middle::thir::ExprKind::Cast { source } => { - if let rustc_middle::ty::TyKind::Adt(def, _) = s.thir().exprs[source].ty.kind() - { - let tcx = s.base().tcx; - let contents = kind.sinto(s); - use crate::rustc_middle::ty::util::IntTypeExt; - let repr_type = tcx - .repr_options_of_def(def.did().expect_local()) - .discr_type() - .to_ty(s.base().tcx); - if repr_type == ty { - contents - } else { - ExprKind::Cast { - source: Decorated { - ty: repr_type.sinto(s), - span: span.sinto(s), - contents: Box::new(contents), - hir_id, - attributes: vec![], - }, - } - } - } else { - kind.sinto(s) - } - } - rustc_middle::thir::ExprKind::NonHirLiteral { lit, .. 
} => { - let cexpr: ConstantExpr = - (ConstantExprKind::Literal(scalar_int_to_constant_literal(s, lit, ty))) - .decorate(ty.sinto(s), span.sinto(s)); - return cexpr.into(); - } - rustc_middle::thir::ExprKind::ZstLiteral { .. } => match ty.kind() { - rustc_middle::ty::TyKind::FnDef(def, _generics) => { - /* TODO: translate generics - let tcx = s.base().tcx; - let sig = &tcx.fn_sig(*def).instantiate(tcx, generics); - let ret: rustc_middle::ty::Ty = tcx.erase_late_bound_regions(sig.output()); - let inputs = sig.inputs(); - let indexes = inputs.skip_binder().iter().enumerate().map(|(i, _)| i); - let params = indexes.map(|i| inputs.map_bound(|tys| tys[i])); - let params: Vec = - params.map(|i| tcx.erase_late_bound_regions(i)).collect(); - */ - return Expr { - contents: Box::new(ExprKind::GlobalName { id: def.sinto(s) }), - span: self.span.sinto(s), - ty: ty.sinto(s), - hir_id, - attributes, - }; - } - _ => { - if ty.is_phantom_data() { - let rustc_middle::ty::Adt(def, _) = ty.kind() else { - supposely_unreachable_fatal!(s[span], "PhantomDataNotAdt"; {kind, ty}) - }; - let adt_def = AdtExpr { - info: get_variant_information( - def, - rustc_target::abi::FIRST_VARIANT, - s, - ), - user_ty: None, - base: None, - fields: vec![], - }; - return Expr { - contents: Box::new(ExprKind::Adt(adt_def)), - span: self.span.sinto(s), - ty: ty.sinto(s), - hir_id, - attributes, - }; - } else { - supposely_unreachable!( - s[span], - "ZstLiteral ty≠FnDef(...) 
or PhantomData"; - {kind, span, ty} - ); - kind.sinto(s) - } - } - }, - rustc_middle::thir::ExprKind::Field { - lhs, - variant_index, - name, - } => { - let lhs_ty = s.thir().exprs[lhs].ty.kind(); - let idx = variant_index.index(); - if idx != 0 { - let _ = supposely_unreachable!( - s[span], - "ExprKindFieldIdxNonZero"; { - kind, - span, - ty, - ty.kind() - } - ); - }; - match lhs_ty { - rustc_middle::ty::TyKind::Adt(adt_def, _generics) => { - let variant = adt_def.variant(variant_index); - ExprKind::Field { - field: variant.fields[name].did.sinto(s), - lhs: lhs.sinto(s), - } - } - rustc_middle::ty::TyKind::Tuple(..) => ExprKind::TupleField { - field: name.index(), - lhs: lhs.sinto(s), - }, - _ => supposely_unreachable_fatal!( - s[span], - "ExprKindFieldBadTy"; { - kind, - span, - ty.kind(), - lhs_ty - } - ), - } - } - _ => kind.sinto(s), - }, - }; - Decorated { - ty: ty.sinto(s), - span: span.sinto(s), - contents: Box::new(contents), - hir_id, - attributes, - } - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: ExprState<'tcx>> SInto for rustc_middle::thir::ExprId { - fn sinto(&self, s: &S) -> Expr { - s.thir().exprs[*self].sinto(s) - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: ExprState<'tcx>> SInto for rustc_middle::thir::Pat<'tcx> { - fn sinto(&self, s: &S) -> Pat { - let rustc_middle::thir::Pat { span, kind, ty } = self; - let contents = match kind { - rustc_middle::thir::PatKind::Leaf { subpatterns } => match ty.kind() { - rustc_middle::ty::TyKind::Adt(adt_def, args) => { - (rustc_middle::thir::PatKind::Variant { - adt_def: *adt_def, - args, - variant_index: rustc_target::abi::VariantIdx::from_usize(0), - subpatterns: subpatterns.clone(), - }) - .sinto(s) - } - rustc_middle::ty::TyKind::Tuple(..) 
=> PatKind::Tuple { - subpatterns: subpatterns - .iter() - .map(|pat| pat.pattern.clone()) - .collect::>() - .sinto(s), - }, - _ => supposely_unreachable_fatal!( - s[span], - "PatLeafNonAdtTy"; - {ty.kind(), kind} - ), - }, - _ => kind.sinto(s), - }; - Decorated { - ty: ty.sinto(s), - span: span.sinto(s), - contents: Box::new(contents), - hir_id: None, - attributes: vec![], - } - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: ExprState<'tcx>> SInto for rustc_middle::thir::ArmId { - fn sinto(&self, s: &S) -> Arm { - s.thir().arms[*self].sinto(s) - } -} - -/// Reflects [`rustc_type_ir::IntTy`] -#[derive(AdtInto)] -#[args(, from: rustc_type_ir::IntTy, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum IntTy { - Isize, - I8, - I16, - I32, - I64, - I128, -} - -/// Reflects [`rustc_type_ir::FloatTy`] -#[derive(AdtInto)] -#[args(, from: rustc_type_ir::FloatTy, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum FloatTy { - F16, - F32, - F64, - F128, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S> SInto for rustc_ast::ast::FloatTy { - fn sinto(&self, _: &S) -> FloatTy { - use rustc_ast::ast::FloatTy as T; - match self { - T::F16 => FloatTy::F16, - T::F32 => FloatTy::F32, - T::F64 => FloatTy::F64, - T::F128 => FloatTy::F128, - } - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S> SInto for rustc_ast::ast::IntTy { - fn sinto(&self, _: &S) -> IntTy { - use rustc_ast::ast::IntTy as T; - match self { - T::Isize => IntTy::Isize, - T::I8 => IntTy::I8, - T::I16 => IntTy::I16, - T::I32 => IntTy::I32, - T::I64 => IntTy::I64, - T::I128 => IntTy::I128, - } - } -} -#[cfg(feature = "rustc")] -impl<'tcx, S> SInto for rustc_ast::ast::UintTy { - fn sinto(&self, _: &S) -> UintTy { - use rustc_ast::ast::UintTy as T; - match self { - T::Usize => UintTy::Usize, - T::U8 => UintTy::U8, - T::U16 => UintTy::U16, - T::U32 => 
UintTy::U32, - T::U64 => UintTy::U64, - T::U128 => UintTy::U128, - } - } -} - -/// Reflects [`rustc_type_ir::UintTy`] -#[derive(AdtInto)] -#[args(, from: rustc_type_ir::UintTy, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum UintTy { - Usize, - U8, - U16, - U32, - U64, - U128, -} - -impl ToString for IntTy { - fn to_string(&self) -> String { - use IntTy::*; - match self { - Isize => "isize".to_string(), - I8 => "i8".to_string(), - I16 => "i16".to_string(), - I32 => "i32".to_string(), - I64 => "i64".to_string(), - I128 => "i128".to_string(), - } - } -} - -impl ToString for UintTy { - fn to_string(&self) -> String { - use UintTy::*; - match self { - Usize => "usize".to_string(), - U8 => "u8".to_string(), - U16 => "u16".to_string(), - U32 => "u32".to_string(), - U64 => "u64".to_string(), - U128 => "u128".to_string(), - } - } -} - -/// Reflects [`rustc_middle::ty::TypeAndMut`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::TypeAndMut<'tcx>, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct TypeAndMut { - pub ty: Box, - pub mutbl: Mutability, -} - -#[cfg(feature = "rustc")] -impl> SInto> for rustc_middle::ty::List { - fn sinto(&self, s: &S) -> Vec { - self.iter().map(|x| x.sinto(s)).collect() - } -} - -/// Reflects [`rustc_middle::ty::GenericParamDef`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::GenericParamDef, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct GenericParamDef { - pub name: Symbol, - pub def_id: DefId, - pub index: u32, - pub pure_wrt_drop: bool, - #[value( - match self.kind { - ty::GenericParamDefKind::Lifetime => GenericParamDefKind::Lifetime, - ty::GenericParamDefKind::Type { has_default, synthetic } => GenericParamDefKind::Type { has_default, 
synthetic }, - ty::GenericParamDefKind::Const { has_default, is_host_effect, .. } => { - let ty = s.base().tcx.type_of(self.def_id).instantiate_identity().sinto(s); - GenericParamDefKind::Const { has_default, is_host_effect, ty } - }, - } - )] - pub kind: GenericParamDefKind, -} - -/// Reflects [`rustc_middle::ty::GenericParamDefKind`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum GenericParamDefKind { - Lifetime, - Type { - has_default: bool, - synthetic: bool, - }, - Const { - has_default: bool, - is_host_effect: bool, - ty: Ty, - }, -} - -/// Reflects [`rustc_middle::ty::Generics`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::Generics, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct TyGenerics { - pub parent: Option, - pub parent_count: usize, - #[from(own_params)] - pub params: Vec, - // pub param_def_id_to_index: FxHashMap, - pub has_self: bool, - pub has_late_bound_regions: Option, -} - -/// This type merges the information from -/// `rustc_type_ir::AliasKind` and `rustc_middle::ty::AliasTy` -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Alias { - pub kind: AliasKind, - pub args: Vec, - pub def_id: DefId, -} - -/// Reflects [`rustc_middle::ty::AliasKind`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum AliasKind { - /// The projection of a trait type: `>::Type<...>` - Projection { - /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`. - impl_expr: ImplExpr, - /// The `Type` in `Ty: Trait<..., Type = U>`. - assoc_item: AssocItem, - }, - /// An associated type in an inherent impl. - Inherent, - /// An `impl Trait` opaque type. - Opaque, - /// A type alias that references opaque types. Likely to always be normalized away. 
- Weak, -} - -#[cfg(feature = "rustc")] -impl Alias { - #[tracing::instrument(level = "trace", skip(s))] - fn from<'tcx, S: BaseState<'tcx> + HasOwnerId>( - s: &S, - alias_kind: &rustc_type_ir::AliasTyKind, - alias_ty: &rustc_middle::ty::AliasTy<'tcx>, - ) -> Self { - use rustc_type_ir::AliasTyKind as RustAliasKind; - let kind = match alias_kind { - RustAliasKind::Projection => { - use rustc_middle::ty::{Binder, TypeVisitableExt}; - let tcx = s.base().tcx; - let trait_ref = alias_ty.trait_ref(tcx); - // We don't have a clear handling of binders here; this is causing a number of - // problems in Charon. In the meantime we return something well-formed when we - // can't trait-solve. See also https://github.com/hacspec/hax/issues/495. - if trait_ref.has_escaping_bound_vars() { - warning!( - s, - "Hax frontend found a projected type with escaping bound vars. Please report https://github.com/hacspec/hax/issues/495" - ); - AliasKind::Opaque - } else { - AliasKind::Projection { - assoc_item: tcx.associated_item(alias_ty.def_id).sinto(s), - impl_expr: solve_trait(s, Binder::dummy(trait_ref)), - } - } - } - RustAliasKind::Inherent => AliasKind::Inherent, - RustAliasKind::Opaque => AliasKind::Opaque, - RustAliasKind::Weak => AliasKind::Weak, - }; - Alias { - kind, - args: alias_ty.args.sinto(s), - def_id: alias_ty.def_id.sinto(s), - } - } -} - -/// Reflects [`rustc_middle::ty::TyKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::TyKind<'tcx>, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum Ty { - Bool, - Char, - Int(IntTy), - Uint(UintTy), - Float(FloatTy), - - #[custom_arm( - rustc_middle::ty::TyKind::FnPtr(sig) => arrow_of_sig(sig, state), - rustc_middle::ty::TyKind::FnDef(def, generics) => { - let tcx = state.base().tcx; - arrow_of_sig(&tcx.fn_sig(*def).instantiate(tcx, generics), state) - }, - FROM_TYPE::Closure (_defid, generics) => { - let 
sig = generics.as_closure().sig(); - let sig = state.base().tcx.signature_unclosure(sig, rustc_hir::Safety::Safe); - arrow_of_sig(&sig, state) - }, - )] - /// Reflects [`rustc_middle::ty::TyKind::FnPtr`], [`rustc_middle::ty::TyKind::FnDef`] and [`rustc_middle::ty::TyKind::Closure`] - Arrow(Box), - - #[custom_arm( - rustc_middle::ty::TyKind::Adt(adt_def, generics) => { - let def_id = adt_def.did().sinto(state); - let generic_args: Vec = generics.sinto(state); - let trait_refs = solve_item_traits(state, adt_def.did(), generics, None); - Ty::Adt { def_id, generic_args, trait_refs } - }, - )] - Adt { - /// Reflects [`rustc_middle::ty::TyKind::Adt`]'s substitutions - generic_args: Vec, - /// Predicates required by the type, e.g. `T: Sized` for `Option` or `B: 'a + ToOwned` - /// for `Cow<'a, B>`. - trait_refs: Vec, - def_id: DefId, - }, - Foreign(DefId), - Str, - Array(Box, #[map(Box::new(x.sinto(state)))] Box), - Slice(Box), - RawPtr(Box, Mutability), - Ref(Region, Box, Mutability), - Dynamic(Vec>, Region, DynKind), - Coroutine(DefId, Vec), - Never, - Tuple(Vec), - #[custom_arm( - rustc_middle::ty::TyKind::Alias(alias_kind, alias_ty) => { - Ty::Alias(Alias::from(state, alias_kind, alias_ty)) - }, - )] - Alias(Alias), - Param(ParamTy), - Bound(DebruijnIndex, BoundTy), - Placeholder(PlaceholderType), - Infer(InferTy), - #[custom_arm(rustc_middle::ty::TyKind::Error(..) 
=> Ty::Error,)] - Error, - #[todo] - Todo(String), -} - -/// Reflects [`rustc_middle::thir::StmtKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::StmtKind<'tcx>, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum StmtKind { - Expr { - scope: Scope, - expr: Expr, - }, - Let { - remainder_scope: Scope, - init_scope: Scope, - pattern: Pat, - initializer: Option, - else_block: Option, - lint_level: LintLevel, - #[value(attribute_from_scope(gstate, init_scope).1)] - /// The attribute on this `let` binding - attributes: Vec, - }, -} - -/// Reflects [`rustc_middle::ty::Variance`] -#[derive(AdtInto)] -#[args(, from: rustc_middle::ty::Variance, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum Variance { - Covariant, - Invariant, - Contravariant, - Bivariant, -} - -/// Reflects [`rustc_middle::ty::CanonicalUserTypeAnnotation`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::CanonicalUserTypeAnnotation<'tcx>, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct CanonicalUserTypeAnnotation { - pub user_ty: CanonicalUserType, - pub span: Span, - pub inferred_ty: Ty, -} - -/// Reflects [`rustc_middle::thir::Ascription`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::thir::Ascription<'tcx>, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Ascription { - pub annotation: CanonicalUserTypeAnnotation, - pub variance: Variance, -} - -/// Reflects [`rustc_hir::RangeEnd`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::RangeEnd, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum RangeEnd { - Included, - Excluded, -} - -/// Reflects [`rustc_middle::thir::PatRange`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> + 
HasThir<'tcx>>, from: rustc_middle::thir::PatRange<'tcx>, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct PatRange { - pub lo: PatRangeBoundary, - pub hi: PatRangeBoundary, - pub end: RangeEnd, -} - -/// Reflects [`rustc_middle::thir::PatRangeBoundary`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::thir::PatRangeBoundary<'tcx>, state: S as state)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum PatRangeBoundary { - Finite(ConstantExpr), - NegInfinity, - PosInfinity, -} - -/// Reflects [`rustc_middle::ty::AdtKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::AdtKind, state: S as _s)] -pub enum AdtKind { - Struct, - Union, - Enum, -} - -// This comes from MIR -// TODO: add the generics and the predicates -/// Reflects [`rustc_middle::ty::AdtDef`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct AdtDef { - pub did: DefId, - pub adt_kind: AdtKind, - pub variants: IndexVec, - pub flags: AdtFlags, - pub repr: ReprOptions, -} - -/// Reflects [`rustc_middle::ty::ReprOptions`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::ReprOptions, state: S as s)] -pub struct ReprOptions { - pub int: Option, - #[value({ - use crate::rustc_middle::ty::util::IntTypeExt; - self.discr_type().to_ty(s.base().tcx).sinto(s) - })] - pub typ: Ty, - pub align: Option, - pub pack: Option, - pub flags: ReprFlags, - pub field_shuffle_seed: u64, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::AdtDef<'tcx> { - fn sinto(&self, s: &S) -> AdtDef { - let variants = self - .variants() - .iter_enumerated() - .map(|(variant_idx, variant)| { - let discr = if self.is_enum() { - 
self.discriminant_for_variant(s.base().tcx, variant_idx) - } else { - // Structs and unions have a single variant. - assert_eq!(variant_idx.index(), 0); - rustc_middle::ty::util::Discr { - val: 0, - ty: s.base().tcx.types.isize, - } - }; - VariantDef::sfrom(s, variant, discr) - }) - .collect(); - AdtDef { - did: self.did().sinto(s), - adt_kind: self.adt_kind().sinto(s), - variants, - flags: self.flags().sinto(s), - repr: self.repr().sinto(s), - } - } -} - -/// Describe a variant -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct VariantInformations { - pub type_namespace: DefId, - - pub typ: DefId, - pub variant: DefId, - pub variant_index: VariantIdx, - - /// A record type is a type with only one variant which is a - /// record variant. - pub typ_is_record: bool, - /// A record variant is a variant whose fields are named, a record - /// variant always has at least one field. - pub variant_is_record: bool, - /// A struct is a type with exactly one variant. Note that one - /// variant is named exactly as the type. 
- pub typ_is_struct: bool, -} - -/// Reflects [`rustc_middle::thir::PatKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::PatKind<'tcx>, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -#[append(rustc_middle::thir::PatKind::Leaf {..} => fatal!(gstate, "PatKind::Leaf: should never come up"),)] -pub enum PatKind { - Wild, - AscribeUserType { - ascription: Ascription, - subpattern: Pat, - }, - #[custom_arm( - rustc_middle::thir::PatKind::Binding {name, mode, var, ty, subpattern, is_primary} => { - let local_ctx = gstate.base().local_ctx; - local_ctx.borrow_mut().vars.insert(*var, name.to_string()); - PatKind::Binding { - mode: mode.sinto(gstate), - var: var.sinto(gstate), - ty: ty.sinto(gstate), - subpattern: subpattern.sinto(gstate), - is_primary: is_primary.sinto(gstate), - } - } - )] - Binding { - mode: BindingMode, - var: LocalIdent, // name VS var? TODO - ty: Ty, - subpattern: Option, - is_primary: bool, - }, - #[custom_arm( - FROM_TYPE::Variant {adt_def, variant_index, args, subpatterns} => { - let variants = adt_def.variants(); - let variant: &rustc_middle::ty::VariantDef = &variants[*variant_index]; - TO_TYPE::Variant { - info: get_variant_information(adt_def, *variant_index, gstate), - subpatterns: subpatterns - .iter() - .map(|f| FieldPat { - field: variant.fields[f.field].did.sinto(gstate), - pattern: f.pattern.sinto(gstate), - }) - .collect(), - args: args.sinto(gstate), - } - } - )] - Variant { - info: VariantInformations, - args: Vec, - subpatterns: Vec, - }, - #[disable_mapping] - Tuple { - subpatterns: Vec, - }, - Deref { - subpattern: Pat, - }, - DerefPattern { - subpattern: Pat, - }, - Constant { - value: ConstantExpr, - }, - InlineConstant { - def: DefId, - subpattern: Pat, - }, - Range(PatRange), - Slice { - prefix: Vec, - slice: Option, - suffix: Vec, - }, - Array { - prefix: Vec, - slice: Option, - suffix: Vec, - }, - Or { - pats: Vec, - }, - Never, - 
Error(ErrorGuaranteed), -} - -/// Reflects [`rustc_middle::thir::Arm`] -#[derive(AdtInto)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::Arm<'tcx>, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Arm { - pub pattern: Pat, - pub guard: Option, - pub body: Expr, - pub lint_level: LintLevel, - pub scope: Scope, - pub span: Span, - #[value(attribute_from_scope(gstate, scope).1)] - attributes: Vec, -} - -/// Reflects [`rustc_hir::Safety`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::Safety, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum Safety { - Unsafe, - Safe, -} - -/// Reflects [`rustc_middle::ty::adjustment::PointerCoercion`] -#[derive(AdtInto)] -#[args(, from: rustc_middle::ty::adjustment::PointerCoercion, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum PointerCoercion { - ReifyFnPointer, - UnsafeFnPointer, - ClosureFnPointer(Safety), - MutToConstPointer, - ArrayToPointer, - Unsize, -} - -/// Reflects [`rustc_middle::mir::BorrowKind`] -#[derive(AdtInto)] -#[args(, from: rustc_middle::mir::BorrowKind, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum BorrowKind { - Shared, - Fake(FakeBorrowKind), - Mut { kind: MutBorrowKind }, -} - -/// Reflects [`rustc_middle::mir::MutBorrowKind`] -#[derive(AdtInto)] -#[args(, from: rustc_middle::mir::MutBorrowKind, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum MutBorrowKind { - Default, - TwoPhaseBorrow, - ClosureCapture, -} - -/// Reflects [`rustc_middle::mir::FakeBorrowKind`] -#[derive(AdtInto)] -#[args(, from: rustc_middle::mir::FakeBorrowKind, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Copy, Clone, Debug, JsonSchema, Hash, 
PartialEq, Eq, PartialOrd, Ord)] -pub enum FakeBorrowKind { - /// A shared (deep) borrow. Data must be immutable and is aliasable. - Deep, - /// The immediately borrowed place must be immutable, but projections from - /// it don't need to be. This is used to prevent match guards from replacing - /// the scrutinee. For example, a fake borrow of `a.b` doesn't - /// conflict with a mutable borrow of `a.b.c`. - Shallow, -} - -/// Reflects [`rustc_ast::ast::StrStyle`] -#[derive(AdtInto)] -#[args(, from: rustc_ast::ast::StrStyle, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum StrStyle { - Cooked, - Raw(u8), -} - -/// Reflects [`rustc_ast::ast::LitKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitKind, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum LitKind { - Str(Symbol, StrStyle), - ByteStr(Vec, StrStyle), - CStr(Vec, StrStyle), - Byte(u8), - Char(char), - Int( - #[serde(with = "serialize_int::unsigned")] - #[schemars(with = "String")] - u128, - LitIntType, - ), - Float(Symbol, LitFloatType), - Bool(bool), - Err(ErrorGuaranteed), -} - -#[cfg(feature = "rustc")] -impl SInto for rustc_data_structures::packed::Pu128 { - fn sinto(&self, _s: &S) -> u128 { - self.0 - } -} - -// FIXME: typo: invo**C**ation -#[allow(rustdoc::private_intra_doc_links)] -/// Describe a macro invocation, using -/// [`macro_invocation_of_raw_mac_invocation`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct MacroInvokation { - pub macro_ident: DefId, - pub argument: String, - pub span: Span, -} - -/// Reflects [`rustc_hir::ImplicitSelfKind`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::ImplicitSelfKind, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum ImplicitSelfKind { - Imm, - Mut, - RefImm, - RefMut, - None, 
-} - -/// Reflects [`rustc_ast::token::CommentKind`] -#[derive(AdtInto)] -#[args(, from: rustc_ast::token::CommentKind, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum CommentKind { - Line, - Block, -} - -/// Reflects [`rustc_ast::ast::AttrArgs`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::AttrArgs, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum AttrArgs { - Empty, - Delimited(DelimArgs), - - Eq(Span, AttrArgsEq), - // #[todo] - // Todo(String), -} - -/// Reflects [`rustc_ast::ast::AttrArgsEq`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::AttrArgsEq, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum AttrArgsEq { - Hir(MetaItemLit), - #[todo] - Ast(String), - // Ast(P), -} - -/// Reflects [`rustc_ast::ast::MetaItemLit`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::MetaItemLit, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct MetaItemLit { - pub symbol: Symbol, - pub suffix: Option, - pub kind: LitKind, - pub span: Span, -} - -/// Reflects [`rustc_ast::ast::AttrItem`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::AttrItem, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct AttrItem { - #[map(rustc_ast_pretty::pprust::path_to_string(x))] - pub path: String, - pub args: AttrArgs, - pub tokens: Option, -} - -#[cfg(feature = "rustc")] -impl SInto for rustc_ast::tokenstream::LazyAttrTokenStream { - fn sinto(&self, st: &S) -> String { - rustc_ast::tokenstream::TokenStream::new(self.to_attr_token_stream().to_token_trees()) - 
.sinto(st) - } -} - -/// Reflects [`rustc_ast::ast::NormalAttr`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::NormalAttr, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct NormalAttr { - pub item: AttrItem, - pub tokens: Option, -} - -/// Reflects [`rustc_ast::AttrKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::AttrKind, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum AttrKind { - Normal(NormalAttr), - DocComment(CommentKind, Symbol), -} - -/// Reflects [`rustc_middle::thir::Param`] -#[derive(AdtInto)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::Param<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Param { - pub pat: Option, - pub ty: Ty, - pub ty_span: Option, - pub self_kind: Option, - pub hir_id: Option, - #[value(hir_id.map(|id| { - s.base().tcx.hir().attrs(id).sinto(s) - }).unwrap_or(vec![]))] - /// attributes on this parameter - pub attributes: Vec, -} - -/// Reflects [`rustc_middle::thir::ExprKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: ExprState<'tcx>>, from: rustc_middle::thir::ExprKind<'tcx>, state: S as gstate)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -#[append( - rustc_middle::thir::ExprKind::Scope {..} => { - fatal!(gstate, "Scope should have been eliminated at this point"); - }, - rustc_middle::thir::ExprKind::Field {..} => { - fatal!(gstate, "Field should have been eliminated at this point"); - }, - rustc_middle::thir::ExprKind::NonHirLiteral {..} => { - fatal!(gstate, "NonHirLiteral should have been eliminated at this point"); - }, -)] -pub enum ExprKind { - Box { - value: Expr, - }, - #[disable_mapping] - MacroInvokation(MacroInvokation), - /// Resugared macros calls. This is deprecated: see - /// . 
- If { - if_then_scope: Scope, - cond: Expr, - then: Expr, - else_opt: Option, - }, - #[map({ - let e = gstate.thir().exprs[*fun].unroll_scope(gstate); - let (generic_args, r#trait, bounds_impls); - // A function is any expression whose type is something callable - let fun = match ty.kind() { - rustc_middle::ty::TyKind::FnDef(def_id, generics) => { - let (hir_id, attributes) = e.hir_id_and_attributes(gstate); - let hir_id = hir_id.map(|hir_id| hir_id.index()); - let contents = Box::new(ExprKind::GlobalName { - id: def_id.sinto(gstate) - }); - let mut translated_generics = generics.sinto(gstate); - let tcx = gstate.base().tcx; - r#trait = (|| { - let assoc_item = tcx.opt_associated_item(*def_id)?; - let impl_expr = self_clause_for_item(gstate, &assoc_item, generics)?; - let assoc_generics = tcx.generics_of(assoc_item.def_id); - let assoc_generics = translated_generics.drain(0..assoc_generics.parent_count).collect(); - Some((impl_expr, assoc_generics)) - })(); - generic_args = translated_generics; - bounds_impls = solve_item_traits(gstate, *def_id, generics, None); - Expr { - contents, - span: e.span.sinto(gstate), - ty: e.ty.sinto(gstate), - hir_id, - attributes, - } - }, - rustc_middle::ty::TyKind::FnPtr(..) => { - generic_args = vec![]; // A function pointer has no generics - bounds_impls = vec![]; // A function pointer has no bounds - r#trait = None; // A function pointer is not a method - e.sinto(gstate) - }, - ty_kind => supposely_unreachable_fatal!( - gstate[e.span], - "CallNotTyFnDef"; - {e, ty_kind} - ) - }; - TO_TYPE::Call { - ty: ty.sinto(gstate), - args: args.sinto(gstate), - generic_args, - from_hir_call: from_hir_call.sinto(gstate), - fn_span: fn_span.sinto(gstate), - bounds_impls, - r#trait, - fun, - } - })] - /// A call to a function or a method. - /// - /// Example: `f(0i8)`, where `f` has signature `fn f(t: T) -> ()`. - Call { - /// The type of the function, substitution applied. - /// - /// Example: for the call `f(0i8)`, this is `i8 -> ()`. 
- ty: Ty, - /// The function itself. This can be something else than a - /// name, e.g. a closure. - /// - /// Example: for the call `f(0i8)`, this is `f`. - fun: Expr, // TODO: can [ty] and [fun.ty] be different? - /// The arguments given to the function. - /// - /// Example: for the call `f(0i8)`, this is `[0i8]`. - args: Vec, - from_hir_call: bool, - fn_span: Span, - /// The generic arguments given to the function. - /// - /// Example: for the call `f(0i8)`, this is the type `i8`. - #[not_in_source] - generic_args: Vec, - /// The implementations for the bounds of the function. - /// - /// Example: for the call `f(0i8)`, this is two implementation - /// expressions, one for the explicit bound `i8: Clone` and - /// one for the implicit `i8: Sized`. - #[not_in_source] - bounds_impls: Vec, - /// `trait` is `None` if this is a function call or a method - /// to an inherent trait. If this is a method call from a - /// trait `Trait`, then it contains the concrete - /// implementation of `Trait` it is called on, and the generic - /// arguments that comes from the trait declaration. - /// - /// Example: `f(0i8)` is a function call, hence the field - /// `impl` is `None`. - /// - /// Example: - /// ```ignore - /// trait MyTrait { - /// fn meth(...) {...} - /// } - /// fn example_call>(x: SelfType) { - /// x.meth::(...) - /// } - /// ``` - /// Here, in the call `x.meth::(...)`, `r#trait` will - /// be `Some((..., [SelfType, TraitType, 12]))`, and `generic_args` - /// will be `[String]`. - #[not_in_source] - r#trait: Option<(ImplExpr, Vec)>, - }, - Deref { - arg: Expr, - }, - Binary { - op: BinOp, - lhs: Expr, - rhs: Expr, - }, - LogicalOp { - op: LogicalOp, - lhs: Expr, - rhs: Expr, - }, - Unary { - op: UnOp, - arg: Expr, - }, - Cast { - source: Expr, - }, - Use { - source: Expr, - }, // Use a lexpr to get a vexpr. 
- NeverToAny { - source: Expr, - }, - PointerCoercion { - cast: PointerCoercion, - source: Expr, - }, - Loop { - body: Expr, - }, - Match { - scrutinee: Expr, - arms: Vec, - }, - Let { - expr: Expr, - pat: Pat, - }, - Block { - #[serde(flatten)] - block: Block, - }, - Assign { - lhs: Expr, - rhs: Expr, - }, - AssignOp { - op: BinOp, - lhs: Expr, - rhs: Expr, - }, - #[disable_mapping] - Field { - field: DefId, - lhs: Expr, - }, - - #[disable_mapping] - TupleField { - field: usize, - lhs: Expr, - }, - Index { - lhs: Expr, - index: Expr, - }, - VarRef { - id: LocalIdent, - }, - #[disable_mapping] - ConstRef { - id: ParamConst, - }, - #[disable_mapping] - GlobalName { - id: GlobalIdent, - }, - UpvarRef { - closure_def_id: DefId, - var_hir_id: LocalIdent, - }, - Borrow { - borrow_kind: BorrowKind, - arg: Expr, - }, - AddressOf { - mutability: Mutability, - arg: Expr, - }, - Break { - label: Scope, - value: Option, - }, - Continue { - label: Scope, - }, - Return { - value: Option, - }, - ConstBlock { - did: DefId, - args: Vec, - }, - Repeat { - value: Expr, - count: ConstantExpr, - }, - Array { - fields: Vec, - }, - Tuple { - fields: Vec, - }, - Adt(AdtExpr), - PlaceTypeAscription { - source: Expr, - user_ty: Option, - }, - ValueTypeAscription { - source: Expr, - user_ty: Option, - }, - #[custom_arm(FROM_TYPE::Closure(e) => { - let (thir, expr_entrypoint) = get_thir(e.closure_id, gstate); - let s = &State::from_thir(gstate.base(), gstate.owner_id(), thir.clone()); - TO_TYPE::Closure { - params: thir.params.raw.sinto(s), - body: expr_entrypoint.sinto(s), - upvars: e.upvars.sinto(gstate), - movability: e.movability.sinto(gstate) - } - }, - )] - Closure { - params: Vec, - body: Expr, - upvars: Vec, - movability: Option, - }, - Literal { - lit: Spanned, - neg: bool, // TODO - }, - //zero space type - // This is basically used for functions! e.g. 
`::from` - ZstLiteral { - user_ty: Option, - }, - NamedConst { - def_id: GlobalIdent, - args: Vec, - user_ty: Option, - #[not_in_source] - #[value({ - let tcx = gstate.base().tcx; - tcx.opt_associated_item(*def_id).as_ref().and_then(|assoc| { - self_clause_for_item(gstate, assoc, args) - }) - })] - r#impl: Option, - }, - ConstParam { - param: ParamConst, - def_id: GlobalIdent, - }, - StaticRef { - alloc_id: u64, - ty: Ty, - def_id: GlobalIdent, - }, - Yield { - value: Expr, - }, - #[todo] - Todo(String), -} - -#[cfg(feature = "rustc")] -pub trait ExprKindExt<'tcx> { - fn hir_id_and_attributes>( - &self, - s: &S, - ) -> (Option, Vec); - fn unroll_scope + HasThir<'tcx>>( - &self, - s: &S, - ) -> rustc_middle::thir::Expr<'tcx>; -} - -#[cfg(feature = "rustc")] -impl<'tcx> ExprKindExt<'tcx> for rustc_middle::thir::Expr<'tcx> { - fn hir_id_and_attributes>( - &self, - s: &S, - ) -> (Option, Vec) { - match &self.kind { - rustc_middle::thir::ExprKind::Scope { - region_scope: scope, - .. - } => attribute_from_scope(s, scope), - _ => (None, vec![]), - } - } - fn unroll_scope + HasThir<'tcx>>( - &self, - s: &S, - ) -> rustc_middle::thir::Expr<'tcx> { - // TODO: when we see a loop, we should lookup its label! label is actually a scope id - // we remove scopes here, whence the TODO - match self.kind { - rustc_middle::thir::ExprKind::Scope { value, .. 
} => { - s.thir().exprs[value].unroll_scope(s) - } - _ => self.clone(), - } - } -} - -/// Reflects [`rustc_middle::ty::FnSig`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::FnSig<'tcx>, state: S as s)] -pub struct TyFnSig { - #[value(self.inputs().sinto(s))] - pub inputs: Vec, - #[value(self.output().sinto(s))] - pub output: Ty, - pub c_variadic: bool, - pub safety: Safety, - pub abi: Abi, -} - -/// Reflects [`rustc_middle::ty::PolyFnSig`] -pub type PolyFnSig = Binder; - -/// Function definition -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct FnDef { - pub header: FnHeader, - pub params: Vec, - pub ret: Ty, - pub body: Body, - pub sig_span: Span, -} - -/// Reflects [`rustc_hir::FnDecl`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::FnDecl<'tcx>, state: S as tcx)] -pub struct FnDecl { - pub inputs: Vec, - pub output: FnRetTy, - pub c_variadic: bool, - pub implicit_self: ImplicitSelfKind, - pub lifetime_elision_allowed: bool, -} - -/// Reflects [`rustc_hir::FnSig`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::FnSig<'tcx>, state: S as tcx)] -pub struct FnSig { - pub header: FnHeader, - pub decl: FnDecl, - pub span: Span, -} - -/// Reflects [`rustc_hir::FnHeader`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::FnHeader, state: S as tcx)] -pub struct FnHeader { - pub safety: Safety, - pub constness: Constness, - pub asyncness: IsAsync, - pub abi: Abi, -} - -pub type ThirBody = Expr; - -#[cfg(feature = "rustc")] -impl<'x: 'tcx, 'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_hir::Ty<'x> { - fn sinto(self: &rustc_hir::Ty<'x>, s: &S) -> Ty { - // 
**Important:** - // We need a local id here, and we get it from the owner id, which must - // be local. It is safe to do so, because if we have access to a HIR ty, - // it necessarily means we are exploring a local item (we don't have - // access to the HIR of external objects, only their MIR). - let ctx = - rustc_hir_analysis::collect::ItemCtxt::new(s.base().tcx, s.owner_id().expect_local()); - ctx.lower_ty(self).sinto(s) - } -} - -/// Reflects [`rustc_hir::UseKind`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::UseKind, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum UseKind { - Single, - Glob, - ListStem, -} - -/// Reflects [`rustc_hir::IsAuto`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::IsAuto, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum IsAuto { - Yes, - No, -} - -/// Reflects [`rustc_hir::Defaultness`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::Defaultness, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum Defaultness { - Default { has_value: bool }, - Final, -} - -/// Reflects [`rustc_hir::ImplPolarity`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::ImplPolarity, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum ImplPolarity { - Positive, - Negative(Span), -} - -/// Reflects [`rustc_hir::Constness`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::Constness, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum Constness { - Const, - NotConst, -} - -/// Reflects [`rustc_hir::Generics`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::Generics<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Generics { - pub params: Vec>, - #[value(region_bounds_at_current_owner(tcx))] - pub bounds: GenericBounds, - pub 
has_where_clause_predicates: bool, - pub where_clause_span: Span, - pub span: Span, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto> - for rustc_hir::ImplItemRef -{ - fn sinto(&self, s: &S) -> ImplItem { - let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; - let impl_item = tcx.hir().impl_item(self.id); - let s = with_owner_id(s.base(), (), (), impl_item.owner_id.to_def_id()); - impl_item.sinto(&s) - } -} - -/// Reflects [`rustc_hir::ParamName`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum ParamName { - Plain(LocalIdent), - Fresh, - Error, -} - -/// Reflects [`rustc_hir::LifetimeParamKind`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::LifetimeParamKind, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum LifetimeParamKind { - Explicit, - Elided(MissingLifetimeKind), - Error, -} - -/// Reflects [`rustc_hir::AnonConst`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::AnonConst, state: S as s)] -pub struct AnonConst { - pub hir_id: HirId, - pub def_id: GlobalIdent, - #[map({ - body_from_id::(*x, &with_owner_id(s.base(), (), (), hir_id.owner.to_def_id())) - })] - pub body: Body, -} - -/// Reflects [`rustc_hir::ConstArg`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::ConstArg<'tcx>, state: S as s)] -pub struct ConstArg { - pub hir_id: HirId, - pub kind: ConstArgKind, - pub is_desugared_from_effects: bool, -} - -/// Reflects [`rustc_hir::ConstArgKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::ConstArgKind<'tcx>, state: S as s)] -pub enum ConstArgKind { - Path(QPath), - Anon(AnonConst), -} - -/// Reflects [`rustc_hir::GenericParamKind`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, 
Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::GenericParamKind<'tcx>, state: S as tcx)] -pub enum GenericParamKind { - Lifetime { - kind: LifetimeParamKind, - }, - Type { - /// On use site, Rust always give us all the generic - /// parameters, no matter the defaultness. This information is - /// thus not so useful. At the same time, as discussed in - /// https://github.com/hacspec/hax/issues/310, extracting this - /// default type causes failures when querying Rust for trait - /// resolution. We thus decided to disable this feature. If - /// this default type information is useful to you, please - /// open an issue on https://github.com/hacspec/hax. - #[map(x.map(|_ty| ()))] - default: Option<()>, - synthetic: bool, - }, - Const { - ty: Ty, - default: Option>, - }, -} - -/// Reflects [`rustc_hir::GenericParam`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::GenericParam<'tcx>, state: S as s)] -pub struct GenericParam { - pub hir_id: HirId, - pub def_id: GlobalIdent, - #[map(match x { - rustc_hir::ParamName::Plain(loc_ident) => - ParamName::Plain(LocalIdent { - name: loc_ident.as_str().to_string(), - id: self.hir_id.sinto(s) - }), - rustc_hir::ParamName::Fresh => - ParamName::Fresh, - rustc_hir::ParamName::Error => - ParamName::Error, - })] - pub name: ParamName, - pub span: Span, - pub pure_wrt_drop: bool, - pub kind: GenericParamKind, - pub colon_span: Option, - #[value(s.base().tcx.hir().attrs(*hir_id).sinto(s))] - attributes: Vec, -} - -/// Reflects [`rustc_hir::ImplItem`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::ImplItem<'tcx>, state: S as s)] -pub struct ImplItem { - pub ident: Ident, - pub owner_id: DefId, - pub generics: Generics, - pub kind: ImplItemKind, - pub defaultness: Defaultness, - pub span: Span, - pub vis_span: Span, - 
#[value(ItemAttributes::from_owner_id(s, *owner_id))] - /// the attributes on this impl item - pub attributes: ItemAttributes, -} - -/// Reflects [`rustc_hir::ImplItemKind`], inlining the body of the items. -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::ImplItemKind<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum ImplItemKind { - Const(Ty, Body), - #[custom_arm(rustc_hir::ImplItemKind::Fn(sig, body) => { - ImplItemKind::Fn(make_fn_def::(sig, body, s)) - },)] - Fn(FnDef), - #[custom_arm(rustc_hir::ImplItemKind::Type(t) => { - let parent_bounds = { - let (tcx, owner_id) = (s.base().tcx, s.owner_id()); - let assoc_item = tcx.opt_associated_item(owner_id).unwrap(); - let impl_did = assoc_item.impl_container(tcx).unwrap(); - tcx.explicit_item_bounds(assoc_item.trait_item_def_id.unwrap()) - .skip_binder() // Skips an `EarlyBinder`, likely for GATs - .iter() - .copied() - .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, impl_did, clause, span)) - .collect::>() - }; - ImplItemKind::Type { - ty: t.sinto(s), - parent_bounds - } - },)] - /// An associated type with its parent bounds inlined. - Type { - ty: Ty, - parent_bounds: Vec<(Clause, ImplExpr, Span)>, - }, -} - -/// Reflects [`rustc_hir::AssocItemKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::AssocItemKind, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum AssocItemKind { - Const, - Fn { has_self: bool }, - Type, -} - -#[cfg(feature = "rustc")] -impl< - 'tcx, - S, - D: Clone, - T: SInto + rustc_middle::ty::TypeFoldable>, - > SInto for rustc_middle::ty::EarlyBinder<'tcx, T> -{ - fn sinto(&self, s: &S) -> D { - self.clone().instantiate_identity().sinto(s) - } -} - -/// Reflects [`rustc_hir::Impl`]. 
-#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::Impl<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Impl { - pub safety: Safety, - pub polarity: ImplPolarity, - pub defaultness: Defaultness, - pub defaultness_span: Option, - pub generics: Generics, - #[map({ - s.base().tcx.impl_trait_ref(s.owner_id()).sinto(s) - })] - pub of_trait: Option, - pub self_ty: Ty, - pub items: Vec>, - #[value({ - let (tcx, owner_id) = (s.base().tcx, s.owner_id()); - let trait_did = tcx.trait_id_of_impl(owner_id); - if let Some(trait_did) = trait_did { - tcx.explicit_super_predicates_of(trait_did) - .predicates - .iter() - .copied() - .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, owner_id, clause, span)) - .collect::>() - } else { - vec![] - } - })] - /// The clauses and impl expressions corresponding to the impl's - /// trait (if not inherent) super bounds (if any). - pub parent_bounds: Vec<(Clause, ImplExpr, Span)>, -} - -/// Reflects [`rustc_hir::IsAsync`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::IsAsync, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum IsAsync { - Async(Span), - NotAsync, -} - -/// Reflects [`rustc_hir::FnRetTy`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::FnRetTy<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum FnRetTy { - DefaultReturn(Span), - Return(Ty), -} - -/// Reflects [`rustc_hir::VariantData`] -#[derive_group(Serializers)] -#[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::VariantData<'tcx>, state: S as tcx)] -pub enum VariantData { - Struct { - fields: Vec, - recovered: bool, - }, - Tuple(Vec, HirId, GlobalIdent), - Unit(HirId, GlobalIdent), -} - -#[cfg(feature = "rustc")] -impl SInto for rustc_ast::ast::Recovered { - fn 
sinto(&self, _s: &S) -> bool { - match self { - Self::Yes(_) => true, - Self::No => false, - } - } -} - -/// Reflects [`rustc_hir::FieldDef`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::FieldDef<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct HirFieldDef { - pub span: Span, - pub vis_span: Span, - pub ident: Ident, - pub hir_id: HirId, - pub def_id: GlobalIdent, - pub ty: Ty, - #[value(s.base().tcx.hir().attrs(*hir_id).sinto(s))] - attributes: Vec, -} - -/// Reflects [`rustc_hir::Variant`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::Variant<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Variant { - pub ident: Ident, - pub hir_id: HirId, - pub def_id: GlobalIdent, - #[map(x.sinto(&with_owner_id(s.base(), (), (), self.def_id.to_def_id())))] - pub data: VariantData, - pub disr_expr: Option>, - #[value({ - let tcx = s.base().tcx; - let variant = tcx - .adt_def(s.owner_id()) - .variants() - .into_iter() - .find(|v| v.def_id == self.def_id.into()).unwrap(); - variant.discr.sinto(s) - })] - pub discr: DiscriminantDefinition, - pub span: Span, - #[value(s.base().tcx.hir().attrs(*hir_id).sinto(s))] - pub attributes: Vec, -} - -/// Reflects [`rustc_hir::UsePath`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::UsePath<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct UsePath { - pub span: Span, - #[map(x.iter().map(|res| res.sinto(s)).collect())] - pub res: Vec, - pub segments: Vec, - #[value(self.segments.iter().last().and_then(|segment| { - match s.base().tcx.hir_node_by_def_id(segment.hir_id.owner.def_id) { - rustc_hir::Node::Item(rustc_hir::Item { - ident, - kind: rustc_hir::ItemKind::Use(_, _), - .. 
- }) if ident.name.to_ident_string() != "" => Some(ident.name.to_ident_string()), - _ => None, - } - }))] - pub rename: Option, -} - -/// Reflects [`rustc_hir::def::Res`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::def::Res, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum Res { - Def(DefKind, DefId), - PrimTy(PrimTy), - SelfTyParam { - trait_: DefId, - }, - SelfTyAlias { - alias_to: DefId, - forbid_generic: bool, - is_trait_impl: bool, - }, - SelfCtor(DefId), - Local(HirId), - ToolMod, - NonMacroAttr(NonMacroAttrKind), - Err, -} - -/// Reflects [`rustc_hir::PrimTy`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::PrimTy, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum PrimTy { - Int(IntTy), - Uint(UintTy), - Float(FloatTy), - Str, - Bool, - Char, -} - -/// Reflects [`rustc_hir::def::NonMacroAttrKind`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::def::NonMacroAttrKind, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum NonMacroAttrKind { - Builtin(Symbol), - Tool, - DeriveHelper, - DeriveHelperCompat, -} - -/// Reflects [`rustc_hir::PathSegment`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::PathSegment<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct PathSegment { - pub ident: Ident, - pub hir_id: HirId, - pub res: Res, - #[map(args.map(|args| args.sinto(s)))] - pub args: Option, - pub infer_args: bool, -} - -/// Reflects [`rustc_hir::ItemKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::ItemKind<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum ItemKind { - #[disable_mapping] - MacroInvokation(MacroInvokation), - ExternCrate(Option), - Use(UsePath, UseKind), - Static(Ty, Mutability, Body), - Const(Ty, Generics, Body), - 
#[custom_arm( - rustc_hir::ItemKind::Fn(sig, generics, body) => { - ItemKind::Fn(generics.sinto(s), make_fn_def::(sig, body, s)) - } - )] - Fn(Generics, FnDef), - Macro(MacroDef, MacroKind), - Mod(Vec>), - ForeignMod { - abi: Abi, - items: Vec>, - }, - GlobalAsm(InlineAsm), - TyAlias( - #[map({ - let s = &State { - base: Base {ty_alias_mode: true, ..s.base()}, - owner_id: s.owner_id(), - thir: (), - mir: (), - binder: (), - }; - x.sinto(s) - })] - Ty, - Generics, - ), - OpaqueTy(OpaqueTy), - Enum( - EnumDef, - Generics, - #[value({ - let tcx = s.base().tcx; - tcx.repr_options_of_def(s.owner_id().expect_local()).sinto(s) - })] - ReprOptions, - ), - Struct(VariantData, Generics), - Union(VariantData, Generics), - Trait( - IsAuto, - Safety, - Generics, - GenericBounds, - Vec>, - ), - TraitAlias(Generics, GenericBounds), - Impl(Impl), -} - -pub type EnumDef = Vec>; - -/// Reflects [`rustc_hir::TraitItemKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::TraitItemKind<'tcx>, state: S as tcx)] -#[derive(Clone, Debug, JsonSchema)] -#[derive_group(Serializers)] -pub enum TraitItemKind { - Const(Ty, Option), - #[custom_arm( - rustc_hir::TraitItemKind::Fn(sig, rustc_hir::TraitFn::Required(id)) => { - TraitItemKind::RequiredFn(sig.sinto(tcx), id.sinto(tcx)) - } - )] - /// Reflects a required [`rustc_hir::TraitItemKind::Fn`] - RequiredFn(FnSig, Vec), - #[custom_arm( - rustc_hir::TraitItemKind::Fn(sig, rustc_hir::TraitFn::Provided(body)) => { - TraitItemKind::ProvidedFn(sig.sinto(tcx), make_fn_def::(sig, body, tcx)) - } - )] - /// Reflects a provided [`rustc_hir::TraitItemKind::Fn`] - ProvidedFn(FnSig, FnDef), - #[custom_arm( - rustc_hir::TraitItemKind::Type(b, ty) => { - TraitItemKind::Type(b.sinto(tcx), ty.map(|t| t.sinto(tcx))) - } - )] - Type(GenericBounds, Option), -} - -/// Reflects [`rustc_hir::TraitItem`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::TraitItem<'tcx>, state: S as s)] 
-#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct TraitItem { - pub ident: Ident, - pub owner_id: DefId, - pub generics: Generics, - pub kind: TraitItemKind, - pub span: Span, - pub defaultness: Defaultness, - #[value(ItemAttributes::from_owner_id(s, *owner_id))] - /// The attributes on this trait item - pub attributes: ItemAttributes, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto> - for rustc_hir::EnumDef<'tcx> -{ - fn sinto(&self, s: &S) -> EnumDef { - self.variants.iter().map(|v| v.sinto(s)).collect() - } -} - -#[cfg(feature = "rustc")] -impl<'a, S: UnderOwnerState<'a>, Body: IsBody> SInto> - for rustc_hir::TraitItemRef -{ - fn sinto(&self, s: &S) -> TraitItem { - let s = with_owner_id(s.base(), (), (), self.id.owner_id.to_def_id()); - let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; - tcx.hir().trait_item(self.id).sinto(&s) - } -} - -#[cfg(feature = "rustc")] -impl<'a, 'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto>> - for rustc_hir::Mod<'a> -{ - fn sinto(&self, s: &S) -> Vec> { - inline_macro_invocations(self.item_ids.iter().copied(), s) - // .iter() - // .map(|item_id| item_id.sinto(s)) - // .collect() - } -} - -/// Reflects [`rustc_hir::ForeignItemKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::ForeignItemKind<'tcx>, state: S as tcx)] -#[derive(Clone, Debug, JsonSchema)] -#[derive_group(Serializers)] -pub enum ForeignItemKind { - Fn(FnDecl, Vec, Generics, Safety), - Static(Ty, Mutability, Safety), - Type, -} - -/// Reflects [`rustc_hir::ForeignItem`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::ForeignItem<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct ForeignItem { - pub ident: Ident, - pub kind: ForeignItemKind, - pub owner_id: DefId, - pub span: Span, - pub vis_span: Span, -} - -#[cfg(feature = "rustc")] -impl<'a, S: UnderOwnerState<'a>, Body: 
IsBody> SInto> - for rustc_hir::ForeignItemRef -{ - fn sinto(&self, s: &S) -> ForeignItem { - let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; - tcx.hir().foreign_item(self.id).sinto(s) - } -} - -/// Reflects [`rustc_hir::OpaqueTy`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: rustc_hir::OpaqueTy<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct OpaqueTy { - pub generics: Generics, - pub bounds: GenericBounds, - pub origin: OpaqueTyOrigin, - pub in_trait: bool, -} - -/// Reflects [`rustc_hir::LifetimeName`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::LifetimeName, state: S as tcx)] -#[derive(Clone, Debug, JsonSchema)] -#[derive_group(Serializers)] -pub enum LifetimeName { - Param(GlobalIdent), - ImplicitObjectLifetimeDefault, - Error, - Infer, - Static, -} - -/// Reflects [`rustc_hir::Lifetime`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::Lifetime, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Lifetime { - pub hir_id: HirId, - pub ident: Ident, - pub res: LifetimeName, -} - -/// Reflects [`rustc_middle::ty::TraitRef`] -#[derive_group(Serializers)] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::TraitRef<'tcx>, state: S as tcx)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct TraitRef { - pub def_id: DefId, - #[from(args)] - /// reflects the `args` field - pub generic_args: Vec, -} - -/// Reflects [`rustc_middle::ty::TraitPredicate`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::TraitPredicate<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct TraitPredicate { - pub trait_ref: TraitRef, - #[map(*x == rustc_middle::ty::PredicatePolarity::Positive)] - 
#[from(polarity)] - pub is_positive: bool, -} - -/// Reflects [`rustc_middle::ty::OutlivesPredicate`] as a named struct -/// instead of a tuple struct. This is because the script converting -/// JSONSchema types to OCaml doesn't support tuple structs, and this -/// is the only tuple struct in the whole AST. -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct OutlivesPredicate { - pub lhs: T, - pub rhs: Region, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>, T, U> SInto> - for rustc_middle::ty::OutlivesPredicate<'tcx, T> -where - T: SInto, -{ - fn sinto(&self, s: &S) -> OutlivesPredicate where { - OutlivesPredicate { - lhs: self.0.sinto(s), - rhs: self.1.sinto(s), - } - } -} - -/// Reflects [`rustc_middle::ty::RegionOutlivesPredicate`] -pub type RegionOutlivesPredicate = OutlivesPredicate; -/// Reflects [`rustc_middle::ty::TypeOutlivesPredicate`] -pub type TypeOutlivesPredicate = OutlivesPredicate; - -/// Reflects [`rustc_middle::ty::Term`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum Term { - Ty(Ty), - Const(ConstantExpr), -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::Term<'tcx> { - fn sinto(&self, s: &S) -> Term { - use rustc_middle::ty::TermKind; - match self.unpack() { - TermKind::Ty(ty) => Term::Ty(ty.sinto(s)), - TermKind::Const(c) => Term::Const(c.sinto(s)), - } - } -} - -/// Expresses a constraints over an associated type. -/// -/// For instance: -/// ```text -/// fn f>(...) -/// ^^^^^^^^^^ -/// ``` -/// (provided the trait `Foo` has an associated type `S`). -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct ProjectionPredicate { - /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`. - pub impl_expr: ImplExpr, - /// The `Type` in `Ty: Trait<..., Type = U>`. 
- pub assoc_item: AssocItem, - /// The type `U` in `Ty: Trait<..., Type = U>`. - pub ty: Ty, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderBinderState<'tcx>> SInto - for rustc_middle::ty::ProjectionPredicate<'tcx> -{ - fn sinto(&self, s: &S) -> ProjectionPredicate { - let tcx = s.base().tcx; - let alias_ty = &self.projection_term.expect_ty(tcx); - let poly_trait_ref = s.binder().rebind(alias_ty.trait_ref(tcx)); - let Term::Ty(ty) = self.term.sinto(s) else { - unreachable!() - }; - ProjectionPredicate { - impl_expr: solve_trait(s, poly_trait_ref), - assoc_item: tcx.associated_item(alias_ty.def_id).sinto(s), - ty, - } - } -} - -/// Reflects [`rustc_middle::ty::ClauseKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderBinderState<'tcx>>, from: rustc_middle::ty::ClauseKind<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum ClauseKind { - Trait(TraitPredicate), - RegionOutlives(RegionOutlivesPredicate), - TypeOutlives(TypeOutlivesPredicate), - Projection(ProjectionPredicate), - ConstArgHasType(ConstantExpr, Ty), - WellFormed(GenericArg), - ConstEvaluatable(ConstantExpr), -} - -/// Reflects [`rustc_middle::ty::Clause`] and adds a hash-consed predicate identifier. 
-#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Clause { - pub kind: Binder, - pub id: PredicateId, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::Clause<'tcx> { - fn sinto(&self, s: &S) -> Clause { - let kind = self.kind().sinto(s); - let id = kind.clone().map(PredicateKind::Clause).predicate_id(); - Clause { kind, id } - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto - for rustc_middle::ty::PolyTraitPredicate<'tcx> -{ - fn sinto(&self, s: &S) -> Clause { - let kind: Binder<_> = self.sinto(s); - let kind: Binder = kind.map(ClauseKind::Trait); - let id = kind.clone().map(PredicateKind::Clause).predicate_id(); - Clause { kind, id } - } -} - -/// Reflects [`rustc_middle::ty::Predicate`] and adds a hash-consed predicate identifier. -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Predicate { - pub kind: Binder, - pub id: PredicateId, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::Predicate<'tcx> { - fn sinto(&self, s: &S) -> Predicate { - let kind = self.kind().sinto(s); - let id = kind.predicate_id(); - Predicate { kind, id } - } -} - -/// Reflects [`rustc_middle::ty::BoundVariableKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::BoundVariableKind, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum BoundVariableKind { - Ty(BoundTyKind), - Region(BoundRegionKind), - Const, -} - -/// Reflects [`rustc_middle::ty::Binder`] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Binder { - pub value: T, - pub bound_vars: Vec, -} - -impl Binder { - pub fn as_ref(&self) -> Binder<&T> { - Binder { - value: &self.value, - 
bound_vars: self.bound_vars.clone(), - } - } - - pub fn hax_skip_binder(self) -> T { - self.value - } - - pub fn hax_skip_binder_ref(&self) -> &T { - &self.value - } - - pub fn map(self, f: impl FnOnce(T) -> U) -> Binder { - Binder { - value: f(self.value), - bound_vars: self.bound_vars, - } - } - - pub fn inner_mut(&mut self) -> &mut T { - &mut self.value - } - - pub fn rebind(&self, value: U) -> Binder { - self.as_ref().map(|_| value) - } -} - -/// Reflects [`rustc_middle::ty::GenericPredicates`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::GenericPredicates<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct GenericPredicates { - pub parent: Option, - // FIXME: Switch from `Predicate` to `Clause` (will require correct handling of binders). - #[value(self.predicates.iter().map(|(clause, span)| (clause.as_predicate().sinto(s), span.sinto(s))).collect())] - pub predicates: Vec<(Predicate, Span)>, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>, T1, T2> SInto> - for rustc_middle::ty::Binder<'tcx, T1> -where - T1: SInto, T2>, -{ - fn sinto(&self, s: &S) -> Binder { - let bound_vars = self.bound_vars().sinto(s); - let value = { - let under_binder_s = &State { - base: s.base(), - owner_id: s.owner_id(), - binder: self.as_ref().map_bound(|_| ()), - thir: (), - mir: (), - }; - self.as_ref().skip_binder().sinto(under_binder_s) - }; - Binder { value, bound_vars } - } -} - -/// Reflects [`rustc_middle::ty::SubtypePredicate`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::SubtypePredicate<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct SubtypePredicate { - pub a_is_expected: bool, - pub a: Ty, - pub b: Ty, -} - -/// Reflects [`rustc_middle::ty::CoercePredicate`] -#[derive(AdtInto)] -#[args(<'tcx, 
S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::CoercePredicate<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct CoercePredicate { - pub a: Ty, - pub b: Ty, -} - -/// Reflects [`rustc_middle::ty::AliasRelationDirection`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::AliasRelationDirection, state: S as _tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum AliasRelationDirection { - Equate, - Subtype, -} - -/// Reflects [`rustc_middle::ty::ClosureArgs`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: ty::ClosureArgs>, state: S as s)] -#[derive(Clone, Debug, JsonSchema)] -#[derive_group(Serializers)] -pub struct ClosureArgs { - #[value(self.kind().sinto(s))] - pub kind: ClosureKind, - #[value(self.parent_args().sinto(s))] - pub parent_args: Vec, - #[value(self.sig().sinto(s))] - pub sig: PolyFnSig, - #[value(self.upvar_tys().sinto(s))] - pub upvar_tys: Vec, -} - -/// Reflects [`rustc_middle::ty::ClosureKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::ClosureKind, state: S as _tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum ClosureKind { - Fn, - FnMut, - FnOnce, -} - -/// Reflects [`rustc_middle::ty::PredicateKind`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderBinderState<'tcx>>, from: rustc_middle::ty::PredicateKind<'tcx>, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum PredicateKind { - Clause(ClauseKind), - ObjectSafe(DefId), - Subtype(SubtypePredicate), - Coerce(CoercePredicate), - ConstEquate(ConstantExpr, ConstantExpr), - Ambiguous, - AliasRelate(Term, Term, AliasRelationDirection), - NormalizesTo(NormalizesTo), -} - -/// Reflects 
[`rustc_middle::ty::ImplSubject`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::ty::ImplSubject<'tcx>, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum ImplSubject { - Trait( - // Also record the polarity. - #[map({ - let polarity = s.base().tcx.impl_polarity(s.owner_id()); - TraitPredicate { - trait_ref: x.sinto(s), - is_positive: matches!(polarity, rustc_middle::ty::ImplPolarity::Positive), - } - })] - TraitPredicate, - ), - Inherent(Ty), -} - -/// Reflects [`rustc_hir::GenericBounds`] -type GenericBounds = Vec; - -/// Compute the bounds for the owner registed in the state `s` -#[cfg(feature = "rustc")] -fn region_bounds_at_current_owner<'tcx, S: UnderOwnerState<'tcx>>(s: &S) -> GenericBounds { - let tcx = s.base().tcx; - - // According to what kind of node we are looking at, we should - // either call `predicates_defined_on` or `item_bounds` - let use_item_bounds = { - if let Some(oid) = s.owner_id().as_local() { - let hir_id = tcx.local_def_id_to_hir_id(oid); - let node = tcx.hir_node(hir_id); - use rustc_hir as hir; - matches!( - node, - hir::Node::TraitItem(hir::TraitItem { - kind: hir::TraitItemKind::Type(..), - .. - }) | hir::Node::Item(hir::Item { - kind: hir::ItemKind::OpaqueTy(hir::OpaqueTy { .. }), - .. 
- }) - ) - } else { - false - } - }; - - let clauses: Vec> = if use_item_bounds { - tcx.item_bounds(s.owner_id()) - .instantiate_identity() - .iter() - .collect() - } else { - tcx.predicates_defined_on(s.owner_id()) - .predicates - .iter() - .map(|(x, _span)| x) - .copied() - .collect() - }; - clauses.sinto(s) -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_hir::GenericBounds<'tcx> { - fn sinto(&self, s: &S) -> GenericBounds { - region_bounds_at_current_owner(s) - } -} - -/// Reflects [`rustc_hir::OpaqueTyOrigin`] -#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::OpaqueTyOrigin, state: S as tcx)] -#[derive(Clone, Debug, JsonSchema)] -#[derive_group(Serializers)] -pub enum OpaqueTyOrigin { - FnReturn(GlobalIdent), - AsyncFn(GlobalIdent), - TyAlias { in_assoc_ty: bool }, -} - -/// Reflects [`rustc_ast::ast::MacroDef`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::MacroDef, state: S as tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct MacroDef { - pub body: DelimArgs, - pub macro_rules: bool, -} - -/// Reflects [`rustc_hir::Item`] (and [`rustc_hir::ItemId`]) -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct Item { - pub def_id: Option, - pub owner_id: DefId, - pub span: Span, - pub vis_span: Span, - pub kind: ItemKind, - pub attributes: ItemAttributes, - pub expn_backtrace: Vec, -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto> for rustc_hir::Item<'tcx> { - fn sinto(&self, s: &S) -> Item { - let name: String = self.ident.name.to_ident_string(); - let s = &with_owner_id(s.base(), (), (), self.owner_id.to_def_id()); - let owner_id: DefId = self.owner_id.sinto(s); - let def_id = Path::from(owner_id.clone()) - .ends_with(&[name]) - .then(|| owner_id.clone()); - Item { - def_id, - owner_id, - span: self.span.sinto(s), - vis_span: self.span.sinto(s), - kind: self.kind.sinto(s), - 
attributes: ItemAttributes::from_owner_id(s, self.owner_id), - expn_backtrace: self.span.macro_backtrace().map(|o| o.sinto(s)).collect(), - } - } -} - -#[cfg(feature = "rustc")] -impl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto> for rustc_hir::ItemId { - fn sinto(&self, s: &S) -> Item { - let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; - tcx.hir().item(*self).sinto(s) - } -} - -/// Reflects [`rustc_span::symbol::Ident`] -pub type Ident = (Symbol, Span); - -#[cfg(feature = "rustc")] -impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_span::symbol::Ident { - fn sinto(&self, s: &S) -> Ident { - (self.name.sinto(s), self.span.sinto(s)) - } -} - -/// Reflects [`rustc_hir::PredicateOrigin`] -#[derive(AdtInto)] -#[args(, from: rustc_hir::PredicateOrigin, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub enum PredicateOrigin { - WhereClause, - GenericParam, - ImplTrait, -} - -/// Reflects [`rustc_middle::ty::AssocItem`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_middle::ty::AssocItem, state: S as s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct AssocItem { - pub def_id: DefId, - pub name: Symbol, - pub kind: AssocKind, - #[value(get_container_for_assoc_item(s, self))] - pub container: AssocItemContainer, - /// Whether this item has a value (e.g. this is `false` for trait methods without default - /// implementations). 
- #[value(self.defaultness(s.base().tcx).has_value())] - pub has_value: bool, - pub fn_has_self_parameter: bool, - pub opt_rpitit_info: Option, -} - -/// Reflects [`rustc_middle::ty::ImplTraitInTraitData`] -#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_middle::ty::ImplTraitInTraitData, state: S as _s)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum ImplTraitInTraitData { - Trait { - fn_def_id: DefId, - opaque_def_id: DefId, - }, - Impl { - fn_def_id: DefId, - }, -} - -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum AssocItemContainer { - TraitContainer { - trait_id: DefId, - }, - TraitImplContainer { - impl_id: DefId, - implemented_trait: DefId, - implemented_trait_item: DefId, - /// Whether the corresponding trait item had a default (and therefore this one overrides - /// it). - overrides_default: bool, - }, - InherentImplContainer { - impl_id: DefId, - }, -} - -#[cfg(feature = "rustc")] -fn get_container_for_assoc_item<'tcx, S: BaseState<'tcx>>( - s: &S, - item: &ty::AssocItem, -) -> AssocItemContainer { - let container_id = item.container_id(s.base().tcx); - match item.container { - ty::AssocItemContainer::TraitContainer => AssocItemContainer::TraitContainer { - trait_id: container_id.sinto(s), - }, - ty::AssocItemContainer::ImplContainer => { - if let Some(implemented_trait_item) = item.trait_item_def_id { - AssocItemContainer::TraitImplContainer { - impl_id: container_id.sinto(s), - implemented_trait: s - .base() - .tcx - .trait_of_item(implemented_trait_item) - .unwrap() - .sinto(s), - implemented_trait_item: implemented_trait_item.sinto(s), - overrides_default: s.base().tcx.defaultness(implemented_trait_item).has_value(), - } - } else { - AssocItemContainer::InherentImplContainer { - impl_id: container_id.sinto(s), - } - } - } - } -} - -/// Reflects [`rustc_middle::ty::AssocKind`] -#[derive(AdtInto)] 
-#[args(, from: rustc_middle::ty::AssocKind, state: S as _tcx)] -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum AssocKind { - Const, - Fn, - Type, -} diff --git a/frontend/exporter/src/types/def_id.rs b/frontend/exporter/src/types/def_id.rs index 517ebd8cd..a777ceded 100644 --- a/frontend/exporter/src/types/def_id.rs +++ b/frontend/exporter/src/types/def_id.rs @@ -12,22 +12,21 @@ use hax_adt_into::derive_group; +#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] +use crate::prelude::*; #[cfg(not(feature = "extract_names_mode"))] use crate::{AdtInto, JsonSchema}; -#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] -use crate::{BaseState, SInto}; +#[cfg(feature = "rustc")] +use rustc_span::def_id::DefId as RDefId; pub type Symbol = String; -#[derive_group(Serializers)] -#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] -#[cfg_attr(not(feature = "extract_names_mode"), derive(AdtInto, JsonSchema))] -#[cfg_attr(not(feature = "extract_names_mode"), args(<'a, S: BaseState<'a>>, from: rustc_hir::definitions::DisambiguatedDefPathData, state: S as s))] -/// Reflects [`rustc_hir::definitions::DisambiguatedDefPathData`] -pub struct DisambiguatedDefPathItem { - pub data: DefPathItem, - pub disambiguator: u32, +#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] +impl<'t, S> SInto for rustc_span::symbol::Symbol { + fn sinto(&self, _s: &S) -> Symbol { + self.to_ident_string() + } } /// Reflects [`rustc_hir::def_id::DefId`] @@ -35,8 +34,16 @@ pub struct DisambiguatedDefPathItem { #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema))] pub struct DefId { + pub(crate) contents: crate::id_table::Node, +} + +#[derive_group(Serializers)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(not(feature = "extract_names_mode"), derive(JsonSchema))] +pub struct DefIdContents { pub 
krate: String, pub path: Vec, + pub parent: Option, /// Rustc's `CrateNum` and `DefIndex` raw indexes. This can be /// useful if one needs to convert a [`DefId`] into a /// [`rustc_hir::def_id::DefId`]; there is a `From` instance for @@ -49,6 +56,42 @@ pub struct DefId { pub is_local: bool, } +#[cfg(feature = "rustc")] +impl DefId { + pub fn to_rust_def_id(&self) -> RDefId { + let (krate, index) = self.index; + RDefId { + krate: rustc_hir::def_id::CrateNum::from_u32(krate), + index: rustc_hir::def_id::DefIndex::from_u32(index), + } + } + + /// Iterate over this element and its parents. + pub fn ancestry(&self) -> impl Iterator { + std::iter::successors(Some(self), |def| def.parent.as_ref()) + } + + /// The `PathItem` corresponding to this item. + pub fn path_item(&self) -> DisambiguatedDefPathItem { + self.path + .last() + .cloned() + .unwrap_or_else(|| DisambiguatedDefPathItem { + disambiguator: 0, + data: DefPathItem::CrateRoot { + name: self.krate.clone(), + }, + }) + } +} + +impl std::ops::Deref for DefId { + type Target = DefIdContents; + fn deref(&self) -> &Self::Target { + &self.contents + } +} + #[cfg(not(feature = "rustc"))] impl std::fmt::Debug for DefId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -67,13 +110,108 @@ impl std::fmt::Debug for DefId { } } +impl std::hash::Hash for DefId { + fn hash(&self, state: &mut H) { + // A `DefId` is basically an interned path; we only hash the path, discarding the rest of + // the information. + self.krate.hash(state); + self.path.hash(state); + } +} + +#[cfg(feature = "rustc")] +pub(crate) fn translate_def_id<'tcx, S: BaseState<'tcx>>(s: &S, def_id: RDefId) -> DefId { + let tcx = s.base().tcx; + let path = { + // Set the def_id so the `CrateRoot` path item can fetch the crate name. 
+ let state_with_id = with_owner_id(s.base(), (), (), def_id); + tcx.def_path(def_id) + .data + .iter() + .map(|x| x.sinto(&state_with_id)) + .collect() + }; + let contents = DefIdContents { + path, + krate: tcx.crate_name(def_id.krate).to_string(), + parent: tcx.opt_parent(def_id).sinto(s), + index: ( + rustc_hir::def_id::CrateNum::as_u32(def_id.krate), + rustc_hir::def_id::DefIndex::as_u32(def_id.index), + ), + is_local: def_id.is_local(), + }; + let contents = + s.with_global_cache(|cache| id_table::Node::new(contents, &mut cache.id_table_session)); + DefId { contents } +} + +#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] +impl<'s, S: BaseState<'s>> SInto for RDefId { + fn sinto(&self, s: &S) -> DefId { + if let Some(def_id) = s.with_item_cache(*self, |cache| cache.def_id.clone()) { + return def_id; + } + let def_id = translate_def_id(s, *self); + s.with_item_cache(*self, |cache| cache.def_id = Some(def_id.clone())); + def_id + } +} + +#[cfg(feature = "rustc")] +impl From<&DefId> for RDefId { + fn from<'tcx>(def_id: &DefId) -> Self { + def_id.to_rust_def_id() + } +} + +// Impl to be able to use hax's `DefId` for many rustc queries. 
+#[cfg(feature = "rustc")] +impl rustc_middle::query::IntoQueryParam for &DefId { + fn into_query_param(self) -> RDefId { + self.into() + } +} + +#[cfg(not(feature = "extract_names_mode"))] +pub type Path = Vec; + +#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] +impl std::convert::From for Path { + fn from(v: DefId) -> Vec { + std::iter::once(&v.krate) + .chain(v.path.iter().filter_map(|item| match &item.data { + DefPathItem::TypeNs(s) + | DefPathItem::ValueNs(s) + | DefPathItem::MacroNs(s) + | DefPathItem::LifetimeNs(s) => Some(s), + _ => None, + })) + .cloned() + .collect() + } +} + +#[cfg(not(feature = "extract_names_mode"))] +pub type GlobalIdent = DefId; + +#[cfg(all(not(feature = "extract_names_mode"), feature = "rustc"))] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_hir::def_id::LocalDefId { + fn sinto(&self, st: &S) -> DefId { + self.to_def_id().sinto(st) + } +} + /// Reflects [`rustc_hir::definitions::DefPathData`] #[derive_group(Serializers)] #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(not(feature = "extract_names_mode"), derive(AdtInto, JsonSchema))] -#[cfg_attr(not(feature = "extract_names_mode"), args(<'ctx, S: BaseState<'ctx>>, from: rustc_hir::definitions::DefPathData, state: S as state))] +#[cfg_attr(not(feature = "extract_names_mode"), args(<'ctx, S: UnderOwnerState<'ctx>>, from: rustc_hir::definitions::DefPathData, state: S as s))] pub enum DefPathItem { - CrateRoot, + CrateRoot { + #[cfg_attr(not(feature = "extract_names_mode"), value(s.base().tcx.crate_name(s.owner_id().krate).sinto(s)))] + name: Symbol, + }, Impl, ForeignMod, Use, @@ -88,3 +226,13 @@ pub enum DefPathItem { OpaqueTy, AnonAdt, } + +#[derive_group(Serializers)] +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[cfg_attr(not(feature = "extract_names_mode"), derive(AdtInto, JsonSchema))] +#[cfg_attr(not(feature = "extract_names_mode"), args(<'a, S: UnderOwnerState<'a>>, from: 
rustc_hir::definitions::DisambiguatedDefPathData, state: S as s))] +/// Reflects [`rustc_hir::definitions::DisambiguatedDefPathData`] +pub struct DisambiguatedDefPathItem { + pub data: DefPathItem, + pub disambiguator: u32, +} diff --git a/frontend/exporter/src/types/hir.rs b/frontend/exporter/src/types/hir.rs new file mode 100644 index 000000000..4287fb190 --- /dev/null +++ b/frontend/exporter/src/types/hir.rs @@ -0,0 +1,1171 @@ +//! Copies of the relevant `HIR` types. HIR represents the code of a rust crate post-macro +//! expansion. It is close to the parsed AST, modulo some desugarings (and macro expansion). +//! +//! This module also includes some `rustc_ast` definitions when they show up in HIR. +use crate::prelude::*; +use crate::sinto_todo; + +#[cfg(feature = "rustc")] +use rustc_ast::ast; +#[cfg(feature = "rustc")] +use rustc_hir as hir; +#[cfg(feature = "rustc")] +use rustc_middle::ty; + +/// Reflects [`hir::hir_id::HirId`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: BaseState<'tcx>>, from: hir::hir_id::HirId, state: S as gstate)] +pub struct HirId { + owner: DefId, + local_id: usize, + // attrs: String +} +// TODO: If not working: See original + +#[cfg(feature = "rustc")] +impl<'tcx, S: BaseState<'tcx>> SInto for hir::hir_id::OwnerId { + fn sinto(&self, s: &S) -> DefId { + self.to_def_id().sinto(s) + } +} + +/// Reflects [`ast::LitFloatType`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: BaseState<'tcx>>, from: ast::LitFloatType, state: S as gstate)] +pub enum LitFloatType { + Suffixed(FloatTy), + Unsuffixed, +} + +/// Reflects [`hir::Movability`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S>, from: hir::Movability, state: S as _s)] +pub enum Movability { + Static, + Movable, +} + +pub type 
Mutability = bool; + +#[cfg(feature = "rustc")] +impl SInto for hir::Mutability { + fn sinto(&self, _s: &S) -> Mutability { + match self { + Self::Mut => true, + Self::Not => false, + } + } +} + +/// Reflects [`hir::def::CtorKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(, from: hir::def::CtorKind, state: S as _s)] +pub enum CtorKind { + Fn, + Const, +} + +/// Reflects [`hir::def::CtorOf`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(, from: hir::def::CtorOf, state: S as _s)] +pub enum CtorOf { + Struct, + Variant, +} + +/// Reflects [`hir::RangeEnd`] +#[derive(AdtInto)] +#[args(, from: hir::RangeEnd, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum RangeEnd { + Included, + Excluded, +} + +/// Reflects [`hir::Safety`] +#[derive(AdtInto)] +#[args(, from: hir::Safety, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum Safety { + Unsafe, + Safe, +} + +/// Reflects [`hir::ImplicitSelfKind`] +#[derive(AdtInto)] +#[args(, from: hir::ImplicitSelfKind, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum ImplicitSelfKind { + Imm, + Mut, + RefImm, + RefMut, + None, +} + +/// Reflects [`hir::FnDecl`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnDecl<'tcx>, state: S as tcx)] +pub struct FnDecl { + pub inputs: Vec, + pub output: FnRetTy, + pub c_variadic: bool, + pub implicit_self: ImplicitSelfKind, + pub lifetime_elision_allowed: bool, +} + +/// Reflects [`hir::FnSig`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnSig<'tcx>, state: S as tcx)] +pub struct FnSig { + pub header: FnHeader, + pub decl: FnDecl, + pub span: Span, +} + +/// Reflects [`hir::FnHeader`] 
+#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::FnHeader, state: S as tcx)] +pub struct FnHeader { + pub safety: Safety, + pub constness: Constness, + pub asyncness: IsAsync, + pub abi: Abi, +} + +sinto_todo!(rustc_target::spec::abi, Abi); + +/// Function definition +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct FnDef { + pub header: FnHeader, + pub params: Vec, + pub ret: Ty, + pub body: Body, + pub sig_span: Span, +} + +#[cfg(feature = "rustc")] +impl<'x: 'tcx, 'tcx, S: UnderOwnerState<'tcx>> SInto for hir::Ty<'x> { + fn sinto(self: &hir::Ty<'x>, s: &S) -> Ty { + // **Important:** + // We need a local id here, and we get it from the owner id, which must + // be local. It is safe to do so, because if we have access to a HIR ty, + // it necessarily means we are exploring a local item (we don't have + // access to the HIR of external objects, only their MIR). + let ctx = + rustc_hir_analysis::collect::ItemCtxt::new(s.base().tcx, s.owner_id().expect_local()); + ctx.lower_ty(self).sinto(s) + } +} + +/// Reflects [`hir::UseKind`] +#[derive(AdtInto)] +#[args(, from: hir::UseKind, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum UseKind { + Single, + Glob, + ListStem, +} + +/// Reflects [`hir::IsAuto`] +#[derive(AdtInto)] +#[args(, from: hir::IsAuto, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum IsAuto { + Yes, + No, +} + +/// Reflects [`hir::Defaultness`] +#[derive(AdtInto)] +#[args(, from: hir::Defaultness, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum Defaultness { + Default { has_value: bool }, + Final, +} + +/// Reflects [`hir::ImplPolarity`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ImplPolarity, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub 
enum ImplPolarity { + Positive, + Negative(Span), +} + +/// Reflects [`hir::Constness`] +#[derive(AdtInto)] +#[args(, from: hir::Constness, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum Constness { + Const, + NotConst, +} + +/// Reflects [`hir::Generics`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Generics<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Generics { + pub params: Vec>, + #[value(region_bounds_at_current_owner(tcx))] + pub bounds: GenericBounds, + pub has_where_clause_predicates: bool, + pub where_clause_span: Span, + pub span: Span, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto> for hir::ImplItemRef { + fn sinto(&self, s: &S) -> ImplItem { + let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; + let impl_item = tcx.hir().impl_item(self.id); + let s = with_owner_id(s.base(), (), (), impl_item.owner_id.to_def_id()); + impl_item.sinto(&s) + } +} + +/// Reflects [`hir::ParamName`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum ParamName { + Plain(LocalIdent), + Fresh, + Error, +} + +/// Reflects [`hir::LifetimeParamKind`] +#[derive(AdtInto)] +#[args(, from: hir::LifetimeParamKind, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum LifetimeParamKind { + Explicit, + Elided(MissingLifetimeKind), + Error, +} + +/// Reflects [`hir::AnonConst`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::AnonConst, state: S as s)] +pub struct AnonConst { + pub hir_id: HirId, + pub def_id: GlobalIdent, + #[map({ + body_from_id::(*x, &with_owner_id(s.base(), (), (), hir_id.owner.to_def_id())) + })] + pub body: Body, +} + +/// Reflects [`hir::ConstArg`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: 
UnderOwnerState<'tcx>>, from: hir::ConstArg<'tcx>, state: S as s)] +pub struct ConstArg { + pub hir_id: HirId, + pub kind: ConstArgKind, + pub is_desugared_from_effects: bool, +} + +/// Reflects [`hir::ConstArgKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ConstArgKind<'tcx>, state: S as s)] +pub enum ConstArgKind { + Path(QPath), + Anon(AnonConst), +} + +/// Reflects [`hir::GenericParamKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::GenericParamKind<'tcx>, state: S as tcx)] +pub enum GenericParamKind { + Lifetime { + kind: LifetimeParamKind, + }, + Type { + /// On use site, Rust always give us all the generic + /// parameters, no matter the defaultness. This information is + /// thus not so useful. At the same time, as discussed in + /// https://github.com/hacspec/hax/issues/310, extracting this + /// default type causes failures when querying Rust for trait + /// resolution. We thus decided to disable this feature. If + /// this default type information is useful to you, please + /// open an issue on https://github.com/hacspec/hax. 
+ #[map(x.map(|_ty| ()))] + default: Option<()>, + synthetic: bool, + }, + Const { + ty: Ty, + default: Option>, + }, +} + +/// Reflects [`hir::GenericParam`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::GenericParam<'tcx>, state: S as s)] +pub struct GenericParam { + pub hir_id: HirId, + pub def_id: GlobalIdent, + #[map(match x { + hir::ParamName::Plain(loc_ident) => + ParamName::Plain(LocalIdent { + name: loc_ident.as_str().to_string(), + id: self.hir_id.sinto(s) + }), + hir::ParamName::Fresh => + ParamName::Fresh, + hir::ParamName::Error => + ParamName::Error, + })] + pub name: ParamName, + pub span: Span, + pub pure_wrt_drop: bool, + pub kind: GenericParamKind, + pub colon_span: Option, + #[value(s.base().tcx.hir().attrs(*hir_id).sinto(s))] + attributes: Vec, +} + +/// Reflects [`hir::ImplItem`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ImplItem<'tcx>, state: S as s)] +pub struct ImplItem { + pub ident: Ident, + pub owner_id: DefId, + pub generics: Generics, + pub kind: ImplItemKind, + pub defaultness: Defaultness, + pub span: Span, + pub vis_span: Span, + #[value(ItemAttributes::from_owner_id(s, *owner_id))] + /// the attributes on this impl item + pub attributes: ItemAttributes, +} + +/// Reflects [`hir::ImplItemKind`], inlining the body of the items. 
+#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ImplItemKind<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum ImplItemKind { + Const(Ty, Body), + #[custom_arm(hir::ImplItemKind::Fn(sig, body) => { + ImplItemKind::Fn(make_fn_def::(sig, body, s)) + },)] + Fn(FnDef), + #[custom_arm(hir::ImplItemKind::Type(t) => { + let parent_bounds = { + let (tcx, owner_id) = (s.base().tcx, s.owner_id()); + let assoc_item = tcx.opt_associated_item(owner_id).unwrap(); + let impl_did = assoc_item.impl_container(tcx).unwrap(); + tcx.explicit_item_bounds(assoc_item.trait_item_def_id.unwrap()) + .skip_binder() // Skips an `EarlyBinder`, likely for GATs + .iter() + .copied() + .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, impl_did, clause, span)) + .collect::>() + }; + ImplItemKind::Type { + ty: t.sinto(s), + parent_bounds + } + },)] + /// An associated type with its parent bounds inlined. + Type { + ty: Ty, + parent_bounds: Vec<(Clause, ImplExpr, Span)>, + }, +} + +/// Reflects [`hir::AssocItemKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::AssocItemKind, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum AssocItemKind { + Const, + Fn { has_self: bool }, + Type, +} + +/// Reflects [`hir::Impl`]. 
+#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Impl<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Impl { + pub safety: Safety, + pub polarity: ImplPolarity, + pub defaultness: Defaultness, + pub defaultness_span: Option, + pub generics: Generics, + #[map({ + s.base().tcx + .impl_trait_ref(s.owner_id()) + .map(|trait_ref| trait_ref.instantiate_identity()) + .sinto(s) + })] + pub of_trait: Option, + pub self_ty: Ty, + pub items: Vec>, + #[value({ + let (tcx, owner_id) = (s.base().tcx, s.owner_id()); + let trait_did = tcx.trait_id_of_impl(owner_id); + if let Some(trait_did) = trait_did { + tcx.explicit_super_predicates_of(trait_did) + .iter_identity_copied() + .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, owner_id, clause, span)) + .collect::>() + } else { + vec![] + } + })] + /// The clauses and impl expressions corresponding to the impl's + /// trait (if not inherent) super bounds (if any). 
+ pub parent_bounds: Vec<(Clause, ImplExpr, Span)>, +} + +/// Reflects [`hir::IsAsync`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::IsAsync, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum IsAsync { + Async(Span), + NotAsync, +} + +/// Reflects [`hir::FnRetTy`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnRetTy<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum FnRetTy { + DefaultReturn(Span), + Return(Ty), +} + +/// Reflects [`hir::VariantData`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::VariantData<'tcx>, state: S as tcx)] +pub enum VariantData { + Struct { + fields: Vec, + recovered: bool, + }, + Tuple(Vec, HirId, GlobalIdent), + Unit(HirId, GlobalIdent), +} + +#[cfg(feature = "rustc")] +impl SInto for ast::Recovered { + fn sinto(&self, _s: &S) -> bool { + match self { + Self::Yes(_) => true, + Self::No => false, + } + } +} + +/// Reflects [`hir::FieldDef`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FieldDef<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct HirFieldDef { + pub span: Span, + pub vis_span: Span, + pub ident: Ident, + pub hir_id: HirId, + pub def_id: GlobalIdent, + pub ty: Ty, + #[value(s.base().tcx.hir().attrs(*hir_id).sinto(s))] + attributes: Vec, +} + +/// Reflects [`hir::Variant`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Variant<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Variant { + pub ident: Ident, + pub hir_id: HirId, + pub def_id: GlobalIdent, + #[map(x.sinto(&with_owner_id(s.base(), (), (), self.def_id.to_def_id())))] + pub data: VariantData, + pub disr_expr: Option>, + #[value({ + let tcx = s.base().tcx; + let variant = tcx + 
.adt_def(s.owner_id()) + .variants() + .into_iter() + .find(|v| v.def_id == self.def_id.into()).unwrap(); + variant.discr.sinto(s) + })] + pub discr: DiscriminantDefinition, + pub span: Span, + #[value(s.base().tcx.hir().attrs(*hir_id).sinto(s))] + pub attributes: Vec, +} + +/// Reflects [`hir::UsePath`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::UsePath<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct UsePath { + pub span: Span, + #[map(x.iter().map(|res| res.sinto(s)).collect())] + pub res: Vec, + pub segments: Vec, + #[value(self.segments.iter().last().and_then(|segment| { + match s.base().tcx.hir_node_by_def_id(segment.hir_id.owner.def_id) { + hir::Node::Item(hir::Item { + ident, + kind: hir::ItemKind::Use(_, _), + .. + }) if ident.name.to_ident_string() != "" => Some(ident.name.to_ident_string()), + _ => None, + } + }))] + pub rename: Option, +} + +/// Reflects [`hir::def::Res`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::def::Res, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum Res { + Def(DefKind, DefId), + PrimTy(PrimTy), + SelfTyParam { + trait_: DefId, + }, + SelfTyAlias { + alias_to: DefId, + forbid_generic: bool, + is_trait_impl: bool, + }, + SelfCtor(DefId), + Local(HirId), + ToolMod, + NonMacroAttr(NonMacroAttrKind), + Err, +} + +/// Reflects [`hir::PrimTy`] +#[derive(AdtInto)] +#[args(, from: hir::PrimTy, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum PrimTy { + Int(IntTy), + Uint(UintTy), + Float(FloatTy), + Str, + Bool, + Char, +} + +/// Reflects [`hir::def::NonMacroAttrKind`] +#[derive(AdtInto)] +#[args(, from: hir::def::NonMacroAttrKind, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum NonMacroAttrKind { + Builtin(Symbol), + Tool, + DeriveHelper, + DeriveHelperCompat, +} + +/// Reflects 
[`hir::PathSegment`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::PathSegment<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct PathSegment { + pub ident: Ident, + pub hir_id: HirId, + pub res: Res, + #[map(args.map(|args| args.sinto(s)))] + pub args: Option, + pub infer_args: bool, +} + +/// Reflects [`hir::ItemKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ItemKind<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum ItemKind { + #[disable_mapping] + MacroInvokation(MacroInvokation), + ExternCrate(Option), + Use(UsePath, UseKind), + Static(Ty, Mutability, Body), + Const(Ty, Generics, Body), + #[custom_arm( + hir::ItemKind::Fn(sig, generics, body) => { + ItemKind::Fn(generics.sinto(s), make_fn_def::(sig, body, s)) + } + )] + Fn(Generics, FnDef), + Macro(MacroDef, MacroKind), + Mod(Vec>), + ForeignMod { + abi: Abi, + items: Vec>, + }, + GlobalAsm(InlineAsm), + TyAlias( + #[map({ + let s = &State { + base: Base {ty_alias_mode: true, ..s.base()}, + owner_id: s.owner_id(), + thir: (), + mir: (), + binder: (), + }; + x.sinto(s) + })] + Ty, + Generics, + ), + Enum( + EnumDef, + Generics, + #[value({ + let tcx = s.base().tcx; + tcx.repr_options_of_def(s.owner_id().expect_local()).sinto(s) + })] + ReprOptions, + ), + Struct(VariantData, Generics), + Union(VariantData, Generics), + Trait( + IsAuto, + Safety, + Generics, + GenericBounds, + Vec>, + ), + TraitAlias(Generics, GenericBounds), + Impl(Impl), +} + +pub type EnumDef = Vec>; + +/// Reflects [`hir::TraitItemKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::TraitItemKind<'tcx>, state: S as tcx)] +#[derive(Clone, Debug, JsonSchema)] +#[derive_group(Serializers)] +pub enum TraitItemKind { + Const(Ty, Option), + #[custom_arm( + hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(id)) => { + TraitItemKind::RequiredFn(sig.sinto(tcx), 
id.sinto(tcx)) + } + )] + /// Reflects a required [`hir::TraitItemKind::Fn`] + RequiredFn(FnSig, Vec), + #[custom_arm( + hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body)) => { + TraitItemKind::ProvidedFn(sig.sinto(tcx), make_fn_def::(sig, body, tcx)) + } + )] + /// Reflects a provided [`hir::TraitItemKind::Fn`] + ProvidedFn(FnSig, FnDef), + #[custom_arm( + hir::TraitItemKind::Type(b, ty) => { + TraitItemKind::Type(b.sinto(tcx), ty.map(|t| t.sinto(tcx))) + } + )] + Type(GenericBounds, Option), +} + +/// Reflects [`hir::TraitItem`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::TraitItem<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct TraitItem { + pub ident: Ident, + pub owner_id: DefId, + pub generics: Generics, + pub kind: TraitItemKind, + pub span: Span, + pub defaultness: Defaultness, + #[value(ItemAttributes::from_owner_id(s, *owner_id))] + /// The attributes on this trait item + pub attributes: ItemAttributes, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto> for hir::EnumDef<'tcx> { + fn sinto(&self, s: &S) -> EnumDef { + self.variants.iter().map(|v| v.sinto(s)).collect() + } +} + +#[cfg(feature = "rustc")] +impl<'a, S: UnderOwnerState<'a>, Body: IsBody> SInto> for hir::TraitItemRef { + fn sinto(&self, s: &S) -> TraitItem { + let s = with_owner_id(s.base(), (), (), self.id.owner_id.to_def_id()); + let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; + tcx.hir().trait_item(self.id).sinto(&s) + } +} + +#[cfg(feature = "rustc")] +impl<'a, 'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto>> for hir::Mod<'a> { + fn sinto(&self, s: &S) -> Vec> { + inline_macro_invocations(self.item_ids.iter().copied(), s) + // .iter() + // .map(|item_id| item_id.sinto(s)) + // .collect() + } +} + +/// Reflects [`hir::ForeignItemKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ForeignItemKind<'tcx>, state: S as tcx)] 
+#[derive(Clone, Debug, JsonSchema)] +#[derive_group(Serializers)] +pub enum ForeignItemKind { + Fn(FnSig, Vec, Generics), + Static(Ty, Mutability, Safety), + Type, +} + +/// Reflects [`hir::ForeignItem`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ForeignItem<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct ForeignItem { + pub ident: Ident, + pub kind: ForeignItemKind, + pub owner_id: DefId, + pub span: Span, + pub vis_span: Span, +} + +#[cfg(feature = "rustc")] +impl<'a, S: UnderOwnerState<'a>, Body: IsBody> SInto> for hir::ForeignItemRef { + fn sinto(&self, s: &S) -> ForeignItem { + let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; + tcx.hir().foreign_item(self.id).sinto(s) + } +} + +/// Reflects [`hir::OpaqueTy`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::OpaqueTy<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct OpaqueTy { + pub generics: Generics, + pub bounds: GenericBounds, + pub origin: OpaqueTyOrigin, +} + +/// Reflects [`hir::GenericBounds`] +type GenericBounds = Vec; + +/// Compute the bounds for the owner registed in the state `s` +#[cfg(feature = "rustc")] +fn region_bounds_at_current_owner<'tcx, S: UnderOwnerState<'tcx>>(s: &S) -> GenericBounds { + let tcx = s.base().tcx; + + // According to what kind of node we are looking at, we should + // either call `predicates_defined_on` or `item_bounds` + let use_item_bounds = { + if let Some(oid) = s.owner_id().as_local() { + let hir_id = tcx.local_def_id_to_hir_id(oid); + let node = tcx.hir_node(hir_id); + matches!( + node, + hir::Node::TraitItem(hir::TraitItem { + kind: hir::TraitItemKind::Type(..), + .. 
+ }) | hir::Node::OpaqueTy(..), + ) + } else { + false + } + }; + + let clauses: Vec> = if use_item_bounds { + tcx.item_bounds(s.owner_id()) + .instantiate_identity() + .iter() + .collect() + } else { + predicates_defined_on(tcx, s.owner_id()) + .predicates + .iter() + .map(|(x, _span)| x) + .copied() + .collect() + }; + clauses.sinto(s) +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for hir::GenericBounds<'tcx> { + fn sinto(&self, s: &S) -> GenericBounds { + region_bounds_at_current_owner(s) + } +} + +/// Reflects [`hir::OpaqueTyOrigin`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::OpaqueTyOrigin, state: S as tcx)] +#[derive(Clone, Debug, JsonSchema)] +#[derive_group(Serializers)] +pub enum OpaqueTyOrigin { + FnReturn { + parent: GlobalIdent, + }, + AsyncFn { + parent: GlobalIdent, + }, + TyAlias { + parent: GlobalIdent, + in_assoc_ty: bool, + }, +} + +/// Reflects [`rustc_ast::tokenstream::TokenStream`] as a plain +/// string. If you need to reshape that into Rust tokens or construct, +/// please use, e.g., `syn`. 
+pub type TokenStream = String; + +#[cfg(feature = "rustc")] +impl<'t, S> SInto for rustc_ast::tokenstream::TokenStream { + fn sinto(&self, _: &S) -> String { + rustc_ast_pretty::pprust::tts_to_string(self) + } +} + +/// Reflects [`rustc_ast::token::Delimiter`] +#[derive(AdtInto)] +#[args(, from: rustc_ast::token::Delimiter, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum Delimiter { + Parenthesis, + Brace, + Bracket, + Invisible, +} + +/// Reflects [`rustc_ast::ast::DelimArgs`] +#[derive(AdtInto)] +#[args(, from: rustc_ast::ast::DelimArgs, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct DelimArgs { + pub dspan: DelimSpan, + pub delim: Delimiter, + pub tokens: TokenStream, +} + +sinto_todo!(rustc_ast::tokenstream, DelimSpan); + +/// Reflects [`ast::MacroDef`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: ast::MacroDef, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct MacroDef { + pub body: DelimArgs, + pub macro_rules: bool, +} + +/// Reflects [`hir::Item`] (and [`hir::ItemId`]) +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Item { + pub def_id: Option, + pub owner_id: DefId, + pub span: Span, + pub vis_span: Span, + pub kind: ItemKind, + pub attributes: ItemAttributes, + pub expn_backtrace: Vec, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto> for hir::Item<'tcx> { + fn sinto(&self, s: &S) -> Item { + let name: String = self.ident.name.to_ident_string(); + let s = &with_owner_id(s.base(), (), (), self.owner_id.to_def_id()); + let owner_id: DefId = self.owner_id.sinto(s); + let def_id = Path::from(owner_id.clone()) + .ends_with(&[name]) + .then(|| owner_id.clone()); + Item { + def_id, + owner_id, + span: self.span.sinto(s), + vis_span: self.span.sinto(s), + kind: 
self.kind.sinto(s), + attributes: ItemAttributes::from_owner_id(s, self.owner_id), + expn_backtrace: self.span.macro_backtrace().map(|o| o.sinto(s)).collect(), + } + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto> for hir::ItemId { + fn sinto(&self, s: &S) -> Item { + let tcx: rustc_middle::ty::TyCtxt = s.base().tcx; + tcx.hir().item(*self).sinto(s) + } +} + +/// Reflects [`rustc_span::symbol::Ident`] +pub type Ident = (Symbol, Span); + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_span::symbol::Ident { + fn sinto(&self, s: &S) -> Ident { + (self.name.sinto(s), self.span.sinto(s)) + } +} + +/// Reflects [`rustc_ast::ast::AttrStyle`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(, from: rustc_ast::ast::AttrStyle, state: S as _s)] +pub enum AttrStyle { + Outer, + Inner, +} + +/// Reflects [`rustc_ast::ast::Attribute`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::Attribute, state: S as gstate)] +pub struct Attribute { + pub kind: AttrKind, + #[map(x.as_usize())] + pub id: usize, + pub style: AttrStyle, + pub span: Span, +} + +/// Reflects [`rustc_attr::InlineAttr`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_attr::InlineAttr, state: S as _s)] +pub enum InlineAttr { + None, + Hint, + Always, + Never, +} + +/// Reflects [`rustc_ast::ast::BindingMode`] +#[derive(AdtInto)] +#[args(, from: rustc_ast::ast::BindingMode, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct BindingMode { + #[value(self.0.sinto(s))] + pub by_ref: ByRef, + #[value(self.1.sinto(s))] + pub mutability: Mutability, +} + +/// Reflects [`rustc_ast::ast::ByRef`] 
+#[derive(AdtInto)] +#[args(, from: rustc_ast::ast::ByRef, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum ByRef { + Yes(Mutability), + No, +} + +/// Reflects [`rustc_ast::ast::StrStyle`] +#[derive(AdtInto)] +#[args(, from: rustc_ast::ast::StrStyle, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum StrStyle { + Cooked, + Raw(u8), +} + +/// Reflects [`rustc_ast::ast::LitKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitKind, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum LitKind { + Str(Symbol, StrStyle), + ByteStr(Vec, StrStyle), + CStr(Vec, StrStyle), + Byte(u8), + Char(char), + Int( + #[serde(with = "serialize_int::unsigned")] + #[schemars(with = "String")] + u128, + LitIntType, + ), + Float(Symbol, LitFloatType), + Bool(bool), + Err(ErrorGuaranteed), +} + +#[cfg(feature = "rustc")] +impl SInto for rustc_data_structures::packed::Pu128 { + fn sinto(&self, _s: &S) -> u128 { + self.0 + } +} + +// FIXME: typo: invo**C**ation +#[allow(rustdoc::private_intra_doc_links)] +/// Describe a macro invocation, using +/// [`macro_invocation_of_raw_mac_invocation`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct MacroInvokation { + pub macro_ident: DefId, + pub argument: String, + pub span: Span, +} + +/// Reflects [`rustc_ast::token::CommentKind`] +#[derive(AdtInto)] +#[args(, from: rustc_ast::token::CommentKind, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum CommentKind { + Line, + Block, +} + +/// Reflects [`rustc_ast::ast::AttrArgs`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::AttrArgs, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, 
Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum AttrArgs { + Empty, + Delimited(DelimArgs), + + Eq(Span, AttrArgsEq), + // #[todo] + // Todo(String), +} + +/// Reflects [`rustc_ast::ast::AttrArgsEq`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::AttrArgsEq, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum AttrArgsEq { + Hir(MetaItemLit), + #[todo] + Ast(String), + // Ast(P), +} + +/// Reflects [`rustc_ast::ast::MetaItemLit`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::MetaItemLit, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct MetaItemLit { + pub symbol: Symbol, + pub suffix: Option, + pub kind: LitKind, + pub span: Span, +} + +/// Reflects [`rustc_ast::ast::AttrItem`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::AttrItem, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct AttrItem { + #[map(rustc_ast_pretty::pprust::path_to_string(x))] + pub path: String, + pub args: AttrArgs, + pub tokens: Option, +} + +#[cfg(feature = "rustc")] +impl SInto for rustc_ast::tokenstream::LazyAttrTokenStream { + fn sinto(&self, st: &S) -> String { + rustc_ast::tokenstream::TokenStream::new(self.to_attr_token_stream().to_token_trees()) + .sinto(st) + } +} + +/// Reflects [`rustc_ast::ast::NormalAttr`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::NormalAttr, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct NormalAttr { + pub item: AttrItem, + pub tokens: Option, +} + +/// Reflects [`rustc_ast::AttrKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::AttrKind, state: S as tcx)] 
+#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum AttrKind { + Normal(NormalAttr), + DocComment(CommentKind, Symbol), +} + +sinto_todo!(rustc_hir::def, DefKind); +sinto_todo!(rustc_hir, GenericArgs<'a> as HirGenericArgs); +sinto_todo!(rustc_hir, InlineAsm<'a>); +sinto_todo!(rustc_hir, MissingLifetimeKind); +sinto_todo!(rustc_hir, QPath<'tcx>); +sinto_todo!(rustc_hir, WhereRegionPredicate<'tcx>); +sinto_todo!(rustc_hir, WhereEqPredicate<'tcx>); +sinto_todo!(rustc_hir, OwnerId); diff --git a/frontend/exporter/src/types/index.rs b/frontend/exporter/src/types/index.rs deleted file mode 100644 index 2a9c2cc18..000000000 --- a/frontend/exporter/src/types/index.rs +++ /dev/null @@ -1,11 +0,0 @@ -#[cfg(feature = "rustc")] -use crate::prelude::*; -use crate::sinto_as_usize; - -sinto_as_usize!(rustc_middle::ty, DebruijnIndex); -sinto_as_usize!(rustc_middle::ty, UniverseIndex); -sinto_as_usize!(rustc_middle::ty, BoundVar); -sinto_as_usize!(rustc_middle::middle::region, FirstStatementIndex); -sinto_as_usize!(rustc_hir::hir_id, ItemLocalId); -sinto_as_usize!(rustc_target::abi, VariantIdx); -sinto_as_usize!(rustc_middle::ty, RegionVid); diff --git a/frontend/exporter/src/types/mir.rs b/frontend/exporter/src/types/mir.rs index 8e85f8f51..ecc4d9d72 100644 --- a/frontend/exporter/src/types/mir.rs +++ b/frontend/exporter/src/types/mir.rs @@ -1,4 +1,9 @@ +//! Copies of the relevant `MIR` types. MIR represents a rust (function) body as a CFG. It's a +//! semantically rich representation that contains no high-level control-flow operations like loops +//! or patterns; instead the control flow is entirely described by gotos and switches on integer +//! values. use crate::prelude::*; +use crate::sinto_as_usize; #[cfg(feature = "rustc")] use tracing::trace; @@ -310,7 +315,7 @@ pub(crate) fn get_function_from_def_id_and_generics<'tcx, S: BaseState<'tcx> + H // fn foo(...) 
// ^^^ // ``` - let mut trait_refs = solve_item_traits(s, def_id, generics, None); + let mut trait_refs = solve_item_required_traits(s, def_id, generics); // Check if this is a trait method call: retrieve the trait source if // it is the case (i.e., where does the method come from? Does it refer @@ -378,10 +383,9 @@ pub(crate) fn get_function_from_def_id_and_generics<'tcx, S: BaseState<'tcx> + H // } // ``` // The generics for `insert` are `` for the impl and `` for the method. - let params_info = get_params_info(s, container_def_id); - let num_container_generics = params_info.num_generic_params; match assoc.container { rustc_middle::ty::AssocItemContainer::TraitContainer => { + let num_container_generics = tcx.generics_of(container_def_id).own_params.len(); // Retrieve the trait information let impl_expr = self_clause_for_item(s, &assoc, generics).unwrap(); // Return only the method generics; the trait generics are included in `impl_expr`. @@ -393,7 +397,7 @@ pub(crate) fn get_function_from_def_id_and_generics<'tcx, S: BaseState<'tcx> + H let container_generics = tcx.generics_of(container_def_id); let container_generics = generics.truncate_to(tcx, container_generics); let container_trait_refs = - solve_item_traits(s, container_def_id, container_generics, None); + solve_item_required_traits(s, container_def_id, container_generics); trait_refs.extend(container_trait_refs); (generics.sinto(s), Option::None) } @@ -465,7 +469,7 @@ fn get_function_from_operand<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>( trace!("type: {:?}", ty); trace!("type kind: {:?}", ty.kind()); let sig = match ty.kind() { - rustc_middle::ty::TyKind::FnPtr(sig) => sig, + rustc_middle::ty::TyKind::FnPtr(sig, ..) 
=> sig, _ => unreachable!(), }; trace!("FnPtr: {:?}", sig); @@ -540,8 +544,8 @@ fn translate_switch_targets<'tcx, S: UnderOwnerState<'tcx>>( let targets_vec: Vec<(u128, BasicBlock)> = targets.iter().map(|(v, b)| (v, b.sinto(s))).collect(); - match switch_ty { - Ty::Bool => { + match switch_ty.kind() { + TyKind::Bool => { // This is an: `if ... then ... else ...` assert!(targets_vec.len() == 1); // It seems the block targets are inverted @@ -554,10 +558,10 @@ fn translate_switch_targets<'tcx, S: UnderOwnerState<'tcx>>( SwitchTargets::If(if_block, otherwise_block) } - Ty::Int(_) | Ty::Uint(_) => { - let int_ty = match switch_ty { - Ty::Int(ty) => IntUintTy::Int(*ty), - Ty::Uint(ty) => IntUintTy::Uint(*ty), + TyKind::Int(_) | TyKind::Uint(_) => { + let int_ty = match switch_ty.kind() { + TyKind::Int(ty) => IntUintTy::Int(*ty), + TyKind::Uint(ty) => IntUintTy::Uint(*ty), _ => unreachable!(), }; @@ -946,7 +950,7 @@ pub enum AggregateKind { Tuple, #[custom_arm(rustc_middle::mir::AggregateKind::Adt(def_id, vid, generics, annot, fid) => { let adt_kind = s.base().tcx.adt_def(def_id).adt_kind().sinto(s); - let trait_refs = solve_item_traits(s, *def_id, generics, None); + let trait_refs = solve_item_required_traits(s, *def_id, generics); AggregateKind::Adt( def_id.sinto(s), vid.sinto(s), @@ -974,15 +978,14 @@ pub enum AggregateKind { let closure = generics.as_closure(); let sig = closure.sig().sinto(s); - // Solve the trait obligations. Note that we solve the parent + // Solve the predicates from the parent (i.e., the function which calls the closure). let tcx = s.base().tcx; let parent_generics = closure.parent_args(); let generics = tcx.mk_args(parent_generics); - // Retrieve the predicates from the parent (i.e., the function which calls - // the closure). - let predicates = tcx.predicates_defined_on(tcx.generics_of(rust_id).parent.unwrap()); + // TODO: does this handle nested closures? 
+ let parent = tcx.generics_of(rust_id).parent.unwrap(); + let trait_refs = solve_item_required_traits(s, parent, generics); - let trait_refs = solve_item_traits(s, *rust_id, generics, Some(predicates)); AggregateKind::Closure(def_id, parent_generics.sinto(s), trait_refs, sig) })] Closure(DefId, Vec, Vec, PolyFnSig), @@ -993,12 +996,11 @@ pub enum AggregateKind { #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] -#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::CastKind, state: S as s)] +#[args(<'tcx, S>, from: rustc_middle::mir::CastKind, state: S as _s)] pub enum CastKind { PointerExposeProvenance, PointerWithExposedProvenance, - PointerCoercion(PointerCoercion), - DynStar, + PointerCoercion(PointerCoercion, CoercionSource), IntToInt, FloatToInt, FloatToFloat, @@ -1008,6 +1010,14 @@ pub enum CastKind { Transmute, } +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S>, from: rustc_middle::mir::CoercionSource, state: S as _s)] +pub enum CoercionSource { + AsCast, + Implicit, +} + #[derive_group(Serializers)] #[derive(AdtInto, Clone, Debug, JsonSchema)] #[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::NullOp<'tcx>, state: S as s)] @@ -1032,7 +1042,7 @@ pub enum Rvalue { Repeat(Operand, ConstantExpr), Ref(Region, BorrowKind, Place), ThreadLocalRef(DefId), - AddressOf(Mutability, Place), + RawPtr(Mutability, Place), Len(Place), Cast(CastKind, Operand, Ty), BinaryOp(BinOp, (Operand, Operand)), @@ -1062,11 +1072,168 @@ make_idx_wrapper!(rustc_middle::mir, Local); make_idx_wrapper!(rustc_middle::ty, UserTypeAnnotationIndex); make_idx_wrapper!(rustc_target::abi, FieldIdx); +/// Reflects [`rustc_middle::mir::UnOp`] +#[derive_group(Serializers)] +#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)] +#[args(<'slt, S: UnderOwnerState<'slt>>, from: rustc_middle::mir::UnOp, state: S as _s)] +pub enum UnOp { + Not, + Neg, + PtrMetadata, +} + +/// 
Reflects [`rustc_middle::mir::BinOp`] +#[derive_group(Serializers)] +#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)] +#[args(<'slt, S: UnderOwnerState<'slt>>, from: rustc_middle::mir::BinOp, state: S as _s)] +pub enum BinOp { + // We merge the checked and unchecked variants because in either case overflow is failure. + #[custom_arm( + rustc_middle::mir::BinOp::Add | rustc_middle::mir::BinOp::AddUnchecked => BinOp::Add, + )] + Add, + #[custom_arm( + rustc_middle::mir::BinOp::Sub | rustc_middle::mir::BinOp::SubUnchecked => BinOp::Sub, + )] + Sub, + #[custom_arm( + rustc_middle::mir::BinOp::Mul | rustc_middle::mir::BinOp::MulUnchecked => BinOp::Mul, + )] + Mul, + AddWithOverflow, + SubWithOverflow, + MulWithOverflow, + Div, + Rem, + BitXor, + BitAnd, + BitOr, + #[custom_arm( + rustc_middle::mir::BinOp::Shl | rustc_middle::mir::BinOp::ShlUnchecked => BinOp::Shl, + )] + Shl, + #[custom_arm( + rustc_middle::mir::BinOp::Shr | rustc_middle::mir::BinOp::ShrUnchecked => BinOp::Shr, + )] + Shr, + Eq, + Lt, + Le, + Ne, + Ge, + Gt, + Cmp, + Offset, +} + +/// Reflects [`rustc_middle::mir::ScopeData`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::ScopeData, state: S as gstate)] +pub enum ScopeData { + Node, + CallSite, + Arguments, + Destruction, + IfThen, + IfThenRescope, + Remainder(FirstStatementIndex), +} + +sinto_as_usize!(rustc_middle::middle::region, FirstStatementIndex); + +/// Reflects [`rustc_middle::mir::BinOp`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::Scope, state: S as gstate)] +pub struct Scope { + pub id: ItemLocalId, + pub data: ScopeData, +} + +sinto_as_usize!(rustc_hir::hir_id, ItemLocalId); + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::mir::Const<'tcx> { + fn 
sinto(&self, s: &S) -> ConstantExpr { + use rustc_middle::mir::Const; + let tcx = s.base().tcx; + match self { + Const::Val(const_value, ty) => { + const_value_to_constant_expr(s, *ty, *const_value, rustc_span::DUMMY_SP) + } + Const::Ty(_ty, c) => c.sinto(s), + Const::Unevaluated(ucv, _ty) => { + use crate::rustc_middle::query::Key; + let span = tcx + .def_ident_span(ucv.def) + .unwrap_or_else(|| ucv.def.default_span(tcx)); + if ucv.promoted.is_some() { + self.eval_constant(s) + .unwrap_or_else(|| { + supposely_unreachable_fatal!(s, "UnevalPromotedConstant"; {self, ucv}); + }) + .sinto(s) + } else { + match self.translate_uneval(s, ucv.shrink(), span) { + TranslateUnevalRes::EvaluatedConstant(c) => c.sinto(s), + TranslateUnevalRes::GlobalName(c) => c, + } + } + } + } + } +} + +#[cfg(feature = "rustc")] +impl SInto for rustc_middle::mir::interpret::AllocId { + fn sinto(&self, _: &S) -> u64 { + self.0.get() + } +} + +/// Reflects [`rustc_middle::mir::BorrowKind`] +#[derive(AdtInto)] +#[args(, from: rustc_middle::mir::BorrowKind, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum BorrowKind { + Shared, + Fake(FakeBorrowKind), + Mut { kind: MutBorrowKind }, +} + +/// Reflects [`rustc_middle::mir::MutBorrowKind`] +#[derive(AdtInto)] +#[args(, from: rustc_middle::mir::MutBorrowKind, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum MutBorrowKind { + Default, + TwoPhaseBorrow, + ClosureCapture, +} + +/// Reflects [`rustc_middle::mir::FakeBorrowKind`] +#[derive(AdtInto)] +#[args(, from: rustc_middle::mir::FakeBorrowKind, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum FakeBorrowKind { + /// A shared (deep) borrow. Data must be immutable and is aliasable. 
+ Deep, + /// The immediately borrowed place must be immutable, but projections from + /// it don't need to be. This is used to prevent match guards from replacing + /// the scrutinee. For example, a fake borrow of `a.b` doesn't + /// conflict with a mutable borrow of `a.b.c`. + Shallow, +} + +sinto_todo!(rustc_ast::ast, InlineAsmTemplatePiece); +sinto_todo!(rustc_ast::ast, InlineAsmOptions); sinto_todo!(rustc_middle::ty, InstanceKind<'tcx>); sinto_todo!(rustc_middle::mir, UserTypeProjections); sinto_todo!(rustc_middle::mir, LocalInfo<'tcx>); -sinto_todo!(rustc_ast::ast, InlineAsmTemplatePiece); -sinto_todo!(rustc_ast::ast, InlineAsmOptions); sinto_todo!(rustc_middle::mir, InlineAsmOperand<'tcx>); sinto_todo!(rustc_middle::mir, AssertMessage<'tcx>); sinto_todo!(rustc_middle::mir, UnwindAction); @@ -1077,5 +1244,6 @@ sinto_todo!(rustc_middle::mir, MirSource<'tcx>); sinto_todo!(rustc_middle::mir, CoroutineInfo<'tcx>); sinto_todo!(rustc_middle::mir, VarDebugInfo<'tcx>); sinto_todo!(rustc_middle::mir, CallSource); +sinto_todo!(rustc_middle::mir, UnwindTerminateReason); sinto_todo!(rustc_middle::mir::coverage, CoverageKind); -sinto_todo!(rustc_span, ErrorGuaranteed); +sinto_todo!(rustc_middle::mir::interpret, ConstAllocation<'a>); diff --git a/frontend/exporter/src/types/mir_traits.rs b/frontend/exporter/src/types/mir_traits.rs deleted file mode 100644 index fa2fa5f1e..000000000 --- a/frontend/exporter/src/types/mir_traits.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::prelude::*; - -/// We use this to store information about the parameters in parent blocks. -/// This is necessary because when querying the generics of a definition, -/// rustc gives us *all* the generics used in this definition, including -/// those coming from the outer impl block. -/// -/// For instance: -/// ```text -/// impl Foo { -/// ^^^ -/// outer block generics -/// fn bar(...) { ... 
} -/// ^^^ -/// generics local to the function bar -/// } -/// ``` -/// -/// `TyCtxt.generics_of(bar)` gives us: `[T, U]`. -/// -/// We however sometimes need to make a distinction between those two kinds -/// of generics, in particular when manipulating trait instances. For instance: -/// -/// ```text -/// impl Foo for Bar { -/// fn baz(...) { ... } -/// } -/// -/// fn test(...) { -/// // Here: -/// x.baz(...); -/// // We should refer to this call as: -/// // > Foo::baz(...) -/// // -/// // If baz hadn't been a method implementation of a trait, -/// // we would have refered to it as: -/// // > baz(...) -/// // -/// // The reason is that with traits, we refer to the whole -/// // trait implementation (as if it were a structure), then -/// // pick a specific method inside (as if projecting a field -/// // from a structure). -/// } -/// ``` -/// -/// **Remark**: Rust only allows refering to the generics of the **immediately** -/// outer block. For this reason, when we need to store the information about -/// the generics of the outer block(s), we need to do it only for one level -/// (this definitely makes things simpler). -/// **Additional remark**: it is not possible to directly write an impl block -/// or a trait definition inside an impl/trait block. However it is possible -/// to define an impl/trait inside a function, which can itself be inside a -/// block, leading to nested impl blocks. -#[derive_group(Serializers)] -#[derive(Clone, Debug, JsonSchema)] -pub struct ParamsInfo { - /// The total number of generic parameters (regions + types + consts). - /// We do not consider the trait clauses as generic parameters. - pub num_generic_params: usize, - pub num_region_params: usize, - pub num_type_params: usize, - pub num_const_generic_params: usize, - pub num_trait_clauses: usize, - pub num_regions_outlive: usize, - pub num_types_outlive: usize, - pub num_trait_type_constraints: usize, -} - -/// Compute the parameters information for a definition. 
See [ParamsInfo]. -pub fn get_params_info<'tcx, S: BaseState<'tcx> + HasOwnerId>( - s: &S, - def_id: rustc_hir::def_id::DefId, -) -> ParamsInfo { - let tcx = s.base().tcx; - - // Compute the number of generics - let mut num_region_params = 0; - let mut num_type_params = 0; - let mut num_const_generic_params = 0; - let mut num_regions_outlive = 0; - let mut num_types_outlive = 0; - let mut num_trait_type_constraints = 0; - - let generics = tcx.generics_of(def_id); - let num_generic_params = generics.own_params.len(); - use rustc_middle::ty::GenericParamDefKind; - for param in &generics.own_params { - match param.kind { - GenericParamDefKind::Lifetime => num_region_params += 1, - GenericParamDefKind::Type { .. } => num_type_params += 1, - GenericParamDefKind::Const { .. } => num_const_generic_params += 1, - } - } - - // Compute the number of trait clauses - let mut num_trait_clauses = 0; - // **IMPORTANT**: we do NOT want to [TyCtxt::predicates_of]. - // If we use [TyCtxt::predicates_of] on a trait `Foo`, we get an - // additional predicate `Self : Foo` (i.e., the trait requires itself), - // which is not what we want. 
- let preds = tcx.predicates_defined_on(def_id); - for (pred, _) in preds.predicates { - use rustc_middle::ty::ClauseKind; - match &pred.kind().skip_binder() { - ClauseKind::Trait(_) => num_trait_clauses += 1, - ClauseKind::RegionOutlives(_) => num_regions_outlive += 1, - ClauseKind::TypeOutlives(_) => num_types_outlive += 1, - ClauseKind::Projection(_) => num_trait_type_constraints += 1, - _ => (), - } - } - - ParamsInfo { - num_generic_params, - num_region_params, - num_type_params, - num_const_generic_params, - num_trait_clauses, - num_regions_outlive, - num_types_outlive, - num_trait_type_constraints, - } -} diff --git a/frontend/exporter/src/types/mod.rs b/frontend/exporter/src/types/mod.rs index 9a1acfa6a..e70f2ce3c 100644 --- a/frontend/exporter/src/types/mod.rs +++ b/frontend/exporter/src/types/mod.rs @@ -1,23 +1,19 @@ // There's a conflict between `mir::ScalarInt`and `todo::ScalarInt` but it doesn't matter. #![allow(ambiguous_glob_reexports)] -mod copied; mod def_id; -mod index; +mod hir; mod mir; -#[cfg(feature = "rustc")] -mod mir_traits; mod new; -mod replaced; pub(crate) mod serialize_int; -mod todo; +mod span; +mod thir; +mod ty; -pub use copied::*; pub use def_id::*; -pub use index::*; +pub use hir::*; pub use mir::*; -#[cfg(feature = "rustc")] -pub use mir_traits::*; pub use new::*; -pub use replaced::*; -pub use todo::*; +pub use span::*; +pub use thir::*; +pub use ty::*; diff --git a/frontend/exporter/src/types/new/full_def.rs b/frontend/exporter/src/types/new/full_def.rs index 784de0a67..eabdde186 100644 --- a/frontend/exporter/src/types/new/full_def.rs +++ b/frontend/exporter/src/types/new/full_def.rs @@ -1,50 +1,98 @@ use crate::prelude::*; +use std::sync::Arc; +#[cfg(feature = "rustc")] +use rustc_hir::def::DefKind as RDefKind; #[cfg(feature = "rustc")] use rustc_middle::ty; #[cfg(feature = "rustc")] use rustc_span::def_id::DefId as RDefId; /// Gathers a lot of definition information about a [`rustc_hir::def_id::DefId`]. 
-#[derive(AdtInto)] -#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::def_id::DefId, state: S as s)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] -pub struct FullDef { - #[value(self.sinto(s))] +pub struct FullDef { pub def_id: DefId, - #[value(s.base().tcx.opt_parent(*self).sinto(s))] + /// The enclosing item. pub parent: Option, - #[value(s.base().tcx.def_span(*self).sinto(s))] + /// The span of the definition of this item (e.g. for a function this is is signature). pub span: Span, - #[value(s.base().tcx.get_attrs_unchecked(*self).sinto(s))] + /// The span of the whole definition (including e.g. the function body). + pub source_span: Option, + /// The text of the whole definition. + pub source_text: Option, /// Attributes on this definition, if applicable. pub attributes: Vec, - #[value(get_def_visibility(s, *self))] /// Visibility of the definition, for definitions where this makes sense. pub visibility: Option, - #[value(s.base().tcx.as_lang_item(*self).map(|litem| litem.name()).sinto(s))] /// If this definition is a lang item, we store the identifier, e.g. `sized`. pub lang_item: Option, - #[value(s.base().tcx.get_diagnostic_name(*self).sinto(s))] /// If this definition is a diagnostic item, we store the identifier, e.g. `box_new`. 
pub diagnostic_item: Option, - #[value({ - let state_with_id = State { thir: (), mir: (), owner_id: *self, binder: (), base: s.base() }; - s.base().tcx.def_kind(*self).sinto(&state_with_id) - })] - pub kind: FullDefKind, + pub kind: FullDefKind, +} + +#[cfg(feature = "rustc")] +fn translate_full_def<'tcx, S, Body>(s: &S, def_id: RDefId) -> FullDef +where + S: BaseState<'tcx>, + Body: IsBody + TypeMappable, +{ + let tcx = s.base().tcx; + let def_kind = get_def_kind(tcx, def_id); + let kind = { + let state_with_id = with_owner_id(s.base(), (), (), def_id); + def_kind.sinto(&state_with_id) + }; + + let source_span = def_id.as_local().map(|ldid| tcx.source_span(ldid)); + let source_text = source_span + .filter(|source_span| source_span.ctxt().is_root()) + .and_then(|source_span| tcx.sess.source_map().span_to_snippet(source_span).ok()); + + FullDef { + def_id: def_id.sinto(s), + parent: tcx.opt_parent(def_id).sinto(s), + span: get_def_span(tcx, def_id, def_kind).sinto(s), + source_span: source_span.sinto(s), + source_text, + attributes: get_def_attrs(tcx, def_id, def_kind).sinto(s), + visibility: get_def_visibility(tcx, def_id, def_kind), + lang_item: s + .base() + .tcx + .as_lang_item(def_id) + .map(|litem| litem.name()) + .sinto(s), + diagnostic_item: tcx.get_diagnostic_name(def_id).sinto(s), + kind, + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S, Body> SInto>> for RDefId +where + Body: IsBody + TypeMappable, + S: BaseState<'tcx>, +{ + fn sinto(&self, s: &S) -> Arc> { + if let Some(def) = s.with_item_cache(*self, |cache| cache.full_def.get().cloned()) { + return def; + } + let def = Arc::new(translate_full_def(s, *self)); + s.with_item_cache(*self, |cache| cache.full_def.insert(def.clone())); + def + } } /// Imbues [`rustc_hir::def::DefKind`] with a lot of extra information. /// Important: the `owner_id()` must be the id of this definition. 
#[derive(AdtInto)] -#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::def::DefKind, state: S as s)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_hir::def::DefKind, state: S as s, where Body: IsBody + TypeMappable)] #[derive_group(Serializers)] #[derive(Clone, Debug, JsonSchema)] -pub enum FullDefKind { - // Type namespace - Mod, +pub enum FullDefKind { + // Types /// Refers to the struct definition, [`DefKind::Ctor`] refers to its constructor if it exists. Struct { #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] @@ -70,25 +118,6 @@ pub enum FullDefKind { #[value(s.base().tcx.adt_def(s.owner_id()).sinto(s))] def: AdtDef, }, - /// Refers to the variant definition, [`DefKind::Ctor`] refers to its constructor if it exists. - Variant, - Trait { - #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] - generics: TyGenerics, - #[value(get_generic_predicates(s, s.owner_id()))] - predicates: GenericPredicates, - // `predicates_of` has the special `Self: Trait` clause as its last element. - #[value({ - use ty::Upcast; - let tcx = s.base().tcx; - let pred: ty::TraitPredicate = ty::TraitRef::identity(tcx, s.owner_id()).upcast(tcx); - pred.sinto(s) - })] - self_predicate: TraitPredicate, - /// Associated items, in definition order. - #[value(s.base().tcx.associated_items(s.owner_id()).in_definition_order().collect::>().sinto(s))] - items: Vec, - }, /// Type alias: `type Foo = Bar;` TyAlias { #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] @@ -107,8 +136,6 @@ pub enum FullDefKind { }, /// Type from an `extern` block. 
ForeignTy, - /// Trait alias: `trait IntIterator = Iterator;` - TraitAlias, /// Associated type: `trait MyTrait { type Assoc; }` AssocTy { #[value(s.base().tcx.parent(s.owner_id()).sinto(s))] @@ -116,6 +143,7 @@ pub enum FullDefKind { #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] generics: TyGenerics, #[value(get_item_predicates(s, s.owner_id()))] + // FIXME: clarify implied vs required predicates predicates: GenericPredicates, #[value(s.base().tcx.associated_item(s.owner_id()).sinto(s))] associated_item: AssocItem, @@ -129,10 +157,72 @@ pub enum FullDefKind { })] value: Option, }, - /// Type parameter: the `T` in `struct Vec { ... }` - TyParam, + /// Opaque type, aka `impl Trait`. + OpaqueTy, + + // Traits + Trait { + #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] + generics: TyGenerics, + #[value(get_generic_predicates(s, s.owner_id()))] + predicates: GenericPredicates, + /// The special `Self: Trait` clause. + #[value({ + use ty::Upcast; + let tcx = s.base().tcx; + let pred: ty::TraitPredicate = + crate::traits::self_predicate(tcx, s.owner_id()) + .unwrap() + .no_bound_vars() + .unwrap() + .upcast(tcx); + pred.sinto(s) + })] + self_predicate: TraitPredicate, + /// Associated items, in definition order. + #[value( + s + .base() + .tcx + .associated_items(s.owner_id()) + .in_definition_order() + .map(|assoc| (assoc, assoc.def_id)) + .collect::>() + .sinto(s) + )] + items: Vec<(AssocItem, Arc>)>, + }, + /// Trait alias: `trait IntIterator = Iterator;` + TraitAlias, + #[custom_arm( + // Returns `TraitImpl` or `InherentImpl`. + RDefKind::Impl { .. } => get_impl_contents(s), + )] + TraitImpl { + generics: TyGenerics, + predicates: GenericPredicates, + /// The trait that is implemented by this impl block. + trait_pred: TraitPredicate, + /// The `ImplExpr`s required to satisfy the predicates on the trait declaration. E.g.: + /// ```ignore + /// trait Foo: Bar {} + /// impl Foo for () {} // would supply an `ImplExpr` for `Self: Bar`. 
+ /// ``` + required_impl_exprs: Vec, + /// Associated items, in the order of the trait declaration. Includes defaulted items. + items: Vec>, + }, + #[disable_mapping] + InherentImpl { + generics: TyGenerics, + predicates: GenericPredicates, + /// The type to which this block applies. + ty: Ty, + /// Associated items, in definition order. + items: Vec<(AssocItem, Arc>)>, + }, - // Value namespace + // Functions Fn { #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] generics: TyGenerics, @@ -140,8 +230,12 @@ pub enum FullDefKind { predicates: GenericPredicates, #[value(s.base().tcx.codegen_fn_attrs(s.owner_id()).inline.sinto(s))] inline: InlineAttr, + #[value(s.base().tcx.constness(s.owner_id()) == rustc_hir::Constness::Const)] + is_const: bool, #[value(s.base().tcx.fn_sig(s.owner_id()).instantiate_identity().sinto(s))] sig: PolyFnSig, + #[value(s.owner_id().as_local().map(|ldid| Body::body(ldid, s)))] + body: Option, }, /// Associated function: `impl MyStruct { fn associated() {} }` or `trait Foo { fn associated() /// {} }` @@ -156,8 +250,12 @@ pub enum FullDefKind { predicates: GenericPredicates, #[value(s.base().tcx.codegen_fn_attrs(s.owner_id()).inline.sinto(s))] inline: InlineAttr, + #[value(s.base().tcx.constness(s.owner_id()) == rustc_hir::Constness::Const)] + is_const: bool, #[value(s.base().tcx.fn_sig(s.owner_id()).instantiate_identity().sinto(s))] sig: PolyFnSig, + #[value(s.owner_id().as_local().map(|ldid| Body::body(ldid, s)))] + body: Option, }, /// A closure, coroutine, or coroutine-closure. /// @@ -169,6 +267,8 @@ pub enum FullDefKind { /// constant. 
#[value(s.base().tcx.parent(s.owner_id()).sinto(s))] parent: DefId, + #[value(s.base().tcx.constness(s.owner_id()) == rustc_hir::Constness::Const)] + is_const: bool, #[value({ let fun_type = s.base().tcx.type_of(s.owner_id()).instantiate_identity(); match fun_type.kind() { @@ -178,6 +278,8 @@ pub enum FullDefKind { })] args: ClosureArgs, }, + + // Constants Const { #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] generics: TyGenerics, @@ -185,6 +287,8 @@ pub enum FullDefKind { predicates: GenericPredicates, #[value(s.base().tcx.type_of(s.owner_id()).instantiate_identity().sinto(s))] ty: Ty, + #[value(s.owner_id().as_local().map(|ldid| Body::body(ldid, s)))] + body: Option, }, /// Associated constant: `trait MyTrait { const ASSOC: usize; }` AssocConst { @@ -198,7 +302,13 @@ pub enum FullDefKind { predicates: GenericPredicates, #[value(s.base().tcx.type_of(s.owner_id()).instantiate_identity().sinto(s))] ty: Ty, + #[value(s.owner_id().as_local().map(|ldid| Body::body(ldid, s)))] + body: Option, }, + /// Anonymous constant, e.g. the `1 + 2` in `[u8; 1 + 2]` + AnonConst, + /// An inline constant, e.g. `const { 1 + 2 }` + InlineConst, Static { /// Whether it's a `unsafe static`, `safe static` (inside extern only) or just a `static`. safety: Safety, @@ -212,49 +322,107 @@ pub enum FullDefKind { predicates: GenericPredicates, #[value(s.base().tcx.type_of(s.owner_id()).instantiate_identity().sinto(s))] ty: Ty, + #[value(s.owner_id().as_local().map(|ldid| Body::body(ldid, s)))] + body: Option, }, - /// Constant generic parameter: `struct Foo { ... }` - ConstParam, - /// Refers to the struct or enum variant's constructor. - Ctor(CtorOf, CtorKind), - // Macro namespace - Macro(MacroKind), - - // Not namespaced (or they are, but we don't treat them so) + // Crates and modules ExternCrate, Use, + Mod { + #[value(get_mod_children(s.base().tcx, s.owner_id()).sinto(s))] + items: Vec, + }, /// An `extern` block. - ForeignMod, - /// Anonymous constant, e.g. 
the `1 + 2` in `[u8; 1 + 2]` - AnonConst, - /// An inline constant, e.g. `const { 1 + 2 }` - InlineConst, - /// Opaque type, aka `impl Trait`. - OpaqueTy, - Impl { - #[value(s.base().tcx.generics_of(s.owner_id()).sinto(s))] - generics: TyGenerics, - #[value(get_generic_predicates(s, s.owner_id()))] - predicates: GenericPredicates, - #[value(s.base().tcx.impl_subject(s.owner_id()).instantiate_identity().sinto(s))] - impl_subject: ImplSubject, - /// Associated items, in definition order. - #[value(s.base().tcx.associated_items(s.owner_id()).in_definition_order().collect::>().sinto(s))] - items: Vec, + ForeignMod { + #[value(get_foreign_mod_children(s.base().tcx, s.owner_id()).sinto(s))] + items: Vec, }, + + // Type-level parameters + /// Type parameter: the `T` in `struct Vec { ... }` + TyParam, + /// Constant generic parameter: `struct Foo { ... }` + ConstParam, + /// Lifetime parameter: the `'a` in `struct Foo<'a> { ... }` + LifetimeParam, + + // ADT parts + /// Refers to the variant definition, [`DefKind::Ctor`] refers to its constructor if it exists. + Variant, + /// Refers to the struct or enum variant's constructor. + Ctor(CtorOf, CtorKind), /// A field in a struct, enum or union. e.g. /// - `bar` in `struct Foo { bar: u8 }` /// - `Foo::Bar::0` in `enum Foo { Bar(u8) }` Field, - /// Lifetime parameter: the `'a` in `struct Foo<'a> { ... }` - LifetimeParam, + + // Others + /// Macros + Macro(MacroKind), /// A use of `global_asm!`. GlobalAsm, + /// A synthetic coroutine body created by the lowering of a coroutine-closure, such as an async + /// closure. + SyntheticCoroutineBody, +} + +/// An associated item in a trait impl. This can be an item provided by the trait impl, or an item +/// that reuses the trait decl default value. +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct ImplAssocItem { + pub name: Symbol, + /// The definition of the item from the trait declaration. This is `AssocTy`, `AssocFn` or + /// `AssocConst`. 
+ pub decl_def: Arc>, + /// The `ImplExpr`s required to satisfy the predicates on the associated type. E.g.: + /// ```ignore + /// trait Foo { + /// type Type: Clone, + /// } + /// impl Foo for () { + /// type Type: Arc; // would supply an `ImplExpr` for `Arc: Clone`. + /// } + /// ``` + /// Empty if this item is an associated const or fn. + pub required_impl_exprs: Vec, + /// The value of the implemented item. + pub value: ImplAssocItemValue, } -impl FullDef { - pub fn kind(&self) -> &FullDefKind { +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum ImplAssocItemValue { + /// The item is provided by the trait impl. + Provided { + /// The definition of the item in the trait impl. This is `AssocTy`, `AssocFn` or + /// `AssocConst`. + def: Arc>, + /// Whether the trait had a default value for this item (which is therefore overriden). + is_override: bool, + }, + /// This is an associated type that reuses the trait declaration default. + DefaultedTy { + /// The default type, with generics properly instantiated. Note that this can be a GAT; + /// relevant generics and predicates can be found in `decl_def`. + ty: Ty, + }, + /// This is a non-overriden default method. + /// FIXME: provide properly instantiated generics. + DefaultedFn {}, + /// This is an associated const that reuses the trait declaration default. The default const + /// value can be found in `decl_def`. + DefaultedConst, +} + +impl FullDef { + #[cfg(feature = "rustc")] + pub fn rust_def_id(&self) -> RDefId { + (&self.def_id).into() + } + + pub fn kind(&self) -> &FullDefKind { &self.kind } @@ -317,7 +485,12 @@ impl FullDef { predicates, .. } - | Impl { + | TraitImpl { + generics, + predicates, + .. + } + | InherentImpl { generics, predicates, .. @@ -327,13 +500,63 @@ impl FullDef { } } +impl ImplAssocItem { + /// The relevant definition: the provided implementation if any, otherwise the default + /// declaration from the trait declaration. 
+ pub fn def(&self) -> &FullDef { + match &self.value { + ImplAssocItemValue::Provided { def, .. } => def.as_ref(), + _ => self.decl_def.as_ref(), + } + } + + /// The kind of item this is. + pub fn assoc_kind(&self) -> AssocKind { + match self.def().kind() { + FullDefKind::AssocTy { .. } => AssocKind::Type, + FullDefKind::AssocFn { .. } => AssocKind::Fn, + FullDefKind::AssocConst { .. } => AssocKind::Const, + _ => unreachable!(), + } + } +} + +/// Gets the kind of the definition. +#[cfg(feature = "rustc")] +pub fn get_def_kind<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> RDefKind { + if def_id == rustc_span::def_id::CRATE_DEF_ID.to_def_id() { + // Horrible hack: without this, `def_kind` crashes on the crate root. Presumably some table + // isn't properly initialized otherwise. + let _ = tcx.def_span(def_id); + }; + tcx.def_kind(def_id) +} + +/// Gets the attributes of the definition. +#[cfg(feature = "rustc")] +pub fn get_def_span<'tcx>( + tcx: ty::TyCtxt<'tcx>, + def_id: RDefId, + def_kind: RDefKind, +) -> rustc_span::Span { + use RDefKind::*; + match def_kind { + // These kinds cause `def_span` to panic. + ForeignMod => rustc_span::DUMMY_SP, + _ => tcx.def_span(def_id), + } +} + /// Gets the visibility (`pub` or not) of the definition. Returns `None` for defs that don't have a /// meaningful visibility. #[cfg(feature = "rustc")] -fn get_def_visibility<'tcx, S: BaseState<'tcx>>(s: &S, def_id: RDefId) -> Option { - use rustc_hir::def::DefKind::*; - let tcx = s.base().tcx; - match tcx.def_kind(def_id) { +fn get_def_visibility<'tcx>( + tcx: ty::TyCtxt<'tcx>, + def_id: RDefId, + def_kind: RDefKind, +) -> Option { + use RDefKind::*; + match def_kind { AssocConst | AssocFn | Const @@ -364,10 +587,181 @@ fn get_def_visibility<'tcx, S: BaseState<'tcx>>(s: &S, def_id: RDefId) -> Option | InlineConst | LifetimeParam | OpaqueTy + | SyntheticCoroutineBody | TyParam => None, } } +/// Gets the attributes of the definition. 
+#[cfg(feature = "rustc")] +fn get_def_attrs<'tcx>( + tcx: ty::TyCtxt<'tcx>, + def_id: RDefId, + def_kind: RDefKind, +) -> &'tcx [rustc_ast::ast::Attribute] { + use RDefKind::*; + match def_kind { + // These kinds cause `get_attrs_unchecked` to panic. + ConstParam | LifetimeParam | TyParam | ForeignMod => &[], + _ => tcx.get_attrs_unchecked(def_id), + } +} + +/// Gets the children of a module. +#[cfg(feature = "rustc")] +fn get_mod_children<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> Vec { + match def_id.as_local() { + Some(ldid) => match tcx.hir_node_by_def_id(ldid) { + rustc_hir::Node::Crate(m) + | rustc_hir::Node::Item(&rustc_hir::Item { + kind: rustc_hir::ItemKind::Mod(m), + .. + }) => m + .item_ids + .iter() + .map(|item_id| item_id.owner_id.to_def_id()) + .collect(), + node => panic!("DefKind::Module is an unexpected node: {node:?}"), + }, + None => tcx + .module_children(def_id) + .iter() + .map(|child| child.res.def_id()) + .collect(), + } +} + +/// Gets the children of an `extern` block. Empty if the block is not defined in the current crate. 
+#[cfg(feature = "rustc")] +fn get_foreign_mod_children<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> Vec { + match def_id.as_local() { + Some(ldid) => tcx + .hir_node_by_def_id(ldid) + .expect_item() + .expect_foreign_mod() + .1 + .iter() + .map(|foreign_item_ref| foreign_item_ref.id.owner_id.to_def_id()) + .collect(), + None => vec![], + } +} + +#[cfg(feature = "rustc")] +fn get_impl_contents<'tcx, S, Body>(s: &S) -> FullDefKind +where + S: UnderOwnerState<'tcx>, + Body: IsBody + TypeMappable, +{ + use std::collections::HashMap; + let tcx = s.base().tcx; + let impl_def_id = s.owner_id(); + let generics = tcx.generics_of(impl_def_id).sinto(s); + let predicates = get_generic_predicates(s, impl_def_id); + match tcx.impl_subject(impl_def_id).instantiate_identity() { + ty::ImplSubject::Inherent(ty) => { + let items = tcx + .associated_items(impl_def_id) + .in_definition_order() + .map(|assoc| (assoc, assoc.def_id)) + .collect::>() + .sinto(s); + FullDefKind::InherentImpl { + generics, + predicates, + ty: ty.sinto(s), + items, + } + } + ty::ImplSubject::Trait(trait_ref) => { + // Also record the polarity. + let polarity = tcx.impl_polarity(impl_def_id); + let trait_pred = TraitPredicate { + trait_ref: trait_ref.sinto(s), + is_positive: matches!(polarity, ty::ImplPolarity::Positive), + }; + // Impl exprs required by the trait. + let required_impl_exprs = + solve_item_implied_traits(s, trait_ref.def_id, trait_ref.args); + + let mut item_map: HashMap = tcx + .associated_items(impl_def_id) + .in_definition_order() + .map(|assoc| (assoc.trait_item_def_id.unwrap(), assoc)) + .collect(); + let items = tcx + .associated_items(trait_ref.def_id) + .in_definition_order() + .map(|decl_assoc| { + let decl_def_id = decl_assoc.def_id; + let decl_def = decl_def_id.sinto(s); + // Impl exprs required by the item. 
+ let required_impl_exprs; + let value = match item_map.remove(&decl_def_id) { + Some(impl_assoc) => { + required_impl_exprs = { + let item_args = + ty::GenericArgs::identity_for_item(tcx, impl_assoc.def_id); + // Subtlety: we have to add the GAT arguments (if any) to the trait ref arguments. + let args = item_args.rebase_onto(tcx, impl_def_id, trait_ref.args); + let state_with_id = + with_owner_id(s.base(), (), (), impl_assoc.def_id); + solve_item_implied_traits(&state_with_id, decl_def_id, args) + }; + + ImplAssocItemValue::Provided { + def: impl_assoc.def_id.sinto(s), + is_override: decl_assoc.defaultness(tcx).has_value(), + } + } + None => { + required_impl_exprs = if tcx.generics_of(decl_def_id).is_own_empty() { + // Non-GAT case. + let item_args = + ty::GenericArgs::identity_for_item(tcx, decl_def_id); + let args = item_args.rebase_onto(tcx, impl_def_id, trait_ref.args); + let state_with_id = with_owner_id(s.base(), (), (), impl_def_id); + solve_item_implied_traits(&state_with_id, decl_def_id, args) + } else { + // FIXME: For GATs, we need a param_env that has the arguments of + // the impl plus those of the associated type, but there's no + // def_id with that param_env. + vec![] + }; + match decl_assoc.kind { + ty::AssocKind::Type => { + let ty = tcx + .type_of(decl_def_id) + .instantiate(tcx, trait_ref.args) + .sinto(s); + ImplAssocItemValue::DefaultedTy { ty } + } + ty::AssocKind::Fn => ImplAssocItemValue::DefaultedFn {}, + ty::AssocKind::Const => ImplAssocItemValue::DefaultedConst {}, + } + } + }; + + ImplAssocItem { + name: decl_assoc.name.sinto(s), + value, + required_impl_exprs, + decl_def, + } + }) + .collect(); + assert!(item_map.is_empty()); + FullDefKind::TraitImpl { + generics, + predicates, + trait_pred, + required_impl_exprs, + items, + } + } + } +} + /// This normalizes trait clauses before calling `sinto` on them. This is a bit of a hack required /// by charon for now. 
We can't normalize all clauses as this would lose region information in /// outlives clauses. @@ -401,8 +795,7 @@ fn get_generic_predicates<'tcx, S: UnderOwnerState<'tcx>>( s: &S, def_id: RDefId, ) -> GenericPredicates { - // We use `predicates_defined_on` to skip the implied `Self` clause. - let predicates = s.base().tcx.predicates_defined_on(def_id); + let predicates = predicates_defined_on(s.base().tcx, def_id); let pred_list = normalize_trait_clauses(s, predicates.predicates); GenericPredicates { parent: predicates.parent.sinto(s), diff --git a/frontend/exporter/src/types/new/mod.rs b/frontend/exporter/src/types/new/mod.rs index c53569f9b..d6c48f609 100644 --- a/frontend/exporter/src/types/new/mod.rs +++ b/frontend/exporter/src/types/new/mod.rs @@ -6,9 +6,11 @@ mod impl_infos; mod item_attributes; mod predicate_id; mod typed_constant_kind; +mod variant_infos; pub use full_def::*; pub use impl_infos::*; pub use item_attributes::*; pub use predicate_id::*; pub use typed_constant_kind::*; +pub use variant_infos::*; diff --git a/frontend/exporter/src/types/new/variant_infos.rs b/frontend/exporter/src/types/new/variant_infos.rs new file mode 100644 index 000000000..d43f1b8d5 --- /dev/null +++ b/frontend/exporter/src/types/new/variant_infos.rs @@ -0,0 +1,35 @@ +use crate::prelude::*; +use crate::sinto_as_usize; + +/// Describe the kind of a variant +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum VariantKind { + /// The variant is the only variant of a `struct` type + Struct { + /// Are the fields on this struct all named? + named: bool, + }, + /// The variant is the only variant of a `union` type + Union, + /// The variant is one of the many variants of a `enum` type + Enum { + /// The index of this variant in the `enum` + index: VariantIdx, + /// Are the fields on this struct all named? 
+ named: bool, + }, +} + +sinto_as_usize!(rustc_target::abi, VariantIdx); + +/// Describe a variant +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct VariantInformations { + pub type_namespace: DefId, + + pub typ: DefId, + pub variant: DefId, + pub kind: VariantKind, +} diff --git a/frontend/exporter/src/types/replaced.rs b/frontend/exporter/src/types/replaced.rs deleted file mode 100644 index fb4731238..000000000 --- a/frontend/exporter/src/types/replaced.rs +++ /dev/null @@ -1,23 +0,0 @@ -pub type Path = Vec; -pub type Mutability = bool; - -#[cfg(feature = "rustc")] -mod rustc { - use super::*; - use crate::prelude::*; - - impl<'t, S> SInto for rustc_span::symbol::Symbol { - fn sinto(&self, _s: &S) -> Symbol { - self.to_ident_string() - } - } - - impl SInto for rustc_hir::Mutability { - fn sinto(&self, _s: &S) -> Mutability { - match self { - rustc_hir::Mutability::Mut => true, - _ => false, - } - } - } -} diff --git a/frontend/exporter/src/types/span.rs b/frontend/exporter/src/types/span.rs new file mode 100644 index 000000000..a179ca54e --- /dev/null +++ b/frontend/exporter/src/types/span.rs @@ -0,0 +1,252 @@ +use crate::prelude::*; +use crate::sinto_todo; + +/// Reflects [`rustc_span::Loc`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct Loc { + pub line: usize, + pub col: usize, +} + +/// Reflects [`rustc_span::hygiene::DesugaringKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(, from: rustc_span::hygiene::DesugaringKind, state: S as _s)] +pub enum DesugaringKind { + CondTemporary, + QuestionMark, + TryBlock, + YeetExpr, + OpaqueTy, + Async, + Await, + ForLoop, + WhileLoop, + BoundModifier, +} + +/// Reflects [`rustc_span::hygiene::AstPass`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(, from: rustc_span::hygiene::AstPass, state: S as _s)] +pub 
enum AstPass { + StdImports, + TestHarness, + ProcMacroHarness, +} + +/// Reflects [`rustc_span::hygiene::MacroKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(, from: rustc_span::hygiene::MacroKind, state: S as _s)] +pub enum MacroKind { + Bang, + Attr, + Derive, +} + +/// Reflects [`rustc_span::hygiene::ExpnKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_span::hygiene::ExpnKind, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum ExpnKind { + Root, + Macro(MacroKind, Symbol), + AstPass(AstPass), + Desugaring(DesugaringKind), +} + +/// Reflects [`rustc_span::edition::Edition`] +#[derive(AdtInto)] +#[args(, from: rustc_span::edition::Edition, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum Edition { + Edition2015, + Edition2018, + Edition2021, + Edition2024, +} + +/// Reflects [`rustc_span::hygiene::ExpnData`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_span::hygiene::ExpnData, state: S as state)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct ExpnData { + pub kind: ExpnKind, + // pub parent: Box, + pub call_site: Span, + pub def_site: Span, + #[map(x.as_ref().map(|x| x.clone().iter().map(|x|x.sinto(state)).collect()))] + pub allow_internal_unstable: Option>, + pub edition: Edition, + pub macro_def_id: Option, + pub parent_module: Option, + pub local_inner_macros: bool, +} + +/// Reflects [`rustc_span::Span`] +#[derive(::serde::Serialize, ::serde::Deserialize, Clone, Debug, JsonSchema, Eq, Ord)] +pub struct Span { + pub lo: Loc, + pub hi: Loc, + pub filename: FileName, + /// Original rustc span; can be useful for reporting rustc + /// diagnostics (this is used in Charon) + #[cfg(feature = "rustc")] + #[serde(skip)] + pub rust_span_data: Option, + #[cfg(not(feature = "rustc"))] + #[serde(skip)] + pub rust_span_data: Option<()>, + // expn_backtrace: Vec, 
+} + +const _: () = { + // `rust_span_data` is a metadata that should *not* be taken into + // account while hashing or comparing + + impl std::hash::Hash for Span { + fn hash(&self, state: &mut H) { + self.lo.hash(state); + self.hi.hash(state); + self.filename.hash(state); + } + } + impl PartialEq for Span { + fn eq(&self, other: &Self) -> bool { + self.lo == other.lo && self.hi == other.hi && self.filename == other.filename + } + } + + impl PartialOrd for Span { + fn partial_cmp(&self, other: &Self) -> Option { + Some( + self.lo.partial_cmp(&other.lo)?.then( + self.hi + .partial_cmp(&other.hi)? + .then(self.filename.partial_cmp(&other.filename)?), + ), + ) + } + } +}; + +#[cfg(feature = "rustc")] +impl From for Loc { + fn from(val: rustc_span::Loc) -> Self { + Loc { + line: val.line, + col: val.col_display, + } + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: BaseState<'tcx>> SInto for rustc_span::Span { + fn sinto(&self, s: &S) -> Span { + if let Some(span) = s.with_global_cache(|cache| cache.spans.get(self).cloned()) { + return span; + } + let span = translate_span(*self, s.base().tcx.sess); + s.with_global_cache(|cache| cache.spans.insert(*self, span.clone())); + span + } +} + +/// Reflects [`rustc_span::source_map::Spanned`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Spanned { + pub node: T, + pub span: Span, +} +#[cfg(feature = "rustc")] +impl<'s, S: UnderOwnerState<'s>, T: SInto, U> SInto> + for rustc_span::source_map::Spanned +{ + fn sinto<'a>(&self, s: &S) -> Spanned { + Spanned { + node: self.node.sinto(s), + span: self.span.sinto(s), + } + } +} + +impl<'tcx, S> SInto for PathBuf { + fn sinto(&self, _: &S) -> PathBuf { + self.clone() + } +} + +/// Reflects [`rustc_span::RealFileName`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[args(, from: rustc_span::RealFileName, state: S as _s)] +pub enum RealFileName { + LocalPath(PathBuf), + Remapped { + 
local_path: Option, + virtual_name: PathBuf, + }, +} + +#[cfg(feature = "rustc")] +impl SInto for rustc_data_structures::stable_hasher::Hash64 { + fn sinto(&self, _: &S) -> u64 { + self.as_u64() + } +} + +/// Reflects [`rustc_span::FileName`] +#[derive(AdtInto)] +#[args(, from: rustc_span::FileName, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum FileName { + Real(RealFileName), + QuoteExpansion(u64), + Anon(u64), + MacroExpansion(u64), + ProcMacroSourceCode(u64), + CliCrateAttr(u64), + Custom(String), + // #[map(FileName::DocTest(x.0.to_str().unwrap().into()))] + #[custom_arm(FROM_TYPE::DocTest(x, _) => TO_TYPE::DocTest(x.to_str().unwrap().into()),)] + DocTest(String), + InlineAsm(u64), +} + +impl FileName { + pub fn to_string(&self) -> String { + match self { + Self::Real(RealFileName::LocalPath(path)) + | Self::Real(RealFileName::Remapped { + local_path: Some(path), + .. + }) + | Self::Real(RealFileName::Remapped { + virtual_name: path, .. + }) => format!("{}", path.display()), + _ => format!("{:?}", self), + } + } + pub fn to_path(&self) -> Option<&std::path::Path> { + match self { + Self::Real(RealFileName::LocalPath(path)) + | Self::Real(RealFileName::Remapped { + local_path: Some(path), + .. + }) + | Self::Real(RealFileName::Remapped { + virtual_name: path, .. + }) => Some(path), + _ => None, + } + } +} + +sinto_todo!(rustc_span, ErrorGuaranteed); diff --git a/frontend/exporter/src/types/thir.rs b/frontend/exporter/src/types/thir.rs new file mode 100644 index 000000000..381620a76 --- /dev/null +++ b/frontend/exporter/src/types/thir.rs @@ -0,0 +1,903 @@ +//! Copies of the relevant `THIR` types. THIR represents a HIR (function) body augmented with type +//! information and lightly desugared. 
+use crate::prelude::*; + +#[cfg(feature = "rustc")] +use rustc_middle::thir; + +/// Reflects [`thir::LogicalOp`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'a, S>, from: thir::LogicalOp, state: S as _s)] +pub enum LogicalOp { + And, + Or, +} + +/// Reflects [`thir::LintLevel`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'slt, S: UnderOwnerState<'slt> + HasThir<'slt>>, from: thir::LintLevel, state: S as gstate)] +pub enum LintLevel { + Inherited, + Explicit(HirId), +} + +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::FruInfo<'tcx>, state: S as gstate)] +/// Field Record Update (FRU) informations, this reflects [`thir::FruInfo`] +pub struct FruInfo { + /// The base, e.g. `Foo {x: 1, .. base}` + pub base: Expr, + pub field_types: Vec, +} + +/// A field expression: a field name along with a value +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct FieldExpr { + pub field: DefId, + pub value: Expr, +} + +/// Reflects [`thir::AdtExpr`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct AdtExpr { + pub info: VariantInformations, + pub user_ty: Option, + pub fields: Vec, + pub base: Option, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: ExprState<'tcx>> SInto for thir::AdtExpr<'tcx> { + fn sinto(&self, s: &S) -> AdtExpr { + let variants = self.adt_def.variants(); + let variant: &rustc_middle::ty::VariantDef = &variants[self.variant_index]; + AdtExpr { + info: get_variant_information(&self.adt_def, self.variant_index, s), + fields: self + .fields + .iter() + .map(|f| FieldExpr { + field: variant.fields[f.name].did.sinto(s), + value: f.expr.sinto(s), + }) + .collect(), + base: self.base.sinto(s), + user_ty: self.user_ty.sinto(s), + } + } +} + +/// Reflects [`thir::LocalVarId`] 
+#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct LocalIdent { + pub name: String, + pub id: HirId, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for thir::LocalVarId { + fn sinto(&self, s: &S) -> LocalIdent { + LocalIdent { + name: s + .base() + .local_ctx + .borrow() + .vars + .get(self) + .s_unwrap(s) + .to_string(), + id: self.0.sinto(s), + } + } +} + +/// Reflects [`thir::BlockSafety`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S>, from: thir::BlockSafety, state: S as _s)] +pub enum BlockSafety { + Safe, + BuiltinUnsafe, + #[custom_arm(FROM_TYPE::ExplicitUnsafe{..} => BlockSafety::ExplicitUnsafe,)] + ExplicitUnsafe, +} + +/// Reflects [`thir::Block`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Block, state: S as gstate)] +pub struct Block { + pub targeted_by_break: bool, + pub region_scope: Scope, + pub span: Span, + pub stmts: Vec, + pub expr: Option, + pub safety_mode: BlockSafety, +} + +/// Reflects [`thir::Stmt`] +#[derive(AdtInto)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Stmt<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Stmt { + pub kind: StmtKind, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: ExprState<'tcx>> SInto for thir::BlockId { + fn sinto(&self, s: &S) -> Block { + s.thir().blocks[*self].sinto(s) + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: ExprState<'tcx>> SInto for thir::StmtId { + fn sinto(&self, s: &S) -> Stmt { + s.thir().stmts[*self].sinto(s) + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: ExprState<'tcx>> SInto for thir::Expr<'tcx> { + fn sinto(&self, s: &S) -> Expr { + let (hir_id, attributes) = self.hir_id_and_attributes(s); + let hir_id = hir_id.map(|hir_id| hir_id.index()); + let unrolled = self.unroll_scope(s); + let thir::Expr { span, kind, ty, .. 
} = unrolled; + let contents = match macro_invocation_of_span(span, s).map(ExprKind::MacroInvokation) { + Some(contents) => contents, + None => match kind { + // Introduce intermediate `Cast` from `T` to `U` when casting from a `#[repr(T)]` enum to `U` + thir::ExprKind::Cast { source } => { + if let rustc_middle::ty::TyKind::Adt(adt, _) = s.thir().exprs[source].ty.kind() + { + let tcx = s.base().tcx; + let contents = kind.sinto(s); + let repr_type = if adt.is_enum() { + use crate::rustc_middle::ty::util::IntTypeExt; + adt.repr().discr_type().to_ty(tcx) + } else { + ty + }; + if repr_type == ty { + contents + } else { + ExprKind::Cast { + source: Decorated { + ty: repr_type.sinto(s), + span: span.sinto(s), + contents: Box::new(contents), + hir_id, + attributes: vec![], + }, + } + } + } else { + kind.sinto(s) + } + } + thir::ExprKind::NonHirLiteral { lit, .. } => { + let cexpr: ConstantExpr = + (ConstantExprKind::Literal(scalar_int_to_constant_literal(s, lit, ty))) + .decorate(ty.sinto(s), span.sinto(s)); + return cexpr.into(); + } + thir::ExprKind::ZstLiteral { .. } => { + if ty.is_phantom_data() { + let rustc_middle::ty::Adt(def, _) = ty.kind() else { + supposely_unreachable_fatal!(s[span], "PhantomDataNotAdt"; {kind, ty}) + }; + let adt_def = AdtExpr { + info: get_variant_information(def, rustc_target::abi::FIRST_VARIANT, s), + user_ty: None, + base: None, + fields: vec![], + }; + return Expr { + contents: Box::new(ExprKind::Adt(adt_def)), + span: self.span.sinto(s), + ty: ty.sinto(s), + hir_id, + attributes, + }; + } + let def_id = match ty.kind() { + rustc_middle::ty::Adt(adt_def, generics) => { + // Here, we should only get `struct Name;` structs. + s_assert!(s, adt_def.variants().len() == 1); + s_assert!(s, generics.is_empty()); + adt_def.did() + } + rustc_middle::ty::TyKind::FnDef(def_id, _generics) => *def_id, + ty_kind => { + let ty_kind = ty_kind.sinto(s); + supposely_unreachable_fatal!( + s[span], + "ZstLiteral ty≠FnDef(...) 
or PhantomData or naked Struct"; + {kind, span, ty, ty_kind} + ); + } + }; + let tcx = s.base().tcx; + let constructor = if tcx.is_constructor(def_id) { + let adt_def = + tcx.adt_def(rustc_utils::get_closest_parent_type(&tcx, def_id)); + let variant_index = adt_def.variant_index_with_id(tcx.parent(def_id)); + Some(rustc_utils::get_variant_information( + &adt_def, + variant_index, + s, + )) + } else { + None + }; + return Expr { + contents: Box::new(ExprKind::GlobalName { + id: def_id.sinto(s), + constructor, + }), + span: self.span.sinto(s), + ty: ty.sinto(s), + hir_id, + attributes, + }; + } + thir::ExprKind::Field { + lhs, + variant_index, + name, + } => { + let lhs_ty = s.thir().exprs[lhs].ty.kind(); + let idx = variant_index.index(); + if idx != 0 { + let _ = supposely_unreachable!( + s[span], + "ExprKindFieldIdxNonZero"; { + kind, + span, + ty, + ty.kind() + } + ); + }; + match lhs_ty { + rustc_middle::ty::TyKind::Adt(adt_def, _generics) => { + let variant = adt_def.variant(variant_index); + ExprKind::Field { + field: variant.fields[name].did.sinto(s), + lhs: lhs.sinto(s), + } + } + rustc_middle::ty::TyKind::Tuple(..) 
=> ExprKind::TupleField { + field: name.index(), + lhs: lhs.sinto(s), + }, + _ => supposely_unreachable_fatal!( + s[span], + "ExprKindFieldBadTy"; { + kind, + span, + ty.kind(), + lhs_ty + } + ), + } + } + _ => kind.sinto(s), + }, + }; + Decorated { + ty: ty.sinto(s), + span: span.sinto(s), + contents: Box::new(contents), + hir_id, + attributes, + } + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: ExprState<'tcx>> SInto for thir::ExprId { + fn sinto(&self, s: &S) -> Expr { + s.thir().exprs[*self].sinto(s) + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: ExprState<'tcx>> SInto for thir::Pat<'tcx> { + fn sinto(&self, s: &S) -> Pat { + let thir::Pat { span, kind, ty } = self; + let contents = match kind { + thir::PatKind::Leaf { subpatterns } => match ty.kind() { + rustc_middle::ty::TyKind::Adt(adt_def, args) => (thir::PatKind::Variant { + adt_def: *adt_def, + args, + variant_index: rustc_target::abi::VariantIdx::from_usize(0), + subpatterns: subpatterns.clone(), + }) + .sinto(s), + rustc_middle::ty::TyKind::Tuple(..) 
=> PatKind::Tuple { + subpatterns: subpatterns + .iter() + .map(|pat| pat.pattern.clone()) + .collect::>() + .sinto(s), + }, + _ => supposely_unreachable_fatal!( + s[span], + "PatLeafNonAdtTy"; + {ty.kind(), kind} + ), + }, + _ => kind.sinto(s), + }; + Decorated { + ty: ty.sinto(s), + span: span.sinto(s), + contents: Box::new(contents), + hir_id: None, + attributes: vec![], + } + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: ExprState<'tcx>> SInto for thir::ArmId { + fn sinto(&self, s: &S) -> Arm { + s.thir().arms[*self].sinto(s) + } +} + +/// Reflects [`thir::StmtKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::StmtKind<'tcx>, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum StmtKind { + Expr { + scope: Scope, + expr: Expr, + }, + Let { + remainder_scope: Scope, + init_scope: Scope, + pattern: Pat, + initializer: Option, + else_block: Option, + lint_level: LintLevel, + #[value(attribute_from_scope(gstate, init_scope).1)] + /// The attribute on this `let` binding + attributes: Vec, + }, +} + +/// Reflects [`thir::Ascription`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: thir::Ascription<'tcx>, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Ascription { + pub annotation: CanonicalUserTypeAnnotation, + pub variance: Variance, +} + +/// Reflects [`thir::PatRange`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: thir::PatRange<'tcx>, state: S as state)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct PatRange { + pub lo: PatRangeBoundary, + pub hi: PatRangeBoundary, + pub end: RangeEnd, +} + +/// Reflects [`thir::PatRangeBoundary`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: thir::PatRangeBoundary<'tcx>, state: S as state)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub 
enum PatRangeBoundary { + Finite(ConstantExpr), + NegInfinity, + PosInfinity, +} + +/// A field pattern: a field name along with a pattern +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct FieldPat { + pub field: DefId, + pub pattern: Pat, +} + +pub type Pat = Decorated; + +/// Reflects [`thir::PatKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::PatKind<'tcx>, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +#[append(thir::PatKind::Leaf {..} => fatal!(gstate, "PatKind::Leaf: should never come up"),)] +pub enum PatKind { + Wild, + AscribeUserType { + ascription: Ascription, + subpattern: Pat, + }, + #[custom_arm( + thir::PatKind::Binding {name, mode, var, ty, subpattern, is_primary} => { + let local_ctx = gstate.base().local_ctx; + local_ctx.borrow_mut().vars.insert(*var, name.to_string()); + PatKind::Binding { + mode: mode.sinto(gstate), + var: var.sinto(gstate), + ty: ty.sinto(gstate), + subpattern: subpattern.sinto(gstate), + is_primary: is_primary.sinto(gstate), + } + } + )] + Binding { + mode: BindingMode, + var: LocalIdent, // name VS var? 
TODO + ty: Ty, + subpattern: Option, + is_primary: bool, + }, + #[custom_arm( + FROM_TYPE::Variant {adt_def, variant_index, args, subpatterns} => { + let variants = adt_def.variants(); + let variant: &rustc_middle::ty::VariantDef = &variants[*variant_index]; + TO_TYPE::Variant { + info: get_variant_information(adt_def, *variant_index, gstate), + subpatterns: subpatterns + .iter() + .map(|f| FieldPat { + field: variant.fields[f.field].did.sinto(gstate), + pattern: f.pattern.sinto(gstate), + }) + .collect(), + args: args.sinto(gstate), + } + } + )] + Variant { + info: VariantInformations, + args: Vec, + subpatterns: Vec, + }, + #[disable_mapping] + Tuple { + subpatterns: Vec, + }, + Deref { + subpattern: Pat, + }, + DerefPattern { + subpattern: Pat, + }, + Constant { + value: ConstantExpr, + }, + InlineConstant { + def: DefId, + subpattern: Pat, + }, + Range(PatRange), + Slice { + prefix: Vec, + slice: Option, + suffix: Vec, + }, + Array { + prefix: Vec, + slice: Option, + suffix: Vec, + }, + Or { + pats: Vec, + }, + Never, + Error(ErrorGuaranteed), +} + +/// Reflects [`thir::Arm`] +#[derive(AdtInto)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Arm<'tcx>, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Arm { + pub pattern: Pat, + pub guard: Option, + pub body: Expr, + pub lint_level: LintLevel, + pub scope: Scope, + pub span: Span, + #[value(attribute_from_scope(gstate, scope).1)] + attributes: Vec, +} + +/// Reflects [`thir::Param`] +#[derive(AdtInto)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Param<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Param { + pub pat: Option, + pub ty: Ty, + pub ty_span: Option, + pub self_kind: Option, + pub hir_id: Option, + #[value(hir_id.map(|id| { + s.base().tcx.hir().attrs(id).sinto(s) + }).unwrap_or(vec![]))] + /// attributes on this parameter + pub attributes: Vec, +} + +pub type ThirBody = Expr; +pub type Expr = 
Decorated; + +/// Reflects [`thir::ExprKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: ExprState<'tcx>>, from: thir::ExprKind<'tcx>, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +#[append( + thir::ExprKind::Scope {..} => { + fatal!(gstate, "Scope should have been eliminated at this point"); + }, + thir::ExprKind::Field {..} => { + fatal!(gstate, "Field should have been eliminated at this point"); + }, + thir::ExprKind::NonHirLiteral {..} => { + fatal!(gstate, "NonHirLiteral should have been eliminated at this point"); + }, +)] +pub enum ExprKind { + Box { + value: Expr, + }, + #[disable_mapping] + MacroInvokation(MacroInvokation), + /// Resugared macros calls. This is deprecated: see + /// . + If { + if_then_scope: Scope, + cond: Expr, + then: Expr, + else_opt: Option, + }, + #[map({ + let e = gstate.thir().exprs[*fun].unroll_scope(gstate); + let (generic_args, r#trait, bounds_impls); + let fun = match e.ty.kind() { + rustc_middle::ty::TyKind::FnDef(def_id, generics) => { + let (hir_id, attributes) = e.hir_id_and_attributes(gstate); + let hir_id = hir_id.map(|hir_id| hir_id.index()); + let contents = Box::new(ExprKind::GlobalName { + id: def_id.sinto(gstate), + constructor: None + }); + let mut translated_generics = generics.sinto(gstate); + let tcx = gstate.base().tcx; + r#trait = (|| { + let assoc_item = tcx.opt_associated_item(*def_id)?; + let impl_expr = self_clause_for_item(gstate, &assoc_item, generics)?; + let assoc_generics = tcx.generics_of(assoc_item.def_id); + let assoc_generics = translated_generics.drain(0..assoc_generics.parent_count).collect(); + Some((impl_expr, assoc_generics)) + })(); + generic_args = translated_generics; + bounds_impls = solve_item_required_traits(gstate, *def_id, generics); + Expr { + contents, + span: e.span.sinto(gstate), + ty: e.ty.sinto(gstate), + hir_id, + attributes, + } + }, + rustc_middle::ty::TyKind::FnPtr(..) 
=> { + generic_args = vec![]; // A function pointer has no generics + bounds_impls = vec![]; // A function pointer has no bounds + r#trait = None; // A function pointer is not a method + e.sinto(gstate) + }, + ty_kind => { + let ty_norm: Ty = gstate.base().tcx.normalize_erasing_regions(gstate.param_env(), *ty).sinto(gstate); + let ty_kind_sinto = ty_kind.sinto(gstate); + supposely_unreachable_fatal!( + gstate[e.span], + "CallNotTyFnDef"; + {e, ty_kind, ty_kind_sinto, ty_norm} + ); + } + }; + TO_TYPE::Call { + ty: ty.sinto(gstate), + args: args.sinto(gstate), + generic_args, + from_hir_call: from_hir_call.sinto(gstate), + fn_span: fn_span.sinto(gstate), + bounds_impls, + r#trait, + fun, + } + })] + /// A call to a function or a method. + /// + /// Example: `f(0i8)`, where `f` has signature `fn f(t: T) -> ()`. + Call { + /// The type of the function, substitution applied. + /// + /// Example: for the call `f(0i8)`, this is `i8 -> ()`. + ty: Ty, + /// The function itself. This can be something else than a + /// name, e.g. a closure. + /// + /// Example: for the call `f(0i8)`, this is `f`. + fun: Expr, // TODO: can [ty] and [fun.ty] be different? + /// The arguments given to the function. + /// + /// Example: for the call `f(0i8)`, this is `[0i8]`. + args: Vec, + from_hir_call: bool, + fn_span: Span, + /// The generic arguments given to the function. + /// + /// Example: for the call `f(0i8)`, this is the type `i8`. + #[not_in_source] + generic_args: Vec, + /// The implementations for the bounds of the function. + /// + /// Example: for the call `f(0i8)`, this is two implementation + /// expressions, one for the explicit bound `i8: Clone` and + /// one for the implicit `i8: Sized`. + #[not_in_source] + bounds_impls: Vec, + /// `trait` is `None` if this is a function call or a method + /// to an inherent trait. 
If this is a method call from a + /// trait `Trait`, then it contains the concrete + /// implementation of `Trait` it is called on, and the generic + /// arguments that comes from the trait declaration. + /// + /// Example: `f(0i8)` is a function call, hence the field + /// `impl` is `None`. + /// + /// Example: + /// ```ignore + /// trait MyTrait { + /// fn meth(...) {...} + /// } + /// fn example_call>(x: SelfType) { + /// x.meth::(...) + /// } + /// ``` + /// Here, in the call `x.meth::(...)`, `r#trait` will + /// be `Some((..., [SelfType, TraitType, 12]))`, and `generic_args` + /// will be `[String]`. + #[not_in_source] + r#trait: Option<(ImplExpr, Vec)>, + }, + Deref { + arg: Expr, + }, + Binary { + op: BinOp, + lhs: Expr, + rhs: Expr, + }, + LogicalOp { + op: LogicalOp, + lhs: Expr, + rhs: Expr, + }, + Unary { + op: UnOp, + arg: Expr, + }, + Cast { + source: Expr, + }, + Use { + source: Expr, + }, // Use a lexpr to get a vexpr. + NeverToAny { + source: Expr, + }, + PointerCoercion { + cast: PointerCoercion, + source: Expr, + }, + Loop { + body: Expr, + }, + Match { + scrutinee: Expr, + arms: Vec, + }, + Let { + expr: Expr, + pat: Pat, + }, + Block { + #[serde(flatten)] + block: Block, + }, + Assign { + lhs: Expr, + rhs: Expr, + }, + AssignOp { + op: BinOp, + lhs: Expr, + rhs: Expr, + }, + #[disable_mapping] + Field { + field: DefId, + lhs: Expr, + }, + + #[disable_mapping] + TupleField { + field: usize, + lhs: Expr, + }, + Index { + lhs: Expr, + index: Expr, + }, + VarRef { + id: LocalIdent, + }, + #[disable_mapping] + ConstRef { + id: ParamConst, + }, + #[disable_mapping] + GlobalName { + id: GlobalIdent, + constructor: Option, + }, + UpvarRef { + closure_def_id: DefId, + var_hir_id: LocalIdent, + }, + Borrow { + borrow_kind: BorrowKind, + arg: Expr, + }, + RawBorrow { + mutability: Mutability, + arg: Expr, + }, + Break { + label: Scope, + value: Option, + }, + Continue { + label: Scope, + }, + Return { + value: Option, + }, + ConstBlock { + did: DefId, + 
args: Vec, + }, + Repeat { + value: Expr, + count: ConstantExpr, + }, + Array { + fields: Vec, + }, + Tuple { + fields: Vec, + }, + Adt(AdtExpr), + PlaceTypeAscription { + source: Expr, + user_ty: Option, + }, + ValueTypeAscription { + source: Expr, + user_ty: Option, + }, + #[custom_arm(FROM_TYPE::Closure(e) => { + let (thir, expr_entrypoint) = get_thir(e.closure_id, gstate); + let s = &State::from_thir(gstate.base(), gstate.owner_id(), thir.clone()); + TO_TYPE::Closure { + params: thir.params.raw.sinto(s), + body: expr_entrypoint.sinto(s), + upvars: e.upvars.sinto(gstate), + movability: e.movability.sinto(gstate) + } + }, + )] + Closure { + params: Vec, + body: Expr, + upvars: Vec, + movability: Option, + }, + Literal { + lit: Spanned, + neg: bool, // TODO + }, + //zero space type + // This is basically used for functions! e.g. `::from` + ZstLiteral { + user_ty: Option, + }, + NamedConst { + def_id: GlobalIdent, + args: Vec, + user_ty: Option, + #[not_in_source] + #[value({ + let tcx = gstate.base().tcx; + tcx.opt_associated_item(*def_id).as_ref().and_then(|assoc| { + self_clause_for_item(gstate, assoc, args) + }) + })] + r#impl: Option, + }, + ConstParam { + param: ParamConst, + def_id: GlobalIdent, + }, + StaticRef { + alloc_id: u64, + ty: Ty, + def_id: GlobalIdent, + }, + Yield { + value: Expr, + }, + #[todo] + Todo(String), +} + +#[cfg(feature = "rustc")] +pub trait ExprKindExt<'tcx> { + fn hir_id_and_attributes>( + &self, + s: &S, + ) -> (Option, Vec); + fn unroll_scope + HasThir<'tcx>>(&self, s: &S) -> thir::Expr<'tcx>; +} + +#[cfg(feature = "rustc")] +impl<'tcx> ExprKindExt<'tcx> for thir::Expr<'tcx> { + fn hir_id_and_attributes>( + &self, + s: &S, + ) -> (Option, Vec) { + match &self.kind { + thir::ExprKind::Scope { + region_scope: scope, + .. + } => attribute_from_scope(s, scope), + _ => (None, vec![]), + } + } + fn unroll_scope + HasThir<'tcx>>(&self, s: &S) -> thir::Expr<'tcx> { + // TODO: when we see a loop, we should lookup its label! 
label is actually a scope id + // we remove scopes here, whence the TODO + match self.kind { + thir::ExprKind::Scope { value, .. } => s.thir().exprs[value].unroll_scope(s), + _ => self.clone(), + } + } +} diff --git a/frontend/exporter/src/types/todo.rs b/frontend/exporter/src/types/todo.rs deleted file mode 100644 index c164478e1..000000000 --- a/frontend/exporter/src/types/todo.rs +++ /dev/null @@ -1,21 +0,0 @@ -use crate::prelude::*; -use crate::sinto_todo; -sinto_todo!(rustc_middle::ty, ScalarInt); -sinto_todo!(rustc_middle::ty, AdtFlags); -sinto_todo!(rustc_middle::ty, NormalizesTo<'tcx>); -sinto_todo!(rustc_abi, IntegerType); -sinto_todo!(rustc_abi, ReprFlags); -sinto_todo!(rustc_abi, Align); -sinto_todo!(rustc_middle::mir::interpret, ConstAllocation<'a>); -sinto_todo!(rustc_middle::mir, UnwindTerminateReason); -sinto_todo!(rustc_ast::tokenstream, DelimSpan); -sinto_todo!(rustc_ast::tokenstream, DelimSpacing); -sinto_todo!(rustc_hir::def, DefKind); -sinto_todo!(rustc_hir, GenericArgs<'a> as HirGenericArgs); -sinto_todo!(rustc_hir, InlineAsm<'a>); -sinto_todo!(rustc_target::spec::abi, Abi); -sinto_todo!(rustc_hir, MissingLifetimeKind); -sinto_todo!(rustc_hir, QPath<'tcx>); -sinto_todo!(rustc_hir, WhereRegionPredicate<'tcx>); -sinto_todo!(rustc_hir, WhereEqPredicate<'tcx>); -sinto_todo!(rustc_hir, OwnerId); diff --git a/frontend/exporter/src/types/ty.rs b/frontend/exporter/src/types/ty.rs new file mode 100644 index 000000000..c254bb0b7 --- /dev/null +++ b/frontend/exporter/src/types/ty.rs @@ -0,0 +1,1409 @@ +//! Copies of the relevant type-level types. These are semantically-rich representations of +//! type-level concepts such as types and trait references. +use crate::prelude::*; +use crate::sinto_as_usize; +use crate::sinto_todo; +use std::sync::Arc; + +#[cfg(feature = "rustc")] +use rustc_middle::ty; + +/// Generic container for decorating items with a type, a span, +/// attributes and other meta-data. 
+#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Decorated { + pub ty: Ty, + pub span: Span, + pub contents: Box, + pub hir_id: Option<(usize, usize)>, + pub attributes: Vec, +} + +/// Reflects [`rustc_middle::infer::canonical::CanonicalTyVarKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::infer::canonical::CanonicalTyVarKind, state: S as gstate)] +pub enum CanonicalTyVarKind { + General(UniverseIndex), + Int, + Float, +} + +sinto_as_usize!(rustc_middle::ty, UniverseIndex); + +/// Reflects [`ty::ParamTy`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ParamTy, state: S as gstate)] +pub struct ParamTy { + pub index: u32, + pub name: Symbol, +} + +/// Reflects [`ty::ParamConst`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(, from: ty::ParamConst, state: S as gstate)] +pub struct ParamConst { + pub index: u32, + pub name: Symbol, +} + +/// A predicate without `Self`, for use in `dyn Trait`. +/// +/// Reflects [`ty::ExistentialPredicate`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ExistentialPredicate<'tcx>, state: S as state)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum ExistentialPredicate { + /// E.g. `From`. Note that this isn't `T: From` with a given `T`, this is just + /// `From`. Could be written `?: From`. + Trait(ExistentialTraitRef), + /// E.g. `Iterator::Item = u64`. Could be written `::Item = u64`. + Projection(ExistentialProjection), + /// E.g. `Send`. 
+ AutoTrait(DefId), +} + +/// Reflects [`rustc_type_ir::ExistentialTraitRef`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialTraitRef>, state: S as state)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct ExistentialTraitRef { + pub def_id: DefId, + pub args: Vec, +} + +/// Reflects [`rustc_type_ir::ExistentialProjection`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialProjection>, state: S as state)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct ExistentialProjection { + pub def_id: DefId, + pub args: Vec, + pub term: Term, +} + +/// Reflects [`ty::DynKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(, from: ty::DynKind, state: S as _s)] +pub enum DynKind { + Dyn, + DynStar, +} + +/// Reflects [`ty::BoundTyKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundTyKind, state: S as gstate)] +pub enum BoundTyKind { + Anon, + Param(DefId, Symbol), +} + +/// Reflects [`ty::BoundTy`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundTy, state: S as gstate)] +pub struct BoundTy { + pub var: BoundVar, + pub kind: BoundTyKind, +} + +sinto_as_usize!(rustc_middle::ty, BoundVar); + +/// Reflects [`ty::BoundRegionKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundRegionKind, state: S as gstate)] +pub enum BoundRegionKind { + BrAnon, + BrNamed(DefId, Symbol), + BrEnv, +} + +/// Reflects [`ty::BoundRegion`] 
+#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundRegion, state: S as gstate)] +pub struct BoundRegion { + pub var: BoundVar, + pub kind: BoundRegionKind, +} + +/// Reflects [`ty::PlaceholderRegion`] +pub type PlaceholderRegion = Placeholder; +/// Reflects [`ty::PlaceholderConst`] +pub type PlaceholderConst = Placeholder; +/// Reflects [`ty::PlaceholderType`] +pub type PlaceholderType = Placeholder; + +/// Reflects [`ty::Placeholder`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Placeholder { + pub universe: UniverseIndex, + pub bound: T, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>, T: SInto, U> SInto> + for ty::Placeholder +{ + fn sinto(&self, s: &S) -> Placeholder { + Placeholder { + universe: self.universe.sinto(s), + bound: self.bound.sinto(s), + } + } +} + +/// Reflects [`rustc_middle::infer::canonical::Canonical`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct Canonical { + pub max_universe: UniverseIndex, + pub variables: Vec, + pub value: T, +} +/// Reflects [`ty::CanonicalUserType`] +pub type CanonicalUserType = Canonical; + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>, T: SInto, U> SInto> + for rustc_middle::infer::canonical::Canonical<'tcx, T> +{ + fn sinto(&self, s: &S) -> Canonical { + Canonical { + max_universe: self.max_universe.sinto(s), + variables: self.variables.iter().map(|v| v.kind.sinto(s)).collect(), + value: self.value.sinto(s), + } + } +} + +/// Reflects [`rustc_middle::infer::canonical::CanonicalVarKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::infer::canonical::CanonicalVarKind>, state: S as gstate)] +pub enum CanonicalVarInfo { + Ty(CanonicalTyVarKind), + 
PlaceholderTy(PlaceholderType), + Region(UniverseIndex), + PlaceholderRegion(PlaceholderRegion), + Const(UniverseIndex), + PlaceholderConst(PlaceholderConst), + Effect, +} + +/// Reflects [`ty::UserSelfTy`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserSelfTy<'tcx>, state: S as gstate)] +pub struct UserSelfTy { + pub impl_def_id: DefId, + pub self_ty: Ty, +} + +/// Reflects [`ty::UserArgs`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserArgs<'tcx>, state: S as gstate)] +pub struct UserArgs { + pub args: Vec, + pub user_self_ty: Option, +} + +/// Reflects [`ty::UserType`]: this is currently +/// disabled, and everything is printed as debug in the +/// [`UserType::Todo`] variant. +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserType<'tcx>, state: S as _s)] +pub enum UserType { + // TODO: for now, we don't use user types at all. + // We disable it for now, since it cause the following to fail: + // + // pub const MY_VAL: u16 = 5; + // pub type Alias = MyStruct; // Using the literal 5, it goes through + // + // pub struct MyStruct {} + // + // impl MyStruct { + // pub const MY_CONST: u16 = VAL; + // } + // + // pub fn do_something() -> u32 { + // u32::from(Alias::MY_CONST) + // } + // + // In this case, we get a [ty::ConstKind::Bound] in + // [do_something], which we are not able to translate. 
+ // See: https://github.com/hacspec/hax/pull/209 + + // Ty(Ty), + // TypeOf(DefId, UserArgs), + #[todo] + Todo(String), +} + +/// Reflects [`ty::VariantDiscr`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::VariantDiscr, state: S as gstate)] +pub enum DiscriminantDefinition { + Explicit(DefId), + Relative(u32), +} + +/// Reflects [`ty::util::Discr`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::util::Discr<'tcx>, state: S as gstate)] +pub struct DiscriminantValue { + pub val: u128, + pub ty: Ty, +} + +/// Reflects [`ty::Visibility`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum Visibility { + Public, + Restricted(Id), +} + +#[cfg(feature = "rustc")] +impl, U> SInto> for ty::Visibility { + fn sinto(&self, s: &S) -> Visibility { + use ty::Visibility as T; + match self { + T::Public => Visibility::Public, + T::Restricted(id) => Visibility::Restricted(id.sinto(s)), + } + } +} + +/// Reflects [`ty::FieldDef`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct FieldDef { + pub did: DefId, + /// Field definition of [tuple + /// structs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types) + /// are anonymous, in that case `name` is [`None`]. 
+ pub name: Option, + pub vis: Visibility, + pub ty: Ty, + pub span: Span, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::FieldDef { + fn sinto(&self, s: &S) -> FieldDef { + let tcx = s.base().tcx; + let ty = { + let generics = ty::GenericArgs::identity_for_item(tcx, self.did); + self.ty(tcx, generics).sinto(s) + }; + let name = { + let name = self.name.sinto(s); + let is_user_provided = { + // SH: Note that the only way I found of checking if the user wrote the name or if it + // is just an integer generated by rustc is by checking if it is just made of + // numerals... + name.parse::().is_err() + }; + is_user_provided.then_some(name) + }; + + FieldDef { + did: self.did.sinto(s), + name, + vis: self.vis.sinto(s), + ty, + span: tcx.def_span(self.did).sinto(s), + } + } +} + +/// Reflects [`ty::VariantDef`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct VariantDef { + pub def_id: DefId, + pub ctor: Option<(CtorKind, DefId)>, + pub name: Symbol, + pub discr_def: DiscriminantDefinition, + pub discr_val: DiscriminantValue, + /// The definitions of the fields on this variant. In case of + /// [tuple + /// structs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types), + /// the fields are anonymous, otherwise fields are named. 
+ pub fields: Vec, + /// Span of the definition of the variant + pub span: Span, +} + +#[cfg(feature = "rustc")] +impl VariantDef { + fn sfrom<'tcx, S: UnderOwnerState<'tcx>>( + s: &S, + def: &ty::VariantDef, + discr_val: ty::util::Discr<'tcx>, + ) -> Self { + VariantDef { + def_id: def.def_id.sinto(s), + ctor: def.ctor.sinto(s), + name: def.name.sinto(s), + discr_def: def.discr.sinto(s), + discr_val: discr_val.sinto(s), + fields: def.fields.raw.sinto(s), + span: s.base().tcx.def_span(def.def_id).sinto(s), + } + } +} + +/// Reflects [`ty::EarlyParamRegion`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::EarlyParamRegion, state: S as gstate)] +pub struct EarlyParamRegion { + pub index: u32, + pub name: Symbol, +} + +/// Reflects [`ty::LateParamRegion`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::LateParamRegion, state: S as gstate)] +pub struct LateParamRegion { + pub scope: DefId, + pub bound_region: BoundRegionKind, +} + +/// Reflects [`ty::RegionKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::RegionKind<'tcx>, state: S as gstate)] +pub enum RegionKind { + ReEarlyParam(EarlyParamRegion), + ReBound(DebruijnIndex, BoundRegion), + ReLateParam(LateParamRegion), + ReStatic, + ReVar(RegionVid), + RePlaceholder(PlaceholderRegion), + ReErased, + ReError(ErrorGuaranteed), +} + +sinto_as_usize!(rustc_middle::ty, DebruijnIndex); +sinto_as_usize!(rustc_middle::ty, RegionVid); + +/// Reflects [`ty::Region`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::Region<'tcx>, state: S as s)] +pub struct Region 
{ + #[value(self.kind().sinto(s))] + pub kind: RegionKind, +} + +/// Reflects both [`ty::GenericArg`] and [`ty::GenericArgKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericArgKind<'tcx>, state: S as s)] +pub enum GenericArg { + Lifetime(Region), + Type(Ty), + Const(ConstantExpr), +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::GenericArg<'tcx> { + fn sinto(&self, s: &S) -> GenericArg { + self.unpack().sinto(s) + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto> for ty::GenericArgsRef<'tcx> { + fn sinto(&self, s: &S) -> Vec { + self.iter().map(|v| v.unpack().sinto(s)).collect() + } +} + +/// Reflects both [`ty::GenericArg`] and [`ty::GenericArgKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitIntType, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum LitIntType { + Signed(IntTy), + Unsigned(UintTy), + Unsuffixed, +} + +/// Reflects partially [`ty::InferTy`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S>, from: ty::InferTy, state: S as gstate)] +pub enum InferTy { + #[custom_arm(FROM_TYPE::TyVar(..) => TO_TYPE::TyVar,)] + TyVar, /*TODO?*/ + #[custom_arm(FROM_TYPE::IntVar(..) => TO_TYPE::IntVar,)] + IntVar, /*TODO?*/ + #[custom_arm(FROM_TYPE::FloatVar(..) 
=> TO_TYPE::FloatVar,)] + FloatVar, /*TODO?*/ + FreshTy(u32), + FreshIntTy(u32), + FreshFloatTy(u32), +} + +/// Reflects [`rustc_type_ir::IntTy`] +#[derive(AdtInto)] +#[args(, from: rustc_type_ir::IntTy, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum IntTy { + Isize, + I8, + I16, + I32, + I64, + I128, +} + +/// Reflects [`rustc_type_ir::FloatTy`] +#[derive(AdtInto)] +#[args(, from: rustc_type_ir::FloatTy, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum FloatTy { + F16, + F32, + F64, + F128, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S> SInto for rustc_ast::ast::FloatTy { + fn sinto(&self, _: &S) -> FloatTy { + use rustc_ast::ast::FloatTy as T; + match self { + T::F16 => FloatTy::F16, + T::F32 => FloatTy::F32, + T::F64 => FloatTy::F64, + T::F128 => FloatTy::F128, + } + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S> SInto for rustc_ast::ast::IntTy { + fn sinto(&self, _: &S) -> IntTy { + use rustc_ast::ast::IntTy as T; + match self { + T::Isize => IntTy::Isize, + T::I8 => IntTy::I8, + T::I16 => IntTy::I16, + T::I32 => IntTy::I32, + T::I64 => IntTy::I64, + T::I128 => IntTy::I128, + } + } +} +#[cfg(feature = "rustc")] +impl<'tcx, S> SInto for rustc_ast::ast::UintTy { + fn sinto(&self, _: &S) -> UintTy { + use rustc_ast::ast::UintTy as T; + match self { + T::Usize => UintTy::Usize, + T::U8 => UintTy::U8, + T::U16 => UintTy::U16, + T::U32 => UintTy::U32, + T::U64 => UintTy::U64, + T::U128 => UintTy::U128, + } + } +} + +/// Reflects [`rustc_type_ir::UintTy`] +#[derive(AdtInto)] +#[args(, from: rustc_type_ir::UintTy, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum UintTy { + Usize, + U8, + U16, + U32, + U64, + U128, +} + +impl ToString for IntTy { + fn to_string(&self) -> String { + use IntTy::*; + match 
self { + Isize => "isize".to_string(), + I8 => "i8".to_string(), + I16 => "i16".to_string(), + I32 => "i32".to_string(), + I64 => "i64".to_string(), + I128 => "i128".to_string(), + } + } +} + +impl ToString for UintTy { + fn to_string(&self) -> String { + use UintTy::*; + match self { + Usize => "usize".to_string(), + U8 => "u8".to_string(), + U16 => "u16".to_string(), + U32 => "u32".to_string(), + U64 => "u64".to_string(), + U128 => "u128".to_string(), + } + } +} + +/// Reflects [`ty::TypeAndMut`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TypeAndMut<'tcx>, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct TypeAndMut { + pub ty: Box, + pub mutbl: Mutability, +} + +#[cfg(feature = "rustc")] +impl> SInto> for ty::List { + fn sinto(&self, s: &S) -> Vec { + self.iter().map(|x| x.sinto(s)).collect() + } +} + +/// Reflects [`ty::GenericParamDef`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericParamDef, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct GenericParamDef { + pub name: Symbol, + pub def_id: DefId, + pub index: u32, + pub pure_wrt_drop: bool, + #[value( + match self.kind { + ty::GenericParamDefKind::Lifetime => GenericParamDefKind::Lifetime, + ty::GenericParamDefKind::Type { has_default, synthetic } => GenericParamDefKind::Type { has_default, synthetic }, + ty::GenericParamDefKind::Const { has_default, is_host_effect, .. 
} => { + let ty = s.base().tcx.type_of(self.def_id).instantiate_identity().sinto(s); + GenericParamDefKind::Const { has_default, is_host_effect, ty } + }, + } + )] + pub kind: GenericParamDefKind, +} + +/// Reflects [`ty::GenericParamDefKind`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum GenericParamDefKind { + Lifetime, + Type { + has_default: bool, + synthetic: bool, + }, + Const { + has_default: bool, + is_host_effect: bool, + ty: Ty, + }, +} + +/// Reflects [`ty::Generics`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::Generics, state: S as state)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct TyGenerics { + pub parent: Option, + pub parent_count: usize, + #[from(own_params)] + pub params: Vec, + // pub param_def_id_to_index: FxHashMap, + pub has_self: bool, + pub has_late_bound_regions: Option, +} + +/// This type merges the information from +/// `rustc_type_ir::AliasKind` and `ty::AliasTy` +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Alias { + pub kind: AliasKind, + pub args: Vec, + pub def_id: DefId, +} + +/// Reflects [`ty::AliasKind`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum AliasKind { + /// The projection of a trait type: `>::Type<...>` + Projection { + /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`. + impl_expr: ImplExpr, + /// The `Type` in `Ty: Trait<..., Type = U>`. + assoc_item: AssocItem, + }, + /// An associated type in an inherent impl. + Inherent, + /// An `impl Trait` opaque type. + Opaque { + /// The real type hidden inside this opaque type. + hidden_ty: Ty, + }, + /// A type alias that references opaque types. Likely to always be normalized away. 
+ Weak, +} + +#[cfg(feature = "rustc")] +impl Alias { + #[tracing::instrument(level = "trace", skip(s))] + fn from<'tcx, S: UnderOwnerState<'tcx>>( + s: &S, + alias_kind: &rustc_type_ir::AliasTyKind, + alias_ty: &ty::AliasTy<'tcx>, + ) -> TyKind { + let tcx = s.base().tcx; + use rustc_type_ir::AliasTyKind as RustAliasKind; + let kind = match alias_kind { + RustAliasKind::Projection => { + let trait_ref = alias_ty.trait_ref(tcx); + // In a case like: + // ``` + // impl Trait for Result + // where + // for<'a> &'a Result: IntoIterator, + // for<'a> <&'a Result as IntoIterator>::Item: Copy, + // {} + // ``` + // the `&'a Result as IntoIterator` trait ref has escaping bound variables + // yet we dont have a binder around (could even be several). Binding this correctly + // is therefore difficult. Since our trait resolution ignores lifetimes anyway, we + // just erase them. See also https://github.com/hacspec/hax/issues/747. + let trait_ref = crate::traits::erase_and_norm(tcx, s.param_env(), trait_ref); + AliasKind::Projection { + assoc_item: tcx.associated_item(alias_ty.def_id).sinto(s), + impl_expr: solve_trait(s, ty::Binder::dummy(trait_ref)), + } + } + RustAliasKind::Inherent => AliasKind::Inherent, + RustAliasKind::Opaque => { + // Reveal the underlying `impl Trait` type. 
+ let ty = tcx.type_of(alias_ty.def_id).instantiate(tcx, alias_ty.args); + AliasKind::Opaque { + hidden_ty: ty.sinto(s), + } + } + RustAliasKind::Weak => AliasKind::Weak, + }; + TyKind::Alias(Alias { + kind, + args: alias_ty.args.sinto(s), + def_id: alias_ty.def_id.sinto(s), + }) + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto> for ty::Ty<'tcx> { + fn sinto(&self, s: &S) -> Box { + Box::new(self.sinto(s)) + } +} + +/// Reflects [`rustc_middle::ty::Ty`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[serde(transparent)] +pub struct Ty { + pub(crate) kind: id_table::Node, +} + +impl Ty { + pub fn inner(&self) -> &Arc { + self.kind.inner() + } + + pub fn kind(&self) -> &TyKind { + self.inner().as_ref() + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for rustc_middle::ty::Ty<'tcx> { + fn sinto(&self, s: &S) -> Ty { + if let Some(ty) = s.with_cache(|cache| cache.tys.get(self).cloned()) { + return ty; + } + let ty_kind: TyKind = self.kind().sinto(s); + s.with_global_cache(|cache| { + let table_session = &mut cache.id_table_session; + let cache = cache.per_item.entry(s.owner_id()).or_default(); + let kind = id_table::Node::new(ty_kind, table_session); + let ty = Ty { kind }; + cache.tys.insert(*self, ty.clone()); + ty + }) + } +} + +/// Reflects [`ty::TyKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TyKind<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum TyKind { + Bool, + Char, + Int(IntTy), + Uint(UintTy), + Float(FloatTy), + + #[custom_arm( + ty::TyKind::FnPtr(tys, header) => { + let sig = tys.map_bound(|tys| ty::FnSig { + inputs_and_output: tys.inputs_and_output, + c_variadic: header.c_variadic, + safety: header.safety, + abi: header.abi, + }); + TyKind::Arrow(Box::new(sig.sinto(s))) + }, + ty::TyKind::FnDef(def, generics) => { 
+ let tcx = s.base().tcx; + let sig = tcx.fn_sig(*def).instantiate(tcx, generics); + TyKind::Arrow(Box::new(sig.sinto(s))) + }, + ty::TyKind::Closure (_def_id, generics) => { + let sig = generics.as_closure().sig(); + let sig = s.base().tcx.signature_unclosure(sig, rustc_hir::Safety::Safe); + TyKind::Arrow(Box::new(sig.sinto(s))) + }, + )] + /// Reflects [`ty::TyKind::FnPtr`], [`ty::TyKind::FnDef`] and [`ty::TyKind::Closure`] + Arrow(Box), + + #[custom_arm( + ty::TyKind::Adt(adt_def, generics) => { + let def_id = adt_def.did().sinto(s); + let generic_args: Vec = generics.sinto(s); + let trait_refs = solve_item_required_traits(s, adt_def.did(), generics); + TyKind::Adt { def_id, generic_args, trait_refs } + }, + )] + Adt { + /// Reflects [`ty::TyKind::Adt`]'s substitutions + generic_args: Vec, + /// Predicates required by the type, e.g. `T: Sized` for `Option` or `B: 'a + ToOwned` + /// for `Cow<'a, B>`. + trait_refs: Vec, + def_id: DefId, + }, + Foreign(DefId), + Str, + Array(Box, #[map(Box::new(x.sinto(s)))] Box), + Slice(Box), + RawPtr(Box, Mutability), + Ref(Region, Box, Mutability), + Dynamic(Vec>, Region, DynKind), + Coroutine(DefId, Vec), + Never, + Tuple(Vec), + #[custom_arm( + ty::TyKind::Alias(alias_kind, alias_ty) => { + Alias::from(s, alias_kind, alias_ty) + }, + )] + Alias(Alias), + Param(ParamTy), + Bound(DebruijnIndex, BoundTy), + Placeholder(PlaceholderType), + Infer(InferTy), + #[custom_arm(ty::TyKind::Error(..) 
=> TyKind::Error,)] + Error, + #[todo] + Todo(String), +} + +/// Reflects [`ty::Variance`] +#[derive(AdtInto)] +#[args(, from: ty::Variance, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum Variance { + Covariant, + Invariant, + Contravariant, + Bivariant, +} + +/// Reflects [`ty::CanonicalUserTypeAnnotation`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::CanonicalUserTypeAnnotation<'tcx>, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct CanonicalUserTypeAnnotation { + pub user_ty: CanonicalUserType, + pub span: Span, + pub inferred_ty: Ty, +} + +/// Reflects [`ty::AdtKind`] +#[derive_group(Serializers)] +#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::AdtKind, state: S as _s)] +pub enum AdtKind { + Struct, + Union, + Enum, +} + +// This comes from MIR +// TODO: add the generics and the predicates +/// Reflects [`ty::AdtDef`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub struct AdtDef { + pub did: DefId, + pub adt_kind: AdtKind, + pub variants: IndexVec, + pub flags: AdtFlags, + pub repr: ReprOptions, +} + +sinto_todo!(rustc_middle::ty, AdtFlags); + +/// Reflects [`ty::ReprOptions`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ReprOptions, state: S as s)] +pub struct ReprOptions { + pub int: Option, + #[value({ + use crate::rustc_middle::ty::util::IntTypeExt; + self.discr_type().to_ty(s.base().tcx).sinto(s) + })] + pub typ: Ty, + pub align: Option, + pub pack: Option, + pub flags: ReprFlags, + pub field_shuffle_seed: u64, +} + +sinto_todo!(rustc_abi, IntegerType); +sinto_todo!(rustc_abi, ReprFlags); +sinto_todo!(rustc_abi, Align); + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::AdtDef<'tcx> { + fn sinto(&self, s: &S) -> AdtDef { + let variants = self + 
.variants() + .iter_enumerated() + .map(|(variant_idx, variant)| { + let discr = if self.is_enum() { + self.discriminant_for_variant(s.base().tcx, variant_idx) + } else { + // Structs and unions have a single variant. + assert_eq!(variant_idx.index(), 0); + ty::util::Discr { + val: 0, + ty: s.base().tcx.types.isize, + } + }; + VariantDef::sfrom(s, variant, discr) + }) + .collect(); + AdtDef { + did: self.did().sinto(s), + adt_kind: self.adt_kind().sinto(s), + variants, + flags: self.flags().sinto(s), + repr: self.repr().sinto(s), + } + } +} + +/// Reflects [`ty::adjustment::PointerCoercion`] +#[derive(AdtInto)] +#[args(, from: ty::adjustment::PointerCoercion, state: S as gstate)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema)] +pub enum PointerCoercion { + ReifyFnPointer, + UnsafeFnPointer, + ClosureFnPointer(Safety), + MutToConstPointer, + ArrayToPointer, + DynStar, + Unsize, +} + +sinto_todo!(rustc_middle::ty, ScalarInt); + +/// Reflects [`ty::FnSig`] +#[derive_group(Serializers)] +#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::FnSig<'tcx>, state: S as s)] +pub struct TyFnSig { + #[value(self.inputs().sinto(s))] + pub inputs: Vec, + #[value(self.output().sinto(s))] + pub output: Ty, + pub c_variadic: bool, + pub safety: Safety, + pub abi: Abi, +} + +/// Reflects [`ty::PolyFnSig`] +pub type PolyFnSig = Binder; + +/// Reflects [`ty::TraitRef`] +#[derive_group(Serializers)] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TraitRef<'tcx>, state: S as tcx)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct TraitRef { + pub def_id: DefId, + #[from(args)] + /// reflects the `args` field + pub generic_args: Vec, +} + +/// Reflects [`ty::TraitPredicate`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TraitPredicate<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] 
+#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct TraitPredicate { + pub trait_ref: TraitRef, + #[map(*x == ty::PredicatePolarity::Positive)] + #[from(polarity)] + pub is_positive: bool, +} + +/// Reflects [`ty::OutlivesPredicate`] as a named struct +/// instead of a tuple struct. This is because the script converting +/// JSONSchema types to OCaml doesn't support tuple structs, and this +/// is the only tuple struct in the whole AST. +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct OutlivesPredicate { + pub lhs: T, + pub rhs: Region, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>, T, U> SInto> + for ty::OutlivesPredicate<'tcx, T> +where + T: SInto, +{ + fn sinto(&self, s: &S) -> OutlivesPredicate where { + OutlivesPredicate { + lhs: self.0.sinto(s), + rhs: self.1.sinto(s), + } + } +} + +/// Reflects [`ty::RegionOutlivesPredicate`] +pub type RegionOutlivesPredicate = OutlivesPredicate; +/// Reflects [`ty::TypeOutlivesPredicate`] +pub type TypeOutlivesPredicate = OutlivesPredicate; + +/// Reflects [`ty::Term`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum Term { + Ty(Ty), + Const(ConstantExpr), +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Term<'tcx> { + fn sinto(&self, s: &S) -> Term { + use ty::TermKind; + match self.unpack() { + TermKind::Ty(ty) => Term::Ty(ty.sinto(s)), + TermKind::Const(c) => Term::Const(c.sinto(s)), + } + } +} + +/// Expresses a constraints over an associated type. +/// +/// For instance: +/// ```text +/// fn f>(...) +/// ^^^^^^^^^^ +/// ``` +/// (provided the trait `Foo` has an associated type `S`). +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct ProjectionPredicate { + /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`. 
+ pub impl_expr: ImplExpr, + /// The `Type` in `Ty: Trait<..., Type = U>`. + pub assoc_item: AssocItem, + /// The type `U` in `Ty: Trait<..., Type = U>`. + pub ty: Ty, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderBinderState<'tcx>> SInto + for ty::ProjectionPredicate<'tcx> +{ + fn sinto(&self, s: &S) -> ProjectionPredicate { + let tcx = s.base().tcx; + let alias_ty = &self.projection_term.expect_ty(tcx); + let poly_trait_ref = s.binder().rebind(alias_ty.trait_ref(tcx)); + let Term::Ty(ty) = self.term.sinto(s) else { + unreachable!() + }; + ProjectionPredicate { + impl_expr: solve_trait(s, poly_trait_ref), + assoc_item: tcx.associated_item(alias_ty.def_id).sinto(s), + ty, + } + } +} + +/// Reflects [`ty::ClauseKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderBinderState<'tcx>>, from: ty::ClauseKind<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum ClauseKind { + Trait(TraitPredicate), + RegionOutlives(RegionOutlivesPredicate), + TypeOutlives(TypeOutlivesPredicate), + Projection(ProjectionPredicate), + ConstArgHasType(ConstantExpr, Ty), + WellFormed(GenericArg), + ConstEvaluatable(ConstantExpr), +} + +/// Reflects [`ty::Clause`] and adds a hash-consed predicate identifier. 
+#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Clause { + pub kind: Binder, + pub id: PredicateId, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Clause<'tcx> { + fn sinto(&self, s: &S) -> Clause { + let kind = self.kind().sinto(s); + let id = kind.clone().map(PredicateKind::Clause).predicate_id(); + Clause { kind, id } + } +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::PolyTraitPredicate<'tcx> { + fn sinto(&self, s: &S) -> Clause { + let kind: Binder<_> = self.sinto(s); + let kind: Binder = kind.map(ClauseKind::Trait); + let id = kind.clone().map(PredicateKind::Clause).predicate_id(); + Clause { kind, id } + } +} + +/// Reflects [`ty::Predicate`] and adds a hash-consed predicate identifier. +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Predicate { + pub kind: Binder, + pub id: PredicateId, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>> SInto for ty::Predicate<'tcx> { + fn sinto(&self, s: &S) -> Predicate { + let kind = self.kind().sinto(s); + let id = kind.predicate_id(); + Predicate { kind, id } + } +} + +/// Reflects [`ty::BoundVariableKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundVariableKind, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum BoundVariableKind { + Ty(BoundTyKind), + Region(BoundRegionKind), + Const, +} + +/// Reflects [`ty::Binder`] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Binder { + pub value: T, + pub bound_vars: Vec, +} + +impl Binder { + pub fn as_ref(&self) -> Binder<&T> { + Binder { + value: &self.value, + bound_vars: self.bound_vars.clone(), + } + } + + pub fn hax_skip_binder(self) -> T { + self.value + } + + 
pub fn hax_skip_binder_ref(&self) -> &T { + &self.value + } + + pub fn map(self, f: impl FnOnce(T) -> U) -> Binder { + Binder { + value: f(self.value), + bound_vars: self.bound_vars, + } + } + + pub fn inner_mut(&mut self) -> &mut T { + &mut self.value + } + + pub fn rebind(&self, value: U) -> Binder { + self.as_ref().map(|_| value) + } +} + +/// Reflects [`ty::GenericPredicates`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericPredicates<'tcx>, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct GenericPredicates { + pub parent: Option, + // FIXME: Switch from `Predicate` to `Clause` (will require correct handling of binders). + #[value(self.predicates.iter().map(|(clause, span)| (clause.as_predicate().sinto(s), span.sinto(s))).collect())] + pub predicates: Vec<(Predicate, Span)>, +} + +#[cfg(feature = "rustc")] +impl<'tcx, S: UnderOwnerState<'tcx>, T1, T2> SInto> for ty::Binder<'tcx, T1> +where + T1: SInto, T2>, +{ + fn sinto(&self, s: &S) -> Binder { + let bound_vars = self.bound_vars().sinto(s); + let value = { + let under_binder_s = &State { + base: s.base(), + owner_id: s.owner_id(), + binder: self.as_ref().map_bound(|_| ()), + thir: (), + mir: (), + }; + self.as_ref().skip_binder().sinto(under_binder_s) + }; + Binder { value, bound_vars } + } +} + +/// Reflects [`ty::SubtypePredicate`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::SubtypePredicate<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct SubtypePredicate { + pub a_is_expected: bool, + pub a: Ty, + pub b: Ty, +} + +/// Reflects [`ty::CoercePredicate`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::CoercePredicate<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub 
struct CoercePredicate { + pub a: Ty, + pub b: Ty, +} + +/// Reflects [`ty::AliasRelationDirection`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::AliasRelationDirection, state: S as _tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum AliasRelationDirection { + Equate, + Subtype, +} + +/// Reflects [`ty::ClosureArgs`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: ty::ClosureArgs>, state: S as s)] +#[derive(Clone, Debug, JsonSchema)] +#[derive_group(Serializers)] +pub struct ClosureArgs { + #[value(self.kind().sinto(s))] + pub kind: ClosureKind, + #[value(self.parent_args().sinto(s))] + pub parent_args: Vec, + #[value(self.sig().sinto(s))] + pub sig: PolyFnSig, + #[value(self.upvar_tys().sinto(s))] + pub upvar_tys: Vec, +} + +/// Reflects [`ty::ClosureKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ClosureKind, state: S as _tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum ClosureKind { + Fn, + FnMut, + FnOnce, +} + +sinto_todo!(rustc_middle::ty, NormalizesTo<'tcx>); + +/// Reflects [`ty::PredicateKind`] +#[derive(AdtInto)] +#[args(<'tcx, S: UnderBinderState<'tcx>>, from: ty::PredicateKind<'tcx>, state: S as tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum PredicateKind { + Clause(ClauseKind), + DynCompatible(DefId), + Subtype(SubtypePredicate), + Coerce(CoercePredicate), + ConstEquate(ConstantExpr, ConstantExpr), + Ambiguous, + AliasRelate(Term, Term, AliasRelationDirection), + NormalizesTo(NormalizesTo), +} + +#[cfg(feature = "rustc")] +fn get_container_for_assoc_item<'tcx, S: BaseState<'tcx>>( + s: &S, + item: &ty::AssocItem, +) -> AssocItemContainer { + let tcx = s.base().tcx; + let container_id = item.container_id(tcx); + match item.container { + 
ty::AssocItemContainer::TraitContainer => AssocItemContainer::TraitContainer { + trait_id: container_id.sinto(s), + }, + ty::AssocItemContainer::ImplContainer => { + if let Some(implemented_trait_item) = item.trait_item_def_id { + // The trait ref that is being implemented by this `impl` block. + let implemented_trait_ref = tcx + .impl_trait_ref(container_id) + .unwrap() + .instantiate_identity(); + AssocItemContainer::TraitImplContainer { + impl_id: container_id.sinto(s), + implemented_trait: implemented_trait_ref.def_id.sinto(s), + implemented_trait_item: implemented_trait_item.sinto(s), + overrides_default: tcx.defaultness(implemented_trait_item).has_value(), + } + } else { + AssocItemContainer::InherentImplContainer { + impl_id: container_id.sinto(s), + } + } + } + } +} + +/// Reflects [`ty::AssocItem`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: ty::AssocItem, state: S as s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct AssocItem { + pub def_id: DefId, + pub name: Symbol, + pub kind: AssocKind, + #[value(get_container_for_assoc_item(s, self))] + pub container: AssocItemContainer, + /// Whether this item has a value (e.g. this is `false` for trait methods without default + /// implementations). 
+ #[value(self.defaultness(s.base().tcx).has_value())] + pub has_value: bool, + pub fn_has_self_parameter: bool, + pub opt_rpitit_info: Option, +} + +/// Reflects [`ty::ImplTraitInTraitData`] +#[derive(AdtInto)] +#[args(<'tcx, S: BaseState<'tcx>>, from: ty::ImplTraitInTraitData, state: S as _s)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum ImplTraitInTraitData { + Trait { + fn_def_id: DefId, + opaque_def_id: DefId, + }, + Impl { + fn_def_id: DefId, + }, +} + +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum AssocItemContainer { + TraitContainer { + trait_id: DefId, + }, + TraitImplContainer { + impl_id: DefId, + implemented_trait: DefId, + implemented_trait_item: DefId, + /// Whether the corresponding trait item had a default (and therefore this one overrides + /// it). + overrides_default: bool, + }, + InherentImplContainer { + impl_id: DefId, + }, +} + +/// Reflects [`ty::AssocKind`] +#[derive(AdtInto)] +#[args(, from: ty::AssocKind, state: S as _tcx)] +#[derive_group(Serializers)] +#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum AssocKind { + Const, + Fn, + Type, +} diff --git a/frontend/exporter/src/utils.rs b/frontend/exporter/src/utils/error_macros.rs similarity index 100% rename from frontend/exporter/src/utils.rs rename to frontend/exporter/src/utils/error_macros.rs diff --git a/frontend/exporter/src/utils/mod.rs b/frontend/exporter/src/utils/mod.rs new file mode 100644 index 000000000..3e1fb05ee --- /dev/null +++ b/frontend/exporter/src/utils/mod.rs @@ -0,0 +1,5 @@ +mod error_macros; +mod type_map; + +pub use error_macros::*; +pub use type_map::*; diff --git a/frontend/exporter/src/utils/type_map.rs b/frontend/exporter/src/utils/type_map.rs new file mode 100644 index 000000000..f7a3bc3a5 --- /dev/null +++ b/frontend/exporter/src/utils/type_map.rs @@ -0,0 +1,52 @@ +use std::{ + any::{Any, 
TypeId}, + collections::HashMap, + marker::PhantomData, +}; + +pub trait TypeMappable = Any + Send + Sync; + +/// Defines a mapping from types to types. +pub trait TypeMapper { + type Value: TypeMappable; +} + +/// A map that maps types to values in a generic manner: we store for each type `T` a value of +/// type `M::Value`. +pub struct TypeMap { + data: HashMap>, + phantom: PhantomData, +} + +impl TypeMap { + pub fn get(&self) -> Option<&M::Value> { + self.data + .get(&TypeId::of::()) + // We must be careful to not accidentally cast the box itself as `dyn Any`. + .map(|val: &Box| &**val) + .and_then(|val: &dyn TypeMappable| (val as &dyn Any).downcast_ref()) + } + + pub fn get_mut(&mut self) -> Option<&mut M::Value> { + self.data + .get_mut(&TypeId::of::()) + // We must be careful to not accidentally cast the box itself as `dyn Any`. + .map(|val: &mut Box| &mut **val) + .and_then(|val: &mut dyn TypeMappable| (val as &mut dyn Any).downcast_mut()) + } + + pub fn insert(&mut self, val: M::Value) -> Option>> { + self.data + .insert(TypeId::of::(), Box::new(val)) + .and_then(|val: Box| (val as Box).downcast().ok()) + } +} + +impl Default for TypeMap { + fn default() -> Self { + Self { + data: Default::default(), + phantom: Default::default(), + } + } +} diff --git a/hax-types/Cargo.toml b/hax-types/Cargo.toml index 37f39615e..caeba72f6 100644 --- a/hax-types/Cargo.toml +++ b/hax-types/Cargo.toml @@ -21,8 +21,10 @@ colored.workspace = true serde_json.workspace = true annotate-snippets.workspace = true hax-adt-into.workspace = true -bincode.workspace = true +tracing.workspace = true +serde-brief ={ version = "*", features = ["std", "alloc"]} zstd = "0.13.1" +miette = "7.2.0" [features] rustc = ["hax-frontend-exporter/rustc"] diff --git a/hax-types/build.rs b/hax-types/build.rs index b4ab527a0..bda3c1c49 100644 --- a/hax-types/build.rs +++ b/hax-types/build.rs @@ -1,24 +1,43 @@ -macro_rules! 
set_empty_env_var_with_git { - ($var:literal, $args: expr) => { - if let None = option_env!($var) { - println!( - "cargo:rustc-env={}={}", - $var, - std::process::Command::new("git") - .args($args) - .output() - .map(|output| String::from_utf8(output.stdout).unwrap()) - .unwrap_or("unknown".into()) - ); - } +macro_rules! set_empty_env_var_with { + ($var:literal, $f: expr) => {{ println!("cargo:rurun-if-env-changed={}", $var); - }; + match option_env!($var) { + Some(value) => value.to_string(), + None => { + let value = $f; + println!("cargo:rustc-env={}={}", $var, value); + value + } + } + }}; +} + +const UNKNOWN: &str = "unknown"; + +fn git_command(args: &[&str]) -> String { + std::process::Command::new("git") + .args(args) + .output() + .map(|output| String::from_utf8(output.stdout).unwrap().trim().to_string()) + .ok() + .filter(|s| !s.is_empty()) + .unwrap_or(UNKNOWN.to_string()) } fn main() { - set_empty_env_var_with_git!( - "HAX_GIT_DESCRIBE", - ["describe", "--tags", "--always", "--abbrev=0"] - ); - set_empty_env_var_with_git!("HAX_GIT_COMMIT_HASH", ["rev-parse", "HEAD"]); + let commit_hash = + set_empty_env_var_with!("HAX_GIT_COMMIT_HASH", git_command(&["rev-parse", "HEAD"])); + + set_empty_env_var_with!("HAX_VERSION", { + if commit_hash == UNKNOWN { + env!("CARGO_PKG_VERSION").into() + } else { + git_command(&["tag", "--contains", &commit_hash]) + .lines() + .next() + .and_then(|tag| tag.split_once("hax-v")) + .map(|(_, version)| version.trim().to_string()) + .unwrap_or_else(|| format!("untagged-git-rev-{}", &commit_hash[0..10])) + } + }); } diff --git a/hax-types/src/cli_options/extension.rs b/hax-types/src/cli_options/extension.rs index efed466c6..fa1084dc5 100644 --- a/hax-types/src/cli_options/extension.rs +++ b/hax-types/src/cli_options/extension.rs @@ -14,14 +14,11 @@ macro_rules! 
trait_alias { trait_alias!( ExtensionPoint = - bincode::Decode - + bincode::Encode - + std::fmt::Debug + std::fmt::Debug + for<'a> serde::Deserialize<'a> + serde::Serialize + JsonSchema + Clone - + for<'a> bincode::BorrowDecode<'a> ); trait_alias!(SubcommandExtensionPoint = ExtensionPoint + clap::Subcommand); diff --git a/hax-types/src/cli_options/mod.rs b/hax-types/src/cli_options/mod.rs index adecc485f..6ef102e13 100644 --- a/hax-types/src/cli_options/mod.rs +++ b/hax-types/src/cli_options/mod.rs @@ -308,6 +308,10 @@ pub struct BackendOptions { #[arg(short, long, action = clap::ArgAction::Count)] pub verbose: u8, + /// Enables profiling for the engine + #[arg(short, long)] + pub profile: bool, + /// Enable engine debugging: dumps the AST at each phase. /// /// The value of `` can be either: @@ -378,6 +382,13 @@ pub enum Command { )] kind: Vec, + /// By default, `cargo hax json` outputs a JSON where every + /// piece of information is inlined. This however creates very + /// large JSON files. This flag enables the use of unique IDs + /// and outputs a map from IDs to actual objects. + #[arg(long)] + use_ids: bool, + /// Whether to include extra informations about `DefId`s. #[arg(short = 'E', long = "include-extra", default_value = "false")] include_extra: bool, @@ -405,7 +416,14 @@ pub enum ExportBodyKind { #[derive_group(Serializers)] #[derive(JsonSchema, Parser, Debug, Clone)] -#[command(author, version = concat!("commit=", env!("HAX_GIT_COMMIT_HASH"), " ", "describe=", env!("HAX_GIT_DESCRIBE")), name = "hax", about, long_about = None)] +#[command( + author, + version = crate::HAX_VERSION, + long_version = concat!("\nversion=", env!("HAX_VERSION"), "\n", "commit=", env!("HAX_GIT_COMMIT_HASH")), + name = "hax", + about, + long_about = None +)] pub struct ExtensibleOptions { /// Replace the expansion of each macro matching PATTERN by their /// invocation. PATTERN denotes a rust path (i.e. 
`A::B::c`) in diff --git a/hax-types/src/diagnostics/message.rs b/hax-types/src/diagnostics/message.rs index 2a94c0b56..99fe003b0 100644 --- a/hax-types/src/diagnostics/message.rs +++ b/hax-types/src/diagnostics/message.rs @@ -23,6 +23,7 @@ pub enum HaxMessage { WarnExperimentalBackend { backend: Backend<()>, } = 4, + ProfilingData(crate::engine_api::ProfilingData) = 5, } impl HaxMessage { diff --git a/hax-types/src/diagnostics/report.rs b/hax-types/src/diagnostics/report.rs index cf84959b6..e1cc96ef5 100644 --- a/hax-types/src/diagnostics/report.rs +++ b/hax-types/src/diagnostics/report.rs @@ -1,5 +1,6 @@ use super::Diagnostics; use annotate_snippets::*; +use miette::SourceOffset; use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::rc::Rc; @@ -11,29 +12,8 @@ pub struct ReportCtx { } /// Translates a line and column position into an absolute offset -fn compute_offset(src: &str, mut line: usize, col: usize) -> usize { - let mut chars = src.chars().enumerate(); - while line > 1 { - while let Some((_offset, ch)) = chars.next() { - if ch == '\n' { - break; - } - } - line -= 1; - } - let offset = chars - .clone() - .next() - .map(|(offset, _ch)| offset) - .unwrap_or(0); - let are_col_first_chars_blank = chars - .take(col) - .all(|(_offset, ch)| matches!(ch, ' ' | '\t')); - if are_col_first_chars_blank { - offset - } else { - offset + col - } +fn compute_offset(src: &str, line: usize, col: usize) -> usize { + SourceOffset::from_location(src, line, col).offset() + 1 } impl ReportCtx { diff --git a/hax-types/src/driver_api.rs b/hax-types/src/driver_api.rs index cf64d1158..51eb86bbc 100644 --- a/hax-types/src/driver_api.rs +++ b/hax-types/src/driver_api.rs @@ -30,19 +30,26 @@ pub struct HaxMeta { pub comments: Vec<(hax_frontend_exporter::Span, String)>, } +use hax_frontend_exporter::id_table; + impl HaxMeta where - Body: bincode::Encode + bincode::Decode, + Body: serde::Serialize + for<'de> serde::Deserialize<'de>, { - pub fn write(self, write: &mut impl 
std::io::Write) { + #[tracing::instrument(level = "trace", skip(self, write, id_table))] + pub fn write(self, write: &mut impl std::io::Write, id_table: id_table::Table) { let mut write = zstd::stream::write::Encoder::new(write, 0).unwrap(); - bincode::encode_into_std_write(self, &mut write, bincode::config::standard()).unwrap(); - write.finish().unwrap(); + + id_table::WithTable::run(id_table, self, |with_table| { + serde_brief::to_writer(with_table, &mut write).unwrap(); + write.finish().unwrap(); + }) } - pub fn read(reader: impl std::io::Read) -> Self { + #[tracing::instrument(level = "trace", skip(reader))] + pub fn read(reader: impl std::io::Read) -> (Self, id_table::Table) { let reader = zstd::stream::read::Decoder::new(reader).unwrap(); let reader = std::io::BufReader::new(reader); - bincode::decode_from_reader(reader, bincode::config::standard()).unwrap() + id_table::WithTable::destruct(serde_brief::from_reader(reader).unwrap()) } } diff --git a/hax-types/src/engine_api.rs b/hax-types/src/engine_api.rs index 38df58943..0f41211a2 100644 --- a/hax-types/src/engine_api.rs +++ b/hax-types/src/engine_api.rs @@ -14,11 +14,40 @@ pub struct EngineOptions { )>, } +#[derive_group(Serializers)] +#[allow(non_snake_case)] +#[derive(JsonSchema, Debug, Clone)] +pub struct SourceMap { + pub mappings: String, + pub sourceRoot: String, + pub sources: Vec, + pub sourcesContent: Vec>, + pub names: Vec, + pub version: u8, + pub file: String, +} + +impl SourceMap { + pub fn inline_sources_content(&mut self) { + self.sourcesContent = vec![]; + for source in &self.sources { + let path = if self.sourceRoot.is_empty() { + source.clone() + } else { + format!("{}/{}", &self.sourceRoot, source) + }; + let contents = Some(std::fs::read_to_string(path).unwrap()); + self.sourcesContent.push(contents); + } + } +} + #[derive_group(Serializers)] #[derive(JsonSchema, Debug, Clone)] pub struct File { pub path: String, pub contents: String, + pub sourcemap: Option, } 
#[derive_group(Serializers)] @@ -29,6 +58,25 @@ pub struct Output { pub debug_json: Option, } +#[derive_group(Serializers)] +#[derive(JsonSchema, Debug, Clone)] +pub struct ProfilingData { + /// What context are we profiling? + pub context: String, + /// How long this took? + pub time_ns: u64, + /// How much memory this took? This is using OCaml's + /// `Gc.minor_words`, and is probably not very precise. + pub memory: u64, + /// How many things were processed? (often, this is the number of + /// items a phase processes) + pub quantity: u32, + /// Did the action errored? This is important since a failed + /// action might have exited very early, making the numbers + /// unusable. + pub errored: bool, +} + pub mod protocol { use super::*; #[derive_group(Serializers)] @@ -39,6 +87,7 @@ pub mod protocol { PrettyPrintDiagnostic(crate::diagnostics::Diagnostics), PrettyPrintRust(String), DebugString(String), + ProfilingData(ProfilingData), Exit, Ping, } diff --git a/hax-types/src/lib.rs b/hax-types/src/lib.rs index ab974d41b..167e9ccd9 100644 --- a/hax-types/src/lib.rs +++ b/hax-types/src/lib.rs @@ -1,3 +1,4 @@ +#![cfg_attr(feature = "rustc", feature(rustc_private))] //! This crate contains the type definitions that are used to communicate between: //! - the command line (the `cargo-hax` binary); //! - the custom rustc driver; @@ -25,3 +26,6 @@ pub mod driver_api; /// The types used to communicate between `cargo-hax` and /// `hax-engine`. pub mod engine_api; + +/// Compile-time version of hax +pub const HAX_VERSION: &str = env!("HAX_VERSION"); diff --git a/justfile b/justfile new file mode 100644 index 000000000..0edcc0754 --- /dev/null +++ b/justfile @@ -0,0 +1,116 @@ +@_default: + just --list + +# Build Rust and OCaml parts and install binaries in PATH. To build +# only OCaml parts or only Rust parts, set target to `rust` or +# `ocaml`. 
+@build target='rust+ocaml': + ./.utils/rebuild.sh {{target}} + +alias b := build + +# alias for `build rust` +@rust: + just build rust + +# alias for `build ocaml` +@ocaml: + just build ocaml + +# `cargo expand` a crate, but sets flags and crate attributes so that the expansion is exactly what hax receives. This is useful to debug hax macros. +[no-cd] +expand *FLAGS: + RUSTFLAGS='-Zcrate-attr=register_tool(_hax) -Zcrate-attr=feature(register_tool) --cfg hax_compilation --cfg _hax --cfg hax --cfg hax_backend_fstar --cfg hax' cargo expand {{FLAGS}} + +# Show the generated module `concrete_ident_generated.ml`, that contains all the Rust names the engine knows about. Those names are declared in the `./engine/names` crate. +@list-names: + hax-engine-names-extract | sed '/include .val/,$d' | just _pager + +# Show the Rust to OCaml generated types available to the engine. +@list-types: + just _ensure_command_in_path ocamlformat ocamlformat + cd engine && dune describe pp lib/types.ml \ + | sed -e '1,/open ParseError/ d' \ + | sed '/let rec pp_/,$d' \ + | ocamlformat --impl - \ + | just _pager + +# Show the OCaml module `Generated_generic_printer_base` +@show-generated-printer-ml: + just _ensure_command_in_path ocamlformat ocamlformat + cd engine && dune describe pp lib/generated_generic_printer_base.ml \ + | ocamlformat --impl - \ + | just _pager + + +# Format all the code +fmt: + cargo fmt + cd engine && dune fmt + +# Run hax tests: each test crate has a snapshot, so that we track changes in extracted code. If a snapshot changed, please review them with `just test-review`. 
+test: + cargo test --test toolchain + +_test: + CARGO_TESTS_ASSUME_BUILT=1 cargo test --test toolchain + +# Review snapshots +test-review: (_ensure_command_in_path "cargo-insta" "Insta (https://insta.rs)") + cargo insta review + +# Serve the book +book: (_ensure_command_in_path "mdbook" "mdBook (https://rust-lang.github.io/mdBook/)") + cd book && mdbook serve + +# Check the coherency between issues labeled `marked-unimplemented` on GitHub and issues mentionned in the engine in the `Unimplemented {issue_id: ...}` errors. +@check-issues: + just _ensure_command_in_path jq "jq (https://jqlang.github.io/jq/)" + just _ensure_command_in_path gh "GitHub CLI (https://cli.github.com/)" + just _ensure_command_in_path rg "ripgrep (https://github.com/BurntSushi/ripgrep)" + just _ensure_command_in_path sd "sd (https://github.com/chmln/sd)" + diff -U0 \ + <(gh issue -R hacspec/hax list --label 'marked-unimplemented' --json number,closed -L 200 \ + | jq '.[] | select(.closed | not) | .number' | sort -u) \ + <(rg 'issue_id:(\d+)' -Ior '$1' | sort -u) \ + | rg '^[+-]\d' \ + | sd '[-](\d+)' '#$1\t is labeled `marked-unimplemented`, but was not found in the code' \ + | sd '[+](\d+)' '#$1\t is *not* labeled `marked-unimplemented` or is closed' + +# Check that the licenses of every crate and every package are compliant with `deny.toml` +check-licenses: + #!/usr/bin/env bash + just _ensure_command_in_path cargo-deny "cargo-deny (https://embarkstudios.github.io/cargo-deny/)" + just _ensure_command_in_path toml2json "toml2json (https://github.com/woodruffw/toml2json)" + echo "> Check licenses for Rust" + cargo deny check licenses + cd engine + echo "> Check licenses for OCaml" + # initialize opam if needed + opam env >& /dev/null || opam init --no + # pin package `hax-engine` if needed + opam list --required-by=hax-engine --column=name,license: -s >& /dev/null || opam pin . 
--yes + # Check that every pacakge matches licenses of `deny.toml` + if opam list --required-by=hax-engine --column=name,license: -s \ + | grep -Pvi $(toml2json ../deny.toml| jq '.licenses.allow | join("|")'); then + echo "Some licenses were non compliant to our policy (see `deny.toml`)" + else + echo "licenses ok" + fi + +_ensure_command_in_path BINARY NAME: + #!/usr/bin/env bash + command -v {{BINARY}} &> /dev/null || { + >&2 echo -e "\033[0;31mSorry, the binary \033[1m{{BINARY}}\033[0m\033[0;31m is required for this command.\033[0m" + >&2 echo -e " \033[0;31m→ please install \033[1m{{NAME}}\033[0m" + >&2 echo "" + exit 1 + } + +_pager: + #!/usr/bin/env bash + if command -v bat &> /dev/null; then + bat -l ml + else + less + fi diff --git a/proof-libs/fstar-secret-integers/.envrc b/proof-libs/fstar-secret-integers/.envrc index 7045e0610..42800cde0 100644 --- a/proof-libs/fstar-secret-integers/.envrc +++ b/proof-libs/fstar-secret-integers/.envrc @@ -1 +1 @@ -use flake .#fstar +use flake .#examples diff --git a/proof-libs/fstar/.envrc b/proof-libs/fstar/.envrc index 7045e0610..42800cde0 100644 --- a/proof-libs/fstar/.envrc +++ b/proof-libs/fstar/.envrc @@ -1 +1 @@ -use flake .#fstar +use flake .#examples diff --git a/proof-libs/fstar/core/Core.Iter.Traits.Iterator.fst b/proof-libs/fstar/core/Core.Iter.Traits.Iterator.fst index 6a9c67564..4573af0d4 100644 --- a/proof-libs/fstar/core/Core.Iter.Traits.Iterator.fst +++ b/proof-libs/fstar/core/Core.Iter.Traits.Iterator.fst @@ -7,7 +7,7 @@ on their own. This is handy for revealing only certain fields of the instances of the `iterator` trait. 
*) unfold type t_next self item - = self -> self * option item + = self -> self * Core.Option.t_Option item unfold type t_contains self item = self -> item -> Type0 unfold type t_fold self (item: Type0) (contains: t_contains self item) @@ -31,7 +31,7 @@ unfold type t_all self item class iterator (self: Type u#0): Type u#1 = { [@@@FStar.Tactics.Typeclasses.no_method] f_Item: Type0; - f_next: self -> self * option f_Item; + f_next: self -> self * Core.Option.t_Option f_Item; f_contains: self -> f_Item -> Type0; f_fold: #b:Type0 -> s:self -> b -> (b -> i:f_Item{f_contains s i} -> b) -> b; f_enumerate: self -> Core.Iter.Adapters.Enumerate.t_Enumerate self; diff --git a/proof-libs/fstar/core/Core.Iter.fsti b/proof-libs/fstar/core/Core.Iter.fsti index ef2095e7f..f7dfc58a7 100644 --- a/proof-libs/fstar/core/Core.Iter.fsti +++ b/proof-libs/fstar/core/Core.Iter.fsti @@ -28,10 +28,10 @@ instance iterator_enumerate it {| i: iterator it |}: iterator (Core.Iter.Adapter let open Core.Ops in let iter, opt = f_next iter in match opt with - | Some value -> if v count = max_usize - then {iter; count }, None - else {iter; count = count +. sz 1}, Some (count, value) - | None -> {iter; count}, None + | Core.Option.Option_Some value -> if v count = max_usize + then {iter; count }, Core.Option.Option_None + else {iter; count = count +. 
sz 1}, Core.Option.Option_Some (count, value) + | Core.Option.Option_None -> {iter; count}, Core.Option.Option_None ); f_contains = iterator_enumerate_contains it i; f_fold = iterator_enumerate_fold it i; @@ -84,7 +84,7 @@ val iterator_slice_all (t: eqtype): t_all (t_Slice t) t instance iterator_slice (t: eqtype): iterator (t_Slice t) = { f_Item = t; f_next = iterator_slice_next t; - // size_hint = (fun s -> Some (Rust_primitives.Arrays.length s)); + // size_hint = (fun s -> Core.Option.Option_Some (Rust_primitives.Arrays.length s)); f_contains = iterator_slice_contains t; f_fold = iterator_slice_fold t; f_enumerate = iterator_slice_enumerate t; @@ -106,7 +106,7 @@ val iterator_array_all (t: eqtype) len: t_all (t_Array t len) t instance iterator_array (t: eqtype) len: iterator (t_Array t len) = { f_Item = t; f_next = iterator_array_next t len; - // size_hint = (fun (_s: t_Array t len) -> Some len); + // size_hint = (fun (_s: t_Array t len) -> Core.Option.Option_Some len); f_contains = iterator_array_contains t len; f_fold = iterator_array_fold t len; f_enumerate = iterator_array_enumerate t len; diff --git a/proof-libs/fstar/core/Core.Num.fsti b/proof-libs/fstar/core/Core.Num.fsti index 0f436ec05..7c994c747 100644 --- a/proof-libs/fstar/core/Core.Num.fsti +++ b/proof-libs/fstar/core/Core.Num.fsti @@ -46,6 +46,7 @@ val impl__i16__pow (base: i16) (exponent: u32): result: i16 {v base == 2 /\ v ex val impl__i32__pow (base: i32) (exponent: u32): result: i32 {v base == 2 /\ v exponent <= 16 ==> result == mk_int #Lib.IntTypes.S32 (pow2 (v exponent))} val impl__u8__count_ones: u8 -> r:u32{v r <= 8} +val impl__i32__count_ones: i32 -> r:u32{v r <= 32} val impl__u8__from_str_radix: string -> u32 -> Core.Result.t_Result u8 Core.Num.Error.t_ParseIntError diff --git a/proof-libs/fstar/core/Core.Ops.Range.fsti b/proof-libs/fstar/core/Core.Ops.Range.fsti index fffe32635..edb10cf4b 100644 --- a/proof-libs/fstar/core/Core.Ops.Range.fsti +++ 
b/proof-libs/fstar/core/Core.Ops.Range.fsti @@ -23,8 +23,8 @@ val iterator_range_all t: t_all (t_Range (Rust_primitives.int_t t)) (Rust_primit instance iterator_range t: iterator (t_Range (Rust_primitives.int_t t)) = { f_Item = Rust_primitives.int_t t; f_next = (fun {f_start; f_end} -> - if f_start >=. f_end then ({f_start; f_end}, None) - else ({f_start = f_start +. Rust_primitives.mk_int 0; f_end}, Some f_start) + if f_start >=. f_end then ({f_start; f_end}, Core.Option.Option_None) + else ({f_start = f_start +. Rust_primitives.mk_int 0; f_end}, Core.Option.Option_Some f_start) ); f_contains = (fun x i -> v i < v x.f_end /\ v i >= v x.f_start); f_fold = (fun #b r init f -> if r.f_start >=. r.f_end then init diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Folds.fsti b/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Folds.fsti index 181f35135..f0ac821a8 100644 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Folds.fsti +++ b/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Folds.fsti @@ -108,3 +108,44 @@ let rec fold_range then fold_range (start +! mk_int 1) end_ inv (f init start) f else init +let rec fold_range_cf + (#acc_t: Type0) (#u: Lib.IntTypes.inttype) + (start: int_t u) + (end_: int_t u) + (inv: acc_t -> (i:int_t u{fold_range_wf_index start end_ false (v i)}) -> Type0) + (acc: acc_t ) + (f: (acc:acc_t -> i:int_t u {v i <= v end_ /\ fold_range_wf_index start end_ true (v i) } + -> tuple:((Core.Ops.Control_flow.t_ControlFlow (unit & acc_t) acc_t)) + { + let acc = match tuple with + | Core.Ops.Control_flow.ControlFlow_Break ((), acc) + | Core.Ops.Control_flow.ControlFlow_Continue acc -> acc in + inv acc (mk_int (v i + 1))})) +: Tot acc_t (decreases v end_ - v start) + = + if v start < v end_ + then match f acc start with + | Core.Ops.Control_flow.ControlFlow_Break ((), acc) -> acc + | Core.Ops.Control_flow.ControlFlow_Continue acc -> + fold_range_cf (start +! 
mk_int 1) end_ inv acc f + else acc + +let rec fold_range_return + (#acc_t: Type0) (#ret_t: Type0) (#u: Lib.IntTypes.inttype) + (start: int_t u) + (end_: int_t u) + (inv: acc_t -> (i:int_t u{fold_range_wf_index start end_ false (v i)}) -> Type0) + (acc: acc_t ) + (f: (acc:acc_t -> i:int_t u {v i <= v end_ /\ fold_range_wf_index start end_ true (v i) } + -> tuple:((Core.Ops.Control_flow.t_ControlFlow (Core.Ops.Control_flow.t_ControlFlow ret_t (unit & acc_t))) acc_t) + )) +: Tot (Core.Ops.Control_flow.t_ControlFlow ret_t acc_t) (decreases v end_ - v start) + = + if v start < v end_ + then match f acc start with + | Core.Ops.Control_flow.ControlFlow_Break (Core.Ops.Control_flow.ControlFlow_Break res)-> Core.Ops.Control_flow.ControlFlow_Break res + + | Core.Ops.Control_flow.ControlFlow_Break (Core.Ops.Control_flow.ControlFlow_Continue ((), res)) -> Core.Ops.Control_flow.ControlFlow_Continue res + | Core.Ops.Control_flow.ControlFlow_Continue acc -> + fold_range_return (start +! mk_int 1) end_ inv acc f + else Core.Ops.Control_flow.ControlFlow_Continue acc diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst b/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst index b85761e91..8093a8a52 100644 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst +++ b/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst @@ -4,9 +4,6 @@ open Rust_primitives open Rust_primitives.Hax open Core.Ops.Range -let update_at_usize s i x = - update_at s i x - let update_at_range #n s i x = let res = update_at s i x in admit(); // To be proved // see issue #423 diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti b/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti index 9a03ba89a..9f1c5b531 100644 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti +++ b/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti 
@@ -85,6 +85,44 @@ let v (#t:inttype) (x:int_t t) : range_t t = LI.v #t #LI.PUB x [@(strict_on_arguments [0])] val mk_int (#t:inttype) (n:range_t t) : int_t t +let mk_u8 x = FStar.UInt8.uint_to_t x +let mk_i8 x = FStar.Int8.int_to_t x +let mk_u16 x = FStar.UInt16.uint_to_t x +let mk_i16 x = FStar.Int16.int_to_t x +let mk_u32 x = FStar.UInt32.uint_to_t x +let mk_i32 x = FStar.Int32.int_to_t x +let mk_u64 x = FStar.UInt64.uint_to_t x +let mk_i64 x = FStar.Int64.int_to_t x +let mk_u128 x = FStar.UInt128.uint_to_t x +let mk_i128 x = FStar.Int128.int_to_t x +let mk_usize x = FStar.UInt32.uint_to_t x +let mk_isize x = FStar.Int32.int_to_t x + +let from_uint8 (x:FStar.UInt8.t) : u8 = x +let from_int8 (x:FStar.Int8.t) : i8 = x +let from_uint16 (x:FStar.UInt16.t) : u16 = x +let from_int16 (x:FStar.Int16.t) : i16 = x +let from_uint32 (x:FStar.UInt32.t) : u32 = x +let from_int32 (x:FStar.Int32.t) : i32 = x +let from_uint64 (x:FStar.UInt64.t) : u64 = x +let from_int64 (x:FStar.Int64.t) : i64 = x +let from_uint128 (x:FStar.UInt128.t) : u128 = x +let from_int128 (x:FStar.Int128.t) : i128 = x +let from_usize (x:FStar.UInt32.t) : usize = mk_int (FStar.UInt32.v x) +let from_isize (x:FStar.Int32.t) : isize = mk_int (FStar.Int32.v x) + +let to_uint8 (x:u8) : FStar.UInt8.t = x +let to_int8 (x:i8) : FStar.Int8.t = x +let to_uint16 (x:u16) : FStar.UInt16.t = x +let to_int16 (x:i16) : FStar.Int16.t = x +let to_uint32 (x:u32) : FStar.UInt32.t = x +let to_int32 (x:i32) : FStar.Int32.t = x +let to_uint64 (x:u64) : FStar.UInt64.t = x +let to_int64 (x:i64) : FStar.Int64.t = x +let to_uint128 (x:u128) : FStar.UInt128.t = x +let to_int128 (x:i128) : FStar.Int128.t = x + + [@(strict_on_arguments [0])] val mk_int_equiv_lemma #t (n:range_t t) : Lemma ( diff --git a/proof-libs/fstar/rust_primitives/Rust_primitives.fst b/proof-libs/fstar/rust_primitives/Rust_primitives.fst index d80eabfde..2037912ef 100644 --- a/proof-libs/fstar/rust_primitives/Rust_primitives.fst +++ 
b/proof-libs/fstar/rust_primitives/Rust_primitives.fst @@ -35,6 +35,10 @@ instance cast_tc_integers (t:inttype) (t':inttype) : cast_tc (int_t t) (int_t t') = { cast = (fun x -> Rust_primitives.Integers.cast_mod #t #t' x) } +instance cast_tc_bool_integer (t:inttype) + : cast_tc bool (int_t t) + = { cast = (fun x -> if x then Rust_primitives.Integers.mk_int 1 else Rust_primitives.Integers.mk_int 0) } + class unsize_tc source = { output: Type; unsize: source -> output; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 9d093adde..43384737a 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2024-08-11" +channel = "nightly-2024-10-23" components = [ "rustc-dev", "llvm-tools-preview" , "rust-analysis" , "rust-src" , "rustfmt" ] diff --git a/test-harness/src/harness.rs b/test-harness/src/harness.rs index 6d3c0db90..88753913e 100644 --- a/test-harness/src/harness.rs +++ b/test-harness/src/harness.rs @@ -1,3 +1,4 @@ +#![feature(rustc_private)] mod command_hax_ext; use command_hax_ext::*; use serde_json::{Map, Value}; diff --git a/test-harness/src/snapshots/toolchain__assert into-coq.snap b/test-harness/src/snapshots/toolchain__assert into-coq.snap index fd9ff5e71..a000cc9f9 100644 --- a/test-harness/src/snapshots/toolchain__assert into-coq.snap +++ b/test-harness/src/snapshots/toolchain__assert into-coq.snap @@ -28,24 +28,31 @@ diagnostics = [] [stdout.files] "Assert.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. -(*Not implemented yet? 
todo(item)*) + + +(* NotImplementedYet *) Definition asserts (_ : unit) : unit := - let _ := assert true : unit in - let _ := assert ((@repr WORDSIZE32 1)=.?(@repr WORDSIZE32 1)) : unit in - let _ := match ((@repr WORDSIZE32 2),(@repr WORDSIZE32 2)) with - | '(left_val,right_val) => - assert (left_val=.?right_val) - end : unit in - let _ := match ((@repr WORDSIZE32 1),(@repr WORDSIZE32 2)) with - | '(left_val,right_val) => - assert (not (left_val=.?right_val)) - end : unit in + let _ := assert (true) in + let _ := assert (t_PartialEq_f_eq (1) (1)) in + let _ := match (2,2) with + | (left_val,right_val) => + assert (t_PartialEq_f_eq (left_val) (right_val)) + end in + let _ := match (1,2) with + | (left_val,right_val) => + assert (negb (t_PartialEq_f_eq (left_val) (right_val))) + end in tt. ''' diff --git a/test-harness/src/snapshots/toolchain__attributes into-fstar.snap b/test-harness/src/snapshots/toolchain__attributes into-fstar.snap index 31ac2cfb9..e65784071 100644 --- a/test-harness/src/snapshots/toolchain__attributes into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__attributes into-fstar.snap @@ -118,16 +118,11 @@ module Attributes.Newtype_pattern open Core open FStar.Mul -let v_MAX: usize = sz 10 - type t_SafeIndex = { f_i:f_i: usize{f_i <. v_MAX} } let impl__SafeIndex__as_usize (self: t_SafeIndex) : usize = self.f_i -let impl__SafeIndex__new (i: usize) : Core.Option.t_Option t_SafeIndex = - if i <. 
v_MAX - then Core.Option.Option_Some ({ f_i = i } <: t_SafeIndex) <: Core.Option.t_Option t_SafeIndex - else Core.Option.Option_None <: Core.Option.t_Option t_SafeIndex +let v_MAX: usize = sz 10 [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1 (#v_T: Type0) : Core.Ops.Index.t_Index (t_Array v_T (sz 10)) t_SafeIndex = @@ -137,6 +132,11 @@ let impl_1 (#v_T: Type0) : Core.Ops.Index.t_Index (t_Array v_T (sz 10)) t_SafeIn f_index_post = (fun (self: t_Array v_T (sz 10)) (index: t_SafeIndex) (out: v_T) -> true); f_index = fun (self: t_Array v_T (sz 10)) (index: t_SafeIndex) -> self.[ index.f_i ] } + +let impl__SafeIndex__new (i: usize) : Core.Option.t_Option t_SafeIndex = + if i <. v_MAX + then Core.Option.Option_Some ({ f_i = i } <: t_SafeIndex) <: Core.Option.t_Option t_SafeIndex + else Core.Option.Option_None <: Core.Option.t_Option t_SafeIndex ''' "Attributes.Pre_post_on_traits_and_impls.fst" = ''' module Attributes.Pre_post_on_traits_and_impls @@ -169,14 +169,10 @@ class t_TraitWithRequiresAndEnsures (v_Self: Type0) = { -> Prims.Pure u8 (f_method_pre x0 x1) (fun result -> f_method_post x0 x1 result) } -let test - (#v_T: Type0) - (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_TraitWithRequiresAndEnsures v_T) - (x: v_T) - : u8 = (f_method #v_T #FStar.Tactics.Typeclasses.solve x 99uy <: u8) -! 88uy - type t_ViaAdd = | ViaAdd : t_ViaAdd +type t_ViaMul = | ViaMul : t_ViaMul + [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_Operation t_ViaAdd = { @@ -194,8 +190,6 @@ let impl: t_Operation t_ViaAdd = f_double = fun (x: u8) -> x +! x } -type t_ViaMul = | ViaMul : t_ViaMul - [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_1: t_Operation t_ViaMul = { @@ -212,6 +206,12 @@ let impl_1: t_Operation t_ViaMul = (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int)); f_double = fun (x: u8) -> x *! 
2uy } + +let test + (#v_T: Type0) + (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_TraitWithRequiresAndEnsures v_T) + (x: v_T) + : u8 = (f_method #v_T #FStar.Tactics.Typeclasses.solve x 99uy <: u8) -! 88uy ''' "Attributes.Refined_arithmetic.fst" = ''' module Attributes.Refined_arithmetic @@ -247,10 +247,10 @@ module Attributes.Refined_indexes open Core open FStar.Mul -let v_MAX: usize = sz 10 - type t_MyArray = | MyArray : t_Array u8 (sz 10) -> t_MyArray +let v_MAX: usize = sz 10 + /// Triple dash comment (** Multiline double star comment Maecenas blandit accumsan feugiat. Done vitae ullamcorper est. @@ -288,18 +288,18 @@ module Attributes.Refinement_types open Core open FStar.Mul -let t_BoundedU8 (v_MIN v_MAX: u8) = x: u8{x >=. v_MIN && x <=. v_MAX} - /// Example of a specific constraint on a value let t_CompressionFactor = x: u8{x =. 4uy || x =. 5uy || x =. 10uy || x =. 11uy} -/// Even `u8` numbers. Constructing pub Even values triggers static -/// proofs in the extraction. -let t_Even = x: u8{(x %! 2uy <: u8) =. 0uy} +let t_BoundedU8 (v_MIN v_MAX: u8) = x: u8{x >=. v_MIN && x <=. v_MAX} /// A field element let t_FieldElement = x: u16{x <=. 2347us} +/// Even `u8` numbers. Constructing pub Even values triggers static +/// proofs in the extraction. +let t_Even = x: u8{(x %! 2uy <: u8) =. 0uy} + /// A modular mutliplicative inverse let t_ModInverse (v_MOD: u32) = n: @@ -330,14 +330,14 @@ let t_NoE = in ~.out } +let double_refine (x: u8) : Prims.Pure t_Even (requires x <. 127uy) (fun _ -> Prims.l_True) = + x +! x <: t_Even + let bounded_u8 (x: t_BoundedU8 12uy 15uy) (y: t_BoundedU8 10uy 11uy) : t_BoundedU8 1uy 23uy = (x <: u8) +! (y <: u8) <: t_BoundedU8 1uy 23uy let double (x: u8) : Prims.Pure t_Even (requires x <. 127uy) (fun _ -> Prims.l_True) = x +! x <: t_Even - -let double_refine (x: u8) : Prims.Pure t_Even (requires x <. 127uy) (fun _ -> Prims.l_True) = - x +! 
x <: t_Even ''' "Attributes.Requires_mut.fst" = ''' module Attributes.Requires_mut @@ -454,6 +454,12 @@ module Attributes open Core open FStar.Mul +type t_Foo = { + f_x:u32; + f_y:f_y: u32{f_y >. 3ul}; + f_z:f_z: u32{((f_y +! f_x <: u32) +! f_z <: u32) >. 3ul} +} + let inlined_code__V: u8 = 12uy let issue_844_ (v__x: u8) @@ -498,14 +504,6 @@ let add3_lemma (x: u32) x <=. 10ul || x >=. (u32_max /! 3ul <: u32) || (add3 x x x <: u32) =. (x *! 3ul <: u32)) = () -type t_Foo = { - f_x:u32; - f_y:f_y: u32{f_y >. 3ul}; - f_z:f_z: u32{((f_y +! f_x <: u32) +! f_z <: u32) >. 3ul} -} - -unfold let some_function _ = "hello from F*" - let before_inlined_code = "example before" let inlined_code (foo: t_Foo) : Prims.unit = @@ -518,4 +516,6 @@ let inlined_code (foo: t_Foo) : Prims.unit = () let inlined_code_after = "example after" + +unfold let some_function _ = "hello from F*" ''' diff --git a/test-harness/src/snapshots/toolchain__constructor-as-closure into-fstar.snap b/test-harness/src/snapshots/toolchain__constructor-as-closure into-fstar.snap new file mode 100644 index 000000000..6f8919b71 --- /dev/null +++ b/test-harness/src/snapshots/toolchain__constructor-as-closure into-fstar.snap @@ -0,0 +1,46 @@ +--- +source: test-harness/src/harness.rs +expression: snapshot +info: + kind: + Translate: + backend: fstar + info: + name: constructor-as-closure + manifest: constructor-as-closure/Cargo.toml + description: ~ + spec: + optional: false + broken: false + issue_id: ~ + positive: true + snapshot: + stderr: false + stdout: true + include_flag: ~ + backend_options: ~ +--- +exit = 0 + +[stdout] +diagnostics = [] + +[stdout.files] +"Constructor_as_closure.fst" = ''' +module Constructor_as_closure +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +type t_Context = + | Context_A : i32 -> t_Context + | Context_B : i32 -> t_Context + +type t_Test = | Test : i32 -> t_Test + +let impl__Test__test (x: Core.Option.t_Option i32) : Core.Option.t_Option t_Test = + 
Core.Option.impl__map #i32 #t_Test x Test + +let impl__Context__test (x: Core.Option.t_Option i32) : Core.Option.t_Option t_Context = + Core.Option.impl__map #i32 #t_Context x Context_B +''' diff --git a/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap b/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap index 453e0d2a1..af07fc045 100644 --- a/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap @@ -32,7 +32,7 @@ module Cyclic_modules.B open Core open FStar.Mul -include Cyclic_modules.Rec_bundle_507852343 {g as g} +include Cyclic_modules.Rec_bundle_318256792 {g as g} ''' "Cyclic_modules.C.fst" = ''' module Cyclic_modules.C @@ -42,15 +42,29 @@ open FStar.Mul let i (_: Prims.unit) : Prims.unit = () ''' +"Cyclic_modules.D.Rec_bundle_773034964.fst" = ''' +module Cyclic_modules.D.Rec_bundle_773034964 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +let d1 (_: Prims.unit) : Prims.unit = () + +let e1 (_: Prims.unit) : Prims.unit = d1 () + +let de1 (_: Prims.unit) : Prims.unit = e1 () + +let d2 (_: Prims.unit) : Prims.unit = de1 () +''' "Cyclic_modules.D.fst" = ''' module Cyclic_modules.D #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -include Cyclic_modules.E.Rec_bundle_868781766 {d1 as d1} +include Cyclic_modules.D.Rec_bundle_773034964 {d1 as d1} -include Cyclic_modules.E.Rec_bundle_868781766 {d2 as d2} +include Cyclic_modules.D.Rec_bundle_773034964 {d2 as d2} ''' "Cyclic_modules.De.fst" = ''' module Cyclic_modules.De @@ -58,21 +72,41 @@ module Cyclic_modules.De open Core open FStar.Mul -include Cyclic_modules.E.Rec_bundle_868781766 {de1 as de1} +include Cyclic_modules.D.Rec_bundle_773034964 {de1 as de1} ''' -"Cyclic_modules.E.Rec_bundle_868781766.fst" = ''' -module Cyclic_modules.E.Rec_bundle_868781766 +"Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688.fst" = ''' +module 
Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -let d1 (_: Prims.unit) : Prims.unit = () +let g (_: Prims.unit) : Prims.unit = () -let e1 (_: Prims.unit) : Prims.unit = d1 () +let h (_: Prims.unit) : Prims.unit = () -let de1 (_: Prims.unit) : Prims.unit = e1 () +let f (_: Prims.unit) : Prims.unit = h () -let d2 (_: Prims.unit) : Prims.unit = de1 () +let i (_: Prims.unit) : Prims.unit = g () +''' +"Cyclic_modules.Disjoint_cycle_a.fst" = ''' +module Cyclic_modules.Disjoint_cycle_a +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {g as g} + +include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {f as f} +''' +"Cyclic_modules.Disjoint_cycle_b.fst" = ''' +module Cyclic_modules.Disjoint_cycle_b +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {h as h} + +include Cyclic_modules.Disjoint_cycle_a.Rec_bundle_317759688 {i as i} ''' "Cyclic_modules.E.fst" = ''' module Cyclic_modules.E @@ -80,7 +114,7 @@ module Cyclic_modules.E open Core open FStar.Mul -include Cyclic_modules.E.Rec_bundle_868781766 {e1 as e1} +include Cyclic_modules.D.Rec_bundle_773034964 {e1 as e1} ''' "Cyclic_modules.Enums_a.fst" = ''' module Cyclic_modules.Enums_a @@ -88,18 +122,18 @@ module Cyclic_modules.Enums_a open Core open FStar.Mul -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {t_T240131830 as t_T} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {t_T240131830 as t_T} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {T240131830_A as T_A} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_A as T_A} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {T240131830_B as T_B} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_B as T_B} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {T240131830_C 
as T_C} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_C as T_C} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {T240131830_D as T_D} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T240131830_D as T_D} ''' -"Cyclic_modules.Enums_b.Rec_bundle_573885887.fst" = ''' -module Cyclic_modules.Enums_b.Rec_bundle_573885887 +"Cyclic_modules.Enums_b.Rec_bundle_994866580.fst" = ''' +module Cyclic_modules.Enums_b.Rec_bundle_994866580 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul @@ -119,6 +153,8 @@ and t_T240131830 = | T240131830_B : t_T240131830 | T240131830_C : Alloc.Vec.t_Vec t_U Alloc.Alloc.t_Global -> t_T240131830 | T240131830_D : Alloc.Vec.t_Vec t_T366415196 Alloc.Alloc.t_Global -> t_T240131830 + +let f (_: Prims.unit) : t_T366415196 = T366415196_A <: t_T366415196 ''' "Cyclic_modules.Enums_b.fst" = ''' module Cyclic_modules.Enums_b @@ -126,23 +162,86 @@ module Cyclic_modules.Enums_b open Core open FStar.Mul -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {t_T366415196 as t_T} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {t_T366415196 as t_T} + +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T366415196_A as T_A} + +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T366415196_B as T_B} + +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {T366415196_C as T_C} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {T366415196_A as T_A} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {t_U as t_U} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {T366415196_B as T_B} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {U_A as U_A} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {T366415196_C as T_C} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {U_B as U_B} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {t_U as t_U} +include Cyclic_modules.Enums_b.Rec_bundle_994866580 {U_C as U_C} -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {U_A as U_A} +include 
Cyclic_modules.Enums_b.Rec_bundle_994866580 {f as f} +''' +"Cyclic_modules.Late_skip_a.fst" = ''' +module Cyclic_modules.Late_skip_a +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {U_B as U_B} +include Cyclic_modules.Late_skip_b.Rec_bundle_447022631 {f749016415 as f} +''' +"Cyclic_modules.Late_skip_b.Rec_bundle_447022631.fst" = ''' +module Cyclic_modules.Late_skip_b.Rec_bundle_447022631 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul -include Cyclic_modules.Enums_b.Rec_bundle_573885887 {U_C as U_C} +let rec f749016415 (_: Prims.unit) : Prims.unit = f377825240 () -let f (_: Prims.unit) : t_T = T_A <: t_T +and f377825240 (_: Prims.unit) : Prims.Pure Prims.unit (requires true) (fun _ -> Prims.l_True) = + f749016415 () +''' +"Cyclic_modules.Late_skip_b.fst" = ''' +module Cyclic_modules.Late_skip_b +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +include Cyclic_modules.Late_skip_b.Rec_bundle_447022631 {f377825240 as f} +''' +"Cyclic_modules.M1.fst" = ''' +module Cyclic_modules.M1 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +include Cyclic_modules.M2.Rec_bundle_489499412 {a as a} +''' +"Cyclic_modules.M2.Rec_bundle_489499412.fst" = ''' +module Cyclic_modules.M2.Rec_bundle_489499412 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +let c (_: Prims.unit) : Prims.unit = () + +let a (_: Prims.unit) : Prims.unit = c () + +let d (_: Prims.unit) : Prims.unit = () + +let b (_: Prims.unit) : Prims.unit = + let _:Prims.unit = a () in + d () +''' +"Cyclic_modules.M2.fst" = ''' +module Cyclic_modules.M2 +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +include Cyclic_modules.M2.Rec_bundle_489499412 {c as c} + +include Cyclic_modules.M2.Rec_bundle_489499412 {d as d} + +include Cyclic_modules.M2.Rec_bundle_489499412 {b as b} ''' "Cyclic_modules.Rec.fst" = 
''' module Cyclic_modules.Rec @@ -174,23 +273,23 @@ and g2 (x: t_T) : t_T = | T_t1 -> g1 x | T_t2 -> hf x ''' -"Cyclic_modules.Rec1_same_name.fst" = ''' -module Cyclic_modules.Rec1_same_name +"Cyclic_modules.Rec1_same_name.Rec_bundle_213192514.fst" = ''' +module Cyclic_modules.Rec1_same_name.Rec_bundle_213192514 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -include Cyclic_modules.Rec2_same_name.Rec_bundle_784146069 {f533409751 as f} +let rec f533409751 (x: i32) : i32 = f91805216 x + +and f91805216 (x: i32) : i32 = if x >. 0l then f533409751 (x -! 1l <: i32) else 0l ''' -"Cyclic_modules.Rec2_same_name.Rec_bundle_784146069.fst" = ''' -module Cyclic_modules.Rec2_same_name.Rec_bundle_784146069 +"Cyclic_modules.Rec1_same_name.fst" = ''' +module Cyclic_modules.Rec1_same_name #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -let rec f533409751 (x: i32) : i32 = f91805216 x - -and f91805216 (x: i32) : i32 = if x >. 0l then f533409751 (x -! 1l <: i32) else 0l +include Cyclic_modules.Rec1_same_name.Rec_bundle_213192514 {f533409751 as f} ''' "Cyclic_modules.Rec2_same_name.fst" = ''' module Cyclic_modules.Rec2_same_name @@ -198,10 +297,10 @@ module Cyclic_modules.Rec2_same_name open Core open FStar.Mul -include Cyclic_modules.Rec2_same_name.Rec_bundle_784146069 {f91805216 as f} +include Cyclic_modules.Rec1_same_name.Rec_bundle_213192514 {f91805216 as f} ''' -"Cyclic_modules.Rec_bundle_507852343.fst" = ''' -module Cyclic_modules.Rec_bundle_507852343 +"Cyclic_modules.Rec_bundle_318256792.fst" = ''' +module Cyclic_modules.Rec_bundle_318256792 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul @@ -213,6 +312,8 @@ let g (_: Prims.unit) : Prims.unit = f () let h (_: Prims.unit) : Prims.unit = let _:Prims.unit = g () in Cyclic_modules.C.i () + +let h2 (_: Prims.unit) : Prims.unit = Cyclic_modules.C.i () ''' "Cyclic_modules.Typ_a.fst" = ''' module Cyclic_modules.Typ_a @@ -220,22 +321,30 @@ module 
Cyclic_modules.Typ_a open Core open FStar.Mul -include Cyclic_modules.Typ_b.Rec_bundle_684725220 {t_T as t_T} +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T as t_T} -include Cyclic_modules.Typ_b.Rec_bundle_684725220 {T_T as T_T} +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T_T as T_T} -include Cyclic_modules.Typ_b.Rec_bundle_445945170 {t_TRec as t_TRec} +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_TRec as t_TRec} -include Cyclic_modules.Typ_b.Rec_bundle_445945170 {TRec_T as TRec_T} +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {TRec_T as TRec_T} -include Cyclic_modules.Typ_b.Rec_bundle_445945170 {TRec_Empty as TRec_Empty} +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {TRec_Empty as TRec_Empty} ''' -"Cyclic_modules.Typ_b.Rec_bundle_445945170.fst" = ''' -module Cyclic_modules.Typ_b.Rec_bundle_445945170 +"Cyclic_modules.Typ_b.Rec_bundle_546955701.fst" = ''' +module Cyclic_modules.Typ_b.Rec_bundle_546955701 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul +type t_T1 = | T1_T1 : t_T1 + +type t_T = | T_T : t_T1 -> t_T + +let t_T1_cast_to_repr (x: t_T1) : isize = match x with | T1_T1 -> isz 0 + +type t_T2 = | T2_T2 : t_T -> t_T2 + type t_TRec = | TRec_T : t_T1Rec -> t_TRec | TRec_Empty : t_TRec @@ -244,41 +353,70 @@ and t_T1Rec = | T1Rec_T1 : Alloc.Boxed.t_Box t_T2Rec Alloc.Alloc.t_Global -> t_T and t_T2Rec = | T2Rec_T2 : t_TRec -> t_T2Rec ''' -"Cyclic_modules.Typ_b.Rec_bundle_684725220.fst" = ''' -module Cyclic_modules.Typ_b.Rec_bundle_684725220 +"Cyclic_modules.Typ_b.fst" = ''' +module Cyclic_modules.Typ_b #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -type t_T1 = | T1_T1 : t_T1 +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T1 as t_T1} -type t_T = | T_T : t_T1 -> t_T +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T1_T1 as T1_T1} -type t_T2 = | T2_T2 : t_T -> t_T2 +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T1_cast_to_repr as t_T1_cast_to_repr} + +include 
Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T2 as t_T2} + +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T2_T2 as T2_T2} + +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T1Rec as t_T1Rec} + +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T1Rec_T1 as T1Rec_T1} + +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {t_T2Rec as t_T2Rec} + +include Cyclic_modules.Typ_b.Rec_bundle_546955701 {T2Rec_T2 as T2Rec_T2} ''' -"Cyclic_modules.Typ_b.fst" = ''' -module Cyclic_modules.Typ_b +"Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539.fst" = ''' +module Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul -include Cyclic_modules.Typ_b.Rec_bundle_684725220 {t_T1 as t_T1} +type t_Context = + | Context_A : i32 -> t_Context + | Context_B : i32 -> t_Context -include Cyclic_modules.Typ_b.Rec_bundle_684725220 {T1_T1 as T1_T1} +let test (x: Core.Option.t_Option i32) : Core.Option.t_Option t_Context = + Core.Option.impl__map #i32 #t_Context x Context_A -let t_T1_cast_to_repr (x: t_T1) : isize = match x with | T1_T1 -> isz 0 +let h (_: Prims.unit) : t_Context = Context_A 1l <: t_Context -include Cyclic_modules.Typ_b.Rec_bundle_684725220 {t_T2 as t_T2} +let f (_: Prims.unit) : t_Context = h () +''' +"Cyclic_modules.Variant_constructor_a.fst" = ''' +module Cyclic_modules.Variant_constructor_a +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul -include Cyclic_modules.Typ_b.Rec_bundle_684725220 {T2_T2 as T2_T2} +include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {t_Context as t_Context} -include Cyclic_modules.Typ_b.Rec_bundle_445945170 {t_T1Rec as t_T1Rec} +include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {Context_A as Context_A} -include Cyclic_modules.Typ_b.Rec_bundle_445945170 {T1Rec_T1 as T1Rec_T1} +include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {Context_B as Context_B} -include 
Cyclic_modules.Typ_b.Rec_bundle_445945170 {t_T2Rec as t_T2Rec} +include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {test as impl__Context__test} -include Cyclic_modules.Typ_b.Rec_bundle_445945170 {T2Rec_T2 as T2Rec_T2} +include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {f as f} +''' +"Cyclic_modules.Variant_constructor_b.fst" = ''' +module Cyclic_modules.Variant_constructor_b +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +include Cyclic_modules.Variant_constructor_a.Rec_bundle_584097539 {h as h} ''' "Cyclic_modules.fst" = ''' module Cyclic_modules @@ -286,9 +424,9 @@ module Cyclic_modules open Core open FStar.Mul -include Cyclic_modules.Rec_bundle_507852343 {f as f} +include Cyclic_modules.Rec_bundle_318256792 {f as f} -include Cyclic_modules.Rec_bundle_507852343 {h as h} +include Cyclic_modules.Rec_bundle_318256792 {h as h} -let h2 (_: Prims.unit) : Prims.unit = Cyclic_modules.C.i () +include Cyclic_modules.Rec_bundle_318256792 {h2 as h2} ''' diff --git a/test-harness/src/snapshots/toolchain__enum-repr into-coq.snap b/test-harness/src/snapshots/toolchain__enum-repr into-coq.snap index 22894e237..bfe3bcc0a 100644 --- a/test-harness/src/snapshots/toolchain__enum-repr into-coq.snap +++ b/test-harness/src/snapshots/toolchain__enum-repr into-coq.snap @@ -29,48 +29,57 @@ diagnostics = [] [stdout.files] "Enum_repr.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. -Definition discriminant_EnumWithRepr_ExplicitDiscr1 : int16 := - (@repr WORDSIZE16 1). -Definition discriminant_EnumWithRepr_ExplicitDiscr2 : int16 := - (@repr WORDSIZE16 5). 
+ +Definition discriminant_EnumWithRepr_ExplicitDiscr1 : t_u16 := + 1. + +Definition discriminant_EnumWithRepr_ExplicitDiscr2 : t_u16 := + 5. Inductive t_EnumWithRepr : Type := -| EnumWithRepr_ExplicitDiscr1 : t_EnumWithRepr -| EnumWithRepr_ExplicitDiscr2 : t_EnumWithRepr -| EnumWithRepr_ImplicitDiscrEmptyTuple : t_EnumWithRepr -| EnumWithRepr_ImplicitDiscrEmptyStruct : t_EnumWithRepr. +| EnumWithRepr_ExplicitDiscr1 +| EnumWithRepr_ExplicitDiscr2 +| EnumWithRepr_ImplicitDiscrEmptyTuple +| EnumWithRepr_ImplicitDiscrEmptyStruct. +Arguments t_EnumWithRepr:clear implicits. +Arguments t_EnumWithRepr. -Definition t_EnumWithRepr_cast_to_repr (x : t_EnumWithRepr_t) : int16 := +Definition t_EnumWithRepr_cast_to_repr (x : t_EnumWithRepr) : t_u16 := match x with - | EnumWithRepr_ExplicitDiscr1 => + | EnumWithRepr_ExplicitDiscr1 => discriminant_EnumWithRepr_ExplicitDiscr1 - | EnumWithRepr_ExplicitDiscr2 => + | EnumWithRepr_ExplicitDiscr2 => discriminant_EnumWithRepr_ExplicitDiscr2 - | EnumWithRepr_ImplicitDiscrEmptyTuple => - discriminant_EnumWithRepr_ExplicitDiscr2.+(@repr WORDSIZE16 1) - | EnumWithRepr_ImplicitDiscrEmptyStruct => - discriminant_EnumWithRepr_ExplicitDiscr2.+(@repr WORDSIZE16 2) + | EnumWithRepr_ImplicitDiscrEmptyTuple => + t_Add_f_add (discriminant_EnumWithRepr_ExplicitDiscr2) (1) + | EnumWithRepr_ImplicitDiscrEmptyStruct => + t_Add_f_add (discriminant_EnumWithRepr_ExplicitDiscr2) (2) end. -(*Not implemented yet? todo(item)*) +(* NotImplementedYet *) -Definition f (_ : unit) : int32 := - let _x := cast (discriminant_EnumWithRepr_ExplicitDiscr2.+(@repr WORDSIZE16 0)) : int16 in - (cast (t_EnumWithRepr_cast_to_repr EnumWithRepr_ImplicitDiscrEmptyTuplet_EnumWithRepr_t)).+(cast (t_EnumWithRepr_cast_to_repr EnumWithRepr_ImplicitDiscrEmptyStructt_EnumWithRepr_t)). 
+Definition f (_ : unit) : t_u32 := + let v__x := cast (t_Add_f_add (discriminant_EnumWithRepr_ExplicitDiscr2) (0)) in + t_Add_f_add (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyTuple))) (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyStruct))). -Definition ff__CONST : int16 := - cast (discriminant_EnumWithRepr_ExplicitDiscr1.+(@repr WORDSIZE16 0)). +Definition ff__CONST : t_u16 := + cast (t_Add_f_add (discriminant_EnumWithRepr_ExplicitDiscr1) (0)). -Definition get_casted_repr (x : t_EnumWithRepr_t) : int64 := - cast (t_EnumWithRepr_cast_to_repr x). +Definition get_casted_repr (x : t_EnumWithRepr) : t_u64 := + cast (t_EnumWithRepr_cast_to_repr (x)). -Definition get_repr (x : t_EnumWithRepr_t) : int16 := - t_EnumWithRepr_cast_to_repr x. +Definition get_repr (x : t_EnumWithRepr) : t_u16 := + t_EnumWithRepr_cast_to_repr (x). ''' diff --git a/test-harness/src/snapshots/toolchain__functions into-coq.snap b/test-harness/src/snapshots/toolchain__functions into-coq.snap index 56ca87c28..29fe45a24 100644 --- a/test-harness/src/snapshots/toolchain__functions into-coq.snap +++ b/test-harness/src/snapshots/toolchain__functions into-coq.snap @@ -20,10 +20,12 @@ info: include_flag: ~ backend_options: ~ --- -exit = 0 - -[stdout] -diagnostics = [] +exit = 1 +[[stdout.diagnostics]] +message = ''' +(Coq backend) something is not implemented yet. +[ty] node typ''' +spans = ['Span { lo: Loc { line: 11, col: 4 }, hi: Loc { line: 17, col: 5 }, filename: Real(LocalPath("functions/src/lib.rs")), rust_span_data: None }'] [stdout.files] "Functions.v" = ''' @@ -36,6 +38,8 @@ Open Scope bool_scope. (*Not implemented yet? todo(item)*) +(*Not implemented yet? todo(item)*) + Definition calling_function_pointer__f (_ : unit) : unit := tt. @@ -44,3 +48,19 @@ Definition calling_function_pointer (_ : unit) : unit := let _ := calling_function_pointer__f tt : unit in tt. 
''' +"Functions_Issue_1048_.v" = ''' +(* File automatically generated by Hacspec *) +From Hacspec Require Import Hacspec_Lib MachineIntegers. +From Coq Require Import ZArith. +Import List.ListNotations. +Open Scope Z_scope. +Open Scope bool_scope. + +Record t_CallableViaDeref : Type := { +}. + +(*item error backend*) + +Definition call_via_deref (_ : unit) : bool := + f_deref CallableViaDereft_CallableViaDeref_t tt. +''' diff --git a/test-harness/src/snapshots/toolchain__functions into-fstar.snap b/test-harness/src/snapshots/toolchain__functions into-fstar.snap index 21a6f688a..df12b8b6e 100644 --- a/test-harness/src/snapshots/toolchain__functions into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__functions into-fstar.snap @@ -26,6 +26,34 @@ exit = 0 diagnostics = [] [stdout.files] +"Functions.Issue_1048_.fst" = ''' +module Functions.Issue_1048_ +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +type t_CallableViaDeref = | CallableViaDeref : t_CallableViaDeref + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl: Core.Ops.Deref.t_Deref t_CallableViaDeref = + { + f_Target = Prims.unit -> bool; + f_deref_pre = (fun (self: t_CallableViaDeref) -> true); + f_deref_post = (fun (self: t_CallableViaDeref) (out: (Prims.unit -> bool)) -> true); + f_deref + = + fun (self: t_CallableViaDeref) -> + fun temp_0_ -> + let _:Prims.unit = temp_0_ in + true + } + +let call_via_deref (_: Prims.unit) : bool = + Core.Ops.Deref.f_deref #t_CallableViaDeref + #FStar.Tactics.Typeclasses.solve + (CallableViaDeref <: t_CallableViaDeref) + () +''' "Functions.fst" = ''' module Functions #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" diff --git a/test-harness/src/snapshots/toolchain__generics into-fstar.snap b/test-harness/src/snapshots/toolchain__generics into-fstar.snap index c875fcfa5..fa558e443 100644 --- a/test-harness/src/snapshots/toolchain__generics into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__generics into-fstar.snap @@ -46,6 +46,8 
@@ open FStar.Mul let impl__Bar__inherent_impl_generics (#v_T: Type0) (v_N: usize) (x: t_Array v_T v_N) : Prims.unit = () +type t_Bar = | Bar : t_Bar + class t_Foo (v_Self: Type0) = { f_const_add_pre:v_N: usize -> v_Self -> Type0; f_const_add_post:v_N: usize -> v_Self -> usize -> Type0; @@ -131,6 +133,4 @@ let repeat (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core.Marker.t_Copy v_T) (x: v_T) : t_Array v_T v_LEN = Rust_primitives.Hax.repeat x v_LEN - -type t_Bar = | Bar : t_Bar ''' diff --git a/test-harness/src/snapshots/toolchain__guards into-coq.snap b/test-harness/src/snapshots/toolchain__guards into-coq.snap index 858c6b949..abd2a3274 100644 --- a/test-harness/src/snapshots/toolchain__guards into-coq.snap +++ b/test-harness/src/snapshots/toolchain__guards into-coq.snap @@ -28,126 +28,133 @@ diagnostics = [] [stdout.files] "Guards.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. -(*Not implemented yet? 
todo(item)*) -Definition equivalent (x : t_Option_t (t_Result_t int32 int32)) : int32 := + +(* NotImplementedYet *) + +Definition equivalent (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 := match x with - | Option_None => - (@repr WORDSIZE32 0) + | Option_None => + 0 | _ => match match x with - | Option_Some v => - match v with - | Result_Ok y => - Option_Some y - | _ => - Option_Nonet_Option_t int32 - end + | Option_Some (v) => + match v with + | Result_Ok (y) => + Option_Some (y) | _ => - Option_Nonet_Option_t int32 - end with - | Option_Some y => + Option_None + end + | _ => + Option_None + end with + | Option_Some (y) => y - | Option_None => + | Option_None => match x with - | Option_Some Result_Err y => + | Option_Some (Result_Err (y)) => y | _ => - (@repr WORDSIZE32 1) + 1 end end end. -Definition if_guard (x : t_Option_t int32) : int32 := +Definition if_guard (x : t_Option ((t_i32))) : t_i32 := match match x with - | Option_Some v => - match v>.?(@repr WORDSIZE32 0) with - | true => - Option_Some v - | _ => - Option_Nonet_Option_t int32 - end + | Option_Some (v) => + match t_PartialOrd_f_gt (v) (0) with + | true => + Option_Some (v) | _ => - Option_Nonet_Option_t int32 - end with - | Option_Some x => + Option_None + end + | _ => + Option_None + end with + | Option_Some (x) => x - | Option_None => - (@repr WORDSIZE32 0) + | Option_None => + 0 end. 
-Definition if_let_guard (x : t_Option_t (t_Result_t int32 int32)) : int32 := +Definition if_let_guard (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 := match x with - | Option_None => - (@repr WORDSIZE32 0) + | Option_None => + 0 | _ => match match x with - | Option_Some v => - match v with - | Result_Ok y => - Option_Some y - | _ => - Option_Nonet_Option_t int32 - end + | Option_Some (v) => + match v with + | Result_Ok (y) => + Option_Some (y) | _ => - Option_Nonet_Option_t int32 - end with - | Option_Some x => + Option_None + end + | _ => + Option_None + end with + | Option_Some (x) => x - | Option_None => + | Option_None => match x with - | Option_Some Result_Err y => + | Option_Some (Result_Err (y)) => y | _ => - (@repr WORDSIZE32 1) + 1 end end end. -Definition multiple_guards (x : t_Option_t (t_Result_t int32 int32)) : int32 := +Definition multiple_guards (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 := match x with - | Option_None => - (@repr WORDSIZE32 0) + | Option_None => + 0 | _ => match match x with - | Option_Some Result_Ok v => - match Option_Some (v.+(@repr WORDSIZE32 1)) with - | Option_Some (@repr WORDSIZE32 1) => - Option_Some (@repr WORDSIZE32 0) - | _ => - Option_Nonet_Option_t int32 - end + | Option_Some (Result_Ok (v)) => + match Option_Some (t_Add_f_add (v) (1)) with + | Option_Some (1) => + Option_Some (0) | _ => - Option_Nonet_Option_t int32 - end with - | Option_Some x => + Option_None + end + | _ => + Option_None + end with + | Option_Some (x) => x - | Option_None => + | Option_None => match match x with - | Option_Some v => - match v with - | Result_Ok y => - Option_Some y - | _ => - Option_Nonet_Option_t int32 - end + | Option_Some (v) => + match v with + | Result_Ok (y) => + Option_Some (y) | _ => - Option_Nonet_Option_t int32 - end with - | Option_Some x => + Option_None + end + | _ => + Option_None + end with + | Option_Some (x) => x - | Option_None => + | Option_None => match x with - | Option_Some Result_Err y 
=> + | Option_Some (Result_Err (y)) => y | _ => - (@repr WORDSIZE32 1) + 1 end end end diff --git a/test-harness/src/snapshots/toolchain__include-flag into-coq.snap b/test-harness/src/snapshots/toolchain__include-flag into-coq.snap index 48ac454a6..7cc86205b 100644 --- a/test-harness/src/snapshots/toolchain__include-flag into-coq.snap +++ b/test-harness/src/snapshots/toolchain__include-flag into-coq.snap @@ -18,6 +18,7 @@ info: stderr: false stdout: true include_flag: ~ + backend_options: ~ --- exit = 0 @@ -27,16 +28,39 @@ diagnostics = [] [stdout.files] "Include_flag.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. -Class t_Trait (Self : Type) := { -}. -(*Not implemented yet? todo(item)*) + +Record t_Foo : Type := + { + }. +Arguments t_Foo:clear implicits. +Arguments t_Foo. +Arguments Build_t_Foo. +#[export] Instance settable_t_Foo : Settable _ := + settable! (@Build_t_Foo) <>. + +Class t_Trait `{v_Self : Type} : Type := + { + }. +Arguments t_Trait:clear implicits. +Arguments t_Trait (_). + +Instance t_Trait_187936720 : t_Trait ((t_Foo)) := + { + }. + +(* NotImplementedYet *) Definition main_a_a (_ : unit) : unit := tt. @@ -47,10 +71,10 @@ Definition main_a_b (_ : unit) : unit := Definition main_a_c (_ : unit) : unit := tt. -Definition main_a (x : T) : unit := - let _ := main_a_a tt : unit in - let _ := main_a_b tt : unit in - let _ := main_a_c tt : unit in +Definition main_a `{v_T : Type} `{t_Sized (v_T)} `{t_Trait (v_T)} (x : v_T) : unit := + let _ := main_a_a (tt) in + let _ := main_a_b (tt) in + let _ := main_a_c (tt) in tt. 
Definition main_b_a (_ : unit) : unit := @@ -63,9 +87,9 @@ Definition main_b_c (_ : unit) : unit := tt. Definition main_b (_ : unit) : unit := - let _ := main_b_a tt : unit in - let _ := main_b_b tt : unit in - let _ := main_b_c tt : unit in + let _ := main_b_a (tt) in + let _ := main_b_b (tt) in + let _ := main_b_c (tt) in tt. Definition main_c_a (_ : unit) : unit := @@ -78,20 +102,14 @@ Definition main_c_c (_ : unit) : unit := tt. Definition main_c (_ : unit) : unit := - let _ := main_c_a tt : unit in - let _ := main_c_b tt : unit in - let _ := main_c_c tt : unit in + let _ := main_c_a (tt) in + let _ := main_c_b (tt) in + let _ := main_c_c (tt) in tt. -Record t_Foo : Type := { -}. - -#[global] Instance t_Foo_t_t_Trait : t_Trait t_Foo_t := { -}. - Definition main (_ : unit) : unit := - let _ := main_a Foot_Foo_t : unit in - let _ := main_b tt : unit in - let _ := main_c tt : unit in + let _ := main_a (Build_t_Foo) in + let _ := main_b (tt) in + let _ := main_c (tt) in tt. ''' diff --git a/test-harness/src/snapshots/toolchain__include-flag into-fstar.snap b/test-harness/src/snapshots/toolchain__include-flag into-fstar.snap index a4ce27894..41cd2ac28 100644 --- a/test-harness/src/snapshots/toolchain__include-flag into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__include-flag into-fstar.snap @@ -32,8 +32,13 @@ module Include_flag open Core open FStar.Mul +type t_Foo = | Foo : t_Foo + class t_Trait (v_Self: Type0) = { __marker_trait_t_Trait:Prims.unit } +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl_Trait_for_Foo: t_Trait t_Foo = { __marker_trait = () } + /// Indirect dependencies let main_a_a (_: Prims.unit) : Prims.unit = () @@ -73,11 +78,6 @@ let main_c (_: Prims.unit) : Prims.unit = let _:Prims.unit = main_c_c () in () -type t_Foo = | Foo : t_Foo - -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_Trait_for_Foo: t_Trait t_Foo = { __marker_trait = () } - /// Entrypoint let main (_: Prims.unit) : Prims.unit = let _:Prims.unit = main_a #t_Foo (Foo 
<: t_Foo) in diff --git a/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap b/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap index cf2674e03..5426ecae4 100644 --- a/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__interface-only into-fstar.snap @@ -33,19 +33,6 @@ module Interface_only open Core open FStar.Mul -/// This item contains unsafe blocks and raw references, two features -/// not supported by hax. Thanks to the `-i` flag and the `+:` -/// modifier, `f` is still extractable as an interface. -/// Expressions within type are still extracted, as well as pre- and -/// post-conditions. -val f (x: u8) - : Prims.Pure (t_Array u8 (sz 4)) - (requires x <. 254uy) - (ensures - fun r -> - let r:t_Array u8 (sz 4) = r in - (r.[ sz 0 ] <: u8) >. x) - type t_Bar = | Bar : t_Bar /// Non-inherent implementations are extracted, their bodies are not @@ -69,4 +56,17 @@ let impl_1: Core.Convert.t_From t_Bar u8 = f_from_post = (fun (x: u8) (out: t_Bar) -> true); f_from = fun (x: u8) -> from__from x } + +/// This item contains unsafe blocks and raw references, two features +/// not supported by hax. Thanks to the `-i` flag and the `+:` +/// modifier, `f` is still extractable as an interface. +/// Expressions within type are still extracted, as well as pre- and +/// post-conditions. +val f (x: u8) + : Prims.Pure (t_Array u8 (sz 4)) + (requires x <. 254uy) + (ensures + fun r -> + let r:t_Array u8 (sz 4) = r in + (r.[ sz 0 ] <: u8) >. 
x) ''' diff --git a/test-harness/src/snapshots/toolchain__let-else into-coq.snap b/test-harness/src/snapshots/toolchain__let-else into-coq.snap index 85110e910..330c601b3 100644 --- a/test-harness/src/snapshots/toolchain__let-else into-coq.snap +++ b/test-harness/src/snapshots/toolchain__let-else into-coq.snap @@ -28,28 +28,35 @@ diagnostics = [] [stdout.files] "Let_else.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. -(*Not implemented yet? todo(item)*) -Definition let_else (opt : t_Option_t int32) : bool := - run match opt with - | Option_Some x => - ControlFlow_Continue true + +(* NotImplementedYet *) + +Definition let_else (opt : t_Option ((t_u32))) : bool := + run (match opt with + | Option_Some (x) => + ControlFlow_Continue (true) | _ => - ControlFlow_Break false - end. + ControlFlow_Break (false) + end). -Definition let_else_different_type (opt : t_Option_t int32) : bool := +Definition let_else_different_type (opt : t_Option ((t_u32))) : bool := run (let hoist1 := match opt with - | Option_Some x => - ControlFlow_Continue (Option_Some (x.+(@repr WORDSIZE32 1))) - | _ => - ControlFlow_Break false - end : t_Option_t int32 in - ControlFlow_Continue (let_else hoist1)). + | Option_Some (x) => + ControlFlow_Continue (Option_Some (t_Add_f_add (x) (1))) + | _ => + ControlFlow_Break (false) + end in + ControlFlow_Continue (let_else (hoist1))). 
''' diff --git a/test-harness/src/snapshots/toolchain__literals into-coq.snap b/test-harness/src/snapshots/toolchain__literals into-coq.snap index c18c543fa..77d4150c3 100644 --- a/test-harness/src/snapshots/toolchain__literals into-coq.snap +++ b/test-harness/src/snapshots/toolchain__literals into-coq.snap @@ -29,100 +29,110 @@ diagnostics = [] [stdout.files] "Literals.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. -Require Import Hax_lib_Int. -Export Hax_lib_Int. -(*Not implemented yet? todo(item)*) -Definition casts (x8 : int8) (x16 : int16) (x32 : int32) (x64 : int64) (xs : uint_size) : unit := - let (_ : int64) := ((((cast x8).+(cast x16)).+(cast x32)).+x64).+(cast xs) : int64 in - let (_ : int32) := ((((cast x8).+(cast x16)).+x32).+(cast x64)).+(cast xs) : int32 in - let (_ : int16) := ((((cast x8).+x16).+(cast x32)).+(cast x64)).+(cast xs) : int16 in - let (_ : int8) := (((x8.+(cast x16)).+(cast x32)).+(cast x64)).+(cast xs) : int8 in - let (_ : int64) := ((((cast x8).+(cast x16)).+(cast x32)).+(cast x64)).+(cast xs) : int64 in - let (_ : int32) := ((((cast x8).+(cast x16)).+(cast x32)).+(cast x64)).+(cast xs) : int32 in - let (_ : int16) := ((((cast x8).+(cast x16)).+(cast x32)).+(cast x64)).+(cast xs) : int16 in - let (_ : int8) := ((((cast x8).+(cast x16)).+(cast x32)).+(cast x64)).+(cast xs) : int8 in +From Literals Require Import Hax_lib (t_int). +Export Hax_lib (t_int). + +Record t_Foo : Type := + { + f_field : t_u8; + }. +Arguments t_Foo:clear implicits. +Arguments t_Foo. +Arguments Build_t_Foo. +#[export] Instance settable_t_Foo : Settable _ := + settable! (@Build_t_Foo) . 
+ +(* NotImplementedYet *) + +Definition v_CONSTANT : t_Foo := + Build_t_Foo (3). + +Definition casts (x8 : t_u8) (x16 : t_u16) (x32 : t_u32) (x64 : t_u64) (xs : t_usize) : unit := + let _ : t_u64 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (cast (x8)) (cast (x16))) (cast (x32))) (x64)) (cast (xs)) in + let _ : t_u32 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (cast (x8)) (cast (x16))) (x32)) (cast (x64))) (cast (xs)) in + let _ : t_u16 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (cast (x8)) (x16)) (cast (x32))) (cast (x64))) (cast (xs)) in + let _ : t_u8 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (x8) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in + let _ : t_i64 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in + let _ : t_i32 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in + let _ : t_i16 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in + let _ : t_i8 := t_Add_f_add (t_Add_f_add (t_Add_f_add (t_Add_f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in tt. Definition fn_pointer_cast (_ : unit) : unit := - let (f : int32 -> int32) := fun x => - x : int32 -> int32 in + let f : t_u32 -> t_u32 := fun x => + x in tt. 
-Definition math_integers (x : t_Int_t) : int8 := - let (_ : t_Int_t) := f_lift (@repr WORDSIZE32 3) : t_Int_t in - let _ := (impl__Int___unsafe_from_str -340282366920938463463374607431768211455000)>.?(impl__Int___unsafe_from_str 340282366920938463463374607431768211455000) : bool in - let _ := x<.?x : bool in - let _ := x>=.?x : bool in - let _ := x<=.?x : bool in - let _ := x<>x : bool in - let _ := x=.?x : bool in - let _ := x.+x : t_Int_t in - let _ := x.-x : t_Int_t in - let _ := x.*x : t_Int_t in - let _ := x./x : t_Int_t in - let (_ : int16) := impl__Int__to_i16 x : int16 in - let (_ : int32) := impl__Int__to_i32 x : int32 in - let (_ : int64) := impl__Int__to_i64 x : int64 in - let (_ : int128) := impl__Int__to_i128 x : int128 in - let (_ : uint_size) := impl__Int__to_isize x : uint_size in - let (_ : int16) := impl__Int__to_u16 x : int16 in - let (_ : int32) := impl__Int__to_u32 x : int32 in - let (_ : int64) := impl__Int__to_u64 x : int64 in - let (_ : int128) := impl__Int__to_u128 x : int128 in - let (_ : uint_size) := impl__Int__to_usize x : uint_size in - impl__Int__to_u8 (x.+(x.*x)). 
+Definition math_integers (x : t_Int) `{andb (f_gt (x) (impl__Int___unsafe_from_str ("0"%string))) (f_lt (x) (impl__Int___unsafe_from_str ("16"%string))) = true} : t_u8 := + let _ : t_Int := f_lift (3) in + let _ := f_gt (impl__Int___unsafe_from_str ("-340282366920938463463374607431768211455000"%string)) (impl__Int___unsafe_from_str ("340282366920938463463374607431768211455000"%string)) in + let _ := f_lt (x) (x) in + let _ := f_ge (x) (x) in + let _ := f_le (x) (x) in + let _ := f_ne (x) (x) in + let _ := f_eq (x) (x) in + let _ := f_add (x) (x) in + let _ := f_sub (x) (x) in + let _ := f_mul (x) (x) in + let _ := f_div (x) (x) in + let _ : t_i16 := impl__Int__to_i16 (x) in + let _ : t_i32 := impl__Int__to_i32 (x) in + let _ : t_i64 := impl__Int__to_i64 (x) in + let _ : t_i128 := impl__Int__to_i128 (x) in + let _ : t_isize := impl__Int__to_isize (x) in + let _ : t_u16 := impl__Int__to_u16 (x) in + let _ : t_u32 := impl__Int__to_u32 (x) in + let _ : t_u64 := impl__Int__to_u64 (x) in + let _ : t_u128 := impl__Int__to_u128 (x) in + let _ : t_usize := impl__Int__to_usize (x) in + impl__Int__to_u8 (f_add (x) (f_mul (x) (x))). Definition numeric (_ : unit) : unit := - let (_ : uint_size) := (@repr WORDSIZE32 123) : uint_size in - let (_ : uint_size) := (@repr WORDSIZE32 42) : uint_size in - let (_ : uint_size) := (@repr WORDSIZE32 42) : uint_size in - let (_ : int32) := (@repr WORDSIZE32 42) : int32 in - let (_ : int128) := (@repr WORDSIZE128 22222222222222222222) : int128 in + let _ : t_usize := 123 in + let _ : t_isize := -42 in + let _ : t_isize := 42 in + let _ : t_i32 := -42 in + let _ : t_u128 := 22222222222222222222 in tt. -Definition panic_with_msg (_ : unit) : unit := - never_to_any (panic_fmt (impl_2__new_const (array_from_list [with msg]))). - -Definition empty_array (_ : unit) : unit := - let (_ : seq int8) := unsize !TODO empty array! 
: seq int8 in +Definition patterns (_ : unit) : unit := + let _ := match 1 with + | 2 => + tt + | _ => + tt + end in + let _ := match ("hello"%string,(123,["a"%string; "b"%string])) with + | ("hello"%string,(123,v__todo)) => + tt + | _ => + tt + end in + let _ := match Build_t_Foo (4) with + | Foo (3) => + tt + | _ => + tt + end in tt. -Record t_Foo : Type := { - f_field : int8; -}. - -Definition v_CONSTANT : t_Foo_t := - Build_Foo (f_field := (@repr WORDSIZE8 3)). +Definition panic_with_msg (_ : unit) : unit := + never_to_any (panic_fmt (impl_2__new_const (["with msg"%string]))). -Definition patterns (_ : unit) : unit := - let _ := match (@repr WORDSIZE8 1) with - | (@repr WORDSIZE8 2) => - tt - | _ => - tt - end : unit in - let _ := match (hello,((@repr WORDSIZE32 123),array_from_list [a; - b])) with - | '(hello,((@repr WORDSIZE32 123),_todo)) => - tt - | _ => - tt - end : unit in - let _ := match Build_Foo (f_field := (@repr WORDSIZE8 4)) with - | {| - f_field := (@repr WORDSIZE8 3) - |} => - tt - | _ => - tt - end : unit in +Definition empty_array (_ : unit) : unit := + let _ : t_Slice t_u8 := unsize ([]) in tt. ''' diff --git a/test-harness/src/snapshots/toolchain__literals into-fstar.snap b/test-harness/src/snapshots/toolchain__literals into-fstar.snap index be54f22ba..f6c262b10 100644 --- a/test-harness/src/snapshots/toolchain__literals into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__literals into-fstar.snap @@ -33,6 +33,10 @@ module Literals open Core open FStar.Mul +type t_Foo = { f_field:u8 } + +let v_CONSTANT: t_Foo = { f_field = 3uy } <: t_Foo + let casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit = let (_: u64):u64 = ((((cast (x8 <: u8) <: u64) +! (cast (x16 <: u16) <: u64) <: u64) +! 
(cast (x32 <: u32) <: u64) @@ -145,30 +149,6 @@ let numeric (_: Prims.unit) : Prims.unit = let (_: u128):u128 = pub_u128 22222222222222222222 in () -let panic_with_msg (_: Prims.unit) : Prims.unit = - Rust_primitives.Hax.never_to_any (Core.Panicking.panic_fmt (Core.Fmt.impl_2__new_const (sz 1) - (let list = ["with msg"] in - FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); - Rust_primitives.Hax.array_of_list 1 list) - <: - Core.Fmt.t_Arguments) - <: - Rust_primitives.Hax.t_Never) - -let empty_array (_: Prims.unit) : Prims.unit = - let (_: t_Slice u8):t_Slice u8 = - (let list:Prims.list u8 = [] in - FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 0); - Rust_primitives.Hax.array_of_list 0 list) - <: - t_Slice u8 - in - () - -type t_Foo = { f_field:u8 } - -let v_CONSTANT: t_Foo = { f_field = 3uy } <: t_Foo - let patterns (_: Prims.unit) : Prims.unit = let _:Prims.unit = match 1uy with @@ -196,4 +176,24 @@ let patterns (_: Prims.unit) : Prims.unit = | _ -> () <: Prims.unit in () + +let panic_with_msg (_: Prims.unit) : Prims.unit = + Rust_primitives.Hax.never_to_any (Core.Panicking.panic_fmt (Core.Fmt.impl_2__new_const (sz 1) + (let list = ["with msg"] in + FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1); + Rust_primitives.Hax.array_of_list 1 list) + <: + Core.Fmt.t_Arguments) + <: + Rust_primitives.Hax.t_Never) + +let empty_array (_: Prims.unit) : Prims.unit = + let (_: t_Slice u8):t_Slice u8 = + (let list:Prims.list u8 = [] in + FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 0); + Rust_primitives.Hax.array_of_list 0 list) + <: + t_Slice u8 + in + () ''' diff --git a/test-harness/src/snapshots/toolchain__loops into-fstar.snap b/test-harness/src/snapshots/toolchain__loops into-fstar.snap index 695f8bf5b..2c35945ca 100644 --- a/test-harness/src/snapshots/toolchain__loops into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__loops into-fstar.snap @@ -27,6 +27,297 @@ stderr = 'Finished `dev` profile 
[unoptimized + debuginfo] target(s) in XXs' diagnostics = [] [stdout.files] +"Loops.Control_flow.fst" = ''' +module Loops.Control_flow +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +type t_M = { f_m:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global } + +let impl__M__decoded_message (self: t_M) + : Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = + match + Rust_primitives.Hax.Folds.fold_range_return (sz 0) + (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global self.f_m <: usize) + (fun temp_0_ temp_1_ -> + let _:Prims.unit = temp_0_ in + let _:usize = temp_1_ in + true) + () + (fun temp_0_ i -> + let _:Prims.unit = temp_0_ in + let i:usize = i in + if i >. sz 5 <: bool + then + Core.Ops.Control_flow.ControlFlow_Break + (Core.Ops.Control_flow.ControlFlow_Break + (Core.Option.Option_None + <: + Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) + (Prims.unit & Prims.unit)) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow + (Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) + (Prims.unit & Prims.unit)) Prims.unit + else + Core.Ops.Control_flow.ControlFlow_Continue () + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow + (Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) + (Prims.unit & Prims.unit)) Prims.unit) + with + | Core.Ops.Control_flow.ControlFlow_Break ret -> ret + | Core.Ops.Control_flow.ControlFlow_Continue _ -> + Core.Option.Option_Some + (Core.Clone.f_clone #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) + #FStar.Tactics.Typeclasses.solve + self.f_m) + <: + Core.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) + +let bigger_power_2_ (x: i32) : i32 = + let pow:i32 = 1l in + Rust_primitives.f_while_loop_cf (fun pow -> + let pow:i32 = pow in + pow <. 
1000000l <: bool) + pow + (fun pow -> + let pow:i32 = pow in + let pow:i32 = pow *! 2l in + if pow <. x + then + let pow:i32 = pow *! 3l in + if true + then + Core.Ops.Control_flow.ControlFlow_Break ((), pow <: (Prims.unit & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 + else + Core.Ops.Control_flow.ControlFlow_Continue (pow *! 2l) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 + else + Core.Ops.Control_flow.ControlFlow_Continue (pow *! 2l) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32) + +let double_sum (_: Prims.unit) : i32 = + let sum:i32 = 0l in + let sum:i32 = + Rust_primitives.Hax.Folds.fold_range_cf 1l + 10l + (fun sum temp_1_ -> + let sum:i32 = sum in + let _:i32 = temp_1_ in + true) + sum + (fun sum i -> + let sum:i32 = sum in + let i:i32 = i in + if i <. 0l <: bool + then + Core.Ops.Control_flow.ControlFlow_Break ((), sum <: (Prims.unit & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 + else + Core.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32) + in + sum *! 2l + +let double_sum2 (_: Prims.unit) : i32 = + let sum:i32 = 0l in + let sum2:i32 = 0l in + let sum, sum2:(i32 & i32) = + Rust_primitives.Hax.Folds.fold_range_cf 1l + 10l + (fun temp_0_ temp_1_ -> + let sum, sum2:(i32 & i32) = temp_0_ in + let _:i32 = temp_1_ in + true) + (sum, sum2 <: (i32 & i32)) + (fun temp_0_ i -> + let sum, sum2:(i32 & i32) = temp_0_ in + let i:i32 = i in + if i <. 0l <: bool + then + Core.Ops.Control_flow.ControlFlow_Break + ((), (sum, sum2 <: (i32 & i32)) <: (Prims.unit & (i32 & i32))) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32) + else + let sum:i32 = sum +! i in + Core.Ops.Control_flow.ControlFlow_Continue (sum, sum2 +! i <: (i32 & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32)) + in + sum +! 
sum2 + +let double_sum2_return (v: t_Slice i32) : i32 = + let sum:i32 = 0l in + let sum2:i32 = 0l in + match + Rust_primitives.Hax.f_fold_return (Core.Iter.Traits.Collect.f_into_iter #(t_Slice i32) + #FStar.Tactics.Typeclasses.solve + v + <: + Core.Slice.Iter.t_Iter i32) + (sum, sum2 <: (i32 & i32)) + (fun temp_0_ i -> + let sum, sum2:(i32 & i32) = temp_0_ in + let i:i32 = i in + if i <. 0l <: bool + then + Core.Ops.Control_flow.ControlFlow_Break + (Core.Ops.Control_flow.ControlFlow_Break 0l + <: + Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32))) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32))) (i32 & i32) + else + let sum:i32 = sum +! i in + Core.Ops.Control_flow.ControlFlow_Continue (sum, sum2 +! i <: (i32 & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32))) (i32 & i32)) + with + | Core.Ops.Control_flow.ControlFlow_Break ret -> ret + | Core.Ops.Control_flow.ControlFlow_Continue (sum, sum2) -> sum +! sum2 + +let double_sum_return (v: t_Slice i32) : i32 = + let sum:i32 = 0l in + match + Rust_primitives.Hax.f_fold_return (Core.Iter.Traits.Collect.f_into_iter #(t_Slice i32) + #FStar.Tactics.Typeclasses.solve + v + <: + Core.Slice.Iter.t_Iter i32) + sum + (fun sum i -> + let sum:i32 = sum in + let i:i32 = i in + if i <. 0l <: bool + then + Core.Ops.Control_flow.ControlFlow_Break + (Core.Ops.Control_flow.ControlFlow_Break 0l + <: + Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32 + else + Core.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) + with + | Core.Ops.Control_flow.ControlFlow_Break ret -> ret + | Core.Ops.Control_flow.ControlFlow_Continue sum -> sum *! 
2l + +let nested (_: Prims.unit) : i32 = + let sum:i32 = 0l in + let sum:i32 = + Rust_primitives.Hax.Folds.fold_range 1l + 10l + (fun sum temp_1_ -> + let sum:i32 = sum in + let _:i32 = temp_1_ in + true) + sum + (fun sum i -> + let sum:i32 = sum in + let i:i32 = i in + let sum:i32 = + Rust_primitives.Hax.Folds.fold_range_cf 1l + 10l + (fun sum temp_1_ -> + let sum:i32 = sum in + let _:i32 = temp_1_ in + true) + sum + (fun sum j -> + let sum:i32 = sum in + let j:i32 = j in + if j <. 0l <: bool + then + Core.Ops.Control_flow.ControlFlow_Break ((), sum <: (Prims.unit & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32 + else + Core.Ops.Control_flow.ControlFlow_Continue (sum +! j <: i32) + <: + Core.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32) + in + sum +! i) + in + sum *! 2l + +let nested_return (_: Prims.unit) : i32 = + let sum:i32 = 0l in + match + Rust_primitives.Hax.Folds.fold_range_return 1l + 10l + (fun sum temp_1_ -> + let sum:i32 = sum in + let _:i32 = temp_1_ in + true) + sum + (fun sum i -> + let sum:i32 = sum in + let i:i32 = i in + match + Rust_primitives.Hax.Folds.fold_range_return 1l + 10l + (fun sum temp_1_ -> + let sum:i32 = sum in + let _:i32 = temp_1_ in + true) + sum + (fun sum j -> + let sum:i32 = sum in + let j:i32 = j in + if j <. 0l <: bool + then + Core.Ops.Control_flow.ControlFlow_Break + (Core.Ops.Control_flow.ControlFlow_Break 0l + <: + Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32 + else + Core.Ops.Control_flow.ControlFlow_Continue (sum +! 
j <: i32) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) + <: + Core.Ops.Control_flow.t_ControlFlow i32 i32 + with + | Core.Ops.Control_flow.ControlFlow_Break ret -> + Core.Ops.Control_flow.ControlFlow_Break + (Core.Ops.Control_flow.ControlFlow_Break ret + <: + Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32 + | Core.Ops.Control_flow.ControlFlow_Continue sum -> + Core.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32) + <: + Core.Ops.Control_flow.t_ControlFlow + (Core.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32) + with + | Core.Ops.Control_flow.ControlFlow_Break ret -> ret + | Core.Ops.Control_flow.ControlFlow_Continue sum -> sum *! 2l +''' "Loops.For_loops.fst" = ''' module Loops.For_loops #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" diff --git a/test-harness/src/snapshots/toolchain__mut-ref-functionalization into-fstar.snap b/test-harness/src/snapshots/toolchain__mut-ref-functionalization into-fstar.snap index 955a69474..fee5eaadb 100644 --- a/test-harness/src/snapshots/toolchain__mut-ref-functionalization into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__mut-ref-functionalization into-fstar.snap @@ -32,12 +32,45 @@ module Mut_ref_functionalization open Core open FStar.Mul +type t_Bar = { + f_a:u8; + f_b:u8 +} + +type t_Foo = { f_field:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global } + class t_FooTrait (v_Self: Type0) = { f_z_pre:v_Self -> Type0; f_z_post:v_Self -> v_Self -> Type0; f_z:x0: v_Self -> Prims.Pure v_Self (f_z_pre x0) (fun result -> f_z_post x0 result) } +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl_FooTrait_for_Foo: t_FooTrait t_Foo = + { + f_z_pre = (fun (self: t_Foo) -> true); + f_z_post = (fun (self: t_Foo) (out: t_Foo) -> true); + f_z = fun (self: t_Foo) -> self + } + +type t_Pair (v_T: Type0) = { + f_a:v_T; + f_b:t_Foo +} + +type 
t_S = { f_b:t_Array u8 (sz 5) } + +let impl__S__update (self: t_S) (x: u8) : t_S = + let self:t_S = + { + self with + f_b = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize self.f_b (sz 0) x + } + <: + t_S + in + self + let array (x: t_Array u8 (sz 10)) : t_Array u8 (sz 10) = let x:t_Array u8 (sz 10) = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize x (sz 1) (x.[ sz 2 ] <: u8) @@ -60,6 +93,19 @@ let h (x: u8) : u8 = let x:u8 = x +! 10uy in x +let i (bar: t_Bar) : (t_Bar & u8) = + let bar:t_Bar = { bar with f_b = bar.f_b +! bar.f_a } <: t_Bar in + let bar:t_Bar = { bar with f_a = h bar.f_a } <: t_Bar in + let hax_temp_output:u8 = bar.f_a +! bar.f_b in + bar, hax_temp_output <: (t_Bar & u8) + +let j (x: t_Bar) : (t_Bar & u8) = + let out:u8 = 123uy in + let tmp0, out1:(t_Bar & u8) = i x in + let x:t_Bar = tmp0 in + let hax_temp_output:u8 = out1 +! out in + x, hax_temp_output <: (t_Bar & u8) + let k (vec: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) (arg_1_wild3: u16) @@ -82,6 +128,73 @@ let k <: (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global & u16 & Prims.unit & u64) +let foo (lhs rhs: t_S) : t_S = + let lhs:t_S = + Rust_primitives.Hax.Folds.fold_range (sz 0) + (sz 1) + (fun lhs temp_1_ -> + let lhs:t_S = lhs in + let _:usize = temp_1_ in + true) + lhs + (fun lhs i -> + let lhs:t_S = lhs in + let i:usize = i in + { + lhs with + f_b + = + Rust_primitives.Hax.Monomorphized_update_at.update_at_usize lhs.f_b + i + ((lhs.f_b.[ i ] <: u8) +! 
(rhs.f_b.[ i ] <: u8) <: u8) + <: + t_Array u8 (sz 5) + } + <: + t_S) + in + lhs + +let g (x: t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) + : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = + let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in + let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = + Rust_primitives.Hax.Folds.fold_range 1uy + 10uy + (fun x temp_1_ -> + let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in + let _:u8 = temp_1_ in + true) + x + (fun x i -> + let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in + let i:u8 = i in + { + x with + f_a + = + Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global x.f_a i + <: + Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global + } + <: + t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) + in + let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = + { x with f_a = Core.Slice.impl__swap #u8 x.f_a (sz 0) (sz 1) } + <: + t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) + in + let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = + { + x with + f_b = { x.f_b with f_field = Core.Slice.impl__swap #u8 x.f_b.f_field (sz 0) (sz 1) } <: t_Foo + } + <: + t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) + in + x.f_a + let build_vec (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Slice.impl__into_vec #u8 #Alloc.Alloc.t_Global @@ -157,124 +270,11 @@ let test_append (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = tmp0 in let vec2:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = tmp1 in let _:Prims.unit = () in - let vec1:(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global & Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = + let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl_1__append #u8 #Alloc.Alloc.t_Global vec1 (build_vec () <: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) in vec1 - -type t_Bar = { - f_a:u8; - f_b:u8 -} - -type t_Foo = { f_field:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global } - -[@@ FStar.Tactics.Typeclasses.tcinstance] 
-let impl_FooTrait_for_Foo: t_FooTrait t_Foo = - { - f_z_pre = (fun (self: t_Foo) -> true); - f_z_post = (fun (self: t_Foo) (out: t_Foo) -> true); - f_z = fun (self: t_Foo) -> self - } - -type t_S = { f_b:t_Array u8 (sz 5) } - -let impl__S__update (self: t_S) (x: u8) : t_S = - let self:t_S = - { - self with - f_b = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize self.f_b (sz 0) x - } - <: - t_S - in - self - -let foo (lhs rhs: t_S) : t_S = - let lhs:t_S = - Rust_primitives.Hax.Folds.fold_range (sz 0) - (sz 1) - (fun lhs temp_1_ -> - let lhs:t_S = lhs in - let _:usize = temp_1_ in - true) - lhs - (fun lhs i -> - let lhs:t_S = lhs in - let i:usize = i in - { - lhs with - f_b - = - Rust_primitives.Hax.Monomorphized_update_at.update_at_usize lhs.f_b - i - ((lhs.f_b.[ i ] <: u8) +! (rhs.f_b.[ i ] <: u8) <: u8) - <: - t_Array u8 (sz 5) - } - <: - t_S) - in - lhs - -let i (bar: t_Bar) : (t_Bar & u8) = - let bar:t_Bar = { bar with f_b = bar.f_b +! bar.f_a } <: t_Bar in - let bar:t_Bar = { bar with f_a = h bar.f_a } <: t_Bar in - let hax_temp_output:u8 = bar.f_a +! bar.f_b in - bar, hax_temp_output <: (t_Bar & u8) - -let j (x: t_Bar) : (t_Bar & u8) = - let out:u8 = 123uy in - let tmp0, out1:(t_Bar & u8) = i x in - let x:t_Bar = tmp0 in - let hax_temp_output:u8 = out1 +! 
out in - x, hax_temp_output <: (t_Bar & u8) - -type t_Pair (v_T: Type0) = { - f_a:v_T; - f_b:t_Foo -} - -let g (x: t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) - : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = - let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in - let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = - Rust_primitives.Hax.Folds.fold_range 1uy - 10uy - (fun x temp_1_ -> - let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in - let _:u8 = temp_1_ in - true) - x - (fun x i -> - let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in - let i:u8 = i in - { - x with - f_a - = - Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global x.f_a i - <: - Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global - } - <: - t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) - in - let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = - { x with f_a = Core.Slice.impl__swap #u8 x.f_a (sz 0) (sz 1) } - <: - t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) - in - let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = - { - x with - f_b = { x.f_b with f_field = Core.Slice.impl__swap #u8 x.f_b.f_field (sz 0) (sz 1) } <: t_Foo - } - <: - t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) - in - x.f_a ''' diff --git a/test-harness/src/snapshots/toolchain__naming into-fstar.snap b/test-harness/src/snapshots/toolchain__naming into-fstar.snap index 585900c82..f25ae1575 100644 --- a/test-harness/src/snapshots/toolchain__naming into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__naming into-fstar.snap @@ -94,6 +94,14 @@ module Naming open Core open FStar.Mul +type t_Arity1 (v_T: Type0) = | Arity1 : v_T -> t_Arity1 v_T + +type t_B = | B : t_B + +let impl__B__f (self: t_B) : t_B = B <: t_B + +type t_C = { f_x:usize } + type t_Foo = | Foo_A : t_Foo | Foo_B { f_x:usize }: t_Foo @@ -106,6 +114,22 @@ type t_Foo2 = class t_FooTrait (v_Self: Type0) = { f_ASSOCIATED_CONSTANT:usize } +type t_Foobar = { f_a:t_Foo } + +type t_StructA = { f_a:usize } + +type t_StructB = { + 
f_a:usize; + f_b:usize +} + +type t_StructC = { f_a:usize } + +type t_StructD = { + f_a:usize; + f_b:usize +} + class t_T1 (v_Self: Type0) = { __marker_trait_t_T1:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] @@ -116,61 +140,25 @@ let impl_T1_for_tuple_Foo_u8: t_T1 (t_Foo & u8) = { __marker_trait = () } class t_T2_for_a (v_Self: Type0) = { __marker_trait_t_T2_for_a:Prims.unit } +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl_T2_e_for_a_for_Arity1_of_tuple_Foo_u8: t_T2_for_a (t_Arity1 (t_Foo & u8)) = + { __marker_trait = () } + class t_T3_e_for_a (v_Self: Type0) = { __marker_trait_t_T3_e_for_a:Prims.unit } [@@ FStar.Tactics.Typeclasses.tcinstance] let impl_T3_e_e_for_a_for_Foo: t_T3_e_for_a t_Foo = { __marker_trait = () } +type t_X = | X : t_X + let v_INHERENT_CONSTANT: usize = sz 3 let constants (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_FooTrait v_T) (_: Prims.unit) - : usize = f_ASSOCIATED_CONSTANT +! v_INHERENT_CONSTANT - -let ff__g (_: Prims.unit) : Prims.unit = () - -type t_f__g__impl__g__Foo = - | C_f__g__impl__g__Foo_A : t_f__g__impl__g__Foo - | C_f__g__impl__g__Foo_B { f_x:usize }: t_f__g__impl__g__Foo - -let ff__g__impl_1__g (self: t_Foo) : usize = sz 1 - -let reserved_names (v_val v_noeq v_of: u8) : u8 = (v_val +! v_noeq <: u8) +! 
v_of - -/// From issue https://github.com/hacspec/hax/issues/839 -let string_shadows (v_string n: string) : Prims.unit = () - -type t_Arity1 (v_T: Type0) = | Arity1 : v_T -> t_Arity1 v_T - -[@@ FStar.Tactics.Typeclasses.tcinstance] -let impl_T2_e_for_a_for_Arity1_of_tuple_Foo_u8: t_T2_for_a (t_Arity1 (t_Foo & u8)) = - { __marker_trait = () } - -type t_B = | B : t_B - -let impl__B__f (self: t_B) : t_B = B <: t_B - -type t_C = { f_x:usize } - -type t_Foobar = { f_a:t_Foo } - -type t_StructA = { f_a:usize } - -type t_StructB = { - f_a:usize; - f_b:usize -} - -type t_StructC = { f_a:usize } - -type t_StructD = { - f_a:usize; - f_b:usize -} - -type t_X = | X : t_X + : usize = + (f_ASSOCIATED_CONSTANT #FStar.Tactics.Typeclasses.solve <: usize) +! v_INHERENT_CONSTANT let construct_structs (a b: usize) : Prims.unit = let _:t_StructA = { f_a = a } <: t_StructA in @@ -179,12 +167,25 @@ let construct_structs (a b: usize) : Prims.unit = let _:t_StructD = { f_a = a; f_b = b } <: t_StructD in () -let f (x: t_Foobar) : usize = ff__g__impl_1__g x.f_a +let ff__g (_: Prims.unit) : Prims.unit = () let ff__g__impl__g (self: t_B) : usize = sz 0 +type t_f__g__impl__g__Foo = + | C_f__g__impl__g__Foo_A : t_f__g__impl__g__Foo + | C_f__g__impl__g__Foo_B { f_x:usize }: t_f__g__impl__g__Foo + +let ff__g__impl_1__g (self: t_Foo) : usize = sz 1 + +let f (x: t_Foobar) : usize = ff__g__impl_1__g x.f_a + let mk_c (_: Prims.unit) : t_C = let _:t_Foo = Foo_B ({ Naming.Foo.f_x = sz 3 }) <: t_Foo in let _:t_X = X <: t_X in { f_x = sz 3 } <: t_C + +let reserved_names (v_val v_noeq v_of: u8) : u8 = (v_val +! v_noeq <: u8) +! 
v_of + +/// From issue https://github.com/hacspec/hax/issues/839 +let string_shadows (v_string n: string) : Prims.unit = () ''' diff --git a/test-harness/src/snapshots/toolchain__pattern-or into-coq.snap b/test-harness/src/snapshots/toolchain__pattern-or into-coq.snap index a7897333d..f4b3cbfe0 100644 --- a/test-harness/src/snapshots/toolchain__pattern-or into-coq.snap +++ b/test-harness/src/snapshots/toolchain__pattern-or into-coq.snap @@ -29,63 +29,83 @@ diagnostics = [] [stdout.files] "Pattern_or.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. + + Inductive t_E : Type := -| E_A : t_E -| E_B : t_E. +| E_A +| E_B. +Arguments t_E:clear implicits. +Arguments t_E. -Definition t_E_cast_to_repr (x : t_E_t) : uint_size := +Definition t_E_cast_to_repr (x : t_E) : t_isize := match x with - | E_A => - (@repr WORDSIZE32 0) - | E_B => - (@repr WORDSIZE32 1) + | E_A => + 0 + | E_B => + 1 end. -(*Not implemented yet? todo(item)*) +(* NotImplementedYet *) -Definition bar (x : t_E_t) : unit := +Definition bar (x : t_E) : unit := match x with - | E_A | E_B => + | E_A + | E_B => tt end. -Definition deep (x : int32 × t_Option_t int32) : int32 := +Definition deep (x : (t_i32*t_Option ((t_i32)))) : t_i32 := match x with - | '((@repr WORDSIZE32 1) | (@repr WORDSIZE32 2),Option_Some (@repr WORDSIZE32 3) | (@repr WORDSIZE32 4)) => - (@repr WORDSIZE32 0) - | '(x,_) => + | (1 + | 2,Option_Some (3 + | 4)) => + 0 + | (x,_) => x end. 
-Definition deep_capture (x : t_Result_t (int32 × int32) (int32 × int32)) : int32 := +Definition deep_capture (x : t_Result (((t_i32*t_i32))) (((t_i32*t_i32)))) : t_i32 := match x with - | Result_Ok ((@repr WORDSIZE32 1) | (@repr WORDSIZE32 2),x) | Result_Err ((@repr WORDSIZE32 3) | (@repr WORDSIZE32 4),x) => + | Result_Ok ((1 + | 2,x)) + | Result_Err ((3 + | 4,x)) => x - | Result_Ok (x,_) | Result_Err (x,_) => + | Result_Ok ((x,_)) + | Result_Err ((x,_)) => x end. -Definition equivalent (x : int32 × t_Option_t int32) : int32 := +Definition equivalent (x : (t_i32*t_Option ((t_i32)))) : t_i32 := match x with - | '((@repr WORDSIZE32 1),Option_Some (@repr WORDSIZE32 3)) | '((@repr WORDSIZE32 1),Option_Some (@repr WORDSIZE32 4)) | '((@repr WORDSIZE32 2),Option_Some (@repr WORDSIZE32 3)) | '((@repr WORDSIZE32 2),Option_Some (@repr WORDSIZE32 4)) => - (@repr WORDSIZE32 0) - | '(x,_) => + | (1,Option_Some (3)) + | (1,Option_Some (4)) + | (2,Option_Some (3)) + | (2,Option_Some (4)) => + 0 + | (x,_) => x end. -Definition nested (x : t_Option_t int32) : int32 := +Definition nested (x : t_Option ((t_i32))) : t_i32 := match x with - | Option_Some (@repr WORDSIZE32 1) | (@repr WORDSIZE32 2) => - (@repr WORDSIZE32 1) - | Option_Some x => + | Option_Some (1 + | 2) => + 1 + | Option_Some (x) => x - | Option_None => - (@repr WORDSIZE32 0) + | Option_None => + 0 end. ''' diff --git a/test-harness/src/snapshots/toolchain__reordering into-coq.snap b/test-harness/src/snapshots/toolchain__reordering into-coq.snap index 316198481..d3b0567d8 100644 --- a/test-harness/src/snapshots/toolchain__reordering into-coq.snap +++ b/test-harness/src/snapshots/toolchain__reordering into-coq.snap @@ -28,39 +28,54 @@ diagnostics = [] [stdout.files] "Reordering.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. 
+Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. + + Inductive t_Foo : Type := -| Foo_A : t_Foo -| Foo_B : t_Foo. +| Foo_A +| Foo_B. +Arguments t_Foo:clear implicits. +Arguments t_Foo. + +Record t_Bar : Type := + { + 0 : t_Foo; + }. +Arguments t_Bar:clear implicits. +Arguments t_Bar. +Arguments Build_t_Bar. +#[export] Instance settable_t_Bar : Settable _ := + settable! (@Build_t_Bar) <0>. -Definition t_Foo_cast_to_repr (x : t_Foo_t) : uint_size := +Definition t_Foo_cast_to_repr (x : t_Foo) : t_isize := match x with - | Foo_A => - (@repr WORDSIZE32 0) - | Foo_B => - (@repr WORDSIZE32 1) + | Foo_A => + 0 + | Foo_B => + 1 end. -(*Not implemented yet? todo(item)*) +(* NotImplementedYet *) -Definition f (_ : int32) : t_Foo_t := - Foo_At_Foo_t. +Definition f (_ : t_u32) : t_Foo := + Foo_A. + +Definition g (_ : unit) : t_Bar := + Build_t_Bar (f (32)). Definition no_dependency_1_ (_ : unit) : unit := tt. Definition no_dependency_2_ (_ : unit) : unit := tt. - -Record t_Bar : Type := { - 0 : t_Foo_t; -}. - -Definition g (_ : unit) : t_Bar_t := - Bar (f (@repr WORDSIZE32 32)). 
''' diff --git a/test-harness/src/snapshots/toolchain__reordering into-fstar.snap b/test-harness/src/snapshots/toolchain__reordering into-fstar.snap index f78c53805..385642dd2 100644 --- a/test-harness/src/snapshots/toolchain__reordering into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__reordering into-fstar.snap @@ -36,6 +36,8 @@ type t_Foo = | Foo_A : t_Foo | Foo_B : t_Foo +type t_Bar = | Bar : t_Foo -> t_Bar + let t_Foo_cast_to_repr (x: t_Foo) : isize = match x with | Foo_A -> isz 0 @@ -43,11 +45,9 @@ let t_Foo_cast_to_repr (x: t_Foo) : isize = let f (_: u32) : t_Foo = Foo_A <: t_Foo +let g (_: Prims.unit) : t_Bar = Bar (f 32ul) <: t_Bar + let no_dependency_1_ (_: Prims.unit) : Prims.unit = () let no_dependency_2_ (_: Prims.unit) : Prims.unit = () - -type t_Bar = | Bar : t_Foo -> t_Bar - -let g (_: Prims.unit) : t_Bar = Bar (f 32ul) <: t_Bar ''' diff --git a/test-harness/src/snapshots/toolchain__reordering into-ssprove.snap b/test-harness/src/snapshots/toolchain__reordering into-ssprove.snap index 44e58bfbc..b6655f747 100644 --- a/test-harness/src/snapshots/toolchain__reordering into-ssprove.snap +++ b/test-harness/src/snapshots/toolchain__reordering into-ssprove.snap @@ -66,6 +66,20 @@ Equations Foo_B {L : {fset Location}} {I : Interface} : both L I t_Foo := solve_lift (ret_both (inr (tt : 'unit) : t_Foo)) : both L I t_Foo. Fail Next Obligation. +Definition t_Bar : choice_type := + (t_Foo). +Equations 0 {L : {fset Location}} {I : Interface} (s : both L I t_Bar) : both L I t_Foo := + 0 s := + bind_both s (fun x => + solve_lift (ret_both (x : t_Foo))) : both L I t_Foo. +Fail Next Obligation. +Equations Build_t_Bar {L0 : {fset Location}} {I0 : Interface} {0 : both L0 I0 t_Foo} : both L0 I0 (t_Bar) := + Build_t_Bar := + bind_both 0 (fun 0 => + solve_lift (ret_both ((0) : (t_Bar)))) : both L0 I0 (t_Bar). +Fail Next Obligation. +Notation "'Build_t_Bar' '[' x ']' '(' '0' ':=' y ')'" := (Build_t_Bar (0 := y)). 
+ Equations t_Foo_cast_to_repr {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 t_Foo) : both L1 I1 uint_size := t_Foo_cast_to_repr x := matchb x with @@ -83,6 +97,11 @@ Equations f {L1 : {fset Location}} {I1 : Interface} (_ : both L1 I1 int32) : bot Foo_A : both L1 I1 t_Foo. Fail Next Obligation. +Equations g {L1 : {fset Location}} {I1 : Interface} (_ : both L1 I1 'unit) : both L1 I1 t_Bar := + g _ := + Bar (solve_lift (f (ret_both (32 : int32)))) : both L1 I1 t_Bar. +Fail Next Obligation. + Equations no_dependency_1_ {L1 : {fset Location}} {I1 : Interface} (_ : both L1 I1 'unit) : both L1 I1 'unit := no_dependency_1_ _ := solve_lift (ret_both (tt : 'unit)) : both L1 I1 'unit. @@ -92,23 +111,4 @@ Equations no_dependency_2_ {L1 : {fset Location}} {I1 : Interface} (_ : both L1 no_dependency_2_ _ := solve_lift (ret_both (tt : 'unit)) : both L1 I1 'unit. Fail Next Obligation. - -Definition t_Bar : choice_type := - (t_Foo). -Equations 0 {L : {fset Location}} {I : Interface} (s : both L I t_Bar) : both L I t_Foo := - 0 s := - bind_both s (fun x => - solve_lift (ret_both (x : t_Foo))) : both L I t_Foo. -Fail Next Obligation. -Equations Build_t_Bar {L0 : {fset Location}} {I0 : Interface} {0 : both L0 I0 t_Foo} : both L0 I0 (t_Bar) := - Build_t_Bar := - bind_both 0 (fun 0 => - solve_lift (ret_both ((0) : (t_Bar)))) : both L0 I0 (t_Bar). -Fail Next Obligation. -Notation "'Build_t_Bar' '[' x ']' '(' '0' ':=' y ')'" := (Build_t_Bar (0 := y)). - -Equations g {L1 : {fset Location}} {I1 : Interface} (_ : both L1 I1 'unit) : both L1 I1 t_Bar := - g _ := - Bar (solve_lift (f (ret_both (32 : int32)))) : both L1 I1 t_Bar. -Fail Next Obligation. 
''' diff --git a/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap b/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap index 1a54ff379..a2038278d 100644 --- a/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__side-effects into-fstar.snap @@ -27,16 +27,174 @@ stderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs' diagnostics = [] [stdout.files] +"Side_effects.Issue_1083_.fst" = ''' +module Side_effects.Issue_1083_ +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +class t_MyFrom (v_Self: Type0) (v_T: Type0) = { + f_my_from_pre:v_T -> Type0; + f_my_from_post:v_T -> v_Self -> Type0; + f_my_from:x0: v_T -> Prims.Pure v_Self (f_my_from_pre x0) (fun result -> f_my_from_post x0 result) +} + +[@@ FStar.Tactics.Typeclasses.tcinstance] +let impl: t_MyFrom u16 u8 = + { + f_my_from_pre = (fun (x: u8) -> true); + f_my_from_post = (fun (x: u8) (out: u16) -> true); + f_my_from = fun (x: u8) -> cast (x <: u8) <: u16 + } + +let f (x: u8) : Core.Result.t_Result u16 u16 = + match Core.Result.Result_Err 1uy <: Core.Result.t_Result Prims.unit u8 with + | Core.Result.Result_Ok _ -> + Core.Result.Result_Ok (f_my_from #u16 #u8 #FStar.Tactics.Typeclasses.solve x) + <: + Core.Result.t_Result u16 u16 + | Core.Result.Result_Err err -> + Core.Result.Result_Err (Core.Convert.f_from #u16 #u8 #FStar.Tactics.Typeclasses.solve err) + <: + Core.Result.t_Result u16 u16 +''' +"Side_effects.Issue_1089_.fst" = ''' +module Side_effects.Issue_1089_ +#set-options "--fuel 0 --ifuel 1 --z3rlimit 15" +open Core +open FStar.Mul + +let test (x y: Core.Option.t_Option i32) : Core.Option.t_Option i32 = + match + Core.Option.impl__map #i32 + #(Core.Option.t_Option i32) + x + (fun i -> + let i:i32 = i in + match y with + | Core.Option.Option_Some hoist1 -> + Core.Option.Option_Some (i +! 
hoist1 <: i32) <: Core.Option.t_Option i32 + | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option i32) + with + | Core.Option.Option_Some some -> some + | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option i32 +''' "Side_effects.fst" = ''' module Side_effects #set-options "--fuel 0 --ifuel 1 --z3rlimit 15" open Core open FStar.Mul +type t_A = | A : t_A + +type t_B = | B : t_B + +type t_Bar = { + f_a:bool; + f_b:(t_Array (bool & bool) (sz 6) & bool) +} + +type t_Foo = { + f_x:bool; + f_y:(bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global); + f_z:t_Array t_Bar (sz 6); + f_bar:t_Bar +} + /// Helper function let add3 (x y z: u32) : u32 = Core.Num.impl__u32__wrapping_add (Core.Num.impl__u32__wrapping_add x y <: u32) z +/// Test assignation on non-trivial places +let assign_non_trivial_lhs (foo: t_Foo) : t_Foo = + let foo:t_Foo = { foo with f_x = true } <: t_Foo in + let foo:t_Foo = { foo with f_bar = { foo.f_bar with f_a = true } <: t_Bar } <: t_Foo in + let foo:t_Foo = + { + foo with + f_bar + = + { + foo.f_bar with + f_b + = + { + foo.f_bar.f_b with + _1 + = + Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_bar.f_b._1 + (sz 3) + ({ (foo.f_bar.f_b._1.[ sz 3 ] <: (bool & bool)) with _2 = true } <: (bool & bool)) + } + <: + (t_Array (bool & bool) (sz 6) & bool) + } + <: + t_Bar + } + <: + t_Foo + in + let foo:t_Foo = + { + foo with + f_z + = + Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_z + (sz 3) + ({ (foo.f_z.[ sz 3 ] <: t_Bar) with f_a = true } <: t_Bar) + } + <: + t_Foo + in + let foo:t_Foo = + { + foo with + f_y + = + { + foo.f_y with + _2 + = + Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_y._2 + (sz 3) + ({ + (foo.f_y._2.[ sz 3 ] <: t_Bar) with + f_b + = + { + (foo.f_y._2.[ sz 3 ] <: t_Bar).f_b with + _1 + = + Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (foo.f_y._2.[ sz 3 ] + <: + t_Bar) + .f_b + ._1 + (sz 5) + ({ + ((foo.f_y._2.[ sz 3 ] <: 
t_Bar).f_b._1.[ sz 5 ] <: (bool & bool)) with + _1 = true + } + <: + (bool & bool)) + <: + t_Array (bool & bool) (sz 6) + } + <: + (t_Array (bool & bool) (sz 6) & bool) + } + <: + t_Bar) + } + <: + (bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global) + } + <: + t_Foo + in + foo + /// Question mark without error coercion let direct_result_question_mark (y: Core.Result.t_Result Prims.unit u32) : Core.Result.t_Result i8 u32 = @@ -48,9 +206,9 @@ let direct_result_question_mark (y: Core.Result.t_Result Prims.unit u32) let direct_result_question_mark_coercion (y: Core.Result.t_Result i8 u16) : Core.Result.t_Result i8 u32 = match y with - | Core.Result.Result_Ok hoist1 -> Core.Result.Result_Ok hoist1 <: Core.Result.t_Result i8 u32 + | Core.Result.Result_Ok hoist5 -> Core.Result.Result_Ok hoist5 <: Core.Result.t_Result i8 u32 | Core.Result.Result_Err err -> - Core.Result.Result_Err (Core.Convert.f_from #FStar.Tactics.Typeclasses.solve err) + Core.Result.Result_Err (Core.Convert.f_from #u32 #u16 #FStar.Tactics.Typeclasses.solve err) <: Core.Result.t_Result i8 u32 @@ -63,13 +221,13 @@ let early_returns (x: u32) : u32 = then match true with | true -> 34ul - | _ -> Core.Num.impl__u32__wrapping_add (Core.Num.impl__u32__wrapping_add 123ul 3ul <: u32) x + | _ -> + let x, hoist9:(u32 & u32) = x, 3ul <: (u32 & u32) in + Core.Num.impl__u32__wrapping_add (Core.Num.impl__u32__wrapping_add 123ul hoist9 <: u32) x else let x:u32 = x +! 9ul in - Core.Num.impl__u32__wrapping_add (Core.Num.impl__u32__wrapping_add 123ul (x +! 1ul <: u32) - <: - u32) - x + let x, hoist9:(u32 & u32) = x, x +! 
1ul <: (u32 & u32) in + Core.Num.impl__u32__wrapping_add (Core.Num.impl__u32__wrapping_add 123ul hoist9 <: u32) x /// Exercise local mutation with control flow and loops let local_mutation (x: u32) : u32 = @@ -97,7 +255,7 @@ let local_mutation (x: u32) : u32 = in Core.Num.impl__u32__wrapping_add x y else - let (x, y), hoist15:((u32 & u32) & u32) = + let (x, y), hoist19:((u32 & u32) & u32) = match x with | 12ul -> let y:u32 = Core.Num.impl__u32__wrapping_add x y in @@ -109,38 +267,47 @@ let local_mutation (x: u32) : u32 = ((u32 & u32) & u32) | _ -> (x, y <: (u32 & u32)), 0ul <: ((u32 & u32) & u32) in - let x:u32 = hoist15 in + let x:u32 = hoist19 in Core.Num.impl__u32__wrapping_add x y +/// Combine `?` and early return +let monad_lifting (x: u8) : Core.Result.t_Result t_A t_B = + if x >. 123uy + then + match Core.Result.Result_Err (B <: t_B) <: Core.Result.t_Result t_A t_B with + | Core.Result.Result_Ok hoist20 -> Core.Result.Result_Ok hoist20 <: Core.Result.t_Result t_A t_B + | Core.Result.Result_Err err -> Core.Result.Result_Err err <: Core.Result.t_Result t_A t_B + else Core.Result.Result_Ok (A <: t_A) <: Core.Result.t_Result t_A t_B + /// Test question mark on `Option`s with some control flow let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core.Option.t_Option u8 = match x with - | Core.Option.Option_Some hoist19 -> - if hoist19 >. 10uy + | Core.Option.Option_Some hoist26 -> + if hoist26 >. 
10uy then match x with - | Core.Option.Option_Some hoist21 -> + | Core.Option.Option_Some hoist28 -> (match - Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add hoist21 3uy) + Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add hoist28 3uy) <: Core.Option.t_Option u8 with - | Core.Option.Option_Some hoist27 -> - (match hoist27 with + | Core.Option.Option_Some hoist34 -> + (match hoist34 with | 3uy -> (match Core.Option.Option_None <: Core.Option.t_Option u8 with | Core.Option.Option_Some some -> let v:u8 = some in (match x with - | Core.Option.Option_Some hoist28 -> + | Core.Option.Option_Some hoist35 -> (match y with - | Core.Option.Option_Some hoist29 -> + | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v - hoist28 + hoist35 <: u8) - hoist29) + hoist36) <: Core.Option.t_Option u8 | Core.Option.Option_None -> @@ -150,18 +317,18 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8) | 4uy -> (match z with - | Core.Option.Option_Some hoist16 -> - let v:u8 = 4uy +! (if hoist16 >. 4uL <: bool then 0uy else 3uy) in + | Core.Option.Option_Some hoist23 -> + let v:u8 = 4uy +! (if hoist23 >. 4uL <: bool then 0uy else 3uy) in (match x with - | Core.Option.Option_Some hoist28 -> + | Core.Option.Option_Some hoist35 -> (match y with - | Core.Option.Option_Some hoist29 -> + | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v - hoist28 + hoist35 <: u8) - hoist29) + hoist36) <: Core.Option.t_Option u8 | Core.Option.Option_None -> @@ -172,14 +339,14 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. 
| _ -> let v:u8 = 12uy in match x with - | Core.Option.Option_Some hoist28 -> + | Core.Option.Option_Some hoist35 -> (match y with - | Core.Option.Option_Some hoist29 -> + | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some - (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v hoist28 + (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v hoist35 <: u8) - hoist29) + hoist36) <: Core.Option.t_Option u8 | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8 @@ -189,30 +356,30 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option u8 else (match x with - | Core.Option.Option_Some hoist24 -> + | Core.Option.Option_Some hoist31 -> (match y with - | Core.Option.Option_Some hoist23 -> + | Core.Option.Option_Some hoist30 -> (match - Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add hoist24 hoist23) + Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add hoist31 hoist30) <: Core.Option.t_Option u8 with - | Core.Option.Option_Some hoist27 -> - (match hoist27 with + | Core.Option.Option_Some hoist34 -> + (match hoist34 with | 3uy -> (match Core.Option.Option_None <: Core.Option.t_Option u8 with | Core.Option.Option_Some some -> let v:u8 = some in (match x with - | Core.Option.Option_Some hoist28 -> + | Core.Option.Option_Some hoist35 -> (match y with - | Core.Option.Option_Some hoist29 -> + | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v - hoist28 + hoist35 <: u8) - hoist29) + hoist36) <: Core.Option.t_Option u8 | Core.Option.Option_None -> @@ -223,18 +390,18 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. Core.Option.Option_None <: Core.Option.t_Option u8) | 4uy -> (match z with - | Core.Option.Option_Some hoist16 -> - let v:u8 = 4uy +! (if hoist16 >. 
4uL <: bool then 0uy else 3uy) in + | Core.Option.Option_Some hoist23 -> + let v:u8 = 4uy +! (if hoist23 >. 4uL <: bool then 0uy else 3uy) in (match x with - | Core.Option.Option_Some hoist28 -> + | Core.Option.Option_Some hoist35 -> (match y with - | Core.Option.Option_Some hoist29 -> + | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v - hoist28 + hoist35 <: u8) - hoist29) + hoist36) <: Core.Option.t_Option u8 | Core.Option.Option_None -> @@ -246,15 +413,15 @@ let options (x y: Core.Option.t_Option u8) (z: Core.Option.t_Option u64) : Core. | _ -> let v:u8 = 12uy in match x with - | Core.Option.Option_Some hoist28 -> + | Core.Option.Option_Some hoist35 -> (match y with - | Core.Option.Option_Some hoist29 -> + | Core.Option.Option_Some hoist36 -> Core.Option.Option_Some (Core.Num.impl__u8__wrapping_add (Core.Num.impl__u8__wrapping_add v - hoist28 + hoist35 <: u8) - hoist29) + hoist36) <: Core.Option.t_Option u8 | Core.Option.Option_None -> @@ -278,12 +445,11 @@ let question_mark (x: u32) : Core.Result.t_Result u32 u32 = then match Core.Result.Result_Err 12uy <: Core.Result.t_Result Prims.unit u8 with | Core.Result.Result_Ok ok -> - let _:Prims.unit = ok in Core.Result.Result_Ok (Core.Num.impl__u32__wrapping_add 3ul x) <: Core.Result.t_Result u32 u32 | Core.Result.Result_Err err -> - Core.Result.Result_Err (Core.Convert.f_from #FStar.Tactics.Typeclasses.solve err) + Core.Result.Result_Err (Core.Convert.f_from #u32 #u8 #FStar.Tactics.Typeclasses.solve err) <: Core.Result.t_Result u32 u32 else @@ -295,8 +461,8 @@ let simplifiable_question_mark (c: bool) (x: Core.Option.t_Option i32) : Core.Op if c then match x with - | Core.Option.Option_Some hoist33 -> - let a:i32 = hoist33 +! 10l in + | Core.Option.Option_Some hoist40 -> + let a:i32 = hoist40 +! 10l in let b:i32 = 20l in Core.Option.Option_Some (a +! 
b) <: Core.Option.t_Option i32 | Core.Option.Option_None -> Core.Option.Option_None <: Core.Option.t_Option i32 @@ -312,128 +478,7 @@ let simplifiable_return (c1 c2 c3: bool) : i32 = if c2 then let x:i32 = x +! 10l in - if c3 - then 1l - else - let x:i32 = x +! 1l in - x - else - let x:i32 = x +! 1l in - x + if c3 then 1l else x +! 1l + else x +! 1l else x - -type t_A = | A : t_A - -type t_B = | B : t_B - -type t_Bar = { - f_a:bool; - f_b:(t_Array (bool & bool) (sz 6) & bool) -} - -/// Combine `?` and early return -let monad_lifting (x: u8) : Core.Result.t_Result t_A t_B = - if x >. 123uy - then - match Core.Result.Result_Err (B <: t_B) <: Core.Result.t_Result t_A t_B with - | Core.Result.Result_Ok hoist35 -> Core.Result.Result_Ok hoist35 <: Core.Result.t_Result t_A t_B - | Core.Result.Result_Err err -> Core.Result.Result_Err err <: Core.Result.t_Result t_A t_B - else Core.Result.Result_Ok (A <: t_A) <: Core.Result.t_Result t_A t_B - -type t_Foo = { - f_x:bool; - f_y:(bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global); - f_z:t_Array t_Bar (sz 6); - f_bar:t_Bar -} - -/// Test assignation on non-trivial places -let assign_non_trivial_lhs (foo: t_Foo) : t_Foo = - let foo:t_Foo = { foo with f_x = true } <: t_Foo in - let foo:t_Foo = { foo with f_bar = { foo.f_bar with f_a = true } <: t_Bar } <: t_Foo in - let foo:t_Foo = - { - foo with - f_bar - = - { - foo.f_bar with - f_b - = - { - foo.f_bar.f_b with - _1 - = - Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_bar.f_b._1 - (sz 3) - ({ (foo.f_bar.f_b._1.[ sz 3 ] <: (bool & bool)) with _2 = true } <: (bool & bool)) - } - <: - (t_Array (bool & bool) (sz 6) & bool) - } - <: - t_Bar - } - <: - t_Foo - in - let foo:t_Foo = - { - foo with - f_z - = - Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_z - (sz 3) - ({ (foo.f_z.[ sz 3 ] <: t_Bar) with f_a = true } <: t_Bar) - } - <: - t_Foo - in - let foo:t_Foo = - { - foo with - f_y - = - { - foo.f_y with - _2 - = - 
Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_y._2 - (sz 3) - ({ - (foo.f_y._2.[ sz 3 ] <: t_Bar) with - f_b - = - { - (foo.f_y._2.[ sz 3 ] <: t_Bar).f_b with - _1 - = - Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (foo.f_y._2.[ sz 3 ] - <: - t_Bar) - .f_b - ._1 - (sz 5) - ({ - ((foo.f_y._2.[ sz 3 ] <: t_Bar).f_b._1.[ sz 5 ] <: (bool & bool)) with - _1 = true - } - <: - (bool & bool)) - <: - t_Array (bool & bool) (sz 6) - } - <: - (t_Array (bool & bool) (sz 6) & bool) - } - <: - t_Bar) - } - <: - (bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global) - } - <: - t_Foo - in - foo ''' diff --git a/test-harness/src/snapshots/toolchain__side-effects into-ssprove.snap b/test-harness/src/snapshots/toolchain__side-effects into-ssprove.snap index ebff02a35..2f2304adb 100644 --- a/test-harness/src/snapshots/toolchain__side-effects into-ssprove.snap +++ b/test-harness/src/snapshots/toolchain__side-effects into-ssprove.snap @@ -54,6 +54,80 @@ Import choice.Choice.Exports. Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. +Definition t_A : choice_type := + 'unit. +Equations Build_t_A : both (fset []) (fset []) (t_A) := + Build_t_A := + solve_lift (ret_both (tt (* Empty tuple *) : (t_A))) : both (fset []) (fset []) (t_A). +Fail Next Obligation. + +Definition t_B : choice_type := + 'unit. +Equations Build_t_B : both (fset []) (fset []) (t_B) := + Build_t_B := + solve_lift (ret_both (tt (* Empty tuple *) : (t_B))) : both (fset []) (fset []) (t_B). +Fail Next Obligation. + +Definition t_Bar : choice_type := + ('bool × nseq ('bool × 'bool) 6 × 'bool). +Equations f_a {L : {fset Location}} {I : Interface} (s : both L I t_Bar) : both L I 'bool := + f_a s := + bind_both s (fun x => + solve_lift (ret_both (fst x : 'bool))) : both L I 'bool. +Fail Next Obligation. 
+Equations f_b {L : {fset Location}} {I : Interface} (s : both L I t_Bar) : both L I (nseq ('bool × 'bool) 6 × 'bool) := + f_b s := + bind_both s (fun x => + solve_lift (ret_both (snd x : (nseq ('bool × 'bool) 6 × 'bool)))) : both L I (nseq ('bool × 'bool) 6 × 'bool). +Fail Next Obligation. +Equations Build_t_Bar {L0 : {fset Location}} {L1 : {fset Location}} {I0 : Interface} {I1 : Interface} {f_a : both L0 I0 'bool} {f_b : both L1 I1 (nseq ('bool × 'bool) 6 × 'bool)} : both (L0:|:L1) (I0:|:I1) (t_Bar) := + Build_t_Bar := + bind_both f_b (fun f_b => + bind_both f_a (fun f_a => + solve_lift (ret_both ((f_a,f_b) : (t_Bar))))) : both (L0:|:L1) (I0:|:I1) (t_Bar). +Fail Next Obligation. +Notation "'Build_t_Bar' '[' x ']' '(' 'f_a' ':=' y ')'" := (Build_t_Bar (f_a := y) (f_b := f_b x)). +Notation "'Build_t_Bar' '[' x ']' '(' 'f_b' ':=' y ')'" := (Build_t_Bar (f_a := f_a x) (f_b := y)). + +Definition t_Foo : choice_type := + ('bool × 'bool × t_Vec t_Bar t_Global × nseq t_Bar 6 × t_Bar). +Equations f_x {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I 'bool := + f_x s := + bind_both s (fun x => + solve_lift (ret_both (fst (fst (fst x)) : 'bool))) : both L I 'bool. +Fail Next Obligation. +Equations f_y {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I ('bool × t_Vec t_Bar t_Global) := + f_y s := + bind_both s (fun x => + solve_lift (ret_both (snd (fst (fst x)) : ('bool × t_Vec t_Bar t_Global)))) : both L I ('bool × t_Vec t_Bar t_Global). +Fail Next Obligation. +Equations f_z {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I (nseq t_Bar 6) := + f_z s := + bind_both s (fun x => + solve_lift (ret_both (snd (fst x) : (nseq t_Bar 6)))) : both L I (nseq t_Bar 6). +Fail Next Obligation. +Equations f_bar {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I t_Bar := + f_bar s := + bind_both s (fun x => + solve_lift (ret_both (snd x : t_Bar))) : both L I t_Bar. +Fail Next Obligation. 
+Equations Build_t_Foo {L0 : {fset Location}} {L1 : {fset Location}} {L2 : {fset Location}} {L3 : {fset Location}} {I0 : Interface} {I1 : Interface} {I2 : Interface} {I3 : Interface} {f_x : both L0 I0 'bool} {f_y : both L1 I1 ('bool × t_Vec t_Bar t_Global)} {f_z : both L2 I2 (nseq t_Bar 6)} {f_bar : both L3 I3 t_Bar} : both (L0:|:L1:|:L2:|:L3) (I0:|:I1:|:I2:|:I3) (t_Foo) := + Build_t_Foo := + bind_both f_bar (fun f_bar => + bind_both f_z (fun f_z => + bind_both f_y (fun f_y => + bind_both f_x (fun f_x => + solve_lift (ret_both ((f_x,f_y,f_z,f_bar) : (t_Foo))))))) : both (L0:|:L1:|:L2:|:L3) (I0:|:I1:|:I2:|:I3) (t_Foo). +Fail Next Obligation. +Notation "'Build_t_Foo' '[' x ']' '(' 'f_x' ':=' y ')'" := (Build_t_Foo (f_x := y) (f_y := f_y x) (f_z := f_z x) (f_bar := f_bar x)). +Notation "'Build_t_Foo' '[' x ']' '(' 'f_y' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := y) (f_z := f_z x) (f_bar := f_bar x)). +Notation "'Build_t_Foo' '[' x ']' '(' 'f_z' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := y) (f_bar := f_bar x)). +Notation "'Build_t_Foo' '[' x ']' '(' 'f_bar' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := f_z x) (f_bar := y)). + +(*Not implemented yet? todo(item)*) + +(*Not implemented yet? todo(item)*) + (*Not implemented yet? todo(item)*) Equations add3 {L1 : {fset Location}} {L2 : {fset Location}} {L3 : {fset Location}} {I1 : Interface} {I2 : Interface} {I3 : Interface} (x : both L1 I1 int32) (y : both L2 I2 int32) (z : both L3 I3 int32) : both (L1 :|: L2 :|: L3) (I1 :|: I2 :|: I3) int32 := @@ -61,6 +135,16 @@ Equations add3 {L1 : {fset Location}} {L2 : {fset Location}} {L3 : {fset Locatio solve_lift (impl__u32__wrapping_add (impl__u32__wrapping_add x y) z) : both (L1 :|: L2 :|: L3) (I1 :|: I2 :|: I3) int32. Fail Next Obligation. 
+Equations assign_non_trivial_lhs {L1 : {fset Location}} {I1 : Interface} (foo : both L1 I1 t_Foo) : both L1 I1 t_Foo := + assign_non_trivial_lhs foo := + letb _ := assign todo(term) in + letb _ := assign todo(term) in + letb _ := assign todo(term) in + letb _ := assign todo(term) in + letb _ := assign todo(term) in + solve_lift foo : both L1 I1 t_Foo. +Fail Next Obligation. + Equations direct_result_question_mark {L1 : {fset Location}} {I1 : Interface} (y : both L1 I1 (t_Result 'unit int32)) : both L1 I1 (t_Result int8 int32) := direct_result_question_mark y := solve_lift (run (letm[choice_typeMonad.result_bind_code int32] _ := y in @@ -69,31 +153,31 @@ Fail Next Obligation. Equations direct_result_question_mark_coercion {L1 : {fset Location}} {I1 : Interface} (y : both L1 I1 (t_Result int8 int16)) : both L1 I1 (t_Result int8 int32) := direct_result_question_mark_coercion y := - solve_lift (run (letm[choice_typeMonad.result_bind_code int32] hoist1 := impl__map_err y f_from in - Result_Ok (Result_Ok hoist1))) : both L1 I1 (t_Result int8 int32). + solve_lift (run (letm[choice_typeMonad.result_bind_code int32] hoist5 := impl__map_err y f_from in + Result_Ok (Result_Ok hoist5))) : both L1 I1 (t_Result int8 int32). Fail Next Obligation. Equations early_returns {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 int32) : both L1 I1 int32 := early_returns x := solve_lift (run (letm[choice_typeMonad.result_bind_code int32] _ := ifb x >.? (ret_both (3 : int32)) - then letm[choice_typeMonad.result_bind_code int32] hoist2 := ControlFlow_Break (ret_both (0 : int32)) in - ControlFlow_Continue (never_to_any hoist2) + then letm[choice_typeMonad.result_bind_code int32] hoist6 := ControlFlow_Break (ret_both (0 : int32)) in + ControlFlow_Continue (never_to_any hoist6) else () in - letb hoist3 := x >.? (ret_both (30 : int32)) in - letm[choice_typeMonad.result_bind_code int32] hoist5 := ifb hoist3 + letb hoist7 := x >.? 
(ret_both (30 : int32)) in + letm[choice_typeMonad.result_bind_code int32] hoist9 := ifb hoist7 then matchb ret_both (true : 'bool) with | true => - letm[choice_typeMonad.result_bind_code int32] hoist4 := ControlFlow_Break (ret_both (34 : int32)) in - ControlFlow_Continue (solve_lift (never_to_any hoist4)) + letm[choice_typeMonad.result_bind_code int32] hoist8 := ControlFlow_Break (ret_both (34 : int32)) in + ControlFlow_Continue (solve_lift (never_to_any hoist8)) | _ => ControlFlow_Continue (solve_lift (ret_both (3 : int32))) end else ControlFlow_Continue (letb _ := assign todo(term) in x .+ (ret_both (1 : int32))) in - letb hoist6 := impl__u32__wrapping_add (ret_both (123 : int32)) hoist5 in - letb hoist7 := impl__u32__wrapping_add hoist6 x in - letm[choice_typeMonad.result_bind_code int32] hoist8 := ControlFlow_Break hoist7 in - ControlFlow_Continue (never_to_any hoist8))) : both L1 I1 int32. + letb hoist10 := impl__u32__wrapping_add (ret_both (123 : int32)) hoist9 in + letb hoist11 := impl__u32__wrapping_add hoist10 x in + letm[choice_typeMonad.result_bind_code int32] hoist12 := ControlFlow_Break hoist11 in + ControlFlow_Continue (never_to_any hoist12))) : both L1 I1 int32. Fail Next Obligation. Definition y_loc : Location := @@ -104,27 +188,27 @@ Equations local_mutation {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 local_mutation x := letb y loc(y_loc) := ret_both (0 : int32) in letb _ := assign todo(term) in - letb hoist9 := x >.? (ret_both (3 : int32)) in - solve_lift (ifb hoist9 + letb hoist13 := x >.? 
(ret_both (3 : int32)) in + solve_lift (ifb hoist13 then letb _ := assign todo(term) in letb y loc(y_loc) := x ./ (ret_both (2 : int32)) in letb _ := assign todo(term) in - letb hoist10 := ret_both (0 : int32) in - letb hoist11 := Build_t_Range (f_start := hoist10) (f_end := ret_both (10 : int32)) in - letb hoist12 := f_into_iter hoist11 in - letb _ := foldi_both_list hoist12 (fun i => + letb hoist14 := ret_both (0 : int32) in + letb hoist15 := Build_t_Range (f_start := hoist14) (f_end := ret_both (10 : int32)) in + letb hoist16 := f_into_iter hoist15 in + letb _ := foldi_both_list hoist16 (fun i => ssp (fun _ => assign todo(term) : (both (*0*)(L1:|:fset []) (I1) 'unit))) (ret_both (tt : 'unit)) in impl__u32__wrapping_add x y - else letb hoist15 := matchb x with + else letb hoist19 := matchb x with | 12 => letb _ := assign todo(term) in solve_lift (ret_both (3 : int32)) | 13 => - letb hoist14 := x in + letb hoist18 := x in letb _ := assign todo(term) in - letb hoist13 := impl__u32__wrapping_add (ret_both (123 : int32)) x in - solve_lift (add3 hoist14 hoist13 x) + letb hoist17 := impl__u32__wrapping_add (ret_both (123 : int32)) x in + solve_lift (add3 hoist18 hoist17 x) | _ => solve_lift (ret_both (0 : int32)) end in @@ -132,37 +216,47 @@ Equations local_mutation {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 impl__u32__wrapping_add x y) : both (L1 :|: fset [y_loc;y_loc]) I1 int32. Fail Next Obligation. +Equations monad_lifting {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 int8) : both L1 I1 (t_Result t_A t_B) := + monad_lifting x := + solve_lift (run (ifb x >.? 
(ret_both (123 : int8)) + then letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist20 := ControlFlow_Continue (Result_Err B) in + letb hoist21 := Result_Ok hoist20 in + letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist22 := ControlFlow_Break hoist21 in + ControlFlow_Continue (never_to_any hoist22) + else ControlFlow_Continue (Result_Ok A))) : both L1 I1 (t_Result t_A t_B). +Fail Next Obligation. + Equations options {L1 : {fset Location}} {L2 : {fset Location}} {L3 : {fset Location}} {I1 : Interface} {I2 : Interface} {I3 : Interface} (x : both L1 I1 (t_Option int8)) (y : both L2 I2 (t_Option int8)) (z : both L3 I3 (t_Option int64)) : both (L1 :|: L2 :|: L3) (I1 :|: I2 :|: I3) (t_Option int8) := options x y z := - solve_lift (run (letm[choice_typeMonad.option_bind_code] hoist19 := x in - letb hoist20 := hoist19 >.? (ret_both (10 : int8)) in - letm[choice_typeMonad.option_bind_code] hoist26 := ifb hoist20 - then letm[choice_typeMonad.option_bind_code] hoist21 := x in - Option_Some (letb hoist22 := impl__u8__wrapping_add hoist21 (ret_both (3 : int8)) in - Option_Some hoist22) - else letm[choice_typeMonad.option_bind_code] hoist24 := x in - letm[choice_typeMonad.option_bind_code] hoist23 := y in - Option_Some (letb hoist25 := impl__u8__wrapping_add hoist24 hoist23 in - Option_Some hoist25) in - letm[choice_typeMonad.option_bind_code] hoist27 := hoist26 in - letm[choice_typeMonad.option_bind_code] v := matchb hoist27 with + solve_lift (run (letm[choice_typeMonad.option_bind_code] hoist26 := x in + letb hoist27 := hoist26 >.? 
(ret_both (10 : int8)) in + letm[choice_typeMonad.option_bind_code] hoist33 := ifb hoist27 + then letm[choice_typeMonad.option_bind_code] hoist28 := x in + Option_Some (letb hoist29 := impl__u8__wrapping_add hoist28 (ret_both (3 : int8)) in + Option_Some hoist29) + else letm[choice_typeMonad.option_bind_code] hoist31 := x in + letm[choice_typeMonad.option_bind_code] hoist30 := y in + Option_Some (letb hoist32 := impl__u8__wrapping_add hoist31 hoist30 in + Option_Some hoist32) in + letm[choice_typeMonad.option_bind_code] hoist34 := hoist33 in + letm[choice_typeMonad.option_bind_code] v := matchb hoist34 with | 3 => Option_None | 4 => - letm[choice_typeMonad.option_bind_code] hoist16 := z in - Option_Some (letb hoist17 := hoist16 >.? (ret_both (4 : int64)) in - letb hoist18 := ifb hoist17 + letm[choice_typeMonad.option_bind_code] hoist23 := z in + Option_Some (letb hoist24 := hoist23 >.? (ret_both (4 : int64)) in + letb hoist25 := ifb hoist24 then ret_both (0 : int8) else ret_both (3 : int8) in - solve_lift ((ret_both (4 : int8)) .+ hoist18)) + solve_lift ((ret_both (4 : int8)) .+ hoist25)) | _ => Option_Some (solve_lift (ret_both (12 : int8))) end in - letm[choice_typeMonad.option_bind_code] hoist28 := x in - letb hoist30 := impl__u8__wrapping_add v hoist28 in - letm[choice_typeMonad.option_bind_code] hoist29 := y in - Option_Some (letb hoist31 := impl__u8__wrapping_add hoist30 hoist29 in - Option_Some hoist31))) : both (L1 :|: L2 :|: L3) (I1 :|: I2 :|: I3) (t_Option int8). + letm[choice_typeMonad.option_bind_code] hoist35 := x in + letb hoist37 := impl__u8__wrapping_add v hoist35 in + letm[choice_typeMonad.option_bind_code] hoist36 := y in + Option_Some (letb hoist38 := impl__u8__wrapping_add hoist37 hoist36 in + Option_Some hoist38))) : both (L1 :|: L2 :|: L3) (I1 :|: I2 :|: I3) (t_Option int8). Fail Next Obligation. 
Definition y_loc : Location := @@ -174,8 +268,8 @@ Equations question_mark {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 letb _ := assign todo(term) in letb _ := assign todo(term) in letb _ := assign todo(term) in - letb hoist32 := x >.? (ret_both (90 : int32)) in - ifb hoist32 + letb hoist39 := x >.? (ret_both (90 : int32)) in + ifb hoist39 then impl__map_err (Result_Err (ret_both (12 : int8))) f_from else () else () in @@ -185,8 +279,8 @@ Fail Next Obligation. Equations simplifiable_question_mark {L1 : {fset Location}} {L2 : {fset Location}} {I1 : Interface} {I2 : Interface} (c : both L1 I1 'bool) (x : both L2 I2 (t_Option int32)) : both (L1 :|: L2) (I1 :|: I2) (t_Option int32) := simplifiable_question_mark c x := solve_lift (run (letm[choice_typeMonad.option_bind_code] a := ifb c - then letm[choice_typeMonad.option_bind_code] hoist33 := x in - Option_Some (hoist33 .+ (ret_both (10 : int32))) + then letm[choice_typeMonad.option_bind_code] hoist40 := x in + Option_Some (hoist40 .+ (ret_both (10 : int32))) else Option_Some (ret_both (0 : int32)) in Option_Some (letb b := ret_both (20 : int32) in Option_Some (a .+ b)))) : both (L1 :|: L2) (I1 :|: I2) (t_Option int32). @@ -201,8 +295,8 @@ Equations simplifiable_return {L1 : {fset Location}} {L2 : {fset Location}} {L3 then letm[choice_typeMonad.result_bind_code int32] _ := ifb c2 then letb _ := assign todo(term) in ifb c3 - then letm[choice_typeMonad.result_bind_code int32] hoist34 := ControlFlow_Break (ret_both (1 : int32)) in - ControlFlow_Continue (never_to_any hoist34) + then letm[choice_typeMonad.result_bind_code int32] hoist41 := ControlFlow_Break (ret_both (1 : int32)) in + ControlFlow_Continue (never_to_any hoist41) else () else () in ControlFlow_Continue (letb _ := assign todo(term) in @@ -210,94 +304,87 @@ Equations simplifiable_return {L1 : {fset Location}} {L2 : {fset Location}} {L3 else () in ControlFlow_Continue x)) : both (L1 :|: L2 :|: L3 :|: fset [x_loc]) (I1 :|: I2 :|: I3) int32. 
Fail Next Obligation. +''' +"Side_effects_Issue_1083_.v" = ''' +(* File automatically generated by Hacspec *) +Set Warnings "-notation-overridden,-ambiguous-paths". +From Crypt Require Import choice_type Package Prelude. +Import PackageNotation. +From extructures Require Import ord fset. +From mathcomp Require Import word_ssrZ word. +From Jasmin Require Import word. -Definition t_A : choice_type := - 'unit. -Equations Build_t_A : both (fset []) (fset []) (t_A) := - Build_t_A := - solve_lift (ret_both (tt (* Empty tuple *) : (t_A))) : both (fset []) (fset []) (t_A). -Fail Next Obligation. +From Coq Require Import ZArith. +From Coq Require Import Strings.String. +Import List.ListNotations. +Open Scope list_scope. +Open Scope Z_scope. +Open Scope bool_scope. -Definition t_B : choice_type := - 'unit. -Equations Build_t_B : both (fset []) (fset []) (t_B) := - Build_t_B := - solve_lift (ret_both (tt (* Empty tuple *) : (t_B))) : both (fset []) (fset []) (t_B). -Fail Next Obligation. +From Hacspec Require Import ChoiceEquality. +From Hacspec Require Import LocationUtility. +From Hacspec Require Import Hacspec_Lib_Comparable. +From Hacspec Require Import Hacspec_Lib_Pre. +From Hacspec Require Import Hacspec_Lib. -Definition t_Bar : choice_type := - ('bool × nseq ('bool × 'bool) 6 × 'bool). -Equations f_a {L : {fset Location}} {I : Interface} (s : both L I t_Bar) : both L I 'bool := - f_a s := - bind_both s (fun x => - solve_lift (ret_both (fst x : 'bool))) : both L I 'bool. -Fail Next Obligation. -Equations f_b {L : {fset Location}} {I : Interface} (s : both L I t_Bar) : both L I (nseq ('bool × 'bool) 6 × 'bool) := - f_b s := - bind_both s (fun x => - solve_lift (ret_both (snd x : (nseq ('bool × 'bool) 6 × 'bool)))) : both L I (nseq ('bool × 'bool) 6 × 'bool). -Fail Next Obligation. 
-Equations Build_t_Bar {L0 : {fset Location}} {L1 : {fset Location}} {I0 : Interface} {I1 : Interface} {f_a : both L0 I0 'bool} {f_b : both L1 I1 (nseq ('bool × 'bool) 6 × 'bool)} : both (L0:|:L1) (I0:|:I1) (t_Bar) := - Build_t_Bar := - bind_both f_b (fun f_b => - bind_both f_a (fun f_a => - solve_lift (ret_both ((f_a,f_b) : (t_Bar))))) : both (L0:|:L1) (I0:|:I1) (t_Bar). -Fail Next Obligation. -Notation "'Build_t_Bar' '[' x ']' '(' 'f_a' ':=' y ')'" := (Build_t_Bar (f_a := y) (f_b := f_b x)). -Notation "'Build_t_Bar' '[' x ']' '(' 'f_b' ':=' y ')'" := (Build_t_Bar (f_a := f_a x) (f_b := y)). +Open Scope hacspec_scope. +Import choice.Choice.Exports. -Equations monad_lifting {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 int8) : both L1 I1 (t_Result t_A t_B) := - monad_lifting x := - solve_lift (run (ifb x >.? (ret_both (123 : int8)) - then letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist35 := ControlFlow_Continue (Result_Err B) in - letb hoist36 := Result_Ok hoist35 in - letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist37 := ControlFlow_Break hoist36 in - ControlFlow_Continue (never_to_any hoist37) - else ControlFlow_Continue (Result_Ok A))) : both L1 I1 (t_Result t_A t_B). -Fail Next Obligation. +Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. -Definition t_Foo : choice_type := - ('bool × 'bool × t_Vec t_Bar t_Global × nseq t_Bar 6 × t_Bar). -Equations f_x {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I 'bool := - f_x s := - bind_both s (fun x => - solve_lift (ret_both (fst (fst (fst x)) : 'bool))) : both L I 'bool. -Fail Next Obligation. -Equations f_y {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I ('bool × t_Vec t_Bar t_Global) := - f_y s := - bind_both s (fun x => - solve_lift (ret_both (snd (fst (fst x)) : ('bool × t_Vec t_Bar t_Global)))) : both L I ('bool × t_Vec t_Bar t_Global). -Fail Next Obligation. 
-Equations f_z {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I (nseq t_Bar 6) := - f_z s := - bind_both s (fun x => - solve_lift (ret_both (snd (fst x) : (nseq t_Bar 6)))) : both L I (nseq t_Bar 6). -Fail Next Obligation. -Equations f_bar {L : {fset Location}} {I : Interface} (s : both L I t_Foo) : both L I t_Bar := - f_bar s := - bind_both s (fun x => - solve_lift (ret_both (snd x : t_Bar))) : both L I t_Bar. +Class t_MyFrom (Self : choice_type) := { + f_my_from_loc : {fset Location} ; + f_my_from : (forall {L1 I1}, both L1 I1 v_T -> both (L1 :|: f_my_from_loc) I1 v_Self) ; +}. +Hint Unfold f_my_from_loc. + +#[global] Program Instance int16_t_MyFrom : t_MyFrom int16 int8 := + let f_my_from := fun {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 int8) => solve_lift (cast_int (WS2 := _) x) : both (L1 :|: fset []) I1 int16 in + {| f_my_from_loc := (fset [] : {fset Location}); + f_my_from := (@f_my_from)|}. Fail Next Obligation. -Equations Build_t_Foo {L0 : {fset Location}} {L1 : {fset Location}} {L2 : {fset Location}} {L3 : {fset Location}} {I0 : Interface} {I1 : Interface} {I2 : Interface} {I3 : Interface} {f_x : both L0 I0 'bool} {f_y : both L1 I1 ('bool × t_Vec t_Bar t_Global)} {f_z : both L2 I2 (nseq t_Bar 6)} {f_bar : both L3 I3 t_Bar} : both (L0:|:L1:|:L2:|:L3) (I0:|:I1:|:I2:|:I3) (t_Foo) := - Build_t_Foo := - bind_both f_bar (fun f_bar => - bind_both f_z (fun f_z => - bind_both f_y (fun f_y => - bind_both f_x (fun f_x => - solve_lift (ret_both ((f_x,f_y,f_z,f_bar) : (t_Foo))))))) : both (L0:|:L1:|:L2:|:L3) (I0:|:I1:|:I2:|:I3) (t_Foo). +Hint Unfold int16_t_MyFrom. + +Equations f {L1 : {fset Location}} {I1 : Interface} (x : both L1 I1 int8) : both L1 I1 (t_Result int16 int16) := + f x := + solve_lift (run (letm[choice_typeMonad.result_bind_code int16] _ := impl__map_err (Result_Err (ret_both (1 : int8))) f_from in + Result_Ok (Result_Ok (f_my_from x)))) : both L1 I1 (t_Result int16 int16). Fail Next Obligation. 
-Notation "'Build_t_Foo' '[' x ']' '(' 'f_x' ':=' y ')'" := (Build_t_Foo (f_x := y) (f_y := f_y x) (f_z := f_z x) (f_bar := f_bar x)). -Notation "'Build_t_Foo' '[' x ']' '(' 'f_y' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := y) (f_z := f_z x) (f_bar := f_bar x)). -Notation "'Build_t_Foo' '[' x ']' '(' 'f_z' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := y) (f_bar := f_bar x)). -Notation "'Build_t_Foo' '[' x ']' '(' 'f_bar' ':=' y ')'" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := f_z x) (f_bar := y)). +''' +"Side_effects_Issue_1089_.v" = ''' +(* File automatically generated by Hacspec *) +Set Warnings "-notation-overridden,-ambiguous-paths". +From Crypt Require Import choice_type Package Prelude. +Import PackageNotation. +From extructures Require Import ord fset. +From mathcomp Require Import word_ssrZ word. +From Jasmin Require Import word. -Equations assign_non_trivial_lhs {L1 : {fset Location}} {I1 : Interface} (foo : both L1 I1 t_Foo) : both L1 I1 t_Foo := - assign_non_trivial_lhs foo := - letb _ := assign todo(term) in - letb _ := assign todo(term) in - letb _ := assign todo(term) in - letb _ := assign todo(term) in - letb _ := assign todo(term) in - solve_lift foo : both L1 I1 t_Foo. +From Coq Require Import ZArith. +From Coq Require Import Strings.String. +Import List.ListNotations. +Open Scope list_scope. +Open Scope Z_scope. +Open Scope bool_scope. + +From Hacspec Require Import ChoiceEquality. +From Hacspec Require Import LocationUtility. +From Hacspec Require Import Hacspec_Lib_Comparable. +From Hacspec Require Import Hacspec_Lib_Pre. +From Hacspec Require Import Hacspec_Lib. + +Open Scope hacspec_scope. +Import choice.Choice.Exports. + +Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations. 
+ +Equations test {L1 : {fset Location}} {L2 : {fset Location}} {I1 : Interface} {I2 : Interface} (x : both L1 I1 (t_Option int32)) (y : both L2 I2 (t_Option int32)) : both (L1 :|: L2) (I1 :|: I2) (t_Option int32) := + test x y := + solve_lift (run (letb hoist3 := fun i => + letm[choice_typeMonad.option_bind_code] hoist1 := y in + Option_Some (letb hoist2 := i .+ hoist1 in + Option_Some hoist2) in + letb hoist4 := impl__map x hoist3 in + hoist4)) : both (L1 :|: L2) (I1 :|: I2) (t_Option int32). Fail Next Obligation. ''' diff --git a/test-harness/src/snapshots/toolchain__slices into-coq.snap b/test-harness/src/snapshots/toolchain__slices into-coq.snap index af66d22c6..93fa425b0 100644 --- a/test-harness/src/snapshots/toolchain__slices into-coq.snap +++ b/test-harness/src/snapshots/toolchain__slices into-coq.snap @@ -29,24 +29,30 @@ diagnostics = [] [stdout.files] "Slices.v" = ''' (* File automatically generated by Hacspec *) -From Hacspec Require Import Hacspec_Lib MachineIntegers. From Coq Require Import ZArith. +Require Import List. Import List.ListNotations. Open Scope Z_scope. Open Scope bool_scope. +Require Import Ascii. +Require Import String. +Require Import Coq.Floats.Floats. +From RecordUpdate Require Import RecordSet. +Import RecordSetNotations. -(*Not implemented yet? todo(item)*) -Definition v_VERSION : seq int8 := - unsize (array_from_list [(@repr WORDSIZE8 118); - (@repr WORDSIZE8 49)]). -Definition do_something (_ : seq int8) : unit := +(* NotImplementedYet *) + +Definition v_VERSION : t_Slice t_u8 := + unsize ([118; 49]). + +Definition do_something (_ : t_Slice t_u8) : unit := tt. -Definition r#unsized (_ : nseq (seq int8) TODO: Int.to_string length) : unit := +Definition r#unsized (_ : t_Array (t_Slice t_u8) (1)) : unit := tt. -Definition sized (x : nseq (nseq int8 TODO: Int.to_string length) TODO: Int.to_string length) : unit := - r#unsized (array_from_list [unsize (x.[(@repr WORDSIZE32 0)])]). 
+Definition sized (x : t_Array (t_Array (t_u8) (4)) (1)) : unit := + r#unsized ([unsize (index (x) (0))]). ''' diff --git a/test-harness/src/snapshots/toolchain__traits into-fstar.snap b/test-harness/src/snapshots/toolchain__traits into-fstar.snap index 3618a12ac..3ada99292 100644 --- a/test-harness/src/snapshots/toolchain__traits into-fstar.snap +++ b/test-harness/src/snapshots/toolchain__traits into-fstar.snap @@ -387,11 +387,11 @@ let method_caller () class t_SubTrait (v_Self: Type0) (v_TypeArg: Type0) (v_ConstArg: usize) = { - [@@@ FStar.Tactics.Typeclasses.no_method]_super_8779313392680198588:t_Trait v_Self + [@@@ FStar.Tactics.Typeclasses.no_method]_super_11748868061750783190:t_Trait v_Self v_TypeArg v_ConstArg; f_AssocType:Type0; - f_AssocType_6369404467997533198:t_Trait f_AssocType v_TypeArg v_ConstArg + f_AssocType_10469511598065652520:t_Trait f_AssocType v_TypeArg v_ConstArg } ''' "Traits.Interlaced_consts_types.fst" = ''' @@ -400,6 +400,9 @@ module Traits.Interlaced_consts_types open Core open FStar.Mul +type t_Bar (v_FooConst: usize) (v_FooType: Type0) = + | Bar : t_Array v_FooType v_FooConst -> t_Bar v_FooConst v_FooType + class t_Foo (v_Self: Type0) (v_FooConst: usize) (v_FooType: Type0) = { f_fun_pre: v_FunConst: usize -> @@ -456,9 +459,6 @@ let impl (v_FooConst: usize) (#v_FooType #v_SelfType: Type0) : t_Foo v_SelfType -> () } - -type t_Bar (v_FooConst: usize) (v_FooType: Type0) = - | Bar : t_Array v_FooType v_FooConst -> t_Bar v_FooConst v_FooType ''' "Traits.Recursive_trait_with_assoc_type.fst" = ''' module Traits.Recursive_trait_with_assoc_type @@ -491,17 +491,14 @@ module Traits.Unconstrainted_types_issue_677_ open Core open FStar.Mul +type t_Plus = | Plus : t_Plus + class t_PolyOp (v_Self: Type0) = { f_op_pre:u32 -> u32 -> Type0; f_op_post:u32 -> u32 -> u32 -> Type0; f_op:x0: u32 -> x1: u32 -> Prims.Pure u32 (f_op_pre x0 x1) (fun result -> f_op_post x0 x1 result) } -let twice (#v_OP: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: 
t_PolyOp v_OP) (x: u32) - : u32 = f_op #v_OP #FStar.Tactics.Typeclasses.solve x x - -type t_Plus = | Plus : t_Plus - [@@ FStar.Tactics.Typeclasses.tcinstance] let impl: t_PolyOp t_Plus = { @@ -520,6 +517,9 @@ let impl_1: t_PolyOp t_Times = f_op = fun (x: u32) (y: u32) -> x *! y } +let twice (#v_OP: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PolyOp v_OP) (x: u32) + : u32 = f_op #v_OP #FStar.Tactics.Typeclasses.solve x x + let both (x: u32) : (u32 & u32) = twice #t_Plus x, twice #t_Times x <: (u32 & u32) ''' "Traits.fst" = ''' @@ -546,6 +546,8 @@ let impl__Error__for_application_callback (_: Prims.unit) : Prims.unit -> t_Err let t_Error_cast_to_repr (x: t_Error) : isize = match x with | Error_Fail -> isz 0 +type t_Struct = | Struct : t_Struct + class t_SuperTrait (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]_super_9442900250278684536:Core.Clone.t_Clone v_Self; f_function_of_super_trait_pre:v_Self -> Type0; @@ -600,6 +602,15 @@ let iter_option (#v_T: Type0) (x: Core.Option.t_Option v_T) : Core.Option.t_Into #FStar.Tactics.Typeclasses.solve (Core.Option.impl__as_ref #v_T x <: Core.Option.t_Option v_T) +let use_impl_trait (_: Prims.unit) : Prims.unit = + let iter:_ = iter_option #bool (Core.Option.Option_Some false <: Core.Option.t_Option bool) in + let tmp0, out:(_ & Core.Option.t_Option bool) = + Core.Iter.Traits.Iterator.f_next #_ #FStar.Tactics.Typeclasses.solve iter + in + let iter:_ = tmp0 in + let _:Core.Option.t_Option bool = out in + () + class t_Foo (v_Self: Type0) = { f_AssocType:Type0; f_AssocType_15525962639250476383:t_SuperTrait f_AssocType; @@ -629,8 +640,6 @@ class t_Lang (v_Self: Type0) = { -> Prims.Pure (v_Self & f_Var) (f_s_pre x0 x1) (fun result -> f_s_post x0 x1 result) } -type t_Struct = | Struct : t_Struct - let f (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Foo v_T) (x: v_T) : Prims.unit = let _:Prims.unit = f_assoc_f #v_T #FStar.Tactics.Typeclasses.solve () in f_method_f #v_T 
#FStar.Tactics.Typeclasses.solve x diff --git a/tests/Cargo.lock b/tests/Cargo.lock index a8af7da68..5fcbe4dc7 100644 --- a/tests/Cargo.lock +++ b/tests/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "abstract_integers" @@ -149,6 +149,10 @@ dependencies = [ "os_str_bytes", ] +[[package]] +name = "constructor-as-closure" +version = "0.1.0" + [[package]] name = "criterion" version = "0.4.0" @@ -219,6 +223,9 @@ dependencies = [ [[package]] name = "cyclic-modules" version = "0.1.0" +dependencies = [ + "hax-lib", +] [[package]] name = "dyn" diff --git a/tests/Cargo.toml b/tests/Cargo.toml index 10969e04d..df3b03ffa 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -35,5 +35,6 @@ members = [ "guards", "cyclic-modules", "unsafe", + "constructor-as-closure", ] resolver = "2" diff --git a/tests/constructor-as-closure/Cargo.toml b/tests/constructor-as-closure/Cargo.toml new file mode 100644 index 000000000..523a0ff06 --- /dev/null +++ b/tests/constructor-as-closure/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "constructor-as-closure" +version = "0.1.0" +edition = "2021" + +[dependencies] + +[package.metadata.hax-tests] +into."fstar" = { broken = false, snapshot = "stdout", issue_id = "914" } diff --git a/tests/constructor-as-closure/src/lib.rs b/tests/constructor-as-closure/src/lib.rs new file mode 100644 index 000000000..5d11085fe --- /dev/null +++ b/tests/constructor-as-closure/src/lib.rs @@ -0,0 +1,15 @@ +struct Test(i32); +impl Test { + pub fn test(x: Option) -> Option { + x.map(Self) + } +} +pub enum Context { + A(i32), + B(i32), +} +impl Context { + pub fn test(x: Option) -> Option { + x.map(Self::B) + } +} diff --git a/tests/cyclic-modules/Cargo.toml b/tests/cyclic-modules/Cargo.toml index 092cfd078..480511746 100644 --- a/tests/cyclic-modules/Cargo.toml +++ b/tests/cyclic-modules/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" edition = "2021" 
[dependencies] +hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] into."fstar" = { broken = false, snapshot = "stdout", issue_id = "396" } diff --git a/tests/cyclic-modules/src/lib.rs b/tests/cyclic-modules/src/lib.rs index daa7050eb..216a54812 100644 --- a/tests/cyclic-modules/src/lib.rs +++ b/tests/cyclic-modules/src/lib.rs @@ -118,3 +118,63 @@ mod enums_b { T::A } } + +mod m1 { + pub fn a() { + super::m2::c() + } +} + +mod m2 { + pub fn d() {} + pub fn b() { + super::m1::a(); + d() + } + pub fn c() {} +} + +pub mod disjoint_cycle_a { + pub fn f() { + super::disjoint_cycle_b::h() + } + pub fn g() {} +} +pub mod disjoint_cycle_b { + pub fn h() {} + pub fn i() { + super::disjoint_cycle_a::g() + } +} + +pub mod variant_constructor_a { + pub enum Context { + A(i32), + B(i32), + } + pub fn f() -> Context { + super::variant_constructor_b::h() + } + impl Context { + pub fn test(x: Option) -> Option { + x.map(Self::A) + } + } +} +pub mod variant_constructor_b { + pub fn h() -> super::variant_constructor_a::Context { + super::variant_constructor_a::Context::A(1) + } +} + +pub mod late_skip_a { + pub fn f() { + super::late_skip_b::f() + } +} +pub mod late_skip_b { + #[hax_lib::requires(true)] + pub fn f() { + super::late_skip_a::f() + } +} diff --git a/tests/functions/Cargo.toml b/tests/functions/Cargo.toml index 215bd726c..e430cbdf8 100644 --- a/tests/functions/Cargo.toml +++ b/tests/functions/Cargo.toml @@ -7,4 +7,4 @@ edition = "2021" hax-lib = { path = "../../hax-lib" } [package.metadata.hax-tests] -into."fstar+coq" = { snapshot = "stdout" } +into."fstar" = { snapshot = "stdout" } diff --git a/tests/functions/src/lib.rs b/tests/functions/src/lib.rs index f3c4b4801..8d80d6ad1 100644 --- a/tests/functions/src/lib.rs +++ b/tests/functions/src/lib.rs @@ -4,3 +4,19 @@ fn calling_function_pointer() { let f_ptr = f::; f_ptr(); } + +mod issue_1048 { + pub struct CallableViaDeref; + + impl core::ops::Deref for CallableViaDeref { + type Target = fn() -> bool; + + 
fn deref(&self) -> &Self::Target { + &((|| true) as fn() -> bool) + } + } + + pub fn call_via_deref() -> bool { + CallableViaDeref() + } +} diff --git a/tests/loops/src/lib.rs b/tests/loops/src/lib.rs index 7c209eee1..e9f5c89fa 100644 --- a/tests/loops/src/lib.rs +++ b/tests/loops/src/lib.rs @@ -139,3 +139,108 @@ mod while_loops { x + 12 } } + +mod control_flow { + fn double_sum() -> i32 { + let mut sum = 0; + for i in 1..10 { + if i < 0 { + break; + } + sum += i; + } + sum *= 2; + sum + } + fn double_sum2() -> i32 { + let mut sum = 0; + let mut sum2 = 0; + for i in 1..10 { + if i < 0 { + break; + } + sum += i; + sum2 += i + } + sum + sum2 + } + fn double_sum_return(v: &[i32]) -> i32 { + let mut sum = 0; + for i in v { + if *i < 0 { + return 0; + } + sum += *i; + } + sum *= 2; + sum + } + fn double_sum2_return(v: &[i32]) -> i32 { + let mut sum = 0; + let mut sum2 = 0; + for i in v { + if *i < 0 { + return 0; + } + sum += *i; + sum2 += *i + } + sum + sum2 + } + fn bigger_power_2(x: i32) -> i32 { + let mut pow = 1; + while pow < 1000000 { + pow *= 2; + if pow < x { + pow *= 3; + if true { + break; + } + } + pow *= 2 + } + pow + } + struct M { + m: Vec, + } + + impl M { + fn decoded_message(&self) -> Option> { + for i in 0..self.m.len() { + if i > 5 { + return None; + } + } + return Some(self.m.clone()); + } + } + fn nested() -> i32 { + let mut sum = 0; + for i in 1..10 { + for j in 1..10 { + if j < 0 { + break; + } + sum += j; + } + sum += i; + } + sum *= 2; + sum + } + fn nested_return() -> i32 { + let mut sum = 0; + for i in 1..10 { + for j in 1..10 { + if j < 0 { + return 0; + } + sum += j; + } + sum += i; + } + sum *= 2; + sum + } +} diff --git a/tests/side-effects/src/lib.rs b/tests/side-effects/src/lib.rs index 7bef76cb4..6c8f45a19 100644 --- a/tests/side-effects/src/lib.rs +++ b/tests/side-effects/src/lib.rs @@ -156,3 +156,26 @@ fn assign_non_trivial_lhs(mut foo: Foo) -> Foo { foo.y.1[3].b.0[5].0 = true; foo } + +mod issue_1083 { + trait MyFrom { + fn 
my_from(x: T) -> Self; + } + + impl MyFrom for u16 { + fn my_from(x: u8) -> u16 { + x as u16 + } + } + + fn f(x: u8) -> Result { + Err(1u8)?; + Ok(u16::my_from(x)) + } +} + +mod issue_1089 { + fn test(x: Option, y: Option) -> Option { + x.map(|i| Some(i + y?))? + } +} diff --git a/tests/traits/src/lib.rs b/tests/traits/src/lib.rs index 4b3ced22f..9fea11184 100644 --- a/tests/traits/src/lib.rs +++ b/tests/traits/src/lib.rs @@ -83,6 +83,12 @@ fn iter_option<'a, T>(x: &'a Option) -> impl Iterator { x.as_ref().into_iter() } +// Issue #684 +fn use_impl_trait() { + let mut iter = iter_option(&Some(false)); + let _ = iter.next(); +} + mod for_clauses { trait Foo { fn to_t(&self) -> T;