Merge branch 'trunk' into gen-dev/records-base

This commit is contained in:
Brendan Hansknecht 2021-08-14 21:21:31 -07:00
commit cc1b7df06a
466 changed files with 40237 additions and 19367 deletions

4
.earthignore Normal file
View file

@ -0,0 +1,4 @@
AUTHORS
nix
.envrc
.gitignore

155
.envrc
View file

@ -1,154 +1 @@
# Load environment variables from `nix-shell` and export it out. use nix
#
# Usage: use_nix [-s <nix-expression>] [-w <path>] [-w <path>] ...
# -s nix-expression: The nix expression to use for building the shell environment.
# -w path: watch a file for changes. It can be specified multiple times. The
# shell specified with -s is automatically watched.
#
# If no nix-expression was given with -s, it will attempt to find and load
# the shell using the following files in order: shell.nix and default.nix.
#
# Example:
# - use_nix
# - use_nix -s shell.nix -w .nixpkgs-version.json
#
# The dependencies pulled by nix-shell are added to Nix's garbage collector
# roots, such that the environment remains persistent.
#
# Nix-shell is invoked only once per environment, and the output is cached for
# better performance. If any of the watched files change, then the environment
# is rebuilt.
#
# To remove old environments, and allow the GC to collect their dependencies:
# rm -f .direnv
#
# Build (or reuse) a cached nix-shell environment and export it into direnv.
#
# Options:
#   -s <expr>  nix expression to build the shell from
#              (default: shell.nix, then default.nix)
#   -w <path>  extra file to watch for changes (repeatable); the shell
#              expression itself is always watched
use_nix() {
    if ! validate_version; then
        echo "This .envrc requires direnv version 2.18.2 or above."
        exit 1
    fi
    # define all local variables
    local shell
    local files_to_watch=()
    local opt OPTARG OPTIND # define vars used by getopts locally
    # NOTE: the spec used to be ":n:s:w:", but no "n)" case was ever handled,
    # so "-n foo" was silently swallowed. Dropping the dead "n:" entry makes
    # -n report as an invalid option instead.
    while getopts ":s:w:" opt; do
        case "${opt}" in
            s)
                shell="${OPTARG}"
                files_to_watch=("${files_to_watch[@]}" "${shell}")
                ;;
            w)
                files_to_watch=("${files_to_watch[@]}" "${OPTARG}")
                ;;
            :)
                fail "Invalid option: $OPTARG requires an argument"
                ;;
            \?)
                fail "Invalid option: $OPTARG"
                ;;
        esac
    done
    shift $((OPTIND - 1))
    # No -s given: fall back to shell.nix, then default.nix.
    if [[ -z "${shell}" ]]; then
        if [[ -f shell.nix ]]; then
            shell=shell.nix
            files_to_watch=("${files_to_watch[@]}" shell.nix)
        elif [[ -f default.nix ]]; then
            shell=default.nix
            files_to_watch=("${files_to_watch[@]}" default.nix)
        else
            fail "ERR: no shell was given"
        fi
    fi
    # Every watched file must exist, otherwise the hash below is meaningless.
    local f
    for f in "${files_to_watch[@]}"; do
        if ! [[ -f "${f}" ]]; then
            fail "cannot watch file ${f} because it does not exist"
        fi
    done
    # compute the hash of all the files that makes up the development environment
    local env_hash="$(hash_contents "${files_to_watch[@]}")"
    # define the paths (one cache dir per distinct environment hash)
    local dir="$(direnv_layout_dir)"
    local wd="${dir}/wd-${env_hash}"
    local drv="${wd}/env.drv"
    local dump="${wd}/dump.env"
    # Generate the environment if we do not have one generated already.
    if [[ ! -f "${drv}" ]]; then
        mkdir -p "${wd}"
        log_status "use nix: deriving new environment"
        # --add-root/--indirect register the derivation and its references as
        # GC roots, so `nix-collect-garbage` won't delete the environment.
        IN_NIX_SHELL=1 nix-instantiate --add-root "${drv}" --indirect "${shell}" > /dev/null
        nix-store -r $(nix-store --query --references "${drv}") --add-root "${wd}/dep" --indirect > /dev/null
        if [[ "${?}" -ne 0 ]] || [[ ! -f "${drv}" ]]; then
            rm -rf "${wd}"
            fail "use nix: was not able to derive the new environment. Please run 'direnv reload' to try again."
        fi
        log_status "use nix: updating cache"
        # Dump the pure nix-shell environment to a file we can re-eval cheaply.
        nix-shell --pure "${drv}" --show-trace --run "$(join_args "$direnv" dump bash)" > "${dump}"
        if [[ "${?}" -ne 0 ]] || [[ ! -f "${dump}" ]] || ! grep -q IN_NIX_SHELL "${dump}"; then
            rm -rf "${wd}"
            fail "use nix: was not able to update the cache of the environment. Please run 'direnv reload' to try again."
        fi
    fi
    # evaluate the dump created by nix-shell earlier, but have to merge the PATH
    # with the current PATH
    # NOTE: we eval the dump here as opposed to direnv_load it because we don't
    # want to persist environment variables coming from the shell at the time of
    # the dump. See https://github.com/direnv/direnv/issues/405 for context.
    local path_backup="${PATH}"
    eval $(cat "${dump}")
    export PATH="${PATH}:${path_backup}"
    # cleanup the environment of variables that are not required, or are causing issues.
    unset shellHook # when shellHook is present, then any nix-shell'd script will execute it!
    # watch all the files we were asked to watch for the environment
    for f in "${files_to_watch[@]}"; do
        watch_file "${f}"
    done
}
# Report an error through direnv's logger, then abort evaluation of this .envrc.
fail() {
    log_error "$@"
    exit 1
}
# Print a stable 32-character md5 digest of the concatenated contents of the
# given files. This digest keys the per-environment cache directory, so it
# must never be empty.
hash_contents() {
    if has md5sum; then
        cat "${@}" | md5sum | cut -c -32
    elif has md5; then
        cat "${@}" | md5 -q
    else
        # Previously this silently produced an empty string when neither tool
        # existed, which made every environment collide on the same "wd-" cache
        # directory. Fail loudly instead.
        fail "use nix: neither md5sum nor md5 is available to hash the environment"
    fi
}
# Print a 32-character md5 digest of each given file (hashing by filename,
# unlike hash_contents which hashes the concatenated contents).
hash_file() {
    if has md5sum; then
        md5sum "${@}" | cut -c -32
    elif has md5; then
        md5 -q "${@}"
    else
        # Same defect as hash_contents: an empty digest would defeat caching.
        fail "use nix: neither md5sum nor md5 is available to hash files"
    fi
}
# Succeed (return 0) only when the installed direnv is at least 2.18.2,
# the minimum version whose APIs use_nix relies on.
validate_version() {
    local version major minor patch
    version="$("${direnv}" version)"
    major="$(cut -d. -f1 <<< "${version}")"
    minor="$(cut -d. -f2 <<< "${version}")"
    patch="$(cut -d. -f3 <<< "${version}")"
    # Accept anything at or above 2.18.2.
    if (( major > 2 )); then return 0; fi
    if (( major == 2 && minor > 18 )); then return 0; fi
    if (( major == 2 && minor == 18 && patch >= 2 )); then return 0; fi
    return 1
}
# Entry point: load the environment described by shell.nix into this shell.
use_nix -s shell.nix

45
.github/workflows/benchmarks.yml vendored Normal file
View file

@ -0,0 +1,45 @@
on:
pull_request:
paths-ignore:
- '**.md'
name: Benchmarks
env:
RUST_BACKTRACE: 1
jobs:
prep-dependency-container:
name: benchmark roc programs
runs-on: [self-hosted, i7-6700K]
timeout-minutes: 60
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v2
with:
ref: "trunk"
clean: "true"
- name: Earthly version
run: earthly --version
- name: on trunk; prepare a self-contained benchmark folder
run: ./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=trunk +prep-bench-folder
- uses: actions/checkout@v2
with:
clean: "false" # we want to keep the benchmark folder
- name: on current branch; prepare a self-contained benchmark folder
run: ./ci/safe-earthly.sh +prep-bench-folder
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
- name: build benchmark runner
run: cd ci/bench-runner && cargo build --release && cd ../..
- name: run benchmarks with regression check
run: ./ci/bench-runner/target/release/bench-runner --check-executables-changed

View file

@ -1,4 +1,7 @@
on: [pull_request] on:
pull_request:
paths-ignore:
- '**.md'
name: CI name: CI
@ -6,10 +9,10 @@ env:
RUST_BACKTRACE: 1 RUST_BACKTRACE: 1
jobs: jobs:
prep-dependency-container: build-fmt-clippy-test:
name: fmt, clippy, test --release name: fmt, clippy, test --release
runs-on: [self-hosted] runs-on: [self-hosted]
timeout-minutes: 60 timeout-minutes: 90
env: env:
FORCE_COLOR: 1 FORCE_COLOR: 1
steps: steps:
@ -21,4 +24,4 @@ jobs:
run: earthly --version run: earthly --version
- name: install dependencies, build, run zig tests, rustfmt, clippy, cargo test --release - name: install dependencies, build, run zig tests, rustfmt, clippy, cargo test --release
run: earthly +test-all run: ./ci/safe-earthly.sh +test-all

View file

@ -1,4 +1,4 @@
on: on:
schedule: schedule:
- cron: '0 0 * * *' - cron: '0 0 * * *'

24
.github/workflows/spellcheck.yml vendored Normal file
View file

@ -0,0 +1,24 @@
on: [pull_request]
name: SpellCheck
env:
RUST_BACKTRACE: 1
jobs:
spell-check:
name: spell check
runs-on: [self-hosted]
timeout-minutes: 10
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v2
with:
clean: "true"
- name: Earthly version
run: earthly --version
- name: install spell checker, do spell check
run: ./ci/safe-earthly.sh +check-typos

22
.github/workflows/www.yml vendored Normal file
View file

@ -0,0 +1,22 @@
name: deploy www.roc-lang.org
# Whenever a commit lands on trunk, deploy the site
on:
push:
branches:
- deploy-www # TODO change to trunk
jobs:
deploy:
name: 'Deploy to Netlify'
runs-on: [self-hosted]
steps:
- uses: jsmrcaga/action-netlify-deploy@v1.6.0
with:
install_command: 'pwd; cd ../../www'
build_command: 'bash build.sh'
build_directory: 'build'
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
NETLIFY_DEPLOY_MESSAGE: "Deploy git ref ${{ github.ref }}"
NETLIFY_DEPLOY_TO_PROD: true

7
.gitignore vendored
View file

@ -6,6 +6,7 @@ zig-cache
# llvm human-readable output # llvm human-readable output
*.ll *.ll
*.bc
#valgrind #valgrind
vgcore.* vgcore.*
@ -13,6 +14,7 @@ vgcore.*
#editors #editors
.idea/ .idea/
.vscode/ .vscode/
.ignore
#files too big to track in git #files too big to track in git
editor/benches/resources/100000_lines.roc editor/benches/resources/100000_lines.roc
@ -26,3 +28,8 @@ editor/benches/resources/500_lines.roc
# rust cache (sccache folder) # rust cache (sccache folder)
sccache_dir sccache_dir
# self-contained benchmark folder
bench-folder*
# earthly
earthly_log.txt

View file

@ -1 +1 @@
10.0.0 12.0.0

View file

@ -6,7 +6,7 @@
To build the compiler, you need these installed: To build the compiler, you need these installed:
* `libunwind` (macOS should already have this one installed) * `libunwind` (macOS should already have this one installed)
* `libc++-dev` * `libc++-dev` and `libc++abi-dev`
* Python 2.7 (Windows only), `python-is-python3` (Ubuntu) * Python 2.7 (Windows only), `python-is-python3` (Ubuntu)
* [Zig](https://ziglang.org/), see below for version * [Zig](https://ziglang.org/), see below for version
* LLVM, see below for version * LLVM, see below for version
@ -20,8 +20,8 @@ For debugging LLVM IR, we use [DebugIR](https://github.com/vaivaswatha/debugir).
### libunwind & libc++-dev ### libunwind & libc++-dev
MacOS systems should already have `libunwind`, but other systems will need to install it (On Ubuntu, this can be donw with `sudo apt-get install libunwind-dev`). MacOS systems should already have `libunwind`, but other systems will need to install it (On Ubuntu, this can be done with `sudo apt-get install libunwind-dev`).
Some systems may already have `libc++-dev` on them, but if not, you may need to install it. (On Ubuntu, this can be done with `sudo apt-get install libc++-dev`.) Some systems may already have `libc++-dev` on them, but if not, you may need to install it. (On Ubuntu, this can be done with `sudo apt-get install libc++-dev libc++abi-dev`.)
### libcxb libraries ### libcxb libraries
@ -40,23 +40,43 @@ sudo apt-get install libxcb-render0-dev libxcb-shape0-dev libxcb-xfixes0-dev
``` ```
### Zig ### Zig
**version: 0.7.x** **version: 0.8.0**
If you're on MacOS, you can install with `brew install zig` For any OS, you can use [`zigup`](https://github.com/marler8997/zigup) to manage zig installations.
If you're on Ubuntu and use Snap, you can install with `snap install zig --classic --beta`
For any other OS, checkout the [Zig installation page](https://github.com/ziglang/zig/wiki/Install-Zig-from-a-Package-Manager) If you prefer a package manager, you can try the following:
- For MacOS, you can install with `brew install zig`
- For Ubuntu, if you use Snap, you can install with `snap install zig --classic --beta`
- For other systems, checkout this [page](https://github.com/ziglang/zig/wiki/Install-Zig-from-a-Package-Manager)
If you want to install it manually, you can also download Zig directly [here](https://ziglang.org/download/). Just make sure you download the right version, the bleeding edge master build is the first download link on this page.
### LLVM ### LLVM
**version: 10.0.x** **version: 12.0.x**
For macOS, you can install LLVM 12 using `brew install llvm@12` and then adding
`/usr/local/opt/llvm/bin` to your `PATH`. You can confirm this worked by
running `llc --version` - it should mention "LLVM version 12.0.0" at the top.
For Ubuntu and Debian, you can use the `Automatic installation script` at [apt.llvm.org](https://apt.llvm.org): For Ubuntu and Debian, you can use the `Automatic installation script` at [apt.llvm.org](https://apt.llvm.org):
``` ```
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
``` ```
For macOS, check the troubleshooting section below. If you use this script, you'll need to add `clang` and `llvm-as` to your `PATH`.
By default, the script installs them as `llvm-as-12` and `clang-12`,
respectively. You can address this with symlinks like so:
There are also plenty of alternative options at http://releases.llvm.org/download.html ```
sudo ln -s /usr/bin/clang-12 /usr/bin/clang
```
```
sudo ln -s /usr/bin/llvm-as-12 /usr/bin/llvm-as
```
There are also alternative installation options at http://releases.llvm.org/download.html
[Troubleshooting](#troubleshooting)
## Using Nix ## Using Nix
@ -94,6 +114,10 @@ You should be in a shell with everything needed to build already installed. Next
You should be in a repl now. Have fun! You should be in a repl now. Have fun!
### Extra tips
If you plan on using `nix-shell` regularly, check out [direnv](https://direnv.net/) and [lorri](https://github.com/target/lorri). Whenever you `cd` into `roc/`, they will automatically load the Nix dependencies into your current shell, so you never have to run nix-shell directly!
### Editor ### Editor
When you want to run the editor from Ubuntu inside nix you need to install [nixGL](https://github.com/guibou/nixGL) as well: When you want to run the editor from Ubuntu inside nix you need to install [nixGL](https://github.com/guibou/nixGL) as well:
@ -127,38 +151,31 @@ That will help us improve this document for everyone who reads it in the future!
### LLVM installation on Linux ### LLVM installation on Linux
For a current list of all dependency versions and their names in apt, see the Earthfile.
On some Linux systems we've seen the error "failed to run custom build command for x11". On some Linux systems we've seen the error "failed to run custom build command for x11".
On Ubuntu, running `sudo apt install pkg-config cmake libx11-dev` fixed this. On Ubuntu, running `sudo apt install pkg-config cmake libx11-dev` fixed this.
If you encounter `cannot find -lz` run `sudo apt install zlib1g-dev`. If you encounter `cannot find -lz` run `sudo apt install zlib1g-dev`.
If you encounter:
```
error: No suitable version of LLVM was found system-wide or pointed
to by LLVM_SYS_120_PREFIX.
```
Add `export LLVM_SYS_120_PREFIX=/usr/lib/llvm-12` to your `~/.bashrc` or equivalent file for your shell.
### LLVM installation on macOS ### LLVM installation on macOS
By default homebrew will try to install llvm 11, which is currently If installing LLVM fails, it might help to run `sudo xcode-select -r` before installing again.
unsupported. You need to install an older version (10.0.0_3) by doing:
```
$ brew edit llvm
# Replace the contents of the file with https://raw.githubusercontent.com/Homebrew/homebrew-core/6616d50fb0b24dbe30f5e975210bdad63257f517/Formula/llvm.rb
# we expect llvm-as-10 to be present
$ ln -s /usr/local/opt/llvm/bin/{llvm-as,llvm-as-10}
# "pinning" ensures that homebrew doesn't update it automatically
$ brew pin llvm
```
It might also be useful to add these exports to your shell: It might also be useful to add these exports to your shell:
``` ```
export PATH="/usr/local/opt/llvm/bin:$PATH"
export LDFLAGS="-L/usr/local/opt/llvm/lib -Wl,-rpath,/usr/local/opt/llvm/lib" export LDFLAGS="-L/usr/local/opt/llvm/lib -Wl,-rpath,/usr/local/opt/llvm/lib"
export CPPFLAGS="-I/usr/local/opt/llvm/include" export CPPFLAGS="-I/usr/local/opt/llvm/include"
``` ```
If installing LLVM still fails, it might help to run `sudo xcode-select -r` before installing again.
### LLVM installation on Windows ### LLVM installation on Windows
Installing LLVM's prebuilt binaries doesn't seem to be enough for the `llvm-sys` crate that Roc depends on, so I had to build LLVM from source Installing LLVM's prebuilt binaries doesn't seem to be enough for the `llvm-sys` crate that Roc depends on, so I had to build LLVM from source
@ -190,8 +207,8 @@ Create `~/.cargo/config.toml` if it does not exist and add this to it:
rustflags = ["-C", "link-arg=-fuse-ld=lld", "-C", "target-cpu=native"] rustflags = ["-C", "link-arg=-fuse-ld=lld", "-C", "target-cpu=native"]
``` ```
Then install `lld` version 9 (e.g. with `$ sudo apt-get install lld-9`) Then install `lld` version 12 (e.g. with `$ sudo apt-get install lld-12`)
and make sure there's a `ld.lld` executable on your `PATH` which and make sure there's a `ld.lld` executable on your `PATH` which
is symlinked to `lld-9`. is symlinked to `lld-12`.
That's it! Enjoy the faster builds. That's it! Enjoy the faster builds.

View file

@ -10,9 +10,8 @@ Check [Build from source](BUILDING_FROM_SOURCE.md) for instructions.
## Running Tests ## Running Tests
To run all tests as they are run on CI, [install earthly](https://earthly.dev/get-earthly) and run: To run all tests and checks as they are run on CI, [install earthly](https://earthly.dev/get-earthly) and run:
``` ```
mkdir -p sccache_dir
earthly +test-all earthly +test-all
``` ```
@ -20,8 +19,9 @@ Earthly may temporarily use a lot of disk space, up to 90 GB. This disk space is
## Contribution Tips ## Contribution Tips
- Before making your first pull request, definitely talk to an existing contributor on [Roc Zulip](https://roc.zulipchat.com/join/rz7n4d42v7tfilp3njzbm5eg/) first about what you plan to do! This can not only avoid duplicated effort, it can also avoid making a whole PR only to discover it won't be accepted because the change doesn't fit with the goals of the language's design or implementation. - Before making your first pull request, definitely talk to an existing contributor on [Roc Zulip](https://roc.zulipchat.com) first about what you plan to do! This can not only avoid duplicated effort, it can also avoid making a whole PR only to discover it won't be accepted because the change doesn't fit with the goals of the language's design or implementation.
- It's a good idea to open a work-in-progress pull request as you begin working on something. This way, others can see that you're working on it, which avoids duplicate effort, and others can give feedback sooner rather than later if they notice a problem in the direction things are going. Be sure to include "WIP" in the title of the PR as long as it's not ready for review! - It's a good idea to open a work-in-progress pull request as you begin working on something. This way, others can see that you're working on it, which avoids duplicate effort, and others can give feedback sooner rather than later if they notice a problem in the direction things are going. Be sure to include "WIP" in the title of the PR as long as it's not ready for review!
- Make sure to create a branch on the roc repository for your changes. We do not allow CI to be run on forks for security.
## Can we do better? ## Can we do better?

932
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,5 +1,6 @@
[workspace] [workspace]
members = [ members = [
"compiler/ident",
"compiler/region", "compiler/region",
"compiler/collections", "compiler/collections",
"compiler/module", "compiler/module",
@ -14,20 +15,31 @@ members = [
"compiler/reporting", "compiler/reporting",
"compiler/fmt", "compiler/fmt",
"compiler/mono", "compiler/mono",
"compiler/test_mono_macros",
"compiler/test_mono",
"compiler/load", "compiler/load",
"compiler/gen", "compiler/gen_llvm",
"compiler/gen_dev", "compiler/gen_dev",
"compiler/build", "compiler/build",
"compiler/arena_pool", "compiler/arena_pool",
"compiler/test_gen", "compiler/test_gen",
"vendor/ena", "vendor/ena",
"vendor/inkwell",
"vendor/pathfinding", "vendor/pathfinding",
"vendor/pretty", "vendor/pretty",
"editor", "editor",
"cli", "cli",
"cli/cli_utils",
"roc_std", "roc_std",
"docs" "docs",
] ]
exclude = [ "ci/bench-runner" ]
# Needed to be able to run `cargo run -p roc_cli --no-default-features` -
# see www/build.sh for more.
#
# Without the `-p` flag, cargo ignores `--no-default-features` when you have a
# workspace, and without `resolver = "2"` here, you can't use `-p` like this.
resolver = "2"
# Optimizations based on https://deterministic.space/high-performance-rust.html # Optimizations based on https://deterministic.space/high-performance-rust.html
[profile.release] [profile.release]

View file

@ -32,7 +32,7 @@ These are the policies for upholding our community's standards of conduct. If yo
In the Roc community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely. In the Roc community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Roc progammers comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust. And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Roc programmers comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
The enforcement policies listed above apply to all official Roc venues; including official IRC channels (#rust, #rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo); GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org (users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Roc Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion. The enforcement policies listed above apply to all official Roc venues; including official IRC channels (#rust, #rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo); GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org (users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Roc Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.

View file

@ -1,4 +1,4 @@
FROM rust:1.52-slim-buster FROM rust:1.54-slim-buster
WORKDIR /earthbuild WORKDIR /earthbuild
prep-debian: prep-debian:
@ -13,17 +13,18 @@ install-other-libs:
install-zig-llvm-valgrind-clippy-rustfmt: install-zig-llvm-valgrind-clippy-rustfmt:
FROM +install-other-libs FROM +install-other-libs
# zig # zig
RUN wget -c https://ziglang.org/download/0.7.1/zig-linux-x86_64-0.7.1.tar.xz --no-check-certificate RUN wget -c https://ziglang.org/download/0.8.0/zig-linux-x86_64-0.8.0.tar.xz --no-check-certificate
RUN tar -xf zig-linux-x86_64-0.7.1.tar.xz RUN tar -xf zig-linux-x86_64-0.8.0.tar.xz
RUN ln -s /earthbuild/zig-linux-x86_64-0.7.1/zig /usr/bin/zig RUN ln -s /earthbuild/zig-linux-x86_64-0.8.0/zig /usr/bin/zig
# llvm # llvm
RUN apt -y install lsb-release software-properties-common gnupg RUN apt -y install lsb-release software-properties-common gnupg
RUN wget https://apt.llvm.org/llvm.sh RUN wget https://apt.llvm.org/llvm.sh
RUN chmod +x llvm.sh RUN chmod +x llvm.sh
RUN ./llvm.sh 10 RUN ./llvm.sh 12
RUN ln -s /usr/bin/clang-10 /usr/bin/clang RUN ln -s /usr/bin/clang-12 /usr/bin/clang
RUN ln -s /usr/bin/llvm-as-12 /usr/bin/llvm-as
# use lld as linker # use lld as linker
RUN ln -s /usr/bin/lld-10 /usr/bin/ld.lld RUN ln -s /usr/bin/lld-12 /usr/bin/ld.lld
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native" ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
# valgrind # valgrind
RUN apt -y install autotools-dev cmake automake libc6-dbg RUN apt -y install autotools-dev cmake automake libc6-dbg
@ -38,6 +39,8 @@ install-zig-llvm-valgrind-clippy-rustfmt:
RUN rustup component add clippy RUN rustup component add clippy
# rustfmt # rustfmt
RUN rustup component add rustfmt RUN rustup component add rustfmt
# criterion
RUN cargo install cargo-criterion
# sccache # sccache
RUN apt -y install libssl-dev RUN apt -y install libssl-dev
RUN cargo install sccache RUN cargo install sccache
@ -97,15 +100,49 @@ check-rustfmt:
RUN cargo fmt --version RUN cargo fmt --version
RUN cargo fmt --all -- --check RUN cargo fmt --all -- --check
check-typos:
RUN cargo install --version 1.0.11 typos-cli
COPY --dir .github ci cli compiler docs editor examples nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix ./
RUN typos
test-rust: test-rust:
FROM +copy-dirs-and-cache FROM +copy-dirs-and-cache
ENV RUST_BACKTRACE=1 ENV RUST_BACKTRACE=1
RUN --mount=type=cache,target=$SCCACHE_DIR \ RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --release && sccache --show-stats cargo test --release && sccache --show-stats
verify-no-git-changes:
FROM +test-rust
# If running tests caused anything to be changed or added (without being
# included in a .gitignore somewhere), fail the build!
#
# How it works: the `git ls-files` command lists all the modified or
# uncommitted files in the working tree, the `| grep -E .` command returns a
# zero exit code if it listed any files and nonzero otherwise (which is the
# opposite of what we want), and the `!` at the start inverts the exit code.
RUN ! git ls-files --deleted --modified --others --exclude-standard | grep -E .
test-all: test-all:
BUILD +test-zig BUILD +test-zig
BUILD +check-rustfmt BUILD +check-rustfmt
BUILD +check-clippy BUILD +check-clippy
BUILD +check-typos
BUILD +test-rust BUILD +test-rust
BUILD +verify-no-git-changes
# compile everything needed for benchmarks and output a self-contained folder
prep-bench-folder:
FROM +copy-dirs-and-cache
ARG BENCH_SUFFIX=branch
RUN cargo criterion -V
RUN --mount=type=cache,target=$SCCACHE_DIR cd cli && cargo criterion --no-run
RUN mkdir -p bench-folder/compiler/builtins/bitcode/src
RUN mkdir -p bench-folder/target/release/deps
RUN mkdir -p bench-folder/examples/benchmarks
RUN cp examples/benchmarks/*.roc bench-folder/examples/benchmarks/
RUN cp -r examples/benchmarks/platform bench-folder/examples/benchmarks/
RUN cp compiler/builtins/bitcode/src/str.zig bench-folder/compiler/builtins/bitcode/src
RUN cp target/release/roc bench-folder/target/release
# copy the most recent time bench to bench-folder
RUN cp target/release/deps/`ls -t target/release/deps/ | grep time_bench | head -n 1` bench-folder/target/release/deps/time_bench
SAVE ARTIFACT bench-folder AS LOCAL bench-folder-$BENCH_SUFFIX

View file

@ -496,3 +496,22 @@ of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project. either expressed or implied, of the FreeBSD Project.
=========================================================== ===========================================================
* morphic_lib - https://github.com/morphic-lang/morphic_lib
This source code can be found in vendor/morphic_lib and is licensed under the following terms:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================

View file

@ -17,6 +17,19 @@ If you're curious about where the language's name and logo came from,
``` ```
4. Check out [these tests](https://github.com/rtfeldman/roc/blob/trunk/cli/tests/repl_eval.rs) for examples of using the REPL 4. Check out [these tests](https://github.com/rtfeldman/roc/blob/trunk/cli/tests/repl_eval.rs) for examples of using the REPL
### Examples
Take a look at the `examples` folder; `examples/benchmarks` contains some larger examples.
Run examples as follows:
1. Navigate to `/examples`
2. Run with:
```
cargo run hello-world/Hello.roc
```
Some examples like `examples/benchmarks/NQueens.roc` require input after running.
For NQueens, input 10 in the terminal and press enter.
## Applications and Platforms ## Applications and Platforms
Applications are often built on a *framework.* Typically, both application and framework are written in the same language. Applications are often built on a *framework.* Typically, both application and framework are written in the same language.
@ -46,7 +59,7 @@ By using systems-level programming languages like C and C++, platform authors sa
Roc is designed to make the "systems-level platform, higher-level application" experience as nice as possible. Roc is designed to make the "systems-level platform, higher-level application" experience as nice as possible.
* **Application** authors code exclusively in Roc. It's a language designed for nice ergonomics. The syntax resembles Ruby or CoffeeScript, and it has a fast compiler with full type inference. * **Application** authors code exclusively in Roc. It's a language designed for nice ergonomics. The syntax resembles Ruby or CoffeeScript, and it has a fast compiler with full type inference.
* **Platform** authors code almost exclusively in a systems-level langauge like C, C++, Rust, or [Zig](https://ziglang.org/), except for the thin Roc API they expose to application authors. Roc application code compiles to machine code, and production builds of Roc apps benefit from the same [LLVM](https://llvm.org/) optimizations that C++, Rust, and Zig do. Roc application authors do not need to know this lower-level code exists; all they have to interact with is the platform's API, which is exposed as an ordinary Roc API. * **Platform** authors code almost exclusively in a systems-level language like C, C++, Rust, or [Zig](https://ziglang.org/), except for the thin Roc API they expose to application authors. Roc application code compiles to machine code, and production builds of Roc apps benefit from the same [LLVM](https://llvm.org/) optimizations that C++, Rust, and Zig do. Roc application authors do not need to know this lower-level code exists; all they have to interact with is the platform's API, which is exposed as an ordinary Roc API.
Every Roc application is built on top of exactly one Roc platform. There is no such thing as a Roc application that runs without a platform, and there is no default platform. You must choose one! Every Roc application is built on top of exactly one Roc platform. There is no such thing as a Roc application that runs without a platform, and there is no default platform. You must choose one!

View file

@ -1,22 +1,61 @@
# This file is automatically @generated by Cargo. # This file is automatically @generated by Cargo.
# It is not intended for manual editing. # It is not intended for manual editing.
version = 3
[[package]] [[package]]
name = "base64" name = "aho-corasick"
version = "0.13.0" version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
dependencies = [
"memchr",
]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bench-runner"
version = "0.1.0"
dependencies = [
"clap",
"data-encoding",
"is_executable",
"regex",
"ring",
]
[[package]]
name = "bitflags"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.6.1" version = "3.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63396b8a4b9de3f4fdfb320ab6080762242f66a8ef174c49d8e19b674db4cdbe" checksum = "9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631"
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.0.67" version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd" checksum = "e70cc2f62c6ce1868963827bd677764c62d07c3d9a3e1fb1177ee1a9ab199eb2"
[[package]] [[package]]
name = "cfg-if" name = "cfg-if"
@ -25,45 +64,91 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]] [[package]]
name = "chunked_transfer" name = "clap"
version = "1.4.0" version = "3.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fff857943da45f546682664a79488be82e69e43c1a7a2307679ab9afb3a66d2e" checksum = "4bd1061998a501ee7d4b6d449020df3266ca3124b941ec56cf2005c3779ca142"
dependencies = [
"atty",
"bitflags",
"clap_derive",
"indexmap",
"lazy_static",
"os_str_bytes",
"strsim",
"termcolor",
"textwrap",
"unicode-width",
"vec_map",
]
[[package]] [[package]]
name = "form_urlencoded" name = "clap_derive"
version = "3.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "370f715b81112975b1b69db93e0b56ea4cd4e5002ac43b2da8474106a54096a1"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "data-encoding"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57"
[[package]]
name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "indexmap"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "is_executable"
version = "1.0.1" version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" checksum = "fa9acdc6d67b75e626ad644734e8bc6df893d9cd2a834129065d3dd6158ea9c8"
dependencies = [ dependencies = [
"matches", "winapi",
"percent-encoding",
]
[[package]]
name = "host"
version = "0.1.0"
dependencies = [
"roc_std",
"ureq",
]
[[package]]
name = "idna"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89829a5d69c23d348314a7ac337fe39173b61149a9864deabd260983aed48c21"
dependencies = [
"matches",
"unicode-bidi",
"unicode-normalization",
] ]
[[package]] [[package]]
name = "js-sys" name = "js-sys"
version = "0.3.50" version = "0.3.51"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d99f9e3e84b8f67f846ef5b4cbbc3b1c29f6c759fcbce6f01aa0e73d932a24c" checksum = "83bdfbace3a0e81a4253f73b49e960b053e396a11012cbd49b9b74d6a2b67062"
dependencies = [ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
@ -76,9 +161,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.92" version = "0.2.98"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56d855069fafbb9b344c0f962150cd2c1187975cb1c22c1522c240d8c4986714" checksum = "320cfe77175da3a483efed4bc0adc1968ca050b098ce4f2f1c13a56626128790"
[[package]] [[package]]
name = "log" name = "log"
@ -90,28 +175,52 @@ dependencies = [
] ]
[[package]] [[package]]
name = "matches" name = "memchr"
version = "0.1.8" version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc"
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.7.2" version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af8b08b04175473088b46763e51ee54da5f9a164bc162f615b91bc179dbf15a3" checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56"
[[package]] [[package]]
name = "percent-encoding" name = "os_str_bytes"
version = "2.1.0" version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" checksum = "afb2e1c3ee07430c2cf76151675e583e0f19985fa6efae47d6848a3e2c824f85"
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.26" version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec" checksum = "5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612"
dependencies = [ dependencies = [
"unicode-xid", "unicode-xid",
] ]
@ -125,6 +234,23 @@ dependencies = [
"proc-macro2", "proc-macro2",
] ]
[[package]]
name = "regex"
version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.6.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.16.20" version = "0.16.20"
@ -140,36 +266,6 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "roc_std"
version = "0.1.0"
dependencies = [
"libc",
]
[[package]]
name = "rustls"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "064fd21ff87c6e87ed4506e68beb42459caa4a0e2eb144932e6776768556980b"
dependencies = [
"base64",
"log",
"ring",
"sct",
"webpki",
]
[[package]]
name = "sct"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c"
dependencies = [
"ring",
"untrusted",
]
[[package]] [[package]]
name = "spin" name = "spin"
version = "0.5.2" version = "0.5.2"
@ -177,10 +273,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]] [[package]]
name = "syn" name = "strsim"
version = "1.0.68" version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ce15dd3ed8aa2f8eeac4716d6ef5ab58b6b9256db41d7e1a0224c2788e8fd87" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "syn"
version = "1.0.74"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1873d832550d4588c3dbc20f01361ab00bfe741048f71e3fecf145a7cc18b29c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -188,43 +290,40 @@ dependencies = [
] ]
[[package]] [[package]]
name = "tinyvec" name = "termcolor"
version = "1.2.0" version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342" checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
dependencies = [ dependencies = [
"tinyvec_macros", "winapi-util",
] ]
[[package]] [[package]]
name = "tinyvec_macros" name = "textwrap"
version = "0.1.0" version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" checksum = "203008d98caf094106cfaba70acfed15e18ed3ddb7d94e49baec153a2b462789"
[[package]]
name = "unicode-bidi"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
dependencies = [ dependencies = [
"matches", "unicode-width",
] ]
[[package]] [[package]]
name = "unicode-normalization" name = "unicode-segmentation"
version = "0.1.17" version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef" checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b"
dependencies = [
"tinyvec", [[package]]
] name = "unicode-width"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
version = "0.2.1" version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
[[package]] [[package]]
name = "untrusted" name = "untrusted"
@ -233,38 +332,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]] [[package]]
name = "ureq" name = "vec_map"
version = "2.1.0" version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fbeb1aabb07378cf0e084971a74f24241273304653184f54cdce113c0d7df1b" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
dependencies = [
"base64",
"chunked_transfer",
"log",
"once_cell",
"rustls",
"url",
"webpki",
"webpki-roots",
]
[[package]] [[package]]
name = "url" name = "version_check"
version = "2.2.1" version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ccd964113622c8e9322cfac19eb1004a07e636c545f325da085d5cdde6f1f8b" checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
dependencies = [
"form_urlencoded",
"idna",
"matches",
"percent-encoding",
]
[[package]] [[package]]
name = "wasm-bindgen" name = "wasm-bindgen"
version = "0.2.73" version = "0.2.74"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9" checksum = "d54ee1d4ed486f78874278e63e4069fc1ab9f6a18ca492076ffb90c5eb2997fd"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"wasm-bindgen-macro", "wasm-bindgen-macro",
@ -272,9 +355,9 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-backend" name = "wasm-bindgen-backend"
version = "0.2.73" version = "0.2.74"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae" checksum = "3b33f6a0694ccfea53d94db8b2ed1c3a8a4c86dd936b13b9f0a15ec4a451b900"
dependencies = [ dependencies = [
"bumpalo", "bumpalo",
"lazy_static", "lazy_static",
@ -287,9 +370,9 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-macro" name = "wasm-bindgen-macro"
version = "0.2.73" version = "0.2.74"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f" checksum = "088169ca61430fe1e58b8096c24975251700e7b1f6fd91cc9d59b04fb9b18bd4"
dependencies = [ dependencies = [
"quote", "quote",
"wasm-bindgen-macro-support", "wasm-bindgen-macro-support",
@ -297,9 +380,9 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-macro-support" name = "wasm-bindgen-macro-support"
version = "0.2.73" version = "0.2.74"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c" checksum = "be2241542ff3d9f241f5e2cb6dd09b37efe786df8851c54957683a49f0987a97"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -310,39 +393,20 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-shared" name = "wasm-bindgen-shared"
version = "0.2.73" version = "0.2.74"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489" checksum = "d7cff876b8f18eed75a66cf49b65e7f967cb354a7aa16003fb55dbfd25b44b4f"
[[package]] [[package]]
name = "web-sys" name = "web-sys"
version = "0.3.50" version = "0.3.51"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be" checksum = "e828417b379f3df7111d3a2a9e5753706cae29c41f7c4029ee9fd77f3e09e582"
dependencies = [ dependencies = [
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "webpki"
version = "0.21.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "webpki-roots"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aabe153544e473b775453675851ecc86863d2a81d786d741f6b76778f2a48940"
dependencies = [
"webpki",
]
[[package]] [[package]]
name = "winapi" name = "winapi"
version = "0.3.9" version = "0.3.9"
@ -359,6 +423,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]] [[package]]
name = "winapi-x86_64-pc-windows-gnu" name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0" version = "0.4.0"

View file

@ -0,0 +1,13 @@
[package]
name = "bench-runner"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
clap = "3.0.0-beta.2"
regex = "1.5.4"
is_executable = "1.0.1"
ring = "0.16.20"
data-encoding = "2.3.2"

256
ci/bench-runner/src/main.rs Normal file
View file

@ -0,0 +1,256 @@
use clap::{AppSettings, Clap};
use data_encoding::HEXUPPER;
use is_executable::IsExecutable;
use regex::Regex;
use ring::digest::{Context, Digest, SHA256};
use std::fs::File;
use std::io::Read;
use std::{
collections::{HashMap, HashSet, VecDeque},
io::{self, BufRead, BufReader},
path::Path,
process::{self, Command, Stdio},
};
const BENCH_FOLDER_TRUNK: &str = "bench-folder-trunk";
const BENCH_FOLDER_BRANCH: &str = "bench-folder-branch";
// Compares benchmark results between a trunk checkout and a branch checkout.
// Must be run from the repo root: expects `bench-folder-trunk` and
// `bench-folder-branch` to exist there. Exits with code 1 if those folders
// are missing, or (via `finish`) if persistent regressions are found.
fn main() {
    let optional_args: OptionalArgs = OptionalArgs::parse();

    if Path::new(BENCH_FOLDER_TRUNK).exists() && Path::new(BENCH_FOLDER_BRANCH).exists() {
        delete_old_bench_results();

        if optional_args.check_executables_changed {
            println!("Doing a test run to verify benchmarks are working correctly and generate executables.");

            // BENCH_DRY_RUN=1 makes the benchmark binary use a minimal sample
            // count and warm-up/measurement time (see cli/benches/time_bench.rs),
            // so this pass only verifies the benches run and produces executables.
            std::env::set_var("BENCH_DRY_RUN", "1");

            do_benchmark("trunk");
            do_benchmark("branch");

            // switch back to full benchmark settings for the real runs below
            std::env::set_var("BENCH_DRY_RUN", "0");

            if check_if_bench_executables_changed() {
                println!(
                    "Comparison of sha256 of executables reveals changes, doing full benchmarks..."
                );

                let all_regressed_benches = do_all_benches(optional_args.nr_repeat_benchmarks);
                finish(all_regressed_benches, optional_args.nr_repeat_benchmarks);
            } else {
                // executables identical on both sides: a full benchmark run
                // could not show a meaningful difference, so skip it
                println!("No benchmark executables have changed");
            }
        } else {
            let all_regressed_benches = do_all_benches(optional_args.nr_repeat_benchmarks);
            finish(all_regressed_benches, optional_args.nr_repeat_benchmarks);
        }
    } else {
        eprintln!(
            r#"I can't find bench-folder-trunk and bench-folder-branch from the current directory.
I should be executed from the repo root.
Use `./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=trunk +prep-bench-folder` to generate bench-folder-trunk.
Use `./ci/safe-earthly.sh +prep-bench-folder` to generate bench-folder-branch."#
        );

        process::exit(1)
    }
}
// Reports persistent regressions and fails the process if there are any.
// A no-op when `all_regressed_benches` is empty.
fn finish(all_regressed_benches: HashSet<String>, nr_repeat_benchmarks: usize) {
    // nothing regressed in every round: success, just return
    if all_regressed_benches.is_empty() {
        return;
    }

    eprintln!(
        r#"
FAILED: The following benchmarks have shown a regression {:?} times: {:?}
"#,
        nr_repeat_benchmarks, all_regressed_benches
    );

    process::exit(1);
}
// returns all benchmarks that have regressed in every one of
// `nr_repeat_benchmarks` rounds (always runs at least one round)
fn do_all_benches(nr_repeat_benchmarks: usize) -> HashSet<String> {
    // one full round: clean slate, baseline on trunk, then measure branch
    let bench_round = || {
        delete_old_bench_results();
        do_benchmark("trunk");
        do_benchmark("branch")
    };

    let mut all_regressed_benches = bench_round();

    // a single clean round clears every suspect — abort early
    if all_regressed_benches.is_empty() {
        return HashSet::new();
    }

    for _ in 1..nr_repeat_benchmarks {
        let regressed_benches = bench_round();

        if regressed_benches.is_empty() {
            return HashSet::new();
        }

        // keep only benches that regressed in this round as well
        all_regressed_benches = all_regressed_benches
            .intersection(&regressed_benches)
            .cloned()
            .collect();
    }

    all_regressed_benches
}
// Runs the prebuilt `time_bench` executable for the given checkout
// ("trunk" or "branch"), echoes its stdout line by line, and returns the
// names of all benchmarks criterion reported as regressed.
fn do_benchmark(branch_name: &'static str) -> HashSet<String> {
    let mut cmd_child = Command::new(format!(
        "./bench-folder-{}/target/release/deps/time_bench",
        branch_name
    ))
    .args(&["--bench", "--noplot"])
    .stdout(Stdio::piped())
    // NOTE(review): stderr is piped but never drained; a child that writes a
    // lot to stderr could block on a full pipe buffer — confirm this is safe.
    .stderr(Stdio::piped())
    .spawn()
    .unwrap_or_else(|_| panic!("Failed to benchmark {}.", branch_name));

    let stdout = cmd_child.stdout.as_mut().unwrap();
    let stdout_reader = BufReader::new(stdout);
    let stdout_lines = stdout_reader.lines();

    let mut regressed_benches: HashSet<String> = HashSet::new();

    // criterion prints the quoted benchmark name a few lines before its
    // "regressed" verdict, so keep a sliding window of the last three lines
    let mut last_three_lines_queue: VecDeque<String> = VecDeque::with_capacity(3);
    // matches a double-quoted benchmark name, e.g. `"nqueens 11"`
    let bench_name_regex = Regex::new(r#"".*""#).expect("Failed to build regex");

    for line in stdout_lines {
        let line_str = line.expect("Failed to get output from benchmark command.");

        if line_str.contains("regressed") {
            let regressed_bench_name_line = last_three_lines_queue.get(2).expect(
                "Failed to get line that contains benchmark name from last_three_lines_queue.",
            );

            let regex_match = bench_name_regex
                .find(regressed_bench_name_line)
                .expect("This line should have the benchmark name between double quotes but I could not match it");

            // strip the surrounding double quotes from the matched name
            regressed_benches.insert(regex_match.as_str().to_string().replace("\"", ""));
        }

        println!("bench {:?}: {:?}", branch_name, line_str);

        last_three_lines_queue.push_front(line_str);
        // only the three most recent lines are ever inspected (get(2) above),
        // so keep the window from growing unboundedly
        last_three_lines_queue.truncate(3);
    }

    regressed_benches
}
// criterion stores results under target/criterion; stale results from a
// previous run would skew the regression comparison, so wipe them first
fn delete_old_bench_results() {
    remove("target/criterion");
}
// Removes a file or directory tree, mirroring `rm -rf`:
// does not error if `file_or_folder` does not exist.
// Panics on any other I/O failure.
// Uses std::fs instead of shelling out to `rm`, so it works without a
// POSIX userland and surfaces real I/O errors.
fn remove(file_or_folder: &str) {
    let path = Path::new(file_or_folder);

    let result = match std::fs::metadata(path) {
        // missing path: nothing to do (the `-f` part of `rm -rf`)
        Err(e) if e.kind() == io::ErrorKind::NotFound => return,
        Err(e) => Err(e),
        Ok(meta) if meta.is_dir() => std::fs::remove_dir_all(path),
        Ok(_) => std::fs::remove_file(path),
    };

    result.unwrap_or_else(|_| panic!("Something went wrong trying to remove {}", file_or_folder));
}
// Command-line options for the bench runner. The doc comments below double
// as the clap-generated `--help` text.
#[derive(Clap)]
#[clap(setting = AppSettings::ColoredHelp)]
struct OptionalArgs {
    /// How many times to repeat the benchmarks. A single benchmark has to fail every time for a regression to be reported.
    #[clap(long, default_value = "3")]
    nr_repeat_benchmarks: usize,
    /// Do not run full benchmarks if no benchmark executable has changed
    #[clap(long)]
    check_executables_changed: bool,
}
// Streams all bytes from `reader` through a SHA-256 context in 1 KiB chunks
// and returns the finished digest. Propagates any read error.
fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest, io::Error> {
    let mut context = Context::new(&SHA256);
    // read in 1 KiB chunks so arbitrarily large files use constant memory,
    // with the buffer hoisted out of the loop to avoid re-zeroing each pass
    let mut buffer = [0u8; 1024];

    loop {
        match reader.read(&mut buffer)? {
            // EOF
            0 => break,
            n => context.update(&buffer[..n]),
        }
    }

    Ok(context.finish())
}
// Returns the SHA-256 of the file at `file_path` as an uppercase hex string.
fn sha_file(file_path: &Path) -> Result<String, io::Error> {
    // buffer the reads; sha256_digest pulls the file in small chunks
    let reader = BufReader::new(File::open(file_path)?);
    Ok(HEXUPPER.encode(sha256_digest(reader)?.as_ref()))
}
fn calc_hashes_for_folder(benches_path_str: &str) -> HashMap<String, String> {
let benches_path = Path::new(benches_path_str);
let all_bench_files =
std::fs::read_dir(benches_path).expect("Failed to create iterator for files in dir.");
let non_src_files = all_bench_files
.into_iter()
.map(|file_res| {
file_res
.expect("Failed to get DirEntry from ReadDir all_bench_files")
.file_name()
.into_string()
.expect("Failed to create String from OsString for file_name.")
})
.filter(|file_name_str| !file_name_str.contains(".roc"));
let mut files_w_sha = HashMap::new();
for file_name in non_src_files {
let full_path_str = [benches_path_str, &file_name].join("");
let full_path = Path::new(&full_path_str);
if full_path.is_executable() {
files_w_sha.insert(
file_name.clone(),
sha_file(full_path).expect("Failed to calculate sha of file"),
);
}
}
files_w_sha
}
// Returns true if the set of benchmark executables (by name + sha256)
// differs between the trunk and branch bench folders.
fn check_if_bench_executables_changed() -> bool {
    let bench_folder_str = "/examples/benchmarks/";

    let trunk_bench_hashes =
        calc_hashes_for_folder(&[BENCH_FOLDER_TRUNK, bench_folder_str].join(""));
    let branch_bench_hashes =
        calc_hashes_for_folder(&[BENCH_FOLDER_BRANCH, bench_folder_str].join(""));

    // different number of executables is itself a change
    if trunk_bench_hashes.len() != branch_bench_hashes.len() {
        return true;
    }

    // same count: changed iff any trunk executable is missing from the
    // branch side or hashes to a different value
    trunk_bench_hashes
        .iter()
        .any(|(name, trunk_sha)| branch_bench_hashes.get(name) != Some(trunk_sha))
}

2
ci/earthly-conf.yml Normal file
View file

@ -0,0 +1,2 @@
global:
cache_size_mb: 25000

23
ci/safe-earthly.sh Executable file
View file

@ -0,0 +1,23 @@
#!/usr/bin/env bash

# Wrapper around `earthly` for CI: runs the given earthly target while
# capturing output, and retries once without the cache if the build hits
# the flaky "failed to mount" cache error.

LOG_FILE="earthly_log.txt"
touch $LOG_FILE

# first arg + everything after
ARGS=${@:1}
FULL_CMD="earthly --config ci/earthly-conf.yml $ARGS"
echo $FULL_CMD
# `script` runs the command in a pseudo-terminal so earthly keeps its live
# output, while also capturing everything into $LOG_FILE for the grep below.
# (-e: return child's exit code, -f: flush after each write, -q: quiet)
script -efq $LOG_FILE -c "$FULL_CMD"
EXIT_CODE=$?

if grep -q "failed to mount" "$LOG_FILE"; then
    echo ""
    echo ""
    echo "------<<<<<<!!!!!!>>>>>>------"
    echo "DETECTED FAILURE TO MOUNT ERROR: running without cache"
    echo "------<<<<<<!!!!!!>>>>>>------"
    echo ""
    echo ""
    # retry without the cache; the script then exits with this command's status
    earthly --config ci/earthly-conf.yml --no-cache $ARGS
else
    exit $EXIT_CODE
fi

View file

@ -15,7 +15,12 @@ test = false
bench = false bench = false
[features] [features]
default = ["target-x86"] default = ["target-x86", "llvm", "editor"]
# This is a separate feature because when we generate docs on Netlify,
# it doesn't have LLVM installed. (Also, it doesn't need to do code gen.)
llvm = ["inkwell", "roc_gen_llvm", "roc_build/llvm"]
editor = ["roc_editor"]
target-x86 = [] target-x86 = []
@ -45,41 +50,23 @@ roc_unify = { path = "../compiler/unify" }
roc_solve = { path = "../compiler/solve" } roc_solve = { path = "../compiler/solve" }
roc_mono = { path = "../compiler/mono" } roc_mono = { path = "../compiler/mono" }
roc_load = { path = "../compiler/load" } roc_load = { path = "../compiler/load" }
roc_gen = { path = "../compiler/gen" } roc_gen_llvm = { path = "../compiler/gen_llvm", optional = true }
roc_build = { path = "../compiler/build" } roc_build = { path = "../compiler/build", default-features = false }
roc_fmt = { path = "../compiler/fmt" } roc_fmt = { path = "../compiler/fmt" }
roc_reporting = { path = "../compiler/reporting" } roc_reporting = { path = "../compiler/reporting" }
roc_editor = { path = "../editor" } roc_editor = { path = "../editor", optional = true }
# TODO switch to clap 3.0.0 once it's out. Tried adding clap = "~3.0.0-beta.1" and cargo wouldn't accept it # TODO switch to clap 3.0.0 once it's out. Tried adding clap = "~3.0.0-beta.1" and cargo wouldn't accept it
clap = { git = "https://github.com/rtfeldman/clap", branch = "master" } clap = { git = "https://github.com/rtfeldman/clap", branch = "master" }
const_format = "0.2.8" const_format = "0.2"
rustyline = { git = "https://github.com/rtfeldman/rustyline", tag = "prompt-fix" } rustyline = { git = "https://github.com/rtfeldman/rustyline", tag = "prompt-fix" }
rustyline-derive = { git = "https://github.com/rtfeldman/rustyline", tag = "prompt-fix" } rustyline-derive = { git = "https://github.com/rtfeldman/rustyline", tag = "prompt-fix" }
im = "14" # im and im-rc should always have the same version! im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version! im-rc = "14" # im and im-rc should always have the same version!
bumpalo = { version = "3.2", features = ["collections"] } bumpalo = { version = "3.2", features = ["collections"] }
inlinable_string = "0.1"
libc = "0.2" libc = "0.2"
libloading = "0.6" libloading = "0.6"
# NOTE: rtfeldman/inkwell is a fork of TheDan64/inkwell which does not change anything. inkwell = { path = "../vendor/inkwell", optional = true }
#
# The reason for this fork is that the way Inkwell is designed, you have to use
# a particular branch (e.g. "llvm8-0") in Cargo.toml. That would be fine, except that
# breaking changes get pushed directly to that branch, which breaks our build
# without warning.
#
# We tried referencing a specific rev on TheDan64/inkwell directly (instead of branch),
# but although that worked locally, it did not work on GitHub Actions. (After a few
# hours of investigation, gave up trying to figure out why.) So this is the workaround:
# having an immutable tag on the rtfeldman/inkwell fork which points to
# a particular "release" of Inkwell.
#
# When we want to update Inkwell, we can sync up rtfeldman/inkwell to the latest
# commit of TheDan64/inkwell, push a new tag which points to the latest commit,
# change the tag value in this Cargo.toml to point to that tag, and `cargo update`.
# This way, GitHub Actions works and nobody's builds get broken.
inkwell = { git = "https://github.com/rtfeldman/inkwell", tag = "llvm10-0.release4", features = [ "llvm10-0" ] }
target-lexicon = "0.10" target-lexicon = "0.10"
tempfile = "3.1.0" tempfile = "3.1.0"
@ -89,8 +76,12 @@ maplit = "1.0.1"
indoc = "0.3.3" indoc = "0.3.3"
quickcheck = "0.8" quickcheck = "0.8"
quickcheck_macros = "0.8" quickcheck_macros = "0.8"
strip-ansi-escapes = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde-xml-rs = "0.4"
serial_test = "0.5" serial_test = "0.5"
tempfile = "3.1.0" tempfile = "3.1.0"
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
cli_utils = { path = "cli_utils" }
[[bench]]
name = "time_bench"
harness = false

17
cli/benches/README.md Normal file
View file

@ -0,0 +1,17 @@
# Running the benchmarks
Install cargo criterion:
```
cargo install cargo-criterion
```
To prevent stack overflow on the `CFold` benchmark:
```
ulimit -s unlimited
```
In the `cli` folder execute:
```
cargo criterion
```

71
cli/benches/time_bench.rs Normal file
View file

@ -0,0 +1,71 @@
use std::time::Duration;
use cli_utils::bench_utils::{
bench_cfold, bench_deriv, bench_nqueens, bench_quicksort, bench_rbtree_ck, bench_rbtree_delete,
};
use criterion::{measurement::WallTime, BenchmarkGroup, Criterion, SamplingMode};
// Registers and runs every Roc benchmark in one wall-time criterion group.
fn bench_group_wall_time(c: &mut Criterion) {
    let mut group = c.benchmark_group("bench-group_wall-time");
    // calculate statistics based on a fixed (flat) number of runs
    group.sampling_mode(SamplingMode::Flat);

    // on a dry run use 10 samples (the minimum criterion allows), else 200
    let nr_of_runs = if matches!(std::env::var("BENCH_DRY_RUN").as_deref(), Ok("1")) {
        10
    } else {
        200
    };
    group.sample_size(nr_of_runs);

    let bench_funcs: Vec<fn(Option<&mut BenchmarkGroup<WallTime>>) -> ()> = vec![
        bench_nqueens,       // queens 11
        bench_cfold,         // e = mkExpr 17 1
        bench_deriv,         // nest deriv 8 f
        bench_rbtree_ck,     // ms = makeMap 5 80000
        bench_rbtree_delete, // m = makeMap 100000
        bench_quicksort,     // list size 10000
    ];

    for bench_func in bench_funcs.iter() {
        bench_func(Some(&mut group))
    }

    group.finish();
}
// use short warm up and measurement time on dry run
fn make_config() -> Criterion {
    let is_dry_run = matches!(std::env::var("BENCH_DRY_RUN").as_deref(), Ok("1"));

    if is_dry_run {
        // keep dry runs fast; they only verify the benches execute at all
        Criterion::default()
            .warm_up_time(Duration::new(1, 0))
            .measurement_time(Duration::new(1, 0))
    } else {
        Criterion::default()
    }
}
fn all_benches() {
let mut criterion: Criterion<_> = make_config().configure_from_args();
bench_group_wall_time(&mut criterion);
}
/// Benchmark binary entry point: runs all benchmark groups, then lets
/// criterion print its final summary (respecting any CLI arguments).
fn main() {
    all_benches();
    Criterion::default().configure_from_args().final_summary();
}

24
cli/cli_utils/Cargo.toml Normal file
View file

@ -0,0 +1,24 @@
[package]
name = "cli_utils"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
description = "Shared code for cli tests and benchmarks"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
# In-workspace Roc crates used by the shared test/bench helpers.
roc_cli = { path = "../../cli" }
roc_collections = { path = "../../compiler/collections" }
roc_load = { path = "../../compiler/load" }
roc_module = { path = "../../compiler/module" }
bumpalo = { version = "3.6.1", features = ["collections"] }
# NOTE(review): criterion is pulled from a fork rather than crates.io —
# confirm which fork-only changes the benches depend on.
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
inlinable_string = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde-xml-rs = "0.4"
strip-ansi-escapes = "0.1"
tempfile = "3.1.0"
rlimit = "0.6.2"

File diff suppressed because one or more lines are too long

View file

@ -1,5 +1,4 @@
extern crate bumpalo; extern crate bumpalo;
extern crate inlinable_string;
extern crate roc_collections; extern crate roc_collections;
extern crate roc_load; extern crate roc_load;
extern crate roc_module; extern crate roc_module;
@ -65,7 +64,7 @@ pub fn run_roc(args: &[&str]) -> Out {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn run_cmd(cmd_name: &str, stdin_str: &str, args: &[&str]) -> Out { pub fn run_cmd(cmd_name: &str, stdin_vals: &[&str], args: &[&str]) -> Out {
let mut cmd = Command::new(cmd_name); let mut cmd = Command::new(cmd_name);
for arg in args { for arg in args {
@ -81,9 +80,12 @@ pub fn run_cmd(cmd_name: &str, stdin_str: &str, args: &[&str]) -> Out {
{ {
let stdin = child.stdin.as_mut().expect("Failed to open stdin"); let stdin = child.stdin.as_mut().expect("Failed to open stdin");
stdin
.write_all(stdin_str.as_bytes()) for stdin_str in stdin_vals {
.expect("Failed to write to stdin"); stdin
.write_all(stdin_str.as_bytes())
.expect("Failed to write to stdin");
}
} }
let output = child let output = child
@ -98,7 +100,7 @@ pub fn run_cmd(cmd_name: &str, stdin_str: &str, args: &[&str]) -> Out {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn run_with_valgrind(stdin_str: &str, args: &[&str]) -> (Out, String) { pub fn run_with_valgrind(stdin_vals: &[&str], args: &[&str]) -> (Out, String) {
//TODO: figure out if there is a better way to get the valgrind executable. //TODO: figure out if there is a better way to get the valgrind executable.
let mut cmd = Command::new("valgrind"); let mut cmd = Command::new("valgrind");
let named_tempfile = let named_tempfile =
@ -142,9 +144,12 @@ pub fn run_with_valgrind(stdin_str: &str, args: &[&str]) -> (Out, String) {
{ {
let stdin = child.stdin.as_mut().expect("Failed to open stdin"); let stdin = child.stdin.as_mut().expect("Failed to open stdin");
stdin
.write_all(stdin_str.as_bytes()) for stdin_str in stdin_vals {
.expect("Failed to write to stdin"); stdin
.write_all(stdin_str.as_bytes())
.expect("Failed to write to stdin");
}
} }
let output = child let output = child
@ -228,7 +233,7 @@ pub fn extract_valgrind_errors(xml: &str) -> Result<Vec<ValgrindError>, serde_xm
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn example_dir(dir_name: &str) -> PathBuf { pub fn root_dir() -> PathBuf {
let mut path = env::current_exe().ok().unwrap(); let mut path = env::current_exe().ok().unwrap();
// Get rid of the filename in target/debug/deps/cli_run-99c65e4e9a1fbd06 // Get rid of the filename in target/debug/deps/cli_run-99c65e4e9a1fbd06
@ -243,6 +248,13 @@ pub fn example_dir(dir_name: &str) -> PathBuf {
path.pop(); path.pop();
path.pop(); path.pop();
path
}
#[allow(dead_code)]
pub fn examples_dir(dir_name: &str) -> PathBuf {
let mut path = root_dir();
// Descend into examples/{dir_name} // Descend into examples/{dir_name}
path.push("examples"); path.push("examples");
path.push(dir_name); path.push(dir_name);
@ -252,7 +264,7 @@ pub fn example_dir(dir_name: &str) -> PathBuf {
#[allow(dead_code)] #[allow(dead_code)]
pub fn example_file(dir_name: &str, file_name: &str) -> PathBuf { pub fn example_file(dir_name: &str, file_name: &str) -> PathBuf {
let mut path = example_dir(dir_name); let mut path = examples_dir(dir_name);
path.push(file_name); path.push(file_name);
@ -261,19 +273,7 @@ pub fn example_file(dir_name: &str, file_name: &str) -> PathBuf {
#[allow(dead_code)] #[allow(dead_code)]
pub fn fixtures_dir(dir_name: &str) -> PathBuf { pub fn fixtures_dir(dir_name: &str) -> PathBuf {
let mut path = env::current_exe().ok().unwrap(); let mut path = root_dir();
// Get rid of the filename in target/debug/deps/cli_run-99c65e4e9a1fbd06
path.pop();
// If we're in deps/ get rid of deps/ in target/debug/deps/
if path.ends_with("deps") {
path.pop();
}
// Get rid of target/debug/ so we're back at the project root
path.pop();
path.pop();
// Descend into cli/tests/fixtures/{dir_name} // Descend into cli/tests/fixtures/{dir_name}
path.push("cli"); path.push("cli");

2
cli/cli_utils/src/lib.rs Normal file
View file

@ -0,0 +1,2 @@
//! Shared code for the `cli` crate's tests and benchmarks.

/// Drivers for the criterion benchmarks (`bench_nqueens`, `bench_cfold`, ...).
pub mod bench_utils;
/// Helpers for spawning `roc`/external commands and locating example files.
pub mod helpers;

View file

@ -5,8 +5,8 @@ use roc_build::{
}; };
use roc_can::builtins::builtin_defs_map; use roc_can::builtins::builtin_defs_map;
use roc_collections::all::MutMap; use roc_collections::all::MutMap;
use roc_gen::llvm::build::OptLevel;
use roc_load::file::LoadingProblem; use roc_load::file::LoadingProblem;
use roc_mono::ir::OptLevel;
use std::path::PathBuf; use std::path::PathBuf;
use std::time::{Duration, SystemTime}; use std::time::{Duration, SystemTime};
use target_lexicon::Triple; use target_lexicon::Triple;
@ -26,12 +26,23 @@ pub enum BuildOutcome {
Errors, Errors,
} }
impl BuildOutcome {
pub fn status_code(&self) -> i32 {
match self {
Self::NoProblems => 0,
Self::OnlyWarnings => 1,
Self::Errors => 2,
}
}
}
pub struct BuiltFile { pub struct BuiltFile {
pub binary_path: PathBuf, pub binary_path: PathBuf,
pub outcome: BuildOutcome, pub outcome: BuildOutcome,
pub total_time: Duration, pub total_time: Duration,
} }
#[cfg(feature = "llvm")]
pub fn build_file<'a>( pub fn build_file<'a>(
arena: &'a Bump, arena: &'a Bump,
target: &Triple, target: &Triple,
@ -54,7 +65,7 @@ pub fn build_file<'a>(
}; };
let loaded = roc_load::file::load_and_monomorphize( let loaded = roc_load::file::load_and_monomorphize(
&arena, arena,
roc_file_path.clone(), roc_file_path.clone(),
stdlib, stdlib,
src_dir.as_path(), src_dir.as_path(),
@ -117,11 +128,11 @@ pub fn build_file<'a>(
let cwd = roc_file_path.parent().unwrap(); let cwd = roc_file_path.parent().unwrap();
let binary_path = cwd.join(&*loaded.output_path); // TODO should join ".exe" on Windows let binary_path = cwd.join(&*loaded.output_path); // TODO should join ".exe" on Windows
let code_gen_timing = program::gen_from_mono_module( let code_gen_timing = program::gen_from_mono_module(
&arena, arena,
loaded, loaded,
&roc_file_path, &roc_file_path,
Triple::host(), Triple::host(),
&app_o_file, app_o_file,
opt_level, opt_level,
emit_debug_info, emit_debug_info,
); );
@ -204,10 +215,14 @@ pub fn build_file<'a>(
let total_time = compilation_start.elapsed().unwrap(); let total_time = compilation_start.elapsed().unwrap();
// If the cmd errored out, return the Err. // If the cmd errored out, return the Err.
cmd_result?; let exit_status = cmd_result?;
// TODO change this to report whether there were errors or warnings! // TODO change this to report whether there were errors or warnings!
let outcome = BuildOutcome::NoProblems; let outcome = if exit_status.success() {
BuildOutcome::NoProblems
} else {
BuildOutcome::Errors
};
Ok(BuiltFile { Ok(BuiltFile {
binary_path, binary_path,

View file

@ -1,12 +1,15 @@
#[macro_use] #[macro_use]
extern crate clap; extern crate clap;
use build::{build_file, BuildOutcome, BuiltFile}; #[macro_use]
extern crate const_format;
use build::{BuildOutcome, BuiltFile};
use bumpalo::Bump; use bumpalo::Bump;
use clap::{App, AppSettings, Arg, ArgMatches}; use clap::{App, AppSettings, Arg, ArgMatches};
use roc_build::link::LinkType; use roc_build::link::LinkType;
use roc_gen::llvm::build::OptLevel;
use roc_load::file::LoadingProblem; use roc_load::file::LoadingProblem;
use roc_mono::ir::OptLevel;
use std::env; use std::env;
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -25,15 +28,17 @@ pub const CMD_DOCS: &str = "docs";
pub const FLAG_DEBUG: &str = "debug"; pub const FLAG_DEBUG: &str = "debug";
pub const FLAG_OPTIMIZE: &str = "optimize"; pub const FLAG_OPTIMIZE: &str = "optimize";
pub const FLAG_LIB: &str = "lib";
pub const ROC_FILE: &str = "ROC_FILE"; pub const ROC_FILE: &str = "ROC_FILE";
pub const DIRECTORY_OR_FILES: &str = "DIRECTORY_OR_FILES"; pub const DIRECTORY_OR_FILES: &str = "DIRECTORY_OR_FILES";
pub const ARGS_FOR_APP: &str = "ARGS_FOR_APP"; pub const ARGS_FOR_APP: &str = "ARGS_FOR_APP";
pub fn build_app<'a>() -> App<'a> { pub fn build_app<'a>() -> App<'a> {
App::new("roc") let app = App::new("roc")
.version(crate_version!()) .version(concatcp!(crate_version!(), "\n"))
.about("Runs the given .roc file. Use one of the SUBCOMMANDS below to do something else!")
.subcommand(App::new(CMD_BUILD) .subcommand(App::new(CMD_BUILD)
.about("Build a program") .about("Build a binary from the given .roc file, but don't run it")
.arg( .arg(
Arg::with_name(ROC_FILE) Arg::with_name(ROC_FILE)
.help("The .roc file to build") .help("The .roc file to build")
@ -42,7 +47,13 @@ pub fn build_app<'a>() -> App<'a> {
.arg( .arg(
Arg::with_name(FLAG_OPTIMIZE) Arg::with_name(FLAG_OPTIMIZE)
.long(FLAG_OPTIMIZE) .long(FLAG_OPTIMIZE)
.help("Optimize the compiled program to run faster. (Optimization takes time to complete.)") .help("Optimize your compiled Roc program to run faster. (Optimization takes time to complete.)")
.required(false),
)
.arg(
Arg::with_name(FLAG_LIB)
.long(FLAG_LIB)
.help("Build a C library instead of an executable.")
.required(false), .required(false),
) )
.arg( .arg(
@ -53,7 +64,7 @@ pub fn build_app<'a>() -> App<'a> {
) )
) )
.subcommand(App::new(CMD_RUN) .subcommand(App::new(CMD_RUN)
.about("Build and run a program") .about("DEPRECATED - now use `roc [FILE]` instead of `roc run [FILE]`")
.setting(AppSettings::TrailingVarArg) .setting(AppSettings::TrailingVarArg)
.arg( .arg(
Arg::with_name(FLAG_OPTIMIZE) Arg::with_name(FLAG_OPTIMIZE)
@ -69,7 +80,7 @@ pub fn build_app<'a>() -> App<'a> {
) )
.arg( .arg(
Arg::with_name(ROC_FILE) Arg::with_name(ROC_FILE)
.help("The .roc file of an app to build and run") .help("The .roc file of an app to run")
.required(true), .required(true),
) )
.arg( .arg(
@ -81,26 +92,57 @@ pub fn build_app<'a>() -> App<'a> {
.subcommand(App::new(CMD_REPL) .subcommand(App::new(CMD_REPL)
.about("Launch the interactive Read Eval Print Loop (REPL)") .about("Launch the interactive Read Eval Print Loop (REPL)")
) )
.subcommand(App::new(CMD_EDIT)
.about("Launch the Roc editor")
.arg(Arg::with_name(DIRECTORY_OR_FILES)
.index(1)
.multiple(true)
.required(false)
.help("(optional) The directory or files to open on launch.")
)
)
.subcommand( .subcommand(
App::new(CMD_DOCS) App::new(CMD_DOCS)
.about("Generate documentation for Roc modules") .about("Generate documentation for Roc modules")
.arg(Arg::with_name(DIRECTORY_OR_FILES) .arg(Arg::with_name(DIRECTORY_OR_FILES)
.index(1) .index(1)
.multiple(true) .multiple(true)
.required(true) .required(false)
.help("The directory or files to build documentation for") .help("The directory or files to build documentation for")
) )
) )
.setting(AppSettings::TrailingVarArg)
.arg(
Arg::with_name(FLAG_OPTIMIZE)
.long(FLAG_OPTIMIZE)
.help("Optimize the compiled program to run faster. (Optimization takes time to complete.)")
.requires(ROC_FILE)
.required(false),
)
.arg(
Arg::with_name(FLAG_DEBUG)
.long(FLAG_DEBUG)
.help("Store LLVM debug information in the generated program")
.requires(ROC_FILE)
.required(false),
)
.arg(
Arg::with_name(ROC_FILE)
.help("The .roc file of an app to build and run")
.required(false),
)
.arg(
Arg::with_name(ARGS_FOR_APP)
.help("Arguments to pass into the app being run")
.requires(ROC_FILE)
.multiple(true),
);
if cfg!(feature = "editor") {
app.subcommand(
App::new(CMD_EDIT).about("Launch the Roc editor").arg(
Arg::with_name(DIRECTORY_OR_FILES)
.index(1)
.multiple(true)
.required(false)
.help("(optional) The directory or files to open on launch."),
),
)
} else {
app
}
} }
pub fn docs(files: Vec<PathBuf>) { pub fn docs(files: Vec<PathBuf>) {
@ -111,12 +153,15 @@ pub fn docs(files: Vec<PathBuf>) {
) )
} }
#[derive(Debug, PartialEq, Eq)]
pub enum BuildConfig { pub enum BuildConfig {
BuildOnly, BuildOnly,
BuildAndRun { roc_file_arg_index: usize }, BuildAndRun { roc_file_arg_index: usize },
} }
#[cfg(feature = "llvm")]
pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::Result<i32> { pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::Result<i32> {
use build::build_file;
use BuildConfig::*; use BuildConfig::*;
let arena = Bump::new(); let arena = Bump::new();
@ -130,8 +175,13 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
}; };
let emit_debug_info = matches.is_present(FLAG_DEBUG); let emit_debug_info = matches.is_present(FLAG_DEBUG);
let path = Path::new(filename).canonicalize().unwrap(); let link_type = if matches.is_present(FLAG_LIB) {
let src_dir = path.parent().unwrap().canonicalize().unwrap(); LinkType::Dylib
} else {
LinkType::Executable
};
let path = Path::new(filename);
// Spawn the root task // Spawn the root task
let path = path.canonicalize().unwrap_or_else(|err| { let path = path.canonicalize().unwrap_or_else(|err| {
@ -152,6 +202,7 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
} }
}); });
let src_dir = path.parent().unwrap().canonicalize().unwrap();
let res_binary_path = build_file( let res_binary_path = build_file(
&arena, &arena,
target, target,
@ -159,7 +210,7 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
path, path,
opt_level, opt_level,
emit_debug_info, emit_debug_info,
LinkType::Executable, link_type,
); );
match res_binary_path { match res_binary_path {
@ -175,13 +226,6 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
.strip_prefix(env::current_dir().unwrap()) .strip_prefix(env::current_dir().unwrap())
.unwrap_or(&binary_path); .unwrap_or(&binary_path);
// Return a nonzero exit code if there were problems
let status_code = match outcome {
BuildOutcome::NoProblems => 0,
BuildOutcome::OnlyWarnings => 1,
BuildOutcome::Errors => 2,
};
// No need to waste time freeing this memory, // No need to waste time freeing this memory,
// since the process is about to exit anyway. // since the process is about to exit anyway.
std::mem::forget(arena); std::mem::forget(arena);
@ -192,7 +236,8 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
total_time.as_millis() total_time.as_millis()
); );
Ok(status_code) // Return a nonzero exit code if there were problems
Ok(outcome.status_code())
} }
BuildAndRun { roc_file_arg_index } => { BuildAndRun { roc_file_arg_index } => {
let mut cmd = Command::new(binary_path); let mut cmd = Command::new(binary_path);
@ -200,7 +245,7 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
// Forward all the arguments after the .roc file argument // Forward all the arguments after the .roc file argument
// to the new process. This way, you can do things like: // to the new process. This way, you can do things like:
// //
// roc run app.roc foo bar baz // roc app.roc foo bar baz
// //
// ...and have it so that app.roc will receive only `foo`, // ...and have it so that app.roc will receive only `foo`,
// `bar`, and `baz` as its arguments. // `bar`, and `baz` as its arguments.
@ -210,23 +255,9 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
} }
} }
// Run the compiled app match outcome {
let exit_status = cmd BuildOutcome::Errors => Ok(outcome.status_code()),
.current_dir(original_cwd) _ => roc_run(cmd.current_dir(original_cwd)),
.spawn()
.unwrap_or_else(|err| panic!("Failed to run app after building it: {:?}", err))
.wait()
.expect("TODO gracefully handle block_on failing when roc run spawns a subprocess for the compiled app");
// `roc run` exits with the same status code as the app it ran.
//
// If you want to know whether there were compilation problems
// via status code, use either `roc build` or `roc check` instead!
match exit_status.code() {
Some(code) => Ok(code),
None => {
todo!("TODO gracefully handle the roc run subprocess terminating with a signal.");
}
} }
} }
} }
@ -241,3 +272,37 @@ pub fn build(target: &Triple, matches: &ArgMatches, config: BuildConfig) -> io::
} }
} }
} }
/// Runs the compiled app by replacing the current process image (UNIX only).
///
/// On success this never returns: the `roc` process *becomes* the app via
/// `exec`, so the app's exit status is reported directly to the shell.
/// The `io::Result` return type exists for the error path only.
#[cfg(target_family = "unix")]
fn roc_run(cmd: &mut Command) -> io::Result<i32> {
    use std::os::unix::process::CommandExt;

    // This is much faster than spawning a subprocess if we're on a UNIX system!
    let err = cmd.exec();

    // If exec actually returned, it was definitely an error! (Otherwise,
    // this process would have been replaced by the other one, and we'd
    // never actually reach this line of code.)
    Err(err)
}

/// Runs the compiled app in a child process and forwards its exit code
/// (non-UNIX fallback, since `exec` is unavailable there).
#[cfg(not(target_family = "unix"))]
fn roc_run(cmd: &mut Command) -> io::Result<i32> {
    // Run the compiled app
    let exit_status = cmd
        .spawn()
        .unwrap_or_else(|err| panic!("Failed to run app after building it: {:?}", err))
        .wait()
        .expect("TODO gracefully handle block_on failing when `roc` spawns a subprocess for the compiled app");

    // `roc [FILE]` exits with the same status code as the app it ran.
    //
    // If you want to know whether there were compilation problems
    // via status code, use either `roc build` or `roc check` instead!
    match exit_status.code() {
        Some(code) => Ok(code),
        None => {
            // `code()` is `None` when the child was terminated by a signal.
            todo!("TODO gracefully handle the `roc [FILE]` subprocess terminating with a signal.");
        }
    }
}

View file

@ -1,20 +1,43 @@
use roc_cli::{ use roc_cli::{
build, build_app, docs, repl, BuildConfig, CMD_BUILD, CMD_DOCS, CMD_EDIT, CMD_REPL, CMD_RUN, build_app, docs, repl, BuildConfig, CMD_BUILD, CMD_DOCS, CMD_EDIT, CMD_REPL, CMD_RUN,
DIRECTORY_OR_FILES, ROC_FILE, DIRECTORY_OR_FILES, ROC_FILE,
}; };
use std::fs::{self, FileType};
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use target_lexicon::Triple; use target_lexicon::Triple;
#[cfg(feature = "llvm")]
use roc_cli::build;
use std::ffi::{OsStr, OsString};
/// Stub used when `roc` is compiled without the `llvm` feature:
/// building is not possible in that configuration, so this always panics.
#[cfg(not(feature = "llvm"))]
fn build(_target: &Triple, _matches: &clap::ArgMatches, _config: BuildConfig) -> io::Result<i32> {
    panic!("Building without LLVM is not currently supported.");
}
fn main() -> io::Result<()> { fn main() -> io::Result<()> {
let matches = build_app().get_matches(); let matches = build_app().get_matches();
let exit_code = match matches.subcommand_name() { let exit_code = match matches.subcommand_name() {
None => { None => {
roc_editor::launch(&[])?; match matches.index_of(ROC_FILE) {
Some(arg_index) => {
let roc_file_arg_index = arg_index + 1; // Not sure why this +1 is necessary, but it is!
// rustc couldn't infer the error type here build(
Result::<i32, io::Error>::Ok(0) &Triple::host(),
&matches,
BuildConfig::BuildAndRun { roc_file_arg_index },
)
}
None => {
launch_editor(&[])?;
Ok(0)
}
}
} }
Some(CMD_BUILD) => Ok(build( Some(CMD_BUILD) => Ok(build(
&Triple::host(), &Triple::host(),
@ -22,14 +45,15 @@ fn main() -> io::Result<()> {
BuildConfig::BuildOnly, BuildConfig::BuildOnly,
)?), )?),
Some(CMD_RUN) => { Some(CMD_RUN) => {
let subcmd_matches = matches.subcommand_matches(CMD_RUN).unwrap(); // TODO remove CMD_RUN altogether if it is currently September 2021 or later.
let roc_file_arg_index = subcmd_matches.index_of(ROC_FILE).unwrap() + 1; // Not sure why this +1 is necessary, but it is! println!(
r#"`roc run` is deprecated!
If you're using a prebuilt binary, you no longer need the `run` - just do `roc [FILE]` instead of `roc run [FILE]`.
If you're building the compiler from source you'll want to do `cargo run [FILE]` instead of `cargo run run [FILE]`.
"#
);
Ok(build( Ok(1)
&Triple::host(),
subcmd_matches,
BuildConfig::BuildAndRun { roc_file_arg_index },
)?)
} }
Some(CMD_REPL) => { Some(CMD_REPL) => {
repl::main()?; repl::main()?;
@ -44,14 +68,14 @@ fn main() -> io::Result<()> {
.values_of_os(DIRECTORY_OR_FILES) .values_of_os(DIRECTORY_OR_FILES)
{ {
None => { None => {
roc_editor::launch(&[])?; launch_editor(&[])?;
} }
Some(values) => { Some(values) => {
let paths = values let paths = values
.map(|os_str| Path::new(os_str)) .map(|os_str| Path::new(os_str))
.collect::<Vec<&Path>>(); .collect::<Vec<&Path>>();
roc_editor::launch(&paths)?; launch_editor(&paths)?;
} }
} }
@ -59,17 +83,37 @@ fn main() -> io::Result<()> {
Ok(0) Ok(0)
} }
Some(CMD_DOCS) => { Some(CMD_DOCS) => {
let values = matches let maybe_values = matches
.subcommand_matches(CMD_DOCS) .subcommand_matches(CMD_DOCS)
.unwrap() .unwrap()
.values_of_os(DIRECTORY_OR_FILES) .values_of_os(DIRECTORY_OR_FILES);
.unwrap();
let paths = values let mut values: Vec<OsString> = Vec::new();
.map(|os_str| Path::new(os_str).to_path_buf())
.collect::<Vec<PathBuf>>();
docs(paths); match maybe_values {
None => {
let mut os_string_values: Vec<OsString> = Vec::new();
read_all_roc_files(&OsStr::new("./").to_os_string(), &mut os_string_values)?;
for os_string in os_string_values {
values.push(os_string);
}
}
Some(os_values) => {
for os_str in os_values {
values.push(os_str.to_os_string());
}
}
}
let mut roc_files = Vec::new();
// Populate roc_files
for os_str in values {
let metadata = fs::metadata(os_str.clone())?;
roc_files_recursive(os_str.as_os_str(), metadata.file_type(), &mut roc_files)?;
}
docs(roc_files);
Ok(0) Ok(0)
} }
@ -78,3 +122,51 @@ fn main() -> io::Result<()> {
std::process::exit(exit_code); std::process::exit(exit_code);
} }
/// Recursively collects the paths of all `.roc` files under `dir`.
///
/// Directories are descended into depth-first; every file whose extension
/// is exactly `roc` is appended to `roc_file_paths`.
///
/// # Errors
/// Returns any `io::Error` produced while reading a directory or one of
/// its entries.
fn read_all_roc_files(
    dir: &OsString,
    // The `mut` binding and `&mut roc_file_paths` double-reborrow of the
    // original were unnecessary (the latter only compiled via deref
    // coercion); a plain `&mut Vec` reborrows implicitly on recursion.
    roc_file_paths: &mut Vec<OsString>,
) -> Result<(), std::io::Error> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();

        if path.is_dir() {
            read_all_roc_files(&path.into_os_string(), roc_file_paths)?;
        } else if path.extension().and_then(OsStr::to_str) == Some("roc") {
            roc_file_paths.push(path.into_os_string());
        }
    }

    Ok(())
}
/// Appends `path` itself (if it is a plain file) or every file underneath it
/// (if it is a directory) to `roc_files`.
///
/// Note that files are collected regardless of extension; filtering to
/// `.roc` files, when wanted, happens in the caller.
fn roc_files_recursive<P: AsRef<Path>>(
    path: P,
    file_type: FileType,
    roc_files: &mut Vec<PathBuf>,
) -> io::Result<()> {
    if !file_type.is_dir() {
        // Plain file: record it as-is.
        roc_files.push(path.as_ref().to_path_buf());
        return Ok(());
    }

    // Directory: recurse into each entry with that entry's own file type.
    for entry_res in fs::read_dir(path)? {
        let entry = entry_res?;
        roc_files_recursive(entry.path(), entry.file_type()?, roc_files)?;
    }

    Ok(())
}
/// Launches the Roc editor on the given paths by delegating to
/// `roc_editor::launch` (only compiled in with the `editor` feature).
#[cfg(feature = "editor")]
fn launch_editor(filepaths: &[&Path]) -> io::Result<()> {
    roc_editor::launch(filepaths)
}

/// Stub used when this build of `roc` excluded the `editor` feature:
/// always panics, since there is no editor to launch.
#[cfg(not(feature = "editor"))]
fn launch_editor(_filepaths: &[&Path]) -> io::Result<()> {
    panic!("Cannot launch the editor because this build of roc did not include `feature = \"editor\"`!");
}

View file

@ -1,15 +1,12 @@
use const_format::concatcp; use const_format::concatcp;
#[cfg(feature = "llvm")]
use gen::{gen_and_eval, ReplOutput}; use gen::{gen_and_eval, ReplOutput};
use roc_gen::llvm::build::OptLevel;
use roc_parse::parser::{EExpr, SyntaxError}; use roc_parse::parser::{EExpr, SyntaxError};
use rustyline::error::ReadlineError;
use rustyline::highlight::{Highlighter, PromptInfo}; use rustyline::highlight::{Highlighter, PromptInfo};
use rustyline::validate::{self, ValidationContext, ValidationResult, Validator}; use rustyline::validate::{self, ValidationContext, ValidationResult, Validator};
use rustyline::Editor;
use rustyline_derive::{Completer, Helper, Hinter}; use rustyline_derive::{Completer, Helper, Hinter};
use std::borrow::Cow; use std::borrow::Cow;
use std::io; use std::io;
use target_lexicon::Triple;
const BLUE: &str = "\u{001b}[36m"; const BLUE: &str = "\u{001b}[36m";
const PINK: &str = "\u{001b}[35m"; const PINK: &str = "\u{001b}[35m";
@ -30,7 +27,9 @@ pub const INSTRUCTIONS: &str = "Enter an expression, or :help, or :exit/:q.\n";
pub const PROMPT: &str = concatcp!("\n", BLUE, "»", END_COL, " "); pub const PROMPT: &str = concatcp!("\n", BLUE, "»", END_COL, " ");
pub const CONT_PROMPT: &str = concatcp!(BLUE, "", END_COL, " "); pub const CONT_PROMPT: &str = concatcp!(BLUE, "", END_COL, " ");
#[cfg(feature = "llvm")]
mod eval; mod eval;
#[cfg(feature = "llvm")]
mod gen; mod gen;
#[derive(Completer, Helper, Hinter)] #[derive(Completer, Helper, Hinter)]
@ -107,7 +106,16 @@ impl Validator for InputValidator {
} }
} }
#[cfg(not(feature = "llvm"))]
pub fn main() -> io::Result<()> { pub fn main() -> io::Result<()> {
panic!("The REPL currently requires being built with LLVM.");
}
#[cfg(feature = "llvm")]
pub fn main() -> io::Result<()> {
use rustyline::error::ReadlineError;
use rustyline::Editor;
// To debug rustyline: // To debug rustyline:
// <UNCOMMENT> env_logger::init(); // <UNCOMMENT> env_logger::init();
// <RUN WITH:> RUST_LOG=rustyline=debug cargo run repl 2> debug.log // <RUN WITH:> RUST_LOG=rustyline=debug cargo run repl 2> debug.log
@ -226,7 +234,11 @@ fn report_parse_error(fail: SyntaxError) {
println!("TODO Gracefully report parse error in repl: {:?}", fail); println!("TODO Gracefully report parse error in repl: {:?}", fail);
} }
#[cfg(feature = "llvm")]
fn eval_and_format<'a>(src: &str) -> Result<String, SyntaxError<'a>> { fn eval_and_format<'a>(src: &str) -> Result<String, SyntaxError<'a>> {
use roc_mono::ir::OptLevel;
use target_lexicon::Triple;
gen_and_eval(src.as_bytes(), Triple::host(), OptLevel::Normal).map(|output| match output { gen_and_eval(src.as_bytes(), Triple::host(), OptLevel::Normal).map(|output| match output {
ReplOutput::NoProblems { expr, expr_type } => { ReplOutput::NoProblems { expr, expr_type } => {
format!("\n{} {}:{} {}", expr, PINK, END_COL, expr_type) format!("\n{} {}:{} {}", expr, PINK, END_COL, expr_type)

View file

@ -1,16 +1,15 @@
use bumpalo::collections::Vec; use bumpalo::collections::Vec;
use bumpalo::Bump; use bumpalo::Bump;
use libloading::Library; use libloading::Library;
use roc_collections::all::MutMap; use roc_gen_llvm::{run_jit_function, run_jit_function_dynamic_type};
use roc_gen::{run_jit_function, run_jit_function_dynamic_type}; use roc_module::ident::TagName;
use roc_module::ident::{Lowercase, TagName};
use roc_module::operator::CalledVia; use roc_module::operator::CalledVia;
use roc_module::symbol::{Interns, ModuleId, Symbol}; use roc_module::symbol::{Interns, ModuleId, Symbol};
use roc_mono::ir::ProcLayout;
use roc_mono::layout::{union_sorted_tags_help, Builtin, Layout, UnionLayout, UnionVariant}; use roc_mono::layout::{union_sorted_tags_help, Builtin, Layout, UnionLayout, UnionVariant};
use roc_parse::ast::{AssignedField, Expr, StrLiteral}; use roc_parse::ast::{AssignedField, Expr, StrLiteral};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::{Content, FlatType, Subs, Variable}; use roc_types::subs::{Content, FlatType, GetSubsSlice, RecordFields, Subs, UnionTags, Variable};
use roc_types::types::RecordField;
struct Env<'a, 'env> { struct Env<'a, 'env> {
arena: &'a Bump, arena: &'a Bump,
@ -37,7 +36,7 @@ pub unsafe fn jit_to_ast<'a>(
arena: &'a Bump, arena: &'a Bump,
lib: Library, lib: Library,
main_fn_name: &str, main_fn_name: &str,
layout: &Layout<'a>, layout: ProcLayout<'a>,
content: &Content, content: &Content,
interns: &Interns, interns: &Interns,
home: ModuleId, home: ModuleId,
@ -52,7 +51,16 @@ pub unsafe fn jit_to_ast<'a>(
home, home,
}; };
jit_to_ast_help(&env, lib, main_fn_name, layout, content) match layout {
ProcLayout {
arguments: [],
result,
} => {
// this is a thunk
jit_to_ast_help(&env, lib, main_fn_name, &result, content)
}
_ => Err(ToAstProblem::FunctionLayout),
}
} }
fn jit_to_ast_help<'a>( fn jit_to_ast_help<'a>(
@ -73,12 +81,26 @@ fn jit_to_ast_help<'a>(
) )
} }
Layout::Builtin(Builtin::Usize) => Ok(run_jit_function!(lib, main_fn_name, usize, |num| { Layout::Builtin(Builtin::Usize) => Ok(run_jit_function!(lib, main_fn_name, usize, |num| {
num_to_ast(env, nat_to_ast(env.arena, num), content) num_to_ast(env, number_literal_to_ast(env.arena, num), content)
})), })),
Layout::Builtin(Builtin::Int16) => {
Ok(run_jit_function!(lib, main_fn_name, i16, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Int32) => {
Ok(run_jit_function!(lib, main_fn_name, i32, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Int64) => { Layout::Builtin(Builtin::Int64) => {
Ok(run_jit_function!(lib, main_fn_name, i64, |num| num_to_ast( Ok(run_jit_function!(lib, main_fn_name, i64, |num| num_to_ast(
env, env,
i64_to_ast(env.arena, num), number_literal_to_ast(env.arena, num),
content content
))) )))
} }
@ -87,13 +109,20 @@ fn jit_to_ast_help<'a>(
lib, lib,
main_fn_name, main_fn_name,
i128, i128,
|num| num_to_ast(env, i128_to_ast(env.arena, num), content) |num| num_to_ast(env, number_literal_to_ast(env.arena, num), content)
)) ))
} }
Layout::Builtin(Builtin::Float32) => {
Ok(run_jit_function!(lib, main_fn_name, f32, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Float64) => { Layout::Builtin(Builtin::Float64) => {
Ok(run_jit_function!(lib, main_fn_name, f64, |num| num_to_ast( Ok(run_jit_function!(lib, main_fn_name, f64, |num| num_to_ast(
env, env,
f64_to_ast(env.arena, num), number_literal_to_ast(env.arena, num),
content content
))) )))
} }
@ -110,7 +139,7 @@ fn jit_to_ast_help<'a>(
} }
})) }))
} }
Layout::Builtin(Builtin::List(_, elem_layout)) => Ok(run_jit_function!( Layout::Builtin(Builtin::List(elem_layout)) => Ok(run_jit_function!(
lib, lib,
main_fn_name, main_fn_name,
(*const u8, usize), (*const u8, usize),
@ -119,26 +148,44 @@ fn jit_to_ast_help<'a>(
Layout::Builtin(other) => { Layout::Builtin(other) => {
todo!("add support for rendering builtin {:?} to the REPL", other) todo!("add support for rendering builtin {:?} to the REPL", other)
} }
Layout::PhantomEmptyStruct => Ok(run_jit_function!(lib, main_fn_name, &u8, |_| {
Expr::Record {
fields: &[],
final_comments: env.arena.alloc([]),
}
})),
Layout::Struct(field_layouts) => { Layout::Struct(field_layouts) => {
let ptr_to_ast = |ptr: *const u8| match content { let ptr_to_ast = |ptr: *const u8| match content {
Content::Structure(FlatType::Record(fields, _)) => { Content::Structure(FlatType::Record(fields, _)) => {
struct_to_ast(env, ptr, field_layouts, fields) Ok(struct_to_ast(env, ptr, field_layouts, *fields))
}
Content::Structure(FlatType::EmptyRecord) => {
struct_to_ast(env, ptr, field_layouts, &MutMap::default())
} }
Content::Structure(FlatType::EmptyRecord) => Ok(struct_to_ast(
env,
ptr,
field_layouts,
RecordFields::empty(),
)),
Content::Structure(FlatType::TagUnion(tags, _)) => { Content::Structure(FlatType::TagUnion(tags, _)) => {
debug_assert_eq!(tags.len(), 1); debug_assert_eq!(tags.len(), 1);
let (tag_name, payload_vars) = tags.iter().next().unwrap(); let (tag_name, payload_vars) = unpack_single_element_tag_union(env.subs, *tags);
single_tag_union_to_ast(env, ptr, field_layouts, tag_name.clone(), payload_vars) Ok(single_tag_union_to_ast(
env,
ptr,
field_layouts,
tag_name,
payload_vars,
))
}
Content::Structure(FlatType::FunctionOrTagUnion(tag_name, _, _)) => {
let tag_name = &env.subs[*tag_name];
Ok(single_tag_union_to_ast(
env,
ptr,
field_layouts,
tag_name,
&[],
))
}
Content::Structure(FlatType::Func(_, _, _)) => {
// a function with a struct as the closure environment
Err(ToAstProblem::FunctionLayout)
} }
other => { other => {
unreachable!( unreachable!(
@ -153,81 +200,151 @@ fn jit_to_ast_help<'a>(
let result_stack_size = layout.stack_size(env.ptr_bytes); let result_stack_size = layout.stack_size(env.ptr_bytes);
Ok(run_jit_function_dynamic_type!( run_jit_function_dynamic_type!(
lib, lib,
main_fn_name, main_fn_name,
result_stack_size as usize, result_stack_size as usize,
|bytes: *const u8| { ptr_to_ast(bytes as *const u8) } |bytes: *const u8| { ptr_to_ast(bytes as *const u8) }
)) )
} }
Layout::Union(UnionLayout::NonRecursive(union_layouts)) => match content { Layout::Union(UnionLayout::NonRecursive(union_layouts)) => {
Content::Structure(FlatType::TagUnion(tags, _)) => { let union_layout = UnionLayout::NonRecursive(union_layouts);
debug_assert_eq!(union_layouts.len(), tags.len());
let tags_vec: std::vec::Vec<(TagName, std::vec::Vec<Variable>)> = match content {
tags.iter().map(|(a, b)| (a.clone(), b.clone())).collect(); Content::Structure(FlatType::TagUnion(tags, _)) => {
debug_assert_eq!(union_layouts.len(), tags.len());
let union_variant = union_sorted_tags_help(env.arena, tags_vec, None, env.subs); let tags_vec: std::vec::Vec<(TagName, std::vec::Vec<Variable>)> = tags
.unsorted_iterator(env.subs, Variable::EMPTY_TAG_UNION)
.map(|(a, b)| (a.clone(), b.to_vec()))
.collect();
let size = layout.stack_size(env.ptr_bytes); let tags_map: roc_collections::all::MutMap<_, _> =
use roc_mono::layout::WrappedVariant::*; tags_vec.iter().cloned().collect();
match union_variant {
UnionVariant::Wrapped(variant) => { let union_variant = union_sorted_tags_help(env.arena, tags_vec, None, env.subs);
match variant {
NonRecursive { let size = layout.stack_size(env.ptr_bytes);
sorted_tag_layouts: tags_and_layouts, use roc_mono::layout::WrappedVariant::*;
match union_variant {
UnionVariant::Wrapped(variant) => {
match variant {
NonRecursive {
sorted_tag_layouts: tags_and_layouts,
} => {
Ok(run_jit_function_dynamic_type!(
lib,
main_fn_name,
size as usize,
|ptr: *const u8| {
// Because this is a `Wrapped`, the first 8 bytes encode the tag ID
let offset = tags_and_layouts
.iter()
.map(|(_, fields)| {
fields
.iter()
.map(|l| l.stack_size(env.ptr_bytes))
.sum()
})
.max()
.unwrap_or(0);
let tag_id = match union_layout.tag_id_builtin() {
Builtin::Int1 => {
*(ptr.add(offset as usize) as *const i8) as i64
}
Builtin::Int8 => {
*(ptr.add(offset as usize) as *const i8) as i64
}
Builtin::Int16 => {
*(ptr.add(offset as usize) as *const i16) as i64
}
Builtin::Int64 => {
// used by non-recursive unions at the
// moment, remove if that is no longer the case
*(ptr.add(offset as usize) as *const i64) as i64
}
_ => unreachable!("invalid tag id layout"),
};
// use the tag ID as an index, to get its name and layout of any arguments
let (tag_name, arg_layouts) =
&tags_and_layouts[tag_id as usize];
let tag_expr = tag_name_to_expr(env, tag_name);
let loc_tag_expr =
&*env.arena.alloc(Located::at_zero(tag_expr));
let variables = &tags_map[tag_name];
debug_assert_eq!(arg_layouts.len(), variables.len());
// NOTE assumes the data bytes are the first bytes
let it =
variables.iter().copied().zip(arg_layouts.iter());
let output = sequence_of_expr(env, ptr, it);
let output = output.into_bump_slice();
Expr::Apply(loc_tag_expr, output, CalledVia::Space)
}
))
}
Recursive {
sorted_tag_layouts: tags_and_layouts,
} => {
Ok(run_jit_function_dynamic_type!(
lib,
main_fn_name,
size as usize,
|ptr: *const u8| {
// Because this is a `Wrapped`, the first 8 bytes encode the tag ID
let tag_id = *(ptr as *const i64);
// use the tag ID as an index, to get its name and layout of any arguments
let (tag_name, arg_layouts) =
&tags_and_layouts[tag_id as usize];
let tag_expr = tag_name_to_expr(env, tag_name);
let loc_tag_expr =
&*env.arena.alloc(Located::at_zero(tag_expr));
let variables = &tags_map[tag_name];
// because the arg_layouts include the tag ID, it is one longer
debug_assert_eq!(
arg_layouts.len() - 1,
variables.len()
);
// skip forward to the start of the first element, ignoring the tag id
let ptr = ptr.offset(8);
let it =
variables.iter().copied().zip(&arg_layouts[1..]);
let output = sequence_of_expr(env, ptr, it);
let output = output.into_bump_slice();
Expr::Apply(loc_tag_expr, output, CalledVia::Space)
}
))
}
_ => todo!(),
} }
| Recursive {
sorted_tag_layouts: tags_and_layouts,
} => {
Ok(run_jit_function_dynamic_type!(
lib,
main_fn_name,
size as usize,
|ptr: *const u8| {
// Because this is a `Wrapped`, the first 8 bytes encode the tag ID
let tag_id = *(ptr as *const i64);
// use the tag ID as an index, to get its name and layout of any arguments
let (tag_name, arg_layouts) =
&tags_and_layouts[tag_id as usize];
let tag_expr = tag_name_to_expr(env, tag_name);
let loc_tag_expr =
&*env.arena.alloc(Located::at_zero(tag_expr));
let variables = &tags[tag_name];
// because the arg_layouts include the tag ID, it is one longer
debug_assert_eq!(arg_layouts.len() - 1, variables.len());
// skip forward to the start of the first element, ignoring the tag id
let ptr = ptr.offset(8);
let it = variables.iter().copied().zip(&arg_layouts[1..]);
let output = sequence_of_expr(env, ptr, it);
let output = output.into_bump_slice();
Expr::Apply(loc_tag_expr, output, CalledVia::Space)
}
))
}
_ => todo!(),
} }
_ => unreachable!("any other variant would have a different layout"),
} }
_ => unreachable!("any other variant would have a different layout"),
} }
} Content::Structure(FlatType::RecursiveTagUnion(_, _, _)) => {
Content::Structure(FlatType::RecursiveTagUnion(_, _, _)) => { todo!("print recursive tag unions in the REPL")
todo!("print recursive tag unions in the REPL") }
} Content::Alias(_, _, actual) => {
Content::Alias(_, _, actual) => { let content = env.subs.get_content_without_compacting(*actual);
let content = env.subs.get_without_compacting(*actual).content;
jit_to_ast_help(env, lib, main_fn_name, layout, &content) jit_to_ast_help(env, lib, main_fn_name, layout, content)
}
other => unreachable!("Weird content for Union layout: {:?}", other),
} }
other => unreachable!("Weird content for Union layout: {:?}", other), }
},
Layout::Union(UnionLayout::Recursive(_)) Layout::Union(UnionLayout::Recursive(_))
| Layout::Union(UnionLayout::NullableWrapped { .. }) | Layout::Union(UnionLayout::NullableWrapped { .. })
| Layout::Union(UnionLayout::NullableUnwrapped { .. }) | Layout::Union(UnionLayout::NullableUnwrapped { .. })
@ -236,10 +353,7 @@ fn jit_to_ast_help<'a>(
todo!("add support for rendering recursive tag unions in the REPL") todo!("add support for rendering recursive tag unions in the REPL")
} }
Layout::FunctionPointer(_, _) | Layout::Closure(_, _, _) => { Layout::Closure(_, _, _) => Err(ToAstProblem::FunctionLayout),
Err(ToAstProblem::FunctionLayout)
}
Layout::Pointer(_) => todo!("add support for rendering pointers in the REPL"),
} }
} }
@ -247,11 +361,11 @@ fn tag_name_to_expr<'a>(env: &Env<'a, '_>, tag_name: &TagName) -> Expr<'a> {
match tag_name { match tag_name {
TagName::Global(_) => Expr::GlobalTag( TagName::Global(_) => Expr::GlobalTag(
env.arena env.arena
.alloc_str(&tag_name.as_string(env.interns, env.home)), .alloc_str(&tag_name.as_ident_str(env.interns, env.home)),
), ),
TagName::Private(_) => Expr::PrivateTag( TagName::Private(_) => Expr::PrivateTag(
env.arena env.arena
.alloc_str(&tag_name.as_string(env.interns, env.home)), .alloc_str(&tag_name.as_ident_str(env.interns, env.home)),
), ),
TagName::Closure(_) => unreachable!("User cannot type this"), TagName::Closure(_) => unreachable!("User cannot type this"),
} }
@ -264,15 +378,30 @@ fn ptr_to_ast<'a>(
content: &Content, content: &Content,
) -> Expr<'a> { ) -> Expr<'a> {
match layout { match layout {
Layout::Builtin(Builtin::Int128) => {
let num = unsafe { *(ptr as *const i128) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int64) => { Layout::Builtin(Builtin::Int64) => {
let num = unsafe { *(ptr as *const i64) }; let num = unsafe { *(ptr as *const i64) };
num_to_ast(env, i64_to_ast(env.arena, num), content) num_to_ast(env, number_literal_to_ast(env.arena, num), content)
} }
Layout::Builtin(Builtin::Usize) => { Layout::Builtin(Builtin::Int32) => {
let num = unsafe { *(ptr as *const usize) }; let num = unsafe { *(ptr as *const i32) };
num_to_ast(env, nat_to_ast(env.arena, num), content) num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int16) => {
let num = unsafe { *(ptr as *const i16) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int8) => {
let num = unsafe { *(ptr as *const i8) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
} }
Layout::Builtin(Builtin::Int1) => { Layout::Builtin(Builtin::Int1) => {
// TODO: bits are not as expected here. // TODO: bits are not as expected here.
@ -281,16 +410,26 @@ fn ptr_to_ast<'a>(
bool_to_ast(env, num, content) bool_to_ast(env, num, content)
} }
Layout::Builtin(Builtin::Usize) => {
let num = unsafe { *(ptr as *const usize) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Float64) => { Layout::Builtin(Builtin::Float64) => {
let num = unsafe { *(ptr as *const f64) }; let num = unsafe { *(ptr as *const f64) };
num_to_ast(env, f64_to_ast(env.arena, num), content) num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Float32) => {
let num = unsafe { *(ptr as *const f32) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
} }
Layout::Builtin(Builtin::EmptyList) => Expr::List { Layout::Builtin(Builtin::EmptyList) => Expr::List {
items: &[], items: &[],
final_comments: &[], final_comments: &[],
}, },
Layout::Builtin(Builtin::List(_, elem_layout)) => { Layout::Builtin(Builtin::List(elem_layout)) => {
// Turn the (ptr, len) wrapper struct into actual ptr and len values. // Turn the (ptr, len) wrapper struct into actual ptr and len values.
let len = unsafe { *(ptr.offset(env.ptr_bytes as isize) as *const usize) }; let len = unsafe { *(ptr.offset(env.ptr_bytes as isize) as *const usize) };
let ptr = unsafe { *(ptr as *const *const u8) }; let ptr = unsafe { *(ptr as *const *const u8) };
@ -305,16 +444,20 @@ fn ptr_to_ast<'a>(
} }
Layout::Struct(field_layouts) => match content { Layout::Struct(field_layouts) => match content {
Content::Structure(FlatType::Record(fields, _)) => { Content::Structure(FlatType::Record(fields, _)) => {
struct_to_ast(env, ptr, field_layouts, fields) struct_to_ast(env, ptr, field_layouts, *fields)
} }
Content::Structure(FlatType::TagUnion(tags, _)) => { Content::Structure(FlatType::TagUnion(tags, _)) => {
debug_assert_eq!(tags.len(), 1); debug_assert_eq!(tags.len(), 1);
let (tag_name, payload_vars) = tags.iter().next().unwrap(); let (tag_name, payload_vars) = unpack_single_element_tag_union(env.subs, *tags);
single_tag_union_to_ast(env, ptr, field_layouts, tag_name.clone(), payload_vars) single_tag_union_to_ast(env, ptr, field_layouts, tag_name, payload_vars)
}
Content::Structure(FlatType::FunctionOrTagUnion(tag_name, _, _)) => {
let tag_name = &env.subs[*tag_name];
single_tag_union_to_ast(env, ptr, field_layouts, tag_name, &[])
} }
Content::Structure(FlatType::EmptyRecord) => { Content::Structure(FlatType::EmptyRecord) => {
struct_to_ast(env, ptr, &[], &MutMap::default()) struct_to_ast(env, ptr, &[], RecordFields::empty())
} }
other => { other => {
unreachable!( unreachable!(
@ -343,9 +486,10 @@ fn list_to_ast<'a>(
Content::Structure(FlatType::Apply(Symbol::LIST_LIST, vars)) => { Content::Structure(FlatType::Apply(Symbol::LIST_LIST, vars)) => {
debug_assert_eq!(vars.len(), 1); debug_assert_eq!(vars.len(), 1);
let elem_var = *vars.first().unwrap(); let elem_var_index = vars.into_iter().next().unwrap();
let elem_var = env.subs[elem_var_index];
env.subs.get_without_compacting(elem_var).content env.subs.get_content_without_compacting(elem_var)
} }
other => { other => {
unreachable!( unreachable!(
@ -356,14 +500,14 @@ fn list_to_ast<'a>(
}; };
let arena = env.arena; let arena = env.arena;
let mut output = Vec::with_capacity_in(len, &arena); let mut output = Vec::with_capacity_in(len, arena);
let elem_size = elem_layout.stack_size(env.ptr_bytes) as usize; let elem_size = elem_layout.stack_size(env.ptr_bytes) as usize;
for index in 0..len { for index in 0..len {
let offset_bytes = index * elem_size; let offset_bytes = index * elem_size;
let elem_ptr = unsafe { ptr.add(offset_bytes) }; let elem_ptr = unsafe { ptr.add(offset_bytes) };
let loc_expr = &*arena.alloc(Located { let loc_expr = &*arena.alloc(Located {
value: ptr_to_ast(env, elem_ptr, elem_layout, &elem_content), value: ptr_to_ast(env, elem_ptr, elem_layout, elem_content),
region: Region::zero(), region: Region::zero(),
}); });
@ -382,14 +526,14 @@ fn single_tag_union_to_ast<'a>(
env: &Env<'a, '_>, env: &Env<'a, '_>,
ptr: *const u8, ptr: *const u8,
field_layouts: &'a [Layout<'a>], field_layouts: &'a [Layout<'a>],
tag_name: TagName, tag_name: &TagName,
payload_vars: &[Variable], payload_vars: &[Variable],
) -> Expr<'a> { ) -> Expr<'a> {
debug_assert_eq!(field_layouts.len(), payload_vars.len()); debug_assert_eq!(field_layouts.len(), payload_vars.len());
let arena = env.arena; let arena = env.arena;
let tag_expr = tag_name_to_expr(env, &tag_name); let tag_expr = tag_name_to_expr(env, tag_name);
let loc_tag_expr = &*arena.alloc(Located::at_zero(tag_expr)); let loc_tag_expr = &*arena.alloc(Located::at_zero(tag_expr));
@ -410,14 +554,14 @@ where
{ {
let arena = env.arena; let arena = env.arena;
let subs = env.subs; let subs = env.subs;
let mut output = Vec::with_capacity_in(sequence.len(), &arena); let mut output = Vec::with_capacity_in(sequence.len(), arena);
// We'll advance this as we iterate through the fields // We'll advance this as we iterate through the fields
let mut field_ptr = ptr as *const u8; let mut field_ptr = ptr as *const u8;
for (var, layout) in sequence { for (var, layout) in sequence {
let content = subs.get_without_compacting(var).content; let content = subs.get_content_without_compacting(var);
let expr = ptr_to_ast(env, field_ptr, layout, &content); let expr = ptr_to_ast(env, field_ptr, layout, content);
let loc_expr = Located::at_zero(expr); let loc_expr = Located::at_zero(expr);
output.push(&*arena.alloc(loc_expr)); output.push(&*arena.alloc(loc_expr));
@ -433,31 +577,25 @@ fn struct_to_ast<'a>(
env: &Env<'a, '_>, env: &Env<'a, '_>,
ptr: *const u8, ptr: *const u8,
field_layouts: &'a [Layout<'a>], field_layouts: &'a [Layout<'a>],
fields: &MutMap<Lowercase, RecordField<Variable>>, record_fields: RecordFields,
) -> Expr<'a> { ) -> Expr<'a> {
let arena = env.arena; let arena = env.arena;
let subs = env.subs; let subs = env.subs;
let mut output = Vec::with_capacity_in(field_layouts.len(), &arena); let mut output = Vec::with_capacity_in(field_layouts.len(), arena);
// The fields, sorted alphabetically let sorted_fields: Vec<_> = Vec::from_iter_in(
let mut sorted_fields = { record_fields.sorted_iterator(env.subs, Variable::EMPTY_RECORD),
let mut vec = fields env.arena,
.iter() );
.collect::<std::vec::Vec<(&Lowercase, &RecordField<Variable>)>>();
vec.sort_by(|(label1, _), (label2, _)| label1.cmp(label2));
vec
};
if sorted_fields.len() == 1 { if sorted_fields.len() == 1 {
// this is a 1-field wrapper record around another record or 1-tag tag union // this is a 1-field wrapper record around another record or 1-tag tag union
let (label, field) = sorted_fields.pop().unwrap(); let (label, field) = sorted_fields.into_iter().next().unwrap();
let inner_content = env.subs.get_without_compacting(field.into_inner()).content; let inner_content = env.subs.get_content_without_compacting(field.into_inner());
let loc_expr = &*arena.alloc(Located { let loc_expr = &*arena.alloc(Located {
value: ptr_to_ast(env, ptr, &Layout::Struct(field_layouts), &inner_content), value: ptr_to_ast(env, ptr, &Layout::Struct(field_layouts), inner_content),
region: Region::zero(), region: Region::zero(),
}); });
@ -482,10 +620,12 @@ fn struct_to_ast<'a>(
// We'll advance this as we iterate through the fields // We'll advance this as we iterate through the fields
let mut field_ptr = ptr; let mut field_ptr = ptr;
for ((label, field), field_layout) in sorted_fields.iter().zip(field_layouts.iter()) { for ((label, field), field_layout) in sorted_fields.into_iter().zip(field_layouts.iter()) {
let content = subs.get_without_compacting(*field.as_inner()).content; let var = field.into_inner();
let content = subs.get_content_without_compacting(var);
let loc_expr = &*arena.alloc(Located { let loc_expr = &*arena.alloc(Located {
value: ptr_to_ast(env, field_ptr, field_layout, &content), value: ptr_to_ast(env, field_ptr, field_layout, content),
region: Region::zero(), region: Region::zero(),
}); });
@ -514,6 +654,36 @@ fn struct_to_ast<'a>(
} }
} }
fn unpack_single_element_tag_union(subs: &Subs, tags: UnionTags) -> (&TagName, &[Variable]) {
let (tag_name_index, payload_vars_index) = tags.iter_all().next().unwrap();
let tag_name = &subs[tag_name_index];
let subs_slice = subs[payload_vars_index].as_subs_slice();
let payload_vars = subs.get_subs_slice(*subs_slice);
(tag_name, payload_vars)
}
fn unpack_two_element_tag_union(
subs: &Subs,
tags: UnionTags,
) -> (&TagName, &[Variable], &TagName, &[Variable]) {
let mut it = tags.iter_all();
let (tag_name_index, payload_vars_index) = it.next().unwrap();
let tag_name1 = &subs[tag_name_index];
let subs_slice = subs[payload_vars_index].as_subs_slice();
let payload_vars1 = subs.get_subs_slice(*subs_slice);
let (tag_name_index, payload_vars_index) = it.next().unwrap();
let tag_name2 = &subs[tag_name_index];
let subs_slice = subs[payload_vars_index].as_subs_slice();
let payload_vars2 = subs.get_subs_slice(*subs_slice);
(tag_name1, payload_vars1, tag_name2, payload_vars2)
}
fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a> { fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a> {
use Content::*; use Content::*;
@ -525,7 +695,11 @@ fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a
FlatType::Record(fields, _) => { FlatType::Record(fields, _) => {
debug_assert_eq!(fields.len(), 1); debug_assert_eq!(fields.len(), 1);
let (label, field) = fields.iter().next().unwrap(); let (label, field) = fields
.sorted_iterator(env.subs, Variable::EMPTY_RECORD)
.next()
.unwrap();
let loc_label = Located { let loc_label = Located {
value: &*arena.alloc_str(label.as_str()), value: &*arena.alloc_str(label.as_str()),
region: Region::zero(), region: Region::zero(),
@ -536,9 +710,9 @@ fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a
// and/or records (e.g. { a: { b: { c: True } } }), // and/or records (e.g. { a: { b: { c: True } } }),
// so we need to do this recursively on the field type. // so we need to do this recursively on the field type.
let field_var = *field.as_inner(); let field_var = *field.as_inner();
let field_content = env.subs.get_without_compacting(field_var).content; let field_content = env.subs.get_content_without_compacting(field_var);
let loc_expr = Located { let loc_expr = Located {
value: bool_to_ast(env, value, &field_content), value: bool_to_ast(env, value, field_content),
region: Region::zero(), region: Region::zero(),
}; };
@ -556,10 +730,10 @@ fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a
} }
} }
FlatType::TagUnion(tags, _) if tags.len() == 1 => { FlatType::TagUnion(tags, _) if tags.len() == 1 => {
let (tag_name, payload_vars) = tags.iter().next().unwrap(); let (tag_name, payload_vars) = unpack_single_element_tag_union(env.subs, *tags);
let loc_tag_expr = { let loc_tag_expr = {
let tag_name = &tag_name.as_string(env.interns, env.home); let tag_name = &tag_name.as_ident_str(env.interns, env.home);
let tag_expr = if tag_name.starts_with('@') { let tag_expr = if tag_name.starts_with('@') {
Expr::PrivateTag(arena.alloc_str(tag_name)) Expr::PrivateTag(arena.alloc_str(tag_name))
} else { } else {
@ -578,10 +752,10 @@ fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a
debug_assert_eq!(payload_vars.len(), 1); debug_assert_eq!(payload_vars.len(), 1);
let var = *payload_vars.iter().next().unwrap(); let var = *payload_vars.iter().next().unwrap();
let content = env.subs.get_without_compacting(var).content; let content = env.subs.get_content_without_compacting(var);
let loc_payload = &*arena.alloc(Located { let loc_payload = &*arena.alloc(Located {
value: bool_to_ast(env, value, &content), value: bool_to_ast(env, value, content),
region: Region::zero(), region: Region::zero(),
}); });
@ -591,20 +765,19 @@ fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a
Expr::Apply(loc_tag_expr, payload, CalledVia::Space) Expr::Apply(loc_tag_expr, payload, CalledVia::Space)
} }
FlatType::TagUnion(tags, _) if tags.len() == 2 => { FlatType::TagUnion(tags, _) if tags.len() == 2 => {
let mut tags_iter = tags.iter(); let (tag_name_1, payload_vars_1, tag_name_2, payload_vars_2) =
let (tag_name_1, payload_vars_1) = tags_iter.next().unwrap(); unpack_two_element_tag_union(env.subs, *tags);
let (tag_name_2, payload_vars_2) = tags_iter.next().unwrap();
debug_assert!(payload_vars_1.is_empty()); debug_assert!(payload_vars_1.is_empty());
debug_assert!(payload_vars_2.is_empty()); debug_assert!(payload_vars_2.is_empty());
let tag_name = if value { let tag_name = if value {
max_by_key(tag_name_1, tag_name_2, |n| { max_by_key(tag_name_1, tag_name_2, |n| {
n.as_string(env.interns, env.home) n.as_ident_str(env.interns, env.home)
}) })
} else { } else {
min_by_key(tag_name_1, tag_name_2, |n| { min_by_key(tag_name_1, tag_name_2, |n| {
n.as_string(env.interns, env.home) n.as_ident_str(env.interns, env.home)
}) })
}; };
@ -616,9 +789,9 @@ fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a
} }
} }
Alias(_, _, var) => { Alias(_, _, var) => {
let content = env.subs.get_without_compacting(*var).content; let content = env.subs.get_content_without_compacting(*var);
bool_to_ast(env, value, &content) bool_to_ast(env, value, content)
} }
other => { other => {
unreachable!("Unexpected FlatType {:?} in bool_to_ast", other); unreachable!("Unexpected FlatType {:?} in bool_to_ast", other);
@ -637,7 +810,11 @@ fn byte_to_ast<'a>(env: &Env<'a, '_>, value: u8, content: &Content) -> Expr<'a>
FlatType::Record(fields, _) => { FlatType::Record(fields, _) => {
debug_assert_eq!(fields.len(), 1); debug_assert_eq!(fields.len(), 1);
let (label, field) = fields.iter().next().unwrap(); let (label, field) = fields
.sorted_iterator(env.subs, Variable::EMPTY_RECORD)
.next()
.unwrap();
let loc_label = Located { let loc_label = Located {
value: &*arena.alloc_str(label.as_str()), value: &*arena.alloc_str(label.as_str()),
region: Region::zero(), region: Region::zero(),
@ -648,9 +825,9 @@ fn byte_to_ast<'a>(env: &Env<'a, '_>, value: u8, content: &Content) -> Expr<'a>
// and/or records (e.g. { a: { b: { c: True } } }), // and/or records (e.g. { a: { b: { c: True } } }),
// so we need to do this recursively on the field type. // so we need to do this recursively on the field type.
let field_var = *field.as_inner(); let field_var = *field.as_inner();
let field_content = env.subs.get_without_compacting(field_var).content; let field_content = env.subs.get_content_without_compacting(field_var);
let loc_expr = Located { let loc_expr = Located {
value: byte_to_ast(env, value, &field_content), value: byte_to_ast(env, value, field_content),
region: Region::zero(), region: Region::zero(),
}; };
@ -668,10 +845,10 @@ fn byte_to_ast<'a>(env: &Env<'a, '_>, value: u8, content: &Content) -> Expr<'a>
} }
} }
FlatType::TagUnion(tags, _) if tags.len() == 1 => { FlatType::TagUnion(tags, _) if tags.len() == 1 => {
let (tag_name, payload_vars) = tags.iter().next().unwrap(); let (tag_name, payload_vars) = unpack_single_element_tag_union(env.subs, *tags);
let loc_tag_expr = { let loc_tag_expr = {
let tag_name = &tag_name.as_string(env.interns, env.home); let tag_name = &tag_name.as_ident_str(env.interns, env.home);
let tag_expr = if tag_name.starts_with('@') { let tag_expr = if tag_name.starts_with('@') {
Expr::PrivateTag(arena.alloc_str(tag_name)) Expr::PrivateTag(arena.alloc_str(tag_name))
} else { } else {
@ -690,10 +867,10 @@ fn byte_to_ast<'a>(env: &Env<'a, '_>, value: u8, content: &Content) -> Expr<'a>
debug_assert_eq!(payload_vars.len(), 1); debug_assert_eq!(payload_vars.len(), 1);
let var = *payload_vars.iter().next().unwrap(); let var = *payload_vars.iter().next().unwrap();
let content = env.subs.get_without_compacting(var).content; let content = env.subs.get_content_without_compacting(var);
let loc_payload = &*arena.alloc(Located { let loc_payload = &*arena.alloc(Located {
value: byte_to_ast(env, value, &content), value: byte_to_ast(env, value, content),
region: Region::zero(), region: Region::zero(),
}); });
@ -706,8 +883,10 @@ fn byte_to_ast<'a>(env: &Env<'a, '_>, value: u8, content: &Content) -> Expr<'a>
// anything with fewer tags is not a byte // anything with fewer tags is not a byte
debug_assert!(tags.len() > 2); debug_assert!(tags.len() > 2);
let tags_vec: std::vec::Vec<(TagName, std::vec::Vec<Variable>)> = let tags_vec: std::vec::Vec<(TagName, std::vec::Vec<Variable>)> = tags
tags.iter().map(|(a, b)| (a.clone(), b.clone())).collect(); .unsorted_iterator(env.subs, Variable::EMPTY_TAG_UNION)
.map(|(a, b)| (a.clone(), b.to_vec()))
.collect();
let union_variant = union_sorted_tags_help(env.arena, tags_vec, None, env.subs); let union_variant = union_sorted_tags_help(env.arena, tags_vec, None, env.subs);
@ -727,9 +906,9 @@ fn byte_to_ast<'a>(env: &Env<'a, '_>, value: u8, content: &Content) -> Expr<'a>
} }
} }
Alias(_, _, var) => { Alias(_, _, var) => {
let content = env.subs.get_without_compacting(*var).content; let content = env.subs.get_content_without_compacting(*var);
byte_to_ast(env, value, &content) byte_to_ast(env, value, content)
} }
other => { other => {
unreachable!("Unexpected FlatType {:?} in bool_to_ast", other); unreachable!("Unexpected FlatType {:?} in bool_to_ast", other);
@ -753,7 +932,11 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
// Its type signature will tell us that. // Its type signature will tell us that.
debug_assert_eq!(fields.len(), 1); debug_assert_eq!(fields.len(), 1);
let (label, field) = fields.iter().next().unwrap(); let (label, field) = fields
.sorted_iterator(env.subs, Variable::EMPTY_RECORD)
.next()
.unwrap();
let loc_label = Located { let loc_label = Located {
value: &*arena.alloc_str(label.as_str()), value: &*arena.alloc_str(label.as_str()),
region: Region::zero(), region: Region::zero(),
@ -764,9 +947,9 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
// and/or records (e.g. { a: { b: { c: 5 } } }), // and/or records (e.g. { a: { b: { c: 5 } } }),
// so we need to do this recursively on the field type. // so we need to do this recursively on the field type.
let field_var = *field.as_inner(); let field_var = *field.as_inner();
let field_content = env.subs.get_without_compacting(field_var).content; let field_content = env.subs.get_content_without_compacting(field_var);
let loc_expr = Located { let loc_expr = Located {
value: num_to_ast(env, num_expr, &field_content), value: num_to_ast(env, num_expr, field_content),
region: Region::zero(), region: Region::zero(),
}; };
@ -786,7 +969,7 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
// This was a single-tag union that got unwrapped at runtime. // This was a single-tag union that got unwrapped at runtime.
debug_assert_eq!(tags.len(), 1); debug_assert_eq!(tags.len(), 1);
let (tag_name, payload_vars) = tags.iter().next().unwrap(); let (tag_name, payload_vars) = unpack_single_element_tag_union(env.subs, *tags);
// If this tag union represents a number, skip right to // If this tag union represents a number, skip right to
// returning tis as an Expr::Num // returning tis as an Expr::Num
@ -795,7 +978,7 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
} }
let loc_tag_expr = { let loc_tag_expr = {
let tag_name = &tag_name.as_string(env.interns, env.home); let tag_name = &tag_name.as_ident_str(env.interns, env.home);
let tag_expr = if tag_name.starts_with('@') { let tag_expr = if tag_name.starts_with('@') {
Expr::PrivateTag(arena.alloc_str(tag_name)) Expr::PrivateTag(arena.alloc_str(tag_name))
} else { } else {
@ -814,10 +997,10 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
debug_assert_eq!(payload_vars.len(), 1); debug_assert_eq!(payload_vars.len(), 1);
let var = *payload_vars.iter().next().unwrap(); let var = *payload_vars.iter().next().unwrap();
let content = env.subs.get_without_compacting(var).content; let content = env.subs.get_content_without_compacting(var);
let loc_payload = &*arena.alloc(Located { let loc_payload = &*arena.alloc(Located {
value: num_to_ast(env, num_expr, &content), value: num_to_ast(env, num_expr, content),
region: Region::zero(), region: Region::zero(),
}); });
@ -832,9 +1015,9 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
} }
} }
Alias(_, _, var) => { Alias(_, _, var) => {
let content = env.subs.get_without_compacting(*var).content; let content = env.subs.get_content_without_compacting(*var);
num_to_ast(env, num_expr, &content) num_to_ast(env, num_expr, content)
} }
other => { other => {
panic!("Unexpected FlatType {:?} in num_to_ast", other); panic!("Unexpected FlatType {:?} in num_to_ast", other);
@ -844,25 +1027,7 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
/// This is centralized in case we want to format it differently later, /// This is centralized in case we want to format it differently later,
/// e.g. adding underscores for large numbers /// e.g. adding underscores for large numbers
fn nat_to_ast(arena: &Bump, num: usize) -> Expr<'_> { fn number_literal_to_ast<T: std::fmt::Display>(arena: &Bump, num: T) -> Expr<'_> {
Expr::Num(arena.alloc(format!("{}", num)))
}
/// This is centralized in case we want to format it differently later,
/// e.g. adding underscores for large numbers
fn i64_to_ast(arena: &Bump, num: i64) -> Expr<'_> {
Expr::Num(arena.alloc(format!("{}", num)))
}
/// This is centralized in case we want to format it differently later,
/// e.g. adding underscores for large numbers
fn i128_to_ast(arena: &Bump, num: i128) -> Expr<'_> {
Expr::Num(arena.alloc(format!("{}", num)))
}
/// This is centralized in case we want to format it differently later,
/// e.g. adding underscores for large numbers
fn f64_to_ast(arena: &Bump, num: f64) -> Expr<'_> {
Expr::Num(arena.alloc(format!("{}", num))) Expr::Num(arena.alloc(format!("{}", num)))
} }

View file

@ -1,14 +1,16 @@
use crate::repl::eval; use crate::repl::eval;
use bumpalo::Bump; use bumpalo::Bump;
use inkwell::context::Context; use inkwell::context::Context;
use inkwell::module::Linkage;
use roc_build::link::module_to_dylib; use roc_build::link::module_to_dylib;
use roc_build::program::FunctionIterator; use roc_build::program::FunctionIterator;
use roc_can::builtins::builtin_defs_map; use roc_can::builtins::builtin_defs_map;
use roc_collections::all::{MutMap, MutSet}; use roc_collections::all::{MutMap, MutSet};
use roc_fmt::annotation::Formattable; use roc_fmt::annotation::Formattable;
use roc_fmt::annotation::{Newlines, Parens}; use roc_fmt::annotation::{Newlines, Parens};
use roc_gen::llvm::build::{build_proc, build_proc_header, OptLevel}; use roc_gen_llvm::llvm::externs::add_default_roc_externs;
use roc_load::file::LoadingProblem; use roc_load::file::LoadingProblem;
use roc_mono::ir::OptLevel;
use roc_parse::parser::SyntaxError; use roc_parse::parser::SyntaxError;
use roc_types::pretty_print::{content_to_string, name_all_type_vars}; use roc_types::pretty_print::{content_to_string, name_all_type_vars};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -66,7 +68,8 @@ pub fn gen_and_eval<'a>(
use roc_load::file::MonomorphizedModule; use roc_load::file::MonomorphizedModule;
let MonomorphizedModule { let MonomorphizedModule {
mut procedures, procedures,
entry_point,
interns, interns,
exposed_to_host, exposed_to_host,
mut subs, mut subs,
@ -126,14 +129,17 @@ pub fn gen_and_eval<'a>(
Ok(ReplOutput::Problems(lines)) Ok(ReplOutput::Problems(lines))
} else { } else {
let context = Context::create(); let context = Context::create();
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let module = arena.alloc(roc_gen::llvm::build::module_from_builtins(&context, ""));
let builder = context.create_builder(); let builder = context.create_builder();
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let module = arena.alloc(roc_gen_llvm::llvm::build::module_from_builtins(
&context, "",
));
// Add roc_alloc, roc_realloc, and roc_dealloc, since the repl has no
// platform to provide them.
add_default_roc_externs(&context, module, &builder, ptr_bytes);
// mark our zig-defined builtins as internal // mark our zig-defined builtins as internal
use inkwell::module::Linkage;
for function in FunctionIterator::from_module(module) { for function in FunctionIterator::from_module(module) {
let name = function.get_name().to_str().unwrap(); let name = function.get_name().to_str().unwrap();
if name.starts_with("roc_builtins") { if name.starts_with("roc_builtins") {
@ -148,8 +154,8 @@ pub fn gen_and_eval<'a>(
// pretty-print the expr type string for later. // pretty-print the expr type string for later.
name_all_type_vars(main_fn_var, &mut subs); name_all_type_vars(main_fn_var, &mut subs);
let content = subs.get(main_fn_var).content; let content = subs.get_content_without_compacting(main_fn_var);
let expr_type_str = content_to_string(content.clone(), &subs, home, &interns); let expr_type_str = content_to_string(content, &subs, home, &interns);
let (_, main_fn_layout) = match procedures.keys().find(|(s, _)| *s == main_fn_symbol) { let (_, main_fn_layout) = match procedures.keys().find(|(s, _)| *s == main_fn_symbol) {
Some(layout) => *layout, Some(layout) => *layout,
@ -163,12 +169,12 @@ pub fn gen_and_eval<'a>(
let module = arena.alloc(module); let module = arena.alloc(module);
let (module_pass, function_pass) = let (module_pass, function_pass) =
roc_gen::llvm::build::construct_optimization_passes(module, opt_level); roc_gen_llvm::llvm::build::construct_optimization_passes(module, opt_level);
let (dibuilder, compile_unit) = roc_gen::llvm::build::Env::new_debug_info(module); let (dibuilder, compile_unit) = roc_gen_llvm::llvm::build::Env::new_debug_info(module);
// Compile and add all the Procs before adding main // Compile and add all the Procs before adding main
let env = roc_gen::llvm::build::Env { let env = roc_gen_llvm::llvm::build::Env {
arena: &arena, arena: &arena,
builder: &builder, builder: &builder,
dibuilder: &dibuilder, dibuilder: &dibuilder,
@ -182,65 +188,11 @@ pub fn gen_and_eval<'a>(
exposed_to_host: MutSet::default(), exposed_to_host: MutSet::default(),
}; };
let mut layout_ids = roc_mono::layout::LayoutIds::default(); let (main_fn_name, main_fn) = roc_gen_llvm::llvm::build::build_procedures_return_main(
let mut headers = Vec::with_capacity(procedures.len());
// Add all the Proc headers to the module.
// We have to do this in a separate pass first,
// because their bodies may reference each other.
let mut scope = roc_gen::llvm::build::Scope::default();
for ((symbol, layout), proc) in procedures.drain() {
let fn_val = build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
if proc.args.is_empty() {
// this is a 0-argument thunk, i.e. a top-level constant definition
// it must be in-scope everywhere in the module!
scope.insert_top_level_thunk(symbol, layout, fn_val);
}
headers.push((proc, fn_val));
}
// Build each proc using its header info.
for (proc, fn_val) in headers {
let mut current_scope = scope.clone();
// only have top-level thunks for this proc's module in scope
// this retain is not needed for correctness, but will cause less confusion when debugging
let home = proc.name.module_id();
current_scope.retain_top_level_thunks_for_module(home);
build_proc(&env, &mut layout_ids, scope.clone(), proc, fn_val);
// call finalize() before any code generation/verification
env.dibuilder.finalize();
if fn_val.verify(true) {
function_pass.run_on(&fn_val);
} else {
let mode = "NON-OPTIMIZED";
eprintln!(
"\n\nFunction {:?} failed LLVM verification in {} build. Its content was:\n",
fn_val.get_name().to_str().unwrap(),
mode,
);
fn_val.print_to_stderr();
panic!(
"The preceding code was from {:?}, which failed LLVM verification in {} build.",
fn_val.get_name().to_str().unwrap(),
mode,
);
}
}
let (main_fn_name, main_fn) = roc_gen::llvm::build::promote_to_main_function(
&env, &env,
&mut layout_ids, opt_level,
main_fn_symbol, procedures,
&main_fn_layout, entry_point,
); );
env.dibuilder.finalize(); env.dibuilder.finalize();
@ -256,23 +208,26 @@ pub fn gen_and_eval<'a>(
module_pass.run_on(env.module); module_pass.run_on(env.module);
// Verify the module
if let Err(errors) = env.module.verify() {
panic!("Errors defining module: {:?}", errors);
}
// Uncomment this to see the module's optimized LLVM instruction output: // Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr(); // env.module.print_to_stderr();
let lib = module_to_dylib(&env.module, &target, opt_level) // Verify the module
if let Err(errors) = env.module.verify() {
panic!(
"Errors defining module: {}\n\nUncomment things nearby to see more details.",
errors
);
}
let lib = module_to_dylib(env.module, &target, opt_level)
.expect("Error loading compiled dylib for test"); .expect("Error loading compiled dylib for test");
let res_answer = unsafe { let res_answer = unsafe {
eval::jit_to_ast( eval::jit_to_ast(
&arena, &arena,
lib, lib,
main_fn_name, main_fn_name,
&main_fn_layout, main_fn_layout,
&content, content,
&env.interns, &env.interns,
home, home,
&subs, &subs,

View file

@ -1,23 +1,23 @@
// #[macro_use] #[macro_use]
extern crate pretty_assertions; extern crate pretty_assertions;
extern crate bumpalo; extern crate bumpalo;
extern crate inlinable_string;
extern crate roc_collections; extern crate roc_collections;
extern crate roc_load; extern crate roc_load;
extern crate roc_module; extern crate roc_module;
mod helpers;
#[cfg(test)] #[cfg(test)]
mod cli_run { mod cli_run {
use crate::helpers::{ use cli_utils::helpers::{
example_file, extract_valgrind_errors, fixture_file, run_cmd, run_roc, run_with_valgrind, example_file, examples_dir, extract_valgrind_errors, fixture_file, run_cmd, run_roc,
ValgrindError, ValgrindErrorXWhat, run_with_valgrind, ValgrindError, ValgrindErrorXWhat,
}; };
use serial_test::serial; use serial_test::serial;
use std::path::Path; use std::path::Path;
#[cfg(not(debug_assertions))]
use roc_collections::all::MutMap;
#[cfg(not(target_os = "macos"))] #[cfg(not(target_os = "macos"))]
const ALLOW_VALGRIND: bool = true; const ALLOW_VALGRIND: bool = true;
@ -27,26 +27,18 @@ mod cli_run {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
const ALLOW_VALGRIND: bool = false; const ALLOW_VALGRIND: bool = false;
fn check_output( #[derive(Debug, PartialEq, Eq)]
file: &Path, struct Example<'a> {
executable_filename: &str, filename: &'a str,
flags: &[&str], executable_filename: &'a str,
expected_ending: &str, stdin: &'a [&'a str],
expected_ending: &'a str,
use_valgrind: bool, use_valgrind: bool,
) {
check_output_with_stdin(
file,
"",
executable_filename,
flags,
expected_ending,
use_valgrind,
)
} }
fn check_output_with_stdin( fn check_output_with_stdin(
file: &Path, file: &Path,
stdin_str: &str, stdin: &[&str],
executable_filename: &str, executable_filename: &str,
flags: &[&str], flags: &[&str],
expected_ending: &str, expected_ending: &str,
@ -61,7 +53,7 @@ mod cli_run {
let out = if use_valgrind && ALLOW_VALGRIND { let out = if use_valgrind && ALLOW_VALGRIND {
let (valgrind_out, raw_xml) = run_with_valgrind( let (valgrind_out, raw_xml) = run_with_valgrind(
stdin_str, stdin,
&[file.with_file_name(executable_filename).to_str().unwrap()], &[file.with_file_name(executable_filename).to_str().unwrap()],
); );
@ -103,7 +95,7 @@ mod cli_run {
} else { } else {
run_cmd( run_cmd(
file.with_file_name(executable_filename).to_str().unwrap(), file.with_file_name(executable_filename).to_str().unwrap(),
stdin_str, stdin,
&[], &[],
) )
}; };
@ -116,179 +108,344 @@ mod cli_run {
assert!(out.status.success()); assert!(out.status.success());
} }
#[test] /// This macro does two things.
#[serial(hello_world)] ///
fn run_hello_world() { /// First, it generates and runs a separate test for each of the given
check_output( /// Example expressions. Each of these should test a particular .roc file
&example_file("hello-world", "Hello.roc"), /// in the examples/ directory.
"hello-world", ///
&[], /// Second, it generates an extra test which (non-recursively) traverses the
"Hello, World!!!!!!!!!!!!!\n", /// examples/ directory and verifies that each of the .roc files in there
true, /// has had a corresponding test generated in the previous step. This test
); /// will fail if we ever add a new .roc file to examples/ and forget to
/// add a test for it here!
macro_rules! examples {
($($test_name:ident:$name:expr => $example:expr,)+) => {
$(
#[test]
fn $test_name() {
let dir_name = $name;
let example = $example;
let file_name = example_file(dir_name, example.filename);
// Check with and without optimizations
check_output_with_stdin(
&file_name,
example.stdin,
example.executable_filename,
&[],
example.expected_ending,
example.use_valgrind,
);
check_output_with_stdin(
&file_name,
example.stdin,
example.executable_filename,
&["--optimize"],
example.expected_ending,
example.use_valgrind,
);
}
)*
#[test]
#[cfg(not(debug_assertions))]
fn all_examples_have_tests() {
let mut all_examples: MutMap<&str, Example<'_>> = MutMap::default();
$(
all_examples.insert($name, $example);
)*
check_for_tests("../examples", &mut all_examples);
}
}
} }
#[test] // examples! macro format:
#[serial(hello_world)] //
fn run_hello_world_optimized() { // "name-of-subdirectory-inside-examples-dir" => [
check_output( // test_name_1: Example {
&example_file("hello-world", "Hello.roc"), // ...
"hello-world", // },
&[], // test_name_2: Example {
"Hello, World!!!!!!!!!!!!!\n", // ...
true, // },
); // ]
examples! {
hello_world:"hello-world" => Example {
filename: "Hello.roc",
executable_filename: "hello-world",
stdin: &[],
expected_ending:"Hello, World!\n",
use_valgrind: true,
},
hello_zig:"hello-zig" => Example {
filename: "Hello.roc",
executable_filename: "hello-world",
stdin: &[],
expected_ending:"Hello, World!\n",
use_valgrind: true,
},
hello_rust:"hello-rust" => Example {
filename: "Hello.roc",
executable_filename: "hello-world",
stdin: &[],
expected_ending:"Hello, World!\n",
use_valgrind: true,
},
quicksort:"quicksort" => Example {
filename: "Quicksort.roc",
executable_filename: "quicksort",
stdin: &[],
expected_ending: "[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n",
use_valgrind: true,
},
// shared_quicksort:"shared-quicksort" => Example {
// filename: "Quicksort.roc",
// executable_filename: "quicksort",
// stdin: &[],
// expected_ending: "[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n",
// use_valgrind: true,
// },
effect:"effect" => Example {
filename: "Main.roc",
executable_filename: "effect-example",
stdin: &["hi there!"],
expected_ending: "hi there!\n",
use_valgrind: true,
},
// tea:"tea" => Example {
// filename: "Main.roc",
// executable_filename: "tea-example",
// stdin: &[],
// expected_ending: "",
// use_valgrind: true,
// },
// cli:"cli" => Example {
// filename: "Echo.roc",
// executable_filename: "echo",
// stdin: &["Giovanni\n", "Giorgio\n"],
// expected_ending: "Giovanni Giorgio!\n",
// use_valgrind: true,
// },
// custom_malloc:"custom-malloc" => Example {
// filename: "Main.roc",
// executable_filename: "custom-malloc-example",
// stdin: &[],
// expected_ending: "ms!\nThe list was small!\n",
// use_valgrind: true,
// },
// task:"task" => Example {
// filename: "Main.roc",
// executable_filename: "task-example",
// stdin: &[],
// expected_ending: "successfully wrote to file\n",
// use_valgrind: true,
// },
} }
#[test] macro_rules! benchmarks {
#[serial(quicksort)] ($($test_name:ident => $benchmark:expr,)+) => {
fn run_quicksort_not_optimized() { $(
check_output( #[test]
&example_file("quicksort", "Quicksort.roc"), #[cfg_attr(not(debug_assertions), serial(benchmark))]
"quicksort", fn $test_name() {
&[], let benchmark = $benchmark;
"[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n", let file_name = examples_dir("benchmarks").join(benchmark.filename);
true,
); // TODO fix QuicksortApp and RBTreeCk and then remove this!
match benchmark.filename {
"QuicksortApp.roc" | "RBTreeCk.roc" => {
eprintln!("WARNING: skipping testing benchmark {} because the test is broken right now!", benchmark.filename);
return;
}
_ => {}
}
// Check with and without optimizations
check_output_with_stdin(
&file_name,
benchmark.stdin,
benchmark.executable_filename,
&[],
benchmark.expected_ending,
benchmark.use_valgrind,
);
check_output_with_stdin(
&file_name,
benchmark.stdin,
benchmark.executable_filename,
&["--optimize"],
benchmark.expected_ending,
benchmark.use_valgrind,
);
}
)*
#[test]
#[cfg(not(debug_assertions))]
fn all_benchmarks_have_tests() {
let mut all_benchmarks: MutMap<&str, Example<'_>> = MutMap::default();
$(
let benchmark = $benchmark;
all_benchmarks.insert(benchmark.filename, benchmark);
)*
check_for_benchmarks("../examples/benchmarks", &mut all_benchmarks);
}
}
} }
#[test] benchmarks! {
#[serial(quicksort)] nqueens => Example {
fn run_quicksort_optimized() { filename: "NQueens.roc",
check_output( executable_filename: "nqueens",
&example_file("quicksort", "Quicksort.roc"), stdin: &["6"],
"quicksort", expected_ending: "4\n",
&["--optimize"], use_valgrind: true,
"[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n", },
true, cfold => Example {
); filename: "CFold.roc",
executable_filename: "cfold",
stdin: &["3"],
expected_ending: "11 & 11\n",
use_valgrind: true,
},
deriv => Example {
filename: "Deriv.roc",
executable_filename: "deriv",
stdin: &["2"],
expected_ending: "1 count: 6\n2 count: 22\n",
use_valgrind: true,
},
rbtree_ck => Example {
filename: "RBTreeCk.roc",
executable_filename: "rbtree-ck",
stdin: &[],
expected_ending: "Node Black 0 {} Empty Empty\n",
use_valgrind: true,
},
rbtree_insert => Example {
filename: "RBTreeInsert.roc",
executable_filename: "rbtree-insert",
stdin: &[],
expected_ending: "Node Black 0 {} Empty Empty\n",
use_valgrind: true,
},
rbtree_del => Example {
filename: "RBTreeDel.roc",
executable_filename: "rbtree-del",
stdin: &["420"],
expected_ending: "30\n",
use_valgrind: true,
},
astar => Example {
filename: "TestAStar.roc",
executable_filename: "test-astar",
stdin: &[],
expected_ending: "True\n",
use_valgrind: false,
},
base64 => Example {
filename: "TestBase64.roc",
executable_filename: "test-base64",
stdin: &[],
expected_ending: "encoded: SGVsbG8gV29ybGQ=\ndecoded: Hello World\n",
use_valgrind: true,
},
closure => Example {
filename: "Closure.roc",
executable_filename: "closure",
stdin: &[],
expected_ending: "",
use_valgrind: true,
},
quicksort_app => Example {
filename: "QuicksortApp.roc",
executable_filename: "quicksortapp",
stdin: &[],
expected_ending: "todo put the correct quicksort answer here",
use_valgrind: true,
},
} }
#[test] #[cfg(not(debug_assertions))]
#[serial(quicksort)] fn check_for_tests(examples_dir: &str, all_examples: &mut MutMap<&str, Example<'_>>) {
fn run_quicksort_optimized_valgrind() { let entries = std::fs::read_dir(examples_dir).unwrap_or_else(|err| {
check_output( panic!(
&example_file("quicksort", "Quicksort.roc"), "Error trying to read {} as an examples directory: {}",
"quicksort", examples_dir, err
&["--optimize"], );
"[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n", });
true,
); for entry in entries {
let entry = entry.unwrap();
if entry.file_type().unwrap().is_dir() {
let example_dir_name = entry.file_name().into_string().unwrap();
// We test benchmarks separately
if example_dir_name != "benchmarks" {
all_examples.remove(example_dir_name.as_str()).unwrap_or_else(|| {
panic!("The example directory {}/{} does not have any corresponding tests in cli_run. Please add one, so if it ever stops working, we'll know about it right away!", examples_dir, example_dir_name);
});
}
}
}
assert_eq!(all_examples, &mut MutMap::default());
} }
#[test] #[cfg(not(debug_assertions))]
#[serial(nqueens)] fn check_for_benchmarks(benchmarks_dir: &str, all_benchmarks: &mut MutMap<&str, Example<'_>>) {
fn run_nqueens_not_optimized() { use std::ffi::OsStr;
check_output_with_stdin( use std::fs::File;
&example_file("benchmarks", "NQueens.roc"), use std::io::Read;
"",
"nqueens",
&[],
"4\n",
true,
);
}
#[test] let entries = std::fs::read_dir(benchmarks_dir).unwrap_or_else(|err| {
#[serial(cfold)] panic!(
fn run_cfold_not_optimized() { "Error trying to read {} as a benchmark directory: {}",
check_output( benchmarks_dir, err
&example_file("benchmarks", "CFold.roc"), );
"cfold", });
&[],
"11 & 11\n",
true,
);
}
#[test] for entry in entries {
#[serial(deriv)] let entry = entry.unwrap();
fn run_deriv_not_optimized() { let path = entry.path();
check_output(
&example_file("benchmarks", "Deriv.roc"),
"deriv",
&[],
"1 count: 6\n2 count: 22\n",
true,
);
}
#[test] if let Some("roc") = path.extension().and_then(OsStr::to_str) {
#[serial(deriv)] let benchmark_file_name = entry.file_name().into_string().unwrap();
fn run_rbtree_insert_not_optimized() {
check_output(
&example_file("benchmarks", "RBTreeInsert.roc"),
"rbtree-insert",
&[],
"Node Black 0 {} Empty Empty\n",
true,
);
}
#[test] // Verify that this is an app module by reading the first 3
#[serial(deriv)] // bytes of the file.
fn run_rbtree_delete_not_optimized() { let buf: &mut [u8] = &mut [0, 0, 0];
check_output( let mut file = File::open(path).unwrap();
&example_file("benchmarks", "RBTreeDel.roc"),
"rbtree-del",
&[],
"30\n",
true,
);
}
#[test] file.read_exact(buf).unwrap();
#[serial(astar)]
fn run_astar_optimized_1() {
check_output(
&example_file("benchmarks", "TestAStar.roc"),
"test-astar",
&[],
"True\n",
false,
);
}
#[test] // Only app modules in this directory are considered benchmarks.
#[serial(base64)] if "app".as_bytes() == buf {
fn base64() { all_benchmarks.remove(benchmark_file_name.as_str()).unwrap_or_else(|| {
check_output( panic!("The benchmark {}/{} does not have any corresponding tests in cli_run. Please add one, so if it ever stops working, we'll know about it right away!", benchmarks_dir, benchmark_file_name);
&example_file("benchmarks", "TestBase64.roc"), });
"test-base64", }
&[], }
"encoded: SGVsbG8gV29ybGQ=\ndecoded: Hello World\n", }
true,
);
}
#[test] assert_eq!(all_benchmarks, &mut MutMap::default());
#[serial(closure)]
fn closure() {
check_output(
&example_file("benchmarks", "Closure.roc"),
"closure",
&[],
"",
true,
);
} }
// #[test]
// #[serial(effect)]
// fn run_effect_unoptimized() {
// check_output(
// &example_file("effect", "Main.roc"),
// &[],
// "I am Dep2.str2\n",
// true,
// );
// }
#[test] #[test]
#[serial(multi_dep_str)] #[serial(multi_dep_str)]
fn run_multi_dep_str_unoptimized() { fn run_multi_dep_str_unoptimized() {
check_output( check_output_with_stdin(
&fixture_file("multi-dep-str", "Main.roc"), &fixture_file("multi-dep-str", "Main.roc"),
&[],
"multi-dep-str", "multi-dep-str",
&[], &[],
"I am Dep2.str2\n", "I am Dep2.str2\n",
@ -299,8 +456,9 @@ mod cli_run {
#[test] #[test]
#[serial(multi_dep_str)] #[serial(multi_dep_str)]
fn run_multi_dep_str_optimized() { fn run_multi_dep_str_optimized() {
check_output( check_output_with_stdin(
&fixture_file("multi-dep-str", "Main.roc"), &fixture_file("multi-dep-str", "Main.roc"),
&[],
"multi-dep-str", "multi-dep-str",
&["--optimize"], &["--optimize"],
"I am Dep2.str2\n", "I am Dep2.str2\n",
@ -311,8 +469,9 @@ mod cli_run {
#[test] #[test]
#[serial(multi_dep_thunk)] #[serial(multi_dep_thunk)]
fn run_multi_dep_thunk_unoptimized() { fn run_multi_dep_thunk_unoptimized() {
check_output( check_output_with_stdin(
&fixture_file("multi-dep-thunk", "Main.roc"), &fixture_file("multi-dep-thunk", "Main.roc"),
&[],
"multi-dep-thunk", "multi-dep-thunk",
&[], &[],
"I am Dep2.value2\n", "I am Dep2.value2\n",
@ -323,8 +482,9 @@ mod cli_run {
#[test] #[test]
#[serial(multi_dep_thunk)] #[serial(multi_dep_thunk)]
fn run_multi_dep_thunk_optimized() { fn run_multi_dep_thunk_optimized() {
check_output( check_output_with_stdin(
&fixture_file("multi-dep-thunk", "Main.roc"), &fixture_file("multi-dep-thunk", "Main.roc"),
&[],
"multi-dep-thunk", "multi-dep-thunk",
&["--optimize"], &["--optimize"],
"I am Dep2.value2\n", "I am Dep2.value2\n",

View file

@ -14,7 +14,6 @@ comptime {
// -fcompiler-rt in link.rs instead of doing this. Note that this // -fcompiler-rt in link.rs instead of doing this. Note that this
// workaround is present in many host.zig files, so make sure to undo // workaround is present in many host.zig files, so make sure to undo
// it everywhere! // it everywhere!
if (std.builtin.os.tag == .macos) { if (std.builtin.os.tag == .macos) {
_ = @import("compiler_rt"); _ = @import("compiler_rt");
} }
@ -25,6 +24,22 @@ const Allocator = mem.Allocator;
extern fn roc__mainForHost_1_exposed(*RocCallResult) void; extern fn roc__mainForHost_1_exposed(*RocCallResult) void;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
const RocCallResult = extern struct { flag: usize, content: RocStr }; const RocCallResult = extern struct { flag: usize, content: RocStr };
const Unit = extern struct {}; const Unit = extern struct {};
@ -44,9 +59,9 @@ pub export fn main() i32 {
roc__mainForHost_1_exposed(&callresult); roc__mainForHost_1_exposed(&callresult);
// stdout the result // stdout the result
stdout.print("{}\n", .{callresult.content.asSlice()}) catch unreachable; stdout.print("{s}\n", .{callresult.content.asSlice()}) catch unreachable;
callresult.content.deinit(std.heap.c_allocator); callresult.content.deinit();
// end time // end time
var ts2: std.os.timespec = undefined; var ts2: std.os.timespec = undefined;

View file

@ -24,6 +24,22 @@ const Allocator = mem.Allocator;
extern fn roc__mainForHost_1_exposed(*RocCallResult) void; extern fn roc__mainForHost_1_exposed(*RocCallResult) void;
extern fn malloc(size: usize) callconv(.C) ?*c_void;
extern fn realloc(c_ptr: [*]align(@alignOf(u128)) u8, size: usize) callconv(.C) ?*c_void;
extern fn free(c_ptr: [*]align(@alignOf(u128)) u8) callconv(.C) void;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
return malloc(size);
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
return realloc(@alignCast(16, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(16, @ptrCast([*]u8, c_ptr)));
}
const RocCallResult = extern struct { flag: usize, content: RocStr }; const RocCallResult = extern struct { flag: usize, content: RocStr };
const Unit = extern struct {}; const Unit = extern struct {};
@ -43,9 +59,9 @@ pub export fn main() i32 {
roc__mainForHost_1_exposed(&callresult); roc__mainForHost_1_exposed(&callresult);
// stdout the result // stdout the result
stdout.print("{}\n", .{callresult.content.asSlice()}) catch unreachable; stdout.print("{s}\n", .{callresult.content.asSlice()}) catch unreachable;
callresult.content.deinit(std.heap.c_allocator); callresult.content.deinit();
// end time // end time
var ts2: std.os.timespec = undefined; var ts2: std.os.timespec = undefined;

View file

@ -4,12 +4,10 @@ extern crate pretty_assertions;
#[macro_use] #[macro_use]
extern crate indoc; extern crate indoc;
mod helpers;
#[cfg(test)] #[cfg(test)]
mod repl_eval { mod repl_eval {
use crate::helpers; use cli_utils::helpers;
use roc_gen::run_roc::RocCallResult; use roc_gen_llvm::run_roc::RocCallResult;
#[test] #[test]
fn check_discriminant_size() { fn check_discriminant_size() {
@ -18,7 +16,8 @@ mod repl_eval {
let value: i64 = 1234; let value: i64 = 1234;
assert_eq!( assert_eq!(
std::mem::size_of_val(&RocCallResult::Success(value)), std::mem::size_of_val(&RocCallResult::Success(value)),
roc_gen::run_roc::ROC_CALL_RESULT_DISCRIMINANT_SIZE + std::mem::size_of_val(&value) roc_gen_llvm::run_roc::ROC_CALL_RESULT_DISCRIMINANT_SIZE
+ std::mem::size_of_val(&value)
) )
} }
@ -500,15 +499,11 @@ mod repl_eval {
#[test] #[test]
fn identity_lambda() { fn identity_lambda() {
// Even though this gets unwrapped at runtime, the repl should still
// report it as a record
expect_success("\\x -> x", "<function> : a -> a"); expect_success("\\x -> x", "<function> : a -> a");
} }
#[test] #[test]
fn stdlib_function() { fn stdlib_function() {
// Even though this gets unwrapped at runtime, the repl should still
// report it as a record
expect_success("Num.abs", "<function> : Num a -> Num a"); expect_success("Num.abs", "<function> : Num a -> Num a");
} }

View file

@ -19,7 +19,7 @@ roc_unify = { path = "../unify" }
roc_solve = { path = "../solve" } roc_solve = { path = "../solve" }
roc_mono = { path = "../mono" } roc_mono = { path = "../mono" }
roc_load = { path = "../load" } roc_load = { path = "../load" }
roc_gen = { path = "../gen" } roc_gen_llvm = { path = "../gen_llvm", optional = true }
roc_reporting = { path = "../reporting" } roc_reporting = { path = "../reporting" }
im = "14" # im and im-rc should always have the same version! im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version! im-rc = "14" # im and im-rc should always have the same version!
@ -28,24 +28,7 @@ inlinable_string = "0.1.0"
libloading = "0.6" libloading = "0.6"
tempfile = "3.1.0" tempfile = "3.1.0"
serde_json = "1.0" serde_json = "1.0"
# NOTE: rtfeldman/inkwell is a fork of TheDan64/inkwell which does not change anything. inkwell = { path = "../../vendor/inkwell", optional = true }
#
# The reason for this fork is that the way Inkwell is designed, you have to use
# a particular branch (e.g. "llvm8-0") in Cargo.toml. That would be fine, except that
# breaking changes get pushed directly to that branch, which breaks our build
# without warning.
#
# We tried referencing a specific rev on TheDan64/inkwell directly (instead of branch),
# but although that worked locally, it did not work on GitHub Actions. (After a few
# hours of investigation, gave up trying to figure out why.) So this is the workaround:
# having an immutable tag on the rtfeldman/inkwell fork which points to
# a particular "release" of Inkwell.
#
# When we want to update Inkwell, we can sync up rtfeldman/inkwell to the latest
# commit of TheDan64/inkwell, push a new tag which points to the latest commit,
# change the tag value in this Cargo.toml to point to that tag, and `cargo update`.
# This way, GitHub Actions works and nobody's builds get broken.
inkwell = { git = "https://github.com/rtfeldman/inkwell", tag = "llvm10-0.release4", features = [ "llvm10-0" ] }
target-lexicon = "0.10" target-lexicon = "0.10"
[dev-dependencies] [dev-dependencies]
@ -56,6 +39,10 @@ quickcheck = "0.8"
quickcheck_macros = "0.8" quickcheck_macros = "0.8"
[features] [features]
default = ["llvm"]
target-arm = [] target-arm = []
target-aarch64 = [] target-aarch64 = []
target-webassembly = [] target-webassembly = []
# This is a separate feature because when we generate docs on Netlify,
# it doesn't have LLVM installed. (Also, it doesn't need to do code gen.)
llvm = ["inkwell", "roc_gen_llvm"]

View file

@ -1,4 +1,4 @@
#![warn(clippy::all, clippy::dbg_macro)] #![warn(clippy::dbg_macro)]
// See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check. // See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]
pub mod link; pub mod link;

View file

@ -1,21 +1,21 @@
use crate::target;
use crate::target::arch_str; use crate::target::arch_str;
use inkwell::module::Module; #[cfg(feature = "llvm")]
use inkwell::targets::{CodeModel, FileType, RelocMode};
use libloading::{Error, Library}; use libloading::{Error, Library};
use roc_gen::llvm::build::OptLevel; #[cfg(feature = "llvm")]
use roc_mono::ir::OptLevel;
use std::collections::HashMap; use std::collections::HashMap;
use std::env; use std::env;
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::{Child, Command, Output}; use std::process::{Child, Command, Output};
use target_lexicon::{Architecture, OperatingSystem, Triple}; use target_lexicon::{Architecture, OperatingSystem, Triple};
use tempfile::tempdir;
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum LinkType { pub enum LinkType {
Executable, // These numbers correspond to the --lib flag; if it's present
Dylib, // (e.g. is_present returns `1 as bool`), this will be 1 as well.
Executable = 0,
Dylib = 1,
} }
/// input_paths can include the host as well as the app. e.g. &["host.o", "roc_app.o"] /// input_paths can include the host as well as the app. e.g. &["host.o", "roc_app.o"]
@ -56,7 +56,7 @@ fn find_zig_str_path() -> PathBuf {
} }
#[cfg(not(target_os = "macos"))] #[cfg(not(target_os = "macos"))]
fn build_zig_host( pub fn build_zig_host(
env_path: &str, env_path: &str,
env_home: &str, env_home: &str,
emit_bin: &str, emit_bin: &str,
@ -86,7 +86,7 @@ fn build_zig_host(
} }
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn build_zig_host( pub fn build_zig_host(
env_path: &str, env_path: &str,
env_home: &str, env_home: &str,
emit_bin: &str, emit_bin: &str,
@ -140,7 +140,7 @@ fn build_zig_host(
.args(&[ .args(&[
"build-obj", "build-obj",
zig_host_src, zig_host_src,
&emit_bin, emit_bin,
"--pkg-begin", "--pkg-begin",
"str", "str",
zig_str_path, zig_str_path,
@ -291,38 +291,60 @@ pub fn rebuild_host(host_input_path: &Path) {
} }
} }
fn nixos_path() -> String {
env::var("NIXOS_GLIBC_PATH").unwrap_or_else(|_| {
panic!(
"We couldn't find glibc! We tried looking for NIXOS_GLIBC_PATH
to find it via Nix, but that didn't work either. Please file a bug report.
This will only be an issue until we implement surgical linking.",
)
})
}
fn library_path<const N: usize>(segments: [&str; N]) -> Option<PathBuf> {
let mut guess_path = PathBuf::new();
for s in segments {
guess_path.push(s);
}
if guess_path.exists() {
Some(guess_path)
} else {
None
}
}
fn link_linux( fn link_linux(
target: &Triple, target: &Triple,
output_path: PathBuf, output_path: PathBuf,
input_paths: &[&str], input_paths: &[&str],
link_type: LinkType, link_type: LinkType,
) -> io::Result<(Child, PathBuf)> { ) -> io::Result<(Child, PathBuf)> {
let usr_lib_path = Path::new("/usr/lib").to_path_buf(); let architecture = format!("{}-linux-gnu", target.architecture);
let usr_lib_gnu_path = usr_lib_path.join(format!("{}-linux-gnu", target.architecture));
let lib_gnu_path = Path::new("/lib/").join(format!("{}-linux-gnu", target.architecture));
let libcrt_path = if usr_lib_gnu_path.exists() { let libcrt_path = library_path(["/usr", "lib", &architecture])
&usr_lib_gnu_path .or_else(|| library_path(["/usr", "lib"]))
} else { .or_else(|| library_path([&nixos_path()]))
&usr_lib_path .unwrap();
};
let libgcc_name = "libgcc_s.so.1"; let libgcc_name = "libgcc_s.so.1";
let libgcc_path = if lib_gnu_path.join(libgcc_name).exists() { let libgcc_path = library_path(["/lib", &architecture, libgcc_name])
lib_gnu_path.join(libgcc_name) .or_else(|| library_path(["/usr", "lib", &architecture, libgcc_name]))
} else if usr_lib_gnu_path.join(libgcc_name).exists() { .or_else(|| library_path(["/usr", "lib", libgcc_name]))
usr_lib_gnu_path.join(libgcc_name) .or_else(|| library_path([&nixos_path(), libgcc_name]))
} else { .unwrap();
usr_lib_path.join(libgcc_name)
};
let ld_linux = match target.architecture { let ld_linux = match target.architecture {
Architecture::X86_64 => "/lib64/ld-linux-x86-64.so.2", Architecture::X86_64 => library_path(["/lib64", "ld-linux-x86-64.so.2"])
Architecture::Aarch64(_) => "/lib/ld-linux-aarch64.so.1", .or_else(|| library_path([&nixos_path(), "ld-linux-x86-64.so.2"])),
Architecture::Aarch64(_) => library_path(["/lib", "ld-linux-aarch64.so.1"]),
_ => panic!( _ => panic!(
"TODO gracefully handle unsupported linux architecture: {:?}", "TODO gracefully handle unsupported linux architecture: {:?}",
target.architecture target.architecture
), ),
}; };
let ld_linux = ld_linux.unwrap();
let ld_linux = ld_linux.to_str().unwrap();
let mut soname; let mut soname;
let (base_args, output_path) = match link_type { let (base_args, output_path) = match link_type {
@ -333,7 +355,7 @@ fn link_linux(
output_path, output_path,
), ),
LinkType::Dylib => { LinkType::Dylib => {
// TODO: do we acually need the version number on this? // TODO: do we actually need the version number on this?
// Do we even need the "-soname" argument? // Do we even need the "-soname" argument?
// //
// See https://software.intel.com/content/www/us/en/develop/articles/create-a-unix-including-linux-shared-library.html // See https://software.intel.com/content/www/us/en/develop/articles/create-a-unix-including-linux-shared-library.html
@ -360,6 +382,9 @@ fn link_linux(
}; };
let env_path = env::var("PATH").unwrap_or_else(|_| "".to_string()); let env_path = env::var("PATH").unwrap_or_else(|_| "".to_string());
init_arch(target);
// NOTE: order of arguments to `ld` matters here! // NOTE: order of arguments to `ld` matters here!
// The `-l` flags should go after the `.o` arguments // The `-l` flags should go after the `.o` arguments
Ok(( Ok((
@ -429,7 +454,7 @@ fn link_macos(
// This path only exists on macOS Big Sur, and it causes ld errors // This path only exists on macOS Big Sur, and it causes ld errors
// on Catalina if it's specified with -L, so we replace it with a // on Catalina if it's specified with -L, so we replace it with a
// redundant -lSystem if the directory isn't there. // redundant -lSystem if the directory isn't there.
let big_sur_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/lib"; let big_sur_path = "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib";
let big_sur_fix = if Path::new(big_sur_path).exists() { let big_sur_fix = if Path::new(big_sur_path).exists() {
format!("-L{}", big_sur_path) format!("-L{}", big_sur_path)
} else { } else {
@ -466,8 +491,8 @@ fn link_macos(
"-lc++", "-lc++",
// "-lc++abi", // "-lc++abi",
// "-lunwind", // TODO will eventually need this, see https://github.com/rtfeldman/roc/pull/554#discussion_r496370840 // "-lunwind", // TODO will eventually need this, see https://github.com/rtfeldman/roc/pull/554#discussion_r496370840
"-framework", // "-framework", // Uncomment this line & the following ro run the `rand` crate in examples/cli
"Security", // This "-framework Security" arg is needed for the `rand` crate in examples/cli // "Security",
// Output // Output
"-o", "-o",
output_path.to_str().unwrap(), // app output_path.to_str().unwrap(), // app
@ -477,22 +502,27 @@ fn link_macos(
)) ))
} }
#[cfg(feature = "llvm")]
pub fn module_to_dylib( pub fn module_to_dylib(
module: &Module, module: &inkwell::module::Module,
target: &Triple, target: &Triple,
opt_level: OptLevel, opt_level: OptLevel,
) -> Result<Library, Error> { ) -> Result<Library, Error> {
let dir = tempdir().unwrap(); use crate::target::{self, convert_opt_level};
use inkwell::targets::{CodeModel, FileType, RelocMode};
let dir = tempfile::tempdir().unwrap();
let filename = PathBuf::from("Test.roc"); let filename = PathBuf::from("Test.roc");
let file_path = dir.path().join(filename); let file_path = dir.path().join(filename);
let mut app_o_file = file_path; let mut app_o_file = file_path;
app_o_file.set_file_name("app.o"); app_o_file.set_file_name("app.o");
// Emit the .o file using position-indepedent code (PIC) - needed for dylibs // Emit the .o file using position-independent code (PIC) - needed for dylibs
let reloc = RelocMode::PIC; let reloc = RelocMode::PIC;
let model = CodeModel::Default; let model = CodeModel::Default;
let target_machine = target::target_machine(target, opt_level.into(), reloc, model).unwrap(); let target_machine =
target::target_machine(target, convert_opt_level(opt_level), reloc, model).unwrap();
target_machine target_machine
.write_to_file(module, FileType::Object, &app_o_file) .write_to_file(module, FileType::Object, &app_o_file)
@ -529,3 +559,13 @@ fn validate_output(file_name: &str, cmd_name: &str, output: Output) {
} }
} }
} }
// Initialize LLVM's backend for the given target architecture before linking.
// Delegates to `crate::target::init_arch`, which calls the matching
// `Target::initialize_*` function.
#[cfg(feature = "llvm")]
fn init_arch(target: &Triple) {
    crate::target::init_arch(target);
}

// Fallback when the crate is built without the `llvm` feature: reaching this
// code path requires LLVM, so it is an unconditional panic rather than a no-op.
#[cfg(not(feature = "llvm"))]
fn init_arch(_target: &Triple) {
    panic!("Tried to initialize LLVM when crate was not built with `feature = \"llvm\"` enabled");
}

View file

@ -1,14 +1,14 @@
use crate::target; #[cfg(feature = "llvm")]
use bumpalo::Bump; use roc_gen_llvm::llvm::build::module_from_builtins;
use inkwell::context::Context; #[cfg(feature = "llvm")]
use inkwell::targets::{CodeModel, FileType, RelocMode}; pub use roc_gen_llvm::llvm::build::FunctionIterator;
use inkwell::values::FunctionValue; #[cfg(feature = "llvm")]
use roc_gen::llvm::build::{build_proc, build_proc_header, module_from_builtins, OptLevel, Scope};
use roc_load::file::MonomorphizedModule; use roc_load::file::MonomorphizedModule;
use roc_mono::layout::LayoutIds; #[cfg(feature = "llvm")]
use roc_mono::ir::OptLevel;
#[cfg(feature = "llvm")]
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::time::{Duration, SystemTime}; use std::time::Duration;
use target_lexicon::Triple;
#[derive(Debug, Clone, Copy, Default)] #[derive(Debug, Clone, Copy, Default)]
pub struct CodeGenTiming { pub struct CodeGenTiming {
@ -16,19 +16,31 @@ pub struct CodeGenTiming {
pub emit_o_file: Duration, pub emit_o_file: Duration,
} }
// TODO: If modules besides this one start needing to know which version of
// llvm we're using, consider moving me somewhere else.
const LLVM_VERSION: &str = "12";
// TODO how should imported modules factor into this? What if those use builtins too? // TODO how should imported modules factor into this? What if those use builtins too?
// TODO this should probably use more helper functions // TODO this should probably use more helper functions
// TODO make this polymorphic in the llvm functions so it can be reused for another backend. // TODO make this polymorphic in the llvm functions so it can be reused for another backend.
#[cfg(feature = "llvm")]
#[allow(clippy::cognitive_complexity)] #[allow(clippy::cognitive_complexity)]
pub fn gen_from_mono_module( pub fn gen_from_mono_module(
arena: &Bump, arena: &bumpalo::Bump,
mut loaded: MonomorphizedModule, mut loaded: MonomorphizedModule,
roc_file_path: &Path, roc_file_path: &Path,
target: Triple, target: target_lexicon::Triple,
app_o_file: &Path, app_o_file: &Path,
opt_level: OptLevel, opt_level: OptLevel,
emit_debug_info: bool, emit_debug_info: bool,
) -> CodeGenTiming { ) -> CodeGenTiming {
use crate::target::{self, convert_opt_level};
use inkwell::attributes::{Attribute, AttributeLoc};
use inkwell::context::Context;
use inkwell::module::Linkage;
use inkwell::targets::{CodeModel, FileType, RelocMode};
use std::time::SystemTime;
use roc_reporting::report::{ use roc_reporting::report::{
can_problem, mono_problem, type_problem, RocDocAllocator, DEFAULT_PALETTE, can_problem, mono_problem, type_problem, RocDocAllocator, DEFAULT_PALETTE,
}; };
@ -88,9 +100,6 @@ pub fn gen_from_mono_module(
// module.strip_debug_info(); // module.strip_debug_info();
// mark our zig-defined builtins as internal // mark our zig-defined builtins as internal
use inkwell::attributes::{Attribute, AttributeLoc};
use inkwell::module::Linkage;
let app_ll_file = { let app_ll_file = {
let mut temp = PathBuf::from(roc_file_path); let mut temp = PathBuf::from(roc_file_path);
temp.set_extension("ll"); temp.set_extension("ll");
@ -100,31 +109,34 @@ pub fn gen_from_mono_module(
let kind_id = Attribute::get_named_enum_kind_id("alwaysinline"); let kind_id = Attribute::get_named_enum_kind_id("alwaysinline");
debug_assert!(kind_id > 0); debug_assert!(kind_id > 0);
let attr = context.create_enum_attribute(kind_id, 1); let enum_attr = context.create_enum_attribute(kind_id, 1);
for function in FunctionIterator::from_module(module) { for function in FunctionIterator::from_module(module) {
let name = function.get_name().to_str().unwrap(); let name = function.get_name().to_str().unwrap();
// mark our zig-defined builtins as internal
if name.starts_with("roc_builtins") { if name.starts_with("roc_builtins") {
function.set_linkage(Linkage::Internal); function.set_linkage(Linkage::Internal);
} }
if name.starts_with("roc_builtins.dict") || name.starts_with("dict.RocDict") { if name.starts_with("roc_builtins.dict")
function.add_attribute(AttributeLoc::Function, attr); || name.starts_with("roc_builtins.list")
} || name.starts_with("roc_builtins.dec")
|| name.starts_with("list.RocList")
if name.starts_with("roc_builtins.list") || name.starts_with("list.RocList") { || name.starts_with("dict.RocDict")
function.add_attribute(AttributeLoc::Function, attr); {
function.add_attribute(AttributeLoc::Function, enum_attr);
} }
} }
let builder = context.create_builder(); let builder = context.create_builder();
let (dibuilder, compile_unit) = roc_gen::llvm::build::Env::new_debug_info(module); let (dibuilder, compile_unit) = roc_gen_llvm::llvm::build::Env::new_debug_info(module);
let (mpm, fpm) = roc_gen::llvm::build::construct_optimization_passes(module, opt_level); let (mpm, _fpm) = roc_gen_llvm::llvm::build::construct_optimization_passes(module, opt_level);
// Compile and add all the Procs before adding main // Compile and add all the Procs before adding main
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32; let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
let env = roc_gen::llvm::build::Env { let env = roc_gen_llvm::llvm::build::Env {
arena: &arena, arena,
builder: &builder, builder: &builder,
dibuilder: &dibuilder, dibuilder: &dibuilder,
compile_unit: &compile_unit, compile_unit: &compile_unit,
@ -136,55 +148,13 @@ pub fn gen_from_mono_module(
exposed_to_host: loaded.exposed_to_host.keys().copied().collect(), exposed_to_host: loaded.exposed_to_host.keys().copied().collect(),
}; };
// Populate Procs further and get the low-level Expr from the canonical Expr roc_gen_llvm::llvm::build::build_procedures(
let mut headers = Vec::with_capacity(loaded.procedures.len()); &env,
opt_level,
// Add all the Proc headers to the module. loaded.procedures,
// We have to do this in a separate pass first, loaded.entry_point,
// because their bodies may reference each other. Some(&app_ll_file),
let mut layout_ids = LayoutIds::default(); );
let mut scope = Scope::default();
for ((symbol, layout), proc) in loaded.procedures {
let fn_val = build_proc_header(&env, &mut layout_ids, symbol, &layout, &proc);
if proc.args.is_empty() {
// this is a 0-argument thunk, i.e. a top-level constant definition
// it must be in-scope everywhere in the module!
scope.insert_top_level_thunk(symbol, layout, fn_val);
}
headers.push((proc, fn_val));
}
// Build each proc using its header info.
for (proc, fn_val) in headers {
// NOTE: This is here to be uncommented in case verification fails.
// (This approach means we don't have to defensively clone name here.)
//
// println!("\n\nBuilding and then verifying function {:?}\n\n", proc);
build_proc(&env, &mut layout_ids, scope.clone(), proc, fn_val);
// call finalize() before any code generation/verification
env.dibuilder.finalize();
if fn_val.verify(true) {
fpm.run_on(&fn_val);
} else {
fn_val.print_to_stderr();
// write the ll code to a file, so we can modify it
env.module.print_to_file(&app_ll_file).unwrap();
// env.module.print_to_stderr();
// NOTE: If this fails, uncomment the above println to debug.
panic!(
r"Non-main function {:?} failed LLVM verification. I wrote the full LLVM IR to {:?}",
fn_val.get_name(),
app_ll_file,
);
}
}
env.dibuilder.finalize(); env.dibuilder.finalize();
@ -199,8 +169,9 @@ pub fn gen_from_mono_module(
env.module.print_to_file(&app_ll_file).unwrap(); env.module.print_to_file(&app_ll_file).unwrap();
panic!( panic!(
"😱 LLVM errors when defining module; I wrote the full LLVM IR to {:?}\n\n {:?}", "😱 LLVM errors when defining module; I wrote the full LLVM IR to {:?}\n\n {}",
app_ll_file, errors, app_ll_file,
errors.to_string(),
); );
} }
@ -228,8 +199,7 @@ pub fn gen_from_mono_module(
// run the debugir https://github.com/vaivaswatha/debugir tool // run the debugir https://github.com/vaivaswatha/debugir tool
match Command::new("debugir") match Command::new("debugir")
.env_clear() .args(&["-instnamer", app_ll_file.to_str().unwrap()])
.args(&[app_ll_file.to_str().unwrap()])
.output() .output()
{ {
Ok(_) => {} Ok(_) => {}
@ -245,8 +215,7 @@ pub fn gen_from_mono_module(
} }
// assemble the .ll into a .bc // assemble the .ll into a .bc
let _ = Command::new("llvm-as-10") let _ = Command::new("llvm-as")
.env_clear()
.args(&[ .args(&[
app_ll_dbg_file.to_str().unwrap(), app_ll_dbg_file.to_str().unwrap(),
"-o", "-o",
@ -255,28 +224,36 @@ pub fn gen_from_mono_module(
.output() .output()
.unwrap(); .unwrap();
let llc_args = &[
"-filetype=obj",
app_bc_file.to_str().unwrap(),
"-o",
app_o_file.to_str().unwrap(),
];
// write the .o file. Note that this builds the .o for the local machine, // write the .o file. Note that this builds the .o for the local machine,
// and ignores the `target_machine` entirely. // and ignores the `target_machine` entirely.
let _ = Command::new("llc-10") //
.env_clear() // different systems name this executable differently, so we shotgun for
.args(&[ // the most common ones and then give up.
"-filetype=obj", let _: Result<std::process::Output, std::io::Error> =
app_bc_file.to_str().unwrap(), Command::new(format!("llc-{}", LLVM_VERSION))
"-o", .args(llc_args)
app_o_file.to_str().unwrap(), .output()
]) .or_else(|_| Command::new("llc").args(llc_args).output())
.output() .map_err(|_| {
.unwrap(); panic!("We couldn't find llc-{} on your machine!", LLVM_VERSION);
});
} else { } else {
// Emit the .o file // Emit the .o file
let reloc = RelocMode::Default; let reloc = RelocMode::Default;
let model = CodeModel::Default; let model = CodeModel::Default;
let target_machine = let target_machine =
target::target_machine(&target, opt_level.into(), reloc, model).unwrap(); target::target_machine(&target, convert_opt_level(opt_level), reloc, model).unwrap();
target_machine target_machine
.write_to_file(&env.module, FileType::Object, &app_o_file) .write_to_file(env.module, FileType::Object, app_o_file)
.expect("Writing .o file failed"); .expect("Writing .o file failed");
} }
@ -287,30 +264,3 @@ pub fn gen_from_mono_module(
emit_o_file, emit_o_file,
} }
} }
/// Iterator over every function defined in an LLVM module, in module order.
pub struct FunctionIterator<'ctx> {
    // Cursor into LLVM's intrusive function list; `None` once exhausted.
    next: Option<FunctionValue<'ctx>>,
}

impl<'ctx> FunctionIterator<'ctx> {
    /// Begin iterating from the first function in `module`.
    pub fn from_module(module: &inkwell::module::Module<'ctx>) -> Self {
        Self {
            next: module.get_first_function(),
        }
    }
}

impl<'ctx> Iterator for FunctionIterator<'ctx> {
    type Item = FunctionValue<'ctx>;

    fn next(&mut self) -> Option<Self::Item> {
        // Yield the current function and advance the cursor via LLVM's
        // linked list of module functions.
        match self.next {
            Some(function) => {
                self.next = function.get_next_function();
                Some(function)
            }
            None => None,
        }
    }
}

View file

@ -1,7 +1,10 @@
use inkwell::targets::{ #[cfg(feature = "llvm")]
CodeModel, InitializationConfig, RelocMode, Target, TargetMachine, TargetTriple, use inkwell::{
targets::{CodeModel, InitializationConfig, RelocMode, Target, TargetMachine, TargetTriple},
OptimizationLevel,
}; };
use inkwell::OptimizationLevel; #[cfg(feature = "llvm")]
use roc_mono::ir::OptLevel;
use target_lexicon::{Architecture, OperatingSystem, Triple}; use target_lexicon::{Architecture, OperatingSystem, Triple};
pub fn target_triple_str(target: &Triple) -> &'static str { pub fn target_triple_str(target: &Triple) -> &'static str {
@ -28,36 +31,20 @@ pub fn target_triple_str(target: &Triple) -> &'static str {
} }
} }
/// NOTE: arch_str is *not* the same as the beginning of the magic target triple #[cfg(feature = "llvm")]
/// string! For example, if it's "x86-64" here, the magic target triple string pub fn init_arch(target: &Triple) {
/// will begin with "x86_64" (with an underscore) instead.
pub fn arch_str(target: &Triple) -> &'static str {
// Best guide I've found on how to determine these magic strings:
//
// https://stackoverflow.com/questions/15036909/clang-how-to-list-supported-target-architectures
match target.architecture { match target.architecture {
Architecture::X86_64 => { Architecture::X86_64 => {
Target::initialize_x86(&InitializationConfig::default()); Target::initialize_x86(&InitializationConfig::default());
"x86-64"
} }
Architecture::Aarch64(_) if cfg!(feature = "target-aarch64") => { Architecture::Aarch64(_) if cfg!(feature = "target-aarch64") => {
Target::initialize_aarch64(&InitializationConfig::default()); Target::initialize_aarch64(&InitializationConfig::default());
"aarch64"
} }
Architecture::Arm(_) if cfg!(feature = "target-arm") => { Architecture::Arm(_) if cfg!(feature = "target-arm") => {
// NOTE: why not enable arm and wasm by default?
//
// We had some trouble getting them to link properly. This may be resolved in the
// future, or maybe it was just some weird configuration on one machine.
Target::initialize_arm(&InitializationConfig::default()); Target::initialize_arm(&InitializationConfig::default());
"arm"
} }
Architecture::Wasm32 if cfg!(feature = "target-webassembly") => { Architecture::Wasm32 if cfg!(feature = "target-webassembly") => {
Target::initialize_webassembly(&InitializationConfig::default()); Target::initialize_webassembly(&InitializationConfig::default());
"wasm32"
} }
_ => panic!( _ => panic!(
"TODO gracefully handle unsupported target architecture: {:?}", "TODO gracefully handle unsupported target architecture: {:?}",
@ -66,6 +53,26 @@ pub fn arch_str(target: &Triple) -> &'static str {
} }
} }
/// NOTE: arch_str is *not* the same as the beginning of the magic target triple
/// string! For example, if it's "x86-64" here, the magic target triple string
/// will begin with "x86_64" (with an underscore) instead.
pub fn arch_str(target: &Triple) -> &'static str {
    // Best guide I've found on how to determine these magic strings:
    //
    // https://stackoverflow.com/questions/15036909/clang-how-to-list-supported-target-architectures
    if matches!(target.architecture, Architecture::X86_64) {
        return "x86-64";
    }
    if matches!(target.architecture, Architecture::Aarch64(_)) && cfg!(feature = "target-aarch64") {
        return "aarch64";
    }
    if matches!(target.architecture, Architecture::Arm(_)) && cfg!(feature = "target-arm") {
        return "arm";
    }
    if matches!(target.architecture, Architecture::Wasm32) && cfg!(feature = "target-webassembly") {
        return "wasm32";
    }

    panic!(
        "TODO gracefully handle unsupported target architecture: {:?}",
        target.architecture
    )
}
#[cfg(feature = "llvm")]
pub fn target_machine( pub fn target_machine(
target: &Triple, target: &Triple,
opt: OptimizationLevel, opt: OptimizationLevel,
@ -74,6 +81,8 @@ pub fn target_machine(
) -> Option<TargetMachine> { ) -> Option<TargetMachine> {
let arch = arch_str(target); let arch = arch_str(target);
init_arch(target);
Target::from_name(arch).unwrap().create_target_machine( Target::from_name(arch).unwrap().create_target_machine(
&TargetTriple::create(target_triple_str(target)), &TargetTriple::create(target_triple_str(target)),
"generic", "generic",
@ -83,3 +92,11 @@ pub fn target_machine(
model, model,
) )
} }
/// Translate Roc's optimization setting into inkwell's LLVM optimization level.
#[cfg(feature = "llvm")]
pub fn convert_opt_level(level: OptLevel) -> OptimizationLevel {
    // Exhaustive match: adding a new OptLevel variant forces an update here.
    match level {
        OptLevel::Optimize => OptimizationLevel::Aggressive,
        OptLevel::Normal => OptimizationLevel::None,
    }
}

View file

@ -9,15 +9,15 @@ Towards the bottom of `symbol.rs` there is a `define_builtins!` macro being used
Some of these have `#` inside their name (`first#list`, `#lt` ..). This is a trick we are doing to hide implementation details from Roc programmers. To a Roc programmer, a name with `#` in it is invalid, because `#` means everything after it is parsed to a comment. We are constructing these functions manually, so we are circumventing the parsing step and dont have such restrictions. We get to make functions and values with `#` which as a consequence are not accessible to Roc programmers. Roc programmers simply cannot reference them. Some of these have `#` inside their name (`first#list`, `#lt` ..). This is a trick we are doing to hide implementation details from Roc programmers. To a Roc programmer, a name with `#` in it is invalid, because `#` means everything after it is parsed to a comment. We are constructing these functions manually, so we are circumventing the parsing step and dont have such restrictions. We get to make functions and values with `#` which as a consequence are not accessible to Roc programmers. Roc programmers simply cannot reference them.
But we can use these values and some of these are necessary for implementing builtins. For example, `List.get` returns tags, and it is not easy for us to create tags when composing LLVM. What is easier however, is: But we can use these values and some of these are necessary for implementing builtins. For example, `List.get` returns tags, and it is not easy for us to create tags when composing LLVM. What is easier however, is:
- ..writing `List.#getUnsafe` that has the dangerous signature of `List elem, Int -> elem` in LLVM - ..writing `List.#getUnsafe` that has the dangerous signature of `List elem, Nat -> elem` in LLVM
- ..writing `List elem, Int -> Result elem [ OutOfBounds ]*` in a type safe way that uses `getUnsafe` internally, only after it checks if the `elem` at `Int` index exists. - ..writing `List elem, Nat -> Result elem [ OutOfBounds ]*` in a type safe way that uses `getUnsafe` internally, only after it checks if the `elem` at `Nat` index exists.
### can/src/builtins.rs ### can/src/builtins.rs
Right at the top of this module is a function called `builtin_defs`. All this is doing is mapping the `Symbol` defined in `module/src/symbol.rs` to its implementation. Some of the builtins are quite complex, such as `list_get`. What makes `list_get` is that it returns tags, and in order to return tags it first has to defer to lower-level functions via an if statement. Right at the top of this module is a function called `builtin_defs`. All this is doing is mapping the `Symbol` defined in `module/src/symbol.rs` to its implementation. Some of the builtins are quite complex, such as `list_get`. What makes `list_get` is that it returns tags, and in order to return tags it first has to defer to lower-level functions via an if statement.
Lets look at `List.repeat : elem, Int -> List elem`, which is more straight-forward, and points directly to its lower level implementation: Lets look at `List.repeat : elem, Nat -> List elem`, which is more straight-forward, and points directly to its lower level implementation:
``` ```
fn list_repeat(symbol: Symbol, var_store: &mut VarStore) -> Def { fn list_repeat(symbol: Symbol, var_store: &mut VarStore) -> Def {
let elem_var = var_store.fresh(); let elem_var = var_store.fresh();
@ -42,7 +42,7 @@ fn list_repeat(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
``` ```
In these builtin definitions you will need to allocate for and list the arguments. For `List.repeat`, the arguments are the `elem_var` and the `len_var`. So in both the `body` and `defn` we list these arguments in a vector, with the `Symbol::ARG_1` and `Symbol::ARG_2` designating which argument is which. In these builtin definitions you will need to allocate for and list the arguments. For `List.repeat`, the arguments are the `elem_var` and the `len_var`. So in both the `body` and `defn` we list these arguments in a vector, with the `Symbol::ARG_1` and `Symbol::ARG_2` designating which argument is which.
Since `List.repeat` is implemented entirely as low level functions, its `body` is a `RunLowLevel`, and the `op` is `LowLevel::ListRepeat`. Lets talk about `LowLevel` in the next section. Since `List.repeat` is implemented entirely as low level functions, its `body` is a `RunLowLevel`, and the `op` is `LowLevel::ListRepeat`. Lets talk about `LowLevel` in the next section.
@ -60,7 +60,7 @@ Its one thing to actually write these functions, its _another_ thing to let the
## Specifying how we pass args to the function ## Specifying how we pass args to the function
### builtins/mono/src/borrow.rs ### builtins/mono/src/borrow.rs
After we have all of this, we need to specify if the arguments we're passing are owned, borrowed or irrelevant. Towards the bottom of this file, add a new case for your builtin and specify each arg. Be sure to read the comment, as it explains this in more detail. After we have all of this, we need to specify if the arguments we're passing are owned, borrowed or irrelevant. Towards the bottom of this file, add a new case for your builtin and specify each arg. Be sure to read the comment, as it explains this in more detail.
## Specifying the uniqueness of a function ## Specifying the uniqueness of a function
### builtins/src/unique.rs ### builtins/src/unique.rs

View file

@ -1,5 +1,7 @@
zig-cache zig-cache
src/zig-cache src/zig-cache
benchmark/zig-cache
builtins.ll builtins.ll
builtins.bc builtins.bc
builtins.o builtins.o
dec

View file

@ -3,10 +3,10 @@
## Adding a bitcode builtin ## Adding a bitcode builtin
To add a builtin: To add a builtin:
1. Add the function to the relevant module. For `Num` builtin use it in `src/num.zig`, for `Str` builtins use `src/str.zig`, and so on. **For anything you add, you must add tests for it!** Not only does this make the builtins more maintainable, it's the easiest way to test these functions on Zig. To run the test, run: `zig build test` 1. Add the function to the relevant module. For `Num` builtin use it in `src/num.zig`, for `Str` builtins use `src/str.zig`, and so on. **For anything you add, you must add tests for it!** Not only does this make the builtins more maintainable, it's the easiest way to test these functions on Zig. To run the test, run: `zig build test`
2. Make sure the function is public with the `pub` keyword and uses the C calling convention. This is really easy, just add `pub` and `callconv(.C)` to the function declaration like so: `pub fn atan(num: f64) callconv(.C) f64 { ... }` 2. Make sure the function is public with the `pub` keyword and uses the C calling convention. This is really easy, just add `pub` and `callconv(.C)` to the function declaration like so: `pub fn atan(num: f64) callconv(.C) f64 { ... }`
3. In `src/main.zig`, export the function. This is also organized by module. For example, for a `Num` function find the `Num` section and add: `comptime { exportNumFn(num.atan, "atan"); }`. The first arguement is the function, the second is the name of it in LLVM. 3. In `src/main.zig`, export the function. This is also organized by module. For example, for a `Num` function find the `Num` section and add: `comptime { exportNumFn(num.atan, "atan"); }`. The first argument is the function, the second is the name of it in LLVM.
4. In `compiler/builtins/src/bitcode.rs`, add a constant for the new function. This is how we use it in Rust. Once again, this is organized by module, so just find the relevent area and add your new function. 4. In `compiler/builtins/src/bitcode.rs`, add a constant for the new function. This is how we use it in Rust. Once again, this is organized by module, so just find the relevant area and add your new function.
5. You can now use your function in Rust using `call_bitcode_fn` in `llvm/src/build.rs`! 5. You can now use your function in Rust using `call_bitcode_fn` in `llvm/src/build.rs`!
## How it works ## How it works
@ -32,4 +32,4 @@ There will be two directories like `roc_builtins-[some random characters]`, look
## Calling bitcode functions ## Calling bitcode functions
use the `call_bitcode_fn` function defined in `llvm/src/build.rs` to call bitcode funcitons. use the `call_bitcode_fn` function defined in `llvm/src/build.rs` to call bitcode functions.

View file

@ -0,0 +1,6 @@
#!/bin/bash

# Build and run the RocDec micro-benchmark.
# -e: exit on error, -u: error on unset vars, -x: trace, -o pipefail: fail on pipe errors.
set -euxo pipefail

# --main-pkg-path . makes the repo root the package root, so benchmark/dec.zig
# can import from ../src.
zig build-exe benchmark/dec.zig -O ReleaseFast --main-pkg-path .

# Run the benchmark binary produced above.
./dec

View file

@ -0,0 +1,174 @@
const std = @import("std");
const time = std.time;
const Timer = time.Timer;
const RocStr = @import("../src/str.zig").RocStr;
const RocDec = @import("../src/dec.zig").RocDec;
var timer: Timer = undefined;
// Entry point: times each RocDec arithmetic chain via `avg_runs` and prints
// the results to stdout.
// NOTE(review): the labels below say "7" operations, but each benchmark
// function performs 17 calls -- confirm which count is intended.
pub fn main() !void {
    const stdout = std.io.getStdOut().writer();
    // Global timer shared with the benchmark functions; must be started
    // before any of them run.
    timer = try Timer.start();

    try stdout.print("7 additions took ", .{});
    try avg_runs(add7);

    try stdout.print("7 subtractions took ", .{});
    try avg_runs(sub7);

    try stdout.print("7 multiplications took ", .{});
    try avg_runs(mul7);

    try stdout.print("7 divisions took ", .{});
    try avg_runs(div7);
}
// Runs `func` 31 times and prints the median, lowest, and highest timings
// in nanoseconds.
// NOTE(review): despite the name, the reported value is the *median* of the
// runs, not the arithmetic mean.
fn avg_runs(func: fn() u64) !void {
    const stdout = std.io.getStdOut().writer();

    var first_run = func();
    var lowest = first_run;
    var highest = first_run;
    // NOTE(review): `sum` is initialized but never updated or printed --
    // looks like dead code left over from a mean calculation; confirm.
    var sum = first_run;

    // 31 runs
    var runs = [_]u64{ first_run, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
    var next_run: usize = 1; // we already did first_run

    while (next_run < runs.len) {
        const run = func();
        lowest = std.math.min(lowest, run);
        highest = std.math.max(highest, run);
        runs[next_run] = run;
        next_run += 1;
    }

    // Sort ascending so the middle element is the median.
    std.sort.sort(u64, &runs, {}, comptime std.sort.asc(u64));
    const median = runs[runs.len / 2];

    try stdout.print("{}ns (lowest: {}ns, highest: {}ns)\n", .{median, lowest, highest});
}
// Benchmark: chained RocDec additions; returns elapsed nanoseconds.
// The chain is kept manually unrolled, presumably to keep loop overhead out
// of the measurement -- confirm before restructuring.
// NOTE(review): performs 17 `add` calls, though main's label says "7".
fn add7() u64 {
    // Operand setup happens before the timer is reset, so it is not measured.
    var str1 = RocStr.init("1.2", 3);
    const dec1 = RocDec.fromStr(str1).?;
    var str2 = RocStr.init("3.4", 3);
    const dec2 = RocDec.fromStr(str2).?;

    timer.reset();

    var a = dec1.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);
    a = a.add(dec1);
    a = a.add(dec2);

    return timer.read();
}
// Benchmark: chained RocDec subtractions; returns elapsed nanoseconds.
// NOTE(review): performs 17 `sub` calls, though main's label says "7".
fn sub7() u64 {
    // Operand setup happens before the timer is reset, so it is not measured.
    var str1 = RocStr.init("1.2", 3);
    const dec1 = RocDec.fromStr(str1).?;
    var str2 = RocStr.init("3.4", 3);
    const dec2 = RocDec.fromStr(str2).?;

    timer.reset();

    var a = dec1.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);
    a = a.sub(dec1);
    a = a.sub(dec2);

    return timer.read();
}
// Benchmark: chained RocDec multiplications; returns elapsed nanoseconds.
// NOTE(review): performs 17 `mul` calls, though main's label says "7".
fn mul7() u64 {
    // Operand setup happens before the timer is reset, so it is not measured.
    var str1 = RocStr.init("1.2", 3);
    const dec1 = RocDec.fromStr(str1).?;
    var str2 = RocStr.init("3.4", 3);
    const dec2 = RocDec.fromStr(str2).?;

    timer.reset();

    var a = dec1.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);
    a = a.mul(dec1);
    a = a.mul(dec2);

    return timer.read();
}
// Benchmark: chained RocDec divisions; returns elapsed nanoseconds.
// NOTE(review): performs 17 `div` calls, though main's label says "7".
fn div7() u64 {
    // Operand setup happens before the timer is reset, so it is not measured.
    var str1 = RocStr.init("1.2", 3);
    const dec1 = RocDec.fromStr(str1).?;
    var str2 = RocStr.init("3.4", 3);
    const dec2 = RocDec.fromStr(str2).?;

    timer.reset();

    var a = dec1.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);
    a = a.div(dec1);
    a = a.div(dec2);

    return timer.read();
}

View file

@ -0,0 +1,5 @@
#!/bin/bash
set -euxo pipefail
# Compile the builtins entry point in ReleaseFast, emitting both the LLVM IR
# (builtins.ll) and a stripped object file (builtins.o) in one invocation.
zig build-obj src/main.zig -O ReleaseFast -femit-llvm-ir=builtins.ll -femit-bin=builtins.o --strip

View file

@ -1,16 +1,15 @@
const builtin = @import("builtin");
const std = @import("std"); const std = @import("std");
const mem = std.mem; const mem = std.mem;
const Builder = std.build.Builder; const Builder = std.build.Builder;
pub fn build(b: *Builder) void { pub fn build(b: *Builder) void {
// b.setPreferredReleaseMode(builtin.Mode.Debug); // b.setPreferredReleaseMode(builtin.Mode.Debug
b.setPreferredReleaseMode(builtin.Mode.ReleaseFast); b.setPreferredReleaseMode(.ReleaseFast);
const mode = b.standardReleaseOptions(); const mode = b.standardReleaseOptions();
// Options // Options
const fallback_main_path = "./src/main.zig"; const fallback_main_path = "./src/main.zig";
const main_path_desc = b.fmt("Override path to main.zig. Used by \"ir\" and \"test\". Defaults to \"{}\". ", .{fallback_main_path}); const main_path_desc = b.fmt("Override path to main.zig. Used by \"ir\" and \"test\". Defaults to \"{s}\". ", .{fallback_main_path});
const main_path = b.option([]const u8, "main-path", main_path_desc) orelse fallback_main_path; const main_path = b.option([]const u8, "main-path", main_path_desc) orelse fallback_main_path;
// Tests // Tests
@ -28,7 +27,6 @@ pub fn build(b: *Builder) void {
llvm_obj.strip = true; llvm_obj.strip = true;
llvm_obj.emit_llvm_ir = true; llvm_obj.emit_llvm_ir = true;
llvm_obj.emit_bin = false; llvm_obj.emit_bin = false;
llvm_obj.bundle_compiler_rt = true;
const ir = b.step("ir", "Build LLVM ir"); const ir = b.step("ir", "Build LLVM ir");
ir.dependOn(&llvm_obj.step); ir.dependOn(&llvm_obj.step);

View file

@ -6,4 +6,4 @@ set -euxo pipefail
zig build test zig build test
# fmt every zig # fmt every zig
find src/*.zig -type f -print0 | xargs -n 1 -0 zig fmt --check find src/*.zig -type f -print0 | xargs -n 1 -0 zig fmt --check || (echo "zig fmt --check FAILED! Check the previuous lines to see which files were improperly formatted." && exit 1)

File diff suppressed because it is too large Load diff

View file

@ -2,7 +2,6 @@ const std = @import("std");
const testing = std.testing; const testing = std.testing;
const expectEqual = testing.expectEqual; const expectEqual = testing.expectEqual;
const mem = std.mem; const mem = std.mem;
const Allocator = mem.Allocator;
const assert = std.debug.assert; const assert = std.debug.assert;
const utils = @import("utils.zig"); const utils = @import("utils.zig");
@ -21,13 +20,9 @@ const Slot = packed enum(u8) {
PreviouslyFilled, PreviouslyFilled,
}; };
const MaybeIndexTag = enum { const MaybeIndexTag = enum { index, not_found };
index, not_found
};
const MaybeIndex = union(MaybeIndexTag) { const MaybeIndex = union(MaybeIndexTag) { index: usize, not_found: void };
index: usize, not_found: void
};
fn nextSeed(seed: u64) u64 { fn nextSeed(seed: u64) u64 {
// TODO is this a valid way to get a new seed? are there better ways? // TODO is this a valid way to get a new seed? are there better ways?
@ -74,7 +69,7 @@ const Alignment = packed enum(u8) {
Align8KeyFirst, Align8KeyFirst,
Align8ValueFirst, Align8ValueFirst,
fn toUsize(self: Alignment) usize { fn toU32(self: Alignment) u32 {
switch (self) { switch (self) {
.Align16KeyFirst => return 16, .Align16KeyFirst => return 16,
.Align16ValueFirst => return 16, .Align16ValueFirst => return 16,
@ -94,20 +89,18 @@ const Alignment = packed enum(u8) {
}; };
pub fn decref( pub fn decref(
allocator: *Allocator,
alignment: Alignment,
bytes_or_null: ?[*]u8, bytes_or_null: ?[*]u8,
data_bytes: usize, data_bytes: usize,
alignment: Alignment,
) void { ) void {
return utils.decref(allocator, alignment.toUsize(), bytes_or_null, data_bytes); return utils.decref(bytes_or_null, data_bytes, alignment.toU32());
} }
pub fn allocateWithRefcount( pub fn allocateWithRefcount(
allocator: *Allocator,
alignment: Alignment,
data_bytes: usize, data_bytes: usize,
alignment: Alignment,
) [*]u8 { ) [*]u8 {
return utils.allocateWithRefcount(allocator, alignment.toUsize(), data_bytes); return utils.allocateWithRefcount(data_bytes, alignment.toU32());
} }
pub const RocDict = extern struct { pub const RocDict = extern struct {
@ -124,7 +117,6 @@ pub const RocDict = extern struct {
} }
pub fn allocate( pub fn allocate(
allocator: *Allocator,
number_of_levels: usize, number_of_levels: usize,
number_of_entries: usize, number_of_entries: usize,
alignment: Alignment, alignment: Alignment,
@ -136,7 +128,7 @@ pub const RocDict = extern struct {
const data_bytes = number_of_slots * slot_size; const data_bytes = number_of_slots * slot_size;
return RocDict{ return RocDict{
.dict_bytes = allocateWithRefcount(allocator, alignment, data_bytes), .dict_bytes = allocateWithRefcount(data_bytes, alignment),
.number_of_levels = number_of_levels, .number_of_levels = number_of_levels,
.dict_entries_len = number_of_entries, .dict_entries_len = number_of_entries,
}; };
@ -144,7 +136,6 @@ pub const RocDict = extern struct {
pub fn reallocate( pub fn reallocate(
self: RocDict, self: RocDict,
allocator: *Allocator,
alignment: Alignment, alignment: Alignment,
key_width: usize, key_width: usize,
value_width: usize, value_width: usize,
@ -157,7 +148,7 @@ pub const RocDict = extern struct {
const delta_capacity = new_capacity - old_capacity; const delta_capacity = new_capacity - old_capacity;
const data_bytes = new_capacity * slot_size; const data_bytes = new_capacity * slot_size;
const first_slot = allocateWithRefcount(allocator, alignment, data_bytes); const first_slot = allocateWithRefcount(data_bytes, alignment);
// transfer the memory // transfer the memory
@ -204,7 +195,7 @@ pub const RocDict = extern struct {
}; };
// NOTE we fuse an increment of all keys/values with a decrement of the input dict // NOTE we fuse an increment of all keys/values with a decrement of the input dict
decref(allocator, alignment, self.dict_bytes, self.capacity() * slotSize(key_width, value_width)); decref(self.dict_bytes, self.capacity() * slotSize(key_width, value_width), alignment);
return result; return result;
} }
@ -236,7 +227,7 @@ pub const RocDict = extern struct {
return totalCapacityAtLevel(self.number_of_levels); return totalCapacityAtLevel(self.number_of_levels);
} }
pub fn makeUnique(self: RocDict, allocator: *Allocator, alignment: Alignment, key_width: usize, value_width: usize) RocDict { pub fn makeUnique(self: RocDict, alignment: Alignment, key_width: usize, value_width: usize) RocDict {
if (self.isEmpty()) { if (self.isEmpty()) {
return self; return self;
} }
@ -246,7 +237,7 @@ pub const RocDict = extern struct {
} }
// unfortunately, we have to clone // unfortunately, we have to clone
var new_dict = RocDict.allocate(allocator, self.number_of_levels, self.dict_entries_len, alignment, key_width, value_width); var new_dict = RocDict.allocate(self.number_of_levels, self.dict_entries_len, alignment, key_width, value_width);
var old_bytes: [*]u8 = @ptrCast([*]u8, self.dict_bytes); var old_bytes: [*]u8 = @ptrCast([*]u8, self.dict_bytes);
var new_bytes: [*]u8 = @ptrCast([*]u8, new_dict.dict_bytes); var new_bytes: [*]u8 = @ptrCast([*]u8, new_dict.dict_bytes);
@ -256,7 +247,7 @@ pub const RocDict = extern struct {
// NOTE we fuse an increment of all keys/values with a decrement of the input dict // NOTE we fuse an increment of all keys/values with a decrement of the input dict
const data_bytes = self.capacity() * slotSize(key_width, value_width); const data_bytes = self.capacity() * slotSize(key_width, value_width);
decref(allocator, alignment, self.dict_bytes, data_bytes); decref(self.dict_bytes, data_bytes, alignment);
return new_dict; return new_dict;
} }
@ -414,13 +405,16 @@ const HashFn = fn (u64, ?[*]u8) callconv(.C) u64;
const EqFn = fn (?[*]u8, ?[*]u8) callconv(.C) bool; const EqFn = fn (?[*]u8, ?[*]u8) callconv(.C) bool;
const Inc = fn (?[*]u8) callconv(.C) void; const Inc = fn (?[*]u8) callconv(.C) void;
const IncN = fn (?[*]u8, usize) callconv(.C) void;
const Dec = fn (?[*]u8) callconv(.C) void; const Dec = fn (?[*]u8) callconv(.C) void;
const Caller3 = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void;
// Dict.insert : Dict k v, k, v -> Dict k v // Dict.insert : Dict k v, k, v -> Dict k v
pub fn dictInsert(input: RocDict, alignment: Alignment, key: Opaque, key_width: usize, value: Opaque, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Dec, dec_value: Dec, output: *RocDict) callconv(.C) void { pub fn dictInsert(input: RocDict, alignment: Alignment, key: Opaque, key_width: usize, value: Opaque, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Dec, dec_value: Dec, output: *RocDict) callconv(.C) void {
var seed: u64 = INITIAL_SEED; var seed: u64 = INITIAL_SEED;
var result = input.makeUnique(std.heap.c_allocator, alignment, key_width, value_width); var result = input.makeUnique(alignment, key_width, value_width);
var current_level: usize = 1; var current_level: usize = 1;
var current_level_size: usize = 8; var current_level_size: usize = 8;
@ -428,7 +422,7 @@ pub fn dictInsert(input: RocDict, alignment: Alignment, key: Opaque, key_width:
while (true) { while (true) {
if (current_level > result.number_of_levels) { if (current_level > result.number_of_levels) {
result = result.reallocate(std.heap.c_allocator, alignment, key_width, value_width); result = result.reallocate(alignment, key_width, value_width);
} }
const hash = hash_fn(seed, key); const hash = hash_fn(seed, key);
@ -484,7 +478,7 @@ pub fn dictRemove(input: RocDict, alignment: Alignment, key: Opaque, key_width:
return; return;
}, },
MaybeIndex.index => |index| { MaybeIndex.index => |index| {
var dict = input.makeUnique(std.heap.c_allocator, alignment, key_width, value_width); var dict = input.makeUnique(alignment, key_width, value_width);
assert(index < dict.capacity()); assert(index < dict.capacity());
@ -499,7 +493,7 @@ pub fn dictRemove(input: RocDict, alignment: Alignment, key: Opaque, key_width:
// if the dict is now completely empty, free its allocation // if the dict is now completely empty, free its allocation
if (dict.dict_entries_len == 0) { if (dict.dict_entries_len == 0) {
const data_bytes = dict.capacity() * slotSize(key_width, value_width); const data_bytes = dict.capacity() * slotSize(key_width, value_width);
decref(std.heap.c_allocator, alignment, dict.dict_bytes, data_bytes); decref(dict.dict_bytes, data_bytes, alignment);
output.* = RocDict.empty(); output.* = RocDict.empty();
return; return;
} }
@ -572,15 +566,7 @@ pub fn dictKeys(dict: RocDict, alignment: Alignment, key_width: usize, value_wid
} }
const data_bytes = length * key_width; const data_bytes = length * key_width;
var ptr = allocateWithRefcount(std.heap.c_allocator, alignment, data_bytes); var ptr = allocateWithRefcount(data_bytes, alignment);
var offset = blk: {
if (alignment.keyFirst()) {
break :blk 0;
} else {
break :blk (dict.capacity() * value_width);
}
};
i = 0; i = 0;
var copied: usize = 0; var copied: usize = 0;
@ -621,15 +607,7 @@ pub fn dictValues(dict: RocDict, alignment: Alignment, key_width: usize, value_w
} }
const data_bytes = length * value_width; const data_bytes = length * value_width;
var ptr = allocateWithRefcount(std.heap.c_allocator, alignment, data_bytes); var ptr = allocateWithRefcount(data_bytes, alignment);
var offset = blk: {
if (alignment.keyFirst()) {
break :blk (dict.capacity() * key_width);
} else {
break :blk 0;
}
};
i = 0; i = 0;
var copied: usize = 0; var copied: usize = 0;
@ -650,12 +628,12 @@ pub fn dictValues(dict: RocDict, alignment: Alignment, key_width: usize, value_w
output.* = RocList{ .bytes = ptr, .length = length }; output.* = RocList{ .bytes = ptr, .length = length };
} }
fn doNothing(ptr: Opaque) callconv(.C) void { fn doNothing(_: Opaque) callconv(.C) void {
return; return;
} }
pub fn dictUnion(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, inc_key: Inc, inc_value: Inc, output: *RocDict) callconv(.C) void { pub fn dictUnion(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, inc_key: Inc, inc_value: Inc, output: *RocDict) callconv(.C) void {
output.* = dict1.makeUnique(std.heap.c_allocator, alignment, key_width, value_width); output.* = dict1.makeUnique(alignment, key_width, value_width);
var i: usize = 0; var i: usize = 0;
while (i < dict2.capacity()) : (i += 1) { while (i < dict2.capacity()) : (i += 1) {
@ -690,7 +668,7 @@ pub fn dictUnion(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width
} }
pub fn dictIntersection(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Inc, dec_value: Inc, output: *RocDict) callconv(.C) void { pub fn dictIntersection(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Inc, dec_value: Inc, output: *RocDict) callconv(.C) void {
output.* = dict1.makeUnique(std.heap.c_allocator, alignment, key_width, value_width); output.* = dict1.makeUnique(alignment, key_width, value_width);
var i: usize = 0; var i: usize = 0;
const size = dict1.capacity(); const size = dict1.capacity();
@ -715,7 +693,7 @@ pub fn dictIntersection(dict1: RocDict, dict2: RocDict, alignment: Alignment, ke
} }
pub fn dictDifference(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Dec, dec_value: Dec, output: *RocDict) callconv(.C) void { pub fn dictDifference(dict1: RocDict, dict2: RocDict, alignment: Alignment, key_width: usize, value_width: usize, hash_fn: HashFn, is_eq: EqFn, dec_key: Dec, dec_value: Dec, output: *RocDict) callconv(.C) void {
output.* = dict1.makeUnique(std.heap.c_allocator, alignment, key_width, value_width); output.* = dict1.makeUnique(alignment, key_width, value_width);
var i: usize = 0; var i: usize = 0;
const size = dict1.capacity(); const size = dict1.capacity();
@ -756,16 +734,32 @@ pub fn setFromList(list: RocList, alignment: Alignment, key_width: usize, value_
// NOTE: decref checks for the empty case // NOTE: decref checks for the empty case
const data_bytes = size * key_width; const data_bytes = size * key_width;
decref(std.heap.c_allocator, alignment, list.bytes, data_bytes); decref(list.bytes, data_bytes, alignment);
} }
const StepperCaller = fn (?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8, ?[*]u8) callconv(.C) void; pub fn dictWalk(
pub fn dictWalk(dict: RocDict, stepper: Opaque, stepper_caller: StepperCaller, accum: Opaque, alignment: Alignment, key_width: usize, value_width: usize, accum_width: usize, inc_key: Inc, inc_value: Inc, output: Opaque) callconv(.C) void { dict: RocDict,
caller: Caller3,
data: Opaque,
inc_n_data: IncN,
data_is_owned: bool,
accum: Opaque,
alignment: Alignment,
key_width: usize,
value_width: usize,
accum_width: usize,
output: Opaque,
) callconv(.C) void {
const alignment_u32 = alignment.toU32();
// allocate space to write the result of the stepper into // allocate space to write the result of the stepper into
// experimentally aliasing the accum and output pointers is not a good idea // experimentally aliasing the accum and output pointers is not a good idea
const alloc: [*]u8 = @ptrCast([*]u8, std.heap.c_allocator.alloc(u8, accum_width) catch unreachable); const bytes_ptr: [*]u8 = utils.alloc(accum_width, alignment_u32);
var b1 = output orelse unreachable; var b1 = output orelse unreachable;
var b2 = alloc; var b2 = bytes_ptr;
if (data_is_owned) {
inc_n_data(data, dict.len());
}
@memcpy(b2, accum orelse unreachable, accum_width); @memcpy(b2, accum orelse unreachable, accum_width);
@ -777,19 +771,14 @@ pub fn dictWalk(dict: RocDict, stepper: Opaque, stepper_caller: StepperCaller, a
const key = dict.getKey(i, alignment, key_width, value_width); const key = dict.getKey(i, alignment, key_width, value_width);
const value = dict.getValue(i, alignment, key_width, value_width); const value = dict.getValue(i, alignment, key_width, value_width);
stepper_caller(stepper, key, value, b2, b1); caller(data, key, value, b2, b1);
const temp = b1; std.mem.swap([*]u8, &b1, &b2);
b2 = b1;
b1 = temp;
}, },
else => {}, else => {},
} }
} }
@memcpy(output orelse unreachable, b2, accum_width); @memcpy(output orelse unreachable, b2, accum_width);
std.heap.c_allocator.free(alloc[0..accum_width]); utils.dealloc(bytes_ptr, alignment_u32);
const data_bytes = dict.capacity() * slotSize(key_width, value_width);
decref(std.heap.c_allocator, alignment, dict.dict_bytes, data_bytes);
} }

View file

@ -8,10 +8,9 @@ const str = @import("str.zig");
const mem = std.mem; const mem = std.mem;
pub fn wyhash(seed: u64, bytes: ?[*]const u8, length: usize) callconv(.C) u64 { pub fn wyhash(seed: u64, bytes: ?[*]const u8, length: usize) callconv(.C) u64 {
const stdout = std.io.getStdOut().writer();
if (bytes) |nonnull| { if (bytes) |nonnull| {
return wyhash_hash(seed, nonnull[0..length]); const slice = nonnull[0..length];
return wyhash_hash(seed, slice);
} else { } else {
return 42; return 42;
} }
@ -202,13 +201,13 @@ const expectEqual = std.testing.expectEqual;
test "test vectors" { test "test vectors" {
const hash = Wyhash.hash; const hash = Wyhash.hash;
expectEqual(hash(0, ""), 0x0); try expectEqual(hash(0, ""), 0x0);
expectEqual(hash(1, "a"), 0xbed235177f41d328); try expectEqual(hash(1, "a"), 0xbed235177f41d328);
expectEqual(hash(2, "abc"), 0xbe348debe59b27c3); try expectEqual(hash(2, "abc"), 0xbe348debe59b27c3);
expectEqual(hash(3, "message digest"), 0x37320f657213a290); try expectEqual(hash(3, "message digest"), 0x37320f657213a290);
expectEqual(hash(4, "abcdefghijklmnopqrstuvwxyz"), 0xd0b270e1d8a7019c); try expectEqual(hash(4, "abcdefghijklmnopqrstuvwxyz"), 0xd0b270e1d8a7019c);
expectEqual(hash(5, "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"), 0x602a1894d3bbfe7f); try expectEqual(hash(5, "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"), 0x602a1894d3bbfe7f);
expectEqual(hash(6, "12345678901234567890123456789012345678901234567890123456789012345678901234567890"), 0x829e9c148b75970e); try expectEqual(hash(6, "12345678901234567890123456789012345678901234567890123456789012345678901234567890"), 0x829e9c148b75970e);
} }
test "test vectors streaming" { test "test vectors streaming" {
@ -216,19 +215,19 @@ test "test vectors streaming" {
for ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789") |e| { for ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789") |e| {
wh.update(mem.asBytes(&e)); wh.update(mem.asBytes(&e));
} }
expectEqual(wh.final(), 0x602a1894d3bbfe7f); try expectEqual(wh.final(), 0x602a1894d3bbfe7f);
const pattern = "1234567890"; const pattern = "1234567890";
const count = 8; const count = 8;
const result = 0x829e9c148b75970e; const result = 0x829e9c148b75970e;
expectEqual(Wyhash.hash(6, pattern ** 8), result); try expectEqual(Wyhash.hash(6, pattern ** 8), result);
wh = Wyhash.init(6); wh = Wyhash.init(6);
var i: u32 = 0; var i: u32 = 0;
while (i < count) : (i += 1) { while (i < count) : (i += 1) {
wh.update(pattern); wh.update(pattern);
} }
expectEqual(wh.final(), result); try expectEqual(wh.final(), result);
} }
test "iterative non-divisible update" { test "iterative non-divisible update" {
@ -250,6 +249,6 @@ test "iterative non-divisible update" {
} }
const iterative_hash = wy.final(); const iterative_hash = wy.final();
std.testing.expectEqual(iterative_hash, non_iterative_hash); try std.testing.expectEqual(iterative_hash, non_iterative_hash);
} }
} }

View file

@ -42,7 +42,7 @@ pub const BoundClass = enum(u8) {
}; };
test "Bound Class" { test "Bound Class" {
expectEqual(0, @enumToInt(BoundClass.START)); try expectEqual(0, @enumToInt(BoundClass.START));
} }
// https://github.com/JuliaStrings/utf8proc/blob/master/utf8proc.c#L261 // https://github.com/JuliaStrings/utf8proc/blob/master/utf8proc.c#L261
@ -112,7 +112,7 @@ fn unsafeCodepointToBoundClass(codepoint: u21) *const BoundClass {
} }
test "unsafeCodepointToBoundClass: valid" { test "unsafeCodepointToBoundClass: valid" {
expectEqual(BoundClass.CONTROL, unsafeCodepointToBoundClass(8).*); try expectEqual(BoundClass.CONTROL, unsafeCodepointToBoundClass(8).*);
} }
// https://github.com/JuliaStrings/utf8proc/blob/master/utf8proc.c#L242 // https://github.com/JuliaStrings/utf8proc/blob/master/utf8proc.c#L242
@ -125,11 +125,11 @@ fn codepointToBoundClass(codepoint: u21) *const BoundClass {
} }
test "codepointToBoundClass: valid" { test "codepointToBoundClass: valid" {
expectEqual(BoundClass.CONTROL, codepointToBoundClass(8).*); try expectEqual(BoundClass.CONTROL, codepointToBoundClass(8).*);
} }
test "codepointToBoundClass: invalid" { test "codepointToBoundClass: invalid" {
expectEqual(BoundClass.OTHER, codepointToBoundClass(codepoint_max + 5).*); try expectEqual(BoundClass.OTHER, codepointToBoundClass(codepoint_max + 5).*);
} }
// https://github.com/JuliaStrings/utf8proc/blob/master/utf8proc.c#L319 // https://github.com/JuliaStrings/utf8proc/blob/master/utf8proc.c#L319

File diff suppressed because it is too large Load diff

View file

@ -2,6 +2,20 @@ const builtin = @import("builtin");
const std = @import("std"); const std = @import("std");
const testing = std.testing; const testing = std.testing;
// Dec Module
const dec = @import("dec.zig");
comptime {
exportDecFn(dec.fromF64C, "from_f64");
exportDecFn(dec.eqC, "eq");
exportDecFn(dec.neqC, "neq");
exportDecFn(dec.negateC, "negate");
exportDecFn(dec.addC, "add_with_overflow");
exportDecFn(dec.subC, "sub_with_overflow");
exportDecFn(dec.mulC, "mul_with_overflow");
exportDecFn(dec.divC, "div");
}
// List Module // List Module
const list = @import("list.zig"); const list = @import("list.zig");
@ -19,12 +33,17 @@ comptime {
exportListFn(list.listContains, "contains"); exportListFn(list.listContains, "contains");
exportListFn(list.listRepeat, "repeat"); exportListFn(list.listRepeat, "repeat");
exportListFn(list.listAppend, "append"); exportListFn(list.listAppend, "append");
exportListFn(list.listPrepend, "prepend");
exportListFn(list.listSingle, "single"); exportListFn(list.listSingle, "single");
exportListFn(list.listJoin, "join"); exportListFn(list.listJoin, "join");
exportListFn(list.listRange, "range"); exportListFn(list.listRange, "range");
exportListFn(list.listReverse, "reverse"); exportListFn(list.listReverse, "reverse");
exportListFn(list.listSortWith, "sort_with"); exportListFn(list.listSortWith, "sort_with");
exportListFn(list.listConcat, "concat"); exportListFn(list.listConcat, "concat");
exportListFn(list.listDrop, "drop");
exportListFn(list.listSet, "set");
exportListFn(list.listSetInPlace, "set_in_place");
exportListFn(list.listSwap, "swap");
} }
// Dict Module // Dict Module
@ -70,7 +89,7 @@ comptime {
exportStrFn(str.countSegments, "count_segments"); exportStrFn(str.countSegments, "count_segments");
exportStrFn(str.countGraphemeClusters, "count_grapheme_clusters"); exportStrFn(str.countGraphemeClusters, "count_grapheme_clusters");
exportStrFn(str.startsWith, "starts_with"); exportStrFn(str.startsWith, "starts_with");
exportStrFn(str.startsWithCodePoint, "starts_with_code_point"); exportStrFn(str.startsWithCodePt, "starts_with_code_point");
exportStrFn(str.endsWith, "ends_with"); exportStrFn(str.endsWith, "ends_with");
exportStrFn(str.strConcatC, "concat"); exportStrFn(str.strConcatC, "concat");
exportStrFn(str.strJoinWithC, "joinWith"); exportStrFn(str.strJoinWithC, "joinWith");
@ -78,8 +97,9 @@ comptime {
exportStrFn(str.strFromIntC, "from_int"); exportStrFn(str.strFromIntC, "from_int");
exportStrFn(str.strFromFloatC, "from_float"); exportStrFn(str.strFromFloatC, "from_float");
exportStrFn(str.strEqual, "equal"); exportStrFn(str.strEqual, "equal");
exportStrFn(str.strToBytesC, "to_bytes"); exportStrFn(str.strToUtf8C, "to_utf8");
exportStrFn(str.fromUtf8C, "from_utf8"); exportStrFn(str.fromUtf8C, "from_utf8");
exportStrFn(str.fromUtf8RangeC, "from_utf8_range");
} }
// Export helpers - Must be run inside a comptime // Export helpers - Must be run inside a comptime
@ -95,13 +115,71 @@ fn exportStrFn(comptime func: anytype, comptime func_name: []const u8) void {
fn exportDictFn(comptime func: anytype, comptime func_name: []const u8) void { fn exportDictFn(comptime func: anytype, comptime func_name: []const u8) void {
exportBuiltinFn(func, "dict." ++ func_name); exportBuiltinFn(func, "dict." ++ func_name);
} }
fn exportListFn(comptime func: anytype, comptime func_name: []const u8) void { fn exportListFn(comptime func: anytype, comptime func_name: []const u8) void {
exportBuiltinFn(func, "list." ++ func_name); exportBuiltinFn(func, "list." ++ func_name);
} }
fn exportDecFn(comptime func: anytype, comptime func_name: []const u8) void {
exportBuiltinFn(func, "dec." ++ func_name);
}
// Custom panic handler: overriding Zig's builtin panic here, because the
// default implementation caused errors during LLVM module verification.
// Prints the message plus the (optional) stack trace, then hits
// `unreachable` — this function must never return (`noreturn`).
pub fn panic(message: []const u8, stacktrace: ?*std.builtin.StackTrace) noreturn {
std.debug.print("{s}: {?}", .{ message, stacktrace });
unreachable;
}
// Run all tests in imported modules // Run all tests in imported modules
// https://github.com/ziglang/zig/blob/master/lib/std/std.zig#L94 // https://github.com/ziglang/zig/blob/master/lib/std/std.zig#L94
test "" { test "" {
testing.refAllDecls(@This()); testing.refAllDecls(@This());
} }
// For unclear reasons, sometimes this function is not linked in on some machines.
// Therefore we provide it as LLVM bitcode and mark it as externally linked during our LLVM codegen
//
// Taken from
// https://github.com/ziglang/zig/blob/85755c51d529e7d9b406c6bdf69ce0a0f33f3353/lib/std/special/compiler_rt/muloti4.zig
//
// Thank you Zig Contributors!
// Signed 128-bit multiply with overflow detection (compiler-rt ABI):
// returns a *% b and sets *overflow to 1 iff the true product does not
// fit in i128. Copied verbatim from Zig's compiler_rt (see the comment
// block above) — keep the logic byte-for-byte in sync with upstream.
export fn __muloti4(a: i128, b: i128, overflow: *c_int) callconv(.C) i128 {
// @setRuntimeSafety(builtin.is_test);
// min = i128 minimum (-2^127), max = i128 maximum (2^127 - 1).
const min = @bitCast(i128, @as(u128, 1 << (128 - 1)));
const max = ~min;
overflow.* = 0;
// Wrapping product; this is also the value returned on overflow.
const r = a *% b;
// Special-case min: its negation/absolute value is not representable,
// so it must be handled before the abs-based path below.
if (a == min) {
if (b != 0 and b != 1) {
overflow.* = 1;
}
return r;
}
if (b == min) {
if (a != 0 and a != 1) {
overflow.* = 1;
}
return r;
}
// sa/sb are all-ones for negative inputs, zero otherwise; (x ^ s) -% s
// computes |x| branchlessly.
const sa = a >> (128 - 1);
const abs_a = (a ^ sa) -% sa;
const sb = b >> (128 - 1);
const abs_b = (b ^ sb) -% sb;
// Products where either |operand| < 2 can never overflow (min handled above).
if (abs_a < 2 or abs_b < 2) {
return r;
}
// Same signs -> positive product: overflow iff |a| > max / |b|.
// Different signs -> negative product: overflow iff |a| > min / -|b|.
if (sa == sb) {
if (abs_a > @divTrunc(max, abs_b)) {
overflow.* = 1;
}
} else {
if (abs_a > @divTrunc(min, -abs_b)) {
overflow.* = 1;
}
}
return r;
}

File diff suppressed because it is too large Load diff

View file

@ -1,8 +1,64 @@
const std = @import("std"); const std = @import("std");
const Allocator = std.mem.Allocator; const always_inline = std.builtin.CallOptions.Modifier.always_inline;
const REFCOUNT_MAX_ISIZE: comptime isize = 0; pub fn WithOverflow(comptime T: type) type {
const REFCOUNT_ONE_ISIZE: comptime isize = std.math.minInt(isize); return extern struct { value: T, has_overflowed: bool };
}
// If allocation fails, this must cxa_throw - it must not return a null pointer!
extern fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void;
// This should never be passed a null pointer.
// If allocation fails, this must cxa_throw - it must not return a null pointer!
extern fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void;
// This should never be passed a null pointer.
extern fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void;
comptime {
// During tests, export shims backed by std.testing.allocator as the
// roc_alloc/roc_realloc/roc_dealloc symbols, so the extern declarations
// above resolve (and leaks are caught) without a real host present.
if (std.builtin.is_test) {
@export(testing_roc_alloc, .{ .name = "roc_alloc", .linkage = .Strong });
@export(testing_roc_realloc, .{ .name = "roc_realloc", .linkage = .Strong });
@export(testing_roc_dealloc, .{ .name = "roc_dealloc", .linkage = .Strong });
}
}
// Test shim for roc_alloc: allocates `size` bytes from the std testing
// allocator. The alignment argument is ignored here.
// NOTE(review): the @ptrCast target is the slice returned by alloc —
// confirm this resolves to the slice's data pointer on this Zig version.
fn testing_roc_alloc(size: usize, _: u32) callconv(.C) ?*c_void {
return @ptrCast(?*c_void, std.testing.allocator.alloc(u8, size) catch unreachable);
}
// Test shim for roc_realloc: rebuilds the original []u8 slice from the raw
// pointer plus old_size, then delegates to the testing allocator's realloc.
// NOTE(review): @alignCast(16, ...) assumes every allocation is 16-aligned;
// the actual alignment argument is ignored — confirm this matches callers.
fn testing_roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, _: u32) callconv(.C) ?*c_void {
const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));
const slice = ptr[0..old_size];
return @ptrCast(?*c_void, std.testing.allocator.realloc(slice, new_size) catch unreachable);
}
// Test shim for roc_dealloc. The alignment argument is ignored.
// NOTE(review): `destroy` on a [*]u8 frees based on the pointee type
// (1 byte), not the original allocation length — for buffers allocated
// via testing_roc_alloc this looks like a size mismatch that the testing
// allocator may flag; `free` on a correctly-sized slice would be the usual
// counterpart, but the size is not available at this ABI. Verify.
fn testing_roc_dealloc(c_ptr: *c_void, _: u32) callconv(.C) void {
const ptr = @ptrCast([*]u8, @alignCast(16, c_ptr));
std.testing.allocator.destroy(ptr);
}
// Typed wrapper over the host-provided roc_alloc: returns a [*]u8 instead of
// ?*c_void. The always_inline @call is intentional — keep it so the wrapper
// disappears in the generated code.
pub fn alloc(size: usize, alignment: u32) [*]u8 {
return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_alloc, .{ size, alignment }));
}
// Typed wrapper over the host-provided roc_realloc (see alloc above);
// forwards both old and new sizes so the host can move the allocation.
pub fn realloc(c_ptr: [*]u8, new_size: usize, old_size: usize, alignment: u32) [*]u8 {
return @ptrCast([*]u8, @call(.{ .modifier = always_inline }, roc_realloc, .{ c_ptr, new_size, old_size, alignment }));
}
// Typed wrapper over the host-provided roc_dealloc (see alloc above).
pub fn dealloc(c_ptr: [*]u8, alignment: u32) void {
return @call(.{ .modifier = always_inline }, roc_dealloc, .{ c_ptr, alignment });
}
pub const Inc = fn (?[*]u8) callconv(.C) void;
pub const IncN = fn (?[*]u8, u64) callconv(.C) void;
pub const Dec = fn (?[*]u8) callconv(.C) void;
const REFCOUNT_MAX_ISIZE: isize = 0;
pub const REFCOUNT_ONE_ISIZE: isize = std.math.minInt(isize);
pub const REFCOUNT_ONE: usize = @bitCast(usize, REFCOUNT_ONE_ISIZE); pub const REFCOUNT_ONE: usize = @bitCast(usize, REFCOUNT_ONE_ISIZE);
pub const IntWidth = enum(u8) { pub const IntWidth = enum(u8) {
@ -19,49 +75,10 @@ pub const IntWidth = enum(u8) {
Usize, Usize,
}; };
pub fn intWidth(width: IntWidth) anytype {
switch (width) {
IntWidth.U8 => {
return u8;
},
IntWidth.U16 => {
return u16;
},
IntWidth.U32 => {
return u32;
},
IntWidth.U64 => {
return u64;
},
IntWidth.U128 => {
return u128;
},
IntWidth.I8 => {
return i8;
},
IntWidth.I16 => {
return i16;
},
IntWidth.I32 => {
return i32;
},
IntWidth.I64 => {
return i64;
},
IntWidth.I128 => {
return i128;
},
IntWidth.Usize => {
return usize;
},
}
}
pub fn decref( pub fn decref(
allocator: *Allocator,
alignment: usize,
bytes_or_null: ?[*]u8, bytes_or_null: ?[*]u8,
data_bytes: usize, data_bytes: usize,
alignment: u32,
) void { ) void {
if (data_bytes == 0) { if (data_bytes == 0) {
return; return;
@ -77,7 +94,7 @@ pub fn decref(
switch (alignment) { switch (alignment) {
16 => { 16 => {
if (refcount == REFCOUNT_ONE_ISIZE) { if (refcount == REFCOUNT_ONE_ISIZE) {
allocator.free((bytes - 16)[0 .. 16 + data_bytes]); dealloc(bytes - 16, alignment);
} else if (refcount_isize < 0) { } else if (refcount_isize < 0) {
(isizes - 1)[0] = refcount - 1; (isizes - 1)[0] = refcount - 1;
} }
@ -85,7 +102,7 @@ pub fn decref(
else => { else => {
// NOTE enums can currently have an alignment of < 8 // NOTE enums can currently have an alignment of < 8
if (refcount == REFCOUNT_ONE_ISIZE) { if (refcount == REFCOUNT_ONE_ISIZE) {
allocator.free((bytes - 8)[0 .. 8 + data_bytes]); dealloc(bytes - 8, alignment);
} else if (refcount_isize < 0) { } else if (refcount_isize < 0) {
(isizes - 1)[0] = refcount - 1; (isizes - 1)[0] = refcount - 1;
} }
@ -94,17 +111,16 @@ pub fn decref(
} }
pub fn allocateWithRefcount( pub fn allocateWithRefcount(
allocator: *Allocator,
alignment: usize,
data_bytes: usize, data_bytes: usize,
alignment: u32,
) [*]u8 { ) [*]u8 {
comptime const result_in_place = false; const result_in_place = false;
switch (alignment) { switch (alignment) {
16 => { 16 => {
const length = 2 * @sizeOf(usize) + data_bytes; const length = 2 * @sizeOf(usize) + data_bytes;
var new_bytes: []align(16) u8 = allocator.alignedAlloc(u8, 16, length) catch unreachable; var new_bytes: [*]align(16) u8 = @alignCast(16, alloc(length, alignment));
var as_usize_array = @ptrCast([*]usize, new_bytes); var as_usize_array = @ptrCast([*]usize, new_bytes);
if (result_in_place) { if (result_in_place) {
@ -123,13 +139,13 @@ pub fn allocateWithRefcount(
else => { else => {
const length = @sizeOf(usize) + data_bytes; const length = @sizeOf(usize) + data_bytes;
var new_bytes: []align(8) u8 = allocator.alignedAlloc(u8, 8, length) catch unreachable; var new_bytes: [*]align(8) u8 = @alignCast(8, alloc(length, alignment));
var as_usize_array = @ptrCast([*]isize, new_bytes); var as_isize_array = @ptrCast([*]isize, new_bytes);
if (result_in_place) { if (result_in_place) {
as_usize_array[0] = @intCast(isize, number_of_slots); as_isize_array[0] = @intCast(isize, number_of_slots);
} else { } else {
as_usize_array[0] = REFCOUNT_ONE_ISIZE; as_isize_array[0] = REFCOUNT_ONE_ISIZE;
} }
var as_u8_array = @ptrCast([*]u8, new_bytes); var as_u8_array = @ptrCast([*]u8, new_bytes);
@ -142,8 +158,7 @@ pub fn allocateWithRefcount(
pub fn unsafeReallocate( pub fn unsafeReallocate(
source_ptr: [*]u8, source_ptr: [*]u8,
allocator: *Allocator, alignment: u32,
alignment: usize,
old_length: usize, old_length: usize,
new_length: usize, new_length: usize,
element_width: usize, element_width: usize,
@ -161,8 +176,8 @@ pub fn unsafeReallocate(
// TODO handle out of memory // TODO handle out of memory
// NOTE realloc will dealloc the original allocation // NOTE realloc will dealloc the original allocation
const old_allocation = (source_ptr - align_width)[0..old_width]; const old_allocation = source_ptr - align_width;
const new_allocation = allocator.realloc(old_allocation, new_width) catch unreachable; const new_allocation = realloc(old_allocation, new_width, old_width, alignment);
const new_source = @ptrCast([*]u8, new_allocation) + align_width; const new_source = @ptrCast([*]u8, new_allocation) + align_width;
return new_source; return new_source;

View file

@ -9,6 +9,23 @@ use std::str;
fn main() { fn main() {
let out_dir = env::var_os("OUT_DIR").unwrap(); let out_dir = env::var_os("OUT_DIR").unwrap();
let dest_obj_path = Path::new(&out_dir).join("builtins.o");
let dest_obj = dest_obj_path.to_str().expect("Invalid dest object path");
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rustc-env=BUILTINS_O={}", dest_obj);
// When we build on Netlify, zig is not installed (but also not used,
// since all we're doing is generating docs), so we can skip the steps
// that require having zig installed.
if env::var_os("NO_ZIG_INSTALLED").is_some() {
// We still need to do the other things before this point, because
// setting the env vars is needed for other parts of the build.
return;
}
let big_sur_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/lib";
let use_build_script = Path::new(big_sur_path).exists();
// "." is relative to where "build.rs" is // "." is relative to where "build.rs" is
let build_script_dir_path = fs::canonicalize(Path::new(".")).unwrap(); let build_script_dir_path = fs::canonicalize(Path::new(".")).unwrap();
@ -16,35 +33,31 @@ fn main() {
let src_obj_path = bitcode_path.join("builtins.o"); let src_obj_path = bitcode_path.join("builtins.o");
let src_obj = src_obj_path.to_str().expect("Invalid src object path"); let src_obj = src_obj_path.to_str().expect("Invalid src object path");
println!("Compiling zig object to: {}", src_obj);
run_command(&bitcode_path, "zig", &["build", "object", "-Drelease=true"]); let dest_ir_path = bitcode_path.join("builtins.ll");
let dest_ir = dest_ir_path.to_str().expect("Invalid dest ir path");
if use_build_script {
println!("Compiling zig object & ir to: {} and {}", src_obj, dest_ir);
run_command_with_no_args(&bitcode_path, "./build.sh");
} else {
println!("Compiling zig object to: {}", src_obj);
run_command(&bitcode_path, "zig", &["build", "object", "-Drelease=true"]);
println!("Compiling ir to: {}", dest_ir);
run_command(&bitcode_path, "zig", &["build", "ir", "-Drelease=true"]);
}
let dest_obj_path = Path::new(&out_dir).join("builtins.o");
let dest_obj = dest_obj_path.to_str().expect("Invalid dest object path");
println!("Moving zig object to: {}", dest_obj); println!("Moving zig object to: {}", dest_obj);
run_command(&bitcode_path, "mv", &[src_obj, dest_obj]); run_command(&bitcode_path, "mv", &[src_obj, dest_obj]);
let dest_ir_path = bitcode_path.join("builtins.ll"); let dest_bc_path = bitcode_path.join("builtins.bc");
let dest_ir = dest_ir_path.to_str().expect("Invalid dest ir path");
println!("Compiling ir to: {}", dest_ir);
run_command(&bitcode_path, "zig", &["build", "ir", "-Drelease=true"]);
let dest_bc_path = Path::new(&out_dir).join("builtins.bc");
let dest_bc = dest_bc_path.to_str().expect("Invalid dest bc path"); let dest_bc = dest_bc_path.to_str().expect("Invalid dest bc path");
println!("Compiling bitcode to: {}", dest_bc); println!("Compiling bitcode to: {}", dest_bc);
run_command( run_command(build_script_dir_path, "llvm-as", &[dest_ir, "-o", dest_bc]);
build_script_dir_path,
"llvm-as-10",
&[dest_ir, "-o", dest_bc],
);
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rustc-env=BUILTINS_BC={}", dest_bc);
println!("cargo:rustc-env=BUILTINS_O={}", dest_obj);
get_zig_files(bitcode_path.as_path(), &|path| { get_zig_files(bitcode_path.as_path(), &|path| {
let path: &Path = path; let path: &Path = path;
println!( println!(
@ -79,6 +92,25 @@ where
} }
} }
fn run_command_with_no_args<P: AsRef<Path>>(path: P, command_str: &str) {
let output_result = Command::new(OsStr::new(&command_str))
.current_dir(path)
.output();
match output_result {
Ok(output) => match output.status.success() {
true => (),
false => {
let error_str = match str::from_utf8(&output.stderr) {
Ok(stderr) => stderr.to_string(),
Err(_) => format!("Failed to run \"{}\"", command_str),
};
panic!("{} failed: {}", command_str, error_str);
}
},
Err(reason) => panic!("{} failed: {}", command_str, reason),
}
}
fn get_zig_files(dir: &Path, cb: &dyn Fn(&Path)) -> io::Result<()> { fn get_zig_files(dir: &Path, cb: &dyn Fn(&Path)) -> io::Result<()> {
if dir.is_dir() { if dir.is_dir() {
for entry in fs::read_dir(dir)? { for entry in fs::read_dir(dir)? {

View file

@ -55,36 +55,36 @@ and : Bool, Bool -> Bool
## ##
## In some languages, `&&` and `||` are special-cased in the compiler to skip ## In some languages, `&&` and `||` are special-cased in the compiler to skip
## evaluating the expression after the operator under certain circumstances. ## evaluating the expression after the operator under certain circumstances.
## # In Roc, this is not the case. See the performance notes for #Bool.and for details. ## In Roc, this is not the case. See the performance notes for [Bool.and] for details.
or : Bool, Bool -> Bool or : Bool, Bool -> Bool
## Exclusive or ## Exclusive or
xor : Bool, Bool -> Bool xor : Bool, Bool -> Bool
# TODO: removed `'` from signature because parser does not support it yet
# Original signature: `isEq : 'val, 'val -> Bool`
## Returns `True` if the two values are *structurally equal*, and `False` otherwise. ## Returns `True` if the two values are *structurally equal*, and `False` otherwise.
## ##
## `a == b` is shorthand for `Bool.isEq a b` ## `a == b` is shorthand for `Bool.isEq a b`
## ##
## Structural equality works as follows: ## Structural equality works as follows:
## ##
## 1. #Int and #Float values are equal if their numbers are equal. ## 1. Global tags are equal if they are the same tag, and also their contents (if any) are equal.
## 2. Records are equal if all their fields are equal. ## 2. Private tags are equal if they are the same tag, in the same module, and also their contents (if any) are equal.
## 3. Global tags are equal if they are the same tag, and also their contents (if any) are equal. ## 3. Records are equal if all their fields are equal.
## 4. Private tags are equal if they are the same tag, in the same module, and also their contents (if any) are equal. ## 4. Collections ([Str], [List], [Dict], and [Set]) are equal if they are the same length, and also all their corresponding elements are equal.
## 5. Collections (#String, #List, #Map, #Set, and #Bytes) are equal if they are the same length, and also all their corresponding elements are equal. ## 5. [Num] values are equal if their numbers are equal, with one exception: if both arguments to `isEq` are *NaN*, then `isEq` returns `False`. See `Num.isNaN` for more about *NaN*.
## ##
## Note that `isEq` takes `'val` instead of `val`, which means `isEq` does not ## Note that `isEq` takes `'val` instead of `val`, which means `isEq` does not
## accept arguments whose types contain functions. ## accept arguments whose types contain functions.
# TODO: removed `'` from signature because parser does not support it yet
# Original signature: `isEq : 'val, 'val -> Bool`
isEq : val, val -> Bool isEq : val, val -> Bool
## Calls #eq on the given values, then calls #not on the result. # TODO: removed `'` from signature because parser does not support it yet
# Original signature: `isNotEq : 'val, 'val -> Bool`
## Calls [isEq] on the given values, then calls [not] on the result.
## ##
## `a != b` is shorthand for `Bool.isNotEq a b` ## `a != b` is shorthand for `Bool.isNotEq a b`
## ##
## Note that `isNotEq` takes `'val` instead of `val`, which means `isNotEq` does not ## Note that `isNotEq` takes `'val` instead of `val`, which means `isNotEq` does not
## accept arguments whose types contain functions. ## accept arguments whose types contain functions.
# TODO: removed `'` from signature because parser does not support it yet
# Original signature: `isNotEq : 'val, 'val -> Bool`
isNotEq : val, val -> Bool isNotEq : val, val -> Bool

View file

@ -1,7 +0,0 @@
interface Defaults
exposes []
imports [
Dict.{ Dict },
Set.{ Set },
Num.{ Num, Int, Float }
]

View file

@ -1,5 +1,21 @@
interface Dict interface Dict
exposes [ isEmpty, map ] exposes
[
Dict,
empty,
single,
get,
walk,
insert,
len,
remove,
contains,
keys,
values,
union,
intersection,
difference
]
imports [] imports []
size : Dict * * -> Nat size : Dict * * -> Nat
@ -14,8 +30,20 @@ isEmpty : Dict * * -> Bool
## >>> Dict.map {[ "", "a", "bc" ]} Str.isEmpty ## >>> Dict.map {[ "", "a", "bc" ]} Str.isEmpty
## ##
## `map` functions like this are common in Roc, and they all work similarly. ## `map` functions like this are common in Roc, and they all work similarly.
## See for example #Result.map, #List.map, and #Set.map. ## See for example [List.map], [Result.map], and `Set.map`.
map : map :
Dict beforeKey beforeValue, Dict beforeKey beforeValue,
({ key: beforeKey, value: beforeValue } -> { key: afterKey, value: afterValue }) ({ key: beforeKey, value: beforeValue } -> { key: afterKey, value: afterValue })
-> Dict afterKey afterValue -> Dict afterKey afterValue
# DESIGN NOTES: The reason for panicking when given NaN is that:
# * If we allowed NaN in, Dict.insert would no longer be idempotent.
# * If we allowed NaN but overrode its semantics to make it feel like "NaN == NaN" we'd need isNaN checks in all hashing operations as well as all equality checks (during collision detection), not just insert. This would be much worse for performance than panicking on insert, which only requires one extra conditional on insert.
# * It's obviously invalid; the whole point of NaN is that an error occurred. Giving a runtime error notifies you when this problem happens. Giving it only on insert is the best for performance, because it means you aren't paying for isNaN checks on lookups as well.
# TODO: removed `'` from signature because parser does not support it yet
# Original signature: insert : Dict 'key val, 'key, val -> Dict 'key val
## Make sure never to insert a key of *NaN* into a [Dict]! Because *NaN* is
## defined to be unequal to *NaN*, inserting a *NaN* key results in an entry
## that can never be retrieved or removed from the [Dict].
insert : Dict key val, key, val -> Dict key val

View file

@ -1,79 +1,64 @@
interface List2 interface List
exposes exposes
[ List [
, single List,
, empty isEmpty,
, repeat get,
, range set,
, reverse append,
, sort map,
, map len,
, mapWithIndex walkBackwards,
, mapOrCancel concat,
, mapOks first,
, update single,
, updater repeat,
, allOks reverse,
, append prepend,
, prepend join,
, concat keepIf,
, join contains,
, joinMap sum,
, oks walk,
, zip last,
, zipMap keepOks,
, keepIf keepErrs,
, dropIf mapWithIndex,
, first map2,
, last map3,
, get product,
, max walkUntil,
, min range,
, put sortWith,
, drop drop,
, append swap
, prepend
, dropLast
, dropFirst
, takeFirst
, takeLast
, split
, sublist
, walk
, walkBackwards
, walkUntil
, walkBackwardsUntil
, len
, isEmpty
, contains
, all
, any
] ]
imports [] imports []
## Types ## Types
## A sequential list of values. ## A sequential list of values.
## # >>> [ 1, 2, 3 ] # a list of numbers # # >>> [ "a", "b", "c" ] # a list of strings
## ##
## >>> [ [ 1.1 ], [], [ 2.2, 3.3 ] ] # a list of lists of floats ## >>> [ 1, 2, 3 ] # a list of numbers
## >>> [ "a", "b", "c" ] # a list of strings
## >>> [ [ 1.1 ], [], [ 2.2, 3.3 ] ] # a list of lists of numbers
## ##
## The list [ 1, "a" ] gives an error, because each element in a list must have ## The list `[ 1, "a" ]` gives an error, because each element in a list must have
## the same type. If you want to put a mix of #Int and #Str values into a list, try this: ## the same type. If you want to put a mix of [I64] and [Str] values into a list, try this:
## ##
## ``` ## ```
## mixedList : List [ IntElem Int, StrElem Str ]* ## mixedList : List [ IntElem I64, StrElem Str ]*
## mixedList = [ IntElem 1, IntElem 2, StrElem "a", StrElem "b" ] ## mixedList = [ IntElem 1, IntElem 2, StrElem "a", StrElem "b" ]
## ``` ## ```
## ##
## The maximum size of a #List is limited by the amount of heap memory available ## The maximum size of a [List] is limited by the amount of heap memory available
## to the current process. If there is not enough memory available, attempting to ## to the current process. If there is not enough memory available, attempting to
## create the list could crash. (On Linux, where [overcommit](https://www.etalabs.net/overcommit.html) ## create the list could crash. (On Linux, where [overcommit](https://www.etalabs.net/overcommit.html)
## is normally enabled, not having enough memory could result in the list appearing ## is normally enabled, not having enough memory could result in the list appearing
## to be created just fine, but then crashing later.) ## to be created just fine, but then crashing later.)
## ##
## > The theoretical maximum length for a list created in Roc is ## > The theoretical maximum length for a list created in Roc is half of
## > #Int.maxNat divided by 2. Attempting to create a list bigger than that ## > `Num.maxNat`. Attempting to create a list bigger than that
## > in Roc code will always fail, although in practice it is likely to fail ## > in Roc code will always fail, although in practice it is likely to fail
## > at much smaller lengths due to insufficient memory being available. ## > at much smaller lengths due to insufficient memory being available.
## ##
@ -147,13 +132,13 @@ interface List2
## ##
## List.first (getRatings 5).bar ## List.first (getRatings 5).bar
## ##
## This call to #List.first means that even the list in the `bar` field has become ## This call to [List.first] means that even the list in the `bar` field has become
## inaccessible. As such, this line will cause the list's refcount to get ## inaccessible. As such, this line will cause the list's refcount to get
## decremented all the way to 0. At that point, nothing is referencing the list ## decremented all the way to 0. At that point, nothing is referencing the list
## anymore, and its memory will get freed. ## anymore, and its memory will get freed.
## ##
## Things are different if this is a list of lists instead of a list of numbers. ## Things are different if this is a list of lists instead of a list of numbers.
## Let's look at a simpler example using #List.first - first with a list of numbers, ## Let's look at a simpler example using [List.first] - first with a list of numbers,
## and then with a list of lists, to see how they differ. ## and then with a list of lists, to see how they differ.
## ##
## Here's the example using a list of numbers. ## Here's the example using a list of numbers.
@ -165,7 +150,7 @@ interface List2
## ##
## first ## first
## ##
## It makes a list, calls #List.first and #List.last on it, and then returns `first`. ## It makes a list, calls [List.first] and [List.last] on it, and then returns `first`.
## ##
## Here's the equivalent code with a list of lists: ## Here's the equivalent code with a list of lists:
## ##
@ -180,7 +165,7 @@ interface List2
## we can free it immediately because there are no other refcounts. However, ## we can free it immediately because there are no other refcounts. However,
## in the case of `lists`, we have to iterate through the list and decrement ## in the case of `lists`, we have to iterate through the list and decrement
## the refcounts of each of its contained lists - because they, too, have ## the refcounts of each of its contained lists - because they, too, have
## refcounts! Importantly, beacuse the first element had its refcount incremented ## refcounts! Importantly, because the first element had its refcount incremented
## because the function returned `first`, that element will actually end up ## because the function returned `first`, that element will actually end up
## *not* getting freed at the end - but all the others will be. ## *not* getting freed at the end - but all the others will be.
## ##
@ -189,12 +174,12 @@ interface List2
## their own refcounts - to go inside that list. (The empty list at the end ## their own refcounts - to go inside that list. (The empty list at the end
## does not use heap memory, and thus has no refcount.) ## does not use heap memory, and thus has no refcount.)
## ##
## At the end, we once again call #List.first on the list, but this time ## At the end, we once again call [List.first] on the list, but this time
## ##
## * Copying small lists (64 elements or fewer) is typically slightly faster than copying small persistent data structures. This is because, at small sizes, persistent data structures tend to be thin wrappers around flat arrays anyway. They don't have any copying advantage until crossing a certain minimum size threshold. ## * Copying small lists (64 elements or fewer) is typically slightly faster than copying small persistent data structures. This is because, at small sizes, persistent data structures tend to be thin wrappers around flat arrays anyway. They don't have any copying advantage until crossing a certain minimum size threshold.
## * Even when copying is faster, other list operations may still be slightly slower with persistent data structures. For example, even if it were a persistent data structure, #List.map, #List.fold, and #List.keepIf would all need to traverse every element in the list and build up the result from scratch. These operations are all ## * Even when copying is faster, other list operations may still be slightly slower with persistent data structures. For example, even if it were a persistent data structure, [List.map], [List.walk], and [List.keepIf] would all need to traverse every element in the list and build up the result from scratch. These operations are all
## * Roc's compiler optimizes many list operations into in-place mutations behind the scenes, depending on how the list is being used. For example, #List.map, #List.keepIf, and #List.set can all be optimized to perform in-place mutations. ## * Roc's compiler optimizes many list operations into in-place mutations behind the scenes, depending on how the list is being used. For example, [List.map], [List.keepIf], and [List.set] can all be optimized to perform in-place mutations.
## * If possible, it is usually best for performance to use large lists in a way where the optimizer can turn them into in-place mutations. If this is not possible, a persistent data structure might be faster - but this is a rare enough scenario that it would not be good for the average Roc program's performance if this were the way #List worked by default. Instead, you can look outside Roc's standard modules for an implementation of a persistent data structure - likely built using #List under the hood! ## * If possible, it is usually best for performance to use large lists in a way where the optimizer can turn them into in-place mutations. If this is not possible, a persistent data structure might be faster - but this is a rare enough scenario that it would not be good for the average Roc program's performance if this were the way [List] worked by default. Instead, you can look outside Roc's standard modules for an implementation of a persistent data structure - likely built using [List] under the hood!
List elem : [ @List elem ] List elem : [ @List elem ]
## Initialize ## Initialize
@ -232,9 +217,28 @@ reverse : List elem -> List elem
## Sorts a list using a function which specifies how two elements are ordered. ## Sorts a list using a function which specifies how two elements are ordered.
## ##
## ## When sorting by numeric values, it's more efficient to use [sortAsc] or
## [sortDesc] instead.
sort : List elem, (elem, elem -> [ Lt, Eq, Gt ]) -> List elem sort : List elem, (elem, elem -> [ Lt, Eq, Gt ]) -> List elem
## Sorts a list in ascending order (lowest to highest), using a function which
## specifies a way to represent each element as a number.
##
## This is more efficient than [sort] because it skips
## calculating the `[ Lt, Eq, Gt ]` value and uses the number directly instead.
##
## To sort in descending order (highest to lowest), use [List.sortDesc] instead.
sortAsc : List elem, (elem -> Num *) -> List elem
## Sorts a list in descending order (highest to lowest), using a function which
## specifies a way to represent each element as a number.
##
## This is more efficient than [sort] because it skips
## calculating the `[ Lt, Eq, Gt ]` value and uses the number directly instead.
##
## To sort in ascending order (lowest to highest), use [List.sortAsc] instead.
sortDesc : List elem, (elem -> Num *) -> List elem
## Convert each element in the list to something new, by calling a conversion ## Convert each element in the list to something new, by calling a conversion
## function on each of them. Then return a new list of the converted values. ## function on each of them. Then return a new list of the converted values.
## ##
@ -243,18 +247,18 @@ sort : List elem, (elem, elem -> [ Lt, Eq, Gt ]) -> List elem
## > List.map [ "", "a", "bc" ] Str.isEmpty ## > List.map [ "", "a", "bc" ] Str.isEmpty
## ##
## `map` functions like this are common in Roc, and they all work similarly. ## `map` functions like this are common in Roc, and they all work similarly.
## See for example #Result.map, #Set.map, and #Map.map. ## See for example `Set.map`, `Dict.map`, and [Result.map].
map : List before, (before -> after) -> List after map : List before, (before -> after) -> List after
## This works like #List.map, except it also passes the index ## This works like [List.map], except it also passes the index
## of the element to the conversion function. ## of the element to the conversion function.
mapWithIndex : List before, (before, Int -> after) -> List after mapWithIndex : List before, (before, Nat -> after) -> List after
## This works like #List.map, except at any time you can return `Err` to ## This works like [List.map], except at any time you can return `Err` to
## cancel the entire operation immediately, and return that #Err. ## cancel the entire operation immediately, and return that #Err.
mapOrCancel : List before, (before -> Result after err) -> Result (List after) err mapOrCancel : List before, (before -> Result after err) -> Result (List after) err
## This works like #List.map, except only the transformed values that are ## This works like [List.map], except only the transformed values that are
## wrapped in `Ok` are kept. Any that are wrapped in `Err` are dropped. ## wrapped in `Ok` are kept. Any that are wrapped in `Err` are dropped.
## ##
## >>> List.mapOks [ [ "a", "b" ], [], [], [ "c", "d", "e" ] ] List.last ## >>> List.mapOks [ [ "a", "b" ], [], [], [ "c", "d", "e" ] ] List.last
@ -268,18 +272,18 @@ mapOks : List before, (before -> Result after *) -> List after
## the given function. ## the given function.
## ##
## For a version of this which gives you more control over when to perform ## For a version of this which gives you more control over when to perform
## the transformation, see #List.updater ## the transformation, see `List.updater`
## ##
## ## Performance notes ## ## Performance notes
## ##
## In particular when updating nested collections, this is potentially much more ## In particular when updating nested collections, this is potentially much more
## efficient than using #List.get to obtain the element, transforming it, ## efficient than using [List.get] to obtain the element, transforming it,
## and then putting it back in the same place. ## and then putting it back in the same place.
update : List elem, Nat, (elem -> elem) -> List elem update : List elem, Nat, (elem -> elem) -> List elem
## A more flexible version of #List.update, which returns an "updater" function ## A more flexible version of `List.update`, which returns an "updater" function
## that lets you delay performing the update until later. ## that lets you delay performing the update until later.
updater : List elem, Nat -> { elem, new : elem -> List elem } updater : List elem, Nat -> { elem, new : (elem -> List elem) }
## If all the elements in the list are #Ok, return a new list containing the ## If all the elements in the list are #Ok, return a new list containing the
## contents of those #Ok tags. If any elements are #Err, return #Err. ## contents of those #Ok tags. If any elements are #Err, return #Err.
@ -318,15 +322,15 @@ concat : List elem, List elem -> List elem
## >>> List.join [] ## >>> List.join []
join : List (List elem) -> List elem join : List (List elem) -> List elem
## Like #List.map, except the transformation function wraps the return value ## Like [List.map], except the transformation function wraps the return value
## in a list. At the end, all the lists get joined together into one list. ## in a list. At the end, all the lists get joined together into one list.
joinMap : List before, (before -> List after) -> List after joinMap : List before, (before -> List after) -> List after
## Like #List.join, but only keeps elements tagged with `Ok`. Elements ## Like [List.join], but only keeps elements tagged with `Ok`. Elements
## tagged with `Err` are dropped. ## tagged with `Err` are dropped.
## ##
## This can be useful after using an operation that returns a #Result ## This can be useful after using an operation that returns a #Result
## on each element of a list, for example #List.first: ## on each element of a list, for example [List.first]:
## ##
## >>> [ [ 1, 2, 3 ], [], [], [ 4, 5 ] ] ## >>> [ [ 1, 2, 3 ], [], [], [ 4, 5 ] ]
## >>> |> List.map List.first ## >>> |> List.map List.first
@ -368,16 +372,16 @@ zipMap : List a, List b, (a, b -> c) -> List c
## ##
## ## Performance Details ## ## Performance Details
## ##
## #List.keepIf always returns a list that takes up exactly the same amount ## [List.keepIf] always returns a list that takes up exactly the same amount
## of memory as the original, even if its length decreases. This is becase it ## of memory as the original, even if its length decreases. This is becase it
## can't know in advance exactly how much space it will need, and if it guesses a ## can't know in advance exactly how much space it will need, and if it guesses a
## length that's too low, it would have to re-allocate. ## length that's too low, it would have to re-allocate.
## ##
## (If you want to do an operation like this which reduces the memory footprint ## (If you want to do an operation like this which reduces the memory footprint
## of the resulting list, you can do two passes over the lis with #List.fold - one ## of the resulting list, you can do two passes over the lis with [List.walk] - one
## to calculate the precise new size, and another to populate the new list.) ## to calculate the precise new size, and another to populate the new list.)
## ##
## If given a unique list, #List.keepIf will mutate it in place to assemble the appropriate list. ## If given a unique list, [List.keepIf] will mutate it in place to assemble the appropriate list.
## If that happens, this function will not allocate any new memory on the heap. ## If that happens, this function will not allocate any new memory on the heap.
## If all elements in the list end up being kept, Roc will return the original ## If all elements in the list end up being kept, Roc will return the original
## list unaltered. ## list unaltered.
@ -391,7 +395,7 @@ keepIf : List elem, (elem -> Bool) -> List elem
## ##
## ## Performance Details ## ## Performance Details
## ##
## #List.dropIf has the same performance characteristics as #List.keepIf. ## `List.dropIf` has the same performance characteristics as [List.keepIf].
## See its documentation for details on those characteristics! ## See its documentation for details on those characteristics!
dropIf : List elem, (elem -> Bool) -> List elem dropIf : List elem, (elem -> Bool) -> List elem
@ -418,14 +422,14 @@ min : List (Num a) -> Result (Num a) [ ListWasEmpty ]*
## If the given index is outside the bounds of the list, returns the original ## If the given index is outside the bounds of the list, returns the original
## list unmodified. ## list unmodified.
## ##
## To drop the element at a given index, instead of replacing it, see #List.drop. ## To drop the element at a given index, instead of replacing it, see [List.drop].
set : List elem, Nat, elem -> List elem set : List elem, Nat, elem -> List elem
## Drops the element at the given index from the list. ## Drops the element at the given index from the list.
## ##
## This has no effect if the given index is outside the bounds of the list. ## This has no effect if the given index is outside the bounds of the list.
## ##
## To replace the element at a given index, instead of dropping it, see #List.set. ## To replace the element at a given index, instead of dropping it, see [List.set].
drop : List elem, Nat -> List elem drop : List elem, Nat -> List elem
## Adds a new element to the end of the list. ## Adds a new element to the end of the list.
@ -447,12 +451,12 @@ append : List elem, elem -> List elem
## ## Performance Details ## ## Performance Details
## ##
## This always clones the entire list, even when given a Unique list. That means ## This always clones the entire list, even when given a Unique list. That means
## it runs about as fast as #List.addLast when both are given a Shared list. ## it runs about as fast as `List.addLast` when both are given a Shared list.
## ##
## If you have a Unique list instead, #List.append will run much faster than ## If you have a Unique list instead, [List.append] will run much faster than
## #List.prepend except in the specific case where the list has no excess capacity, ## [List.append] except in the specific case where the list has no excess capacity,
## and needs to *clone and grow*. In that uncommon case, both #List.append and ## and needs to *clone and grow*. In that uncommon case, both [List.append] and
## #List.prepend will run at about the same speed—since #List.prepend always ## [List.append] will run at about the same speed—since [List.append] always
## has to clone and grow. ## has to clone and grow.
## ##
## | Unique list | Shared list | ## | Unique list | Shared list |
@ -474,11 +478,11 @@ prepend : List elem, elem -> List elem
## ##
## ## Performance Details ## ## Performance Details
## ##
## Calling #List.pop on a Unique list runs extremely fast. It's essentially ## Calling `List.pop` on a Unique list runs extremely fast. It's essentially
## the same as a #List.last except it also returns the #List it was given, ## the same as a [List.last] except it also returns the [List] it was given,
## with its length decreased by 1. ## with its length decreased by 1.
## ##
## In contrast, calling #List.pop on a Shared list creates a new list, then ## In contrast, calling `List.pop` on a Shared list creates a new list, then
## copies over every element in the original list except the last one. This ## copies over every element in the original list except the last one. This
## takes much longer. ## takes much longer.
dropLast : List elem -> Result { others : List elem, last : elem } [ ListWasEmpty ]* dropLast : List elem -> Result { others : List elem, last : elem } [ ListWasEmpty ]*
@ -492,8 +496,8 @@ dropLast : List elem -> Result { others : List elem, last : elem } [ ListWasEmpt
## ##
## ## Performance Details ## ## Performance Details
## ##
## When calling either #List.dropFirst or #List.dropLast on a Unique list, #List.dropLast ## When calling either `List.dropFirst` or `List.dropLast` on a Unique list, `List.dropLast`
## runs *much* faster. This is because for #List.dropLast, removing the last element ## runs *much* faster. This is because for `List.dropLast`, removing the last element
## in-place is as easy as reducing the length of the list by 1. In contrast, ## in-place is as easy as reducing the length of the list by 1. In contrast,
## removing the first element from the list involves copying every other element ## removing the first element from the list involves copying every other element
## in the list into the index before it - which is massively more costly. ## in the list into the index before it - which is massively more costly.
@ -502,8 +506,8 @@ dropLast : List elem -> Result { others : List elem, last : elem } [ ListWasEmpt
## ##
## | Unique list | Shared list | ## | Unique list | Shared list |
##-----------+----------------------------------+---------------------------------+ ##-----------+----------------------------------+---------------------------------+
## dropFirst | #List.last + length change | #List.last + clone rest of list | ## dropFirst | [List.last] + length change | [List.last] + clone rest of list |
## dropLast | #List.last + clone rest of list | #List.last + clone rest of list | ## dropLast | [List.last] + clone rest of list | [List.last] + clone rest of list |
dropFirst : List elem -> Result { first: elem, others : List elem } [ ListWasEmpty ]* dropFirst : List elem -> Result { first: elem, others : List elem } [ ListWasEmpty ]*
## Returns the given number of elements from the beginning of the list. ## Returns the given number of elements from the beginning of the list.
@ -515,21 +519,21 @@ dropFirst : List elem -> Result { first: elem, others : List elem } [ ListWasEmp
## ##
## >>> List.takeFirst 5 [ 1, 2 ] ## >>> List.takeFirst 5 [ 1, 2 ]
## ##
## To *remove* elements from the beginning of the list, use #List.takeLast. ## To *remove* elements from the beginning of the list, use `List.takeLast`.
## ##
## To remove elements from both the beginning and end of the list, ## To remove elements from both the beginning and end of the list,
## use #List.sublist. ## use `List.sublist`.
## ##
## To split the list into two lists, use #List.split. ## To split the list into two lists, use `List.split`.
## ##
## ## Performance Details ## ## Performance Details
## ##
## When given a Unique list, this runs extremely fast. It sets the list's length ## When given a Unique list, this runs extremely fast. It sets the list's length
## to the given length value, and frees the leftover elements. This runs very ## to the given length value, and frees the leftover elements. This runs very
## slightly faster than #List.takeLast. ## slightly faster than `List.takeLast`.
## ##
## In fact, `List.takeFirst 1 list` runs faster than `List.first list` when given ## In fact, `List.takeFirst 1 list` runs faster than `List.first list` when given
## a Unique list, because #List.first returns the first element as well - ## a Unique list, because [List.first] returns the first element as well -
## which introduces a conditional bounds check as well as a memory load. ## which introduces a conditional bounds check as well as a memory load.
takeFirst : List elem, Nat -> List elem takeFirst : List elem, Nat -> List elem
@ -542,22 +546,22 @@ takeFirst : List elem, Nat -> List elem
## ##
## >>> List.takeLast 5 [ 1, 2 ] ## >>> List.takeLast 5 [ 1, 2 ]
## ##
## To *remove* elements from the end of the list, use #List.takeFirst. ## To *remove* elements from the end of the list, use `List.takeFirst`.
## ##
## To remove elements from both the beginning and end of the list, ## To remove elements from both the beginning and end of the list,
## use #List.sublist. ## use `List.sublist`.
## ##
## To split the list into two lists, use #List.split. ## To split the list into two lists, use `List.split`.
## ##
## ## Performance Details ## ## Performance Details
## ##
## When given a Unique list, this runs extremely fast. It moves the list's ## When given a Unique list, this runs extremely fast. It moves the list's
## pointer to the index at the given length value, updates its length, ## pointer to the index at the given length value, updates its length,
## and frees the leftover elements. This runs very nearly as fast as ## and frees the leftover elements. This runs very nearly as fast as
## #List.takeFirst on a Unique list. ## `List.takeFirst` on a Unique list.
## ##
## In fact, `List.takeLast 1 list` runs faster than `List.first list` when given ## In fact, `List.takeLast 1 list` runs faster than `List.first list` when given
## a Unique list, because #List.first returns the first element as well - ## a Unique list, because [List.first] returns the first element as well -
## which introduces a conditional bounds check as well as a memory load. ## which introduces a conditional bounds check as well as a memory load.
takeLast : List elem, Nat -> List elem takeLast : List elem, Nat -> List elem
@ -584,7 +588,7 @@ split : List elem, Nat -> { before: List elem, others: List elem }
## >>> List.sublist { start: 2, len: 10 } [ 1, 2, 3, 4, 5 ] ## >>> List.sublist { start: 2, len: 10 } [ 1, 2, 3, 4, 5 ]
## ##
## > If you want a sublist which goes all the way to the end of the list, no ## > If you want a sublist which goes all the way to the end of the list, no
## > matter how long the list is, #List.takeLast can do that more efficiently. ## > matter how long the list is, `List.takeLast` can do that more efficiently.
## ##
## Some languages have a function called **`slice`** which works similarly to this. ## Some languages have a function called **`slice`** which works similarly to this.
sublist : List elem, { start : Nat, len : Nat } -> List elem sublist : List elem, { start : Nat, len : Nat } -> List elem
@ -604,7 +608,7 @@ sublist : List elem, { start : Nat, len : Nat } -> List elem
## * `state` starts at 0 (because of `start: 0`) ## * `state` starts at 0 (because of `start: 0`)
## * Each `step` runs `Num.add state elem`, and the return value becomes the new `state`. ## * Each `step` runs `Num.add state elem`, and the return value becomes the new `state`.
## ##
## Here is a table of how `state` changes as #List.walk walks over the elements ## Here is a table of how `state` changes as [List.walk] walks over the elements
## `[ 2, 4, 8 ]` using #Num.add as its `step` function to determine the next `state`. ## `[ 2, 4, 8 ]` using #Num.add as its `step` function to determine the next `state`.
## ##
## `state` | `elem` | `step state elem` (`Num.add state elem`) ## `state` | `elem` | `step state elem` (`Num.add state elem`)
@ -629,29 +633,29 @@ walk : List elem, { start : state, step : (state, elem -> state) } -> state
## Note that in other languages, `walkBackwards` is sometimes called `reduceRight`, ## Note that in other languages, `walkBackwards` is sometimes called `reduceRight`,
## `fold`, `foldRight`, or `foldr`. ## `fold`, `foldRight`, or `foldr`.
walkBackwards : List elem, { start : state, step : (state, elem -> state ]) } -> state walkBackwards : List elem, { start : state, step : (state, elem -> state) } -> state
## Same as #List.walk, except you can stop walking early. ## Same as [List.walk], except you can stop walking early.
## ##
## ## Performance Details ## ## Performance Details
## ##
## Compared to #List.walk, this can potentially visit fewer elements (which can ## Compared to [List.walk], this can potentially visit fewer elements (which can
## improve performance) at the cost of making each step take longer. ## improve performance) at the cost of making each step take longer.
## However, the added cost to each step is extremely small, and can easily ## However, the added cost to each step is extremely small, and can easily
## be outweighed if it results in skipping even a small number of elements. ## be outweighed if it results in skipping even a small number of elements.
## ##
## As such, it is typically better for performance to use this over #List.walk ## As such, it is typically better for performance to use this over [List.walk]
## if returning `Done` earlier than the last element is expected to be common. ## if returning `Done` earlier than the last element is expected to be common.
walkUntil : List elem, { start : state, step : (state, elem -> [ Continue state, Done state ]) } -> state walkUntil : List elem, { start : state, step : (state, elem -> [ Continue state, Done state ]) } -> state
# Same as #List.walkBackwards, except you can stop walking early. # Same as [List.walk]Backwards, except you can stop walking early.
walkBackwardsUntil : List elem, { start : state, step : (state, elem -> [ Continue state, Done state ]) } -> state walkBackwardsUntil : List elem, { start : state, step : (state, elem -> [ Continue state, Done state ]) } -> state
## Check ## Check
## Returns the length of the list - the number of elements it contains. ## Returns the length of the list - the number of elements it contains.
## ##
## One #List can store up to 2,147,483,648 elements (just over 2 billion), which ## One [List] can store up to 2,147,483,648 elements (just over 2 billion), which
## is exactly equal to the highest valid #I32 value. This means the #U32 this function ## is exactly equal to the highest valid #I32 value. This means the #U32 this function
## returns can always be safely converted to an #I32 without losing any data. ## returns can always be safely converted to an #I32 without losing any data.
len : List * -> Nat len : List * -> Nat

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,55 @@
interface Result
exposes
[
Result,
map,
mapErr,
withDefault,
after
]
imports []
## The result of an operation that could fail: either the operation went
## okay, or else there was an error of some sort.
Result ok err : [ @Result ok err ]
## If the result is `Ok`, return the value it holds. Otherwise, return
## the given default value.
##
## >>> Result.withDefault (Ok 7) 42
##
## >>> Result.withDefault (Err "uh oh") 42
withDefault : Result ok err, ok -> ok
## If the result is `Ok`, transform the entire result by running a conversion
## function on the value the `Ok` holds. Then return that new result.
##
## (If the result is `Err`, this has no effect. Use `afterErr` to transform an `Err`.)
##
## >>> Result.after (Ok -1) \num -> if num < 0 then Err "negative!" else Ok -num
##
## >>> Result.after (Err "yipes!") \num -> if num < 0 then Err "negative!" else Ok -num
after : Result before err, (before -> Result after err) -> Result after err
## If the result is `Ok`, transform the value it holds by running a conversion
## function on it. Then return a new `Ok` holding the transformed value.
##
## (If the result is `Err`, this has no effect. Use [mapErr] to transform an `Err`.)
##
## >>> Result.map (Ok 12) Num.negate
##
## >>> Result.map (Err "yipes!") Num.negate
##
## `map` functions like this are common in Roc, and they all work similarly.
## See for example [List.map], `Set.map`, and `Dict.map`.
map : Result before err, (before -> after) -> Result after err
## If the result is `Err`, transform the value it holds by running a conversion
## function on it. Then return a new `Err` holding the transformed value.
##
## (If the result is `Ok`, this has no effect. Use [map] to transform an `Ok`.)
##
## >>> Result.mapErr (Err "yipes!") Str.isEmpty
##
## >>> Result.mapErr (Ok 12) Str.isEmpty
mapErr : Result ok before, (before -> after) -> Result ok after

View file

@ -1,9 +1,22 @@
interface Set interface Set
exposes [ Set, empty, isEmpty, len, add, drop, map ] exposes
[
Set,
empty,
single,
len,
insert,
remove,
union,
difference,
intersection,
toList,
fromList,
walk,
contains
]
imports [] imports []
## Set
## A Set is an unordered collection of unique elements. ## A Set is an unordered collection of unique elements.
Set elem : [ @Set elem ] Set elem : [ @Set elem ]
@ -18,6 +31,9 @@ len : Set * -> Nat
# TODO: removed `'` from signature because parser does not support it yet # TODO: removed `'` from signature because parser does not support it yet
# Original signature: `add : Set 'elem, 'elem -> Set 'elem` # Original signature: `add : Set 'elem, 'elem -> Set 'elem`
## Make sure never to add a *NaN* to a [Set]! Because *NaN* is defined to be
## unequal to *NaN*, adding a *NaN* results in an entry that can never be
## retrieved or removed from the [Set].
add : Set elem, elem -> Set elem add : Set elem, elem -> Set elem
## Drops the given element from the set. ## Drops the given element from the set.
@ -33,7 +49,7 @@ drop : Set elem, elem -> Set elem
## >>> Set.map {: "", "a", "bc" :} Str.isEmpty ## >>> Set.map {: "", "a", "bc" :} Str.isEmpty
## ##
## `map` functions like this are common in Roc, and they all work similarly. ## `map` functions like this are common in Roc, and they all work similarly.
## See for example #Result.map, #List.map, and #Map.map. ## See for example [List.map], `Dict.map`, and [Result.map].
# TODO: removed `'` from signature because parser does not support it yet # TODO: removed `'` from signature because parser does not support it yet
# Original signature: `map : Set 'elem, ('before -> 'after) -> Set 'after` # Original signature: `map : Set 'elem, ('before -> 'after) -> Set 'after`
map : Set elem, (before -> after) -> Set after map : Set elem, (before -> after) -> Set after

View file

@ -2,36 +2,21 @@ interface Str
exposes exposes
[ [
Str, Str,
decimal,
split,
isEmpty, isEmpty,
append,
concat,
joinWith,
split,
countGraphemes,
startsWith, startsWith,
endsWith, endsWith,
contains, fromInt,
anyGraphemes, fromFloat,
allGraphemes, fromUtf8,
join, Utf8Problem,
joinWith, Utf8ByteProblem,
padGraphemesStart,
padGraphemesEnd,
graphemes,
reverseGraphemes,
isCaseInsensitiveEq,
isCaseInsensitiveNeq,
walkGraphemes,
isCapitalized,
isAllUppercase,
isAllLowercase,
toUtf8, toUtf8,
toUtf16, startsWithCodePt
toUtf32,
trim,
walkUtf8,
walkUtf16,
walkUtf32,
walkRevUtf8,
walkRevUtf16,
walkRevUtf32
] ]
imports [] imports []
@ -63,7 +48,7 @@ interface Str
## programming, and "extended grapheme cluster" is a mouthful, in Roc we use the ## programming, and "extended grapheme cluster" is a mouthful, in Roc we use the
## term "grapheme" as a shorthand for the more precise "extended grapheme cluster." ## term "grapheme" as a shorthand for the more precise "extended grapheme cluster."
## ##
## You can get the number of graphemes in a string by calling #Str.countGraphemes on it: ## You can get the number of graphemes in a string by calling [Str.countGraphemes] on it:
## ##
## Str.countGraphemes "Roc!" ## Str.countGraphemes "Roc!"
## Str.countGraphemes "折り紙" ## Str.countGraphemes "折り紙"
@ -126,7 +111,7 @@ interface Str
## potentially change it without breaking existing Roc applications. (UTF-8 ## potentially change it without breaking existing Roc applications. (UTF-8
## seems pretty great today, but so did UTF-16 at an earlier point in history.) ## seems pretty great today, but so did UTF-16 at an earlier point in history.)
## ##
## This module has functions to can convert a #Str to a #List of raw [code unit](https://unicode.org/glossary/#code_unit) ## This module has functions to can convert a [Str] to a [List] of raw [code unit](https://unicode.org/glossary/#code_unit)
## integers (not to be confused with the [code points](https://unicode.org/glossary/#code_point) ## integers (not to be confused with the [code points](https://unicode.org/glossary/#code_point)
## mentioned earlier) in a particular encoding. If you need encoding-specific functions, ## mentioned earlier) in a particular encoding. If you need encoding-specific functions,
## you should take a look at the [roc/unicode](roc/unicode) package. ## you should take a look at the [roc/unicode](roc/unicode) package.
@ -137,15 +122,15 @@ Str : [ @Str ]
## Convert ## Convert
## Convert a #Float to a decimal string, rounding off to the given number of decimal places. ## Convert a [Float] to a decimal string, rounding off to the given number of decimal places.
## ##
## Since #Float values are imprecise, it's usually best to limit this to the lowest ## If you want to keep all the digits, use [Str.num] instead.
## number you can choose that will make sense for what you want to display.
##
## If you want to keep all the digits, passing the same float to #Str.num
## will do that.
decimal : Float *, Nat -> Str decimal : Float *, Nat -> Str
## Convert a [Num] to a string.
num : Float *, Nat -> Str
## Split a string around a separator. ## Split a string around a separator.
## ##
## >>> Str.split "1,2,3" "," ## >>> Str.split "1,2,3" ","
@ -155,13 +140,13 @@ decimal : Float *, Nat -> Str
## ##
## >>> Str.split "1,2,3" "" ## >>> Str.split "1,2,3" ""
## ##
## To split a string into its individual graphemes, use #Str.graphemes ## To split a string into its individual graphemes, use `Str.graphemes`
split : Str, Str -> List Str split : Str, Str -> List Str
## Split a string around newlines. ## Split a string around newlines.
## ##
## On strings that use `"\n"` for their line endings, this gives the same answer ## On strings that use `"\n"` for their line endings, this gives the same answer
## as passing `"\n"` to #Str.split. However, on strings that use `"\n\r"` (such ## as passing `"\n"` to [Str.split]. However, on strings that use `"\n\r"` (such
## as [in Windows files](https://en.wikipedia.org/wiki/Newline#History)), this ## as [in Windows files](https://en.wikipedia.org/wiki/Newline#History)), this
## will consume the entire `"\n\r"` instead of just the `"\n"`. ## will consume the entire `"\n\r"` instead of just the `"\n"`.
## ##
@ -169,13 +154,13 @@ split : Str, Str -> List Str
## ##
## >>> Str.lines "Hello, World!\n\rNice to meet you!" ## >>> Str.lines "Hello, World!\n\rNice to meet you!"
## ##
## To split a string using a custom separator, use #Str.split. For more advanced ## To split a string using a custom separator, use [Str.split]. For more advanced
## string splitting, use a #Parser. ## string splitting, use a #Parser.
lines : Str, Str -> List Str lines : Str, Str -> List Str
## Check ## Check
## Returns #True if the string is empty, and #False otherwise. ## Returns `True` if the string is empty, and `False` otherwise.
## ##
## >>> Str.isEmpty "hi!" ## >>> Str.isEmpty "hi!"
## ##
@ -192,13 +177,13 @@ startsWith : Str, Str -> Bool
## ##
## **Performance Note:** This runs slightly faster than [Str.startsWith], so ## **Performance Note:** This runs slightly faster than [Str.startsWith], so
## if you want to check whether a string begins with something that's representable ## if you want to check whether a string begins with something that's representable
## in a single code point, you can use (for example) `Str.startsWithCodePoint '鹏'` ## in a single code point, you can use (for example) `Str.startsWithCodePt '鹏'`
## instead of `Str.startsWithCodePoint "鹏"`. ('鹏' evaluates to the [U32] ## instead of `Str.startsWithCodePt "鹏"`. ('鹏' evaluates to the [U32]
## value `40527`.) This will not work for graphemes which take up mulitple code ## value `40527`.) This will not work for graphemes which take up multiple code
## points, however; `Str.startsWithCodePoint '👩‍👩‍👦‍👦'` would be a compiler error ## points, however; `Str.startsWithCodePt '👩‍👩‍👦‍👦'` would be a compiler error
## because 👩‍👩‍👦‍👦 takes up multiple code points and cannot be represented as a ## because 👩‍👩‍👦‍👦 takes up multiple code points and cannot be represented as a
## single [U32]. You'd need to use `Str.startsWithCodePoint "🕊"` instead. ## single [U32]. You'd need to use `Str.startsWithCodePt "🕊"` instead.
startsWithCodePoint : Str, U32 -> Bool startsWithCodePt : Str, U32 -> Bool
endsWith : Str, Str -> Bool endsWith : Str, Str -> Bool
@ -255,9 +240,13 @@ padGraphemesEnd : Str, Nat, Str -> Str
## ##
graphemes : Str -> List Str graphemes : Str -> List Str
## Count the number of [extended grapheme clusters](http://www.unicode.org/glossary/#extended_grapheme_cluster)
## in the string.
##
## Str.countGraphemes "Roc!" # 4 ## Str.countGraphemes "Roc!" # 4
## Str.countGraphemes "七巧板" # 3 ## Str.countGraphemes "七巧板" # 3
## Str.countGraphemes "🕊" # 1 ## Str.countGraphemes "🕊" # 1
countGraphemes : Str -> Nat
## Reverse the order of the string's individual graphemes. ## Reverse the order of the string's individual graphemes.
## ##
@ -268,7 +257,7 @@ graphemes : Str -> List Str
## >>> Str.reversegraphemes "Crème Brûlée" ## >>> Str.reversegraphemes "Crème Brûlée"
reverseGraphemes : Str -> Str reverseGraphemes : Str -> Str
## Returns #True if the two strings are equal when ignoring case. ## Returns `True` if the two strings are equal when ignoring case.
## ##
## >>> Str.caseInsensitiveEq "hi" "Hi" ## >>> Str.caseInsensitiveEq "hi" "Hi"
isCaseInsensitiveEq : Str, Str -> Bool isCaseInsensitiveEq : Str, Str -> Bool
@ -280,7 +269,7 @@ walkGraphemesUntil : Str, { start: state, step: (state, Str -> [ Continue state,
walkGraphemesBackwards : Str, { start: state, step: (state, Str -> state) } -> state walkGraphemesBackwards : Str, { start: state, step: (state, Str -> state) } -> state
walkGraphemesBackwardsUntil : Str, { start: state, step: (state, Str -> [ Continue state, Done state ]) } -> state walkGraphemesBackwardsUntil : Str, { start: state, step: (state, Str -> [ Continue state, Done state ]) } -> state
## Returns #True if the string begins with an uppercase letter. ## Returns `True` if the string begins with an uppercase letter.
## ##
## >>> Str.isCapitalized "Hi" ## >>> Str.isCapitalized "Hi"
## ##
@ -305,7 +294,7 @@ walkGraphemesBackwardsUntil : Str, { start: state, step: (state, Str -> [ Contin
## package for functions which capitalize strings. ## package for functions which capitalize strings.
isCapitalized : Str -> Bool isCapitalized : Str -> Bool
## Returns #True if the string consists entirely of uppercase letters. ## Returns `True` if the string consists entirely of uppercase letters.
## ##
## >>> Str.isAllUppercase "hi" ## >>> Str.isAllUppercase "hi"
## ##
@ -326,7 +315,7 @@ isCapitalized : Str -> Bool
## >>> Str.isAllUppercase "" ## >>> Str.isAllUppercase ""
isAllUppercase : Str -> Bool isAllUppercase : Str -> Bool
## Returns #True if the string consists entirely of lowercase letters. ## Returns `True` if the string consists entirely of lowercase letters.
## ##
## >>> Str.isAllLowercase "hi" ## >>> Str.isAllLowercase "hi"
## ##
@ -354,36 +343,36 @@ trim : Str -> Str
## If the given [U32] is a valid [Unicode Scalar Value](http://www.unicode.org/glossary/#unicode_scalar_value), ## If the given [U32] is a valid [Unicode Scalar Value](http://www.unicode.org/glossary/#unicode_scalar_value),
## return a [Str] containing only that scalar. ## return a [Str] containing only that scalar.
fromScalar : U32 -> Result Str [ BadScalar ]* fromScalar : U32 -> Result Str [ BadScalar ]*
fromCodePoints : List U32 -> Result Str [ BadCodePoint U32 ]* fromCodePts : List U32 -> Result Str [ BadCodePt U32 ]*
fromUtf8 : List U8 -> Result Str [ BadUtf8 ]* fromUtf8 : List U8 -> Result Str [ BadUtf8 ]*
## Create a [Str] from bytes encoded as [UTF-16LE](https://en.wikipedia.org/wiki/UTF-16#Byte-order_encoding_schemes). ## Create a [Str] from bytes encoded as [UTF-16LE](https://en.wikipedia.org/wiki/UTF-16#Byte-order_encoding_schemes).
fromUtf16Le : List U8 -> Result Str [ BadUtf16Le Endi ]* # fromUtf16Le : List U8 -> Result Str [ BadUtf16Le Endi ]*
## Create a [Str] from bytes encoded as [UTF-16BE](https://en.wikipedia.org/wiki/UTF-16#Byte-order_encoding_schemes). # ## Create a [Str] from bytes encoded as [UTF-16BE](https://en.wikipedia.org/wiki/UTF-16#Byte-order_encoding_schemes).
fromUtf16Be : List U8 -> Result Str [ BadUtf16Be Endi ]* # fromUtf16Be : List U8 -> Result Str [ BadUtf16Be Endi ]*
## Create a [Str] from bytes encoded as UTF-16 with a [Byte Order Mark](https://en.wikipedia.org/wiki/Byte_order_mark). # ## Create a [Str] from bytes encoded as UTF-16 with a [Byte Order Mark](https://en.wikipedia.org/wiki/Byte_order_mark).
fromUtf16Bom : List U8 -> Result Str [ BadUtf16 Endi, NoBom ]* # fromUtf16Bom : List U8 -> Result Str [ BadUtf16 Endi, NoBom ]*
## Create a [Str] from bytes encoded as [UTF-32LE](https://web.archive.org/web/20120322145307/http://mail.apps.ietf.org/ietf/charsets/msg01095.html) # ## Create a [Str] from bytes encoded as [UTF-32LE](https://web.archive.org/web/20120322145307/http://mail.apps.ietf.org/ietf/charsets/msg01095.html)
fromUtf32Le : List U8 -> Result Str [ BadUtf32Le Endi ]* # fromUtf32Le : List U8 -> Result Str [ BadUtf32Le Endi ]*
## Create a [Str] from bytes encoded as [UTF-32BE](https://web.archive.org/web/20120322145307/http://mail.apps.ietf.org/ietf/charsets/msg01095.html) # ## Create a [Str] from bytes encoded as [UTF-32BE](https://web.archive.org/web/20120322145307/http://mail.apps.ietf.org/ietf/charsets/msg01095.html)
fromUtf32Be : List U8 -> Result Str [ BadUtf32Be Endi ]* # fromUtf32Be : List U8 -> Result Str [ BadUtf32Be Endi ]*
## Create a [Str] from bytes encoded as UTF-32 with a [Byte Order Mark](https://en.wikipedia.org/wiki/Byte_order_mark). # ## Create a [Str] from bytes encoded as UTF-32 with a [Byte Order Mark](https://en.wikipedia.org/wiki/Byte_order_mark).
fromUtf32Bom : List U8 -> Result Str [ BadUtf32 Endi, NoBom ]* # fromUtf32Bom : List U8 -> Result Str [ BadUtf32 Endi, NoBom ]*
## Convert from UTF-8, substituting the replacement character ("<22>") for any # ## Convert from UTF-8, substituting the replacement character ("<22>") for any
## invalid sequences encountered. # ## invalid sequences encountered.
fromUtf8Sub : List U8 -> Str # fromUtf8Sub : List U8 -> Str
fromUtf16Sub : List U8, Endi -> Str # fromUtf16Sub : List U8, Endi -> Str
fromUtf16BomSub : List U8 -> Result Str [ NoBom ]* # fromUtf16BomSub : List U8 -> Result Str [ NoBom ]*
## Return a #List of the string's #U8 UTF-8 [code units](https://unicode.org/glossary/#code_unit). ## Return a [List] of the string's [U8] UTF-8 [code units](https://unicode.org/glossary/#code_unit).
## (To split the string into a #List of smaller #Str values instead of #U8 values, ## (To split the string into a [List] of smaller [Str] values instead of [U8] values,
## see #Str.split and #Str.graphemes.) ## see [Str.split] and `Str.graphemes`.)
## ##
## >>> Str.toUtf8 "👩‍👩‍👦‍👦" ## >>> Str.toUtf8 "👩‍👩‍👦‍👦"
## ##
@ -393,15 +382,15 @@ fromUtf16BomSub : List U8 -> Result Str [ NoBom ]*
## ##
## >>> Str.toUtf8 "🐦" ## >>> Str.toUtf8 "🐦"
## ##
## For a more flexible function that walks through each of these #U8 code units ## For a more flexible function that walks through each of these [U8] code units
## without creating a #List, see #Str.walkUtf8 and #Str.walkRevUtf8. ## without creating a [List], see `Str.walkUtf8` and `Str.walkRevUtf8`.
toUtf8 : Str -> List U8 toUtf8 : Str -> List U8
toUtf16Be : Str -> List U8 toUtf16Be : Str -> List U8
toUtf16Le : Str -> List U8 toUtf16Le : Str -> List U8
toUtf16Bom : Str, Endi -> List U8 # toUtf16Bom : Str, Endi -> List U8
toUtf32Be : Str -> List U8 toUtf32Be : Str -> List U8
toUtf32Le : Str -> List U8 toUtf32Le : Str -> List U8
toUtf32Bom : Str, Endi -> List U8 # toUtf32Bom : Str, Endi -> List U8
# Parsing # Parsing
@ -417,7 +406,7 @@ parseGrapheme : Str -> Result { val : Str, rest : Str } [ Expected [ Grapheme ]*
## ##
## If the string does not begin with a valid code point, for example because it was ## If the string does not begin with a valid code point, for example because it was
## empty, return `Err`. ## empty, return `Err`.
parseCodePoint : Str -> Result { val : U32, rest : Str } [ Expected [ CodePoint ]* Str ]* parseCodePt : Str -> Result { val : U32, rest : Str } [ Expected [ CodePt ]* Str ]*
## If the first string begins with the second, return whatever comes ## If the first string begins with the second, return whatever comes
## after the second. ## after the second.
@ -425,20 +414,70 @@ chomp : Str, Str -> Result Str [ Expected [ ExactStr Str ]* Str ]*
## If the string begins with a [Unicode code point](http://www.unicode.org/glossary/#code_point) ## If the string begins with a [Unicode code point](http://www.unicode.org/glossary/#code_point)
## equal to the given [U32], return whatever comes after that code point. ## equal to the given [U32], return whatever comes after that code point.
chompCodePoint : Str, U32 -> Result Str [ Expected [ ExactCodePoint U32 ]* Str ]* chompCodePt : Str, U32 -> Result Str [ Expected [ ExactCodePt U32 ]* Str ]*
## If the string begins with digits which can represent a valid #U8, return ## If the string represents a valid [U8] number, return that number.
## that number along with the rest of the string after the digits. ##
parseU8 : Str -> Result { val : U8, rest : Str } [ Expected [ NumU8 ]* Str ]* ## For more advanced options, see [parseU8].
parseI8 : Str -> Result { val : I8, rest : Str } [ Expected [ NumI8 ]* Str ]* toU8 : Str -> Result U8 [ InvalidU8 ]*
parseU16 : Str -> Result { val : U16, rest : Str } [ Expected [ NumU16 ]* Str ]* toI8 : Str -> Result I8 [ InvalidI8 ]*
parseI16 : Str -> Result { val : I16, rest : Str } [ Expected [ NumI16 ]* Str ]* toU16 : Str -> Result U16 [ InvalidU16 ]*
parseU32 : Str -> Result { val : U32, rest : Str } [ Expected [ NumU32 ]* Str ]* toI16 : Str -> Result I16 [ InvalidI16 ]*
parseI32 : Str -> Result { val : I32, rest : Str } [ Expected [ NumI32 ]* Str ]* toU32 : Str -> Result U32 [ InvalidU32 ]*
parseU64 : Str -> Result { val : U64, rest : Str } [ Expected [ NumU64 ]* Str ]* toI32 : Str -> Result I32 [ InvalidI32 ]*
parseI64 : Str -> Result { val : I64, rest : Str } [ Expected [ NumI64 ]* Str ]* toU64 : Str -> Result U64 [ InvalidU64 ]*
parseU128 : Str -> Result { val : U128, rest : Str } [ Expected [ NumU128 ]* Str ]* toI64 : Str -> Result I64 [ InvalidI64 ]*
parseI128 : Str -> Result { val : I128, rest : Str } [ Expected [ NumI128 ]* Str ]* toU128 : Str -> Result U128 [ InvalidU128 ]*
toI128 : Str -> Result I128 [ InvalidI128 ]*
toF64 : Str -> Result U128 [ InvalidF64 ]*
toF32 : Str -> Result I128 [ InvalidF32 ]*
toDec : Str -> Result Dec [ InvalidDec ]*
parseF64 : Str -> Result { val : U128, rest : Str } [ Expected [ NumF64 ]* Str ]* ## If the string represents a valid number, return that number.
parseF32 : Str -> Result { val : I128, rest : Str } [ Expected [ NumF32 ]* Str ]* ##
## The exact number type to look for will be inferred from usage. Here's an
## example where the `Err` branch matches `Integer Signed64`, which causes this to
## parse an [I64] because [I64] is defined as `I64 : Num [ Integer [ Signed64 ] ]`.
##
## >>> when Str.toNum "12345" is
## >>> Ok i64 -> "The I64 was: \(i64)"
## >>> Err (ExpectedNum (Integer Signed64)) -> "Not a valid I64!"
##
## If the string is exactly `"NaN"`, `"∞"`, or `"-∞"`, they will be accepted
## only when converting to [F64] or [F32] numbers, and will be translated accordingly.
##
## This never accepts numbers with underscores or commas in them. For more
## advanced options, see [parseNum].
toNum : Str -> Result (Num a) [ ExpectedNum a ]*
## If the string begins with an [Int] or a [finite](Num.isFinite) [Frac], return
## that number along with the rest of the string after it.
##
## The exact number type to look for will be inferred from usage. Here's an
## example where the `Err` branch matches `Float Binary64`, which causes this to
## parse an [F64] because [F64] is defined as `F64 : Num [ Fraction [ Float64 ] ]`.
##
## >>> when Str.parseNum input {} is
## >>> Ok { val: f64, rest } -> "The F64 was: \(f64)"
## >>> Err (ExpectedNum (Fraction Float64)) -> "Not a valid F64!"
##
## If the string begins with `"NaN"`, `"∞"`, and `"-∞"` (which do not represent
## [finite](Num.isFinite) numbers), they will be accepted only when parsing
## [F64] or [F32] numbers, and translated accordingly.
# parseNum : Str, NumParseConfig -> Result { val : Num a, rest : Str } [ ExpectedNum a ]*
## Notes:
## * You can allow a decimal mark for integers; they'll only parse if the numbers after it are all 0.
## * For `wholeSep`, `Required` has a payload for how many digits (e.g. "required every 3 digits")
## * For `wholeSep`, `Allowed` allows the separator to appear anywhere.
# NumParseConfig :
# {
# base ? [ Decimal, Hexadecimal, Octal, Binary ],
# notation ? [ Standard, Scientific, Any ],
# decimalMark ? [ Allowed Str, Required Str, Disallowed ],
# decimalDigits ? [ Any, AtLeast U16, Exactly U16 ],
# wholeDigits ? [ Any, AtLeast U16, Exactly U16 ],
# leadingZeroes ? [ Allowed, Disallowed ],
# trailingZeroes ? [ Allowed, Disallowed ],
# wholeSep ? { mark : Str, policy : [ Allowed, Required U64 ] }
# }

View file

@ -1,28 +1,8 @@
use std::fs::File;
use std::io::prelude::Read;
use std::vec::Vec;
const BC_PATH: &str = env!(
"BUILTINS_BC",
"Env var BUILTINS_BC not found. Is there a problem with the build script?"
);
pub const OBJ_PATH: &str = env!( pub const OBJ_PATH: &str = env!(
"BUILTINS_O", "BUILTINS_O",
"Env var BUILTINS_O not found. Is there a problem with the build script?" "Env var BUILTINS_O not found. Is there a problem with the build script?"
); );
pub fn get_bytes() -> Vec<u8> {
// In the build script for the builtins module, we compile the builtins bitcode and set
// BUILTINS_BC to the path to the compiled output.
let mut builtins_bitcode = File::open(BC_PATH).expect("Unable to find builtins bitcode source");
let mut buffer = Vec::new();
builtins_bitcode
.read_to_end(&mut buffer)
.expect("Unable to read builtins bitcode");
buffer
}
pub const NUM_ASIN: &str = "roc_builtins.num.asin"; pub const NUM_ASIN: &str = "roc_builtins.num.asin";
pub const NUM_ACOS: &str = "roc_builtins.num.acos"; pub const NUM_ACOS: &str = "roc_builtins.num.acos";
pub const NUM_ATAN: &str = "roc_builtins.num.atan"; pub const NUM_ATAN: &str = "roc_builtins.num.atan";
@ -36,14 +16,15 @@ pub const STR_JOIN_WITH: &str = "roc_builtins.str.joinWith";
pub const STR_STR_SPLIT_IN_PLACE: &str = "roc_builtins.str.str_split_in_place"; pub const STR_STR_SPLIT_IN_PLACE: &str = "roc_builtins.str.str_split_in_place";
pub const STR_COUNT_GRAPEHEME_CLUSTERS: &str = "roc_builtins.str.count_grapheme_clusters"; pub const STR_COUNT_GRAPEHEME_CLUSTERS: &str = "roc_builtins.str.count_grapheme_clusters";
pub const STR_STARTS_WITH: &str = "roc_builtins.str.starts_with"; pub const STR_STARTS_WITH: &str = "roc_builtins.str.starts_with";
pub const STR_STARTS_WITH_CODE_POINT: &str = "roc_builtins.str.starts_with_code_point"; pub const STR_STARTS_WITH_CODE_PT: &str = "roc_builtins.str.starts_with_code_point";
pub const STR_ENDS_WITH: &str = "roc_builtins.str.ends_with"; pub const STR_ENDS_WITH: &str = "roc_builtins.str.ends_with";
pub const STR_NUMBER_OF_BYTES: &str = "roc_builtins.str.number_of_bytes"; pub const STR_NUMBER_OF_BYTES: &str = "roc_builtins.str.number_of_bytes";
pub const STR_FROM_INT: &str = "roc_builtins.str.from_int"; pub const STR_FROM_INT: &str = "roc_builtins.str.from_int";
pub const STR_FROM_FLOAT: &str = "roc_builtins.str.from_float"; pub const STR_FROM_FLOAT: &str = "roc_builtins.str.from_float";
pub const STR_EQUAL: &str = "roc_builtins.str.equal"; pub const STR_EQUAL: &str = "roc_builtins.str.equal";
pub const STR_TO_BYTES: &str = "roc_builtins.str.to_bytes"; pub const STR_TO_UTF8: &str = "roc_builtins.str.to_utf8";
pub const STR_FROM_UTF8: &str = "roc_builtins.str.from_utf8"; pub const STR_FROM_UTF8: &str = "roc_builtins.str.from_utf8";
pub const STR_FROM_UTF8_RANGE: &str = "roc_builtins.str.from_utf8_range";
pub const DICT_HASH: &str = "roc_builtins.dict.hash"; pub const DICT_HASH: &str = "roc_builtins.dict.hash";
pub const DICT_HASH_STR: &str = "roc_builtins.dict.hash_str"; pub const DICT_HASH_STR: &str = "roc_builtins.dict.hash_str";
@ -76,9 +57,23 @@ pub const LIST_WALK_BACKWARDS: &str = "roc_builtins.list.walk_backwards";
pub const LIST_CONTAINS: &str = "roc_builtins.list.contains"; pub const LIST_CONTAINS: &str = "roc_builtins.list.contains";
pub const LIST_REPEAT: &str = "roc_builtins.list.repeat"; pub const LIST_REPEAT: &str = "roc_builtins.list.repeat";
pub const LIST_APPEND: &str = "roc_builtins.list.append"; pub const LIST_APPEND: &str = "roc_builtins.list.append";
pub const LIST_PREPEND: &str = "roc_builtins.list.prepend";
pub const LIST_DROP: &str = "roc_builtins.list.drop";
pub const LIST_SWAP: &str = "roc_builtins.list.swap";
pub const LIST_SINGLE: &str = "roc_builtins.list.single"; pub const LIST_SINGLE: &str = "roc_builtins.list.single";
pub const LIST_JOIN: &str = "roc_builtins.list.join"; pub const LIST_JOIN: &str = "roc_builtins.list.join";
pub const LIST_RANGE: &str = "roc_builtins.list.range"; pub const LIST_RANGE: &str = "roc_builtins.list.range";
pub const LIST_REVERSE: &str = "roc_builtins.list.reverse"; pub const LIST_REVERSE: &str = "roc_builtins.list.reverse";
pub const LIST_SORT_WITH: &str = "roc_builtins.list.sort_with"; pub const LIST_SORT_WITH: &str = "roc_builtins.list.sort_with";
pub const LIST_CONCAT: &str = "roc_builtins.list.concat"; pub const LIST_CONCAT: &str = "roc_builtins.list.concat";
pub const LIST_SET: &str = "roc_builtins.list.set";
pub const LIST_SET_IN_PLACE: &str = "roc_builtins.list.set_in_place";
pub const DEC_FROM_F64: &str = "roc_builtins.dec.from_f64";
pub const DEC_EQ: &str = "roc_builtins.dec.eq";
pub const DEC_NEQ: &str = "roc_builtins.dec.neq";
pub const DEC_NEGATE: &str = "roc_builtins.dec.negate";
pub const DEC_ADD_WITH_OVERFLOW: &str = "roc_builtins.dec.add_with_overflow";
pub const DEC_SUB_WITH_OVERFLOW: &str = "roc_builtins.dec.sub_with_overflow";
pub const DEC_MUL_WITH_OVERFLOW: &str = "roc_builtins.dec.mul_with_overflow";
pub const DEC_DIV: &str = "roc_builtins.dec.div";

View file

@ -1,4 +1,4 @@
#![warn(clippy::all, clippy::dbg_macro)] #![warn(clippy::dbg_macro)]
// See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check. // See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]
pub mod bitcode; pub mod bitcode;

File diff suppressed because it is too large Load diff

View file

@ -17,7 +17,6 @@ ven_graph = { path = "../../vendor/pathfinding" }
im = "14" # im and im-rc should always have the same version! im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version! im-rc = "14" # im and im-rc should always have the same version!
bumpalo = { version = "3.6.1", features = ["collections"] } bumpalo = { version = "3.6.1", features = ["collections"] }
inlinable_string = "0.1"
[dev-dependencies] [dev-dependencies]
pretty_assertions = "0.5.1" pretty_assertions = "0.5.1"

View file

@ -6,7 +6,7 @@ use roc_module::symbol::Symbol;
use roc_parse::ast::{AssignedField, Tag, TypeAnnotation}; use roc_parse::ast::{AssignedField, Tag, TypeAnnotation};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::{VarStore, Variable}; use roc_types::subs::{VarStore, Variable};
use roc_types::types::{Alias, Problem, RecordField, Type}; use roc_types::types::{Alias, LambdaSet, Problem, RecordField, Type};
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
pub struct Annotation { pub struct Annotation {
@ -159,7 +159,7 @@ fn can_annotation_help(
Err(problem) => { Err(problem) => {
env.problem(roc_problem::can::Problem::RuntimeError(problem)); env.problem(roc_problem::can::Problem::RuntimeError(problem));
return Type::Erroneous(Problem::UnrecognizedIdent(ident.into())); return Type::Erroneous(Problem::UnrecognizedIdent(ident));
} }
} }
} else { } else {
@ -227,14 +227,27 @@ fn can_annotation_help(
} }
// make sure hidden variables are freshly instantiated // make sure hidden variables are freshly instantiated
for var in alias.lambda_set_variables.iter() { let mut lambda_set_variables =
substitutions.insert(var.into_inner(), Type::Variable(var_store.fresh())); Vec::with_capacity(alias.lambda_set_variables.len());
for typ in alias.lambda_set_variables.iter() {
if let Type::Variable(var) = typ.0 {
let fresh = var_store.fresh();
substitutions.insert(var, Type::Variable(fresh));
lambda_set_variables.push(LambdaSet(Type::Variable(fresh)));
} else {
unreachable!("at this point there should be only vars in there");
}
} }
// instantiate variables // instantiate variables
actual.substitute(&substitutions); actual.substitute(&substitutions);
Type::Alias(symbol, vars, Box::new(actual)) Type::Alias {
symbol,
type_arguments: vars,
lambda_set_variables,
actual: Box::new(actual),
}
} }
None => { None => {
let mut args = Vec::new(); let mut args = Vec::new();
@ -373,12 +386,18 @@ fn can_annotation_help(
introduced_variables.insert_host_exposed_alias(symbol, actual_var); introduced_variables.insert_host_exposed_alias(symbol, actual_var);
Type::HostExposedAlias { Type::HostExposedAlias {
name: symbol, name: symbol,
arguments: vars, type_arguments: vars,
lambda_set_variables: alias.lambda_set_variables.clone(),
actual: Box::new(alias.typ.clone()), actual: Box::new(alias.typ.clone()),
actual_var, actual_var,
} }
} else { } else {
Type::Alias(symbol, vars, Box::new(alias.typ.clone())) Type::Alias {
symbol,
type_arguments: vars,
lambda_set_variables: alias.lambda_set_variables.clone(),
actual: Box::new(alias.typ.clone()),
}
} }
} }
_ => { _ => {

View file

@ -30,7 +30,7 @@ macro_rules! macro_magic {
/// Some builtins cannot be constructed in code gen alone, and need to be defined /// Some builtins cannot be constructed in code gen alone, and need to be defined
/// as separate Roc defs. For example, List.get has this type: /// as separate Roc defs. For example, List.get has this type:
/// ///
/// List.get : List elem, Int -> Result elem [ OutOfBounds ]* /// List.get : List elem, Nat -> Result elem [ OutOfBounds ]*
/// ///
/// Because this returns an open tag union for its Err type, it's not possible /// Because this returns an open tag union for its Err type, it's not possible
/// for code gen to return a hardcoded value for OutOfBounds. For example, /// for code gen to return a hardcoded value for OutOfBounds. For example,
@ -58,12 +58,13 @@ pub fn builtin_defs_map(symbol: Symbol, var_store: &mut VarStore) -> Option<Def>
STR_SPLIT => str_split, STR_SPLIT => str_split,
STR_IS_EMPTY => str_is_empty, STR_IS_EMPTY => str_is_empty,
STR_STARTS_WITH => str_starts_with, STR_STARTS_WITH => str_starts_with,
STR_STARTS_WITH_CODE_POINT => str_starts_with_code_point, STR_STARTS_WITH_CODE_PT => str_starts_with_code_point,
STR_ENDS_WITH => str_ends_with, STR_ENDS_WITH => str_ends_with,
STR_COUNT_GRAPHEMES => str_count_graphemes, STR_COUNT_GRAPHEMES => str_count_graphemes,
STR_FROM_INT => str_from_int, STR_FROM_INT => str_from_int,
STR_FROM_UTF8 => str_from_utf8, STR_FROM_UTF8 => str_from_utf8,
STR_TO_BYTES => str_to_bytes, STR_FROM_UTF8_RANGE => str_from_utf8_range,
STR_TO_UTF8 => str_to_utf8,
STR_FROM_FLOAT=> str_from_float, STR_FROM_FLOAT=> str_from_float,
LIST_LEN => list_len, LIST_LEN => list_len,
LIST_GET => list_get, LIST_GET => list_get,
@ -84,6 +85,8 @@ pub fn builtin_defs_map(symbol: Symbol, var_store: &mut VarStore) -> Option<Def>
LIST_MAP => list_map, LIST_MAP => list_map,
LIST_MAP2 => list_map2, LIST_MAP2 => list_map2,
LIST_MAP3 => list_map3, LIST_MAP3 => list_map3,
LIST_DROP => list_drop,
LIST_SWAP => list_swap,
LIST_MAP_WITH_INDEX => list_map_with_index, LIST_MAP_WITH_INDEX => list_map_with_index,
LIST_KEEP_IF => list_keep_if, LIST_KEEP_IF => list_keep_if,
LIST_KEEP_OKS => list_keep_oks, LIST_KEEP_OKS => list_keep_oks,
@ -448,13 +451,12 @@ fn num_add(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumAdd) num_binop(symbol, var_store, LowLevel::NumAdd)
} }
/// Num.addWrap : Int, Int -> Int /// Num.addWrap : Int a, Int a -> Int a
fn num_add_wrap(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_add_wrap(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumAddWrap) num_binop(symbol, var_store, LowLevel::NumAddWrap)
} }
/// Num.addChecked : Num a, Num a -> Result (Num a) [ Overflow ]* fn num_overflow_checked(symbol: Symbol, var_store: &mut VarStore, lowlevel: LowLevel) -> Def {
fn num_add_checked(symbol: Symbol, var_store: &mut VarStore) -> Def {
let bool_var = var_store.fresh(); let bool_var = var_store.fresh();
let num_var_1 = var_store.fresh(); let num_var_1 = var_store.fresh();
let num_var_2 = var_store.fresh(); let num_var_2 = var_store.fresh();
@ -462,7 +464,7 @@ fn num_add_checked(symbol: Symbol, var_store: &mut VarStore) -> Def {
let ret_var = var_store.fresh(); let ret_var = var_store.fresh();
let record_var = var_store.fresh(); let record_var = var_store.fresh();
// let arg_3 = RunLowLevel NumAddChecked arg_1 arg_2 // let arg_3 = RunLowLevel NumXXXChecked arg_1 arg_2
// //
// if arg_3.b then // if arg_3.b then
// # overflow // # overflow
@ -515,11 +517,11 @@ fn num_add_checked(symbol: Symbol, var_store: &mut VarStore) -> Def {
), ),
}; };
// arg_3 = RunLowLevel NumAddChecked arg_1 arg_2 // arg_3 = RunLowLevel NumXXXChecked arg_1 arg_2
let def = crate::def::Def { let def = crate::def::Def {
loc_pattern: no_region(Pattern::Identifier(Symbol::ARG_3)), loc_pattern: no_region(Pattern::Identifier(Symbol::ARG_3)),
loc_expr: no_region(RunLowLevel { loc_expr: no_region(RunLowLevel {
op: LowLevel::NumAddChecked, op: lowlevel,
args: vec![ args: vec![
(num_var_1, Var(Symbol::ARG_1)), (num_var_1, Var(Symbol::ARG_1)),
(num_var_2, Var(Symbol::ARG_2)), (num_var_2, Var(Symbol::ARG_2)),
@ -542,103 +544,24 @@ fn num_add_checked(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// Num.addChecked : Num a, Num a -> Result (Num a) [ Overflow ]*
fn num_add_checked(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_overflow_checked(symbol, var_store, LowLevel::NumAddChecked)
}
/// Num.sub : Num a, Num a -> Num a /// Num.sub : Num a, Num a -> Num a
fn num_sub(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_sub(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumSub) num_binop(symbol, var_store, LowLevel::NumSub)
} }
/// Num.subWrap : Int, Int -> Int /// Num.subWrap : Int a, Int a -> Int a
fn num_sub_wrap(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_sub_wrap(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumSubWrap) num_binop(symbol, var_store, LowLevel::NumSubWrap)
} }
/// Num.subChecked : Num a, Num a -> Result (Num a) [ Overflow ]* /// Num.subChecked : Num a, Num a -> Result (Num a) [ Overflow ]*
fn num_sub_checked(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_sub_checked(symbol: Symbol, var_store: &mut VarStore) -> Def {
let bool_var = var_store.fresh(); num_overflow_checked(symbol, var_store, LowLevel::NumSubChecked)
let num_var_1 = var_store.fresh();
let num_var_2 = var_store.fresh();
let num_var_3 = var_store.fresh();
let ret_var = var_store.fresh();
let record_var = var_store.fresh();
// let arg_3 = RunLowLevel NumSubChecked arg_1 arg_2
//
// if arg_3.b then
// # overflow
// Err Overflow
// else
// # all is well
// Ok arg_3.a
let cont = If {
branch_var: ret_var,
cond_var: bool_var,
branches: vec![(
// if-condition
no_region(
// arg_3.b
Access {
record_var,
ext_var: var_store.fresh(),
field: "b".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
},
),
// overflow!
no_region(tag(
"Err",
vec![tag("Overflow", Vec::new(), var_store)],
var_store,
)),
)],
final_else: Box::new(
// all is well
no_region(
// Ok arg_3.a
tag(
"Ok",
vec![
// arg_3.a
Access {
record_var,
ext_var: var_store.fresh(),
field: "a".into(),
field_var: num_var_3,
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
},
],
var_store,
),
),
),
};
// arg_3 = RunLowLevel NumSubChecked arg_1 arg_2
let def = crate::def::Def {
loc_pattern: no_region(Pattern::Identifier(Symbol::ARG_3)),
loc_expr: no_region(RunLowLevel {
op: LowLevel::NumSubChecked,
args: vec![
(num_var_1, Var(Symbol::ARG_1)),
(num_var_2, Var(Symbol::ARG_2)),
],
ret_var: record_var,
}),
expr_var: record_var,
pattern_vars: SendMap::default(),
annotation: None,
};
let body = LetNonRec(Box::new(def), Box::new(no_region(cont)), ret_var);
defn(
symbol,
vec![(num_var_1, Symbol::ARG_1), (num_var_2, Symbol::ARG_2)],
var_store,
body,
ret_var,
)
} }
/// Num.mul : Num a, Num a -> Num a /// Num.mul : Num a, Num a -> Num a
@ -646,98 +569,14 @@ fn num_mul(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumMul) num_binop(symbol, var_store, LowLevel::NumMul)
} }
/// Num.mulWrap : Int, Int -> Int /// Num.mulWrap : Int a, Int a -> Int a
fn num_mul_wrap(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_mul_wrap(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumMulWrap) num_binop(symbol, var_store, LowLevel::NumMulWrap)
} }
/// Num.mulChecked : Num a, Num a -> Result (Num a) [ Overflow ]* /// Num.mulChecked : Num a, Num a -> Result (Num a) [ Overflow ]*
fn num_mul_checked(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_mul_checked(symbol: Symbol, var_store: &mut VarStore) -> Def {
let bool_var = var_store.fresh(); num_overflow_checked(symbol, var_store, LowLevel::NumMulChecked)
let num_var_1 = var_store.fresh();
let num_var_2 = var_store.fresh();
let num_var_3 = var_store.fresh();
let ret_var = var_store.fresh();
let record_var = var_store.fresh();
// let arg_3 = RunLowLevel NumMulChecked arg_1 arg_2
//
// if arg_3.b then
// # overflow
// Err Overflow
// else
// # all is well
// Ok arg_3.a
let cont = If {
branch_var: ret_var,
cond_var: bool_var,
branches: vec![(
// if-condition
no_region(
// arg_3.b
Access {
record_var,
ext_var: var_store.fresh(),
field: "b".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
},
),
// overflow!
no_region(tag(
"Err",
vec![tag("Overflow", Vec::new(), var_store)],
var_store,
)),
)],
final_else: Box::new(
// all is well
no_region(
// Ok arg_3.a
tag(
"Ok",
vec![
// arg_3.a
Access {
record_var,
ext_var: var_store.fresh(),
field: "a".into(),
field_var: num_var_3,
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
},
],
var_store,
),
),
),
};
// arg_3 = RunLowLevel NumMulChecked arg_1 arg_2
let def = crate::def::Def {
loc_pattern: no_region(Pattern::Identifier(Symbol::ARG_3)),
loc_expr: no_region(RunLowLevel {
op: LowLevel::NumMulChecked,
args: vec![
(num_var_1, Var(Symbol::ARG_1)),
(num_var_2, Var(Symbol::ARG_2)),
],
ret_var: record_var,
}),
expr_var: record_var,
pattern_vars: SendMap::default(),
annotation: None,
};
let body = LetNonRec(Box::new(def), Box::new(no_region(cont)), ret_var);
defn(
symbol,
vec![(num_var_1, Symbol::ARG_1), (num_var_2, Symbol::ARG_2)],
var_store,
body,
ret_var,
)
} }
/// Num.isGt : Num a, Num a -> Bool /// Num.isGt : Num a, Num a -> Bool
@ -1150,7 +989,7 @@ fn num_ceiling(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// Num.powInt : Int, Int -> Int /// Num.powInt : Int a, Int a -> Int a
fn num_pow_int(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_pow_int(symbol: Symbol, var_store: &mut VarStore) -> Def {
let int_var = var_store.fresh(); let int_var = var_store.fresh();
@ -1249,17 +1088,17 @@ fn num_asin(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// Num.bitwiseAnd : Int, Int -> Int /// Num.bitwiseAnd : Int a, Int a -> Int a
fn num_bitwise_and(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_bitwise_and(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumBitwiseAnd) num_binop(symbol, var_store, LowLevel::NumBitwiseAnd)
} }
/// Num.bitwiseXor : Int, Int -> Int /// Num.bitwiseXor : Int a, Int a -> Int a
fn num_bitwise_xor(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_bitwise_xor(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumBitwiseXor) num_binop(symbol, var_store, LowLevel::NumBitwiseXor)
} }
/// Num.bitwiseOr: Int, Int -> Int /// Num.bitwiseOr: Int a, Int a -> Int a
fn num_bitwise_or(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_bitwise_or(symbol: Symbol, var_store: &mut VarStore) -> Def {
num_binop(symbol, var_store, LowLevel::NumBitwiseOr) num_binop(symbol, var_store, LowLevel::NumBitwiseOr)
} }
@ -1449,9 +1288,9 @@ fn str_starts_with(symbol: Symbol, var_store: &mut VarStore) -> Def {
lowlevel_2(symbol, LowLevel::StrStartsWith, var_store) lowlevel_2(symbol, LowLevel::StrStartsWith, var_store)
} }
/// Str.startsWithCodePoint : Str, U32 -> Bool /// Str.startsWithCodePt : Str, U32 -> Bool
fn str_starts_with_code_point(symbol: Symbol, var_store: &mut VarStore) -> Def { fn str_starts_with_code_point(symbol: Symbol, var_store: &mut VarStore) -> Def {
lowlevel_2(symbol, LowLevel::StrStartsWithCodePoint, var_store) lowlevel_2(symbol, LowLevel::StrStartsWithCodePt, var_store)
} }
/// Str.endsWith : Str, Str -> Bool /// Str.endsWith : Str, Str -> Bool
@ -1514,7 +1353,7 @@ fn str_from_int(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// Str.fromUtf8 : List U8 -> Result Str [ BadUtf8 Utf8Problem ]* /// Str.fromUtf8 : List U8 -> Result Str [ BadUtf8 { byteIndex : Nat, problem : Utf8Problem } } ]*
fn str_from_utf8(symbol: Symbol, var_store: &mut VarStore) -> Def { fn str_from_utf8(symbol: Symbol, var_store: &mut VarStore) -> Def {
let bytes_var = var_store.fresh(); let bytes_var = var_store.fresh();
let bool_var = var_store.fresh(); let bool_var = var_store.fresh();
@ -1617,10 +1456,183 @@ fn str_from_utf8(symbol: Symbol, var_store: &mut VarStore) -> Def {
ret_var, ret_var,
) )
} }
/// Str.fromUtf8Range : List U8, { start : Nat, count : Nat } -> Result Str [ BadUtf8 { byteIndex : Nat, problem : Utf8Problem } } ]*
fn str_from_utf8_range(symbol: Symbol, var_store: &mut VarStore) -> Def {
let bytes_var = var_store.fresh();
let bool_var = var_store.fresh();
let arg_record_var = var_store.fresh();
let ll_record_var = var_store.fresh();
let ret_var = var_store.fresh();
/// Str.toBytes : Str -> List U8 // let arg_3 = RunLowLevel FromUtf8Range arg_1 arg_2
fn str_to_bytes(symbol: Symbol, var_store: &mut VarStore) -> Def { //
lowlevel_1(symbol, LowLevel::StrToBytes, var_store) // arg_3 :
// { a : Bool -- isOk
// , b : String -- result_str
// , c : Nat -- problem_byte_index
// , d : I8 -- problem_code
// }
//
// if arg_3.a then
// Ok arg_3.str
// else
// Err (BadUtf8 { byteIndex: arg_3.byteIndex, problem : arg_3.problem })
let def = crate::def::Def {
loc_pattern: no_region(Pattern::Identifier(Symbol::ARG_3)),
loc_expr: no_region(RunLowLevel {
op: LowLevel::StrFromUtf8Range,
args: vec![
(bytes_var, Var(Symbol::ARG_1)),
(arg_record_var, Var(Symbol::ARG_2)),
],
ret_var: ll_record_var,
}),
expr_var: ll_record_var,
pattern_vars: SendMap::default(),
annotation: None,
};
let cont = If {
branch_var: ret_var,
cond_var: bool_var,
branches: vec![(
// if-condition
no_region(
// arg_2.c -> Bool
Access {
record_var: ll_record_var,
ext_var: var_store.fresh(),
field: "c_isOk".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
},
),
// all is good
no_region(tag(
"Ok",
// arg_2.a -> Str
vec![Access {
record_var: ll_record_var,
ext_var: var_store.fresh(),
field: "b_str".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
}],
var_store,
)),
)],
final_else: Box::new(
// bad!!
no_region(tag(
"Err",
vec![tag(
"BadUtf8",
vec![
Access {
record_var: ll_record_var,
ext_var: var_store.fresh(),
field: "d_problem".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
},
Access {
record_var: ll_record_var,
ext_var: var_store.fresh(),
field: "a_byteIndex".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_3))),
},
],
var_store,
)],
var_store,
)),
),
};
let roc_result = LetNonRec(Box::new(def), Box::new(no_region(cont)), ret_var);
// Only do the business with the let if we're in bounds!
let bounds_var = var_store.fresh();
let bounds_bool = var_store.fresh();
let add_var = var_store.fresh();
let body = If {
cond_var: bounds_bool,
branch_var: ret_var,
branches: vec![(
no_region(RunLowLevel {
op: LowLevel::NumLte,
args: vec![
(
bounds_var,
RunLowLevel {
op: LowLevel::NumAdd,
args: vec![
(
add_var,
Access {
record_var: arg_record_var,
ext_var: var_store.fresh(),
field: "start".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_2))),
},
),
(
add_var,
Access {
record_var: arg_record_var,
ext_var: var_store.fresh(),
field: "count".into(),
field_var: var_store.fresh(),
loc_expr: Box::new(no_region(Var(Symbol::ARG_2))),
},
),
],
ret_var: add_var,
},
),
(
bounds_var,
RunLowLevel {
op: LowLevel::ListLen,
args: vec![(bytes_var, Var(Symbol::ARG_1))],
ret_var: bounds_var,
},
),
],
ret_var: bounds_bool,
}),
no_region(roc_result),
)],
final_else: Box::new(
// else-branch
no_region(
// Err
tag(
"Err",
vec![tag("OutOfBounds", Vec::new(), var_store)],
var_store,
),
),
),
};
defn(
symbol,
vec![(bytes_var, Symbol::ARG_1), (arg_record_var, Symbol::ARG_2)],
var_store,
body,
ret_var,
)
}
/// Str.toUtf8 : Str -> List U8
fn str_to_utf8(symbol: Symbol, var_store: &mut VarStore) -> Def {
lowlevel_1(symbol, LowLevel::StrToUtf8, var_store)
} }
/// Str.fromFloat : Float * -> Str /// Str.fromFloat : Float * -> Str
@ -1665,7 +1677,7 @@ fn list_concat(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// List.repeat : elem, Int -> List elem /// List.repeat : elem, Nat -> List elem
fn list_repeat(symbol: Symbol, var_store: &mut VarStore) -> Def { fn list_repeat(symbol: Symbol, var_store: &mut VarStore) -> Def {
let elem_var = var_store.fresh(); let elem_var = var_store.fresh();
let len_var = var_store.fresh(); let len_var = var_store.fresh();
@ -1807,7 +1819,7 @@ fn list_get(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// List.set : List elem, Int, elem -> List elem /// List.set : List elem, Nat, elem -> List elem
/// ///
/// List.set : /// List.set :
/// Attr (w | u | v) (List (Attr u a)), /// Attr (w | u | v) (List (Attr u a)),
@ -1882,6 +1894,58 @@ fn list_set(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// List.swap : List elem, Nat, Nat -> List elem
fn list_swap(symbol: Symbol, var_store: &mut VarStore) -> Def {
let list_var = var_store.fresh();
let index1_var = var_store.fresh();
let index2_var = var_store.fresh();
let body = RunLowLevel {
op: LowLevel::ListSwap,
args: vec![
(list_var, Var(Symbol::ARG_1)),
(index1_var, Var(Symbol::ARG_2)),
(index2_var, Var(Symbol::ARG_3)),
],
ret_var: list_var,
};
defn(
symbol,
vec![
(list_var, Symbol::ARG_1),
(index1_var, Symbol::ARG_2),
(index2_var, Symbol::ARG_3),
],
var_store,
body,
list_var,
)
}
/// List.drop : List elem, Nat -> List elem
fn list_drop(symbol: Symbol, var_store: &mut VarStore) -> Def {
let list_var = var_store.fresh();
let index_var = var_store.fresh();
let body = RunLowLevel {
op: LowLevel::ListDrop,
args: vec![
(list_var, Var(Symbol::ARG_1)),
(index_var, Var(Symbol::ARG_2)),
],
ret_var: list_var,
};
defn(
symbol,
vec![(list_var, Symbol::ARG_1), (index_var, Symbol::ARG_2)],
var_store,
body,
list_var,
)
}
/// List.append : List elem, elem -> List elem /// List.append : List elem, elem -> List elem
fn list_append(symbol: Symbol, var_store: &mut VarStore) -> Def { fn list_append(symbol: Symbol, var_store: &mut VarStore) -> Def {
let list_var = var_store.fresh(); let list_var = var_store.fresh();
@ -2244,7 +2308,7 @@ fn dict_get(symbol: Symbol, var_store: &mut VarStore) -> Def {
let make_err = tag( let make_err = tag(
"Err", "Err",
vec![tag("OutOfBounds", Vec::new(), var_store)], vec![tag("KeyNotFound", Vec::new(), var_store)],
var_store, var_store,
); );
@ -2477,7 +2541,7 @@ fn set_walk(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// Num.rem : Int, Int -> Result Int [ DivByZero ]* /// Num.rem : Int a, Int a -> Result (Int a) [ DivByZero ]*
fn num_rem(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_rem(symbol: Symbol, var_store: &mut VarStore) -> Def {
let num_var = var_store.fresh(); let num_var = var_store.fresh();
let unbound_zero_var = var_store.fresh(); let unbound_zero_var = var_store.fresh();
@ -2536,7 +2600,7 @@ fn num_rem(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// Num.isMultipleOf : Int, Int -> Bool /// Num.isMultipleOf : Int a, Int a -> Bool
fn num_is_multiple_of(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_is_multiple_of(symbol: Symbol, var_store: &mut VarStore) -> Def {
lowlevel_2(symbol, LowLevel::NumIsMultipleOf, var_store) lowlevel_2(symbol, LowLevel::NumIsMultipleOf, var_store)
} }
@ -2642,7 +2706,7 @@ fn num_div_float(symbol: Symbol, var_store: &mut VarStore) -> Def {
) )
} }
/// Num.div : Int, Int -> Result Int [ DivByZero ]* /// Num.div : Int a , Int a -> Result (Int a) [ DivByZero ]*
fn num_div_int(symbol: Symbol, var_store: &mut VarStore) -> Def { fn num_div_int(symbol: Symbol, var_store: &mut VarStore) -> Def {
let bool_var = var_store.fresh(); let bool_var = var_store.fresh();
let num_var = var_store.fresh(); let num_var = var_store.fresh();

View file

@ -60,33 +60,49 @@ impl Constraint {
true true
} }
pub fn contains_save_the_environment(&self) -> bool {
match self {
Constraint::Eq(_, _, _, _) => false,
Constraint::Store(_, _, _, _) => false,
Constraint::Lookup(_, _, _) => false,
Constraint::Pattern(_, _, _, _) => false,
Constraint::True => false,
Constraint::SaveTheEnvironment => true,
Constraint::Let(boxed) => {
boxed.ret_constraint.contains_save_the_environment()
|| boxed.defs_constraint.contains_save_the_environment()
}
Constraint::And(cs) => cs.iter().any(|c| c.contains_save_the_environment()),
}
}
} }
fn subtract(declared: &Declared, detail: &VariableDetail, accum: &mut VariableDetail) { fn subtract(declared: &Declared, detail: &VariableDetail, accum: &mut VariableDetail) {
for var in &detail.type_variables { for var in &detail.type_variables {
if !(declared.rigid_vars.contains(&var) || declared.flex_vars.contains(&var)) { if !(declared.rigid_vars.contains(var) || declared.flex_vars.contains(var)) {
accum.type_variables.insert(*var); accum.type_variables.insert(*var);
} }
} }
// lambda set variables are always flex // lambda set variables are always flex
for var in &detail.lambda_set_variables { for var in &detail.lambda_set_variables {
if declared.rigid_vars.contains(&var.into_inner()) { if declared.rigid_vars.contains(var) {
panic!("lambda set variable {:?} is declared as rigid", var); panic!("lambda set variable {:?} is declared as rigid", var);
} }
if !declared.flex_vars.contains(&var.into_inner()) { if !declared.flex_vars.contains(var) {
accum.lambda_set_variables.insert(*var); accum.lambda_set_variables.push(*var);
} }
} }
// recursion vars should be always rigid // recursion vars should be always rigid
for var in &detail.recursion_variables { for var in &detail.recursion_variables {
if declared.flex_vars.contains(&var) { if declared.flex_vars.contains(var) {
panic!("recursion variable {:?} is declared as flex", var); panic!("recursion variable {:?} is declared as flex", var);
} }
if !declared.rigid_vars.contains(&var) { if !declared.rigid_vars.contains(var) {
accum.recursion_variables.insert(*var); accum.recursion_variables.insert(*var);
} }
} }

View file

@ -380,7 +380,7 @@ pub fn sort_can_defs(
// //
// In the above example, `f` cannot reference `a`, and in the closure // In the above example, `f` cannot reference `a`, and in the closure
// a call to `f` cannot cycle back to `a`. // a call to `f` cannot cycle back to `a`.
let mut loc_succ = local_successors(&references, &env.closures); let mut loc_succ = local_successors(references, &env.closures);
// if the current symbol is a closure, peek into its body // if the current symbol is a closure, peek into its body
if let Some(References { lookups, .. }) = env.closures.get(symbol) { if let Some(References { lookups, .. }) = env.closures.get(symbol) {
@ -430,7 +430,7 @@ pub fn sort_can_defs(
// //
// In the above example, `f` cannot reference `a`, and in the closure // In the above example, `f` cannot reference `a`, and in the closure
// a call to `f` cannot cycle back to `a`. // a call to `f` cannot cycle back to `a`.
let mut loc_succ = local_successors(&references, &env.closures); let mut loc_succ = local_successors(references, &env.closures);
// if the current symbol is a closure, peek into its body // if the current symbol is a closure, peek into its body
if let Some(References { lookups, .. }) = env.closures.get(symbol) { if let Some(References { lookups, .. }) = env.closures.get(symbol) {
@ -454,7 +454,7 @@ pub fn sort_can_defs(
let direct_successors = |symbol: &Symbol| -> ImSet<Symbol> { let direct_successors = |symbol: &Symbol| -> ImSet<Symbol> {
match refs_by_symbol.get(symbol) { match refs_by_symbol.get(symbol) {
Some((_, references)) => { Some((_, references)) => {
let mut loc_succ = local_successors(&references, &env.closures); let mut loc_succ = local_successors(references, &env.closures);
// NOTE: if the symbol is a closure we DONT look into its body // NOTE: if the symbol is a closure we DONT look into its body
@ -540,7 +540,7 @@ pub fn sort_can_defs(
), ),
Some((region, _)) => { Some((region, _)) => {
let expr_region = let expr_region =
can_defs_by_symbol.get(&symbol).unwrap().loc_expr.region; can_defs_by_symbol.get(symbol).unwrap().loc_expr.region;
let entry = CycleEntry { let entry = CycleEntry {
symbol: *symbol, symbol: *symbol,
@ -561,14 +561,14 @@ pub fn sort_can_defs(
))); )));
declarations.push(Declaration::InvalidCycle(entries)); declarations.push(Declaration::InvalidCycle(entries));
// other groups may depend on the symbols defined here, so
// also push this cycle onto the groups
groups.push(cycle);
} else {
// slightly inefficient, because we know this becomes exactly one DeclareRec already
groups.push(cycle);
} }
// if it's an invalid cycle, other groups may depend on the
// symbols defined here, so also push this cycle onto the groups
//
// if it's not an invalid cycle, this is slightly inefficient,
// because we know this becomes exactly one DeclareRec already
groups.push(cycle);
} }
// now we have a collection of groups whose dependencies are not cyclic. // now we have a collection of groups whose dependencies are not cyclic.
@ -662,11 +662,11 @@ fn group_to_declaration(
// for a definition, so every definition is only inserted (thus typechecked and emitted) once // for a definition, so every definition is only inserted (thus typechecked and emitted) once
let mut seen_pattern_regions: ImSet<Region> = ImSet::default(); let mut seen_pattern_regions: ImSet<Region> = ImSet::default();
for cycle in strongly_connected_components(&group, filtered_successors) { for cycle in strongly_connected_components(group, filtered_successors) {
if cycle.len() == 1 { if cycle.len() == 1 {
let symbol = &cycle[0]; let symbol = &cycle[0];
if let Some(can_def) = can_defs_by_symbol.get(&symbol) { if let Some(can_def) = can_defs_by_symbol.get(symbol) {
let mut new_def = can_def.clone(); let mut new_def = can_def.clone();
// Determine recursivity of closures that are not tail-recursive // Determine recursivity of closures that are not tail-recursive
@ -678,7 +678,7 @@ fn group_to_declaration(
*recursive = closure_recursivity(*symbol, closures); *recursive = closure_recursivity(*symbol, closures);
} }
let is_recursive = successors(&symbol).contains(&symbol); let is_recursive = successors(symbol).contains(symbol);
if !seen_pattern_regions.contains(&new_def.loc_pattern.region) { if !seen_pattern_regions.contains(&new_def.loc_pattern.region) {
if is_recursive { if is_recursive {
@ -854,7 +854,7 @@ fn canonicalize_pending_def<'a>(
}; };
for (_, (symbol, _)) in scope.idents() { for (_, (symbol, _)) in scope.idents() {
if !vars_by_symbol.contains_key(&symbol) { if !vars_by_symbol.contains_key(symbol) {
continue; continue;
} }
@ -999,7 +999,7 @@ fn canonicalize_pending_def<'a>(
// //
// Only defs of the form (foo = ...) can be closure declarations or self tail calls. // Only defs of the form (foo = ...) can be closure declarations or self tail calls.
if let ( if let (
&ast::Pattern::Identifier(ref _name), &ast::Pattern::Identifier(_name),
&Pattern::Identifier(ref defined_symbol), &Pattern::Identifier(ref defined_symbol),
&Closure { &Closure {
function_type, function_type,
@ -1021,7 +1021,7 @@ fn canonicalize_pending_def<'a>(
// Since everywhere in the code it'll be referred to by its defined name, // Since everywhere in the code it'll be referred to by its defined name,
// remove its generated name from the closure map. (We'll re-insert it later.) // remove its generated name from the closure map. (We'll re-insert it later.)
let references = env.closures.remove(&symbol).unwrap_or_else(|| { let references = env.closures.remove(symbol).unwrap_or_else(|| {
panic!( panic!(
"Tried to remove symbol {:?} from procedures, but it was not found: {:?}", "Tried to remove symbol {:?} from procedures, but it was not found: {:?}",
symbol, env.closures symbol, env.closures
@ -1065,7 +1065,7 @@ fn canonicalize_pending_def<'a>(
// Store the referenced locals in the refs_by_symbol map, so we can later figure out // Store the referenced locals in the refs_by_symbol map, so we can later figure out
// which defined names reference each other. // which defined names reference each other.
for (_, (symbol, region)) in scope.idents() { for (_, (symbol, region)) in scope.idents() {
if !vars_by_symbol.contains_key(&symbol) { if !vars_by_symbol.contains_key(symbol) {
continue; continue;
} }
@ -1110,10 +1110,8 @@ fn canonicalize_pending_def<'a>(
// identifier (e.g. `f = \x -> ...`), then this symbol can be tail-called. // identifier (e.g. `f = \x -> ...`), then this symbol can be tail-called.
let outer_identifier = env.tailcallable_symbol; let outer_identifier = env.tailcallable_symbol;
if let ( if let (&ast::Pattern::Identifier(_name), &Pattern::Identifier(ref defined_symbol)) =
&ast::Pattern::Identifier(ref _name), (&loc_pattern.value, &loc_can_pattern.value)
&Pattern::Identifier(ref defined_symbol),
) = (&loc_pattern.value, &loc_can_pattern.value)
{ {
env.tailcallable_symbol = Some(*defined_symbol); env.tailcallable_symbol = Some(*defined_symbol);
@ -1144,7 +1142,7 @@ fn canonicalize_pending_def<'a>(
// //
// Only defs of the form (foo = ...) can be closure declarations or self tail calls. // Only defs of the form (foo = ...) can be closure declarations or self tail calls.
if let ( if let (
&ast::Pattern::Identifier(ref _name), &ast::Pattern::Identifier(_name),
&Pattern::Identifier(ref defined_symbol), &Pattern::Identifier(ref defined_symbol),
&Closure { &Closure {
function_type, function_type,
@ -1166,7 +1164,7 @@ fn canonicalize_pending_def<'a>(
// Since everywhere in the code it'll be referred to by its defined name, // Since everywhere in the code it'll be referred to by its defined name,
// remove its generated name from the closure map. (We'll re-insert it later.) // remove its generated name from the closure map. (We'll re-insert it later.)
let references = env.closures.remove(&symbol).unwrap_or_else(|| { let references = env.closures.remove(symbol).unwrap_or_else(|| {
panic!( panic!(
"Tried to remove symbol {:?} from procedures, but it was not found: {:?}", "Tried to remove symbol {:?} from procedures, but it was not found: {:?}",
symbol, env.closures symbol, env.closures
@ -1555,7 +1553,7 @@ fn correct_mutual_recursive_type_alias<'a>(
let mut loc_succ = alias.typ.symbols(); let mut loc_succ = alias.typ.symbols();
// remove anything that is not defined in the current block // remove anything that is not defined in the current block
loc_succ.retain(|key| symbols_introduced.contains(key)); loc_succ.retain(|key| symbols_introduced.contains(key));
loc_succ.remove(&symbol); loc_succ.remove(symbol);
loc_succ loc_succ
} }
@ -1634,7 +1632,7 @@ fn make_tag_union_recursive<'a>(
typ.substitute_alias(symbol, &Type::Variable(rec_var)); typ.substitute_alias(symbol, &Type::Variable(rec_var));
} }
Type::RecursiveTagUnion(_, _, _) => {} Type::RecursiveTagUnion(_, _, _) => {}
Type::Alias(_, _, actual) => make_tag_union_recursive( Type::Alias { actual, .. } => make_tag_union_recursive(
env, env,
symbol, symbol,
region, region,

View file

@ -1,7 +1,6 @@
use crate::procedure::References; use crate::procedure::References;
use inlinable_string::InlinableString;
use roc_collections::all::{MutMap, MutSet}; use roc_collections::all::{MutMap, MutSet};
use roc_module::ident::ModuleName; use roc_module::ident::{Ident, ModuleName};
use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol}; use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol};
use roc_problem::can::{Problem, RuntimeError}; use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
@ -12,7 +11,7 @@ pub struct Env<'a> {
/// are assumed to be relative to this path. /// are assumed to be relative to this path.
pub home: ModuleId, pub home: ModuleId,
pub dep_idents: MutMap<ModuleId, IdentIds>, pub dep_idents: &'a MutMap<ModuleId, IdentIds>,
pub module_ids: &'a ModuleIds, pub module_ids: &'a ModuleIds,
@ -40,7 +39,7 @@ pub struct Env<'a> {
impl<'a> Env<'a> { impl<'a> Env<'a> {
pub fn new( pub fn new(
home: ModuleId, home: ModuleId,
dep_idents: MutMap<ModuleId, IdentIds>, dep_idents: &'a MutMap<ModuleId, IdentIds>,
module_ids: &'a ModuleIds, module_ids: &'a ModuleIds,
exposed_ident_ids: IdentIds, exposed_ident_ids: IdentIds,
) -> Env<'a> { ) -> Env<'a> {
@ -62,22 +61,21 @@ impl<'a> Env<'a> {
/// Returns Err if the symbol resolved, but it was not exposed by the given module /// Returns Err if the symbol resolved, but it was not exposed by the given module
pub fn qualified_lookup( pub fn qualified_lookup(
&mut self, &mut self,
module_name: &str, module_name_str: &str,
ident: &str, ident: &str,
region: Region, region: Region,
) -> Result<Symbol, RuntimeError> { ) -> Result<Symbol, RuntimeError> {
debug_assert!( debug_assert!(
!module_name.is_empty(), !module_name_str.is_empty(),
"Called env.qualified_lookup with an unqualified ident: {:?}", "Called env.qualified_lookup with an unqualified ident: {:?}",
ident ident
); );
let module_name: InlinableString = module_name.into(); let module_name = ModuleName::from(module_name_str);
let ident = Ident::from(ident);
match self.module_ids.get_id(&module_name) { match self.module_ids.get_id(&module_name) {
Some(&module_id) => { Some(&module_id) => {
let ident: InlinableString = ident.into();
// You can do qualified lookups on your own module, e.g. // You can do qualified lookups on your own module, e.g.
// if I'm in the Foo module, I can do a `Foo.bar` lookup. // if I'm in the Foo module, I can do a `Foo.bar` lookup.
if module_id == self.home { if module_id == self.home {
@ -114,7 +112,7 @@ impl<'a> Env<'a> {
Ok(symbol) Ok(symbol)
} }
None => Err(RuntimeError::ValueNotExposed { None => Err(RuntimeError::ValueNotExposed {
module_name: ModuleName::from(module_name), module_name,
ident, ident,
region, region,
}), }),

View file

@ -9,7 +9,6 @@ use crate::num::{
use crate::pattern::{canonicalize_pattern, Pattern}; use crate::pattern::{canonicalize_pattern, Pattern};
use crate::procedure::References; use crate::procedure::References;
use crate::scope::Scope; use crate::scope::Scope;
use inlinable_string::InlinableString;
use roc_collections::all::{ImSet, MutMap, MutSet, SendMap}; use roc_collections::all::{ImSet, MutMap, MutSet, SendMap};
use roc_module::ident::{ForeignSymbol, Lowercase, TagName}; use roc_module::ident::{ForeignSymbol, Lowercase, TagName};
use roc_module::low_level::LowLevel; use roc_module::low_level::LowLevel;
@ -58,9 +57,8 @@ pub enum Expr {
// Int and Float store a variable to generate better error messages // Int and Float store a variable to generate better error messages
Int(Variable, Variable, i128), Int(Variable, Variable, i128),
Float(Variable, Variable, f64), Float(Variable, Variable, f64),
Str(InlinableString), Str(Box<str>),
List { List {
list_var: Variable, // required for uniqueness of the list
elem_var: Variable, elem_var: Variable,
loc_elems: Vec<Located<Expr>>, loc_elems: Vec<Located<Expr>>,
}, },
@ -135,9 +133,12 @@ pub enum Expr {
}, },
/// field accessor as a function, e.g. (.foo) expr /// field accessor as a function, e.g. (.foo) expr
Accessor { Accessor {
/// accessors are desugared to closures; they need to have a name
/// so the closure can have a correct lambda set
name: Symbol,
function_var: Variable, function_var: Variable,
record_var: Variable, record_var: Variable,
closure_var: Variable, closure_ext_var: Variable,
ext_var: Variable, ext_var: Variable,
field_var: Variable, field_var: Variable,
field: Lowercase, field: Lowercase,
@ -158,6 +159,14 @@ pub enum Expr {
arguments: Vec<(Variable, Located<Expr>)>, arguments: Vec<(Variable, Located<Expr>)>,
}, },
ZeroArgumentTag {
closure_name: Symbol,
variant_var: Variable,
ext_var: Variable,
name: TagName,
arguments: Vec<(Variable, Located<Expr>)>,
},
// Test // Test
Expect(Box<Located<Expr>>, Box<Located<Expr>>), Expect(Box<Located<Expr>>, Box<Located<Expr>>),
@ -293,7 +302,6 @@ pub fn canonicalize_expr<'a>(
if loc_elems.is_empty() { if loc_elems.is_empty() {
( (
List { List {
list_var: var_store.fresh(),
elem_var: var_store.fresh(), elem_var: var_store.fresh(),
loc_elems: Vec::new(), loc_elems: Vec::new(),
}, },
@ -320,7 +328,6 @@ pub fn canonicalize_expr<'a>(
( (
List { List {
list_var: var_store.fresh(),
elem_var: var_store.fresh(), elem_var: var_store.fresh(),
loc_elems: can_elems, loc_elems: can_elems,
}, },
@ -392,6 +399,17 @@ pub fn canonicalize_expr<'a>(
name, name,
arguments: args, arguments: args,
}, },
ZeroArgumentTag {
variant_var,
ext_var,
name,
..
} => Tag {
variant_var,
ext_var,
name,
arguments: args,
},
_ => { _ => {
// This could be something like ((if True then fn1 else fn2) arg1 arg2). // This could be something like ((if True then fn1 else fn2) arg1 arg2).
Call( Call(
@ -581,7 +599,7 @@ pub fn canonicalize_expr<'a>(
// A "when" with no branches is a runtime error, but it will mess things up // A "when" with no branches is a runtime error, but it will mess things up
// if code gen mistakenly thinks this is a tail call just because its condition // if code gen mistakenly thinks this is a tail call just because its condition
// happend to be one. (The condition gave us our initial output value.) // happened to be one. (The condition gave us our initial output value.)
if branches.is_empty() { if branches.is_empty() {
output.tail_call = None; output.tail_call = None;
} }
@ -613,10 +631,11 @@ pub fn canonicalize_expr<'a>(
} }
ast::Expr::AccessorFunction(field) => ( ast::Expr::AccessorFunction(field) => (
Accessor { Accessor {
name: env.gen_unique_symbol(),
function_var: var_store.fresh(), function_var: var_store.fresh(),
record_var: var_store.fresh(), record_var: var_store.fresh(),
ext_var: var_store.fresh(), ext_var: var_store.fresh(),
closure_var: var_store.fresh(), closure_ext_var: var_store.fresh(),
field_var: var_store.fresh(), field_var: var_store.fresh(),
field: (*field).into(), field: (*field).into(),
}, },
@ -626,11 +645,14 @@ pub fn canonicalize_expr<'a>(
let variant_var = var_store.fresh(); let variant_var = var_store.fresh();
let ext_var = var_store.fresh(); let ext_var = var_store.fresh();
let symbol = env.gen_unique_symbol();
( (
Tag { ZeroArgumentTag {
name: TagName::Global((*tag).into()), name: TagName::Global((*tag).into()),
arguments: vec![], arguments: vec![],
variant_var, variant_var,
closure_name: symbol,
ext_var, ext_var,
}, },
Output::default(), Output::default(),
@ -641,13 +663,15 @@ pub fn canonicalize_expr<'a>(
let ext_var = var_store.fresh(); let ext_var = var_store.fresh();
let tag_ident = env.ident_ids.get_or_insert(&(*tag).into()); let tag_ident = env.ident_ids.get_or_insert(&(*tag).into());
let symbol = Symbol::new(env.home, tag_ident); let symbol = Symbol::new(env.home, tag_ident);
let lambda_set_symbol = env.gen_unique_symbol();
( (
Tag { ZeroArgumentTag {
name: TagName::Private(symbol), name: TagName::Private(symbol),
arguments: vec![], arguments: vec![],
variant_var, variant_var,
ext_var, ext_var,
closure_name: lambda_set_symbol,
}, },
Output::default(), Output::default(),
) )
@ -955,7 +979,7 @@ where
visited.insert(defined_symbol); visited.insert(defined_symbol);
for local in refs.lookups.iter() { for local in refs.lookups.iter() {
if !visited.contains(&local) { if !visited.contains(local) {
let other_refs: References = let other_refs: References =
references_from_local(*local, visited, refs_by_def, closures); references_from_local(*local, visited, refs_by_def, closures);
@ -966,7 +990,7 @@ where
} }
for call in refs.calls.iter() { for call in refs.calls.iter() {
if !visited.contains(&call) { if !visited.contains(call) {
let other_refs = references_from_call(*call, visited, refs_by_def, closures); let other_refs = references_from_call(*call, visited, refs_by_def, closures);
answer = answer.union(other_refs); answer = answer.union(other_refs);
@ -997,7 +1021,7 @@ where
visited.insert(call_symbol); visited.insert(call_symbol);
for closed_over_local in references.lookups.iter() { for closed_over_local in references.lookups.iter() {
if !visited.contains(&closed_over_local) { if !visited.contains(closed_over_local) {
let other_refs = let other_refs =
references_from_local(*closed_over_local, visited, refs_by_def, closures); references_from_local(*closed_over_local, visited, refs_by_def, closures);
@ -1008,7 +1032,7 @@ where
} }
for call in references.calls.iter() { for call in references.calls.iter() {
if !visited.contains(&call) { if !visited.contains(call) {
let other_refs = references_from_call(*call, visited, refs_by_def, closures); let other_refs = references_from_call(*call, visited, refs_by_def, closures);
answer = answer.union(other_refs); answer = answer.union(other_refs);
@ -1206,7 +1230,6 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
| other @ ForeignCall { .. } => other, | other @ ForeignCall { .. } => other,
List { List {
list_var,
elem_var, elem_var,
loc_elems, loc_elems,
} => { } => {
@ -1222,7 +1245,6 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
} }
List { List {
list_var,
elem_var, elem_var,
loc_elems: new_elems, loc_elems: new_elems,
} }
@ -1427,6 +1449,23 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
); );
} }
ZeroArgumentTag {
closure_name,
variant_var,
ext_var,
name,
arguments,
} => {
todo!(
"Inlining for ZeroArgumentTag with closure_name {:?}, variant_var {:?}, ext_var {:?}, name {:?}, arguments {:?}",
closure_name,
variant_var,
ext_var,
name,
arguments
);
}
Call(boxed_tuple, args, called_via) => { Call(boxed_tuple, args, called_via) => {
let (fn_var, loc_expr, closure_var, expr_var) = *boxed_tuple; let (fn_var, loc_expr, closure_var, expr_var) = *boxed_tuple;
@ -1534,7 +1573,7 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
enum StrSegment { enum StrSegment {
Interpolation(Located<Expr>), Interpolation(Located<Expr>),
Plaintext(InlinableString), Plaintext(Box<str>),
} }
fn flatten_str_lines<'a>( fn flatten_str_lines<'a>(
@ -1561,10 +1600,10 @@ fn flatten_str_lines<'a>(
buf.push(ch); buf.push(ch);
} }
None => { None => {
env.problem(Problem::InvalidUnicodeCodePoint(loc_hex_digits.region)); env.problem(Problem::InvalidUnicodeCodePt(loc_hex_digits.region));
return ( return (
Expr::RuntimeError(RuntimeError::InvalidUnicodeCodePoint( Expr::RuntimeError(RuntimeError::InvalidUnicodeCodePt(
loc_hex_digits.region, loc_hex_digits.region,
)), )),
output, output,

View file

@ -1,4 +1,4 @@
#![warn(clippy::all, clippy::dbg_macro)] #![warn(clippy::dbg_macro)]
// See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check. // See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]
pub mod annotation; pub mod annotation;

View file

@ -36,6 +36,7 @@ pub struct ModuleOutput {
pub problems: Vec<Problem>, pub problems: Vec<Problem>,
pub ident_ids: IdentIds, pub ident_ids: IdentIds,
pub references: MutSet<Symbol>, pub references: MutSet<Symbol>,
pub scope: Scope,
} }
// TODO trim these down // TODO trim these down
@ -46,7 +47,7 @@ pub fn canonicalize_module_defs<'a, F>(
home: ModuleId, home: ModuleId,
module_ids: &ModuleIds, module_ids: &ModuleIds,
exposed_ident_ids: IdentIds, exposed_ident_ids: IdentIds,
dep_idents: MutMap<ModuleId, IdentIds>, dep_idents: &'a MutMap<ModuleId, IdentIds>,
aliases: MutMap<Symbol, Alias>, aliases: MutMap<Symbol, Alias>,
exposed_imports: MutMap<Ident, (Symbol, Region)>, exposed_imports: MutMap<Ident, (Symbol, Region)>,
exposed_symbols: &MutSet<Symbol>, exposed_symbols: &MutSet<Symbol>,
@ -97,7 +98,7 @@ where
// Here we essentially add those "defs" to "the beginning of the module" // Here we essentially add those "defs" to "the beginning of the module"
// by canonicalizing them right before we canonicalize the actual ast::Def nodes. // by canonicalizing them right before we canonicalize the actual ast::Def nodes.
for (ident, (symbol, region)) in exposed_imports { for (ident, (symbol, region)) in exposed_imports {
let first_char = ident.as_inline_str().chars().next().unwrap(); let first_char = ident.as_inline_str().as_str().chars().next().unwrap();
if first_char.is_lowercase() { if first_char.is_lowercase() {
// this is a value definition // this is a value definition
@ -138,7 +139,7 @@ where
} }
} }
let (defs, _scope, output, symbols_introduced) = canonicalize_defs( let (defs, scope, output, symbols_introduced) = canonicalize_defs(
&mut env, &mut env,
Output::default(), Output::default(),
var_store, var_store,
@ -309,6 +310,7 @@ where
} }
Ok(ModuleOutput { Ok(ModuleOutput {
scope,
aliases, aliases,
rigid_variables, rigid_variables,
declarations, declarations,
@ -509,7 +511,7 @@ fn fix_values_captured_in_closure_expr(
fix_values_captured_in_closure_expr(&mut loc_expr.value, no_capture_symbols); fix_values_captured_in_closure_expr(&mut loc_expr.value, no_capture_symbols);
} }
Tag { arguments, .. } => { Tag { arguments, .. } | ZeroArgumentTag { arguments, .. } => {
for (_, loc_arg) in arguments.iter_mut() { for (_, loc_arg) in arguments.iter_mut() {
fix_values_captured_in_closure_expr(&mut loc_arg.value, no_capture_symbols); fix_values_captured_in_closure_expr(&mut loc_arg.value, no_capture_symbols);
} }

View file

@ -276,7 +276,7 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'a>>) -> &'a
}) })
} }
When(loc_cond_expr, branches) => { When(loc_cond_expr, branches) => {
let loc_desugared_cond = &*arena.alloc(desugar_expr(arena, &loc_cond_expr)); let loc_desugared_cond = &*arena.alloc(desugar_expr(arena, loc_cond_expr));
let mut desugared_branches = Vec::with_capacity_in(branches.len(), arena); let mut desugared_branches = Vec::with_capacity_in(branches.len(), arena);
for branch in branches.iter() { for branch in branches.iter() {
@ -345,8 +345,8 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'a>>) -> &'a
) )
} }
If(if_thens, final_else_branch) => { If(if_thens, final_else_branch) => {
// If does not get desugared into `when` so we can give more targetted error messages during type checking. // If does not get desugared into `when` so we can give more targeted error messages during type checking.
let desugared_final_else = &*arena.alloc(desugar_expr(arena, &final_else_branch)); let desugared_final_else = &*arena.alloc(desugar_expr(arena, final_else_branch));
let mut desugared_if_thens = Vec::with_capacity_in(if_thens.len(), arena); let mut desugared_if_thens = Vec::with_capacity_in(if_thens.len(), arena);
@ -363,8 +363,8 @@ pub fn desugar_expr<'a>(arena: &'a Bump, loc_expr: &'a Located<Expr<'a>>) -> &'a
}) })
} }
Expect(condition, continuation) => { Expect(condition, continuation) => {
let desugared_condition = &*arena.alloc(desugar_expr(arena, &condition)); let desugared_condition = &*arena.alloc(desugar_expr(arena, condition));
let desugared_continuation = &*arena.alloc(desugar_expr(arena, &continuation)); let desugared_continuation = &*arena.alloc(desugar_expr(arena, continuation));
arena.alloc(Located { arena.alloc(Located {
value: Expect(desugared_condition, desugared_continuation), value: Expect(desugared_condition, desugared_continuation),
region: loc_expr.region, region: loc_expr.region,

View file

@ -185,7 +185,7 @@ pub fn canonicalize_pattern<'a>(
} }
} }
FloatLiteral(ref string) => match pattern_type { FloatLiteral(string) => match pattern_type {
WhenBranch => match finish_parsing_float(string) { WhenBranch => match finish_parsing_float(string) {
Err(_error) => { Err(_error) => {
let problem = MalformedPatternProblem::MalformedFloat; let problem = MalformedPatternProblem::MalformedFloat;

View file

@ -1,4 +1,4 @@
use roc_collections::all::{ImMap, MutSet}; use roc_collections::all::{MutSet, SendMap};
use roc_module::ident::{Ident, Lowercase}; use roc_module::ident::{Ident, Lowercase};
use roc_module::symbol::{IdentIds, ModuleId, Symbol}; use roc_module::symbol::{IdentIds, ModuleId, Symbol};
use roc_problem::can::RuntimeError; use roc_problem::can::RuntimeError;
@ -10,14 +10,14 @@ use roc_types::types::{Alias, Type};
pub struct Scope { pub struct Scope {
/// All the identifiers in scope, mapped to were they were defined and /// All the identifiers in scope, mapped to were they were defined and
/// the Symbol they resolve to. /// the Symbol they resolve to.
idents: ImMap<Ident, (Symbol, Region)>, idents: SendMap<Ident, (Symbol, Region)>,
/// A cache of all the symbols in scope. This makes lookups much /// A cache of all the symbols in scope. This makes lookups much
/// faster when checking for unused defs and unused arguments. /// faster when checking for unused defs and unused arguments.
symbols: ImMap<Symbol, Region>, symbols: SendMap<Symbol, Region>,
/// The type aliases currently in scope /// The type aliases currently in scope
aliases: ImMap<Symbol, Alias>, aliases: SendMap<Symbol, Alias>,
/// The current module being processed. This will be used to turn /// The current module being processed. This will be used to turn
/// unqualified idents into Symbols. /// unqualified idents into Symbols.
@ -28,7 +28,7 @@ impl Scope {
pub fn new(home: ModuleId, var_store: &mut VarStore) -> Scope { pub fn new(home: ModuleId, var_store: &mut VarStore) -> Scope {
use roc_types::solved_types::{BuiltinAlias, FreeVars}; use roc_types::solved_types::{BuiltinAlias, FreeVars};
let solved_aliases = roc_types::builtin_aliases::aliases(); let solved_aliases = roc_types::builtin_aliases::aliases();
let mut aliases = ImMap::default(); let mut aliases = SendMap::default();
for (symbol, builtin_alias) in solved_aliases { for (symbol, builtin_alias) in solved_aliases {
let BuiltinAlias { region, vars, typ } = builtin_alias; let BuiltinAlias { region, vars, typ } = builtin_alias;
@ -47,7 +47,7 @@ impl Scope {
let alias = Alias { let alias = Alias {
region, region,
typ, typ,
lambda_set_variables: MutSet::default(), lambda_set_variables: Vec::new(),
recursion_variables: MutSet::default(), recursion_variables: MutSet::default(),
type_variables: variables, type_variables: variables,
}; };
@ -58,7 +58,7 @@ impl Scope {
Scope { Scope {
home, home,
idents: Symbol::default_in_scope(), idents: Symbol::default_in_scope(),
symbols: ImMap::default(), symbols: SendMap::default(),
aliases, aliases,
} }
} }
@ -89,7 +89,7 @@ impl Scope {
None => Err(RuntimeError::LookupNotInScope( None => Err(RuntimeError::LookupNotInScope(
Located { Located {
region, region,
value: ident.clone().into(), value: ident.clone(),
}, },
self.idents.keys().map(|v| v.as_ref().into()).collect(), self.idents.keys().map(|v| v.as_ref().into()).collect(),
)), )),
@ -124,9 +124,9 @@ impl Scope {
// If this IdentId was already added previously // If this IdentId was already added previously
// when the value was exposed in the module header, // when the value was exposed in the module header,
// use that existing IdentId. Otherwise, create a fresh one. // use that existing IdentId. Otherwise, create a fresh one.
let ident_id = match exposed_ident_ids.get_id(&ident.as_inline_str()) { let ident_id = match exposed_ident_ids.get_id(&ident) {
Some(ident_id) => *ident_id, Some(ident_id) => *ident_id,
None => all_ident_ids.add(ident.clone().into()), None => all_ident_ids.add(ident.clone()),
}; };
let symbol = Symbol::new(self.home, ident_id); let symbol = Symbol::new(self.home, ident_id);
@ -143,7 +143,7 @@ impl Scope {
/// ///
/// Used for record guards like { x: Just _ } /// Used for record guards like { x: Just _ }
pub fn ignore(&mut self, ident: Ident, all_ident_ids: &mut IdentIds) -> Symbol { pub fn ignore(&mut self, ident: Ident, all_ident_ids: &mut IdentIds) -> Symbol {
let ident_id = all_ident_ids.add(ident.into()); let ident_id = all_ident_ids.add(ident);
Symbol::new(self.home, ident_id) Symbol::new(self.home, ident_id)
} }
@ -198,6 +198,11 @@ impl Scope {
true true
}); });
let lambda_set_variables: Vec<_> = lambda_set_variables
.into_iter()
.map(|v| roc_types::types::LambdaSet(Type::Variable(v)))
.collect();
let alias = Alias { let alias = Alias {
region, region,
type_variables: vars, type_variables: vars,

View file

@ -313,7 +313,7 @@ pub fn canonical_string_literal<'a>(_arena: &Bump, _raw: &'a str, _region: Regio
// problems.push(Loc { // problems.push(Loc {
// region, // region,
// value: Problem::UnicodeCodePointTooLarge, // value: Problem::UnicodeCodePtTooLarge,
// }); // });
// } else { // } else {
// // If it all checked out, add it to // // If it all checked out, add it to
@ -322,7 +322,7 @@ pub fn canonical_string_literal<'a>(_arena: &Bump, _raw: &'a str, _region: Regio
// Some(ch) => buf.push(ch), // Some(ch) => buf.push(ch),
// None => { // None => {
// problems.push(loc_escaped_unicode( // problems.push(loc_escaped_unicode(
// Problem::InvalidUnicodeCodePoint, // Problem::InvalidUnicodeCodePt,
// &state, // &state,
// start_of_unicode, // start_of_unicode,
// hex_str.len(), // hex_str.len(),
@ -335,7 +335,7 @@ pub fn canonical_string_literal<'a>(_arena: &Bump, _raw: &'a str, _region: Regio
// let problem = if hex_str.is_empty() { // let problem = if hex_str.is_empty() {
// Problem::NoUnicodeDigits // Problem::NoUnicodeDigits
// } else { // } else {
// Problem::NonHexCharsInUnicodeCodePoint // Problem::NonHexCharsInUnicodeCodePt
// }; // };
// problems.push(loc_escaped_unicode( // problems.push(loc_escaped_unicode(

View file

@ -34,7 +34,7 @@ pub struct CanExprOut {
#[allow(dead_code)] #[allow(dead_code)]
pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut { pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut {
let loc_expr = roc_parse::test_helpers::parse_loc_with(&arena, expr_str).unwrap_or_else(|e| { let loc_expr = roc_parse::test_helpers::parse_loc_with(arena, expr_str).unwrap_or_else(|e| {
panic!( panic!(
"can_expr_with() got a parse error when attempting to canonicalize:\n\n{:?} {:?}", "can_expr_with() got a parse error when attempting to canonicalize:\n\n{:?} {:?}",
expr_str, e expr_str, e
@ -56,7 +56,7 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
let mut scope = Scope::new(home, &mut var_store); let mut scope = Scope::new(home, &mut var_store);
let dep_idents = IdentIds::exposed_builtins(0); let dep_idents = IdentIds::exposed_builtins(0);
let mut env = Env::new(home, dep_idents, &module_ids, IdentIds::default()); let mut env = Env::new(home, &dep_idents, &module_ids, IdentIds::default());
let (loc_expr, output) = canonicalize_expr( let (loc_expr, output) = canonicalize_expr(
&mut env, &mut env,
&mut var_store, &mut var_store,

View file

@ -145,7 +145,7 @@ mod test_can {
let region = Region::zero(); let region = Region::zero();
assert_can( assert_can(
&string.clone(), string.clone(),
RuntimeError(RuntimeError::InvalidFloat( RuntimeError(RuntimeError::InvalidFloat(
FloatErrorKind::Error, FloatErrorKind::Error,
region, region,
@ -658,7 +658,7 @@ mod test_can {
recursive: recursion, recursive: recursion,
.. ..
}) => recursion.clone(), }) => recursion.clone(),
Some(other @ _) => { Some(other) => {
panic!("assignment at {} is not a closure, but a {:?}", i, other) panic!("assignment at {} is not a closure, but a {:?}", i, other)
} }
None => { None => {
@ -680,7 +680,7 @@ mod test_can {
recursive: recursion, recursive: recursion,
.. ..
} => recursion.clone(), } => recursion.clone(),
other @ _ => { other => {
panic!("assignment at {} is not a closure, but a {:?}", i, other) panic!("assignment at {} is not a closure, but a {:?}", i, other)
} }
} }
@ -1590,7 +1590,7 @@ mod test_can {
// // (Rust has this restriction. I assume it's a good idea.) // // (Rust has this restriction. I assume it's a good idea.)
// assert_malformed_str( // assert_malformed_str(
// r#""abc\u{110000}def""#, // r#""abc\u{110000}def""#,
// vec![Located::new(0, 7, 0, 12, Problem::UnicodeCodePointTooLarge)], // vec![Located::new(0, 7, 0, 12, Problem::UnicodeCodePtTooLarge)],
// ); // );
// } // }

View file

@ -79,7 +79,7 @@ where
let mut buf = String::new_in(arena); let mut buf = String::new_in(arena);
if let Some(first) = strings.next() { if let Some(first) = strings.next() {
buf.push_str(&first); buf.push_str(first);
for string in strings { for string in strings {
buf.reserve(join_str.len() + string.len()); buf.reserve(join_str.len() + string.len());
@ -133,7 +133,7 @@ where
let mut answer = MutMap::default(); let mut answer = MutMap::default();
for (key, right_value) in map2 { for (key, right_value) in map2 {
match std::collections::HashMap::get(map1, &key) { match std::collections::HashMap::get(map1, key) {
None => (), None => (),
Some(left_value) => { Some(left_value) => {
answer.insert(key.clone(), (left_value.clone(), right_value.clone())); answer.insert(key.clone(), (left_value.clone(), right_value.clone()));

View file

@ -1,4 +1,4 @@
#![warn(clippy::all, clippy::dbg_macro)] #![warn(clippy::dbg_macro)]
// See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check. // See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]

View file

@ -2,7 +2,7 @@ use roc_can::constraint::Constraint::{self, *};
use roc_can::constraint::LetConstraint; use roc_can::constraint::LetConstraint;
use roc_can::expected::Expected::{self, *}; use roc_can::expected::Expected::{self, *};
use roc_collections::all::SendMap; use roc_collections::all::SendMap;
use roc_module::ident::TagName; use roc_module::ident::{Lowercase, TagName};
use roc_module::symbol::Symbol; use roc_module::symbol::Symbol;
use roc_region::all::Region; use roc_region::all::Region;
use roc_types::subs::Variable; use roc_types::subs::Variable;
@ -13,7 +13,7 @@ use roc_types::types::Type::{self, *};
#[inline(always)] #[inline(always)]
pub fn int_literal( pub fn int_literal(
num_var: Variable, num_var: Variable,
percision_var: Variable, precision_var: Variable,
expected: Expected<Type>, expected: Expected<Type>,
region: Region, region: Region,
) -> Constraint { ) -> Constraint {
@ -25,7 +25,7 @@ pub fn int_literal(
And(vec![ And(vec![
Eq( Eq(
num_type.clone(), num_type.clone(),
ForReason(reason, num_int(Type::Variable(percision_var)), region), ForReason(reason, num_int(Type::Variable(precision_var)), region),
Category::Int, Category::Int,
region, region,
), ),
@ -89,9 +89,23 @@ pub fn str_type() -> Type {
builtin_type(Symbol::STR_STR, Vec::new()) builtin_type(Symbol::STR_STR, Vec::new())
} }
#[inline(always)]
fn builtin_alias(
symbol: Symbol,
type_arguments: Vec<(Lowercase, Type)>,
actual: Box<Type>,
) -> Type {
Type::Alias {
symbol,
type_arguments,
actual,
lambda_set_variables: vec![],
}
}
#[inline(always)] #[inline(always)]
pub fn num_float(range: Type) -> Type { pub fn num_float(range: Type) -> Type {
Type::Alias( builtin_alias(
Symbol::NUM_FLOAT, Symbol::NUM_FLOAT,
vec![("range".into(), range.clone())], vec![("range".into(), range.clone())],
Box::new(num_num(num_floatingpoint(range))), Box::new(num_num(num_floatingpoint(range))),
@ -108,7 +122,7 @@ pub fn num_floatingpoint(range: Type) -> Type {
Box::new(Type::EmptyTagUnion), Box::new(Type::EmptyTagUnion),
); );
Type::Alias( builtin_alias(
Symbol::NUM_FLOATINGPOINT, Symbol::NUM_FLOATINGPOINT,
vec![("range".into(), range)], vec![("range".into(), range)],
Box::new(alias_content), Box::new(alias_content),
@ -122,12 +136,12 @@ pub fn num_binary64() -> Type {
Box::new(Type::EmptyTagUnion), Box::new(Type::EmptyTagUnion),
); );
Type::Alias(Symbol::NUM_BINARY64, vec![], Box::new(alias_content)) builtin_alias(Symbol::NUM_BINARY64, vec![], Box::new(alias_content))
} }
#[inline(always)] #[inline(always)]
pub fn num_int(range: Type) -> Type { pub fn num_int(range: Type) -> Type {
Type::Alias( builtin_alias(
Symbol::NUM_INT, Symbol::NUM_INT,
vec![("range".into(), range.clone())], vec![("range".into(), range.clone())],
Box::new(num_num(num_integer(range))), Box::new(num_num(num_integer(range))),
@ -141,7 +155,7 @@ pub fn num_signed64() -> Type {
Box::new(Type::EmptyTagUnion), Box::new(Type::EmptyTagUnion),
); );
Type::Alias(Symbol::NUM_SIGNED64, vec![], Box::new(alias_content)) builtin_alias(Symbol::NUM_SIGNED64, vec![], Box::new(alias_content))
} }
#[inline(always)] #[inline(always)]
@ -154,7 +168,7 @@ pub fn num_integer(range: Type) -> Type {
Box::new(Type::EmptyTagUnion), Box::new(Type::EmptyTagUnion),
); );
Type::Alias( builtin_alias(
Symbol::NUM_INTEGER, Symbol::NUM_INTEGER,
vec![("range".into(), range)], vec![("range".into(), range)],
Box::new(alias_content), Box::new(alias_content),
@ -168,7 +182,7 @@ pub fn num_num(typ: Type) -> Type {
Box::new(Type::EmptyTagUnion), Box::new(Type::EmptyTagUnion),
); );
Type::Alias( builtin_alias(
Symbol::NUM_NUM, Symbol::NUM_NUM,
vec![("range".into(), typ)], vec![("range".into(), typ)],
Box::new(alias_content), Box::new(alias_content),

View file

@ -10,7 +10,7 @@ use roc_can::expr::Expr::{self, *};
use roc_can::expr::{Field, WhenBranch}; use roc_can::expr::{Field, WhenBranch};
use roc_can::pattern::Pattern; use roc_can::pattern::Pattern;
use roc_collections::all::{ImMap, Index, SendMap}; use roc_collections::all::{ImMap, Index, SendMap};
use roc_module::ident::Lowercase; use roc_module::ident::{Lowercase, TagName};
use roc_module::symbol::{ModuleId, Symbol}; use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
use roc_types::subs::Variable; use roc_types::subs::Variable;
@ -96,7 +96,7 @@ pub fn constrain_expr(
expected: Expected<Type>, expected: Expected<Type>,
) -> Constraint { ) -> Constraint {
match expr { match expr {
Int(var, percision, _) => int_literal(*var, *percision, expected, region), Int(var, precision, _) => int_literal(*var, *precision, expected, region),
Num(var, _) => exists( Num(var, _) => exists(
vec![*var], vec![*var],
Eq( Eq(
@ -106,7 +106,7 @@ pub fn constrain_expr(
region, region,
), ),
), ),
Float(var, percision, _) => float_literal(*var, *percision, expected, region), Float(var, precision, _) => float_literal(*var, *precision, expected, region),
EmptyRecord => constrain_empty_record(region, expected), EmptyRecord => constrain_empty_record(region, expected),
Expr::Record { record_var, fields } => { Expr::Record { record_var, fields } => {
if fields.is_empty() { if fields.is_empty() {
@ -220,7 +220,6 @@ pub fn constrain_expr(
List { List {
elem_var, elem_var,
loc_elems, loc_elems,
list_var: _unused,
} => { } => {
if loc_elems.is_empty() { if loc_elems.is_empty() {
exists( exists(
@ -712,10 +711,11 @@ pub fn constrain_expr(
) )
} }
Accessor { Accessor {
name: closure_name,
function_var, function_var,
field, field,
record_var, record_var,
closure_var, closure_ext_var: closure_var,
ext_var, ext_var,
field_var, field_var,
} => { } => {
@ -739,9 +739,15 @@ pub fn constrain_expr(
region, region,
); );
let ext = Type::Variable(*closure_var);
let lambda_set = Type::TagUnion(
vec![(TagName::Closure(*closure_name), vec![])],
Box::new(ext),
);
let function_type = Type::Function( let function_type = Type::Function(
vec![record_type], vec![record_type],
Box::new(Type::Variable(*closure_var)), Box::new(lambda_set),
Box::new(field_type), Box::new(field_type),
); );
@ -860,6 +866,58 @@ pub fn constrain_expr(
exists(vars, And(arg_cons)) exists(vars, And(arg_cons))
} }
ZeroArgumentTag {
variant_var,
ext_var,
name,
arguments,
closure_name,
} => {
let mut vars = Vec::with_capacity(arguments.len());
let mut types = Vec::with_capacity(arguments.len());
let mut arg_cons = Vec::with_capacity(arguments.len());
for (var, loc_expr) in arguments {
let arg_con = constrain_expr(
env,
loc_expr.region,
&loc_expr.value,
Expected::NoExpectation(Type::Variable(*var)),
);
arg_cons.push(arg_con);
vars.push(*var);
types.push(Type::Variable(*var));
}
let union_con = Eq(
Type::FunctionOrTagUnion(
name.clone(),
*closure_name,
Box::new(Type::Variable(*ext_var)),
),
expected.clone(),
Category::TagApply {
tag_name: name.clone(),
args_count: arguments.len(),
},
region,
);
let ast_con = Eq(
Type::Variable(*variant_var),
expected,
Category::Storage(std::file!(), std::line!()),
region,
);
vars.push(*variant_var);
vars.push(*ext_var);
arg_cons.push(union_con);
arg_cons.push(ast_con);
exists(vars, And(arg_cons))
}
RunLowLevel { args, ret_var, op } => { RunLowLevel { args, ret_var, op } => {
// This is a modified version of what we do for function calls. // This is a modified version of what we do for function calls.
@ -1063,7 +1121,7 @@ pub fn constrain_decls(home: ModuleId, decls: &[Declaration]) -> Constraint {
} }
// this assert make the "root" of the constraint wasn't dropped // this assert make the "root" of the constraint wasn't dropped
debug_assert!(format!("{:?}", &constraint).contains("SaveTheEnvironment")); debug_assert!(constraint.contains_save_the_environment());
constraint constraint
} }
@ -1156,7 +1214,7 @@ fn constrain_def(env: &Env, def: &Def, body_con: Constraint) -> Constraint {
name, name,
.. ..
}, },
Type::Function(arg_types, _closure_type, ret_type), Type::Function(arg_types, signature_closure_type, ret_type),
) => { ) => {
// NOTE if we ever have problems with the closure, the ignored `_closure_type` // NOTE if we ever have problems with the closure, the ignored `_closure_type`
// is probably a good place to start the investigation! // is probably a good place to start the investigation!
@ -1261,6 +1319,19 @@ fn constrain_def(env: &Env, def: &Def, body_con: Constraint) -> Constraint {
defs_constraint, defs_constraint,
ret_constraint, ret_constraint,
})), })),
Eq(
Type::Variable(closure_var),
Expected::FromAnnotation(
def.loc_pattern.clone(),
arity,
AnnotationSource::TypedBody {
region: annotation.region,
},
*signature_closure_type.clone(),
),
Category::ClosureSize,
region,
),
Store(signature.clone(), *fn_var, std::file!(), std::line!()), Store(signature.clone(), *fn_var, std::file!(), std::line!()),
Store(signature, expr_var, std::file!(), std::line!()), Store(signature, expr_var, std::file!(), std::line!()),
Store(ret_type, ret_var, std::file!(), std::line!()), Store(ret_type, ret_var, std::file!(), std::line!()),
@ -1375,7 +1446,7 @@ fn instantiate_rigids(
let mut rigid_substitution: ImMap<Variable, Type> = ImMap::default(); let mut rigid_substitution: ImMap<Variable, Type> = ImMap::default();
for (name, var) in introduced_vars.var_by_name.iter() { for (name, var) in introduced_vars.var_by_name.iter() {
if let Some(existing_rigid) = ftv.get(&name) { if let Some(existing_rigid) = ftv.get(name) {
rigid_substitution.insert(*var, Type::Variable(*existing_rigid)); rigid_substitution.insert(*var, Type::Variable(*existing_rigid));
} else { } else {
// It's possible to use this rigid in nested defs // It's possible to use this rigid in nested defs

View file

@ -1,4 +1,4 @@
#![warn(clippy::all, clippy::dbg_macro)] #![warn(clippy::dbg_macro)]
// See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check. // See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]
pub mod builtins; pub mod builtins;

View file

@ -1,7 +1,7 @@
use crate::expr::constrain_decls; use crate::expr::constrain_decls;
use roc_builtins::std::StdLib; use roc_builtins::std::StdLib;
use roc_can::constraint::{Constraint, LetConstraint}; use roc_can::constraint::{Constraint, LetConstraint};
use roc_can::module::ModuleOutput; use roc_can::def::Declaration;
use roc_collections::all::{MutMap, MutSet, SendMap}; use roc_collections::all::{MutMap, MutSet, SendMap};
use roc_module::symbol::{ModuleId, Symbol}; use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Located, Region}; use roc_region::all::{Located, Region};
@ -22,16 +22,18 @@ pub struct ConstrainedModule {
pub constraint: Constraint, pub constraint: Constraint,
} }
pub fn constrain_module(module: &ModuleOutput, home: ModuleId) -> Constraint { pub fn constrain_module(
aliases: &MutMap<Symbol, Alias>,
declarations: &[Declaration],
home: ModuleId,
) -> Constraint {
let mut send_aliases = SendMap::default(); let mut send_aliases = SendMap::default();
for (symbol, alias) in module.aliases.iter() { for (symbol, alias) in aliases.iter() {
send_aliases.insert(*symbol, alias.clone()); send_aliases.insert(*symbol, alias.clone());
} }
let decls = &module.declarations; constrain_decls(home, declarations)
constrain_decls(home, decls)
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -55,7 +57,7 @@ pub fn constrain_imported_values(
// an imported symbol can be either an alias or a value // an imported symbol can be either an alias or a value
match import.solved_type { match import.solved_type {
SolvedType::Alias(symbol, _, _) if symbol == loc_symbol.value => { SolvedType::Alias(symbol, _, _, _) if symbol == loc_symbol.value => {
// do nothing, in the future the alias definitions should not be in the list of imported values // do nothing, in the future the alias definitions should not be in the list of imported values
} }
_ => { _ => {
@ -144,7 +146,7 @@ pub fn pre_constrain_imports(
// Translate referenced symbols into constraints. We do this on the main // Translate referenced symbols into constraints. We do this on the main
// thread because we need exclusive access to the exposed_types map, in order // thread because we need exclusive access to the exposed_types map, in order
// to get the necessary constraint info for any aliases we imported. We also // to get the necessary constraint info for any aliases we imported. We also
// resolve builtin types now, so we can use a refernce to stdlib instead of // resolve builtin types now, so we can use a reference to stdlib instead of
// having to either clone it or recreate it from scratch on the other thread. // having to either clone it or recreate it from scratch on the other thread.
for &symbol in references.iter() { for &symbol in references.iter() {
let module_id = symbol.module_id(); let module_id = symbol.module_id();

View file

@ -119,7 +119,7 @@ fn headers_from_annotation_help(
} }
/// This accepts PatternState (rather than returning it) so that the caller can /// This accepts PatternState (rather than returning it) so that the caller can
/// intiialize the Vecs in PatternState using with_capacity /// initialize the Vecs in PatternState using with_capacity
/// based on its knowledge of their lengths. /// based on its knowledge of their lengths.
pub fn constrain_pattern( pub fn constrain_pattern(
env: &Env, env: &Env,
@ -206,7 +206,7 @@ pub fn constrain_pattern(
let pat_type = Type::Variable(*var); let pat_type = Type::Variable(*var);
let expected = PExpected::NoExpectation(pat_type.clone()); let expected = PExpected::NoExpectation(pat_type.clone());
if !state.headers.contains_key(&symbol) { if !state.headers.contains_key(symbol) {
state state
.headers .headers
.insert(*symbol, Located::at(region, pat_type.clone())); .insert(*symbol, Located::at(region, pat_type.clone()));

View file

@ -13,7 +13,6 @@ roc_parse = { path = "../parse" }
im = "14" # im and im-rc should always have the same version! im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version! im-rc = "14" # im and im-rc should always have the same version!
bumpalo = { version = "3.6.1", features = ["collections"] } bumpalo = { version = "3.6.1", features = ["collections"] }
inlinable_string = "0.1"
[dev-dependencies] [dev-dependencies]
pretty_assertions = "0.5.1" pretty_assertions = "0.5.1"

View file

@ -295,7 +295,7 @@ impl<'a> Formattable<'a> for Expr<'a> {
items, items,
final_comments, final_comments,
} => { } => {
fmt_list(buf, &items, final_comments, indent); fmt_list(buf, items, final_comments, indent);
} }
BinOps(lefts, right) => fmt_bin_ops(buf, lefts, right, false, parens, indent), BinOps(lefts, right) => fmt_bin_ops(buf, lefts, right, false, parens, indent),
UnaryOp(sub_expr, unary_op) => { UnaryOp(sub_expr, unary_op) => {
@ -1027,7 +1027,7 @@ fn format_field_multiline<'a, T>(
format_field_multiline(buf, sub_field, indent, separator_prefix); format_field_multiline(buf, sub_field, indent, separator_prefix);
} }
AssignedField::SpaceAfter(sub_field, spaces) => { AssignedField::SpaceAfter(sub_field, spaces) => {
// We have somethig like that: // We have something like that:
// ``` // ```
// field # comment // field # comment
// , otherfield // , otherfield

View file

@ -1,4 +1,4 @@
#![warn(clippy::all, clippy::dbg_macro)] #![warn(clippy::dbg_macro)]
// See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check. // See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)] #![allow(clippy::large_enum_variant)]
pub mod annotation; pub mod annotation;

View file

@ -116,7 +116,7 @@ mod test_fmt {
} }
#[test] #[test]
fn force_space_at_begining_of_comment() { fn force_space_at_beginning_of_comment() {
expr_formats_to( expr_formats_to(
indoc!( indoc!(
r#" r#"

Some files were not shown because too many files have changed in this diff Show more