Mirror of https://github.com/roc-lang/roc.git (synced 2025-08-04 04:08:19 +00:00)

Commit 012a2fc63f: Merge remote-tracking branch 'remote/main' into rebuild-platform

197 changed files with 5769 additions and 3820 deletions
.github/workflows/basic_cli_build_release.yml (vendored, 43 changes)
@@ -11,7 +11,7 @@ env:
  # use .tar.gz for quick testing
  ARCHIVE_FORMAT: .tar.br
  # Make a new basic-cli git tag and set it here before starting this workflow
- RELEASE_TAG: 0.12.0
+ RELEASE_TAG: 0.14.0

  jobs:
  prepare:
@@ -34,14 +34,14 @@ jobs:
  fi

  # get latest nightly releases
- - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
- - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
- - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
- - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
- #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-TESTING.tar.gz
- #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-TESTING.tar.gz
- #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-TESTING.tar.gz
- #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-TESTING.tar.gz
+ #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
+ #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
+ #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
+ #- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
+ - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-TESTING.tar.gz
+ - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-TESTING.tar.gz
+ - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-TESTING.tar.gz
+ - run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-TESTING.tar.gz

  - name: Save roc_nightly archives
  uses: actions/upload-artifact@v4
@@ -60,16 +60,16 @@ jobs:
  - name: build basic-cli with surgical linker and also with legacy linker
  env:
  CARGO_BUILD_TARGET: x86_64-unknown-linux-musl
- run: ./ci/build_basic_cli.sh linux_x86_64 "--linker legacy"
+ run: ./ci/build_basic_cli.sh linux_x86_64

- - name: Save .rh, .rm and .o file
+ - name: Save .rh, .rm and .a file
  uses: actions/upload-artifact@v4
  with:
  name: linux-x86_64-files
  path: |
  basic-cli/platform/metadata_linux-x64.rm
  basic-cli/platform/linux-x64.rh
- basic-cli/platform/linux-x64.o
+ basic-cli/platform/linux-x64.a


  build-linux-arm64-files:
@@ -89,12 +89,12 @@ jobs:
  CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS: "-Clink-self-contained=yes -Clinker=rust-lld"
  run: ./ci/build_basic_cli.sh linux_arm64

- - name: Save .o file
+ - name: Save .a file
  uses: actions/upload-artifact@v4
  with:
  name: linux-arm64-files
  path: |
- basic-cli/platform/linux-arm64.o
+ basic-cli/platform/linux-arm64.a

  build-macos-x86_64-files:
  runs-on: [macos-12] # I expect the generated files to work on macOS 12 and up
@@ -107,15 +107,15 @@ jobs:

  - run: ./ci/build_basic_cli.sh macos_x86_64

- - name: Save .o files
+ - name: Save .a file
  uses: actions/upload-artifact@v4
  with:
  name: macos-x86_64-files
  path: |
- basic-cli/platform/macos-x64.o
+ basic-cli/platform/macos-x64.a

  build-macos-apple-silicon-files:
- name: build apple silicon .o file
+ name: build apple silicon .a file
  runs-on: [self-hosted, macOS, ARM64]
  needs: [prepare]
  steps:
@@ -126,12 +126,12 @@ jobs:

  - run: ./ci/build_basic_cli.sh macos_apple_silicon

- - name: Save macos-arm64.o file
+ - name: Save macos-arm64.a file
  uses: actions/upload-artifact@v4
  with:
  name: macos-apple-silicon-files
  path: |
- basic-cli/platform/macos-arm64.o
+ basic-cli/platform/macos-arm64.a

  create-release-archive:
  needs: [build-linux-x86_64-files, build-linux-arm64-files, build-macos-x86_64-files, build-macos-apple-silicon-files]
@@ -250,10 +250,11 @@ jobs:
  git checkout ${{ env.RELEASE_TAG }}
  cp -r examples ../..
  cp -r ci ../..
- cp -r LICENSE ../..
+ # LICENSE is necessary for command test
+ cp -r LICENSE ../..

  - name: run tests
  run: |
  cd basic-cli-platform
- ROC=./roc_nightly/roc EXAMPLES_DIR=./examples/ ROC_BUILD_FLAGS=--prebuilt-platform ./ci/all_tests.sh
+ # no need to build platform anymore
+ NO_BUILD=1 ROC=./roc_nightly/roc EXAMPLES_DIR=./examples/ ./ci/all_tests.sh
@@ -10,7 +10,7 @@ concurrency:
  env:
  # use .tar.gz for quick testing
  ARCHIVE_FORMAT: .tar.br
- BASIC_WEBSERVER_BRANCH: main
+ RELEASE_TAG: 0.8.0

  jobs:
  fetch-releases:
@@ -46,16 +46,16 @@ jobs:
  - name: build basic-webserver with legacy linker
  env:
  CARGO_BUILD_TARGET: x86_64-unknown-linux-musl
- run: ./ci/build_basic_webserver.sh linux_x86_64 "--linker legacy"
+ run: ./ci/build_basic_webserver.sh linux_x86_64

- - name: Save .rh, .rm and .o file
+ - name: Save .rh, .rm and .a file
  uses: actions/upload-artifact@v4
  with:
  name: linux-x86_64-files
  path: |
  basic-webserver/platform/metadata_linux-x64.rm
  basic-webserver/platform/linux-x64.rh
- basic-webserver/platform/linux-x64.o
+ basic-webserver/platform/linux-x64.a


  build-linux-arm64-files:
@@ -75,12 +75,12 @@ jobs:
  CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS: "-Clink-self-contained=yes -Clinker=rust-lld"
  run: ./ci/build_basic_webserver.sh linux_arm64

- - name: Save .o file
+ - name: Save .a file
  uses: actions/upload-artifact@v4
  with:
  name: linux-arm64-files
  path: |
- basic-webserver/platform/linux-arm64.o
+ basic-webserver/platform/linux-arm64.a

  build-macos-x86_64-files:
  runs-on: [macos-12] # I expect the generated files to work on macOS 12 and up
@@ -93,15 +93,15 @@ jobs:

  - run: ./ci/build_basic_webserver.sh macos_x86_64

- - name: Save .o files
+ - name: Save .a files
  uses: actions/upload-artifact@v4
  with:
  name: macos-x86_64-files
  path: |
- basic-webserver/platform/macos-x64.o
+ basic-webserver/platform/macos-x64.a

  build-macos-apple-silicon-files:
- name: build apple silicon .o file
+ name: build apple silicon .a file
  runs-on: [self-hosted, macOS, ARM64]
  needs: [fetch-releases]
  steps:
@@ -112,12 +112,12 @@ jobs:

  - run: ./ci/build_basic_webserver.sh macos_apple_silicon

- - name: Save macos-arm64.o file
+ - name: Save macos-arm64.a file
  uses: actions/upload-artifact@v4
  with:
  name: macos-apple-silicon-files
  path: |
- basic-webserver/platform/macos-arm64.o
+ basic-webserver/platform/macos-arm64.a

  create-release-archive:
  needs: [build-linux-x86_64-files, build-linux-arm64-files, build-macos-x86_64-files, build-macos-apple-silicon-files]
@@ -147,7 +147,7 @@ jobs:
  - run: |
  git clone https://github.com/roc-lang/basic-webserver.git
  cd basic-webserver
- git checkout ${{ env.BASIC_WEBSERVER_BRANCH }}
+ git checkout ${{ env.RELEASE_TAG }}
  cd ..

  - run: cp macos-apple-silicon-files/* ./basic-webserver/platform
.github/workflows/docker.yml (vendored, 26 changes)
@@ -1,5 +1,7 @@
  on:
  workflow_dispatch:
+ # pull_request:
+ # TODO remove pull_request trigger

  name: Docker images tests

@@ -15,10 +17,10 @@ jobs:
  run: cp docker/nightly-ubuntu-latest/docker-compose.example.yml docker/nightly-ubuntu-latest/docker-compose.yml

  - name: Build image
- run: docker-compose -f docker/nightly-ubuntu-latest/docker-compose.yml build
+ run: docker compose -f docker/nightly-ubuntu-latest/docker-compose.yml build

  - name: Run hello world test
- run: docker-compose -f docker/nightly-ubuntu-latest/docker-compose.yml run roc examples/helloWorld.roc
+ run: docker compose -f docker/nightly-ubuntu-latest/docker-compose.yml run roc examples/helloWorld.roc


  nightly-ubuntu-2204:
@@ -32,10 +34,10 @@ jobs:
  run: cp docker/nightly-ubuntu-2204/docker-compose.example.yml docker/nightly-ubuntu-2204/docker-compose.yml

  - name: Build image
- run: docker-compose -f docker/nightly-ubuntu-2204/docker-compose.yml build
+ run: docker compose -f docker/nightly-ubuntu-2204/docker-compose.yml build

  - name: Run hello world test
- run: docker-compose -f docker/nightly-ubuntu-2204/docker-compose.yml run roc examples/helloWorld.roc
+ run: docker compose -f docker/nightly-ubuntu-2204/docker-compose.yml run roc examples/helloWorld.roc

  nightly-ubuntu-2004:
  name: nightly-ubuntu-2004
@@ -48,10 +50,10 @@ jobs:
  run: cp docker/nightly-ubuntu-2004/docker-compose.example.yml docker/nightly-ubuntu-2004/docker-compose.yml

  - name: Build image
- run: docker-compose -f docker/nightly-ubuntu-2004/docker-compose.yml build
+ run: docker compose -f docker/nightly-ubuntu-2004/docker-compose.yml build

  - name: Run hello world test
- run: docker-compose -f docker/nightly-ubuntu-2004/docker-compose.yml run roc examples/helloWorld.roc
+ run: docker compose -f docker/nightly-ubuntu-2004/docker-compose.yml run roc examples/helloWorld.roc

  nightly-debian-latest:
  name: nightly-debian-latest
@@ -64,10 +66,10 @@ jobs:
  run: cp docker/nightly-debian-latest/docker-compose.example.yml docker/nightly-debian-latest/docker-compose.yml

  - name: Build image
- run: docker-compose -f docker/nightly-debian-latest/docker-compose.yml build
+ run: docker compose -f docker/nightly-debian-latest/docker-compose.yml build

  - name: Run hello world test
- run: docker-compose -f docker/nightly-debian-latest/docker-compose.yml run roc examples/helloWorld.roc
+ run: docker compose -f docker/nightly-debian-latest/docker-compose.yml run roc examples/helloWorld.roc

  nightly-debian-bookworm:
  name: nightly-debian-bookworm
@@ -80,10 +82,10 @@ jobs:
  run: cp docker/nightly-debian-bookworm/docker-compose.example.yml docker/nightly-debian-bookworm/docker-compose.yml

  - name: Build image
- run: docker-compose -f docker/nightly-debian-bookworm/docker-compose.yml build
+ run: docker compose -f docker/nightly-debian-bookworm/docker-compose.yml build

  - name: Run hello world test
- run: docker-compose -f docker/nightly-debian-bookworm/docker-compose.yml run roc examples/helloWorld.roc
+ run: docker compose -f docker/nightly-debian-bookworm/docker-compose.yml run roc examples/helloWorld.roc

  nightly-debian-buster:
  name: nightly-debian-buster
@@ -96,7 +98,7 @@ jobs:
  run: cp docker/nightly-debian-buster/docker-compose.example.yml docker/nightly-debian-buster/docker-compose.yml

  - name: Build image
- run: docker-compose -f docker/nightly-debian-buster/docker-compose.yml build
+ run: docker compose -f docker/nightly-debian-buster/docker-compose.yml build

  - name: Run hello world test
- run: docker-compose -f docker/nightly-debian-buster/docker-compose.yml run roc examples/helloWorld.roc
+ run: docker compose -f docker/nightly-debian-buster/docker-compose.yml run roc examples/helloWorld.roc
.github/workflows/test_nightly_many_os.yml (vendored, 2 changes)
@@ -9,7 +9,7 @@ jobs:
  strategy:
  fail-fast: false
  matrix:
- os: [ macos-12, macos-13, ubuntu-20.04, ubuntu-22.04 ]
+ os: [ macos-12, macos-13, ubuntu-20.04, ubuntu-22.04, ubuntu-24.04]
  runs-on: ${{ matrix.os }}
  timeout-minutes: 90
  steps:
@@ -94,6 +94,21 @@ If so, you can fix it like so:
  sudo apt-get install libxcb-render0-dev libxcb-shape0-dev libxcb-xfixes0-dev
  ```

+ ### libz libzstd libraries
+
+ You may see an error like this during builds:
+
+ ```text
+ /usr/bin/ld: cannot find -lz: No such file or directory
+ /usr/bin/ld: cannot find -lzstd: No such file or directory
+ ```
+
+ If so, you can fix it like so:
+
+ ```sh
+ sudo apt-get install libz-dev libzstd-dev
+ ```
+
  ### Zig

  **version: 0.11.0**
Cargo.lock (generated, 14 changes)
@@ -2772,6 +2772,7 @@ dependencies = [
  "roc_derive_key",
  "roc_error_macros",
  "roc_late_solve",
+ "roc_lower_params",
  "roc_module",
  "roc_mono",
  "roc_packaging",
@@ -2792,6 +2793,19 @@ dependencies = [
  "ven_pretty",
 ]

+ [[package]]
+ name = "roc_lower_params"
+ version = "0.0.1"
+ dependencies = [
+  "bumpalo",
+  "roc_can",
+  "roc_collections",
+  "roc_module",
+  "roc_region",
+  "roc_solve_problem",
+  "roc_types",
+ ]
+
 [[package]]
 name = "roc_module"
 version = "0.0.1"
@@ -37,16 +37,23 @@ rm roc_nightly.tar.gz
mv roc_nightly* roc_nightly

cd roc_nightly
export PATH="$(pwd -P):$PATH"
cd ..

# temp test
roc version

cd basic-cli
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
    if [[ $(uname -m) == "aarch64" ]]; then
        target_arch="aarch64-unknown-linux-musl"
    else
        target_arch="x86_64-unknown-linux-musl"
    fi
fi
./jump-start.sh

# build the basic cli platform
./roc build ../basic-cli/examples/countdown.roc --optimize

# We need this extra variable so we can safely check if $2 is empty later
EXTRA_ARGS=${2:-}

# In some rare cases it's nice to be able to use the legacy linker, so we produce the .o file to be able to do that
if [ -n "${EXTRA_ARGS}" ];
then ./roc build $EXTRA_ARGS ../basic-cli/examples/countdown.roc --optimize
fi
roc build.roc --prebuilt-platform

cd ..
@@ -4,6 +4,9 @@
set -euxo pipefail

git clone https://github.com/roc-lang/basic-webserver.git
cd basic-webserver
git checkout $RELEASE_TAG
cd ..

OS=$(uname -s)
ARCH=$(uname -m)

@@ -37,20 +40,13 @@ rm roc_nightly.tar.gz
# simplify dir name
mv roc_nightly* roc_nightly

# add roc to PATH
cd roc_nightly
export PATH="$(pwd -P):$PATH"
cd ..

# prevent https://github.com/roc-lang/basic-webserver/issues/9
if [ "$OS" != "Linux" ] || [ "$ARCH" != "x86_64" ]; then
    # build the basic-webserver platform
    ./roc build ../basic-webserver/examples/echo.roc --optimize
fi
cd basic-webserver

# We need this extra variable so we can safely check if $2 is empty later
EXTRA_ARGS=${2:-}

# In some rare cases it's nice to be able to use the legacy linker, so we produce the .o file to be able to do that
if [ -n "${EXTRA_ARGS}" ];
then ./roc build $EXTRA_ARGS ../basic-webserver/examples/echo.roc --optimize
fi
roc build.roc --prebuilt-platform

cd ..
|
@ -263,7 +263,7 @@ mod tests {
|
|||
use std::io::Write;
|
||||
use tempfile::{tempdir, TempDir};
|
||||
|
||||
const FORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
const FORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Task
|
||||
|
@ -271,7 +271,7 @@ import pf.Task
|
|||
main =
|
||||
Stdout.line! "I'm a Roc application!""#;
|
||||
|
||||
const UNFORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
const UNFORMATTED_ROC: &str = r#"app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
|
||||
import pf.Stdout
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [Task]
|
||||
|
||||
main =
|
||||
multipleIn =
|
||||
|
|
|
@ -1,19 +1,18 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdin
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [await, loop]
|
||||
|
||||
main =
|
||||
_ <- await (Stdout.line "\nLet's count down from 3 together - all you have to do is press <ENTER>.")
|
||||
_ <- await Stdin.line
|
||||
loop 3 tick
|
||||
Stdout.line! "\nLet's count down from 3 together - all you have to do is press <ENTER>."
|
||||
_ = Stdin.line!
|
||||
Task.loop 3 tick
|
||||
|
||||
tick = \n ->
|
||||
if n == 0 then
|
||||
_ <- await (Stdout.line "🎉 SURPRISE! Happy Birthday! 🎂")
|
||||
Stdout.line! "🎉 SURPRISE! Happy Birthday! 🎂"
|
||||
Task.ok (Done {})
|
||||
else
|
||||
_ <- await (n |> Num.toStr |> \s -> "$(s)..." |> Stdout.line)
|
||||
_ <- await Stdin.line
|
||||
Stdout.line! (n |> Num.toStr |> \s -> "$(s)...")
|
||||
_ = Stdin.line!
|
||||
Task.ok (Step (n - 1))
|
||||
|
|
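Throughout the example updates in this commit, backpassing on Task (`_ <- await ...`) is replaced by the `!` suffix, which awaits a Task and continues on the next line. A minimal standalone sketch of the before/after, modeled on the countdown example above (the app itself is hypothetical; the platform URL is the 0.15.0 one used in these examples):

```roc
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }

import pf.Stdout
import pf.Stdin

main =
    # old style (removed above):
    #   _ <- await (Stdout.line "Press <ENTER>")
    #   _ <- await Stdin.line
    #   Stdout.line "done"
    # new style: `!` awaits the Task, `_ =` discards its result
    Stdout.line! "Press <ENTER>"
    _ = Stdin.line!
    Stdout.line "done"
```

The final expression still evaluates to a Task, so it needs no `!` of its own, which is why `Task.loop 3 tick` above stays bare.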
|
@ -1,11 +1,10 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdin
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [Task]
|
||||
|
||||
main =
|
||||
_ <- Task.await (Stdout.line "🗣 Shout into this cave and hear the echo! 👂👂👂")
|
||||
Stdout.line! "🗣 Shout into this cave and hear the echo! 👂👂👂"
|
||||
|
||||
Task.loop {} tick
|
||||
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Stderr
|
||||
import pf.Env
|
||||
import pf.Task exposing [Task]
|
||||
|
||||
main =
|
||||
task =
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [Task]
|
||||
import pf.File
|
||||
import pf.Path
|
||||
import pf.Env
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdin
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [await, Task]
|
||||
|
||||
main =
|
||||
Stdout.line! "What's your first name?"
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Http
|
||||
import pf.Task exposing [Task]
|
||||
import pf.Stdout
|
||||
|
||||
main =
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import "test-file.txt" as testFile
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import "test-file.txt" as testFile : _ # the _ is optional
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br" }
|
||||
app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
|
||||
|
||||
import pf.Stdout
|
||||
import "ingested-file.roc" as ownCode : Str
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
app [main] {
|
||||
pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br",
|
||||
pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br",
|
||||
}
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Task exposing [Task]
|
||||
|
||||
main =
|
||||
file = strParam { name: "file" }
|
||||
|
@ -55,23 +54,23 @@ numParam = \{ name } ->
|
|||
{ params: [param], parser }
|
||||
|
||||
cliMap : ArgParser a, (a -> b) -> ArgParser b
|
||||
cliMap = \{ params, parser }, mapper -> {
|
||||
params,
|
||||
parser: \args ->
|
||||
(data, afterData) <- parser args
|
||||
|> Result.try
|
||||
cliMap = \{ params, parser }, mapper ->
|
||||
mappedParser = \args ->
|
||||
(data, afterData) = parser? args
|
||||
|
||||
Ok (mapper data, afterData),
|
||||
}
|
||||
Ok (mapper data, afterData)
|
||||
|
||||
{
|
||||
params,
|
||||
parser: mappedParser,
|
||||
}
|
||||
|
||||
cliBuild : ArgParser a, ArgParser b, (a, b -> c) -> ArgParser c
|
||||
cliBuild = \firstWeaver, secondWeaver, combine ->
|
||||
allParams = List.concat firstWeaver.params secondWeaver.params
|
||||
combinedParser = \args ->
|
||||
(firstValue, afterFirst) <- firstWeaver.parser args
|
||||
|> Result.try
|
||||
(secondValue, afterSecond) <- secondWeaver.parser afterFirst
|
||||
|> Result.try
|
||||
(firstValue, afterFirst) = firstWeaver.parser? args
|
||||
(secondValue, afterSecond) = secondWeaver.parser? afterFirst
|
||||
|
||||
Ok (combine firstValue secondValue, afterSecond)
|
||||
|
||||
|
|
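The rewritten `cliMap` and `cliBuild` above also drop `Result.try` backpassing in favor of the `?` suffix: `parser? args` unwraps an `Ok` or returns the `Err` to the caller. A small illustrative sketch (the `parseAndAdd` module and function are made up for this example; `Str.toU64` is a builtin returning `Result U64 [InvalidNumStr]`):

```roc
module [parseAndAdd]

parseAndAdd : Str, Str -> Result U64 [InvalidNumStr]
parseAndAdd = \a, b ->
    # old style:
    #   x <- Str.toU64 a |> Result.try
    #   y <- Str.toU64 b |> Result.try
    # new style: `?` unwraps an Ok or early-returns the Err
    x = Str.toU64? a
    y = Str.toU64? b
    Ok (x + y)
```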
|
@ -1,6 +1,6 @@
|
|||
app [main] {
|
||||
cli: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br",
|
||||
parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.7.1/MvLlME9RxOBjl0QCxyn3LIaoG9pSlaNxCa-t3BfbPNc.tar.br",
|
||||
cli: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br",
|
||||
parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
|
||||
}
|
||||
|
||||
import cli.Stdout
|
||||
|
@ -26,18 +26,17 @@ Letter : [A, B, C, Other]
|
|||
|
||||
letterParser : Parser (List U8) Letter
|
||||
letterParser =
|
||||
input <- buildPrimitiveParser
|
||||
buildPrimitiveParser \input ->
|
||||
valResult =
|
||||
when input is
|
||||
[] -> Err (ParsingFailure "Nothing to parse")
|
||||
['A', ..] -> Ok A
|
||||
['B', ..] -> Ok B
|
||||
['C', ..] -> Ok C
|
||||
_ -> Ok Other
|
||||
|
||||
valResult =
|
||||
when input is
|
||||
[] -> Err (ParsingFailure "Nothing to parse")
|
||||
['A', ..] -> Ok A
|
||||
['B', ..] -> Ok B
|
||||
['C', ..] -> Ok C
|
||||
_ -> Ok Other
|
||||
|
||||
valResult
|
||||
|> Result.map \val -> { val, input: List.dropFirst input 1 }
|
||||
valResult
|
||||
|> Result.map \val -> { val, input: List.dropFirst input 1 }
|
||||
|
||||
expect
|
||||
input = "B"
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
app [main] {
|
||||
pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.12.0/Lb8EgiejTUzbggO2HVVuPJFkwvvsfW6LojkLR20kTVE.tar.br",
|
||||
pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br",
|
||||
parser: "https://github.com/lukewilliamboswell/roc-parser/releases/download/0.5.2/9VrPjwfQQ1QeSL3CfmWr2Pr9DESdDIXy97pwpuq84Ck.tar.br",
|
||||
}
|
||||
|
||||
import pf.Stdout
|
||||
import pf.Stderr
|
||||
import pf.Task exposing [Task]
|
||||
import parser.Core exposing [map, keep]
|
||||
import parser.String exposing [strFromUtf8]
|
||||
import parser.CSV
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
interface AStar
|
||||
exposes [findPath, Model, initialModel, cheapestOpen, reconstructPath]
|
||||
imports [Quicksort]
|
||||
module [findPath, Model, initialModel, cheapestOpen, reconstructPath]
|
||||
|
||||
import Quicksort
|
||||
|
||||
findPath = \costFn, moveFn, start, end ->
|
||||
astar costFn moveFn end (initialModel start)
|
||||
|
|
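This hunk and the ones that follow replace the old `interface ... exposes ... imports` header with the new `module [...]` header plus standalone `import` statements. A minimal before/after sketch (the `Greeter` module is hypothetical):

```roc
# Old header syntax (removed in these hunks):
#
#     interface Greeter
#         exposes [greet]
#         imports []

# New header syntax:
module [greet]

greet : Str -> Str
greet = \name -> "Hello, $(name)!"
```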
|
@ -1,4 +1,4 @@
|
|||
interface Base64 exposes [fromBytes, fromStr, toBytes, toStr] imports []
|
||||
module [fromBytes, fromStr, toBytes, toStr]
|
||||
|
||||
import Base64.Decode
|
||||
import Base64.Encode
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
interface Base64.Decode exposes [fromBytes] imports []
|
||||
module [fromBytes]
|
||||
|
||||
import Bytes.Decode exposing [ByteDecoder, DecodeProblem]
|
||||
|
||||
|
@ -12,40 +12,39 @@ decodeBase64 = \width -> Bytes.Decode.loop loopHelp { remaining: width, string:
|
|||
loopHelp : { remaining : U64, string : Str } -> ByteDecoder (Bytes.Decode.Step { remaining : U64, string : Str } Str)
|
||||
loopHelp = \{ remaining, string } ->
|
||||
if remaining >= 3 then
|
||||
x, y, z <- Bytes.Decode.map3 Bytes.Decode.u8 Bytes.Decode.u8 Bytes.Decode.u8
|
||||
Bytes.Decode.map3 Bytes.Decode.u8 Bytes.Decode.u8 Bytes.Decode.u8 \x, y, z ->
|
||||
a : U32
|
||||
a = Num.intCast x
|
||||
b : U32
|
||||
b = Num.intCast y
|
||||
c : U32
|
||||
c = Num.intCast z
|
||||
combined = Num.bitwiseOr (Num.bitwiseOr (Num.shiftLeftBy a 16) (Num.shiftLeftBy b 8)) c
|
||||
|
||||
a : U32
|
||||
a = Num.intCast x
|
||||
b : U32
|
||||
b = Num.intCast y
|
||||
c : U32
|
||||
c = Num.intCast z
|
||||
combined = Num.bitwiseOr (Num.bitwiseOr (Num.shiftLeftBy a 16) (Num.shiftLeftBy b 8)) c
|
||||
|
||||
Loop {
|
||||
remaining: remaining - 3,
|
||||
string: Str.concat string (bitsToChars combined 0),
|
||||
}
|
||||
Loop {
|
||||
remaining: remaining - 3,
|
||||
string: Str.concat string (bitsToChars combined 0),
|
||||
}
|
||||
else if remaining == 0 then
|
||||
Bytes.Decode.succeed (Done string)
|
||||
else if remaining == 2 then
|
||||
x, y <- Bytes.Decode.map2 Bytes.Decode.u8 Bytes.Decode.u8
|
||||
Bytes.Decode.map2 Bytes.Decode.u8 Bytes.Decode.u8 \x, y ->
|
||||
|
||||
a : U32
|
||||
a = Num.intCast x
|
||||
b : U32
|
||||
b = Num.intCast y
|
||||
combined = Num.bitwiseOr (Num.shiftLeftBy a 16) (Num.shiftLeftBy b 8)
|
||||
a : U32
|
||||
a = Num.intCast x
|
||||
b : U32
|
||||
b = Num.intCast y
|
||||
combined = Num.bitwiseOr (Num.shiftLeftBy a 16) (Num.shiftLeftBy b 8)
|
||||
|
||||
Done (Str.concat string (bitsToChars combined 1))
|
||||
Done (Str.concat string (bitsToChars combined 1))
|
||||
else
|
||||
# remaining = 1
|
||||
x <- Bytes.Decode.map Bytes.Decode.u8
|
||||
Bytes.Decode.map Bytes.Decode.u8 \x ->
|
||||
|
||||
a : U32
|
||||
a = Num.intCast x
|
||||
a : U32
|
||||
a = Num.intCast x
|
||||
|
||||
Done (Str.concat string (bitsToChars (Num.shiftLeftBy a 16) 2))
|
||||
Done (Str.concat string (bitsToChars (Num.shiftLeftBy a 16) 2))
|
||||
|
||||
bitsToChars : U32, Int * -> Str
|
||||
bitsToChars = \bits, missing ->
|
||||
|
|
|
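The `Bytes.Decode.map`/`map2`/`map3` rewrites above show the other mechanical change in this commit: backpassing (`x, y <- map2 ...`) becomes an ordinary trailing lambda argument. A small sketch in the same style; the `decodeU16` helper and the exact `Num` conversions are assumptions for illustration, not code from the diff:

```roc
module [decodeU16]

import Bytes.Decode exposing [ByteDecoder]

# Hypothetical helper: decode two bytes as a big-endian U16.
decodeU16 : ByteDecoder U16
decodeU16 =
    # old:  hi, lo <- Bytes.Decode.map2 Bytes.Decode.u8 Bytes.Decode.u8
    # new:  the continuation is passed as a regular lambda argument
    Bytes.Decode.map2 Bytes.Decode.u8 Bytes.Decode.u8 \hi, lo ->
        Num.bitwiseOr (Num.shiftLeftBy (Num.toU16 hi) 8) (Num.toU16 lo)
```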
@ -1,7 +1,4 @@
|
|||
interface Base64.Encode
|
||||
exposes [toBytes]
|
||||
imports []
|
||||
|
||||
module [toBytes]
|
||||
|
||||
import Bytes.Encode exposing [ByteEncoder]
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
interface Bytes.Decode exposes [ByteDecoder, decode, map, map2, u8, loop, Step, succeed, DecodeProblem, after, map3] imports []
|
||||
module [ByteDecoder, decode, map, map2, u8, loop, Step, succeed, DecodeProblem, after, map3]
|
||||
|
||||
State : { bytes : List U8, cursor : U64 }
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
interface Bytes.Encode exposes [ByteEncoder, sequence, u8, u16, bytes, empty, encode] imports []
|
||||
module [ByteEncoder, sequence, u8, u16, bytes, empty, encode]
|
||||
|
||||
Endianness : [BE, LE]
|
||||
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
interface Issue2279Help
|
||||
exposes [text, asText]
|
||||
imports []
|
||||
module [text, asText]
|
||||
|
||||
text = "Hello, world!"
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
interface Quicksort exposes [sortBy, sortWith, show] imports []
|
||||
module [sortBy, sortWith, show]
|
||||
|
||||
show : List I64 -> Str
|
||||
show = \list ->
|
||||
|
|
|
@ -1,12 +1,16 @@
|
|||
app "cfold"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import pf.PlatformTasks
|
||||
|
||||
# adapted from https://github.com/koka-lang/koka/blob/master/test/bench/haskell/cfold.hs
|
||||
main : Task.Task {} []
|
||||
main : Task {} []
|
||||
main =
|
||||
inputResult <- Task.attempt Task.getInt
|
||||
{ value, isError } = PlatformTasks.getInt!
|
||||
inputResult =
|
||||
if isError then
|
||||
Err GetIntError
|
||||
else
|
||||
Ok value
|
||||
|
||||
when inputResult is
|
||||
Ok n ->
|
||||
|
@ -18,10 +22,10 @@ main =
|
|||
|> Num.toStr
|
||||
|> Str.concat " & "
|
||||
|> Str.concat (Num.toStr optimized)
|
||||
|> Task.putLine
|
||||
|> PlatformTasks.putLine
|
||||
|
||||
Err GetIntError ->
|
||||
Task.putLine "Error: Failed to get Integer from stdin."
|
||||
PlatformTasks.putLine "Error: Failed to get Integer from stdin."
|
||||
|
||||
Expr : [
|
||||
Add Expr Expr,
|
||||
|
|
|
@ -1,18 +1,15 @@
|
|||
app "closure"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
# see https://github.com/roc-lang/roc/issues/985
|
||||
main : Task.Task {} []
|
||||
main : Task {} []
|
||||
main = closure1 {}
|
||||
# |> Task.after (\_ -> closure2 {})
|
||||
# |> Task.after (\_ -> closure3 {})
|
||||
# |> Task.after (\_ -> closure4 {})
|
||||
# ---
|
||||
closure1 : {} -> Task.Task {} []
|
||||
closure1 : {} -> Task {} []
|
||||
closure1 = \_ ->
|
||||
Task.succeed (foo toUnitBorrowed "a long string such that it's malloced")
|
||||
Task.ok (foo toUnitBorrowed "a long string such that it's malloced")
|
||||
|> Task.map \_ -> {}
|
||||
|
||||
toUnitBorrowed = \x -> Str.countUtf8Bytes x
|
||||
|
|
|
@ -1,14 +1,18 @@
|
|||
app "deriv"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import pf.PlatformTasks
|
||||
|
||||
# based on: https://github.com/koka-lang/koka/blob/master/test/bench/haskell/deriv.hs
|
||||
IO a : Task.Task a []
|
||||
IO a : Task a []
|
||||
|
||||
main : Task.Task {} []
|
||||
main : Task {} []
|
||||
main =
|
||||
inputResult <- Task.attempt Task.getInt
|
||||
{ value, isError } = PlatformTasks.getInt!
|
||||
inputResult =
|
||||
if isError then
|
||||
Err GetIntError
|
||||
else
|
||||
Ok value
|
||||
|
||||
when inputResult is
|
||||
Ok n ->
|
||||
|
@ -22,14 +26,15 @@ main =
|
|||
|> Task.map \_ -> {}
|
||||
|
||||
Err GetIntError ->
|
||||
Task.putLine "Error: Failed to get Integer from stdin."
|
||||
PlatformTasks.putLine "Error: Failed to get Integer from stdin."
|
||||
|
||||
nestHelp : I64, (I64, Expr -> IO Expr), I64, Expr -> IO Expr
|
||||
nestHelp = \s, f, m, x -> when m is
|
||||
0 -> Task.succeed x
|
||||
_ ->
|
||||
w <- Task.after (f (s - m) x)
|
||||
nestHelp s f (m - 1) w
|
||||
nestHelp = \s, f, m, x ->
|
||||
when m is
|
||||
0 -> Task.ok x
|
||||
_ ->
|
||||
w = f! (s - m) x
|
||||
nestHelp s f (m - 1) w
|
||||
|
||||
nest : (I64, Expr -> IO Expr), I64, Expr -> IO Expr
|
||||
nest = \f, n, e -> nestHelp n f n e
|
||||
|
@ -162,6 +167,5 @@ deriv = \i, f ->
|
|||
Num.toStr (i + 1)
|
||||
|> Str.concat " count: "
|
||||
|> Str.concat (Num.toStr (count fprime))
|
||||
|
||||
Task.putLine line
|
||||
|> Task.after \_ -> Task.succeed fprime
|
||||
PlatformTasks.putLine! line
|
||||
Task.ok fprime
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
app "issue2279"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [Issue2279Help, pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import Issue2279Help
|
||||
import pf.PlatformTasks
|
||||
|
||||
main =
|
||||
text =
|
||||
|
@ -10,4 +10,4 @@ main =
|
|||
else
|
||||
Issue2279Help.asText 42
|
||||
|
||||
Task.putLine text
|
||||
PlatformTasks.putLine text
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
app "nqueens"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
main : Task.Task {} []
|
||||
import pf.PlatformTasks
|
||||
|
||||
main : Task {} []
|
||||
main =
|
||||
inputResult <- Task.attempt Task.getInt
|
||||
{ value, isError } = PlatformTasks.getInt!
|
||||
inputResult =
|
||||
if isError then
|
||||
Err GetIntError
|
||||
else
|
||||
Ok value
|
||||
|
||||
when inputResult is
|
||||
Ok n ->
|
||||
queens n # original koka 13
|
||||
|> Num.toStr
|
||||
|> Task.putLine
|
||||
|> PlatformTasks.putLine
|
||||
|
||||
Err GetIntError ->
|
||||
Task.putLine "Error: Failed to get Integer from stdin."
|
||||
PlatformTasks.putLine "Error: Failed to get Integer from stdin."
|
||||
|
||||
ConsList a : [Nil, Cons a (ConsList a)]
|
||||
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
hosted Effect
|
||||
exposes [Effect, after, map, always, forever, loop, putLine, putInt, getInt]
|
||||
imports []
|
||||
generates Effect with [after, map, always, forever, loop]
|
||||
|
||||
putLine : Str -> Effect {}
|
||||
|
||||
putInt : I64 -> Effect {}
|
||||
|
||||
getInt : Effect { value : I64, isError : Bool }
|
crates/cli/tests/benchmarks/platform/PlatformTasks.roc (new file, 9 lines)
@@ -0,0 +1,9 @@
+ hosted PlatformTasks
+ exposes [putLine, putInt, getInt]
+ imports []
+
+ putLine : Str -> Task {} *
+
+ putInt : I64 -> Task {} *
+
+ getInt : Task { value : I64, isError : Bool } *
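The new hosted `PlatformTasks` module above replaces the hosted `Effect` module and the hand-written `Task.roc` wrapper deleted nearby; the benchmark apps now call it directly with the `!` suffix. A sketch consolidating the pattern repeated in the cfold/deriv/nqueens hunks below (the app shown here is illustrative, not one of the changed files):

```roc
app [main] { pf: platform "platform/main.roc" }

import pf.PlatformTasks

main : Task {} []
main =
    # getInt now returns a record instead of the old GetIntError-style Result
    { value, isError } = PlatformTasks.getInt!
    if isError then
        PlatformTasks.putLine "Error: Failed to get Integer from stdin."
    else
        PlatformTasks.putLine (Num.toStr value)
```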
@ -1,88 +0,0 @@
|
|||
module [Task, succeed, fail, after, map, putLine, putInt, getInt, forever, loop, attempt]
|
||||
|
||||
import pf.Effect
|
||||
|
||||
Task ok err : Effect.Effect (Result ok err)
|
||||
|
||||
forever : Task val err -> Task * err
|
||||
forever = \task ->
|
||||
looper = \{} ->
|
||||
task
|
||||
|> Effect.map
|
||||
\res ->
|
||||
when res is
|
||||
Ok _ -> Step {}
|
||||
Err e -> Done (Err e)
|
||||
|
||||
Effect.loop {} looper
|
||||
|
||||
loop : state, (state -> Task [Step state, Done done] err) -> Task done err
|
||||
loop = \state, step ->
|
||||
looper = \current ->
|
||||
step current
|
||||
|> Effect.map
|
||||
\res ->
|
||||
when res is
|
||||
Ok (Step newState) -> Step newState
|
||||
Ok (Done result) -> Done (Ok result)
|
||||
Err e -> Done (Err e)
|
||||
|
||||
Effect.loop state looper
|
||||
|
||||
succeed : val -> Task val *
|
||||
succeed = \val ->
|
||||
Effect.always (Ok val)
|
||||
|
||||
fail : err -> Task * err
|
||||
fail = \val ->
|
||||
Effect.always (Err val)
|
||||
|
||||
after : Task a err, (a -> Task b err) -> Task b err
|
||||
after = \effect, transform ->
|
||||
Effect.after
|
||||
effect
|
||||
\result ->
|
||||
when result is
|
||||
Ok a -> transform a
|
||||
Err err -> Task.fail err
|
||||
|
||||
attempt : Task a b, (Result a b -> Task c d) -> Task c d
|
||||
attempt = \task, transform ->
|
||||
Effect.after
|
||||
task
|
||||
\result ->
|
||||
when result is
|
||||
Ok ok -> transform (Ok ok)
|
||||
Err err -> transform (Err err)
|
||||
|
||||
map : Task a err, (a -> b) -> Task b err
|
||||
map = \effect, transform ->
|
||||
Effect.map
|
||||
effect
|
||||
\result ->
|
||||
when result is
|
||||
Ok a -> Ok (transform a)
|
||||
Err err -> Err err
|
||||
|
||||
putLine : Str -> Task {} *
|
||||
putLine = \line -> Effect.map (Effect.putLine line) (\_ -> Ok {})
|
||||
|
||||
putInt : I64 -> Task {} *
|
||||
putInt = \line -> Effect.map (Effect.putInt line) (\_ -> Ok {})
|
||||
|
||||
getInt : Task I64 [GetIntError]
|
||||
getInt =
|
||||
Effect.after
|
||||
Effect.getInt
|
||||
\{ isError, value } ->
|
||||
if
|
||||
isError
|
||||
then
|
||||
# TODO
|
||||
# when errorCode is
|
||||
# # A -> Task.fail InvalidCharacter
|
||||
# # B -> Task.fail IOError
|
||||
# _ ->
|
||||
Task.fail GetIntError
|
||||
else
|
||||
Task.succeed value
|
|
@ -2,7 +2,7 @@ platform "benchmarks"
|
|||
requires {} { main : Task {} [] }
|
||||
exposes []
|
||||
packages {}
|
||||
imports [Task.{ Task }]
|
||||
imports []
|
||||
provides [mainForHost]
|
||||
|
||||
mainForHost : Task {} []
|
||||
|
|
|
@ -1,11 +1,16 @@
|
|||
app "quicksortapp"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task, Quicksort]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import pf.PlatformTasks
|
||||
import Quicksort
|
||||
|
||||
main : Task.Task {} []
|
||||
main =
|
||||
inputResult <- Task.attempt Task.getInt
|
||||
{ value, isError } = PlatformTasks.getInt!
|
||||
inputResult =
|
||||
if isError then
|
||||
Err GetIntError
|
||||
else
|
||||
Ok value
|
||||
|
||||
when inputResult is
|
||||
Ok n ->
|
||||
|
@ -19,10 +24,10 @@ main =
|
|||
|
||||
sort unsortedList
|
||||
|> Quicksort.show
|
||||
|> Task.putLine
|
||||
|> PlatformTasks.putLine
|
||||
|
||||
Err GetIntError ->
|
||||
Task.putLine "Error: Failed to get Integer from stdin."
|
||||
PlatformTasks.putLine "Error: Failed to get Integer from stdin."
|
||||
|
||||
sort : List I64 -> List I64
|
||||
sort = \list ->
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
app "rbtree-ck"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import pf.PlatformTasks
|
||||
|
||||
Color : [Red, Black]
|
||||
|
||||
|
@ -38,9 +37,14 @@ fold = \f, tree, b ->
|
|||
Leaf -> b
|
||||
Node _ l k v r -> fold f r (f k v (fold f l b))
|
||||
|
||||
main : Task.Task {} []
|
||||
main : Task {} []
|
||||
main =
|
||||
inputResult <- Task.attempt Task.getInt
|
||||
{ value, isError } = PlatformTasks.getInt!
|
||||
inputResult =
|
||||
if isError then
|
||||
Err GetIntError
|
||||
else
|
||||
Ok value
|
||||
|
||||
when inputResult is
|
||||
Ok n ->
|
||||
|
@ -54,13 +58,13 @@ main =
|
|||
|
||||
val
|
||||
|> Num.toStr
|
||||
|> Task.putLine
|
||||
|> PlatformTasks.putLine
|
||||
|
||||
Nil ->
|
||||
Task.putLine "fail"
|
||||
PlatformTasks.putLine "fail"
|
||||
|
||||
Err GetIntError ->
|
||||
Task.putLine "Error: Failed to get Integer from stdin."
|
||||
PlatformTasks.putLine "Error: Failed to get Integer from stdin."
|
||||
|
||||
insert : Tree (Num k) v, Num k, v -> Tree (Num k) v
|
||||
insert = \t, k, v -> if isRed t then setBlack (ins t k v) else ins t k v
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
app "rbtree-del"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import pf.PlatformTasks
|
||||
|
||||
Color : [Red, Black]
|
||||
|
||||
|
@ -11,9 +10,14 @@ Map : Tree I64 Bool
|
|||
|
||||
ConsList a : [Nil, Cons a (ConsList a)]
|
||||
|
||||
main : Task.Task {} []
|
||||
main : Task {} []
|
||||
main =
|
||||
inputResult <- Task.attempt Task.getInt
|
||||
{ value, isError } = PlatformTasks.getInt!
|
||||
inputResult =
|
||||
if isError then
|
||||
Err GetIntError
|
||||
else
|
||||
Ok value
|
||||
|
||||
when inputResult is
|
||||
Ok n ->
|
||||
|
@ -22,10 +26,10 @@ main =
|
|||
|
||||
val
|
||||
|> Num.toStr
|
||||
|> Task.putLine
|
||||
|> PlatformTasks.putLine
|
||||
|
||||
Err GetIntError ->
|
||||
Task.putLine "Error: Failed to get Integer from stdin."
|
||||
PlatformTasks.putLine "Error: Failed to get Integer from stdin."
|
||||
|
||||
boom : Str -> a
|
||||
boom = \_ -> boom ""
|
||||
|
@ -248,4 +252,4 @@ del = \t, k ->
|
|||
rebalanceLeft cx lx ky vy ry
|
||||
|
||||
Delmin (Del ry Bool.false) ky vy ->
|
||||
Del (Node cx lx ky vy ry) Bool.false
|
||||
Del (Node cx lx ky vy ry) Bool.false
|
||||
|
|
|
@ -1,16 +1,15 @@
|
|||
app "rbtree-insert"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
main : Task.Task {} []
|
||||
import pf.PlatformTasks
|
||||
|
||||
main : Task {} []
|
||||
main =
|
||||
tree : RedBlackTree I64 {}
|
||||
tree = insert 0 {} Empty
|
||||
|
||||
tree
|
||||
|> show
|
||||
|> Task.putLine
|
||||
|> PlatformTasks.putLine
|
||||
|
||||
show : RedBlackTree I64 {} -> Str
|
||||
show = \tree -> showRBTree tree Num.toStr (\{} -> "{}")
|
||||
|
|
|
@ -1,20 +1,12 @@
|
|||
app "test-astar"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task, AStar]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
main : Task.Task {} []
|
||||
import pf.PlatformTasks
|
||||
import AStar
|
||||
|
||||
#main : Task {} *
|
||||
main =
|
||||
Task.putLine (showBool test1)
|
||||
PlatformTasks.putLine! (showBool test1)
|
||||
|
||||
# Task.after Task.getInt \n ->
|
||||
# when n is
|
||||
# 1 ->
|
||||
# Task.putLine (showBool test1)
|
||||
#
|
||||
# _ ->
|
||||
# ns = Num.toStr n
|
||||
# Task.putLine "No test $(ns)"
|
||||
showBool : Bool -> Str
|
||||
showBool = \b ->
|
||||
if
|
||||
|
|
|
@ -1,18 +1,17 @@
|
|||
app "test-base64"
|
||||
packages { pf: "platform/main.roc" }
|
||||
imports [pf.Task, Base64]
|
||||
provides [main] to pf
|
||||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
IO a : Task.Task a []
|
||||
import Base64
|
||||
import pf.PlatformTasks
|
||||
|
||||
IO a : Task a []
|
||||
|
||||
main : IO {}
|
||||
main =
|
||||
when Base64.fromBytes (Str.toUtf8 "Hello World") is
|
||||
Err _ -> Task.putLine "sadness"
|
||||
Err _ -> PlatformTasks.putLine "sadness"
|
||||
Ok encoded ->
|
||||
Task.after
|
||||
(Task.putLine (Str.concat "encoded: " encoded))
|
||||
\_ ->
|
||||
when Base64.toStr encoded is
|
||||
Ok decoded -> Task.putLine (Str.concat "decoded: " decoded)
|
||||
Err _ -> Task.putLine "sadness"
|
||||
PlatformTasks.putLine! (Str.concat "encoded: " encoded)
|
||||
|
||||
when Base64.toStr encoded is
|
||||
Ok decoded -> PlatformTasks.putLine (Str.concat "decoded: " decoded)
|
||||
Err _ -> PlatformTasks.putLine "sadness"
|
||||
|
|
|
@ -48,24 +48,22 @@ mod cli_run {
|
|||
#[cfg(not(all(target_os = "linux", target_arch = "x86_64")))]
|
||||
const TEST_LEGACY_LINKER: bool = false;
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(windows, ignore)]
|
||||
fn hello_world() {
|
||||
let expected_ending = "Hello, World!\n🔨 Building host ...\n";
|
||||
let runner = Run::new_roc()
|
||||
.arg(CMD_RUN)
|
||||
.arg(BUILD_HOST_FLAG)
|
||||
.arg(SUPPRESS_BUILD_HOST_WARNING_FLAG)
|
||||
.add_arg_if(LINKER_FLAG, TEST_LEGACY_LINKER)
|
||||
.with_valigrind(ALLOW_VALGRIND)
|
||||
.arg(file_from_root("examples", "helloWorld.roc").as_path());
|
||||
|
||||
let out = runner.run();
|
||||
out.assert_clean_success();
|
||||
out.assert_stdout_and_stderr_ends_with(expected_ending);
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
enum Arg<'a> {
|
||||
ExamplePath(&'a str),
|
||||
// allow because we may need PlainText in the future
|
||||
#[allow(dead_code)]
|
||||
PlainText(&'a str),
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_compile_error(file: &Path, flags: &[&str], expected: &str) {
|
||||
check_compile_error_with(CMD_CHECK, file, flags, expected);
|
||||
}
|
||||
|
||||
fn check_compile_error_with(cmd: &str, file: &Path, flags: &[&str], expected: &str) {
|
||||
let compile_out = run_roc([cmd, file.to_str().unwrap()].iter().chain(flags), &[], &[]);
|
||||
let err = compile_out.stdout.trim();
|
||||
let err = strip_colors(err);
|
||||
#[cfg_attr(windows, ignore)]
|
||||
// uses C platform
|
||||
fn platform_switching_main() {
|
||||
|
@ -1214,14 +1212,9 @@ mod cli_run {
|
|||
|
||||
But the type annotation on main says it should be:
|
||||
|
||||
Effect.Effect (Result {} [])
|
||||
Task {} []
|
||||
|
||||
Tip: Add type annotations to functions or values to help you figure
|
||||
this out.
|
||||
|
||||
────────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
1 error and 0 warning found in <ignored for test> ms
|
||||
"#
|
||||
);
|
||||
|
||||
|
@ -1335,7 +1328,6 @@ mod cli_run {
|
|||
4│ generates Effect with [after, map, always, foobar]
|
||||
^^^^^^
|
||||
|
||||
Only specific functions like `after` and `map` can be generated.Learn
|
||||
more about hosted modules at TODO.
|
||||
|
||||
────────────────────────────────────────────────────────────────────────────────
|
||||
|
|
|
@ -1,16 +1,11 @@
|
|||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import pf.Effect
|
||||
import pf.PlatformTasks
|
||||
|
||||
main : Effect.Effect {}
|
||||
main : Task {} []
|
||||
main =
|
||||
Effect.after
|
||||
(Effect.getLine)
|
||||
\line ->
|
||||
Effect.after
|
||||
(Effect.putLine "You entered: $(line)")
|
||||
\{} ->
|
||||
Effect.after
|
||||
(Effect.putLine "It is known")
|
||||
\{} ->
|
||||
Effect.always {}
|
||||
line = PlatformTasks.getLine!
|
||||
PlatformTasks.putLine! "You entered: $(line)"
|
||||
PlatformTasks.putLine! "It is known"
|
||||
|
||||
Task.ok {}
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
hosted Effect
|
||||
exposes [Effect, after, map, always, forever, putLine, getLine]
|
||||
imports []
|
||||
generates Effect with [after, map, always, forever]
|
||||
|
||||
putLine : Str -> Effect {}
|
||||
|
||||
getLine : Effect Str
|
crates/cli/tests/effects/platform/PlatformTasks.roc (new file, 7 lines)
@@ -0,0 +1,7 @@
+ hosted PlatformTasks
+ exposes [putLine, getLine]
+ imports []
+
+ putLine : Str -> Task {} *
+
+ getLine : Task Str *
|
@ -1,9 +1,9 @@
|
|||
platform "effects"
|
||||
requires {} { main : Effect.Effect {} }
|
||||
requires {} { main : Task {} [] }
|
||||
exposes []
|
||||
packages {}
|
||||
imports [Effect]
|
||||
imports []
|
||||
provides [mainForHost]
|
||||
|
||||
mainForHost : Effect.Effect {}
|
||||
mainForHost : Task {} []
|
||||
mainForHost = main
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
module [Context, Data, with, getChar, Option, pushStack, popStack, toStr, inWhileScope]
|
||||
|
||||
import pf.File
|
||||
import pf.Task exposing [Task]
|
||||
import Variable exposing [Variable]
|
||||
|
||||
Option a : [Some a, None]
|
||||
|
@ -61,43 +60,43 @@ toStr = \{ scopes, stack, state, vars } ->
|
|||
|
||||
with : Str, (Context -> Task {} a) -> Task {} a
|
||||
with = \path, callback ->
|
||||
handle <- File.withOpen path
|
||||
# I cant define scope here and put it in the list in callback. It breaks alias anaysis.
|
||||
# Instead I have to inline this.
|
||||
# root_scope = { data: Some handle, index: 0, buf: [], whileInfo: None }
|
||||
callback { scopes: [{ data: Some handle, index: 0, buf: [], whileInfo: None }], state: Executing, stack: [], vars: List.repeat (Number 0) Variable.totalCount }
|
||||
File.withOpen path \handle ->
|
||||
# I cant define scope here and put it in the list in callback. It breaks alias anaysis.
|
||||
# Instead I have to inline this.
|
||||
# root_scope = { data: Some handle, index: 0, buf: [], whileInfo: None }
|
||||
callback { scopes: [{ data: Some handle, index: 0, buf: [], whileInfo: None }], state: Executing, stack: [], vars: List.repeat (Number 0) Variable.totalCount }
|
||||
|
||||
# I am pretty sure there is a syntax to destructure and keep a reference to the whole, but Im not sure what it is.
|
||||
getChar : Context -> Task [T U8 Context] [EndOfData, NoScope]
|
||||
getChar = \ctx ->
|
||||
when List.last ctx.scopes is
|
||||
Ok scope ->
|
||||
(T val newScope) <- Task.await (getCharScope scope)
|
||||
Task.succeed (T val { ctx & scopes: List.set ctx.scopes (List.len ctx.scopes - 1) newScope })
|
||||
(T val newScope) = getCharScope! scope
|
||||
Task.ok (T val { ctx & scopes: List.set ctx.scopes (List.len ctx.scopes - 1) newScope })
|
||||
|
||||
Err ListWasEmpty ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
getCharScope : Scope -> Task [T U8 Scope] [EndOfData, NoScope]
|
||||
getCharScope = \scope ->
|
||||
when List.get scope.buf scope.index is
|
||||
Ok val ->
|
||||
Task.succeed (T val { scope & index: scope.index + 1 })
|
||||
Task.ok (T val { scope & index: scope.index + 1 })
|
||||
|
||||
Err OutOfBounds ->
|
||||
when scope.data is
|
||||
Some h ->
|
||||
bytes <- Task.await (File.chunk h)
|
||||
bytes = File.chunk! h
|
||||
when List.first bytes is
|
||||
Ok val ->
|
||||
# This starts at 1 because the first character is already being returned.
|
||||
Task.succeed (T val { scope & buf: bytes, index: 1 })
|
||||
Task.ok (T val { scope & buf: bytes, index: 1 })
|
||||
|
||||
Err ListWasEmpty ->
|
||||
Task.fail EndOfData
|
||||
Task.err EndOfData
|
||||
|
||||
None ->
|
||||
Task.fail EndOfData
|
||||
Task.err EndOfData
|
||||
|
||||
inWhileScope : Context -> Bool
|
||||
inWhileScope = \ctx ->
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
app [main] { pf: platform "platform/main.roc" }
|
||||
|
||||
import pf.Task exposing [Task]
|
||||
import pf.Stdout
|
||||
import pf.Stdin
|
||||
import Context exposing [Context]
|
||||
|
@ -24,51 +23,51 @@ InterpreterErrors : [BadUtf8, DivByZero, EmptyStack, InvalidBooleanValue, Invali
|
|||
main : Str -> Task {} []
|
||||
main = \filename ->
|
||||
interpretFile filename
|
||||
|> Task.onFail \StringErr e -> Stdout.line "Ran into problem:\n$(e)\n"
|
||||
|> Task.onErr \StringErr e -> Stdout.line "Ran into problem:\n$(e)\n"
|
||||
|
||||
interpretFile : Str -> Task {} [StringErr Str]
|
||||
interpretFile = \filename ->
|
||||
ctx <- Context.with filename
|
||||
result <- Task.attempt (interpretCtx ctx)
|
||||
when result is
|
||||
Ok _ ->
|
||||
Task.succeed {}
|
||||
Context.with filename \ctx ->
|
||||
result = interpretCtx ctx |> Task.result!
|
||||
when result is
|
||||
Ok _ ->
|
||||
Task.ok {}
|
||||
|
||||
Err BadUtf8 ->
|
||||
Task.fail (StringErr "Failed to convert string from Utf8 bytes")
|
||||
Err BadUtf8 ->
|
||||
Task.err (StringErr "Failed to convert string from Utf8 bytes")
|
||||
|
||||
Err DivByZero ->
|
||||
Task.fail (StringErr "Division by zero")
|
||||
Err DivByZero ->
|
||||
Task.err (StringErr "Division by zero")
|
||||
|
||||
Err EmptyStack ->
|
||||
Task.fail (StringErr "Tried to pop a value off of the stack when it was empty")
|
||||
Err EmptyStack ->
|
||||
Task.err (StringErr "Tried to pop a value off of the stack when it was empty")
|
||||
|
||||
Err InvalidBooleanValue ->
|
||||
Task.fail (StringErr "Ran into an invalid boolean that was neither false (0) or true (-1)")
|
||||
Err InvalidBooleanValue ->
|
||||
Task.err (StringErr "Ran into an invalid boolean that was neither false (0) or true (-1)")
|
||||
|
||||
Err (InvalidChar char) ->
|
||||
Task.fail (StringErr "Ran into an invalid character with ascii code: $(char)")
|
||||
Err (InvalidChar char) ->
|
||||
Task.err (StringErr "Ran into an invalid character with ascii code: $(char)")
|
||||
|
||||
Err MaxInputNumber ->
|
||||
Task.fail (StringErr "Like the original false compiler, the max input number is 320,000")
|
||||
Err MaxInputNumber ->
|
||||
Task.err (StringErr "Like the original false compiler, the max input number is 320,000")
|
||||
|
||||
Err NoLambdaOnStack ->
|
||||
Task.fail (StringErr "Tried to run a lambda when no lambda was on the stack")
|
||||
Err NoLambdaOnStack ->
|
||||
Task.err (StringErr "Tried to run a lambda when no lambda was on the stack")
|
||||
|
||||
Err NoNumberOnStack ->
|
||||
Task.fail (StringErr "Tried to run a number when no number was on the stack")
|
||||
Err NoNumberOnStack ->
|
||||
Task.err (StringErr "Tried to run a number when no number was on the stack")
|
||||
|
||||
Err NoVariableOnStack ->
|
||||
Task.fail (StringErr "Tried to load a variable when no variable was on the stack")
|
||||
Err NoVariableOnStack ->
|
||||
Task.err (StringErr "Tried to load a variable when no variable was on the stack")
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail (StringErr "Tried to run code when not in any scope")
|
||||
Err NoScope ->
|
||||
Task.err (StringErr "Tried to run code when not in any scope")
|
||||
|
||||
Err OutOfBounds ->
|
||||
Task.fail (StringErr "Tried to load from an offset that was outside of the stack")
|
||||
Err OutOfBounds ->
|
||||
Task.err (StringErr "Tried to load from an offset that was outside of the stack")
|
||||
|
||||
Err UnexpectedEndOfData ->
|
||||
Task.fail (StringErr "Hit end of data while still parsing something")
|
||||
Err UnexpectedEndOfData ->
|
||||
Task.err (StringErr "Hit end of data while still parsing something")
|
||||
|
||||
isDigit : U8 -> Bool
|
||||
isDigit = \char ->
|
||||
|
@ -107,37 +106,37 @@ interpretCtxLoop = \ctx ->
|
|||
if n == 0 then
|
||||
newScope = { scope & whileInfo: None }
|
||||
|
||||
Task.succeed (Step { popCtx & scopes: List.set ctx.scopes last newScope })
|
||||
Task.ok (Step { popCtx & scopes: List.set ctx.scopes last newScope })
|
||||
else
|
||||
newScope = { scope & whileInfo: Some { state: InBody, body, cond } }
|
||||
|
||||
Task.succeed (Step { popCtx & scopes: List.append (List.set ctx.scopes last newScope) { data: None, buf: body, index: 0, whileInfo: None } })
|
||||
Task.ok (Step { popCtx & scopes: List.append (List.set ctx.scopes last newScope) { data: None, buf: body, index: 0, whileInfo: None } })
|
||||
|
||||
Err e ->
|
||||
Task.fail e
|
||||
Task.err e
|
||||
|
||||
Some { state: InBody, body, cond } ->
|
||||
# Just rand the body. Run the condition again.
|
||||
newScope = { scope & whileInfo: Some { state: InCond, body, cond } }
|
||||
|
||||
Task.succeed (Step { ctx & scopes: List.append (List.set ctx.scopes last newScope) { data: None, buf: cond, index: 0, whileInfo: None } })
|
||||
Task.ok (Step { ctx & scopes: List.append (List.set ctx.scopes last newScope) { data: None, buf: cond, index: 0, whileInfo: None } })
|
||||
|
||||
None ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err OutOfBounds ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Executing ->
|
||||
# {} <- Task.await (Stdout.line (Context.toStr ctx))
|
||||
result <- Task.attempt (Context.getChar ctx)
|
||||
# Stdout.line! (Context.toStr ctx)
|
||||
result = Context.getChar ctx |> Task.result!
|
||||
when result is
|
||||
Ok (T val newCtx) ->
|
||||
execCtx <- Task.await (stepExecCtx newCtx val)
|
||||
Task.succeed (Step execCtx)
|
||||
execCtx = stepExecCtx! newCtx val
|
||||
Task.ok (Step execCtx)
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err EndOfData ->
|
||||
# Computation complete for this scope.
|
||||
|
@ -146,28 +145,28 @@ interpretCtxLoop = \ctx ->
|
|||
|
||||
# If no scopes left, all execution complete.
|
||||
if List.isEmpty dropCtx.scopes then
|
||||
Task.succeed (Done dropCtx)
|
||||
Task.ok (Done dropCtx)
|
||||
else
|
||||
Task.succeed (Step dropCtx)
|
||||
Task.ok (Step dropCtx)
|
||||
|
||||
InComment ->
|
||||
result <- Task.attempt (Context.getChar ctx)
|
||||
result = Context.getChar ctx |> Task.result!
|
||||
when result is
|
||||
Ok (T val newCtx) ->
|
||||
if val == 0x7D then
|
||||
# `}` end of comment
|
||||
Task.succeed (Step { newCtx & state: Executing })
|
||||
Task.ok (Step { newCtx & state: Executing })
|
||||
else
|
||||
Task.succeed (Step { newCtx & state: InComment })
|
||||
Task.ok (Step { newCtx & state: InComment })
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err EndOfData ->
|
||||
Task.fail UnexpectedEndOfData
|
||||
Task.err UnexpectedEndOfData
|
||||
|
||||
InNumber accum ->
|
||||
result <- Task.attempt (Context.getChar ctx)
|
||||
result = Context.getChar ctx |> Task.result!
|
||||
when result is
|
||||
Ok (T val newCtx) ->
|
||||
if isDigit val then
|
||||
|
@ -177,68 +176,68 @@ interpretCtxLoop = \ctx ->
|
|||
# so this is make i64 mul by 10 then convert back to i32.
|
||||
nextAccum = (10 * Num.intCast accum) + Num.intCast (val - 0x30)
|
||||
|
||||
Task.succeed (Step { newCtx & state: InNumber (Num.intCast nextAccum) })
|
||||
Task.ok (Step { newCtx & state: InNumber (Num.intCast nextAccum) })
|
||||
else
|
||||
# outside of number now, this needs to be executed.
|
||||
pushCtx = Context.pushStack newCtx (Number accum)
|
||||
|
||||
execCtx <- Task.await (stepExecCtx { pushCtx & state: Executing } val)
|
||||
Task.succeed (Step execCtx)
|
||||
execCtx = stepExecCtx! { pushCtx & state: Executing } val
|
||||
Task.ok (Step execCtx)
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err EndOfData ->
|
||||
Task.fail UnexpectedEndOfData
|
||||
Task.err UnexpectedEndOfData
|
||||
|
||||
InString bytes ->
|
||||
result <- Task.attempt (Context.getChar ctx)
|
||||
result = Context.getChar ctx |> Task.result!
|
||||
when result is
|
||||
Ok (T val newCtx) ->
|
||||
if val == 0x22 then
|
||||
# `"` end of string
|
||||
when Str.fromUtf8 bytes is
|
||||
Ok str ->
|
||||
{} <- Task.await (Stdout.raw str)
|
||||
Task.succeed (Step { newCtx & state: Executing })
|
||||
Stdout.raw! str
|
||||
Task.ok (Step { newCtx & state: Executing })
|
||||
|
||||
Err _ ->
|
||||
Task.fail BadUtf8
|
||||
Task.err BadUtf8
|
||||
else
|
||||
Task.succeed (Step { newCtx & state: InString (List.append bytes val) })
|
||||
Task.ok (Step { newCtx & state: InString (List.append bytes val) })
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err EndOfData ->
|
||||
Task.fail UnexpectedEndOfData
|
||||
Task.err UnexpectedEndOfData
|
||||
|
||||
InLambda depth bytes ->
|
||||
result <- Task.attempt (Context.getChar ctx)
|
||||
result = Context.getChar ctx |> Task.result!
|
||||
when result is
|
||||
Ok (T val newCtx) ->
|
||||
if val == 0x5B then
|
||||
# start of a nested lambda `[`
|
||||
Task.succeed (Step { newCtx & state: InLambda (depth + 1) (List.append bytes val) })
|
||||
Task.ok (Step { newCtx & state: InLambda (depth + 1) (List.append bytes val) })
|
||||
else if val == 0x5D then
|
||||
# `]` end of current lambda
|
||||
if depth == 0 then
|
||||
# end of all lambdas
|
||||
Task.succeed (Step (Context.pushStack { newCtx & state: Executing } (Lambda bytes)))
|
||||
Task.ok (Step (Context.pushStack { newCtx & state: Executing } (Lambda bytes)))
|
||||
else
|
||||
# end of nested lambda
|
||||
Task.succeed (Step { newCtx & state: InLambda (depth - 1) (List.append bytes val) })
|
||||
Task.ok (Step { newCtx & state: InLambda (depth - 1) (List.append bytes val) })
|
||||
else
|
||||
Task.succeed (Step { newCtx & state: InLambda depth (List.append bytes val) })
|
||||
Task.ok (Step { newCtx & state: InLambda depth (List.append bytes val) })
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err EndOfData ->
|
||||
Task.fail UnexpectedEndOfData
|
||||
Task.err UnexpectedEndOfData
|
||||
|
||||
InSpecialChar ->
|
||||
result <- Task.attempt (Context.getChar { ctx & state: Executing })
|
||||
result = Context.getChar { ctx & state: Executing } |> Task.result!
|
||||
when result is
|
||||
Ok (T 0xB8 newCtx) ->
|
||||
result2 =
|
||||
|
@ -254,35 +253,35 @@ interpretCtxLoop = \ctx ->
|
|||
Err OutOfBounds
|
||||
|
||||
when result2 is
|
||||
Ok a -> Task.succeed (Step a)
|
||||
Err e -> Task.fail e
|
||||
Ok a -> Task.ok (Step a)
|
||||
Err e -> Task.err e
|
||||
|
||||
Ok (T 0x9F newCtx) ->
|
||||
# This is supposed to flush io buffers. We don't buffer, so it does nothing
|
||||
Task.succeed (Step newCtx)
|
||||
Task.ok (Step newCtx)
|
||||
|
||||
Ok (T x _) ->
|
||||
data = Num.toStr (Num.intCast x)
|
||||
|
||||
Task.fail (InvalidChar data)
|
||||
Task.err (InvalidChar data)
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err EndOfData ->
|
||||
Task.fail UnexpectedEndOfData
|
||||
Task.err UnexpectedEndOfData
|
||||
|
||||
LoadChar ->
|
||||
result <- Task.attempt (Context.getChar { ctx & state: Executing })
|
||||
result = Context.getChar { ctx & state: Executing } |> Task.result!
|
||||
when result is
|
||||
Ok (T x newCtx) ->
|
||||
Task.succeed (Step (Context.pushStack newCtx (Number (Num.intCast x))))
|
||||
Task.ok (Step (Context.pushStack newCtx (Number (Num.intCast x))))
|
||||
|
||||
Err NoScope ->
|
||||
Task.fail NoScope
|
||||
Task.err NoScope
|
||||
|
||||
Err EndOfData ->
|
||||
Task.fail UnexpectedEndOfData
|
||||
Task.err UnexpectedEndOfData
|
||||
|
||||
# If it weren't for reading stdin or writing to stdout, this could return a result.
|
||||
stepExecCtx : Context, U8 -> Task Context InterpreterErrors
|
||||
|
@ -333,15 +332,15 @@ stepExecCtx = \ctx, char ->
|
|||
# Switching this to List.last and changing the error to ListWasEmpty leads to a compiler bug.
|
||||
# Complains about the types eq not matching.
|
||||
when List.get ctx.stack (List.len ctx.stack - 1) is
|
||||
Ok dupItem -> Task.succeed (Context.pushStack ctx dupItem)
|
||||
Err OutOfBounds -> Task.fail EmptyStack
|
||||
Ok dupItem -> Task.ok (Context.pushStack ctx dupItem)
|
||||
Err OutOfBounds -> Task.err EmptyStack
|
||||
|
||||
0x25 ->
|
||||
# `%` drop
|
||||
when Context.popStack ctx is
|
||||
# Dropping with an empty stack, all results here are fine
|
||||
Ok (T popCtx _) -> Task.succeed popCtx
|
||||
Err _ -> Task.succeed ctx
|
||||
Ok (T popCtx _) -> Task.ok popCtx
|
||||
Err _ -> Task.ok ctx
|
||||
|
||||
0x5C ->
|
||||
# `\` swap
|
||||
|
@ -352,11 +351,11 @@ stepExecCtx = \ctx, char ->
|
|||
|
||||
when result2 is
|
||||
Ok a ->
|
||||
Task.succeed a
|
||||
Task.ok a
|
||||
|
||||
# Being explicit with the error type is required to stop the need to propagate the error parameters to Context.popStack
|
||||
Err EmptyStack ->
|
||||
Task.fail EmptyStack
|
||||
Task.err EmptyStack
|
||||
|
||||
0x40 ->
|
||||
# `@` rot
|
||||
|
@ -368,17 +367,17 @@ stepExecCtx = \ctx, char ->
|
|||
|
||||
when result2 is
|
||||
Ok a ->
|
||||
Task.succeed a
|
||||
Task.ok a
|
||||
|
||||
# Being explicit with the error type is required to stop the need to propagate the error parameters to Context.popStack
|
||||
Err EmptyStack ->
|
||||
Task.fail EmptyStack
|
||||
Task.err EmptyStack
|
||||
|
||||
0xC3 ->
|
||||
# `ø` pick or `ß` flush
|
||||
# these are actually 2 bytes, 0xC3 0xB8 or 0xC3 0x9F
|
||||
# requires special parsing
|
||||
Task.succeed { ctx & state: InSpecialChar }
|
||||
Task.ok { ctx & state: InSpecialChar }
|
||||
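# Editorial note (not part of the diff): the "2 bytes" comment above can be
# checked directly, since Str.toUtf8 "ø" == [0xC3, 0xB8] and
# Str.toUtf8 "ß" == [0xC3, 0x9F]; after reading 0xC3 the interpreter switches
# to InSpecialChar and dispatches on the second byte (0xB8 = pick, 0x9F = flush).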
|
||||
0x4F ->
|
||||
# `O` also treat this as pick for easier script writing
|
||||
|
@ -399,11 +398,11 @@ stepExecCtx = \ctx, char ->
|
|||
0x42 ->
|
||||
# `B` also treat this as flush for easier script writing
|
||||
# This is supposed to flush io buffers. We don't buffer, so it does nothing
|
||||
Task.succeed ctx
|
||||
Task.ok ctx
|
||||
|
||||
0x27 ->
|
||||
# `'` load next char
|
||||
Task.succeed { ctx & state: LoadChar }
|
||||
Task.ok { ctx & state: LoadChar }
|
||||
|
||||
0x2B ->
|
||||
# `+` add
|
||||
|
@ -472,33 +471,33 @@ stepExecCtx = \ctx, char ->
|
|||
Ok (T popCtx num) ->
|
||||
when Str.fromUtf8 [Num.intCast num] is
|
||||
Ok str ->
|
||||
{} <- Task.await (Stdout.raw str)
|
||||
Task.succeed popCtx
|
||||
Stdout.raw! str
|
||||
Task.ok popCtx
|
||||
|
||||
Err _ ->
|
||||
Task.fail BadUtf8
|
||||
Task.err BadUtf8
|
||||
|
||||
Err e ->
|
||||
Task.fail e
|
||||
Task.err e
|
||||
|
||||
0x2E ->
|
||||
# `.` write int
|
||||
when popNumber ctx is
|
||||
Ok (T popCtx num) ->
|
||||
{} <- Task.await (Stdout.raw (Num.toStr (Num.intCast num)))
|
||||
Task.succeed popCtx
|
||||
Stdout.raw! (Num.toStr (Num.intCast num))
|
||||
Task.ok popCtx
|
||||
|
||||
Err e ->
|
||||
Task.fail e
|
||||
Task.err e
|
||||
|
||||
0x5E ->
|
||||
# `^` read char as int
|
||||
in <- Task.await Stdin.char
|
||||
in = Stdin.char! {}
|
||||
if in == 255 then
|
||||
# max char sent on EOF. Change to -1
|
||||
Task.succeed (Context.pushStack ctx (Number -1))
|
||||
Task.ok (Context.pushStack ctx (Number -1))
|
||||
else
|
||||
Task.succeed (Context.pushStack ctx (Number (Num.intCast in)))
|
||||
Task.ok (Context.pushStack ctx (Number (Num.intCast in)))
|
||||
|
||||
0x3A ->
|
||||
# `:` store to variable
|
||||
|
@ -521,33 +520,33 @@ stepExecCtx = \ctx, char ->
|
|||
|
||||
0x22 ->
|
||||
# `"` string start
|
||||
Task.succeed { ctx & state: InString [] }
|
||||
Task.ok { ctx & state: InString [] }
|
||||
|
||||
0x5B ->
|
||||
# `[` lambda start
|
||||
Task.succeed { ctx & state: InLambda 0 [] }
|
||||
Task.ok { ctx & state: InLambda 0 [] }
|
||||
|
||||
0x7B ->
|
||||
# `{` comment start
|
||||
Task.succeed { ctx & state: InComment }
|
||||
Task.ok { ctx & state: InComment }
|
||||
|
||||
x if isDigit x ->
|
||||
# number start
|
||||
Task.succeed { ctx & state: InNumber (Num.intCast (x - 0x30)) }
|
||||
Task.ok { ctx & state: InNumber (Num.intCast (x - 0x30)) }
|
||||
|
||||
x if isWhitespace x ->
|
||||
Task.succeed ctx
|
||||
Task.ok ctx
|
||||
|
||||
x ->
|
||||
when Variable.fromUtf8 x is
|
||||
# letters are variable names
|
||||
Ok var ->
|
||||
Task.succeed (Context.pushStack ctx (Var var))
|
||||
Task.ok (Context.pushStack ctx (Var var))
|
||||
|
||||
Err _ ->
|
||||
data = Num.toStr (Num.intCast x)
|
||||
|
||||
Task.fail (InvalidChar data)
|
||||
Task.err (InvalidChar data)
|
||||
|
||||
unaryOp : Context, (I32 -> I32) -> Result Context InterpreterErrors
|
||||
unaryOp = \ctx, op ->
|
||||
|
|
@@ -1,22 +0,0 @@
-hosted Effect
-    exposes [Effect, after, map, always, forever, loop, openFile, closeFile, withFileOpen, getFileLine, getFileBytes, putLine, putRaw, getLine, getChar]
-    imports []
-    generates Effect with [after, map, always, forever, loop]
-
-openFile : Str -> Effect U64
-
-closeFile : U64 -> Effect {}
-
-withFileOpen : Str, (U64 -> Effect (Result ok err)) -> Effect {}
-
-getFileLine : U64 -> Effect Str
-
-getFileBytes : U64 -> Effect (List U8)
-
-putLine : Str -> Effect {}
-
-putRaw : Str -> Effect {}
-
-getLine : Effect Str
-
-getChar : Effect U8
|
@ -1,28 +1,34 @@
|
|||
module [line, Handle, withOpen, chunk]
|
||||
module [line, withOpen, chunk, Handle]
|
||||
|
||||
import pf.Effect
|
||||
import Task exposing [Task]
|
||||
import pf.PlatformTasks
|
||||
|
||||
Handle := U64
|
||||
|
||||
line : Handle -> Task.Task Str *
|
||||
line = \@Handle handle -> Effect.after (Effect.getFileLine handle) Task.succeed
|
||||
line : Handle -> Task Str *
|
||||
line = \@Handle handle ->
|
||||
PlatformTasks.getFileLine handle
|
||||
|> Task.mapErr \_ -> crash "unreachable File.line"
|
||||
|
||||
chunk : Handle -> Task.Task (List U8) *
|
||||
chunk = \@Handle handle -> Effect.after (Effect.getFileBytes handle) Task.succeed
|
||||
chunk : Handle -> Task (List U8) *
|
||||
chunk = \@Handle handle ->
|
||||
PlatformTasks.getFileBytes handle
|
||||
|> Task.mapErr \_ -> crash "unreachable File.chunk"
|
||||
|
||||
open : Str -> Task.Task Handle *
|
||||
open : Str -> Task Handle *
|
||||
open = \path ->
|
||||
Effect.openFile path
|
||||
|> Effect.map (\id -> @Handle id)
|
||||
|> Effect.after Task.succeed
|
||||
PlatformTasks.openFile path
|
||||
|> Task.mapErr \_ -> crash "unreachable File.open"
|
||||
|> Task.map @Handle
|
||||
|
||||
close : Handle -> Task.Task {} *
|
||||
close = \@Handle handle -> Effect.after (Effect.closeFile handle) Task.succeed
|
||||
close = \@Handle handle ->
|
||||
PlatformTasks.closeFile handle
|
||||
|> Task.mapErr \_ -> crash "unreachable File.close"
|
||||
|
||||
withOpen : Str, (Handle -> Task {} a) -> Task {} a
|
||||
withOpen = \path, callback ->
|
||||
handle <- Task.await (open path)
|
||||
result <- Task.attempt (callback handle)
|
||||
{} <- Task.await (close handle)
|
||||
handle = open! path
|
||||
result = callback handle |> Task.result!
|
||||
close! handle
|
||||
|
||||
Task.fromResult result
|
||||
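A short usage sketch for the rewritten withOpen above (illustrative only; the
"input.txt" file name and the Stdout call are assumptions, not part of this diff):

    readFirstLine : Task {} *
    readFirstLine =
        File.withOpen "input.txt" \handle ->
            line = File.line! handle
            Stdout.line line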
|
|
|
@@ -0,0 +1,21 @@
+hosted PlatformTasks
+    exposes [openFile, closeFile, withFileOpen, getFileLine, getFileBytes, putLine, putRaw, getLine, getChar]
+    imports []
+
+openFile : Str -> Task U64 {}
+
+closeFile : U64 -> Task {} {}
+
+withFileOpen : Str, (U64 -> Task ok err) -> Task {} {}
+
+getFileLine : U64 -> Task Str {}
+
+getFileBytes : U64 -> Task (List U8) {}
+
+putLine : Str -> Task {} {}
+
+putRaw : Str -> Task {} {}
+
+getLine : Task Str {}
+
+getChar : Task U8 {}
|
|
@@ -1,9 +1,16 @@
-module [char]
+module [
+    line,
+    char,
+]
 
-import pf.Effect
-import Task
+import pf.PlatformTasks
 
-# line : Task.Task Str *
-# line = Effect.after Effect.getLine Task.succeed # TODO FIXME Effect.getLine should suffice
-char : Task.Task U8 *
-char = Effect.after Effect.getChar Task.succeed # TODO FIXME Effect.getLine should suffice
+line : {} -> Task Str *
+line = \{} ->
+    PlatformTasks.getLine
+    |> Task.mapErr \_ -> crash "unreachable Stdin.line"
+
+char : {} -> Task U8 *
+char = \{} ->
+    PlatformTasks.getChar
+    |> Task.mapErr \_ -> crash "unreachable Stdin.char"
|
|
@@ -1,10 +1,13 @@
 module [line, raw]
 
-import pf.Effect
-import Task exposing [Task]
+import pf.PlatformTasks
 
 line : Str -> Task {} *
-line = \str -> Effect.map (Effect.putLine str) (\_ -> Ok {})
+line = \text ->
+    PlatformTasks.putLine text
+    |> Task.mapErr \_ -> crash "unreachable Stdout.line"
 
 raw : Str -> Task {} *
-raw = \str -> Effect.map (Effect.putRaw str) (\_ -> Ok {})
+raw = \text ->
+    PlatformTasks.putRaw text
+    |> Task.mapErr \_ -> crash "unreachable Stdout.raw"
|
|
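Taken together with the File module above, the new Stdin and Stdout wrappers let
application code chain effects with the ! suffix. A minimal sketch (assuming this
platform's modules; the greeting text is made up):

    greet : Task {} *
    greet =
        Stdout.raw! "name? "
        name = Stdin.line! {}
        Stdout.line "hi, $(name)"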
|
@ -1,68 +0,0 @@
|
|||
module [Task, succeed, fail, await, map, onFail, attempt, fromResult, loop]
|
||||
|
||||
import pf.Effect
|
||||
|
||||
Task ok err : Effect.Effect (Result ok err)
|
||||
|
||||
loop : state, (state -> Task [Step state, Done done] err) -> Task done err
|
||||
loop = \state, step ->
|
||||
looper = \current ->
|
||||
step current
|
||||
|> Effect.map
|
||||
\res ->
|
||||
when res is
|
||||
Ok (Step newState) -> Step newState
|
||||
Ok (Done result) -> Done (Ok result)
|
||||
Err e -> Done (Err e)
|
||||
|
||||
Effect.loop state looper
|
||||
|
||||
succeed : val -> Task val *
|
||||
succeed = \val ->
|
||||
Effect.always (Ok val)
|
||||
|
||||
fail : err -> Task * err
|
||||
fail = \val ->
|
||||
Effect.always (Err val)
|
||||
|
||||
fromResult : Result a e -> Task a e
|
||||
fromResult = \result ->
|
||||
when result is
|
||||
Ok a -> succeed a
|
||||
Err e -> fail e
|
||||
|
||||
attempt : Task a b, (Result a b -> Task c d) -> Task c d
|
||||
attempt = \effect, transform ->
|
||||
Effect.after
|
||||
effect
|
||||
\result ->
|
||||
when result is
|
||||
Ok ok -> transform (Ok ok)
|
||||
Err err -> transform (Err err)
|
||||
|
||||
await : Task a err, (a -> Task b err) -> Task b err
|
||||
await = \effect, transform ->
|
||||
Effect.after
|
||||
effect
|
||||
\result ->
|
||||
when result is
|
||||
Ok a -> transform a
|
||||
Err err -> Task.fail err
|
||||
|
||||
onFail : Task ok a, (a -> Task ok b) -> Task ok b
|
||||
onFail = \effect, transform ->
|
||||
Effect.after
|
||||
effect
|
||||
\result ->
|
||||
when result is
|
||||
Ok a -> Task.succeed a
|
||||
Err err -> transform err
|
||||
|
||||
map : Task a err, (a -> b) -> Task b err
|
||||
map = \effect, transform ->
|
||||
Effect.after
|
||||
effect
|
||||
\result ->
|
||||
when result is
|
||||
Ok a -> Task.succeed (transform a)
|
||||
Err err -> Task.fail err
|
|
@@ -2,7 +2,7 @@ platform "false-interpreter"
     requires {} { main : Str -> Task {} [] }
     exposes []
     packages {}
-    imports [Task.{ Task }]
+    imports []
     provides [mainForHost]
 
 mainForHost : Str -> Task {} []
|
|
|
@ -3,12 +3,21 @@
|
|||
use core::ffi::c_void;
|
||||
use core::mem::MaybeUninit;
|
||||
use libc;
|
||||
use roc_std::{RocList, RocStr};
|
||||
use roc_std::{RocList, RocResult, RocStr};
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::ffi::CStr;
|
||||
use std::fs::File;
|
||||
use std::io::{BufRead, BufReader, Read, Write};
|
||||
use std::os::raw::c_char;
|
||||
use std::sync::atomic::{AtomicU64, Ordering};
|
||||
use std::sync::{Mutex, OnceLock};
|
||||
|
||||
static FILE_ID: AtomicU64 = AtomicU64::new(0);
|
||||
|
||||
fn file_handles() -> &'static Mutex<HashMap<u64, BufReader<File>>> {
|
||||
static FILE_HANDLES: OnceLock<Mutex<HashMap<u64, BufReader<File>>>> = OnceLock::new();
|
||||
|
||||
FILE_HANDLES.get_or_init(|| Mutex::new(HashMap::default()))
|
||||
}
|
||||
|
||||
extern "C" {
|
||||
#[link_name = "roc__mainForHost_1_exposed_generic"]
|
||||
|
@ -146,81 +155,89 @@ unsafe fn call_the_closure(closure_data_ptr: *const u8) -> i64 {
|
|||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_getLine() -> RocStr {
|
||||
pub extern "C" fn roc_fx_getLine() -> RocResult<RocStr, ()> {
|
||||
let stdin = std::io::stdin();
|
||||
let line1 = stdin.lock().lines().next().unwrap().unwrap();
|
||||
|
||||
RocStr::from(line1.as_str())
|
||||
RocResult::ok(RocStr::from(line1.as_str()))
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_getChar() -> u8 {
|
||||
pub extern "C" fn roc_fx_getChar() -> RocResult<u8, ()> {
|
||||
let mut buffer = [0];
|
||||
|
||||
if let Err(ioerr) = std::io::stdin().lock().read_exact(&mut buffer[..]) {
|
||||
if ioerr.kind() == std::io::ErrorKind::UnexpectedEof {
|
||||
u8::MAX
|
||||
RocResult::ok(u8::MAX)
|
||||
} else {
|
||||
panic!("Got an unexpected error while reading char from stdin");
|
||||
}
|
||||
} else {
|
||||
buffer[0]
|
||||
RocResult::ok(buffer[0])
|
||||
}
|
||||
}
|
||||
|
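// Editorial note (not in the diff): returning u8::MAX (255) above is the host's
// EOF signal; the Roc interpreter's `^` (read-char) case earlier in this diff
// turns that 255 back into the FALSE convention of pushing -1 onto the stack.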
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_putLine(line: &RocStr) {
|
||||
pub extern "C" fn roc_fx_putLine(line: &RocStr) -> RocResult<(), ()> {
|
||||
let string = line.as_str();
|
||||
println!("{}", string);
|
||||
let _ = std::io::stdout().lock().flush();
|
||||
|
||||
RocResult::ok(())
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_putRaw(line: &RocStr) {
|
||||
pub extern "C" fn roc_fx_putRaw(line: &RocStr) -> RocResult<(), ()> {
|
||||
let string = line.as_str();
|
||||
print!("{}", string);
|
||||
let _ = std::io::stdout().lock().flush();
|
||||
|
||||
RocResult::ok(())
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_getFileLine(br_ptr: *mut BufReader<File>) -> RocStr {
|
||||
let br = unsafe { &mut *br_ptr };
|
||||
pub extern "C" fn roc_fx_getFileLine(br_id: u64) -> RocResult<RocStr, ()> {
|
||||
let mut br_map = file_handles().lock().unwrap();
|
||||
let br = br_map.get_mut(&br_id).unwrap();
|
||||
let mut line1 = String::default();
|
||||
|
||||
br.read_line(&mut line1)
|
||||
.expect("Failed to read line from file");
|
||||
|
||||
RocStr::from(line1.as_str())
|
||||
RocResult::ok(RocStr::from(line1.as_str()))
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_getFileBytes(br_ptr: *mut BufReader<File>) -> RocList<u8> {
|
||||
let br = unsafe { &mut *br_ptr };
|
||||
pub extern "C" fn roc_fx_getFileBytes(br_id: u64) -> RocResult<RocList<u8>, ()> {
|
||||
let mut br_map = file_handles().lock().unwrap();
|
||||
let br = br_map.get_mut(&br_id).unwrap();
|
||||
let mut buffer = [0; 0x10 /* This is intentionally small to ensure correct implementation */];
|
||||
|
||||
let count = br
|
||||
.read(&mut buffer[..])
|
||||
.expect("Failed to read bytes from file");
|
||||
|
||||
RocList::from_slice(&buffer[..count])
|
||||
RocResult::ok(RocList::from_slice(&buffer[..count]))
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_closeFile(br_ptr: *mut BufReader<File>) {
|
||||
unsafe {
|
||||
let boxed = Box::from_raw(br_ptr);
|
||||
drop(boxed)
|
||||
}
|
||||
pub extern "C" fn roc_fx_closeFile(br_id: u64) -> RocResult<(), ()> {
|
||||
file_handles().lock().unwrap().remove(&br_id);
|
||||
|
||||
RocResult::ok(())
|
||||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_openFile(name: &RocStr) -> *mut BufReader<File> {
|
||||
pub extern "C" fn roc_fx_openFile(name: &RocStr) -> RocResult<u64, ()> {
|
||||
let string = name.as_str();
|
||||
match File::open(string) {
|
||||
Ok(f) => {
|
||||
let br = BufReader::new(f);
|
||||
let br_id = FILE_ID.fetch_add(1, Ordering::SeqCst);
|
||||
|
||||
Box::into_raw(Box::new(br))
|
||||
file_handles().lock().unwrap().insert(br_id, br);
|
||||
|
||||
RocResult::ok(br_id)
|
||||
}
|
||||
Err(_) => {
|
||||
panic!(
|
||||
|
@ -232,7 +249,7 @@ pub extern "C" fn roc_fx_openFile(name: &RocStr) -> *mut BufReader<File> {
|
|||
}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn roc_fx_withFileOpen(_name: &RocStr, _buffer: *const u8) {
|
||||
pub extern "C" fn roc_fx_withFileOpen(_name: &RocStr, _buffer: *const u8) -> RocResult<(), ()> {
|
||||
// TODO: figure out accepting a closure in an fx and passing data to it.
|
||||
// let f = File::open(name.as_str()).expect("Unable to open file");
|
||||
// let mut br = BufReader::new(f);
|
||||
|
@ -241,4 +258,6 @@ pub extern "C" fn roc_fx_withFileOpen(_name: &RocStr, _buffer: *const u8) {
|
|||
// let closure_data_ptr = buffer.offset(8);
|
||||
// call_the_closure(closure_data_ptr);
|
||||
// }
|
||||
|
||||
RocResult::ok(())
|
||||
}
|
||||
|
|
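The host above replaces the old raw-pointer file handles with u64 ids kept in a
process-wide map. A condensed sketch of just that pattern (the register/unregister
helpers are hypothetical; the statics mirror the host code):

    use std::collections::HashMap;
    use std::fs::File;
    use std::io::BufReader;
    use std::sync::atomic::{AtomicU64, Ordering};
    use std::sync::{Mutex, OnceLock};

    static FILE_ID: AtomicU64 = AtomicU64::new(0);

    fn file_handles() -> &'static Mutex<HashMap<u64, BufReader<File>>> {
        static FILE_HANDLES: OnceLock<Mutex<HashMap<u64, BufReader<File>>>> = OnceLock::new();
        FILE_HANDLES.get_or_init(|| Mutex::new(HashMap::default()))
    }

    // Hand out a fresh id for a newly opened file and remember its reader.
    fn register(file: File) -> u64 {
        let id = FILE_ID.fetch_add(1, Ordering::SeqCst);
        file_handles().lock().unwrap().insert(id, BufReader::new(file));
        id
    }

    // Forget the reader when Roc closes the handle; dropping it closes the file.
    fn unregister(id: u64) {
        file_handles().lock().unwrap().remove(&id);
    }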
|
@@ -1,7 +1,5 @@
 app [main] { pf: platform "../false-interpreter/platform/main.roc" }
 
-import pf.Task exposing [Task]
-
 main : Str -> Task {} []
 main = \_ ->
     "this is a string, not a Task {} [] function like the platform expects."
|
|
|
@@ -1,4 +0,0 @@
-hosted UnknownGeneratesWith
-    exposes [Effect, after, map, always]
-    imports []
-    generates Effect with [after, map, always, foobar]
69
crates/cli/tests/module_params/Api.roc
Normal file
|
@ -0,0 +1,69 @@
|
|||
module { appId, protocol } -> [
|
||||
baseUrl,
|
||||
getUser,
|
||||
getPost,
|
||||
getPosts,
|
||||
getPostComments,
|
||||
getCompanies,
|
||||
baseUrlAliased,
|
||||
getPostAliased,
|
||||
getUserSafe,
|
||||
getPostComment,
|
||||
]
|
||||
|
||||
## value def referencing params
|
||||
baseUrl : Str
|
||||
baseUrl =
|
||||
protocol "api.example.com/$(appId)"
|
||||
|
||||
## function def referencing params
|
||||
getUser : U32 -> Str
|
||||
getUser = \userId ->
|
||||
# purposefully not using baseUrl to test top-level fn referencing param
|
||||
protocol "api.example.com/$(appId)/users/$(Num.toStr userId)"
|
||||
|
||||
## function def referencing top-level value
|
||||
getPost : U32 -> Str
|
||||
getPost = \postId ->
|
||||
"$(baseUrl)/posts/$(Num.toStr postId)"
|
||||
|
||||
## function def passing top-level function
|
||||
getPosts : List U32 -> List Str
|
||||
getPosts = \ids ->
|
||||
List.map ids getPost
|
||||
|
||||
## function def calling top-level function
|
||||
getPostComments : U32 -> Str
|
||||
getPostComments = \postId ->
|
||||
"$(getPost postId)/comments"
|
||||
|
||||
## function def passing nested function
|
||||
getCompanies : List U32 -> List Str
|
||||
getCompanies = \ids ->
|
||||
getCompany = \id ->
|
||||
protocol "api.example.com/$(appId)/companies/$(Num.toStr id)"
|
||||
|
||||
List.map ids getCompany
|
||||
|
||||
## aliasing top-level value
|
||||
baseUrlAliased : Str
|
||||
baseUrlAliased =
|
||||
baseUrl
|
||||
|
||||
## aliasing top-level fn
|
||||
getPostAliased : U32 -> Str
|
||||
getPostAliased =
|
||||
getPost
|
||||
|
||||
## top-level value returning functions
|
||||
getUserSafe : U32 -> Str
|
||||
getUserSafe =
|
||||
if Str.startsWith appId "prod_" then
|
||||
\id -> "$(getUser id)?safe=true"
|
||||
else
|
||||
getUser
|
||||
|
||||
## two-argument function
|
||||
getPostComment : U32, U32 -> Str
|
||||
getPostComment = \postId, commentId ->
|
||||
"$(getPost postId)/comments/$(Num.toStr commentId)"
|
9
crates/cli/tests/module_params/BadAnn.roc
Normal file
|
@@ -0,0 +1,9 @@
+module { appId } -> [fnAnnotatedAsValue, missingArg]
+
+fnAnnotatedAsValue : Str
+fnAnnotatedAsValue = \postId, commentId ->
+    "/posts/$(postId)/comments/$(Num.toStr commentId)"
+
+missingArg : Str -> Str
+missingArg = \postId, _ ->
+    "/posts/$(postId)/comments"
7
crates/cli/tests/module_params/Menu.roc
Normal file
|
@@ -0,0 +1,7 @@
+module { echo } -> [menu]
+
+menu = \name ->
+    indirect name
+
+indirect = \name ->
+    echo "Hi, $(name)!"
59
crates/cli/tests/module_params/app.roc
Normal file
|
@ -0,0 +1,59 @@
|
|||
app [main] {
|
||||
pf: platform "../fixtures/multi-dep-str/platform/main.roc",
|
||||
}
|
||||
|
||||
import Api { appId: "one", protocol: https } as App1
|
||||
import Api { appId: "two", protocol: http } as App2
|
||||
import Api { appId: "prod_1", protocol: http } as Prod
|
||||
|
||||
https = \url -> "https://$(url)"
|
||||
http = \url -> "http://$(url)"
|
||||
|
||||
usersApp1 =
|
||||
# pass top-level fn in a module with params
|
||||
List.map [1, 2, 3] App1.getUser
|
||||
|
||||
main =
|
||||
app3Id = "three"
|
||||
|
||||
import Api { appId: app3Id, protocol: https } as App3
|
||||
|
||||
getUserApp3Nested = \userId ->
|
||||
# use captured params def
|
||||
App3.getUser userId
|
||||
|
||||
usersApp3Passed =
|
||||
# pass top-level fn in a nested def
|
||||
List.map [1, 2, 3] App3.getUser
|
||||
|
||||
"""
|
||||
App1.baseUrl: $(App1.baseUrl)
|
||||
App2.baseUrl: $(App2.baseUrl)
|
||||
App3.baseUrl: $(App3.baseUrl)
|
||||
App1.getUser 1: $(App1.getUser 1)
|
||||
App2.getUser 2: $(App2.getUser 2)
|
||||
App3.getUser 3: $(App3.getUser 3)
|
||||
App1.getPost 1: $(App1.getPost 1)
|
||||
App2.getPost 2: $(App2.getPost 2)
|
||||
App3.getPost 3: $(App3.getPost 3)
|
||||
App1.getPosts [1, 2]: $(Inspect.toStr (App1.getPosts [1, 2]))
|
||||
App2.getPosts [3, 4]: $(Inspect.toStr (App2.getPosts [3, 4]))
|
||||
App2.getPosts [5, 6]: $(Inspect.toStr (App2.getPosts [5, 6]))
|
||||
App1.getPostComments 1: $(App1.getPostComments 1)
|
||||
App2.getPostComments 2: $(App2.getPostComments 2)
|
||||
App2.getPostComments 3: $(App2.getPostComments 3)
|
||||
App1.getCompanies [1, 2]: $(Inspect.toStr (App1.getCompanies [1, 2]))
|
||||
App2.getCompanies [3, 4]: $(Inspect.toStr (App2.getCompanies [3, 4]))
|
||||
App2.getCompanies [5, 6]: $(Inspect.toStr (App2.getCompanies [5, 6]))
|
||||
App1.getPostAliased 1: $(App1.getPostAliased 1)
|
||||
App2.getPostAliased 2: $(App2.getPostAliased 2)
|
||||
App3.getPostAliased 3: $(App3.getPostAliased 3)
|
||||
App1.baseUrlAliased: $(App1.baseUrlAliased)
|
||||
App2.baseUrlAliased: $(App2.baseUrlAliased)
|
||||
App3.baseUrlAliased: $(App3.baseUrlAliased)
|
||||
App1.getUserSafe 1: $(App1.getUserSafe 1)
|
||||
Prod.getUserSafe 2: $(Prod.getUserSafe 2)
|
||||
usersApp1: $(Inspect.toStr usersApp1)
|
||||
getUserApp3Nested 3: $(getUserApp3Nested 3)
|
||||
usersApp3Passed: $(Inspect.toStr usersApp3Passed)
|
||||
"""
|
17
crates/cli/tests/module_params/arity_mismatch.roc
Normal file
|
@@ -0,0 +1,17 @@
+app [main] {
+    pf: platform "../fixtures/multi-dep-str/platform/main.roc",
+}
+
+import Api { appId: "one", protocol: https }
+
+https = \url -> "https://$(url)"
+
+main =
+    """
+    # too many args
+    $(Api.getUser 1 2)
+    $(Api.baseUrl 1)
+
+    # too few args
+    $(Api.getPostComment 1)
+    """
8
crates/cli/tests/module_params/bad_ann.roc
Normal file
|
@@ -0,0 +1,8 @@
+app [main] {
+    pf: platform "../fixtures/multi-dep-str/platform/main.roc",
+}
+
+import BadAnn { appId: "one" }
+
+main =
+    ""
7
crates/cli/tests/module_params/pass_task.roc
Normal file
|
@@ -0,0 +1,7 @@
+app [main] { pf: platform "https://github.com/roc-lang/basic-cli/releases/download/0.15.0/SlwdbJ-3GR7uBWQo6zlmYWNYOxnvo8r6YABXD-45UOw.tar.br" }
+
+import pf.Stdout
+import Menu { echo: Stdout.line }
+
+main =
+    Menu.menu "Agus"
12
crates/cli/tests/module_params/unexpected_fn.roc
Normal file
|
@@ -0,0 +1,12 @@
+app [main] {
+    pf: platform "../fixtures/multi-dep-str/platform/main.roc",
+}
+
+import Api { appId: "one", protocol: https }
+
+https = \url -> "https://$(url)"
+
+main =
+    """
+    $(Api.getPost)
+    """
|
@ -129,8 +129,8 @@ hashDict = \hasher, dict -> Hash.hashUnordered hasher (toList dict) List.walk
|
|||
|
||||
toInspectorDict : Dict k v -> Inspector f where k implements Inspect & Hash & Eq, v implements Inspect, f implements InspectFormatter
|
||||
toInspectorDict = \dict ->
|
||||
fmt <- Inspect.custom
|
||||
Inspect.apply (Inspect.dict dict walk Inspect.toInspector Inspect.toInspector) fmt
|
||||
Inspect.custom \fmt ->
|
||||
Inspect.apply (Inspect.dict dict walk Inspect.toInspector Inspect.toInspector) fmt
|
||||
|
||||
## Return an empty dictionary.
|
||||
## ```roc
|
||||
|
@ -894,9 +894,9 @@ calcNumBuckets = \shifts ->
|
|||
maxBucketCount
|
||||
|
||||
fillBucketsFromData = \buckets0, data, shifts ->
|
||||
buckets1, (key, _), dataIndex <- List.walkWithIndex data buckets0
|
||||
(bucketIndex, distAndFingerprint) = nextWhileLess buckets1 key shifts
|
||||
placeAndShiftUp buckets1 { distAndFingerprint, dataIndex: Num.toU32 dataIndex } bucketIndex
|
||||
List.walkWithIndex data buckets0 \buckets1, (key, _), dataIndex ->
|
||||
(bucketIndex, distAndFingerprint) = nextWhileLess buckets1 key shifts
|
||||
placeAndShiftUp buckets1 { distAndFingerprint, dataIndex: Num.toU32 dataIndex } bucketIndex
|
||||
|
||||
nextWhileLess : List Bucket, k, U8 -> (U64, U32) where k implements Hash & Eq
|
||||
nextWhileLess = \buckets, key, shifts ->
|
||||
|
@ -1213,15 +1213,15 @@ expect
|
|||
]
|
||||
|
||||
dict =
|
||||
acc, k <- List.walk badKeys (Dict.empty {})
|
||||
Dict.update acc k \val ->
|
||||
when val is
|
||||
Present p -> Present (p |> Num.addWrap 1)
|
||||
Missing -> Present 0
|
||||
List.walk badKeys (Dict.empty {}) \acc, k ->
|
||||
Dict.update acc k \val ->
|
||||
when val is
|
||||
Present p -> Present (p |> Num.addWrap 1)
|
||||
Missing -> Present 0
|
||||
|
||||
allInsertedCorrectly =
|
||||
acc, k <- List.walk badKeys Bool.true
|
||||
acc && Dict.contains dict k
|
||||
List.walk badKeys Bool.true \acc, k ->
|
||||
acc && Dict.contains dict k
|
||||
|
||||
allInsertedCorrectly
|
||||
|
||||
|
|
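The Dict changes above follow the same pattern applied across this merge:
backpassing into List.walk is rewritten as an explicit closure argument. A
standalone sketch of the two equivalent forms (countOld uses the old backpassing
sugar being phased out, so it is shown for comparison only):

    # old style: backpassing
    countOld = \keys ->
        acc, _k <- List.walk keys 0
        acc + 1

    # new style: explicit lambda, as used throughout this diff
    countNew = \keys ->
        List.walk keys 0 \acc, _k ->
            acc + 1

    expect countNew ["a", "b"] == 2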
|
@ -138,203 +138,203 @@ dbgInit = \{} -> @DbgFormatter { data: "" }
|
|||
|
||||
dbgList : list, ElemWalker (DbgFormatter, Bool) list elem, (elem -> Inspector DbgFormatter) -> Inspector DbgFormatter
|
||||
dbgList = \content, walkFn, toDbgInspector ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 "["
|
||||
|> \f1 ->
|
||||
(f2, prependSep), elem <- walkFn content (f1, Bool.false)
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "["
|
||||
|> \f1 ->
|
||||
walkFn content (f1, Bool.false) \(f2, prependSep), elem ->
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
|
||||
elem
|
||||
|> toDbgInspector
|
||||
|> apply f3
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "]"
|
||||
elem
|
||||
|> toDbgInspector
|
||||
|> apply f3
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "]"
|
||||
|
||||
dbgSet : set, ElemWalker (DbgFormatter, Bool) set elem, (elem -> Inspector DbgFormatter) -> Inspector DbgFormatter
|
||||
dbgSet = \content, walkFn, toDbgInspector ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 "{"
|
||||
|> \f1 ->
|
||||
(f2, prependSep), elem <- walkFn content (f1, Bool.false)
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "{"
|
||||
|> \f1 ->
|
||||
walkFn content (f1, Bool.false) \(f2, prependSep), elem ->
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
|
||||
elem
|
||||
|> toDbgInspector
|
||||
|> apply f3
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "}"
|
||||
elem
|
||||
|> toDbgInspector
|
||||
|> apply f3
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "}"
|
||||
|
||||
dbgDict : dict, KeyValWalker (DbgFormatter, Bool) dict key value, (key -> Inspector DbgFormatter), (value -> Inspector DbgFormatter) -> Inspector DbgFormatter
|
||||
dbgDict = \d, walkFn, keyToInspector, valueToInspector ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 "{"
|
||||
|> \f1 ->
|
||||
(f2, prependSep), key, value <- walkFn d (f1, Bool.false)
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "{"
|
||||
|> \f1 ->
|
||||
walkFn d (f1, Bool.false) \(f2, prependSep), key, value ->
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
|
||||
apply (keyToInspector key) f3
|
||||
|> dbgWrite ": "
|
||||
|> \x -> apply (valueToInspector value) x
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "}"
|
||||
apply (keyToInspector key) f3
|
||||
|> dbgWrite ": "
|
||||
|> \x -> apply (valueToInspector value) x
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "}"
|
||||
|
||||
dbgTag : Str, List (Inspector DbgFormatter) -> Inspector DbgFormatter
|
||||
dbgTag = \name, fields ->
|
||||
if List.isEmpty fields then
|
||||
f0 <- custom
|
||||
dbgWrite f0 name
|
||||
custom \f0 ->
|
||||
dbgWrite f0 name
|
||||
else
|
||||
f0 <- custom
|
||||
dbgWrite f0 "("
|
||||
|> dbgWrite name
|
||||
|> \f1 ->
|
||||
f2, inspector <- List.walk fields f1
|
||||
dbgWrite f2 " "
|
||||
|> \x -> apply inspector x
|
||||
|> dbgWrite ")"
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "("
|
||||
|> dbgWrite name
|
||||
|> \f1 ->
|
||||
List.walk fields f1 \f2, inspector ->
|
||||
dbgWrite f2 " "
|
||||
|> \x -> apply inspector x
|
||||
|> dbgWrite ")"
|
||||
|
||||
dbgTuple : List (Inspector DbgFormatter) -> Inspector DbgFormatter
|
||||
dbgTuple = \fields ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 "("
|
||||
|> \f1 ->
|
||||
(f2, prependSep), inspector <- List.walk fields (f1, Bool.false)
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "("
|
||||
|> \f1 ->
|
||||
List.walk fields (f1, Bool.false) \(f2, prependSep), inspector ->
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
|
||||
apply inspector f3
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite ")"
|
||||
apply inspector f3
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite ")"
|
||||
|
||||
dbgRecord : List { key : Str, value : Inspector DbgFormatter } -> Inspector DbgFormatter
|
||||
dbgRecord = \fields ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 "{"
|
||||
|> \f1 ->
|
||||
(f2, prependSep), { key, value } <- List.walk fields (f1, Bool.false)
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "{"
|
||||
|> \f1 ->
|
||||
List.walk fields (f1, Bool.false) \(f2, prependSep), { key, value } ->
|
||||
f3 =
|
||||
if prependSep then
|
||||
dbgWrite f2 ", "
|
||||
else
|
||||
f2
|
||||
|
||||
dbgWrite f3 key
|
||||
|> dbgWrite ": "
|
||||
|> \x -> apply value x
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "}"
|
||||
dbgWrite f3 key
|
||||
|> dbgWrite ": "
|
||||
|> \x -> apply value x
|
||||
|> \f4 -> (f4, Bool.true)
|
||||
|> .0
|
||||
|> dbgWrite "}"
|
||||
|
||||
dbgBool : Bool -> Inspector DbgFormatter
|
||||
dbgBool = \b ->
|
||||
if b then
|
||||
f0 <- custom
|
||||
dbgWrite f0 "Bool.true"
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "Bool.true"
|
||||
else
|
||||
f0 <- custom
|
||||
dbgWrite f0 "Bool.false"
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "Bool.false"
|
||||
|
||||
dbgStr : Str -> Inspector DbgFormatter
|
||||
dbgStr = \s ->
|
||||
f0 <- custom
|
||||
f0
|
||||
|> dbgWrite "\""
|
||||
|> dbgWrite s # TODO: Should we be escaping strings for dbg/logging?
|
||||
|> dbgWrite "\""
|
||||
custom \f0 ->
|
||||
f0
|
||||
|> dbgWrite "\""
|
||||
|> dbgWrite s # TODO: Should we be escaping strings for dbg/logging?
|
||||
|> dbgWrite "\""
|
||||
|
||||
dbgOpaque : * -> Inspector DbgFormatter
|
||||
dbgOpaque = \_ ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 "<opaque>"
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "<opaque>"
|
||||
|
||||
dbgFunction : * -> Inspector DbgFormatter
|
||||
dbgFunction = \_ ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 "<function>"
|
||||
custom \f0 ->
|
||||
dbgWrite f0 "<function>"
|
||||
|
||||
dbgU8 : U8 -> Inspector DbgFormatter
|
||||
dbgU8 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgI8 : I8 -> Inspector DbgFormatter
|
||||
dbgI8 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgU16 : U16 -> Inspector DbgFormatter
|
||||
dbgU16 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgI16 : I16 -> Inspector DbgFormatter
|
||||
dbgI16 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgU32 : U32 -> Inspector DbgFormatter
|
||||
dbgU32 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgI32 : I32 -> Inspector DbgFormatter
|
||||
dbgI32 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgU64 : U64 -> Inspector DbgFormatter
|
||||
dbgU64 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgI64 : I64 -> Inspector DbgFormatter
|
||||
dbgI64 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgU128 : U128 -> Inspector DbgFormatter
|
||||
dbgU128 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgI128 : I128 -> Inspector DbgFormatter
|
||||
dbgI128 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgF32 : F32 -> Inspector DbgFormatter
|
||||
dbgF32 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgF64 : F64 -> Inspector DbgFormatter
|
||||
dbgF64 = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgDec : Dec -> Inspector DbgFormatter
|
||||
dbgDec = \num ->
|
||||
f0 <- custom
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
custom \f0 ->
|
||||
dbgWrite f0 (num |> Num.toStr)
|
||||
|
||||
dbgWrite : DbgFormatter, Str -> DbgFormatter
|
||||
dbgWrite = \@DbgFormatter { data }, added ->
|
||||
|
|
|
@ -62,8 +62,8 @@ hashSet = \hasher, @Set inner -> Hash.hash hasher inner
|
|||
|
||||
toInspectorSet : Set k -> Inspector f where k implements Inspect & Hash & Eq, f implements InspectFormatter
|
||||
toInspectorSet = \set ->
|
||||
fmt <- Inspect.custom
|
||||
Inspect.apply (Inspect.set set walk Inspect.toInspector) fmt
|
||||
Inspect.custom \fmt ->
|
||||
Inspect.apply (Inspect.set set walk Inspect.toInspector) fmt
|
||||
|
||||
## Creates a new empty `Set`.
|
||||
## ```roc
|
||||
|
|
265
crates/compiler/builtins/roc/Task.roc
Normal file
|
@ -0,0 +1,265 @@
|
|||
module [
|
||||
Task,
|
||||
ok,
|
||||
err,
|
||||
await,
|
||||
map,
|
||||
mapErr,
|
||||
onErr,
|
||||
attempt,
|
||||
forever,
|
||||
loop,
|
||||
fromResult,
|
||||
batch,
|
||||
sequence,
|
||||
forEach,
|
||||
result,
|
||||
]
|
||||
|
||||
import List
|
||||
import Result exposing [Result]
|
||||
|
||||
## A Task represents an effect; an interaction with state outside your Roc
|
||||
## program, such as the terminal's standard output, or a file.
|
||||
Task ok err := {} -> Result ok err
|
||||
|
||||
## Run a task repeatedly, until it fails with `err`. Note that this task does not return a success value.
|
||||
forever : Task a err -> Task * err
|
||||
forever = \@Task task ->
|
||||
looper = \{} ->
|
||||
when task {} is
|
||||
Err e -> Err e
|
||||
Ok _ -> looper {}
|
||||
|
||||
@Task \{} -> looper {}
|
||||
|
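## Editorial example for `forever` above (not part of the diff; Stdin.line and
## Stdout.line are assumed platform effects):
##
## ```
## # Echo lines until reading stdin fails with an error.
## echoForever = Task.forever (Task.await (Stdin.line {}) Stdout.line)
## ```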
||||
## Run a task repeatedly, until it fails with `err` or completes with `done`.
|
||||
##
|
||||
## ```
|
||||
## sum =
|
||||
## Task.loop! 0 \total ->
|
||||
## numResult =
|
||||
## Stdin.line
|
||||
## |> Task.result!
|
||||
## |> Result.try Str.toU64
|
||||
##
|
||||
## when numResult is
|
||||
## Ok num -> Task.ok (Step (total + num))
|
||||
## Err (StdinErr EndOfFile) -> Task.ok (Done total)
|
||||
## Err InvalidNumStr -> Task.err NonNumberGiven
|
||||
## ```
|
||||
loop : state, (state -> Task [Step state, Done done] err) -> Task done err
|
||||
loop = \state, step ->
|
||||
looper = \current ->
|
||||
(@Task next) = step current
|
||||
when next {} is
|
||||
Err e -> Err e
|
||||
Ok (Done newResult) -> Ok newResult
|
||||
Ok (Step newState) -> looper (newState)
|
||||
|
||||
@Task \{} -> looper state
|
||||
|
||||
## Create a task that always succeeds with the value provided.
|
||||
##
|
||||
## ```
|
||||
## # Always succeeds with "Louis"
|
||||
## getName : Task.Task Str *
|
||||
## getName = Task.ok "Louis"
|
||||
## ```
|
||||
##
|
||||
ok : a -> Task a *
|
||||
ok = \a -> @Task \{} -> Ok a
|
||||
|
||||
## Create a task that always fails with the error provided.
|
||||
##
|
||||
## ```
|
||||
## # Always fails with the tag `CustomError Str`
|
||||
## customError : Str -> Task.Task {} [CustomError Str]
|
||||
## customError = \err -> Task.err (CustomError err)
|
||||
## ```
|
||||
##
|
||||
err : a -> Task * a
|
||||
err = \a -> @Task \{} -> Err a
|
||||
|
||||
## Transform a given Task with a function that handles the success or error case
|
||||
## and returns another task based on that. This is useful for chaining tasks
|
||||
## together or performing error handling and recovery.
|
||||
##
|
||||
## Consider the following task:
|
||||
##
|
||||
## `canFail : Task {} [Failure, AnotherFail, YetAnotherFail]`
|
||||
##
|
||||
## We can use [attempt] to handle the failure cases using the following:
|
||||
##
|
||||
## ```
|
||||
## Task.attempt canFail \result ->
|
||||
## when result is
|
||||
## Ok Success -> Stdout.line "Success!"
|
||||
## Err Failure -> Stdout.line "Oops, failed!"
|
||||
## Err AnotherFail -> Stdout.line "Ooooops, another failure!"
|
||||
## Err YetAnotherFail -> Stdout.line "Really big oooooops, yet again!"
|
||||
## ```
|
||||
##
|
||||
## Here we know that the `canFail` task may fail, and so we use
|
||||
## `Task.attempt` to convert the task to a `Result` and then use pattern
|
||||
## matching to handle the success and possible failure cases.
|
||||
attempt : Task a b, (Result a b -> Task c d) -> Task c d
|
||||
attempt = \@Task task, transform ->
|
||||
@Task \{} ->
|
||||
(@Task transformed) = transform (task {})
|
||||
|
||||
transformed {}
|
||||
|
||||
## Take the success value from a given [Task] and use that to generate a new [Task].
|
||||
##
|
||||
## We can [await] Task results with callbacks:
|
||||
##
|
||||
## ```
|
||||
## Task.await (Stdin.line "What's your name?") \name ->
|
||||
## Stdout.line "Your name is: $(name)"
|
||||
## ```
|
||||
##
|
||||
## Or we can more succinctly use the `!` bang operator, which desugars to [await]:
|
||||
##
|
||||
## ```
|
||||
## name = Stdin.line! "What's your name?"
|
||||
## Stdout.line "Your name is: $(name)"
|
||||
## ```
|
||||
await : Task a b, (a -> Task c b) -> Task c b
|
||||
await = \@Task task, transform ->
|
||||
@Task \{} ->
|
||||
when task {} is
|
||||
Ok a ->
|
||||
(@Task transformed) = transform a
|
||||
transformed {}
|
||||
|
||||
Err b ->
|
||||
Err b
|
||||
|
||||
## Take the error value from a given [Task] and use that to generate a new [Task].
|
||||
##
|
||||
## ```
|
||||
## # Prints "Something went wrong!" to standard error if `canFail` fails.
|
||||
## canFail
|
||||
## |> Task.onErr \_ -> Stderr.line "Something went wrong!"
|
||||
## ```
|
||||
onErr : Task a b, (b -> Task a c) -> Task a c
|
||||
onErr = \@Task task, transform ->
|
||||
@Task \{} ->
|
||||
when task {} is
|
||||
Ok a ->
|
||||
Ok a
|
||||
|
||||
Err b ->
|
||||
(@Task transformed) = transform b
|
||||
transformed {}
|
||||
|
||||
## Transform the success value of a given [Task] with a given function.
|
||||
##
|
||||
## ```
|
||||
## # Succeeds with a value of "Bonjour Louis!"
|
||||
## Task.ok "Louis"
|
||||
## |> Task.map (\name -> "Bonjour $(name)!")
|
||||
## ```
|
||||
map : Task a c, (a -> b) -> Task b c
|
||||
map = \@Task task, transform ->
|
||||
@Task \{} ->
|
||||
when task {} is
|
||||
Ok a -> Ok (transform a)
|
||||
Err b -> Err b
|
||||
|
||||
## Transform the error value of a given [Task] with a given function.
|
||||
##
|
||||
## ```
|
||||
## # Ignore the fail value, and map it to the tag `CustomError`
|
||||
## canFail
|
||||
## |> Task.mapErr \_ -> CustomError
|
||||
## ```
|
||||
mapErr : Task c a, (a -> b) -> Task c b
|
||||
mapErr = \@Task task, transform ->
|
||||
@Task \{} ->
|
||||
when task {} is
|
||||
Ok a -> Ok a
|
||||
Err b -> Err (transform b)
|
||||
|
||||
## Use a Result among other Tasks by converting it into a [Task].
|
||||
fromResult : Result a b -> Task a b
|
||||
fromResult = \res ->
|
||||
@Task \{} -> res
|
||||
|
||||
## Apply a task to another task applicatively. This can be used with
|
||||
## [ok] to build a [Task] that returns a record.
|
||||
##
|
||||
## The following example returns a Record with two fields, `apples` and
|
||||
## `oranges`, each of which is a `List Str`. If it fails it returns the tag
|
||||
## `NoFruitAvailable`.
|
||||
##
|
||||
## ```
|
||||
## getFruitBasket : Task { apples : List Str, oranges : List Str } [NoFruitAvailable]
|
||||
## getFruitBasket = Task.ok {
|
||||
## apples: <- getFruit Apples |> Task.batch,
|
||||
## oranges: <- getFruit Oranges |> Task.batch,
|
||||
## }
|
||||
## ```
|
||||
batch : Task a c -> (Task (a -> b) c -> Task b c)
|
||||
batch = \current ->
|
||||
\next ->
|
||||
await next \f ->
|
||||
map current f
|
||||
|
||||
## Apply each task in a list sequentially, and return a list of the resulting values.
|
||||
## Each task will be awaited before beginning the next task.
|
||||
##
|
||||
## ```
|
||||
## fetchAuthorTasks : List (Task Author [DbError])
|
||||
##
|
||||
## getAuthors : Task (List Author) [DbError]
|
||||
## getAuthors = Task.sequence fetchAuthorTasks
|
||||
## ```
|
||||
##
|
||||
sequence : List (Task ok err) -> Task (List ok) err
|
||||
sequence = \taskList ->
|
||||
Task.loop (taskList, List.withCapacity (List.len taskList)) \(tasks, values) ->
|
||||
when tasks is
|
||||
[task, .. as rest] ->
|
||||
value = task!
|
||||
Task.ok (Step (rest, List.append values value))
|
||||
|
||||
[] ->
|
||||
Task.ok (Done values)
|
||||
|
||||
## Apply a task repeatedly for each item in a list
|
||||
##
|
||||
## ```
|
||||
## authors : List Author
|
||||
## saveAuthor : Author -> Task {} [DbError]
|
||||
##
|
||||
## saveAuthors : Task (List Author) [DbError]
|
||||
## saveAuthors = Task.forEach authors saveAuthor
|
||||
## ```
|
||||
##
|
||||
forEach : List a, (a -> Task {} b) -> Task {} b
|
||||
forEach = \items, fn ->
|
||||
List.walk items (ok {}) \state, item ->
|
||||
state |> await \_ -> fn item
|
||||
|
||||
## Transform a task that can either succeed with `ok`, or fail with `err`, into
|
||||
## a task that succeeds with `Result ok err`.
|
||||
##
|
||||
## This is useful when chaining tasks using the `!` suffix. For example:
|
||||
##
|
||||
## ```
|
||||
## # Path.roc
|
||||
## checkFile : Str -> Task [Good, Bad] [IOError]
|
||||
##
|
||||
## # main.roc
|
||||
## when checkFile "/usr/local/bin/roc" |> Task.result! is
|
||||
## Ok Good -> "..."
|
||||
## Ok Bad -> "..."
|
||||
## Err IOError -> "..."
|
||||
## ```
|
||||
##
|
||||
result : Task ok err -> Task (Result ok err) *
|
||||
result = \@Task task ->
|
||||
@Task \{} ->
|
||||
Ok (task {})
|
|
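Since the new builtin represents a task as a thunk ({} -> Result ok err), running
one is just unwrapping the opaque value and calling it. A minimal sketch (runTask
is hypothetical and, because @Task is opaque, could only live inside Task.roc
itself):

    runTask : Task ok err -> Result ok err
    runTask = \@Task thunk -> thunk {}

    expect runTask (Task.ok 42) == Ok 42
    expect runTask (Task.err "boom") == Err "boom"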
@ -11,4 +11,5 @@ package [
|
|||
Hash,
|
||||
Box,
|
||||
Inspect,
|
||||
Task,
|
||||
] {}
|
||||
|
|
|
@ -16,6 +16,7 @@ pub fn module_source(module_id: ModuleId) -> &'static str {
|
|||
ModuleId::DECODE => DECODE,
|
||||
ModuleId::HASH => HASH,
|
||||
ModuleId::INSPECT => INSPECT,
|
||||
ModuleId::TASK => TASK,
|
||||
_ => internal_error!(
|
||||
"ModuleId {:?} is not part of the standard library",
|
||||
module_id
|
||||
|
@ -35,3 +36,4 @@ const ENCODE: &str = include_str!("../roc/Encode.roc");
|
|||
const DECODE: &str = include_str!("../roc/Decode.roc");
|
||||
const HASH: &str = include_str!("../roc/Hash.roc");
|
||||
const INSPECT: &str = include_str!("../roc/Inspect.roc");
|
||||
const TASK: &str = include_str!("../roc/Task.roc");
|
||||
|
|
|
@ -290,12 +290,14 @@ fn deep_copy_expr_help<C: CopyEnv>(env: &mut C, copied: &mut Vec<Variable>, expr
|
|||
Var(sym, var) => Var(*sym, sub!(*var)),
|
||||
ParamsVar {
|
||||
symbol,
|
||||
params,
|
||||
var,
|
||||
params_symbol,
|
||||
params_var,
|
||||
} => ParamsVar {
|
||||
symbol: *symbol,
|
||||
params: *params,
|
||||
var: sub!(*var),
|
||||
params_symbol: *params_symbol,
|
||||
params_var: sub!(*params_var),
|
||||
},
|
||||
ImportParams(module_id, region, opt_provided) => ImportParams(
|
||||
*module_id,
|
||||
|
|
|
@ -22,6 +22,7 @@ use crate::pattern::{canonicalize_def_header_pattern, BindingsFromPattern, Patte
|
|||
use crate::procedure::QualifiedReference;
|
||||
use crate::procedure::References;
|
||||
use crate::scope::create_alias;
|
||||
use crate::scope::SymbolLookup;
|
||||
use crate::scope::{PendingAbilitiesInScope, Scope};
|
||||
use roc_collections::ReferenceMatrix;
|
||||
use roc_collections::VecMap;
|
||||
|
@ -111,6 +112,23 @@ impl Annotation {
|
|||
|
||||
self
|
||||
}
|
||||
|
||||
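    /// Editorial summary (not part of the diff): convert_to_fn wraps this
    /// annotation's existing signature in a function type, using
    /// `argument_count` fresh inferred variables as the arguments, a fresh
    /// variable for the closure, and the original signature as the return type.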
pub fn convert_to_fn(&mut self, argument_count: usize, var_store: &mut VarStore) {
|
||||
let mut arg_types = Vec::with_capacity(argument_count);
|
||||
|
||||
for _ in 0..argument_count {
|
||||
let var = var_store.fresh();
|
||||
self.introduced_variables.insert_inferred(Loc::at_zero(var));
|
||||
|
||||
arg_types.push(Type::Variable(var));
|
||||
}
|
||||
|
||||
self.signature = Type::Function(
|
||||
arg_types,
|
||||
Box::new(Type::Variable(var_store.fresh())),
|
||||
Box::new(self.signature.clone()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
|
@@ -165,6 +183,7 @@ enum PendingValueDef<'a> {
/// Module params from an import
ImportParams {
symbol: Symbol,
variable: Variable,
loc_pattern: Loc<Pattern>,
module_id: ModuleId,
opt_provided: Option<ast::Collection<'a, Loc<AssignedField<'a, ast::Expr<'a>>>>>,

@@ -186,6 +205,7 @@ impl PendingValueDef<'_> {
PendingValueDef::ImportParams {
loc_pattern,
symbol: _,
variable: _,
module_id: _,
opt_provided: _,
} => loc_pattern,

@@ -1153,6 +1173,7 @@ fn canonicalize_value_defs<'a>(
pending_value_defs.push(PendingValueDef::ImportParams {
symbol: params.symbol,
variable: params.variable,
loc_pattern: params.loc_pattern,
opt_provided: params.opt_provided,
module_id,

@@ -2400,15 +2421,17 @@ fn canonicalize_pending_value_def<'a>(
}
ImportParams {
symbol,
variable,
loc_pattern,
module_id,
opt_provided,
} => {
// Insert a reference to the record so that we don't report it as unused
// If the whole module is unused, we'll report that separately
output
.references
.insert_value_lookup(symbol, QualifiedReference::Unqualified);
output.references.insert_value_lookup(
SymbolLookup::no_params(symbol),
QualifiedReference::Unqualified,
);
let (opt_var_record, references) = match opt_provided {
Some(params) => {

@@ -2418,7 +2441,7 @@ fn canonicalize_pending_value_def<'a>(
let references = can_output.references.clone();
output.union(can_output);
(Some((var_store.fresh(), Box::new(record))), references)
(Some((variable, Box::new(record))), references)
}
None => (None, References::new()),
};

@@ -2725,12 +2748,11 @@ pub fn report_unused_imports(
for (symbol, region) in &import.exposed_symbols {
if !references.has_unqualified_type_or_value_lookup(*symbol)
&& !scope.abilities_store.is_specialization_name(*symbol)
&& !import.is_task(env)
{
env.problem(Problem::UnusedImport(*symbol, *region));
}
}
} else if !import.is_task(env) {
} else {
env.problem(Problem::UnusedModuleImport(import.module_id, import.region));
}
}

@@ -3003,6 +3025,7 @@ struct PendingModuleImport<'a> {
struct PendingModuleImportParams<'a> {
symbol: Symbol,
variable: Variable,
loc_pattern: Loc<Pattern>,
opt_provided: Option<ast::Collection<'a, Loc<AssignedField<'a, ast::Expr<'a>>>>>,
}

@@ -3013,16 +3036,6 @@ pub struct IntroducedImport {
exposed_symbols: Vec<(Symbol, Region)>,
}
impl IntroducedImport {
pub fn is_task(&self, env: &Env<'_>) -> bool {
// Temporarily needed for `!` convenience. Can be removed when Task becomes a builtin.
match env.qualified_module_ids.get_name(self.module_id) {
Some(name) => name.as_inner().as_str() == "Task",
None => false,
}
}
}
#[allow(clippy::too_many_arguments)]
fn to_pending_value_def<'a>(
env: &mut Env<'a>,

@@ -3160,15 +3173,17 @@ fn to_pending_value_def<'a>(
// We do this even if params weren't provided so that solve can report if they are missing
let params_sym = scope.gen_unique_symbol();
let params_region = module_import.params.map(|p| p.params.region).unwrap_or(region);
let params_var = var_store.fresh();
let params =
PendingModuleImportParams {
symbol: params_sym,
variable: params_var,
loc_pattern: Loc::at(params_region, Pattern::Identifier(params_sym)),
opt_provided: module_import.params.map(|p| p.params.value),
};
let provided_params_sym = if module_import.params.is_some() {
let provided_params = if module_import.params.is_some() {
// Only add params to scope if they are provided
Some(params_sym)
Some((params_var, params_sym))
} else {
None
};

@@ -3176,7 +3191,7 @@ fn to_pending_value_def<'a>(
if let Err(existing_import) =
scope
.modules
.insert(name_with_alias.clone(), module_id, provided_params_sym, region)
.insert(name_with_alias.clone(), module_id, provided_params, region)
{
env.problems.push(Problem::ImportNameConflict {
name: name_with_alias,
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -8,6 +8,7 @@ use roc_module::ident::{Ident, ModuleName};
use roc_module::symbol::{IdentIdsByModule, ModuleId, PQModuleName, PackageModuleIds, Symbol};
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
/// The canonicalization environment for a particular module.
pub struct Env<'a> {

@@ -38,6 +39,8 @@ pub struct Env<'a> {
pub top_level_symbols: VecSet<Symbol>,
pub home_params_record: Option<(Symbol, Variable)>,
pub arena: &'a Bump,
pub opt_shorthand: Option<&'a str>,

@@ -64,6 +67,7 @@ impl<'a> Env<'a> {
qualified_type_lookups: VecSet::default(),
tailcallable_symbol: None,
top_level_symbols: VecSet::default(),
home_params_record: None,
opt_shorthand,
}
}
@@ -18,7 +18,7 @@ use roc_error_macros::internal_error;
use roc_module::called_via::CalledVia;
use roc_module::ident::{ForeignSymbol, Lowercase, TagName};
use roc_module::low_level::LowLevel;
use roc_module::symbol::{ModuleId, Symbol};
use roc_module::symbol::{IdentId, ModuleId, Symbol};
use roc_parse::ast::{self, Defs, PrecedenceConflict, StrLiteral};
use roc_parse::ident::Accessor;
use roc_parse::pattern::PatternType::*;

@@ -111,8 +111,9 @@ pub enum Expr {
/// Like Var, but from a module with params
ParamsVar {
symbol: Symbol,
params: Symbol,
var: Variable,
params_symbol: Symbol,
params_var: Variable,
},
AbilityMember(
/// Actual member name

@@ -320,8 +321,9 @@ impl Expr {
&Self::Var(sym, _) => Category::Lookup(sym),
&Self::ParamsVar {
symbol,
params: _,
var: _,
params_symbol: _,
params_var: _,
} => Category::Lookup(symbol),
&Self::AbilityMember(sym, _, _) => Category::Lookup(sym),
Self::When { .. } => Category::When,
@@ -1207,7 +1209,7 @@ pub fn canonicalize_expr<'a>(
output,
)
}
ast::Expr::Dbg(_, _) => {
ast::Expr::Dbg | ast::Expr::DbgStmt(_, _) => {
internal_error!("Dbg should have been desugared by now")
}
ast::Expr::LowLevelDbg((source_location, source), message, continuation) => {

@@ -1554,6 +1556,8 @@ fn canonicalize_closure_body<'a>(
&loc_body_expr.value,
);
let mut references_top_level = false;
let mut captured_symbols: Vec<_> = new_output
.references
.value_lookups()

@@ -1564,7 +1568,11 @@ fn canonicalize_closure_body<'a>(
.filter(|s| !new_output.references.bound_symbols().any(|x| x == s))
.filter(|s| bound_by_argument_patterns.iter().all(|(k, _)| s != k))
// filter out top-level symbols; those will be globally available and don't need to be captured
.filter(|s| !env.top_level_symbols.contains(s))
.filter(|s| {
let is_top_level = env.top_level_symbols.contains(s);
references_top_level = references_top_level || is_top_level;
!is_top_level
})
// filter out imported symbols; those will be globally available and don't need to be captured
.filter(|s| s.module_id() == env.home)
// filter out functions that don't close over anything

@@ -1573,6 +1581,15 @@ fn canonicalize_closure_body<'a>(
.map(|s| (s, var_store.fresh()))
.collect();
if references_top_level {
if let Some(params_record) = env.home_params_record {
// If this module has params and the closure references top-level symbols,
// we need to capture the whole record so we can pass it.
// The lower_params pass will take care of removing the captures for top-level fns.
captured_symbols.push(params_record);
}
}
output.union(new_output);
// Now that we've collected all the references, check to see if any of the args we defined
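A minimal standalone sketch of the capture rule in the hunk above, using stand-in types rather than the actual roc_can ones (Symbol, the env, and the var store are simplified away): top-level symbols are filtered out of a closure's captures, but if any of them were referenced and the home module has a params record, that record symbol is captured in their place.

// Sketch only: `Symbol` is a stand-in type, not roc_module::symbol::Symbol.
use std::collections::HashSet;

type Symbol = &'static str;

fn captured_symbols(
    referenced: &[Symbol],
    top_level: &HashSet<Symbol>,
    home_params_record: Option<Symbol>,
) -> Vec<Symbol> {
    let mut references_top_level = false;

    let mut captured: Vec<Symbol> = referenced
        .iter()
        .copied()
        // drop top-level symbols, but remember that the closure touched one
        .filter(|s| {
            let is_top_level = top_level.contains(s);
            references_top_level = references_top_level || is_top_level;
            !is_top_level
        })
        .collect();

    // If the module has params and a top-level symbol was referenced,
    // capture the whole params record so it can be passed along.
    if references_top_level {
        if let Some(params_record) = home_params_record {
            captured.push(params_record);
        }
    }

    captured
}

fn main() {
    let top_level: HashSet<Symbol> = ["topLevelFn"].into_iter().collect();
    let captures = captured_symbols(&["x", "topLevelFn"], &top_level, Some("#params"));
    assert_eq!(captures, vec!["x", "#params"]);
}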
@@ -1918,7 +1935,7 @@ fn canonicalize_var_lookup(
Ok(lookup) => {
output
.references
.insert_value_lookup(lookup.symbol, QualifiedReference::Unqualified);
.insert_value_lookup(lookup, QualifiedReference::Unqualified);
if scope.abilities_store.is_ability_member_name(lookup.symbol) {
AbilityMember(

@@ -1927,7 +1944,7 @@ fn canonicalize_var_lookup(
var_store.fresh(),
)
} else {
lookup_to_expr(lookup, var_store.fresh())
lookup_to_expr(var_store, lookup)
}
}
Err(problem) => {

@@ -1943,7 +1960,7 @@ fn canonicalize_var_lookup(
Ok(lookup) => {
output
.references
.insert_value_lookup(lookup.symbol, QualifiedReference::Qualified);
.insert_value_lookup(lookup, QualifiedReference::Qualified);
if scope.abilities_store.is_ability_member_name(lookup.symbol) {
AbilityMember(

@@ -1952,7 +1969,7 @@ fn canonicalize_var_lookup(
var_store.fresh(),
)
} else {
lookup_to_expr(lookup, var_store.fresh())
lookup_to_expr(var_store, lookup)
}
}
Err(problem) => {

@@ -1971,20 +1988,21 @@ fn canonicalize_var_lookup(
}
fn lookup_to_expr(
var_store: &mut VarStore,
SymbolLookup {
symbol,
module_params: params,
module_params,
}: SymbolLookup,
var: Variable,
) -> Expr {
if let Some(params) = params {
if let Some((params_var, params_symbol)) = module_params {
Expr::ParamsVar {
symbol,
params,
var,
var: var_store.fresh(),
params_symbol,
params_var,
}
} else {
Expr::Var(symbol, var)
Expr::Var(symbol, var_store.fresh())
}
}
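A minimal standalone sketch of the lookup_to_expr shape shown above, with stand-in types instead of the real roc_can/roc_types ones: a lookup whose module carries params becomes a ParamsVar holding the params symbol and variable, and every other lookup stays a plain Var.

// Sketch only: Symbol, Variable, Expr and SymbolLookup are stand-ins.
type Symbol = &'static str;
type Variable = u32;

#[derive(Debug, PartialEq)]
enum Expr {
    Var(Symbol, Variable),
    ParamsVar {
        symbol: Symbol,
        var: Variable,
        params_symbol: Symbol,
        params_var: Variable,
    },
}

struct SymbolLookup {
    symbol: Symbol,
    // A lookup into a module with params carries that module's params
    // variable and the symbol bound to its params record.
    module_params: Option<(Variable, Symbol)>,
}

fn lookup_to_expr(fresh_var: Variable, lookup: SymbolLookup) -> Expr {
    match lookup.module_params {
        Some((params_var, params_symbol)) => Expr::ParamsVar {
            symbol: lookup.symbol,
            var: fresh_var,
            params_symbol,
            params_var,
        },
        None => Expr::Var(lookup.symbol, fresh_var),
    }
}

fn main() {
    let with_params = SymbolLookup {
        symbol: "Menu.items",
        module_params: Some((7, "Menu.#params")),
    };
    assert!(matches!(lookup_to_expr(42, with_params), Expr::ParamsVar { .. }));

    let plain = SymbolLookup { symbol: "List.map", module_params: None };
    assert_eq!(lookup_to_expr(43, plain), Expr::Var("List.map", 43));
}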
@ -2470,21 +2488,28 @@ pub fn is_valid_interpolation(expr: &ast::Expr<'_>) -> bool {
|
|||
| ast::Expr::AccessorFunction(_)
|
||||
| ast::Expr::RecordUpdater(_)
|
||||
| ast::Expr::Crash
|
||||
| ast::Expr::Dbg
|
||||
| ast::Expr::Underscore(_)
|
||||
| ast::Expr::MalformedIdent(_, _)
|
||||
| ast::Expr::Tag(_)
|
||||
| ast::Expr::OpaqueRef(_)
|
||||
| ast::Expr::MalformedClosure => true,
|
||||
// Newlines are disallowed inside interpolation, and these all require newlines
|
||||
ast::Expr::Dbg(_, _)
|
||||
ast::Expr::DbgStmt(_, _)
|
||||
| ast::Expr::LowLevelDbg(_, _, _)
|
||||
| ast::Expr::Defs(_, _)
|
||||
| ast::Expr::Expect(_, _)
|
||||
| ast::Expr::When(_, _)
|
||||
| ast::Expr::Backpassing(_, _, _)
|
||||
| ast::Expr::SpaceBefore(_, _)
|
||||
| ast::Expr::Str(StrLiteral::Block(_))
|
||||
| ast::Expr::SpaceAfter(_, _) => false,
|
||||
// Desugared dbg expression
|
||||
ast::Expr::Defs(_, loc_ret) => match loc_ret.value {
|
||||
ast::Expr::LowLevelDbg(_, _, continuation) => {
|
||||
is_valid_interpolation(&continuation.value)
|
||||
}
|
||||
_ => false,
|
||||
},
|
||||
// These can contain subexpressions, so we need to recursively check those
|
||||
ast::Expr::Str(StrLiteral::Line(segments)) => {
|
||||
segments.iter().all(|segment| match segment {
|
||||
|
@ -2774,6 +2799,9 @@ pub struct Declarations {
|
|||
// used for ability member specializations.
|
||||
pub specializes: VecMap<usize, Symbol>,
|
||||
|
||||
// used while lowering params.
|
||||
arity_by_name: VecMap<IdentId, usize>,
|
||||
|
||||
pub host_exposed_annotations: VecMap<usize, (Variable, crate::def::Annotation)>,
|
||||
|
||||
pub function_bodies: Vec<Loc<FunctionDef>>,
|
||||
|
@ -2802,6 +2830,7 @@ impl Declarations {
|
|||
expressions: Vec::with_capacity(capacity),
|
||||
specializes: VecMap::default(), // number of specializations is probably low
|
||||
destructs: Vec::new(), // number of destructs is probably low
|
||||
arity_by_name: VecMap::with_capacity(capacity),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2840,6 +2869,9 @@ impl Declarations {
|
|||
arguments: loc_closure_data.value.arguments,
|
||||
};
|
||||
|
||||
self.arity_by_name
|
||||
.insert(symbol.value.ident_id(), function_def.arguments.len());
|
||||
|
||||
let loc_function_def = Loc::at(loc_closure_data.region, function_def);
|
||||
|
||||
let function_def_index = Index::push_new(&mut self.function_bodies, loc_function_def);
|
||||
|
@ -2888,6 +2920,9 @@ impl Declarations {
|
|||
arguments: loc_closure_data.value.arguments,
|
||||
};
|
||||
|
||||
self.arity_by_name
|
||||
.insert(symbol.value.ident_id(), function_def.arguments.len());
|
||||
|
||||
let loc_function_def = Loc::at(loc_closure_data.region, function_def);
|
||||
|
||||
let function_def_index = Index::push_new(&mut self.function_bodies, loc_function_def);
|
||||
|
@ -2964,6 +2999,8 @@ impl Declarations {
|
|||
.insert(self.declarations.len(), annotation);
|
||||
}
|
||||
|
||||
self.arity_by_name.insert(symbol.value.ident_id(), 0);
|
||||
|
||||
self.declarations.push(DeclarationTag::Value);
|
||||
self.variables.push(expr_var);
|
||||
self.symbols.push(symbol);
|
||||
|
@ -3080,6 +3117,60 @@ impl Declarations {
|
|||
}
|
||||
}
|
||||
|
||||
/// Convert a value def to a function def with the given arguments
|
||||
/// Currently used in lower_params
|
||||
pub fn convert_value_to_function(
|
||||
&mut self,
|
||||
index: usize,
|
||||
new_arguments: Vec<(Variable, AnnotatedMark, Loc<Pattern>)>,
|
||||
var_store: &mut VarStore,
|
||||
) {
|
||||
match self.declarations[index] {
|
||||
DeclarationTag::Value => {
|
||||
let new_args_len = new_arguments.len();
|
||||
|
||||
let loc_body = self.expressions[index].clone();
|
||||
let region = loc_body.region;
|
||||
|
||||
let closure_data = ClosureData {
|
||||
function_type: var_store.fresh(),
|
||||
closure_type: var_store.fresh(),
|
||||
return_type: var_store.fresh(),
|
||||
name: self.symbols[index].value,
|
||||
captured_symbols: vec![],
|
||||
recursive: Recursive::NotRecursive,
|
||||
arguments: new_arguments,
|
||||
loc_body: Box::new(loc_body),
|
||||
};
|
||||
|
||||
let loc_closure_data = Loc::at(region, closure_data);
|
||||
|
||||
let function_def = FunctionDef {
|
||||
closure_type: loc_closure_data.value.closure_type,
|
||||
return_type: loc_closure_data.value.return_type,
|
||||
captured_symbols: loc_closure_data.value.captured_symbols,
|
||||
arguments: loc_closure_data.value.arguments,
|
||||
};
|
||||
|
||||
let loc_function_def = Loc::at(region, function_def);
|
||||
|
||||
let function_def_index =
|
||||
Index::push_new(&mut self.function_bodies, loc_function_def);
|
||||
|
||||
if let Some(annotation) = &mut self.annotations[index] {
|
||||
annotation.convert_to_fn(new_args_len, var_store);
|
||||
}
|
||||
|
||||
if let Some((_var, annotation)) = self.host_exposed_annotations.get_mut(&index) {
|
||||
annotation.convert_to_fn(new_args_len, var_store);
|
||||
}
|
||||
|
||||
self.declarations[index] = DeclarationTag::Function(function_def_index);
|
||||
}
|
||||
_ => internal_error!("Expected value declaration"),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.declarations.len()
|
||||
}
|
||||
|
@ -3149,6 +3240,11 @@ impl Declarations {
|
|||
|
||||
collector
|
||||
}
|
||||
|
||||
pub(crate) fn take_arity_by_name(&mut self) -> VecMap<IdentId, usize> {
|
||||
// `arity_by_name` is only needed for lowering module params
|
||||
std::mem::take(&mut self.arity_by_name)
|
||||
}
|
||||
}
|
||||
|
||||
roc_error_macros::assert_sizeof_default!(DeclarationTag, 8);
|
||||
|
@ -3205,8 +3301,9 @@ pub(crate) fn get_lookup_symbols(expr: &Expr) -> Vec<ExpectLookup> {
|
|||
Expr::Var(symbol, var)
|
||||
| Expr::ParamsVar {
|
||||
symbol,
|
||||
params: _,
|
||||
var,
|
||||
params_symbol: _,
|
||||
params_var: _,
|
||||
}
|
||||
| Expr::RecordUpdate {
|
||||
symbol,
|
||||
|
|
|
@ -14,7 +14,6 @@ pub mod copy;
|
|||
pub mod def;
|
||||
mod derive;
|
||||
pub mod desugar;
|
||||
pub mod effect_module;
|
||||
pub mod env;
|
||||
pub mod exhaustive;
|
||||
pub mod expected;
|
||||
|
@ -26,6 +25,7 @@ pub mod procedure;
|
|||
pub mod scope;
|
||||
pub mod string;
|
||||
pub mod suffixed;
|
||||
pub mod task_module;
|
||||
pub mod traverse;
|
||||
|
||||
pub use derive::DERIVED_REGION;
|
||||
|
|
|
@ -3,13 +3,12 @@ use std::path::Path;
|
|||
use crate::abilities::{AbilitiesStore, ImplKey, PendingAbilitiesStore, ResolvedImpl};
|
||||
use crate::annotation::{canonicalize_annotation, AnnotationFor};
|
||||
use crate::def::{canonicalize_defs, report_unused_imports, Def};
|
||||
use crate::effect_module::HostedGeneratedFunctions;
|
||||
use crate::env::Env;
|
||||
use crate::expr::{
|
||||
AnnotatedMark, ClosureData, DbgLookup, Declarations, ExpectLookup, Expr, Output, PendingDerives,
|
||||
ClosureData, DbgLookup, Declarations, ExpectLookup, Expr, Output, PendingDerives,
|
||||
};
|
||||
use crate::pattern::{
|
||||
canonicalize_record_destructure, BindingsFromPattern, Pattern, PermitShadows,
|
||||
canonicalize_record_destructs, BindingsFromPattern, Pattern, PermitShadows, RecordDestruct,
|
||||
};
|
||||
use crate::procedure::References;
|
||||
use crate::scope::Scope;
|
||||
|
@ -18,14 +17,14 @@ use roc_collections::{MutMap, SendMap, VecMap, VecSet};
|
|||
use roc_error_macros::internal_error;
|
||||
use roc_module::ident::Ident;
|
||||
use roc_module::ident::Lowercase;
|
||||
use roc_module::symbol::{IdentIds, IdentIdsByModule, ModuleId, PackageModuleIds, Symbol};
|
||||
use roc_module::symbol::{IdentId, IdentIds, IdentIdsByModule, ModuleId, PackageModuleIds, Symbol};
|
||||
use roc_parse::ast::{Defs, TypeAnnotation};
|
||||
use roc_parse::header::{HeaderType, ModuleParams};
|
||||
use roc_parse::header::HeaderType;
|
||||
use roc_parse::pattern::PatternType;
|
||||
use roc_problem::can::{Problem, RuntimeError};
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::{ExposedTypesStorageSubs, Subs, VarStore, Variable};
|
||||
use roc_types::types::{AbilitySet, Alias, AliasKind, AliasVar, Type};
|
||||
use roc_types::types::{AbilitySet, Alias, Type};
|
||||
|
||||
/// The types of all exposed values/functions of a collection of modules
|
||||
#[derive(Clone, Debug, Default)]
|
||||
|
@@ -138,7 +137,33 @@ pub struct Module {
pub abilities_store: PendingAbilitiesStore,
pub loc_expects: VecMap<Region, Vec<ExpectLookup>>,
pub loc_dbgs: VecMap<Symbol, DbgLookup>,
pub params_pattern: Option<(Variable, AnnotatedMark, Loc<Pattern>)>,
pub module_params: Option<ModuleParams>,
}

#[derive(Debug, Clone)]
pub struct ModuleParams {
pub region: Region,
pub whole_symbol: Symbol,
pub whole_var: Variable,
pub record_var: Variable,
pub record_ext_var: Variable,
pub destructs: Vec<Loc<RecordDestruct>>,
// used while lowering passed functions
pub arity_by_name: VecMap<IdentId, usize>,
}

impl ModuleParams {
pub fn pattern(&self) -> Loc<Pattern> {
let record_pattern = Pattern::RecordDestructure {
whole_var: self.record_var,
ext_var: self.record_ext_var,
destructs: self.destructs.clone(),
};
let loc_record_pattern = Loc::at(self.region, record_pattern);
let as_pattern = Pattern::As(Box::new(loc_record_pattern), self.whole_symbol);
Loc::at(self.region, as_pattern)
}
}

#[derive(Debug, Default)]
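To make the shape of the generated pattern concrete, here is a minimal standalone sketch of what ModuleParams::pattern() above produces, with stand-in types rather than the real roc_can ones: the provided record destructure is wrapped in an `as` pattern that also binds the whole record to the generated params symbol.

// Sketch only: Symbol and Pattern are stand-ins, not the roc_can definitions.
type Symbol = &'static str;

#[derive(Debug)]
enum Pattern {
    RecordDestructure { fields: Vec<Symbol> },
    As(Box<Pattern>, Symbol),
}

struct ModuleParams {
    whole_symbol: Symbol,
    destructs: Vec<Symbol>,
}

impl ModuleParams {
    fn pattern(&self) -> Pattern {
        let record_pattern = Pattern::RecordDestructure {
            fields: self.destructs.clone(),
        };

        // Bind the destructured fields and the whole record in one pattern,
        // so later passes can pass the record around intact.
        Pattern::As(Box::new(record_pattern), self.whole_symbol)
    }
}

fn main() {
    let params = ModuleParams {
        whole_symbol: "#params",
        destructs: vec!["apiKey", "baseUrl"],
    };
    println!("{:?}", params.pattern());
}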
@ -152,7 +177,7 @@ pub struct RigidVariables {
|
|||
pub struct ModuleOutput {
|
||||
pub aliases: MutMap<Symbol, Alias>,
|
||||
pub rigid_variables: RigidVariables,
|
||||
pub params_pattern: Option<(Variable, AnnotatedMark, Loc<Pattern>)>,
|
||||
pub module_params: Option<ModuleParams>,
|
||||
pub declarations: Declarations,
|
||||
pub exposed_imports: MutMap<Symbol, Region>,
|
||||
pub exposed_symbols: VecSet<Symbol>,
|
||||
|
@ -165,99 +190,6 @@ pub struct ModuleOutput {
|
|||
pub loc_dbgs: VecMap<Symbol, DbgLookup>,
|
||||
}
|
||||
|
||||
fn validate_generate_with<'a>(
|
||||
generate_with: &'a [Loc<roc_parse::header::ExposedName<'a>>],
|
||||
) -> (HostedGeneratedFunctions, Vec<Loc<Ident>>) {
|
||||
let mut functions = HostedGeneratedFunctions::default();
|
||||
let mut unknown = Vec::new();
|
||||
|
||||
for generated in generate_with {
|
||||
match generated.value.as_str() {
|
||||
"after" => functions.after = true,
|
||||
"map" => functions.map = true,
|
||||
"always" => functions.always = true,
|
||||
"loop" => functions.loop_ = true,
|
||||
"forever" => functions.forever = true,
|
||||
other => {
|
||||
// we don't know how to generate this function
|
||||
let ident = Ident::from(other);
|
||||
unknown.push(Loc::at(generated.region, ident));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(functions, unknown)
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum GeneratedInfo {
|
||||
Hosted {
|
||||
effect_symbol: Symbol,
|
||||
generated_functions: HostedGeneratedFunctions,
|
||||
},
|
||||
Builtin,
|
||||
NotSpecial,
|
||||
}
|
||||
|
||||
impl GeneratedInfo {
|
||||
fn from_header_type(
|
||||
env: &mut Env,
|
||||
scope: &mut Scope,
|
||||
var_store: &mut VarStore,
|
||||
header_type: &HeaderType,
|
||||
) -> Self {
|
||||
match header_type {
|
||||
HeaderType::Hosted {
|
||||
generates,
|
||||
generates_with,
|
||||
name: _,
|
||||
exposes: _,
|
||||
} => {
|
||||
let name: &str = generates.into();
|
||||
let (generated_functions, unknown_generated) =
|
||||
validate_generate_with(generates_with);
|
||||
|
||||
for unknown in unknown_generated {
|
||||
env.problem(Problem::UnknownGeneratesWith(unknown));
|
||||
}
|
||||
|
||||
let effect_symbol = scope.introduce(name.into(), Region::zero()).unwrap();
|
||||
|
||||
{
|
||||
let a_var = var_store.fresh();
|
||||
|
||||
let actual =
|
||||
crate::effect_module::build_effect_actual(Type::Variable(a_var), var_store);
|
||||
|
||||
scope.add_alias(
|
||||
effect_symbol,
|
||||
Region::zero(),
|
||||
vec![Loc::at_zero(AliasVar::unbound("a".into(), a_var))],
|
||||
vec![],
|
||||
actual,
|
||||
AliasKind::Opaque,
|
||||
);
|
||||
}
|
||||
|
||||
GeneratedInfo::Hosted {
|
||||
effect_symbol,
|
||||
generated_functions,
|
||||
}
|
||||
}
|
||||
HeaderType::Builtin {
|
||||
generates_with,
|
||||
name: _,
|
||||
exposes: _,
|
||||
opt_params: _,
|
||||
} => {
|
||||
debug_assert!(generates_with.is_empty());
|
||||
GeneratedInfo::Builtin
|
||||
}
|
||||
_ => GeneratedInfo::NotSpecial,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_no_implementation(expr: &Expr) -> bool {
|
||||
match expr {
|
||||
Expr::RuntimeError(RuntimeError::NoImplementationNamed { .. }) => true,
|
||||
|
@ -326,9 +258,6 @@ pub fn canonicalize_module_defs<'a>(
|
|||
);
|
||||
}
|
||||
|
||||
let generated_info =
|
||||
GeneratedInfo::from_header_type(&mut env, &mut scope, var_store, header_type);
|
||||
|
||||
// Desugar operators (convert them to Apply calls, taking into account
|
||||
// operator precedence and associativity rules), before doing other canonicalization.
|
||||
//
|
||||
|
@ -337,11 +266,20 @@ pub fn canonicalize_module_defs<'a>(
|
|||
// operators, and then again on *their* nested operators, ultimately applying the
|
||||
// rules multiple times unnecessarily.
|
||||
|
||||
crate::desugar::desugar_defs_node_values(arena, loc_defs, src, &mut None, module_path, true);
|
||||
crate::desugar::desugar_defs_node_values(
|
||||
arena,
|
||||
var_store,
|
||||
loc_defs,
|
||||
src,
|
||||
&mut None,
|
||||
module_path,
|
||||
true,
|
||||
&mut env.problems,
|
||||
);
|
||||
|
||||
let mut rigid_variables = RigidVariables::default();
|
||||
|
||||
// Iniital scope values are treated like defs that appear before any others.
|
||||
// Initial scope values are treated like defs that appear before any others.
|
||||
// They include builtin types that are automatically imported, and for a platform
|
||||
// package, the required values from the app.
|
||||
//
|
||||
|
@ -390,13 +328,13 @@ pub fn canonicalize_module_defs<'a>(
|
|||
|
||||
let mut output = Output::default();
|
||||
|
||||
let params_pattern = header_type.get_params().as_ref().map(
|
||||
|ModuleParams {
|
||||
let module_params = header_type.get_params().as_ref().map(
|
||||
|roc_parse::header::ModuleParams {
|
||||
pattern,
|
||||
before_arrow: _,
|
||||
after_arrow: _,
|
||||
}| {
|
||||
let can_pattern = canonicalize_record_destructure(
|
||||
let (destructs, _) = canonicalize_record_destructs(
|
||||
&mut env,
|
||||
var_store,
|
||||
&mut scope,
|
||||
|
@ -407,17 +345,22 @@ pub fn canonicalize_module_defs<'a>(
|
|||
PermitShadows(false),
|
||||
);
|
||||
|
||||
let loc_pattern = Loc::at(pattern.region, can_pattern);
|
||||
let whole_symbol = scope.gen_unique_symbol();
|
||||
env.top_level_symbols.insert(whole_symbol);
|
||||
|
||||
for (symbol, _) in BindingsFromPattern::new(&loc_pattern) {
|
||||
env.top_level_symbols.insert(symbol);
|
||||
let whole_var = var_store.fresh();
|
||||
|
||||
env.home_params_record = Some((whole_symbol, whole_var));
|
||||
|
||||
ModuleParams {
|
||||
region: pattern.region,
|
||||
whole_var,
|
||||
whole_symbol,
|
||||
record_var: var_store.fresh(),
|
||||
record_ext_var: var_store.fresh(),
|
||||
destructs,
|
||||
arity_by_name: Default::default(),
|
||||
}
|
||||
|
||||
(
|
||||
var_store.fresh(),
|
||||
AnnotatedMark::new(var_store),
|
||||
loc_pattern,
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
|
@ -495,6 +438,11 @@ pub fn canonicalize_module_defs<'a>(
|
|||
&exposed_symbols,
|
||||
);
|
||||
|
||||
let module_params = module_params.map(|params| ModuleParams {
|
||||
arity_by_name: declarations.take_arity_by_name(),
|
||||
..params
|
||||
});
|
||||
|
||||
debug_assert!(
|
||||
output.pending_derives.is_empty(),
|
||||
"I thought pending derives are only found during def introduction"
|
||||
|
@ -534,24 +482,6 @@ pub fn canonicalize_module_defs<'a>(
|
|||
|
||||
report_unused_imports(imports_introduced, &output.references, &mut env, &mut scope);
|
||||
|
||||
if let GeneratedInfo::Hosted {
|
||||
effect_symbol,
|
||||
generated_functions,
|
||||
} = generated_info
|
||||
{
|
||||
let mut exposed_symbols = VecSet::default();
|
||||
|
||||
// NOTE this currently builds all functions, not just the ones that the user requested
|
||||
crate::effect_module::build_effect_builtins(
|
||||
&mut scope,
|
||||
effect_symbol,
|
||||
var_store,
|
||||
&mut exposed_symbols,
|
||||
&mut declarations,
|
||||
generated_functions,
|
||||
);
|
||||
}
|
||||
|
||||
for index in 0..declarations.len() {
|
||||
use crate::expr::DeclarationTag::*;
|
||||
|
||||
|
@ -572,8 +502,8 @@ pub fn canonicalize_module_defs<'a>(
|
|||
// and which are meant to be normal definitions without a body. So for now
|
||||
// we just assume they are hosted functions (meant to be provided by the platform)
|
||||
if has_no_implementation(&declarations.expressions[index].value) {
|
||||
match generated_info {
|
||||
GeneratedInfo::Builtin => {
|
||||
match header_type {
|
||||
HeaderType::Builtin { .. } => {
|
||||
match crate::builtins::builtin_defs_map(*symbol, var_store) {
|
||||
None => {
|
||||
internal_error!("A builtin module contains a signature without implementation for {:?}", symbol)
|
||||
|
@ -583,7 +513,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
}
|
||||
}
|
||||
}
|
||||
GeneratedInfo::Hosted { effect_symbol, .. } => {
|
||||
HeaderType::Hosted { .. } => {
|
||||
let ident_id = symbol.ident_id();
|
||||
let ident = scope
|
||||
.locals
|
||||
|
@ -601,13 +531,8 @@ pub fn canonicalize_module_defs<'a>(
|
|||
aliases: Default::default(),
|
||||
};
|
||||
|
||||
let hosted_def = crate::effect_module::build_host_exposed_def(
|
||||
&mut scope,
|
||||
*symbol,
|
||||
&ident,
|
||||
effect_symbol,
|
||||
var_store,
|
||||
annotation,
|
||||
let hosted_def = crate::task_module::build_host_exposed_def(
|
||||
&mut scope, *symbol, &ident, var_store, annotation,
|
||||
);
|
||||
|
||||
declarations.update_builtin_def(index, hosted_def);
|
||||
|
@ -630,8 +555,8 @@ pub fn canonicalize_module_defs<'a>(
|
|||
// and which are meant to be normal definitions without a body. So for now
|
||||
// we just assume they are hosted functions (meant to be provided by the platform)
|
||||
if has_no_implementation(&declarations.expressions[index].value) {
|
||||
match generated_info {
|
||||
GeneratedInfo::Builtin => {
|
||||
match header_type {
|
||||
HeaderType::Builtin { .. } => {
|
||||
match crate::builtins::builtin_defs_map(*symbol, var_store) {
|
||||
None => {
|
||||
internal_error!("A builtin module contains a signature without implementation for {:?}", symbol)
|
||||
|
@ -641,7 +566,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
}
|
||||
}
|
||||
}
|
||||
GeneratedInfo::Hosted { effect_symbol, .. } => {
|
||||
HeaderType::Hosted { .. } => {
|
||||
let ident_id = symbol.ident_id();
|
||||
let ident = scope
|
||||
.locals
|
||||
|
@ -659,13 +584,8 @@ pub fn canonicalize_module_defs<'a>(
|
|||
aliases: Default::default(),
|
||||
};
|
||||
|
||||
let hosted_def = crate::effect_module::build_host_exposed_def(
|
||||
&mut scope,
|
||||
*symbol,
|
||||
&ident,
|
||||
effect_symbol,
|
||||
var_store,
|
||||
annotation,
|
||||
let hosted_def = crate::task_module::build_host_exposed_def(
|
||||
&mut scope, *symbol, &ident, var_store, annotation,
|
||||
);
|
||||
|
||||
declarations.update_builtin_def(index, hosted_def);
|
||||
|
@ -691,18 +611,6 @@ pub fn canonicalize_module_defs<'a>(
|
|||
|
||||
let mut aliases = MutMap::default();
|
||||
|
||||
if let GeneratedInfo::Hosted { effect_symbol, .. } = generated_info {
|
||||
// Remove this from exposed_symbols,
|
||||
// so that at the end of the process,
|
||||
// we can see if there were any
|
||||
// exposed symbols which did not have
|
||||
// corresponding defs.
|
||||
exposed_but_not_defined.remove(&effect_symbol);
|
||||
|
||||
let hosted_alias = scope.lookup_alias(effect_symbol).unwrap().clone();
|
||||
aliases.insert(effect_symbol, hosted_alias);
|
||||
}
|
||||
|
||||
for (symbol, alias) in output.aliases {
|
||||
// Remove this from exposed_symbols,
|
||||
// so that at the end of the process,
|
||||
|
@ -851,7 +759,7 @@ pub fn canonicalize_module_defs<'a>(
|
|||
scope,
|
||||
aliases,
|
||||
rigid_variables,
|
||||
params_pattern,
|
||||
module_params,
|
||||
declarations,
|
||||
referenced_values,
|
||||
exposed_imports: can_exposed_imports,
|
||||
|
|
|
@ -623,16 +623,29 @@ pub fn canonicalize_pattern<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
RecordDestructure(patterns) => canonicalize_record_destructure(
|
||||
env,
|
||||
var_store,
|
||||
scope,
|
||||
output,
|
||||
pattern_type,
|
||||
patterns,
|
||||
region,
|
||||
permit_shadows,
|
||||
),
|
||||
RecordDestructure(patterns) => {
|
||||
let ext_var = var_store.fresh();
|
||||
let whole_var = var_store.fresh();
|
||||
|
||||
let (destructs, opt_erroneous) = canonicalize_record_destructs(
|
||||
env,
|
||||
var_store,
|
||||
scope,
|
||||
output,
|
||||
pattern_type,
|
||||
patterns,
|
||||
region,
|
||||
permit_shadows,
|
||||
);
|
||||
|
||||
// If we encountered an erroneous pattern (e.g. one with shadowing),
|
||||
// use the resulting RuntimeError. Otherwise, return a successful record destructure.
|
||||
opt_erroneous.unwrap_or(Pattern::RecordDestructure {
|
||||
whole_var,
|
||||
ext_var,
|
||||
destructs,
|
||||
})
|
||||
}
|
||||
|
||||
RequiredField(_name, _loc_pattern) => {
|
||||
unreachable!("should have been handled in RecordDestructure");
|
||||
|
@ -779,7 +792,7 @@ pub fn canonicalize_pattern<'a>(
|
|||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn canonicalize_record_destructure<'a>(
|
||||
pub fn canonicalize_record_destructs<'a>(
|
||||
env: &mut Env<'a>,
|
||||
var_store: &mut VarStore,
|
||||
scope: &mut Scope,
|
||||
|
@ -788,11 +801,9 @@ pub fn canonicalize_record_destructure<'a>(
|
|||
patterns: &ast::Collection<Loc<ast::Pattern<'a>>>,
|
||||
region: Region,
|
||||
permit_shadows: PermitShadows,
|
||||
) -> Pattern {
|
||||
) -> (Vec<Loc<RecordDestruct>>, Option<Pattern>) {
|
||||
use ast::Pattern::*;
|
||||
|
||||
let ext_var = var_store.fresh();
|
||||
let whole_var = var_store.fresh();
|
||||
let mut destructs = Vec::with_capacity(patterns.len());
|
||||
let mut opt_erroneous = None;
|
||||
|
||||
|
@ -907,13 +918,7 @@ pub fn canonicalize_record_destructure<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
// If we encountered an erroneous pattern (e.g. one with shadowing),
|
||||
// use the resulting RuntimeError. Otherwise, return a successful record destructure.
|
||||
opt_erroneous.unwrap_or(Pattern::RecordDestructure {
|
||||
whole_var,
|
||||
ext_var,
|
||||
destructs,
|
||||
})
|
||||
(destructs, opt_erroneous)
|
||||
}
|
||||
|
||||
/// When we detect an unsupported pattern type (e.g. 5 = 1 + 2 is unsupported because you can't
|
||||
|
|
|
@@ -1,5 +1,5 @@
use crate::expr::Expr;
use crate::pattern::Pattern;
use crate::{expr::Expr, scope::SymbolLookup};
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;

@@ -125,8 +125,18 @@ impl References {
}
}
pub fn insert_value_lookup(&mut self, symbol: Symbol, qualified: QualifiedReference) {
self.insert(symbol, qualified.flags(ReferencesBitflags::VALUE_LOOKUP));
pub fn insert_value_lookup(&mut self, lookup: SymbolLookup, qualified: QualifiedReference) {
self.insert(
lookup.symbol,
qualified.flags(ReferencesBitflags::VALUE_LOOKUP),
);
if let Some((_, params_symbol)) = lookup.module_params {
self.insert(
params_symbol,
qualified.flags(ReferencesBitflags::VALUE_LOOKUP),
);
}
}
pub fn insert_type_lookup(&mut self, symbol: Symbol, qualified: QualifiedReference) {
|
|
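A minimal standalone sketch of the reference-tracking rule above, with stand-in types instead of the real References/SymbolLookup: recording a value lookup from a module that has params also records a reference to that module's params record, so the params import is not later reported as unused.

// Sketch only: Symbol, Variable, SymbolLookup and References are stand-ins.
use std::collections::HashSet;

type Symbol = &'static str;
type Variable = u32;

struct SymbolLookup {
    symbol: Symbol,
    module_params: Option<(Variable, Symbol)>,
}

#[derive(Default)]
struct References {
    value_lookups: HashSet<Symbol>,
}

impl References {
    fn insert_value_lookup(&mut self, lookup: SymbolLookup) {
        self.value_lookups.insert(lookup.symbol);

        // Referencing a symbol from a params'd module counts as a use of the
        // params record too.
        if let Some((_params_var, params_symbol)) = lookup.module_params {
            self.value_lookups.insert(params_symbol);
        }
    }
}

fn main() {
    let mut refs = References::default();
    refs.insert_value_lookup(SymbolLookup {
        symbol: "Menu.items",
        module_params: Some((7, "Menu.#params")),
    });
    assert!(refs.value_lookups.contains("Menu.#params"));
}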
@ -669,7 +669,7 @@ pub struct ScopeModules {
|
|||
/// Why is this module in scope?
|
||||
sources: Vec<ScopeModuleSource>,
|
||||
/// The params of a module if any
|
||||
params: Vec<Option<Symbol>>,
|
||||
params: Vec<Option<(Variable, Symbol)>>,
|
||||
}
|
||||
|
||||
impl ScopeModules {
|
||||
|
@ -731,7 +731,7 @@ impl ScopeModules {
|
|||
&mut self,
|
||||
module_name: ModuleName,
|
||||
module_id: ModuleId,
|
||||
params_symbol: Option<Symbol>,
|
||||
params: Option<(Variable, Symbol)>,
|
||||
region: Region,
|
||||
) -> Result<(), ScopeModuleSource> {
|
||||
if let Some(index) = self.names.iter().position(|name| name == &module_name) {
|
||||
|
@ -745,7 +745,7 @@ impl ScopeModules {
|
|||
self.ids.push(module_id);
|
||||
self.names.push(module_name);
|
||||
self.sources.push(ScopeModuleSource::Import(region));
|
||||
self.params.push(params_symbol);
|
||||
self.params.push(params);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@@ -768,14 +768,14 @@ impl ScopeModules {
}
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Copy)]
pub struct SymbolLookup {
pub symbol: Symbol,
pub module_params: Option<Symbol>,
pub module_params: Option<(Variable, Symbol)>,
}
impl SymbolLookup {
pub fn new(symbol: Symbol, params: Option<Symbol>) -> Self {
pub fn new(symbol: Symbol, params: Option<(Variable, Symbol)>) -> Self {
Self {
symbol,
module_params: params,

@@ -789,7 +789,7 @@ impl SymbolLookup {
pub struct ModuleLookup {
pub id: ModuleId,
pub params: Option<Symbol>,
pub params: Option<(Variable, Symbol)>,
}
impl ModuleLookup {
|
crates/compiler/can/src/task_module.rs (207 lines, Normal file)
@ -0,0 +1,207 @@
|
|||
use crate::def::Def;
|
||||
use crate::expr::{AnnotatedMark, ClosureData, Expr, Recursive};
|
||||
use crate::pattern::Pattern;
|
||||
use crate::scope::Scope;
|
||||
use roc_collections::SendMap;
|
||||
use roc_module::symbol::Symbol;
|
||||
use roc_region::all::{Loc, Region};
|
||||
use roc_types::subs::{VarStore, Variable};
|
||||
use roc_types::types::{LambdaSet, OptAbleVar, Type};
|
||||
|
||||
pub fn build_host_exposed_def(
|
||||
scope: &mut Scope,
|
||||
symbol: Symbol,
|
||||
ident: &str,
|
||||
var_store: &mut VarStore,
|
||||
annotation: crate::annotation::Annotation,
|
||||
) -> Def {
|
||||
let expr_var = var_store.fresh();
|
||||
let pattern = Pattern::Identifier(symbol);
|
||||
let mut pattern_vars = SendMap::default();
|
||||
pattern_vars.insert(symbol, expr_var);
|
||||
|
||||
let mut arguments: Vec<(Variable, AnnotatedMark, Loc<Pattern>)> = Vec::new();
|
||||
let mut linked_symbol_arguments: Vec<(Variable, Expr)> = Vec::new();
|
||||
let mut captured_symbols: Vec<(Symbol, Variable)> = Vec::new();
|
||||
|
||||
let crate::annotation::Annotation {
|
||||
introduced_variables,
|
||||
typ,
|
||||
aliases,
|
||||
..
|
||||
} = annotation;
|
||||
|
||||
let def_body = {
|
||||
match typ.shallow_structural_dealias() {
|
||||
Type::Function(args, _, _) => {
|
||||
for i in 0..args.len() {
|
||||
let name = format!("closure_arg_{ident}_{i}");
|
||||
|
||||
let arg_symbol = {
|
||||
let ident = name.clone().into();
|
||||
scope.introduce(ident, Region::zero()).unwrap()
|
||||
};
|
||||
|
||||
let arg_var = var_store.fresh();
|
||||
|
||||
arguments.push((
|
||||
arg_var,
|
||||
AnnotatedMark::new(var_store),
|
||||
Loc::at_zero(Pattern::Identifier(arg_symbol)),
|
||||
));
|
||||
|
||||
captured_symbols.push((arg_symbol, arg_var));
|
||||
linked_symbol_arguments.push((arg_var, Expr::Var(arg_symbol, arg_var)));
|
||||
}
|
||||
|
||||
let foreign_symbol_name = format!("roc_fx_{ident}");
|
||||
let low_level_call = Expr::ForeignCall {
|
||||
foreign_symbol: foreign_symbol_name.into(),
|
||||
args: linked_symbol_arguments,
|
||||
ret_var: var_store.fresh(),
|
||||
};
|
||||
|
||||
let task_closure_symbol = {
|
||||
let name = format!("task_closure_{ident}");
|
||||
|
||||
let ident = name.into();
|
||||
scope.introduce(ident, Region::zero()).unwrap()
|
||||
};
|
||||
|
||||
let task_closure = Expr::Closure(ClosureData {
|
||||
function_type: var_store.fresh(),
|
||||
closure_type: var_store.fresh(),
|
||||
return_type: var_store.fresh(),
|
||||
name: task_closure_symbol,
|
||||
captured_symbols,
|
||||
recursive: Recursive::NotRecursive,
|
||||
arguments: vec![(
|
||||
var_store.fresh(),
|
||||
AnnotatedMark::new(var_store),
|
||||
Loc::at_zero(empty_record_pattern(var_store)),
|
||||
)],
|
||||
loc_body: Box::new(Loc::at_zero(low_level_call)),
|
||||
});
|
||||
|
||||
let (specialized_def_type, type_arguments, lambda_set_variables) =
|
||||
build_fresh_opaque_variables(var_store);
|
||||
let body = Expr::OpaqueRef {
|
||||
opaque_var: var_store.fresh(),
|
||||
name: Symbol::TASK_TASK,
|
||||
argument: Box::new((var_store.fresh(), Loc::at_zero(task_closure))),
|
||||
specialized_def_type,
|
||||
type_arguments,
|
||||
lambda_set_variables,
|
||||
};
|
||||
|
||||
Expr::Closure(ClosureData {
|
||||
function_type: var_store.fresh(),
|
||||
closure_type: var_store.fresh(),
|
||||
return_type: var_store.fresh(),
|
||||
name: symbol,
|
||||
captured_symbols: std::vec::Vec::new(),
|
||||
recursive: Recursive::NotRecursive,
|
||||
arguments,
|
||||
loc_body: Box::new(Loc::at_zero(body)),
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
// not a function
|
||||
|
||||
let foreign_symbol_name = format!("roc_fx_{ident}");
|
||||
let low_level_call = Expr::ForeignCall {
|
||||
foreign_symbol: foreign_symbol_name.into(),
|
||||
args: linked_symbol_arguments,
|
||||
ret_var: var_store.fresh(),
|
||||
};
|
||||
|
||||
let task_closure_symbol = {
|
||||
let name = format!("task_closure_{ident}");
|
||||
|
||||
let ident = name.into();
|
||||
scope.introduce(ident, Region::zero()).unwrap()
|
||||
};
|
||||
|
||||
let task_closure = Expr::Closure(ClosureData {
|
||||
function_type: var_store.fresh(),
|
||||
closure_type: var_store.fresh(),
|
||||
return_type: var_store.fresh(),
|
||||
name: task_closure_symbol,
|
||||
captured_symbols,
|
||||
recursive: Recursive::NotRecursive,
|
||||
arguments: vec![(
|
||||
var_store.fresh(),
|
||||
AnnotatedMark::new(var_store),
|
||||
Loc::at_zero(empty_record_pattern(var_store)),
|
||||
)],
|
||||
loc_body: Box::new(Loc::at_zero(low_level_call)),
|
||||
});
|
||||
|
||||
let (specialized_def_type, type_arguments, lambda_set_variables) =
|
||||
build_fresh_opaque_variables(var_store);
|
||||
Expr::OpaqueRef {
|
||||
opaque_var: var_store.fresh(),
|
||||
name: Symbol::TASK_TASK,
|
||||
argument: Box::new((var_store.fresh(), Loc::at_zero(task_closure))),
|
||||
specialized_def_type,
|
||||
type_arguments,
|
||||
lambda_set_variables,
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let def_annotation = crate::def::Annotation {
|
||||
signature: typ,
|
||||
introduced_variables,
|
||||
aliases,
|
||||
region: Region::zero(),
|
||||
};
|
||||
|
||||
Def {
|
||||
loc_pattern: Loc::at_zero(pattern),
|
||||
loc_expr: Loc::at_zero(def_body),
|
||||
expr_var,
|
||||
pattern_vars,
|
||||
annotation: Some(def_annotation),
|
||||
}
|
||||
}
|
||||
|
||||
fn build_fresh_opaque_variables(
|
||||
var_store: &mut VarStore,
|
||||
) -> (Box<Type>, Vec<OptAbleVar>, Vec<LambdaSet>) {
|
||||
let closure_var = var_store.fresh();
|
||||
|
||||
let ok_var = var_store.fresh();
|
||||
let err_var = var_store.fresh();
|
||||
let result_var = var_store.fresh();
|
||||
|
||||
let actual = Type::Function(
|
||||
vec![Type::EmptyRec],
|
||||
Box::new(Type::Variable(closure_var)),
|
||||
Box::new(Type::Variable(result_var)),
|
||||
);
|
||||
|
||||
let type_arguments = vec![
|
||||
OptAbleVar {
|
||||
var: ok_var,
|
||||
opt_abilities: None,
|
||||
},
|
||||
OptAbleVar {
|
||||
var: err_var,
|
||||
opt_abilities: None,
|
||||
},
|
||||
];
|
||||
let lambda_set_variables = vec![roc_types::types::LambdaSet(Type::Variable(closure_var))];
|
||||
|
||||
(Box::new(actual), type_arguments, lambda_set_variables)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn empty_record_pattern(var_store: &mut VarStore) -> Pattern {
|
||||
Pattern::RecordDestructure {
|
||||
whole_var: var_store.fresh(),
|
||||
ext_var: var_store.fresh(),
|
||||
destructs: vec![],
|
||||
}
|
||||
}
|
|
@ -55,10 +55,12 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
|
|||
// rules multiple times unnecessarily.
|
||||
let loc_expr = desugar::desugar_expr(
|
||||
arena,
|
||||
&mut var_store,
|
||||
&loc_expr,
|
||||
expr_str,
|
||||
&mut None,
|
||||
arena.alloc("TestPath"),
|
||||
&mut Default::default(),
|
||||
);
|
||||
|
||||
let mut scope = Scope::new(
|
||||
|
|
|
@ -0,0 +1,145 @@
|
|||
---
|
||||
source: crates/compiler/can/tests/test_suffixed.rs
|
||||
assertion_line: 449
|
||||
expression: snapshot
|
||||
---
|
||||
Defs {
|
||||
tags: [
|
||||
Index(2147483648),
|
||||
],
|
||||
regions: [
|
||||
@0-26,
|
||||
],
|
||||
space_before: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
space_after: [
|
||||
Slice(start = 0, length = 1),
|
||||
],
|
||||
spaces: [
|
||||
Newline,
|
||||
],
|
||||
type_defs: [],
|
||||
value_defs: [
|
||||
Body(
|
||||
@0-4 Identifier {
|
||||
ident: "main",
|
||||
},
|
||||
@11-26 Defs(
|
||||
Defs {
|
||||
tags: [
|
||||
Index(2147483648),
|
||||
],
|
||||
regions: [
|
||||
@15-26,
|
||||
],
|
||||
space_before: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
space_after: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
spaces: [],
|
||||
type_defs: [],
|
||||
value_defs: [
|
||||
Body(
|
||||
@15-26 Identifier {
|
||||
ident: "64",
|
||||
},
|
||||
@15-26 ParensAround(
|
||||
Defs(
|
||||
Defs {
|
||||
tags: [
|
||||
Index(2147483648),
|
||||
],
|
||||
regions: [
|
||||
@20-25,
|
||||
],
|
||||
space_before: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
space_after: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
spaces: [],
|
||||
type_defs: [],
|
||||
value_defs: [
|
||||
Body(
|
||||
@20-25 Identifier {
|
||||
ident: "63",
|
||||
},
|
||||
@20-25 Apply(
|
||||
@22-23 Var {
|
||||
module_name: "Num",
|
||||
ident: "add",
|
||||
},
|
||||
[
|
||||
@20-21 Num(
|
||||
"1",
|
||||
),
|
||||
@24-25 Num(
|
||||
"1",
|
||||
),
|
||||
],
|
||||
BinOp(
|
||||
Plus,
|
||||
),
|
||||
),
|
||||
),
|
||||
],
|
||||
},
|
||||
@15-26 LowLevelDbg(
|
||||
(
|
||||
"test.roc:3",
|
||||
" ",
|
||||
),
|
||||
@20-25 Apply(
|
||||
@20-25 Var {
|
||||
module_name: "Inspect",
|
||||
ident: "toStr",
|
||||
},
|
||||
[
|
||||
@20-25 Var {
|
||||
module_name: "",
|
||||
ident: "63",
|
||||
},
|
||||
],
|
||||
Space,
|
||||
),
|
||||
@20-25 Var {
|
||||
module_name: "",
|
||||
ident: "63",
|
||||
},
|
||||
),
|
||||
),
|
||||
),
|
||||
),
|
||||
],
|
||||
},
|
||||
@11-26 LowLevelDbg(
|
||||
(
|
||||
"test.roc:2",
|
||||
"in =\n ",
|
||||
),
|
||||
@15-26 Apply(
|
||||
@15-26 Var {
|
||||
module_name: "Inspect",
|
||||
ident: "toStr",
|
||||
},
|
||||
[
|
||||
@15-26 Var {
|
||||
module_name: "",
|
||||
ident: "64",
|
||||
},
|
||||
],
|
||||
Space,
|
||||
),
|
||||
@15-26 Var {
|
||||
module_name: "",
|
||||
ident: "64",
|
||||
},
|
||||
),
|
||||
),
|
||||
),
|
||||
],
|
||||
}
|
|
@ -0,0 +1,81 @@
|
|||
---
|
||||
source: crates/compiler/can/tests/test_suffixed.rs
|
||||
assertion_line: 459
|
||||
expression: snapshot
|
||||
---
|
||||
Defs {
|
||||
tags: [
|
||||
Index(2147483648),
|
||||
],
|
||||
regions: [
|
||||
@0-19,
|
||||
],
|
||||
space_before: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
space_after: [
|
||||
Slice(start = 0, length = 1),
|
||||
],
|
||||
spaces: [
|
||||
Newline,
|
||||
],
|
||||
type_defs: [],
|
||||
value_defs: [
|
||||
Body(
|
||||
@0-4 Identifier {
|
||||
ident: "main",
|
||||
},
|
||||
@11-19 Defs(
|
||||
Defs {
|
||||
tags: [
|
||||
Index(2147483648),
|
||||
],
|
||||
regions: [
|
||||
@11-12,
|
||||
],
|
||||
space_before: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
space_after: [
|
||||
Slice(start = 0, length = 0),
|
||||
],
|
||||
spaces: [],
|
||||
type_defs: [],
|
||||
value_defs: [
|
||||
Body(
|
||||
@11-12 Identifier {
|
||||
ident: "63",
|
||||
},
|
||||
@11-12 Num(
|
||||
"1",
|
||||
),
|
||||
),
|
||||
],
|
||||
},
|
||||
@11-19 LowLevelDbg(
|
||||
(
|
||||
"test.roc:2",
|
||||
" ",
|
||||
),
|
||||
@11-12 Apply(
|
||||
@11-12 Var {
|
||||
module_name: "Inspect",
|
||||
ident: "toStr",
|
||||
},
|
||||
[
|
||||
@11-12 Var {
|
||||
module_name: "",
|
||||
ident: "63",
|
||||
},
|
||||
],
|
||||
Space,
|
||||
),
|
||||
@11-12 Var {
|
||||
module_name: "",
|
||||
ident: "63",
|
||||
},
|
||||
),
|
||||
),
|
||||
),
|
||||
],
|
||||
}
|
|
@ -7,12 +7,23 @@ mod suffixed_tests {
|
|||
use insta::assert_snapshot;
|
||||
use roc_can::desugar::desugar_defs_node_values;
|
||||
use roc_parse::test_helpers::parse_defs_with;
|
||||
use roc_types::subs::VarStore;
|
||||
|
||||
macro_rules! run_test {
|
||||
($src:expr) => {{
|
||||
let arena = &Bump::new();
|
||||
let mut var_store = VarStore::default();
|
||||
let mut defs = parse_defs_with(arena, indoc!($src)).unwrap();
|
||||
desugar_defs_node_values(arena, &mut defs, $src, &mut None, "test.roc", true);
|
||||
desugar_defs_node_values(
|
||||
arena,
|
||||
&mut var_store,
|
||||
&mut defs,
|
||||
$src,
|
||||
&mut None,
|
||||
"test.roc",
|
||||
true,
|
||||
&mut Default::default(),
|
||||
);
|
||||
|
||||
let snapshot = format!("{:#?}", &defs);
|
||||
println!("{}", snapshot);
|
||||
|
@ -433,6 +444,16 @@ mod suffixed_tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dbg_expr() {
|
||||
run_test!(
|
||||
r#"
|
||||
main =
|
||||
dbg (dbg 1 + 1)
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn apply_argument_single() {
|
||||
run_test!(
|
||||
|
|
crates/compiler/checkmate/www/package-lock.json (231 lines, generated)
@ -3424,21 +3424,15 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/@jridgewell/trace-mapping": {
|
||||
"version": "0.3.18",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz",
|
||||
"integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==",
|
||||
"version": "0.3.25",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
|
||||
"integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "3.1.0",
|
||||
"@jridgewell/sourcemap-codec": "1.4.14"
|
||||
"@jridgewell/resolve-uri": "^3.1.0",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec": {
|
||||
"version": "1.4.14",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
|
||||
"integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@jsdevtools/ono": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz",
|
||||
|
@ -4351,20 +4345,10 @@
|
|||
"@types/json-schema": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/eslint-scope": {
|
||||
"version": "3.7.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.4.tgz",
|
||||
"integrity": "sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/eslint": "*",
|
||||
"@types/estree": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz",
|
||||
"integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==",
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz",
|
||||
"integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/express": {
|
||||
|
@ -4932,9 +4916,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/ast": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz",
|
||||
"integrity": "sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.12.1.tgz",
|
||||
"integrity": "sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/helper-numbers": "1.11.6",
|
||||
|
@ -4954,9 +4938,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/helper-buffer": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz",
|
||||
"integrity": "sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz",
|
||||
"integrity": "sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/helper-numbers": {
|
||||
|
@ -4977,15 +4961,15 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/helper-wasm-section": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz",
|
||||
"integrity": "sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz",
|
||||
"integrity": "sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/helper-buffer": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-buffer": "1.12.1",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/wasm-gen": "1.11.6"
|
||||
"@webassemblyjs/wasm-gen": "1.12.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/ieee754": {
|
||||
|
@ -5013,28 +4997,28 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-edit": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz",
|
||||
"integrity": "sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz",
|
||||
"integrity": "sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/helper-buffer": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-buffer": "1.12.1",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/helper-wasm-section": "1.11.6",
|
||||
"@webassemblyjs/wasm-gen": "1.11.6",
|
||||
"@webassemblyjs/wasm-opt": "1.11.6",
|
||||
"@webassemblyjs/wasm-parser": "1.11.6",
|
||||
"@webassemblyjs/wast-printer": "1.11.6"
|
||||
"@webassemblyjs/helper-wasm-section": "1.12.1",
|
||||
"@webassemblyjs/wasm-gen": "1.12.1",
|
||||
"@webassemblyjs/wasm-opt": "1.12.1",
|
||||
"@webassemblyjs/wasm-parser": "1.12.1",
|
||||
"@webassemblyjs/wast-printer": "1.12.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-gen": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz",
|
||||
"integrity": "sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz",
|
||||
"integrity": "sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/ieee754": "1.11.6",
|
||||
"@webassemblyjs/leb128": "1.11.6",
|
||||
|
@ -5042,24 +5026,24 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-opt": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz",
|
||||
"integrity": "sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz",
|
||||
"integrity": "sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/helper-buffer": "1.11.6",
|
||||
"@webassemblyjs/wasm-gen": "1.11.6",
|
||||
"@webassemblyjs/wasm-parser": "1.11.6"
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-buffer": "1.12.1",
|
||||
"@webassemblyjs/wasm-gen": "1.12.1",
|
||||
"@webassemblyjs/wasm-parser": "1.12.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wasm-parser": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz",
|
||||
"integrity": "sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz",
|
||||
"integrity": "sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@webassemblyjs/helper-api-error": "1.11.6",
|
||||
"@webassemblyjs/helper-wasm-bytecode": "1.11.6",
|
||||
"@webassemblyjs/ieee754": "1.11.6",
|
||||
|
@ -5068,12 +5052,12 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@webassemblyjs/wast-printer": {
|
||||
"version": "1.11.6",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz",
|
||||
"integrity": "sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A==",
|
||||
"version": "1.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz",
|
||||
"integrity": "sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@webassemblyjs/ast": "1.11.6",
|
||||
"@webassemblyjs/ast": "1.12.1",
|
||||
"@xtuc/long": "4.2.2"
|
||||
}
|
||||
},
|
||||
|
@ -5142,10 +5126,10 @@
|
|||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn-import-assertions": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz",
|
||||
"integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==",
|
||||
"node_modules/acorn-import-attributes": {
|
||||
"version": "1.9.5",
|
||||
"resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz",
|
||||
"integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==",
|
||||
"dev": true,
|
||||
"peerDependencies": {
|
||||
"acorn": "^8"
|
||||
|
@ -6039,9 +6023,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/browserslist": {
|
||||
"version": "4.21.9",
|
||||
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.9.tgz",
|
||||
"integrity": "sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg==",
|
||||
"version": "4.23.3",
|
||||
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz",
|
||||
"integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
|
@ -6058,10 +6042,10 @@
|
|||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"caniuse-lite": "^1.0.30001503",
|
||||
"electron-to-chromium": "^1.4.431",
|
||||
"node-releases": "^2.0.12",
|
||||
"update-browserslist-db": "^1.0.11"
|
||||
"caniuse-lite": "^1.0.30001646",
|
||||
"electron-to-chromium": "^1.5.4",
|
||||
"node-releases": "^2.0.18",
|
||||
"update-browserslist-db": "^1.1.0"
|
||||
},
|
||||
"bin": {
|
||||
"browserslist": "cli.js"
|
||||
|
@ -6178,9 +6162,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/caniuse-lite": {
|
||||
"version": "1.0.30001516",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001516.tgz",
|
||||
"integrity": "sha512-Wmec9pCBY8CWbmI4HsjBeQLqDTqV91nFVR83DnZpYyRnPI1wePDsTg0bGLPC5VU/3OIZV1fmxEea1b+tFKe86g==",
|
||||
"version": "1.0.30001655",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001655.tgz",
|
||||
"integrity": "sha512-jRGVy3iSGO5Uutn2owlb5gR6qsGngTw9ZTb4ali9f3glshcNmJ2noam4Mo9zia5P9Dk3jNNydy7vQjuE5dQmfg==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
|
@ -7575,9 +7559,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/electron-to-chromium": {
|
||||
"version": "1.4.461",
|
||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.461.tgz",
|
||||
"integrity": "sha512-1JkvV2sgEGTDXjdsaQCeSwYYuhLRphRpc+g6EHTFELJXEiznLt3/0pZ9JuAOQ5p2rI3YxKTbivtvajirIfhrEQ==",
|
||||
"version": "1.5.13",
|
||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.13.tgz",
|
||||
"integrity": "sha512-lbBcvtIJ4J6sS4tb5TLp1b4LyfCdMkwStzXPyAgVgTRAsep4bvrAGaBOP7ZJtQMNJpSQ9SqG4brWOroNaQtm7Q==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/elkjs": {
|
||||
|
@ -7622,9 +7606,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/enhanced-resolve": {
|
||||
"version": "5.15.0",
|
||||
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz",
|
||||
"integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==",
|
||||
"version": "5.17.1",
|
||||
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz",
|
||||
"integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.4",
|
||||
|
@ -7815,9 +7799,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/escalade": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
|
||||
"integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
|
||||
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
|
@ -13471,9 +13455,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/node-releases": {
|
||||
"version": "2.0.13",
|
||||
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz",
|
||||
"integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==",
|
||||
"version": "2.0.18",
|
||||
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz",
|
||||
"integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/normalize-path": {
|
||||
|
@ -13926,9 +13910,9 @@
|
|||
"dev": true
|
||||
},
|
||||
"node_modules/picocolors": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
|
||||
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
|
||||
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/picomatch": {
|
||||
|
@ -17473,9 +17457,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/terser": {
|
||||
"version": "5.19.0",
|
||||
"resolved": "https://registry.npmjs.org/terser/-/terser-5.19.0.tgz",
|
||||
"integrity": "sha512-JpcpGOQLOXm2jsomozdMDpd5f8ZHh1rR48OFgWUH3QsyZcfPgv2qDCYbcDEAYNd4OZRj2bWYKpwdll/udZCk/Q==",
|
||||
"version": "5.31.6",
|
||||
"resolved": "https://registry.npmjs.org/terser/-/terser-5.31.6.tgz",
|
||||
"integrity": "sha512-PQ4DAriWzKj+qgehQ7LK5bQqCFNMmlhjR2PFFLuqGCpuCAauxemVBWwWOxo3UIwWQx8+Pr61Df++r76wDmkQBg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/source-map": "^0.3.3",
|
||||
|
@ -17491,16 +17475,16 @@
|
|||
}
|
||||
},
|
||||
"node_modules/terser-webpack-plugin": {
|
||||
"version": "5.3.9",
|
||||
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz",
|
||||
"integrity": "sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==",
|
||||
"version": "5.3.10",
|
||||
"resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz",
|
||||
"integrity": "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@jridgewell/trace-mapping": "^0.3.17",
|
||||
"@jridgewell/trace-mapping": "^0.3.20",
|
||||
"jest-worker": "^27.4.5",
|
||||
"schema-utils": "^3.1.1",
|
||||
"serialize-javascript": "^6.0.1",
|
||||
"terser": "^5.16.8"
|
||||
"terser": "^5.26.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10.13.0"
|
||||
|
@ -17984,9 +17968,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/update-browserslist-db": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz",
|
||||
"integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz",
|
||||
"integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
|
@ -18003,8 +17987,8 @@
|
|||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"escalade": "^3.1.1",
|
||||
"picocolors": "^1.0.0"
|
||||
"escalade": "^3.1.2",
|
||||
"picocolors": "^1.0.1"
|
||||
},
|
||||
"bin": {
|
||||
"update-browserslist-db": "cli.js"
|
||||
|
@ -18140,9 +18124,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/watchpack": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz",
|
||||
"integrity": "sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==",
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.2.tgz",
|
||||
"integrity": "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"glob-to-regexp": "^0.4.1",
|
||||
|
@ -18171,34 +18155,33 @@
|
|||
}
|
||||
},
|
||||
"node_modules/webpack": {
|
||||
"version": "5.88.1",
|
||||
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.88.1.tgz",
|
||||
"integrity": "sha512-FROX3TxQnC/ox4N+3xQoWZzvGXSuscxR32rbzjpXgEzWudJFEJBpdlkkob2ylrv5yzzufD1zph1OoFsLtm6stQ==",
|
||||
"version": "5.94.0",
|
||||
"resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz",
|
||||
"integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/eslint-scope": "^3.7.3",
|
||||
"@types/estree": "^1.0.0",
|
||||
"@webassemblyjs/ast": "^1.11.5",
|
||||
"@webassemblyjs/wasm-edit": "^1.11.5",
|
||||
"@webassemblyjs/wasm-parser": "^1.11.5",
|
||||
"@types/estree": "^1.0.5",
|
||||
"@webassemblyjs/ast": "^1.12.1",
|
||||
"@webassemblyjs/wasm-edit": "^1.12.1",
|
||||
"@webassemblyjs/wasm-parser": "^1.12.1",
|
||||
"acorn": "^8.7.1",
|
||||
"acorn-import-assertions": "^1.9.0",
|
||||
"browserslist": "^4.14.5",
|
||||
"acorn-import-attributes": "^1.9.5",
|
||||
"browserslist": "^4.21.10",
|
||||
"chrome-trace-event": "^1.0.2",
|
||||
"enhanced-resolve": "^5.15.0",
|
||||
"enhanced-resolve": "^5.17.1",
|
||||
"es-module-lexer": "^1.2.1",
|
||||
"eslint-scope": "5.1.1",
|
||||
"events": "^3.2.0",
|
||||
"glob-to-regexp": "^0.4.1",
|
||||
"graceful-fs": "^4.2.9",
|
||||
"graceful-fs": "^4.2.11",
|
||||
"json-parse-even-better-errors": "^2.3.1",
|
||||
"loader-runner": "^4.2.0",
|
||||
"mime-types": "^2.1.27",
|
||||
"neo-async": "^2.6.2",
|
||||
"schema-utils": "^3.2.0",
|
||||
"tapable": "^2.1.1",
|
||||
"terser-webpack-plugin": "^5.3.7",
|
||||
"watchpack": "^2.4.0",
|
||||
"terser-webpack-plugin": "^5.3.10",
|
||||
"watchpack": "^2.4.1",
|
||||
"webpack-sources": "^3.2.3"
|
||||
},
|
||||
"bin": {
|
||||
|
|
|
@@ -17,6 +17,10 @@ impl<T> VecSet<T> {
     pub fn into_vec(self) -> Vec<T> {
         self.elements
     }
+
+    pub fn reserve(&mut self, additional: usize) {
+        self.elements.reserve(additional)
+    }
 }

 impl<T: PartialEq> VecSet<T> {
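For readers skimming the diff: the new reserve method above simply forwards to Vec::reserve on the backing vector that into_vec returns. A minimal usage sketch, assuming VecSet also exposes the usual default() and insert() (neither is shown in this hunk, and the import path is assumed):

    // Usage sketch only; default() and insert() are assumed, not shown above.
    use roc_collections::VecSet; // assumed path for the crate being diffed

    fn collect_ids(ids: &[u32]) -> VecSet<u32> {
        let mut set = VecSet::default();
        // Pre-size the backing Vec so the loop below does not reallocate
        // repeatedly; this is what the new reserve() delegates to.
        set.reserve(ids.len());
        for &id in ids {
            set.insert(id);
        }
        set
    }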
@@ -569,8 +569,8 @@ pub fn constrain_expr(
         Var(symbol, variable)
         | ParamsVar {
             symbol,
-            params: _,
             var: variable,
+            ..
         } => {
             // Save the expectation in the variable, then lookup the symbol's type in the environment
             let expected_type = *constraints[expected].get_type_ref();
@@ -3,13 +3,12 @@ use crate::pattern::{constrain_pattern, PatternState};
 use roc_can::abilities::{PendingAbilitiesStore, PendingMemberType};
 use roc_can::constraint::{Constraint, Constraints, Generalizable};
 use roc_can::expected::{Expected, PExpected};
-use roc_can::expr::{AnnotatedMark, Declarations};
-use roc_can::pattern::Pattern;
+use roc_can::expr::Declarations;
+use roc_can::module::ModuleParams;
 use roc_collections::MutMap;
-use roc_error_macros::internal_error;
 use roc_module::symbol::{ModuleId, Symbol};
 use roc_region::all::{Loc, Region};
 use roc_types::subs::Variable;
 use roc_types::types::{AnnotationSource, Category, Type, Types};

 pub fn constrain_module(
@@ -18,12 +17,12 @@ pub fn constrain_module(
     symbols_from_requires: Vec<(Loc<Symbol>, Loc<Type>)>,
     abilities_store: &PendingAbilitiesStore,
     declarations: &Declarations,
-    params_pattern: &Option<(Variable, AnnotatedMark, Loc<Pattern>)>,
+    opt_module_params: &Option<ModuleParams>,
     home: ModuleId,
 ) -> Constraint {
     let constraint = crate::expr::constrain_decls(types, constraints, home, declarations);

-    let constraint = match params_pattern {
+    let constraint = match opt_module_params {
         Some(params_pattern) => {
             constrain_params(types, constraints, home, constraint, params_pattern)
         }
@@ -51,7 +50,7 @@ fn constrain_params(
     constraints: &mut Constraints,
     home: ModuleId,
     constraint: Constraint,
-    (pattern_var, _, loc_pattern): &(Variable, AnnotatedMark, Loc<Pattern>),
+    module_params: &ModuleParams,
 ) -> Constraint {
     let mut env = Env {
         home,
@@ -59,23 +58,13 @@ fn constrain_params(
         resolutions_to_make: vec![],
     };

-    let index = constraints.push_variable(*pattern_var);
+    let index = constraints.push_variable(module_params.whole_var);
     let expected_params = constraints.push_pat_expected_type(PExpected::NoExpectation(index));

     let mut state = PatternState::default();

-    let closed_con = match loc_pattern.value {
-        Pattern::RecordDestructure {
-            whole_var: _,
-            ext_var,
-            destructs: _,
-        } => {
-            // Disallow record extension for module params
-            let empty_rec = constraints.push_type(types, Types::EMPTY_RECORD);
-            constraints.store(empty_rec, ext_var, file!(), line!())
-        }
-        _ => internal_error!("Only record destructures are allowed in module params. This should've been caught earlier."),
-    };
+    let empty_rec = constraints.push_type(types, Types::EMPTY_RECORD);
+    let closed_con = constraints.store(empty_rec, module_params.record_ext_var, file!(), line!());

     state.constraints.push(closed_con);

@@ -83,8 +72,8 @@ fn constrain_params(
         types,
         constraints,
         &mut env,
-        &loc_pattern.value,
-        loc_pattern.region,
+        &module_params.pattern().value,
+        module_params.region,
         expected_params,
         &mut state,
     );
@@ -100,7 +89,7 @@ fn constrain_params(
         Generalizable(true),
     );

-    constraints.exists([*pattern_var], cons)
+    constraints.exists([module_params.whole_var], cons)
 }

 fn constrain_symbols_from_requires(
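The hunks above replace the ad-hoc (Variable, AnnotatedMark, Loc<Pattern>) tuple with a ModuleParams value imported from roc_can::module. Its definition is not part of this diff; a hypothetical shape, inferred only from the fields the new code touches (whole_var, record_ext_var, region, pattern()), might look roughly like the sketch below. The real struct may differ.

    // Illustrative only; the real type lives in roc_can::module.
    use roc_can::pattern::Pattern;
    use roc_region::all::{Loc, Region};
    use roc_types::subs::Variable;

    pub struct ModuleParams {
        pub region: Region,           // location of the params record, used for reporting
        pub whole_var: Variable,      // type variable for the whole params record
        pub record_ext_var: Variable, // extension var, constrained to EMPTY_RECORD above
        pub destructs: Loc<Pattern>,  // hypothetical field backing pattern()
    }

    impl ModuleParams {
        pub fn pattern(&self) -> &Loc<Pattern> {
            // Hypothetical accessor: hands constrain_pattern the destructure to check.
            &self.destructs
        }
    }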
@@ -38,6 +38,7 @@ pub enum Parens {
     InFunctionType,
     InApply,
     InOperator,
+    InAsPattern,
 }

 /// In an AST node, do we show newlines around it
@@ -46,7 +46,8 @@ impl<'a> Formattable for Expr<'a> {
             | MalformedClosure
             | Tag(_)
             | OpaqueRef(_)
-            | Crash => false,
+            | Crash
+            | Dbg => false,

             RecordAccess(inner, _) | TupleAccess(inner, _) | TrySuffix { expr: inner, .. } => {
                 inner.is_multiline()
@@ -65,7 +66,7 @@ impl<'a> Formattable for Expr<'a> {
             Expect(condition, continuation) => {
                 condition.is_multiline() || continuation.is_multiline()
             }
-            Dbg(condition, _) => condition.is_multiline(),
+            DbgStmt(condition, _) => condition.is_multiline(),
             LowLevelDbg(_, _, _) => unreachable!(
                 "LowLevelDbg should only exist after desugaring, not during formatting"
             ),
@@ -453,8 +454,12 @@ impl<'a> Formattable for Expr<'a> {
             Expect(condition, continuation) => {
                 fmt_expect(buf, condition, continuation, self.is_multiline(), indent);
             }
-            Dbg(condition, continuation) => {
-                fmt_dbg(buf, condition, continuation, self.is_multiline(), indent);
+            Dbg => {
+                buf.indent(indent);
+                buf.push_str("dbg");
+            }
+            DbgStmt(condition, continuation) => {
+                fmt_dbg_stmt(buf, condition, continuation, self.is_multiline(), indent);
             }
             LowLevelDbg(_, _, _) => unreachable!(
                 "LowLevelDbg should only exist after desugaring, not during formatting"
@@ -1018,7 +1023,7 @@ fn fmt_when<'a>(
     }
 }

-fn fmt_dbg<'a>(
+fn fmt_dbg_stmt<'a>(
     buf: &mut Buf,
     condition: &'a Loc<Expr<'a>>,
     continuation: &'a Loc<Expr<'a>>,
@@ -1240,7 +1245,7 @@ fn fmt_closure<'a>(
     let mut it = loc_patterns.iter().peekable();

     while let Some(loc_pattern) = it.next() {
-        loc_pattern.format(buf, indent);
+        loc_pattern.format_with_options(buf, Parens::InAsPattern, Newlines::No, indent);

         if it.peek().is_some() {
             buf.indent(indent);
@@ -7,11 +7,10 @@ use crate::spaces::{fmt_comments_only, fmt_default_spaces, fmt_spaces, NewlineAt
 use crate::Buf;
 use roc_parse::ast::{Collection, CommentOrNewline, Header, Spaced, Spaces, SpacesBefore};
 use roc_parse::header::{
-    AppHeader, ExposedName, ExposesKeyword, GeneratesKeyword, HostedHeader, ImportsEntry,
-    ImportsKeyword, Keyword, KeywordItem, ModuleHeader, ModuleName, PackageEntry, PackageHeader,
-    PackageKeyword, PackageName, PackagesKeyword, PlatformHeader, PlatformKeyword,
-    PlatformRequires, ProvidesKeyword, ProvidesTo, RequiresKeyword, To, ToKeyword, TypedIdent,
-    WithKeyword,
+    AppHeader, ExposedName, ExposesKeyword, HostedHeader, ImportsEntry, ImportsKeyword, Keyword,
+    KeywordItem, ModuleHeader, ModuleName, PackageEntry, PackageHeader, PackageKeyword,
+    PackageName, PackagesKeyword, PlatformHeader, PlatformKeyword, PlatformRequires,
+    ProvidesKeyword, ProvidesTo, RequiresKeyword, To, ToKeyword, TypedIdent,
 };
 use roc_parse::ident::UppercaseIdent;
 use roc_region::all::Loc;
@@ -63,8 +62,6 @@ macro_rules! keywords {
 keywords! {
     ExposesKeyword,
     ImportsKeyword,
-    WithKeyword,
-    GeneratesKeyword,
     PackageKeyword,
     PackagesKeyword,
     RequiresKeyword,
@@ -206,9 +203,6 @@ pub fn fmt_hosted_header<'a>(buf: &mut Buf, header: &'a HostedHeader<'a>) {
     fmt_exposes(buf, header.exposes.item, indent);
     header.imports.keyword.format(buf, indent);
     fmt_imports(buf, header.imports.item, indent);
-    header.generates.format(buf, indent);
-    header.generates_with.keyword.format(buf, indent);
-    fmt_exposes(buf, header.generates_with.item, indent);
 }

 pub fn fmt_app_header<'a>(buf: &mut Buf, header: &'a AppHeader<'a>) {
@@ -244,9 +244,19 @@ impl<'a> Formattable for Pattern<'a> {
             }

             As(pattern, pattern_as) => {
+                let needs_parens = parens == Parens::InAsPattern;
+
+                if needs_parens {
+                    buf.push('(');
+                }
+
                 fmt_pattern(buf, &pattern.value, indent, parens);

                 pattern_as.format(buf, indent + INDENT);
+
+                if needs_parens {
+                    buf.push(')');
+                }
             }

             // Space