Compare commits
22 Commits
f6bf3dd639
...
rethink
| Author | SHA1 | Date | |
|---|---|---|---|
| 70c634f01f | |||
| 7b5397f661 | |||
| 175b358205 | |||
| 6649b190ac | |||
| c542476365 | |||
| 15b4059163 | |||
| 188114ce78 | |||
| 2d11a0ff93 | |||
| 82c260fec3 | |||
| ad016f9dcf | |||
| 40c0517dd3 | |||
| e1821977ab | |||
| e84175c501 | |||
| 3adb7650b4 | |||
| 156120fbec | |||
| 89a7df58e5 | |||
| e5bb88aa4e | |||
| 12ef49fc7b | |||
| 79a291a8e8 | |||
| a9dc45d93d | |||
| edc2a7161a | |||
| 079796ecc3 |
9
.gitignore
vendored
9
.gitignore
vendored
@@ -5,10 +5,7 @@
|
|||||||
*.bak
|
*.bak
|
||||||
hsrc/Syntax/Lexer.hs
|
hsrc/Syntax/Lexer.hs
|
||||||
hsrc/Syntax/Parser.hs
|
hsrc/Syntax/Parser.hs
|
||||||
bang
|
|
||||||
|
|
||||||
|
.cabal-sandbox/
|
||||||
# Added by cargo
|
dist/
|
||||||
/proptest-regressions
|
cabal.sandbox.config
|
||||||
/target
|
|
||||||
.aider*
|
|
||||||
|
|||||||
452
Cargo.lock
generated
452
Cargo.lock
generated
@@ -1,452 +0,0 @@
|
|||||||
# This file is automatically @generated by Cargo.
|
|
||||||
# It is not intended for manual editing.
|
|
||||||
version = 4
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "autocfg"
|
|
||||||
version = "1.5.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bang"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"codespan",
|
|
||||||
"codespan-reporting",
|
|
||||||
"proptest",
|
|
||||||
"proptest-derive",
|
|
||||||
"thiserror",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bit-set"
|
|
||||||
version = "0.8.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
|
|
||||||
dependencies = [
|
|
||||||
"bit-vec",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bit-vec"
|
|
||||||
version = "0.8.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bitflags"
|
|
||||||
version = "2.9.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cfg-if"
|
|
||||||
version = "1.0.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "codespan"
|
|
||||||
version = "0.12.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3e4b418d52c9206820a56fc1aa28db73d67e346ba8ba6aa90987e8d6becef7e4"
|
|
||||||
dependencies = [
|
|
||||||
"codespan-reporting",
|
|
||||||
"serde",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "codespan-reporting"
|
|
||||||
version = "0.12.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81"
|
|
||||||
dependencies = [
|
|
||||||
"serde",
|
|
||||||
"termcolor",
|
|
||||||
"unicode-width",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "errno"
|
|
||||||
version = "0.3.14"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
"windows-sys",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fastrand"
|
|
||||||
version = "2.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fnv"
|
|
||||||
version = "1.0.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "getrandom"
|
|
||||||
version = "0.3.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
"libc",
|
|
||||||
"r-efi",
|
|
||||||
"wasi",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lazy_static"
|
|
||||||
version = "1.5.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "libc"
|
|
||||||
version = "0.2.176"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "linux-raw-sys"
|
|
||||||
version = "0.11.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "num-traits"
|
|
||||||
version = "0.2.19"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "once_cell"
|
|
||||||
version = "1.21.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ppv-lite86"
|
|
||||||
version = "0.2.21"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
|
|
||||||
dependencies = [
|
|
||||||
"zerocopy",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proc-macro2"
|
|
||||||
version = "1.0.101"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
|
|
||||||
dependencies = [
|
|
||||||
"unicode-ident",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proptest"
|
|
||||||
version = "1.8.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2bb0be07becd10686a0bb407298fb425360a5c44a663774406340c59a22de4ce"
|
|
||||||
dependencies = [
|
|
||||||
"bit-set",
|
|
||||||
"bit-vec",
|
|
||||||
"bitflags",
|
|
||||||
"lazy_static",
|
|
||||||
"num-traits",
|
|
||||||
"rand",
|
|
||||||
"rand_chacha",
|
|
||||||
"rand_xorshift",
|
|
||||||
"regex-syntax",
|
|
||||||
"rusty-fork",
|
|
||||||
"tempfile",
|
|
||||||
"unarray",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proptest-derive"
|
|
||||||
version = "0.6.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "quick-error"
|
|
||||||
version = "1.2.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "quote"
|
|
||||||
version = "1.0.40"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "r-efi"
|
|
||||||
version = "5.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand"
|
|
||||||
version = "0.9.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
|
|
||||||
dependencies = [
|
|
||||||
"rand_chacha",
|
|
||||||
"rand_core",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_chacha"
|
|
||||||
version = "0.9.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
|
|
||||||
dependencies = [
|
|
||||||
"ppv-lite86",
|
|
||||||
"rand_core",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_core"
|
|
||||||
version = "0.9.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
|
|
||||||
dependencies = [
|
|
||||||
"getrandom",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_xorshift"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a"
|
|
||||||
dependencies = [
|
|
||||||
"rand_core",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "regex-syntax"
|
|
||||||
version = "0.8.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rustix"
|
|
||||||
version = "1.1.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
|
|
||||||
dependencies = [
|
|
||||||
"bitflags",
|
|
||||||
"errno",
|
|
||||||
"libc",
|
|
||||||
"linux-raw-sys",
|
|
||||||
"windows-sys",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rusty-fork"
|
|
||||||
version = "0.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
|
|
||||||
dependencies = [
|
|
||||||
"fnv",
|
|
||||||
"quick-error",
|
|
||||||
"tempfile",
|
|
||||||
"wait-timeout",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "serde"
|
|
||||||
version = "1.0.227"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "80ece43fc6fbed4eb5392ab50c07334d3e577cbf40997ee896fe7af40bba4245"
|
|
||||||
dependencies = [
|
|
||||||
"serde_core",
|
|
||||||
"serde_derive",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "serde_core"
|
|
||||||
version = "1.0.227"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7a576275b607a2c86ea29e410193df32bc680303c82f31e275bbfcafe8b33be5"
|
|
||||||
dependencies = [
|
|
||||||
"serde_derive",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "serde_derive"
|
|
||||||
version = "1.0.227"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "51e694923b8824cf0e9b382adf0f60d4e05f348f357b38833a3fa5ed7c2ede04"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "syn"
|
|
||||||
version = "2.0.106"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"unicode-ident",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "tempfile"
|
|
||||||
version = "3.23.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
|
|
||||||
dependencies = [
|
|
||||||
"fastrand",
|
|
||||||
"getrandom",
|
|
||||||
"once_cell",
|
|
||||||
"rustix",
|
|
||||||
"windows-sys",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "termcolor"
|
|
||||||
version = "1.4.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
|
|
||||||
dependencies = [
|
|
||||||
"winapi-util",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "thiserror"
|
|
||||||
version = "2.0.16"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0"
|
|
||||||
dependencies = [
|
|
||||||
"thiserror-impl",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "thiserror-impl"
|
|
||||||
version = "2.0.16"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unarray"
|
|
||||||
version = "0.1.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unicode-ident"
|
|
||||||
version = "1.0.19"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unicode-width"
|
|
||||||
version = "0.2.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wait-timeout"
|
|
||||||
version = "0.2.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasi"
|
|
||||||
version = "0.14.7+wasi-0.2.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
|
|
||||||
dependencies = [
|
|
||||||
"wasip2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasip2"
|
|
||||||
version = "1.0.1+wasi-0.2.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
|
|
||||||
dependencies = [
|
|
||||||
"wit-bindgen",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-util"
|
|
||||||
version = "0.1.11"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
|
|
||||||
dependencies = [
|
|
||||||
"windows-sys",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-link"
|
|
||||||
version = "0.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-sys"
|
|
||||||
version = "0.61.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6f109e41dd4a3c848907eb83d5a42ea98b3769495597450cf6d153507b166f0f"
|
|
||||||
dependencies = [
|
|
||||||
"windows-link",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wit-bindgen"
|
|
||||||
version = "0.46.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "zerocopy"
|
|
||||||
version = "0.8.27"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
|
|
||||||
dependencies = [
|
|
||||||
"zerocopy-derive",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "zerocopy-derive"
|
|
||||||
version = "0.8.27"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
11
Cargo.toml
11
Cargo.toml
@@ -1,11 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "bang"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2024"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
codespan = "0.12.0"
|
|
||||||
codespan-reporting = "0.12.0"
|
|
||||||
proptest = "1.7.0"
|
|
||||||
proptest-derive = "0.6.0"
|
|
||||||
thiserror = "2.0.12"
|
|
||||||
30
LICENSE
Normal file
30
LICENSE
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
Copyright (c) 2016, Adam Wick
|
||||||
|
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
* Redistributions in binary form must reproduce the above
|
||||||
|
copyright notice, this list of conditions and the following
|
||||||
|
disclaimer in the documentation and/or other materials provided
|
||||||
|
with the distribution.
|
||||||
|
|
||||||
|
* Neither the name of Adam Wick nor the names of other
|
||||||
|
contributors may be used to endorse or promote products derived
|
||||||
|
from this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
50
bang.cabal
Normal file
50
bang.cabal
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
name: bang
|
||||||
|
version: 0.1.0.0
|
||||||
|
synopsis: A fun little language to explore building a compiler. Again.
|
||||||
|
homepage: http://github.com/acw/bang
|
||||||
|
license: BSD3
|
||||||
|
license-file: LICENSE
|
||||||
|
author: Adam Wick <awick@uhsure.com>
|
||||||
|
maintainer: Adam Wick <awick@uhsure.com>
|
||||||
|
|
||||||
|
category: Development
|
||||||
|
build-type: Simple
|
||||||
|
cabal-version: >= 1.10
|
||||||
|
|
||||||
|
|
||||||
|
executable bang
|
||||||
|
main-is: Main.hs
|
||||||
|
build-depends:
|
||||||
|
array >= 0.5.1.1 && < 0.9,
|
||||||
|
base >= 4.7 && < 5.0,
|
||||||
|
bytestring >= 0.10.6 && < 0.13,
|
||||||
|
containers >= 0.5.4 && < 0.8,
|
||||||
|
GraphSCC >= 1.0.4 && < 1.4,
|
||||||
|
lens >= 4.14 && < 4.16,
|
||||||
|
llvm-pretty >= 0.4.0.1 && < 0.8,
|
||||||
|
optparse-applicative >= 0.12.1.0 && < 0.15,
|
||||||
|
pretty >= 1.1.3.3 && < 1.4,
|
||||||
|
text >= 1.2.2.1 && < 1.5
|
||||||
|
hs-source-dirs: src
|
||||||
|
build-tools: alex, happy
|
||||||
|
ghc-options: -Wall
|
||||||
|
default-language: Haskell2010
|
||||||
|
other-extensions: CPP,
|
||||||
|
DeriveDataTypeable,
|
||||||
|
DeriveFunctor,
|
||||||
|
FlexibleInstances,
|
||||||
|
GeneralizedNewtypeDeriving,
|
||||||
|
MagicHash,
|
||||||
|
MultiParamTypeClasses,
|
||||||
|
OverloadedStrings,
|
||||||
|
TemplateHaskell,
|
||||||
|
UndecidableInstances
|
||||||
|
other-modules:
|
||||||
|
Bang.CommandLine,
|
||||||
|
Bang.Syntax.Lexer,
|
||||||
|
Bang.Syntax.Location,
|
||||||
|
Bang.Syntax.Name,
|
||||||
|
Bang.Syntax.Parser,
|
||||||
|
Bang.Syntax.Token,
|
||||||
|
Paths_bang
|
||||||
|
|
||||||
55
hsrc/Main.hs
55
hsrc/Main.hs
@@ -1,55 +0,0 @@
|
|||||||
import Control.Exception
|
|
||||||
import Control.Monad
|
|
||||||
import qualified Data.ByteString as S
|
|
||||||
import System.Environment
|
|
||||||
import System.Exit
|
|
||||||
import System.IO.Error
|
|
||||||
|
|
||||||
import Syntax.AST
|
|
||||||
import Syntax.Lexer
|
|
||||||
import Syntax.Parser
|
|
||||||
import Syntax.ParserCore
|
|
||||||
|
|
||||||
main :: IO ()
|
|
||||||
main = do
|
|
||||||
args <- getArgs
|
|
||||||
case args of
|
|
||||||
[file] -> do
|
|
||||||
ast <- loadModule file
|
|
||||||
putStrLn "Successful parse!"
|
|
||||||
putStrLn (show ast)
|
|
||||||
["-lex",path] -> do
|
|
||||||
mtxt <- tryJust (guard . isDoesNotExistError) $ S.readFile path
|
|
||||||
case mtxt of
|
|
||||||
Left _ -> fail $ "Unable to open file: " ++ path
|
|
||||||
Right txt -> do
|
|
||||||
case runParser path txt pullTokens of
|
|
||||||
Left err -> printError err >> exitWith (ExitFailure 1)
|
|
||||||
Right ress -> do
|
|
||||||
mapM_ putStrLn ress
|
|
||||||
putStrLn "Successful lex."
|
|
||||||
["-parse",path] -> do
|
|
||||||
ast <- loadModule path
|
|
||||||
putStrLn "Successful parse!"
|
|
||||||
putStrLn (show ast)
|
|
||||||
_ -> fail "Unacceptable arguments."
|
|
||||||
|
|
||||||
pullTokens :: Parser [String]
|
|
||||||
pullTokens = do
|
|
||||||
tok <- scan
|
|
||||||
case tok of
|
|
||||||
Lexeme pos tok' -> do
|
|
||||||
let res = show pos ++ " " ++ show tok'
|
|
||||||
if tok' == TokEOF
|
|
||||||
then return [res]
|
|
||||||
else return (res :) `ap` pullTokens
|
|
||||||
|
|
||||||
loadModule :: FilePath -> IO (Module Position)
|
|
||||||
loadModule path = do
|
|
||||||
mtxt <- tryJust (guard . isDoesNotExistError) $ S.readFile path
|
|
||||||
case mtxt of
|
|
||||||
Left _ -> fail $ "Unable to open file: " ++ path
|
|
||||||
Right txt ->
|
|
||||||
case runParser path txt parseModule of
|
|
||||||
Left err -> printError err >> exitWith (ExitFailure 1)
|
|
||||||
Right ast -> return ast
|
|
||||||
@@ -1,109 +0,0 @@
|
|||||||
module Syntax.AST where
|
|
||||||
|
|
||||||
import Syntax.ParserCore
|
|
||||||
|
|
||||||
data Show a => Module a = Module {
|
|
||||||
modName :: QualifiedName
|
|
||||||
, modImports :: [Import]
|
|
||||||
, modDecls :: [Decl a]
|
|
||||||
}
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data QualifiedName = QualifiedName {
|
|
||||||
qnPrefixes :: [String]
|
|
||||||
, qnName :: String
|
|
||||||
}
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
gensym :: Parser QualifiedName
|
|
||||||
gensym = do
|
|
||||||
name <- genstr
|
|
||||||
return (QualifiedName [] name)
|
|
||||||
|
|
||||||
data Import = Import {
|
|
||||||
imName :: QualifiedName
|
|
||||||
, imQualified :: Bool
|
|
||||||
, imList :: Maybe [ImportName]
|
|
||||||
, imAs :: Maybe QualifiedName
|
|
||||||
}
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data ImportName = ImportNamed QualifiedName
|
|
||||||
| ImportRenamed QualifiedName QualifiedName
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data Show a => Decl a =
|
|
||||||
DeclData a [Type] QualifiedName [QualifiedName] [DataClause a]
|
|
||||||
| DeclType a [Type]
|
|
||||||
| DeclNewtype a [Type]
|
|
||||||
| DeclClass a [Type] QualifiedName [QualifiedName] [ClassClause a]
|
|
||||||
| DeclInstance a [Type]
|
|
||||||
| DeclValue a [Type] Type QualifiedName (Expr a)
|
|
||||||
| DeclExport a (Decl a)
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
addTypeRestrictions :: Show a => [Type] -> Decl a -> Decl a
|
|
||||||
addTypeRestrictions rs (DeclData s _ a b c) = DeclData s rs a b c
|
|
||||||
addTypeRestrictions rs (DeclType s _) = DeclType s rs
|
|
||||||
addTypeRestrictions rs (DeclNewtype s _) = DeclNewtype s rs
|
|
||||||
addTypeRestrictions rs (DeclClass s _ a b c) = DeclClass s rs a b c
|
|
||||||
addTypeRestrictions rs (DeclInstance s _) = DeclInstance s rs
|
|
||||||
addTypeRestrictions rs (DeclValue s _ n a b) = DeclValue s rs n a b
|
|
||||||
addTypeRestrictions rs (DeclExport s d) =
|
|
||||||
DeclExport s (addTypeRestrictions rs d)
|
|
||||||
|
|
||||||
data DataClause a = DataClause a QualifiedName [Maybe QualifiedName] [Type]
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data ClassClause a = ClassClause a QualifiedName Type (Maybe (Expr a))
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data Show a => Expr a =
|
|
||||||
Const a ConstVal
|
|
||||||
| VarRef a QualifiedName
|
|
||||||
| Cond a (Expr a) (Expr a) (Expr a)
|
|
||||||
| App a (Expr a) [Expr a]
|
|
||||||
| Block a [Stmt a]
|
|
||||||
| Lambda a [QualifiedName] (Expr a)
|
|
||||||
| Let a Type QualifiedName (Expr a) (Expr a)
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
getSpecial :: Show a => Expr a -> a
|
|
||||||
getSpecial (Const a _) = a
|
|
||||||
getSpecial (VarRef a _) = a
|
|
||||||
getSpecial (Cond a _ _ _) = a
|
|
||||||
getSpecial (App a _ _) = a
|
|
||||||
getSpecial (Block a _) = a
|
|
||||||
getSpecial (Lambda a _ _) = a
|
|
||||||
getSpecial (Let a _ _ _ _) = a
|
|
||||||
|
|
||||||
data Show a => Stmt a =
|
|
||||||
SExpr a (Expr a)
|
|
||||||
| SBind a QualifiedName (Stmt a)
|
|
||||||
| SLet a Type QualifiedName (Expr a)
|
|
||||||
| SCase a (Expr a) [(Pattern,Maybe (Expr a),Stmt a)]
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data Pattern =
|
|
||||||
ListNull
|
|
||||||
| PConst ConstVal
|
|
||||||
| PVar QualifiedName
|
|
||||||
| PNamed QualifiedName Pattern
|
|
||||||
| PAp Pattern Pattern
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data Kind = Star | KFun Kind Kind
|
|
||||||
deriving (Eq,Show)
|
|
||||||
|
|
||||||
data Type = TVar QualifiedName Kind
|
|
||||||
| TCon QualifiedName Kind
|
|
||||||
| TAp Type Type
|
|
||||||
| TGen Int
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data ConstVal = ConstInteger Int String
|
|
||||||
| ConstFloat String
|
|
||||||
| ConstChar String
|
|
||||||
| ConstString String
|
|
||||||
| ConstEmpty
|
|
||||||
deriving (Show)
|
|
||||||
@@ -1,129 +0,0 @@
|
|||||||
{
|
|
||||||
{-# LANGUAGE DeriveDataTypeable #-}
|
|
||||||
{-# OPTIONS -w #-}
|
|
||||||
module Syntax.Lexer where
|
|
||||||
|
|
||||||
import qualified Codec.Binary.UTF8.Generic as UTF8
|
|
||||||
import qualified Data.ByteString as S
|
|
||||||
import MonadLib
|
|
||||||
|
|
||||||
import Syntax.ParserCore
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
-- Digits
|
|
||||||
$decdigit = 0-9
|
|
||||||
$hexdigit = [0-9a-fA-f]
|
|
||||||
$octdigit = 0-7
|
|
||||||
$bindigit = [01]
|
|
||||||
|
|
||||||
-- Identifier Characters
|
|
||||||
$typestart = [A-Z\_]
|
|
||||||
$valstart = [a-z\_]
|
|
||||||
$identrest = [a-zA-Z0-9\_\.]
|
|
||||||
$opident = [\~\!\@\#\$\%\^\&\*\+\-\=\.\<\>\?\_\|]
|
|
||||||
$escape_char = [abfnrtv'\"\\]
|
|
||||||
|
|
||||||
:-
|
|
||||||
|
|
||||||
-- Whitespace
|
|
||||||
$white+ ;
|
|
||||||
"/*".*"*/" ;
|
|
||||||
|
|
||||||
-- Numbers
|
|
||||||
$decdigit+ { emitS (buildInt 10) }
|
|
||||||
"0x"$hexdigit+ { emitS (buildInt 16) }
|
|
||||||
"0o"$octdigit+ { emitS (buildInt 8) }
|
|
||||||
"0b"$bindigit+ { emitS (buildInt 2) }
|
|
||||||
$decdigit+"."$decdigit+ ("e""-"?$decdigit+)? { emitS TokFloat}
|
|
||||||
$decdigit+"e""-"?$decdigit+ { emitS TokFloat}
|
|
||||||
|
|
||||||
-- Identifier
|
|
||||||
$typestart $identrest* { emitS TokTypeIdent }
|
|
||||||
"prim%" $typestart $identrest* { emitS TokTypeIdent }
|
|
||||||
$valstart $identrest* { emitS TokValIdent }
|
|
||||||
"prim%" $valstart $identrest* { emitS TokValIdent }
|
|
||||||
$opident+ { emitS TokOpIdent }
|
|
||||||
":"+ { emitS TokOpIdent }
|
|
||||||
|
|
||||||
-- Characters and Strings
|
|
||||||
['].['] { emitS TokChar }
|
|
||||||
['] [\\] $escape_char ['] { emitS TokChar }
|
|
||||||
[\"] ([^\"] | [\n] | ([\\] $escape_char))* [\"] { emitS TokString }
|
|
||||||
|
|
||||||
-- Symbols
|
|
||||||
"(" { emitT LParen }
|
|
||||||
")" { emitT RParen }
|
|
||||||
"[" { emitT LSquare }
|
|
||||||
"]" { emitT RSquare }
|
|
||||||
"{" { emitT LBrace }
|
|
||||||
"}" { emitT RBrace }
|
|
||||||
";" { emitT Semi }
|
|
||||||
"," { emitT Comma }
|
|
||||||
"`" { emitT BTick }
|
|
||||||
[\\] { emitT LLambda }
|
|
||||||
|
|
||||||
{
|
|
||||||
|
|
||||||
type AlexInput = (Position,Char,S.ByteString)
|
|
||||||
|
|
||||||
emitT :: Token -> AlexInput -> Int -> Parser Lexeme
|
|
||||||
emitT tok (pos,_,_) _ = return $! Lexeme pos tok
|
|
||||||
|
|
||||||
emitS :: (String -> Token) -> AlexInput -> Int -> Parser Lexeme
|
|
||||||
emitS mk (pos,c,bs) len = return $! Lexeme pos (mk input)
|
|
||||||
where input = UTF8.toString (S.take len bs)
|
|
||||||
|
|
||||||
scan :: Parser Lexeme
|
|
||||||
scan = do
|
|
||||||
inp@(pos,_,_) <- alexGetInput
|
|
||||||
sc <- alexGetStartCode
|
|
||||||
case alexScan inp sc of
|
|
||||||
AlexEOF -> return $! Lexeme pos TokEOF
|
|
||||||
AlexError inp' -> do
|
|
||||||
let posStr = pprtPosition pos
|
|
||||||
alexError $ posStr ++ ": Lexical error."
|
|
||||||
AlexSkip inp' len' -> alexSetInput inp' >> scan
|
|
||||||
AlexToken inp' len action -> do
|
|
||||||
alexSetInput inp'
|
|
||||||
action inp len
|
|
||||||
|
|
||||||
alexGetInput :: Parser AlexInput
|
|
||||||
alexGetInput = do
|
|
||||||
s <- get
|
|
||||||
return (psPos s, psChar s, psInput s)
|
|
||||||
|
|
||||||
alexSetInput :: AlexInput -> Parser ()
|
|
||||||
alexSetInput (pos,c,bs) = do
|
|
||||||
s <- get
|
|
||||||
set $! s {
|
|
||||||
psPos = pos
|
|
||||||
, psChar = c
|
|
||||||
, psInput = bs
|
|
||||||
}
|
|
||||||
|
|
||||||
alexGetChar :: AlexInput -> Maybe (Char,AlexInput)
|
|
||||||
alexGetChar (p,_,bs) = do
|
|
||||||
(c,bs') <- UTF8.uncons bs
|
|
||||||
return (c, (movePos p c, c, bs'))
|
|
||||||
|
|
||||||
alexInputPrevChar :: AlexInput -> Char
|
|
||||||
alexInputPrevChar (_,c,_) = c
|
|
||||||
|
|
||||||
alexError :: String -> Parser a
|
|
||||||
alexError = raiseL
|
|
||||||
|
|
||||||
alexGetStartCode :: Parser Int
|
|
||||||
alexGetStartCode = psLexCode `fmap` get
|
|
||||||
|
|
||||||
alexSetStartCode :: Int -> Parser ()
|
|
||||||
alexSetStartCode code = do
|
|
||||||
s <- get
|
|
||||||
set $! s { psLexCode = code }
|
|
||||||
|
|
||||||
begin code _ _ = alexSetStartCode code >> scan
|
|
||||||
|
|
||||||
buildInt :: Int -> String -> Token
|
|
||||||
buildInt base val = TokInt (base, val)
|
|
||||||
|
|
||||||
}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
CURDIR := $(TOPDIR)/hsrc/Syntax
|
|
||||||
|
|
||||||
SYNFILES := Lexer ParserCore AST Parser
|
|
||||||
|
|
||||||
SYNFILES_PREFIXED := $(addprefix $(CURDIR)/,$(SYNFILES))
|
|
||||||
OBJECTS += $(addsuffix .o,$(SYNFILES_PREFIXED))
|
|
||||||
HS_SOURCES += $(addsuffix .hs,$(SYNFILES_PREFIXED))
|
|
||||||
|
|
||||||
EXTRA_CLEAN += $(CURDIR)/Lexer.hs $(CURDIR)/Lexer.info \
|
|
||||||
$(CURDIR)/Parser.hs $(CURDIR)/Parser.info
|
|
||||||
|
|
||||||
.SECONDARY: $(CURDIR)/Lexer.hs $(CURDIR)/Parser.hs
|
|
||||||
|
|
||||||
$(CURDIR)/Parser.d: $(CURDIR)/Lexer.d
|
|
||||||
@@ -1,509 +0,0 @@
|
|||||||
{
|
|
||||||
{-# OPTIONS_GHC -w #-}
|
|
||||||
|
|
||||||
-- vim: filetype=haskell
|
|
||||||
|
|
||||||
module Syntax.Parser where
|
|
||||||
|
|
||||||
import Syntax.AST
|
|
||||||
import Syntax.Lexer
|
|
||||||
import Syntax.ParserCore
|
|
||||||
|
|
||||||
import MonadLib
|
|
||||||
import qualified Codec.Binary.UTF8.Generic as UTF8
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
%token
|
|
||||||
|
|
||||||
-- reserved words
|
|
||||||
'module' { Lexeme $$ (TokValIdent "module" ) }
|
|
||||||
'export' { Lexeme $$ (TokValIdent "export" ) }
|
|
||||||
'import' { Lexeme $$ (TokValIdent "import" ) }
|
|
||||||
'datatype' { Lexeme $$ (TokValIdent "datatype") }
|
|
||||||
'type' { Lexeme $$ (TokValIdent "type" ) }
|
|
||||||
'newtype' { Lexeme $$ (TokValIdent "newtype" ) }
|
|
||||||
'class' { Lexeme $$ (TokValIdent "class" ) }
|
|
||||||
'instance' { Lexeme $$ (TokValIdent "instance") }
|
|
||||||
'qualified' { Lexeme $$ (TokValIdent "qualified") }
|
|
||||||
'as' { Lexeme $$ (TokValIdent "as") }
|
|
||||||
'let' { Lexeme $$ (TokValIdent "let") }
|
|
||||||
'in' { Lexeme $$ (TokValIdent "in") }
|
|
||||||
'case' { Lexeme $$ (TokValIdent "case") }
|
|
||||||
'of' { Lexeme $$ (TokValIdent "of") }
|
|
||||||
'restrict' { Lexeme $$ (TokValIdent "restrict") }
|
|
||||||
|
|
||||||
-- symbols
|
|
||||||
'=' { Lexeme $$ (TokOpIdent "=") }
|
|
||||||
'|' { Lexeme $$ (TokOpIdent "|") }
|
|
||||||
'->' { Lexeme $$ (TokOpIdent "->") }
|
|
||||||
'@' { Lexeme $$ (TokOpIdent "@") }
|
|
||||||
'::' { Lexeme $$ (TokOpIdent "::") }
|
|
||||||
'\\' { Lexeme $$ LLambda }
|
|
||||||
'(' { Lexeme $$ LParen }
|
|
||||||
')' { Lexeme $$ RParen }
|
|
||||||
'[' { Lexeme $$ LSquare }
|
|
||||||
']' { Lexeme $$ RSquare }
|
|
||||||
'{' { Lexeme $$ LBrace }
|
|
||||||
'}' { Lexeme $$ RBrace }
|
|
||||||
';' { Lexeme $$ Semi }
|
|
||||||
',' { Lexeme $$ Comma }
|
|
||||||
'`' { Lexeme $$ BTick }
|
|
||||||
|
|
||||||
-- identifiers
|
|
||||||
TYPE_IDENT { Lexeme _ (TokTypeIdent _) }
|
|
||||||
VAL_IDENT { Lexeme _ (TokValIdent _) }
|
|
||||||
OP_IDENT { Lexeme _ (TokOpIdent _) }
|
|
||||||
|
|
||||||
-- values
|
|
||||||
INTVAL { Lexeme _ (TokInt _) }
|
|
||||||
FLOATVAL { Lexeme _ (TokFloat _) }
|
|
||||||
CHARVAL { Lexeme _ (TokChar _) }
|
|
||||||
STRVAL { Lexeme _ (TokString _) }
|
|
||||||
|
|
||||||
%monad { Parser } { (>>=) } { return }
|
|
||||||
%name parseModule top_module
|
|
||||||
%tokentype { Lexeme }
|
|
||||||
|
|
||||||
%lexer { lexer } { Lexeme initPosition TokEOF }
|
|
||||||
|
|
||||||
%%
|
|
||||||
|
|
||||||
top_module :: { Module Position } : 'module' TYPE_IDENT module_decls {
|
|
||||||
let (imports,items) = $3
|
|
||||||
in Module (makeQualified $2) imports items
|
|
||||||
}
|
|
||||||
|
|
||||||
module_decls :: { ([Import], [Decl Position]) }
|
|
||||||
: module_decls module_decl { $1 `pappend` $2 }
|
|
||||||
| module_decl { $1 }
|
|
||||||
|
|
||||||
module_decl :: { ([Import], [Decl Position]) }
|
|
||||||
: import_decl ';' { ([$1], []) }
|
|
||||||
| decl ';' { ([], [$1]) }
|
|
||||||
|
|
||||||
-- Import Declarations ------------------------------------------------------
|
|
||||||
|
|
||||||
import_decl :: { Import }
|
|
||||||
: 'import' mqualified TYPE_IDENT mimport_list mas
|
|
||||||
{ Import (makeQualified $3) $2 $4 $5 }
|
|
||||||
|
|
||||||
mqualified :: { Bool }
|
|
||||||
: { False }
|
|
||||||
| 'qualified' { True }
|
|
||||||
|
|
||||||
mimport_list :: { Maybe [ImportName] }
|
|
||||||
: { Nothing }
|
|
||||||
| '(' ')' { Just [] }
|
|
||||||
| '(' import_list ')' { Just $2 }
|
|
||||||
|
|
||||||
mas :: { Maybe QualifiedName }
|
|
||||||
: { Nothing }
|
|
||||||
| 'as' TYPE_IDENT { Just (makeQualified $2) }
|
|
||||||
|
|
||||||
import_list :: { [ImportName] }
|
|
||||||
: import_name { [$1] }
|
|
||||||
| import_list ',' import_name { $1 ++ [$3] }
|
|
||||||
|
|
||||||
import_name :: { ImportName }
|
|
||||||
: either_ident { ImportNamed $1 }
|
|
||||||
| either_ident 'as' either_ident { ImportRenamed $1 $3 }
|
|
||||||
|
|
||||||
either_ident :: { QualifiedName }
|
|
||||||
: TYPE_IDENT { makeQualified $1 }
|
|
||||||
| VAL_IDENT { makeQualified $1 }
|
|
||||||
|
|
||||||
-- Actual Declarations ------------------------------------------------------
|
|
||||||
|
|
||||||
-- A declaration starts with an optional export flag and an optional type
|
|
||||||
-- restriction flag, and then has the declaration. We apply the restrictions /
|
|
||||||
-- exports post-hoc because we're lazy.
|
|
||||||
decl :: { Decl Position }
|
|
||||||
: optional_decl_flags decl2 { $1 $2 }
|
|
||||||
|
|
||||||
optional_decl_flags :: { Decl Position -> Decl Position }
|
|
||||||
: { id }
|
|
||||||
| opt_export { $1 }
|
|
||||||
| opt_restrict { $1 }
|
|
||||||
| opt_export opt_restrict { $1 . $2 }
|
|
||||||
| opt_restrict opt_export { $1 . $2 }
|
|
||||||
|
|
||||||
opt_export :: { Decl Position -> Decl Position }
|
|
||||||
: 'export' { DeclExport $1 }
|
|
||||||
|
|
||||||
opt_restrict :: { Decl Position -> Decl Position }
|
|
||||||
: 'restrict' '(' type_restrictions ')' { addTypeRestrictions $3 }
|
|
||||||
|
|
||||||
type_restrictions :: { [Type] }
|
|
||||||
: type_restriction { [$1] }
|
|
||||||
| type_restrictions ',' type_restriction { $1 ++ [$3] }
|
|
||||||
|
|
||||||
type_restriction :: { Type }
|
|
||||||
: TYPE_IDENT VAL_IDENT
|
|
||||||
{ TAp (TVar (makeQualified $1) Star) (TVar (makeQualified $2) Star) }
|
|
||||||
| type_restriction VAL_IDENT
|
|
||||||
{ TAp $1 (TVar (makeQualified $2) Star) }
|
|
||||||
|
|
||||||
decl2 :: { Decl Position }
|
|
||||||
: data_decl { $1 }
|
|
||||||
| type_decl { $1 }
|
|
||||||
| newtype_decl { $1 }
|
|
||||||
| class_decl { $1 }
|
|
||||||
| instance_decl { $1 }
|
|
||||||
| value_decl { $1 }
|
|
||||||
|
|
||||||
-- Data Declarations --------------------------------------------------------
|
|
||||||
|
|
||||||
data_decl :: { Decl Position }
|
|
||||||
: 'datatype' TYPE_IDENT type_args '=' data_clauses
|
|
||||||
{ DeclData $1 [] (makeQualified $2) $3 $5 }
|
|
||||||
|
|
||||||
type_args :: { [QualifiedName] }
|
|
||||||
: { [] }
|
|
||||||
| type_args VAL_IDENT { $1 ++ [makeQualified $2] }
|
|
||||||
|
|
||||||
data_clauses :: { [DataClause Position] }
|
|
||||||
: data_clause { [] }
|
|
||||||
| data_clauses '|' data_clause { $1 ++ [$3] }
|
|
||||||
|
|
||||||
data_clause :: { DataClause Position }
|
|
||||||
: constructor_name '(' ')'
|
|
||||||
{ DataClause $2 $1 [] [] }
|
|
||||||
| constructor_name '(' constructor_args ')'
|
|
||||||
{ DataClause $2 $1 (map fst $3) (map snd $3) }
|
|
||||||
|
|
||||||
constructor_name :: { QualifiedName }
|
|
||||||
: TYPE_IDENT { makeQualified $1 }
|
|
||||||
| '(' OP_IDENT ')' { makeQualified $2 }
|
|
||||||
|
|
||||||
constructor_args :: { [(Maybe QualifiedName,Type)] }
|
|
||||||
: constructor_arg { [$1] }
|
|
||||||
| constructor_args ',' constructor_arg { $1 ++ [$3] }
|
|
||||||
|
|
||||||
constructor_arg :: { (Maybe QualifiedName,Type) }
|
|
||||||
: bang_type { (Nothing, $1) }
|
|
||||||
| VAL_IDENT '::' bang_type { (Just (makeQualified $1), $3) }
|
|
||||||
|
|
||||||
-- Type Declarations --------------------------------------------------------
|
|
||||||
|
|
||||||
type_decl :: { Decl Position }
|
|
||||||
: 'type' { undefined }
|
|
||||||
|
|
||||||
-- Newtype Declarations -----------------------------------------------------
|
|
||||||
|
|
||||||
newtype_decl :: { Decl Position }
|
|
||||||
: 'newtype' { undefined }
|
|
||||||
|
|
||||||
-- Class Declarations -------------------------------------------------------
|
|
||||||
|
|
||||||
class_decl :: { Decl Position }
|
|
||||||
: 'class' type_ident class_args '{' class_items '}'
|
|
||||||
{ DeclClass $1 [] $2 $3 $5 }
|
|
||||||
|
|
||||||
class_args :: { [QualifiedName] }
|
|
||||||
: VAL_IDENT { [makeQualified $1] }
|
|
||||||
| class_args VAL_IDENT { $1 ++ [makeQualified $2] }
|
|
||||||
|
|
||||||
class_items :: { [ClassClause Position] }
|
|
||||||
: class_item { [$1] }
|
|
||||||
| class_items class_item { $1 ++ [$2] }
|
|
||||||
|
|
||||||
class_item :: { ClassClause Position }
|
|
||||||
: value_ident maybe_clargs cl_retarg maybe_body ';'
|
|
||||||
{% case ($2, $4) of
|
|
||||||
(Nothing, Nothing) -> return (ClassClause $5 $1 $3 Nothing)
|
|
||||||
(Just as, Nothing) ->
|
|
||||||
let types = map snd as
|
|
||||||
in return (ClassClause $5 $1 (buildFunType types $3) Nothing)
|
|
||||||
(Nothing, Just bd) -> return (ClassClause $5 $1 $3 (Just bd))
|
|
||||||
(Just as, Just bd) ->
|
|
||||||
let types = map snd as
|
|
||||||
names = sequence (map fst as)
|
|
||||||
in case names of
|
|
||||||
Nothing ->
|
|
||||||
raiseP "Can't have class implementation without argument names."
|
|
||||||
Just nms -> return (ClassClause $5 $1 (buildFunType types $3)
|
|
||||||
(Just $ Lambda $5 nms bd))
|
|
||||||
}
|
|
||||||
|
|
||||||
maybe_clargs :: { Maybe [(Maybe QualifiedName, Type)] }
|
|
||||||
: { Nothing }
|
|
||||||
| '(' clargs ')' { Just $2 }
|
|
||||||
|
|
||||||
clargs :: { [(Maybe QualifiedName, Type)] }
|
|
||||||
: class_arg { [$1] }
|
|
||||||
| clargs ',' class_arg { $1 ++ [$3] }
|
|
||||||
|
|
||||||
class_arg :: { (Maybe QualifiedName, Type) }
|
|
||||||
: value_ident '::' bang_type { (Just $1, $3) }
|
|
||||||
| bang_type { (Nothing, $1) }
|
|
||||||
|
|
||||||
cl_retarg :: { Type }
|
|
||||||
: '::' bang_type { $2 }
|
|
||||||
|
|
||||||
maybe_body :: { Maybe (Expr Position) }
|
|
||||||
: { Nothing }
|
|
||||||
| '=' expression { Just $2 }
|
|
||||||
| '{' statements '}' { Just (Block $1 $2) }
|
|
||||||
|
|
||||||
type_ident :: { QualifiedName }
|
|
||||||
: TYPE_IDENT { makeQualified $1 }
|
|
||||||
| '(' OP_IDENT ')' { makeQualified $2 }
|
|
||||||
|
|
||||||
-- Instance Declarations ----------------------------------------------------
|
|
||||||
|
|
||||||
instance_decl :: { Decl Position }
|
|
||||||
: 'instance' { undefined }
|
|
||||||
|
|
||||||
-- Value Declaration --------------------------------------------------------
|
|
||||||
|
|
||||||
value_decl :: { Decl Position }
|
|
||||||
: value_ident optional_args optional_type value_body
|
|
||||||
{% postProcessDeclVal DeclValue $1 $2 $3 $4 }
|
|
||||||
|
|
||||||
|
|
||||||
optional_args :: { Maybe [(QualifiedName, Maybe Type)] }
|
|
||||||
: '(' optional_args2 ')' { Just $2 }
|
|
||||||
| { Nothing }
|
|
||||||
|
|
||||||
optional_args2 :: { [(QualifiedName, Maybe Type)] }
|
|
||||||
: optional_arg { [$1] }
|
|
||||||
| optional_args2 ',' optional_arg { $1 ++ [$3] }
|
|
||||||
|
|
||||||
optional_arg :: { (QualifiedName, Maybe Type) }
|
|
||||||
: value_ident optional_type { ($1, $2) }
|
|
||||||
|
|
||||||
optional_type :: { Maybe Type }
|
|
||||||
: { Nothing }
|
|
||||||
| '::' bang_type { Just $2 }
|
|
||||||
|
|
||||||
value_ident :: { QualifiedName }
|
|
||||||
: VAL_IDENT { makeQualified $1 }
|
|
||||||
| '(' OP_IDENT ')' { makeQualified $2 }
|
|
||||||
| '(' '|' ')' { makeQualified (Lexeme $2 (TokOpIdent "|")) }
|
|
||||||
|
|
||||||
value_body :: { (Position, Expr Position) }
|
|
||||||
: '=' expression { ($1, $2) }
|
|
||||||
| '{' statements '}' { ($1, Block $1 $2) }
|
|
||||||
|
|
||||||
-- Types in Bang ------------------------------------------------------------
|
|
||||||
|
|
||||||
primary_type :: { Type }
|
|
||||||
: TYPE_IDENT { TVar (makeQualified $1) Star }
|
|
||||||
| VAL_IDENT { TVar (makeQualified $1) Star }
|
|
||||||
| '(' bang_type ')' { $2 }
|
|
||||||
|
|
||||||
type_application_type :: { Type }
|
|
||||||
: type_application_type primary_type
|
|
||||||
{ TAp $1 $2 }
|
|
||||||
| primary_type
|
|
||||||
{ $1 }
|
|
||||||
|
|
||||||
function_type :: { Type }
|
|
||||||
: function_type '->' type_application_type
|
|
||||||
{ TAp (TVar (QualifiedName ["--INTERNAL--"] "->") Star) $3 }
|
|
||||||
| type_application_type
|
|
||||||
{ $1 }
|
|
||||||
|
|
||||||
list_type :: { Type }
|
|
||||||
: '[' list_type ']'
|
|
||||||
{ TAp (TVar (QualifiedName ["Data","List"] "List") Star) $2 }
|
|
||||||
| function_type
|
|
||||||
{ $1 }
|
|
||||||
|
|
||||||
bang_type :: { Type }
|
|
||||||
: list_type { $1 }
|
|
||||||
|
|
||||||
-- Statements in bang
|
|
||||||
|
|
||||||
statements :: { [Stmt Position] }
|
|
||||||
: { [] }
|
|
||||||
| statements statement { $1 ++ [$2] }
|
|
||||||
|
|
||||||
statement :: { Stmt Position }
|
|
||||||
: assignment_statement ';' { $1 }
|
|
||||||
| case_statement { $1 }
|
|
||||||
| expression ';' { SExpr $2 $1 }
|
|
||||||
|
|
||||||
assignment_statement :: { Stmt Position }
|
|
||||||
: value_ident '=' expression -- FIXME: Too restrictive!
|
|
||||||
{ SBind $2 $1 (SExpr $2 $3) }
|
|
||||||
| 'let' value_ident optional_args optional_type value_body
|
|
||||||
{% postProcessDeclVal (\ s _ t n e -> SLet s t n e) $2 $3 $4 $5 }
|
|
||||||
|
|
||||||
case_statement :: { Stmt Position }
|
|
||||||
: 'case' expression '{' case_items '}'
|
|
||||||
{ SCase $1 $2 $4 }
|
|
||||||
|
|
||||||
case_items :: { [(Pattern,Maybe (Expr Position),(Stmt Position))] }
|
|
||||||
: case_item { [$1] }
|
|
||||||
| case_items case_item { $1 ++ [$2] }
|
|
||||||
|
|
||||||
case_item :: { (Pattern, Maybe (Expr Position), (Stmt Position)) }
|
|
||||||
: pattern mguard '->' statement { ($1, $2, $4) }
|
|
||||||
|
|
||||||
mguard :: { Maybe (Expr Position) }
|
|
||||||
: { Nothing }
|
|
||||||
| '|' expression { Just $2 }
|
|
||||||
|
|
||||||
-- Patterns for pattern matching
|
|
||||||
|
|
||||||
infix_operator :: { QualifiedName }
|
|
||||||
: OP_IDENT { makeQualified $1 }
|
|
||||||
| '`' VAL_IDENT '`' { makeQualified $2 }
|
|
||||||
|
|
||||||
pattern_primary :: { Pattern }
|
|
||||||
: TYPE_IDENT { PVar (makeQualified $1) }
|
|
||||||
| VAL_IDENT { PVar (makeQualified $1) }
|
|
||||||
| '[' ']' { PVar (QualifiedName ["Data","List"] "NULL") }
|
|
||||||
| INTVAL { let (Lexeme _ (TokInt (base, val))) = $1
|
|
||||||
in PConst (ConstInteger base val) }
|
|
||||||
| FLOATVAL { let (Lexeme _ (TokFloat val)) = $1
|
|
||||||
in PConst (ConstFloat val) }
|
|
||||||
| CHARVAL { let (Lexeme _ (TokChar val)) = $1
|
|
||||||
in PConst (ConstChar val) }
|
|
||||||
| STRVAL { let (Lexeme _ (TokString val)) = $1
|
|
||||||
in PConst (ConstString val) }
|
|
||||||
| '(' pattern ')' { $2 }
|
|
||||||
|
|
||||||
pattern_infix :: { Pattern }
|
|
||||||
: pattern_infix infix_operator pattern_primary { PAp (PAp $1 (PVar $2)) $3 }
|
|
||||||
| pattern_primary { $1 }
|
|
||||||
|
|
||||||
pattern_ap :: { Pattern }
|
|
||||||
: pattern_ap pattern_infix { PAp $1 $2 }
|
|
||||||
| pattern_infix { $1 }
|
|
||||||
|
|
||||||
pattern_name :: { Pattern }
|
|
||||||
: value_ident '@' pattern_name { PNamed $1 $3 }
|
|
||||||
| pattern_ap { $1 }
|
|
||||||
|
|
||||||
pattern :: { Pattern }
|
|
||||||
: pattern_name { $1 }
|
|
||||||
|
|
||||||
-- Expressions in bang
|
|
||||||
|
|
||||||
primary_expression :: { Expr Position }
|
|
||||||
: '(' expression ')' { $2 }
|
|
||||||
| '[' ']' { VarRef $1 (QualifiedName ["Data","List"] "NULL") }
|
|
||||||
| INTVAL { let (Lexeme src (TokInt (base, val))) = $1
|
|
||||||
in Const src (ConstInteger base val) }
|
|
||||||
| FLOATVAL { let (Lexeme src (TokFloat val)) = $1
|
|
||||||
in Const src (ConstFloat val) }
|
|
||||||
| CHARVAL { let (Lexeme src (TokChar val)) = $1
|
|
||||||
in Const src (ConstChar val) }
|
|
||||||
| STRVAL { let (Lexeme src (TokString val)) = $1
|
|
||||||
in Const src (ConstString val) }
|
|
||||||
| TYPE_IDENT { let l@(Lexeme src (TokTypeIdent name)) = $1
|
|
||||||
in VarRef src (makeQualified l) }
|
|
||||||
| VAL_IDENT { let l@(Lexeme src (TokValIdent name)) = $1
|
|
||||||
in VarRef src (makeQualified l) }
|
|
||||||
|
|
||||||
let_expression :: {Expr Position}
|
|
||||||
: 'let' value_ident optional_args optional_type value_body 'in' let_expression
|
|
||||||
{% postProcessDeclVal (\ s _ t n b -> Let s t n b $7) $2 $3 $4 $5 }
|
|
||||||
| primary_expression { $1 }
|
|
||||||
|
|
||||||
conditional_expression :: { Expr Position }
|
|
||||||
: let_expression { $1 }
|
|
||||||
|
|
||||||
infix_expression :: { Expr Position }
|
|
||||||
: infix_expression infix_operator conditional_expression
|
|
||||||
{ App (getSpecial $1) (VarRef (getSpecial $1) $2) [$1, $3] }
|
|
||||||
| conditional_expression
|
|
||||||
{ $1 }
|
|
||||||
|
|
||||||
lambda_expression :: { Expr Position }
|
|
||||||
: '\\' arguments '->' infix_expression
|
|
||||||
{ Lambda $1 $2 $4 }
|
|
||||||
| infix_expression
|
|
||||||
{ $1 }
|
|
||||||
|
|
||||||
arguments :: { [QualifiedName] }
|
|
||||||
: value_ident { [$1] }
|
|
||||||
| arguments ',' value_ident { $1 ++ [$3] }
|
|
||||||
|
|
||||||
application_expression :: { Expr Position }
|
|
||||||
: application_expression '(' app_args ')'
|
|
||||||
{ App $2 $1 $3 }
|
|
||||||
| application_expression '(' ')'
|
|
||||||
{ App $2 $1 [] }
|
|
||||||
| lambda_expression
|
|
||||||
{ $1 }
|
|
||||||
|
|
||||||
app_args :: { [Expr Position] }
|
|
||||||
: expression { [$1] }
|
|
||||||
| app_args ',' expression { $1 ++ [$3] }
|
|
||||||
|
|
||||||
block_expression :: { Expr Position }
|
|
||||||
: '{' statements '}' { Block $1 $2 }
|
|
||||||
| application_expression { $1 }
|
|
||||||
|
|
||||||
expression :: { Expr Position }
|
|
||||||
: block_expression { $1 }
|
|
||||||
|
|
||||||
{
|
|
||||||
lexer :: (Lexeme -> Parser a) -> Parser a
|
|
||||||
lexer k = scan >>= k
|
|
||||||
|
|
||||||
happyError :: Parser a
|
|
||||||
happyError = raiseP "Parse Error"
|
|
||||||
|
|
||||||
pappend :: ([a],[b]) -> ([a],[b]) -> ([a],[b])
|
|
||||||
pappend (a,b) (c,d) = (a++c,b++d)
|
|
||||||
|
|
||||||
makeQualified :: Lexeme -> QualifiedName
|
|
||||||
makeQualified (Lexeme _ (TokTypeIdent str)) = makeQualified' str
|
|
||||||
makeQualified (Lexeme _ (TokValIdent str)) = makeQualified' str
|
|
||||||
makeQualified (Lexeme _ (TokOpIdent str)) = makeQualified' str
|
|
||||||
makeQualified _ = error "makeQualified bad arg"
|
|
||||||
|
|
||||||
makeQualified' :: String -> QualifiedName
|
|
||||||
makeQualified' str = QualifiedName prefixes name
|
|
||||||
where
|
|
||||||
(prefixes,name) = loop str
|
|
||||||
loop val =
|
|
||||||
let (pre,rest) = span (/= '.') val
|
|
||||||
in if rest == ""
|
|
||||||
then ([], pre)
|
|
||||||
else let (pres, name) = loop (tail rest)
|
|
||||||
in (pre:pres, name)
|
|
||||||
|
|
||||||
postProcessDeclVal ::
|
|
||||||
(Position -> [Type] -> Type -> QualifiedName -> Expr Position -> a) ->
|
|
||||||
QualifiedName ->
|
|
||||||
Maybe [(QualifiedName, Maybe Type)] ->
|
|
||||||
Maybe Type ->
|
|
||||||
(Position, Expr Position) ->
|
|
||||||
Parser a
|
|
||||||
postProcessDeclVal builder name margs mrettype (src, body) = do
|
|
||||||
final_type <- case mrettype of
|
|
||||||
Nothing -> do
|
|
||||||
name <- gensym
|
|
||||||
return (TVar name Star)
|
|
||||||
Just x ->
|
|
||||||
return x
|
|
||||||
case margs of
|
|
||||||
Nothing ->
|
|
||||||
return (builder src [] final_type name body)
|
|
||||||
Just [] ->
|
|
||||||
fail "Need to figure out empty arg items."
|
|
||||||
Just args -> do
|
|
||||||
let anames = map fst args
|
|
||||||
atypes <- forM (map snd args) $ \ x ->
|
|
||||||
case x of
|
|
||||||
Nothing -> do
|
|
||||||
name <- gensym
|
|
||||||
return (TVar name Star)
|
|
||||||
Just x ->
|
|
||||||
return x
|
|
||||||
let ftype = buildFunType atypes final_type
|
|
||||||
return (builder src [] ftype name (Lambda src anames body))
|
|
||||||
|
|
||||||
buildFunType :: [Type] -> Type -> Type
|
|
||||||
buildFunType [] finaltype = finaltype
|
|
||||||
buildFunType (first:rest) finaltype =
|
|
||||||
TAp (TAp arrow first) (buildFunType rest finaltype)
|
|
||||||
where arrow = (TVar (makeQualified' "Data.Function") Star)
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
{-# LANGUAGE MultiParamTypeClasses #-}
|
|
||||||
{-# LANGUAGE DeriveFunctor #-}
|
|
||||||
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
|
|
||||||
module Syntax.ParserCore where
|
|
||||||
|
|
||||||
import Control.Applicative(Applicative)
|
|
||||||
import qualified Data.ByteString as S
|
|
||||||
import MonadLib
|
|
||||||
import System.IO
|
|
||||||
|
|
||||||
-- --------------------------------------------------------------------------
|
|
||||||
-- Positions
|
|
||||||
--
|
|
||||||
|
|
||||||
data Position = Position {
|
|
||||||
posOff :: !Int
|
|
||||||
, posLine :: !Int
|
|
||||||
, posCol :: !Int
|
|
||||||
, posFile :: !FilePath
|
|
||||||
}
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
initPosition :: FilePath -> Position
|
|
||||||
initPosition = Position 0 1 1
|
|
||||||
|
|
||||||
movePos :: Position -> Char -> Position
|
|
||||||
movePos (Position o l c f) '\t' = Position (o+1) l (c+8) f
|
|
||||||
movePos (Position o l _ f) '\n' = Position (o+1) (l+1) 0 f
|
|
||||||
movePos (Position o l c f) _ = Position (o+1) l (c+1) f
|
|
||||||
|
|
||||||
pprtPosition :: Position -> String
|
|
||||||
pprtPosition p = posFile p ++ ":" ++ show (posLine p) ++ ":" ++ show (posCol p)
|
|
||||||
|
|
||||||
-- --------------------------------------------------------------------------
|
|
||||||
-- Tokens
|
|
||||||
--
|
|
||||||
|
|
||||||
data Token = LParen | RParen
|
|
||||||
| LSquare | RSquare
|
|
||||||
| LBrace | RBrace
|
|
||||||
| Bar | Semi | Comma | BTick | LLambda
|
|
||||||
| TokTypeIdent String
|
|
||||||
| TokValIdent String
|
|
||||||
| TokOpIdent String
|
|
||||||
| TokInt (Int,String)
|
|
||||||
| TokFloat String
|
|
||||||
| TokChar String
|
|
||||||
| TokString String
|
|
||||||
| TokEOF
|
|
||||||
deriving (Eq, Show)
|
|
||||||
|
|
||||||
-- --------------------------------------------------------------------------
|
|
||||||
-- Lexemes
|
|
||||||
--
|
|
||||||
|
|
||||||
data Lexeme = Lexeme {
|
|
||||||
lexPos :: !Position
|
|
||||||
, lexTok :: Token
|
|
||||||
}
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
instance Eq Lexeme where
|
|
||||||
a == b = lexTok a == lexTok b
|
|
||||||
|
|
||||||
-- --------------------------------------------------------------------------
|
|
||||||
-- Errors
|
|
||||||
--
|
|
||||||
|
|
||||||
data ErrorType =
|
|
||||||
LexerError
|
|
||||||
| ParserError
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
data Error = Error ErrorType String Position
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
printError :: Error -> IO ()
|
|
||||||
printError (Error etype str pos) = hPutStrLn stderr errstr
|
|
||||||
where
|
|
||||||
errstr = pprtPosition pos ++ ":" ++ etypeStr ++ ": " ++ str
|
|
||||||
etypeStr = case etype of
|
|
||||||
LexerError -> "LEX"
|
|
||||||
ParserError -> "PARSE"
|
|
||||||
|
|
||||||
-- --------------------------------------------------------------------------
|
|
||||||
-- ParserState
|
|
||||||
--
|
|
||||||
|
|
||||||
data ParserState = ParserState {
|
|
||||||
psInput :: !S.ByteString
|
|
||||||
, psChar :: !Char
|
|
||||||
, psPos :: !Position
|
|
||||||
, psLexCode :: !Int
|
|
||||||
, psGenNum :: !Int
|
|
||||||
}
|
|
||||||
deriving (Show)
|
|
||||||
|
|
||||||
initParserState :: FilePath -> S.ByteString -> ParserState
|
|
||||||
initParserState path bs = ParserState {
|
|
||||||
psInput = bs
|
|
||||||
, psChar = '\n'
|
|
||||||
, psPos = initPosition path
|
|
||||||
, psLexCode = 0
|
|
||||||
, psGenNum = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
-- --------------------------------------------------------------------------
|
|
||||||
-- Parser
|
|
||||||
--
|
|
||||||
|
|
||||||
newtype Parser a = Parser {
|
|
||||||
unParser :: StateT ParserState (ExceptionT Error Id) a
|
|
||||||
} deriving (Functor, Applicative, Monad)
|
|
||||||
|
|
||||||
instance StateM Parser ParserState where
|
|
||||||
get = Parser get
|
|
||||||
set = Parser . set
|
|
||||||
|
|
||||||
instance ExceptionM Parser Error where
|
|
||||||
raise = Parser . raise
|
|
||||||
|
|
||||||
instance RunExceptionM Parser Error where
|
|
||||||
try m = Parser (try (unParser m))
|
|
||||||
|
|
||||||
-- |Raise a lexer error
|
|
||||||
raiseL :: String -> Parser a
|
|
||||||
raiseL msg = do
|
|
||||||
st <- get
|
|
||||||
raise (Error LexerError msg (psPos st))
|
|
||||||
|
|
||||||
-- |Raise a parser error
|
|
||||||
raiseP :: String -> Parser a
|
|
||||||
raiseP msg = do
|
|
||||||
st <- get
|
|
||||||
raise (Error ParserError msg (psPos st))
|
|
||||||
|
|
||||||
-- |Run the parser over the given file
|
|
||||||
runParser :: FilePath -> S.ByteString -> Parser a -> Either Error a
|
|
||||||
runParser path bs (Parser m) =
|
|
||||||
case runM m (initParserState path bs) of
|
|
||||||
Right (a,_) -> Right a
|
|
||||||
Left err -> Left err
|
|
||||||
|
|
||||||
genstr :: Parser String
|
|
||||||
genstr = do
|
|
||||||
st <- get
|
|
||||||
set st{ psGenNum = psGenNum st + 1 }
|
|
||||||
return $ "--gen" ++ show (psGenNum st)
|
|
||||||
|
|
||||||
43
src/Bang/AST.hs
Normal file
43
src/Bang/AST.hs
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.AST
|
||||||
|
( Module
|
||||||
|
, ppModule
|
||||||
|
, mkModule
|
||||||
|
, moduleName, moduleDeclarations
|
||||||
|
, module Bang.AST.Declaration
|
||||||
|
, module Bang.AST.Expression
|
||||||
|
, module Bang.AST.Name
|
||||||
|
, module Bang.AST.Type
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.AST.Declaration
|
||||||
|
import Bang.AST.Expression
|
||||||
|
import Bang.AST.Name
|
||||||
|
import Bang.AST.Type
|
||||||
|
import Control.Lens(view)
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Text.PrettyPrint.Annotated(Doc, empty, text, (<+>), ($+$))
|
||||||
|
|
||||||
|
data Module = Module {
|
||||||
|
_moduleName :: Name
|
||||||
|
, _moduleDeclarations :: [[Declaration]]
|
||||||
|
}
|
||||||
|
|
||||||
|
mkModule :: Name -> [[Declaration]] -> Module
|
||||||
|
mkModule = Module
|
||||||
|
|
||||||
|
makeLenses ''Module
|
||||||
|
|
||||||
|
ppModule :: Module -> Doc a
|
||||||
|
ppModule m = text "module" <+> ppName (view moduleName m) $+$
|
||||||
|
dump (view moduleName m) (view moduleDeclarations m)
|
||||||
|
where
|
||||||
|
dump _ [] = empty
|
||||||
|
dump prev ([]:rest) = dump prev rest
|
||||||
|
dump prev ((x:rest):lr)
|
||||||
|
| prev == view declName x =
|
||||||
|
ppDeclaration x $+$ dump prev (rest:lr)
|
||||||
|
| otherwise =
|
||||||
|
text "" $+$ dump (view declName x) ((x:rest):lr)
|
||||||
|
|
||||||
114
src/Bang/AST/Declaration.hs
Normal file
114
src/Bang/AST/Declaration.hs
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.AST.Declaration
|
||||||
|
( Declaration(..)
|
||||||
|
, ppDeclaration
|
||||||
|
, declName
|
||||||
|
-- * Type Declarations
|
||||||
|
, TypeDeclaration
|
||||||
|
, ppTypeDeclaration
|
||||||
|
, mkTypeDecl
|
||||||
|
, tdName, tdLocation, tdType
|
||||||
|
-- * Value Declarations
|
||||||
|
, ValueDeclaration
|
||||||
|
, ppValueDeclaration
|
||||||
|
, mkValueDecl
|
||||||
|
, vdName, vdLocation
|
||||||
|
, vdDeclaredType, vdValue
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.AST.Expression(Expression, ppExpression)
|
||||||
|
import Bang.AST.Name(Name, ppName)
|
||||||
|
import Bang.AST.Type(Type(TypePrim), ppType)
|
||||||
|
import Bang.Syntax.Location(Location)
|
||||||
|
import Bang.Utils.FreeVars(CanHaveFreeVars(..))
|
||||||
|
import Control.Lens(Lens', view, set, lens)
|
||||||
|
import Control.Lens(makeLenses)
|
||||||
|
import Data.Set(delete, union)
|
||||||
|
import Text.PrettyPrint.Annotated(Doc, text, (<+>), ($+$), (<>), empty, space)
|
||||||
|
|
||||||
|
data TypeDeclaration = TypeDeclaration
|
||||||
|
{ _tdName :: Name
|
||||||
|
, _tdLocation :: Location
|
||||||
|
, _tdType :: Type
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
class MkTypeDecl a where
|
||||||
|
mkTypeDecl :: Name -> Location -> Type -> a
|
||||||
|
|
||||||
|
ppTypeDeclaration :: TypeDeclaration -> Doc a
|
||||||
|
ppTypeDeclaration td = prefix <> text "type" <+> ppName (_tdName td) <+>
|
||||||
|
text "=" <+> ppType (_tdType td)
|
||||||
|
where
|
||||||
|
prefix | TypePrim _ <- _tdType td = text "primitive" <> space
|
||||||
|
| otherwise = empty
|
||||||
|
|
||||||
|
instance MkTypeDecl TypeDeclaration where
|
||||||
|
mkTypeDecl = TypeDeclaration
|
||||||
|
|
||||||
|
instance MkTypeDecl Declaration where
|
||||||
|
mkTypeDecl n l t = DeclType (TypeDeclaration n l t)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars TypeDeclaration where
|
||||||
|
freeVariables td = delete (_tdName td) (freeVariables (_tdType td))
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data ValueDeclaration = ValueDeclaration
|
||||||
|
{ _vdName :: Name
|
||||||
|
, _vdLocation :: Location
|
||||||
|
, _vdDeclaredType :: Maybe Type
|
||||||
|
, _vdValue :: Expression
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
class MkValueDecl a where
|
||||||
|
mkValueDecl :: Name -> Location -> Maybe Type -> Expression -> a
|
||||||
|
|
||||||
|
ppValueDeclaration :: ValueDeclaration -> Doc a
|
||||||
|
ppValueDeclaration vd = typedecl $+$ valuedecl
|
||||||
|
where
|
||||||
|
typedecl
|
||||||
|
| Just t <- _vdDeclaredType vd =
|
||||||
|
ppName (_vdName vd) <+> text "::" <+> ppType t
|
||||||
|
| otherwise = empty
|
||||||
|
valuedecl = ppName (_vdName vd) <+> text "=" <+> ppExpression (_vdValue vd)
|
||||||
|
|
||||||
|
instance MkValueDecl ValueDeclaration where
|
||||||
|
mkValueDecl n l mt e = ValueDeclaration n l mt e
|
||||||
|
|
||||||
|
instance MkValueDecl Declaration where
|
||||||
|
mkValueDecl n l mt e = DeclVal (ValueDeclaration n l mt e)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars ValueDeclaration where
|
||||||
|
freeVariables vd = delete (_vdName vd) (union valTypes typeTypes)
|
||||||
|
where
|
||||||
|
valTypes = freeVariables (_vdValue vd)
|
||||||
|
typeTypes = freeVariables (_vdDeclaredType vd)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data Declaration = DeclType TypeDeclaration
|
||||||
|
| DeclVal ValueDeclaration
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
ppDeclaration :: Declaration -> Doc a
|
||||||
|
ppDeclaration (DeclType d) = ppTypeDeclaration d
|
||||||
|
ppDeclaration (DeclVal d) = ppValueDeclaration d
|
||||||
|
|
||||||
|
instance CanHaveFreeVars Declaration where
|
||||||
|
freeVariables (DeclType td) = freeVariables td
|
||||||
|
freeVariables (DeclVal vd) = freeVariables vd
|
||||||
|
|
||||||
|
makeLenses ''TypeDeclaration
|
||||||
|
makeLenses ''ValueDeclaration
|
||||||
|
|
||||||
|
declName :: Lens' Declaration Name
|
||||||
|
declName = lens getter setter
|
||||||
|
where
|
||||||
|
getter (DeclType d) = view tdName d
|
||||||
|
getter (DeclVal d) = view vdName d
|
||||||
|
setter (DeclType d) x = DeclType (set tdName x d)
|
||||||
|
setter (DeclVal d) x = DeclVal (set vdName x d)
|
||||||
|
|
||||||
163
src/Bang/AST/Expression.hs
Normal file
163
src/Bang/AST/Expression.hs
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.AST.Expression
|
||||||
|
( Expression(..)
|
||||||
|
, ppExpression
|
||||||
|
-- * Constant Expressions
|
||||||
|
, ConstantExpression
|
||||||
|
, ppConstantExpression
|
||||||
|
, mkConstExp
|
||||||
|
, constLocation
|
||||||
|
, constValue
|
||||||
|
, ConstantValue(..)
|
||||||
|
, ppConstantValue
|
||||||
|
-- * References
|
||||||
|
, ReferenceExpression
|
||||||
|
, ppReferenceExpression
|
||||||
|
, mkRefExp
|
||||||
|
, refLocation
|
||||||
|
, refName
|
||||||
|
-- * Lambdas
|
||||||
|
, LambdaExpression
|
||||||
|
, ppLambdaExpression
|
||||||
|
, mkLambdaExp
|
||||||
|
, lambdaLocation
|
||||||
|
, lambdaArgumentNames
|
||||||
|
, lambdaBody
|
||||||
|
-- * Empty Expressions
|
||||||
|
, emptyExpression
|
||||||
|
, isEmptyExpression
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.Syntax.Location(Location, fakeLocation)
|
||||||
|
import Bang.AST.Name(Name, ppName, nothingName)
|
||||||
|
import Bang.Utils.FreeVars(CanHaveFreeVars(..))
|
||||||
|
import Bang.Utils.Pretty(text')
|
||||||
|
import Control.Lens(view)
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Data.Set(empty, singleton, fromList, (\\))
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
import Text.PrettyPrint.Annotated(Doc, text, hsep, (<>), (<+>))
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data ConstantValue = ConstantInt Word Text
|
||||||
|
| ConstantChar Text
|
||||||
|
| ConstantString Text
|
||||||
|
| ConstantFloat Text
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
ppConstantValue :: ConstantValue -> Doc a
|
||||||
|
ppConstantValue cv =
|
||||||
|
case cv of
|
||||||
|
ConstantInt 2 t -> text "0b" <> text' t
|
||||||
|
ConstantInt 8 t -> text "0o" <> text' t
|
||||||
|
ConstantInt 10 t -> text' t
|
||||||
|
ConstantInt 16 t -> text "0x" <> text' t
|
||||||
|
ConstantInt _ _ -> error "Internal error: bad base for constant"
|
||||||
|
ConstantChar c -> text' c
|
||||||
|
ConstantString s -> text' s
|
||||||
|
ConstantFloat f -> text' f
|
||||||
|
|
||||||
|
data ConstantExpression = ConstantExpression
|
||||||
|
{ _constLocation :: Location
|
||||||
|
, _constValue :: ConstantValue
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
class MkConstExp a where
|
||||||
|
mkConstExp :: Location -> ConstantValue -> a
|
||||||
|
|
||||||
|
instance MkConstExp ConstantExpression where
|
||||||
|
mkConstExp = ConstantExpression
|
||||||
|
|
||||||
|
instance MkConstExp Expression where
|
||||||
|
mkConstExp l v = ConstExp (mkConstExp l v)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars ConstantExpression where
|
||||||
|
freeVariables _ = empty
|
||||||
|
|
||||||
|
ppConstantExpression :: ConstantExpression -> Doc a
|
||||||
|
ppConstantExpression = ppConstantValue . _constValue
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data ReferenceExpression = ReferenceExpression
|
||||||
|
{ _refLocation :: Location
|
||||||
|
, _refName :: Name
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
ppReferenceExpression :: ReferenceExpression -> Doc a
|
||||||
|
ppReferenceExpression = ppName . _refName
|
||||||
|
|
||||||
|
class MkRefExp a where
|
||||||
|
mkRefExp :: Location -> Name -> a
|
||||||
|
|
||||||
|
instance MkRefExp ReferenceExpression where
|
||||||
|
mkRefExp = ReferenceExpression
|
||||||
|
|
||||||
|
instance MkRefExp Expression where
|
||||||
|
mkRefExp l n = RefExp (ReferenceExpression l n)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars ReferenceExpression where
|
||||||
|
freeVariables r = singleton (_refName r)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data LambdaExpression = LambdaExpression
|
||||||
|
{ _lambdaLocation :: Location
|
||||||
|
, _lambdaArgumentNames :: [Name]
|
||||||
|
, _lambdaBody :: Expression
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
class MkLambdaExp a where
|
||||||
|
mkLambdaExp :: Location -> [Name] -> Expression -> a
|
||||||
|
|
||||||
|
ppLambdaExpression :: LambdaExpression -> Doc a
|
||||||
|
ppLambdaExpression le =
|
||||||
|
text "λ" <+> hsep (map ppName (_lambdaArgumentNames le)) <+> text "->" <+>
|
||||||
|
ppExpression (_lambdaBody le)
|
||||||
|
|
||||||
|
instance MkLambdaExp LambdaExpression where
|
||||||
|
mkLambdaExp = LambdaExpression
|
||||||
|
|
||||||
|
instance MkLambdaExp Expression where
|
||||||
|
mkLambdaExp l a b = LambdaExp (LambdaExpression l a b)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars LambdaExpression where
|
||||||
|
freeVariables l = freeVariables (_lambdaBody l) \\
|
||||||
|
fromList (_lambdaArgumentNames l)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data Expression = ConstExp ConstantExpression
|
||||||
|
| RefExp ReferenceExpression
|
||||||
|
| LambdaExp LambdaExpression
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars Expression where
|
||||||
|
freeVariables (ConstExp e) = freeVariables e
|
||||||
|
freeVariables (RefExp e) = freeVariables e
|
||||||
|
freeVariables (LambdaExp e) = freeVariables e
|
||||||
|
|
||||||
|
ppExpression :: Expression -> Doc a
|
||||||
|
ppExpression (ConstExp e) = ppConstantExpression e
|
||||||
|
ppExpression (RefExp e) = ppReferenceExpression e
|
||||||
|
ppExpression (LambdaExp e) = ppLambdaExpression e
|
||||||
|
|
||||||
|
makeLenses ''ConstantExpression
|
||||||
|
makeLenses ''ReferenceExpression
|
||||||
|
makeLenses ''LambdaExpression
|
||||||
|
|
||||||
|
emptyExpression :: Expression
|
||||||
|
emptyExpression = mkRefExp fakeLocation nothingName
|
||||||
|
|
||||||
|
isEmptyExpression :: Expression -> Bool
|
||||||
|
isEmptyExpression (RefExp e) = view refLocation e == fakeLocation &&
|
||||||
|
view refName e == nothingName
|
||||||
|
isEmptyExpression _ = False
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
59
src/Bang/AST/Name.hs
Normal file
59
src/Bang/AST/Name.hs
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.AST.Name(
|
||||||
|
NameEnvironment(..)
|
||||||
|
, Name
|
||||||
|
, nothingName
|
||||||
|
, mkName
|
||||||
|
, ppName
|
||||||
|
, nameText
|
||||||
|
, nameEnvironment
|
||||||
|
, nameLocation
|
||||||
|
, nameIndex
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Control.Lens(view)
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Data.Text.Lazy(Text, unpack)
|
||||||
|
import Data.Word(Word)
|
||||||
|
import Bang.Syntax.Location(Location, fakeLocation)
|
||||||
|
import Bang.Utils.Pretty(text', word)
|
||||||
|
import Text.PrettyPrint.Annotated(Doc, colon, (<>))
|
||||||
|
|
||||||
|
data NameEnvironment = ModuleEnv | TypeEnv | VarEnv
|
||||||
|
deriving (Eq, Ord, Show)
|
||||||
|
|
||||||
|
data Name = Name
|
||||||
|
{ _nameText :: Text
|
||||||
|
, _nameEnvironment :: NameEnvironment
|
||||||
|
, _nameLocation :: Location
|
||||||
|
, _nameIndex :: Word
|
||||||
|
}
|
||||||
|
|
||||||
|
makeLenses ''Name
|
||||||
|
|
||||||
|
nothingName :: Name
|
||||||
|
nothingName = Name ":<nothing>:" VarEnv fakeLocation 0
|
||||||
|
|
||||||
|
mkName :: Text -> NameEnvironment -> Location -> Word -> Name
|
||||||
|
mkName = Name
|
||||||
|
|
||||||
|
ppName :: Name -> Doc a
|
||||||
|
ppName n = text' (view nameText n) <> colon <> word (view nameIndex n)
|
||||||
|
|
||||||
|
instance Eq Name where
|
||||||
|
a == b = view nameIndex a == view nameIndex b
|
||||||
|
a /= b = view nameIndex a /= view nameIndex b
|
||||||
|
|
||||||
|
instance Ord Name where
|
||||||
|
compare a b = compare (view nameIndex a) (view nameIndex b)
|
||||||
|
max a b = if a < b then b else a
|
||||||
|
min a b = if a < b then a else b
|
||||||
|
(<) a b = (<) (view nameIndex a) (view nameIndex b)
|
||||||
|
(>) a b = (>) (view nameIndex a) (view nameIndex b)
|
||||||
|
(<=) a b = (<=) (view nameIndex a) (view nameIndex b)
|
||||||
|
(>=) a b = (>=) (view nameIndex a) (view nameIndex b)
|
||||||
|
|
||||||
|
instance Show Name where
|
||||||
|
show n = unpack (view nameText n) ++ ":" ++ show (view nameIndex n)
|
||||||
217
src/Bang/AST/Type.hs
Normal file
217
src/Bang/AST/Type.hs
Normal file
@@ -0,0 +1,217 @@
|
|||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.AST.Type
|
||||||
|
( Type(..)
|
||||||
|
, ppType
|
||||||
|
, Kind(..)
|
||||||
|
, ppKind
|
||||||
|
, Kinded(..)
|
||||||
|
-- * the unit time
|
||||||
|
, UnitType
|
||||||
|
, ppUnitType
|
||||||
|
-- * primitive types
|
||||||
|
, PrimitiveType
|
||||||
|
, ppPrimitiveType
|
||||||
|
, mkPrimType
|
||||||
|
, ptLocation, ptName
|
||||||
|
-- * reference types
|
||||||
|
, ReferenceType
|
||||||
|
, ppReferenceType
|
||||||
|
, mkTypeRef
|
||||||
|
, rtLocation, rtKind, rtName
|
||||||
|
-- * lambda types
|
||||||
|
, FunctionType
|
||||||
|
, ppFunctionType
|
||||||
|
, mkFunType
|
||||||
|
, ftLocation, ftKind, ftArgumentType, ftResultType
|
||||||
|
-- * type application
|
||||||
|
, TypeApplication
|
||||||
|
, ppTypeApplication
|
||||||
|
, mkTypeApp
|
||||||
|
, taLocation, taKind, taLeftType, taRightType
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.AST.Name(Name, ppName)
|
||||||
|
import Bang.Syntax.Location(Location)
|
||||||
|
import Bang.Utils.FreeVars(CanHaveFreeVars(..))
|
||||||
|
import Bang.Utils.Pretty(text')
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Data.Set(union, empty, singleton)
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
import Text.PrettyPrint.Annotated(Doc, (<+>), (<>), text)
|
||||||
|
|
||||||
|
data Kind = Star
|
||||||
|
| Unknown
|
||||||
|
| KindArrow Kind Kind
|
||||||
|
deriving (Show, Eq)
|
||||||
|
|
||||||
|
ppKind :: Kind -> Doc a
|
||||||
|
ppKind Star = text "*"
|
||||||
|
ppKind Unknown = text "?"
|
||||||
|
ppKind (KindArrow a b) = ppKind a <+> text "->" <+> ppKind b
|
||||||
|
|
||||||
|
class Kinded a where
|
||||||
|
kind :: a -> Kind
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data UnitType = UnitType
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
instance Kinded UnitType where
|
||||||
|
kind _ = Star
|
||||||
|
|
||||||
|
instance CanHaveFreeVars UnitType where
|
||||||
|
freeVariables _ = empty
|
||||||
|
|
||||||
|
ppUnitType :: UnitType -> Doc a
|
||||||
|
ppUnitType _ = text "()"
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data PrimitiveType = PrimitiveType
|
||||||
|
{ _ptLocation :: Location
|
||||||
|
, _ptName :: Text
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
class MkPrimType a where
|
||||||
|
mkPrimType :: Location -> Text -> a
|
||||||
|
|
||||||
|
instance Kinded PrimitiveType where
|
||||||
|
kind _ = Star
|
||||||
|
|
||||||
|
instance MkPrimType PrimitiveType where
|
||||||
|
mkPrimType = PrimitiveType
|
||||||
|
|
||||||
|
instance MkPrimType Type where
|
||||||
|
mkPrimType l t = TypePrim (PrimitiveType l t)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars PrimitiveType where
|
||||||
|
freeVariables _ = empty
|
||||||
|
|
||||||
|
ppPrimitiveType :: PrimitiveType -> Doc a
|
||||||
|
ppPrimitiveType pt = text "llvm:" <> text' (_ptName pt)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data ReferenceType = ReferenceType
|
||||||
|
{ _rtLocation :: Location
|
||||||
|
, _rtKind :: Kind
|
||||||
|
, _rtName :: Name
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
instance Kinded ReferenceType where
|
||||||
|
kind = _rtKind
|
||||||
|
|
||||||
|
ppReferenceType :: ReferenceType -> Doc a
|
||||||
|
ppReferenceType = ppName . _rtName
|
||||||
|
|
||||||
|
class MkTypeRef a where
|
||||||
|
mkTypeRef :: Location -> Kind -> Name -> a
|
||||||
|
|
||||||
|
instance MkTypeRef ReferenceType where
|
||||||
|
mkTypeRef = ReferenceType
|
||||||
|
|
||||||
|
instance MkTypeRef Type where
|
||||||
|
mkTypeRef l k n = TypeRef (ReferenceType l k n)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars ReferenceType where
|
||||||
|
freeVariables r = singleton (_rtName r)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data FunctionType = FunctionType
|
||||||
|
{ _ftLocation :: Location
|
||||||
|
, _ftKind :: Kind
|
||||||
|
, _ftArgumentType :: Type
|
||||||
|
, _ftResultType :: Type
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
class MkFunType a where
|
||||||
|
mkFunType :: Location -> Type -> Type -> a
|
||||||
|
|
||||||
|
instance MkFunType FunctionType where
|
||||||
|
mkFunType l a r = FunctionType l Star a r
|
||||||
|
|
||||||
|
instance MkFunType Type where
|
||||||
|
mkFunType l a r = TypeFun (FunctionType l Star a r)
|
||||||
|
|
||||||
|
ppFunctionType :: FunctionType -> Doc a
|
||||||
|
ppFunctionType ft =
|
||||||
|
ppType (_ftArgumentType ft) <+> text "->" <+> ppType (_ftResultType ft)
|
||||||
|
|
||||||
|
instance Kinded FunctionType where
|
||||||
|
kind = _ftKind
|
||||||
|
|
||||||
|
instance CanHaveFreeVars FunctionType where
|
||||||
|
freeVariables ft = freeVariables (_ftArgumentType ft) `union`
|
||||||
|
freeVariables (_ftResultType ft)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data TypeApplication = TypeApplication
|
||||||
|
{ _taLocation :: Location
|
||||||
|
, _taKind :: Kind
|
||||||
|
, _taLeftType :: Type
|
||||||
|
, _taRightType :: Type
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
class MkTypeApp a where
|
||||||
|
mkTypeApp :: Location -> Type -> Type -> a
|
||||||
|
|
||||||
|
instance MkTypeApp TypeApplication where
|
||||||
|
mkTypeApp l s t = TypeApplication l Unknown s t
|
||||||
|
|
||||||
|
instance MkTypeApp Type where
|
||||||
|
mkTypeApp l s t = TypeApp (TypeApplication l Unknown s t)
|
||||||
|
|
||||||
|
instance Kinded TypeApplication where
|
||||||
|
kind = _taKind
|
||||||
|
|
||||||
|
ppTypeApplication :: TypeApplication -> Doc a
|
||||||
|
ppTypeApplication ta =
|
||||||
|
ppType (_taLeftType ta) <+> ppType (_taRightType ta)
|
||||||
|
|
||||||
|
instance CanHaveFreeVars TypeApplication where
|
||||||
|
freeVariables ta = freeVariables (_taLeftType ta) `union`
|
||||||
|
freeVariables (_taRightType ta)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data Type = TypeUnit UnitType
|
||||||
|
| TypePrim PrimitiveType
|
||||||
|
| TypeRef ReferenceType
|
||||||
|
| TypeFun FunctionType
|
||||||
|
| TypeApp TypeApplication
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
ppType :: Type -> Doc a
|
||||||
|
ppType (TypeUnit t) = ppUnitType t
|
||||||
|
ppType (TypePrim t) = ppPrimitiveType t
|
||||||
|
ppType (TypeRef t) = ppReferenceType t
|
||||||
|
ppType (TypeFun t) = ppFunctionType t
|
||||||
|
ppType (TypeApp t) = ppTypeApplication t
|
||||||
|
|
||||||
|
instance Kinded Type where
|
||||||
|
kind (TypeUnit x) = kind x
|
||||||
|
kind (TypePrim x) = kind x
|
||||||
|
kind (TypeRef x) = kind x
|
||||||
|
kind (TypeFun x) = kind x
|
||||||
|
kind (TypeApp x) = kind x
|
||||||
|
|
||||||
|
instance CanHaveFreeVars Type where
|
||||||
|
freeVariables (TypeUnit t) = freeVariables t
|
||||||
|
freeVariables (TypePrim t) = freeVariables t
|
||||||
|
freeVariables (TypeRef t) = freeVariables t
|
||||||
|
freeVariables (TypeFun t) = freeVariables t
|
||||||
|
freeVariables (TypeApp t) = freeVariables t
|
||||||
|
|
||||||
|
makeLenses ''PrimitiveType
|
||||||
|
makeLenses ''ReferenceType
|
||||||
|
makeLenses ''FunctionType
|
||||||
|
makeLenses ''TypeApplication
|
||||||
|
|
||||||
122
src/Bang/CommandLine.hs
Normal file
122
src/Bang/CommandLine.hs
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.CommandLine(
|
||||||
|
Verbosity(..)
|
||||||
|
, CommandsWithInputFile(..)
|
||||||
|
, CommandsWithOutputFile(..)
|
||||||
|
, CommandsWithVerbosity(..)
|
||||||
|
, BangCommand(..)
|
||||||
|
, ParserOptions(..)
|
||||||
|
, getCommand
|
||||||
|
, helpString
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Control.Applicative((<|>))
|
||||||
|
import Control.Lens(Lens')
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Data.Monoid((<>))
|
||||||
|
import Options.Applicative(Parser, ParserInfo, ParserPrefs(..), flag,
|
||||||
|
short, long, strOption, command, subparser, info,
|
||||||
|
progDesc, execParser, helper, metavar, str, argument,
|
||||||
|
showDefault, value, help)
|
||||||
|
import Options.Applicative.Help(parserHelp)
|
||||||
|
|
||||||
|
class CommandsWithInputFile opts where
|
||||||
|
inputFile :: Lens' opts FilePath
|
||||||
|
|
||||||
|
class CommandsWithOutputFile opts where
|
||||||
|
outputFile :: Lens' opts FilePath
|
||||||
|
|
||||||
|
class CommandsWithVerbosity opts where
|
||||||
|
verbosity :: Lens' opts Verbosity
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data Verbosity = Silent | Normal | Verbose
|
||||||
|
deriving (Eq, Show)
|
||||||
|
|
||||||
|
verboseOption :: Parser Verbosity
|
||||||
|
verboseOption = flag Normal Silent (short 'q' <> long "quiet")
|
||||||
|
<|> flag Normal Verbose (short 'v' <> long "verbose")
|
||||||
|
|
||||||
|
optOutputFile :: Parser FilePath
|
||||||
|
optOutputFile = strOption (short 'o' <> long "output-file" <> metavar "FILE"
|
||||||
|
<> help "The file to output as a result of this action."
|
||||||
|
<> value "/dev/stdout" <> showDefault)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data ParserOptions = ParserOptions {
|
||||||
|
_parseInputFile :: FilePath
|
||||||
|
, _parseOutputFile :: FilePath
|
||||||
|
, _parseVerbosity :: Verbosity
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
makeLenses ''ParserOptions
|
||||||
|
|
||||||
|
parseParseOptions :: Parser ParserOptions
|
||||||
|
parseParseOptions = ParserOptions <$> argument str (metavar "FILE")
|
||||||
|
<*> optOutputFile
|
||||||
|
<*> verboseOption
|
||||||
|
|
||||||
|
instance CommandsWithInputFile ParserOptions where
|
||||||
|
inputFile = parseInputFile
|
||||||
|
|
||||||
|
instance CommandsWithOutputFile ParserOptions where
|
||||||
|
outputFile = parseOutputFile
|
||||||
|
|
||||||
|
instance CommandsWithVerbosity ParserOptions where
|
||||||
|
verbosity = parseVerbosity
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data TypeCheckOptions = TypeCheckOptions {
|
||||||
|
_tcheckInputFile :: FilePath
|
||||||
|
, _tcheckOutputFile :: FilePath
|
||||||
|
, _tcheckVerbosity :: Verbosity
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
makeLenses ''TypeCheckOptions
|
||||||
|
|
||||||
|
parseTypeCheckOptions :: Parser TypeCheckOptions
|
||||||
|
parseTypeCheckOptions = TypeCheckOptions <$> argument str (metavar "FILE")
|
||||||
|
<*> optOutputFile
|
||||||
|
<*> verboseOption
|
||||||
|
|
||||||
|
instance CommandsWithInputFile TypeCheckOptions where
|
||||||
|
inputFile = tcheckInputFile
|
||||||
|
|
||||||
|
instance CommandsWithOutputFile TypeCheckOptions where
|
||||||
|
outputFile = tcheckOutputFile
|
||||||
|
|
||||||
|
instance CommandsWithVerbosity TypeCheckOptions where
|
||||||
|
verbosity = tcheckVerbosity
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data BangCommand = Help
|
||||||
|
| Parse ParserOptions
|
||||||
|
| TypeCheck TypeCheckOptions
|
||||||
|
| Version
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
bangOperation :: Parser BangCommand
|
||||||
|
bangOperation = subparser $
|
||||||
|
command "help" (pure Help `withInfo` "Describe common commands.") <>
|
||||||
|
command "version" (pure Version `withInfo` "Display version information.") <>
|
||||||
|
command "parse" (parseParse `withInfo` "Parse a file into its AST.") <>
|
||||||
|
command "typeCheck" (parseTCheck `withInfo` "Type check a file.")
|
||||||
|
where
|
||||||
|
parseParse = Parse <$> parseParseOptions
|
||||||
|
parseTCheck = TypeCheck <$> parseTypeCheckOptions
|
||||||
|
|
||||||
|
withInfo :: Parser a -> String -> ParserInfo a
|
||||||
|
withInfo opts desc = info (helper <*> opts) (progDesc desc)
|
||||||
|
|
||||||
|
helpString :: String
|
||||||
|
helpString = show (parserHelp (ParserPrefs "" False False True 80) bangOperation)
|
||||||
|
|
||||||
|
getCommand :: IO BangCommand
|
||||||
|
getCommand = execParser (bangOperation `withInfo` "Run a bang language action.")
|
||||||
11
src/Bang/Error.hs
Normal file
11
src/Bang/Error.hs
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
module Bang.Error(
|
||||||
|
exit
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import System.Exit(ExitCode(..), exitWith)
|
||||||
|
|
||||||
|
exit :: String -> IO b
|
||||||
|
exit x =
|
||||||
|
do putStrLn ("ERROR: " ++ x)
|
||||||
|
exitWith (ExitFailure 1)
|
||||||
176
src/Bang/Monad.hs
Normal file
176
src/Bang/Monad.hs
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
{-# LANGUAGE FlexibleInstances #-}
|
||||||
|
{-# LANGUAGE MultiParamTypeClasses #-}
|
||||||
|
{-# LANGUAGE Rank2Types #-}
|
||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
{-# LANGUAGE UndecidableInstances #-}
|
||||||
|
module Bang.Monad(
|
||||||
|
Compiler
|
||||||
|
, BangError(..)
|
||||||
|
, BangWarning(..)
|
||||||
|
, runCompiler
|
||||||
|
, runPass
|
||||||
|
, getPassState, setPassState, mapPassState, overPassState, viewPassState
|
||||||
|
, registerName, registerNewName, genName, genTypeRef, genVarRef
|
||||||
|
, warn, err, err'
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.AST.Expression(Expression, mkRefExp)
|
||||||
|
import Bang.AST.Name(NameEnvironment(..), Name, mkName, nameIndex)
|
||||||
|
import Bang.AST.Type(Kind(..), Type, mkTypeRef)
|
||||||
|
import Bang.CommandLine(BangCommand, CommandsWithInputFile(..))
|
||||||
|
import Bang.Error(exit)
|
||||||
|
import Bang.Syntax.Location(Location(..), Origin(..),
|
||||||
|
unknownLocation, ppLocation)
|
||||||
|
import Bang.Utils.Pretty(BangDoc)
|
||||||
|
import Control.Exception(tryJust)
|
||||||
|
import Control.Lens(Lens', over, set, view)
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Control.Monad(guard, when)
|
||||||
|
import Data.Text.Lazy(Text, pack)
|
||||||
|
import qualified Data.Text.Lazy.IO as T
|
||||||
|
import System.Exit(ExitCode(..), exitWith)
|
||||||
|
import System.IO.Error(isDoesNotExistError)
|
||||||
|
import Text.PrettyPrint.Annotated(text, ($+$), nest, render)
|
||||||
|
|
||||||
|
class BangError e where
|
||||||
|
ppError :: e -> (Maybe Location, BangDoc)
|
||||||
|
|
||||||
|
class BangWarning w where
|
||||||
|
ppWarning :: w -> (Maybe Location, BangDoc)
|
||||||
|
|
||||||
|
data CompilerState state = CompilerState {
|
||||||
|
_csNextIdent :: !Word
|
||||||
|
, _csPromoteWarnings :: !Bool
|
||||||
|
, _csWarnings :: [BangDoc]
|
||||||
|
, _csPassState :: !state
|
||||||
|
}
|
||||||
|
|
||||||
|
makeLenses ''CompilerState
|
||||||
|
|
||||||
|
initialState :: BangCommand -> CompilerState ()
|
||||||
|
initialState _ = CompilerState 1 False [] ()
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
newtype Compiler s a =
|
||||||
|
Compiler { unCompiler :: CompilerState s -> IO (CompilerState s, a) }
|
||||||
|
|
||||||
|
instance Applicative (Compiler s) where
|
||||||
|
pure a = Compiler (\ st -> return (st, a))
|
||||||
|
mf <*> ma = Compiler (\ st ->
|
||||||
|
do (st', f) <- unCompiler mf st
|
||||||
|
(st'', a) <- unCompiler ma st'
|
||||||
|
return (st'', f a))
|
||||||
|
|
||||||
|
instance Functor (Compiler s) where
|
||||||
|
fmap f m = return f <*> m
|
||||||
|
|
||||||
|
instance Monad (Compiler s) where
|
||||||
|
return a = Compiler (\ st -> return (st, a))
|
||||||
|
m >>= k = Compiler (\ st ->
|
||||||
|
do (st', a) <- unCompiler m st
|
||||||
|
unCompiler (k a) st')
|
||||||
|
|
||||||
|
runCompiler :: CommandsWithInputFile o =>
|
||||||
|
BangCommand -> o ->
|
||||||
|
(Origin -> Text -> Compiler () a) ->
|
||||||
|
IO a
|
||||||
|
runCompiler cmd opts action =
|
||||||
|
do let path = view inputFile opts
|
||||||
|
orig = File path
|
||||||
|
mtxt <- tryJust (guard . isDoesNotExistError) (T.readFile path)
|
||||||
|
case mtxt of
|
||||||
|
Left _ -> exit ("Unable to open file '" ++ path ++ "'")
|
||||||
|
Right txt -> snd `fmap` unCompiler (action orig txt) (initialState cmd)
|
||||||
|
|
||||||
|
runPass :: s2 -> (Compiler s2 a) -> Compiler s1 (s2, a)
|
||||||
|
runPass s2 action =
|
||||||
|
Compiler (\ cst1 ->
|
||||||
|
do let cst2 = set csPassState s2 cst1
|
||||||
|
s1 = view csPassState cst1
|
||||||
|
(cst2', v) <- unCompiler action cst2
|
||||||
|
let retval = (view csPassState cst2', v)
|
||||||
|
return (set csPassState s1 cst2', retval))
|
||||||
|
|
||||||
|
getPassState :: Compiler s s
|
||||||
|
getPassState = Compiler (\ st -> return (st, view csPassState st))
|
||||||
|
|
||||||
|
setPassState :: Lens' s b -> b -> Compiler s ()
|
||||||
|
setPassState passLens v =
|
||||||
|
Compiler (\ st -> return (set (csPassState . passLens) v st, ()))
|
||||||
|
|
||||||
|
mapPassState :: (s -> s) -> Compiler s ()
|
||||||
|
mapPassState f = Compiler (\ st -> return (over csPassState f st, ()))
|
||||||
|
|
||||||
|
overPassState :: Lens' s b -> (b -> b) -> Compiler s ()
|
||||||
|
overPassState passLens f =
|
||||||
|
Compiler (\ st -> return (over (csPassState . passLens) f st, ()))
|
||||||
|
|
||||||
|
viewPassState :: Lens' s b -> Compiler s b
|
||||||
|
viewPassState l = Compiler (\ st -> return (st, view (csPassState . l) st))
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
registerName :: Name -> Compiler s Name
|
||||||
|
registerName name =
|
||||||
|
Compiler (\ st ->
|
||||||
|
do let current = view csNextIdent st
|
||||||
|
return (over csNextIdent (+1) st, set nameIndex current name))
|
||||||
|
|
||||||
|
registerNewName :: NameEnvironment -> Text -> Compiler s Name
|
||||||
|
registerNewName env name =
|
||||||
|
Compiler (\ st ->
|
||||||
|
do let current = view csNextIdent st
|
||||||
|
res = mkName name env unknownLocation current
|
||||||
|
return (over csNextIdent (+1) st, res))
|
||||||
|
|
||||||
|
genName :: NameEnvironment -> Compiler s Name
|
||||||
|
genName env =
|
||||||
|
Compiler (\ st ->
|
||||||
|
do let current = view csNextIdent st
|
||||||
|
str = "gen:" ++ show current
|
||||||
|
res = mkName (pack str) env unknownLocation current
|
||||||
|
return (over csNextIdent (+1) st, res))
|
||||||
|
|
||||||
|
genTypeRef :: Kind -> Compiler s Type
|
||||||
|
genTypeRef k = mkTypeRef unknownLocation k `fmap` genName TypeEnv
|
||||||
|
|
||||||
|
genVarRef :: Compiler s Expression
|
||||||
|
genVarRef = mkRefExp unknownLocation `fmap` genName VarEnv
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data WErrorWarning w = WErrorWarning w
|
||||||
|
|
||||||
|
instance BangWarning w => BangError (WErrorWarning w) where
|
||||||
|
ppError (WErrorWarning w) =
|
||||||
|
let (loc, wdoc) = ppWarning w
|
||||||
|
edoc = text "Warning lifted to error by -WError:" $+$ nest 3 wdoc
|
||||||
|
in (loc, edoc)
|
||||||
|
|
||||||
|
warn :: BangWarning w => w -> Compiler s ()
|
||||||
|
warn w = Compiler (\ st ->
|
||||||
|
if view csPromoteWarnings st
|
||||||
|
then runError (WErrorWarning w) False >> return (st, ())
|
||||||
|
else runWarning w >> return (st, ()))
|
||||||
|
|
||||||
|
err :: BangError w => w -> Compiler s a
|
||||||
|
err w = Compiler (\ _ -> runError w True >> undefined)
|
||||||
|
|
||||||
|
err' :: BangError e => e -> Compiler s ()
|
||||||
|
err' e = Compiler (\ st -> runError e False >> return (st, ()))
|
||||||
|
|
||||||
|
runWarning :: BangWarning w => w -> IO ()
|
||||||
|
runWarning w = putStrLn (go (ppWarning w))
|
||||||
|
where
|
||||||
|
go (Nothing, doc) = render doc
|
||||||
|
go (Just a, doc) = render (ppLocation a $+$ nest 3 doc)
|
||||||
|
|
||||||
|
runError :: BangError w => w -> Bool -> IO ()
|
||||||
|
runError e die =
|
||||||
|
do putStrLn (go (ppError e))
|
||||||
|
when die $ exitWith (ExitFailure 1)
|
||||||
|
where
|
||||||
|
go (Nothing, doc) = render doc
|
||||||
|
go (Just a, doc) = render (ppLocation a $+$ nest 3 doc)
|
||||||
118
src/Bang/Syntax/Lexer.x
Normal file
118
src/Bang/Syntax/Lexer.x
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
-- -*- mode: haskell -*-
|
||||||
|
-- vi: set ft=haskell :
|
||||||
|
{
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# OPTIONS -w #-}
|
||||||
|
module Bang.Syntax.Lexer(
|
||||||
|
AlexReturn(..)
|
||||||
|
, AlexInput(..)
|
||||||
|
, alexScan
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.Syntax.Location(Location(..), Located(..), Origin(..),
|
||||||
|
Position(..), advanceWith, advanceWith',
|
||||||
|
locatedAt, initialPosition)
|
||||||
|
import Bang.Syntax.Token(Token(..), Fixity(..))
|
||||||
|
import Data.Char(isAscii, ord)
|
||||||
|
import Data.Int(Int64)
|
||||||
|
import Data.Map.Strict(Map)
|
||||||
|
import qualified Data.Map.Strict as Map
|
||||||
|
import Data.Maybe(fromMaybe)
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
import qualified Data.Text.Lazy as T
|
||||||
|
import Data.Word(Word8)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
-- Digits
|
||||||
|
$decdigit = 0-9
|
||||||
|
$hexdigit = [0-9a-fA-f]
|
||||||
|
$octdigit = 0-7
|
||||||
|
$bindigit = [01]
|
||||||
|
|
||||||
|
-- Identifier Characters
|
||||||
|
$typestart = [A-Z\_]
|
||||||
|
$valstart = [a-z\_]
|
||||||
|
$identrest = [a-zA-Z0-9\_\.]
|
||||||
|
$opident = [\~\!\@\#\$\%\^\&\*\+\-\=\.\<\>\?\_\|:]
|
||||||
|
$escape_char = [abfnrtv'\"\\] --"
|
||||||
|
|
||||||
|
:-
|
||||||
|
|
||||||
|
-- Whitespace
|
||||||
|
$white+ ;
|
||||||
|
"/*"[.\n]*"*/" ;
|
||||||
|
|
||||||
|
-- Numbers
|
||||||
|
$decdigit+ { emitI 0 (IntTok 10) }
|
||||||
|
"0x"$hexdigit+ { emitI 2 (IntTok 16) }
|
||||||
|
"0o"$octdigit+ { emitI 2 (IntTok 8) }
|
||||||
|
"0b"$bindigit+ { emitI 2 (IntTok 2) }
|
||||||
|
$decdigit+"."$decdigit+ ("e""-"?$decdigit+)? { emitS FloatTok}
|
||||||
|
$decdigit+"e""-"?$decdigit+ { emitS FloatTok}
|
||||||
|
|
||||||
|
-- Identifier
|
||||||
|
$typestart $identrest* { emitS TypeIdent }
|
||||||
|
$valstart $identrest* { emitS ValIdent }
|
||||||
|
$opident+ { emitO }
|
||||||
|
|
||||||
|
-- Characters and Strings
|
||||||
|
['].['] { emitS CharTok }
|
||||||
|
['] [\\] $escape_char ['] { emitS CharTok }
|
||||||
|
[\"] ([^\"] | [\n] | ([\\] $escape_char))* [\"] { emitS StringTok } --"
|
||||||
|
|
||||||
|
-- Symbols
|
||||||
|
"(" { emitT "(" }
|
||||||
|
")" { emitT ")" }
|
||||||
|
"[" { emitT "[" }
|
||||||
|
"]" { emitT "]" }
|
||||||
|
"{" { emitT "{" }
|
||||||
|
"}" { emitT "}" }
|
||||||
|
";" { emitT ";" }
|
||||||
|
"," { emitT "," }
|
||||||
|
"`" { emitT "`" }
|
||||||
|
[\\] { emitT "\\" }
|
||||||
|
|
||||||
|
{
|
||||||
|
|
||||||
|
type AlexAction = Origin -> Map Text Fixity -> Int -> AlexInput -> Located Token
|
||||||
|
|
||||||
|
data AlexInput = AlexInput !Position Text
|
||||||
|
|
||||||
|
emitT :: Text -> AlexAction
|
||||||
|
emitT t = emitS (const (Special t))
|
||||||
|
|
||||||
|
emitS :: (Text -> Token) -> AlexAction
|
||||||
|
emitS mk src _ len (AlexInput pos t) = token `locatedAt` loc
|
||||||
|
where
|
||||||
|
txt = T.take (fromIntegral len) t
|
||||||
|
token = mk txt
|
||||||
|
loc = Location src pos (pos `advanceWith'` txt)
|
||||||
|
|
||||||
|
emitI :: Int64 -> (Text -> Token) -> AlexAction
|
||||||
|
emitI dropCount mk src _ len (AlexInput pos t) = token `locatedAt` loc
|
||||||
|
where
|
||||||
|
baseText = T.take (fromIntegral len) t
|
||||||
|
txt = T.drop dropCount baseText
|
||||||
|
token = mk txt
|
||||||
|
loc = Location src pos (pos `advanceWith'` baseText)
|
||||||
|
|
||||||
|
emitO :: AlexAction
|
||||||
|
emitO src fixTable len (AlexInput pos t) =
|
||||||
|
case Map.lookup baseText fixTable of
|
||||||
|
Nothing -> OpIdent (LeftAssoc 9) baseText `locatedAt` loc
|
||||||
|
Just f -> OpIdent f baseText `locatedAt` loc
|
||||||
|
where
|
||||||
|
baseText = T.take (fromIntegral len) t
|
||||||
|
loc = Location src pos (pos `advanceWith'` baseText)
|
||||||
|
|
||||||
|
alexGetByte :: AlexInput -> Maybe (Word8, AlexInput)
|
||||||
|
alexGetByte (AlexInput p t) =
|
||||||
|
do (c, rest) <- T.uncons t
|
||||||
|
return (byteForChar c, (AlexInput (p `advanceWith` c) rest))
|
||||||
|
where
|
||||||
|
byteForChar c | isAscii c = fromIntegral (ord c)
|
||||||
|
| otherwise = 0
|
||||||
|
|
||||||
|
}
|
||||||
105
src/Bang/Syntax/Location.hs
Normal file
105
src/Bang/Syntax/Location.hs
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.Syntax.Location(
|
||||||
|
Position, posRow, posColumn, posOffset
|
||||||
|
, ppPosition
|
||||||
|
, initialPosition
|
||||||
|
, advanceWith, advanceWith'
|
||||||
|
, Origin(..)
|
||||||
|
, ppOrigin
|
||||||
|
, Location(Location)
|
||||||
|
, locSource, locStart, locEnd
|
||||||
|
, ppLocation
|
||||||
|
, Located(..)
|
||||||
|
, locatedAt
|
||||||
|
, unknownLocation
|
||||||
|
, fakeLocation
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.Utils.Pretty(BangDoc, word)
|
||||||
|
import Control.Lens
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Data.Monoid((<>))
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
import qualified Data.Text.Lazy as T
|
||||||
|
import Text.PrettyPrint.Annotated(colon, parens, text)
|
||||||
|
|
||||||
|
data Position = Position {
|
||||||
|
_posRow :: Word
|
||||||
|
, _posColumn :: Word
|
||||||
|
, _posOffset :: Word
|
||||||
|
}
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
makeLenses ''Position
|
||||||
|
|
||||||
|
ppPosition :: Position -> BangDoc
|
||||||
|
ppPosition (Position r c _) = word r <> colon <> word c
|
||||||
|
|
||||||
|
initialPosition :: Position
|
||||||
|
initialPosition = Position 1 1 0
|
||||||
|
|
||||||
|
instance Eq Position where
|
||||||
|
a == b = _posOffset a == _posOffset b
|
||||||
|
|
||||||
|
advanceWith :: Position -> Char -> Position
|
||||||
|
advanceWith (Position r c o) '\t' = Position r (c+8) (o+1)
|
||||||
|
advanceWith (Position r _ o) '\n' = Position (r+1) 1 (o+1)
|
||||||
|
advanceWith (Position r c o) _ = Position r (c+1) (o+1)
|
||||||
|
|
||||||
|
advanceWith' :: Position -> Text -> Position
|
||||||
|
advanceWith' pos txt =
|
||||||
|
case T.uncons txt of
|
||||||
|
Nothing -> pos
|
||||||
|
Just (c, rest) -> advanceWith' (pos `advanceWith` c) rest
|
||||||
|
|
||||||
|
data Origin = Unknown
|
||||||
|
| Generated
|
||||||
|
| Interactive
|
||||||
|
| File FilePath
|
||||||
|
deriving (Eq, Show)
|
||||||
|
|
||||||
|
ppOrigin :: Origin -> BangDoc
|
||||||
|
ppOrigin x =
|
||||||
|
case x of
|
||||||
|
Unknown -> text "<unknown>"
|
||||||
|
Generated -> text "<generated>"
|
||||||
|
Interactive -> text "<interactive>"
|
||||||
|
File f -> text f
|
||||||
|
|
||||||
|
data Location = Location {
|
||||||
|
_locSource :: Origin
|
||||||
|
, _locStart :: Position
|
||||||
|
, _locEnd :: Position
|
||||||
|
}
|
||||||
|
deriving (Eq, Show)
|
||||||
|
|
||||||
|
makeLenses ''Location
|
||||||
|
|
||||||
|
ppLocation :: Location -> BangDoc
|
||||||
|
ppLocation loc
|
||||||
|
| start == end = ppOrigin src <> colon <> ppPosition start
|
||||||
|
| view posRow start == view posRow end =
|
||||||
|
ppOrigin src <> colon <> word (view posRow start) <> colon <>
|
||||||
|
word (view posColumn start) <> text "–" <> word (view posColumn end)
|
||||||
|
| otherwise =
|
||||||
|
ppOrigin src <> colon <> parens (ppPosition start) <> text "–" <>
|
||||||
|
parens (ppPosition end)
|
||||||
|
where
|
||||||
|
src = view locSource loc
|
||||||
|
start = view locStart loc
|
||||||
|
end = view locEnd loc
|
||||||
|
|
||||||
|
data Located a = Located !Location a
|
||||||
|
|
||||||
|
instance Show a => Show (Located a) where
|
||||||
|
show (Located l x) = show x ++ " `locatedAt` " ++ show l
|
||||||
|
|
||||||
|
locatedAt :: a -> Location -> Located a
|
||||||
|
locatedAt a p = Located p a
|
||||||
|
|
||||||
|
unknownLocation :: Location
|
||||||
|
unknownLocation = Location Unknown initialPosition initialPosition
|
||||||
|
|
||||||
|
fakeLocation :: Location
|
||||||
|
fakeLocation = Location Generated initialPosition initialPosition
|
||||||
32
src/Bang/Syntax/Name.hs
Normal file
32
src/Bang/Syntax/Name.hs
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.Syntax.Name(
|
||||||
|
Name
|
||||||
|
, nameId
|
||||||
|
, nameString
|
||||||
|
, nameGenerated
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
|
||||||
|
data Name = Name {
|
||||||
|
_nameId :: !Word
|
||||||
|
, _nameString :: !String
|
||||||
|
, _nameGenerated :: !Bool
|
||||||
|
}
|
||||||
|
|
||||||
|
makeLenses ''Name
|
||||||
|
|
||||||
|
instance Eq Name where
|
||||||
|
a == b = _nameId a == _nameId b
|
||||||
|
a /= b = _nameId a /= _nameId b
|
||||||
|
|
||||||
|
instance Ord Name where
|
||||||
|
compare a b = compare (_nameId a) (_nameId b)
|
||||||
|
max a b = if a > b then a else b
|
||||||
|
min a b = if a > b then b else a
|
||||||
|
a < b = _nameId a < _nameId b
|
||||||
|
a <= b = _nameId a <= _nameId b
|
||||||
|
a > b = _nameId a > _nameId b
|
||||||
|
a >= b = _nameId a >= _nameId b
|
||||||
|
|
||||||
253
src/Bang/Syntax/Parser.y
Normal file
253
src/Bang/Syntax/Parser.y
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
-- -*- mode: haskell -*-
|
||||||
|
-- vi: set ft=haskell :
|
||||||
|
{
|
||||||
|
{-# LANGUAGE DeriveFunctor #-}
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# OPTION_GHC -w #-}
|
||||||
|
module Bang.Syntax.Parser(
|
||||||
|
runParser
|
||||||
|
, parseModule
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.Monad(err)
|
||||||
|
import Bang.AST(Name, Module, NameEnvironment(..), mkModule, mkName, emptyExpression)
|
||||||
|
import Bang.AST.Declaration(Declaration, mkTypeDecl, mkValueDecl)
|
||||||
|
import Bang.AST.Expression(ConstantValue(..), Expression, mkConstExp, mkRefExp, mkLambdaExp)
|
||||||
|
import Bang.AST.Type(Type, Kind(..), mkTypeRef, mkFunType, mkTypeApp, mkPrimType)
|
||||||
|
import Bang.Syntax.Location(Located(..), Origin, Position)
|
||||||
|
import Bang.Syntax.ParserError(ParserError(..))
|
||||||
|
import Bang.Syntax.ParserMonad(Parser, addFixities, parseError, runNextToken, runParser)
|
||||||
|
import Bang.Syntax.Token(Token(..), Fixity(..))
|
||||||
|
import Control.Monad(forM)
|
||||||
|
import Data.List(union)
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
%name top_module
|
||||||
|
%tokentype { Located Token }
|
||||||
|
%monad { Parser }
|
||||||
|
%error { parseError }
|
||||||
|
%lexer { runNextToken } { Located _ EOFTok }
|
||||||
|
|
||||||
|
%token
|
||||||
|
'::' { Located $$ (OpIdent _ "::") }
|
||||||
|
'=' { Located $$ (OpIdent _ "=") }
|
||||||
|
',' { Located $$ (OpIdent _ ",") }
|
||||||
|
'->' { Located $$ (OpIdent _ "->") }
|
||||||
|
'infixl' { Located $$ (ValIdent "infixl") }
|
||||||
|
'infixr' { Located $$ (ValIdent "infixr") }
|
||||||
|
'infix' { Located $$ (ValIdent "infix") }
|
||||||
|
'module' { Located $$ (ValIdent "module") }
|
||||||
|
'primitive' { Located $$ (ValIdent "primitive") }
|
||||||
|
'type' { Located $$ (ValIdent "type") }
|
||||||
|
Integer { Located _ (IntTok _ _) }
|
||||||
|
Float { Located _ (FloatTok _) }
|
||||||
|
Char { Located _ (CharTok _) }
|
||||||
|
String { Located _ (StringTok _) }
|
||||||
|
OpIdent { Located _ (OpIdent _ _) }
|
||||||
|
TypeIdent { Located _ (TypeIdent _) }
|
||||||
|
ValIdent { Located _ (ValIdent _) }
|
||||||
|
OPL0 { Located _ (OpIdent (LeftAssoc 0) _) }
|
||||||
|
OPR0 { Located _ (OpIdent (RightAssoc 0) _) }
|
||||||
|
OPN0 { Located _ (OpIdent (NonAssoc 0) _) }
|
||||||
|
OPL1 { Located _ (OpIdent (LeftAssoc 1) _) }
|
||||||
|
OPR1 { Located _ (OpIdent (RightAssoc 1) _) }
|
||||||
|
OPN1 { Located _ (OpIdent (NonAssoc 1) _) }
|
||||||
|
OPL2 { Located _ (OpIdent (LeftAssoc 2) _) }
|
||||||
|
OPR2 { Located _ (OpIdent (RightAssoc 2) _) }
|
||||||
|
OPN2 { Located _ (OpIdent (NonAssoc 2) _) }
|
||||||
|
OPL3 { Located _ (OpIdent (LeftAssoc 3) _) }
|
||||||
|
OPR3 { Located _ (OpIdent (RightAssoc 3) _) }
|
||||||
|
OPN3 { Located _ (OpIdent (NonAssoc 3) _) }
|
||||||
|
OPL4 { Located _ (OpIdent (LeftAssoc 4) _) }
|
||||||
|
OPR4 { Located _ (OpIdent (RightAssoc 4) _) }
|
||||||
|
OPN4 { Located _ (OpIdent (NonAssoc 4) _) }
|
||||||
|
OPL5 { Located _ (OpIdent (LeftAssoc 5) _) }
|
||||||
|
OPR5 { Located _ (OpIdent (RightAssoc 5) _) }
|
||||||
|
OPN5 { Located _ (OpIdent (NonAssoc 5) _) }
|
||||||
|
OPL6 { Located _ (OpIdent (LeftAssoc 6) _) }
|
||||||
|
OPR6 { Located _ (OpIdent (RightAssoc 6) _) }
|
||||||
|
OPN6 { Located _ (OpIdent (NonAssoc 6) _) }
|
||||||
|
OPL7 { Located _ (OpIdent (LeftAssoc 7) _) }
|
||||||
|
OPR7 { Located _ (OpIdent (RightAssoc 7) _) }
|
||||||
|
OPN7 { Located _ (OpIdent (NonAssoc 7) _) }
|
||||||
|
OPL8 { Located _ (OpIdent (LeftAssoc 8) _) }
|
||||||
|
OPR8 { Located _ (OpIdent (RightAssoc 8) _) }
|
||||||
|
OPN8 { Located _ (OpIdent (NonAssoc 8) _) }
|
||||||
|
OPL9 { Located _ (OpIdent (LeftAssoc 9) _) }
|
||||||
|
OPR9 { Located _ (OpIdent (RightAssoc 9) _) }
|
||||||
|
OPN9 { Located _ (OpIdent (NonAssoc 9) _) }
|
||||||
|
|
||||||
|
%right OPL0
|
||||||
|
%left OPR0
|
||||||
|
%nonassoc OPN0
|
||||||
|
%right OPL1
|
||||||
|
%left OPR1
|
||||||
|
%nonassoc OPN1
|
||||||
|
%right OPL2
|
||||||
|
%left OPR2
|
||||||
|
%nonassoc OPN2
|
||||||
|
%right OPL3
|
||||||
|
%left OPR3
|
||||||
|
%nonassoc OPN3
|
||||||
|
%right OPL4
|
||||||
|
%left OPR4
|
||||||
|
%nonassoc OPN4
|
||||||
|
%right OPL5
|
||||||
|
%left OPR5
|
||||||
|
%nonassoc OPN5
|
||||||
|
%right OPL6
|
||||||
|
%left OPR6
|
||||||
|
%nonassoc OPN6
|
||||||
|
%right OPL7
|
||||||
|
%left OPR7
|
||||||
|
%nonassoc OPN7
|
||||||
|
%right OPL8
|
||||||
|
%left OPR8
|
||||||
|
%nonassoc OPN8
|
||||||
|
%right OPL9
|
||||||
|
%left OPR9
|
||||||
|
%nonassoc OPN9
|
||||||
|
|
||||||
|
%%
|
||||||
|
|
||||||
|
top_module :: { Module }
|
||||||
|
: 'module' TypeIdent listopt(Declaration)
|
||||||
|
{%
|
||||||
|
do let Located src (TypeIdent rawName) = $2
|
||||||
|
return (mkModule (mkName rawName ModuleEnv src 0) [$3]) }
|
||||||
|
|
||||||
|
Declaration :: { Maybe Declaration }
|
||||||
|
: ValueDeclaration { Just $1 }
|
||||||
|
| FixityDeclaration { Nothing }
|
||||||
|
| TypeDeclaration { Just $1 }
|
||||||
|
|
||||||
|
ValueDeclaration :: { Declaration }
|
||||||
|
: list1(ValIdent) '=' Expression
|
||||||
|
{%
|
||||||
|
case $1 of
|
||||||
|
[] ->
|
||||||
|
err (InternalError $2 "ValDeclLHS")
|
||||||
|
[Located src (ValIdent rawName)] ->
|
||||||
|
do let name = mkName rawName VarEnv src 0
|
||||||
|
return (mkValueDecl name src Nothing $3)
|
||||||
|
((Located src (ValIdent rawName)) : args) ->
|
||||||
|
do let name = mkName rawName VarEnv src 0
|
||||||
|
argNames = map (\ (Located arsrc (ValIdent argName)) ->
|
||||||
|
mkName argName VarEnv arsrc 0)
|
||||||
|
args
|
||||||
|
return (mkValueDecl name src Nothing
|
||||||
|
(mkLambdaExp $2 argNames $3))
|
||||||
|
}
|
||||||
|
|
||||||
|
FixityDeclaration :: { () }
|
||||||
|
: 'infixl' Integer sep(',',OpIdent)
|
||||||
|
{% addFixities $1 LeftAssoc $2 $3 }
|
||||||
|
| 'infixr' Integer sep(',',OpIdent)
|
||||||
|
{% addFixities $1 RightAssoc $2 $3 }
|
||||||
|
| 'infix' Integer sep(',',OpIdent)
|
||||||
|
{% addFixities $1 NonAssoc $2 $3 }
|
||||||
|
|
||||||
|
TypeDeclaration :: { Declaration }
|
||||||
|
: ValIdent '::' Type
|
||||||
|
{%
|
||||||
|
do let Located src (ValIdent rawName) = $1
|
||||||
|
name = mkName rawName VarEnv src 0
|
||||||
|
return (mkValueDecl name src (Just $3) emptyExpression) }
|
||||||
|
| 'type' TypeIdent '=' Type
|
||||||
|
{%
|
||||||
|
do let Located src (TypeIdent rawName) = $2
|
||||||
|
name = mkName rawName TypeEnv src 0
|
||||||
|
return (mkTypeDecl name src $4) }
|
||||||
|
| 'primitive' 'type' TypeIdent '=' String
|
||||||
|
{%
|
||||||
|
do let Located nsrc (TypeIdent rawName) = $3
|
||||||
|
Located tsrc (StringTok rawText) = $5
|
||||||
|
name = mkName rawName TypeEnv nsrc 0
|
||||||
|
return (mkTypeDecl name $2 (mkPrimType tsrc rawText)) }
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
Type :: { Type }
|
||||||
|
: RawType { $1 }
|
||||||
|
|
||||||
|
RawType :: { Type }
|
||||||
|
: RawType '->' BaseType { mkFunType $2 $1 $3 }
|
||||||
|
| BaseType { $1 }
|
||||||
|
|
||||||
|
BaseType :: { Type }
|
||||||
|
: TypeIdent {%
|
||||||
|
let Located src (TypeIdent rawName) = $1
|
||||||
|
name = mkName rawName TypeEnv src 0
|
||||||
|
in return (mkTypeRef src Unknown name) }
|
||||||
|
| ValIdent {%
|
||||||
|
let Located src (ValIdent rawName) = $1
|
||||||
|
name = mkName rawName TypeEnv src 0
|
||||||
|
in return (mkTypeRef src Unknown name) }
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
Expression :: { Expression }
|
||||||
|
: BaseExpression { $1 }
|
||||||
|
|
||||||
|
BaseExpression :: { Expression }
|
||||||
|
: OpIdent {%
|
||||||
|
let Located src (OpIdent _ rawName) = $1
|
||||||
|
name = mkName rawName VarEnv src 0
|
||||||
|
in return (mkRefExp src name) }
|
||||||
|
| ValIdent {%
|
||||||
|
let Located src (ValIdent rawName) = $1
|
||||||
|
name = mkName rawName VarEnv src 0
|
||||||
|
in return (mkRefExp src name) }
|
||||||
|
| Integer { let Located src (IntTok base val) = $1
|
||||||
|
in mkConstExp src (ConstantInt base val) }
|
||||||
|
| String { let Located src (StringTok val) = $1
|
||||||
|
in mkConstExp src (ConstantString val) }
|
||||||
|
| Float { let Located src (FloatTok val) = $1
|
||||||
|
in mkConstExp src (ConstantFloat val) }
|
||||||
|
| Char { let Located src (CharTok val) = $1
|
||||||
|
in mkConstExp src (ConstantChar val) }
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
opt(p)
|
||||||
|
: {- empty -} { Nothing }
|
||||||
|
| p { Just $1 }
|
||||||
|
|
||||||
|
sep(p,q)
|
||||||
|
: {- empty -} { [] }
|
||||||
|
| sep_body(p,q) { reverse $1 }
|
||||||
|
|
||||||
|
sep1(p,q)
|
||||||
|
: sep_body(p,q) { reverse $1 }
|
||||||
|
|
||||||
|
sep_body(p,q)
|
||||||
|
: q { [$1] }
|
||||||
|
| sep_body(p,q) p q { $3 : $1 }
|
||||||
|
|
||||||
|
list(p)
|
||||||
|
: {- empty -} { [] }
|
||||||
|
| list_body(p) { reverse $1 }
|
||||||
|
|
||||||
|
list1(p)
|
||||||
|
: list_body(p) { reverse $1 }
|
||||||
|
|
||||||
|
list_body(p)
|
||||||
|
: p { [$1] }
|
||||||
|
| list_body(p) p { $2 : $1 }
|
||||||
|
|
||||||
|
listopt(p)
|
||||||
|
: {- empty -} { [] }
|
||||||
|
| listopt(p) p { case $2 of
|
||||||
|
Nothing -> $1
|
||||||
|
Just x -> $1 ++ [x]
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
|
||||||
|
parseModule :: Parser Module
|
||||||
|
parseModule = top_module
|
||||||
|
|
||||||
|
}
|
||||||
43
src/Bang/Syntax/ParserError.hs
Normal file
43
src/Bang/Syntax/ParserError.hs
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
module Bang.Syntax.ParserError(
|
||||||
|
ParserError(..)
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
import Bang.Monad(BangError(..))
|
||||||
|
import Bang.Syntax.Location(Location, ppLocation)
|
||||||
|
import Bang.Syntax.Token(Token, ppToken)
|
||||||
|
import Bang.Utils.Pretty(BangDoc, text')
|
||||||
|
import Text.PrettyPrint.Annotated((<+>), ($+$), text, quotes, text, nest)
|
||||||
|
|
||||||
|
data ParserError = LexError Location Text
|
||||||
|
| ParseError Location Token
|
||||||
|
| RedefinitionError Location Location Text
|
||||||
|
| InternalError Location Text
|
||||||
|
| UnboundVariable Location Text
|
||||||
|
| UnexpectedEOF
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
instance BangError ParserError where
|
||||||
|
ppError = prettyError
|
||||||
|
|
||||||
|
prettyError :: ParserError -> (Maybe Location, BangDoc)
|
||||||
|
prettyError e =
|
||||||
|
case e of
|
||||||
|
LexError l t ->
|
||||||
|
(Just l, text "Lexical error around token" <+> quotes (text' t))
|
||||||
|
ParseError l t ->
|
||||||
|
(Just l, text "Parser error around token" <+> quotes (ppToken t))
|
||||||
|
RedefinitionError errLoc origLoc t ->
|
||||||
|
let line1 = text "Variable" <+> quotes (text' t) <+> text "is redefined: "
|
||||||
|
line2 = text "Original definition:" <+> ppLocation origLoc
|
||||||
|
line3 = text "Redefinition:" <+> ppLocation errLoc
|
||||||
|
in (Nothing, line1 $+$ nest 3 (line2 $+$ line3))
|
||||||
|
InternalError loc t ->
|
||||||
|
(Just loc, text' t)
|
||||||
|
UnboundVariable loc t ->
|
||||||
|
(Just loc, text "Unbound variable" <+> quotes (text' t))
|
||||||
|
UnexpectedEOF ->
|
||||||
|
(Nothing, text "Unexpected end of file.")
|
||||||
|
|
||||||
|
|
||||||
129
src/Bang/Syntax/ParserMonad.hs
Normal file
129
src/Bang/Syntax/ParserMonad.hs
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
module Bang.Syntax.ParserMonad(
|
||||||
|
Parser
|
||||||
|
, runParser
|
||||||
|
, addFixities
|
||||||
|
, parseError
|
||||||
|
, runNextToken
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.Monad(Compiler, err, runPass,
|
||||||
|
setPassState, overPassState, viewPassState)
|
||||||
|
import Bang.Syntax.Lexer(AlexReturn(..), AlexInput(..), alexScan)
|
||||||
|
import Bang.Syntax.Location(Location(..), Located(..),
|
||||||
|
Origin(..), initialPosition,
|
||||||
|
advanceWith', locatedAt)
|
||||||
|
import Bang.Syntax.ParserError(ParserError(..))
|
||||||
|
import Bang.Syntax.Token(Token(..), Fixity)
|
||||||
|
import Control.Lens.TH(makeLenses)
|
||||||
|
import Control.Monad(forM_)
|
||||||
|
import Data.Char(digitToInt, isSpace)
|
||||||
|
import Data.Map.Strict(Map)
|
||||||
|
import qualified Data.Map.Strict as Map
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
import qualified Data.Text.Lazy as T
|
||||||
|
|
||||||
|
data ParserState = ParserState {
|
||||||
|
_psPrecTable :: Map Text Fixity
|
||||||
|
, _psOrigin :: Origin
|
||||||
|
, _psLexerState :: AlexInput
|
||||||
|
}
|
||||||
|
|
||||||
|
makeLenses ''ParserState
|
||||||
|
|
||||||
|
type Parser a = Compiler ParserState a
|
||||||
|
|
||||||
|
runParser :: Origin -> Text -> Parser a -> Compiler ps a
|
||||||
|
runParser origin stream action = snd `fmap` runPass pstate action
|
||||||
|
where
|
||||||
|
initInput = AlexInput initialPosition stream
|
||||||
|
pstate = ParserState Map.empty origin initInput
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
addFixities :: Location ->
|
||||||
|
(Word -> Fixity) -> Located Token -> [Located Token] ->
|
||||||
|
Parser ()
|
||||||
|
addFixities src fixityBuilder lval names =
|
||||||
|
do value <- processInteger lval
|
||||||
|
let fixity = fixityBuilder value
|
||||||
|
forM_ names $ \ tok ->
|
||||||
|
overPassState psPrecTable (Map.insert (tokenName tok) fixity)
|
||||||
|
where
|
||||||
|
processInteger x =
|
||||||
|
case x of
|
||||||
|
Located _ (IntTok base text) ->
|
||||||
|
return (makeNumeric base text 0)
|
||||||
|
_ ->
|
||||||
|
err (InternalError src "Non-number in fixity?")
|
||||||
|
|
||||||
|
--
|
||||||
|
makeNumeric base text acc =
|
||||||
|
case T.uncons text of
|
||||||
|
Nothing -> acc
|
||||||
|
Just (x, rest) ->
|
||||||
|
let acc' = (acc * base) + charValue x
|
||||||
|
in makeNumeric base rest acc'
|
||||||
|
--
|
||||||
|
charValue = fromIntegral . digitToInt
|
||||||
|
--
|
||||||
|
tokenName t =
|
||||||
|
case t of
|
||||||
|
Located _ (TypeIdent x) -> x
|
||||||
|
Located _ (ValIdent x) -> x
|
||||||
|
Located _ (OpIdent _ x) -> x
|
||||||
|
_ ->
|
||||||
|
error "Internal error (tokenName in Parser.y)"
|
||||||
|
|
||||||
|
getFixities :: Parser (Map Text Fixity)
|
||||||
|
getFixities = viewPassState psPrecTable
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
runNextToken :: (Located Token -> Parser a) ->
|
||||||
|
Parser a
|
||||||
|
runNextToken parseAction = go =<< getLexerState
|
||||||
|
where
|
||||||
|
go state@(AlexInput initPos _) =
|
||||||
|
case alexScan state 0 of
|
||||||
|
AlexEOF ->
|
||||||
|
do orig <- getOrigin
|
||||||
|
parseAction (EOFTok `locatedAt` Location orig initPos initPos)
|
||||||
|
AlexError (AlexInput pos text) ->
|
||||||
|
do let (as, bs) = T.break isSpace text
|
||||||
|
pos' = advanceWith' pos as
|
||||||
|
input' = AlexInput pos' bs
|
||||||
|
setLexerState input'
|
||||||
|
orig <- getOrigin
|
||||||
|
parseAction (ErrorTok as `locatedAt` Location orig initPos initPos)
|
||||||
|
AlexSkip input' _ ->
|
||||||
|
go input'
|
||||||
|
AlexToken input' len lexAction ->
|
||||||
|
do setLexerState input'
|
||||||
|
src <- getOrigin
|
||||||
|
table <- getFixities
|
||||||
|
parseAction (lexAction src table len state)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
getOrigin :: Parser Origin
|
||||||
|
getOrigin = viewPassState psOrigin
|
||||||
|
|
||||||
|
getLexerState :: Parser AlexInput
|
||||||
|
getLexerState = viewPassState psLexerState
|
||||||
|
|
||||||
|
setLexerState :: AlexInput -> Parser ()
|
||||||
|
setLexerState = setPassState psLexerState
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
parseError :: Located Token -> Parser a
|
||||||
|
parseError t =
|
||||||
|
case t of
|
||||||
|
Located _ EOFTok -> err UnexpectedEOF
|
||||||
|
Located p (ErrorTok tok) -> err (LexError p tok)
|
||||||
|
Located p tok -> err (ParseError p tok)
|
||||||
|
|
||||||
|
|
||||||
217
src/Bang/Syntax/PostProcess.hs
Normal file
217
src/Bang/Syntax/PostProcess.hs
Normal file
@@ -0,0 +1,217 @@
|
|||||||
|
{-# LANGUAGE RankNTypes #-}
|
||||||
|
module Bang.Syntax.PostProcess(
|
||||||
|
runPostProcessor
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.AST(Name, Module, moduleDeclarations, ppName,
|
||||||
|
nameText, nameLocation, nameEnvironment)
|
||||||
|
import Bang.AST.Declaration(Declaration(..), declName,
|
||||||
|
tdName, tdType,
|
||||||
|
ValueDeclaration, vdName, vdLocation,
|
||||||
|
vdDeclaredType, vdValue)
|
||||||
|
import Bang.AST.Expression(Expression(..), isEmptyExpression, refName,
|
||||||
|
lambdaArgumentNames, lambdaBody,
|
||||||
|
isEmptyExpression)
|
||||||
|
import Bang.AST.Type(Type(..), rtName, ftArgumentType, ftResultType,
|
||||||
|
taLeftType, taRightType)
|
||||||
|
import Bang.Monad(Compiler, BangError(..), err, err', registerName)
|
||||||
|
import Bang.Syntax.Location(Location, ppLocation)
|
||||||
|
import Bang.Utils.FreeVars(CanHaveFreeVars(..))
|
||||||
|
import Bang.Utils.Pretty(BangDoc, text')
|
||||||
|
import Control.Lens(Lens', view, set)
|
||||||
|
import Control.Monad(foldM)
|
||||||
|
import Data.Char(isLower)
|
||||||
|
import Data.Graph(SCC(..))
|
||||||
|
import Data.Graph.SCC(stronglyConnComp)
|
||||||
|
import Data.Map.Strict(Map)
|
||||||
|
import qualified Data.Map.Strict as Map
|
||||||
|
import Data.Set(toList)
|
||||||
|
import Data.Text.Lazy(uncons)
|
||||||
|
import Text.PrettyPrint.Annotated(text, ($+$), (<+>), nest, quotes)
|
||||||
|
|
||||||
|
data PostProcessError = InternalError Name
|
||||||
|
| UndefinedVariable Name
|
||||||
|
| RedefinitionError Name Location Location
|
||||||
|
| TypeDeclWithoutValue Name Location
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
instance BangError PostProcessError where
|
||||||
|
ppError = prettyError
|
||||||
|
|
||||||
|
prettyError :: PostProcessError -> (Maybe Location, BangDoc)
|
||||||
|
prettyError e =
|
||||||
|
case e of
|
||||||
|
InternalError n ->
|
||||||
|
(Nothing, text "Serious post-processing error w.r.t. " <+> ppName n)
|
||||||
|
UndefinedVariable n ->
|
||||||
|
(Just (view nameLocation n), text "Undefined variable " <+> quotes (text' (view nameText n)))
|
||||||
|
RedefinitionError n l1 l2 ->
|
||||||
|
(Just l1, text "Name" <+> ppName n <+> text "redefined." $+$
|
||||||
|
nest 2 (text "original definiton at " <+> ppLocation l2))
|
||||||
|
TypeDeclWithoutValue n l ->
|
||||||
|
(Just l, text "Type declaration provided, but no value provided." $+$
|
||||||
|
nest 2 (text "variable name: " <+> ppName n))
|
||||||
|
|
||||||
|
runPostProcessor :: Module -> Compiler ps Module
|
||||||
|
runPostProcessor mdl =
|
||||||
|
do let baseDecls = concat (view moduleDeclarations mdl)
|
||||||
|
decls <- linkNames baseDecls
|
||||||
|
declTable <- makeDeclarationTable decls
|
||||||
|
decls' <- combineTypeValueDeclarations declTable decls
|
||||||
|
return (set moduleDeclarations (orderDecls decls') mdl)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
linkNames :: [Declaration] -> Compiler ps [Declaration]
|
||||||
|
linkNames decls =
|
||||||
|
do declaredNames <- foldM addNewNames Map.empty (map (view declName) decls)
|
||||||
|
mapM (linkDecls declaredNames) decls
|
||||||
|
where
|
||||||
|
addNewNames t n =
|
||||||
|
do n' <- registerName n
|
||||||
|
let key = (view nameText n, view nameEnvironment n)
|
||||||
|
return (Map.insert key n' t)
|
||||||
|
--
|
||||||
|
replaceName nameMap name =
|
||||||
|
do let key = (view nameText name, view nameEnvironment name)
|
||||||
|
case Map.lookup key nameMap of
|
||||||
|
Nothing -> err' (UndefinedVariable name) >> return name
|
||||||
|
Just name' -> return name'
|
||||||
|
--
|
||||||
|
addOrReplaceName nameMap name =
|
||||||
|
do let key = (view nameText name, view nameEnvironment name)
|
||||||
|
case Map.lookup key nameMap of
|
||||||
|
Nothing | couldBeTypeVariable name ->
|
||||||
|
do name' <- registerName name
|
||||||
|
return (name', Map.insert key name' nameMap)
|
||||||
|
Nothing ->
|
||||||
|
err' (UndefinedVariable name) >> return (name, nameMap)
|
||||||
|
Just name' ->
|
||||||
|
return (name', nameMap)
|
||||||
|
--
|
||||||
|
couldBeTypeVariable n =
|
||||||
|
case uncons (view nameText n) of
|
||||||
|
Nothing ->
|
||||||
|
error "Empty variable name?"
|
||||||
|
Just (x,_) ->
|
||||||
|
isLower x
|
||||||
|
--
|
||||||
|
linkDecls nameMap (DeclType td) =
|
||||||
|
do td' <- overM tdType (linkType' nameMap) td
|
||||||
|
td'' <- overM tdName (replaceName nameMap) td'
|
||||||
|
return (DeclType td'')
|
||||||
|
linkDecls nameMap (DeclVal vd) =
|
||||||
|
do vd' <- overM vdDeclaredType (traverse (linkType' nameMap)) vd
|
||||||
|
vd'' <- overM vdValue (linkExpr nameMap) vd'
|
||||||
|
vd''' <- overM vdName (replaceName nameMap) vd''
|
||||||
|
return (DeclVal vd''')
|
||||||
|
--
|
||||||
|
linkType' nm t = fst `fmap` linkType nm t
|
||||||
|
--
|
||||||
|
linkType nameMap x@(TypeUnit _) = return (x, nameMap)
|
||||||
|
linkType nameMap x@(TypePrim _) = return (x, nameMap)
|
||||||
|
linkType nameMap (TypeRef t) =
|
||||||
|
do (name, nameMap') <- addOrReplaceName nameMap (view rtName t)
|
||||||
|
let t' = set rtName name t
|
||||||
|
return (TypeRef t', nameMap')
|
||||||
|
linkType nameMap (TypeFun t) =
|
||||||
|
do (argType, nameMap') <- linkType nameMap (view ftArgumentType t)
|
||||||
|
(resType, nameMap'') <- linkType nameMap' (view ftResultType t)
|
||||||
|
return (TypeFun (set ftArgumentType argType $
|
||||||
|
set ftResultType resType t),
|
||||||
|
nameMap'')
|
||||||
|
linkType nameMap (TypeApp t) =
|
||||||
|
do (lt, nameMap') <- linkType nameMap (view taLeftType t)
|
||||||
|
(rt, nameMap'') <- linkType nameMap' (view taRightType t)
|
||||||
|
return (TypeApp (set taLeftType lt (set taRightType rt t)), nameMap'')
|
||||||
|
--
|
||||||
|
linkExpr _ x | isEmptyExpression x = return x
|
||||||
|
linkExpr _ x@(ConstExp _) = return x
|
||||||
|
linkExpr nameMap (RefExp e) =
|
||||||
|
RefExp `fmap` overM refName (replaceName nameMap) e
|
||||||
|
linkExpr nameMap (LambdaExp e) =
|
||||||
|
do let names = view lambdaArgumentNames e
|
||||||
|
nameMap' <- foldM addNewNames nameMap names
|
||||||
|
e' <- overM lambdaArgumentNames (mapM (replaceName nameMap')) e
|
||||||
|
e'' <- overM lambdaBody (linkExpr nameMap') e'
|
||||||
|
return (LambdaExp e'')
|
||||||
|
|
||||||
|
overM :: Monad m => Lens' s a -> (a -> m a) -> s -> m s
|
||||||
|
overM field action input =
|
||||||
|
do newval <- action (view field input)
|
||||||
|
return (set field newval input)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
type DeclarationTable = Map Name (Maybe (Type, Location), Maybe ValueDeclaration)
|
||||||
|
|
||||||
|
makeDeclarationTable :: [Declaration] -> Compiler ps DeclarationTable
|
||||||
|
makeDeclarationTable decls = foldM combine Map.empty decls
|
||||||
|
where
|
||||||
|
combine table d =
|
||||||
|
do let name = view declName d
|
||||||
|
case d of
|
||||||
|
DeclType _ ->
|
||||||
|
return table
|
||||||
|
DeclVal vd | Just t <- view vdDeclaredType vd,
|
||||||
|
isEmptyExpression (view vdValue vd) ->
|
||||||
|
do let myLoc = view vdLocation vd
|
||||||
|
myVal = Just (t, myLoc)
|
||||||
|
case Map.lookup name table of
|
||||||
|
Nothing ->
|
||||||
|
return (Map.insert name (myVal, Nothing) table)
|
||||||
|
Just (Nothing, vd') ->
|
||||||
|
return (Map.insert name (myVal, vd') table)
|
||||||
|
Just (Just (_, theirLoc), _) ->
|
||||||
|
err (RedefinitionError name myLoc theirLoc)
|
||||||
|
DeclVal vd | Just _ <- view vdDeclaredType vd ->
|
||||||
|
err (InternalError name)
|
||||||
|
DeclVal vd | isEmptyExpression (view vdValue vd) ->
|
||||||
|
err (InternalError name)
|
||||||
|
DeclVal vd ->
|
||||||
|
case Map.lookup name table of
|
||||||
|
Nothing ->
|
||||||
|
return (Map.insert name (Nothing, Just vd) table)
|
||||||
|
Just (td, Nothing) ->
|
||||||
|
return (Map.insert name (td, Just vd) table)
|
||||||
|
Just (_, Just vd') ->
|
||||||
|
do let newLoc = view vdLocation vd
|
||||||
|
origLoc = view vdLocation vd'
|
||||||
|
err (RedefinitionError name newLoc origLoc)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
combineTypeValueDeclarations :: DeclarationTable ->
|
||||||
|
[Declaration] ->
|
||||||
|
Compiler ps [Declaration]
|
||||||
|
combineTypeValueDeclarations table decls = process decls
|
||||||
|
where
|
||||||
|
process [] = return []
|
||||||
|
process (x:rest) =
|
||||||
|
case x of
|
||||||
|
DeclType _ ->
|
||||||
|
(x:) `fmap` process rest
|
||||||
|
DeclVal vd | Just _ <- view vdDeclaredType vd,
|
||||||
|
isEmptyExpression (view vdValue vd) ->
|
||||||
|
process rest
|
||||||
|
DeclVal vd ->
|
||||||
|
case Map.lookup (view vdName vd) table of
|
||||||
|
Nothing ->
|
||||||
|
err (InternalError (view vdName vd))
|
||||||
|
Just (Nothing, _) ->
|
||||||
|
(x:) `fmap` process rest
|
||||||
|
Just (Just (t, _), _) ->
|
||||||
|
do let vd' = set vdDeclaredType (Just t) vd
|
||||||
|
(DeclVal vd' :) `fmap` process rest
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
orderDecls :: [Declaration] -> [[Declaration]]
|
||||||
|
orderDecls decls = map unSCC (stronglyConnComp nodes)
|
||||||
|
where
|
||||||
|
unSCC (AcyclicSCC x) = [x]
|
||||||
|
unSCC (CyclicSCC xs) = xs
|
||||||
|
--
|
||||||
|
nodes = map tuplify decls
|
||||||
|
tuplify d = (d, view declName d, toList (freeVariables d))
|
||||||
40
src/Bang/Syntax/Token.hs
Normal file
40
src/Bang/Syntax/Token.hs
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
module Bang.Syntax.Token(
|
||||||
|
Token(..)
|
||||||
|
, Fixity(..)
|
||||||
|
, ppToken
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.Utils.Pretty(BangDoc, text')
|
||||||
|
import Data.Monoid((<>))
|
||||||
|
import Data.Text.Lazy(Text)
|
||||||
|
import Text.PrettyPrint.Annotated(quotes, doubleQuotes, text, parens)
|
||||||
|
|
||||||
|
data Token = CharTok Text
|
||||||
|
| FloatTok Text
|
||||||
|
| IntTok Word Text
|
||||||
|
| OpIdent Fixity Text
|
||||||
|
| Special Text
|
||||||
|
| StringTok Text
|
||||||
|
| TypeIdent Text
|
||||||
|
| ValIdent Text
|
||||||
|
| ErrorTok Text
|
||||||
|
| EOFTok
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
data Fixity = LeftAssoc Word
|
||||||
|
| RightAssoc Word
|
||||||
|
| NonAssoc Word
|
||||||
|
deriving (Show)
|
||||||
|
|
||||||
|
ppToken :: Token -> BangDoc
|
||||||
|
ppToken (CharTok t) = quotes (text' t)
|
||||||
|
ppToken (FloatTok t) = text' t
|
||||||
|
ppToken (IntTok _ t) = text' t
|
||||||
|
ppToken (OpIdent _ t) = text' t
|
||||||
|
ppToken (Special t) = text' t
|
||||||
|
ppToken (StringTok t) = doubleQuotes (text' t)
|
||||||
|
ppToken (TypeIdent t) = text' t
|
||||||
|
ppToken (ValIdent t) = text' t
|
||||||
|
ppToken (ErrorTok t) = text "ERROR" <> parens (text' t)
|
||||||
|
ppToken EOFTok = text "<EOF>"
|
||||||
269
src/Bang/TypeInfer.hs
Normal file
269
src/Bang/TypeInfer.hs
Normal file
@@ -0,0 +1,269 @@
|
|||||||
|
{-# LANGUAGE FlexibleInstances #-}
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE TemplateHaskell #-}
|
||||||
|
{-# LANGUAGE TypeSynonymInstances #-}
|
||||||
|
module Bang.TypeInfer(runTypeInference)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.AST(Module, moduleDeclarations)
|
||||||
|
import Bang.AST.Declaration(Declaration(..), ValueDeclaration,
|
||||||
|
vdName, vdDeclaredType, vdValue,
|
||||||
|
tdName, tdType)
|
||||||
|
import Bang.AST.Expression(Expression(..), ConstantValue(..),
|
||||||
|
lambdaArgumentNames, lambdaBody,
|
||||||
|
constLocation, constValue, refName)
|
||||||
|
import Bang.AST.Name(Name, NameEnvironment(..),
|
||||||
|
nameLocation, nameText, ppName)
|
||||||
|
import Bang.AST.Type(Type(..), ppType, rtName, ftArgumentType,
|
||||||
|
ftResultType, taLeftType, taRightType,
|
||||||
|
mkPrimType, mkFunType, mkTypeRef,
|
||||||
|
Kind(..))
|
||||||
|
import Bang.Monad(Compiler, BangError(..), BangWarning(..),
|
||||||
|
registerNewName, err', err, warn,
|
||||||
|
getPassState, mapPassState, runPass)
|
||||||
|
import Bang.Syntax.Location(Location, fakeLocation)
|
||||||
|
import Bang.Utils.FreeVars(CanHaveFreeVars(..))
|
||||||
|
import Bang.Utils.Pretty(BangDoc, text')
|
||||||
|
import Control.Lens(view, over)
|
||||||
|
import Data.Map.Strict(Map)
|
||||||
|
import qualified Data.Map.Strict as Map
|
||||||
|
import Data.Set(Set, (\\))
|
||||||
|
import qualified Data.Set as Set
|
||||||
|
import Text.PrettyPrint.Annotated(text, nest, quotes, ($+$), (<+>))
|
||||||
|
|
||||||
|
runTypeInference :: Module -> Compiler ps Module
|
||||||
|
runTypeInference x =
|
||||||
|
do _ <- runPass emptyEnvironment (mapM_ typeInferDecls (view moduleDeclarations x))
|
||||||
|
return x
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
type Infer a = Compiler TypeEnvironment a
|
||||||
|
|
||||||
|
getNamesTypeScheme :: Name -> Infer (Maybe Scheme)
|
||||||
|
getNamesTypeScheme n = Map.lookup n `fmap` getPassState
|
||||||
|
|
||||||
|
addToTypeEnvironment :: [Name] -> [Scheme] -> Infer ()
|
||||||
|
addToTypeEnvironment ns schms = mapPassState (add ns schms)
|
||||||
|
where
|
||||||
|
add :: [Name] -> [Scheme] -> TypeEnvironment -> TypeEnvironment
|
||||||
|
add [] [] acc = acc
|
||||||
|
add (n:restns) (s:rschms) acc =
|
||||||
|
Map.insertWithKey errorFn n s (add restns rschms acc)
|
||||||
|
add _ _ _ =
|
||||||
|
error "Wackiness has insued."
|
||||||
|
--
|
||||||
|
errorFn k _ _ = error ("Redefinition of " ++ show k)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
type Substitution = Map Name Type
|
||||||
|
|
||||||
|
nullSubstitution :: Substitution
|
||||||
|
nullSubstitution = Map.empty
|
||||||
|
|
||||||
|
composeSubstitutions :: Substitution -> Substitution -> Substitution
|
||||||
|
composeSubstitutions s1 s2 = Map.map (apply s1) s2 `Map.union` s1
|
||||||
|
|
||||||
|
class ApplySubst t where
|
||||||
|
apply :: Substitution -> t -> t
|
||||||
|
|
||||||
|
instance ApplySubst Type where
|
||||||
|
apply s (TypeUnit t) = TypeUnit t
|
||||||
|
apply s (TypePrim t) = TypePrim t
|
||||||
|
apply s (TypeRef t) = case Map.lookup (view rtName t) s of
|
||||||
|
Nothing -> TypeRef t
|
||||||
|
Just t' -> t'
|
||||||
|
apply s (TypeFun t) = TypeFun (over ftArgumentType (apply s) $
|
||||||
|
over ftResultType (apply s) t)
|
||||||
|
apply s (TypeApp t) = TypeApp (over taLeftType (apply s) $
|
||||||
|
over taRightType (apply s) t)
|
||||||
|
|
||||||
|
instance ApplySubst a => ApplySubst [a] where
|
||||||
|
apply s = map (apply s)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data Scheme = Scheme [Name] Type
|
||||||
|
|
||||||
|
instance CanHaveFreeVars Scheme where
|
||||||
|
freeVariables (Scheme ns t) = freeVariables t \\ Set.fromList ns
|
||||||
|
|
||||||
|
instance ApplySubst Scheme where
|
||||||
|
apply s (Scheme vars t) = Scheme vars (apply s t)
|
||||||
|
|
||||||
|
newTypeVar :: Name -> Infer Type
|
||||||
|
newTypeVar n =
|
||||||
|
do let loc = view nameLocation n
|
||||||
|
n' <- registerNewName TypeEnv (view nameText n)
|
||||||
|
return (mkTypeRef loc Unknown n')
|
||||||
|
|
||||||
|
instantiate :: Scheme -> Infer Type
|
||||||
|
instantiate (Scheme vars t) =
|
||||||
|
do refs <- mapM newTypeVar vars
|
||||||
|
let newSubsts = Map.fromList (zip vars refs)
|
||||||
|
return (apply newSubsts t)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
mostGeneralUnifier :: Type -> Type -> Infer Substitution
|
||||||
|
mostGeneralUnifier a b =
|
||||||
|
case (a, b) of
|
||||||
|
(TypeUnit _, TypeUnit _) -> return nullSubstitution
|
||||||
|
(TypePrim _, TypePrim _) -> return nullSubstitution
|
||||||
|
(TypeRef t1, t2) -> varBind (view rtName t1) t2
|
||||||
|
(t2, TypeRef t1) -> varBind (view rtName t1) t2
|
||||||
|
(TypeFun t1, TypeFun t2) -> do let at1 = view ftArgumentType t1
|
||||||
|
at2 = view ftArgumentType t2
|
||||||
|
s1 <- mostGeneralUnifier at1 at2
|
||||||
|
let rt1 = apply s1 (view ftResultType t1)
|
||||||
|
rt2 = apply s1 (view ftResultType t2)
|
||||||
|
s2 <- mostGeneralUnifier rt1 rt2
|
||||||
|
return (s1 `composeSubstitutions` s2)
|
||||||
|
(TypeApp t1, TypeApp t2) -> do let lt1 = view taLeftType t1
|
||||||
|
lt2 = view taLeftType t2
|
||||||
|
s1 <- mostGeneralUnifier lt1 lt2
|
||||||
|
let rt1 = apply s1 (view taRightType t1)
|
||||||
|
rt2 = apply s1 (view taRightType t2)
|
||||||
|
s2 <- mostGeneralUnifier rt1 rt2
|
||||||
|
return (s1 `composeSubstitutions` s2)
|
||||||
|
_ -> do err' (TypesDontUnify a b)
|
||||||
|
return nullSubstitution
|
||||||
|
|
||||||
|
varBind :: Name -> Type -> Infer Substitution
|
||||||
|
varBind u t | TypeRef t' <- t,
|
||||||
|
view rtName t' == u = return nullSubstitution
|
||||||
|
| u `Set.member` freeVariables t = do err' (OccursFail u t)
|
||||||
|
return nullSubstitution
|
||||||
|
| otherwise = return (Map.singleton u t)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
type TypeEnvironment = Map Name Scheme
|
||||||
|
|
||||||
|
emptyEnvironment :: TypeEnvironment
|
||||||
|
emptyEnvironment = Map.empty
|
||||||
|
|
||||||
|
instance ApplySubst TypeEnvironment where
|
||||||
|
apply s tenv = Map.map (apply s) tenv
|
||||||
|
|
||||||
|
instance CanHaveFreeVars TypeEnvironment where
|
||||||
|
freeVariables tenv = freeVariables (Map.elems tenv)
|
||||||
|
|
||||||
|
generalize :: TypeEnvironment -> Type -> Scheme
|
||||||
|
generalize env t = Scheme vars t
|
||||||
|
where vars = Set.toList (freeVariables t \\ freeVariables env)
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data InferenceError = InternalError
|
||||||
|
| TypesDontUnify Type Type
|
||||||
|
| OccursFail Name Type
|
||||||
|
| UnboundVariable Name
|
||||||
|
|
||||||
|
instance BangError InferenceError where
|
||||||
|
ppError = prettyError
|
||||||
|
|
||||||
|
prettyError :: InferenceError -> (Maybe Location, BangDoc)
|
||||||
|
prettyError e =
|
||||||
|
case e of
|
||||||
|
InternalError ->
|
||||||
|
(Nothing, text "<internal error>")
|
||||||
|
TypesDontUnify t1 t2 ->
|
||||||
|
(Nothing, text "Types don't unify:" $+$
|
||||||
|
(nest 3
|
||||||
|
(text "first type: " <+> ppType t1 $+$
|
||||||
|
text "second type: " <+> ppType t2)))
|
||||||
|
OccursFail n t ->
|
||||||
|
(Just (view nameLocation n),
|
||||||
|
text "Occurs check failed:" $+$
|
||||||
|
(nest 3 (text "Type: " <+> ppType t)))
|
||||||
|
UnboundVariable n ->
|
||||||
|
(Just (view nameLocation n),
|
||||||
|
text "Unbound variable (in type checker?):" <+> ppName n)
|
||||||
|
|
||||||
|
data InferenceWarning = TopLevelWithoutType Name Type
|
||||||
|
| DeclarationMismatch Name Type Type
|
||||||
|
|
||||||
|
instance BangWarning InferenceWarning where
|
||||||
|
ppWarning = prettyWarning
|
||||||
|
|
||||||
|
prettyWarning :: InferenceWarning -> (Maybe Location, BangDoc)
|
||||||
|
prettyWarning w =
|
||||||
|
case w of
|
||||||
|
TopLevelWithoutType n t ->
|
||||||
|
(Just (view nameLocation n),
|
||||||
|
text "Variable" <+> quotes (text' (view nameText n)) <+>
|
||||||
|
text "is defined without a type." $+$
|
||||||
|
text "Inferred type:" $+$ nest 3 (ppType t))
|
||||||
|
DeclarationMismatch n dt it ->
|
||||||
|
(Just (view nameLocation n),
|
||||||
|
text "Mismatch between declared and inferred type of" <+>
|
||||||
|
quotes (text' (view nameText n)) $+$
|
||||||
|
nest 3 (text "declared type:" <+> ppType dt $+$
|
||||||
|
text "inferred type:" <+> ppType it))
|
||||||
|
|
||||||
|
-- -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
-- Infer the type of a group of declarations with cyclic dependencies.
|
||||||
|
typeInferDecls :: [Declaration] -> Infer ()
|
||||||
|
typeInferDecls decls =
|
||||||
|
do (names, schemes, decls') <- getInitialSchemes decls
|
||||||
|
addToTypeEnvironment names schemes
|
||||||
|
mapM_ typeInferDecl decls'
|
||||||
|
where
|
||||||
|
getInitialSchemes [] =
|
||||||
|
return ([], [], [])
|
||||||
|
getInitialSchemes ((DeclType td) : rest) =
|
||||||
|
do (rn, rs, rd) <- getInitialSchemes rest
|
||||||
|
let n = view tdName td
|
||||||
|
s = Scheme [] (view tdType td)
|
||||||
|
return (n:rn, s:rs, rd)
|
||||||
|
getInitialSchemes ((DeclVal td) : rest) =
|
||||||
|
do (rn, rs, rd) <- getInitialSchemes rest
|
||||||
|
return (rn, rs, (td : rd))
|
||||||
|
|
||||||
|
typeInferDecl :: ValueDeclaration -> Infer ()
|
||||||
|
typeInferDecl vd =
|
||||||
|
do (subs, t) <- typeInferExpr (view vdValue vd)
|
||||||
|
let t' = apply subs t
|
||||||
|
case view vdDeclaredType vd of
|
||||||
|
Nothing ->
|
||||||
|
warn (TopLevelWithoutType (view vdName vd) t')
|
||||||
|
Just dt ->
|
||||||
|
warn (DeclarationMismatch (view vdName vd) dt t)
|
||||||
|
|
||||||
|
typeInferConst :: Location -> ConstantValue ->
|
||||||
|
Infer (Substitution, Type)
|
||||||
|
typeInferConst l (ConstantInt _ _) =
|
||||||
|
return (nullSubstitution, mkPrimType l "i64")
|
||||||
|
typeInferConst l (ConstantChar _) =
|
||||||
|
return (nullSubstitution, mkPrimType l "i8") -- FIXME
|
||||||
|
typeInferConst l (ConstantString _) =
|
||||||
|
return (nullSubstitution, mkPrimType l "i8*") -- FIXME
|
||||||
|
typeInferConst l (ConstantFloat _) =
|
||||||
|
return (nullSubstitution, mkPrimType l "double")
|
||||||
|
|
||||||
|
typeInferExpr :: Expression -> Infer (Substitution, Type)
|
||||||
|
typeInferExpr expr =
|
||||||
|
case expr of
|
||||||
|
ConstExp e ->
|
||||||
|
typeInferConst (view constLocation e) (view constValue e)
|
||||||
|
RefExp e ->
|
||||||
|
do mscheme <- getNamesTypeScheme (view refName e)
|
||||||
|
case mscheme of
|
||||||
|
Nothing -> err (UnboundVariable (view refName e))
|
||||||
|
Just scheme -> do t <- instantiate scheme
|
||||||
|
return (nullSubstitution, t)
|
||||||
|
LambdaExp e ->
|
||||||
|
do let argNames = view lambdaArgumentNames e
|
||||||
|
tvars <- mapM newTypeVar argNames
|
||||||
|
let tvars' = map (Scheme []) tvars
|
||||||
|
addToTypeEnvironment argNames tvars'
|
||||||
|
(s1, t1) <- typeInferExpr (view lambdaBody e)
|
||||||
|
return (s1, mkFunType' (apply s1 tvars) t1)
|
||||||
|
where
|
||||||
|
mkFunType' [] t = t
|
||||||
|
mkFunType' (x:rest) t = mkFunType fakeLocation x (mkFunType' rest t)
|
||||||
|
|
||||||
|
|
||||||
20
src/Bang/Utils/FreeVars.hs
Normal file
20
src/Bang/Utils/FreeVars.hs
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
module Bang.Utils.FreeVars(
|
||||||
|
CanHaveFreeVars(..)
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Bang.AST.Name(Name)
|
||||||
|
import Data.Set(Set)
|
||||||
|
import qualified Data.Set as Set
|
||||||
|
|
||||||
|
class CanHaveFreeVars a where
|
||||||
|
freeVariables :: a -> Set Name
|
||||||
|
|
||||||
|
instance CanHaveFreeVars a => CanHaveFreeVars (Maybe a) where
|
||||||
|
freeVariables (Just x) = freeVariables x
|
||||||
|
freeVariables Nothing = Set.empty
|
||||||
|
|
||||||
|
instance CanHaveFreeVars a => CanHaveFreeVars [a] where
|
||||||
|
freeVariables [] = Set.empty
|
||||||
|
freeVariables (x:xs) = freeVariables x `Set.union` freeVariables xs
|
||||||
|
|
||||||
9
src/Bang/Utils/PP.hs
Normal file
9
src/Bang/Utils/PP.hs
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
module Bang.Utils.PP(
|
||||||
|
PP(..)
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import
|
||||||
|
|
||||||
|
class PP a where
|
||||||
|
ppr :: a -> Doc
|
||||||
20
src/Bang/Utils/Pretty.hs
Normal file
20
src/Bang/Utils/Pretty.hs
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
module Bang.Utils.Pretty(
|
||||||
|
BangDoc
|
||||||
|
, Annotation(..)
|
||||||
|
, text'
|
||||||
|
, word
|
||||||
|
)
|
||||||
|
where
|
||||||
|
|
||||||
|
import Data.Text.Lazy(Text, unpack)
|
||||||
|
import Text.PrettyPrint.Annotated(Doc, text, integer)
|
||||||
|
|
||||||
|
type BangDoc = Doc Annotation
|
||||||
|
|
||||||
|
data Annotation = KeywordAnnotation
|
||||||
|
|
||||||
|
text' :: Text -> Doc a
|
||||||
|
text' = text . unpack
|
||||||
|
|
||||||
|
word :: Word -> Doc a
|
||||||
|
word = integer . fromIntegral
|
||||||
22
src/Main.hs
Normal file
22
src/Main.hs
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import Bang.CommandLine(getCommand, BangCommand(..), helpString)
|
||||||
|
import Bang.AST(ppModule)
|
||||||
|
import Bang.Monad(runCompiler)
|
||||||
|
import Bang.Syntax.Parser(runParser, parseModule)
|
||||||
|
import Bang.Syntax.PostProcess(runPostProcessor)
|
||||||
|
import Bang.TypeInfer(runTypeInference)
|
||||||
|
import Data.Version(showVersion)
|
||||||
|
import Paths_bang(version)
|
||||||
|
import Text.PrettyPrint.Annotated(render)
|
||||||
|
|
||||||
|
main :: IO ()
|
||||||
|
main = getCommand >>= \ cmd ->
|
||||||
|
case cmd of
|
||||||
|
Parse o -> do mdl <- runCompiler cmd o (\ r t -> runParser r t parseModule)
|
||||||
|
putStrLn (render (ppModule mdl))
|
||||||
|
TypeCheck o -> do mdl <- runCompiler cmd o (\ r t ->
|
||||||
|
do mdl <- runParser r t parseModule
|
||||||
|
mdl' <- runPostProcessor mdl
|
||||||
|
runTypeInference mdl')
|
||||||
|
putStrLn (render (ppModule mdl))
|
||||||
|
Help -> putStrLn helpString
|
||||||
|
Version -> putStrLn ("Bang tool, version " ++ showVersion version)
|
||||||
@@ -1 +0,0 @@
|
|||||||
fn main() {}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
pub mod syntax;
|
|
||||||
227
src/syntax.rs
227
src/syntax.rs
@@ -1,227 +0,0 @@
|
|||||||
mod error;
|
|
||||||
mod location;
|
|
||||||
mod name;
|
|
||||||
mod parse;
|
|
||||||
#[cfg(test)]
|
|
||||||
mod parser_tests;
|
|
||||||
pub mod tokens;
|
|
||||||
|
|
||||||
pub use location::{Located, Location};
|
|
||||||
pub use name::Name;
|
|
||||||
use proptest_derive::Arbitrary;
|
|
||||||
use std::fmt::Debug;
|
|
||||||
use std::ops::Range;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Module {
|
|
||||||
definitions: Vec<Definition>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Definition {
|
|
||||||
location: Location,
|
|
||||||
export: ExportClass,
|
|
||||||
type_restrictions: TypeRestrictions,
|
|
||||||
definition: Def,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Located for Definition {
|
|
||||||
fn location(&self) -> Location {
|
|
||||||
self.location.clone()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Def {
|
|
||||||
Enumeration(EnumerationDef),
|
|
||||||
Structure(StructureDef),
|
|
||||||
Function(FunctionDef),
|
|
||||||
Value(ValueDef),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Located for Def {
|
|
||||||
fn location(&self) -> Location {
|
|
||||||
match self {
|
|
||||||
Def::Enumeration(def) => def.location.clone(),
|
|
||||||
Def::Structure(def) => def.location.clone(),
|
|
||||||
Def::Function(def) => def.location.clone(),
|
|
||||||
Def::Value(def) => def.location.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct EnumerationDef {
|
|
||||||
name: String,
|
|
||||||
location: Location,
|
|
||||||
variants: Vec<EnumerationVariant>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct EnumerationVariant {
|
|
||||||
location: Location,
|
|
||||||
name: String,
|
|
||||||
argument: Option<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct StructureDef {
|
|
||||||
name: String,
|
|
||||||
location: Location,
|
|
||||||
fields: Vec<StructureField>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct StructureField {
|
|
||||||
location: Location,
|
|
||||||
export: ExportClass,
|
|
||||||
name: String,
|
|
||||||
field_type: Option<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct FunctionDef {
|
|
||||||
name: String,
|
|
||||||
location: Location,
|
|
||||||
arguments: Vec<FunctionArg>,
|
|
||||||
return_type: Option<Type>,
|
|
||||||
body: Vec<Statement>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct FunctionArg {
|
|
||||||
name: String,
|
|
||||||
arg_type: Option<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct ValueDef {
|
|
||||||
name: String,
|
|
||||||
location: Location,
|
|
||||||
value: Expression,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum ExportClass {
|
|
||||||
Public,
|
|
||||||
Private,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Statement {
|
|
||||||
Binding(BindingStmt),
|
|
||||||
Expression(Expression),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct BindingStmt {
|
|
||||||
location: Location,
|
|
||||||
mutable: bool,
|
|
||||||
variable: Name,
|
|
||||||
value: Expression,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Expression {
|
|
||||||
Value(ConstantValue),
|
|
||||||
Reference(Name),
|
|
||||||
EnumerationValue(Name, Name, Option<Box<Expression>>),
|
|
||||||
StructureValue(Name, Vec<FieldValue>),
|
|
||||||
Conditional(ConditionalExpr),
|
|
||||||
Call(Box<Expression>, CallKind, Vec<Expression>),
|
|
||||||
Block(Location, Vec<Statement>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct ConditionalExpr {
|
|
||||||
location: Location,
|
|
||||||
test: Box<Expression>,
|
|
||||||
consequent: Box<Expression>,
|
|
||||||
alternative: Option<Box<Expression>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum CallKind {
|
|
||||||
Infix,
|
|
||||||
Normal,
|
|
||||||
Postfix,
|
|
||||||
Prefix,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct FieldValue {
|
|
||||||
field: Name,
|
|
||||||
value: Expression,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct TypeRestrictions {
|
|
||||||
restrictions: Vec<TypeRestriction>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TypeRestrictions {
|
|
||||||
fn empty() -> Self {
|
|
||||||
TypeRestrictions {
|
|
||||||
restrictions: vec![],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct TypeRestriction {
|
|
||||||
constructor: Type,
|
|
||||||
arguments: Vec<Type>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Type {
|
|
||||||
Constructor(Location, String),
|
|
||||||
Variable(Location, String),
|
|
||||||
Primitive(Location, String),
|
|
||||||
Application(Box<Type>, Vec<Type>),
|
|
||||||
Function(Vec<Type>, Box<Type>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Located for Type {
|
|
||||||
fn location(&self) -> Location {
|
|
||||||
match self {
|
|
||||||
Type::Constructor(l, _) => l.clone(),
|
|
||||||
Type::Variable(l, _) => l.clone(),
|
|
||||||
Type::Primitive(l, _) => l.clone(),
|
|
||||||
Type::Application(t1, ts) => {
|
|
||||||
let mut result = t1.location();
|
|
||||||
if let Some(last) = ts.last() {
|
|
||||||
result = result.extend_to(&last.location());
|
|
||||||
}
|
|
||||||
result
|
|
||||||
}
|
|
||||||
Type::Function(args, ret) => {
|
|
||||||
if let Some(first) = args.first() {
|
|
||||||
first.location().extend_to(&ret.location())
|
|
||||||
} else {
|
|
||||||
ret.location()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum ConstantValue {
|
|
||||||
Integer(Location, IntegerWithBase),
|
|
||||||
Character(Location, char),
|
|
||||||
String(Location, String),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Arbitrary)]
|
|
||||||
pub struct IntegerWithBase {
|
|
||||||
#[proptest(strategy = "proptest::prop_oneof![ \
|
|
||||||
proptest::strategy::Just(None), \
|
|
||||||
proptest::strategy::Just(Some(2)), \
|
|
||||||
proptest::strategy::Just(Some(8)), \
|
|
||||||
proptest::strategy::Just(Some(10)), \
|
|
||||||
proptest::strategy::Just(Some(16)), \
|
|
||||||
]")]
|
|
||||||
base: Option<u8>,
|
|
||||||
value: u64,
|
|
||||||
}
|
|
||||||
@@ -1,127 +0,0 @@
|
|||||||
//use codespan_reporting::diagnostic::{Diagnostic, Label};
|
|
||||||
use crate::syntax::tokens::Token;
|
|
||||||
use std::ops::Range;
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
|
||||||
pub enum ParserError {
|
|
||||||
#[error("Lexer error at {file_id}: {error}")]
|
|
||||||
LexerError { file_id: usize, error: LexerError },
|
|
||||||
|
|
||||||
#[error("Unacceptable end of file at {file_id} while {place}")]
|
|
||||||
UnacceptableEof { file_id: usize, place: &'static str },
|
|
||||||
|
|
||||||
#[error("Unexpected token at {file_id}: expected {expected}, saw {token}")]
|
|
||||||
UnexpectedToken {
|
|
||||||
file_id: usize,
|
|
||||||
span: Range<usize>,
|
|
||||||
token: Token,
|
|
||||||
expected: &'static str,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Error, PartialEq)]
|
|
||||||
pub enum LexerError {
|
|
||||||
#[error("Illegal control character in input stream at offset {offset}")]
|
|
||||||
IllegalControlCharacter { offset: usize },
|
|
||||||
|
|
||||||
#[error("Illegal primitive value/type; it cut off before we could determine which at {span:?}")]
|
|
||||||
IllegalPrimitive { span: Range<usize> },
|
|
||||||
|
|
||||||
#[error("Illegal character in primitive ({char:?}) at {span:?}")]
|
|
||||||
IllegalPrimitiveCharacter { span: Range<usize>, char: char },
|
|
||||||
|
|
||||||
#[error("Unfinished character constant found at {span:?}")]
|
|
||||||
UnfinishedCharacter { span: Range<usize> },
|
|
||||||
|
|
||||||
#[error("Unfinished string constant found at {span:?}")]
|
|
||||||
UnfinishedString { span: Range<usize> },
|
|
||||||
|
|
||||||
#[error("Character {char:?} has some extra bits at the end at {span:?}")]
|
|
||||||
OverlongCharacter { char: char, span: Range<usize> },
|
|
||||||
|
|
||||||
#[error("Unknown escaped character {escaped_char:?} at {span:?}")]
|
|
||||||
UnknownEscapeCharacter {
|
|
||||||
escaped_char: char,
|
|
||||||
span: Range<usize>,
|
|
||||||
},
|
|
||||||
|
|
||||||
#[error("Invalid unicode escape sequence at {span:?}")]
|
|
||||||
InvalidUnicode { span: Range<usize> },
|
|
||||||
}
|
|
||||||
|
|
||||||
impl LexerError {
|
|
||||||
pub fn to_triple(&self) -> (usize, Result<Token, LexerError>, usize) {
|
|
||||||
match self {
|
|
||||||
LexerError::IllegalControlCharacter { offset } => (*offset, Err(self.clone()), *offset),
|
|
||||||
LexerError::IllegalPrimitive { span } => (span.start, Err(self.clone()), span.end),
|
|
||||||
LexerError::IllegalPrimitiveCharacter { span, .. } => {
|
|
||||||
(span.start, Err(self.clone()), span.end)
|
|
||||||
}
|
|
||||||
LexerError::UnfinishedCharacter { span, .. } => {
|
|
||||||
(span.start, Err(self.clone()), span.end)
|
|
||||||
}
|
|
||||||
LexerError::UnfinishedString { span, .. } => (span.start, Err(self.clone()), span.end),
|
|
||||||
LexerError::OverlongCharacter { span, .. } => (span.start, Err(self.clone()), span.end),
|
|
||||||
LexerError::UnknownEscapeCharacter { span, .. } => {
|
|
||||||
(span.start, Err(self.clone()), span.end)
|
|
||||||
}
|
|
||||||
LexerError::InvalidUnicode { span, .. } => (span.start, Err(self.clone()), span.end),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//impl<F> From<LexerError> for Diagnostic<F> {
|
|
||||||
// fn from(value: LexerError) -> Self {
|
|
||||||
// match value {
|
|
||||||
// LexerError::IllegalControlCharacter { file, offset } => Diagnostic::error()
|
|
||||||
// .with_code("E1001")
|
|
||||||
// .with_message("Illegal control character in input stream")
|
|
||||||
// .with_label(Label::primary(file, offset..offset).with_message("illegal character")),
|
|
||||||
//
|
|
||||||
// LexerError::IllegalPrimitive { file, span } => Diagnostic::error()
|
|
||||||
// .with_code("E1002")
|
|
||||||
// .with_message("Illegal primitive; it cut off before it could finish")
|
|
||||||
// .with_label(
|
|
||||||
// Label::primary(file, span)
|
|
||||||
// .with_message("should be at least one character after the %"),
|
|
||||||
// ),
|
|
||||||
//
|
|
||||||
// LexerError::IllegalPrimitiveCharacter { file, span, char } => Diagnostic::error()
|
|
||||||
// .with_code("E1003")
|
|
||||||
// .with_message(format!("Illegal character {char:?} in primitive"))
|
|
||||||
// .with_label(Label::primary(file, span).with_message("illegal character")),
|
|
||||||
//
|
|
||||||
// LexerError::UnfinishedCharacter { file, span } => Diagnostic::error()
|
|
||||||
// .with_code("E1004")
|
|
||||||
// .with_message("Unfinished character in input stream.")
|
|
||||||
// .with_label(Label::primary(file, span).with_message("unfinished character")),
|
|
||||||
//
|
|
||||||
// LexerError::UnfinishedString { file, span } => Diagnostic::error()
|
|
||||||
// .with_code("E1005")
|
|
||||||
// .with_message("Unfinished string in input stream.")
|
|
||||||
// .with_label(Label::primary(file, span).with_message("unfinished string")),
|
|
||||||
//
|
|
||||||
// LexerError::OverlongCharacter { file, char, span } => Diagnostic::error()
|
|
||||||
// .with_code("E1006")
|
|
||||||
// .with_message(format!(
|
|
||||||
// "Character {char:?} has some extra bits at the end of it."
|
|
||||||
// ))
|
|
||||||
// .with_label(Label::primary(file, span).with_message("overlong character")),
|
|
||||||
//
|
|
||||||
// LexerError::UnknownEscapeCharacter {
|
|
||||||
// file,
|
|
||||||
// escaped_char,
|
|
||||||
// span,
|
|
||||||
// } => Diagnostic::error()
|
|
||||||
// .with_code("E1007")
|
|
||||||
// .with_message(format!("Unknown escape character {escaped_char:?}."))
|
|
||||||
// .with_label(Label::primary(file, span).with_message("unknown character")),
|
|
||||||
//
|
|
||||||
// LexerError::InvalidUnicode { file, span } => Diagnostic::error()
|
|
||||||
// .with_code("E1008")
|
|
||||||
// .with_message("Unknown or invalid unicode escape sequence.")
|
|
||||||
// .with_label(Label::primary(file, span).with_message("escape sequence")),
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
//}
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
use codespan_reporting::diagnostic::Label;
|
|
||||||
use std::cmp::{max, min};
|
|
||||||
use std::ops::Range;
|
|
||||||
|
|
||||||
pub trait Located {
|
|
||||||
fn location(&self) -> Location;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
|
||||||
pub struct Location {
|
|
||||||
file_id: usize,
|
|
||||||
span: Range<usize>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Location {
|
|
||||||
pub fn new(file_id: usize, span: Range<usize>) -> Self {
|
|
||||||
Location { file_id, span }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extend_to(&self, other: &Location) -> Location {
|
|
||||||
assert_eq!(self.file_id, other.file_id);
|
|
||||||
Location {
|
|
||||||
file_id: self.file_id,
|
|
||||||
span: min(self.span.start, other.span.start)..max(self.span.end, other.span.end),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn merge_span(mut self, span: Range<usize>) -> Location {
|
|
||||||
self.span = min(self.span.start, span.start)..max(self.span.end, span.end);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn file_id(&self) -> usize {
|
|
||||||
self.file_id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn span(&self) -> Range<usize> {
|
|
||||||
self.span.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn primary_label(&self) -> Label<usize> {
|
|
||||||
Label::primary(self.file_id, self.span.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn secondary_label(&self) -> Label<usize> {
|
|
||||||
Label::secondary(self.file_id, self.span.clone())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
use crate::syntax::Location;
|
|
||||||
use std::cmp;
|
|
||||||
use std::fmt;
|
|
||||||
use std::hash;
|
|
||||||
use std::sync::atomic::{AtomicU64, Ordering};
|
|
||||||
|
|
||||||
static IDENTIFIER_COUNTER: AtomicU64 = AtomicU64::new(0);
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Name {
|
|
||||||
printable: String,
|
|
||||||
identifier: u64,
|
|
||||||
location: Option<Location>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl cmp::PartialEq for Name {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.identifier == other.identifier
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl cmp::Eq for Name {}
|
|
||||||
|
|
||||||
impl hash::Hash for Name {
|
|
||||||
fn hash<H: hash::Hasher>(&self, state: &mut H) {
|
|
||||||
self.identifier.hash(state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Name {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}:{}", self.printable, self.identifier)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Name {
|
|
||||||
pub fn new<S: ToString>(location: Location, s: S) -> Name {
|
|
||||||
let my_id = IDENTIFIER_COUNTER.fetch_add(1, Ordering::SeqCst);
|
|
||||||
Name {
|
|
||||||
printable: s.to_string(),
|
|
||||||
identifier: my_id,
|
|
||||||
location: Some(location),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn gensym(base: &'static str) -> Name {
|
|
||||||
let formatted = format!("<{base}>");
|
|
||||||
let my_id = IDENTIFIER_COUNTER.fetch_add(1, Ordering::SeqCst);
|
|
||||||
|
|
||||||
Name {
|
|
||||||
printable: formatted,
|
|
||||||
identifier: my_id,
|
|
||||||
location: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_printed(&self) -> &str {
|
|
||||||
self.printable.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
1250
src/syntax/parse.rs
1250
src/syntax/parse.rs
File diff suppressed because it is too large
Load Diff
@@ -1,70 +0,0 @@
|
|||||||
use crate::syntax::*;
|
|
||||||
use crate::syntax::error::ParserError;
|
|
||||||
use crate::syntax::tokens::*;
|
|
||||||
|
|
||||||
grammar(file_id: usize);
|
|
||||||
|
|
||||||
extern {
|
|
||||||
type Location = usize;
|
|
||||||
type Error = ParserError;
|
|
||||||
|
|
||||||
enum Token {
|
|
||||||
"(" => Token::OpenParen,
|
|
||||||
")" => Token::CloseParen,
|
|
||||||
"[" => Token::OpenSquare,
|
|
||||||
"]" => Token::CloseSquare,
|
|
||||||
"{" => Token::OpenBrace,
|
|
||||||
"}" => Token::CloseBrace,
|
|
||||||
";" => Token::Semi,
|
|
||||||
":" => Token::Colon,
|
|
||||||
"," => Token::Comma,
|
|
||||||
"`" => Token::BackTick,
|
|
||||||
"\\" => Token::Lambda(_),
|
|
||||||
"->" => Token::Arrow,
|
|
||||||
|
|
||||||
"<constructor>" => Token::TypeName(<String>),
|
|
||||||
"<value>" => Token::ValueName(<String>),
|
|
||||||
"<op>" => Token::OperatorName(<String>),
|
|
||||||
"<prim_constructor>" => Token::PrimitiveTypeName(<String>),
|
|
||||||
"<prim_value>" => Token::PrimitiveValueName(<String>),
|
|
||||||
"<integer>" => Token::Integer(<IntegerWithBase>),
|
|
||||||
"<char>" => Token::Character(<char>),
|
|
||||||
"<string>" => Token::String(<String>),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub Type: Type = {
|
|
||||||
FunctionType,
|
|
||||||
}
|
|
||||||
|
|
||||||
FunctionType: Type = {
|
|
||||||
TypeApplication,
|
|
||||||
<argtype:FunctionType> "->" <ret:TypeApplication> =>
|
|
||||||
Type::Function(Box::new(argtype), Box::new(ret)),
|
|
||||||
}
|
|
||||||
|
|
||||||
TypeApplication: Type = {
|
|
||||||
BaseType,
|
|
||||||
<s:@L> <c:"<constructor>"> <e:@L> <arguments: BaseType*> => {
|
|
||||||
let constructor = Type::Constructor(Location::new(file_id, s..e), c);
|
|
||||||
Type::Application(Box::new(constructor), arguments)
|
|
||||||
},
|
|
||||||
<s:@L> <c:"<prim_constructor>"> <e:@L> <arguments: BaseType*> => {
|
|
||||||
let constructor = Type::Constructor(Location::new(file_id, s..e), c);
|
|
||||||
Type::Application(Box::new(constructor), arguments)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
BaseType: Type = {
|
|
||||||
<s:@L> <v:"<value>"> <e:@L> =>
|
|
||||||
Type::Variable(Location::new(file_id, s..e), v),
|
|
||||||
<s:@L> <p: "<prim_value>"> <e:@L> =>
|
|
||||||
Type::Primitive(Location::new(file_id, s..e), p),
|
|
||||||
"(" <t:Type> ")" => t,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub ConstantValue: ConstantValue = {
|
|
||||||
<s:@L> <x:"<integer>"> <e:@L> => ConstantValue::Integer(Location::new(file_id, s..e), x),
|
|
||||||
<s:@L> <x:"<char>"> <e:@L> => ConstantValue::Character(Location::new(file_id, s..e), x),
|
|
||||||
<s:@L> <x:"<string>"> <e:@L> => ConstantValue::String(Location::new(file_id, s..e), x),
|
|
||||||
}
|
|
||||||
@@ -1,930 +0,0 @@
|
|||||||
use crate::syntax::error::ParserError;
|
|
||||||
use crate::syntax::parse::Parser;
|
|
||||||
use crate::syntax::tokens::{Lexer, Token};
|
|
||||||
use crate::syntax::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn constants() {
|
|
||||||
let parse_constant = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_constant()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_constant("16"),
|
|
||||||
Ok(ConstantValue::Integer(
|
|
||||||
_,
|
|
||||||
IntegerWithBase {
|
|
||||||
base: None,
|
|
||||||
value: 16,
|
|
||||||
}
|
|
||||||
))
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_constant("0x10"),
|
|
||||||
Ok(ConstantValue::Integer(
|
|
||||||
_,
|
|
||||||
IntegerWithBase {
|
|
||||||
base: Some(16),
|
|
||||||
value: 16,
|
|
||||||
}
|
|
||||||
))
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_constant("0o20"),
|
|
||||||
Ok(ConstantValue::Integer(
|
|
||||||
_,
|
|
||||||
IntegerWithBase {
|
|
||||||
base: Some(8),
|
|
||||||
value: 16,
|
|
||||||
}
|
|
||||||
))
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_constant("0b10000"),
|
|
||||||
Ok(ConstantValue::Integer(
|
|
||||||
_,
|
|
||||||
IntegerWithBase {
|
|
||||||
base: Some(2),
|
|
||||||
value: 16,
|
|
||||||
}
|
|
||||||
))
|
|
||||||
));
|
|
||||||
assert!(
|
|
||||||
matches!(parse_constant("\"foo\""), Ok(ConstantValue::String(_, x))
|
|
||||||
if x == "foo")
|
|
||||||
);
|
|
||||||
assert!(matches!(
|
|
||||||
parse_constant("'f'"),
|
|
||||||
Ok(ConstantValue::Character(_, 'f'))
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn types() {
|
|
||||||
let parse_type = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_type()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_type("Cons"),
|
|
||||||
Ok(Type::Application(cons, empty)) if
|
|
||||||
matches!(cons.as_ref(), Type::Constructor(_, c) if c == "Cons") &&
|
|
||||||
empty.is_empty()
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_type("cons"),
|
|
||||||
Ok(Type::Variable(_, c)) if c == "cons"
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_type("Cons a b"),
|
|
||||||
Ok(Type::Application(a, b))
|
|
||||||
if matches!(a.as_ref(), Type::Constructor(_, c) if c == "Cons") &&
|
|
||||||
matches!(b.as_slice(), [Type::Variable(_, b1), Type::Variable(_, b2)]
|
|
||||||
if b1 == "a" && b2 == "b")
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_type("a -> z"),
|
|
||||||
Ok(Type::Function(a, z))
|
|
||||||
if matches!(a.as_slice(), [Type::Variable(_, a1)] if a1 == "a") &&
|
|
||||||
matches!(z.as_ref(), Type::Variable(_, z1) if z1 == "z")
|
|
||||||
));
|
|
||||||
println!("-------------");
|
|
||||||
println!("{:?}", parse_type("(a -> z)"));
|
|
||||||
println!("-------------");
|
|
||||||
assert!(matches!(
|
|
||||||
parse_type("(a -> z)"),
|
|
||||||
Ok(Type::Function(a, z))
|
|
||||||
if matches!(a.as_slice(), [Type::Variable(_, a1)] if a1 == "a") &&
|
|
||||||
matches!(z.as_ref(), Type::Variable(_, z1) if z1 == "z")
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_type("a b -> z"),
|
|
||||||
Ok(Type::Function(a, z))
|
|
||||||
if matches!(a.as_slice(), [Type::Variable(_, a1), Type::Variable(_, b1)]
|
|
||||||
if a1 == "a" && b1 == "b") &&
|
|
||||||
matches!(z.as_ref(), Type::Variable(_, z1) if z1 == "z")
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_type("Cons a b -> z"),
|
|
||||||
Ok(Type::Function(a, z))
|
|
||||||
if matches!(a.as_slice(), [Type::Application(cons, appargs)]
|
|
||||||
if matches!(cons.as_ref(), Type::Constructor(_, c) if c == "Cons") &&
|
|
||||||
matches!(appargs.as_slice(), [Type::Variable(_, b1), Type::Variable(_, b2)]
|
|
||||||
if b1 == "a" && b2 == "b")) &&
|
|
||||||
matches!(z.as_ref(), Type::Variable(_, z1) if z1 == "z")
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn type_restrictions() {
|
|
||||||
let parse_tr = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_type_restrictions()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_tr("restrict()"),
|
|
||||||
Ok(TypeRestrictions{ restrictions }) if restrictions.is_empty()
|
|
||||||
));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_tr("restrict(Cons a b)"),
|
|
||||||
Ok(TypeRestrictions { restrictions }) if restrictions.len() == 1 &&
|
|
||||||
matches!(&restrictions[0], TypeRestriction {
|
|
||||||
constructor,
|
|
||||||
arguments,
|
|
||||||
} if matches!(constructor, Type::Constructor(_, x) if x == "Cons") &&
|
|
||||||
arguments.len() == 2 &&
|
|
||||||
matches!(&arguments[0], Type::Variable(_, x) if x == "a") &&
|
|
||||||
matches!(&arguments[1], Type::Variable(_, x) if x == "b"))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_tr("restrict(Cons a b,)"),
|
|
||||||
Ok(TypeRestrictions { restrictions }) if restrictions.len() == 1 &&
|
|
||||||
matches!(&restrictions[0], TypeRestriction {
|
|
||||||
constructor,
|
|
||||||
arguments,
|
|
||||||
} if matches!(constructor, Type::Constructor(_, x) if x == "Cons") &&
|
|
||||||
arguments.len() == 2 &&
|
|
||||||
matches!(&arguments[0], Type::Variable(_, x) if x == "a") &&
|
|
||||||
matches!(&arguments[1], Type::Variable(_, x) if x == "b"))));
|
|
||||||
|
|
||||||
assert!(matches!(parse_tr("restrict(,Cons a b,)"), Err(_)));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_tr("restrict(Cons a b, Monad m)"),
|
|
||||||
Ok(TypeRestrictions { restrictions }) if restrictions.len() == 2 &&
|
|
||||||
matches!(&restrictions[0], TypeRestriction {
|
|
||||||
constructor,
|
|
||||||
arguments,
|
|
||||||
} if matches!(constructor, Type::Constructor(_, x) if x == "Cons") &&
|
|
||||||
arguments.len() == 2 &&
|
|
||||||
matches!(&arguments[0], Type::Variable(_, x) if x == "a") &&
|
|
||||||
matches!(&arguments[1], Type::Variable(_, x) if x == "b")) &&
|
|
||||||
matches!(&restrictions[1], TypeRestriction {
|
|
||||||
constructor,
|
|
||||||
arguments,
|
|
||||||
} if matches!(constructor, Type::Constructor(_, x) if x == "Monad") &&
|
|
||||||
arguments.len() == 1 &&
|
|
||||||
matches!(&arguments[0], Type::Variable(_, x) if x == "m"))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_tr("restrict(Cons a b, Monad m,)"),
|
|
||||||
Ok(TypeRestrictions { restrictions }) if restrictions.len() == 2 &&
|
|
||||||
matches!(&restrictions[0], TypeRestriction {
|
|
||||||
constructor,
|
|
||||||
arguments,
|
|
||||||
} if matches!(constructor, Type::Constructor(_, x) if x == "Cons") &&
|
|
||||||
arguments.len() == 2 &&
|
|
||||||
matches!(&arguments[0], Type::Variable(_, x) if x == "a") &&
|
|
||||||
matches!(&arguments[1], Type::Variable(_, x) if x == "b")) &&
|
|
||||||
matches!(&restrictions[1], TypeRestriction {
|
|
||||||
constructor,
|
|
||||||
arguments,
|
|
||||||
} if matches!(constructor, Type::Constructor(_, x) if x == "Monad") &&
|
|
||||||
arguments.len() == 1 &&
|
|
||||||
matches!(&arguments[0], Type::Variable(_, x) if x == "m"))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn field_definition() {
|
|
||||||
let parse_fd = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_field_definition()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(parse_fd("foo"), Err(_),));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_fd("foo,"),
|
|
||||||
Ok(Some(StructureField{ name, export: ExportClass::Private, field_type: None, .. }))
|
|
||||||
if name == "foo"
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_fd("foo}"),
|
|
||||||
Ok(Some(StructureField{ name, export: ExportClass::Private, field_type: None, .. }))
|
|
||||||
if name == "foo"
|
|
||||||
));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_fd("foo: Word8,"),
|
|
||||||
Ok(Some(StructureField{ name, field_type, .. }))
|
|
||||||
if name == "foo" &&
|
|
||||||
matches!(&field_type, Some(Type::Application(c, args))
|
|
||||||
if matches!(c.as_ref(), Type::Constructor(_, c) if c == "Word8") &&
|
|
||||||
args.is_empty())));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_fd("foo: Cons a b,"),
|
|
||||||
Ok(Some(StructureField{ name, field_type, .. }))
|
|
||||||
if name == "foo" &&
|
|
||||||
matches!(&field_type, Some(Type::Application(c, args))
|
|
||||||
if matches!(c.as_ref(), Type::Constructor(_, c) if c == "Cons") &&
|
|
||||||
matches!(&args.as_slice(), &[Type::Variable(_, v1), Type::Variable(_, v2)]
|
|
||||||
if v1 == "a" && v2 == "b"))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_fd("foo: a -> b,"),
|
|
||||||
Ok(Some(StructureField{ name, field_type, .. }))
|
|
||||||
if name == "foo" &&
|
|
||||||
matches!(&field_type, Some(Type::Function(args, ret))
|
|
||||||
if matches!(&args.as_slice(), &[Type::Variable(_, a)] if a == "a") &&
|
|
||||||
matches!(ret.as_ref(), Type::Variable(_, b) if b == "b"))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_fd("export foo: a -> b,"),
|
|
||||||
Ok(Some(StructureField{ name, export: ExportClass::Public, field_type, .. }))
|
|
||||||
if name == "foo" &&
|
|
||||||
matches!(&field_type, Some(Type::Function(args, ret))
|
|
||||||
if matches!(&args.as_slice(), &[Type::Variable(_, a)] if a == "a") &&
|
|
||||||
matches!(ret.as_ref(), Type::Variable(_, b) if b == "b"))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn structures() {
|
|
||||||
let parse_st = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_structure()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(parse_st("structure { }"), Err(_)));
|
|
||||||
assert!(matches!(parse_st("structure {"), Err(_)));
|
|
||||||
assert!(matches!(parse_st("structure foo {}"), Err(_)));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("structure Foo {}"),
|
|
||||||
Ok(StructureDef { name, fields, .. })
|
|
||||||
if name == "Foo" && fields.is_empty()));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("structure Foo { bar }"),
|
|
||||||
Ok(StructureDef { name, fields, .. })
|
|
||||||
if name == "Foo" &&
|
|
||||||
matches!(fields.as_slice(), &[StructureField { ref name, ref field_type, .. }]
|
|
||||||
if name == "bar" && matches!(field_type, None))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("structure Foo { bar: Word8 }"),
|
|
||||||
Ok(StructureDef { name, fields, .. })
|
|
||||||
if name == "Foo" &&
|
|
||||||
matches!(fields.as_slice(), &[StructureField { ref name, ref field_type, .. }]
|
|
||||||
if name == "bar" &&
|
|
||||||
matches!(field_type, Some(Type::Application(c, args))
|
|
||||||
if matches!(c.as_ref(), Type::Constructor(_, c) if c == "Word8") &&
|
|
||||||
args.is_empty()))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("structure Foo { bar: Word8, goo }"),
|
|
||||||
Ok(StructureDef { name, fields, .. })
|
|
||||||
if name == "Foo" &&
|
|
||||||
matches!(fields.as_slice(),
|
|
||||||
&[StructureField { ref name, ref field_type, .. },
|
|
||||||
StructureField { name: ref name2, field_type: None, .. }]
|
|
||||||
if name == "bar" &&
|
|
||||||
name2 == "goo" &&
|
|
||||||
matches!(field_type, Some(Type::Application(c, args))
|
|
||||||
if matches!(c.as_ref(), Type::Constructor(_, c) if c == "Word8") &&
|
|
||||||
args.is_empty()))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("structure Foo { bar: b c -> a, goo }"),
|
|
||||||
Ok(StructureDef { name, fields, .. })
|
|
||||||
if name == "Foo" &&
|
|
||||||
matches!(fields.as_slice(),
|
|
||||||
&[StructureField { ref name, ref field_type, .. },
|
|
||||||
StructureField { name: ref name2, field_type: None, .. }]
|
|
||||||
if name == "bar" &&
|
|
||||||
name2 == "goo" &&
|
|
||||||
matches!(field_type, Some(Type::Function(args, ret))
|
|
||||||
if matches!(&args.as_slice(), &[Type::Variable(_, b), Type::Variable(_, c)]
|
|
||||||
if b == "b" && c == "c") &&
|
|
||||||
matches!(ret.as_ref(), Type::Variable(_, a) if a == "a")))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("structure Foo { bar: b c -> a, goo, }"),
|
|
||||||
Ok(StructureDef { name, fields, .. })
|
|
||||||
if name == "Foo" &&
|
|
||||||
matches!(fields.as_slice(),
|
|
||||||
&[StructureField { ref name, ref field_type, .. },
|
|
||||||
StructureField { name: ref name2, field_type: None, .. }]
|
|
||||||
if name == "bar" &&
|
|
||||||
name2 == "goo" &&
|
|
||||||
matches!(field_type, Some(Type::Function(args, ret))
|
|
||||||
if matches!(&args.as_slice(), &[Type::Variable(_, b), Type::Variable(_, c)]
|
|
||||||
if b == "b" && c == "c") &&
|
|
||||||
matches!(ret.as_ref(), Type::Variable(_, a) if a == "a")))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn enum_variant() {
|
|
||||||
let parse_ev = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_enum_variant()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(parse_ev("foo"), Err(_),));
|
|
||||||
assert!(matches!(parse_ev("foo,"), Err(_),));
|
|
||||||
assert!(matches!(parse_ev("Cons foo,"), Err(_),));
|
|
||||||
assert!(matches!(parse_ev(""), Err(_)));
|
|
||||||
|
|
||||||
assert!(matches!(parse_ev("}"), Ok(None)));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ev("Cons,"),
|
|
||||||
Ok(Some(EnumerationVariant { name, argument, .. }))
|
|
||||||
if name == "Cons" && argument.is_none()));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ev("Cons }"),
|
|
||||||
Ok(Some(EnumerationVariant { name, argument, .. }))
|
|
||||||
if name == "Cons" && argument.is_none()));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ev("Cons, }"),
|
|
||||||
Ok(Some(EnumerationVariant { name, argument, .. }))
|
|
||||||
if name == "Cons" && argument.is_none()));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ev("Cons(Pair a),"),
|
|
||||||
Ok(Some(EnumerationVariant { name, ref argument, .. }))
|
|
||||||
if name == "Cons" &&
|
|
||||||
matches!(argument, Some(Type::Application(typef, args))
|
|
||||||
if matches!(typef.as_ref(), Type::Constructor(_, name)
|
|
||||||
if name == "Pair") &&
|
|
||||||
matches!(&args.as_slice(), &[Type::Variable(_, argname)]
|
|
||||||
if argname == "a"))));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ev("Cons(Pair a) }"),
|
|
||||||
Ok(Some(EnumerationVariant { name, ref argument, .. }))
|
|
||||||
if name == "Cons" &&
|
|
||||||
matches!(argument, Some(Type::Application(typef, args))
|
|
||||||
if matches!(typef.as_ref(), Type::Constructor(_, name)
|
|
||||||
if name == "Pair") &&
|
|
||||||
matches!(&args.as_slice(), &[Type::Variable(_, argname)]
|
|
||||||
if argname == "a"))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ev("Cons(a b -> c) }"),
|
|
||||||
Ok(Some(EnumerationVariant { name, ref argument, .. }))
|
|
||||||
if name == "Cons" &&
|
|
||||||
matches!(argument, Some(Type::Function(args, ret))
|
|
||||||
if matches!(&args.as_slice(), &[Type::Variable(_, a), Type::Variable(_, b)]
|
|
||||||
if a == "a" && b == "b") &&
|
|
||||||
matches!(ret.as_ref(), Type::Variable(_, c) if c == "c"))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn enumerations() {
|
|
||||||
let parse_en = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_enumeration()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(parse_en("enumeration { }"), Err(_)));
|
|
||||||
assert!(matches!(parse_en("enumeration {"), Err(_)));
|
|
||||||
assert!(matches!(parse_en("enumeration"), Err(_)));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_en("enumeration Empty { }"),
|
|
||||||
Ok(EnumerationDef { name, variants, .. })
|
|
||||||
if name == "Empty" && variants.is_empty()));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_en("enumeration Alternates { A, B }"),
|
|
||||||
Ok(EnumerationDef { name, variants, .. })
|
|
||||||
if name == "Alternates" &&
|
|
||||||
matches!(&variants.as_slice(), &[
|
|
||||||
EnumerationVariant { name: name1, argument: arg1, ..},
|
|
||||||
EnumerationVariant { name: name2, argument: arg2, ..},
|
|
||||||
] if name1 == "A" && arg1.is_none() &&
|
|
||||||
name2 == "B" && arg2.is_none())));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_en("enumeration Alternates { A, B, }"),
|
|
||||||
Ok(EnumerationDef { name, variants, .. })
|
|
||||||
if name == "Alternates" &&
|
|
||||||
matches!(&variants.as_slice(), &[
|
|
||||||
EnumerationVariant { name: name1, argument: arg1, ..},
|
|
||||||
EnumerationVariant { name: name2, argument: arg2, ..},
|
|
||||||
] if name1 == "A" && arg1.is_none() &&
|
|
||||||
name2 == "B" && arg2.is_none())));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn expressions() {
|
|
||||||
let parse_ex = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_expression()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(parse_ex(""), Err(_)));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("x"),
|
|
||||||
Ok(Expression::Reference(n)) if n.as_printed() == "x"));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("(x)"),
|
|
||||||
Ok(Expression::Reference(n)) if n.as_printed() == "x"));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("'c'"),
|
|
||||||
Ok(Expression::Value(ConstantValue::Character(_, _)))
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("\"c\""),
|
|
||||||
Ok(Expression::Value(ConstantValue::String(_, _)))
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("1"),
|
|
||||||
Ok(Expression::Value(ConstantValue::Integer(_, _)))
|
|
||||||
));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("(1)"),
|
|
||||||
Ok(Expression::Value(ConstantValue::Integer(_, _)))
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn enumeration_values() {
|
|
||||||
let parse_ex = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_expression()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(parse_ex("Hello::world"), Err(_)));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("Hello::World"),
|
|
||||||
Ok(Expression::EnumerationValue(t, v, None))
|
|
||||||
if t.as_printed() == "Hello" &&
|
|
||||||
v.as_printed() == "World"));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("Hello::World(a)"),
|
|
||||||
Ok(Expression::EnumerationValue(t, v, Some(_)))
|
|
||||||
if t.as_printed() == "Hello" &&
|
|
||||||
v.as_printed() == "World"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn structure_value() {
|
|
||||||
let parse_st = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.parse_expression()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(parse_st("Foo{ , }"), Err(_)));
|
|
||||||
assert!(matches!(parse_st("Foo{ foo, }"), Err(_)));
|
|
||||||
assert!(matches!(parse_st("Foo{ foo: , }"), Err(_)));
|
|
||||||
assert!(matches!(parse_st("Foo{ , foo: 1, }"), Err(_)));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("Foo{ foo: 1 }"),
|
|
||||||
Ok(Expression::StructureValue(sname, values))
|
|
||||||
if sname.as_printed() == "Foo" &&
|
|
||||||
matches!(values.as_slice(), [FieldValue{ field, value }]
|
|
||||||
if field.as_printed() == "foo" &&
|
|
||||||
matches!(value, Expression::Value(ConstantValue::Integer(_,_))))));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("Foo{ foo: 1, }"),
|
|
||||||
Ok(Expression::StructureValue(sname, values))
|
|
||||||
if sname.as_printed() == "Foo" &&
|
|
||||||
matches!(values.as_slice(), [FieldValue{ field, value }]
|
|
||||||
if field.as_printed() == "foo" &&
|
|
||||||
matches!(value, Expression::Value(ConstantValue::Integer(_,_))))));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("Foo{ foo: 1, bar: \"foo\" }"),
|
|
||||||
Ok(Expression::StructureValue(sname, values))
|
|
||||||
if sname.as_printed() == "Foo" &&
|
|
||||||
matches!(values.as_slice(), [FieldValue{ field: f1, value: v1 },
|
|
||||||
FieldValue{ field: f2, value: v2 }]
|
|
||||||
if f1.as_printed() == "foo" &&
|
|
||||||
f2.as_printed() == "bar" &&
|
|
||||||
matches!(v1, Expression::Value(ConstantValue::Integer(_,_))) &&
|
|
||||||
matches!(v2, Expression::Value(ConstantValue::String(_,_))))));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_st("Foo{ foo: 1, bar: \"foo\", }"),
|
|
||||||
Ok(Expression::StructureValue(sname, values))
|
|
||||||
if sname.as_printed() == "Foo" &&
|
|
||||||
matches!(values.as_slice(), [FieldValue{ field: f1, value: v1 },
|
|
||||||
FieldValue{ field: f2, value: v2 }]
|
|
||||||
if f1.as_printed() == "foo" &&
|
|
||||||
f2.as_printed() == "bar" &&
|
|
||||||
matches!(v1, Expression::Value(ConstantValue::Integer(_,_))) &&
|
|
||||||
matches!(v2, Expression::Value(ConstantValue::String(_,_))))));
|
|
||||||
assert!(matches!(parse_st("Foo{ foo: 1,, bar: \"foo\", }"), Err(_)));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn infix_and_precedence() {
|
|
||||||
let parse_ex = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.add_infix_precedence("+", parse::Associativity::Left, 6);
|
|
||||||
result.add_infix_precedence("*", parse::Associativity::Right, 7);
|
|
||||||
result.parse_expression()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("0"),
|
|
||||||
Ok(Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value, .. })))
|
|
||||||
if value == 0));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("(0)"),
|
|
||||||
Ok(Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value, .. })))
|
|
||||||
if value == 0));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("((0))"),
|
|
||||||
Ok(Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value, .. })))
|
|
||||||
if value == 0));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("1 + 2"),
|
|
||||||
Ok(Expression::Call(plus, CallKind::Infix, args))
|
|
||||||
if matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v1, .. })),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v2, .. }))
|
|
||||||
] if *v1 == 1 && *v2 == 2)));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("1 + 2 + 3"),
|
|
||||||
Ok(Expression::Call(plus, CallKind::Infix, args))
|
|
||||||
if matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Call(innerplus, CallKind::Infix, inner_args),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v3, .. }))
|
|
||||||
] if *v3 == 3 &&
|
|
||||||
matches!(innerplus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(inner_args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v1, .. })),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v2, .. }))
|
|
||||||
] if *v1 == 1 && *v2 == 2))));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("1 * 2 * 3"),
|
|
||||||
Ok(Expression::Call(times, CallKind::Infix, args))
|
|
||||||
if matches!(times.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v1, .. })),
|
|
||||||
Expression::Call(innertimes, CallKind::Infix, inner_args),
|
|
||||||
] if *v1 == 1 &&
|
|
||||||
matches!(innertimes.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(inner_args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v2, .. })),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: v3, .. }))
|
|
||||||
] if *v2 == 2 && *v3 == 3))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("1 + 2 * 3 + 4"),
|
|
||||||
Ok(Expression::Call(plus_right, CallKind::Infix, outer_args)) if
|
|
||||||
matches!(plus_right.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(outer_args.as_slice(), [
|
|
||||||
Expression::Call(plus_left, CallKind::Infix, left_args),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v4))
|
|
||||||
] if
|
|
||||||
matches!(v4, IntegerWithBase{ value: 4, .. }) &&
|
|
||||||
matches!(plus_left.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(left_args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v1)),
|
|
||||||
Expression::Call(times, CallKind::Infix, times_args)
|
|
||||||
] if
|
|
||||||
matches!(v1, IntegerWithBase{ value: 1, .. }) &&
|
|
||||||
matches!(times.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(times_args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v2)),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v3))
|
|
||||||
] if
|
|
||||||
matches!(v2, IntegerWithBase{ value: 2, .. }) &&
|
|
||||||
matches!(v3, IntegerWithBase{ value: 3, .. }))))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("1 * 2 + 3 * 4"),
|
|
||||||
Ok(Expression::Call(plus, CallKind::Infix, outer_args)) if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(outer_args.as_slice(), [
|
|
||||||
Expression::Call(left_times, CallKind::Infix, left_args),
|
|
||||||
Expression::Call(right_times, CallKind::Infix, right_args)
|
|
||||||
] if
|
|
||||||
matches!(left_times.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(right_times.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(left_args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v1)),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v2)),
|
|
||||||
] if
|
|
||||||
matches!(v1, IntegerWithBase { value: 1, .. }) &&
|
|
||||||
matches!(v2, IntegerWithBase { value: 2, .. })) &&
|
|
||||||
matches!(right_args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v3)),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v4)),
|
|
||||||
] if
|
|
||||||
matches!(v3, IntegerWithBase { value: 3, .. }) &&
|
|
||||||
matches!(v4, IntegerWithBase { value: 4, .. })))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn calls() {
|
|
||||||
let parse_ex = |str| {
|
|
||||||
let lexer = Lexer::from(str);
|
|
||||||
let mut result = Parser::new(0, lexer);
|
|
||||||
result.add_infix_precedence("+", parse::Associativity::Left, 6);
|
|
||||||
result.add_infix_precedence("*", parse::Associativity::Right, 7);
|
|
||||||
result.parse_expression()
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f()"),
|
|
||||||
Ok(Expression::Call(f, CallKind::Normal, args)) if
|
|
||||||
matches!(f.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
args.is_empty()));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f(a)"),
|
|
||||||
Ok(Expression::Call(f, CallKind::Normal, args)) if
|
|
||||||
matches!(f.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
matches!(args.as_slice(), [Expression::Reference(n)] if n.as_printed() == "a")));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f(a,b)"),
|
|
||||||
Ok(Expression::Call(f, CallKind::Normal, args)) if
|
|
||||||
matches!(f.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Reference(a),
|
|
||||||
Expression::Reference(b),
|
|
||||||
] if a.as_printed() == "a" && b.as_printed() == "b")));
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f(a,b,)"),
|
|
||||||
Ok(Expression::Call(f, CallKind::Normal, args)) if
|
|
||||||
matches!(f.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Reference(a),
|
|
||||||
Expression::Reference(b),
|
|
||||||
] if a.as_printed() == "a" && b.as_printed() == "b")));
|
|
||||||
assert!(matches!(parse_ex("f(,a,b,)"), Err(_)));
|
|
||||||
assert!(matches!(parse_ex("f(a,,b,)"), Err(_)));
|
|
||||||
assert!(matches!(parse_ex("f(a,b,,)"), Err(_)));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f()()"),
|
|
||||||
Ok(Expression::Call(f, CallKind::Normal, args)) if
|
|
||||||
matches!(f.as_ref(), Expression::Call(inner, CallKind::Normal, inner_args) if
|
|
||||||
matches!(inner.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
inner_args.is_empty()) &&
|
|
||||||
args.is_empty()));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f() + 1"),
|
|
||||||
Ok(Expression::Call(plus, CallKind::Infix, args)) if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Call(subcall, CallKind::Normal, subargs),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v1))
|
|
||||||
] if
|
|
||||||
matches!(v1, IntegerWithBase{ value: 1, .. }) &&
|
|
||||||
matches!(subcall.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
subargs.is_empty())));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f(a + b, c*d)"),
|
|
||||||
Ok(Expression::Call(eff, CallKind::Normal, args)) if
|
|
||||||
matches!(eff.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Call(plus, CallKind::Infix, pargs),
|
|
||||||
Expression::Call(times, CallKind::Infix, targs),
|
|
||||||
] if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(times.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(pargs.as_slice(), [ Expression::Reference(a), Expression::Reference(b) ] if
|
|
||||||
a.as_printed() == "a" && b.as_printed() == "b") &&
|
|
||||||
matches!(targs.as_slice(), [ Expression::Reference(c), Expression::Reference(d) ] if
|
|
||||||
c.as_printed() == "c" && d.as_printed() == "d"))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("f(a + b, c*d,)"),
|
|
||||||
Ok(Expression::Call(eff, CallKind::Normal, args)) if
|
|
||||||
matches!(eff.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Call(plus, CallKind::Infix, pargs),
|
|
||||||
Expression::Call(times, CallKind::Infix, targs),
|
|
||||||
] if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(times.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(pargs.as_slice(), [ Expression::Reference(a), Expression::Reference(b) ] if
|
|
||||||
a.as_printed() == "a" && b.as_printed() == "b") &&
|
|
||||||
matches!(targs.as_slice(), [ Expression::Reference(c), Expression::Reference(d) ] if
|
|
||||||
c.as_printed() == "c" && d.as_printed() == "d"))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("3 + f(1 + 2)"),
|
|
||||||
Ok(Expression::Call(plus, CallKind::Infix, args)) if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(args.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, v3)),
|
|
||||||
Expression::Call(eff, CallKind::Normal, fargs)
|
|
||||||
] if
|
|
||||||
matches!(v3, IntegerWithBase{ value: 3, .. }) &&
|
|
||||||
matches!(eff.as_ref(), Expression::Reference(n) if n.as_printed() == "f") &&
|
|
||||||
matches!(fargs.as_slice(), [Expression::Call(p, CallKind::Infix, pargs)] if
|
|
||||||
matches!(p.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(pargs.as_slice(), [Expression::Value(v1), Expression::Value(v2)] if
|
|
||||||
matches!(v1, ConstantValue::Integer(_, IntegerWithBase { value: 1, .. })) &&
|
|
||||||
matches!(v2, ConstantValue::Integer(_, IntegerWithBase { value: 2, .. })))))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("(f . g)(1 + 2)"),
|
|
||||||
Ok(Expression::Call(fg, CallKind::Normal, args)) if
|
|
||||||
matches!(fg.as_ref(), Expression::Call(dot, CallKind::Infix, fgargs) if
|
|
||||||
matches!(dot.as_ref(), Expression::Reference(n) if n.as_printed() == ".") &&
|
|
||||||
matches!(fgargs.as_slice(), [Expression::Reference(f), Expression::Reference(g)] if
|
|
||||||
f.as_printed() == "f" && g.as_printed() == "g")) &&
|
|
||||||
matches!(args.as_slice(), [Expression::Call(plus, CallKind::Infix, pargs)] if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(pargs.as_slice(), [Expression::Value(v1), Expression::Value(v2)] if
|
|
||||||
matches!(v1, ConstantValue::Integer(_, IntegerWithBase{ value: 1, .. })) &&
|
|
||||||
matches!(v2, ConstantValue::Integer(_, IntegerWithBase{ value: 2, .. }))))));
|
|
||||||
|
|
||||||
assert!(matches!(
|
|
||||||
parse_ex("a + b(2 + 3) * c"),
|
|
||||||
Ok(Expression::Call(plus, CallKind::Infix, pargs)) if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(pargs.as_slice(), [
|
|
||||||
Expression::Reference(a),
|
|
||||||
Expression::Call(times, CallKind::Infix, targs)
|
|
||||||
] if a.as_printed() == "a" &&
|
|
||||||
matches!(times.as_ref(), Expression::Reference(n) if n.as_printed() == "*") &&
|
|
||||||
matches!(targs.as_slice(), [
|
|
||||||
Expression::Call(b, CallKind::Normal, bargs),
|
|
||||||
Expression::Reference(c),
|
|
||||||
] if c.as_printed() == "c" &&
|
|
||||||
matches!(b.as_ref(), Expression::Reference(n) if n.as_printed() == "b") &&
|
|
||||||
matches!(bargs.as_slice(), [Expression::Call(plus, CallKind::Infix, pargs)] if
|
|
||||||
matches!(plus.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
|
|
||||||
matches!(pargs.as_slice(), [
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: 2, .. })),
|
|
||||||
Expression::Value(ConstantValue::Integer(_, IntegerWithBase{ value: 3, .. }))
|
|
||||||
]))))));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
fn prefix_and_postfix() {
    // Builds a parser with infix + / * plus prefix and postfix forms of
    // ++ and -- at distinct binding powers, then parses one expression.
    let parse_ex = |str| {
        let lexer = Lexer::from(str);
        let mut result = Parser::new(0, lexer);
        result.add_infix_precedence("+", parse::Associativity::Left, 4);
        result.add_infix_precedence("*", parse::Associativity::Left, 8);
        result.add_prefix_precedence("++", 6);
        result.add_postfix_precedence("++", 6);
        result.add_prefix_precedence("--", 7);
        result.add_postfix_precedence("--", 7);
        result.parse_expression()
    };

    // A lone prefix operator becomes a CallKind::Prefix call node.
    assert!(matches!(
        parse_ex("++a"),
        Ok(Expression::Call(pp, CallKind::Prefix, args)) if
            matches!(pp.as_ref(), Expression::Reference(n) if n.as_printed() == "++") &&
            matches!(args.as_slice(), [Expression::Reference(n)] if n.as_printed() == "a")));

    // A lone postfix operator becomes a CallKind::Postfix call node.
    assert!(matches!(
        parse_ex("a--"),
        Ok(Expression::Call(pp, CallKind::Postfix, args)) if
            matches!(pp.as_ref(), Expression::Reference(n) if n.as_printed() == "--") &&
            matches!(args.as_slice(), [Expression::Reference(n)] if n.as_printed() == "a")));

    // the prefix is weaker than the postfix, so it should be the outside
    // operator
    assert!(matches!(
        parse_ex("++a--"),
        Ok(Expression::Call(pp, CallKind::Prefix, args)) if
            matches!(pp.as_ref(), Expression::Reference(n) if n.as_printed() == "++") &&
            matches!(args.as_slice(), [Expression::Call(mm, CallKind::Postfix, args)] if
                matches!(mm.as_ref(), Expression::Reference(n) if n.as_printed() == "--") &&
                matches!(args.as_slice(), [Expression::Reference(n)] if n.as_printed() == "a"))));

    // the prefix is stronger than the postfix, so it should be the inside
    // operator
    assert!(matches!(
        parse_ex("--a++"),
        Ok(Expression::Call(pp, CallKind::Postfix, args)) if
            matches!(pp.as_ref(), Expression::Reference(n) if n.as_printed() == "++") &&
            matches!(args.as_slice(), [Expression::Call(mm, CallKind::Prefix, args)] if
                matches!(mm.as_ref(), Expression::Reference(n) if n.as_printed() == "--") &&
                matches!(args.as_slice(), [Expression::Reference(n)] if n.as_printed() == "a"))));

    // A postfix operator binds tighter than the surrounding infix +, so
    // "a++" is the left argument of the addition.
    assert!(matches!(
        parse_ex("a++ + b"),
        Ok(Expression::Call(p, CallKind::Infix, args)) if
            matches!(p.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
            matches!(args.as_slice(), [
                Expression::Call(mm, CallKind::Postfix, args),
                Expression::Reference(n)
            ] if n.as_printed() == "b" &&
                matches!(mm.as_ref(), Expression::Reference(n) if n.as_printed() == "++") &&
                matches!(args.as_slice(), [Expression::Reference(n)] if n.as_printed() == "a"))));

    // Symmetrically, a prefix operator after an infix + applies to the
    // right argument only.
    assert!(matches!(
        parse_ex("a + ++ b"),
        Ok(Expression::Call(p, CallKind::Infix, args)) if
            matches!(p.as_ref(), Expression::Reference(n) if n.as_printed() == "+") &&
            matches!(args.as_slice(), [
                Expression::Reference(n),
                Expression::Call(mm, CallKind::Prefix, args),
            ] if n.as_printed() == "a" &&
                matches!(mm.as_ref(), Expression::Reference(n) if n.as_printed() == "++") &&
                matches!(args.as_slice(), [Expression::Reference(n)] if n.as_printed() == "b"))));

    // Prefix ++ (power 6) is too weak to follow * (power 8), so this is a
    // parse error pointing at the ++ token.
    assert!(matches!(
        parse_ex("a * ++ b"),
        Err(ParserError::UnexpectedToken{ token: Token::OperatorName(pp), .. })
            if pp == "++"));
}
|
|
||||||
|
|
||||||
#[test]
fn blocks() {
    // Parses a single expression with no custom operator table.
    let parse_ex = |str| {
        let lexer = Lexer::from(str);
        let mut result = Parser::new(0, lexer);
        result.parse_expression()
    };

    // An empty block desugars to a single zero-argument call to
    // %prim%void.
    assert!(matches!(
        parse_ex("{}"),
        Ok(Expression::Block(_, void)) if
            matches!(void.as_slice(), [Statement::Expression(call)] if
                matches!(call, Expression::Call(void, CallKind::Normal, vargs) if
                    matches!(void.as_ref(), Expression::Reference(n) if
                        n.as_printed() == "%prim%void") &&
                    vargs.is_empty()))));
    // A final expression without a semicolon is kept as-is.
    assert!(matches!(
        parse_ex("{ x }"),
        Ok(Expression::Block(_, x)) if
            matches!(x.as_slice(), [Statement::Expression(Expression::Reference(n))] if
                n.as_printed() == "x")));
    // A trailing semicolon appends an implicit %prim%void call after the
    // statement.
    assert!(matches!(
        parse_ex("{ x; }"),
        Ok(Expression::Block(_, x)) if
            matches!(x.as_slice(), [
                Statement::Expression(Expression::Reference(n)),
                Statement::Expression(Expression::Call(primv, CallKind::Normal, vargs)),
            ] if n.as_printed() == "x" && vargs.is_empty() &&
                matches!(primv.as_ref(), Expression::Reference(n) if
                    n.as_printed() == "%prim%void"))));
    // Multiple statements are kept in source order.
    assert!(matches!(
        parse_ex("{ x; y }"),
        Ok(Expression::Block(_, x)) if
            matches!(x.as_slice(), [
                Statement::Expression(Expression::Reference(x)),
                Statement::Expression(Expression::Reference(y)),
            ] if x.as_printed() == "x" && y.as_printed() == "y")));
}
|
|
||||||
|
|
||||||
#[test]
fn bindings() {
    // Parses a single expression with no custom operator table.
    let parse_ex = |str| {
        let lexer = Lexer::from(str);
        let mut result = Parser::new(0, lexer);
        result.parse_expression()
    };

    // `let x = y;` produces an immutable Binding statement; the trailing
    // semicolon still appends an implicit trailing expression.
    assert!(matches!(
        parse_ex("{ let x = y; }"),
        Ok(Expression::Block(_, x)) if
            matches!(x.as_slice(), [Statement::Binding(b), Statement::Expression(_)] if
                !b.mutable &&
                b.variable.as_printed() == "x" &&
                matches!(b.value, Expression::Reference(ref n) if n.as_printed() == "y"))));
}
|
|
||||||
|
|
||||||
#[test]
fn conditionals() {
    // Parses a single expression with no custom operator table.
    let parse_ex = |str| {
        let lexer = Lexer::from(str);
        let mut result = Parser::new(0, lexer);
        result.parse_expression()
    };

    // Full if/else: test, consequent block, and alternative block.
    assert!(matches!(
        parse_ex("if x { y } else { z }"),
        Ok(Expression::Conditional(cond)) if
            matches!(cond.test.as_ref(), Expression::Reference(n) if n.as_printed() == "x") &&
            matches!(cond.consequent.as_ref(), Expression::Block(_, cs) if
                matches!(cs.as_slice(), [Statement::Expression(Expression::Reference(n))] if
                    n.as_printed() == "y")) &&
            matches!(cond.alternative.as_ref(), Some(expr) if
                matches!(expr.as_ref(), Expression::Block(_, ast) if
                    matches!(ast.as_slice(), [Statement::Expression(Expression::Reference(n))] if
                        n.as_printed() == "z")))));

    // An `if` without `else` has no alternative.
    assert!(matches!(
        parse_ex("if x { y }"),
        Ok(Expression::Conditional(cond)) if
            matches!(cond.test.as_ref(), Expression::Reference(n) if n.as_printed() == "x") &&
            matches!(cond.consequent.as_ref(), Expression::Block(_, cs) if
                matches!(cs.as_slice(), [Statement::Expression(Expression::Reference(n))] if
                    n.as_printed() == "y")) &&
            cond.alternative.is_none()));

    // A stray token between the test and the block is a parse error.
    assert!(matches!(parse_ex("if x v { z }"), Err(_)));

    // The test position accepts a full (here infix) expression.
    assert!(matches!(
        parse_ex("if x + y { z }"),
        Ok(Expression::Conditional(cond)) if
            matches!(cond.test.as_ref(), Expression::Call(_, CallKind::Infix, _))));
}
|
|
||||||
@@ -1,709 +0,0 @@
|
|||||||
use crate::syntax::IntegerWithBase;
|
|
||||||
use crate::syntax::error::LexerError;
|
|
||||||
use proptest_derive::Arbitrary;
|
|
||||||
use std::fmt;
|
|
||||||
use std::ops::Range;
|
|
||||||
use std::str::CharIndices;
|
|
||||||
|
|
||||||
/// A [`Token`] together with the range of the source text it was lexed
/// from.
#[derive(Clone)]
pub struct LocatedToken {
    /// The lexed token itself.
    pub token: Token,
    /// Offsets (start..end) of the token in the input, as produced by the
    /// lexer's `CharIndices` stream (byte offsets).
    pub span: Range<usize>,
}
|
|
||||||
|
|
||||||
/// A single token of the input stream; used to help the parsing function over
/// more concrete things than bytes.
///
/// The [`std::fmt::Display`] implementation is designed to round-trip, so those
/// needing a more regular or descriptive option should consider using the
/// [`std::fmt::Debug`] implementation instead.
#[derive(Clone, Debug, PartialEq, Eq, Arbitrary)]
pub enum Token {
    // Punctuation and grouping.
    OpenParen,
    CloseParen,
    OpenSquare,
    CloseSquare,
    OpenBrace,
    CloseBrace,
    Semi,
    Colon,
    Comma,
    BackTick,
    Arrow,
    /// `true` when written with the literal 'λ' character, `false` when
    /// written with a backslash (see the Display impl).
    Lambda(bool),

    // Identifiers; the proptest regexes mirror what the lexer accepts.
    TypeName(#[proptest(regex = r"[A-Z][a-zA-Z0-9_]*")] String),
    ValueName(#[proptest(regex = r"[a-z_][a-zA-Z0-9_]*")] String),
    OperatorName(
        #[proptest(
            regex = r"[\~\!\@\#\$\%\^\&\*\+\-\=\.<>\?\|][\~\!\@\#\$\%\^\&\*\+\-\=\.<>\?\|_]*",
            filter = "|x| x != \"->\""
        )]
        String,
    ),

    // Identifiers written with the `prim%` prefix.
    PrimitiveTypeName(#[proptest(regex = r"[A-Z][a-zA-Z0-9_]*")] String),
    PrimitiveValueName(#[proptest(regex = r"[a-z_][a-zA-Z0-9_]*")] String),

    // Literals.
    Integer(IntegerWithBase),
    Character(char),
    String(String),
}
|
|
||||||
|
|
||||||
impl fmt::Display for Token {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Token::OpenParen => write!(f, "("),
|
|
||||||
Token::CloseParen => write!(f, ")"),
|
|
||||||
Token::OpenSquare => write!(f, "["),
|
|
||||||
Token::CloseSquare => write!(f, "]"),
|
|
||||||
Token::OpenBrace => write!(f, "{{"),
|
|
||||||
Token::CloseBrace => write!(f, "}}"),
|
|
||||||
Token::Semi => write!(f, ";"),
|
|
||||||
Token::Colon => write!(f, ":"),
|
|
||||||
Token::Comma => write!(f, ","),
|
|
||||||
Token::BackTick => write!(f, "`"),
|
|
||||||
Token::Arrow => write!(f, "->"),
|
|
||||||
Token::Lambda(false) => write!(f, "\\"),
|
|
||||||
Token::Lambda(true) => write!(f, "λ"),
|
|
||||||
Token::TypeName(str) => write!(f, "{str}"),
|
|
||||||
Token::ValueName(str) => write!(f, "{str}"),
|
|
||||||
Token::OperatorName(str) => write!(f, "{str}"),
|
|
||||||
Token::PrimitiveTypeName(str) => write!(f, "prim%{str}"),
|
|
||||||
Token::PrimitiveValueName(str) => write!(f, "prim%{str}"),
|
|
||||||
Token::Integer(IntegerWithBase { base, value }) => match base {
|
|
||||||
None => write!(f, "{value}"),
|
|
||||||
Some(2) => write!(f, "0b{value:b}"),
|
|
||||||
Some(8) => write!(f, "0o{value:o}"),
|
|
||||||
Some(10) => write!(f, "0d{value}"),
|
|
||||||
Some(16) => write!(f, "0x{value:x}"),
|
|
||||||
Some(base) => write!(f, "<illegal number token base={base} value={value}>"),
|
|
||||||
},
|
|
||||||
Token::Character(c) => write!(f, "{c:?}"),
|
|
||||||
Token::String(s) => write!(f, "{s:?}"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterator-style lexer over a source string; see the `Iterator` impl for
/// the state transitions.
#[allow(private_interfaces)]
pub enum Lexer<'a> {
    /// Still producing tokens.
    Working(LexerState<'a>),
    /// A lexing error occurred; it is replayed on every later `next`.
    Errored(LexerError),
    /// Input exhausted; carries the final stream offset.
    Done(usize),
}
|
|
||||||
|
|
||||||
/// Mutable lexing state: the character stream plus one character of
/// pushback lookahead.
struct LexerState<'a> {
    /// Source characters paired with their byte offsets.
    stream: CharIndices<'a>,
    /// A single stashed `(offset, char)`, returned before the stream.
    buffer: Option<(usize, char)>,
}
|
|
||||||
|
|
||||||
impl<'a> From<&'a str> for Lexer<'a> {
|
|
||||||
fn from(value: &'a str) -> Self {
|
|
||||||
Lexer::Working(LexerState {
|
|
||||||
stream: value.char_indices(),
|
|
||||||
buffer: None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Lexer<'a> {
    /// Creates a lexer over `stream`, starting in the `Working` state with
    /// an empty pushback buffer.
    pub fn new(stream: &'a str) -> Self {
        Lexer::Working(LexerState {
            stream: stream.char_indices(),
            buffer: None,
        })
    }
}
|
|
||||||
|
|
||||||
impl<'a> Iterator for Lexer<'a> {
|
|
||||||
type Item = Result<LocatedToken, LexerError>;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
match self {
|
|
||||||
Lexer::Done(_) => None,
|
|
||||||
Lexer::Errored(e) => Some(Err(e.clone())),
|
|
||||||
Lexer::Working(state) => match state.next_token() {
|
|
||||||
Err(e) => {
|
|
||||||
println!("ERROR: {e}");
|
|
||||||
*self = Lexer::Errored(e.clone());
|
|
||||||
Some(Err(e))
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(None) => {
|
|
||||||
*self = Lexer::Done(state.stream.offset());
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Some(ltoken)) => Some(Ok(ltoken)),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> LexerState<'a> {
|
|
||||||
fn next_char(&mut self) -> Option<(usize, char)> {
|
|
||||||
let result = self.buffer.take().or_else(|| self.stream.next());
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
fn stash_char(&mut self, idx: usize, c: char) {
|
|
||||||
assert!(self.buffer.is_none());
|
|
||||||
self.buffer = Some((idx, c));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn next_token(&mut self) -> Result<Option<LocatedToken>, LexerError> {
|
|
||||||
while let Some((token_start_offset, char)) = self.next_char() {
|
|
||||||
if char.is_whitespace() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let simple_response = |token| {
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token,
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
}))
|
|
||||||
};
|
|
||||||
|
|
||||||
match char {
|
|
||||||
'(' => return simple_response(Token::OpenParen),
|
|
||||||
')' => return simple_response(Token::CloseParen),
|
|
||||||
'[' => return simple_response(Token::OpenSquare),
|
|
||||||
']' => return simple_response(Token::CloseSquare),
|
|
||||||
'{' => return simple_response(Token::OpenBrace),
|
|
||||||
'}' => return simple_response(Token::CloseBrace),
|
|
||||||
';' => return simple_response(Token::Semi),
|
|
||||||
':' => return simple_response(Token::Colon),
|
|
||||||
',' => return simple_response(Token::Comma),
|
|
||||||
'`' => return simple_response(Token::BackTick),
|
|
||||||
'\\' => return simple_response(Token::Lambda(false)),
|
|
||||||
'λ' => return simple_response(Token::Lambda(true)),
|
|
||||||
|
|
||||||
'0' => return self.starts_with_zero(token_start_offset),
|
|
||||||
'\'' => return self.starts_with_single(token_start_offset),
|
|
||||||
'\"' => return self.starts_with_double(token_start_offset),
|
|
||||||
'-' => return self.starts_with_dash(token_start_offset),
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(value) = char.to_digit(10) {
|
|
||||||
return self.parse_integer(token_start_offset, 10, None, value as u64);
|
|
||||||
}
|
|
||||||
|
|
||||||
if char.is_uppercase() {
|
|
||||||
return self.parse_identifier(
|
|
||||||
token_start_offset,
|
|
||||||
char.into(),
|
|
||||||
|c| c.is_alphanumeric() || c == '_',
|
|
||||||
Token::TypeName,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if char.is_alphabetic() || char == '_' {
|
|
||||||
return self.parse_identifier(
|
|
||||||
token_start_offset,
|
|
||||||
char.into(),
|
|
||||||
|c| c.is_alphanumeric() || c == '_',
|
|
||||||
Token::ValueName,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if !char.is_alphanumeric() && !char.is_whitespace() && !char.is_control() {
|
|
||||||
return self.parse_identifier(
|
|
||||||
token_start_offset,
|
|
||||||
char.into(),
|
|
||||||
|c| !c.is_alphanumeric() && !c.is_whitespace() && !c.is_control(),
|
|
||||||
Token::OperatorName,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(None)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn starts_with_zero(
|
|
||||||
&mut self,
|
|
||||||
token_start_offset: usize,
|
|
||||||
) -> Result<Option<LocatedToken>, LexerError> {
|
|
||||||
match self.next_char() {
|
|
||||||
None => {
|
|
||||||
let token = Token::Integer(IntegerWithBase {
|
|
||||||
base: None,
|
|
||||||
value: 0,
|
|
||||||
});
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token,
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
Some((_, 'b')) => self.parse_integer(token_start_offset, 2, Some(2), 0),
|
|
||||||
Some((_, 'o')) => self.parse_integer(token_start_offset, 8, Some(8), 0),
|
|
||||||
Some((_, 'd')) => self.parse_integer(token_start_offset, 10, Some(10), 0),
|
|
||||||
Some((_, 'x')) => self.parse_integer(token_start_offset, 16, Some(16), 0),
|
|
||||||
|
|
||||||
Some((offset, c)) => {
|
|
||||||
if let Some(value) = c.to_digit(10) {
|
|
||||||
self.parse_integer(token_start_offset, 10, None, value as u64)
|
|
||||||
} else {
|
|
||||||
self.stash_char(offset, c);
|
|
||||||
let token = Token::Integer(IntegerWithBase {
|
|
||||||
base: None,
|
|
||||||
value: 0,
|
|
||||||
});
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token,
|
|
||||||
span: token_start_offset..offset,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_integer(
|
|
||||||
&mut self,
|
|
||||||
token_start_offset: usize,
|
|
||||||
base: u32,
|
|
||||||
provided_base: Option<u8>,
|
|
||||||
mut value: u64,
|
|
||||||
) -> Result<Option<LocatedToken>, LexerError> {
|
|
||||||
let mut end_offset = self.stream.offset();
|
|
||||||
|
|
||||||
while let Some((offset, c)) = self.next_char() {
|
|
||||||
end_offset = offset;
|
|
||||||
if let Some(digit) = c.to_digit(base) {
|
|
||||||
value = (value * (base as u64)) + (digit as u64);
|
|
||||||
} else {
|
|
||||||
self.stash_char(offset, c);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let token = Token::Integer(IntegerWithBase {
|
|
||||||
base: provided_base,
|
|
||||||
value,
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token,
|
|
||||||
span: token_start_offset..end_offset,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_identifier(
|
|
||||||
&mut self,
|
|
||||||
token_start_offset: usize,
|
|
||||||
mut identifier: String,
|
|
||||||
mut allowed_character: fn(char) -> bool,
|
|
||||||
mut builder: fn(String) -> Token,
|
|
||||||
) -> Result<Option<LocatedToken>, LexerError> {
|
|
||||||
let mut end_offset = self.stream.offset();
|
|
||||||
|
|
||||||
while let Some((offset, c)) = self.next_char() {
|
|
||||||
end_offset = offset;
|
|
||||||
|
|
||||||
if allowed_character(c) {
|
|
||||||
identifier.push(c);
|
|
||||||
} else if identifier == "prim" && c == '%' {
|
|
||||||
identifier = String::new();
|
|
||||||
allowed_character = |c| c.is_alphanumeric() || c == '_';
|
|
||||||
match self.next_char() {
|
|
||||||
None => {
|
|
||||||
return Err(LexerError::IllegalPrimitive {
|
|
||||||
span: token_start_offset..end_offset,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
Some((_, char)) => {
|
|
||||||
if char.is_uppercase() {
|
|
||||||
identifier.push(char);
|
|
||||||
builder = Token::PrimitiveTypeName;
|
|
||||||
} else if char.is_lowercase() || char == '_' {
|
|
||||||
identifier.push(char);
|
|
||||||
builder = Token::PrimitiveValueName;
|
|
||||||
} else {
|
|
||||||
return Err(LexerError::IllegalPrimitiveCharacter {
|
|
||||||
span: token_start_offset..end_offset,
|
|
||||||
char,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
self.stash_char(offset, c);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token: builder(identifier),
|
|
||||||
span: token_start_offset..end_offset,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn starts_with_single(
|
|
||||||
&mut self,
|
|
||||||
token_start_offset: usize,
|
|
||||||
) -> Result<Option<LocatedToken>, LexerError> {
|
|
||||||
let Some((_, mut char)) = self.next_char() else {
|
|
||||||
return Err(LexerError::UnfinishedCharacter {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
if char == '\\' {
|
|
||||||
char = self.get_escaped_character(token_start_offset)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some((idx, finish_char)) = self.next_char() else {
|
|
||||||
return Err(LexerError::UnfinishedCharacter {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
if finish_char != '\'' {
|
|
||||||
return Err(LexerError::OverlongCharacter {
|
|
||||||
char,
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token: Token::Character(char),
|
|
||||||
span: token_start_offset..idx,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_escaped_character(&mut self, token_start_offset: usize) -> Result<char, LexerError> {
|
|
||||||
let Some((idx, escaped_char)) = self.next_char() else {
|
|
||||||
return Err(LexerError::UnfinishedCharacter {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
match escaped_char {
|
|
||||||
'0' => Ok('\0'),
|
|
||||||
'a' => Ok('\u{0007}'),
|
|
||||||
'b' => Ok('\u{0008}'),
|
|
||||||
'f' => Ok('\u{000C}'),
|
|
||||||
'n' => Ok('\n'),
|
|
||||||
'r' => Ok('\r'),
|
|
||||||
't' => Ok('\t'),
|
|
||||||
'u' => self.get_unicode_sequence(idx),
|
|
||||||
'v' => Ok('\u{000B}'),
|
|
||||||
'\'' => Ok('\''),
|
|
||||||
'"' => Ok('"'),
|
|
||||||
'\\' => Ok('\\'),
|
|
||||||
_ => Err(LexerError::UnknownEscapeCharacter {
|
|
||||||
escaped_char,
|
|
||||||
span: idx..self.stream.offset(),
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_unicode_sequence(&mut self, token_start_offset: usize) -> Result<char, LexerError> {
|
|
||||||
let Some((_, char)) = self.next_char() else {
|
|
||||||
return Err(LexerError::InvalidUnicode {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
if char != '{' {
|
|
||||||
return Err(LexerError::InvalidUnicode {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut value = 0;
|
|
||||||
|
|
||||||
while let Some((idx, char)) = self.next_char() {
|
|
||||||
if let Some(digit) = char.to_digit(16) {
|
|
||||||
value = (value * 16) + digit;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if char == '}' {
|
|
||||||
if let Some(char) = char::from_u32(value) {
|
|
||||||
return Ok(char);
|
|
||||||
} else {
|
|
||||||
return Err(LexerError::InvalidUnicode {
|
|
||||||
span: token_start_offset..idx,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return Err(LexerError::InvalidUnicode {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(LexerError::InvalidUnicode {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn starts_with_double(
|
|
||||||
&mut self,
|
|
||||||
token_start_offset: usize,
|
|
||||||
) -> Result<Option<LocatedToken>, LexerError> {
|
|
||||||
let mut result = String::new();
|
|
||||||
|
|
||||||
while let Some((idx, char)) = self.next_char() {
|
|
||||||
match char {
|
|
||||||
'"' => {
|
|
||||||
return Ok(Some(LocatedToken {
|
|
||||||
token: Token::String(result),
|
|
||||||
span: token_start_offset..idx,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
'\\' => result.push(self.get_escaped_character(idx)?),
|
|
||||||
|
|
||||||
_ => result.push(char),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(LexerError::UnfinishedString {
|
|
||||||
span: token_start_offset..self.stream.offset(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn starts_with_dash(
|
|
||||||
&mut self,
|
|
||||||
token_start_offset: usize,
|
|
||||||
) -> Result<Option<LocatedToken>, LexerError> {
|
|
||||||
match self.next_char() {
|
|
||||||
None => Ok(Some(LocatedToken {
|
|
||||||
token: Token::OperatorName("-".into()),
|
|
||||||
span: token_start_offset..token_start_offset + 1,
|
|
||||||
})),
|
|
||||||
Some((end, '>')) => {
|
|
||||||
let Some((pbloc, peekaboo)) = self.next_char() else {
|
|
||||||
return Ok(Some(LocatedToken {
|
|
||||||
token: Token::Arrow,
|
|
||||||
span: token_start_offset..end,
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
let is_operator = !peekaboo.is_alphanumeric()
|
|
||||||
&& !peekaboo.is_whitespace()
|
|
||||||
&& !peekaboo.is_control();
|
|
||||||
|
|
||||||
if is_operator {
|
|
||||||
self.parse_identifier(
|
|
||||||
token_start_offset,
|
|
||||||
format!("->{peekaboo}"),
|
|
||||||
|c| !c.is_alphanumeric() && !c.is_whitespace() && !c.is_control(),
|
|
||||||
Token::OperatorName,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
self.stash_char(pbloc, peekaboo);
|
|
||||||
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token: Token::Arrow,
|
|
||||||
span: token_start_offset..end,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some((_, c)) if !c.is_alphanumeric() && !c.is_whitespace() && !c.is_control() => self
|
|
||||||
.parse_identifier(
|
|
||||||
token_start_offset,
|
|
||||||
format!("-{c}"),
|
|
||||||
|c| !c.is_alphanumeric() && !c.is_whitespace() && !c.is_control(),
|
|
||||||
Token::OperatorName,
|
|
||||||
),
|
|
||||||
Some((idx, c)) => {
|
|
||||||
self.stash_char(idx, c);
|
|
||||||
Ok(Some(LocatedToken {
|
|
||||||
token: Token::OperatorName("-".into()),
|
|
||||||
span: token_start_offset..idx,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
proptest::proptest! {
    // Round-trip property: Display-printing any token and re-lexing the
    // printed text must yield exactly the same token, and nothing more.
    #[test]
    fn token_string_token(token: Token) {
        println!("Starting from {token:?}");
        let string = format!("{token}");
        let mut tokens = Lexer::from(string.as_str());
        let initial_token = tokens.next()
            .expect("Can get a token without an error.")
            .expect("Can get a valid token.")
            .token;

        proptest::prop_assert_eq!(token, initial_token);
        proptest::prop_assert!(tokens.next().is_none());
    }
}
|
|
||||||
|
|
||||||
/// Test helper: lexes `s`, asserting that it produces exactly one valid
/// token, and returns that token.
#[cfg(test)]
fn parsed_single_token(s: &str) -> Token {
    let mut tokens = Lexer::from(s);
    let result = tokens
        .next()
        // unwrap_or_else defers building the panic message until it is
        // actually needed (clippy expect_fun_call: `.expect(format!(..))`
        // allocated the message on every call, even on success).
        .unwrap_or_else(|| panic!("Can get at least one token from {s:?}"))
        .expect("Can get a valid token.")
        .token;

    assert!(
        tokens.next().is_none(),
        "Should only get one token from {s:?}"
    );

    result
}
|
|
||||||
|
|
||||||
#[test]
fn numbers_work_as_expected() {
    // Each case: (input text, expected recorded base, expected value).
    // Covers all four explicit base prefixes plus plain decimal, for both
    // a single digit and a two-digit value.
    let cases: &[(&str, Option<u8>, u64)] = &[
        ("1", None, 1),
        ("0b1", Some(2), 1),
        ("0o1", Some(8), 1),
        ("0d1", Some(10), 1),
        ("0x1", Some(16), 1),
        ("10", None, 10),
        ("0b10", Some(2), 2),
        ("0o10", Some(8), 8),
        ("0d10", Some(10), 10),
        ("0x10", Some(16), 16),
    ];

    for &(input, base, value) in cases {
        assert_eq!(
            Token::Integer(IntegerWithBase { base, value }),
            parsed_single_token(input)
        );
    }
}
|
|
||||||
|
|
||||||
#[test]
fn lambda_works() {
    // Both the backslash and the literal λ lex as Lambda, with the flag
    // recording the spelling; uppercase Λ is an ordinary type name.
    assert_eq!(Token::Lambda(false), parsed_single_token("\\"));
    assert_eq!(Token::Lambda(true), parsed_single_token("λ"));
    assert_eq!(Token::TypeName("Λ".into()), parsed_single_token("Λ"));
}
|
|
||||||
|
|
||||||
#[test]
fn types_work_as_expected() {
    // Anything starting with an uppercase letter — ASCII or not — lexes
    // as a type name.
    for name in ["Int", "Int8", "Γ"] {
        assert_eq!(Token::TypeName(name.into()), parsed_single_token(name));
    }
}
|
|
||||||
|
|
||||||
#[test]
fn values_work_as_expected() {
    // Lowercase-initial identifiers — ASCII or not — lex as value names.
    for name in ["alpha", "ɑ"] {
        assert_eq!(Token::ValueName(name.into()), parsed_single_token(name));
    }
}
|
|
||||||
|
|
||||||
#[test]
fn operators_work_as_expected() {
    // Symbolic characters — including non-ASCII arrows — lex as operator
    // names.
    for op in ["-", "+", "*", "/", "↣"] {
        assert_eq!(Token::OperatorName(op.into()), parsed_single_token(op));
    }
}
|
|
||||||
|
|
||||||
#[test]
fn can_separate_pieces() {
    // Lexes `input` fully and compares the whole token sequence against
    // `expected`.
    let expect_tokens = |input: &str, expected: &[Token]| {
        let actual: Vec<Token> = Lexer::from(input)
            .map(|x| x.expect("Can read valid token").token)
            .collect();
        assert_eq!(expected, actual.as_slice());
    };

    // A single '-' between identifiers is one operator.
    expect_tokens(
        "a-b",
        &[
            Token::ValueName("a".into()),
            Token::OperatorName("-".into()),
            Token::ValueName("b".into()),
        ],
    );

    // Adjacent dashes merge into one "--" operator.
    expect_tokens(
        "a--b",
        &[
            Token::ValueName("a".into()),
            Token::OperatorName("--".into()),
            Token::ValueName("b".into()),
        ],
    );

    // Whitespace keeps the dashes as two separate '-' operators.
    expect_tokens(
        "a - -b",
        &[
            Token::ValueName("a".into()),
            Token::OperatorName("-".into()),
            Token::OperatorName("-".into()),
            Token::ValueName("b".into()),
        ],
    );
}
|
|
||||||
|
|
||||||
#[test]
fn arrow_requires_nonop() {
    // Returns just the first token lexed from `input`.
    let first_token = |input: &str| {
        Lexer::from(input)
            .next()
            .map(|x| x.expect("Can read valid token").token)
    };

    // "->" alone, or followed by a non-operator character, is the Arrow;
    // followed by another operator character it extends into an operator
    // name instead.
    assert_eq!(Some(Token::Arrow), first_token("->"));
    assert_eq!(Some(Token::OperatorName("->*".into())), first_token("->*"));
    assert_eq!(Some(Token::OperatorName("->*".into())), first_token("->*x"));
    assert_eq!(Some(Token::Arrow), first_token("->x"));
}
|
|
||||||
Reference in New Issue
Block a user