diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..2004e45 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1040 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "addr2line" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + 
+[[package]] +name = "backtrace" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-link", +] + +[[package]] +name = "backtrace-ext" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" +dependencies = [ + "backtrace", +] + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" + +[[package]] +name = "bytes" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + +[[package]] +name = "clap" +version = "4.5.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" + +[[package]] +name = "clipboard-win" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bde03770d3df201d4fb868f2c9c59e66a3e4e2bd06692a0fe701e7103c7e84d4" +dependencies = [ + "error-code", +] + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "eclexia" +version = "0.1.0" +dependencies = [ + "clap", + "eclexia-ast", + "eclexia-codegen", + "eclexia-hir", + "eclexia-lexer", + "eclexia-mir", + "eclexia-parser", + "eclexia-runtime", + "eclexia-typeck", + "miette", + "rustyline", + "smol_str", +] + +[[package]] +name = "eclexia-ast" +version = "0.1.0" +dependencies = [ + "la-arena", + "serde", + "smol_str", +] + +[[package]] +name = "eclexia-codegen" +version = "0.1.0" +dependencies = [ + "eclexia-ast", + "eclexia-mir", +] + +[[package]] +name = "eclexia-hir" +version = "0.1.0" +dependencies = [ + "eclexia-ast", + "la-arena", + "smol_str", +] + +[[package]] +name = "eclexia-lexer" +version = "0.1.0" +dependencies = [ + "eclexia-ast", + "logos", + "smol_str", +] + +[[package]] +name = "eclexia-mir" +version = "0.1.0" +dependencies = [ + "eclexia-ast", + "la-arena", + "smol_str", +] + +[[package]] +name = "eclexia-parser" +version = "0.1.0" +dependencies = [ 
+ "eclexia-ast", + "eclexia-lexer", + "smol_str", + "thiserror", +] + +[[package]] +name = "eclexia-runtime" +version = "0.1.0" +dependencies = [ + "tokio", +] + +[[package]] +name = "eclexia-typeck" +version = "0.1.0" +dependencies = [ + "eclexia-ast", + "indexmap", + "rustc-hash", + "smol_str", + "thiserror", +] + +[[package]] +name = "endian-type" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "error-code" +version = "3.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59" + +[[package]] +name = "fd-lock" +version = "4.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78" +dependencies = [ + "cfg-if", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "gimli" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "home" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "indexmap" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "la-arena" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3752f229dcc5a481d60f385fa479ff46818033d881d2d801aa27dffcfb5e8306" + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.178" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "lock_api" +version 
= "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "logos" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7251356ef8cb7aec833ddf598c6cb24d17b689d20b993f9d11a3d764e34e6458" +dependencies = [ + "logos-derive", +] + +[[package]] +name = "logos-codegen" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59f80069600c0d66734f5ff52cc42f2dabd6b29d205f333d61fd7832e9e9963f" +dependencies = [ + "beef", + "fnv", + "lazy_static", + "proc-macro2", + "quote", + "regex-syntax", + "syn", +] + +[[package]] +name = "logos-derive" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24fb722b06a9dc12adb0963ed585f19fc61dc5413e6a9be9422ef92c091e731d" +dependencies = [ + "logos-codegen", +] + +[[package]] +name = "memchr" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "miette" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7" +dependencies = [ + "backtrace", + "backtrace-ext", + "cfg-if", + "miette-derive", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size", + "textwrap", + "unicode-width 0.1.14", +] + +[[package]] +name = "miette-derive" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "nibble_vec" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" +dependencies = [ + "smallvec", +] + +[[package]] +name = "nix" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" +dependencies = [ + "bitflags", + "cfg-if", + "cfg_aliases", + "libc", +] + +[[package]] +name = "object" +version = "0.37.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "owo-colors" +version = "4.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + 
"parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "proc-macro2" +version = "1.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radix_trie" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd" +dependencies = [ + "endian-type", + "nibble_vec", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex-syntax" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustix" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustyline" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7803e8936da37efd9b6d4478277f4b2b9bb5cdb37a113e8d63222e58da647e63" +dependencies = [ + "bitflags", + "cfg-if", + "clipboard-win", + "fd-lock", + "home", + "libc", + "log", + "memchr", + "nix", + "radix_trie", + "unicode-segmentation", + "unicode-width 0.1.14", + "utf8parse", + "windows-sys 0.52.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "smol_str" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead" +dependencies = [ + "serde", +] + +[[package]] +name = "socket2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "supports-color" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" +dependencies = [ + "is_ci", +] + +[[package]] +name = "supports-hyperlinks" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91" + +[[package]] +name = "supports-unicode" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" + +[[package]] +name = "syn" +version = "2.0.112" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21f182278bf2d2bcb3c88b1b08a37df029d71ce3d3ae26168e3c653b213b99d4" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "terminal_size" +version = "0.4.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" +dependencies = [ + "rustix", + "windows-sys 0.60.2", +] + +[[package]] +name = "textwrap" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" +dependencies = [ + "unicode-linebreak", + "unicode-width 0.2.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio" +version = "1.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + 
+[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..51accfc --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +[workspace] +resolver = "2" +members = [ + "compiler/eclexia", + "compiler/eclexia-lexer", + "compiler/eclexia-parser", + "compiler/eclexia-ast", + "compiler/eclexia-typeck", + "compiler/eclexia-hir", + "compiler/eclexia-mir", + "compiler/eclexia-codegen", + "runtime/eclexia-runtime", +] + +[workspace.package] +version = "0.1.0" +edition = "2021" +authors = ["Jonathan D.A. 
Jewell"] +license = "AGPL-3.0-or-later" +repository = "https://gitlab.com/eclexia-lang/eclexia" +homepage = "https://eclexia.org" +rust-version = "1.75" + +[workspace.dependencies] +# Lexer +logos = "0.14" +unicode-xid = "0.2" + +# Parser +# Using hand-written recursive descent for control + +# Data structures +indexmap = "2.0" +im = "15.0" +petgraph = "0.6" +typed-arena = "2.0" +bumpalo = "3.16" + +# Error handling +thiserror = "1.0" +miette = { version = "7", features = ["fancy"] } +ariadne = "0.4" + +# Serialization +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" + +# CLI +clap = { version = "4", features = ["derive"] } +rustyline = "14" + +# Async runtime (for runtime system) +tokio = { version = "1.37", features = ["full"] } + +# Optimization / LP +# good_lp = "1.8" # Add when implementing shadow prices + +# Utilities +smol_str = "0.2" +la-arena = "0.3" +rustc-hash = "1.1" + +[profile.release] +lto = true +codegen-units = 1 +strip = true + +[profile.dev] +opt-level = 0 +debug = true diff --git a/STATE.scm b/STATE.scm index 81e2254..8980bc2 100644 --- a/STATE.scm +++ b/STATE.scm @@ -6,8 +6,8 @@ '((version . "0.1.0") (updated . "2025-12-31") (project . "eclexia"))) (define current-position - '((phase . "v0.1 - Initial Setup") - (overall-completion . 80) + '((phase . "v0.2 - Core Development") + (overall-completion . 35) (components ((rsr-compliance ((status . "complete") (completion . 100))) (security-docs ((status . "complete") (completion . 100))) (scm-files ((status . "complete") (completion . 100))) @@ -18,7 +18,16 @@ (bibliography ((status . "complete") (completion . 100))) (extended-proofs ((status . "complete") (completion . 100))) (implementation-roadmap ((status . "complete") (completion . 100))) - (implementation ((status . "not-started") (completion . 0))))))) + (compiler-lexer ((status . "complete") (completion . 100))) + (compiler-parser ((status . "complete") (completion . 100))) + (compiler-ast ((status . 
"complete") (completion . 100))) + (compiler-typeck ((status . "in-progress") (completion . 20))) + (compiler-hir ((status . "not-started") (completion . 0))) + (compiler-mir ((status . "not-started") (completion . 0))) + (compiler-codegen ((status . "not-started") (completion . 0))) + (runtime ((status . "not-started") (completion . 5))) + (cli ((status . "complete") (completion . 100))) + (repl ((status . "complete") (completion . 100))))))) (define blockers-and-issues '((critical ()) (high-priority ()))) @@ -30,7 +39,8 @@ '((snapshots ((date . "2025-12-15") (session . "initial") (notes . "SCM files added")) ((date . "2025-12-17") (session . "security-review") (notes . "Fixed placeholders in SECURITY.md, CODE_OF_CONDUCT.md, CONTRIBUTING.md; updated SCM files")) ((date . "2025-12-31") (session . "academic-proofs") (notes . "Added comprehensive academic documentation: WHITEPAPER.md, PROOFS.md, SPECIFICATION.md, FORMAL_VERIFICATION.md, THEORY.md, ALGORITHMS.md, BIBLIOGRAPHY.md")) - ((date . "2025-12-31") (session . "implementation-planning") (notes . "Added EXTENDED_PROOFS.md with complete academic proofs; added IMPLEMENTATION_ROADMAP.md with full technology stack and phased development plan"))))) + ((date . "2025-12-31") (session . "implementation-planning") (notes . "Added EXTENDED_PROOFS.md with complete academic proofs; added IMPLEMENTATION_ROADMAP.md with full technology stack and phased development plan")) + ((date . "2025-12-31") (session . "compiler-phase1") (notes . "Implemented Phase 1 of compiler: lexer with dimensional literals, recursive descent parser, AST with dimensional types, basic type checker scaffolding, CLI with build/run/check/fmt commands, interactive REPL. All 14 tests passing."))))) (define state-summary - '((project . "eclexia") (completion . 80) (blockers . 0) (updated . "2025-12-31"))) + '((project . "eclexia") (completion . 35) (blockers . 0) (updated . 
"2025-12-31"))) diff --git a/compiler/eclexia-ast/Cargo.toml b/compiler/eclexia-ast/Cargo.toml new file mode 100644 index 0000000..f0435ac --- /dev/null +++ b/compiler/eclexia-ast/Cargo.toml @@ -0,0 +1,20 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +[package] +name = "eclexia-ast" +description = "Abstract syntax tree definitions for Eclexia" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +smol_str = { workspace = true } +la-arena = { workspace = true } +serde = { workspace = true, optional = true } + +[features] +default = [] +serde = ["dep:serde", "smol_str/serde"] diff --git a/compiler/eclexia-ast/src/dimension.rs b/compiler/eclexia-ast/src/dimension.rs new file mode 100644 index 0000000..d3f6ea2 --- /dev/null +++ b/compiler/eclexia-ast/src/dimension.rs @@ -0,0 +1,575 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Dimensional analysis for resource types. +//! +//! Eclexia tracks physical dimensions at the type level to prevent +//! errors like adding energy to time. This module defines the +//! dimension representation based on SI base units plus extensions +//! for economics and sustainability. +//! +//! # Dimension Algebra +//! +//! Dimensions form an abelian group under multiplication: +//! - `Dimension * Dimension → Dimension` (add exponents) +//! - `Dimension / Dimension → Dimension` (subtract exponents) +//! - `Dimension^n → Dimension` (multiply exponents) +//! - `Dimensionless` is the identity element + +/// A dimension represented as a vector of SI base unit exponents +/// plus extensions for economic and environmental units. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Dimension {
    /// Mass (kilogram, kg) - exponent
    pub mass: i8,
    /// Length (meter, m) - exponent
    pub length: i8,
    /// Time (second, s) - exponent
    pub time: i8,
    /// Electric current (ampere, A) - exponent
    pub current: i8,
    /// Thermodynamic temperature (kelvin, K) - exponent
    pub temperature: i8,
    /// Amount of substance (mole, mol) - exponent
    pub amount: i8,
    /// Luminous intensity (candela, cd) - exponent
    pub luminosity: i8,
    /// Currency (abstract monetary unit) - exponent
    pub money: i8,
    /// Carbon dioxide equivalent (CO2e) - exponent
    pub carbon: i8,
    /// Information (bit) - exponent
    pub information: i8,
}

impl Dimension {
    /// Create a new dimension with all zero exponents (dimensionless).
    pub const fn dimensionless() -> Self {
        Self {
            mass: 0,
            length: 0,
            time: 0,
            current: 0,
            temperature: 0,
            amount: 0,
            luminosity: 0,
            money: 0,
            carbon: 0,
            information: 0,
        }
    }

    /// Check if this dimension is dimensionless (all exponents zero).
    pub const fn is_dimensionless(&self) -> bool {
        self.mass == 0
            && self.length == 0
            && self.time == 0
            && self.current == 0
            && self.temperature == 0
            && self.amount == 0
            && self.luminosity == 0
            && self.money == 0
            && self.carbon == 0
            && self.information == 0
    }

    // Common SI base dimensions

    /// Mass dimension (kg)
    pub const fn mass() -> Self {
        Self { mass: 1, ..Self::dimensionless() }
    }

    /// Length dimension (m)
    pub const fn length() -> Self {
        Self { length: 1, ..Self::dimensionless() }
    }

    /// Time dimension (s)
    pub const fn time() -> Self {
        Self { time: 1, ..Self::dimensionless() }
    }

    /// Electric current dimension (A)
    pub const fn current() -> Self {
        Self { current: 1, ..Self::dimensionless() }
    }

    /// Temperature dimension (K)
    pub const fn temperature() -> Self {
        Self { temperature: 1, ..Self::dimensionless() }
    }

    // Common derived dimensions

    /// Energy dimension (J = kg·m²/s²)
    pub const fn energy() -> Self {
        Self { mass: 1, length: 2, time: -2, ..Self::dimensionless() }
    }

    /// Power dimension (W = kg·m²/s³)
    pub const fn power() -> Self {
        Self { mass: 1, length: 2, time: -3, ..Self::dimensionless() }
    }

    /// Force dimension (N = kg·m/s²)
    pub const fn force() -> Self {
        Self { mass: 1, length: 1, time: -2, ..Self::dimensionless() }
    }

    /// Frequency dimension (Hz = 1/s)
    pub const fn frequency() -> Self {
        Self { time: -1, ..Self::dimensionless() }
    }

    /// Velocity dimension (m/s)
    pub const fn velocity() -> Self {
        Self { length: 1, time: -1, ..Self::dimensionless() }
    }

    /// Acceleration dimension (m/s²)
    pub const fn acceleration() -> Self {
        Self { length: 1, time: -2, ..Self::dimensionless() }
    }

    /// Area dimension (m²)
    pub const fn area() -> Self {
        Self { length: 2, ..Self::dimensionless() }
    }

    /// Volume dimension (m³)
    pub const fn volume() -> Self {
        Self { length: 3, ..Self::dimensionless() }
    }

    // Extended dimensions for Eclexia

    /// Money dimension (currency)
    pub const fn money() -> Self {
        Self { money: 1, ..Self::dimensionless() }
    }

    /// Carbon dioxide equivalent dimension (gCO2e)
    pub const fn carbon() -> Self {
        Self { carbon: 1, ..Self::dimensionless() }
    }

    /// Information dimension (bits)
    pub const fn information() -> Self {
        Self { information: 1, ..Self::dimensionless() }
    }

    /// Memory dimension (bytes = 8 bits).
    ///
    /// Deliberately identical to [`Dimension::information`]: bytes and bits
    /// share the information dimension and differ only by unit scale factor.
    pub const fn memory() -> Self {
        Self { information: 1, ..Self::dimensionless() }
    }

    /// Carbon intensity dimension (gCO2e/kWh = carbon/energy)
    pub const fn carbon_intensity() -> Self {
        // carbon * energy^-1, spelled out so this stays a const fn.
        Self { mass: -1, length: -2, time: 2, carbon: 1, ..Self::dimensionless() }
    }

    // Dimension algebra

    /// Multiply two dimensions (add exponents).
    pub const fn multiply(&self, other: &Self) -> Self {
        Self {
            mass: self.mass + other.mass,
            length: self.length + other.length,
            time: self.time + other.time,
            current: self.current + other.current,
            temperature: self.temperature + other.temperature,
            amount: self.amount + other.amount,
            luminosity: self.luminosity + other.luminosity,
            money: self.money + other.money,
            carbon: self.carbon + other.carbon,
            information: self.information + other.information,
        }
    }

    /// Divide two dimensions (subtract exponents).
    pub const fn divide(&self, other: &Self) -> Self {
        Self {
            mass: self.mass - other.mass,
            length: self.length - other.length,
            time: self.time - other.time,
            current: self.current - other.current,
            temperature: self.temperature - other.temperature,
            amount: self.amount - other.amount,
            luminosity: self.luminosity - other.luminosity,
            money: self.money - other.money,
            carbon: self.carbon - other.carbon,
            information: self.information - other.information,
        }
    }

    /// Raise dimension to a power (multiply exponents).
    pub const fn pow(&self, n: i8) -> Self {
        Self {
            mass: self.mass * n,
            length: self.length * n,
            time: self.time * n,
            current: self.current * n,
            temperature: self.temperature * n,
            amount: self.amount * n,
            luminosity: self.luminosity * n,
            money: self.money * n,
            carbon: self.carbon * n,
            information: self.information * n,
        }
    }

    /// Get the inverse dimension (negate all exponents).
    pub const fn inverse(&self) -> Self {
        self.pow(-1)
    }
}

impl std::ops::Mul for Dimension {
    type Output = Self;

    fn mul(self, rhs: Self) -> Self::Output {
        self.multiply(&rhs)
    }
}

impl std::ops::Div for Dimension {
    type Output = Self;

    fn div(self, rhs: Self) -> Self::Output {
        self.divide(&rhs)
    }
}

impl std::fmt::Display for Dimension {
    /// Format the dimension for error messages, e.g. "kg·m^2/s^2".
    ///
    /// Formerly an inherent `to_string` that shadowed `ToString::to_string`
    /// (clippy: `inherent_to_string_shadow_display`); the logic now lives
    /// here and `.to_string()` keeps working via the `ToString` blanket impl.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.is_dimensionless() {
            return f.write_str("dimensionless");
        }

        let mut parts = Vec::new();
        let mut neg_parts = Vec::new();

        macro_rules! add_dim {
            ($field:ident, $name:literal) => {
                match self.$field {
                    0 => {}
                    1 => parts.push($name.to_string()),
                    -1 => neg_parts.push($name.to_string()),
                    n if n > 0 => parts.push(format!("{}^{}", $name, n)),
                    n => neg_parts.push(format!("{}^{}", $name, -n)),
                }
            };
        }

        add_dim!(mass, "kg");
        add_dim!(length, "m");
        add_dim!(time, "s");
        add_dim!(current, "A");
        add_dim!(temperature, "K");
        add_dim!(amount, "mol");
        add_dim!(luminosity, "cd");
        add_dim!(money, "$");
        add_dim!(carbon, "CO2e");
        add_dim!(information, "bit");

        if neg_parts.is_empty() {
            f.write_str(&parts.join("·"))
        } else if parts.is_empty() {
            write!(f, "1/{}", neg_parts.join("·"))
        } else {
            write!(f, "{}/{}", parts.join("·"), neg_parts.join("·"))
        }
    }
}

/// A unit with its dimension and conversion factor to SI base.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Unit {
    /// Name of the unit
    pub name: &'static str,
    /// Symbol of the unit
    pub symbol: &'static str,
    /// Dimension of the unit
    pub dimension: Dimension,
    /// Conversion factor to SI base unit (multiply by this to get SI)
    pub to_si: f64,
}

/// Common units used in Eclexia
pub mod units {
    use super::*;

    // Time units
    pub const SECOND: Unit = Unit {
        name: "second",
        symbol: "s",
        dimension: Dimension::time(),
        to_si: 1.0,
    };
    pub const MILLISECOND: Unit = Unit {
        name: "millisecond",
        symbol: "ms",
        dimension: Dimension::time(),
        to_si: 0.001,
    };
    pub const MICROSECOND: Unit = Unit {
        name: "microsecond",
        symbol: "μs",
        dimension: Dimension::time(),
        to_si: 0.000_001,
    };
    pub const NANOSECOND: Unit = Unit {
        name: "nanosecond",
        symbol: "ns",
        dimension: Dimension::time(),
        to_si: 0.000_000_001,
    };
    pub const MINUTE: Unit = Unit {
        name: "minute",
        symbol: "min",
        dimension: Dimension::time(),
        to_si: 60.0,
    };
    pub const HOUR: Unit = Unit {
        name: "hour",
        symbol: "h",
        dimension: Dimension::time(),
        to_si: 3600.0,
    };

    // Energy units
    pub const JOULE: Unit = Unit {
        name: "joule",
        symbol: "J",
        dimension: Dimension::energy(),
        to_si: 1.0,
    };
    pub const MILLIJOULE: Unit = Unit {
        name: "millijoule",
        symbol: "mJ",
        dimension: Dimension::energy(),
        to_si: 0.001,
    };
    pub const KILOJOULE: Unit = Unit {
        name: "kilojoule",
        symbol: "kJ",
        dimension: Dimension::energy(),
        to_si: 1000.0,
    };
    pub const WATT_HOUR: Unit = Unit {
        name: "watt-hour",
        symbol: "Wh",
        dimension: Dimension::energy(),
        to_si: 3600.0,
    };
    pub const KILOWATT_HOUR: Unit = Unit {
        name: "kilowatt-hour",
        symbol: "kWh",
        dimension: Dimension::energy(),
        to_si: 3_600_000.0,
    };

    // Power units
    pub const WATT: Unit = Unit {
        name: "watt",
        symbol: "W",
        dimension: Dimension::power(),
        to_si: 1.0,
    };
    pub const MILLIWATT: Unit = Unit {
        name: "milliwatt",
        symbol: "mW",
        dimension: Dimension::power(),
        to_si: 0.001,
    };
    pub const KILOWATT: Unit = Unit {
        name: "kilowatt",
        symbol: "kW",
        dimension: Dimension::power(),
        to_si: 1000.0,
    };

    // Carbon units
    pub const GRAM_CO2E: Unit = Unit {
        name: "gram CO2 equivalent",
        symbol: "gCO2e",
        dimension: Dimension::carbon(),
        to_si: 0.001, // SI base is kg
    };
    pub const KILOGRAM_CO2E: Unit = Unit {
        name: "kilogram CO2 equivalent",
        symbol: "kgCO2e",
        dimension: Dimension::carbon(),
        to_si: 1.0,
    };
    pub const TONNE_CO2E: Unit = Unit {
        name: "tonne CO2 equivalent",
        symbol: "tCO2e",
        dimension: Dimension::carbon(),
        to_si: 1000.0,
    };

    // Memory units (SI base for information is the bit)
    pub const BIT: Unit = Unit {
        name: "bit",
        symbol: "b",
        dimension: Dimension::information(),
        to_si: 1.0,
    };
    pub const BYTE: Unit = Unit {
        name: "byte",
        symbol: "B",
        dimension: Dimension::information(),
        to_si: 8.0,
    };
    pub const KILOBYTE: Unit = Unit {
        name: "kilobyte",
        symbol: "KB",
        dimension: Dimension::information(),
        to_si: 8_000.0,
    };
    pub const MEGABYTE: Unit = Unit {
        name: "megabyte",
        symbol: "MB",
        dimension: Dimension::information(),
        to_si: 8_000_000.0,
    };
    pub const GIGABYTE: Unit = Unit {
        name: "gigabyte",
        symbol: "GB",
        dimension: Dimension::information(),
        to_si: 8_000_000_000.0,
    };

    // Binary memory units (IEC)
    pub const KIBIBYTE: Unit = Unit {
        name: "kibibyte",
        symbol: "KiB",
        dimension: Dimension::information(),
        to_si: 8.0 * 1024.0,
    };
    pub const MEBIBYTE: Unit = Unit {
        name: "mebibyte",
        symbol: "MiB",
        dimension: Dimension::information(),
        to_si: 8.0 * 1024.0 * 1024.0,
    };
    pub const GIBIBYTE: Unit = Unit {
        name: "gibibyte",
        symbol: "GiB",
        dimension: Dimension::information(),
        to_si: 8.0 * 1024.0 * 1024.0 * 1024.0,
    };
}

/// Parse a unit suffix from a string.
///
/// Returns `None` for unrecognized suffixes; the table below is the
/// single source of truth for suffixes accepted in resource literals.
pub fn parse_unit(suffix: &str) -> Option<&'static Unit> {
    match suffix {
        // Time
        "s" => Some(&units::SECOND),
        "ms" => Some(&units::MILLISECOND),
        "μs" | "us" => Some(&units::MICROSECOND),
        "ns" => Some(&units::NANOSECOND),
        "min" => Some(&units::MINUTE),
        "h" => Some(&units::HOUR),
        // Energy
        "J" => Some(&units::JOULE),
        "mJ" => Some(&units::MILLIJOULE),
        "kJ" => Some(&units::KILOJOULE),
        "Wh" => Some(&units::WATT_HOUR),
        "kWh" => Some(&units::KILOWATT_HOUR),
        // Power
        "W" => Some(&units::WATT),
        "mW" => Some(&units::MILLIWATT),
        "kW" => Some(&units::KILOWATT),
        // Carbon
        "gCO2e" => Some(&units::GRAM_CO2E),
        "kgCO2e" => Some(&units::KILOGRAM_CO2E),
        "tCO2e" => Some(&units::TONNE_CO2E),
        // Memory
        "b" => Some(&units::BIT),
        "B" => Some(&units::BYTE),
        "KB" => Some(&units::KILOBYTE),
        "MB" => Some(&units::MEGABYTE),
        "GB" => Some(&units::GIGABYTE),
        "KiB" => Some(&units::KIBIBYTE),
        "MiB" => Some(&units::MEBIBYTE),
        "GiB" => Some(&units::GIBIBYTE),
        _ => None,
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_energy_division_gives_power() {
        let energy = Dimension::energy();
        let time = Dimension::time();
        let power = energy / time;
        assert_eq!(power, Dimension::power());
    }

    #[test]
    fn test_dimensionless_is_identity() {
        let energy = Dimension::energy();
        let one = Dimension::dimensionless();
        assert_eq!(energy * one, energy);
        assert_eq!(energy / energy, one);
    }

    #[test]
    fn test_dimension_display() {
        assert_eq!(Dimension::energy().to_string(), "kg·m^2/s^2");
        assert_eq!(Dimension::power().to_string(), "kg·m^2/s^3");
        assert_eq!(Dimension::velocity().to_string(), "m/s");
        assert_eq!(Dimension::dimensionless().to_string(), "dimensionless");
    }
}

// ==== next file in commit: compiler/eclexia-ast/src/lib.rs ====
// SPDX-License-Identifier:
AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Abstract Syntax Tree definitions for the Eclexia programming language. +//! +//! This crate defines the core AST nodes that represent Eclexia programs +//! after parsing. The AST preserves source locations for error reporting +//! and includes all syntactic constructs including: +//! +//! - Resource types with dimensional analysis +//! - Adaptive blocks with solution alternatives +//! - Constraint annotations (@requires, @provides, @optimize) +//! - Standard expressions and statements + +pub mod dimension; +pub mod span; +pub mod types; + +use la_arena::{Arena, Idx}; +use smol_str::SmolStr; +use span::Span; + +/// Interned string type for identifiers +pub type Ident = SmolStr; + +/// Index into an expression arena +pub type ExprId = Idx; + +/// Index into a statement arena +pub type StmtId = Idx; + +/// Index into a type arena +pub type TypeId = Idx; + +/// A complete Eclexia source file +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct SourceFile { + /// Module-level items (functions, types, imports) + pub items: Vec, + /// Expression arena for this file + pub exprs: Arena, + /// Statement arena + pub stmts: Arena, + /// Type arena + pub types: Arena, +} + +impl SourceFile { + pub fn new() -> Self { + Self { + items: Vec::new(), + exprs: Arena::new(), + stmts: Arena::new(), + types: Arena::new(), + } + } +} + +impl Default for SourceFile { + fn default() -> Self { + Self::new() + } +} + +/// Top-level item in a source file +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum Item { + /// Function definition + Function(Function), + /// Adaptive function definition + AdaptiveFunction(AdaptiveFunction), + /// Type definition + TypeDef(TypeDef), + /// Import statement + Import(Import), + /// Constant definition + Const(ConstDef), +} + +/// A regular function 
definition +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Function { + pub span: Span, + pub name: Ident, + pub params: Vec, + pub return_type: Option, + pub constraints: Vec, + pub body: Block, +} + +/// An adaptive function with multiple solution alternatives +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct AdaptiveFunction { + pub span: Span, + pub name: Ident, + pub params: Vec, + pub return_type: Option, + pub constraints: Vec, + pub optimize: Vec, + pub solutions: Vec, +} + +/// A solution alternative within an adaptive function +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Solution { + pub span: Span, + pub name: Ident, + pub when_clause: Option, + pub provides: Vec, + pub body: Block, +} + +/// Resource provision declaration (@provides) +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct ResourceProvision { + pub span: Span, + pub resource: Ident, + pub amount: ResourceAmount, +} + +/// A resource amount with optional unit +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct ResourceAmount { + pub value: f64, + pub unit: Option, +} + +/// Function parameter +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Param { + pub span: Span, + pub name: Ident, + pub ty: Option, +} + +/// Constraint annotation (@requires) +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Constraint { + pub span: Span, + pub kind: ConstraintKind, +} + +/// Kind of constraint +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum ConstraintKind { + /// Resource budget: energy < 
100J + Resource { + resource: Ident, + op: CompareOp, + amount: ResourceAmount, + }, + /// Custom predicate + Predicate(ExprId), +} + +/// Optimization objective +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Objective { + pub span: Span, + pub direction: OptimizeDirection, + pub target: Ident, +} + +/// Optimization direction +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum OptimizeDirection { + Minimize, + Maximize, +} + +/// Comparison operator +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum CompareOp { + Lt, + Le, + Gt, + Ge, + Eq, + Ne, +} + +/// Type definition +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct TypeDef { + pub span: Span, + pub name: Ident, + pub params: Vec, + pub kind: TypeDefKind, +} + +/// Kind of type definition +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum TypeDefKind { + /// Type alias + Alias(TypeId), + /// Struct/record type + Struct(Vec), + /// Enum/variant type + Enum(Vec), +} + +/// Struct field +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Field { + pub span: Span, + pub name: Ident, + pub ty: TypeId, +} + +/// Enum variant +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Variant { + pub span: Span, + pub name: Ident, + pub fields: Option>, +} + +/// Import statement +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Import { + pub span: Span, + pub path: Vec, + pub alias: Option, +} + +/// Constant definition +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", 
derive(serde::Serialize, serde::Deserialize))] +pub struct ConstDef { + pub span: Span, + pub name: Ident, + pub ty: Option, + pub value: ExprId, +} + +/// A block of statements +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Block { + pub span: Span, + pub stmts: Vec, + /// Optional trailing expression (block value) + pub expr: Option, +} + +/// Statement +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Stmt { + pub span: Span, + pub kind: StmtKind, +} + +/// Statement kind +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum StmtKind { + /// Let binding + Let { + name: Ident, + ty: Option, + value: ExprId, + }, + /// Expression statement + Expr(ExprId), + /// Return statement + Return(Option), + /// While loop + While { condition: ExprId, body: Block }, + /// For loop + For { + name: Ident, + iter: ExprId, + body: Block, + }, +} + +/// Expression +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Expr { + pub span: Span, + pub kind: ExprKind, +} + +/// Expression kind +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum ExprKind { + /// Literal value + Literal(Literal), + /// Variable reference + Var(Ident), + /// Binary operation + Binary { + op: BinaryOp, + lhs: ExprId, + rhs: ExprId, + }, + /// Unary operation + Unary { op: UnaryOp, operand: ExprId }, + /// Function call + Call { func: ExprId, args: Vec }, + /// Method call + MethodCall { + receiver: ExprId, + method: Ident, + args: Vec, + }, + /// Field access + Field { expr: ExprId, field: Ident }, + /// Index access + Index { expr: ExprId, index: ExprId }, + /// If expression + If { + condition: ExprId, + then_branch: Block, + else_branch: Option, + }, + /// Match expression + Match { + 
scrutinee: ExprId, + arms: Vec, + }, + /// Block expression + Block(Block), + /// Lambda/closure + Lambda { params: Vec, body: ExprId }, + /// Tuple construction + Tuple(Vec), + /// Array literal + Array(Vec), + /// Struct literal + Struct { + name: Ident, + fields: Vec<(Ident, ExprId)>, + }, + /// Resource literal (e.g., 100J, 5ms, 10gCO2e) + Resource(ResourceAmount), + /// Error placeholder for recovery + Error, +} + +/// Match arm +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct MatchArm { + pub span: Span, + pub pattern: Pattern, + pub guard: Option, + pub body: ExprId, +} + +/// Pattern for matching +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum Pattern { + /// Wildcard pattern (_) + Wildcard, + /// Variable binding + Var(Ident), + /// Literal pattern + Literal(Literal), + /// Tuple pattern + Tuple(Vec), + /// Constructor pattern + Constructor { name: Ident, fields: Vec }, +} + +/// Literal value +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum Literal { + /// Integer literal + Int(i64), + /// Float literal + Float(f64), + /// String literal + String(SmolStr), + /// Character literal + Char(char), + /// Boolean literal + Bool(bool), + /// Unit literal () + Unit, +} + +/// Binary operator +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum BinaryOp { + // Arithmetic + Add, + Sub, + Mul, + Div, + Rem, + Pow, + // Comparison + Eq, + Ne, + Lt, + Le, + Gt, + Ge, + // Logical + And, + Or, + // Bitwise + BitAnd, + BitOr, + BitXor, + Shl, + Shr, +} + +/// Unary operator +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum UnaryOp { + Neg, + Not, + BitNot, +} + +/// Type expression +#[derive(Debug, Clone)] 
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Type { + pub span: Span, + pub kind: TypeKind, +} + +/// Type expression kind +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum TypeKind { + /// Named type (possibly generic) + Named { name: Ident, args: Vec }, + /// Function type + Function { params: Vec, ret: TypeId }, + /// Tuple type + Tuple(Vec), + /// Array type with optional size + Array { elem: TypeId, size: Option }, + /// Resource type with dimension + Resource { base: Ident, dimension: dimension::Dimension }, + /// Infer type (_) + Infer, + /// Error placeholder + Error, +} diff --git a/compiler/eclexia-ast/src/span.rs b/compiler/eclexia-ast/src/span.rs new file mode 100644 index 0000000..e9fc15f --- /dev/null +++ b/compiler/eclexia-ast/src/span.rs @@ -0,0 +1,142 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Source span tracking for error reporting. + +/// A span in the source code, representing a range of bytes. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Span { + /// Start byte offset (inclusive) + pub start: u32, + /// End byte offset (exclusive) + pub end: u32, +} + +impl Span { + /// Create a new span from start and end offsets. + pub const fn new(start: u32, end: u32) -> Self { + Self { start, end } + } + + /// Create an empty span at a position. + pub const fn empty(pos: u32) -> Self { + Self { start: pos, end: pos } + } + + /// Create a dummy span for synthesized nodes. + pub const fn dummy() -> Self { + Self { start: 0, end: 0 } + } + + /// Check if this span is empty. + pub const fn is_empty(&self) -> bool { + self.start == self.end + } + + /// Get the length of this span in bytes. 
+ pub const fn len(&self) -> u32 { + self.end - self.start + } + + /// Merge two spans into one that covers both. + pub fn merge(self, other: Self) -> Self { + Self { + start: self.start.min(other.start), + end: self.end.max(other.end), + } + } + + /// Check if this span contains a byte offset. + pub const fn contains(&self, offset: u32) -> bool { + offset >= self.start && offset < self.end + } + + /// Check if this span overlaps with another. + pub const fn overlaps(&self, other: &Self) -> bool { + self.start < other.end && other.start < self.end + } + + /// Convert to a range for slicing. + pub fn as_range(&self) -> std::ops::Range { + self.start as usize..self.end as usize + } +} + +impl From> for Span { + fn from(range: std::ops::Range) -> Self { + Self { + start: range.start as u32, + end: range.end as u32, + } + } +} + +impl From> for Span { + fn from(range: std::ops::Range) -> Self { + Self { + start: range.start, + end: range.end, + } + } +} + +/// A value with an associated source span. +#[derive(Debug, Clone, Copy)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Spanned { + pub span: Span, + pub value: T, +} + +impl Spanned { + /// Create a new spanned value. + pub const fn new(span: Span, value: T) -> Self { + Self { span, value } + } + + /// Map the inner value while preserving the span. 
+ pub fn map U>(self, f: F) -> Spanned { + Spanned { + span: self.span, + value: f(self.value), + } + } +} + +impl std::ops::Deref for Spanned { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.value + } +} + +impl std::ops::DerefMut for Spanned { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.value + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_span_merge() { + let a = Span::new(10, 20); + let b = Span::new(15, 30); + let merged = a.merge(b); + assert_eq!(merged.start, 10); + assert_eq!(merged.end, 30); + } + + #[test] + fn test_span_contains() { + let span = Span::new(10, 20); + assert!(span.contains(10)); + assert!(span.contains(15)); + assert!(!span.contains(20)); // exclusive end + assert!(!span.contains(5)); + } +} diff --git a/compiler/eclexia-ast/src/types.rs b/compiler/eclexia-ast/src/types.rs new file mode 100644 index 0000000..18b0faf --- /dev/null +++ b/compiler/eclexia-ast/src/types.rs @@ -0,0 +1,359 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Type representations for the Eclexia type system. +//! +//! This module defines the semantic types used during type checking, +//! separate from the syntactic type expressions in the AST. + +use crate::dimension::Dimension; +use smol_str::SmolStr; + +/// A semantic type in Eclexia's type system. 
+#[derive(Debug, Clone, PartialEq)] +pub enum Ty { + /// Primitive types + Primitive(PrimitiveTy), + + /// A named type (struct, enum, alias) with optional type arguments + Named { + name: SmolStr, + args: Vec, + }, + + /// Function type + Function { + params: Vec, + ret: Box, + }, + + /// Tuple type + Tuple(Vec), + + /// Array type + Array { + elem: Box, + size: Option, + }, + + /// Resource-annotated type with dimensional information + Resource { + base: PrimitiveTy, + dimension: Dimension, + }, + + /// Type variable (for inference) + Var(TypeVar), + + /// Universal quantification (polymorphic type) + ForAll { + vars: Vec, + body: Box, + }, + + /// Error type (for recovery) + Error, + + /// Never type (for diverging expressions) + Never, +} + +impl Ty { + /// Check if this type contains any type variables. + pub fn has_vars(&self) -> bool { + match self { + Ty::Var(_) => true, + Ty::Named { args, .. } => args.iter().any(|t| t.has_vars()), + Ty::Function { params, ret } => { + params.iter().any(|t| t.has_vars()) || ret.has_vars() + } + Ty::Tuple(elems) => elems.iter().any(|t| t.has_vars()), + Ty::Array { elem, .. } => elem.has_vars(), + Ty::ForAll { body, .. } => body.has_vars(), + _ => false, + } + } + + /// Check if this type is dimensioned (has resource tracking). + pub fn is_resource(&self) -> bool { + matches!(self, Ty::Resource { .. }) + } + + /// Get the dimension if this is a resource type. + pub fn dimension(&self) -> Option<&Dimension> { + match self { + Ty::Resource { dimension, .. } => Some(dimension), + _ => None, + } + } + + /// Create a unit type. + pub fn unit() -> Self { + Ty::Tuple(Vec::new()) + } + + /// Create an integer type. + pub fn int() -> Self { + Ty::Primitive(PrimitiveTy::Int) + } + + /// Create a float type. + pub fn float() -> Self { + Ty::Primitive(PrimitiveTy::Float) + } + + /// Create a bool type. + pub fn bool() -> Self { + Ty::Primitive(PrimitiveTy::Bool) + } + + /// Create a string type. 
+ pub fn string() -> Self { + Ty::Primitive(PrimitiveTy::String) + } +} + +/// Primitive types built into the language. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum PrimitiveTy { + /// Signed integer (platform-dependent size) + Int, + /// 8-bit signed integer + I8, + /// 16-bit signed integer + I16, + /// 32-bit signed integer + I32, + /// 64-bit signed integer + I64, + /// 128-bit signed integer + I128, + /// Unsigned integer (platform-dependent size) + UInt, + /// 8-bit unsigned integer + U8, + /// 16-bit unsigned integer + U16, + /// 32-bit unsigned integer + U32, + /// 64-bit unsigned integer + U64, + /// 128-bit unsigned integer + U128, + /// 32-bit floating point + F32, + /// 64-bit floating point + F64, + /// Default float type (F64) + Float, + /// Boolean + Bool, + /// Unicode character + Char, + /// String type + String, + /// Unit type (void) + Unit, +} + +impl PrimitiveTy { + /// Get the name of this primitive type. + pub fn name(&self) -> &'static str { + match self { + PrimitiveTy::Int => "Int", + PrimitiveTy::I8 => "I8", + PrimitiveTy::I16 => "I16", + PrimitiveTy::I32 => "I32", + PrimitiveTy::I64 => "I64", + PrimitiveTy::I128 => "I128", + PrimitiveTy::UInt => "UInt", + PrimitiveTy::U8 => "U8", + PrimitiveTy::U16 => "U16", + PrimitiveTy::U32 => "U32", + PrimitiveTy::U64 => "U64", + PrimitiveTy::U128 => "U128", + PrimitiveTy::F32 => "F32", + PrimitiveTy::F64 => "F64", + PrimitiveTy::Float => "Float", + PrimitiveTy::Bool => "Bool", + PrimitiveTy::Char => "Char", + PrimitiveTy::String => "String", + PrimitiveTy::Unit => "Unit", + } + } + + /// Check if this is a numeric type. 
+ pub fn is_numeric(&self) -> bool { + matches!( + self, + PrimitiveTy::Int + | PrimitiveTy::I8 + | PrimitiveTy::I16 + | PrimitiveTy::I32 + | PrimitiveTy::I64 + | PrimitiveTy::I128 + | PrimitiveTy::UInt + | PrimitiveTy::U8 + | PrimitiveTy::U16 + | PrimitiveTy::U32 + | PrimitiveTy::U64 + | PrimitiveTy::U128 + | PrimitiveTy::F32 + | PrimitiveTy::F64 + | PrimitiveTy::Float + ) + } + + /// Check if this is an integer type. + pub fn is_integer(&self) -> bool { + matches!( + self, + PrimitiveTy::Int + | PrimitiveTy::I8 + | PrimitiveTy::I16 + | PrimitiveTy::I32 + | PrimitiveTy::I64 + | PrimitiveTy::I128 + | PrimitiveTy::UInt + | PrimitiveTy::U8 + | PrimitiveTy::U16 + | PrimitiveTy::U32 + | PrimitiveTy::U64 + | PrimitiveTy::U128 + ) + } + + /// Check if this is a floating-point type. + pub fn is_float(&self) -> bool { + matches!( + self, + PrimitiveTy::F32 | PrimitiveTy::F64 | PrimitiveTy::Float + ) + } +} + +/// A type variable for type inference. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TypeVar(pub u32); + +impl TypeVar { + /// Create a new type variable with the given ID. + pub const fn new(id: u32) -> Self { + Self(id) + } +} + +/// Type scheme for polymorphic types. +#[derive(Debug, Clone)] +pub struct TypeScheme { + /// Bound type variables + pub vars: Vec, + /// The body type + pub ty: Ty, +} + +impl TypeScheme { + /// Create a monomorphic type scheme (no quantification). + pub fn mono(ty: Ty) -> Self { + Self { + vars: Vec::new(), + ty, + } + } + + /// Check if this scheme is monomorphic. + pub fn is_mono(&self) -> bool { + self.vars.is_empty() + } +} + +/// Resource constraint for type checking. 
+#[derive(Debug, Clone)] +pub struct ResourceConstraint { + /// The resource being constrained (energy, time, memory, carbon) + pub resource: SmolStr, + /// The dimension of the resource + pub dimension: Dimension, + /// The constraint operator + pub op: ConstraintOp, + /// The bound value + pub bound: f64, +} + +/// Constraint operator +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ConstraintOp { + Lt, + Le, + Gt, + Ge, + Eq, +} + +impl std::fmt::Display for Ty { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Ty::Primitive(p) => write!(f, "{}", p.name()), + Ty::Named { name, args } => { + write!(f, "{}", name)?; + if !args.is_empty() { + write!(f, "[")?; + for (i, arg) in args.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}", arg)?; + } + write!(f, "]")?; + } + Ok(()) + } + Ty::Function { params, ret } => { + write!(f, "(")?; + for (i, param) in params.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}", param)?; + } + write!(f, ") -> {}", ret) + } + Ty::Tuple(elems) => { + write!(f, "(")?; + for (i, elem) in elems.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}", elem)?; + } + if elems.len() == 1 { + write!(f, ",")?; + } + write!(f, ")") + } + Ty::Array { elem, size } => { + write!(f, "[{}]", elem)?; + if let Some(n) = size { + write!(f, "; {}", n)?; + } + Ok(()) + } + Ty::Resource { base, dimension } => { + write!(f, "{}[{}]", base.name(), dimension) + } + Ty::Var(v) => write!(f, "?{}", v.0), + Ty::ForAll { vars, body } => { + write!(f, "∀")?; + for (i, var) in vars.iter().enumerate() { + if i > 0 { + write!(f, " ")?; + } + write!(f, "{}", var)?; + } + write!(f, ". 
{}", body) + } + Ty::Error => write!(f, "{{error}}"), + Ty::Never => write!(f, "!"), + } + } +} diff --git a/compiler/eclexia-codegen/Cargo.toml b/compiler/eclexia-codegen/Cargo.toml new file mode 100644 index 0000000..7eaeb91 --- /dev/null +++ b/compiler/eclexia-codegen/Cargo.toml @@ -0,0 +1,15 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +[package] +name = "eclexia-codegen" +description = "Code generation for the Eclexia compiler" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +eclexia-ast = { path = "../eclexia-ast" } +eclexia-mir = { path = "../eclexia-mir" } diff --git a/compiler/eclexia-codegen/src/lib.rs b/compiler/eclexia-codegen/src/lib.rs new file mode 100644 index 0000000..bd27897 --- /dev/null +++ b/compiler/eclexia-codegen/src/lib.rs @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Code generation for the Eclexia compiler. +//! +//! Supports multiple backends: +//! - Native code via LLVM or Cranelift +//! - WebAssembly for browser and WASI +//! - Interpreter for development + +// TODO: Implement code generation backends diff --git a/compiler/eclexia-hir/Cargo.toml b/compiler/eclexia-hir/Cargo.toml new file mode 100644 index 0000000..f31910d --- /dev/null +++ b/compiler/eclexia-hir/Cargo.toml @@ -0,0 +1,16 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Jonathan D.A. 
Jewell + +[package] +name = "eclexia-hir" +description = "High-level intermediate representation for Eclexia" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +eclexia-ast = { path = "../eclexia-ast" } +smol_str = { workspace = true } +la-arena = { workspace = true } diff --git a/compiler/eclexia-hir/src/lib.rs b/compiler/eclexia-hir/src/lib.rs new file mode 100644 index 0000000..145471c --- /dev/null +++ b/compiler/eclexia-hir/src/lib.rs @@ -0,0 +1,10 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! High-level Intermediate Representation (HIR) for Eclexia. +//! +//! The HIR is a desugared, type-annotated representation of the AST. +//! It preserves resource annotations and adaptive block structure +//! while simplifying the syntax. + +// TODO: Implement HIR lowering from AST diff --git a/compiler/eclexia-lexer/Cargo.toml b/compiler/eclexia-lexer/Cargo.toml new file mode 100644 index 0000000..ed42354 --- /dev/null +++ b/compiler/eclexia-lexer/Cargo.toml @@ -0,0 +1,18 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +[package] +name = "eclexia-lexer" +description = "Lexer for the Eclexia programming language" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +logos = { workspace = true } +smol_str = { workspace = true } +eclexia-ast = { path = "../eclexia-ast" } + +[dev-dependencies] diff --git a/compiler/eclexia-lexer/src/lib.rs b/compiler/eclexia-lexer/src/lib.rs new file mode 100644 index 0000000..a7ac785 --- /dev/null +++ b/compiler/eclexia-lexer/src/lib.rs @@ -0,0 +1,533 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Lexer for the Eclexia programming language. +//! +//! 
This crate provides lexical analysis for Eclexia source code, +//! converting a string of characters into a stream of tokens. +//! The lexer handles: +//! +//! - Keywords and identifiers +//! - Numeric literals with dimensional units (e.g., `100J`, `5ms`) +//! - String and character literals +//! - Operators and punctuation +//! - Comments (line and block) +//! - Annotation syntax (@requires, @provides, etc.) + +use eclexia_ast::span::Span; +use logos::Logos; +use smol_str::SmolStr; + +/// A token with its span in the source. +#[derive(Debug, Clone)] +pub struct Token { + pub kind: TokenKind, + pub span: Span, +} + +impl Token { + pub fn new(kind: TokenKind, span: Span) -> Self { + Self { kind, span } + } +} + +/// Token kinds produced by the lexer. +#[derive(Logos, Debug, Clone, PartialEq)] +#[logos(skip r"[ \t\r\n\f]+")] +#[logos(skip r"//[^\n]*")] +#[logos(skip r"/\*([^*]|\*[^/])*\*/")] +pub enum TokenKind { + // === Keywords === + #[token("adaptive")] + Adaptive, + #[token("def")] + Def, + #[token("fn")] + Fn, + #[token("let")] + Let, + #[token("mut")] + Mut, + #[token("const")] + Const, + #[token("if")] + If, + #[token("then")] + Then, + #[token("else")] + Else, + #[token("match")] + Match, + #[token("while")] + While, + #[token("for")] + For, + #[token("in")] + In, + #[token("return")] + Return, + #[token("break")] + Break, + #[token("continue")] + Continue, + #[token("type")] + Type, + #[token("struct")] + Struct, + #[token("enum")] + Enum, + #[token("impl")] + Impl, + #[token("trait")] + Trait, + #[token("import")] + Import, + #[token("export")] + Export, + #[token("async")] + Async, + #[token("await")] + Await, + #[token("true")] + True, + #[token("false")] + False, + #[token("and")] + And, + #[token("or")] + Or, + #[token("not")] + Not, + + // === Eclexia-specific keywords === + #[token("@solution")] + AtSolution, + #[token("@when")] + AtWhen, + #[token("@requires")] + AtRequires, + #[token("@provides")] + AtProvides, + #[token("@optimize")] + AtOptimize, 
+ #[token("@observe")] + AtObserve, + #[token("@defer_until")] + AtDeferUntil, + #[token("minimize")] + Minimize, + #[token("maximize")] + Maximize, + + // === Literals === + /// Integer literal (possibly with unit suffix) + #[regex(r"[0-9][0-9_]*", |lex| parse_int(lex.slice()))] + Integer(i64), + + /// Float literal (possibly with unit suffix) + #[regex(r"[0-9][0-9_]*\.[0-9][0-9_]*([eE][+-]?[0-9]+)?", |lex| parse_float(lex.slice()))] + Float(f64), + + /// Resource literal: number followed by unit suffix + /// Examples: 100J, 5ms, 10gCO2e, 1.5kWh + #[regex(r"[0-9][0-9_]*(\.[0-9][0-9_]*)?[a-zA-Z][a-zA-Z0-9]*", |lex| parse_resource(lex.slice()))] + Resource(ResourceLiteral), + + /// String literal + #[regex(r#""([^"\\]|\\.)*""#, |lex| parse_string(lex.slice()))] + String(SmolStr), + + /// Character literal + #[regex(r"'([^'\\]|\\.)'", |lex| parse_char(lex.slice()))] + Char(char), + + // === Identifiers === + // Note: Keywords take priority. Single underscore is handled separately. + #[regex(r"[a-zA-Z][a-zA-Z0-9_]*", |lex| SmolStr::new(lex.slice()))] + #[regex(r"_[a-zA-Z0-9_]+", |lex| SmolStr::new(lex.slice()))] + Ident(SmolStr), + + // === Operators === + #[token("+")] + Plus, + #[token("-")] + Minus, + #[token("*")] + Star, + #[token("/")] + Slash, + #[token("%")] + Percent, + #[token("**")] + StarStar, + #[token("==")] + EqEq, + #[token("!=")] + BangEq, + #[token("<")] + Lt, + #[token("<=")] + Le, + #[token(">")] + Gt, + #[token(">=")] + Ge, + #[token("&&")] + AmpAmp, + #[token("||")] + PipePipe, + #[token("!")] + Bang, + #[token("&")] + Amp, + #[token("|")] + Pipe, + #[token("^")] + Caret, + #[token("~")] + Tilde, + #[token("<<")] + LtLt, + #[token(">>")] + GtGt, + + // === Punctuation === + #[token("=")] + Eq, + #[token(":")] + Colon, + #[token("::")] + ColonColon, + #[token(";")] + Semi, + #[token(",")] + Comma, + #[token(".")] + Dot, + #[token("..")] + DotDot, + #[token("...")] + DotDotDot, + #[token("->")] + Arrow, + #[token("=>")] + FatArrow, + #[token("@")] 
+ At, + #[token("#")] + Hash, + #[token("?")] + Question, + #[token("_")] + Underscore, + + // === Delimiters === + #[token("(")] + LParen, + #[token(")")] + RParen, + #[token("[")] + LBracket, + #[token("]")] + RBracket, + #[token("{")] + LBrace, + #[token("}")] + RBrace, + + // === Special === + /// End of file + Eof, + + /// Error token + Error, +} + +/// A resource literal with value and unit. +#[derive(Debug, Clone, PartialEq)] +pub struct ResourceLiteral { + pub value: f64, + pub unit: SmolStr, +} + +fn parse_int(s: &str) -> i64 { + s.replace('_', "").parse().unwrap_or(0) +} + +fn parse_float(s: &str) -> f64 { + s.replace('_', "").parse().unwrap_or(0.0) +} + +fn parse_resource(s: &str) -> ResourceLiteral { + // Find where the number ends and unit begins + let unit_start = s + .char_indices() + .find(|(_, c)| c.is_alphabetic()) + .map(|(i, _)| i) + .unwrap_or(s.len()); + + let num_part = &s[..unit_start]; + let unit_part = &s[unit_start..]; + + let value = num_part.replace('_', "").parse().unwrap_or(0.0); + + ResourceLiteral { + value, + unit: SmolStr::new(unit_part), + } +} + +fn parse_string(s: &str) -> SmolStr { + // Remove quotes and process escapes + let inner = &s[1..s.len() - 1]; + let mut result = String::with_capacity(inner.len()); + let mut chars = inner.chars().peekable(); + + while let Some(c) = chars.next() { + if c == '\\' { + match chars.next() { + Some('n') => result.push('\n'), + Some('r') => result.push('\r'), + Some('t') => result.push('\t'), + Some('\\') => result.push('\\'), + Some('"') => result.push('"'), + Some('0') => result.push('\0'), + Some(c) => result.push(c), + None => break, + } + } else { + result.push(c); + } + } + + SmolStr::new(&result) +} + +fn parse_char(s: &str) -> char { + let inner = &s[1..s.len() - 1]; + let mut chars = inner.chars(); + + match chars.next() { + Some('\\') => match chars.next() { + Some('n') => '\n', + Some('r') => '\r', + Some('t') => '\t', + Some('\\') => '\\', + Some('\'') => '\'', + Some('0') => 
'\0', + Some(c) => c, + None => '\0', + }, + Some(c) => c, + None => '\0', + } +} + +/// Lexer for Eclexia source code. +pub struct Lexer<'src> { + inner: logos::Lexer<'src, TokenKind>, + peeked: Option, +} + +impl<'src> Lexer<'src> { + /// Create a new lexer for the given source. + pub fn new(source: &'src str) -> Self { + Self { + inner: TokenKind::lexer(source), + peeked: None, + } + } + + /// Get the next token. + pub fn next(&mut self) -> Token { + if let Some(token) = self.peeked.take() { + return token; + } + + match self.inner.next() { + Some(Ok(kind)) => { + let span = self.inner.span(); + Token::new(kind, Span::new(span.start as u32, span.end as u32)) + } + Some(Err(())) => { + let span = self.inner.span(); + Token::new(TokenKind::Error, Span::new(span.start as u32, span.end as u32)) + } + None => Token::new(TokenKind::Eof, Span::empty(self.inner.span().end as u32)), + } + } + + /// Peek at the next token without consuming it. + pub fn peek(&mut self) -> &Token { + if self.peeked.is_none() { + self.peeked = Some(self.next()); + } + self.peeked.as_ref().unwrap() + } + + /// Check if we've reached the end of input. + pub fn is_eof(&mut self) -> bool { + matches!(self.peek().kind, TokenKind::Eof) + } + + /// Get the current source slice for a span. + pub fn slice(&self) -> &'src str { + self.inner.slice() + } + + /// Get the source string. + pub fn source(&self) -> &'src str { + self.inner.source() + } +} + +impl Iterator for Lexer<'_> { + type Item = Token; + + fn next(&mut self) -> Option { + let token = Lexer::next(self); + if matches!(token.kind, TokenKind::Eof) { + None + } else { + Some(token) + } + } +} + +/// Tokenize a source string into a vector of tokens. 
+pub fn tokenize(source: &str) -> Vec { + let mut lexer = Lexer::new(source); + let mut tokens = Vec::new(); + + loop { + let token = lexer.next(); + let is_eof = matches!(token.kind, TokenKind::Eof); + tokens.push(token); + if is_eof { + break; + } + } + + tokens +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_keywords() { + let source = "adaptive def fn let if else match while for return"; + let tokens: Vec<_> = Lexer::new(source).collect(); + + assert!(matches!(tokens[0].kind, TokenKind::Adaptive)); + assert!(matches!(tokens[1].kind, TokenKind::Def)); + assert!(matches!(tokens[2].kind, TokenKind::Fn)); + assert!(matches!(tokens[3].kind, TokenKind::Let)); + assert!(matches!(tokens[4].kind, TokenKind::If)); + } + + #[test] + fn test_resource_literals() { + let source = "100J 5ms 10gCO2e 1.5kWh 500mW"; + let tokens: Vec<_> = Lexer::new(source).collect(); + + if let TokenKind::Resource(r) = &tokens[0].kind { + assert_eq!(r.value, 100.0); + assert_eq!(r.unit.as_str(), "J"); + } else { + panic!("Expected resource literal"); + } + + if let TokenKind::Resource(r) = &tokens[1].kind { + assert_eq!(r.value, 5.0); + assert_eq!(r.unit.as_str(), "ms"); + } else { + panic!("Expected resource literal"); + } + + if let TokenKind::Resource(r) = &tokens[2].kind { + assert_eq!(r.value, 10.0); + assert_eq!(r.unit.as_str(), "gCO2e"); + } else { + panic!("Expected resource literal"); + } + } + + #[test] + fn test_annotations() { + let source = "@requires @provides @optimize @solution @when"; + let tokens: Vec<_> = Lexer::new(source).collect(); + + assert!(matches!(tokens[0].kind, TokenKind::AtRequires)); + assert!(matches!(tokens[1].kind, TokenKind::AtProvides)); + assert!(matches!(tokens[2].kind, TokenKind::AtOptimize)); + assert!(matches!(tokens[3].kind, TokenKind::AtSolution)); + assert!(matches!(tokens[4].kind, TokenKind::AtWhen)); + } + + #[test] + fn test_operators() { + let source = "+ - * / % ** == != < <= > >= && ||"; + let tokens: Vec<_> = 
Lexer::new(source).collect(); + + assert!(matches!(tokens[0].kind, TokenKind::Plus)); + assert!(matches!(tokens[1].kind, TokenKind::Minus)); + assert!(matches!(tokens[2].kind, TokenKind::Star)); + assert!(matches!(tokens[3].kind, TokenKind::Slash)); + assert!(matches!(tokens[4].kind, TokenKind::Percent)); + assert!(matches!(tokens[5].kind, TokenKind::StarStar)); + assert!(matches!(tokens[6].kind, TokenKind::EqEq)); + } + + #[test] + fn test_string_literal() { + let source = r#""hello world" "with\nescape""#; + let tokens: Vec<_> = Lexer::new(source).collect(); + + if let TokenKind::String(s) = &tokens[0].kind { + assert_eq!(s.as_str(), "hello world"); + } else { + panic!("Expected string literal"); + } + + if let TokenKind::String(s) = &tokens[1].kind { + assert_eq!(s.as_str(), "with\nescape"); + } else { + panic!("Expected string literal"); + } + } + + #[test] + fn test_comments_skipped() { + let source = "let // this is a comment\nx /* block */ = 5"; + let tokens: Vec<_> = Lexer::new(source).collect(); + + // Comments should be skipped + assert!(matches!(tokens[0].kind, TokenKind::Let)); + assert!(matches!(tokens[1].kind, TokenKind::Ident(_))); + assert!(matches!(tokens[2].kind, TokenKind::Eq)); + assert!(matches!(tokens[3].kind, TokenKind::Integer(5))); + } + + #[test] + fn test_full_adaptive_function() { + let source = r#" + adaptive def sort(arr: Array[Int]) -> Array[Int] + @requires: energy < 100J + @optimize: minimize energy + { + @solution "quick": + @when: length(arr) > 100 + @provides: energy: 50J + { quicksort(arr) } + } + "#; + + let tokens: Vec<_> = Lexer::new(source).collect(); + + // Just verify it tokenizes without error + assert!(!tokens.is_empty()); + assert!(matches!(tokens[0].kind, TokenKind::Adaptive)); + } +} diff --git a/compiler/eclexia-mir/Cargo.toml b/compiler/eclexia-mir/Cargo.toml new file mode 100644 index 0000000..9c2d18c --- /dev/null +++ b/compiler/eclexia-mir/Cargo.toml @@ -0,0 +1,16 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later 
+# SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +[package] +name = "eclexia-mir" +description = "Mid-level intermediate representation for Eclexia" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +eclexia-ast = { path = "../eclexia-ast" } +smol_str = { workspace = true } +la-arena = { workspace = true } diff --git a/compiler/eclexia-mir/src/lib.rs b/compiler/eclexia-mir/src/lib.rs new file mode 100644 index 0000000..0d4b36d --- /dev/null +++ b/compiler/eclexia-mir/src/lib.rs @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Mid-level Intermediate Representation (MIR) for Eclexia. +//! +//! The MIR is a control-flow graph representation used for: +//! - Optimization passes +//! - Resource constraint lowering +//! - Shadow price hook insertion + +// TODO: Implement MIR lowering from HIR diff --git a/compiler/eclexia-parser/Cargo.toml b/compiler/eclexia-parser/Cargo.toml new file mode 100644 index 0000000..7556b21 --- /dev/null +++ b/compiler/eclexia-parser/Cargo.toml @@ -0,0 +1,19 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Jonathan D.A. 
Jewell + +[package] +name = "eclexia-parser" +description = "Parser for the Eclexia programming language" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +eclexia-ast = { path = "../eclexia-ast" } +eclexia-lexer = { path = "../eclexia-lexer" } +smol_str = { workspace = true } +thiserror = { workspace = true } + +[dev-dependencies] diff --git a/compiler/eclexia-parser/src/error.rs b/compiler/eclexia-parser/src/error.rs new file mode 100644 index 0000000..a5bdbcd --- /dev/null +++ b/compiler/eclexia-parser/src/error.rs @@ -0,0 +1,86 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Parser error types. + +use eclexia_ast::span::Span; +use eclexia_lexer::{Token, TokenKind}; +use thiserror::Error; + +/// Result type for parsing operations. +pub type ParseResult = Result; + +/// A parsing error. +#[derive(Debug, Error)] +pub enum ParseError { + #[error("unexpected token {found:?}, expected {expected}")] + UnexpectedToken { + span: Span, + found: TokenKind, + expected: String, + }, + + #[error("expected {expected:?}, found {found:?}")] + ExpectedToken { + span: Span, + expected: TokenKind, + found: TokenKind, + }, + + #[error("expected identifier")] + ExpectedIdentifier { span: Span }, + + #[error("unexpected end of file")] + UnexpectedEof { span: Span }, + + #[error("invalid resource literal")] + InvalidResourceLiteral { span: Span }, + + #[error("{message}")] + Custom { span: Span, message: String }, +} + +impl ParseError { + /// Create an unexpected token error. + pub fn unexpected_token(token: Token, expected: &str) -> Self { + Self::UnexpectedToken { + span: token.span, + found: token.kind, + expected: expected.to_string(), + } + } + + /// Create an expected token error. 
+ pub fn expected_token(expected: TokenKind, found: Token) -> Self { + Self::ExpectedToken { + span: found.span, + expected, + found: found.kind, + } + } + + /// Create an expected identifier error. + pub fn expected_identifier(token: Token) -> Self { + Self::ExpectedIdentifier { span: token.span } + } + + /// Create a custom error. + pub fn custom(span: Span, message: impl Into) -> Self { + Self::Custom { + span, + message: message.into(), + } + } + + /// Get the span of this error. + pub fn span(&self) -> Span { + match self { + Self::UnexpectedToken { span, .. } => *span, + Self::ExpectedToken { span, .. } => *span, + Self::ExpectedIdentifier { span } => *span, + Self::UnexpectedEof { span } => *span, + Self::InvalidResourceLiteral { span } => *span, + Self::Custom { span, .. } => *span, + } + } +} diff --git a/compiler/eclexia-parser/src/expr.rs b/compiler/eclexia-parser/src/expr.rs new file mode 100644 index 0000000..8c40334 --- /dev/null +++ b/compiler/eclexia-parser/src/expr.rs @@ -0,0 +1,499 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Expression parsing using Pratt parsing for operators. + +use eclexia_ast::*; +use eclexia_lexer::{TokenKind, ResourceLiteral}; + +use crate::{Parser, ParseResult, ParseError}; + +/// Operator precedence levels. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +#[repr(u8)] +pub enum Precedence { + None = 0, + Assignment = 1, // = + Or = 2, // or, || + And = 3, // and, && + Equality = 4, // ==, != + Comparison = 5, // <, >, <=, >= + BitOr = 6, // | + BitXor = 7, // ^ + BitAnd = 8, // & + Shift = 9, // <<, >> + Term = 10, // +, - + Factor = 11, // *, /, % + Power = 12, // ** + Unary = 13, // !, -, ~ + Call = 14, // (), [], . + Primary = 15, +} + +impl<'src> Parser<'src> { + /// Parse an expression. 
+ pub fn parse_expr(&mut self, file: &mut SourceFile) -> ParseResult { + self.parse_expr_prec(file, Precedence::None) + } + + /// Parse an expression with a minimum precedence. + fn parse_expr_prec(&mut self, file: &mut SourceFile, min_prec: Precedence) -> ParseResult { + let mut lhs = self.parse_prefix(file)?; + + loop { + let prec = self.current_precedence(); + if prec <= min_prec { + break; + } + + lhs = self.parse_infix(file, lhs, prec)?; + } + + Ok(lhs) + } + + /// Parse a prefix expression (primary or unary). + fn parse_prefix(&mut self, file: &mut SourceFile) -> ParseResult { + let token = self.peek().clone(); + + match &token.kind { + // Unary operators + TokenKind::Minus | TokenKind::Bang | TokenKind::Tilde | TokenKind::Not => { + self.advance(); + let op = match token.kind { + TokenKind::Minus => UnaryOp::Neg, + TokenKind::Bang | TokenKind::Not => UnaryOp::Not, + TokenKind::Tilde => UnaryOp::BitNot, + _ => unreachable!(), + }; + let operand = self.parse_expr_prec(file, Precedence::Unary)?; + let span = token.span.merge(file.exprs[operand].span); + let expr = Expr { + span, + kind: ExprKind::Unary { op, operand }, + }; + Ok(file.exprs.alloc(expr)) + } + + // Primary expressions + _ => self.parse_primary(file), + } + } + + /// Parse a primary expression. 
+ fn parse_primary(&mut self, file: &mut SourceFile) -> ParseResult { + let token = self.advance(); + + let kind = match token.kind { + // Literals + TokenKind::Integer(n) => ExprKind::Literal(Literal::Int(n)), + TokenKind::Float(f) => ExprKind::Literal(Literal::Float(f)), + TokenKind::String(s) => ExprKind::Literal(Literal::String(s)), + TokenKind::Char(c) => ExprKind::Literal(Literal::Char(c)), + TokenKind::True => ExprKind::Literal(Literal::Bool(true)), + TokenKind::False => ExprKind::Literal(Literal::Bool(false)), + + // Resource literal + TokenKind::Resource(ResourceLiteral { value, unit }) => { + ExprKind::Resource(ResourceAmount { + value, + unit: Some(unit), + }) + } + + // Identifier + TokenKind::Ident(name) => ExprKind::Var(name), + + // Parenthesized expression or tuple + TokenKind::LParen => { + if self.check(TokenKind::RParen) { + self.advance(); + ExprKind::Literal(Literal::Unit) + } else { + let first = self.parse_expr(file)?; + + if self.check(TokenKind::Comma) { + // Tuple + let mut elems = vec![first]; + while self.check(TokenKind::Comma) { + self.advance(); + if self.check(TokenKind::RParen) { + break; + } + elems.push(self.parse_expr(file)?); + } + self.expect(TokenKind::RParen)?; + ExprKind::Tuple(elems) + } else { + self.expect(TokenKind::RParen)?; + // Just return the inner expression + return Ok(first); + } + } + } + + // Array literal + TokenKind::LBracket => { + let mut elems = Vec::new(); + if !self.check(TokenKind::RBracket) { + loop { + elems.push(self.parse_expr(file)?); + if !self.check(TokenKind::Comma) { + break; + } + self.advance(); + } + } + self.expect(TokenKind::RBracket)?; + ExprKind::Array(elems) + } + + // Block expression + TokenKind::LBrace => { + // Put token back (hacky but works for now) + // Actually we need to handle this differently + let block = self.parse_block_inner(file, token.span)?; + ExprKind::Block(block) + } + + // If expression + TokenKind::If => { + let condition = self.parse_expr(file)?; + + // Optional 
'then' keyword + if self.check(TokenKind::Then) { + self.advance(); + } + + let then_branch = self.parse_block(file)?; + + let else_branch = if self.check(TokenKind::Else) { + self.advance(); + Some(self.parse_block(file)?) + } else { + None + }; + + ExprKind::If { + condition, + then_branch, + else_branch, + } + } + + // Match expression + TokenKind::Match => { + let scrutinee = self.parse_expr(file)?; + self.expect(TokenKind::LBrace)?; + + let mut arms = Vec::new(); + while !self.check(TokenKind::RBrace) && !self.is_eof() { + let arm = self.parse_match_arm(file)?; + arms.push(arm); + + if !self.check(TokenKind::Comma) { + break; + } + self.advance(); + } + + self.expect(TokenKind::RBrace)?; + ExprKind::Match { scrutinee, arms } + } + + // Lambda + TokenKind::Fn => { + self.expect(TokenKind::LParen)?; + let params = self.parse_params(file)?; + self.expect(TokenKind::RParen)?; + self.expect(TokenKind::Arrow)?; + let body = self.parse_expr(file)?; + ExprKind::Lambda { params, body } + } + + _ => { + return Err(ParseError::unexpected_token(token, "expression")); + } + }; + + let expr = Expr { + span: token.span, + kind, + }; + let id = file.exprs.alloc(expr); + + // Parse postfix operations + self.parse_postfix(file, id) + } + + /// Parse a block expression when we've already consumed the opening brace. 
+ fn parse_block_inner(&mut self, file: &mut SourceFile, start: eclexia_ast::span::Span) -> ParseResult { + let mut stmts = Vec::new(); + let mut expr = None; + + while !self.check(TokenKind::RBrace) && !self.is_eof() { + match self.parse_stmt(file) { + Ok(stmt) => { + let stmt_id = file.stmts.alloc(stmt); + stmts.push(stmt_id); + } + Err(e) => { + self.errors.push(e); + self.recover_to_stmt(); + } + } + } + + // Check if last statement can be a trailing expression + if let Some(&last_id) = stmts.last() { + if let StmtKind::Expr(expr_id) = file.stmts[last_id].kind { + stmts.pop(); + expr = Some(expr_id); + } + } + + let end = self.expect(TokenKind::RBrace)?; + let span = start.merge(end); + + Ok(Block { span, stmts, expr }) + } + + /// Parse postfix operations (calls, field access, indexing). + fn parse_postfix(&mut self, file: &mut SourceFile, mut expr: ExprId) -> ParseResult { + loop { + match self.peek().kind { + // Function call + TokenKind::LParen => { + self.advance(); + let mut args = Vec::new(); + if !self.check(TokenKind::RParen) { + loop { + args.push(self.parse_expr(file)?); + if !self.check(TokenKind::Comma) { + break; + } + self.advance(); + } + } + let end = self.expect(TokenKind::RParen)?; + + let span = file.exprs[expr].span.merge(end); + let call_expr = Expr { + span, + kind: ExprKind::Call { func: expr, args }, + }; + expr = file.exprs.alloc(call_expr); + } + + // Field access or method call + TokenKind::Dot => { + self.advance(); + let field = self.expect_ident()?; + + if self.check(TokenKind::LParen) { + // Method call + self.advance(); + let mut args = Vec::new(); + if !self.check(TokenKind::RParen) { + loop { + args.push(self.parse_expr(file)?); + if !self.check(TokenKind::Comma) { + break; + } + self.advance(); + } + } + let end = self.expect(TokenKind::RParen)?; + + let span = file.exprs[expr].span.merge(end); + let method_expr = Expr { + span, + kind: ExprKind::MethodCall { + receiver: expr, + method: field, + args, + }, + }; + expr = 
file.exprs.alloc(method_expr);
+                } else {
+                    // Plain field access: `expr.field`.
+                    let span = file.exprs[expr].span.merge(self.previous_span());
+                    let field_expr = Expr {
+                        span,
+                        kind: ExprKind::Field { expr, field },
+                    };
+                    expr = file.exprs.alloc(field_expr);
+                }
+            }
+
+            // Index access: `expr[index]`.
+            TokenKind::LBracket => {
+                self.advance();
+                let index = self.parse_expr(file)?;
+                let end = self.expect(TokenKind::RBracket)?;
+
+                let span = file.exprs[expr].span.merge(end);
+                let index_expr = Expr {
+                    span,
+                    kind: ExprKind::Index { expr, index },
+                };
+                expr = file.exprs.alloc(index_expr);
+            }
+
+            _ => break,
+        }
+    }
+
+    Ok(expr)
+}
+
+    /// Parse an infix (binary) expression whose left operand has already
+    /// been parsed. The operator token is consumed here; `prec` is the
+    /// binding power of that operator as reported by `current_precedence`.
+    fn parse_infix(
+        &mut self,
+        file: &mut SourceFile,
+        lhs: ExprId,
+        prec: Precedence,
+    ) -> ParseResult<ExprId> {
+        let token = self.advance();
+
+        let op = match token.kind {
+            // Arithmetic
+            TokenKind::Plus => BinaryOp::Add,
+            TokenKind::Minus => BinaryOp::Sub,
+            TokenKind::Star => BinaryOp::Mul,
+            TokenKind::Slash => BinaryOp::Div,
+            TokenKind::Percent => BinaryOp::Rem,
+            TokenKind::StarStar => BinaryOp::Pow,
+
+            // Comparison
+            TokenKind::EqEq => BinaryOp::Eq,
+            TokenKind::BangEq => BinaryOp::Ne,
+            TokenKind::Lt => BinaryOp::Lt,
+            TokenKind::Le => BinaryOp::Le,
+            TokenKind::Gt => BinaryOp::Gt,
+            TokenKind::Ge => BinaryOp::Ge,
+
+            // Logical (both symbolic and keyword spellings)
+            TokenKind::AmpAmp | TokenKind::And => BinaryOp::And,
+            TokenKind::PipePipe | TokenKind::Or => BinaryOp::Or,
+
+            // Bitwise
+            TokenKind::Amp => BinaryOp::BitAnd,
+            TokenKind::Pipe => BinaryOp::BitOr,
+            TokenKind::Caret => BinaryOp::BitXor,
+            TokenKind::LtLt => BinaryOp::Shl,
+            TokenKind::GtGt => BinaryOp::Shr,
+
+            _ => return Err(ParseError::unexpected_token(token, "operator")),
+        };
+
+        // `**` is right-associative: re-parse the RHS at Power precedence so
+        // `a ** b ** c` groups as `a ** (b ** c)`.
+        let next_prec = if matches!(op, BinaryOp::Pow) {
+            Precedence::Power
+        } else {
+            prec
+        };
+
+        let rhs = self.parse_expr_prec(file, next_prec)?;
+
+        let span = file.exprs[lhs].span.merge(file.exprs[rhs].span);
+        let expr = Expr {
+            span,
+            kind: ExprKind::Binary { op, lhs, rhs },
+        };
+        Ok(file.exprs.alloc(expr))
+    }
+
+    /// Parse a single `pattern [if guard] => body` match arm.
+    fn parse_match_arm(&mut self, file: &mut SourceFile) -> ParseResult<MatchArm> {
+        let start = self.peek().span;
+        let pattern = self.parse_pattern()?;
+
+        // Optional `if` guard between the pattern and `=>`.
+        let guard = if self.check(TokenKind::If) {
+            self.advance();
+            Some(self.parse_expr(file)?)
+        } else {
+            None
+        };
+
+        self.expect(TokenKind::FatArrow)?;
+        let body = self.parse_expr(file)?;
+
+        let span = start.merge(file.exprs[body].span);
+
+        Ok(MatchArm {
+            span,
+            pattern,
+            guard,
+            body,
+        })
+    }
+
+    /// Parse a pattern: wildcard `_`, binding, constructor `Name(p, ...)`,
+    /// literal, or tuple `(p, ...)`.
+    fn parse_pattern(&mut self) -> ParseResult<Pattern> {
+        let token = self.advance();
+
+        match token.kind {
+            TokenKind::Underscore => Ok(Pattern::Wildcard),
+            TokenKind::Ident(name) => {
+                if self.check(TokenKind::LParen) {
+                    // Constructor pattern: `Name(p1, p2, ...)`.
+                    self.advance();
+                    let mut fields = Vec::new();
+                    if !self.check(TokenKind::RParen) {
+                        loop {
+                            fields.push(self.parse_pattern()?);
+                            if !self.check(TokenKind::Comma) {
+                                break;
+                            }
+                            self.advance();
+                        }
+                    }
+                    self.expect(TokenKind::RParen)?;
+                    Ok(Pattern::Constructor { name, fields })
+                } else {
+                    Ok(Pattern::Var(name))
+                }
+            }
+            TokenKind::Integer(n) => Ok(Pattern::Literal(Literal::Int(n))),
+            TokenKind::Float(f) => Ok(Pattern::Literal(Literal::Float(f))),
+            TokenKind::String(s) => Ok(Pattern::Literal(Literal::String(s))),
+            TokenKind::True => Ok(Pattern::Literal(Literal::Bool(true))),
+            TokenKind::False => Ok(Pattern::Literal(Literal::Bool(false))),
+            TokenKind::LParen => {
+                // Tuple pattern. NOTE(review): `(p)` with a single element
+                // still yields `Pattern::Tuple` — confirm whether a
+                // one-element tuple should collapse to the inner pattern.
+                let mut patterns = Vec::new();
+                if !self.check(TokenKind::RParen) {
+                    loop {
+                        patterns.push(self.parse_pattern()?);
+                        if !self.check(TokenKind::Comma) {
+                            break;
+                        }
+                        self.advance();
+                    }
+                }
+                self.expect(TokenKind::RParen)?;
+                Ok(Pattern::Tuple(patterns))
+            }
+            _ => Err(ParseError::unexpected_token(token, "pattern")),
+        }
+    }
+
+    /// Binding power of the token at the cursor when used as an infix or
+    /// postfix operator; `Precedence::None` for anything that cannot
+    /// continue an expression.
+    fn current_precedence(&mut self) -> Precedence {
+        match self.peek().kind {
+            TokenKind::PipePipe | TokenKind::Or => Precedence::Or,
+            TokenKind::AmpAmp | TokenKind::And => Precedence::And,
+            TokenKind::EqEq | TokenKind::BangEq => Precedence::Equality,
+            TokenKind::Lt | TokenKind::Le | TokenKind::Gt | TokenKind::Ge => Precedence::Comparison,
+            TokenKind::Pipe => Precedence::BitOr,
+            TokenKind::Caret => Precedence::BitXor,
+            TokenKind::Amp => Precedence::BitAnd,
+            TokenKind::LtLt | TokenKind::GtGt => Precedence::Shift,
+            TokenKind::Plus | TokenKind::Minus => Precedence::Term,
+            TokenKind::Star | TokenKind::Slash | TokenKind::Percent => Precedence::Factor,
+            TokenKind::StarStar => Precedence::Power,
+            TokenKind::LParen | TokenKind::LBracket | TokenKind::Dot => Precedence::Call,
+            _ => Precedence::None,
+        }
+    }
+}
diff --git a/compiler/eclexia-parser/src/lib.rs b/compiler/eclexia-parser/src/lib.rs
new file mode 100644
index 0000000..24ea934
--- /dev/null
+++ b/compiler/eclexia-parser/src/lib.rs
@@ -0,0 +1,868 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Parser for the Eclexia programming language.
+//!
+//! This crate provides a hand-written recursive descent parser for Eclexia.
+//! The parser is designed for:
+//!
+//! - Good error messages with recovery
+//! - Incremental parsing support (future)
+//! - Full source span preservation
+
+mod error;
+mod expr;
+
+use eclexia_ast::span::Span;
+use eclexia_ast::*;
+use eclexia_lexer::{Lexer, Token, TokenKind, ResourceLiteral};
+use smol_str::SmolStr;
+
+pub use error::{ParseError, ParseResult};
+
+/// Parser for Eclexia source code.
+pub struct Parser<'src> {
+    /// Token source; the parser only ever looks one token ahead.
+    lexer: Lexer<'src>,
+    /// Original source text (kept for diagnostics).
+    source: &'src str,
+    /// Errors collected during recovery; drained by `parse_file`.
+    errors: Vec<ParseError>,
+}
+
+impl<'src> Parser<'src> {
+    /// Create a new parser for the given source.
+    pub fn new(source: &'src str) -> Self {
+        Self {
+            lexer: Lexer::new(source),
+            source,
+            errors: Vec::new(),
+        }
+    }
+
+    /// Parse a complete source file.
+    pub fn parse_file(&mut self) -> (SourceFile, Vec<ParseError>) {
+        let mut file = SourceFile::new();
+
+        while !self.is_eof() {
+            match self.parse_item(&mut file) {
+                Ok(item) => file.items.push(item),
+                Err(e) => {
+                    // Record the error and skip ahead to the next plausible
+                    // item start so one bad item doesn't abort the file.
+                    self.errors.push(e);
+                    self.recover_to_item();
+                }
+            }
+        }
+
+        (file, std::mem::take(&mut self.errors))
+    }
+
+    /// Parse a single top-level item, dispatching on the leading keyword.
+    fn parse_item(&mut self, file: &mut SourceFile) -> ParseResult<Item> {
+        let token = self.peek();
+
+        match &token.kind {
+            TokenKind::Adaptive => self.parse_adaptive_function(file).map(Item::AdaptiveFunction),
+            TokenKind::Def | TokenKind::Fn => self.parse_function(file).map(Item::Function),
+            TokenKind::Type => self.parse_type_def(file).map(Item::TypeDef),
+            TokenKind::Import => self.parse_import().map(Item::Import),
+            TokenKind::Const => self.parse_const(file).map(Item::Const),
+            _ => Err(ParseError::unexpected_token(token.clone(), "item")),
+        }
+    }
+
+    /// Parse a regular (non-adaptive) function definition.
+    fn parse_function(&mut self, file: &mut SourceFile) -> ParseResult<Function> {
+        let start = self.peek().span;
+
+        // 'def' or 'fn' — both spellings are accepted.
+        self.expect_one_of(&[TokenKind::Def, TokenKind::Fn])?;
+
+        // Function name
+        let name = self.expect_ident()?;
+
+        // Parameter list
+        self.expect(TokenKind::LParen)?;
+        let params = self.parse_params(file)?;
+        self.expect(TokenKind::RParen)?;
+
+        // Optional `-> ReturnType`
+        let return_type = if self.check(TokenKind::Arrow) {
+            self.advance();
+            Some(self.parse_type(file)?)
+        } else {
+            None
+        };
+
+        // `@requires` constraint annotations
+        let constraints = self.parse_constraints(file)?;
+
+        // Body
+        let body = self.parse_block(file)?;
+
+        let span = start.merge(body.span);
+
+        Ok(Function {
+            span,
+            name,
+            params,
+            return_type,
+            constraints,
+            body,
+        })
+    }
+
+    /// Parse an adaptive function definition:
+    /// `adaptive def name(params) [-> Ty] @requires... @optimize... { @solution... }`.
+    fn parse_adaptive_function(&mut self, file: &mut SourceFile) -> ParseResult<AdaptiveFunction> {
+        let start = self.peek().span;
+
+        // 'adaptive'
+        self.expect(TokenKind::Adaptive)?;
+
+        // 'def' or 'fn'
+        self.expect_one_of(&[TokenKind::Def, TokenKind::Fn])?;
+
+        // Function name
+        let name = self.expect_ident()?;
+
+        // Parameter list
+        self.expect(TokenKind::LParen)?;
+        let params = self.parse_params(file)?;
+        self.expect(TokenKind::RParen)?;
+
+        // Optional `-> ReturnType`
+        let return_type = if self.check(TokenKind::Arrow) {
+            self.advance();
+            Some(self.parse_type(file)?)
+        } else {
+            None
+        };
+
+        // `@requires` constraints
+        let constraints = self.parse_constraints(file)?;
+
+        // `@optimize` directives
+        let optimize = self.parse_optimize_directives()?;
+
+        // Body: a brace-delimited list of `@solution` alternatives.
+        self.expect(TokenKind::LBrace)?;
+        let solutions = self.parse_solutions(file)?;
+        let end = self.expect(TokenKind::RBrace)?;
+
+        let span = start.merge(end);
+
+        Ok(AdaptiveFunction {
+            span,
+            name,
+            params,
+            return_type,
+            constraints,
+            optimize,
+            solutions,
+        })
+    }
+
+    /// Parse a comma-separated parameter list (caller consumes the parens).
+    fn parse_params(&mut self, file: &mut SourceFile) -> ParseResult<Vec<Param>> {
+        let mut params = Vec::new();
+
+        if !self.check(TokenKind::RParen) {
+            loop {
+                params.push(self.parse_param(file)?);
+
+                if !self.check(TokenKind::Comma) {
+                    break;
+                }
+                self.advance();
+            }
+        }
+
+        Ok(params)
+    }
+
+    /// Parse a single `name[: Type]` parameter.
+    fn parse_param(&mut self, file: &mut SourceFile) -> ParseResult<Param> {
+        let start = self.peek().span;
+        let name = self.expect_ident()?;
+
+        let ty = if self.check(TokenKind::Colon) {
+            self.advance();
+            Some(self.parse_type(file)?)
+        } else {
+            None
+        };
+
+        // The span covers the type annotation when one is present.
+        let span = if let Some(ty_id) = ty {
+            start.merge(file.types[ty_id].span)
+        } else {
+            start
+        };
+
+        Ok(Param { span, name, ty })
+    }
+
+    /// Parse zero or more `@requires: constraint[, constraint...]` blocks.
+    fn parse_constraints(&mut self, file: &mut SourceFile) -> ParseResult<Vec<Constraint>> {
+        let mut constraints = Vec::new();
+
+        while self.check(TokenKind::AtRequires) {
+            let start = self.advance().span;
+            self.expect(TokenKind::Colon)?;
+
+            // First constraint after the colon.
+            let kind = self.parse_constraint_kind(file)?;
+            let span = start.merge(self.previous_span());
+
+            constraints.push(Constraint { span, kind });
+
+            // Additional constraints separated by commas.
+            while self.check(TokenKind::Comma) {
+                self.advance();
+                let kind = self.parse_constraint_kind(file)?;
+                constraints.push(Constraint {
+                    span: self.previous_span(),
+                    kind,
+                });
+            }
+        }
+
+        Ok(constraints)
+    }
+
+    /// Parse a single constraint: either `resource <op> amount` or a bare
+    /// predicate expression.
+    fn parse_constraint_kind(&mut self, file: &mut SourceFile) -> ParseResult<ConstraintKind> {
+        let resource = self.expect_ident()?;
+
+        let op = match self.peek().kind {
+            TokenKind::Lt => CompareOp::Lt,
+            TokenKind::Le => CompareOp::Le,
+            TokenKind::Gt => CompareOp::Gt,
+            TokenKind::Ge => CompareOp::Ge,
+            TokenKind::EqEq => CompareOp::Eq,
+            _ => {
+                // Not a comparison, so treat the rest as a predicate
+                // expression. NOTE(review): the leading identifier has
+                // already been consumed above, so the predicate loses its
+                // first token — fixing this needs lookahead or backtracking;
+                // confirm the intended grammar.
+                let expr = self.parse_expr(file)?;
+                return Ok(ConstraintKind::Predicate(expr));
+            }
+        };
+        self.advance();
+
+        let amount = self.parse_resource_amount()?;
+
+        Ok(ConstraintKind::Resource { resource, op, amount })
+    }
+
+    /// Parse a resource amount: a unit-suffixed literal (`100J`, `5ms`) or a
+    /// bare number (unitless).
+    fn parse_resource_amount(&mut self) -> ParseResult<ResourceAmount> {
+        let token = self.advance();
+
+        match &token.kind {
+            TokenKind::Resource(ResourceLiteral { value, unit }) => Ok(ResourceAmount {
+                value: *value,
+                unit: Some(unit.clone()),
+            }),
+            TokenKind::Integer(n) => Ok(ResourceAmount {
+                value: *n as f64,
+                unit: None,
+            }),
+            TokenKind::Float(f) => Ok(ResourceAmount {
+                value: *f,
+                unit: None,
+            }),
+            _ => Err(ParseError::unexpected_token(token, "resource amount")),
+        }
+    }
+
+    /// Parse zero or more `@optimize: minimize|maximize target[, ...]` blocks.
+    fn parse_optimize_directives(&mut self) -> ParseResult<Vec<Objective>> {
+        let mut objectives = Vec::new();
+
+        while self.check(TokenKind::AtOptimize) {
+            let start = self.advance().span;
+            self.expect(TokenKind::Colon)?;
+
+            loop {
+                let direction = if self.check(TokenKind::Minimize) {
+                    self.advance();
+                    OptimizeDirection::Minimize
+                } else if self.check(TokenKind::Maximize) {
+                    self.advance();
+                    OptimizeDirection::Maximize
+                } else {
+                    return Err(ParseError::unexpected_token(
+                        self.peek().clone(),
+                        "minimize or maximize",
+                    ));
+                };
+
+                let target = self.expect_ident()?;
+                let span = start.merge(self.previous_span());
+
+                objectives.push(Objective {
+                    span,
+                    direction,
+                    target,
+                });
+
+                if !self.check(TokenKind::Comma) {
+                    break;
+                }
+                self.advance();
+            }
+        }
+
+        Ok(objectives)
+    }
+
+    /// Parse the `@solution` alternatives inside an adaptive function body.
+    fn parse_solutions(&mut self, file: &mut SourceFile) -> ParseResult<Vec<Solution>> {
+        let mut solutions = Vec::new();
+
+        while self.check(TokenKind::AtSolution) {
+            solutions.push(self.parse_solution(file)?);
+        }
+
+        Ok(solutions)
+    }
+
+    /// Parse a single `@solution "name": [@when ...] [@provides ...] { body }`.
+    fn parse_solution(&mut self, file: &mut SourceFile) -> ParseResult<Solution> {
+        let start = self.expect(TokenKind::AtSolution)?;
+
+        // Solution name: a string literal or a bare identifier.
+        let name_token = self.advance();
+        let name = match &name_token.kind {
+            TokenKind::String(s) | TokenKind::Ident(s) => s.clone(),
+            // Report the token we actually consumed, not the one after it.
+            _ => return Err(ParseError::unexpected_token(name_token, "solution name")),
+        };
+
+        self.expect(TokenKind::Colon)?;
+
+        // Optional `@when:` applicability guard.
+        let when_clause = if self.check(TokenKind::AtWhen) {
+            self.advance();
+            self.expect(TokenKind::Colon)?;
+            Some(self.parse_expr(file)?)
+        } else {
+            None
+        };
+
+        // Zero or more `@provides: resource: amount[, ...]` clauses.
+        let mut provides = Vec::new();
+        while self.check(TokenKind::AtProvides) {
+            self.advance();
+            self.expect(TokenKind::Colon)?;
+
+            loop {
+                let resource = self.expect_ident()?;
+                self.expect(TokenKind::Colon)?;
+                let amount = self.parse_resource_amount()?;
+
+                provides.push(ResourceProvision {
+                    span: self.previous_span(),
+                    resource,
+                    amount,
+                });
+
+                if !self.check(TokenKind::Comma) {
+                    break;
+                }
+                self.advance();
+            }
+        }
+
+        // Solution body
+        let body = self.parse_block(file)?;
+        let span = start.merge(body.span);
+
+        Ok(Solution {
+            span,
+            name,
+            when_clause,
+            provides,
+            body,
+        })
+    }
+
+    /// Parse a brace-delimited block of statements, with a trailing
+    /// expression when the last statement is an expression.
+    fn parse_block(&mut self, file: &mut SourceFile) -> ParseResult<Block> {
+        let start = self.expect(TokenKind::LBrace)?;
+        let mut stmts = Vec::new();
+        let mut expr = None;
+
+        while !self.check(TokenKind::RBrace) && !self.is_eof() {
+            match self.parse_stmt(file) {
+                Ok(stmt) => {
+                    let stmt_id = file.stmts.alloc(stmt);
+                    stmts.push(stmt_id);
+                }
+                Err(e) => {
+                    // Record and skip to the next statement boundary.
+                    self.errors.push(e);
+                    self.recover_to_stmt();
+                }
+            }
+        }
+
+        // Treat a final expression statement as the block's value.
+        // NOTE(review): `parse_stmt` swallows the optional `;` without
+        // recording it, so `{ a; }` is treated the same as `{ a }` here —
+        // confirm whether a semicolon should suppress the trailing value.
+        if let Some(&last_id) = stmts.last() {
+            if let StmtKind::Expr(expr_id) = file.stmts[last_id].kind {
+                stmts.pop();
+                expr = Some(expr_id);
+            }
+        }
+
+        let end = self.expect(TokenKind::RBrace)?;
+        let span = start.merge(end);
+
+        Ok(Block { span, stmts, expr })
+    }
+
+    /// Parse a single statement (`let`, `return`, `while`, `for`, or an
+    /// expression), consuming an optional trailing semicolon.
+    fn parse_stmt(&mut self, file: &mut SourceFile) -> ParseResult<Stmt> {
+        let token = self.peek();
+        let start = token.span;
+
+        let kind = match &token.kind {
+            TokenKind::Let => {
+                self.advance();
+                let name = self.expect_ident()?;
+
+                let ty = if self.check(TokenKind::Colon) {
+                    self.advance();
+                    Some(self.parse_type(file)?)
+                } else {
+                    None
+                };
+
+                self.expect(TokenKind::Eq)?;
+                let value = self.parse_expr(file)?;
+
+                StmtKind::Let { name, ty, value }
+            }
+            TokenKind::Return => {
+                self.advance();
+                // Bare `return` when the next token ends the statement.
+                let value = if !self.check(TokenKind::Semi) && !self.check(TokenKind::RBrace) {
+                    Some(self.parse_expr(file)?)
+                } else {
+                    None
+                };
+                StmtKind::Return(value)
+            }
+            TokenKind::While => {
+                self.advance();
+                let condition = self.parse_expr(file)?;
+                let body = self.parse_block(file)?;
+                StmtKind::While { condition, body }
+            }
+            TokenKind::For => {
+                self.advance();
+                let name = self.expect_ident()?;
+                self.expect(TokenKind::In)?;
+                let iter = self.parse_expr(file)?;
+                let body = self.parse_block(file)?;
+                StmtKind::For { name, iter, body }
+            }
+            _ => StmtKind::Expr(self.parse_expr(file)?),
+        };
+
+        // Optional semicolon terminator.
+        if self.check(TokenKind::Semi) {
+            self.advance();
+        }
+
+        let span = start.merge(self.previous_span());
+        Ok(Stmt { span, kind })
+    }
+
+    /// Parse a type definition.
+    fn parse_type_def(&mut self, file: &mut SourceFile) -> ParseResult<TypeDef> {
+        let start = self.expect(TokenKind::Type)?;
+        let name = self.expect_ident()?;
+
+        // Optional `[T, U, ...]` type parameters.
+        let params = if self.check(TokenKind::LBracket) {
+            self.advance();
+            let mut params = Vec::new();
+            loop {
+                params.push(self.expect_ident()?);
+                if !self.check(TokenKind::Comma) {
+                    break;
+                }
+                self.advance();
+            }
+            self.expect(TokenKind::RBracket)?;
+            params
+        } else {
+            Vec::new()
+        };
+
+        self.expect(TokenKind::Eq)?;
+
+        // Right-hand side: struct, enum, or a plain type alias.
+        let kind = if self.check(TokenKind::Struct) {
+            self.advance();
+            self.expect(TokenKind::LBrace)?;
+            let fields = self.parse_fields(file)?;
+            self.expect(TokenKind::RBrace)?;
+            TypeDefKind::Struct(fields)
+        } else if self.check(TokenKind::Enum) {
+            self.advance();
+            self.expect(TokenKind::LBrace)?;
+            let variants = self.parse_variants(file)?;
+            self.expect(TokenKind::RBrace)?;
+            TypeDefKind::Enum(variants)
+        } else {
+            TypeDefKind::Alias(self.parse_type(file)?)
+        };
+
+        let span = start.merge(self.previous_span());
+
+        Ok(TypeDef {
+            span,
+            name,
+            params,
+            kind,
+        })
+    }
+
+    /// Parse `name: Type` struct fields separated by commas.
+    fn parse_fields(&mut self, file: &mut SourceFile) -> ParseResult<Vec<Field>> {
+        let mut fields = Vec::new();
+
+        while !self.check(TokenKind::RBrace) && !self.is_eof() {
+            let start = self.peek().span;
+            let name = self.expect_ident()?;
+            self.expect(TokenKind::Colon)?;
+            let ty = self.parse_type(file)?;
+
+            fields.push(Field {
+                span: start.merge(file.types[ty].span),
+                name,
+                ty,
+            });
+
+            // A missing comma ends the list.
+            if !self.check(TokenKind::Comma) {
+                break;
+            }
+            self.advance();
+        }
+
+        Ok(fields)
+    }
+
+    /// Parse enum variants, each optionally carrying a tuple of field types.
+    fn parse_variants(&mut self, file: &mut SourceFile) -> ParseResult<Vec<Variant>> {
+        let mut variants = Vec::new();
+
+        while !self.check(TokenKind::RBrace) && !self.is_eof() {
+            let start = self.peek().span;
+            let name = self.expect_ident()?;
+
+            let fields = if self.check(TokenKind::LParen) {
+                self.advance();
+                let mut types = Vec::new();
+                loop {
+                    types.push(self.parse_type(file)?);
+                    if !self.check(TokenKind::Comma) {
+                        break;
+                    }
+                    self.advance();
+                }
+                self.expect(TokenKind::RParen)?;
+                Some(types)
+            } else {
+                None
+            };
+
+            variants.push(Variant {
+                span: start.merge(self.previous_span()),
+                name,
+                fields,
+            });
+
+            if !self.check(TokenKind::Comma) {
+                break;
+            }
+            self.advance();
+        }
+
+        Ok(variants)
+    }
+
+    /// Parse `import path[::seg|.seg]* [as alias]`.
+    fn parse_import(&mut self) -> ParseResult<Import> {
+        let start = self.expect(TokenKind::Import)?;
+        let mut path = vec![self.expect_ident()?];
+
+        // Both `::` and `.` are accepted as path separators.
+        while self.check(TokenKind::ColonColon) || self.check(TokenKind::Dot) {
+            self.advance();
+            path.push(self.expect_ident()?);
+        }
+
+        // `as` is a contextual keyword, so match it as an identifier.
+        let alias = if self.check_ident("as") {
+            self.advance();
+            Some(self.expect_ident()?)
+        } else {
+            None
+        };
+
+        let span = start.merge(self.previous_span());
+
+        Ok(Import { span, path, alias })
+    }
+
+    /// Parse `const NAME[: Type] = expr`.
+    fn parse_const(&mut self, file: &mut SourceFile) -> ParseResult<ConstDef> {
+        let start = self.expect(TokenKind::Const)?;
+        let name = self.expect_ident()?;
+
+        let ty = if self.check(TokenKind::Colon) {
+            self.advance();
+            Some(self.parse_type(file)?)
+        } else {
+            None
+        };
+
+        self.expect(TokenKind::Eq)?;
+        let value = self.parse_expr(file)?;
+
+        let span = start.merge(self.previous_span());
+
+        Ok(ConstDef {
+            span,
+            name,
+            ty,
+            value,
+        })
+    }
+
+    /// Parse a type expression.
+    fn parse_type(&mut self, file: &mut SourceFile) -> ParseResult<TypeId> {
+        let start = self.peek().span;
+
+        let kind = if self.check(TokenKind::LParen) {
+            // Unit `()`, tuple `(A, B)`, parenthesized `(A)`, or function
+            // type `(A, B) -> C`.
+            self.advance();
+
+            if self.check(TokenKind::RParen) {
+                self.advance();
+                // Unit type ()
+                TypeKind::Tuple(Vec::new())
+            } else {
+                let mut types = vec![self.parse_type(file)?];
+
+                while self.check(TokenKind::Comma) {
+                    self.advance();
+                    // Allow a trailing comma before `)`.
+                    if self.check(TokenKind::RParen) {
+                        break;
+                    }
+                    types.push(self.parse_type(file)?);
+                }
+                self.expect(TokenKind::RParen)?;
+
+                if self.check(TokenKind::Arrow) {
+                    self.advance();
+                    let ret = self.parse_type(file)?;
+                    TypeKind::Function { params: types, ret }
+                } else if types.len() == 1 {
+                    // Parenthesized type: unwrap rather than build a 1-tuple.
+                    return Ok(types.pop().unwrap());
+                } else {
+                    TypeKind::Tuple(types)
+                }
+            }
+        } else if self.check(TokenKind::LBracket) {
+            // Array type `[T]` or `[T; N]`.
+            self.advance();
+            let elem = self.parse_type(file)?;
+
+            let size = if self.check(TokenKind::Semi) {
+                self.advance();
+                if let TokenKind::Integer(n) = self.advance().kind {
+                    Some(n as usize)
+                } else {
+                    // NOTE(review): a non-integer size is silently ignored
+                    // rather than reported — confirm intended behavior.
+                    None
+                }
+            } else {
+                None
+            };
+
+            self.expect(TokenKind::RBracket)?;
+            TypeKind::Array { elem, size }
+        } else if self.check(TokenKind::Underscore) {
+            self.advance();
+            TypeKind::Infer
+        } else {
+            // Named type with optional `[args]`.
+            let name = self.expect_ident()?;
+
+            let args = if self.check(TokenKind::LBracket) {
+                self.advance();
+                let mut args = Vec::new();
+                loop {
+                    args.push(self.parse_type(file)?);
+                    if !self.check(TokenKind::Comma) {
+                        break;
+                    }
+                    self.advance();
+                }
+                self.expect(TokenKind::RBracket)?;
+                args
+            } else {
+                Vec::new()
+            };
+
+            TypeKind::Named { name, args }
+        };
+
+        let span = start.merge(self.previous_span());
+        Ok(file.types.alloc(Type { span, kind }))
+    }
+
+    // === Helper methods ===
+
+    /// Peek at the current token without consuming it.
+    fn peek(&mut self) -> &Token {
+        self.lexer.peek()
+    }
+
+    /// Consume and return the current token.
+    fn advance(&mut self) -> Token {
+        self.lexer.next()
+    }
+
+    /// True once the lexer has run out of tokens.
+    fn is_eof(&mut self) -> bool {
+        self.lexer.is_eof()
+    }
+
+    /// True if the current token has the same kind (ignoring payload) as
+    /// `kind`.
+    fn check(&mut self, kind: TokenKind) -> bool {
+        std::mem::discriminant(&self.peek().kind) == std::mem::discriminant(&kind)
+    }
+
+    /// True if the current token is the identifier `name` (for contextual
+    /// keywords like `as`).
+    fn check_ident(&mut self, name: &str) -> bool {
+        matches!(&self.peek().kind, TokenKind::Ident(s) if s.as_str() == name)
+    }
+
+    /// Consume a token of the given kind or error without consuming.
+    fn expect(&mut self, kind: TokenKind) -> ParseResult<Span> {
+        if self.check(kind.clone()) {
+            Ok(self.advance().span)
+        } else {
+            Err(ParseError::expected_token(kind, self.peek().clone()))
+        }
+    }
+
+    /// Consume a token matching any of `kinds` or error without consuming.
+    fn expect_one_of(&mut self, kinds: &[TokenKind]) -> ParseResult<Span> {
+        for kind in kinds {
+            if self.check(kind.clone()) {
+                return Ok(self.advance().span);
+            }
+        }
+        Err(ParseError::unexpected_token(
+            self.peek().clone(),
+            "one of expected tokens",
+        ))
+    }
+
+    /// Consume an identifier token and return its name.
+    fn expect_ident(&mut self) -> ParseResult<SmolStr> {
+        let token = self.advance();
+        match &token.kind {
+            TokenKind::Ident(s) => Ok(s.clone()),
+            // Report the token we consumed, not its successor.
+            _ => Err(ParseError::expected_identifier(token)),
+        }
+    }
+
+    /// Span of the most recently consumed token.
+    fn previous_span(&self) -> Span {
+        // This is a simplified version; in practice we'd track the previous token
+        Span::dummy()
+    }
+
+    /// Skip tokens until the next plausible item start (error recovery).
+    fn recover_to_item(&mut self) {
+        while !self.is_eof() {
+            match self.peek().kind {
+                TokenKind::Adaptive
+                | TokenKind::Def
+                | TokenKind::Fn
+                | TokenKind::Type
+                | TokenKind::Import
+                | TokenKind::Const => return,
+                _ => {
+                    self.advance();
+                }
+            }
+        }
+    }
+
+    /// Skip tokens until the next plausible statement start, consuming a
+    /// stray `;` if that is what ends the bad statement.
+    fn recover_to_stmt(&mut self) {
+        while !self.is_eof() {
+            match self.peek().kind {
+                TokenKind::Let
+                | TokenKind::Return
+                | TokenKind::While
+                | TokenKind::For
+                | TokenKind::If
+                | TokenKind::RBrace => return,
+                TokenKind::Semi => {
+                    self.advance();
+                    return;
+                }
+                _ => {
+                    self.advance();
+                }
+            }
+        }
+    }
+}
+
+/// Parse a source file.
+pub fn parse(source: &str) -> (SourceFile, Vec<ParseError>) {
+    let mut parser = Parser::new(source);
+    parser.parse_file()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_parse_simple_function() {
+        let source = r#"
+            def add(x: Int, y: Int) -> Int {
+                x + y
+            }
+        "#;
+
+        let (file, errors) = parse(source);
+        assert!(errors.is_empty(), "Parse errors: {:?}", errors);
+        assert_eq!(file.items.len(), 1);
+    }
+
+    #[test]
+    fn test_parse_adaptive_function() {
+        let source = r#"
+            adaptive def sort(arr: Array[Int]) -> Array[Int]
+                @requires: energy < 100J
+                @optimize: minimize energy
+            {
+                @solution "quick":
+                    @when: true
+                    @provides: energy: 50J
+                {
+                    quicksort(arr)
+                }
+            }
+        "#;
+
+        let (file, errors) = parse(source);
+        assert!(errors.is_empty(), "Parse errors: {:?}", errors);
+        assert_eq!(file.items.len(), 1);
+
+        if let Item::AdaptiveFunction(af) = &file.items[0] {
+            assert_eq!(af.name.as_str(), "sort");
+            assert_eq!(af.solutions.len(), 1);
+        } else {
+            panic!("Expected adaptive function");
+        }
+    }
+}
diff --git a/compiler/eclexia-typeck/Cargo.toml b/compiler/eclexia-typeck/Cargo.toml
new file mode 100644
index 0000000..8d8aebd
--- /dev/null
+++ b/compiler/eclexia-typeck/Cargo.toml
@@ -0,0 +1,18 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# SPDX-FileCopyrightText: 2025 Jonathan D.A.
Jewell
+
+[package]
+name = "eclexia-typeck"
+description = "Type checker for the Eclexia programming language"
+version.workspace = true
+edition.workspace = true
+authors.workspace = true
+license.workspace = true
+repository.workspace = true
+
+[dependencies]
+eclexia-ast = { path = "../eclexia-ast" }
+smol_str = { workspace = true }
+indexmap = { workspace = true }
+thiserror = { workspace = true }
+rustc-hash = { workspace = true }
diff --git a/compiler/eclexia-typeck/src/env.rs b/compiler/eclexia-typeck/src/env.rs
new file mode 100644
index 0000000..907bf77
--- /dev/null
+++ b/compiler/eclexia-typeck/src/env.rs
@@ -0,0 +1,56 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Type environment for variable bindings.
+
+use eclexia_ast::types::{Ty, TypeScheme};
+use rustc_hash::FxHashMap;
+use smol_str::SmolStr;
+
+/// Type environment mapping names to type schemes, with lexical scoping via
+/// an optional parent environment.
+#[derive(Debug, Clone)]
+pub struct TypeEnv {
+    /// Bindings introduced in this scope.
+    bindings: FxHashMap<SmolStr, TypeScheme>,
+    /// Enclosing scope, if any. NOTE(review): `child` clones the entire
+    /// parent chain; consider sharing (`Rc`) if environments get deep.
+    parent: Option<Box<TypeEnv>>,
+}
+
+impl TypeEnv {
+    /// Create a new empty environment.
+    pub fn new() -> Self {
+        Self {
+            bindings: FxHashMap::default(),
+            parent: None,
+        }
+    }
+
+    /// Create a child environment whose lookups fall back to `self`.
+    pub fn child(&self) -> Self {
+        Self {
+            bindings: FxHashMap::default(),
+            parent: Some(Box::new(self.clone())),
+        }
+    }
+
+    /// Insert a binding into the current scope.
+    pub fn insert(&mut self, name: SmolStr, scheme: TypeScheme) {
+        self.bindings.insert(name, scheme);
+    }
+
+    /// Look up a binding, searching enclosing scopes innermost-first.
+    pub fn lookup(&self, name: &str) -> Option<&TypeScheme> {
+        self.bindings.get(name).or_else(|| {
+            self.parent.as_ref().and_then(|p| p.lookup(name))
+        })
+    }
+
+    /// Insert a monomorphic (unquantified) type.
+    pub fn insert_mono(&mut self, name: SmolStr, ty: Ty) {
+        self.insert(name, TypeScheme::mono(ty));
+    }
+}
+
+impl Default for TypeEnv {
+    fn default() -> Self {
+        Self::new()
+    }
+}
diff --git a/compiler/eclexia-typeck/src/error.rs b/compiler/eclexia-typeck/src/error.rs
new file mode 100644
index 0000000..61301d9
--- /dev/null
+++ b/compiler/eclexia-typeck/src/error.rs
@@ -0,0 +1,66 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Type checking errors.
+
+use eclexia_ast::span::Span;
+use eclexia_ast::types::Ty;
+use thiserror::Error;
+
+/// Result type for type checking operations.
+pub type TypeResult<T> = Result<T, TypeError>;
+
+/// A type checking error.
+#[derive(Debug, Error)]
+pub enum TypeError {
+    #[error("type mismatch: expected {expected}, found {found}")]
+    Mismatch {
+        span: Span,
+        expected: Ty,
+        found: Ty,
+    },
+
+    #[error("undefined variable: {name}")]
+    Undefined {
+        span: Span,
+        name: String,
+    },
+
+    #[error("dimension mismatch: cannot combine {dim1} with {dim2}")]
+    DimensionMismatch {
+        span: Span,
+        dim1: String,
+        dim2: String,
+    },
+
+    #[error("resource constraint violated: {message}")]
+    ResourceViolation {
+        span: Span,
+        message: String,
+    },
+
+    #[error("occurs check failed: infinite type")]
+    OccursCheck {
+        span: Span,
+    },
+
+    #[error("{message}")]
+    Custom {
+        span: Span,
+        message: String,
+    },
+}
+
+impl TypeError {
+    /// Get the source span of this error.
+    pub fn span(&self) -> Span {
+        match self {
+            TypeError::Mismatch { span, .. } => *span,
+            TypeError::Undefined { span, .. } => *span,
+            TypeError::DimensionMismatch { span, .. } => *span,
+            TypeError::ResourceViolation { span, .. } => *span,
+            TypeError::OccursCheck { span } => *span,
+            TypeError::Custom { span, .. } => *span,
+        }
+    }
+}
diff --git a/compiler/eclexia-typeck/src/infer.rs b/compiler/eclexia-typeck/src/infer.rs
new file mode 100644
index 0000000..349933d
--- /dev/null
+++ b/compiler/eclexia-typeck/src/infer.rs
@@ -0,0 +1,6 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Type inference implementation.
+
+// TODO: Implement Algorithm W for Hindley-Milner type inference
diff --git a/compiler/eclexia-typeck/src/lib.rs b/compiler/eclexia-typeck/src/lib.rs
new file mode 100644
index 0000000..adc7e8d
--- /dev/null
+++ b/compiler/eclexia-typeck/src/lib.rs
@@ -0,0 +1,102 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Type checker for the Eclexia programming language.
+//!
+//! This crate implements bidirectional type checking with:
+//! - Hindley-Milner type inference
+//! - Dimensional type checking for resource types
+//! - Constraint solving for resource bounds
+//! - Effect tracking
+
+mod infer;
+mod unify;
+mod env;
+mod error;
+
+use eclexia_ast::types::{Ty, TypeVar};
+use eclexia_ast::SourceFile;
+use rustc_hash::FxHashMap;
+
+pub use error::{TypeError, TypeResult};
+pub use env::TypeEnv;
+
+/// Type checker state.
+pub struct TypeChecker {
+    /// Environment with type bindings
+    env: TypeEnv,
+    /// Substitution from type variables to types
+    substitution: FxHashMap<TypeVar, Ty>,
+    /// Next type variable ID
+    next_var: u32,
+    /// Collected errors
+    errors: Vec<TypeError>,
+}
+
+impl TypeChecker {
+    /// Create a new type checker.
+    pub fn new() -> Self {
+        Self {
+            env: TypeEnv::new(),
+            substitution: FxHashMap::default(),
+            next_var: 0,
+            errors: Vec::new(),
+        }
+    }
+
+    /// Generate a fresh type variable.
+    pub fn fresh_var(&mut self) -> Ty {
+        let var = TypeVar::new(self.next_var);
+        self.next_var += 1;
+        Ty::Var(var)
+    }
+
+    /// Check a source file.
+    pub fn check_file(&mut self, _file: &SourceFile) -> Vec<TypeError> {
+        // TODO: Implement type checking
+        std::mem::take(&mut self.errors)
+    }
+
+    /// Apply the current substitution to a type, resolving bound variables
+    /// transitively.
+    pub fn apply(&self, ty: &Ty) -> Ty {
+        match ty {
+            Ty::Var(v) => {
+                if let Some(t) = self.substitution.get(v) {
+                    // Follow substitution chains to a representative.
+                    self.apply(t)
+                } else {
+                    ty.clone()
+                }
+            }
+            Ty::Named { name, args } => Ty::Named {
+                name: name.clone(),
+                args: args.iter().map(|t| self.apply(t)).collect(),
+            },
+            Ty::Function { params, ret } => Ty::Function {
+                params: params.iter().map(|t| self.apply(t)).collect(),
+                ret: Box::new(self.apply(ret)),
+            },
+            Ty::Tuple(elems) => Ty::Tuple(elems.iter().map(|t| self.apply(t)).collect()),
+            Ty::Array { elem, size } => Ty::Array {
+                elem: Box::new(self.apply(elem)),
+                size: *size,
+            },
+            // NOTE(review): bound `vars` are not removed from the
+            // substitution before descending into `body`, so a mapped bound
+            // variable would be captured — confirm the invariant that bound
+            // vars never appear in the substitution.
+            Ty::ForAll { vars, body } => Ty::ForAll {
+                vars: vars.clone(),
+                body: Box::new(self.apply(body)),
+            },
+            _ => ty.clone(),
+        }
+    }
+}
+
+impl Default for TypeChecker {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+/// Type check a source file.
+pub fn check(file: &SourceFile) -> Vec<TypeError> {
+    let mut checker = TypeChecker::new();
+    checker.check_file(file)
+}
diff --git a/compiler/eclexia-typeck/src/unify.rs b/compiler/eclexia-typeck/src/unify.rs
new file mode 100644
index 0000000..f31affc
--- /dev/null
+++ b/compiler/eclexia-typeck/src/unify.rs
@@ -0,0 +1,6 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Type unification implementation.
+
+// TODO: Implement unification algorithm
diff --git a/compiler/eclexia/Cargo.toml b/compiler/eclexia/Cargo.toml
new file mode 100644
index 0000000..8b0ad7d
--- /dev/null
+++ b/compiler/eclexia/Cargo.toml
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# SPDX-FileCopyrightText: 2025 Jonathan D.A.
Jewell
+
+[package]
+name = "eclexia"
+description = "Eclexia compiler and toolchain"
+version.workspace = true
+edition.workspace = true
+authors.workspace = true
+license.workspace = true
+repository.workspace = true
+
+[[bin]]
+name = "eclexia"
+path = "src/main.rs"
+
+[dependencies]
+eclexia-ast = { path = "../eclexia-ast" }
+eclexia-lexer = { path = "../eclexia-lexer" }
+eclexia-parser = { path = "../eclexia-parser" }
+eclexia-typeck = { path = "../eclexia-typeck" }
+eclexia-hir = { path = "../eclexia-hir" }
+eclexia-mir = { path = "../eclexia-mir" }
+eclexia-codegen = { path = "../eclexia-codegen" }
+eclexia-runtime = { path = "../../runtime/eclexia-runtime" }
+
+clap = { workspace = true }
+miette = { workspace = true }
+rustyline = { workspace = true }
+smol_str = { workspace = true }
diff --git a/compiler/eclexia/src/commands.rs b/compiler/eclexia/src/commands.rs
new file mode 100644
index 0000000..57d6b2c
--- /dev/null
+++ b/compiler/eclexia/src/commands.rs
@@ -0,0 +1,170 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Command implementations for the Eclexia CLI.
+
+use std::path::Path;
+use miette::{Context, IntoDiagnostic};
+
+/// Build an Eclexia program.
+pub fn build(input: &Path, _output: Option<&Path>, _target: &str) -> miette::Result<()> {
+    let source = std::fs::read_to_string(input)
+        .into_diagnostic()
+        .wrap_err_with(|| format!("Failed to read {}", input.display()))?;
+
+    // Parse
+    let (file, parse_errors) = eclexia_parser::parse(&source);
+
+    if !parse_errors.is_empty() {
+        eprintln!("Parse errors:");
+        for err in &parse_errors {
+            eprintln!(" {}", err);
+        }
+        return Err(miette::miette!("Parsing failed with {} errors", parse_errors.len()));
+    }
+
+    // Type check
+    let type_errors = eclexia_typeck::check(&file);
+
+    if !type_errors.is_empty() {
+        eprintln!("Type errors:");
+        for err in &type_errors {
+            eprintln!(" {}", err);
+        }
+        return Err(miette::miette!("Type checking failed with {} errors", type_errors.len()));
+    }
+
+    println!("✓ Build successful");
+    println!(" {} items parsed", file.items.len());
+
+    // TODO: Lower to HIR, MIR, and generate code
+
+    Ok(())
+}
+
+/// Build and run an Eclexia program.
+pub fn run(input: &Path, observe_shadow: bool, carbon_report: bool) -> miette::Result<()> {
+    build(input, None, "native")?;
+
+    if observe_shadow {
+        println!("Shadow price observation enabled (not yet implemented)");
+    }
+
+    if carbon_report {
+        println!("Carbon reporting enabled (not yet implemented)");
+    }
+
+    // TODO: Execute the compiled program
+
+    Ok(())
+}
+
+/// Type check a file.
+pub fn check(input: &Path) -> miette::Result<()> {
+    let source = std::fs::read_to_string(input)
+        .into_diagnostic()
+        .wrap_err_with(|| format!("Failed to read {}", input.display()))?;
+
+    let (file, parse_errors) = eclexia_parser::parse(&source);
+
+    if !parse_errors.is_empty() {
+        eprintln!("Parse errors:");
+        for err in &parse_errors {
+            eprintln!(" {}", err);
+        }
+        return Err(miette::miette!("Parsing failed"));
+    }
+
+    let type_errors = eclexia_typeck::check(&file);
+
+    if !type_errors.is_empty() {
+        eprintln!("Type errors:");
+        for err in &type_errors {
+            eprintln!(" {}", err);
+        }
+        return Err(miette::miette!("Type checking failed"));
+    }
+
+    println!("✓ No errors found");
+
+    Ok(())
+}
+
+/// Format source files (stub: the formatter is not implemented yet).
+pub fn fmt(inputs: &[std::path::PathBuf], check: bool) -> miette::Result<()> {
+    for input in inputs {
+        if check {
+            println!("Checking {}...", input.display());
+        } else {
+            println!("Formatting {}...", input.display());
+        }
+        // TODO: Implement formatter
+    }
+
+    Ok(())
+}
+
+/// Initialize a new project.
+pub fn init(name: Option<&str>) -> miette::Result<()> {
+    let project_name = name.unwrap_or("my-eclexia-project");
+
+    println!("Initializing new Eclexia project: {}", project_name);
+
+    // Create directory structure
+    std::fs::create_dir_all(project_name).into_diagnostic()?;
+    std::fs::create_dir_all(format!("{}/src", project_name)).into_diagnostic()?;
+
+    // Create eclexia.toml
+    let config = format!(r#"# SPDX-License-Identifier: MIT
+# Eclexia project configuration
+
+[package]
+name = "{}"
+version = "0.1.0"
+edition = "2025"
+
+[dependencies]
+# Add your dependencies here
+
+[resources]
+default-energy-budget = "1000J"
+default-carbon-budget = "100gCO2e"
+"#, project_name);
+
+    std::fs::write(format!("{}/eclexia.toml", project_name), config).into_diagnostic()?;
+
+    // Create main.ecl
+    let main = r#"// SPDX-License-Identifier: MIT
+// Main entry point
+
+def main() -> Unit
+    @requires: energy < 1J
+{
+    println("Hello, Economics-as-Code!")
+}
+"#;
+
+    std::fs::write(format!("{}/src/main.ecl", project_name), main).into_diagnostic()?;
+
+    println!("✓ Created project in {}/", project_name);
+    println!();
+    println!("To get started:");
+    println!(" cd {}", project_name);
+    println!(" eclexia run src/main.ecl");
+
+    Ok(())
+}
+
+/// Run tests.
+pub fn test(_filter: Option<&str>) -> miette::Result<()> {
+    println!("Running tests...");
+    // TODO: Implement test runner
+    Ok(())
+}
+
+/// Run benchmarks.
+pub fn bench(_filter: Option<&str>) -> miette::Result<()> {
+    println!("Running benchmarks...");
+    // TODO: Implement benchmark runner
+    Ok(())
+}
diff --git a/compiler/eclexia/src/main.rs b/compiler/eclexia/src/main.rs
new file mode 100644
index 0000000..075de90
--- /dev/null
+++ b/compiler/eclexia/src/main.rs
@@ -0,0 +1,127 @@
+// SPDX-License-Identifier: AGPL-3.0-or-later
+// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell
+
+//! Eclexia compiler and toolchain CLI.
+
+use clap::{Parser, Subcommand};
+use std::path::PathBuf;
+
+mod commands;
+mod repl;
+
+#[derive(Parser)]
+#[command(name = "eclexia")]
+#[command(author, version, about = "Eclexia: Economics-as-Code programming language")]
+#[command(propagate_version = true)]
+struct Cli {
+    #[command(subcommand)]
+    command: Commands,
+}
+
+#[derive(Subcommand)]
+enum Commands {
+    /// Build an Eclexia program
+    Build {
+        /// Input file
+        #[arg(value_name = "FILE")]
+        input: PathBuf,
+
+        /// Output file
+        #[arg(short, long)]
+        output: Option<PathBuf>,
+
+        /// Target platform (native, wasm)
+        #[arg(short, long, default_value = "native")]
+        target: String,
+    },
+
+    /// Build and run an Eclexia program
+    Run {
+        /// Input file
+        #[arg(value_name = "FILE")]
+        input: PathBuf,
+
+        /// Show shadow prices during execution
+        #[arg(long)]
+        observe_shadow: bool,
+
+        /// Generate carbon report
+        #[arg(long)]
+        carbon_report: bool,
+    },
+
+    /// Type check a file without building
+    Check {
+        /// Input file
+        #[arg(value_name = "FILE")]
+        input: PathBuf,
+    },
+
+    /// Format Eclexia source code
+    Fmt {
+        /// Input file(s)
+        #[arg(value_name = "FILE")]
+        input: Vec<PathBuf>,
+
+        /// Check formatting without modifying files
+        #[arg(long)]
+        check: bool,
+    },
+
+    /// Run the interactive REPL
+    Repl,
+
+    /// Initialize a new Eclexia project
+    Init {
+        /// Project name
+        #[arg(value_name = "NAME")]
+        name: Option<String>,
+    },
+
+    /// Run tests
+    Test {
+        /// Test filter pattern
+        #[arg(value_name = "FILTER")]
+        filter: Option<String>,
+    },
+
+    /// Run benchmarks
+    Bench {
+        /// Benchmark filter pattern
+        #[arg(value_name = "FILTER")]
+        filter: Option<String>,
+    },
+}
+
+fn main() -> miette::Result<()> {
+    let cli = Cli::parse();
+
+    match cli.command {
+        Commands::Build { input, output, target } => {
+            commands::build(&input, output.as_deref(), &target)?;
+        }
+        Commands::Run { input, observe_shadow, carbon_report } => {
+            commands::run(&input, observe_shadow, carbon_report)?;
+        }
+        Commands::Check { input } => {
+            
commands::check(&input)?; + } + Commands::Fmt { input, check } => { + commands::fmt(&input, check)?; + } + Commands::Repl => { + repl::run()?; + } + Commands::Init { name } => { + commands::init(name.as_deref())?; + } + Commands::Test { filter } => { + commands::test(filter.as_deref())?; + } + Commands::Bench { filter } => { + commands::bench(filter.as_deref())?; + } + } + + Ok(()) +} diff --git a/compiler/eclexia/src/repl.rs b/compiler/eclexia/src/repl.rs new file mode 100644 index 0000000..c8b60ae --- /dev/null +++ b/compiler/eclexia/src/repl.rs @@ -0,0 +1,157 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Interactive REPL for Eclexia. + +use rustyline::error::ReadlineError; +use rustyline::DefaultEditor; + +/// Run the interactive REPL. +pub fn run() -> miette::Result<()> { + println!("Eclexia REPL v0.1.0"); + println!("Type :help for help, :quit to exit"); + println!(); + + let mut rl = DefaultEditor::new().map_err(|e| miette::miette!("Failed to create editor: {}", e))?; + + // Try to load history + let history_path = dirs::data_dir() + .map(|d| d.join("eclexia").join("repl_history")) + .unwrap_or_else(|| std::path::PathBuf::from(".eclexia_history")); + + let _ = rl.load_history(&history_path); + + loop { + match rl.readline("ecl> ") { + Ok(line) => { + let trimmed = line.trim(); + + if trimmed.is_empty() { + continue; + } + + let _ = rl.add_history_entry(&line); + + // Handle REPL commands + if trimmed.starts_with(':') { + match handle_command(trimmed) { + CommandResult::Continue => continue, + CommandResult::Quit => break, + } + continue; + } + + // Parse and evaluate + eval_line(trimmed); + } + Err(ReadlineError::Interrupted) => { + println!("^C"); + continue; + } + Err(ReadlineError::Eof) => { + println!("Goodbye!"); + break; + } + Err(err) => { + eprintln!("Error: {:?}", err); + break; + } + } + } + + // Save history + if let Some(parent) = history_path.parent() { + let _ = 
std::fs::create_dir_all(parent); + } + let _ = rl.save_history(&history_path); + + Ok(()) +} + +enum CommandResult { + Continue, + Quit, +} + +fn handle_command(cmd: &str) -> CommandResult { + match cmd { + ":quit" | ":q" | ":exit" => CommandResult::Quit, + ":help" | ":h" | ":?" => { + println!("Available commands:"); + println!(" :help, :h, :? Show this help"); + println!(" :quit, :q Exit the REPL"); + println!(" :type Show the type of an expression"); + println!(" :shadow Show current shadow prices"); + println!(" :resources Show resource usage"); + println!(" :clear Clear the screen"); + CommandResult::Continue + } + ":clear" => { + print!("\x1B[2J\x1B[1;1H"); + CommandResult::Continue + } + ":shadow" => { + println!("Shadow prices (not yet implemented):"); + println!(" λ_energy = 0.0"); + println!(" λ_time = 0.0"); + println!(" λ_memory = 0.0"); + println!(" λ_carbon = 0.0"); + CommandResult::Continue + } + ":resources" => { + println!("Resource usage (not yet implemented):"); + println!(" Energy: 0 J"); + println!(" Time: 0 ms"); + println!(" Memory: 0 B"); + println!(" Carbon: 0 gCO2e"); + CommandResult::Continue + } + _ if cmd.starts_with(":type ") => { + let expr = &cmd[6..]; + println!("Type of '{}': (not yet implemented)", expr); + CommandResult::Continue + } + _ => { + println!("Unknown command: {}. 
Type :help for help.", cmd);
+            CommandResult::Continue
+        }
+    }
+}
+
+fn eval_line(line: &str) {
+    // Parse as expression
+    let source = format!("def __repl__() {{ {} }}", line);
+    let (file, errors) = eclexia_parser::parse(&source);
+
+    if !errors.is_empty() {
+        for err in &errors {
+            eprintln!("Error: {}", err);
+        }
+        return;
+    }
+
+    // Type check
+    let type_errors = eclexia_typeck::check(&file);
+    if !type_errors.is_empty() {
+        for err in &type_errors {
+            eprintln!("Type error: {}", err);
+        }
+        return;
+    }
+
+    // TODO: Evaluate and print result
+    println!("(parsed {} items)", file.items.len());
+}
+
+// Helper for finding data directories
+mod dirs {
+    use std::path::PathBuf;
+
+    pub fn data_dir() -> Option<PathBuf> {
+        std::env::var_os("XDG_DATA_HOME")
+            .map(PathBuf::from)
+            .or_else(|| {
+                std::env::var_os("HOME").map(|h| PathBuf::from(h).join(".local/share"))
+            })
+    }
+}
diff --git a/examples/carbon_aware.ecl b/examples/carbon_aware.ecl
new file mode 100644
index 0000000..3f5e3fb
--- /dev/null
+++ b/examples/carbon_aware.ecl
@@ -0,0 +1,29 @@
+// SPDX-License-Identifier: MIT
+// Carbon-aware ML training example
+
+type Dataset = Array[Float]
+type Model = { weights: Array[Float], bias: Float }
+
+async def train_model(data: Dataset) -> Model
+    @requires: carbon < 500gCO2e
+    @optimize: minimize carbon
+    @defer_until: grid_carbon_intensity < 100gCO2e/kWh
+{
+    // This computation will wait for low-carbon electricity
+    // Typical time: overnight or during sunny/windy periods
+
+    let model = Model { weights: [], bias: 0.0 }
+
+    for epoch in 0..100 {
+        // Training loop placeholder
+        let loss = compute_loss(model, data)
+        let gradients = compute_gradients(model, data)
+        update_model(model, gradients)
+    }
+
+    model
+}
+
+def compute_loss(model: Model, data: Dataset) -> Float { 0.0 }
+def compute_gradients(model: Model, data: Dataset) -> Array[Float] { [] }
+def update_model(model: Model, gradients: Array[Float]) -> Unit { }
diff --git a/examples/fibonacci.ecl 
b/examples/fibonacci.ecl new file mode 100644 index 0000000..8b1dd23 --- /dev/null +++ b/examples/fibonacci.ecl @@ -0,0 +1,32 @@ +// SPDX-License-Identifier: MIT +// Adaptive Fibonacci implementation + +adaptive def fibonacci(n: Int) -> Int + @requires: energy < 100J + @optimize: minimize latency +{ + @solution "memoized": + @when: n > 20 + @provides: energy: 50J, latency: 5ms + { + memo_fib(n) + } + + @solution "naive": + @when: true + @provides: energy: 10J, latency: 100ms + { + if n <= 1 then { n } + else { fibonacci(n - 1) + fibonacci(n - 2) } + } +} + +def memo_fib(n: Int) -> Int { + // Memoized implementation placeholder + 0 +} + +def main() -> Unit { + let result = fibonacci(10) + println("fibonacci(10) = " + result.to_string()) +} diff --git a/examples/hello.ecl b/examples/hello.ecl new file mode 100644 index 0000000..0e3596b --- /dev/null +++ b/examples/hello.ecl @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: MIT +// Hello World example + +def main() -> Unit + @requires: energy < 1J +{ + println("Hello, Economics-as-Code!") +} diff --git a/examples/matrix_multiply.ecl b/examples/matrix_multiply.ecl new file mode 100644 index 0000000..0c0a51c --- /dev/null +++ b/examples/matrix_multiply.ecl @@ -0,0 +1,39 @@ +// SPDX-License-Identifier: MIT +// Adaptive matrix multiplication + +type Matrix = Array[Array[Float]] + +adaptive def matrix_multiply(A: Matrix, B: Matrix) -> Matrix + @requires: energy < 100J, latency < 500ms + @optimize: minimize energy, minimize carbon +{ + @solution "gpu_accelerated": + @when: gpu_available() and matrix_size(A) > 1000 + @provides: energy: 50J, latency: 100ms, carbon: 5gCO2e + { + gpu::multiply(A, B) + } + + @solution "parallel_cpu": + @when: cpu_cores() >= 4 + @provides: energy: 80J, latency: 300ms, carbon: 8gCO2e + { + parallel::multiply(A, B) + } + + @solution "naive": + @when: true + @provides: energy: 30J, latency: 800ms, carbon: 3gCO2e + { + naive_multiply(A, B) + } +} + +def naive_multiply(A: Matrix, B: Matrix) -> Matrix { + 
// Naive O(n³) implementation placeholder + A +} + +def gpu_available() -> Bool { false } +def cpu_cores() -> Int { 4 } +def matrix_size(m: Matrix) -> Int { 0 } diff --git a/runtime/eclexia-runtime/Cargo.toml b/runtime/eclexia-runtime/Cargo.toml new file mode 100644 index 0000000..b0218fa --- /dev/null +++ b/runtime/eclexia-runtime/Cargo.toml @@ -0,0 +1,14 @@ +# SPDX-License-Identifier: AGPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +[package] +name = "eclexia-runtime" +description = "Runtime system for Eclexia" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +tokio = { workspace = true } diff --git a/runtime/eclexia-runtime/src/carbon.rs b/runtime/eclexia-runtime/src/carbon.rs new file mode 100644 index 0000000..4f46e3b --- /dev/null +++ b/runtime/eclexia-runtime/src/carbon.rs @@ -0,0 +1,6 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Carbon intensity monitor and scheduler. + +// TODO: Implement carbon-aware scheduling diff --git a/runtime/eclexia-runtime/src/lib.rs b/runtime/eclexia-runtime/src/lib.rs new file mode 100644 index 0000000..6df2637 --- /dev/null +++ b/runtime/eclexia-runtime/src/lib.rs @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Runtime system for Eclexia. +//! +//! The runtime provides: +//! - Adaptive scheduler for solution selection +//! - Shadow price computation engine +//! - Resource profiler +//! - Carbon monitor +//! - Memory manager + +pub mod scheduler; +pub mod shadow; +pub mod profiler; +pub mod carbon; + +/// Runtime context for Eclexia programs. +pub struct Runtime { + // TODO: Add runtime state +} + +impl Runtime { + /// Create a new runtime. 
+ pub fn new() -> Self { + Self {} + } +} + +impl Default for Runtime { + fn default() -> Self { + Self::new() + } +} diff --git a/runtime/eclexia-runtime/src/profiler.rs b/runtime/eclexia-runtime/src/profiler.rs new file mode 100644 index 0000000..eca90bf --- /dev/null +++ b/runtime/eclexia-runtime/src/profiler.rs @@ -0,0 +1,6 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Resource profiler for tracking energy, time, memory, carbon. + +// TODO: Implement resource profiling diff --git a/runtime/eclexia-runtime/src/scheduler.rs b/runtime/eclexia-runtime/src/scheduler.rs new file mode 100644 index 0000000..a75c5d3 --- /dev/null +++ b/runtime/eclexia-runtime/src/scheduler.rs @@ -0,0 +1,6 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Adaptive scheduler for solution selection. + +// TODO: Implement adaptive scheduling diff --git a/runtime/eclexia-runtime/src/shadow.rs b/runtime/eclexia-runtime/src/shadow.rs new file mode 100644 index 0000000..f3e3a38 --- /dev/null +++ b/runtime/eclexia-runtime/src/shadow.rs @@ -0,0 +1,6 @@ +// SPDX-License-Identifier: AGPL-3.0-or-later +// SPDX-FileCopyrightText: 2025 Jonathan D.A. Jewell + +//! Shadow price computation engine. + +// TODO: Implement shadow price computation via LP duality