From 273b9faa5e4e6cbda2d0b877f56de984fe1c99d4 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Tue, 16 Apr 2024 15:23:07 +0000
Subject: [PATCH 01/60] rename CONTRIBUTING

---
 CONTRIBUTING => CONTRIBUTING.md | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename CONTRIBUTING => CONTRIBUTING.md (100%)

diff --git a/CONTRIBUTING b/CONTRIBUTING.md
similarity index 100%
rename from CONTRIBUTING
rename to CONTRIBUTING.md

From 3f50d080e9d248506da5feac73949d9d656210ce Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Tue, 16 Apr 2024 15:23:57 +0000
Subject: [PATCH 02/60] add flush_state to readme example

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index ad775e2..1a97864 100644
--- a/README.md
+++ b/README.md
@@ -58,6 +58,8 @@ fn fitness(dna: &MyAgentDNA) -> f32 {
         let above = n > 0.5;
         let res = agent.network.predict([n]);
 
+        agent.network.flush_state();
+
         let resi = res.iter().max_index();
 
         if resi == 0 ^ above {

From a94198a9bc2776a451bca27211d0c72291df5842 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Tue, 16 Apr 2024 16:14:42 +0000
Subject: [PATCH 03/60] create basic log test

---
 src/lib.rs | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 64 insertions(+)

diff --git a/src/lib.rs b/src/lib.rs
index ee9f769..ac569b0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -23,3 +23,67 @@ pub use topology::*;
 
 #[cfg(feature = "serde")]
 pub use nnt_serde::*;
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::prelude::*;
+
+    #[derive(RandomlyMutable, DivisionReproduction, Clone)]
+    struct AgentDNA {
+        network: NeuralNetworkTopology<2, 1>,
+    }
+
+    impl Prunable for AgentDNA {}
+
+    impl GenerateRandom for AgentDNA {
+        fn gen_random(rng: &mut impl Rng) -> Self {
+            Self {
+                network: NeuralNetworkTopology::new(0.01, 3, rng),
+            }
+        }
+    }
+
+    #[test]
+    fn basic_test() {
+        let fitness = |g: &AgentDNA| {
+            let network = NeuralNetwork::from(&g.network);
+            let mut fitness = 0.;
+            let mut rng = rand::thread_rng();
+
+            for _ in 0..100 {
+                let n = rng.gen::<f32>() * 10000.;
+                let base = rng.gen::<f32>() * 10.;
+                let expected = n.log(base);
+
+                let [answer] = network.predict([n, base]);
+                network.flush_state();
+
+                fitness += 5. / (answer - expected).abs();
+            }
+
+            fitness
+        };
+
+        let mut rng = rand::thread_rng();
+
+        let mut sim = GeneticSim::new(
+            Vec::gen_random(&mut rng, 100),
+            fitness,
+            division_pruning_nextgen,
+        );
+
+        for _ in 0..100 {
+            sim.next_generation();
+        }
+
+        let mut fits: Vec<_> = sim.genomes
+            .iter()
+            .map(fitness)
+            .collect();
+
+        fits.sort_by(|a, b| a.partial_cmp(&b).unwrap());
+
+        dbg!(fits);
+    }
+}
\ No newline at end of file

From 339b90b3c7970a9b88b162db34551d190d947324 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Tue, 16 Apr 2024 18:05:13 +0000
Subject: [PATCH 04/60] create plotters example

---
 Cargo.lock       | 744 ++++++++++++++++++++++++++++++++++++++++++++++-
 Cargo.toml       |   1 +
 examples/plot.rs | 135 +++++++++
 3 files changed, 879 insertions(+), 1 deletion(-)
 create mode 100644 examples/plot.rs

diff --git a/Cargo.lock b/Cargo.lock
index be4d7b8..5c98cd1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2,6 +2,33 @@ # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
version = 3 +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "autocfg" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" + [[package]] name = "bincode" version = "1.3.3" @@ -11,18 +38,135 @@ dependencies = [ "serde", ] +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + [[package]] name = "bitflags" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytemuck" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "cc" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f6e324229dc011159fcc089755d1e2e216a90d43a7dea6853ca740b84f35e7" + [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-targets", +] + +[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + +[[package]] +name = "const-cstr" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3d0b5ff30645a68f35ece8cea4556ca14ef8a1651455f789a099a0513532a6" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "core-graphics" 
+version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-graphics-types", + "foreign-types", + "libc", +] + +[[package]] +name = "core-graphics-types" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45390e6114f68f718cc7a830514a96f903cccd70d02a8f6d9f643ac4ba45afaf" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "libc", +] + +[[package]] +name = "core-text" +version = "19.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d74ada66e07c1cefa18f8abfba765b486f250de2e4a999e5727fc0dd4b4a25" +dependencies = [ + "core-foundation", + "core-graphics", + "foreign-types", + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +dependencies = [ + "cfg-if", +] + [[package]] name = "crossbeam-deque" version = "0.8.5" @@ -48,12 +192,140 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dlib" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412" +dependencies = [ + "libloading", +] + +[[package]] +name = "dwrote" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439a1c2ba5611ad3ed731280541d36d2e9c4ac5e7fb818a27b604bdc5a6aa65b" +dependencies = [ + "lazy_static", + "libc", + "winapi", + "wio", +] + [[package]] name = "either" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +[[package]] +name = "fdeflate" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f9bfee30e4dedf0ab8b422f03af778d9612b63f502710fc500a334ebe2de645" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "flate2" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "float-ord" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bad48618fdb549078c333a7a8528acb57af271d0433bdecd523eb620628364e" + +[[package]] +name = "font-kit" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21fe28504d371085fae9ac7a3450f0b289ab71e07c8e57baa3fb68b9e57d6ce5" +dependencies = [ + "bitflags 1.3.2", + "byteorder", + "core-foundation", + "core-graphics", + "core-text", + "dirs-next", + "dwrote", + 
"float-ord", + "freetype", + "lazy_static", + "libc", + "log", + "pathfinder_geometry", + "pathfinder_simd", + "walkdir", + "winapi", + "yeslogic-fontconfig-sys", +] + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "freetype" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efc8599a3078adf8edeb86c71e9f8fa7d88af5ca31e806a867756081f90f5d83" +dependencies = [ + "freetype-sys", + "libc", +] + +[[package]] +name = "freetype-sys" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66ee28c39a43d89fbed8b4798fb4ba56722cfd2b5af81f9326c27614ba88ecd5" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + [[package]] name = "genetic-rs" version = "0.5.1" @@ -98,12 +370,74 @@ dependencies = [ "wasi", ] +[[package]] +name = "gif" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80792593675e051cf94a4b111980da2ba60d4a83e43e0048c5693baab3977045" +dependencies = [ + "color_quant", + "weezl", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "image" +version = "0.24.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" +dependencies = [ + "bytemuck", + "byteorder", + "color_quant", + "jpeg-decoder", + "num-traits", + "png", +] + [[package]] name = "itoa" version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +[[package]] +name = "jpeg-decoder" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -116,14 +450,51 @@ version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +[[package]] +name = "libloading" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +dependencies = [ + "cfg-if", + "windows-targets", +] + +[[package]] +name = "libredox" +version = "0.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.5.0", + "libc", +] + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + +[[package]] +name = "miniz_oxide" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +dependencies = [ + "adler", + "simd-adler32", +] + [[package]] name = "neat" version = "0.5.1" dependencies = [ "bincode", - "bitflags", + "bitflags 2.5.0", "genetic-rs", "lazy_static", + "plotters", "rand", "rayon", "serde", @@ -131,6 +502,105 @@ dependencies = [ "serde_json", ] +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "pathfinder_geometry" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b7e7b4ea703700ce73ebf128e1450eb69c3a8329199ffbfb9b2a0418e5ad3" +dependencies = [ + "log", + "pathfinder_simd", +] + +[[package]] +name = "pathfinder_simd" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebf45976c56919841273f2a0fc684c28437e2f304e264557d9c72be5d5a718be" +dependencies = [ + "rustc_version", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "chrono", + "font-kit", + "image", + "lazy_static", + "num-traits", + "pathfinder_geometry", + "plotters-backend", + "plotters-bitmap", + "plotters-svg", + "ttf-parser", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-bitmap" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cebbe1f70205299abc69e8b295035bb52a6a70ee35474ad10011f0a4efb8543" +dependencies = [ + "gif", + "image", + "plotters-backend", +] + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "png" +version = "0.17.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06e4b0d3d1312775e782c86c91a111aa1f910cbb65e1337f9975b5f9a554b5e1" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -205,18 +675,53 @@ dependencies = [ "crossbeam-utils", ] 
+[[package]] +name = "redox_users" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + [[package]] name = "replace_with" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3a8614ee435691de62bcffcf4a66d91b3594bf1428a5722e79103249a095690" +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + [[package]] name = "ryu" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "semver" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" + [[package]] name = "serde" version = "1.0.197" @@ -257,6 +762,12 @@ dependencies = [ "serde", ] +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + [[package]] name = "syn" version = "2.0.51" @@ -268,14 +779,245 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "thiserror" +version = "1.0.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "ttf-parser" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "375812fa44dab6df41c195cd2f7fecb488f6c09fbaafb62807488cefab642bff" + [[package]] name = "unicode-ident" version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" 
+dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "weezl" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "wio" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d129932f4644ac2396cb456385cbf9e63b5b30c6e8dc4820bdca4eb082037a5" +dependencies = [ + "winapi", +] + +[[package]] +name = "yeslogic-fontconfig-sys" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2bbd69036d397ebbff671b1b8e4d918610c181c5a16073b96f984a38d08c386" +dependencies = [ + "const-cstr", + "dlib", + "once_cell", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml index 91247ad..f2f976e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,3 +37,4 @@ serde-big-array = { version = "0.5.1", optional = true } [dev-dependencies] bincode = "1.3.3" serde_json = "1.0.114" +plotters = "0.3.5" \ No newline at end of file diff --git a/examples/plot.rs b/examples/plot.rs new file mode 100644 index 0000000..48db937 --- /dev/null +++ b/examples/plot.rs @@ -0,0 +1,135 @@ +use std::{error::Error, sync::{Arc, Mutex}}; + +use neat::*; +use rand::prelude::*; +use plotters::prelude::*; + +#[derive(RandomlyMutable, DivisionReproduction, Clone)] +struct AgentDNA { + network: NeuralNetworkTopology<2, 1>, +} + +impl Prunable for AgentDNA {} + +impl GenerateRandom for AgentDNA { + fn gen_random(rng: &mut impl Rng) -> Self { + Self { + network: NeuralNetworkTopology::new(0.01, 3, rng), + } + } +} + +fn fitness(g: &AgentDNA) -> f32 { + let network = NeuralNetwork::from(&g.network); + let mut fitness = 0.; + let mut rng = rand::thread_rng(); + + for _ in 0..100 { + let n = rng.gen::() * 10000.; + let base = rng.gen::() * 10.; + let expected = n.log(base); + + let [answer] = network.predict([n, base]); + network.flush_state(); + + fitness += 5. 
+    }
+
+    fitness
+}
+
+struct PlottingNG {
+    performance_stats: Arc<Mutex<Vec<PerformanceStats>>>,
+}
+
+impl NextgenFn<AgentDNA> for PlottingNG {
+    fn next_gen(&self, fitness: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
+        let l = fitness.len();
+
+        let high = fitness[0].1;
+
+        let median = fitness[l / 2].1;
+
+        let low = fitness[l-1].1;
+
+        let mut ps = self.performance_stats.lock().unwrap();
+        ps.push(PerformanceStats { high, median, low });
+
+        division_pruning_nextgen(fitness)
+    }
+}
+
+struct PerformanceStats {
+    high: f32,
+    median: f32,
+    low: f32,
+}
+
+const OUTPUT_FILE_NAME: &'static str = "fitness-plot.png";
+const GENS: usize = 100;
+fn main() -> Result<(), Box<dyn Error>> {
+    let mut rng = rand::thread_rng();
+
+    let performance_stats = Arc::new(Mutex::new(Vec::with_capacity(GENS)));
+    let ng = PlottingNG { performance_stats: performance_stats.clone() };
+
+    let mut sim = GeneticSim::new(
+        Vec::gen_random(&mut rng, 100),
+        fitness,
+        ng,
+    );
+
+    println!("Training...");
+
+    for _ in 0..GENS {
+        sim.next_generation();
+    }
+
+    println!("Training complete, collecting data and building chart...");
+
+    let root = BitMapBackend::new(OUTPUT_FILE_NAME, (640, 480)).into_drawing_area();
+    root.fill(&WHITE)?;
+
+    let mut chart = ChartBuilder::on(&root)
+        .caption("agent fitness over gens", ("sans-serif", 50).into_font())
+        .margin(5)
+        .x_label_area_size(30)
+        .y_label_area_size(30)
+        .build_cartesian_2d(0usize..100, 0f32..200.0)?;
+
+    chart.configure_mesh().draw()?;
+
+    let data: Vec<_> = Arc::into_inner(performance_stats).unwrap().into_inner().unwrap()
+        .into_iter()
+        .enumerate()
+        .collect();
+    let highs = data
+        .iter()
+        .map(|(i, PerformanceStats { high, .. })| (*i, *high));
+
+    let medians = data
+        .iter()
+        .map(|(i, PerformanceStats { median, .. })| (*i, *median));
+
+    let lows = data
+        .iter()
+        .map(|(i, PerformanceStats { low, .. })| (*i, *low));
+
+    chart
+        .draw_series(LineSeries::new(highs, &GREEN))?
+        .label("high");
+
+    chart
+        .draw_series(LineSeries::new(medians, &YELLOW))?
+        .label("median");
+
+    chart
+        .draw_series(LineSeries::new(lows, &RED))?
+        .label("low");
+
+    root.present()?;
+
+    println!("Complete");
+
+    Ok(())
+}
\ No newline at end of file

From 5cddae7b31b34c8a0f5d37398558f833f3e22560 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Tue, 16 Apr 2024 18:25:49 +0000
Subject: [PATCH 05/60] small changes

---
 examples/plot.rs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/examples/plot.rs b/examples/plot.rs
index 48db937..59e3a24 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -91,7 +91,7 @@ fn main() -> Result<(), Box<dyn Error>> {
     root.fill(&WHITE)?;
 
     let mut chart = ChartBuilder::on(&root)
-        .caption("agent fitness over gens", ("sans-serif", 50).into_font())
+        .caption("agent fitness values per generation", ("sans-serif", 50).into_font())
         .margin(5)
         .x_label_area_size(30)
         .y_label_area_size(30)
@@ -103,6 +103,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         .into_iter()
         .enumerate()
         .collect();
+
     let highs = data
         .iter()
         .map(|(i, PerformanceStats { high, .. })| (*i, *high));

From 728cbdeca4e6009399b1167bdd25a535da491d06 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Tue, 16 Apr 2024 18:29:56 +0000
Subject: [PATCH 06/60] more configuration

---
 examples/plot.rs | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/examples/plot.rs b/examples/plot.rs
index 59e3a24..605ed69 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -103,7 +103,7 @@ fn main() -> Result<(), Box<dyn Error>> {
     .into_iter()
     .enumerate()
     .collect();
-    
+
     let highs = data
         .iter()
         .map(|(i, PerformanceStats { high, .. })| (*i, *high));
@@ -128,6 +128,12 @@ fn main() -> Result<(), Box<dyn Error>> {
         .draw_series(LineSeries::new(lows, &RED))?
         .label("low");
 
+    chart
+        .configure_series_labels()
+        .background_style(&WHITE.mix(0.8))
+        .border_style(&BLACK)
+        .draw()?;
+
     root.present()?;
 
     println!("Complete");

From 91c3f9f1463096178cf011acc8e8aadd730f7f46 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 17 Apr 2024 11:31:35 +0000
Subject: [PATCH 07/60] make plotting ng more generic

---
 examples/plot.rs | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/examples/plot.rs b/examples/plot.rs
index 605ed69..6cd555e 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -38,11 +38,12 @@ fn fitness(g: &AgentDNA) -> f32 {
     fitness
 }
 
-struct PlottingNG {
+struct PlottingNG<F: NextgenFn<AgentDNA>> {
     performance_stats: Arc<Mutex<Vec<PerformanceStats>>>,
+    actual_ng: F,
 }
 
-impl NextgenFn<AgentDNA> for PlottingNG {
+impl<F: NextgenFn<AgentDNA>> NextgenFn<AgentDNA> for PlottingNG<F> {
     fn next_gen(&self, fitness: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
         let l = fitness.len();
 
@@ -55,7 +56,7 @@ impl<F: NextgenFn<AgentDNA>> NextgenFn<AgentDNA> for PlottingNG<F> {
         let mut ps = self.performance_stats.lock().unwrap();
         ps.push(PerformanceStats { high, median, low });
 
-        division_pruning_nextgen(fitness)
+        self.actual_ng.next_gen(fitness)
     }
 }

From f6d0df0493d2ec8b8cdc7ce8c978154470c449f5 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Thu, 18 Apr 2024 11:54:27 +0000
Subject: [PATCH 08/60] fix test rayon feature

---
 src/lib.rs | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/lib.rs b/src/lib.rs
index ac569b0..0de19a1 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -65,10 +65,16 @@ mod tests {
             fitness
         };
 
+        #[cfg(not(feature = "rayon"))]
         let mut rng = rand::thread_rng();
 
         let mut sim = GeneticSim::new(
+            #[cfg(not(feature = "rayon"))]
             Vec::gen_random(&mut rng, 100),
+
+            #[cfg(feature = "rayon")]
+            Vec::gen_random(100),
+
             fitness,
             division_pruning_nextgen,
         );

From cc88ebfc8497ec4583a4fbd43d5ad7e53ef8d9ba Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Thu, 18 Apr 2024 11:55:23 +0000
Subject: [PATCH 09/60] cargo fmt

---
 src/lib.rs | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 0de19a1..0dd0b8c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -30,7 +30,7 @@ mod tests {
     use rand::prelude::*;
 
     #[derive(RandomlyMutable, DivisionReproduction, Clone)]
-    struct AgentDNA { 
+    struct AgentDNA {
         network: NeuralNetworkTopology<2, 1>,
     }
 
@@ -71,10 +71,8 @@ mod tests {
         let mut sim = GeneticSim::new(
             #[cfg(not(feature = "rayon"))]
             Vec::gen_random(&mut rng, 100),
-
             #[cfg(feature = "rayon")]
             Vec::gen_random(100),
-
             fitness,
             division_pruning_nextgen,
         );
@@ -83,13 +81,10 @@ mod tests {
             sim.next_generation();
         }
 
-        let mut fits: Vec<_> = sim.genomes
-            .iter()
-            .map(fitness)
-            .collect();
+        let mut fits: Vec<_> = sim.genomes.iter().map(fitness).collect();
 
         fits.sort_by(|a, b| a.partial_cmp(&b).unwrap());
 
         dbg!(fits);
     }
-}
\ No newline at end of file
+}

From b95084dd4d615c3a685eebca83df4c16e7133910 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Thu, 18 Apr 2024 14:53:59 +0000
Subject: [PATCH 10/60] create custom activations example

---
 examples/custom_activation.rs | 92 +++++++++++++++++++++++++++++++++++
 src/topology/activation.rs    |  4 +-
 2 files changed, 94 insertions(+), 2 deletions(-)
 create mode 100644 examples/custom_activation.rs

diff --git a/examples/custom_activation.rs b/examples/custom_activation.rs
new file mode 100644
index 0000000..f52882b
--- /dev/null
+++ b/examples/custom_activation.rs
@@ -0,0 +1,92 @@
+//! An example implementation of a custom activation function.
+
+use neat::*;
+use rand::prelude::*;
+
+#[derive(DivisionReproduction, RandomlyMutable, Clone)]
+struct AgentDNA {
+    network: NeuralNetworkTopology<2, 2>,
+}
+
+impl Prunable for AgentDNA {}
+
+impl GenerateRandom for AgentDNA {
+    fn gen_random(rng: &mut impl Rng) -> Self {
+        Self {
+            network: NeuralNetworkTopology::new(0.01, 3, rng),
+        }
+    }
+}
+
+fn fitness(g: &AgentDNA) -> f32 {
+    let network: NeuralNetwork<2, 2> = NeuralNetwork::from(&g.network);
+    let mut fitness = 0.;
+    let mut rng = rand::thread_rng();
+
+    for _ in 0..50 {
+        let n = rng.gen::<f32>();
+        let n2 = rng.gen::<f32>();
+
+        let expected = if (n + n2) / 2. >= 0.5 {
+            0
+        } else {
+            1
+        };
+
+        let result = network.predict([n, n2]);
+        network.flush_state();
+
+        // partial_cmp chance of returning None in this smh
+        let result = result.iter().max_index();
+
+        if result == expected {
+            fitness += 1.;
+        } else {
+            fitness -= 1.;
+        }
+    }
+
+    fitness
+}
+
+#[cfg(feature = "serde")]
+fn serde_nextgen(rewards: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
+    let max = rewards
+        .iter()
+        .max_by(|(_, ra), (_, rb)| ra.total_cmp(rb))
+        .unwrap();
+
+    let ser = NNTSerde::from(&max.0.network);
+    let data = serde_json::to_string_pretty(&ser).unwrap();
+    std::fs::write("best-agent.json", data).expect("Failed to write to file");
+
+    division_pruning_nextgen(rewards)
+}
+
+fn main() {
+    let log_activation = activation_fn!(f32::log10);
+    register_activation(log_activation);
+
+    #[cfg(not(feature = "rayon"))]
+    let mut rng = rand::thread_rng();
+
+    let mut sim = GeneticSim::new(
+        #[cfg(not(feature = "rayon"))]
+        Vec::gen_random(&mut rng, 100),
+
+        #[cfg(feature = "rayon")]
+        Vec::gen_random(100),
+
+        fitness,
+
+        #[cfg(not(feature = "serde"))]
+        division_pruning_nextgen,
+
+        #[cfg(feature = "serde")]
+        serde_nextgen,
+    );
+
+    for _ in 0..200 {
+        sim.next_generation();
+    }
+}
\ No newline at end of file
diff --git a/src/topology/activation.rs b/src/topology/activation.rs
index a711851..5bf9540 100644
--- a/src/topology/activation.rs
+++ b/src/topology/activation.rs
@@ -15,11 +15,11 @@ use crate::NeuronLocation;
 #[macro_export]
 macro_rules! activation_fn {
     ($F: path) => {
-        ActivationFn::new(Arc::new($F), ActivationScope::default(), stringify!($F).into())
+        ActivationFn::new(std::sync::Arc::new($F), ActivationScope::default(), stringify!($F).into())
     };
 
     ($F: path, $S: expr) => {
-        ActivationFn::new(Arc::new($F), $S, stringify!($F).into())
+        ActivationFn::new(std::sync::Arc::new($F), $S, stringify!($F).into())
     };
 
     {$($F: path),*} => {

From 35868795738bb443c31c09371dc4d76ee56fa6e5 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Thu, 18 Apr 2024 15:01:37 +0000
Subject: [PATCH 11/60] fix opposite high and low

---
 examples/plot.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/plot.rs b/examples/plot.rs
index 6cd555e..4fa4c51 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -47,11 +47,11 @@ impl<F: NextgenFn<AgentDNA>> NextgenFn<AgentDNA> for PlottingNG<F> {
     fn next_gen(&self, fitness: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
         let l = fitness.len();
 
-        let high = fitness[0].1;
+        let high = fitness[l-1].1;
 
         let median = fitness[l / 2].1;
 
-        let low = fitness[l-1].1;
+        let low = fitness[0].1;
 
         let mut ps = self.performance_stats.lock().unwrap();
         ps.push(PerformanceStats { high, median, low });

From 27e972af6f6fcaf885cbc96de7dc052405840897 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Mon, 6 May 2024 10:19:37 -0400
Subject: [PATCH 12/60] Update Cargo.toml

---
 Cargo.toml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 91247ad..96767ef 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,9 +3,9 @@ name = "neat"
 description = "Crate for working with NEAT in rust"
 version = "0.5.1"
 edition = "2021"
-authors = ["Inflectrix"]
-repository = "https://github.com/inflectrix/neat"
-homepage = "https://github.com/inflectrix/neat"
+authors = ["HyperCodec"]
+repository = "https://github.com/HyperCodec/neat"
+homepage = "https://github.com/HyperCodec/neat"
 readme = "README.md"
 keywords = ["genetic", "machine-learning", "ai", "algorithm", "evolution"]
 categories = ["algorithms", "science", "simulation"]

From 4b8cef0f7a2d226a6bab62d9e8fde1996978a018 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 11:42:36 +0000
Subject: [PATCH 13/60] use svgbackend (now it hangs for some reason)

---
 .gitignore       | 3 ++-
 examples/plot.rs | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index 1b71596..a6e0cb6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
 /target/
-/.vscode/
\ No newline at end of file
+/.vscode/
+best-agent.json
\ No newline at end of file
diff --git a/examples/plot.rs b/examples/plot.rs
index 4fa4c51..2be99c9 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -88,7 +88,7 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     println!("Training complete, collecting data and building chart...");
 
-    let root = BitMapBackend::new(OUTPUT_FILE_NAME, (640, 480)).into_drawing_area();
+    let root = SVGBackend::new(OUTPUT_FILE_NAME, (640, 480)).into_drawing_area();
     root.fill(&WHITE)?;

From 6a7090ace3522817ed9979c21efa8fdb42d53112 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 13:36:47 +0000
Subject: [PATCH 14/60] fix arc::into_inner failure

---
 .gitignore       | 3 ++-
 examples/plot.rs | 5 ++++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index a6e0cb6..b2d8069 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 /target/
 /.vscode/
-best-agent.json
\ No newline at end of file
+best-agent.json
+fitness-plot.svg
\ No newline at end of file
diff --git a/examples/plot.rs b/examples/plot.rs
index 2be99c9..967b3d0 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -66,7 +66,7 @@ struct PerformanceStats {
     low: f32,
 }
 
-const OUTPUT_FILE_NAME: &'static str = "fitness-plot.png";
+const OUTPUT_FILE_NAME: &'static str = "fitness-plot.svg";
 const GENS: usize = 100;
 fn main() -> Result<(), Box<dyn Error>> {
     let mut rng = rand::thread_rng();
@@ -86,6 +86,9 @@ fn main() -> Result<(), Box<dyn Error>> {
         sim.next_generation();
     }
 
+    // prevent `Arc::into_inner` from failing
+    drop(sim);
+
     println!("Training complete, collecting data and building chart...");
 
     let root = SVGBackend::new(OUTPUT_FILE_NAME, (640, 480)).into_drawing_area();

From 945ea4a7b1a350d75b9260f2d903ddb1566fc848 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 13:43:33 +0000
Subject: [PATCH 15/60] fix data retrieval

---
 examples/plot.rs | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/examples/plot.rs b/examples/plot.rs
index 967b3d0..33c032a 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -44,7 +44,10 @@ struct PlottingNG<F: NextgenFn<AgentDNA>> {
 }
 
 impl<F: NextgenFn<AgentDNA>> NextgenFn<AgentDNA> for PlottingNG<F> {
-    fn next_gen(&self, fitness: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
+    fn next_gen(&self, mut fitness: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
+        // it's a bit slower because of sorting twice but I don't want to rewrite the nextgen.
+        fitness.sort_by(|(_, fa), (_, fb)| fa.partial_cmp(fb).unwrap());
+

From 0717843bfd615421f20b352f165b8a758822bacc Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 13:48:58 +0000
Subject: [PATCH 16/60] make compatible with other features

---
 examples/plot.rs | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/examples/plot.rs b/examples/plot.rs
index 33c032a..ab0585a 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -71,14 +71,21 @@ struct PerformanceStats {
 
 const OUTPUT_FILE_NAME: &'static str = "fitness-plot.svg";
 const GENS: usize = 100;
+
 fn main() -> Result<(), Box<dyn Error>> {
+    #[cfg(not(feature = "rayon"))]
     let mut rng = rand::thread_rng();
 
     let performance_stats = Arc::new(Mutex::new(Vec::with_capacity(GENS)));
     let ng = PlottingNG { performance_stats: performance_stats.clone(), actual_ng: division_pruning_nextgen };
 
     let mut sim = GeneticSim::new(
+        #[cfg(not(feature = "rayon"))]
         Vec::gen_random(&mut rng, 100),
+
+        #[cfg(feature = "rayon")]
+        Vec::gen_random(100),
+
         fitness,
         ng,
     );

From 6a98fb0d928f8922e6c535f3a74312a89a277d43 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 13:49:53 +0000
Subject: [PATCH 17/60] cargo fmt

---
 examples/plot.rs | 38 +++++++++++++++++++++++---------------
 src/lib.rs       |  9 +++------
 2 files changed, 26 insertions(+), 21 deletions(-)

diff --git a/examples/plot.rs b/examples/plot.rs
index ab0585a..2b6a851 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -1,11 +1,14 @@
-use std::{error::Error, sync::{Arc, Mutex}};
+use std::{
+    error::Error,
+    sync::{Arc, Mutex},
+};
 
 use neat::*;
-use rand::prelude::*;
 use plotters::prelude::*;
+use rand::prelude::*;
 
 #[derive(RandomlyMutable, DivisionReproduction, Clone)]
-struct AgentDNA { 
+struct AgentDNA {
     network: NeuralNetworkTopology<2, 1>,
 }
 
@@ -50,7 +53,7 @@ impl<F: NextgenFn<AgentDNA>> NextgenFn<AgentDNA> for PlottingNG<F> {
         let l = fitness.len();
 
-        let high = fitness[l-1].1;
+        let high = fitness[l - 1].1;
 
         let median = fitness[l / 2].1;
 
@@ -77,21 +80,22 @@ fn main() -> Result<(), Box<dyn Error>> {
     let mut rng = rand::thread_rng();
 
     let performance_stats = Arc::new(Mutex::new(Vec::with_capacity(GENS)));
-    let ng = PlottingNG { performance_stats: performance_stats.clone(), actual_ng: division_pruning_nextgen };
+    let ng = PlottingNG {
+        performance_stats: performance_stats.clone(),
+        actual_ng: division_pruning_nextgen,
+    };
 
     let mut sim = GeneticSim::new(
         #[cfg(not(feature = "rayon"))]
         Vec::gen_random(&mut rng, 100),
-
         #[cfg(feature = "rayon")]
         Vec::gen_random(100),
-
         fitness,
         ng,
     );
 
     println!("Training...");
-    
+
     for _ in 0..GENS {
         sim.next_generation();
     }
@@ -105,7 +109,10 @@ fn main() -> Result<(), Box<dyn Error>> {
     root.fill(&WHITE)?;
 
     let mut chart = ChartBuilder::on(&root)
-        .caption("agent fitness values per generation", ("sans-serif", 50).into_font())
+        .caption(
+            "agent fitness values per generation",
+            ("sans-serif", 50).into_font(),
+        )
         .margin(5)
         .x_label_area_size(30)
         .y_label_area_size(30)
@@ -113,7 +120,10 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     chart.configure_mesh().draw()?;
 
-    let data: Vec<_> = Arc::into_inner(performance_stats).unwrap().into_inner().unwrap()
+    let data: Vec<_> = Arc::into_inner(performance_stats)
+        .unwrap()
+        .into_inner()
+        .unwrap()
         .into_iter()
         .enumerate()
         .collect();
@@ -138,9 +148,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         .draw_series(LineSeries::new(medians, &YELLOW))?
         .label("median");
 
-    chart
-        .draw_series(LineSeries::new(lows, &RED))?
-        .label("low");
+    chart.draw_series(LineSeries::new(lows, &RED))?.label("low");
 
     chart
         .configure_series_labels()
@@ -151,6 +159,6 @@ fn main() -> Result<(), Box<dyn Error>> {
     root.present()?;
 
     println!("Complete");
-    
+
     Ok(())
-}
\ No newline at end of file
+}
diff --git a/src/lib.rs b/src/lib.rs
index ac569b0..98429d6 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -30,7 +30,7 @@ mod tests {
     use rand::prelude::*;
 
     #[derive(RandomlyMutable, DivisionReproduction, Clone)]
-    struct AgentDNA { 
+    struct AgentDNA {
         network: NeuralNetworkTopology<2, 1>,
     }
 
@@ -77,13 +77,10 @@ mod tests {
            sim.next_generation();
         }
 
-        let mut fits: Vec<_> = sim.genomes
-            .iter()
-            .map(fitness)
-            .collect();
+        let mut fits: Vec<_> = sim.genomes.iter().map(fitness).collect();
 
         fits.sort_by(|a, b| a.partial_cmp(&b).unwrap());
 
         dbg!(fits);
     }
-}
\ No newline at end of file
+}

From 44b7fdbc37992f766d322f22c14e7133eab9a481 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 14:24:25 +0000
Subject: [PATCH 18/60] create progress bar for plotting example

---
 Cargo.lock       | 69 ++++++++++++++++++++++++++++++++++++++++++++++++
 Cargo.toml       |  4 ++-
 examples/plot.rs | 14 ++++++++--
 3 files changed, 84 insertions(+), 3 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 5c98cd1..700cf2f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -100,6 +100,19 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b"
 
+[[package]]
+name = "console"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
+dependencies = [
+ "encode_unicode",
+ "lazy_static",
+ "libc",
+ "unicode-width",
+ "windows-sys",
+]
+
 [[package]]
 name = "const-cstr"
 version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed3d0b5ff30645a68f35ece8cea4556ca14ef8a1651455f789a099a0513532a6"
 
@@ -240,6 +253,12 @@ version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum =
"a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + [[package]] name = "fdeflate" version = "0.3.4" @@ -417,6 +436,28 @@ dependencies = [ "png", ] +[[package]] +name = "indicatif" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" +dependencies = [ + "console", + "instant", + "number_prefix", + "portable-atomic", + "unicode-width", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + [[package]] name = "itoa" version = "1.0.10" @@ -493,6 +534,7 @@ dependencies = [ "bincode", "bitflags 2.5.0", "genetic-rs", + "indicatif", "lazy_static", "plotters", "rand", @@ -511,6 +553,12 @@ dependencies = [ "autocfg", ] +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + [[package]] name = "once_cell" version = "1.19.0" @@ -601,6 +649,12 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -811,6 +865,12 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +[[package]] +name = "unicode-width" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" + [[package]] name = "walkdir" version = "2.5.0" @@ -937,6 +997,15 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-targets" version = "0.52.5" diff --git a/Cargo.toml b/Cargo.toml index 8ccd8ca..8305fe4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ serde = ["dep:serde", "dep:serde-big-array"] [dependencies] bitflags = "2.5.0" genetic-rs = { version = "0.5.1", features = ["derive"] } + lazy_static = "1.4.0" rand = "0.8.5" rayon = { version = "1.8.1", optional = true } @@ -37,4 +38,5 @@ serde-big-array = { version = "0.5.1", optional = true } [dev-dependencies] bincode = "1.3.3" serde_json = "1.0.114" -plotters = "0.3.5" \ No newline at end of file +plotters = "0.3.5" +indicatif = "0.17.8" diff --git a/examples/plot.rs b/examples/plot.rs index 2b6a851..af48b01 100644 --- a/examples/plot.rs +++ b/examples/plot.rs @@ -6,6 +6,7 @@ use std::{ use neat::*; use plotters::prelude::*; use rand::prelude::*; +use indicatif::{ProgressBar, ProgressStyle}; #[derive(RandomlyMutable, DivisionReproduction, Clone)] struct AgentDNA { @@ -73,7 +74,7 @@ struct PerformanceStats { } const OUTPUT_FILE_NAME: &'static str = "fitness-plot.svg"; -const GENS: usize = 100; +const GENS: usize = 1000; fn main() 
-> Result<(), Box<dyn Error>> {
     #[cfg(not(feature = "rayon"))]
@@ -94,12 +95,21 @@ fn main() -> Result<(), Box<dyn Error>> {
         ng,
     );
 
+    let pb = ProgressBar::new(GENS as u64)
+        .with_style(ProgressStyle::with_template("[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}")
+        .unwrap())
+        .with_message("gen");
+
     println!("Training...");
 
     for _ in 0..GENS {
         sim.next_generation();
+
+        pb.inc(1);
     }
 
+    pb.finish();
+
     // prevent `Arc::into_inner` from failing
     drop(sim);
 
@@ -116,7 +126,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         .margin(5)
         .x_label_area_size(30)
         .y_label_area_size(30)
-        .build_cartesian_2d(0usize..100, 0f32..200.0)?;
+        .build_cartesian_2d(0usize..GENS, 0f32..1000.0)?;
 
     chart.configure_mesh().draw()?;

From 6d17ec6bf1682f549ec8533aaa01f97ca0956dfe Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 14:27:12 +0000
Subject: [PATCH 19/60] create progress bar for basic example

---
 examples/basic.rs | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/examples/basic.rs b/examples/basic.rs
index 9ad0419..2aa640b 100644
--- a/examples/basic.rs
+++ b/examples/basic.rs
@@ -2,6 +2,7 @@
 
 use neat::*;
 use rand::prelude::*;
+use indicatif::{ProgressBar, ProgressStyle};
 
 #[derive(PartialEq, Clone, Debug, DivisionReproduction, RandomlyMutable)]
 #[cfg_attr(feature = "crossover", derive(CrossoverReproduction))]
@@ -103,10 +104,19 @@ fn main() {
         crossover_pruning_nextgen,
     );
 
-    for _ in 0..100 {
+    const GENS: u64 = 1000;
+    let pb = ProgressBar::new(GENS)
+        .with_style(ProgressStyle::with_template("[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}")
+        .unwrap())
+        .with_message("gen");
+
+    for _ in 0..GENS {
         sim.next_generation();
+        pb.inc(1);
     }
 
+    pb.finish();
+
     #[cfg(not(feature = "serde"))]
     let mut fits: Vec<_> = sim.genomes.iter().map(fitness).collect();

From d3a9c409f51c11e7069b414e85069a1d7d7d7a76 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+inflectrix@users.noreply.github.com>
Date: Wed, 15 May 2024 14:28:23 +0000
Subject: [PATCH 20/60] cargo fmt

---
 examples/basic.rs | 10 +++++++---
 examples/plot.rs  | 10 +++++++---
 2 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/examples/basic.rs b/examples/basic.rs
index 2aa640b..9bbb346 100644
--- a/examples/basic.rs
+++ b/examples/basic.rs
@@ -1,8 +1,8 @@
 //! A basic example of NEAT with this crate. Enable the `crossover` feature for it to use crossover reproduction
 
+use indicatif::{ProgressBar, ProgressStyle};
 use neat::*;
 use rand::prelude::*;
-use indicatif::{ProgressBar, ProgressStyle};
 
 #[derive(PartialEq, Clone, Debug, DivisionReproduction, RandomlyMutable)]
 #[cfg_attr(feature = "crossover", derive(CrossoverReproduction))]
@@ -106,8 +106,12 @@ fn main() {
     const GENS: u64 = 1000;
     let pb = ProgressBar::new(GENS)
-        .with_style(ProgressStyle::with_template("[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}")
-        .unwrap())
+        .with_style(
+            ProgressStyle::with_template(
+                "[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}",
+            )
+            .unwrap(),
+        )
         .with_message("gen");
 
     for _ in 0..GENS {
diff --git a/examples/plot.rs b/examples/plot.rs
index af48b01..34fb391 100644
--- a/examples/plot.rs
+++ b/examples/plot.rs
@@ -3,10 +3,10 @@ use std::{
     sync::{Arc, Mutex},
 };
 
+use indicatif::{ProgressBar, ProgressStyle};
 use neat::*;
 use plotters::prelude::*;
 use rand::prelude::*;
-use indicatif::{ProgressBar, ProgressStyle};
 
 #[derive(RandomlyMutable, DivisionReproduction, Clone)]
 struct AgentDNA {
@@ -96,8 +96,12 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     let pb = ProgressBar::new(GENS as u64)
-        .with_style(ProgressStyle::with_template("[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}")
-        .unwrap())
+        .with_style(
+            ProgressStyle::with_template(
+                "[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}",
+            )
+            .unwrap(),
+        )
         .with_message("gen");

From e45908cacd6126997f9d26a4a1dffa99c9f25244 Mon Sep 17 00:00:00 2001
From: Tristan Murphy
Date: Fri, 31 May 2024 21:40:06 -0400
Subject: [PATCH 21/60] add logic to prevent duplicated input neurons

---
 src/topology/mod.rs | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/src/topology/mod.rs b/src/topology/mod.rs
index 02ad296..dd246f2 100644
--- a/src/topology/mod.rs
+++ b/src/topology/mod.rs
@@ -121,6 +121,18 @@ impl<const I: usize, const O: usize> NeuralNetworkTopology<I, O> {
             return true;
         }
 
+        // check to make sure it isn't duplicate
+        {
+            let n = self.get_neuron(to);
+            let n2 = n.read().unwrap();
+
+            for (loc, _) in &n2.inputs {
+                if from == *loc {
+                    return false;
+                }
+            }
+        }
+
         let mut visited = HashSet::new();
         self.dfs(from, to, &mut visited)
     }

From 7c31f30f88bcd0221e628daa8b0d0530f5c46523 Mon Sep 17 00:00:00 2001
From: Tristan Murphy
Date: Thu, 13 Jun 2024 10:54:22 -0400
Subject: [PATCH 22/60] cargo fmt

---
 examples/custom_activation.rs | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

diff --git a/examples/custom_activation.rs b/examples/custom_activation.rs
index f52882b..bc6aae2 100644
--- a/examples/custom_activation.rs
+++ b/examples/custom_activation.rs
@@ -27,11 +27,7 @@ fn fitness(g: &AgentDNA) -> f32 {
         let n = rng.gen::<f32>();
         let n2 = rng.gen::<f32>();
 
-        let expected = if (n + n2) / 2. >= 0.5 {
-            0
-        } else {
-            1
-        };
+        let expected = if (n + n2) / 2.
>= 0.5 { 0 } else { 1 }; let result = network.predict([n, n2]); network.flush_state(); @@ -73,15 +69,11 @@ fn main() { let mut sim = GeneticSim::new( #[cfg(not(feature = "rayon"))] Vec::gen_random(&mut rng, 100), - #[cfg(feature = "rayon")] Vec::gen_random(100), - fitness, - #[cfg(not(feature = "serde"))] division_pruning_nextgen, - #[cfg(feature = "serde")] serde_nextgen, ); @@ -89,4 +81,4 @@ fn main() { for _ in 0..200 { sim.next_generation(); } -} \ No newline at end of file +} From a32bfff0375835cc9f49ad2d4e42513a8cb55a5c Mon Sep 17 00:00:00 2001 From: Tristan Murphy Date: Thu, 13 Jun 2024 11:09:52 -0400 Subject: [PATCH 23/60] change activation function to one that doesn't return NaN --- examples/custom_activation.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/custom_activation.rs b/examples/custom_activation.rs index bc6aae2..7b37c02 100644 --- a/examples/custom_activation.rs +++ b/examples/custom_activation.rs @@ -60,8 +60,8 @@ fn serde_nextgen(rewards: Vec<(AgentDNA, f32)>) -> Vec { } fn main() { - let log_activation = activation_fn!(f32::log10); - register_activation(log_activation); + let sin_activation = activation_fn!(f32::sin); + register_activation(sin_activation); #[cfg(not(feature = "rayon"))] let mut rng = rand::thread_rng(); From 627830a61b25d4859644ca0ed55711f9dbde33a0 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Tue, 24 Sep 2024 12:08:01 -0400 Subject: [PATCH 24/60] Update CONTRIBUTING.md --- CONTRIBUTING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 68c1a8c..683c357 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ Thanks for contributing to this project. -To get started, check out the [issues page](https://github.com/inflectrix/neat). You can either find a feature/fix from there or start a new issue, then begin implementing it in your own fork of this repo. +To get started, check out the [issues page](https://github.com/hypercodec/neat). You can either find a feature/fix from there or start a new issue, then begin implementing it in your own fork of this repo. -Once you are done making the changes you'd like the make, start a pull request to the [dev](https://github.com/inflectrix/neat/tree/dev) branch. State your changes and request a review. After all branch rules have been satisfied, someone with management permissions on this repository will merge it. +Once you are done making the changes you'd like the make, start a pull request to the [dev](https://github.com/hypercodec/neat/tree/dev) branch. State your changes and request a review. After all branch rules have been satisfied, someone with management permissions on this repository will merge it. 
From 1978058ed0a3b0adf25e3deb688e4e4cbfbf5733 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Tue, 4 Feb 2025 14:02:23 +0000 Subject: [PATCH 25/60] Merge branch 'rewrite' into dev --- .github/workflows/ci-cd.yml | 2 +- CONTRIBUTING.md | 3 +- Cargo.lock | 885 ++----------------------------- Cargo.toml | 24 +- README.md | 97 +--- examples/basic.rs | 143 +---- examples/extra_dna.rs | 3 + src/{topology => }/activation.rs | 101 ++-- src/activation/builtin.rs | 14 + src/lib.rs | 93 +--- src/neuralnet.rs | 856 ++++++++++++++++++++++++++++++ src/runnable.rs | 300 ----------- src/tests.rs | 179 +++++++ src/topology/mod.rs | 638 ---------------------- src/topology/nnt_serde.rs | 71 --- 15 files changed, 1165 insertions(+), 2244 deletions(-) create mode 100644 examples/extra_dna.rs rename src/{topology => }/activation.rs (79%) create mode 100644 src/activation/builtin.rs create mode 100644 src/neuralnet.rs delete mode 100644 src/runnable.rs create mode 100644 src/tests.rs delete mode 100644 src/topology/nnt_serde.rs diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index fef7ca6..fd849f4 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -2,7 +2,7 @@ name: CI-CD on: push: - branches: [main] + branches: [main, dev] pull_request: jobs: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 683c357..8ede4a4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,5 +2,6 @@ Thanks for contributing to this project. To get started, check out the [issues page](https://github.com/hypercodec/neat). You can either find a feature/fix from there or start a new issue, then begin implementing it in your own fork of this repo. -Once you are done making the changes you'd like the make, start a pull request to the [dev](https://github.com/hypercodec/neat/tree/dev) branch. State your changes and request a review. After all branch rules have been satisfied, someone with management permissions on this repository will merge it. +Once you are done making the changes you'd like the make, start a pull request to the [dev](https://github.com/hypercodec/neat/tree/dev) branch. State your changes and request a review. After all branch rules have been satisfied and the pull request has a valid reason, someone with management permissions on this repository will merge it. +You could also make a draft PR while implementing your features if you want feedback or discussion before finalizing your changes. \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 700cf2f..0a9a53b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,78 +1,18 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "autocfg" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" - -[[package]] -name = "bincode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] -name = "bitflags" -version = "1.3.2" +name = "atomic_float" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +checksum = "628d228f918ac3b82fe590352cc719d30664a0c13ca3a60266fe02c7132d480a" [[package]] name = "bitflags" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" - -[[package]] -name = "bumpalo" -version = "3.16.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = "bytemuck" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "cc" -version = "1.0.94" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f6e324229dc011159fcc089755d1e2e216a90d43a7dea6853ca740b84f35e7" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "cfg-if" @@ -80,106 +20,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "chrono" -version = "0.4.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "js-sys", - "num-traits", - "wasm-bindgen", - "windows-targets", -] - -[[package]] -name = "color_quant" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" - -[[package]] -name = "console" -version = "0.15.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" -dependencies = [ - "encode_unicode", - "lazy_static", - "libc", - "unicode-width", - "windows-sys", -] - -[[package]] -name = 
"const-cstr" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed3d0b5ff30645a68f35ece8cea4556ca14ef8a1651455f789a099a0513532a6" - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" - -[[package]] -name = "core-graphics" -version = "0.22.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "core-graphics-types", - "foreign-types", - "libc", -] - -[[package]] -name = "core-graphics-types" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45390e6114f68f718cc7a830514a96f903cccd70d02a8f6d9f643ac4ba45afaf" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "libc", -] - -[[package]] -name = "core-text" -version = "19.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d74ada66e07c1cefa18f8abfba765b486f250de2e4a999e5727fc0dd4b4a25" -dependencies = [ - "core-foundation", - "core-graphics", - "foreign-types", - "libc", -] - -[[package]] -name = "crc32fast" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" -dependencies = [ - "cfg-if", -] - [[package]] name = "crossbeam-deque" version = "0.8.5" @@ -205,151 +45,17 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" -[[package]] -name = "dirs-next" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" -dependencies = [ - "cfg-if", - "dirs-sys-next", -] - -[[package]] -name = "dirs-sys-next" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" -dependencies = [ - "libc", - "redox_users", - "winapi", -] - -[[package]] -name = "dlib" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412" -dependencies = [ - "libloading", -] - -[[package]] -name = "dwrote" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439a1c2ba5611ad3ed731280541d36d2e9c4ac5e7fb818a27b604bdc5a6aa65b" -dependencies = [ - "lazy_static", - "libc", - "winapi", - "wio", -] - [[package]] name = "either" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - -[[package]] -name = "fdeflate" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4f9bfee30e4dedf0ab8b422f03af778d9612b63f502710fc500a334ebe2de645" -dependencies = [ - "simd-adler32", -] - -[[package]] -name = "flate2" -version = "1.0.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "float-ord" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bad48618fdb549078c333a7a8528acb57af271d0433bdecd523eb620628364e" - -[[package]] -name = "font-kit" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21fe28504d371085fae9ac7a3450f0b289ab71e07c8e57baa3fb68b9e57d6ce5" -dependencies = [ - "bitflags 1.3.2", - "byteorder", - "core-foundation", - "core-graphics", - "core-text", - "dirs-next", - "dwrote", - "float-ord", - "freetype", - "lazy_static", - "libc", - "log", - "pathfinder_geometry", - "pathfinder_simd", - "walkdir", - "winapi", - "yeslogic-fontconfig-sys", -] - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "freetype" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efc8599a3078adf8edeb86c71e9f8fa7d88af5ca31e806a867756081f90f5d83" -dependencies = [ - "freetype-sys", - "libc", -] - -[[package]] -name = "freetype-sys" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66ee28c39a43d89fbed8b4798fb4ba56722cfd2b5af81f9326c27614ba88ecd5" -dependencies = [ - "cc", - "libc", - "pkg-config", -] - [[package]] name = "genetic-rs" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b94601f3db2fb341f71a4470134eb1f71d39f54c2fe264122698eda67cd1c91b" +checksum = "a68bb62a836f6ea3261d77cfec4012316e206f53e7d0eab519f5f3630e86001f" dependencies = [ "genetic-rs-common", "genetic-rs-macros", @@ -357,9 +63,9 @@ dependencies = [ [[package]] name = "genetic-rs-common" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f41b0e3f6ccb66a00e7fc9170d4e02b1ae80c85f03c67b76b067b3637fd314a" +checksum = "3be7aaffd4e4dc82d11819d40794f089c37d02595a401f229ed2877d1a4c401d" dependencies = [ "rand", "rayon", @@ -368,9 +74,9 @@ dependencies = [ [[package]] name = "genetic-rs-macros" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d5ec3b9e69a6836bb0f0c8fa6972e6322e0b49108f7b3ed40769feb452c120a" +checksum = "4e73b1f36ea3e799232e1a3141a2765fa6ee9ed7bb3fed96ccfb3bf272d1832e" dependencies = [ "genetic-rs-common", "proc-macro2", @@ -389,272 +95,45 @@ dependencies = [ "wasi", ] -[[package]] -name = "gif" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80792593675e051cf94a4b111980da2ba60d4a83e43e0048c5693baab3977045" -dependencies = [ - "color_quant", - "weezl", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.60" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "image" -version = "0.24.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" -dependencies = [ - "bytemuck", - "byteorder", - "color_quant", - "jpeg-decoder", - "num-traits", - "png", -] - -[[package]] -name = "indicatif" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" -dependencies = [ - "console", - "instant", - "number_prefix", - "portable-atomic", - "unicode-width", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - [[package]] name = "itoa" -version = "1.0.10" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" - -[[package]] -name = "jpeg-decoder" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0" - -[[package]] -name = "js-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" -dependencies = [ - "wasm-bindgen", -] +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.153" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" - -[[package]] -name = "libloading" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" -dependencies = [ - "cfg-if", - "windows-targets", -] - -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags 2.5.0", - "libc", -] - -[[package]] -name = "log" -version = "0.4.21" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] -name = "miniz_oxide" -version = "0.7.2" +name = "memchr" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" -dependencies = [ - "adler", - "simd-adler32", -] +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "neat" version = "0.5.1" dependencies = [ - "bincode", - "bitflags 2.5.0", + "atomic_float", + "bitflags", "genetic-rs", - "indicatif", "lazy_static", - "plotters", - "rand", "rayon", + "replace_with", "serde", "serde-big-array", "serde_json", ] -[[package]] -name = "num-traits" -version = "0.2.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "number_prefix" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" - -[[package]] -name = "once_cell" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" - -[[package]] -name = "pathfinder_geometry" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b7e7b4ea703700ce73ebf128e1450eb69c3a8329199ffbfb9b2a0418e5ad3" -dependencies = [ - "log", - "pathfinder_simd", -] - -[[package]] -name = "pathfinder_simd" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebf45976c56919841273f2a0fc684c28437e2f304e264557d9c72be5d5a718be" -dependencies = [ - "rustc_version", -] - -[[package]] -name = "pkg-config" -version = "0.3.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" - -[[package]] -name = "plotters" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" -dependencies = [ - "chrono", - "font-kit", - "image", - "lazy_static", - "num-traits", - "pathfinder_geometry", - "plotters-backend", - "plotters-bitmap", - "plotters-svg", - "ttf-parser", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" - -[[package]] -name = "plotters-bitmap" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cebbe1f70205299abc69e8b295035bb52a6a70ee35474ad10011f0a4efb8543" -dependencies = [ - "gif", - "image", - "plotters-backend", -] - -[[package]] -name = "plotters-svg" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" -dependencies = [ - "plotters-backend", -] - -[[package]] -name = "png" -version = "0.17.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06e4b0d3d1312775e782c86c91a111aa1f910cbb65e1337f9975b5f9a554b5e1" -dependencies = [ - "bitflags 1.3.2", - "crc32fast", - "fdeflate", - "flate2", - "miniz_oxide", -] - -[[package]] -name = "portable-atomic" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" - [[package]] name = "ppv-lite86" version = "0.2.17" @@ -663,9 +142,9 @@ checksum = 
"5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "307e3004becf10f5a6e0d59d20f3cd28231b0e0827a96cd3e0ce6d14bc1e4bb3" dependencies = [ "unicode-ident", ] @@ -711,9 +190,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.8.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -729,58 +208,23 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "redox_users" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" -dependencies = [ - "getrandom", - "libredox", - "thiserror", -] - [[package]] name = "replace_with" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3a8614ee435691de62bcffcf4a66d91b3594bf1428a5722e79103249a095690" -[[package]] -name = "rustc_version" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] - [[package]] name = "ryu" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "semver" -version = "1.0.22" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "serde" -version = "1.0.197" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] @@ -796,9 +240,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", @@ -807,286 +251,35 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.114" +version = "1.0.138" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" +checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] -[[package]] -name = "simd-adler32" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" - [[package]] 
name = "syn" -version = "2.0.51" +version = "2.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab617d94515e94ae53b8406c628598680aa0c9587474ecbe58188f7b345d66c" +checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "thiserror" -version = "1.0.58" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.58" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "ttf-parser" -version = "0.17.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "375812fa44dab6df41c195cd2f7fecb488f6c09fbaafb62807488cefab642bff" - [[package]] name = "unicode-ident" version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" -[[package]] -name = "unicode-width" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" - -[[package]] -name = "web-sys" -version = "0.3.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - 
-[[package]] -name = "weezl" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-targets" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" - -[[package]] 
-name = "windows_x86_64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" - -[[package]] -name = "wio" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d129932f4644ac2396cb456385cbf9e63b5b30c6e8dc4820bdca4eb082037a5" -dependencies = [ - "winapi", -] - -[[package]] -name = "yeslogic-fontconfig-sys" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2bbd69036d397ebbff671b1b8e4d918610c181c5a16073b96f984a38d08c386" -dependencies = [ - "const-cstr", - "dlib", - "once_cell", - "pkg-config", -] diff --git a/Cargo.toml b/Cargo.toml index 8305fe4..4b26e0f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,25 +18,19 @@ rustdoc-args = ["--cfg", "docsrs"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] -default = ["max-index"] -crossover = ["genetic-rs/crossover"] -rayon = ["genetic-rs/rayon", "dep:rayon"] -max-index = [] +default = [] serde = ["dep:serde", "dep:serde-big-array"] [dependencies] -bitflags = "2.5.0" -genetic-rs = { version = "0.5.1", features = ["derive"] } - -lazy_static = "1.4.0" -rand = "0.8.5" -rayon = { version = "1.8.1", optional = true } -serde = { version = "1.0.197", features = ["derive"], optional = true } +atomic_float = "1.1.0" +bitflags = "2.8.0" +genetic-rs = { version = "0.5.4", features = ["rayon", "derive"] } +lazy_static = "1.5.0" +rayon = "1.10.0" +replace_with = "0.1.7" +serde = { version = "1.0.217", features = ["derive"], optional = true } serde-big-array = { version = "0.5.1", optional = true } [dev-dependencies] -bincode = "1.3.3" -serde_json = "1.0.114" -plotters = "0.3.5" -indicatif = "0.17.8" +serde_json = "1.0.138" \ No newline at end of file diff --git a/README.md b/README.md index 1a97864..4e9828b 100644 --- a/README.md +++ b/README.md @@ -1,104 +1,17 @@ # neat -[github](https://github.com/inflectrix/neat) +[github](https://github.com/hypercodec/neat) [crates.io](https://crates.io/crates/neat) [docs.rs](https://docs.rs/neat) Implementation of the NEAT algorithm using `genetic-rs`. ### Features -- rayon - Uses parallelization on the `NeuralNetwork` struct and adds the `rayon` feature to the `genetic-rs` re-export. -- serde - Adds the NNTSerde struct and allows for serialization of `NeuralNetworkTopology` -- crossover - Implements the `CrossoverReproduction` trait on `NeuralNetworkTopology` and adds the `crossover` feature to the `genetic-rs` re-export. +- serde - Implements `Serialize` and `Deserialize` on most of the types in this crate. -*Do you like this repo and want to support it? If so, leave a ⭐* +*Do you like this crate and want to support it? If so, leave a ⭐* -### How To Use -When working with this crate, you'll want to use the `NeuralNetworkTopology` struct in your agent's DNA and -the use `NeuralNetwork::from` when you finally want to test its performance. The `genetic-rs` crate is also re-exported with the rest of this crate. - -Here's an example of how one might use this crate: -```rust -use neat::*; - -#[derive(Clone, RandomlyMutable, DivisionReproduction)] -struct MyAgentDNA { - network: NeuralNetworkTopology<1, 2>, -} - -impl GenerateRandom for MyAgentDNA { - fn gen_random(rng: &mut impl rand::Rng) -> Self { - Self { - network: NeuralNetworkTopology::new(0.01, 3, rng), - } - } -} - -struct MyAgent { - network: NeuralNetwork<1, 2>, - // ... 
other state -} - -impl From<&MyAgentDNA> for MyAgent { - fn from(value: &MyAgentDNA) -> Self { - Self { - network: NeuralNetwork::from(&value.network), - } - } -} - -fn fitness(dna: &MyAgentDNA) -> f32 { - // agent will simply try to predict whether a number is greater than 0.5 - let mut agent = MyAgent::from(dna); - let mut rng = rand::thread_rng(); - let mut fitness = 0; - - // use repeated tests to avoid situational bias and some local maximums, overall providing more accurate score - for _ in 0..10 { - let n = rng.gen::(); - let above = n > 0.5; - - let res = agent.network.predict([n]); - agent.network.flush_state(); - - let resi = res.iter().max_index(); - - if resi == 0 ^ above { - // agent did not guess correctly, punish slightly (too much will hinder exploration) - fitness -= 0.5; - - continue; - } - - // agent guessed correctly, they become more fit. - fitness += 3.; - } - - fitness -} - -fn main() { - let mut rng = rand::thread_rng(); - - let mut sim = GeneticSim::new( - Vec::gen_random(&mut rng, 100), - fitness, - division_pruning_nextgen, - ); - - // simulate 100 generations - for _ in 0..100 { - sim.next_generation(); - } - - // display fitness results - let fits: Vec<_> = sim.entities - .iter() - .map(fitness) - .collect(); - - dbg!(&fits, fits.iter().max()); -} -``` +# How To Use +TODO ### License This crate falls under the `MIT` license diff --git a/examples/basic.rs b/examples/basic.rs index 9bbb346..85f58cb 100644 --- a/examples/basic.rs +++ b/examples/basic.rs @@ -1,144 +1,3 @@ -//! A basic example of NEAT with this crate. Enable the `crossover` feature for it to use crossover reproduction - -use indicatif::{ProgressBar, ProgressStyle}; -use neat::*; -use rand::prelude::*; - -#[derive(PartialEq, Clone, Debug, DivisionReproduction, RandomlyMutable)] -#[cfg_attr(feature = "crossover", derive(CrossoverReproduction))] -struct AgentDNA { - network: NeuralNetworkTopology<2, 4>, -} - -impl Prunable for AgentDNA {} - -impl GenerateRandom for AgentDNA { - fn gen_random(rng: &mut impl rand::Rng) -> Self { - Self { - network: NeuralNetworkTopology::new(0.01, 3, rng), - } - } -} - -#[derive(Debug)] -struct Agent { - network: NeuralNetwork<2, 4>, -} - -impl From<&AgentDNA> for Agent { - fn from(value: &AgentDNA) -> Self { - Self { - network: (&value.network).into(), - } - } -} - -fn fitness(dna: &AgentDNA) -> f32 { - let agent = Agent::from(dna); - - let mut fitness = 0.; - let mut rng = rand::thread_rng(); - - for _ in 0..10 { - // 10 games - - // set up game - let mut agent_pos: (i32, i32) = (rng.gen_range(0..10), rng.gen_range(0..10)); - let mut food_pos: (i32, i32) = (rng.gen_range(0..10), rng.gen_range(0..10)); - - while food_pos == agent_pos { - food_pos = (rng.gen_range(0..10), rng.gen_range(0..10)); - } - - let mut step = 0; - - loop { - // perform actions in game - let action = agent.network.predict([ - (food_pos.0 - agent_pos.0) as f32, - (food_pos.1 - agent_pos.1) as f32, - ]); - let action = action.iter().max_index(); - - match action { - 0 => agent_pos.0 += 1, - 1 => agent_pos.0 -= 1, - 2 => agent_pos.1 += 1, - _ => agent_pos.1 -= 1, - } - - step += 1; - - if agent_pos == food_pos { - fitness += 10.; - break; // new game - } else { - // lose fitness for being slow and far away - fitness -= - (food_pos.0 - agent_pos.0 + food_pos.1 - agent_pos.1).abs() as f32 * 0.001; - } - - // 50 steps per game - if step == 50 { - break; - } - } - } - - fitness -} - fn main() { - #[cfg(not(feature = "rayon"))] - let mut rng = rand::thread_rng(); - - let mut sim = GeneticSim::new( - 
#[cfg(not(feature = "rayon"))] - Vec::gen_random(&mut rng, 100), - #[cfg(feature = "rayon")] - Vec::gen_random(100), - fitness, - #[cfg(not(feature = "crossover"))] - division_pruning_nextgen, - #[cfg(feature = "crossover")] - crossover_pruning_nextgen, - ); - - const GENS: u64 = 1000; - let pb = ProgressBar::new(GENS) - .with_style( - ProgressStyle::with_template( - "[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}", - ) - .unwrap(), - ) - .with_message("gen"); - - for _ in 0..GENS { - sim.next_generation(); - pb.inc(1); - } - - pb.finish(); - - #[cfg(not(feature = "serde"))] - let mut fits: Vec<_> = sim.genomes.iter().map(fitness).collect(); - - #[cfg(feature = "serde")] - let mut fits: Vec<_> = sim.genomes.iter().map(|e| (e, fitness(e))).collect(); - - #[cfg(not(feature = "serde"))] - fits.sort_by(|a, b| a.partial_cmp(&b).unwrap()); - - #[cfg(feature = "serde")] - fits.sort_by(|(_, a), (_, b)| a.partial_cmp(&b).unwrap()); - - dbg!(&fits); - - #[cfg(feature = "serde")] - { - let intermediate = NNTSerde::from(&fits[0].0.network); - let serialized = serde_json::to_string(&intermediate).unwrap(); - println!("{}", serialized); - } + todo!("use NeuralNetwork as the entire DNA"); } diff --git a/examples/extra_dna.rs b/examples/extra_dna.rs new file mode 100644 index 0000000..038709f --- /dev/null +++ b/examples/extra_dna.rs @@ -0,0 +1,3 @@ +fn main() { + todo!("use AgentDNA with additional params") +} diff --git a/src/topology/activation.rs b/src/activation.rs similarity index 79% rename from src/topology/activation.rs rename to src/activation.rs index 5bf9540..af9f74e 100644 --- a/src/topology/activation.rs +++ b/src/activation.rs @@ -1,7 +1,12 @@ +/// Contains some builtin activation functions ([`sigmoid`], [`relu`], etc.) +pub mod builtin; + +use bitflags::bitflags; +use builtin::*; + #[cfg(feature = "serde")] use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use bitflags::bitflags; use lazy_static::lazy_static; use std::{ collections::HashMap, @@ -15,7 +20,7 @@ use crate::NeuronLocation; #[macro_export] macro_rules! activation_fn { ($F: path) => { - ActivationFn::new(std::sync::Arc::new($F), ActivationScope::default(), stringify!($F).into()) + ActivationFn::new(std::sync::Arc::new($F), NeuronScope::default(), stringify!($F).into()) }; ($F: path, $S: expr) => { @@ -73,11 +78,11 @@ impl ActivationRegistry { } /// Gets all activation functions that are valid for a scope. - pub fn activations_in_scope(&self, scope: ActivationScope) -> Vec { + pub fn activations_in_scope(&self, scope: NeuronScope) -> Vec { let acts = self.activations(); acts.into_iter() - .filter(|a| a.scope != ActivationScope::NONE && a.scope.contains(scope)) + .filter(|a| !a.scope.contains(NeuronScope::NONE) && a.scope.contains(scope)) .collect() } } @@ -88,51 +93,18 @@ impl Default for ActivationRegistry { fns: HashMap::new(), }; + // TODO add a way to disable this s.batch_register(activation_fn! { - sigmoid => ActivationScope::HIDDEN | ActivationScope::OUTPUT, - relu => ActivationScope::HIDDEN | ActivationScope::OUTPUT, - linear_activation => ActivationScope::INPUT | ActivationScope::HIDDEN | ActivationScope::OUTPUT, - f32::tanh => ActivationScope::HIDDEN | ActivationScope::OUTPUT + sigmoid => NeuronScope::HIDDEN | NeuronScope::OUTPUT, + relu => NeuronScope::HIDDEN | NeuronScope::OUTPUT, + linear_activation => NeuronScope::INPUT | NeuronScope::HIDDEN | NeuronScope::OUTPUT, + f32::tanh => NeuronScope::HIDDEN | NeuronScope::OUTPUT }); s } } -bitflags! 
{ - /// Specifies where an activation function can occur - #[derive(Copy, Clone, Debug, Eq, PartialEq)] - pub struct ActivationScope: u8 { - /// Whether the activation can be applied to the input layer. - const INPUT = 0b001; - - /// Whether the activation can be applied to the hidden layer. - const HIDDEN = 0b010; - - /// Whether the activation can be applied to the output layer. - const OUTPUT = 0b100; - - /// The activation function will not be randomly placed anywhere - const NONE = 0b000; - } -} - -impl Default for ActivationScope { - fn default() -> Self { - Self::HIDDEN - } -} - -impl From<&NeuronLocation> for ActivationScope { - fn from(value: &NeuronLocation) -> Self { - match value { - NeuronLocation::Input(_) => Self::INPUT, - NeuronLocation::Hidden(_) => Self::HIDDEN, - NeuronLocation::Output(_) => Self::OUTPUT, - } - } -} - /// A trait that represents an activation method. pub trait Activation { /// The activation function. @@ -152,17 +124,13 @@ pub struct ActivationFn { pub func: Arc, /// The scope defining where the activation function can appear. - pub scope: ActivationScope, + pub scope: NeuronScope, pub(crate) name: String, } impl ActivationFn { /// Creates a new ActivationFn object. - pub fn new( - func: Arc, - scope: ActivationScope, - name: String, - ) -> Self { + pub fn new(func: Arc, scope: NeuronScope, name: String) -> Self { Self { func, name, scope } } } @@ -206,17 +174,36 @@ impl<'a> Deserialize<'a> for ActivationFn { } } -/// The sigmoid activation function. -pub fn sigmoid(n: f32) -> f32 { - 1. / (1. + std::f32::consts::E.powf(-n)) +bitflags! { + /// Specifies where an activation function can occur + #[derive(Copy, Clone, Debug, Eq, PartialEq)] + pub struct NeuronScope: u8 { + /// Whether the activation can be applied to the input layer. + const INPUT = 0b001; + + /// Whether the activation can be applied to the hidden layer. + const HIDDEN = 0b010; + + /// Whether the activation can be applied to the output layer. + const OUTPUT = 0b100; + + /// The activation function will not be randomly placed anywhere + const NONE = 0b000; + } } -/// The ReLU activation function. -pub fn relu(n: f32) -> f32 { - n.max(0.) +impl Default for NeuronScope { + fn default() -> Self { + Self::HIDDEN + } } -/// Activation function that does nothing. -pub fn linear_activation(n: f32) -> f32 { - n +impl> From for NeuronScope { + fn from(value: L) -> Self { + match value.as_ref() { + NeuronLocation::Input(_) => Self::INPUT, + NeuronLocation::Hidden(_) => Self::HIDDEN, + NeuronLocation::Output(_) => Self::OUTPUT, + } + } } diff --git a/src/activation/builtin.rs b/src/activation/builtin.rs new file mode 100644 index 0000000..fdf7ab7 --- /dev/null +++ b/src/activation/builtin.rs @@ -0,0 +1,14 @@ +/// The sigmoid activation function. Scales all values nonlinearly in the range of 1 to -1. +pub fn sigmoid(n: f32) -> f32 { + 1. / (1. + std::f32::consts::E.powf(-n)) +} + +/// The ReLU activation function. Equal to `n.max(0)`` +pub fn relu(n: f32) -> f32 { + n.max(0.) +} + +/// Activation function that does nothing. +pub fn linear_activation(n: f32) -> f32 { + n +} diff --git a/src/lib.rs b/src/lib.rs index 0dd0b8c..0de7360 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,90 +1,21 @@ -//! A simple crate that implements the Neuroevolution Augmenting Topologies algorithm using [genetic-rs](https://crates.io/crates/genetic-rs) -//! ### Feature Roadmap: -//! - [x] base (single-core) crate -//! - [x] rayon -//! - [x] serde -//! - [x] crossover +//! 
A crate implementing NeuroEvolution of Augmenting Topologies (NEAT). //! -//! You can get started by looking at [genetic-rs docs](https://docs.rs/genetic-rs) and checking the examples for this crate. +//! The goal is to provide a simple-to-use, very dynamic [`NeuralNetwork`] type that +//! integrates directly into the [`genetic-rs`](https://crates.io/crates/genetic-rs) ecosystem. +//! +//! Look at the README, docs, or examples to learn how to use this crate. #![warn(missing_docs)] -#![cfg_attr(docsrs, feature(doc_cfg))] -/// A module containing the [`NeuralNetworkTopology`] struct. This is what you want to use in the DNA of your agent, as it is the thing that goes through nextgens and suppors mutation. -pub mod topology; +/// Contains the types surrounding activation functions. +pub mod activation; -/// A module containing the main [`NeuralNetwork`] struct. -/// This has state/cache and will run the predictions. Make sure to run [`NeuralNetwork::flush_state`] between uses of [`NeuralNetwork::predict`]. -pub mod runnable; +/// Contains the [`NeuralNetwork`] and related types. +pub mod neuralnet; -pub use genetic_rs::prelude::*; -pub use runnable::*; -pub use topology::*; +pub use neuralnet::*; -#[cfg(feature = "serde")] -pub use nnt_serde::*; +pub use genetic_rs::{self, prelude::*}; #[cfg(test)] -mod tests { - use super::*; - use rand::prelude::*; - - #[derive(RandomlyMutable, DivisionReproduction, Clone)] - struct AgentDNA { - network: NeuralNetworkTopology<2, 1>, - } - - impl Prunable for AgentDNA {} - - impl GenerateRandom for AgentDNA { - fn gen_random(rng: &mut impl Rng) -> Self { - Self { - network: NeuralNetworkTopology::new(0.01, 3, rng), - } - } - } - - #[test] - fn basic_test() { - let fitness = |g: &AgentDNA| { - let network = NeuralNetwork::from(&g.network); - let mut fitness = 0.; - let mut rng = rand::thread_rng(); - - for _ in 0..100 { - let n = rng.gen::() * 10000.; - let base = rng.gen::() * 10.; - let expected = n.log(base); - - let [answer] = network.predict([n, base]); - network.flush_state(); - - fitness += 5. / (answer - expected).abs(); - } - - fitness - }; - - #[cfg(not(feature = "rayon"))] - let mut rng = rand::thread_rng(); - - let mut sim = GeneticSim::new( - #[cfg(not(feature = "rayon"))] - Vec::gen_random(&mut rng, 100), - #[cfg(feature = "rayon")] - Vec::gen_random(100), - fitness, - division_pruning_nextgen, - ); - - for _ in 0..100 { - sim.next_generation(); - } - - let mut fits: Vec<_> = sim.genomes.iter().map(fitness).collect(); - - fits.sort_by(|a, b| a.partial_cmp(&b).unwrap()); - - dbg!(fits); - } -} +mod tests; diff --git a/src/neuralnet.rs b/src/neuralnet.rs new file mode 100644 index 0000000..6f5f25d --- /dev/null +++ b/src/neuralnet.rs @@ -0,0 +1,856 @@ +use std::{ + collections::HashSet, + sync::{ + atomic::{AtomicBool, AtomicUsize, Ordering}, + Arc, + }, +}; + +use atomic_float::AtomicF32; +use genetic_rs::prelude::*; +use rand::Rng; +use replace_with::replace_with_or_abort; + +use crate::{ + activation::{builtin::*, *}, + activation_fn, +}; + +use rayon::prelude::*; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; + +#[cfg(feature = "serde")] +use serde_big_array::BigArray; + +/// The mutation settings for [`NeuralNetwork`]. +/// Does not affect [`NeuralNetwork::mutate`], only [`NeuralNetwork::divide`] and [`NeuralNetwork::crossover`]. +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, Clone, PartialEq)] +pub struct MutationSettings { + /// The chance of each mutation type to occur. 
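+    /// Each mutation type fires independently with this probability on each mutation pass.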
+ pub mutation_rate: f32, + + /// The number of times to try to mutate the network. + pub mutation_passes: usize, + + /// The maximum amount that the weights will be mutated by. + pub weight_mutation_amount: f32, +} + +impl Default for MutationSettings { + fn default() -> Self { + Self { + mutation_rate: 0.01, + mutation_passes: 3, + weight_mutation_amount: 0.5, + } + } +} + +/// An abstract neural network type with `I` input neurons and `O` output neurons. +/// Hidden neurons are not organized into layers, but rather float and link freely +/// (or at least in any way that doesn't cause a cyclic dependency). +/// +/// See [`NeuralNetwork::predict`] for usage. +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct NeuralNetwork { + /// The input layer of neurons. Values specified in [`NeuralNetwork::predict`] will start here. + #[cfg_attr(feature = "serde", serde(with = "BigArray"))] + pub input_layer: [Neuron; I], + + /// The hidden layer(s) of neurons. They are not actually layered, but rather free-floating. + pub hidden_layers: Vec, + + /// The output layer of neurons. Their values will be returned from [`NeuralNetwork::predict`]. + #[cfg_attr(feature = "serde", serde(with = "BigArray"))] + pub output_layer: [Neuron; O], + + /// The mutation settings for the network. + pub mutation_settings: MutationSettings, +} + +impl NeuralNetwork { + // TODO option to set default output layer activations + /// Creates a new random neural network with the given settings. + pub fn new(mutation_settings: MutationSettings, rng: &mut impl Rng) -> Self { + let mut output_layer = Vec::with_capacity(O); + + for _ in 0..O { + output_layer.push(Neuron::new_with_activation( + vec![], + activation_fn!(sigmoid), + rng, + )); + } + + let mut input_layer = Vec::with_capacity(I); + + for _ in 0..I { + let mut already_chosen = Vec::new(); + let outputs = (0..rng.gen_range(1..=O)) + .map(|_| { + let mut j = rng.gen_range(0..O); + while already_chosen.contains(&j) { + j = rng.gen_range(0..O); + } + + output_layer[j].input_count += 1; + already_chosen.push(j); + + (NeuronLocation::Output(j), rng.gen()) + }) + .collect(); + + input_layer.push(Neuron::new_with_activation( + outputs, + activation_fn!(linear_activation), + rng, + )); + } + + let input_layer = input_layer.try_into().unwrap(); + let output_layer = output_layer.try_into().unwrap(); + + Self { + input_layer, + hidden_layers: vec![], + output_layer, + mutation_settings, + } + } + + /// Runs the neural network, propagating values from input to output layer. + pub fn predict(&self, inputs: [f32; I]) -> [f32; O] { + let cache = Arc::new(NeuralNetCache::from(self)); + cache.prime_inputs(inputs); + + (0..I) + .into_par_iter() + .for_each(|i| self.eval(NeuronLocation::Input(i), cache.clone())); + + cache.output() + } + + fn eval(&self, loc: impl AsRef, cache: Arc>) { + let loc = loc.as_ref(); + + if !cache.claim(loc) { + // some other thread is already + // waiting to do this task, currently doing it, or done. + // no need to do it again. + return; + } + + let loc = loc.as_ref(); + while !cache.is_ready(loc) { + // essentially spinlocks until the dependency tasks are complete, + // while letting this thread do some work on random tasks. 
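+            // `rayon::yield_now()` runs one unit of queued pool work if any is
+            // pending, so the wait is cooperative rather than a pure busy-loop.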
+ rayon::yield_now(); + } + + let val = cache.get(loc); + let n = self.get_neuron(loc); + + n.outputs.par_iter().for_each(|(loc2, weight)| { + cache.add(loc2, n.activate(val * weight)); + self.eval(loc2, cache.clone()); + }); + } + + /// Get a neuron at the specified [`NeuronLocation`]. + pub fn get_neuron(&self, loc: impl AsRef) -> &Neuron { + match loc.as_ref() { + NeuronLocation::Input(i) => &self.input_layer[*i], + NeuronLocation::Hidden(i) => &self.hidden_layers[*i], + NeuronLocation::Output(i) => &self.output_layer[*i], + } + } + + /// Get a mutable reference to the neuron at the specified [`NeuronLocation`]. + pub fn get_neuron_mut(&mut self, loc: impl AsRef) -> &mut Neuron { + match loc.as_ref() { + NeuronLocation::Input(i) => &mut self.input_layer[*i], + NeuronLocation::Hidden(i) => &mut self.hidden_layers[*i], + NeuronLocation::Output(i) => &mut self.output_layer[*i], + } + } + + /// Split a [`Connection`] into two of the same weight, joined by a new [`Neuron`] in the hidden layer(s). + pub fn split_connection(&mut self, connection: Connection, rng: &mut impl Rng) { + let newloc = NeuronLocation::Hidden(self.hidden_layers.len()); + + let a = self.get_neuron_mut(connection.from); + let weight = unsafe { a.remove_connection(connection.to) }.unwrap(); + + a.outputs.push((newloc, weight)); + + let n = Neuron::new(vec![(connection.to, weight)], NeuronScope::HIDDEN, rng); + self.hidden_layers.push(n); + } + + /// Adds a connection but does not check for cyclic linkages. + /// Marked as unsafe because it could cause a hang/livelock when predicting due to cyclic linkage. + /// There is no actual UB or unsafe code associated with it. + pub unsafe fn add_connection_raw(&mut self, connection: Connection, weight: f32) { + let a = self.get_neuron_mut(connection.from); + a.outputs.push((connection.to, weight)); + + // let b = self.get_neuron_mut(connection.to); + // b.inputs.insert(connection.from); + } + + /// Returns false if the connection is cyclic. + pub fn is_connection_safe(&self, connection: Connection) -> bool { + let mut visited = HashSet::from([connection.from]); + + self.dfs(&mut visited, connection.to) + } + + // TODO maybe parallelize + fn dfs(&self, visited: &mut HashSet, current: NeuronLocation) -> bool { + if !visited.insert(current) { + return false; + } + + let n = self.get_neuron(current); + for (loc, _) in &n.outputs { + if !self.dfs(visited, *loc) { + return false; + } + } + + true + } + + /// Safe, checked add connection method. Returns false if it aborted connecting due to cyclic linkage. + pub fn add_connection(&mut self, connection: Connection, weight: f32) -> bool { + if !self.is_connection_safe(connection) { + return false; + } + + unsafe { + self.add_connection_raw(connection, weight); + } + + true + } + + /// Mutates a connection's weight. + pub fn mutate_weight(&mut self, connection: Connection, rng: &mut impl Rng) { + let rate = self.mutation_settings.weight_mutation_amount; + let n = self.get_neuron_mut(connection.from); + n.mutate_weight(connection.to, rate, rng).unwrap(); + } + + /// Get a random valid location within the network. + pub fn random_location(&self, rng: &mut impl Rng) -> NeuronLocation { + match rng.gen_range(0..3) { + 0 => NeuronLocation::Input(rng.gen_range(0..self.input_layer.len())), + 1 => NeuronLocation::Hidden(rng.gen_range(0..self.hidden_layers.len())), + 2 => NeuronLocation::Output(rng.gen_range(0..self.output_layer.len())), + _ => unreachable!(), + } + } + + /// Get a random valid location within a [`NeuronScope`]. 
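+    /// Implemented via rejection sampling: locations are redrawn until one lands in `scope`.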
+ pub fn random_location_in_scope( + &self, + rng: &mut impl Rng, + scope: NeuronScope, + ) -> NeuronLocation { + let loc = self.random_location(rng); + + // this is a lazy and slow way of donig it, TODO better version. + if !scope.contains(NeuronScope::from(loc)) { + return self.random_location_in_scope(rng, scope); + } + + loc + } + + /// Remove a connection and any hanging neurons caused by the deletion. + /// Returns whether there was a hanging neuron. + pub fn remove_connection(&mut self, connection: Connection) -> bool { + let a = self.get_neuron_mut(connection.from); + unsafe { a.remove_connection(connection.to) }.unwrap(); + + let b = self.get_neuron_mut(connection.to); + b.input_count -= 1; + + if b.input_count <= 0 { + self.remove_neuron(connection.to); + return true; + } + + false + } + + /// Remove a neuron and downshift all connection indexes to compensate for it. + pub fn remove_neuron(&mut self, loc: impl AsRef) { + let loc = loc.as_ref(); + if !loc.is_hidden() { + panic!("Can only remove neurons from hidden layer"); + } + + unsafe { + self.downshift_connections(loc.unwrap()); + } + } + + unsafe fn downshift_connections(&mut self, i: usize) { + self.input_layer + .par_iter_mut() + .for_each(|n| n.downshift_outputs(i)); + + self.hidden_layers + .par_iter_mut() + .for_each(|n| n.downshift_outputs(i)); + } + + // TODO maybe more parallelism and pass Connection info. + /// Runs the `callback` on the weights of the neural network in parallel, allowing it to modify weight values. + pub fn map_weights(&mut self, callback: impl Fn(&mut f32) + Sync) { + for n in &mut self.input_layer { + n.outputs.par_iter_mut().for_each(|(_, w)| callback(w)); + } + + for n in &mut self.hidden_layers { + n.outputs.par_iter_mut().for_each(|(_, w)| callback(w)); + } + } + + unsafe fn clear_input_counts(&mut self) { + // not sure whether all this parallelism is necessary or if it will just generate overhead + // rayon::scope(|s| { + // s.spawn(|_| self.input_layer.par_iter_mut().for_each(|n| n.input_count = 0)); + // s.spawn(|_| self.hidden_layers.par_iter_mut().for_each(|n| n.input_count = 0)); + // s.spawn(|_| self.output_layer.par_iter_mut().for_each(|n| n.input_count = 0)); + // }); + + self.input_layer + .par_iter_mut() + .for_each(|n| n.input_count = 0); + self.hidden_layers + .par_iter_mut() + .for_each(|n| n.input_count = 0); + self.output_layer + .par_iter_mut() + .for_each(|n| n.input_count = 0); + } + + /// Recalculates the [`input_count`][`Neuron::input_count`] field for all neurons in the network. 
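+    /// Needed after bulk edits such as crossover, which splice neurons from two parents
+    /// without maintaining the counts incrementally.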
+    /// Recalculates the [`input_count`][`Neuron::input_count`] field for all neurons in the network.
+    pub fn recalculate_input_counts(&mut self) {
+        unsafe { self.clear_input_counts() };
+
+        for i in 0..I {
+            for j in 0..self.input_layer[i].outputs.len() {
+                let (loc, _) = self.input_layer[i].outputs[j];
+                self.get_neuron_mut(loc).input_count += 1;
+            }
+        }
+
+        for i in 0..self.hidden_layers.len() {
+            for j in 0..self.hidden_layers[i].outputs.len() {
+                let (loc, _) = self.hidden_layers[i].outputs[j];
+                self.get_neuron_mut(loc).input_count += 1;
+            }
+        }
+    }
+}
+
+impl<const I: usize, const O: usize> RandomlyMutable for NeuralNetwork<I, O> {
+    fn mutate(&mut self, rate: f32, rng: &mut impl Rng) {
+        if rng.gen::<f32>() <= rate {
+            // split connection
+            let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT);
+            let n = self.get_neuron(from);
+            let (to, _) = n.random_output(rng);
+
+            self.split_connection(Connection { from, to }, rng);
+        }
+
+        if rng.gen::<f32>() <= rate {
+            // add connection
+            let weight = rng.gen::<f32>();
+
+            let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT);
+            let to = self.random_location_in_scope(rng, !NeuronScope::INPUT);
+
+            let mut connection = Connection { from, to };
+            while !self.add_connection(connection, weight) {
+                let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT);
+                let to = self.random_location_in_scope(rng, !NeuronScope::INPUT);
+                connection = Connection { from, to };
+            }
+        }
+
+        if rng.gen::<f32>() <= rate {
+            // remove connection
+
+            let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT);
+            let a = self.get_neuron(from);
+            let (to, _) = a.random_output(rng);
+
+            self.remove_connection(Connection { from, to });
+        }
+
+        self.map_weights(|w| {
+            // TODO maybe `Send`able rng.
+            let mut rng = rand::thread_rng();
+
+            if rng.gen::<f32>() <= rate {
+                *w += rng.gen_range(-rate..rate);
+            }
+        });
+    }
+}
+
+impl<const I: usize, const O: usize> DivisionReproduction for NeuralNetwork<I, O> {
+    fn divide(&self, rng: &mut impl Rng) -> Self {
+        let mut child = self.clone();
+
+        for _ in 0..self.mutation_settings.mutation_passes {
+            child.mutate(child.mutation_settings.mutation_rate, rng);
+        }
+
+        child
+    }
+}
+
+impl<const I: usize, const O: usize> CrossoverReproduction for NeuralNetwork<I, O> {
+    fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self {
+        let mut output_layer = self.output_layer.clone();
+
+        for (i, n) in output_layer.iter_mut().enumerate() {
+            if rng.gen::<f32>() >= 0.5 {
+                *n = other.output_layer[i].clone();
+            }
+        }
+
+        let hidden_len = self.hidden_layers.len().max(other.hidden_layers.len());
+        let mut hidden_layers = Vec::with_capacity(hidden_len);
+
+        for i in 0..hidden_len {
+            // take `self`'s neuron when the coin flip says so, or when `other` has no neuron at this index
+            if rng.gen::<f32>() >= 0.5 || i >= other.hidden_layers.len() {
+                if let Some(n) = self.hidden_layers.get(i) {
+                    let mut n = n.clone();
+                    n.prune_invalid_outputs(hidden_len, O);
+
+                    hidden_layers.push(n);
+
+                    continue;
+                }
+            }
+
+            let mut n = other.hidden_layers[i].clone();
+            n.prune_invalid_outputs(hidden_len, O);
+
+            hidden_layers.push(n);
+        }
+
+        let mut input_layer = self.input_layer.clone();
+
+        for (i, n) in input_layer.iter_mut().enumerate() {
+            if rng.gen::<f32>() >= 0.5 {
+                *n = other.input_layer[i].clone();
+            }
+            n.prune_invalid_outputs(hidden_len, O);
+        }
+
+        // crossover mutation settings just in case.
+        let mutation_settings = if rng.gen::<f32>() >= 0.5 {
+            self.mutation_settings.clone()
+        } else {
+            other.mutation_settings.clone()
+        };
+
+        let mut child = Self {
+            input_layer,
+            hidden_layers,
+            output_layer,
+            mutation_settings,
+        };
+
+        // TODO maybe find a way to do this while doing crossover stuff instead of recalculating everything.
+        // would be annoying to implement though.
+        child.recalculate_input_counts();
+
+        for _ in 0..child.mutation_settings.mutation_passes {
+            child.mutate(child.mutation_settings.mutation_rate, rng);
+        }
+
+        child
+    }
+}
+
+fn output_exists(loc: NeuronLocation, hidden_len: usize, output_len: usize) -> bool {
+    match loc {
+        NeuronLocation::Input(_) => false,
+        NeuronLocation::Hidden(i) => i < hidden_len,
+        NeuronLocation::Output(i) => i < output_len,
+    }
+}
+
+/// A helper struct for operations on connections between neurons.
+/// It does not contain information about the weight.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+pub struct Connection {
+    /// The source of the connection.
+    pub from: NeuronLocation,
+
+    /// The destination of the connection.
+    pub to: NeuronLocation,
+}
+
+/// A stateless neuron. Contains info about bias, activation, and connections.
+#[derive(Debug, Clone, PartialEq)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+pub struct Neuron {
+    /// The input count used in [`NeuralNetCache`]. Not safe to modify.
+    pub input_count: usize,
+
+    /// The connections and weights to other neurons.
+    pub outputs: Vec<(NeuronLocation, f32)>,
+
+    /// The initial value of the neuron.
+    pub bias: f32,
+
+    /// The activation function applied to the value before propagating to [`outputs`][Neuron::outputs].
+    pub activation_fn: ActivationFn,
+}
+
+impl Neuron {
+    /// Creates a new neuron with a specified activation function and outputs.
+    pub fn new_with_activation(
+        outputs: Vec<(NeuronLocation, f32)>,
+        activation_fn: ActivationFn,
+        rng: &mut impl Rng,
+    ) -> Self {
+        Self {
+            input_count: 0,
+            outputs,
+            bias: rng.gen(),
+            activation_fn,
+        }
+    }
+
+    /// Creates a new neuron with the given output locations.
+    /// Chooses a random activation function within the specified scope.
+    pub fn new(
+        outputs: Vec<(NeuronLocation, f32)>,
+        current_scope: NeuronScope,
+        rng: &mut impl Rng,
+    ) -> Self {
+        let reg = ACTIVATION_REGISTRY.read().unwrap();
+        let activations = reg.activations_in_scope(current_scope);
+
+        Self::new_with_activations(outputs, activations, rng)
+    }
+
+    /// Creates a new neuron with the given outputs.
+    /// Takes a collection of activation functions and chooses a random one from them to use.
+    pub fn new_with_activations(
+        outputs: Vec<(NeuronLocation, f32)>,
+        activations: impl IntoIterator<Item = ActivationFn>,
+        rng: &mut impl Rng,
+    ) -> Self {
+        // TODO get random in iterator form
+        let mut activations: Vec<_> = activations.into_iter().collect();
+
+        // TODO maybe Result instead.
+        if activations.is_empty() {
+            panic!("Empty activations list provided");
+        }
+
+        Self::new_with_activation(
+            outputs,
+            activations.remove(rng.gen_range(0..activations.len())),
+            rng,
+        )
+    }
+
+    /// Runs the [activation function][Neuron::activation_fn] on the given value and returns it.
+    pub fn activate(&self, v: f32) -> f32 {
+        self.activation_fn.func.activate(v)
+    }
+
+    /// Get the weight of the provided output location. Returns `None` if not found.
+    pub fn get_weight(&self, output: impl AsRef<NeuronLocation>) -> Option<f32> {
+        let loc = *output.as_ref();
+        for out in &self.outputs {
+            if out.0 == loc {
+                return Some(out.1);
+            }
+        }
+
+        None
+    }
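// [Editor's sketch — not part of the patch] Constructing a `Neuron` directly
// with the constructors above; `activation_fn!(sigmoid)` is the crate's
// existing macro/builtin, as used elsewhere in this file.
fn lone_neuron(rng: &mut impl Rng) -> Neuron {
    let n = Neuron::new_with_activation(
        vec![(NeuronLocation::Output(0), 0.25)],
        activation_fn!(sigmoid),
        rng,
    );

    // `get_weight` does a linear scan over `outputs`
    assert_eq!(n.get_weight(NeuronLocation::Output(0)), Some(0.25));

    n
}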
+    /// Tries to remove a connection from the neuron and returns the weight if it was found.
+    /// Marked as unsafe because it will not update the destination's [`input_count`][Neuron::input_count].
+    pub unsafe fn remove_connection(&mut self, output: impl AsRef<NeuronLocation>) -> Option<f32> {
+        let loc = *output.as_ref();
+        let mut i = 0;
+
+        while i < self.outputs.len() {
+            if self.outputs[i].0 == loc {
+                return Some(self.outputs.remove(i).1);
+            }
+            i += 1;
+        }
+
+        None
+    }
+
+    /// Randomly mutates the specified weight with the rate.
+    pub fn mutate_weight(
+        &mut self,
+        output: impl AsRef<NeuronLocation>,
+        rate: f32,
+        rng: &mut impl Rng,
+    ) -> Option<f32> {
+        let loc = *output.as_ref();
+        let mut i = 0;
+
+        while i < self.outputs.len() {
+            let o = &mut self.outputs[i];
+            if o.0 == loc {
+                o.1 += rng.gen_range(-rate..rate);
+
+                return Some(o.1);
+            }
+
+            i += 1;
+        }
+
+        None
+    }
+
+    /// Get a random output location and weight.
+    pub fn random_output(&self, rng: &mut impl Rng) -> (NeuronLocation, f32) {
+        self.outputs[rng.gen_range(0..self.outputs.len())]
+    }
+
+    pub(crate) fn downshift_outputs(&mut self, i: usize) {
+        // TODO par_iter_mut instead of replace
+        replace_with_or_abort(&mut self.outputs, |o| {
+            o.into_par_iter()
+                .map(|(loc, w)| match loc {
+                    NeuronLocation::Hidden(j) if j > i => (NeuronLocation::Hidden(j - 1), w),
+                    _ => (loc, w),
+                })
+                .collect()
+        });
+    }
+
+    /// Removes any outputs pointing to a nonexistent neuron.
+    pub fn prune_invalid_outputs(&mut self, hidden_len: usize, output_len: usize) {
+        self.outputs
+            .retain(|(loc, _)| output_exists(*loc, hidden_len, output_len));
+    }
+}
+
+/// A pseudo-pointer of sorts that is used for caching.
+#[derive(Hash, Clone, Copy, Debug, Eq, PartialEq)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+pub enum NeuronLocation {
+    /// Points to a neuron in the input layer at contained index.
+    Input(usize),
+
+    /// Points to a neuron in the hidden layer at contained index.
+    Hidden(usize),
+
+    /// Points to a neuron in the output layer at contained index.
+    Output(usize),
+}
+
+impl NeuronLocation {
+    /// Returns `true` if it points to the input layer. Otherwise, returns `false`.
+    pub fn is_input(&self) -> bool {
+        matches!(self, Self::Input(_))
+    }
+
+    /// Returns `true` if it points to the hidden layer. Otherwise, returns `false`.
+    pub fn is_hidden(&self) -> bool {
+        matches!(self, Self::Hidden(_))
+    }
+
+    /// Returns `true` if it points to the output layer. Otherwise, returns `false`.
+    pub fn is_output(&self) -> bool {
+        matches!(self, Self::Output(_))
+    }
+
+    /// Retrieves the index value, regardless of layer. Does not consume.
+    pub fn unwrap(&self) -> usize {
+        match self {
+            Self::Input(i) => *i,
+            Self::Hidden(i) => *i,
+            Self::Output(i) => *i,
+        }
+    }
+}
+
+impl AsRef<NeuronLocation> for NeuronLocation {
+    fn as_ref(&self) -> &NeuronLocation {
+        self
+    }
+}
+
+/// Handles the state of a single neuron for [`NeuralNetCache`].
+#[derive(Debug, Default)]
+pub struct NeuronCache {
+    /// The value of the neuron.
+    pub value: AtomicF32,
+
+    /// The expected input count.
+    pub expected_inputs: usize,
+
+    /// The number of inputs that have finished evaluating.
+    pub finished_inputs: AtomicUsize,
+
+    /// Whether or not a thread has claimed this neuron to work on it.
+    pub claimed: AtomicBool,
+}
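// [Editor's sketch — not part of the patch] The index bookkeeping that
// `downshift_outputs` performs once a hidden neuron has been removed; only
// hidden indexes above the removed slot shift down. Crate-internal visibility
// is assumed, as in the tests module.
#[cfg(test)]
#[test]
fn downshift_demo() {
    let mut rng = rand::thread_rng();
    let mut n = Neuron::new_with_activation(
        vec![
            (NeuronLocation::Hidden(0), 0.1),
            (NeuronLocation::Hidden(2), 0.2),
            (NeuronLocation::Output(0), 0.3),
        ],
        activation_fn!(sigmoid),
        &mut rng,
    );

    // hidden neuron 1 was just removed from the network
    n.downshift_outputs(1);

    assert_eq!(n.outputs[0].0, NeuronLocation::Hidden(0)); // below the slot: unchanged
    assert_eq!(n.outputs[1].0, NeuronLocation::Hidden(1)); // above the slot: shifted down
    assert_eq!(n.outputs[2].0, NeuronLocation::Output(0)); // other layers: unchanged
}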
+impl NeuronCache {
+    /// Creates a new [`NeuronCache`] given relevant info.
+    /// Use [`NeuronCache::from`] instead to create cache for a [`Neuron`].
+    pub fn new(bias: f32, expected_inputs: usize) -> Self {
+        Self {
+            value: AtomicF32::new(bias),
+            expected_inputs,
+            ..Default::default()
+        }
+    }
+}
+
+impl From<&Neuron> for NeuronCache {
+    fn from(value: &Neuron) -> Self {
+        Self {
+            value: AtomicF32::new(value.bias),
+            expected_inputs: value.input_count,
+            finished_inputs: AtomicUsize::new(0),
+            claimed: AtomicBool::new(false),
+        }
+    }
+}
+
+/// A cache type used in [`NeuralNetwork::predict`] to track state.
+#[derive(Debug)]
+pub struct NeuralNetCache<const I: usize, const O: usize> {
+    /// The input layer cache.
+    pub input_layer: [NeuronCache; I],
+
+    /// The hidden layer(s) cache.
+    pub hidden_layers: Vec<NeuronCache>,
+
+    /// The output layer cache.
+    pub output_layer: [NeuronCache; O],
+}
+
+impl<const I: usize, const O: usize> NeuralNetCache<I, O> {
+    /// Gets the value of a neuron at the given location.
+    pub fn get(&self, loc: impl AsRef<NeuronLocation>) -> f32 {
+        match loc.as_ref() {
+            NeuronLocation::Input(i) => self.input_layer[*i].value.load(Ordering::SeqCst),
+            NeuronLocation::Hidden(i) => self.hidden_layers[*i].value.load(Ordering::SeqCst),
+            NeuronLocation::Output(i) => self.output_layer[*i].value.load(Ordering::SeqCst),
+        }
+    }
+
+    /// Adds a value to the neuron at the specified location and increments [`finished_inputs`][NeuronCache::finished_inputs].
+    pub fn add(&self, loc: impl AsRef<NeuronLocation>, n: f32) -> f32 {
+        match loc.as_ref() {
+            NeuronLocation::Input(i) => self.input_layer[*i].value.fetch_add(n, Ordering::SeqCst),
+            NeuronLocation::Hidden(i) => {
+                let c = &self.hidden_layers[*i];
+                let v = c.value.fetch_add(n, Ordering::SeqCst);
+                c.finished_inputs.fetch_add(1, Ordering::SeqCst);
+                v
+            }
+            NeuronLocation::Output(i) => {
+                let c = &self.output_layer[*i];
+                let v = c.value.fetch_add(n, Ordering::SeqCst);
+                c.finished_inputs.fetch_add(1, Ordering::SeqCst);
+                v
+            }
+        }
+    }
+
+    /// Returns whether [`finished_inputs`][NeuronCache::finished_inputs] matches [`expected_inputs`][NeuronCache::expected_inputs].
+    pub fn is_ready(&self, loc: impl AsRef<NeuronLocation>) -> bool {
+        match loc.as_ref() {
+            NeuronLocation::Input(i) => {
+                let c = &self.input_layer[*i];
+                c.finished_inputs.load(Ordering::SeqCst) >= c.expected_inputs
+            }
+            NeuronLocation::Hidden(i) => {
+                let c = &self.hidden_layers[*i];
+                c.finished_inputs.load(Ordering::SeqCst) >= c.expected_inputs
+            }
+            NeuronLocation::Output(i) => {
+                let c = &self.output_layer[*i];
+                c.finished_inputs.load(Ordering::SeqCst) >= c.expected_inputs
+            }
+        }
+    }
+
+    /// Adds the input values to the input layer of neurons.
+    pub fn prime_inputs(&self, inputs: [f32; I]) {
+        for (i, v) in inputs.into_iter().enumerate() {
+            self.input_layer[i].value.fetch_add(v, Ordering::SeqCst);
+        }
+    }
+
+    /// Fetches and packs the output layer values into an array.
+    pub fn output(&self) -> [f32; O] {
+        let output: Vec<_> = self
+            .output_layer
+            .par_iter()
+            .map(|c| c.value.load(Ordering::SeqCst))
+            .collect();
+
+        output.try_into().unwrap()
+    }
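// [Editor's sketch — not part of the patch] The readiness protocol the cache
// encodes, with "ready" meaning the finished-input counter has caught up to
// the expected count (this mirrors the `neural_net_cache_sync` test added
// later in this series):
fn readiness_demo() {
    let cache = NeuralNetCache {
        input_layer: [NeuronCache::new(0.0, 0)],
        hidden_layers: vec![NeuronCache::new(0.5, 2)], // bias 0.5, expects 2 inputs
        output_layer: [NeuronCache::new(0.0, 1)],
    };

    let h = NeuronLocation::Hidden(0);
    cache.add(&h, 1.0);
    assert!(!cache.is_ready(&h)); // only one of two inputs has finished
    cache.add(&h, 2.0);
    assert!(cache.is_ready(&h)); // both finished; value is now 0.5 + 1.0 + 2.0
}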
+    /// Attempts to claim a neuron. Returns false if it has already been claimed.
+    pub fn claim(&self, loc: impl AsRef<NeuronLocation>) -> bool {
+        match loc.as_ref() {
+            NeuronLocation::Input(i) => self.input_layer[*i]
+                .claimed
+                .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
+                .is_ok(),
+            NeuronLocation::Hidden(i) => self.hidden_layers[*i]
+                .claimed
+                .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
+                .is_ok(),
+            NeuronLocation::Output(i) => self.output_layer[*i]
+                .claimed
+                .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
+                .is_ok(),
+        }
+    }
+}
+
+impl<const I: usize, const O: usize> From<&NeuralNetwork<I, O>> for NeuralNetCache<I, O> {
+    fn from(net: &NeuralNetwork<I, O>) -> Self {
+        let input_layer: Vec<_> = net.input_layer.par_iter().map(|n| n.into()).collect();
+        let input_layer = input_layer.try_into().unwrap();
+
+        let hidden_layers: Vec<_> = net.hidden_layers.par_iter().map(|n| n.into()).collect();
+        let hidden_layers = hidden_layers.try_into().unwrap();
+
+        let output_layer: Vec<_> = net.output_layer.par_iter().map(|n| n.into()).collect();
+        let output_layer = output_layer.try_into().unwrap();
+
+        Self {
+            input_layer,
+            hidden_layers,
+            output_layer,
+        }
+    }
+}
diff --git a/src/runnable.rs b/src/runnable.rs
deleted file mode 100644
index 5b28f54..0000000
--- a/src/runnable.rs
+++ /dev/null
@@ -1,300 +0,0 @@
-use crate::topology::*;
-
-#[cfg(not(feature = "rayon"))]
-use std::{cell::RefCell, rc::Rc};
-
-#[cfg(feature = "rayon")]
-use rayon::prelude::*;
-#[cfg(feature = "rayon")]
-use std::sync::{Arc, RwLock};
-
-/// A runnable, stated Neural Network generated from a [NeuralNetworkTopology]. Use [`NeuralNetwork::from`] to go from stateless to runnable.
-/// Because this has state, you need to run [`NeuralNetwork::flush_state`] between [`NeuralNetwork::predict`] calls.
-#[derive(Debug)]
-#[cfg(not(feature = "rayon"))]
-pub struct NeuralNetwork<const I: usize, const O: usize> {
-    input_layer: [Rc<RefCell<Neuron>>; I],
-    hidden_layers: Vec<Rc<RefCell<Neuron>>>,
-    output_layer: [Rc<RefCell<Neuron>>; O],
-}
-
-/// Parallelized version of the [`NeuralNetwork`] struct.
-#[derive(Debug)]
-#[cfg(feature = "rayon")]
-pub struct NeuralNetwork<const I: usize, const O: usize> {
-    input_layer: [Arc<RwLock<Neuron>>; I],
-    hidden_layers: Vec<Arc<RwLock<Neuron>>>,
-    output_layer: [Arc<RwLock<Neuron>>; O],
-}
-
-impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
-    /// Predicts an output for the given inputs.
-    #[cfg(not(feature = "rayon"))]
-    pub fn predict(&self, inputs: [f32; I]) -> [f32; O] {
-        for (i, v) in inputs.iter().enumerate() {
-            let mut nw = self.input_layer[i].borrow_mut();
-            nw.state.value = *v;
-            nw.state.processed = true;
-        }
-
-        (0..O)
-            .map(NeuronLocation::Output)
-            .map(|loc| self.process_neuron(loc))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap()
-    }
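// [Editor's note — not part of the patch] The usage contract of the stateful
// API being deleted here: every `predict` had to be paired with a
// `flush_state`, or leftover neuron values leaked into the next prediction.
// The cache-based rewrite above removes that footgun.
fn old_contract<const I: usize, const O: usize>(
    net: &NeuralNetwork<I, O>,
    inputs: [f32; I],
) -> [f32; O] {
    let out = net.predict(inputs);
    net.flush_state(); // reset each neuron's value back to its bias
    out
}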
-    /// Parallelized prediction of outputs from inputs.
-    #[cfg(feature = "rayon")]
-    pub fn predict(&self, inputs: [f32; I]) -> [f32; O] {
-        inputs.par_iter().enumerate().for_each(|(i, v)| {
-            let mut nw = self.input_layer[i].write().unwrap();
-            nw.state.value = *v;
-            nw.state.processed = true;
-        });
-
-        (0..O)
-            .map(NeuronLocation::Output)
-            .collect::<Vec<_>>()
-            .into_par_iter()
-            .map(|loc| self.process_neuron(loc))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap()
-    }
-
-    #[cfg(not(feature = "rayon"))]
-    fn process_neuron(&self, loc: NeuronLocation) -> f32 {
-        let n = self.get_neuron(loc);
-
-        {
-            let nr = n.borrow();
-
-            if nr.state.processed {
-                return nr.state.value;
-            }
-        }
-
-        let mut n = n.borrow_mut();
-
-        for (l, w) in n.inputs.clone() {
-            n.state.value += self.process_neuron(l) * w;
-        }
-
-        n.activate();
-
-        n.state.value
-    }
-
-    #[cfg(feature = "rayon")]
-    fn process_neuron(&self, loc: NeuronLocation) -> f32 {
-        let n = self.get_neuron(loc);
-
-        {
-            let nr = n.read().unwrap();
-
-            if nr.state.processed {
-                return nr.state.value;
-            }
-        }
-
-        let val: f32 = n
-            .read()
-            .unwrap()
-            .inputs
-            .par_iter()
-            .map(|&(n2, w)| {
-                let processed = self.process_neuron(n2);
-                processed * w
-            })
-            .sum();
-
-        let mut nw = n.write().unwrap();
-        nw.state.value += val;
-        nw.activate();
-
-        nw.state.value
-    }
-
-    #[cfg(not(feature = "rayon"))]
-    fn get_neuron(&self, loc: NeuronLocation) -> Rc<RefCell<Neuron>> {
-        match loc {
-            NeuronLocation::Input(i) => self.input_layer[i].clone(),
-            NeuronLocation::Hidden(i) => self.hidden_layers[i].clone(),
-            NeuronLocation::Output(i) => self.output_layer[i].clone(),
-        }
-    }
-
-    #[cfg(feature = "rayon")]
-    fn get_neuron(&self, loc: NeuronLocation) -> Arc<RwLock<Neuron>> {
-        match loc {
-            NeuronLocation::Input(i) => self.input_layer[i].clone(),
-            NeuronLocation::Hidden(i) => self.hidden_layers[i].clone(),
-            NeuronLocation::Output(i) => self.output_layer[i].clone(),
-        }
-    }
-
-    /// Flushes the network's state after a [prediction][NeuralNetwork::predict].
-    #[cfg(not(feature = "rayon"))]
-    pub fn flush_state(&self) {
-        for n in &self.input_layer {
-            n.borrow_mut().flush_state();
-        }
-
-        for n in &self.hidden_layers {
-            n.borrow_mut().flush_state();
-        }
-
-        for n in &self.output_layer {
-            n.borrow_mut().flush_state();
-        }
-    }
-    /// Flushes the neural network's state.
-    #[cfg(feature = "rayon")]
-    pub fn flush_state(&self) {
-        self.input_layer
-            .par_iter()
-            .for_each(|n| n.write().unwrap().flush_state());
-
-        self.hidden_layers
-            .par_iter()
-            .for_each(|n| n.write().unwrap().flush_state());
-
-        self.output_layer
-            .par_iter()
-            .for_each(|n| n.write().unwrap().flush_state());
-    }
-}
-
-impl<const I: usize, const O: usize> From<&NeuralNetworkTopology<I, O>> for NeuralNetwork<I, O> {
-    #[cfg(not(feature = "rayon"))]
-    fn from(value: &NeuralNetworkTopology<I, O>) -> Self {
-        let input_layer = value
-            .input_layer
-            .iter()
-            .map(|n| Rc::new(RefCell::new(Neuron::from(&n.read().unwrap().clone()))))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        let hidden_layers = value
-            .hidden_layers
-            .iter()
-            .map(|n| Rc::new(RefCell::new(Neuron::from(&n.read().unwrap().clone()))))
-            .collect();
-
-        let output_layer = value
-            .output_layer
-            .iter()
-            .map(|n| Rc::new(RefCell::new(Neuron::from(&n.read().unwrap().clone()))))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        Self {
-            input_layer,
-            hidden_layers,
-            output_layer,
-        }
-    }
-
-    #[cfg(feature = "rayon")]
-    fn from(value: &NeuralNetworkTopology<I, O>) -> Self {
-        let input_layer = value
-            .input_layer
-            .iter()
-            .map(|n| Arc::new(RwLock::new(Neuron::from(&n.read().unwrap().clone()))))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        let hidden_layers = value
-            .hidden_layers
-            .iter()
-            .map(|n| Arc::new(RwLock::new(Neuron::from(&n.read().unwrap().clone()))))
-            .collect();
-
-        let output_layer = value
-            .output_layer
-            .iter()
-            .map(|n| Arc::new(RwLock::new(Neuron::from(&n.read().unwrap().clone()))))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        Self {
-            input_layer,
-            hidden_layers,
-            output_layer,
-        }
-    }
-}
-
-/// A state-filled neuron.
-#[derive(Clone, Debug)]
-pub struct Neuron {
-    inputs: Vec<(NeuronLocation, f32)>,
-    bias: f32,
-
-    /// The current state of the neuron.
-    pub state: NeuronState,
-
-    /// The neuron's activation function
-    pub activation: ActivationFn,
-}
-
-impl Neuron {
-    /// Flushes a neuron's state. Called by [`NeuralNetwork::flush_state`]
-    pub fn flush_state(&mut self) {
-        self.state.value = self.bias;
-    }
-
-    /// Applies the activation function to the neuron
-    pub fn activate(&mut self) {
-        self.state.value = self.activation.func.activate(self.state.value);
-    }
-}
-
-impl From<&NeuronTopology> for Neuron {
-    fn from(value: &NeuronTopology) -> Self {
-        Self {
-            inputs: value.inputs.clone(),
-            bias: value.bias,
-            state: NeuronState {
-                value: value.bias,
-                ..Default::default()
-            },
-            activation: value.activation.clone(),
-        }
-    }
-}
-
-/// A state used in [`Neuron`]s for cache.
-#[derive(Clone, Debug, Default)]
-pub struct NeuronState {
-    /// The current value of the neuron. Initialized to a neuron's bias when flushed.
-    pub value: f32,
-
-    /// Whether or not [`value`][NeuronState::value] has finished processing.
-    pub processed: bool,
-}
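// [Editor's sketch — not part of the patch] The per-neuron lifecycle that
// `NeuronState` encodes in this old design: accumulate weighted inputs into
// `value`, activate in place, then flush back to the bias before the next
// prediction.
fn neuron_lifecycle(n: &mut Neuron) {
    n.state.value += 0.9; // weighted input accumulated by `process_neuron`
    n.activate();         // apply the activation function in place
    let _read = n.state.value; // read by downstream neurons
    n.flush_state();      // `value` is reset to the neuron's bias
}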
-/// A blanket trait for iterators meant to help with interpreting the output of a [`NeuralNetwork`]
-#[cfg(feature = "max-index")]
-pub trait MaxIndex<T> {
-    /// Retrieves the index of the max value.
-    fn max_index(self) -> usize;
-}
-
-#[cfg(feature = "max-index")]
-impl<I: Iterator<Item = T>, T: PartialOrd> MaxIndex<T> for I {
-    // slow and lazy implementation but it works (will prob optimize in the future)
-    fn max_index(self) -> usize {
-        self.enumerate()
-            .max_by(|(_, v), (_, v2)| v.partial_cmp(v2).unwrap())
-            .unwrap()
-            .0
-    }
-}
diff --git a/src/tests.rs b/src/tests.rs
new file mode 100644
index 0000000..825cdee
--- /dev/null
+++ b/src/tests.rs
@@ -0,0 +1,179 @@
+use crate::*;
+use rand::prelude::*;
+
+// no support for tuple structs derive in genetic-rs yet :(
+#[derive(Debug, Clone, PartialEq)]
+struct Agent(NeuralNetwork<4, 1>);
+
+impl Prunable for Agent {}
+
+impl RandomlyMutable for Agent {
+    fn mutate(&mut self, rate: f32, rng: &mut impl Rng) {
+        self.0.mutate(rate, rng);
+    }
+}
+
+impl DivisionReproduction for Agent {
+    fn divide(&self, rng: &mut impl rand::Rng) -> Self {
+        Self(self.0.divide(rng))
+    }
+}
+
+impl CrossoverReproduction for Agent {
+    fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self {
+        Self(self.0.crossover(&other.0, rng))
+    }
+}
+
+struct GuessTheNumber(f32);
+
+impl GuessTheNumber {
+    fn new(rng: &mut impl Rng) -> Self {
+        Self(rng.gen())
+    }
+
+    fn guess(&self, n: f32) -> Option<f32> {
+        if n > self.0 + 1.0e-5 {
+            return Some(1.);
+        }
+
+        if n < self.0 - 1.0e-5 {
+            return Some(-1.);
+        }
+
+        // guess was correct (or at least within margin of error).
+        None
+    }
+}
+
+fn fitness(agent: &Agent) -> f32 {
+    let mut rng = rand::thread_rng();
+
+    let mut fitness = 0.;
+
+    // 10 games for consistency
+    for _ in 0..10 {
+        let game = GuessTheNumber::new(&mut rng);
+
+        let mut last_guess = 0.;
+        let mut last_result = 0.;
+
+        let mut last_guess_2 = 0.;
+        let mut last_result_2 = 0.;
+
+        let mut steps = 0;
+        loop {
+            if steps >= 20 {
+                // took too many guesses
+                fitness -= 50.;
+                break;
+            }
+
+            let [cur_guess] =
+                agent
+                    .0
+                    .predict([last_guess, last_result, last_guess_2, last_result_2]);
+
+            let cur_result = game.guess(cur_guess);
+
+            if let Some(result) = cur_result {
+                last_guess = last_guess_2;
+                last_result = last_result_2;
+
+                last_guess_2 = cur_guess;
+                last_result_2 = result;
+
+                fitness -= 1.;
+                steps += 1;
+
+                continue;
+            }
+
+            fitness += 50.;
+            break;
+        }
+    }
+
+    fitness
+}
+
+#[test]
+fn division() {
+    let mut rng = rand::thread_rng();
+
+    let starting_genomes = (0..100)
+        .map(|_| Agent(NeuralNetwork::new(MutationSettings::default(), &mut rng)))
+        .collect();
+
+    let mut sim = GeneticSim::new(starting_genomes, fitness, division_pruning_nextgen);
+
+    sim.perform_generations(100);
+}
+
+#[test]
+fn crossover() {
+    let mut rng = rand::thread_rng();
+
+    let starting_genomes = (0..100)
+        .map(|_| Agent(NeuralNetwork::new(MutationSettings::default(), &mut rng)))
+        .collect();
+
+    let mut sim = GeneticSim::new(starting_genomes, fitness, crossover_pruning_nextgen);
+
+    sim.perform_generations(100);
+}
+
+#[cfg(feature = "serde")]
+#[test]
+fn serde() {
+    let mut rng = rand::thread_rng();
+    let net: NeuralNetwork<5, 10> = NeuralNetwork::new(MutationSettings::default(), &mut rng);
+
+    let text = serde_json::to_string(&net).unwrap();
+
+    let net2: NeuralNetwork<5, 10> = serde_json::from_str(&text).unwrap();
+
+    assert_eq!(net, net2);
+}
+
+#[test]
+fn neural_net_cache_sync() {
+    let cache = NeuralNetCache {
+        input_layer: [NeuronCache::new(0.3, 0), NeuronCache::new(0.25, 0)],
+        hidden_layers: vec![
+            NeuronCache::new(0.2, 2),
+            NeuronCache::new(0.0, 2),
+            NeuronCache::new(1.5, 2),
+        ],
+        output_layer: [NeuronCache::new(0.0, 3), NeuronCache::new(0.0, 3)],
+    };
+
+    for i in 0..2 {
+        let input_loc = NeuronLocation::Input(i);
+
+        assert!(cache.claim(&input_loc));
+
+        for j in 0..3 {
+            cache.add(
+                NeuronLocation::Hidden(j),
+                f32::tanh(cache.get(&input_loc) * 1.2),
+            );
+        }
+    }
+
+    for i in 0..3 {
+        let hidden_loc = NeuronLocation::Hidden(i);
+
+        assert!(cache.is_ready(&hidden_loc));
+        assert!(cache.claim(&hidden_loc));
+
+        for j in 0..2 {
+            cache.add(
+                NeuronLocation::Output(j),
+                activation::builtin::sigmoid(cache.get(&hidden_loc) * 0.7),
+            );
+        }
+    }
+
+    assert_eq!(cache.output(), [2.0688455, 2.0688455]);
+}
diff --git a/src/topology/mod.rs b/src/topology/mod.rs
index dd246f2..e69de29 100644
--- a/src/topology/mod.rs
+++ b/src/topology/mod.rs
@@ -1,638 +0,0 @@
-/// Contains useful structs for serializing/deserializing a [`NeuronTopology`]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
-#[cfg(feature = "serde")]
-pub mod nnt_serde;
-
-/// Contains structs and traits used for activation functions.
-pub mod activation;
-
-pub use activation::*;
-
-use std::{
-    collections::HashSet,
-    sync::{Arc, RwLock},
-};
-
-use genetic_rs::prelude::*;
-use rand::prelude::*;
-
-#[cfg(feature = "serde")]
-use serde::{Deserialize, Serialize};
-
-use crate::activation_fn;
-
-/// A stateless neural network topology.
-/// This is the struct you want to use in your agent's inheritance.
-/// See [`NeuralNetwork::from`][crate::NeuralNetwork::from] for how to convert this to a runnable neural network.
-#[derive(Debug)]
-pub struct NeuralNetworkTopology<const I: usize, const O: usize> {
-    /// The input layer of the neural network. Uses a fixed length of `I`.
-    pub input_layer: [Arc<RwLock<NeuronTopology>>; I],
-
-    /// The hidden layers of the neural network. Because neurons have a flexible connection system, all of them exist in the same flat vector.
-    pub hidden_layers: Vec<Arc<RwLock<NeuronTopology>>>,
-
-    /// The output layer of the neural network. Uses a fixed length of `O`.
-    pub output_layer: [Arc<RwLock<NeuronTopology>>; O],
-
-    /// The mutation rate used in [`NeuralNetworkTopology::mutate`] after crossover/division.
-    pub mutation_rate: f32,
-
-    /// The number of mutation passes (and thus, maximum number of possible mutations that can occur for each entity in the generation).
-    pub mutation_passes: usize,
-}
-
-impl<const I: usize, const O: usize> NeuralNetworkTopology<I, O> {
-    /// Creates a new [`NeuralNetworkTopology`].
-    pub fn new(mutation_rate: f32, mutation_passes: usize, rng: &mut impl Rng) -> Self {
-        let input_layer: [Arc<RwLock<NeuronTopology>>; I] = (0..I)
-            .map(|_| {
-                Arc::new(RwLock::new(NeuronTopology::new_with_activation(
-                    vec![],
-                    activation_fn!(linear_activation),
-                    rng,
-                )))
-            })
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        let mut output_layer = Vec::with_capacity(O);
-
-        for _ in 0..O {
-            // random number of connections to random input neurons.
-            let input = (0..rng.gen_range(1..=I))
-                .map(|_| {
-                    let mut already_chosen = Vec::new();
-                    let mut i = rng.gen_range(0..I);
-                    while already_chosen.contains(&i) {
-                        i = rng.gen_range(0..I);
-                    }
-
-                    already_chosen.push(i);
-
-                    NeuronLocation::Input(i)
-                })
-                .collect();
-
-            output_layer.push(Arc::new(RwLock::new(NeuronTopology::new_with_activation(
-                input,
-                activation_fn!(sigmoid),
-                rng,
-            ))));
-        }
-
-        let output_layer = output_layer.try_into().unwrap();
-
-        Self {
-            input_layer,
-            hidden_layers: vec![],
-            output_layer,
-            mutation_rate,
-            mutation_passes,
-        }
-    }
-    /// Creates a new connection between the neurons.
-    /// If the connection is cyclic, it does not add a connection and returns false.
-    /// Otherwise, it returns true.
-    pub fn add_connection(
-        &mut self,
-        from: NeuronLocation,
-        to: NeuronLocation,
-        weight: f32,
-    ) -> bool {
-        if self.is_connection_cyclic(from, to) {
-            return false;
-        }
-
-        // Add the connection since it is not cyclic
-        self.get_neuron(to)
-            .write()
-            .unwrap()
-            .inputs
-            .push((from, weight));
-
-        true
-    }
-
-    fn is_connection_cyclic(&self, from: NeuronLocation, to: NeuronLocation) -> bool {
-        if to.is_input() || from.is_output() {
-            return true;
-        }
-
-        // check to make sure it isn't duplicate
-        {
-            let n = self.get_neuron(to);
-            let n2 = n.read().unwrap();
-
-            for (loc, _) in &n2.inputs {
-                if from == *loc {
-                    return false;
-                }
-            }
-        }
-
-        let mut visited = HashSet::new();
-        self.dfs(from, to, &mut visited)
-    }
-
-    // TODO rayon implementation
-    fn dfs(
-        &self,
-        current: NeuronLocation,
-        target: NeuronLocation,
-        visited: &mut HashSet<NeuronLocation>,
-    ) -> bool {
-        if current == target {
-            return true;
-        }
-
-        visited.insert(current);
-
-        let n = self.get_neuron(current);
-        let nr = n.read().unwrap();
-
-        for &(input, _) in &nr.inputs {
-            if !visited.contains(&input) && self.dfs(input, target, visited) {
-                return true;
-            }
-        }
-
-        visited.remove(&current);
-        false
-    }
-
-    /// Gets a neuron pointer from a [`NeuronLocation`].
-    /// You shouldn't ever need to directly call this unless you are doing complex custom mutations.
-    pub fn get_neuron(&self, loc: NeuronLocation) -> Arc<RwLock<NeuronTopology>> {
-        match loc {
-            NeuronLocation::Input(i) => self.input_layer[i].clone(),
-            NeuronLocation::Hidden(i) => self.hidden_layers[i].clone(),
-            NeuronLocation::Output(i) => self.output_layer[i].clone(),
-        }
-    }
-
-    /// Gets a random neuron and its location.
-    pub fn rand_neuron(&self, rng: &mut impl Rng) -> (Arc<RwLock<NeuronTopology>>, NeuronLocation) {
-        match rng.gen_range(0..3) {
-            0 => {
-                let i = rng.gen_range(0..self.input_layer.len());
-                (self.input_layer[i].clone(), NeuronLocation::Input(i))
-            }
-            1 if !self.hidden_layers.is_empty() => {
-                let i = rng.gen_range(0..self.hidden_layers.len());
-                (self.hidden_layers[i].clone(), NeuronLocation::Hidden(i))
-            }
-            _ => {
-                let i = rng.gen_range(0..self.output_layer.len());
-                (self.output_layer[i].clone(), NeuronLocation::Output(i))
-            }
-        }
-    }
-
-    fn delete_neuron(&mut self, loc: NeuronLocation) -> NeuronTopology {
-        if !loc.is_hidden() {
-            panic!("Invalid neuron deletion");
-        }
-
-        let index = loc.unwrap();
-        let neuron = Arc::into_inner(self.hidden_layers.remove(index)).unwrap();
-
-        for n in &self.hidden_layers {
-            let mut nw = n.write().unwrap();
-
-            nw.inputs = nw
-                .inputs
-                .iter()
-                .filter_map(|&(input_loc, w)| {
-                    if !input_loc.is_hidden() {
-                        return Some((input_loc, w));
-                    }
-
-                    if input_loc.unwrap() == index {
-                        return None;
-                    }
-
-                    if input_loc.unwrap() > index {
-                        return Some((NeuronLocation::Hidden(input_loc.unwrap() - 1), w));
-                    }
-
-                    Some((input_loc, w))
-                })
-                .collect();
-        }
-
-        for n2 in &self.output_layer {
-            let mut nw = n2.write().unwrap();
-            nw.inputs = nw
-                .inputs
-                .iter()
-                .filter_map(|&(input_loc, w)| {
-                    if !input_loc.is_hidden() {
-                        return Some((input_loc, w));
-                    }
-
-                    if input_loc.unwrap() == index {
-                        return None;
-                    }
-
-                    if input_loc.unwrap() > index {
-                        return Some((NeuronLocation::Hidden(input_loc.unwrap() - 1), w));
-                    }
-
-                    Some((input_loc, w))
-                })
-                .collect();
-        }
-
-        neuron.into_inner().unwrap()
-    }
-}
-
-// need to do all this manually because Arcs are cringe
-impl<const I: usize, const O: usize> Clone for NeuralNetworkTopology<I, O> {
-    fn clone(&self) -> Self {
-        let input_layer = self
-            .input_layer
-            .iter()
-            .map(|n| Arc::new(RwLock::new(n.read().unwrap().clone())))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        let hidden_layers = self
-            .hidden_layers
-            .iter()
-            .map(|n| Arc::new(RwLock::new(n.read().unwrap().clone())))
-            .collect();
-
-        let output_layer = self
-            .output_layer
-            .iter()
-            .map(|n| Arc::new(RwLock::new(n.read().unwrap().clone())))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        Self {
-            input_layer,
-            hidden_layers,
-            output_layer,
-            mutation_rate: self.mutation_rate,
-            mutation_passes: self.mutation_passes,
-        }
-    }
-}
-
-impl<const I: usize, const O: usize> RandomlyMutable for NeuralNetworkTopology<I, O> {
-    fn mutate(&mut self, rate: f32, rng: &mut impl rand::Rng) {
-        for _ in 0..self.mutation_passes {
-            if rng.gen::<f32>() <= rate {
-                // split preexisting connection
-                let (mut n2, _) = self.rand_neuron(rng);
-
-                while n2.read().unwrap().inputs.is_empty() {
-                    (n2, _) = self.rand_neuron(rng);
-                }
-
-                let mut n2 = n2.write().unwrap();
-                let i = rng.gen_range(0..n2.inputs.len());
-                let (loc, w) = n2.inputs.remove(i);
-
-                let loc3 = NeuronLocation::Hidden(self.hidden_layers.len());
-
-                let n3 = NeuronTopology::new(vec![loc], ActivationScope::HIDDEN, rng);
-
-                self.hidden_layers.push(Arc::new(RwLock::new(n3)));
-
-                n2.inputs.insert(i, (loc3, w));
-            }
-
-            if rng.gen::<f32>() <= rate {
-                // add a connection
-                let (_, mut loc1) = self.rand_neuron(rng);
-                let (_, mut loc2) = self.rand_neuron(rng);
-
-                while loc1.is_output() || !self.add_connection(loc1, loc2, rng.gen::<f32>()) {
-                    (_, loc1) = self.rand_neuron(rng);
-                    (_, loc2) = self.rand_neuron(rng);
-                }
-            }
-
-            if rng.gen::<f32>() <= rate && !self.hidden_layers.is_empty() {
-                // remove a neuron
-                let (_, mut loc) = self.rand_neuron(rng);
-
-                while !loc.is_hidden() {
-                    (_, loc) = self.rand_neuron(rng);
-                }
-
-                // delete the neuron
-                self.delete_neuron(loc);
-            }
-
-            if rng.gen::<f32>() <= rate {
-                // mutate a connection
-                let (mut n, _) = self.rand_neuron(rng);
-
-                while n.read().unwrap().inputs.is_empty() {
-                    (n, _) = self.rand_neuron(rng);
-                }
-
-                let mut n = n.write().unwrap();
-                let i = rng.gen_range(0..n.inputs.len());
-                let (_, w) = &mut n.inputs[i];
-                *w += rng.gen_range(-1.0..1.0) * rate;
-            }
-
-            if rng.gen::<f32>() <= rate {
-                // mutate bias
-                let (n, _) = self.rand_neuron(rng);
-                let mut n = n.write().unwrap();
-
-                n.bias += rng.gen_range(-1.0..1.0) * rate;
-            }
-
-            if rng.gen::<f32>() <= rate && !self.hidden_layers.is_empty() {
-                // mutate activation function
-                let reg = ACTIVATION_REGISTRY.read().unwrap();
-                let activations = reg.activations_in_scope(ActivationScope::HIDDEN);
-
-                let (mut n, mut loc) = self.rand_neuron(rng);
-
-                while !loc.is_hidden() {
-                    (n, loc) = self.rand_neuron(rng);
-                }
-
-                let mut nw = n.write().unwrap();
-
-                // should probably not clone, but its not a huge efficiency issue anyways
-                nw.activation = activations[rng.gen_range(0..activations.len())].clone();
-            }
-        }
-    }
-}
-
-impl<const I: usize, const O: usize> DivisionReproduction for NeuralNetworkTopology<I, O> {
-    fn divide(&self, rng: &mut impl rand::Rng) -> Self {
-        let mut child = self.clone();
-        child.mutate(self.mutation_rate, rng);
-        child
-    }
-}
-
-impl<const I: usize, const O: usize> PartialEq for NeuralNetworkTopology<I, O> {
-    fn eq(&self, other: &Self) -> bool {
-        if self.mutation_rate != other.mutation_rate
-            || self.mutation_passes != other.mutation_passes
-        {
-            return false;
-        }
-
-        for i in 0..I {
-            if *self.input_layer[i].read().unwrap() != *other.input_layer[i].read().unwrap() {
-                return false;
-            }
-        }
-
-        for i in 0..self.hidden_layers.len().min(other.hidden_layers.len()) {
-            if *self.hidden_layers[i].read().unwrap() != *other.hidden_layers[i].read().unwrap() {
-                return false;
-            }
-        }
-
-        for i in 0..O {
-            if *self.output_layer[i].read().unwrap() != *other.output_layer[i].read().unwrap() {
-                return false;
-            }
-        }
-
-        true
-    }
-}
-
-#[cfg(feature = "serde")]
-impl<const I: usize, const O: usize> From<nnt_serde::NNTSerde<I, O>>
-    for NeuralNetworkTopology<I, O>
-{
-    fn from(value: nnt_serde::NNTSerde<I, O>) -> Self {
-        let input_layer = value
-            .input_layer
-            .into_iter()
-            .map(|n| Arc::new(RwLock::new(n)))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        let hidden_layers = value
-            .hidden_layers
-            .into_iter()
-            .map(|n| Arc::new(RwLock::new(n)))
-            .collect();
-
-        let output_layer = value
-            .output_layer
-            .into_iter()
-            .map(|n| Arc::new(RwLock::new(n)))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        NeuralNetworkTopology {
-            input_layer,
-            hidden_layers,
-            output_layer,
-            mutation_rate: value.mutation_rate,
-            mutation_passes: value.mutation_passes,
-        }
-    }
-}
-
-#[cfg(feature = "crossover")]
-impl<const I: usize, const O: usize> CrossoverReproduction for NeuralNetworkTopology<I, O> {
-    fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self {
-        let input_layer = self
-            .input_layer
-            .iter()
-            .map(|n| Arc::new(RwLock::new(n.read().unwrap().clone())))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        let mut hidden_layers =
-            Vec::with_capacity(self.hidden_layers.len().max(other.hidden_layers.len()));
-
-        for i in 0..hidden_layers.len() {
-            if rng.gen::<f32>() <= 0.5 {
-                if let Some(n) = self.hidden_layers.get(i) {
-                    let mut n = n.read().unwrap().clone();
-
-                    n.inputs
-                        .retain(|(l, _)| input_exists(*l, &input_layer, &hidden_layers));
-                    hidden_layers[i] = Arc::new(RwLock::new(n));
-
-                    continue;
-                }
-            }
-
-            let mut n = other.hidden_layers[i].read().unwrap().clone();
-
-            n.inputs
-                .retain(|(l, _)| input_exists(*l, &input_layer, &hidden_layers));
-            hidden_layers[i] = Arc::new(RwLock::new(n));
-        }
-
-        let mut output_layer: [Arc<RwLock<NeuronTopology>>; O] = self
-            .output_layer
-            .iter()
-            .map(|n| Arc::new(RwLock::new(n.read().unwrap().clone())))
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        for (i, n) in self.output_layer.iter().enumerate() {
-            if rng.gen::<f32>() <= 0.5 {
-                let mut n = n.read().unwrap().clone();
-
-                n.inputs
-                    .retain(|(l, _)| input_exists(*l, &input_layer, &hidden_layers));
-                output_layer[i] = Arc::new(RwLock::new(n));
-
-                continue;
-            }
-
-            let mut n = other.output_layer[i].read().unwrap().clone();
-
-            n.inputs
-                .retain(|(l, _)| input_exists(*l, &input_layer, &hidden_layers));
-            output_layer[i] = Arc::new(RwLock::new(n));
-        }
-
-        let mut child = Self {
-            input_layer,
-            hidden_layers,
-            output_layer,
-            mutation_rate: self.mutation_rate,
-            mutation_passes: self.mutation_passes,
-        };
-
-        child.mutate(self.mutation_rate, rng);
-
-        child
-    }
-}
-
-#[cfg(feature = "crossover")]
-fn input_exists<const I: usize>(
-    loc: NeuronLocation,
-    input: &[Arc<RwLock<NeuronTopology>>; I],
-    hidden: &[Arc<RwLock<NeuronTopology>>],
-) -> bool {
-    match loc {
-        NeuronLocation::Input(i) => i < input.len(),
-        NeuronLocation::Hidden(i) => i < hidden.len(),
-        NeuronLocation::Output(_) => false,
-    }
-}
-
-/// A stateless version of [`Neuron`][crate::Neuron].
-#[derive(PartialEq, Debug, Clone)]
-#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-pub struct NeuronTopology {
-    /// The input locations and weights.
-    pub inputs: Vec<(NeuronLocation, f32)>,
-
-    /// The neuron's bias.
-    pub bias: f32,
-
-    /// The neuron's activation function.
-    pub activation: ActivationFn,
-}
-
-impl NeuronTopology {
-    /// Creates a new neuron with the given input locations.
-    pub fn new(
-        inputs: Vec<NeuronLocation>,
-        current_scope: ActivationScope,
-        rng: &mut impl Rng,
-    ) -> Self {
-        let reg = ACTIVATION_REGISTRY.read().unwrap();
-        let activations = reg.activations_in_scope(current_scope);
-
-        Self::new_with_activations(inputs, activations, rng)
-    }
-
-    /// Takes a collection of activation functions and chooses a random one to use.
-    pub fn new_with_activations(
-        inputs: Vec<NeuronLocation>,
-        activations: impl IntoIterator<Item = ActivationFn>,
-        rng: &mut impl Rng,
-    ) -> Self {
-        let mut activations: Vec<_> = activations.into_iter().collect();
-
-        Self::new_with_activation(
-            inputs,
-            activations.remove(rng.gen_range(0..activations.len())),
-            rng,
-        )
-    }
-
-    /// Creates a neuron with the activation.
-    pub fn new_with_activation(
-        inputs: Vec<NeuronLocation>,
-        activation: ActivationFn,
-        rng: &mut impl Rng,
-    ) -> Self {
-        let inputs = inputs
-            .into_iter()
-            .map(|i| (i, rng.gen_range(-1.0..1.0)))
-            .collect();
-
-        Self {
-            inputs,
-            bias: rng.gen(),
-            activation,
-        }
-    }
-}
-
-/// A pseudo-pointer of sorts used to make structural conversions very fast and easy to write.
-#[derive(Hash, Clone, Copy, Debug, Eq, PartialEq)]
-#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-pub enum NeuronLocation {
-    /// Points to a neuron in the input layer at contained index.
-    Input(usize),
-
-    /// Points to a neuron in the hidden layer at contained index.
-    Hidden(usize),
-
-    /// Points to a neuron in the output layer at contained index.
-    Output(usize),
-}
-
-impl NeuronLocation {
-    /// Returns `true` if it points to the input layer. Otherwise, returns `false`.
-    pub fn is_input(&self) -> bool {
-        matches!(self, Self::Input(_))
-    }
-
-    /// Returns `true` if it points to the hidden layer. Otherwise, returns `false`.
-    pub fn is_hidden(&self) -> bool {
-        matches!(self, Self::Hidden(_))
-    }
-
-    /// Returns `true` if it points to the output layer. Otherwise, returns `false`.
-    pub fn is_output(&self) -> bool {
-        matches!(self, Self::Output(_))
-    }
-
-    /// Retrieves the index value, regardless of layer. Does not consume.
-    pub fn unwrap(&self) -> usize {
-        match self {
-            Self::Input(i) => *i,
-            Self::Hidden(i) => *i,
-            Self::Output(i) => *i,
-        }
-    }
-}
diff --git a/src/topology/nnt_serde.rs b/src/topology/nnt_serde.rs
deleted file mode 100644
index 14f392c..0000000
--- a/src/topology/nnt_serde.rs
+++ /dev/null
@@ -1,71 +0,0 @@
-use super::*;
-use serde::{Deserialize, Serialize};
-use serde_big_array::BigArray;
-
-/// A serializable wrapper for [`NeuronTopology`]. See [`NNTSerde::from`] for conversion.
-#[derive(Serialize, Deserialize)]
-pub struct NNTSerde<const I: usize, const O: usize> {
-    #[serde(with = "BigArray")]
-    pub(crate) input_layer: [NeuronTopology; I],
-
-    pub(crate) hidden_layers: Vec<NeuronTopology>,
-
-    #[serde(with = "BigArray")]
-    pub(crate) output_layer: [NeuronTopology; O],
-
-    pub(crate) mutation_rate: f32,
-    pub(crate) mutation_passes: usize,
-}
-
-impl<const I: usize, const O: usize> From<&NeuralNetworkTopology<I, O>> for NNTSerde<I, O> {
-    fn from(value: &NeuralNetworkTopology<I, O>) -> Self {
-        let input_layer = value
-            .input_layer
-            .iter()
-            .map(|n| n.read().unwrap().clone())
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        let hidden_layers = value
-            .hidden_layers
-            .iter()
-            .map(|n| n.read().unwrap().clone())
-            .collect();
-
-        let output_layer = value
-            .output_layer
-            .iter()
-            .map(|n| n.read().unwrap().clone())
-            .collect::<Vec<_>>()
-            .try_into()
-            .unwrap();
-
-        Self {
-            input_layer,
-            hidden_layers,
-            output_layer,
-            mutation_rate: value.mutation_rate,
-            mutation_passes: value.mutation_passes,
-        }
-    }
-}
-
-#[cfg(test)]
-#[test]
-fn serde() {
-    let mut rng = rand::thread_rng();
-    let nnt = NeuralNetworkTopology::<10, 10>::new(0.1, 3, &mut rng);
-    let nnts = NNTSerde::from(&nnt);
-
-    let encoded = bincode::serialize(&nnts).unwrap();
-
-    if let Some(_) = option_env!("TEST_CREATEFILE") {
-        std::fs::write("serde-test.nn", &encoded).unwrap();
-    }
-
-    let decoded: NNTSerde<10, 10> = bincode::deserialize(&encoded).unwrap();
-    let nnt2: NeuralNetworkTopology<10, 10> = decoded.into();
-
-    dbg!(nnt, nnt2);
-}
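// [Editor's sketch — not part of the patch] The round-trip the test above
// exercised, written as library usage; `bincode` was a dev-dependency at this
// point in the series, and the `<10, 10>` sizes are just for illustration.
fn snapshot(nnt: &NeuralNetworkTopology<10, 10>) -> Vec<u8> {
    let serializable = NNTSerde::from(nnt);
    bincode::serialize(&serializable).expect("topology should serialize")
}

fn restore(bytes: &[u8]) -> NeuralNetworkTopology<10, 10> {
    let decoded: NNTSerde<10, 10> = bincode::deserialize(bytes).expect("valid snapshot");
    decoded.into()
}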
From 31b6e7dd9df87824b18c2c9c850a59225d0be012 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Tue, 4 Feb 2025 14:03:37 +0000
Subject: [PATCH 26/60] delete topology and some examples

---
 examples/custom_activation.rs |  84 ----------------
 examples/plot.rs              | 178 ----------------------------------
 src/topology/mod.rs           |   0
 3 files changed, 262 deletions(-)
 delete mode 100644 examples/custom_activation.rs
 delete mode 100644 examples/plot.rs
 delete mode 100644 src/topology/mod.rs

diff --git a/examples/custom_activation.rs b/examples/custom_activation.rs
deleted file mode 100644
index 7b37c02..0000000
--- a/examples/custom_activation.rs
+++ /dev/null
@@ -1,84 +0,0 @@
-//! An example implementation of a custom activation function.
-
-use neat::*;
-use rand::prelude::*;
-
-#[derive(DivisionReproduction, RandomlyMutable, Clone)]
-struct AgentDNA {
-    network: NeuralNetworkTopology<2, 2>,
-}
-
-impl Prunable for AgentDNA {}
-
-impl GenerateRandom for AgentDNA {
-    fn gen_random(rng: &mut impl Rng) -> Self {
-        Self {
-            network: NeuralNetworkTopology::new(0.01, 3, rng),
-        }
-    }
-}
-
-fn fitness(g: &AgentDNA) -> f32 {
-    let network: NeuralNetwork<2, 2> = NeuralNetwork::from(&g.network);
-    let mut fitness = 0.;
-    let mut rng = rand::thread_rng();
-
-    for _ in 0..50 {
-        let n = rng.gen::<f32>();
-        let n2 = rng.gen::<f32>();
-
-        let expected = if (n + n2) / 2. >= 0.5 { 0 } else { 1 };
-
-        let result = network.predict([n, n2]);
-        network.flush_state();
-
-        // partial_cmp chance of returning None in this smh
-        let result = result.iter().max_index();
-
-        if result == expected {
-            fitness += 1.;
-        } else {
-            fitness -= 1.;
-        }
-    }
-
-    fitness
-}
-
-#[cfg(feature = "serde")]
-fn serde_nextgen(rewards: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
-    let max = rewards
-        .iter()
-        .max_by(|(_, ra), (_, rb)| ra.total_cmp(rb))
-        .unwrap();
-
-    let ser = NNTSerde::from(&max.0.network);
-    let data = serde_json::to_string_pretty(&ser).unwrap();
-    std::fs::write("best-agent.json", data).expect("Failed to write to file");
-
-    division_pruning_nextgen(rewards)
-}
-
-fn main() {
-    let sin_activation = activation_fn!(f32::sin);
-    register_activation(sin_activation);
-
-    #[cfg(not(feature = "rayon"))]
-    let mut rng = rand::thread_rng();
-
-    let mut sim = GeneticSim::new(
-        #[cfg(not(feature = "rayon"))]
-        Vec::gen_random(&mut rng, 100),
-        #[cfg(feature = "rayon")]
-        Vec::gen_random(100),
-        fitness,
-        #[cfg(not(feature = "serde"))]
-        division_pruning_nextgen,
-        #[cfg(feature = "serde")]
-        serde_nextgen,
-    );
-
-    for _ in 0..200 {
-        sim.next_generation();
-    }
-}
diff --git a/examples/plot.rs b/examples/plot.rs
deleted file mode 100644
index 34fb391..0000000
--- a/examples/plot.rs
+++ /dev/null
@@ -1,178 +0,0 @@
-use std::{
-    error::Error,
-    sync::{Arc, Mutex},
-};
-
-use indicatif::{ProgressBar, ProgressStyle};
-use neat::*;
-use plotters::prelude::*;
-use rand::prelude::*;
-
-#[derive(RandomlyMutable, DivisionReproduction, Clone)]
-struct AgentDNA {
-    network: NeuralNetworkTopology<2, 1>,
-}
-
-impl Prunable for AgentDNA {}
-
-impl GenerateRandom for AgentDNA {
-    fn gen_random(rng: &mut impl Rng) -> Self {
-        Self {
-            network: NeuralNetworkTopology::new(0.01, 3, rng),
-        }
-    }
-}
-
-fn fitness(g: &AgentDNA) -> f32 {
-    let network = NeuralNetwork::from(&g.network);
-    let mut fitness = 0.;
-    let mut rng = rand::thread_rng();
-
-    for _ in 0..100 {
-        let n = rng.gen::<f32>() * 10000.;
-        let base = rng.gen::<f32>() * 10.;
-        let expected = n.log(base);
-
-        let [answer] = network.predict([n, base]);
-        network.flush_state();
-
-        fitness += 5. / (answer - expected).abs();
-    }
-
-    fitness
-}
-
-struct PlottingNG<F: NextgenFn<AgentDNA>> {
-    performance_stats: Arc<Mutex<Vec<PerformanceStats>>>,
-    actual_ng: F,
-}
-
-impl<F: NextgenFn<AgentDNA>> NextgenFn<AgentDNA> for PlottingNG<F> {
-    fn next_gen(&self, mut fitness: Vec<(AgentDNA, f32)>) -> Vec<AgentDNA> {
-        // it's a bit slower because of sorting twice but I don't want to rewrite the nextgen.
-        fitness.sort_by(|(_, fa), (_, fb)| fa.partial_cmp(fb).unwrap());
-
-        let l = fitness.len();
-
-        let high = fitness[l - 1].1;
-
-        let median = fitness[l / 2].1;
-
-        let low = fitness[0].1;
-
-        let mut ps = self.performance_stats.lock().unwrap();
-        ps.push(PerformanceStats { high, median, low });
-
-        self.actual_ng.next_gen(fitness)
-    }
-}
-
-struct PerformanceStats {
-    high: f32,
-    median: f32,
-    low: f32,
-}
-
-const OUTPUT_FILE_NAME: &'static str = "fitness-plot.svg";
-const GENS: usize = 1000;
-
-fn main() -> Result<(), Box<dyn Error>> {
-    #[cfg(not(feature = "rayon"))]
-    let mut rng = rand::thread_rng();
-
-    let performance_stats = Arc::new(Mutex::new(Vec::with_capacity(GENS)));
-    let ng = PlottingNG {
-        performance_stats: performance_stats.clone(),
-        actual_ng: division_pruning_nextgen,
-    };
-
-    let mut sim = GeneticSim::new(
-        #[cfg(not(feature = "rayon"))]
-        Vec::gen_random(&mut rng, 100),
-        #[cfg(feature = "rayon")]
-        Vec::gen_random(100),
-        fitness,
-        ng,
-    );
-
-    let pb = ProgressBar::new(GENS as u64)
-        .with_style(
-            ProgressStyle::with_template(
-                "[{elapsed_precise}] {bar:40.cyan/blue} | {msg} {pos}/{len}",
-            )
-            .unwrap(),
-        )
-        .with_message("gen");
-
-    println!("Training...");
-
-    for _ in 0..GENS {
-        sim.next_generation();
-
-        pb.inc(1);
-    }
-
-    pb.finish();
-
-    // prevent `Arc::into_inner` from failing
-    drop(sim);
-
-    println!("Training complete, collecting data and building chart...");
-
-    let root = SVGBackend::new(OUTPUT_FILE_NAME, (640, 480)).into_drawing_area();
-    root.fill(&WHITE)?;
-
-    let mut chart = ChartBuilder::on(&root)
-        .caption(
-            "agent fitness values per generation",
-            ("sans-serif", 50).into_font(),
-        )
-        .margin(5)
-        .x_label_area_size(30)
-        .y_label_area_size(30)
-        .build_cartesian_2d(0usize..GENS, 0f32..1000.0)?;
-
-    chart.configure_mesh().draw()?;
-
-    let data: Vec<_> = Arc::into_inner(performance_stats)
-        .unwrap()
-        .into_inner()
-        .unwrap()
-        .into_iter()
-        .enumerate()
-        .collect();
-
-    let highs = data
-        .iter()
-        .map(|(i, PerformanceStats { high, .. })| (*i, *high));
-
-    let medians = data
-        .iter()
-        .map(|(i, PerformanceStats { median, .. })| (*i, *median));
-
-    let lows = data
-        .iter()
-        .map(|(i, PerformanceStats { low, .. })| (*i, *low));
-
-    chart
-        .draw_series(LineSeries::new(highs, &GREEN))?
-        .label("high");
-
-    chart
-        .draw_series(LineSeries::new(medians, &YELLOW))?
-        .label("median");
-
-    chart.draw_series(LineSeries::new(lows, &RED))?.label("low");
-
-    chart
-        .configure_series_labels()
-        .background_style(&WHITE.mix(0.8))
-        .border_style(&BLACK)
-        .draw()?;
-
-    root.present()?;
-
-    println!("Complete");
-
-    Ok(())
-}
diff --git a/src/topology/mod.rs b/src/topology/mod.rs
deleted file mode 100644
index e69de29..0000000

From 0a958f95a75bdec2d8a296747dd29586245ae198 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Tue, 4 Feb 2025 14:08:31 +0000
Subject: [PATCH 27/60] solve clippy errors

---
 src/neuralnet.rs | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

diff --git a/src/neuralnet.rs b/src/neuralnet.rs
index 6f5f25d..cce0d61 100644
--- a/src/neuralnet.rs
+++ b/src/neuralnet.rs
@@ -144,7 +144,6 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
             return;
         }
 
-        let loc = loc.as_ref();
         while !cache.is_ready(loc) {
             // essentially spinlocks until the dependency tasks are complete,
             // while letting this thread do some work on random tasks.
@@ -192,7 +191,9 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     }
 
     /// Adds a connection but does not check for cyclic linkages.
-    /// Marked as unsafe because it could cause a hang/livelock when predicting due to cyclic linkage.
+    ///
+    /// # Safety
+    /// This is marked as unsafe because it could cause a hang/livelock when predicting due to cyclic linkage.
     /// There is no actual UB or unsafe code associated with it.
     pub unsafe fn add_connection_raw(&mut self, connection: Connection, weight: f32) {
         let a = self.get_neuron_mut(connection.from);
@@ -280,7 +281,7 @@
         let b = self.get_neuron_mut(connection.to);
         b.input_count -= 1;
 
-        if b.input_count <= 0 {
+        if b.input_count == 0 {
             self.remove_neuron(connection.to);
             return true;
         }
@@ -420,6 +421,7 @@ impl<const I: usize, const O: usize> DivisionReproduction for NeuralNetwork<I, O> {
     }
 }
 
+#[cfg(feature = "crossover")]
 impl<const I: usize, const O: usize> CrossoverReproduction for NeuralNetwork<I, O> {
     fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self {
         let mut output_layer = self.output_layer.clone();
@@ -591,7 +593,10 @@ impl Neuron {
     }
 
     /// Tries to remove a connection from the neuron and returns the weight if it was found.
-    /// Marked as unsafe because it will not update the destination's [`input_count`][Neuron::input_count].
+    ///
+    /// # Safety
+    /// This is marked as unsafe because it will not update the destination's [`input_count`][Neuron::input_count].
+    /// Similar to [`add_connection_raw`][NeuralNetwork::add_connection_raw], this does not mean UB or anything.
     pub unsafe fn remove_connection(&mut self, output: impl AsRef<NeuronLocation>) -> Option<f32> {
         let loc = *output.as_ref();
         let mut i = 0;
@@ -841,8 +846,7 @@ impl<const I: usize, const O: usize> From<&NeuralNetwork<I, O>> for NeuralNetCac
     fn from(net: &NeuralNetwork<I, O>) -> Self {
         let input_layer: Vec<_> = net.input_layer.par_iter().map(|n| n.into()).collect();
         let input_layer = input_layer.try_into().unwrap();
 
-        let hidden_layers: Vec<_> = net.hidden_layers.par_iter().map(|n| n.into()).collect();
-        let hidden_layers = hidden_layers.try_into().unwrap();
+        let hidden_layers = net.hidden_layers.par_iter().map(|n| n.into()).collect();
 
         let output_layer: Vec<_> = net.output_layer.par_iter().map(|n| n.into()).collect();
         let output_layer = output_layer.try_into().unwrap();

From 93481f251d642b11306368bd77d94f8a0fed1124 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Wed, 14 Jan 2026 19:06:14 +0000
Subject: [PATCH 28/60] clear mutation and tests code for new rewrite

---
 Cargo.lock        |  84 +++++++++------
 Cargo.toml        |   8 +-
 src/activation.rs |   2 +-
 src/neuralnet.rs  | 260 ++++++++++------------------------------------
 src/tests.rs      | 180 +--------------------------------
 5 files changed, 112 insertions(+), 422 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 0a9a53b..fdfc875 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10,9 +10,9 @@ checksum = "628d228f918ac3b82fe590352cc719d30664a0c13ca3a60266fe02c7132d480a"
 
 [[package]]
 name = "bitflags"
-version = "2.8.0"
+version = "2.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36"
+checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
 
 [[package]]
 name = "cfg-if"
@@ -53,9 +53,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
 [[package]]
 name = "genetic-rs"
-version = "0.5.4"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a68bb62a836f6ea3261d77cfec4012316e206f53e7d0eab519f5f3630e86001f"
+checksum = "c5047d738fba12c89533a4321c98a5ebb3ef180364244fd712d3f28893c55fcb"
 dependencies = [
  "genetic-rs-common",
  "genetic-rs-macros",
 ]
 
 [[package]]
 name = "genetic-rs-common"
-version = "0.5.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be7aaffd4e4dc82d11819d40794f089c37d02595a401f229ed2877d1a4c401d" +checksum = "47b143ce3476694e634396a5583a3863dab8055c05465b1be95398fcb4eca6cc" dependencies = [ "rand", "rayon", - "replace_with", ] [[package]] name = "genetic-rs-macros" -version = "0.5.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e73b1f36ea3e799232e1a3141a2765fa6ee9ed7bb3fed96ccfb3bf272d1832e" +checksum = "2b7f7b4257f21a5904db4fda2160ad129762dfcf1ff997c2ef21387dd8565850" dependencies = [ "genetic-rs-common", "proc-macro2", @@ -86,13 +85,14 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.12" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "libc", - "wasi", + "r-efi", + "wasip2", ] [[package]] @@ -158,22 +158,27 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "rand" -version = "0.8.5" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ - "libc", "rand_chacha", "rand_core", ] [[package]] name = "rand_chacha" -version = "0.3.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", "rand_core", @@ -181,18 +186,18 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" dependencies = [ "getrandom", ] [[package]] name = "rayon" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -200,9 +205,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -222,10 +227,11 @@ checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "serde" -version = "1.0.217" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] @@ -238,11 
+244,20 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -279,7 +294,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" diff --git a/Cargo.toml b/Cargo.toml index 4b26e0f..58c4a5f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,12 +24,12 @@ serde = ["dep:serde", "dep:serde-big-array"] [dependencies] atomic_float = "1.1.0" -bitflags = "2.8.0" -genetic-rs = { version = "0.5.4", features = ["rayon", "derive"] } +bitflags = "2.10.0" +genetic-rs = { version = "1.0.0", features = ["rayon", "derive"] } lazy_static = "1.5.0" -rayon = "1.10.0" +rayon = "1.11.0" replace_with = "0.1.7" -serde = { version = "1.0.217", features = ["derive"], optional = true } +serde = { version = "1.0.228", features = ["derive"], optional = true } serde-big-array = { version = "0.5.1", optional = true } [dev-dependencies] diff --git a/src/activation.rs b/src/activation.rs index af9f74e..f39e1d5 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -72,7 +72,7 @@ impl ActivationRegistry { } } - /// Gets a Vec of all the activation functions registered. Unless you need an owned value, use [fns][ActivationRegistry::fns].values() instead. + /// Gets a Vec of all the activation functions registered. Use [fns][ActivationRegistry::fns] if you only need an iterator. 
pub fn activations(&self) -> Vec { self.fns.values().cloned().collect() } diff --git a/src/neuralnet.rs b/src/neuralnet.rs index cce0d61..a51a81f 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -1,8 +1,7 @@ use std::{ - collections::HashSet, + collections::{HashMap, HashSet}, sync::{ - atomic::{AtomicBool, AtomicUsize, Ordering}, - Arc, + Arc, atomic::{AtomicBool, AtomicUsize, Ordering} }, }; @@ -80,7 +79,7 @@ impl NeuralNetwork { for _ in 0..O { output_layer.push(Neuron::new_with_activation( - vec![], + HashMap::new(), activation_fn!(sigmoid), rng, )); @@ -90,17 +89,17 @@ impl NeuralNetwork { for _ in 0..I { let mut already_chosen = Vec::new(); - let outputs = (0..rng.gen_range(1..=O)) + let outputs = (0..rng.random_range(1..=O)) .map(|_| { - let mut j = rng.gen_range(0..O); + let mut j = rng.random_range(0..O); while already_chosen.contains(&j) { - j = rng.gen_range(0..O); + j = rng.random_range(0..O); } output_layer[j].input_count += 1; already_chosen.push(j); - (NeuronLocation::Output(j), rng.gen()) + (NeuronLocation::Output(j), rng.random()) }) .collect(); @@ -179,28 +178,13 @@ impl NeuralNetwork { /// Split a [`Connection`] into two of the same weight, joined by a new [`Neuron`] in the hidden layer(s). pub fn split_connection(&mut self, connection: Connection, rng: &mut impl Rng) { - let newloc = NeuronLocation::Hidden(self.hidden_layers.len()); - - let a = self.get_neuron_mut(connection.from); - let weight = unsafe { a.remove_connection(connection.to) }.unwrap(); - - a.outputs.push((newloc, weight)); - - let n = Neuron::new(vec![(connection.to, weight)], NeuronScope::HIDDEN, rng); - self.hidden_layers.push(n); + todo!() } /// Adds a connection but does not check for cyclic linkages. - /// - /// # Safety - /// This is marked as unsafe because it could cause a hang/livelock when predicting due to cyclic linkage. - /// There is no actual UB or unsafe code associated with it. - pub unsafe fn add_connection_raw(&mut self, connection: Connection, weight: f32) { + pub fn add_connection_unchecked(&mut self, connection: Connection, weight: f32) { let a = self.get_neuron_mut(connection.from); - a.outputs.push((connection.to, weight)); - - // let b = self.get_neuron_mut(connection.to); - // b.inputs.insert(connection.from); + a.outputs.insert(connection.to, weight); } /// Returns false if the connection is cyclic. @@ -211,6 +195,7 @@ impl NeuralNetwork { } // TODO maybe parallelize + // TODO properly test this for bugs fn dfs(&self, visited: &mut HashSet, current: NeuronLocation) -> bool { if !visited.insert(current) { return false; @@ -228,15 +213,7 @@ impl NeuralNetwork { /// Safe, checked add connection method. Returns false if it aborted connecting due to cyclic linkage. pub fn add_connection(&mut self, connection: Connection, weight: f32) -> bool { - if !self.is_connection_safe(connection) { - return false; - } - - unsafe { - self.add_connection_raw(connection, weight); - } - - true + todo!() } /// Mutates a connection's weight. @@ -248,10 +225,10 @@ impl NeuralNetwork { /// Get a random valid location within the network. 
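+    ///
+    /// Note: this panics when the hidden layer is empty, since
+    /// `random_range(0..0)` is an empty range.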
pub fn random_location(&self, rng: &mut impl Rng) -> NeuronLocation { - match rng.gen_range(0..3) { - 0 => NeuronLocation::Input(rng.gen_range(0..self.input_layer.len())), - 1 => NeuronLocation::Hidden(rng.gen_range(0..self.hidden_layers.len())), - 2 => NeuronLocation::Output(rng.gen_range(0..self.output_layer.len())), + match rng.random_range(0..3) { + 0 => NeuronLocation::Input(rng.random_range(0..self.input_layer.len())), + 1 => NeuronLocation::Hidden(rng.random_range(0..self.hidden_layers.len())), + 2 => NeuronLocation::Output(rng.random_range(0..self.output_layer.len())), _ => unreachable!(), } } @@ -275,33 +252,15 @@ impl NeuralNetwork { /// Remove a connection and any hanging neurons caused by the deletion. /// Returns whether there was a hanging neuron. pub fn remove_connection(&mut self, connection: Connection) -> bool { - let a = self.get_neuron_mut(connection.from); - unsafe { a.remove_connection(connection.to) }.unwrap(); - - let b = self.get_neuron_mut(connection.to); - b.input_count -= 1; - - if b.input_count == 0 { - self.remove_neuron(connection.to); - return true; - } - - false + todo!() } /// Remove a neuron and downshift all connection indexes to compensate for it. pub fn remove_neuron(&mut self, loc: impl AsRef) { - let loc = loc.as_ref(); - if !loc.is_hidden() { - panic!("Can only remove neurons from hidden layer"); - } - - unsafe { - self.downshift_connections(loc.unwrap()); - } + todo!() } - unsafe fn downshift_connections(&mut self, i: usize) { + fn downshift_connections(&mut self, i: usize) { self.input_layer .par_iter_mut() .for_each(|n| n.downshift_outputs(i)); @@ -323,7 +282,7 @@ impl NeuralNetwork { } } - unsafe fn clear_input_counts(&mut self) { + fn clear_input_counts(&mut self) { // not sure whether all this parallelism is necessary or if it will just generate overhead // rayon::scope(|s| { // s.spawn(|_| self.input_layer.par_iter_mut().for_each(|n| n.input_count = 0)); @@ -341,30 +300,13 @@ impl NeuralNetwork { .par_iter_mut() .for_each(|n| n.input_count = 0); } - - /// Recalculates the [`input_count`][`Neuron::input_count`] field for all neurons in the network. 
- pub fn recalculate_input_counts(&mut self) { - unsafe { self.clear_input_counts() }; - - for i in 0..I { - for j in 0..self.input_layer[i].outputs.len() { - let (loc, _) = self.input_layer[i].outputs[j]; - self.get_neuron_mut(loc).input_count += 1; - } - } - - for i in 0..self.hidden_layers.len() { - for j in 0..self.hidden_layers[i].outputs.len() { - let (loc, _) = self.hidden_layers[i].outputs[j]; - self.get_neuron_mut(loc).input_count += 1; - } - } - } } impl RandomlyMutable for NeuralNetwork { fn mutate(&mut self, rate: f32, rng: &mut impl Rng) { - if rng.gen::() <= rate { + // TODO maybe allow specifying probability + // for each type of mutation + if rng.random::() <= rate { // split connection let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); let n = self.get_neuron(from); @@ -373,9 +315,9 @@ impl RandomlyMutable for NeuralNetwork { self.split_connection(Connection { from, to }, rng); } - if rng.gen::() <= rate { + if rng.random::() <= rate { // add connection - let weight = rng.gen::(); + let weight = rng.random::(); let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); let to = self.random_location_in_scope(rng, !NeuronScope::INPUT); @@ -388,7 +330,7 @@ impl RandomlyMutable for NeuralNetwork { } } - if rng.gen::() <= rate { + if rng.random::() <= rate { // remove connection let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); @@ -399,22 +341,21 @@ impl RandomlyMutable for NeuralNetwork { } self.map_weights(|w| { - // TODO maybe `Send`able rng. - let mut rng = rand::thread_rng(); + let mut rng = rand::rng(); - if rng.gen::() <= rate { - *w += rng.gen_range(-rate..rate); + if rng.random::() <= rate { + *w += rng.random_range(-rate..rate); } }); } } -impl DivisionReproduction for NeuralNetwork { - fn divide(&self, rng: &mut impl Rng) -> Self { +impl Mitosis for NeuralNetwork { + fn divide(&self, rate: f32, rng: &mut impl prelude::Rng) -> Self { let mut child = self.clone(); - + for _ in 0..self.mutation_settings.mutation_passes { - child.mutate(child.mutation_settings.mutation_rate, rng); + child.mutate(rate, rng); } child @@ -422,69 +363,9 @@ impl DivisionReproduction for NeuralNetwork CrossoverReproduction for NeuralNetwork { - fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self { - let mut output_layer = self.output_layer.clone(); - - for (i, n) in output_layer.iter_mut().enumerate() { - if rng.gen::() >= 0.5 { - *n = other.output_layer[i].clone(); - } - } - - let hidden_len = self.hidden_layers.len().max(other.hidden_layers.len()); - let mut hidden_layers = Vec::with_capacity(hidden_len); - - for i in 0..hidden_len { - if rng.gen::() >= 0.5 { - if let Some(n) = self.hidden_layers.get(i) { - let mut n = n.clone(); - n.prune_invalid_outputs(hidden_len, O); - - hidden_layers[i] = n; - - continue; - } - } - - let mut n = other.hidden_layers[i].clone(); - n.prune_invalid_outputs(hidden_len, O); - - hidden_layers[i] = n; - } - - let mut input_layer = self.input_layer.clone(); - - for (i, n) in input_layer.iter_mut().enumerate() { - if rng.gen::() >= 0.5 { - *n = other.input_layer[i].clone(); - } - n.prune_invalid_outputs(hidden_len, O); - } - - // crossover mutation settings just in case. 
-        let mutation_settings = if rng.gen::<f32>() >= 0.5 {
-            self.mutation_settings.clone()
-        } else {
-            other.mutation_settings.clone()
-        };
-
-        let mut child = Self {
-            input_layer,
-            hidden_layers,
-            output_layer,
-            mutation_settings,
-        };
-
-        // TODO maybe find a way to do this while doing crossover stuff instead of recalculating everything.
-        // would be annoying to implement though.
-        child.recalculate_input_counts();
-
-        for _ in 0..child.mutation_settings.mutation_passes {
-            child.mutate(child.mutation_settings.mutation_rate, rng);
-        }
-
-        child
+impl<const I: usize, const O: usize> Crossover for NeuralNetwork<I, O> {
+    fn crossover(&self, other: &Self, rate: f32, rng: &mut impl prelude::Rng) -> Self {
+        todo!()
     }
 }
 
@@ -516,7 +397,7 @@ pub struct Neuron {
     pub input_count: usize,
 
     /// The connections and weights to other neurons.
-    pub outputs: Vec<(NeuronLocation, f32)>,
+    pub outputs: HashMap<NeuronLocation, f32>,
 
     /// The initial value of the neuron.
     pub bias: f32,
@@ -528,14 +409,14 @@ impl Neuron {
     /// Creates a new neuron with a specified activation function and outputs.
     pub fn new_with_activation(
-        outputs: Vec<(NeuronLocation, f32)>,
+        outputs: HashMap<NeuronLocation, f32>,
         activation_fn: ActivationFn,
         rng: &mut impl Rng,
     ) -> Self {
         Self {
             input_count: 0,
             outputs,
-            bias: rng.gen(),
+            bias: rng.random(),
             activation_fn,
         }
     }
@@ -543,12 +424,13 @@ impl Neuron {
     /// Creates a new neuron with the given output locations.
     /// Chooses a random activation function within the specified scope.
     pub fn new(
-        outputs: Vec<(NeuronLocation, f32)>,
+        outputs: HashMap<NeuronLocation, f32>,
         current_scope: NeuronScope,
         rng: &mut impl Rng,
     ) -> Self {
         let reg = ACTIVATION_REGISTRY.read().unwrap();
         let activations = reg.activations_in_scope(current_scope);
+        drop(reg);
 
         Self::new_with_activations(outputs, activations, rng)
     }
@@ -556,7 +438,7 @@ impl Neuron {
     /// Creates a new neuron with the given outputs.
     /// Takes a collection of activation functions and chooses a random one from them to use.
     pub fn new_with_activations(
-        outputs: Vec<(NeuronLocation, f32)>,
+        outputs: HashMap<NeuronLocation, f32>,
         activations: impl IntoIterator<Item = ActivationFn>,
         rng: &mut impl Rng,
     ) -> Self {
@@ -570,7 +452,7 @@ impl Neuron {
 
         Self::new_with_activation(
             outputs,
-            activations.remove(rng.gen_range(0..activations.len())),
+            activations.remove(rng.random_range(0..activations.len())),
             rng,
         )
     }
@@ -580,56 +462,16 @@ impl Neuron {
         self.activation_fn.func.activate(v)
     }
 
-    /// Get the weight of the provided output location. Returns `None` if not found.
-    pub fn get_weight(&self, output: impl AsRef<NeuronLocation>) -> Option<f32> {
-        let loc = *output.as_ref();
-        for out in &self.outputs {
-            if out.0 == loc {
-                return Some(out.1);
-            }
-        }
-
-        None
-    }
-
-    /// Tries to remove a connection from the neuron and returns the weight if it was found.
-    ///
-    /// # Safety
-    /// This is marked as unsafe because it will not update the destination's [`input_count`][Neuron::input_count].
-    /// Similar to [`add_connection_raw`][NeuralNetwork::add_connection_raw], this does not mean UB or anything.
-    pub unsafe fn remove_connection(&mut self, output: impl AsRef<NeuronLocation>) -> Option<f32> {
-        let loc = *output.as_ref();
-        let mut i = 0;
-
-        while i < self.outputs.len() {
-            if self.outputs[i].0 == loc {
-                return Some(self.outputs.remove(i).1);
-            }
-            i += 1;
-        }
-
-        None
-    }
-
     /// Randomly mutates the specified weight with the rate.
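+    /// Returns the updated weight, or `None` when `output` is not among this
+    /// neuron's outbound connections.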
pub fn mutate_weight( &mut self, - output: impl AsRef, + output: NeuronLocation, rate: f32, rng: &mut impl Rng, ) -> Option { - let loc = *output.as_ref(); - let mut i = 0; - - while i < self.outputs.len() { - let o = &mut self.outputs[i]; - if o.0 == loc { - o.1 += rng.gen_range(-rate..rate); - - return Some(o.1); - } - - i += 1; + if let Some(mut w) = self.outputs.get_mut(&output) { + *w += rng.random_range(-rate..rate); + return Some(*w); } None @@ -637,11 +479,13 @@ impl Neuron { /// Get a random output location and weight. pub fn random_output(&self, rng: &mut impl Rng) -> (NeuronLocation, f32) { - self.outputs[rng.gen_range(0..self.outputs.len())] + // will panic if outputs is empty + let i = rng.random_range(0..self.outputs.len()); + let x = self.outputs.iter().skip(i).next().unwrap(); + (*x.0, *x.1) } pub(crate) fn downshift_outputs(&mut self, i: usize) { - // TODO par_iter_mut instead of replace replace_with_or_abort(&mut self.outputs, |o| { o.into_par_iter() .map(|(loc, w)| match loc { @@ -655,7 +499,7 @@ impl Neuron { /// Removes any outputs pointing to a nonexistent neuron. pub fn prune_invalid_outputs(&mut self, hidden_len: usize, output_len: usize) { self.outputs - .retain(|(loc, _)| output_exists(*loc, hidden_len, output_len)); + .retain(|loc, _| output_exists(*loc, hidden_len, output_len)); } } diff --git a/src/tests.rs b/src/tests.rs index 825cdee..0ffdd02 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,179 +1 @@ -use crate::*; -use rand::prelude::*; - -// no support for tuple structs derive in genetic-rs yet :( -#[derive(Debug, Clone, PartialEq)] -struct Agent(NeuralNetwork<4, 1>); - -impl Prunable for Agent {} - -impl RandomlyMutable for Agent { - fn mutate(&mut self, rate: f32, rng: &mut impl Rng) { - self.0.mutate(rate, rng); - } -} - -impl DivisionReproduction for Agent { - fn divide(&self, rng: &mut impl rand::Rng) -> Self { - Self(self.0.divide(rng)) - } -} - -impl CrossoverReproduction for Agent { - fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self { - Self(self.0.crossover(&other.0, rng)) - } -} - -struct GuessTheNumber(f32); - -impl GuessTheNumber { - fn new(rng: &mut impl Rng) -> Self { - Self(rng.gen()) - } - - fn guess(&self, n: f32) -> Option { - if n > self.0 + 1.0e-5 { - return Some(1.); - } - - if n < self.0 - 1.0e-5 { - return Some(-1.); - } - - // guess was correct (or at least within margin of error). 
- None - } -} - -fn fitness(agent: &Agent) -> f32 { - let mut rng = rand::thread_rng(); - - let mut fitness = 0.; - - // 10 games for consistency - for _ in 0..10 { - let game = GuessTheNumber::new(&mut rng); - - let mut last_guess = 0.; - let mut last_result = 0.; - - let mut last_guess_2 = 0.; - let mut last_result_2 = 0.; - - let mut steps = 0; - loop { - if steps >= 20 { - // took too many guesses - fitness -= 50.; - break; - } - - let [cur_guess] = - agent - .0 - .predict([last_guess, last_result, last_guess_2, last_result_2]); - - let cur_result = game.guess(cur_guess); - - if let Some(result) = cur_result { - last_guess = last_guess_2; - last_result = last_result_2; - - last_guess_2 = cur_guess; - last_result_2 = result; - - fitness -= 1.; - steps += 1; - - continue; - } - - fitness += 50.; - break; - } - } - - fitness -} - -#[test] -fn division() { - let mut rng = rand::thread_rng(); - - let starting_genomes = (0..100) - .map(|_| Agent(NeuralNetwork::new(MutationSettings::default(), &mut rng))) - .collect(); - - let mut sim = GeneticSim::new(starting_genomes, fitness, division_pruning_nextgen); - - sim.perform_generations(100); -} - -#[test] -fn crossover() { - let mut rng = rand::thread_rng(); - - let starting_genomes = (0..100) - .map(|_| Agent(NeuralNetwork::new(MutationSettings::default(), &mut rng))) - .collect(); - - let mut sim = GeneticSim::new(starting_genomes, fitness, crossover_pruning_nextgen); - - sim.perform_generations(100); -} - -#[cfg(feature = "serde")] -#[test] -fn serde() { - let mut rng = rand::thread_rng(); - let net: NeuralNetwork<5, 10> = NeuralNetwork::new(MutationSettings::default(), &mut rng); - - let text = serde_json::to_string(&net).unwrap(); - - let net2: NeuralNetwork<5, 10> = serde_json::from_str(&text).unwrap(); - - assert_eq!(net, net2); -} - -#[test] -fn neural_net_cache_sync() { - let cache = NeuralNetCache { - input_layer: [NeuronCache::new(0.3, 0), NeuronCache::new(0.25, 0)], - hidden_layers: vec![ - NeuronCache::new(0.2, 2), - NeuronCache::new(0.0, 2), - NeuronCache::new(1.5, 2), - ], - output_layer: [NeuronCache::new(0.0, 3), NeuronCache::new(0.0, 3)], - }; - - for i in 0..2 { - let input_loc = NeuronLocation::Input(i); - - assert!(cache.claim(&input_loc)); - - for j in 0..3 { - cache.add( - NeuronLocation::Hidden(j), - f32::tanh(cache.get(&input_loc) * 1.2), - ); - } - } - - for i in 0..3 { - let hidden_loc = NeuronLocation::Hidden(i); - - assert!(cache.is_ready(&hidden_loc)); - assert!(cache.claim(&hidden_loc)); - - for j in 0..2 { - cache.add( - NeuronLocation::Output(j), - activation::builtin::sigmoid(cache.get(&hidden_loc) * 0.7), - ); - } - } - - assert_eq!(cache.output(), [2.0688455, 2.0688455]); -} +// TODO \ No newline at end of file From 551843e79c8b6834630b3932546dcbe61034d9fe Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 15 Jan 2026 18:23:18 +0000 Subject: [PATCH 29/60] begin rewriting core mutation functions --- Cargo.lock | 30 ++++++---- Cargo.toml | 5 +- src/activation.rs | 33 +++++++++-- src/neuralnet.rs | 140 +++++++++++++++++++++++++++++++++------------- src/tests.rs | 138 ++++++++++++++++++++++++++++++++++++++++++++- 5 files changed, 287 insertions(+), 59 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fdfc875..d3d1c6f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -132,6 +132,7 @@ dependencies = [ "serde", "serde-big-array", "serde_json", + "union-find", ] [[package]] @@ -215,15 +216,9 @@ dependencies = [ [[package]] name = "replace_with" -version = "0.1.7" 
+version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a8614ee435691de62bcffcf4a66d91b3594bf1428a5722e79103249a095690" - -[[package]] -name = "ryu" -version = "1.0.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" +checksum = "51743d3e274e2b18df81c4dc6caf8a5b8e15dbe799e0dca05c7617380094e884" [[package]] name = "serde" @@ -266,14 +261,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.138" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", + "serde_core", + "zmij", ] [[package]] @@ -293,6 +289,12 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +[[package]] +name = "union-find" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "039142448432983c34b64739f8526f8f233a1eec7a66e61b6ab29acfa781194e" + [[package]] name = "wasip2" version = "1.0.1+wasi-0.2.4" @@ -307,3 +309,9 @@ name = "wit-bindgen" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "zmij" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea" diff --git a/Cargo.toml b/Cargo.toml index 58c4a5f..4ff726c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,9 +28,10 @@ bitflags = "2.10.0" genetic-rs = { version = "1.0.0", features = ["rayon", "derive"] } lazy_static = "1.5.0" rayon = "1.11.0" -replace_with = "0.1.7" +replace_with = "0.1.8" serde = { version = "1.0.228", features = ["derive"], optional = true } serde-big-array = { version = "0.5.1", optional = true } [dev-dependencies] -serde_json = "1.0.138" \ No newline at end of file +serde_json = "1.0.149" +union-find = "0.4.3" diff --git a/src/activation.rs b/src/activation.rs index f39e1d5..9915260 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -4,6 +4,7 @@ pub mod builtin; use bitflags::bitflags; use builtin::*; +use genetic_rs::prelude::rand; #[cfg(feature = "serde")] use serde::{Deserialize, Deserializer, Serialize, Serializer}; @@ -56,13 +57,13 @@ pub fn batch_register_activation(acts: impl IntoIterator) { /// A registry of the different possible activation functions. pub struct ActivationRegistry { /// The currently-registered activation functions. - pub fns: HashMap, + pub fns: HashMap<&'static str, ActivationFn>, } impl ActivationRegistry { /// Registers an activation function. pub fn register(&mut self, activation: ActivationFn) { - self.fns.insert(activation.name.clone(), activation); + self.fns.insert(activation.name, activation); } /// Registers multiple activation functions at once. @@ -82,9 +83,31 @@ impl ActivationRegistry { let acts = self.activations(); acts.into_iter() - .filter(|a| !a.scope.contains(NeuronScope::NONE) && a.scope.contains(scope)) + .filter(|a| a.scope.contains(scope)) .collect() } + + /// Clears all existing values in the activation registry. 
+    pub fn clear(&mut self) {
+        self.fns.clear();
+    }
+
+    /// Fetches a random activation fn that applies to the provided scope.
+    pub fn random_activation_in_scope(&self, scope: NeuronScope, rng: &mut impl rand::Rng) -> ActivationFn {
+        let mut iter = self.fns.values().cycle();
+        // start at a uniformly random entry; `0..self.fns.len() - 1` could never
+        // begin at the last entry and panics outright when only one fn is registered.
+        let num_iterations = rng.random_range(0..self.fns.len());
+
+        for _ in 0..num_iterations {
+            iter.next().unwrap();
+        }
+
+        // NOTE: assumes at least one registered fn matches `scope`; otherwise this loops forever.
+        let mut val = iter.next().unwrap();
+        while !val.scope.contains(scope) {
+            val = iter.next().unwrap();
+        }
+
+        val.clone()
+    }
 }
 
 impl Default for ActivationRegistry {
@@ -125,12 +148,12 @@ pub struct ActivationFn {
     /// The scope defining where the activation function can appear.
     pub scope: NeuronScope,
 
-    pub(crate) name: String,
+    pub(crate) name: &'static str,
 }
 
 impl ActivationFn {
     /// Creates a new ActivationFn object.
-    pub fn new(func: Arc, scope: NeuronScope, name: String) -> Self {
+    pub fn new(func: Arc, scope: NeuronScope, name: &'static str) -> Self {
         Self { func, name, scope }
     }
 }
diff --git a/src/neuralnet.rs b/src/neuralnet.rs
index a51a81f..636dee5 100644
--- a/src/neuralnet.rs
+++ b/src/neuralnet.rs
@@ -74,7 +74,7 @@ pub struct NeuralNetwork<const I: usize, const O: usize> {
 impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     // TODO option to set default output layer activations
     /// Creates a new random neural network with the given settings.
-    pub fn new(mutation_settings: MutationSettings, rng: &mut impl Rng) -> Self {
+    pub fn new(mutation_settings: MutationSettings, rng: &mut impl rand::Rng) -> Self {
         let mut output_layer = Vec::with_capacity(O);
 
         for _ in 0..O {
@@ -88,20 +88,21 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         let mut input_layer = Vec::with_capacity(I);
 
         for _ in 0..I {
-            let mut already_chosen = Vec::new();
-            let outputs = (0..rng.random_range(1..=O))
-                .map(|_| {
-                    let mut j = rng.random_range(0..O);
-                    while already_chosen.contains(&j) {
-                        j = rng.random_range(0..O);
-                    }
-
-                    output_layer[j].input_count += 1;
-                    already_chosen.push(j);
-
-                    (NeuronLocation::Output(j), rng.random())
-                })
-                .collect();
+            let mut already_chosen = HashSet::new();
+            let num_outputs = rng.random_range(1..=O);
+            let mut outputs = HashMap::with_capacity(num_outputs);
+
+            for _ in 0..num_outputs {
+                let mut j = rng.random_range(0..O);
+                while already_chosen.contains(&j) {
+                    j = rng.random_range(0..O);
+                }
+
+                output_layer[j].input_count += 1;
+                already_chosen.insert(j);
+
+                outputs.insert(NeuronLocation::Output(j), rng.random());
+            }
 
             input_layer.push(Neuron::new_with_activation(
                 outputs,
@@ -133,9 +134,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         cache.output()
     }
 
-    fn eval(&self, loc: impl AsRef<NeuronLocation>, cache: Arc<NeuralNetCache<I, O>>) {
-        let loc = loc.as_ref();
-
+    fn eval(&self, loc: NeuronLocation, cache: Arc<NeuralNetCache<I, O>>) {
         if !cache.claim(loc) {
             // some other thread is already
             // waiting to do this task, currently doing it, or done.
@@ -152,39 +151,69 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         let val = cache.get(loc);
         let n = self.get_neuron(loc);
 
-        n.outputs.par_iter().for_each(|(loc2, weight)| {
+        n.outputs.par_iter().for_each(|(&loc2, weight)| {
             cache.add(loc2, n.activate(val * weight));
             self.eval(loc2, cache.clone());
         });
     }
 
     /// Get a neuron at the specified [`NeuronLocation`].
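+    /// Panics if the location's index is out of bounds for its layer.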
- pub fn get_neuron(&self, loc: impl AsRef) -> &Neuron { - match loc.as_ref() { - NeuronLocation::Input(i) => &self.input_layer[*i], - NeuronLocation::Hidden(i) => &self.hidden_layers[*i], - NeuronLocation::Output(i) => &self.output_layer[*i], + pub fn get_neuron(&self, loc: NeuronLocation) -> &Neuron { + match loc { + NeuronLocation::Input(i) => &self.input_layer[i], + NeuronLocation::Hidden(i) => &self.hidden_layers[i], + NeuronLocation::Output(i) => &self.output_layer[i], } } /// Get a mutable reference to the neuron at the specified [`NeuronLocation`]. - pub fn get_neuron_mut(&mut self, loc: impl AsRef) -> &mut Neuron { - match loc.as_ref() { - NeuronLocation::Input(i) => &mut self.input_layer[*i], - NeuronLocation::Hidden(i) => &mut self.hidden_layers[*i], - NeuronLocation::Output(i) => &mut self.output_layer[*i], + pub fn get_neuron_mut(&mut self, loc: NeuronLocation) -> &mut Neuron { + match loc { + NeuronLocation::Input(i) => &mut self.input_layer[i], + NeuronLocation::Hidden(i) => &mut self.hidden_layers[i], + NeuronLocation::Output(i) => &mut self.output_layer[i], } } + /// Adds a new neuron to hidden layer. Updates [`input_count`][Neuron::input_count]s automatically. + pub fn add_neuron(&mut self, n: Neuron) { + for loc in n.outputs.keys() { + let n2 = self.get_neuron_mut(*loc); + n2.input_count += 1; + } + + self.hidden_layers.push(n); + } + /// Split a [`Connection`] into two of the same weight, joined by a new [`Neuron`] in the hidden layer(s). pub fn split_connection(&mut self, connection: Connection, rng: &mut impl Rng) { - todo!() + let new_loc = NeuronLocation::Hidden(self.hidden_layers.len()); + + let a = self.get_neuron_mut(connection.from); + let w = a.outputs.remove(&connection.to).expect("invalid connection.to"); + + a.outputs.insert(new_loc, w); + + let mut outputs = HashMap::new(); + outputs.insert(connection.to, w); + let mut new_n = Neuron::new(outputs, NeuronScope::HIDDEN, rng); + new_n.input_count = 1; + self.hidden_layers.push(new_n); + } + + /// Changes a neuron's activation function to a random one in its scope. + pub fn mutate_activation(&mut self, loc: NeuronLocation, rng: &mut impl Rng) { + let reg = ACTIVATION_REGISTRY.read().unwrap(); + self.get_neuron_mut(loc).activation_fn = reg.random_activation_in_scope(loc.into(), rng); } /// Adds a connection but does not check for cyclic linkages. pub fn add_connection_unchecked(&mut self, connection: Connection, weight: f32) { let a = self.get_neuron_mut(connection.from); a.outputs.insert(connection.to, weight); + + let b = self.get_neuron_mut(connection.to); + b.input_count += 1; } /// Returns false if the connection is cyclic. @@ -194,8 +223,6 @@ impl NeuralNetwork { self.dfs(&mut visited, connection.to) } - // TODO maybe parallelize - // TODO properly test this for bugs fn dfs(&self, visited: &mut HashSet, current: NeuronLocation) -> bool { if !visited.insert(current) { return false; @@ -225,10 +252,17 @@ impl NeuralNetwork { /// Get a random valid location within the network. 
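+    /// When the hidden layer is empty, this picks between the input and
+    /// output layers with equal probability.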
pub fn random_location(&self, rng: &mut impl Rng) -> NeuronLocation { + if self.hidden_layers.is_empty() { + if rng.random_range(0..=1) != 0 { + return NeuronLocation::Input(rng.random_range(0..I)); + } + return NeuronLocation::Output(rng.random_range(0..O)); + } + match rng.random_range(0..3) { - 0 => NeuronLocation::Input(rng.random_range(0..self.input_layer.len())), + 0 => NeuronLocation::Input(rng.random_range(0..I)), 1 => NeuronLocation::Hidden(rng.random_range(0..self.hidden_layers.len())), - 2 => NeuronLocation::Output(rng.random_range(0..self.output_layer.len())), + 2 => NeuronLocation::Output(rng.random_range(0..O)), _ => unreachable!(), } } @@ -249,15 +283,42 @@ impl NeuralNetwork { loc } - /// Remove a connection and any hanging neurons caused by the deletion. - /// Returns whether there was a hanging neuron. + /// Remove a connection and any hanging neurons caused by the deletion + /// (with the exception of output neurons). + /// Returns whether it deleted a hanging neuron. pub fn remove_connection(&mut self, connection: Connection) -> bool { - todo!() + let a = self.get_neuron_mut(connection.from); + a.outputs.remove(&connection.to).expect("invalid connection"); + + let b = self.get_neuron_mut(connection.to); + b.input_count -= 1; + + if connection.to.is_hidden() && b.input_count == 0 { + // hanging neuron that must be deleted. + self.remove_neuron(connection.to); + return true; + } + + false } /// Remove a neuron and downshift all connection indexes to compensate for it. - pub fn remove_neuron(&mut self, loc: impl AsRef) { - todo!() + /// This will also deal with hanging neurons and such. + pub fn remove_neuron(&mut self, loc: NeuronLocation) { + if !loc.is_hidden() { + panic!("cannot remove neurons in input or output layer"); + } + + let n = self.get_neuron(loc); + let locs: Vec<_> = n.outputs.keys().cloned().collect(); + for loc2 in locs { + self.remove_connection(Connection { from: loc, to: loc2 }); + } + + let i = loc.unwrap(); + self.hidden_layers.remove(i); + + self.downshift_connections(i); } fn downshift_connections(&mut self, i: usize) { @@ -362,7 +423,6 @@ impl Mitosis for NeuralNetwork { } } -#[allow(clippy::needless_range_loop)] impl Crossover for NeuralNetwork { fn crossover(&self, other: &Self, rate: f32, rng: &mut impl prelude::Rng) -> Self { todo!() diff --git a/src/tests.rs b/src/tests.rs index 0ffdd02..d37e0dd 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1 +1,137 @@ -// TODO \ No newline at end of file +use crate::*; +use genetic_rs::prelude::rand::{SeedableRng, rngs::StdRng}; +use union_find::{QuickFindUf, UnionBySize, UnionFind}; + +fn loc_to_index(net: &NeuralNetwork, loc: NeuronLocation) -> usize { + match loc { + NeuronLocation::Input(i) => i, + NeuronLocation::Hidden(i) => I + i, + NeuronLocation::Output(i) => I + net.hidden_layers.len() + i, + } +} + +fn assert_graph_invariants(net: &NeuralNetwork) { + let total_len = I + O + net.hidden_layers.len(); + let mut uf = QuickFindUf::::new(total_len); + + for i in 0..I { + let loc = NeuronLocation::Input(i); + let a_ident = uf.find(i); + + let n = net.get_neuron(loc); + for (loc2, _) in &n.outputs { + let b_ident = uf.find(loc_to_index(net, *loc2)); + if !uf.union(a_ident, b_ident) { + panic!("cycle detected in network: {loc:?} -> {loc2:?}"); + } + } + } + + for i in 0..total_len { + if uf.find(i) >= I { + panic!("found hanging neuron"); + } + } +} + +struct InputCountsCache { + hidden_layers: Vec, + output: [usize; O], +} + +impl InputCountsCache { + fn tally(&mut self, loc: NeuronLocation) { + match 
loc { + NeuronLocation::Input(_) => panic!("input neurons can't have inputs"), + NeuronLocation::Hidden(i) => self.hidden_layers[i] += 1, + NeuronLocation::Output(i) => self.output[i] += 1, + } + } +} + +// asserts that cached/tracked values are correct. mainly only used for +// input count and such +fn assert_cache_consistency(net: &NeuralNetwork) { + let mut cache = InputCountsCache { + hidden_layers: vec![0; net.hidden_layers.len()], + output: [0; O], + }; + + for i in 0..I { + let n = net.get_neuron(NeuronLocation::Input(i)); + for loc in n.outputs.keys() { + cache.tally(*loc); + } + } + + for n in &net.hidden_layers { + for loc in n.outputs.keys() { + cache.tally(*loc); + } + } + + for (i, x) in cache.hidden_layers.into_iter().enumerate() { + if x == 0 { + // maybe redundant because of graph invariants, but oh well + panic!("found hanging neuron"); + } + + assert_eq!(x, net.hidden_layers[i].input_count); + } + + for (i, x) in cache.output.into_iter().enumerate() { + assert_eq!(x, net.output_layer[i].input_count); + } +} + +fn assert_network_invariants(net: &NeuralNetwork) { + // assert_graph_invariants(net); + assert_cache_consistency(net); + // TODO other invariants +} + +const TEST_COUNT: u64 = 1000; +fn rng_test(test: impl Fn(&mut StdRng)) { + for seed in 0..TEST_COUNT { + let mut rng = StdRng::seed_from_u64(seed); + test(&mut rng); + } +} + +#[test] +fn create_network() { + rng_test(|rng| { + let net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); + assert_network_invariants(&net); + }); +} + +#[test] +fn split_connection() { + // rng doesn't matter here since it's just adding bias in eval + let mut rng = StdRng::seed_from_u64(0xabcdef); + + let mut net = NeuralNetwork::<1, 1>::new(MutationSettings::default(), &mut rng); + assert_network_invariants(&net); + + net.split_connection(Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Output(0) }, &mut rng); + assert_network_invariants(&net); + + assert_eq!(*net.input_layer[0].outputs.keys().next().unwrap(), NeuronLocation::Hidden(0)); + assert_eq!(*net.hidden_layers[0].outputs.keys().next().unwrap(), NeuronLocation::Output(0)); +} + +const NUM_MUTATIONS: usize = 1000; +const MUTATION_RATE: f32 = 0.25; +#[test] +fn mutate() { + rng_test(|rng| { + let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); + assert_network_invariants(&net); + + for _ in 0..NUM_MUTATIONS { + net.mutate(MUTATION_RATE, rng); + assert_network_invariants(&net); + } + }); +} \ No newline at end of file From 1458729cad0c9b96ef53798cf4de26f6295cc0cd Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 15 Jan 2026 18:59:46 +0000 Subject: [PATCH 30/60] fix graph invariants check --- Cargo.lock | 7 ------- Cargo.toml | 1 - src/neuralnet.rs | 4 ++-- src/tests.rs | 52 +++++++++++++++++++++++++++++++----------------- 4 files changed, 36 insertions(+), 28 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d3d1c6f..8d66b01 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -132,7 +132,6 @@ dependencies = [ "serde", "serde-big-array", "serde_json", - "union-find", ] [[package]] @@ -289,12 +288,6 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" -[[package]] -name = "union-find" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "039142448432983c34b64739f8526f8f233a1eec7a66e61b6ab29acfa781194e" - [[package]] name = 
"wasip2" version = "1.0.1+wasi-0.2.4" diff --git a/Cargo.toml b/Cargo.toml index 4ff726c..5303f49 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,4 +34,3 @@ serde-big-array = { version = "0.5.1", optional = true } [dev-dependencies] serde_json = "1.0.149" -union-find = "0.4.3" diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 636dee5..364ae9f 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -148,11 +148,11 @@ impl NeuralNetwork { rayon::yield_now(); } - let val = cache.get(loc); let n = self.get_neuron(loc); + let val = n.activate(cache.get(loc)); n.outputs.par_iter().for_each(|(&loc2, weight)| { - cache.add(loc2, n.activate(val * weight)); + cache.add(loc2, val * weight); self.eval(loc2, cache.clone()); }); } diff --git a/src/tests.rs b/src/tests.rs index d37e0dd..b7ddc09 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,6 +1,7 @@ +use std::collections::HashMap; + use crate::*; use genetic_rs::prelude::rand::{SeedableRng, rngs::StdRng}; -use union_find::{QuickFindUf, UnionBySize, UnionFind}; fn loc_to_index(net: &NeuralNetwork, loc: NeuronLocation) -> usize { match loc { @@ -10,28 +11,43 @@ fn loc_to_index(net: &NeuralNetwork, loc: } } +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +enum GraphCheckState { + CurrentCycle, + Checked, +} + fn assert_graph_invariants(net: &NeuralNetwork) { - let total_len = I + O + net.hidden_layers.len(); - let mut uf = QuickFindUf::::new(total_len); - + let mut visited = HashMap::new(); + for i in 0..I { - let loc = NeuronLocation::Input(i); - let a_ident = uf.find(i); - - let n = net.get_neuron(loc); - for (loc2, _) in &n.outputs { - let b_ident = uf.find(loc_to_index(net, *loc2)); - if !uf.union(a_ident, b_ident) { - panic!("cycle detected in network: {loc:?} -> {loc2:?}"); - } + dfs(net, NeuronLocation::Input(i), &mut visited); + } + + for i in 0..net.hidden_layers.len() { + let loc = NeuronLocation::Hidden(i); + if !visited.contains_key(&loc) { + panic!("hanging neuron: {loc:?}"); } } +} - for i in 0..total_len { - if uf.find(i) >= I { - panic!("found hanging neuron"); +// simple colored dfs for checking graph invariants. 
+fn dfs(net: &NeuralNetwork, loc: NeuronLocation, visited: &mut HashMap) { + if let Some(existing) = visited.get(&loc) { + match *existing { + GraphCheckState::CurrentCycle => panic!("cycle detected on {loc:?}"), + GraphCheckState::Checked => return, } } + + visited.insert(loc, GraphCheckState::CurrentCycle); + + for loc2 in net.get_neuron(loc).outputs.keys() { + dfs(net, *loc2, visited); + } + + visited.insert(loc, GraphCheckState::Checked); } struct InputCountsCache { @@ -72,7 +88,7 @@ fn assert_cache_consistency(net: &NeuralNetwork< for (i, x) in cache.hidden_layers.into_iter().enumerate() { if x == 0 { - // maybe redundant because of graph invariants, but oh well + // redundant because of graph invariants, but better safe than sorry panic!("found hanging neuron"); } @@ -85,7 +101,7 @@ fn assert_cache_consistency(net: &NeuralNetwork< } fn assert_network_invariants(net: &NeuralNetwork) { - // assert_graph_invariants(net); + assert_graph_invariants(net); assert_cache_consistency(net); // TODO other invariants } From bbb4a710f6d2e89329b34e94ee9de5ccb361e007 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 15 Jan 2026 19:02:38 +0000 Subject: [PATCH 31/60] parallelize random testing --- src/tests.rs | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/src/tests.rs b/src/tests.rs index b7ddc09..e043a64 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -2,14 +2,7 @@ use std::collections::HashMap; use crate::*; use genetic_rs::prelude::rand::{SeedableRng, rngs::StdRng}; - -fn loc_to_index(net: &NeuralNetwork, loc: NeuronLocation) -> usize { - match loc { - NeuronLocation::Input(i) => i, - NeuronLocation::Hidden(i) => I + i, - NeuronLocation::Output(i) => I + net.hidden_layers.len() + i, - } -} +use rayon::prelude::*; #[derive(Debug, Copy, Clone, PartialEq, Eq)] enum GraphCheckState { @@ -107,11 +100,13 @@ fn assert_network_invariants(net: &NeuralNetwork } const TEST_COUNT: u64 = 1000; -fn rng_test(test: impl Fn(&mut StdRng)) { - for seed in 0..TEST_COUNT { - let mut rng = StdRng::seed_from_u64(seed); - test(&mut rng); - } +fn rng_test(test: impl Fn(&mut StdRng) + Sync) { + (0..TEST_COUNT) + .into_par_iter() + .for_each(|seed| { + let mut rng = StdRng::seed_from_u64(seed); + test(&mut rng); + }); } #[test] From 301d927a2deda21c56847f948cb9616d0c86ed49 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Wed, 21 Jan 2026 18:41:04 +0000 Subject: [PATCH 32/60] implement add_connection --- src/activation.rs | 4 +- src/lib.rs | 5 +- src/neuralnet.rs | 126 ++++++++++++++++++++++++++++++++++------------ src/tests.rs | 76 +++++++++++++++++++++++++--- 4 files changed, 167 insertions(+), 44 deletions(-) diff --git a/src/activation.rs b/src/activation.rs index 9915260..3d15e0d 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -21,11 +21,11 @@ use crate::NeuronLocation; #[macro_export] macro_rules! 
activation_fn { ($F: path) => { - ActivationFn::new(std::sync::Arc::new($F), NeuronScope::default(), stringify!($F).into()) + $crate::activation::ActivationFn::new(std::sync::Arc::new($F), $crate::activation::NeuronScope::default(), stringify!($F).into()) }; ($F: path, $S: expr) => { - ActivationFn::new(std::sync::Arc::new($F), $S, stringify!($F).into()) + $crate::activation::ActivationFn::new(std::sync::Arc::new($F), $S, stringify!($F).into()) }; {$($F: path),*} => { diff --git a/src/lib.rs b/src/lib.rs index 0de7360..6c93cd4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,5 @@ -//! A crate implementing NeuroEvolution of Augmenting Topologies (NEAT). -//! -//! The goal is to provide a simple-to-use, very dynamic [`NeuralNetwork`] type that +//! A crate implementing NeuroEvolution of Augmenting Topologies (NEAT) using a directed acyclic graph. +//! It provides an easy-to-use [`NeuralNetwork`] type that //! integrates directly into the [`genetic-rs`](https://crates.io/crates/genetic-rs) ecosystem. //! //! Look at the README, docs, or examples to learn how to use this crate. diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 364ae9f..e5f980b 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -166,6 +166,15 @@ impl NeuralNetwork { } } + /// Returns whether there is a neuron at the location + pub fn neuron_exists(&self, loc: NeuronLocation) -> bool { + match loc { + NeuronLocation::Input(i) => i < I, + NeuronLocation::Hidden(i) => i < self.hidden_layers.len(), + NeuronLocation::Output(i) => i < O, + } + } + /// Get a mutable reference to the neuron at the specified [`NeuronLocation`]. pub fn get_neuron_mut(&mut self, loc: NeuronLocation) -> &mut Neuron { match loc { @@ -176,13 +185,28 @@ impl NeuralNetwork { } /// Adds a new neuron to hidden layer. Updates [`input_count`][Neuron::input_count]s automatically. - pub fn add_neuron(&mut self, n: Neuron) { - for loc in n.outputs.keys() { - let n2 = self.get_neuron_mut(*loc); - n2.input_count += 1; + /// Removes any output connections that point to invalid neurons or would result in cyclic linkage. + /// Returns whether all output connections were valid. + /// Due to the cyclic check, this function has time complexity O(nm), where n is the number of neurons + /// and m is the number of output connections. + pub fn add_neuron(&mut self, mut n: Neuron) -> bool { + let mut valid = true; + let new_loc = NeuronLocation::Hidden(self.hidden_layers.len()); + let outputs = n.outputs.keys().cloned().collect::>(); + for loc in outputs { + if !self.neuron_exists(loc) || !self.is_connection_safe(Connection { from: new_loc, to: loc }) { + n.outputs.remove(&loc); + valid = false; + continue; + } + + let n = self.get_neuron_mut(loc); + n.input_count += 1; } - + self.hidden_layers.push(n); + + valid } /// Split a [`Connection`] into two of the same weight, joined by a new [`Neuron`] in the hidden layer(s). @@ -216,10 +240,13 @@ impl NeuralNetwork { b.input_count += 1; } - /// Returns false if the connection is cyclic. + /// Returns false if the connection is cyclic or the input/output neurons are otherwise invalid in some other way. + /// Can be O(n) over the number of neurons in the network. 
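+    ///
+    /// For example, if a connection `a -> b` already exists, proposing `b -> a`
+    /// returns `false` because the DFS starting from `a` reaches `b` again.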
pub fn is_connection_safe(&self, connection: Connection) -> bool { + if connection.from.is_output() || connection.to.is_input() || (self.neuron_exists(connection.from) && self.get_neuron(connection.from).outputs.contains_key(&connection.to)) { + return false; + } let mut visited = HashSet::from([connection.from]); - self.dfs(&mut visited, connection.to) } @@ -238,9 +265,34 @@ impl NeuralNetwork { true } - /// Safe, checked add connection method. Returns false if it aborted connecting due to cyclic linkage. + /// Safe, checked add connection method. Returns false if it aborted due to cyclic linkage. + /// Note that checking for cyclic linkage is O(n) over all neurons in the network, which + /// may be expensive for larger networks. pub fn add_connection(&mut self, connection: Connection, weight: f32) -> bool { - todo!() + if !self.is_connection_safe(connection) { + return false; + } + + self.add_connection_unchecked(connection, weight); + + true + } + + /// Attempts to add a random connection, retrying if unsafe. + /// Returns the connection if it established one before reaching max_retries. + pub fn add_random_connection(&mut self, max_retries: usize, rng: &mut impl rand::Rng) -> Option { + for _ in 0..max_retries { + let a = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); + let b = self.random_location_in_scope(rng, !NeuronScope::INPUT); + + let conn = Connection { from: a, to: b }; + let rate = self.mutation_settings.mutation_rate; + if self.add_connection(conn, rng.random_range(-rate..rate)) { + return Some(conn); + } + } + + None } /// Mutates a connection's weight. @@ -270,22 +322,32 @@ impl NeuralNetwork { /// Get a random valid location within a [`NeuronScope`]. pub fn random_location_in_scope( &self, - rng: &mut impl Rng, + rng: &mut impl rand::Rng, scope: NeuronScope, ) -> NeuronLocation { - let loc = self.random_location(rng); + if scope == NeuronScope::NONE { + panic!("cannot select from empty scope"); + } - // this is a lazy and slow way of donig it, TODO better version. - if !scope.contains(NeuronScope::from(loc)) { - return self.random_location_in_scope(rng, scope); + let mut layers = Vec::with_capacity(3); + if scope.contains(NeuronScope::INPUT) { + layers.push((NeuronLocation::Input(0), I)); + } + if scope.contains(NeuronScope::HIDDEN) && !self.hidden_layers.is_empty() { + layers.push((NeuronLocation::Hidden(0), self.hidden_layers.len())); + } + if scope.contains(NeuronScope::OUTPUT) { + layers.push((NeuronLocation::Output(0), O)); } - loc + let (mut loc, size) = layers[rng.random_range(0..layers.len())]; + loc.set_inner(rng.random_range(0..size)); + loc } /// Remove a connection and any hanging neurons caused by the deletion - /// (with the exception of output neurons). - /// Returns whether it deleted a hanging neuron. + /// (with the exception of output layer neurons). + /// Returns whether it removed a hanging neuron. pub fn remove_connection(&mut self, connection: Connection) -> bool { let a = self.get_neuron_mut(connection.from); a.outputs.remove(&connection.to).expect("invalid connection"); @@ -302,7 +364,7 @@ impl NeuralNetwork { false } - /// Remove a neuron and downshift all connection indexes to compensate for it. + /// Remove a neuron and downshift all connection indices to compensate for it. /// This will also deal with hanging neurons and such. 
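+    /// E.g. removing the only inbound connection of a hidden neuron deletes
+    /// that neuron as well and returns `true`.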
pub fn remove_neuron(&mut self, loc: NeuronLocation) { if !loc.is_hidden() { @@ -344,13 +406,6 @@ impl NeuralNetwork { } fn clear_input_counts(&mut self) { - // not sure whether all this parallelism is necessary or if it will just generate overhead - // rayon::scope(|s| { - // s.spawn(|_| self.input_layer.par_iter_mut().for_each(|n| n.input_count = 0)); - // s.spawn(|_| self.hidden_layers.par_iter_mut().for_each(|n| n.input_count = 0)); - // s.spawn(|_| self.output_layer.par_iter_mut().for_each(|n| n.input_count = 0)); - // }); - self.input_layer .par_iter_mut() .for_each(|n| n.input_count = 0); @@ -485,14 +540,13 @@ impl Neuron { /// Chooses a random activation function within the specified scope. pub fn new( outputs: HashMap, - current_scope: NeuronScope, + scope: NeuronScope, rng: &mut impl Rng, ) -> Self { let reg = ACTIVATION_REGISTRY.read().unwrap(); - let activations = reg.activations_in_scope(current_scope); - drop(reg); + let act = reg.random_activation_in_scope(scope, rng); - Self::new_with_activations(outputs, activations, rng) + Self::new_with_activation(outputs, act, rng) } /// Creates a new neuron with the given outputs. @@ -529,8 +583,8 @@ impl Neuron { rate: f32, rng: &mut impl Rng, ) -> Option { - if let Some(mut w) = self.outputs.get_mut(&output) { - *w += rng.random_range(-rate..rate); + if let Some(w) = self.outputs.get_mut(&output) { + *w += rng.random_range(-rate..=rate); return Some(*w); } @@ -601,6 +655,16 @@ impl NeuronLocation { Self::Output(i) => *i, } } + + /// Sets the inner index value without changing the layer. + pub fn set_inner(&mut self, v: usize) { + // there's gotta be a cleaner way of doing this + match self { + Self::Input(i) => *i = v, + Self::Hidden(i) => *i = v, + Self::Output(i) => *i = v, + } + } } impl AsRef for NeuronLocation { diff --git a/src/tests.rs b/src/tests.rs index e043a64..18993c2 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; -use crate::*; +use crate::{activation::builtin::linear_activation, *}; use genetic_rs::prelude::rand::{SeedableRng, rngs::StdRng}; use rayon::prelude::*; @@ -132,17 +132,77 @@ fn split_connection() { assert_eq!(*net.hidden_layers[0].outputs.keys().next().unwrap(), NeuronLocation::Output(0)); } -const NUM_MUTATIONS: usize = 1000; -const MUTATION_RATE: f32 = 0.25; #[test] -fn mutate() { +fn add_connection() { + let mut rng = StdRng::seed_from_u64(0xabcdef); + let mut net = NeuralNetwork { + input_layer: [Neuron::new_with_activation(HashMap::new(), activation_fn!(linear_activation), &mut rng)], + hidden_layers: vec![], + output_layer: [Neuron::new_with_activation(HashMap::new(), activation_fn!(linear_activation), &mut rng)], + mutation_settings: MutationSettings::default() + }; + assert_network_invariants(&net); + + let mut conn = Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Output(0) }; + assert!(net.add_connection(conn, 0.1)); + assert_network_invariants(&net); + + assert!(!net.add_connection(conn, 0.1)); + assert_network_invariants(&net); + + let mut outputs = HashMap::new(); + outputs.insert(NeuronLocation::Output(0), 0.1); + let n = Neuron::new_with_activation(outputs, activation_fn!(linear_activation), &mut rng); + + net.add_neuron(n.clone()); + // temporarily broken invariants bc of hanging neuron + + conn.to = NeuronLocation::Hidden(0); + assert!(net.add_connection(conn, 0.1)); + assert_network_invariants(&net); + + net.add_neuron(n); + + conn.to = NeuronLocation::Hidden(1); + assert!(net.add_connection(conn, 0.1)); + 
assert_network_invariants(&net); + + conn.from = NeuronLocation::Hidden(0); + assert!(net.add_connection(conn, 0.1)); + assert_network_invariants(&net); + + net.split_connection(conn, &mut rng); + assert_network_invariants(&net); + + conn.from = NeuronLocation::Hidden(2); + conn.to = NeuronLocation::Hidden(0); + + assert!(!net.add_connection(conn, 0.1)); + assert_network_invariants(&net); + + // random stress testing rng_test(|rng| { let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); assert_network_invariants(&net); - - for _ in 0..NUM_MUTATIONS { - net.mutate(MUTATION_RATE, rng); + for _ in 0..50 { + net.add_random_connection(10, rng); assert_network_invariants(&net); } }); -} \ No newline at end of file +} + +// TODO will use this once we have all the individual functions tested +// const NUM_MUTATIONS: usize = 1000; +// const MUTATION_RATE: f32 = 0.25; +// #[test] +// fn mutate() { +// rng_test(|rng| { +// let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); +// assert_network_invariants(&net); + +// for _ in 0..NUM_MUTATIONS { +// net.mutate(MUTATION_RATE, rng); +// assert_network_invariants(&net); +// } +// }); +// } \ No newline at end of file From 8dbbfc044b89c95098502207067ff507a164aabc Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Wed, 21 Jan 2026 18:41:20 +0000 Subject: [PATCH 33/60] cargo fmt --- src/activation.rs | 14 ++++++++--- src/neuralnet.rs | 49 +++++++++++++++++++++++++++++---------- src/tests.rs | 59 +++++++++++++++++++++++++++++++++-------------- 3 files changed, 90 insertions(+), 32 deletions(-) diff --git a/src/activation.rs b/src/activation.rs index 3d15e0d..7857fae 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -93,9 +93,13 @@ impl ActivationRegistry { } /// Fetches a random activation fn that applies to the provided scope. - pub fn random_activation_in_scope(&self, scope: NeuronScope, rng: &mut impl rand::Rng) -> ActivationFn { + pub fn random_activation_in_scope( + &self, + scope: NeuronScope, + rng: &mut impl rand::Rng, + ) -> ActivationFn { let mut iter = self.fns.values().cycle(); - let num_iterations = rng.random_range(0..self.fns.len()-1); + let num_iterations = rng.random_range(0..self.fns.len() - 1); for _ in 0..num_iterations { iter.next().unwrap(); @@ -153,7 +157,11 @@ pub struct ActivationFn { impl ActivationFn { /// Creates a new ActivationFn object. 
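+    /// Typically invoked through the `activation_fn!` macro, which supplies the
+    /// stringified function path as the name.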
- pub fn new(func: Arc, scope: NeuronScope, name: &'static str) -> Self { + pub fn new( + func: Arc, + scope: NeuronScope, + name: &'static str, + ) -> Self { Self { func, name, scope } } } diff --git a/src/neuralnet.rs b/src/neuralnet.rs index e5f980b..713d7b8 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -1,7 +1,8 @@ use std::{ collections::{HashMap, HashSet}, sync::{ - Arc, atomic::{AtomicBool, AtomicUsize, Ordering} + atomic::{AtomicBool, AtomicUsize, Ordering}, + Arc, }, }; @@ -194,7 +195,12 @@ impl NeuralNetwork { let new_loc = NeuronLocation::Hidden(self.hidden_layers.len()); let outputs = n.outputs.keys().cloned().collect::>(); for loc in outputs { - if !self.neuron_exists(loc) || !self.is_connection_safe(Connection { from: new_loc, to: loc }) { + if !self.neuron_exists(loc) + || !self.is_connection_safe(Connection { + from: new_loc, + to: loc, + }) + { n.outputs.remove(&loc); valid = false; continue; @@ -214,7 +220,10 @@ impl NeuralNetwork { let new_loc = NeuronLocation::Hidden(self.hidden_layers.len()); let a = self.get_neuron_mut(connection.from); - let w = a.outputs.remove(&connection.to).expect("invalid connection.to"); + let w = a + .outputs + .remove(&connection.to) + .expect("invalid connection.to"); a.outputs.insert(new_loc, w); @@ -235,7 +244,7 @@ impl NeuralNetwork { pub fn add_connection_unchecked(&mut self, connection: Connection, weight: f32) { let a = self.get_neuron_mut(connection.from); a.outputs.insert(connection.to, weight); - + let b = self.get_neuron_mut(connection.to); b.input_count += 1; } @@ -243,7 +252,14 @@ impl NeuralNetwork { /// Returns false if the connection is cyclic or the input/output neurons are otherwise invalid in some other way. /// Can be O(n) over the number of neurons in the network. pub fn is_connection_safe(&self, connection: Connection) -> bool { - if connection.from.is_output() || connection.to.is_input() || (self.neuron_exists(connection.from) && self.get_neuron(connection.from).outputs.contains_key(&connection.to)) { + if connection.from.is_output() + || connection.to.is_input() + || (self.neuron_exists(connection.from) + && self + .get_neuron(connection.from) + .outputs + .contains_key(&connection.to)) + { return false; } let mut visited = HashSet::from([connection.from]); @@ -280,7 +296,11 @@ impl NeuralNetwork { /// Attempts to add a random connection, retrying if unsafe. /// Returns the connection if it established one before reaching max_retries. - pub fn add_random_connection(&mut self, max_retries: usize, rng: &mut impl rand::Rng) -> Option { + pub fn add_random_connection( + &mut self, + max_retries: usize, + rng: &mut impl rand::Rng, + ) -> Option { for _ in 0..max_retries { let a = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); let b = self.random_location_in_scope(rng, !NeuronScope::INPUT); @@ -291,7 +311,7 @@ impl NeuralNetwork { return Some(conn); } } - + None } @@ -340,9 +360,9 @@ impl NeuralNetwork { layers.push((NeuronLocation::Output(0), O)); } - let (mut loc, size) = layers[rng.random_range(0..layers.len())]; + let (mut loc, size) = layers[rng.random_range(0..layers.len())]; loc.set_inner(rng.random_range(0..size)); - loc + loc } /// Remove a connection and any hanging neurons caused by the deletion @@ -350,7 +370,9 @@ impl NeuralNetwork { /// Returns whether it removed a hanging neuron. 
pub fn remove_connection(&mut self, connection: Connection) -> bool { let a = self.get_neuron_mut(connection.from); - a.outputs.remove(&connection.to).expect("invalid connection"); + a.outputs + .remove(&connection.to) + .expect("invalid connection"); let b = self.get_neuron_mut(connection.to); b.input_count -= 1; @@ -374,7 +396,10 @@ impl NeuralNetwork { let n = self.get_neuron(loc); let locs: Vec<_> = n.outputs.keys().cloned().collect(); for loc2 in locs { - self.remove_connection(Connection { from: loc, to: loc2 }); + self.remove_connection(Connection { + from: loc, + to: loc2, + }); } let i = loc.unwrap(); @@ -469,7 +494,7 @@ impl RandomlyMutable for NeuralNetwork { impl Mitosis for NeuralNetwork { fn divide(&self, rate: f32, rng: &mut impl prelude::Rng) -> Self { let mut child = self.clone(); - + for _ in 0..self.mutation_settings.mutation_passes { child.mutate(rate, rng); } diff --git a/src/tests.rs b/src/tests.rs index 18993c2..74e073d 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; use crate::{activation::builtin::linear_activation, *}; -use genetic_rs::prelude::rand::{SeedableRng, rngs::StdRng}; +use genetic_rs::prelude::rand::{rngs::StdRng, SeedableRng}; use rayon::prelude::*; #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -26,7 +26,11 @@ fn assert_graph_invariants(net: &NeuralNetwork(net: &NeuralNetwork, loc: NeuronLocation, visited: &mut HashMap) { +fn dfs( + net: &NeuralNetwork, + loc: NeuronLocation, + visited: &mut HashMap, +) { if let Some(existing) = visited.get(&loc) { match *existing { GraphCheckState::CurrentCycle => panic!("cycle detected on {loc:?}"), @@ -101,12 +105,10 @@ fn assert_network_invariants(net: &NeuralNetwork const TEST_COUNT: u64 = 1000; fn rng_test(test: impl Fn(&mut StdRng) + Sync) { - (0..TEST_COUNT) - .into_par_iter() - .for_each(|seed| { - let mut rng = StdRng::seed_from_u64(seed); - test(&mut rng); - }); + (0..TEST_COUNT).into_par_iter().for_each(|seed| { + let mut rng = StdRng::seed_from_u64(seed); + test(&mut rng); + }); } #[test] @@ -124,26 +126,49 @@ fn split_connection() { let mut net = NeuralNetwork::<1, 1>::new(MutationSettings::default(), &mut rng); assert_network_invariants(&net); - - net.split_connection(Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Output(0) }, &mut rng); + + net.split_connection( + Connection { + from: NeuronLocation::Input(0), + to: NeuronLocation::Output(0), + }, + &mut rng, + ); assert_network_invariants(&net); - assert_eq!(*net.input_layer[0].outputs.keys().next().unwrap(), NeuronLocation::Hidden(0)); - assert_eq!(*net.hidden_layers[0].outputs.keys().next().unwrap(), NeuronLocation::Output(0)); + assert_eq!( + *net.input_layer[0].outputs.keys().next().unwrap(), + NeuronLocation::Hidden(0) + ); + assert_eq!( + *net.hidden_layers[0].outputs.keys().next().unwrap(), + NeuronLocation::Output(0) + ); } #[test] fn add_connection() { let mut rng = StdRng::seed_from_u64(0xabcdef); let mut net = NeuralNetwork { - input_layer: [Neuron::new_with_activation(HashMap::new(), activation_fn!(linear_activation), &mut rng)], + input_layer: [Neuron::new_with_activation( + HashMap::new(), + activation_fn!(linear_activation), + &mut rng, + )], hidden_layers: vec![], - output_layer: [Neuron::new_with_activation(HashMap::new(), activation_fn!(linear_activation), &mut rng)], - mutation_settings: MutationSettings::default() + output_layer: [Neuron::new_with_activation( + HashMap::new(), + activation_fn!(linear_activation), + &mut rng, + )], + mutation_settings: 
MutationSettings::default(), }; assert_network_invariants(&net); - let mut conn = Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Output(0) }; + let mut conn = Connection { + from: NeuronLocation::Input(0), + to: NeuronLocation::Output(0), + }; assert!(net.add_connection(conn, 0.1)); assert_network_invariants(&net); @@ -205,4 +230,4 @@ fn add_connection() { // assert_network_invariants(&net); // } // }); -// } \ No newline at end of file +// } From 68e76546341bf26f5757a4f8f1fdd5c39ced9167 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 29 Jan 2026 23:00:39 +0000 Subject: [PATCH 34/60] implement remove_connection --- src/neuralnet.rs | 29 +++++++++++++++++++++++++++++ src/tests.rs | 47 ++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 75 insertions(+), 1 deletion(-) diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 713d7b8..ef0accb 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -315,6 +315,35 @@ impl NeuralNetwork { None } + /// Attempts to remove a random connection, retrying if the neuron it found + /// doesn't have any outbound connections. + /// Returns the connection if it removed one before reaching max_retries. + pub fn remove_random_connection( + &mut self, + max_retries: usize, + rng: &mut impl rand::Rng, + ) -> Option { + for _ in 0..max_retries { + let a = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); + + let an = self.get_neuron(a); + if an.outputs.is_empty() { + continue; + } + + let mut iter = an.outputs.keys().skip(rng.random_range(0..an.outputs.len())); + let b = iter.next().unwrap(); + + let conn = Connection { from: a, to: *b }; + + self.remove_connection(conn); + + return Some(conn); + } + + None + } + /// Mutates a connection's weight. 
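+    /// A sketch (assuming the connection exists; the mutation amount comes
+    /// from `mutation_settings.weight_mutation_amount`):
+    ///
+    /// ```ignore
+    /// net.mutate_weight(
+    ///     Connection {
+    ///         from: NeuronLocation::Input(0),
+    ///         to: NeuronLocation::Output(0),
+    ///     },
+    ///     &mut rng,
+    /// );
+    /// ```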
pub fn mutate_weight(&mut self, connection: Connection, rng: &mut impl Rng) { let rate = self.mutation_settings.weight_mutation_amount; diff --git a/src/tests.rs b/src/tests.rs index 74e073d..8d69bc2 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; -use crate::{activation::builtin::linear_activation, *}; +use crate::{activation::{NeuronScope, builtin::linear_activation}, *}; use genetic_rs::prelude::rand::{rngs::StdRng, SeedableRng}; use rayon::prelude::*; @@ -216,6 +216,51 @@ fn add_connection() { }); } +#[test] +fn remove_connection() { + let mut rng = StdRng::seed_from_u64(0xabcdef); + let mut net = NeuralNetwork { + input_layer: [Neuron::new_with_activation( + HashMap::from([(NeuronLocation::Output(0), 0.1), (NeuronLocation::Hidden(0), 1.0)]), + activation_fn!(linear_activation), + &mut rng, + )], + hidden_layers: vec![Neuron { + input_count: 1, + outputs: HashMap::new(), // not sure whether i want neurons with no outputs to break the invariant/be removed + bias: 0.0, + activation_fn: activation_fn!(linear_activation), + }], + output_layer: [Neuron { + input_count: 1, + outputs: HashMap::new(), + bias: 0.0, + activation_fn: activation_fn!(linear_activation), + }], + mutation_settings: MutationSettings::default(), + }; + assert_network_invariants(&net); + + assert!(!net.remove_connection(Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Output(0)})); + assert_network_invariants(&net); + + assert!(net.remove_connection(Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Hidden(0)})); + assert_network_invariants(&net); + + rng_test(|rng| { + let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); + assert_network_invariants(&net); + + for _ in 0..50 { + net.add_random_connection(10, rng); + assert_network_invariants(&net); + + net.remove_random_connection(5, rng); + assert_network_invariants(&net); + } + }); +} + // TODO will use this once we have all the individual functions tested // const NUM_MUTATIONS: usize = 1000; // const MUTATION_RATE: f32 = 0.25; From 7918a27992849ff99db474635150c5aa1d0ef839 Mon Sep 17 00:00:00 2001 From: HyperCodec Date: Sat, 31 Jan 2026 16:23:34 -0500 Subject: [PATCH 35/60] fix remove_connection, add bias mutation, and complete general mutation function --- src/neuralnet.rs | 171 +++++++++++++++++++++++++++++++---------------- src/tests.rs | 67 +++++++++++++------ 2 files changed, 159 insertions(+), 79 deletions(-) diff --git a/src/neuralnet.rs b/src/neuralnet.rs index ef0accb..88aa0c4 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -1,5 +1,5 @@ use std::{ - collections::{HashMap, HashSet}, + collections::{HashMap, HashSet, VecDeque}, sync::{ atomic::{AtomicBool, AtomicUsize, Ordering}, Arc, @@ -315,35 +315,52 @@ impl NeuralNetwork { None } - /// Attempts to remove a random connection, retrying if the neuron it found + /// Attempts to get a random connection, retrying if the neuron it found /// doesn't have any outbound connections. - /// Returns the connection if it removed one before reaching max_retries. - pub fn remove_random_connection( + /// Returns the connection if it found one before reaching max_retries. 
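+    /// A minimal sketch, mirroring how the mutation code below uses it
+    /// (with `net` and `rng` as in the tests):
+    ///
+    /// ```ignore
+    /// if let Some(conn) = net.get_random_connection(10, &mut rng) {
+    ///     // the connection is only sampled here, not removed
+    ///     net.split_connection(conn, &mut rng);
+    /// }
+    /// ```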
+ pub fn get_random_connection( &mut self, max_retries: usize, rng: &mut impl rand::Rng, ) -> Option { for _ in 0..max_retries { let a = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); - let an = self.get_neuron(a); if an.outputs.is_empty() { continue; } - let mut iter = an.outputs.keys().skip(rng.random_range(0..an.outputs.len())); + let mut iter = an + .outputs + .keys() + .skip(rng.random_range(0..an.outputs.len())); let b = iter.next().unwrap(); let conn = Connection { from: a, to: *b }; - - self.remove_connection(conn); - return Some(conn); } None } + /// Attempts to remove a random connection, retrying if the neuron it found + /// doesn't have any outbound connections. Also removes hanging neurons created + /// by removing the connection. + /// + /// Returns the connection if it removed one before reaching max_retries. + pub fn remove_random_connection( + &mut self, + max_retries: usize, + rng: &mut impl rand::Rng, + ) -> Option { + if let Some(conn) = self.get_random_connection(max_retries, rng) { + self.remove_connection(conn); + Some(conn) + } else { + None + } + } + /// Mutates a connection's weight. pub fn mutate_weight(&mut self, connection: Connection, rng: &mut impl Rng) { let rate = self.mutation_settings.weight_mutation_amount; @@ -394,50 +411,86 @@ impl NeuralNetwork { loc } - /// Remove a connection and any hanging neurons caused by the deletion + /// Remove a connection and indicate whether the destination neuron became hanging /// (with the exception of output layer neurons). - /// Returns whether it removed a hanging neuron. - pub fn remove_connection(&mut self, connection: Connection) -> bool { + /// Returns `true` if the destination neuron has input_count == 0 and should be removed. + /// Callers must handle the removal of the destination neuron if needed. + pub fn remove_connection_raw(&mut self, connection: Connection) -> bool { let a = self.get_neuron_mut(connection.from); - a.outputs - .remove(&connection.to) - .expect("invalid connection"); + if a.outputs.remove(&connection.to).is_none() { + panic!("invalid connection"); + } let b = self.get_neuron_mut(connection.to); + + // if the invariants held at the beginning of the call, + // this should never underflow. b.input_count -= 1; - if connection.to.is_hidden() && b.input_count == 0 { - // hanging neuron that must be deleted. - self.remove_neuron(connection.to); + // signal removal + connection.to.is_hidden() && b.input_count == 0 + } + + /// Remove a connection and downshift all connection indices to compensate for it. + /// This will also deal with hanging neurons iteratively to avoid recursion that + /// can invalidate stored indices during nested deletions. + /// This method is preferable to [`remove_connection_raw`][NeuralNetwork::remove_connection_raw] for a majority of usecases, + /// as it preserves the invariants of the neural network. + pub fn remove_connection(&mut self, conn: Connection) -> bool { + if self.remove_connection_raw(conn) { + self.remove_neuron(conn.to); return true; } - false } /// Remove a neuron and downshift all connection indices to compensate for it. - /// This will also deal with hanging neurons and such. + /// This will also deal with hanging neurons iteratively to avoid recursion that + /// can invalidate stored indices during nested deletions. 
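+    /// Only hidden neurons can be removed; passing an input or output
+    /// location panics. A minimal sketch:
+    ///
+    /// ```ignore
+    /// net.remove_neuron(NeuronLocation::Hidden(0));
+    /// ```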
pub fn remove_neuron(&mut self, loc: NeuronLocation) { if !loc.is_hidden() { panic!("cannot remove neurons in input or output layer"); } - let n = self.get_neuron(loc); - let locs: Vec<_> = n.outputs.keys().cloned().collect(); - for loc2 in locs { - self.remove_connection(Connection { - from: loc, - to: loc2, - }); - } + let mut work = VecDeque::new(); + work.push_back(loc); - let i = loc.unwrap(); - self.hidden_layers.remove(i); + while let Some(cur_loc) = work.pop_front() { + // if the neuron was already removed due to earlier deletions, skip. + // i don't think it realistically should ever happen, but just in case. + if !self.neuron_exists(cur_loc) { + continue; + } + + let outputs = { + let n = self.get_neuron(cur_loc); + n.outputs.keys().cloned().collect::>() + }; + + for target in outputs { + if self.remove_connection_raw(Connection { + from: cur_loc, + to: target, + }) { + // target became hanging; schedule it for removal. + work.push_back(target); + } + } + + // Re-check that the neuron still exists and is hidden before removing. + if !self.neuron_exists(cur_loc) || !cur_loc.is_hidden() { + continue; + } - self.downshift_connections(i); + let i = cur_loc.unwrap(); + if i < self.hidden_layers.len() { + self.hidden_layers.remove(i); + self.downshift_connections(i, &mut work); // O(n^2) bad, but we can optimize later if it's a problem. + } + } } - fn downshift_connections(&mut self, i: usize) { + fn downshift_connections(&mut self, i: usize, work: &mut VecDeque) { self.input_layer .par_iter_mut() .for_each(|n| n.downshift_outputs(i)); @@ -445,11 +498,16 @@ impl NeuralNetwork { self.hidden_layers .par_iter_mut() .for_each(|n| n.downshift_outputs(i)); + + work.par_iter_mut().for_each(|loc| match loc { + NeuronLocation::Hidden(j) if *j > i => *j -= 1, + _ => {} + }); } - // TODO maybe more parallelism and pass Connection info. + // TODO maybe pass Connection info. /// Runs the `callback` on the weights of the neural network in parallel, allowing it to modify weight values. - pub fn map_weights(&mut self, callback: impl Fn(&mut f32) + Sync) { + pub fn mutate_weights(&mut self, callback: impl Fn(&mut f32) + Sync) { for n in &mut self.input_layer { n.outputs.par_iter_mut().for_each(|(_, w)| callback(w)); } @@ -459,6 +517,13 @@ impl NeuralNetwork { } } + /// Runs the `callback` on the neurons of the neural network in parallel, allowing it to modify neuron values. 
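+    /// For example, a sketch that shifts every bias by a constant:
+    ///
+    /// ```ignore
+    /// net.mutate_neurons(|n| n.bias += 0.1);
+    /// ```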
+ pub fn mutate_neurons(&mut self, callback: impl Fn(&mut Neuron) + Sync) { + self.input_layer.par_iter_mut().for_each(|n| callback(n)); + self.hidden_layers.par_iter_mut().for_each(|n| callback(n)); + self.output_layer.par_iter_mut().for_each(|n| callback(n)); + } + fn clear_input_counts(&mut self) { self.input_layer .par_iter_mut() @@ -478,45 +543,37 @@ impl RandomlyMutable for NeuralNetwork { // for each type of mutation if rng.random::() <= rate { // split connection - let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); - let n = self.get_neuron(from); - let (to, _) = n.random_output(rng); - - self.split_connection(Connection { from, to }, rng); + // TODO add a setting for max_retries + if let Some(conn) = self.get_random_connection(10, rng) { + self.split_connection(conn, rng); + } } if rng.random::() <= rate { // add connection - let weight = rng.random::(); - - let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); - let to = self.random_location_in_scope(rng, !NeuronScope::INPUT); - - let mut connection = Connection { from, to }; - while !self.add_connection(connection, weight) { - let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); - let to = self.random_location_in_scope(rng, !NeuronScope::INPUT); - connection = Connection { from, to }; - } + self.add_random_connection(10, rng); } if rng.random::() <= rate { // remove connection - - let from = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); - let a = self.get_neuron(from); - let (to, _) = a.random_output(rng); - - self.remove_connection(Connection { from, to }); + self.remove_random_connection(10, rng); } - self.map_weights(|w| { + self.mutate_weights(|w| { let mut rng = rand::rng(); if rng.random::() <= rate { *w += rng.random_range(-rate..rate); } }); + + self.mutate_neurons(|n| { + let mut rng = rand::rng(); + + if rng.random::() <= rate { + n.bias += rng.random_range(-rate..rate); + } + }) } } diff --git a/src/tests.rs b/src/tests.rs index 8d69bc2..8a3db69 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,6 +1,9 @@ use std::collections::HashMap; -use crate::{activation::{NeuronScope, builtin::linear_activation}, *}; +use crate::{ + activation::{builtin::linear_activation, NeuronScope}, + *, +}; use genetic_rs::prelude::rand::{rngs::StdRng, SeedableRng}; use rayon::prelude::*; @@ -221,7 +224,10 @@ fn remove_connection() { let mut rng = StdRng::seed_from_u64(0xabcdef); let mut net = NeuralNetwork { input_layer: [Neuron::new_with_activation( - HashMap::from([(NeuronLocation::Output(0), 0.1), (NeuronLocation::Hidden(0), 1.0)]), + HashMap::from([ + (NeuronLocation::Output(0), 0.1), + (NeuronLocation::Hidden(0), 1.0), + ]), activation_fn!(linear_activation), &mut rng, )], @@ -241,38 +247,55 @@ fn remove_connection() { }; assert_network_invariants(&net); - assert!(!net.remove_connection(Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Output(0)})); + assert!(!net.remove_connection(Connection { + from: NeuronLocation::Input(0), + to: NeuronLocation::Output(0) + })); assert_network_invariants(&net); - assert!(net.remove_connection(Connection { from: NeuronLocation::Input(0), to: NeuronLocation::Hidden(0)})); + assert!(net.remove_connection(Connection { + from: NeuronLocation::Input(0), + to: NeuronLocation::Hidden(0) + })); assert_network_invariants(&net); - + rng_test(|rng| { let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); assert_network_invariants(&net); - for _ in 0..50 { + for _ in 0..70 { net.add_random_connection(10, rng); 
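+            // every random insertion must leave the graph acyclic with
+            // consistent input counts; the check below enforces that.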
assert_network_invariants(&net); - net.remove_random_connection(5, rng); - assert_network_invariants(&net); + if rng.random_bool(0.25) { + // rng allows network to form more complex edge cases. + net.remove_random_connection(5, rng); + // don't need to remove neuron since this + // method handles it automatically. + assert_network_invariants(&net); + } } }); } // TODO will use this once we have all the individual functions tested -// const NUM_MUTATIONS: usize = 1000; -// const MUTATION_RATE: f32 = 0.25; -// #[test] -// fn mutate() { -// rng_test(|rng| { -// let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); -// assert_network_invariants(&net); - -// for _ in 0..NUM_MUTATIONS { -// net.mutate(MUTATION_RATE, rng); -// assert_network_invariants(&net); -// } -// }); -// } +const NUM_MUTATIONS: usize = 50; +const MUTATION_RATE: f32 = 0.25; +#[test] +fn mutate() { + rng_test(|rng| { + let mut net = NeuralNetwork::<10, 10>::new( + MutationSettings { + mutation_rate: MUTATION_RATE, + ..Default::default() + }, + rng, + ); + assert_network_invariants(&net); + + for _ in 0..NUM_MUTATIONS { + net.mutate(MUTATION_RATE, rng); + assert_network_invariants(&net); + } + }); +} From f5b4d5f36648267130fda406c6426ce8cdbb310a Mon Sep 17 00:00:00 2001 From: HyperCodec Date: Sat, 31 Jan 2026 16:39:10 -0500 Subject: [PATCH 36/60] implement activation mutation --- src/neuralnet.rs | 44 +++++++++++++++++++++++++++++++++++++++----- src/tests.rs | 6 +----- 2 files changed, 40 insertions(+), 10 deletions(-) diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 88aa0c4..90fdb72 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -524,6 +524,29 @@ impl NeuralNetwork { self.output_layer.par_iter_mut().for_each(|n| callback(n)); } + /// Mutates the activation functions of the neurons in the neural network. + pub fn mutate_activations(&mut self, rate: f32) { + let reg = ACTIVATION_REGISTRY.read().unwrap(); + self.input_layer.par_iter_mut().for_each(|n| { + let mut rng = rand::rng(); + if rng.random_bool(rate as f64) { + n.mutate_activation(®.activations_in_scope(NeuronScope::INPUT), &mut rng); + } + }); + self.hidden_layers.par_iter_mut().for_each(|n| { + let mut rng = rand::rng(); + if rng.random_bool(rate as f64) { + n.mutate_activation(®.activations_in_scope(NeuronScope::HIDDEN), &mut rng); + } + }); + self.output_layer.par_iter_mut().for_each(|n| { + let mut rng = rand::rng(); + if rng.random_bool(rate as f64) { + n.mutate_activation(®.activations_in_scope(NeuronScope::OUTPUT), &mut rng); + } + }); + } + fn clear_input_counts(&mut self) { self.input_layer .par_iter_mut() @@ -541,6 +564,8 @@ impl RandomlyMutable for NeuralNetwork { fn mutate(&mut self, rate: f32, rng: &mut impl Rng) { // TODO maybe allow specifying probability // for each type of mutation + + // graph mutations if rng.random::() <= rate { // split connection // TODO add a setting for max_retries @@ -559,6 +584,9 @@ impl RandomlyMutable for NeuralNetwork { self.remove_random_connection(10, rng); } + // internal mutations + self.mutate_activations(rate); + self.mutate_weights(|w| { let mut rng = rand::rng(); @@ -664,12 +692,9 @@ impl Neuron { /// Takes a collection of activation functions and chooses a random one from them to use. pub fn new_with_activations( outputs: HashMap, - activations: impl IntoIterator, + activations: &[ActivationFn], rng: &mut impl Rng, ) -> Self { - // TODO get random in iterator form - let mut activations: Vec<_> = activations.into_iter().collect(); - // TODO maybe Result instead. 
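+        // e.g. `Neuron::new_with_activations(HashMap::new(), &[], rng)`
+        // (a hypothetical call, for illustration) would hit this panic.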
if activations.is_empty() { panic!("Empty activations list provided"); @@ -677,7 +702,7 @@ impl Neuron { Self::new_with_activation( outputs, - activations.remove(rng.random_range(0..activations.len())), + activations[rng.random_range(0..activations.len())].clone(), rng, ) } @@ -726,6 +751,15 @@ impl Neuron { self.outputs .retain(|loc, _| output_exists(*loc, hidden_len, output_len)); } + + /// Replaces the activation function with a random one. + pub fn mutate_activation(&mut self, activations: &[ActivationFn], rng: &mut impl Rng) { + if activations.is_empty() { + panic!("Empty activations list provided"); + } + + self.activation_fn = activations[rng.random_range(0..activations.len())].clone(); + } } /// A pseudo-pointer of sorts that is used for caching. diff --git a/src/tests.rs b/src/tests.rs index 8a3db69..488f451 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,9 +1,6 @@ use std::collections::HashMap; -use crate::{ - activation::{builtin::linear_activation, NeuronScope}, - *, -}; +use crate::{activation::builtin::linear_activation, *}; use genetic_rs::prelude::rand::{rngs::StdRng, SeedableRng}; use rayon::prelude::*; @@ -278,7 +275,6 @@ fn remove_connection() { }); } -// TODO will use this once we have all the individual functions tested const NUM_MUTATIONS: usize = 50; const MUTATION_RATE: f32 = 0.25; #[test] From cd0f97eb0c07a62f9e775ed4fe2cdbd3b0e67a6c Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Fri, 6 Feb 2026 13:23:13 +0000 Subject: [PATCH 37/60] use genetic-rs context api for mutation settings --- Cargo.lock | 132 +++++++++++++++++++++++++++++++++++++---------- Cargo.toml | 2 +- src/neuralnet.rs | 125 ++++++++++++++++++++++++++------------------ 3 files changed, 182 insertions(+), 77 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8d66b01..78158ca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16,15 +16,15 @@ checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -41,21 +41,55 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.19" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "darling" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" +dependencies = [ + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" 
+version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" +dependencies = [ + "darling_core", + "quote", + "syn", +] [[package]] name = "either" -version = "1.9.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "genetic-rs" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5047d738fba12c89533a4321c98a5ebb3ef180364244fd712d3f28893c55fcb" +checksum = "ba4095966caf1ba9e16f0b3a6b3c58468ce21d3fd4beccf207f141fc325e0802" dependencies = [ "genetic-rs-common", "genetic-rs-macros", @@ -63,20 +97,22 @@ dependencies = [ [[package]] name = "genetic-rs-common" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b143ce3476694e634396a5583a3863dab8055c05465b1be95398fcb4eca6cc" +checksum = "49d7c66e226c1c506c3948d1bb799b59141a8b388d7188c2091ef1c69a2aaeba" dependencies = [ + "itertools", "rand", "rayon", ] [[package]] name = "genetic-rs-macros" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b7f7b4257f21a5904db4fda2160ad129762dfcf1ff997c2ef21387dd8565850" +checksum = "a5a20679fa28498b37ba820d1fdf1c7d948b5fd47333608a3e336dd63a7c12c5" dependencies = [ + "darling", "genetic-rs-common", "proc-macro2", "quote", @@ -95,6 +131,21 @@ dependencies = [ "wasip2", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.14" @@ -109,9 +160,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" [[package]] name = "memchr" @@ -136,24 +187,27 @@ dependencies = [ [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] [[package]] name = "proc-macro2" -version = "1.0.91" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "307e3004becf10f5a6e0d59d20f3cd28231b0e0827a96cd3e0ce6d14bc1e4bb3" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.35" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = 
"21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" dependencies = [ "proc-macro2", ] @@ -271,11 +325,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + [[package]] name = "syn" -version = "2.0.89" +version = "2.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" dependencies = [ "proc-macro2", "quote", @@ -284,9 +344,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "wasip2" @@ -303,6 +363,26 @@ version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +[[package]] +name = "zerocopy" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "zmij" version = "1.0.14" diff --git a/Cargo.toml b/Cargo.toml index 5303f49..02daff0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,7 +25,7 @@ serde = ["dep:serde", "dep:serde-big-array"] [dependencies] atomic_float = "1.1.0" bitflags = "2.10.0" -genetic-rs = { version = "1.0.0", features = ["rayon", "derive"] } +genetic-rs = { version = "1.1.0", features = ["rayon", "derive"] } lazy_static = "1.5.0" rayon = "1.11.0" replace_with = "0.1.8" diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 90fdb72..4520b3d 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -24,31 +24,6 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "serde")] use serde_big_array::BigArray; -/// The mutation settings for [`NeuralNetwork`]. -/// Does not affect [`NeuralNetwork::mutate`], only [`NeuralNetwork::divide`] and [`NeuralNetwork::crossover`]. -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone, PartialEq)] -pub struct MutationSettings { - /// The chance of each mutation type to occur. - pub mutation_rate: f32, - - /// The number of times to try to mutate the network. - pub mutation_passes: usize, - - /// The maximum amount that the weights will be mutated by. - pub weight_mutation_amount: f32, -} - -impl Default for MutationSettings { - fn default() -> Self { - Self { - mutation_rate: 0.01, - mutation_passes: 3, - weight_mutation_amount: 0.5, - } - } -} - /// An abstract neural network type with `I` input neurons and `O` output neurons. /// Hidden neurons are not organized into layers, but rather float and link freely /// (or at least in any way that doesn't cause a cyclic dependency). @@ -67,15 +42,12 @@ pub struct NeuralNetwork { /// The output layer of neurons. Their values will be returned from [`NeuralNetwork::predict`]. 
#[cfg_attr(feature = "serde", serde(with = "BigArray"))] pub output_layer: [Neuron; O], - - /// The mutation settings for the network. - pub mutation_settings: MutationSettings, } impl NeuralNetwork { // TODO option to set default output layer activations /// Creates a new random neural network with the given settings. - pub fn new(mutation_settings: MutationSettings, rng: &mut impl rand::Rng) -> Self { + pub fn new(rng: &mut impl rand::Rng) -> Self { let mut output_layer = Vec::with_capacity(O); for _ in 0..O { @@ -119,7 +91,6 @@ impl NeuralNetwork { input_layer, hidden_layers: vec![], output_layer, - mutation_settings, } } @@ -306,8 +277,7 @@ impl NeuralNetwork { let b = self.random_location_in_scope(rng, !NeuronScope::INPUT); let conn = Connection { from: a, to: b }; - let rate = self.mutation_settings.mutation_rate; - if self.add_connection(conn, rng.random_range(-rate..rate)) { + if self.add_connection(conn, rng.random()) { return Some(conn); } } @@ -362,10 +332,9 @@ impl NeuralNetwork { } /// Mutates a connection's weight. - pub fn mutate_weight(&mut self, connection: Connection, rng: &mut impl Rng) { - let rate = self.mutation_settings.weight_mutation_amount; + pub fn mutate_weight(&mut self, connection: Connection, amount: f32, rng: &mut impl Rng) { let n = self.get_neuron_mut(connection.from); - n.mutate_weight(connection.to, rate, rng).unwrap(); + n.mutate_weight(connection.to, amount, rng).unwrap(); } /// Get a random valid location within the network. @@ -560,8 +529,43 @@ impl NeuralNetwork { } } +/// The mutation settings for [`NeuralNetwork`]. +/// Does not affect [`NeuralNetwork::mutate`], only [`NeuralNetwork::divide`] and [`NeuralNetwork::crossover`]. +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, Clone, PartialEq)] +pub struct MutationSettings { + /// The chance of each mutation type to occur. + pub mutation_rate: f32, + + /// The maximum amount that the weights will be mutated by in one mutation pass. + pub weight_mutation_amount: f32, + + /// The maximum amount that biases will be mutated by in one mutation pass. + pub bias_mutation_amount: f32, + + /// The maximum number of retries for adding connections. + pub max_add_retries: usize, + + /// The maximum number of retries for removing connections. 
+ pub max_remove_retries: usize, +} + +impl Default for MutationSettings { + fn default() -> Self { + Self { + mutation_rate: 0.01, + weight_mutation_amount: 0.5, + bias_mutation_amount: 0.5, + max_add_retries: 10, + max_remove_retries: 10, + } + } +} + impl RandomlyMutable for NeuralNetwork { - fn mutate(&mut self, rate: f32, rng: &mut impl Rng) { + type Context = MutationSettings; + + fn mutate(&mut self, settings: &MutationSettings, rate: f32, rng: &mut impl Rng) { // TODO maybe allow specifying probability // for each type of mutation @@ -576,49 +580,70 @@ impl RandomlyMutable for NeuralNetwork { if rng.random::() <= rate { // add connection - self.add_random_connection(10, rng); + self.add_random_connection(settings.max_add_retries, rng); } if rng.random::() <= rate { // remove connection - self.remove_random_connection(10, rng); + self.remove_random_connection(settings.max_remove_retries, rng); } // internal mutations self.mutate_activations(rate); - self.mutate_weights(|w| { + self.map_weights(|w| { let mut rng = rand::rng(); if rng.random::() <= rate { - *w += rng.random_range(-rate..rate); + let amount = settings.weight_mutation_amount; + *w += rng.random_range(-amount..amount); } }); + } +} - self.mutate_neurons(|n| { - let mut rng = rand::rng(); +/// The settings used for `NeuralNetwork` mitosis. +#[derive(Debug, Clone, PartialEq)] +pub struct MitosisSettings { + /// The mutation settings to use during mitosis. + pub mutation_settings: MutationSettings, - if rng.random::() <= rate { - n.bias += rng.random_range(-rate..rate); - } - }) + /// The number of times to apply mutation during mitosis. + pub mutation_passes: usize, +} + +impl Default for MitosisSettings { + fn default() -> Self { + Self { + mutation_settings: MutationSettings::default(), + mutation_passes: 3, + } } } impl Mitosis for NeuralNetwork { - fn divide(&self, rate: f32, rng: &mut impl prelude::Rng) -> Self { + type Context = MitosisSettings; + + fn divide(&self, settings: &MitosisSettings, rate: f32, rng: &mut impl prelude::Rng) -> Self { let mut child = self.clone(); - for _ in 0..self.mutation_settings.mutation_passes { - child.mutate(rate, rng); + for _ in 0..settings.mutation_passes { + child.mutate(&settings.mutation_settings, rate, rng); } child } } +pub struct CrossoverSettings { + pub mutation_settings: MutationSettings, + pub mutation_passes: usize, +} + impl Crossover for NeuralNetwork { - fn crossover(&self, other: &Self, rate: f32, rng: &mut impl prelude::Rng) -> Self { + type Context = CrossoverSettings; + + fn crossover(&self, other: &Self, settings: &CrossoverSettings, rate: f32, rng: &mut impl prelude::Rng) -> Self { todo!() } } From 03eb490f9f682d811d17c31b8b5bb7424e318849 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Fri, 6 Feb 2026 13:23:32 +0000 Subject: [PATCH 38/60] add missing unstaged changes --- src/neuralnet.rs | 29 ++++++++++++++++++----------- src/tests.rs | 22 ++++++++-------------- 2 files changed, 26 insertions(+), 25 deletions(-) diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 4520b3d..9b1da9e 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -548,6 +548,9 @@ pub struct MutationSettings { /// The maximum number of retries for removing connections. pub max_remove_retries: usize, + + /// The maximum number of retries for splitting connections. 
+ pub max_split_retries: usize, } impl Default for MutationSettings { @@ -558,6 +561,7 @@ impl Default for MutationSettings { bias_mutation_amount: 0.5, max_add_retries: 10, max_remove_retries: 10, + max_split_retries: 10, } } } @@ -573,7 +577,7 @@ impl RandomlyMutable for NeuralNetwork { if rng.random::() <= rate { // split connection // TODO add a setting for max_retries - if let Some(conn) = self.get_random_connection(10, rng) { + if let Some(conn) = self.get_random_connection(settings.max_split_retries, rng) { self.split_connection(conn, rng); } } @@ -591,7 +595,7 @@ impl RandomlyMutable for NeuralNetwork { // internal mutations self.mutate_activations(rate); - self.map_weights(|w| { + self.mutate_weights(|w| { let mut rng = rand::rng(); if rng.random::() <= rate { @@ -602,17 +606,17 @@ impl RandomlyMutable for NeuralNetwork { } } -/// The settings used for `NeuralNetwork` mitosis. +/// The settings used for [`NeuralNetwork`] reproduction. #[derive(Debug, Clone, PartialEq)] -pub struct MitosisSettings { - /// The mutation settings to use during mitosis. +pub struct ReproductionSettings { + /// The mutation settings to use during reproduction. pub mutation_settings: MutationSettings, - /// The number of times to apply mutation during mitosis. + /// The number of times to apply mutation during reproduction. pub mutation_passes: usize, } -impl Default for MitosisSettings { +impl Default for ReproductionSettings { fn default() -> Self { Self { mutation_settings: MutationSettings::default(), @@ -622,9 +626,9 @@ impl Default for MitosisSettings { } impl Mitosis for NeuralNetwork { - type Context = MitosisSettings; + type Context = ReproductionSettings; - fn divide(&self, settings: &MitosisSettings, rate: f32, rng: &mut impl prelude::Rng) -> Self { + fn divide(&self, settings: &ReproductionSettings, rate: f32, rng: &mut impl prelude::Rng) -> Self { let mut child = self.clone(); for _ in 0..settings.mutation_passes { @@ -635,9 +639,12 @@ impl Mitosis for NeuralNetwork { } } +/// The settings used for [`NeuralNetwork`] crossover. pub struct CrossoverSettings { - pub mutation_settings: MutationSettings, - pub mutation_passes: usize, + /// The reproduction settings to use during crossover, which will be applied to the child after crossover. + pub reproduction_settings: ReproductionSettings, + + // TODO other crossover settings. 
} impl Crossover for NeuralNetwork { diff --git a/src/tests.rs b/src/tests.rs index 488f451..a93d9c0 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -114,7 +114,7 @@ fn rng_test(test: impl Fn(&mut StdRng) + Sync) { #[test] fn create_network() { rng_test(|rng| { - let net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); + let net = NeuralNetwork::<10, 10>::new(rng); assert_network_invariants(&net); }); } @@ -124,7 +124,7 @@ fn split_connection() { // rng doesn't matter here since it's just adding bias in eval let mut rng = StdRng::seed_from_u64(0xabcdef); - let mut net = NeuralNetwork::<1, 1>::new(MutationSettings::default(), &mut rng); + let mut net = NeuralNetwork::<1, 1>::new(&mut rng); assert_network_invariants(&net); net.split_connection( @@ -161,7 +161,6 @@ fn add_connection() { activation_fn!(linear_activation), &mut rng, )], - mutation_settings: MutationSettings::default(), }; assert_network_invariants(&net); @@ -207,7 +206,7 @@ fn add_connection() { // random stress testing rng_test(|rng| { - let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); + let mut net = NeuralNetwork::<10, 10>::new(rng); assert_network_invariants(&net); for _ in 0..50 { net.add_random_connection(10, rng); @@ -240,7 +239,6 @@ fn remove_connection() { bias: 0.0, activation_fn: activation_fn!(linear_activation), }], - mutation_settings: MutationSettings::default(), }; assert_network_invariants(&net); @@ -257,7 +255,7 @@ fn remove_connection() { assert_network_invariants(&net); rng_test(|rng| { - let mut net = NeuralNetwork::<10, 10>::new(MutationSettings::default(), rng); + let mut net = NeuralNetwork::<10, 10>::new(rng); assert_network_invariants(&net); for _ in 0..70 { @@ -280,17 +278,13 @@ const MUTATION_RATE: f32 = 0.25; #[test] fn mutate() { rng_test(|rng| { - let mut net = NeuralNetwork::<10, 10>::new( - MutationSettings { - mutation_rate: MUTATION_RATE, - ..Default::default() - }, - rng, - ); + let mut net = NeuralNetwork::<10, 10>::new(rng); assert_network_invariants(&net); + let settings = MutationSettings::default(); + for _ in 0..NUM_MUTATIONS { - net.mutate(MUTATION_RATE, rng); + net.mutate(&settings, MUTATION_RATE, rng); assert_network_invariants(&net); } }); From 7659e64d82b496fe9c7cfc1d604f0dcb014d6bdb Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Fri, 6 Feb 2026 17:15:05 +0000 Subject: [PATCH 39/60] implement basic crossover merge (unfinished) --- Cargo.toml | 2 +- src/neuralnet.rs | 103 ++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 98 insertions(+), 7 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 02daff0..50a9b9f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,7 +25,7 @@ serde = ["dep:serde", "dep:serde-big-array"] [dependencies] atomic_float = "1.1.0" bitflags = "2.10.0" -genetic-rs = { version = "1.1.0", features = ["rayon", "derive"] } +genetic-rs = { version = "1.1.0", features = ["rayon"] } lazy_static = "1.5.0" rayon = "1.11.0" replace_with = "0.1.8" diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 9b1da9e..34c7604 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -516,6 +516,35 @@ impl NeuralNetwork { }); } + /// Recounts inputs for all neurons in the network + /// and removes any invalid connections. 
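+    /// A sketch of the repair sequence crossover uses below:
+    ///
+    /// ```ignore
+    /// child.reset_input_counts();
+    /// child.prune_hanging_neurons();
+    /// ```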
+ pub fn reset_input_counts(&mut self) { + self.clear_input_counts(); + + for i in 0..I { + self.reset_inputs_for_neuron(NeuronLocation::Input(i)); + } + + for i in 0..self.hidden_layers.len() { + self.reset_inputs_for_neuron(NeuronLocation::Hidden(i)); + } + } + + fn reset_inputs_for_neuron(&mut self, loc: NeuronLocation) { + let outputs = self.get_neuron(loc).outputs.keys().cloned().collect::>(); + let outputs2 = outputs.into_iter().filter(|&loc| { + if !self.neuron_exists(loc) { + return false; + } + + let target = self.get_neuron_mut(loc); + target.input_count += 1; + true + }).collect::>(); + + self.get_neuron_mut(loc).outputs.retain(|loc, _| outputs2.contains(loc)); + } + fn clear_input_counts(&mut self) { self.input_layer .par_iter_mut() @@ -527,6 +556,15 @@ impl NeuralNetwork { .par_iter_mut() .for_each(|n| n.input_count = 0); } + + /// Iterates over the network and removes any hanging neurons in the hidden layer(s). + pub fn prune_hanging_neurons(&mut self) { + for i in 0..self.hidden_layers.len() { + if self.hidden_layers[i].input_count == 0 { + self.remove_neuron(NeuronLocation::Hidden(i)); + } + } + } } /// The mutation settings for [`NeuralNetwork`]. @@ -610,7 +648,7 @@ impl RandomlyMutable for NeuralNetwork { #[derive(Debug, Clone, PartialEq)] pub struct ReproductionSettings { /// The mutation settings to use during reproduction. - pub mutation_settings: MutationSettings, + pub mutation: MutationSettings, /// The number of times to apply mutation during reproduction. pub mutation_passes: usize, @@ -619,7 +657,7 @@ pub struct ReproductionSettings { impl Default for ReproductionSettings { fn default() -> Self { Self { - mutation_settings: MutationSettings::default(), + mutation: MutationSettings::default(), mutation_passes: 3, } } @@ -632,7 +670,7 @@ impl Mitosis for NeuralNetwork { let mut child = self.clone(); for _ in 0..settings.mutation_passes { - child.mutate(&settings.mutation_settings, rate, rng); + child.mutate(&settings.mutation, rate, rng); } child @@ -640,9 +678,10 @@ impl Mitosis for NeuralNetwork { } /// The settings used for [`NeuralNetwork`] crossover. +#[derive(Debug, Default, Clone, PartialEq)] pub struct CrossoverSettings { /// The reproduction settings to use during crossover, which will be applied to the child after crossover. - pub reproduction_settings: ReproductionSettings, + pub repr: ReproductionSettings, // TODO other crossover settings. } @@ -650,8 +689,60 @@ pub struct CrossoverSettings { impl Crossover for NeuralNetwork { type Context = CrossoverSettings; - fn crossover(&self, other: &Self, settings: &CrossoverSettings, rate: f32, rng: &mut impl prelude::Rng) -> Self { - todo!() + fn crossover(&self, other: &Self, settings: &CrossoverSettings, rate: f32, rng: &mut impl rand::Rng) -> Self { + // merge (temporarily breaking invariants) and then resolve invariants. 
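+        // each input/output slot is taken from either parent with probability
+        // 1/2 and the hidden layers are merged index-by-index; stale
+        // references and hanging neurons get repaired afterwards.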
+ let mut child = NeuralNetwork { + input_layer: self.input_layer.clone(), + hidden_layers: vec![], + output_layer: self.output_layer.clone(), + }; + + for i in 0..I { + if rng.random_bool(0.5) { + child.input_layer[i] = other.input_layer[i].clone(); + } + } + + for i in 0..O { + if rng.random_bool(0.5) { + child.output_layer[i] = other.output_layer[i].clone(); + } + } + + let larger; + let smaller; + if self.hidden_layers.len() >= other.hidden_layers.len() { + larger = &self.hidden_layers; + smaller = &other.hidden_layers; + } else { + larger = &other.hidden_layers; + smaller = &self.hidden_layers; + } + + for i in 0..larger.len() { + if i < smaller.len() { + if rng.random_bool(0.5) { + child.hidden_layers.push(smaller[i].clone()); + } else { + child.hidden_layers.push(larger[i].clone()); + } + continue; + } + + // larger is the only one with spare neurons, add them. + child.hidden_layers.push(larger[i].clone()); + } + + // resolve invariants + child.reset_input_counts(); + // TODO cycle invariant + child.prune_hanging_neurons(); + + for _ in 0..settings.repr.mutation_passes { + child.mutate(&settings.repr.mutation, rate, rng); + } + + child } } From a2efc87db4b7f4675b1eab7159a29dd461e40904 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:43:47 +0000 Subject: [PATCH 40/60] finish crossover implementation (invariant fixing) --- errors.txt | 4 +++ src/neuralnet.rs | 71 +++++++++++++++++++++++++++++++++++++++++++----- src/tests.rs | 26 ++++++++++++++++++ 3 files changed, 94 insertions(+), 7 deletions(-) create mode 100644 errors.txt diff --git a/errors.txt b/errors.txt new file mode 100644 index 0000000..9f261d3 --- /dev/null +++ b/errors.txt @@ -0,0 +1,4 @@ + Compiling neat v0.5.1 (/workspaces/neat) + Finished `test` profile [unoptimized + debuginfo] target(s) in 0.78s + Running unittests src/lib.rs (target/debug/deps/neat-53245cf135e05518) + Doc-tests neat diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 34c7604..383c5d7 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -393,14 +393,15 @@ impl NeuralNetwork { let b = self.get_neuron_mut(connection.to); // if the invariants held at the beginning of the call, - // this should never underflow. - b.input_count -= 1; + // this should never underflow, but some cases like remove_cycles + // may temporarily break invariants. + b.input_count = b.input_count.saturating_sub(1); // signal removal connection.to.is_hidden() && b.input_count == 0 } - /// Remove a connection and downshift all connection indices to compensate for it. + /// Remove a connection from the network. /// This will also deal with hanging neurons iteratively to avoid recursion that /// can invalidate stored indices during nested deletions. /// This method is preferable to [`remove_connection_raw`][NeuralNetwork::remove_connection_raw] for a majority of usecases, @@ -414,16 +415,20 @@ impl NeuralNetwork { } /// Remove a neuron and downshift all connection indices to compensate for it. + /// Returns the number of neurons removed that were under the index of the removed neuron (including itself). /// This will also deal with hanging neurons iteratively to avoid recursion that /// can invalidate stored indices during nested deletions. 
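+    /// A sketch of using the return value to keep an external index valid,
+    /// as `prune_hanging_neurons` below does:
+    ///
+    /// ```ignore
+    /// let removed = net.remove_neuron(NeuronLocation::Hidden(i));
+    /// i = i.saturating_sub(removed);
+    /// ```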
- pub fn remove_neuron(&mut self, loc: NeuronLocation) { + pub fn remove_neuron(&mut self, loc: NeuronLocation) -> usize { if !loc.is_hidden() { panic!("cannot remove neurons in input or output layer"); } + let initial_i = loc.unwrap(); + let mut work = VecDeque::new(); work.push_back(loc); + let mut removed = 0; while let Some(cur_loc) = work.pop_front() { // if the neuron was already removed due to earlier deletions, skip. // i don't think it realistically should ever happen, but just in case. @@ -454,9 +459,14 @@ impl NeuralNetwork { let i = cur_loc.unwrap(); if i < self.hidden_layers.len() { self.hidden_layers.remove(i); + if i <= initial_i { + removed += 1; + } self.downshift_connections(i, &mut work); // O(n^2) bad, but we can optimize later if it's a problem. } } + + removed } fn downshift_connections(&mut self, i: usize, work: &mut VecDeque) { @@ -559,11 +569,58 @@ impl NeuralNetwork { /// Iterates over the network and removes any hanging neurons in the hidden layer(s). pub fn prune_hanging_neurons(&mut self) { - for i in 0..self.hidden_layers.len() { + let mut i = 0; + while i < self.hidden_layers.len() { + let mut new_i = i + 1; if self.hidden_layers[i].input_count == 0 { - self.remove_neuron(NeuronLocation::Hidden(i)); + // this saturating_sub is a code smell but it works and avoids some edge cases where indices can get messed up. + new_i = new_i.saturating_sub(self.remove_neuron(NeuronLocation::Hidden(i))); + } + i = new_i; + } + } + + /// Uses DFS to find and remove all cycles in O(n+e) time. + /// Expects [`prune_hanging_neurons`][NeuralNetwork::prune_hanging_neurons] to be called afterwards + pub fn remove_cycles(&mut self) { + let mut visited = HashMap::new(); + + for i in 0..I { + self.remove_cycles_dfs(&mut visited, None, NeuronLocation::Input(i)); + } + + // unattached cycles (will cause problems since they + // never get deleted by input_count == 0) + for i in 0..self.hidden_layers.len() { + let loc = NeuronLocation::Hidden(i); + if !visited.contains_key(&loc) { + self.remove_cycles_dfs(&mut visited, None, loc); + } + } + } + + // colored dfs + fn remove_cycles_dfs(&mut self, visited: &mut HashMap, prev: Option, current: NeuronLocation) { + if let Some(&existing) = visited.get(¤t) { + if existing == 0 { + // part of current dfs + // prev must exist here since visited would be empty on first call. + // only doing raw here since we recalculate input counts and prune hanging neurons later. 
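+                // `current` is colored 0 (still on the active DFS path), so
+                // prev -> current is a back edge; removing it breaks the cycle.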
+ self.remove_connection_raw(Connection { from: prev.unwrap(), to: current }); } + + // already fully visited, no need to check again + return; + } + + visited.insert(current, 0); + + let outputs = self.get_neuron(current).outputs.keys().cloned().collect::>(); + for loc in outputs { + self.remove_cycles_dfs(visited, Some(current), loc); } + + visited.insert(current, 1); } } @@ -734,8 +791,8 @@ impl Crossover for NeuralNetwork { } // resolve invariants + child.remove_cycles(); child.reset_input_counts(); - // TODO cycle invariant child.prune_hanging_neurons(); for _ in 0..settings.repr.mutation_passes { diff --git a/src/tests.rs b/src/tests.rs index a93d9c0..993c7fc 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -273,6 +273,8 @@ fn remove_connection() { }); } +// TODO remove_neuron test + const NUM_MUTATIONS: usize = 50; const MUTATION_RATE: f32 = 0.25; #[test] @@ -289,3 +291,27 @@ fn mutate() { } }); } + +#[test] +fn crossover() { + rng_test(|rng| { + let mut net1 = NeuralNetwork::<10, 10>::new(rng); + assert_network_invariants(&net1); + + let mut net2 = NeuralNetwork::<10, 10>::new(rng); + assert_network_invariants(&net2); + + let settings = CrossoverSettings::default(); + + for _ in 0..NUM_MUTATIONS { + let a = net1.crossover(&net2, &settings, MUTATION_RATE, rng); + assert_network_invariants(&a); + + let b = net2.crossover(&net1, &settings, MUTATION_RATE, rng); + assert_network_invariants(&b); + + net1 = a; + net2 = b; + } + }); +} \ No newline at end of file From f68c6b7200b463fad37d3fe14975547420893e95 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:50:30 +0000 Subject: [PATCH 41/60] remove accidentally committed temp file --- errors.txt | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 errors.txt diff --git a/errors.txt b/errors.txt deleted file mode 100644 index 9f261d3..0000000 --- a/errors.txt +++ /dev/null @@ -1,4 +0,0 @@ - Compiling neat v0.5.1 (/workspaces/neat) - Finished `test` profile [unoptimized + debuginfo] target(s) in 0.78s - Running unittests src/lib.rs (target/debug/deps/neat-53245cf135e05518) - Doc-tests neat From c0560b06984fcb4c3b59d849fea7f17b3be29899 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:51:47 +0000 Subject: [PATCH 42/60] update example todo messages --- examples/basic.rs | 2 +- examples/extra_dna.rs | 3 --- examples/extra_genes.rs | 3 +++ 3 files changed, 4 insertions(+), 4 deletions(-) delete mode 100644 examples/extra_dna.rs create mode 100644 examples/extra_genes.rs diff --git a/examples/basic.rs b/examples/basic.rs index 85f58cb..bcd5ffb 100644 --- a/examples/basic.rs +++ b/examples/basic.rs @@ -1,3 +1,3 @@ fn main() { - todo!("use NeuralNetwork as the entire DNA"); + todo!("use NeuralNetwork as the entire genome"); } diff --git a/examples/extra_dna.rs b/examples/extra_dna.rs deleted file mode 100644 index 038709f..0000000 --- a/examples/extra_dna.rs +++ /dev/null @@ -1,3 +0,0 @@ -fn main() { - todo!("use AgentDNA with additional params") -} diff --git a/examples/extra_genes.rs b/examples/extra_genes.rs new file mode 100644 index 0000000..f2da976 --- /dev/null +++ b/examples/extra_genes.rs @@ -0,0 +1,3 @@ +fn main() { + todo!("use NeuralNetwork along with other genomes for more complex organisms"); +} From d8f8fe2a4af949176d0b0406600ad2799dd68345 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Mon, 9 Feb 2026 17:37:22 +0000 Subject: [PATCH 43/60] 
publicize activation fn name field --- src/activation.rs | 4 +++- src/tests.rs | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/activation.rs b/src/activation.rs index 7857fae..f61a123 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -152,7 +152,9 @@ pub struct ActivationFn { /// The scope defining where the activation function can appear. pub scope: NeuronScope, - pub(crate) name: &'static str, + + /// The name of the activation function, used for debugging and serialization. + pub name: &'static str, } impl ActivationFn { diff --git a/src/tests.rs b/src/tests.rs index 993c7fc..c329dae 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -314,4 +314,9 @@ fn crossover() { net2 = b; } }); +} + +#[cfg(feature = "serde")] +mod serde { + // TODO } \ No newline at end of file From 01f804b42c9d04367943d8dc4a7da6a195e08f0e Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Mon, 9 Feb 2026 17:53:35 +0000 Subject: [PATCH 44/60] add methods to specify activation registry during mutation --- src/neuralnet.rs | 99 +++++++++++++++++++++++++++--------------------- 1 file changed, 56 insertions(+), 43 deletions(-) diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 383c5d7..9410f54 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -205,12 +205,6 @@ impl NeuralNetwork { self.hidden_layers.push(new_n); } - /// Changes a neuron's activation function to a random one in its scope. - pub fn mutate_activation(&mut self, loc: NeuronLocation, rng: &mut impl Rng) { - let reg = ACTIVATION_REGISTRY.read().unwrap(); - self.get_neuron_mut(loc).activation_fn = reg.random_activation_in_scope(loc.into(), rng); - } - /// Adds a connection but does not check for cyclic linkages. pub fn add_connection_unchecked(&mut self, connection: Connection, weight: f32) { let a = self.get_neuron_mut(connection.from); @@ -484,15 +478,14 @@ impl NeuralNetwork { }); } - // TODO maybe pass Connection info. /// Runs the `callback` on the weights of the neural network in parallel, allowing it to modify weight values. - pub fn mutate_weights(&mut self, callback: impl Fn(&mut f32) + Sync) { + pub fn update_weights(&mut self, callback: impl Fn(&NeuronLocation, &mut f32) + Sync) { for n in &mut self.input_layer { - n.outputs.par_iter_mut().for_each(|(_, w)| callback(w)); + n.outputs.par_iter_mut().for_each(|(loc, w)| callback(loc, w)); } for n in &mut self.hidden_layers { - n.outputs.par_iter_mut().for_each(|(_, w)| callback(w)); + n.outputs.par_iter_mut().for_each(|(loc, w)| callback(loc, w)); } } @@ -506,6 +499,11 @@ impl NeuralNetwork { /// Mutates the activation functions of the neurons in the neural network. pub fn mutate_activations(&mut self, rate: f32) { let reg = ACTIVATION_REGISTRY.read().unwrap(); + self.mutate_activations_with_reg(rate, ®); + } + + /// Mutates the activation functions of the neurons in the neural network, using a provided registry. + pub fn mutate_activations_with_reg(&mut self, rate: f32, reg: &ActivationRegistry) { self.input_layer.par_iter_mut().for_each(|n| { let mut rng = rand::rng(); if rng.random_bool(rate as f64) { @@ -622,6 +620,52 @@ impl NeuralNetwork { visited.insert(current, 1); } + + /// Performs just the mutations that modify the graph structure of the neural network, + /// and not the internal mutations that only modify values such as activation functions, weights, and biases. 
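+    /// A sketch of running the two halves under different rates:
+    ///
+    /// ```ignore
+    /// net.perform_graph_mutations(&settings, 0.05, &mut rng);
+    /// net.perform_internal_mutations(&settings, 0.5);
+    /// ```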
+ pub fn perform_graph_mutations(&mut self, settings: &MutationSettings, rate: f32, rng: &mut impl rand::Rng) { + // TODO maybe allow specifying probability + // for each type of mutation + if rng.random_bool(rate as f64) { + // split connection + if let Some(conn) = self.get_random_connection(settings.max_split_retries, rng) { + self.split_connection(conn, rng); + } + } + + if rng.random_bool(rate as f64) { + // add connection + self.add_random_connection(settings.max_add_retries, rng); + } + + if rng.random_bool(rate as f64) { + // remove connection + self.remove_random_connection(settings.max_remove_retries, rng); + } + } + + /// Performs just the mutations that modify internal values such as activation functions, weights, and biases, + /// and not the graph mutations that modify the structure of the neural network. + pub fn perform_internal_mutations(&mut self, settings: &MutationSettings, rate: f32) { + self.mutate_activations(rate); + self.mutate_weights(settings.weight_mutation_amount); + } + + /// Same as [`mutate`][NeuralNetwork::mutate] but allows specifying a custom activation registry for activation mutations. + pub fn mutate_with_reg(&mut self, settings: &MutationSettings, rate: f32, rng: &mut impl rand::Rng, reg: &ActivationRegistry) { + self.perform_graph_mutations(settings, rate, rng); + self.mutate_activations_with_reg(rate, reg); + self.mutate_weights(settings.weight_mutation_amount); + } + + /// Mutates all weights by a random amount up to `max_amount` in either direction. + pub fn mutate_weights(&mut self, max_amount: f32) { + self.update_weights(|_, w| { + let mut rng = rand::rng(); + let amount = rng.random_range(-max_amount..max_amount); + *w += amount; + }); + } } /// The mutation settings for [`NeuralNetwork`]. @@ -665,39 +709,8 @@ impl RandomlyMutable for NeuralNetwork { type Context = MutationSettings; fn mutate(&mut self, settings: &MutationSettings, rate: f32, rng: &mut impl Rng) { - // TODO maybe allow specifying probability - // for each type of mutation - - // graph mutations - if rng.random::() <= rate { - // split connection - // TODO add a setting for max_retries - if let Some(conn) = self.get_random_connection(settings.max_split_retries, rng) { - self.split_connection(conn, rng); - } - } - - if rng.random::() <= rate { - // add connection - self.add_random_connection(settings.max_add_retries, rng); - } - - if rng.random::() <= rate { - // remove connection - self.remove_random_connection(settings.max_remove_retries, rng); - } - - // internal mutations - self.mutate_activations(rate); - - self.mutate_weights(|w| { - let mut rng = rand::rng(); - - if rng.random::() <= rate { - let amount = settings.weight_mutation_amount; - *w += rng.random_range(-amount..amount); - } - }); + let reg = ACTIVATION_REGISTRY.read().unwrap(); + self.mutate_with_reg(settings, rate, rng, ®); } } From d34221817e9b28f8cfb5cc7f872d90d2bf5d99fd Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Mon, 9 Feb 2026 18:19:46 +0000 Subject: [PATCH 45/60] refactor panicking get_neuron and get_neuron_mut to Index and IndexMut, then implement Option versions in their place. 
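
Callers that want slice-like panicking access now index the network
directly, while fallible lookups go through the Option-returning getters.
A sketch of the difference, using names from the diff below:

    let n = &net[loc];                     // panics on a bad location
    if let Some(n) = net.get_neuron(loc) { // returns None instead
        // ...
    }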
--- src/neuralnet.rs | 83 +++++++++++++++++++++++++++++------------------- src/tests.rs | 4 +-- 2 files changed, 53 insertions(+), 34 deletions(-) diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 9410f54..b95c7df 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -1,9 +1,7 @@ use std::{ - collections::{HashMap, HashSet, VecDeque}, - sync::{ - atomic::{AtomicBool, AtomicUsize, Ordering}, - Arc, - }, + collections::{HashMap, HashSet, VecDeque}, ops::{Index, IndexMut}, sync::{ + Arc, atomic::{AtomicBool, AtomicUsize, Ordering} + } }; use atomic_float::AtomicF32; @@ -120,7 +118,7 @@ impl NeuralNetwork { rayon::yield_now(); } - let n = self.get_neuron(loc); + let n = &self[loc]; let val = n.activate(cache.get(loc)); n.outputs.par_iter().for_each(|(&loc2, weight)| { @@ -130,11 +128,11 @@ impl NeuralNetwork { } /// Get a neuron at the specified [`NeuronLocation`]. - pub fn get_neuron(&self, loc: NeuronLocation) -> &Neuron { - match loc { - NeuronLocation::Input(i) => &self.input_layer[i], - NeuronLocation::Hidden(i) => &self.hidden_layers[i], - NeuronLocation::Output(i) => &self.output_layer[i], + pub fn get_neuron(&self, loc: NeuronLocation) -> Option<&Neuron> { + if !self.neuron_exists(loc) { + None + } else { + Some(&self[loc]) } } @@ -148,11 +146,11 @@ impl NeuralNetwork { } /// Get a mutable reference to the neuron at the specified [`NeuronLocation`]. - pub fn get_neuron_mut(&mut self, loc: NeuronLocation) -> &mut Neuron { - match loc { - NeuronLocation::Input(i) => &mut self.input_layer[i], - NeuronLocation::Hidden(i) => &mut self.hidden_layers[i], - NeuronLocation::Output(i) => &mut self.output_layer[i], + pub fn get_neuron_mut(&mut self, loc: NeuronLocation) -> Option<&mut Neuron> { + if !self.neuron_exists(loc) { + None + } else { + Some(&mut self[loc]) } } @@ -177,7 +175,7 @@ impl NeuralNetwork { continue; } - let n = self.get_neuron_mut(loc); + let n = &mut self[loc]; n.input_count += 1; } @@ -190,7 +188,7 @@ impl NeuralNetwork { pub fn split_connection(&mut self, connection: Connection, rng: &mut impl Rng) { let new_loc = NeuronLocation::Hidden(self.hidden_layers.len()); - let a = self.get_neuron_mut(connection.from); + let a = &mut self[connection.from]; let w = a .outputs .remove(&connection.to) @@ -207,10 +205,10 @@ impl NeuralNetwork { /// Adds a connection but does not check for cyclic linkages. pub fn add_connection_unchecked(&mut self, connection: Connection, weight: f32) { - let a = self.get_neuron_mut(connection.from); + let a = &mut self[connection.from]; a.outputs.insert(connection.to, weight); - let b = self.get_neuron_mut(connection.to); + let b = &mut self[connection.to]; b.input_count += 1; } @@ -220,8 +218,7 @@ impl NeuralNetwork { if connection.from.is_output() || connection.to.is_input() || (self.neuron_exists(connection.from) - && self - .get_neuron(connection.from) + && self[connection.from] .outputs .contains_key(&connection.to)) { @@ -236,7 +233,7 @@ impl NeuralNetwork { return false; } - let n = self.get_neuron(current); + let n = &self[current]; for (loc, _) in &n.outputs { if !self.dfs(visited, *loc) { return false; @@ -289,7 +286,7 @@ impl NeuralNetwork { ) -> Option { for _ in 0..max_retries { let a = self.random_location_in_scope(rng, !NeuronScope::OUTPUT); - let an = self.get_neuron(a); + let an = &self[a]; if an.outputs.is_empty() { continue; } @@ -327,7 +324,7 @@ impl NeuralNetwork { /// Mutates a connection's weight. 
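     /// Panics if the connection does not exist in the network.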
pub fn mutate_weight(&mut self, connection: Connection, amount: f32, rng: &mut impl Rng) { - let n = self.get_neuron_mut(connection.from); + let n = &mut self[connection.from]; n.mutate_weight(connection.to, amount, rng).unwrap(); } @@ -379,12 +376,12 @@ impl NeuralNetwork { /// Returns `true` if the destination neuron has input_count == 0 and should be removed. /// Callers must handle the removal of the destination neuron if needed. pub fn remove_connection_raw(&mut self, connection: Connection) -> bool { - let a = self.get_neuron_mut(connection.from); + let a = &mut self[connection.from]; if a.outputs.remove(&connection.to).is_none() { panic!("invalid connection"); } - let b = self.get_neuron_mut(connection.to); + let b = &mut self[connection.to]; // if the invariants held at the beginning of the call, // this should never underflow, but some cases like remove_cycles @@ -431,7 +428,7 @@ impl NeuralNetwork { } let outputs = { - let n = self.get_neuron(cur_loc); + let n = &self[cur_loc]; n.outputs.keys().cloned().collect::>() }; @@ -539,18 +536,18 @@ impl NeuralNetwork { } fn reset_inputs_for_neuron(&mut self, loc: NeuronLocation) { - let outputs = self.get_neuron(loc).outputs.keys().cloned().collect::>(); + let outputs = self[loc].outputs.keys().cloned().collect::>(); let outputs2 = outputs.into_iter().filter(|&loc| { if !self.neuron_exists(loc) { return false; } - let target = self.get_neuron_mut(loc); + let target = &mut self[loc]; target.input_count += 1; true }).collect::>(); - self.get_neuron_mut(loc).outputs.retain(|loc, _| outputs2.contains(loc)); + self[loc].outputs.retain(|loc, _| outputs2.contains(loc)); } fn clear_input_counts(&mut self) { @@ -613,7 +610,7 @@ impl NeuralNetwork { visited.insert(current, 0); - let outputs = self.get_neuron(current).outputs.keys().cloned().collect::>(); + let outputs = self[current].outputs.keys().cloned().collect::>(); for loc in outputs { self.remove_cycles_dfs(visited, Some(current), loc); } @@ -668,6 +665,28 @@ impl NeuralNetwork { } } +impl Index for NeuralNetwork { + type Output = Neuron; + + fn index(&self, loc: NeuronLocation) -> &Self::Output { + match loc { + NeuronLocation::Input(i) => &self.input_layer[i], + NeuronLocation::Hidden(i) => &self.hidden_layers[i], + NeuronLocation::Output(i) => &self.output_layer[i], + } + } +} + +impl IndexMut for NeuralNetwork { + fn index_mut(&mut self, loc: NeuronLocation) -> &mut Self::Output { + match loc { + NeuronLocation::Input(i) => &mut self.input_layer[i], + NeuronLocation::Hidden(i) => &mut self.hidden_layers[i], + NeuronLocation::Output(i) => &mut self.output_layer[i], + } + } +} + /// The mutation settings for [`NeuralNetwork`]. /// Does not affect [`NeuralNetwork::mutate`], only [`NeuralNetwork::divide`] and [`NeuralNetwork::crossover`]. 
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] diff --git a/src/tests.rs b/src/tests.rs index c329dae..5a3252e 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -40,7 +40,7 @@ fn dfs( visited.insert(loc, GraphCheckState::CurrentCycle); - for loc2 in net.get_neuron(loc).outputs.keys() { + for loc2 in net[loc].outputs.keys() { dfs(net, *loc2, visited); } @@ -71,7 +71,7 @@ fn assert_cache_consistency(net: &NeuralNetwork< }; for i in 0..I { - let n = net.get_neuron(NeuronLocation::Input(i)); + let n = &net[NeuronLocation::Input(i)]; for loc in n.outputs.keys() { cache.tally(*loc); } From c3a6e25d29f91cd4dab69e65dde032379a0c5090 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:46:45 +0000 Subject: [PATCH 46/60] fix serde feature --- Cargo.lock | 12 ++++++++++++ Cargo.toml | 2 +- src/activation.rs | 4 ++-- src/neuralnet.rs | 25 ++++++++++++++++++++++++- src/tests.rs | 19 ++++++++++++++++++- 5 files changed, 57 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 78158ca..1d675b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -183,6 +183,7 @@ dependencies = [ "serde", "serde-big-array", "serde_json", + "serde_path_to_error", ] [[package]] @@ -325,6 +326,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + [[package]] name = "strsim" version = "0.11.1" diff --git a/Cargo.toml b/Cargo.toml index 50a9b9f..0c5882d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,6 @@ rustdoc-args = ["--cfg", "docsrs"] default = [] serde = ["dep:serde", "dep:serde-big-array"] - [dependencies] atomic_float = "1.1.0" bitflags = "2.10.0" @@ -34,3 +33,4 @@ serde-big-array = { version = "0.5.1", optional = true } [dev-dependencies] serde_json = "1.0.149" +serde_path_to_error = "0.1.20" diff --git a/src/activation.rs b/src/activation.rs index f61a123..9453a05 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -170,7 +170,7 @@ impl ActivationFn { impl fmt::Debug for ActivationFn { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - writeln!(f, "{}", self.name) + write!(f, "{}", self.name) } } @@ -197,7 +197,7 @@ impl<'a> Deserialize<'a> for ActivationFn { let reg = ACTIVATION_REGISTRY.read().unwrap(); - let f = reg.fns.get(&name); + let f = reg.fns.get(name.as_str()); if f.is_none() { panic!("Activation function {name} not found"); diff --git a/src/neuralnet.rs b/src/neuralnet.rs index b95c7df..6ca7bee 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -17,11 +17,33 @@ use crate::{ use rayon::prelude::*; #[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; #[cfg(feature = "serde")] use serde_big_array::BigArray; +#[cfg(feature = "serde")] +mod outputs_serde { + use super::*; + use std::collections::HashMap; + + pub fn serialize(map: &HashMap, serializer: S) -> Result + where + S: Serializer, + { + let vec: Vec<(NeuronLocation, f32)> = map.iter().map(|(k, v)| (*k, *v)).collect(); + vec.serialize(serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + let vec: Vec<(NeuronLocation, f32)> = Vec::deserialize(deserializer)?; + Ok(vec.into_iter().collect()) + } +} + /// An abstract neural network type with `I` input neurons 
and `O` output neurons. /// Hidden neurons are not organized into layers, but rather float and link freely /// (or at least in any way that doesn't cause a cyclic dependency). @@ -863,6 +885,7 @@ pub struct Neuron { pub input_count: usize, /// The connections and weights to other neurons. + #[cfg_attr(feature = "serde", serde(with = "outputs_serde"))] pub outputs: HashMap, /// The initial value of the neuron. diff --git a/src/tests.rs b/src/tests.rs index 5a3252e..e6da044 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -318,5 +318,22 @@ fn crossover() { #[cfg(feature = "serde")] mod serde { - // TODO + use crate::*; + use super::rng_test; + + #[test] + fn full_serde() { + rng_test(|rng| { + let net1 = NeuralNetwork::<10, 10>::new(rng); + + let mut buf = Vec::new(); + let writer = std::io::Cursor::new(&mut buf); + let mut serializer = serde_json::Serializer::new(writer); + + serde_path_to_error::serialize(&net1, &mut serializer).unwrap(); + let serialized = serde_json::to_string(&net1).unwrap(); + let net2: NeuralNetwork<10, 10> = serde_json::from_str(&serialized).unwrap(); + assert_eq!(net1, net2); + }); + } } \ No newline at end of file From 041599b670255b94f12f262fd6164eea52e53859 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Wed, 11 Feb 2026 12:44:38 +0000 Subject: [PATCH 47/60] make basic example --- Cargo.toml | 15 +++++++++ examples/basic.rs | 69 ++++++++++++++++++++++++++++++++++++++++- examples/extra_genes.rs | 17 ++++++++++ src/neuralnet.rs | 62 ++++++++++++++++++++++++------------ 4 files changed, 143 insertions(+), 20 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0c5882d..76929ad 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,6 +17,21 @@ rustdoc-args = ["--cfg", "docsrs"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[[example]] +name = "basic" +path = "examples/basic.rs" +required-features = ["genetic-rs/derive"] + +[[example]] +name = "extra_genes" +path = "examples/extra_genes.rs" +required-features = ["genetic-rs/derive"] + +[[example]] +name = "rps" +path = "examples/rps.rs" +required-features = ["genetic-rs/derive"] + [features] default = [] serde = ["dep:serde", "dep:serde-big-array"] diff --git a/examples/basic.rs b/examples/basic.rs index bcd5ffb..a0fa11a 100644 --- a/examples/basic.rs +++ b/examples/basic.rs @@ -1,3 +1,70 @@ +use neat::*; + +// approximate the to_degrees function, which should be pretty +// hard for the network to learn since it's not really close to -1..1 mapping. +fn fitness(net: &NeuralNetwork<1, 1>) -> f32 { + let mut rng = rand::rng(); + let mut total_fitness = 0.0; + + // it's good practice to test on multiple inputs to get a more accurate fitness score + for _ in 0..100 { + let input = rng.random_range(-10.0..10.0); + let output = net.predict([input])[0]; + let expected_output = input.to_degrees(); + + // basically just using negative error as fitness + total_fitness -= (output - expected_output).abs(); + } + + total_fitness +} + fn main() { - todo!("use NeuralNetwork as the entire genome"); + let mut rng = rand::rng(); + + let mut sim = GeneticSim::new( + Vec::gen_random(&mut rng, 250), + FitnessEliminator::new_with_default(fitness), + CrossoverRepopulator::new(0.25, CrossoverSettings::default()), + ); + + for i in 0..=200 { + sim.next_generation(); + + // sample a genome to print its fitness. + // this value should approach 0 as the generations go on, since the fitness is negative error. 
+ // because of hte way CrossoverEliminator works internally, the parent genomes (i.e. prev generation champs) + // are more likely to be at the start of the genomes vector. + let sample = &sim.genomes[0]; + let fit = fitness(sample); + println!("Gen {i} sample fitness: {fit}"); + } + println!("Training complete, now you can test the network!"); + + let net = &sim.genomes[0]; + println!("Network in use: {:#?}", net); + + loop { + let mut input_text = String::new(); + println!("Enter a number to convert to degrees (or 'exit' to quit): "); + std::io::stdin().read_line(&mut input_text).unwrap(); + let input_text = input_text.trim(); + if input_text.eq_ignore_ascii_case("exit") { + break; + } + let input: f32 = match input_text.parse() { + Ok(num) => num, + Err(_) => { + println!("Invalid input, please enter a valid number."); + continue; + } + }; + + let output = net.predict([input])[0]; + let expected_output = input.to_degrees(); + println!( + "Network output: {}, Expected output: {}", + output, expected_output + ); + } } diff --git a/examples/extra_genes.rs b/examples/extra_genes.rs index f2da976..2081b22 100644 --- a/examples/extra_genes.rs +++ b/examples/extra_genes.rs @@ -1,3 +1,20 @@ +use neat::*; + +struct Genome { + brain: NeuralNetwork<10, 4>, + stats: PhysicalStats, +} + +struct PhysicalStats { + speed: f32, + sight_range: u32, +} + +struct Organism { + genome: Genome, + energy: f32, +} + fn main() { todo!("use NeuralNetwork along with other genomes for more complex organisms"); } diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 6ca7bee..995369e 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -123,7 +123,14 @@ impl NeuralNetwork { .into_par_iter() .for_each(|i| self.eval(NeuronLocation::Input(i), cache.clone())); - cache.output() + let mut outputs = [0.0; O]; + for i in 0..O { + let n = &self.output_layer[i]; + let val = cache.get(NeuronLocation::Output(i)); + outputs[i] = n.activate(val); + } + + outputs } fn eval(&self, loc: NeuronLocation, cache: Arc>) { @@ -398,9 +405,9 @@ impl NeuralNetwork { /// Returns `true` if the destination neuron has input_count == 0 and should be removed. /// Callers must handle the removal of the destination neuron if needed. pub fn remove_connection_raw(&mut self, connection: Connection) -> bool { - let a = &mut self[connection.from]; + let a = self.get_neuron_mut(connection.from).expect("invalid connection.from"); if a.outputs.remove(&connection.to).is_none() { - panic!("invalid connection"); + panic!("invalid connection.to"); } let b = &mut self[connection.to]; @@ -699,6 +706,12 @@ impl Index for NeuralNetwork GenerateRandom for NeuralNetwork { + fn gen_random(rng: &mut impl rand::Rng) -> Self { + Self::new(rng) + } +} + impl IndexMut for NeuralNetwork { fn index_mut(&mut self, loc: NeuronLocation) -> &mut Self::Output { match loc { @@ -1107,11 +1120,11 @@ pub struct NeuralNetCache { impl NeuralNetCache { /// Gets the value of a neuron at the given location. 
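     /// Values are loaded with `Ordering::SeqCst`, so writes from other worker threads are visible.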
- pub fn get(&self, loc: impl AsRef) -> f32 { - match loc.as_ref() { - NeuronLocation::Input(i) => self.input_layer[*i].value.load(Ordering::SeqCst), - NeuronLocation::Hidden(i) => self.hidden_layers[*i].value.load(Ordering::SeqCst), - NeuronLocation::Output(i) => self.output_layer[*i].value.load(Ordering::SeqCst), + pub fn get(&self, loc: NeuronLocation) -> f32 { + match loc { + NeuronLocation::Input(i) => self.input_layer[i].value.load(Ordering::SeqCst), + NeuronLocation::Hidden(i) => self.hidden_layers[i].value.load(Ordering::SeqCst), + NeuronLocation::Output(i) => self.output_layer[i].value.load(Ordering::SeqCst), } } @@ -1159,17 +1172,6 @@ impl NeuralNetCache { } } - /// Fetches and packs the output layer values into an array. - pub fn output(&self) -> [f32; O] { - let output: Vec<_> = self - .output_layer - .par_iter() - .map(|c| c.value.load(Ordering::SeqCst)) - .collect(); - - output.try_into().unwrap() - } - /// Attempts to claim a neuron. Returns false if it has already been claimed. pub fn claim(&self, loc: impl AsRef) -> bool { match loc.as_ref() { @@ -1206,3 +1208,25 @@ impl From<&NeuralNetwork> for NeuralNetCac } } } + +/// A trait for getting the index of the maximum element. +pub trait MaxIndex { + /// Returns the index of the maximum element. + fn max_index(self) -> usize; +} + +impl<'a, T: PartialOrd + 'a, I: Iterator> MaxIndex for I { + fn max_index(self) -> usize { + let mut max_i = 0; + let mut max_v = None; + + for (i, v) in self.enumerate() { + if max_v.is_none() || v > max_v.unwrap() { + max_i = i; + max_v = Some(v); + } + } + + max_i + } +} \ No newline at end of file From fb732a2d38f9685208b51cde9e9e91fd5d94ea77 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Wed, 11 Feb 2026 14:15:27 +0000 Subject: [PATCH 48/60] small changes to basic example --- examples/basic.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/examples/basic.rs b/examples/basic.rs index a0fa11a..b6320fb 100644 --- a/examples/basic.rs +++ b/examples/basic.rs @@ -1,7 +1,7 @@ use neat::*; // approximate the to_degrees function, which should be pretty -// hard for the network to learn since it's not really close to -1..1 mapping. +// hard for a traditional network to learn since it's not really close to -1..1 mapping. fn fitness(net: &NeuralNetwork<1, 1>) -> f32 { let mut rng = rand::rng(); let mut total_fitness = 0.0; @@ -12,7 +12,9 @@ fn fitness(net: &NeuralNetwork<1, 1>) -> f32 { let output = net.predict([input])[0]; let expected_output = input.to_degrees(); - // basically just using negative error as fitness + // basically just using negative error as fitness. + // percentage error doesn't work as well here since + // expected_output can be either very small or very large in magnitude. total_fitness -= (output - expected_output).abs(); } @@ -33,8 +35,8 @@ fn main() { // sample a genome to print its fitness. // this value should approach 0 as the generations go on, since the fitness is negative error. - // because of hte way CrossoverEliminator works internally, the parent genomes (i.e. prev generation champs) - // are more likely to be at the start of the genomes vector. + // with the way CrossoverRepopulator (and all builtin repopulators) works internally, the parent genomes + // (i.e. prev generation champs) are more likely to be at the start of the genomes vector. 
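+    // sampling index 0 therefore gives a cheap estimate of the best genome's fitness.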
        let sample = &sim.genomes[0];
        let fit = fitness(sample);
        println!("Gen {i} sample fitness: {fit}");

From b44210041a56846b3052ee5859d6780e97068979 Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Wed, 11 Feb 2026 15:44:53 +0000
Subject: [PATCH 49/60] mitigate elusive bug that only appears with release
 optimizations

---
 src/neuralnet.rs | 36 ++++++++++++++++++++++++++----------
 1 file changed, 26 insertions(+), 10 deletions(-)

diff --git a/src/neuralnet.rs b/src/neuralnet.rs
index 995369e..405a567 100644
--- a/src/neuralnet.rs
+++ b/src/neuralnet.rs
@@ -83,7 +83,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         for _ in 0..I {
             let mut already_chosen = HashSet::new();
             let num_outputs = rng.random_range(1..=O);
-            let mut outputs = HashMap::with_capacity(num_outputs);
+            let mut outputs = HashMap::new();
 
             for _ in 0..num_outputs {
                 let mut j = rng.random_range(0..O);
@@ -246,6 +246,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     pub fn is_connection_safe(&self, connection: Connection) -> bool {
         if connection.from.is_output()
             || connection.to.is_input()
+            || connection.from == connection.to
             || (self.neuron_exists(connection.from)
                 && self[connection.from]
                     .outputs
                     .contains_key(&connection.to))
         {
             return false;
         }
@@ -608,9 +609,10 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     /// Expects [`prune_hanging_neurons`][NeuralNetwork::prune_hanging_neurons] to be called afterwards
     pub fn remove_cycles(&mut self) {
         let mut visited = HashMap::new();
+        let mut edges_to_remove: HashSet<Connection> = HashSet::new();
 
         for i in 0..I {
-            self.remove_cycles_dfs(&mut visited, None, NeuronLocation::Input(i));
+            self.remove_cycles_dfs(&mut visited, &mut edges_to_remove, None, NeuronLocation::Input(i));
         }
 
         // unattached cycles (will cause problems since they
@@ -618,19 +620,33 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         for i in 0..self.hidden_layers.len() {
             let loc = NeuronLocation::Hidden(i);
             if !visited.contains_key(&loc) {
-                self.remove_cycles_dfs(&mut visited, None, loc);
+                self.remove_cycles_dfs(&mut visited, &mut edges_to_remove, None, loc);
             }
         }
+
+        for conn in edges_to_remove {
+            // only doing raw here since we recalculate input counts and
+            // prune hanging neurons later.
+            self.remove_connection_raw(conn);
+        }
     }
 
     // colored dfs
-    fn remove_cycles_dfs(&mut self, visited: &mut HashMap<NeuronLocation, u8>, prev: Option<NeuronLocation>, current: NeuronLocation) {
+    fn remove_cycles_dfs(
+        &mut self,
+        visited: &mut HashMap<NeuronLocation, u8>,
+        edges_to_remove: &mut HashSet<Connection>,
+        prev: Option<NeuronLocation>,
+        current: NeuronLocation,
+    ) {
         if let Some(&existing) = visited.get(&current) {
             if existing == 0 {
-                // part of current dfs
+                // part of current dfs - found a cycle
                 // prev must exist here since visited would be empty on first call.
-                // only doing raw here since we recalculate input counts and prune hanging neurons later.
-                self.remove_connection_raw(Connection { from: prev.unwrap(), to: current });
+                let prev = prev.unwrap();
+                if self[prev].outputs.contains_key(&current) {
+                    edges_to_remove.insert(Connection {
+                        from: prev,
+                        to: current,
+                    });
+                }
             }
 
             // already fully visited, no need to check again
@@ -641,7 +657,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         let outputs = self[current].outputs.keys().cloned().collect::<Vec<_>>();
 
         for loc in outputs {
-            self.remove_cycles_dfs(visited, Some(current), loc);
+            self.remove_cycles_dfs(visited, edges_to_remove, Some(current), loc);
         }
 
         visited.insert(current, 1);
@@ -880,7 +896,7 @@ fn output_exists(loc: NeuronLocation, hidden_len: usize, output_len: usize) -> b
 
 /// A helper struct for operations on connections between neurons.
 /// It does not contain information about the weight.
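 /// The weight itself is stored in the source neuron's `outputs` map.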
-#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Connection { /// The source of the connection. @@ -1011,7 +1027,7 @@ impl Neuron { } /// A pseudo-pointer of sorts that is used for caching. -#[derive(Hash, Clone, Copy, Debug, Eq, PartialEq)] +#[derive(Hash, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum NeuronLocation { /// Points to a neuron in the input layer at contained index. From 71b6007c50874e62d7c700b78ffb0f3c55772cb0 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Wed, 11 Feb 2026 15:51:03 +0000 Subject: [PATCH 50/60] add error calculation to basic example --- examples/basic.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/basic.rs b/examples/basic.rs index b6320fb..8d45a12 100644 --- a/examples/basic.rs +++ b/examples/basic.rs @@ -65,8 +65,8 @@ fn main() { let output = net.predict([input])[0]; let expected_output = input.to_degrees(); println!( - "Network output: {}, Expected output: {}", - output, expected_output + "Network output: {}, Expected output: {}, Error: {}", + output, expected_output, (output - expected_output).abs() ); } } From 3d5256b3f29b84ed977e1feb76e2e5c078dcd3be Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Wed, 11 Feb 2026 18:12:36 +0000 Subject: [PATCH 51/60] fix fmt and clippy warnings --- Cargo.toml | 5 -- examples/basic.rs | 8 ++-- src/activation.rs | 2 +- src/neuralnet.rs | 114 +++++++++++++++++++++++++++++++--------------- src/tests.rs | 8 ++-- 5 files changed, 88 insertions(+), 49 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 76929ad..ce1dad3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,11 +27,6 @@ name = "extra_genes" path = "examples/extra_genes.rs" required-features = ["genetic-rs/derive"] -[[example]] -name = "rps" -path = "examples/rps.rs" -required-features = ["genetic-rs/derive"] - [features] default = [] serde = ["dep:serde", "dep:serde-big-array"] diff --git a/examples/basic.rs b/examples/basic.rs index 8d45a12..d32b865 100644 --- a/examples/basic.rs +++ b/examples/basic.rs @@ -30,7 +30,7 @@ fn main() { CrossoverRepopulator::new(0.25, CrossoverSettings::default()), ); - for i in 0..=200 { + for i in 0..=150 { sim.next_generation(); // sample a genome to print its fitness. 
@@ -45,7 +45,7 @@ fn main() { let net = &sim.genomes[0]; println!("Network in use: {:#?}", net); - + loop { let mut input_text = String::new(); println!("Enter a number to convert to degrees (or 'exit' to quit): "); @@ -66,7 +66,9 @@ fn main() { let expected_output = input.to_degrees(); println!( "Network output: {}, Expected output: {}, Error: {}", - output, expected_output, (output - expected_output).abs() + output, + expected_output, + (output - expected_output).abs() ); } } diff --git a/src/activation.rs b/src/activation.rs index 9453a05..84018b2 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -183,7 +183,7 @@ impl PartialEq for ActivationFn { #[cfg(feature = "serde")] impl Serialize for ActivationFn { fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(&self.name) + serializer.serialize_str(self.name) } } diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 405a567..1d0d2e5 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -1,7 +1,10 @@ use std::{ - collections::{HashMap, HashSet, VecDeque}, ops::{Index, IndexMut}, sync::{ - Arc, atomic::{AtomicBool, AtomicUsize, Ordering} - } + collections::{HashMap, HashSet, VecDeque}, + ops::{Index, IndexMut}, + sync::{ + atomic::{AtomicBool, AtomicUsize, Ordering}, + Arc, + }, }; use atomic_float::AtomicF32; @@ -27,7 +30,10 @@ mod outputs_serde { use super::*; use std::collections::HashMap; - pub fn serialize(map: &HashMap, serializer: S) -> Result + pub fn serialize( + map: &HashMap, + serializer: S, + ) -> Result where S: Serializer, { @@ -124,10 +130,10 @@ impl NeuralNetwork { .for_each(|i| self.eval(NeuronLocation::Input(i), cache.clone())); let mut outputs = [0.0; O]; - for i in 0..O { + for (i, output) in outputs.iter_mut().enumerate().take(O) { let n = &self.output_layer[i]; let val = cache.get(NeuronLocation::Output(i)); - outputs[i] = n.activate(val); + *output = n.activate(val); } outputs @@ -248,9 +254,7 @@ impl NeuralNetwork { || connection.to.is_input() || connection.from == connection.to || (self.neuron_exists(connection.from) - && self[connection.from] - .outputs - .contains_key(&connection.to)) + && self[connection.from].outputs.contains_key(&connection.to)) { return false; } @@ -264,7 +268,7 @@ impl NeuralNetwork { } let n = &self[current]; - for (loc, _) in &n.outputs { + for loc in n.outputs.keys() { if !self.dfs(visited, *loc) { return false; } @@ -406,7 +410,9 @@ impl NeuralNetwork { /// Returns `true` if the destination neuron has input_count == 0 and should be removed. /// Callers must handle the removal of the destination neuron if needed. pub fn remove_connection_raw(&mut self, connection: Connection) -> bool { - let a = self.get_neuron_mut(connection.from).expect("invalid connection.from"); + let a = self + .get_neuron_mut(connection.from) + .expect("invalid connection.from"); if a.outputs.remove(&connection.to).is_none() { panic!("invalid connection.to"); } @@ -508,19 +514,23 @@ impl NeuralNetwork { /// Runs the `callback` on the weights of the neural network in parallel, allowing it to modify weight values. 
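     /// The callback receives the location each weight points to along with the weight itself.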
pub fn update_weights(&mut self, callback: impl Fn(&NeuronLocation, &mut f32) + Sync) { for n in &mut self.input_layer { - n.outputs.par_iter_mut().for_each(|(loc, w)| callback(loc, w)); + n.outputs + .par_iter_mut() + .for_each(|(loc, w)| callback(loc, w)); } for n in &mut self.hidden_layers { - n.outputs.par_iter_mut().for_each(|(loc, w)| callback(loc, w)); + n.outputs + .par_iter_mut() + .for_each(|(loc, w)| callback(loc, w)); } } /// Runs the `callback` on the neurons of the neural network in parallel, allowing it to modify neuron values. pub fn mutate_neurons(&mut self, callback: impl Fn(&mut Neuron) + Sync) { - self.input_layer.par_iter_mut().for_each(|n| callback(n)); - self.hidden_layers.par_iter_mut().for_each(|n| callback(n)); - self.output_layer.par_iter_mut().for_each(|n| callback(n)); + self.input_layer.par_iter_mut().for_each(&callback); + self.hidden_layers.par_iter_mut().for_each(&callback); + self.output_layer.par_iter_mut().for_each(&callback); } /// Mutates the activation functions of the neurons in the neural network. @@ -567,15 +577,18 @@ impl NeuralNetwork { fn reset_inputs_for_neuron(&mut self, loc: NeuronLocation) { let outputs = self[loc].outputs.keys().cloned().collect::>(); - let outputs2 = outputs.into_iter().filter(|&loc| { - if !self.neuron_exists(loc) { - return false; - } + let outputs2 = outputs + .into_iter() + .filter(|&loc| { + if !self.neuron_exists(loc) { + return false; + } - let target = &mut self[loc]; - target.input_count += 1; - true - }).collect::>(); + let target = &mut self[loc]; + target.input_count += 1; + true + }) + .collect::>(); self[loc].outputs.retain(|loc, _| outputs2.contains(loc)); } @@ -612,7 +625,12 @@ impl NeuralNetwork { let mut edges_to_remove: HashSet = HashSet::new(); for i in 0..I { - self.remove_cycles_dfs(&mut visited, &mut edges_to_remove, None, NeuronLocation::Input(i)); + self.remove_cycles_dfs( + &mut visited, + &mut edges_to_remove, + None, + NeuronLocation::Input(i), + ); } // unattached cycles (will cause problems since they @@ -645,16 +663,19 @@ impl NeuralNetwork { // prev must exist here since visited would be empty on first call. let prev = prev.unwrap(); if self[prev].outputs.contains_key(¤t) { - edges_to_remove.insert(Connection { from: prev, to: current }); + edges_to_remove.insert(Connection { + from: prev, + to: current, + }); } } // already fully visited, no need to check again return; } - + visited.insert(current, 0); - + let outputs = self[current].outputs.keys().cloned().collect::>(); for loc in outputs { self.remove_cycles_dfs(visited, edges_to_remove, Some(current), loc); @@ -665,7 +686,12 @@ impl NeuralNetwork { /// Performs just the mutations that modify the graph structure of the neural network, /// and not the internal mutations that only modify values such as activation functions, weights, and biases. - pub fn perform_graph_mutations(&mut self, settings: &MutationSettings, rate: f32, rng: &mut impl rand::Rng) { + pub fn perform_graph_mutations( + &mut self, + settings: &MutationSettings, + rate: f32, + rng: &mut impl rand::Rng, + ) { // TODO maybe allow specifying probability // for each type of mutation if rng.random_bool(rate as f64) { @@ -694,7 +720,13 @@ impl NeuralNetwork { } /// Same as [`mutate`][NeuralNetwork::mutate] but allows specifying a custom activation registry for activation mutations. 
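     /// Useful for working with a local [`ActivationRegistry`] instead of the global one.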
- pub fn mutate_with_reg(&mut self, settings: &MutationSettings, rate: f32, rng: &mut impl rand::Rng, reg: &ActivationRegistry) { + pub fn mutate_with_reg( + &mut self, + settings: &MutationSettings, + rate: f32, + rng: &mut impl rand::Rng, + reg: &ActivationRegistry, + ) { self.perform_graph_mutations(settings, rate, rng); self.mutate_activations_with_reg(rate, reg); self.mutate_weights(settings.weight_mutation_amount); @@ -806,7 +838,12 @@ impl Default for ReproductionSettings { impl Mitosis for NeuralNetwork { type Context = ReproductionSettings; - fn divide(&self, settings: &ReproductionSettings, rate: f32, rng: &mut impl prelude::Rng) -> Self { + fn divide( + &self, + settings: &ReproductionSettings, + rate: f32, + rng: &mut impl prelude::Rng, + ) -> Self { let mut child = self.clone(); for _ in 0..settings.mutation_passes { @@ -822,14 +859,19 @@ impl Mitosis for NeuralNetwork { pub struct CrossoverSettings { /// The reproduction settings to use during crossover, which will be applied to the child after crossover. pub repr: ReproductionSettings, - // TODO other crossover settings. } impl Crossover for NeuralNetwork { type Context = CrossoverSettings; - fn crossover(&self, other: &Self, settings: &CrossoverSettings, rate: f32, rng: &mut impl rand::Rng) -> Self { + fn crossover( + &self, + other: &Self, + settings: &CrossoverSettings, + rate: f32, + rng: &mut impl rand::Rng, + ) -> Self { // merge (temporarily breaking invariants) and then resolve invariants. let mut child = NeuralNetwork { input_layer: self.input_layer.clone(), @@ -842,7 +884,7 @@ impl Crossover for NeuralNetwork { child.input_layer[i] = other.input_layer[i].clone(); } } - + for i in 0..O { if rng.random_bool(0.5) { child.output_layer[i] = other.output_layer[i].clone(); @@ -995,7 +1037,7 @@ impl Neuron { pub fn random_output(&self, rng: &mut impl Rng) -> (NeuronLocation, f32) { // will panic if outputs is empty let i = rng.random_range(0..self.outputs.len()); - let x = self.outputs.iter().skip(i).next().unwrap(); + let x = self.outputs.iter().nth(i).unwrap(); (*x.0, *x.1) } @@ -1245,4 +1287,4 @@ impl<'a, T: PartialOrd + 'a, I: Iterator> MaxIndex for I { max_i } -} \ No newline at end of file +} diff --git a/src/tests.rs b/src/tests.rs index e6da044..acd13e3 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -306,10 +306,10 @@ fn crossover() { for _ in 0..NUM_MUTATIONS { let a = net1.crossover(&net2, &settings, MUTATION_RATE, rng); assert_network_invariants(&a); - + let b = net2.crossover(&net1, &settings, MUTATION_RATE, rng); assert_network_invariants(&b); - + net1 = a; net2 = b; } @@ -318,8 +318,8 @@ fn crossover() { #[cfg(feature = "serde")] mod serde { - use crate::*; use super::rng_test; + use crate::*; #[test] fn full_serde() { @@ -336,4 +336,4 @@ mod serde { assert_eq!(net1, net2); }); } -} \ No newline at end of file +} From 9c41d2c58c9e22e1c068ea1ccf2f68275c0b5f27 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 12 Feb 2026 12:32:19 +0000 Subject: [PATCH 52/60] implement extra genes example (vibe code + cleanup for readability and customizability) --- examples/extra_genes.rs | 361 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 354 insertions(+), 7 deletions(-) diff --git a/examples/extra_genes.rs b/examples/extra_genes.rs index 2081b22..782d893 100644 --- a/examples/extra_genes.rs +++ b/examples/extra_genes.rs @@ -1,20 +1,367 @@ use neat::*; +use std::f32::consts::PI; -struct Genome { - brain: NeuralNetwork<10, 4>, - stats: PhysicalStats, +// 
========================================================================== +// SIMULATION CONSTANTS - Adjust these to experiment with different dynamics +// ========================================================================== + +// World/Environment Settings +const WORLD_WIDTH: f32 = 800.0; +const WORLD_HEIGHT: f32 = 600.0; +const INITIAL_FOOD_COUNT: usize = 20; +const FOOD_RESPAWN_THRESHOLD: usize = 10; +const FOOD_DETECTION_DISTANCE: f32 = 10.0; + +// Energy/Food Settings +const BASE_FOOD_ENERGY: f32 = 20.0; // Energy from each food item +const STRENGTH_ENERGY_MULTIPLIER: f32 = 10.0; // Extra energy per strength stat +const MOVEMENT_ENERGY_COST: f32 = 0.2; // Cost per unit of movement +const IDLE_ENERGY_COST: f32 = 0.1; // Cost per timestep just existing + +// Fitness Settings +const FITNESS_PER_FOOD: f32 = 100.0; // Points per food eaten + +// Physical Stats - Min/Max Bounds +const SPEED_MIN: f32 = 0.5; +const SPEED_MAX: f32 = 6.0; +const STRENGTH_MIN: f32 = 0.2; +const STRENGTH_MAX: f32 = 4.0; +const SENSE_RANGE_MIN: f32 = 30.0; +const SENSE_RANGE_MAX: f32 = 250.0; +const ENERGY_CAPACITY_MIN: f32 = 50.0; +const ENERGY_CAPACITY_MAX: f32 = 400.0; + +// Physical Stats - Initial Generation Range +const SPEED_INIT_MIN: f32 = 1.0; +const SPEED_INIT_MAX: f32 = 5.0; +const STRENGTH_INIT_MIN: f32 = 0.5; +const STRENGTH_INIT_MAX: f32 = 3.0; +const SENSE_RANGE_INIT_MIN: f32 = 50.0; +const SENSE_RANGE_INIT_MAX: f32 = 200.0; +const ENERGY_CAPACITY_INIT_MIN: f32 = 100.0; +const ENERGY_CAPACITY_INIT_MAX: f32 = 300.0; + +// Mutation Settings +const SPEED_MUTATION_PROB: f32 = 0.3; +const SPEED_MUTATION_RANGE: f32 = 0.5; +const STRENGTH_MUTATION_PROB: f32 = 0.2; +const STRENGTH_MUTATION_RANGE: f32 = 0.3; +const SENSE_MUTATION_PROB: f32 = 0.2; +const SENSE_MUTATION_RANGE: f32 = 20.0; +const CAPACITY_MUTATION_PROB: f32 = 0.2; +const CAPACITY_MUTATION_RANGE: f32 = 30.0; + +// Genetic Algorithm Settings +const POPULATION_SIZE: usize = 150; +const HIGHEST_GENERATION: usize = 250; +const SIMULATION_TIMESTEPS: usize = 500; +const MUTATION_RATE: f32 = 0.3; + +/// Mutation settings for physical stats +#[derive(Clone, Debug)] +struct PhysicalStatsMutationSettings { + speed_prob: f32, + speed_range: f32, + strength_prob: f32, + strength_range: f32, + sense_prob: f32, + sense_range: f32, + capacity_prob: f32, + capacity_range: f32, +} + +impl Default for PhysicalStatsMutationSettings { + fn default() -> Self { + Self { + speed_prob: SPEED_MUTATION_PROB, + speed_range: SPEED_MUTATION_RANGE, + strength_prob: STRENGTH_MUTATION_PROB, + strength_range: STRENGTH_MUTATION_RANGE, + sense_prob: SENSE_MUTATION_PROB, + sense_range: SENSE_MUTATION_RANGE, + capacity_prob: CAPACITY_MUTATION_PROB, + capacity_range: CAPACITY_MUTATION_RANGE, + } + } } +/// Physical traits/stats for an organism +#[derive(Clone, Debug, PartialEq)] struct PhysicalStats { + /// Speed multiplier (faster = longer strides but more energy cost) speed: f32, - sight_range: u32, + /// Strength stat (affects energy from food) + strength: f32, + /// Sense range (how far it can detect food) + sense_range: f32, + /// Energy capacity (larger = can go longer without food) + energy_capacity: f32, } -struct Organism { - genome: Genome, +impl PhysicalStats { + fn clamp(&mut self) { + self.speed = self.speed.clamp(SPEED_MIN, SPEED_MAX); + self.strength = self.strength.clamp(STRENGTH_MIN, STRENGTH_MAX); + self.sense_range = self.sense_range.clamp(SENSE_RANGE_MIN, SENSE_RANGE_MAX); + self.energy_capacity = self.energy_capacity.clamp(ENERGY_CAPACITY_MIN, 
ENERGY_CAPACITY_MAX); + } +} + +impl GenerateRandom for PhysicalStats { + fn gen_random(rng: &mut impl rand::Rng) -> Self { + let mut stats = PhysicalStats { + speed: rng.random_range(SPEED_INIT_MIN..SPEED_INIT_MAX), + strength: rng.random_range(STRENGTH_INIT_MIN..STRENGTH_INIT_MAX), + sense_range: rng.random_range(SENSE_RANGE_INIT_MIN..SENSE_RANGE_INIT_MAX), + energy_capacity: rng.random_range(ENERGY_CAPACITY_INIT_MIN..ENERGY_CAPACITY_INIT_MAX), + }; + stats.clamp(); + stats + } +} + +impl RandomlyMutable for PhysicalStats { + type Context = PhysicalStatsMutationSettings; + + fn mutate(&mut self, context: &Self::Context, _severity: f32, rng: &mut impl rand::Rng) { + if rng.random::() < context.speed_prob { + self.speed += rng.random_range(-context.speed_range..context.speed_range); + } + if rng.random::() < context.strength_prob { + self.strength += rng.random_range(-context.strength_range..context.strength_range); + } + if rng.random::() < context.sense_prob { + self.sense_range += rng.random_range(-context.sense_range..context.sense_range); + } + if rng.random::() < context.capacity_prob { + self.energy_capacity += rng.random_range(-context.capacity_range..context.capacity_range); + } + self.clamp(); + } +} + +impl Crossover for PhysicalStats { + type Context = PhysicalStatsMutationSettings; + + fn crossover( + &self, + other: &Self, + context: &Self::Context, + _severity: f32, + rng: &mut impl rand::Rng, + ) -> Self { + let mut child = PhysicalStats { + speed: (self.speed + other.speed) / 2.0 + rng.random_range(-context.speed_range..context.speed_range), + strength: (self.strength + other.strength) / 2.0 + rng.random_range(-context.strength_range..context.strength_range), + sense_range: (self.sense_range + other.sense_range) / 2.0 + + rng.random_range(-context.sense_range..context.sense_range), + energy_capacity: (self.energy_capacity + other.energy_capacity) / 2.0 + + rng.random_range(-context.capacity_range..context.capacity_range), + }; + child.clamp(); + child + } +} + +/// A complete organism genome containing both neural network and physical traits +#[derive(Clone, Debug, PartialEq, GenerateRandom, RandomlyMutable, Crossover)] +#[randmut(create_context = OrganismCtx)] +#[crossover(with_context = OrganismCtx)] +struct OrganismGenome { + brain: NeuralNetwork<8, 2>, + stats: PhysicalStats, +} + + +/// Running instance of an organism with current position and energy +struct OrganismInstance { + genome: OrganismGenome, + x: f32, + y: f32, + angle: f32, energy: f32, + lifetime: usize, + food_eaten: usize, +} + +impl OrganismInstance { + fn new(genome: OrganismGenome) -> Self { + let energy = genome.stats.energy_capacity; + Self { + genome, + x: rand::random::() * WORLD_WIDTH, + y: rand::random::() * WORLD_HEIGHT, + angle: rand::random::() * 2.0 * PI, + energy, + lifetime: 0, + food_eaten: 0, + } + } + + /// Simulate one timestep: sense food, decide movement, consume energy, age + fn step(&mut self, food_sources: &[(f32, f32)]) { + self.lifetime += 1; + + // find nearest food + let mut nearest_food_dist = f32::INFINITY; + let mut nearest_food_angle = 0.0; + let mut nearest_food_x_diff = 0.0; + let mut nearest_food_y_diff = 0.0; + + for &(fx, fy) in food_sources { + let dx = fx - self.x; + let dy = fy - self.y; + let dist = (dx * dx + dy * dy).sqrt(); + + if dist < self.genome.stats.sense_range && dist < nearest_food_dist { + nearest_food_dist = dist; + nearest_food_angle = (dy.atan2(dx) - self.angle).sin(); + nearest_food_x_diff = (dx / 100.0).clamp(-1.0, 1.0); + nearest_food_y_diff = 
(dy / 100.0).clamp(-1.0, 1.0); + } + } + + let sense_food = if nearest_food_dist < self.genome.stats.sense_range { + 1.0 + } else { + 0.0 + }; + + // Create inputs for neural network: + // 0: current energy level (0-1) + // 1: food detected (0 or 1) + // 2: nearest food angle (normalized) + // 3: nearest food x diff + // 4: nearest food y diff + // 5: speed stat (normalized) + // 6: energy capacity (normalized) + // 7: age (slow-paced, up to 1 at age 1000) + let inputs = [ + (self.energy / self.genome.stats.energy_capacity).clamp(0.0, 1.0), + sense_food, + nearest_food_angle, + nearest_food_x_diff, + nearest_food_y_diff, + (self.genome.stats.speed / 5.0).clamp(0.0, 1.0), + (self.genome.stats.energy_capacity / 200.0).clamp(0.0, 1.0), + (self.lifetime as f32 / 1000.0).clamp(0.0, 1.0), + ]; + + // get movement outputs from neural network + let outputs = self.genome.brain.predict(inputs); + let move_forward = (outputs[0] * self.genome.stats.speed).clamp(-5.0, 5.0); + let turn = (outputs[1] * PI / 4.0).clamp(-PI / 8.0, PI / 8.0); + + // update position and angle + self.angle += turn; + self.x += move_forward * self.angle.cos(); + self.y += move_forward * self.angle.sin(); + + // wrap around world + if self.x < 0.0 { + self.x += WORLD_WIDTH; + } else if self.x >= WORLD_WIDTH { + self.x -= WORLD_WIDTH; + } + if self.y < 0.0 { + self.y += WORLD_HEIGHT; + } else if self.y >= WORLD_HEIGHT { + self.y -= WORLD_HEIGHT; + } + + // consume energy for movement + let movement_cost = (move_forward.abs() / self.genome.stats.speed).max(0.5); + self.energy -= movement_cost * MOVEMENT_ENERGY_COST; + + // consume energy for existing + self.energy -= IDLE_ENERGY_COST; + } + + /// Check if organism lands on food and consume it + fn eat(&mut self, food_sources: &mut Vec<(f32, f32)>) { + food_sources.retain(|&(fx, fy)| { + let dx = fx - self.x; + let dy = fy - self.y; + let dist = (dx * dx + dy * dy).sqrt(); + if dist < FOOD_DETECTION_DISTANCE { + // ate food + self.energy += BASE_FOOD_ENERGY + (self.genome.stats.strength * STRENGTH_ENERGY_MULTIPLIER); + self.energy = self.energy.min(self.genome.stats.energy_capacity); + self.food_eaten += 1; + false + } else { + true + } + }); + } + + fn is_alive(&self) -> bool { + self.energy > 0.0 + } + + fn fitness(&self) -> f32 { + let food_fitness = (self.food_eaten as f32) * FITNESS_PER_FOOD; + food_fitness + } +} + +/// Evaluate an organism's fitness by running a simulation +fn evaluate_organism(genome: &OrganismGenome) -> f32 { + let mut rng = rand::rng(); + + let mut food_sources: Vec<(f32, f32)> = (0..INITIAL_FOOD_COUNT) + .map(|_| { + ( + rng.random_range(0.0..WORLD_WIDTH), + rng.random_range(0.0..WORLD_HEIGHT), + ) + }) + .collect(); + + let mut instance = OrganismInstance::new(genome.clone()); + + for _ in 0..SIMULATION_TIMESTEPS { + if instance.is_alive() { + instance.step(&food_sources); + instance.eat(&mut food_sources); + } + + // respawn food + if food_sources.len() < FOOD_RESPAWN_THRESHOLD { + food_sources.push(( + rng.random_range(0.0..WORLD_WIDTH), + rng.random_range(0.0..WORLD_HEIGHT), + )); + } + } + + instance.fitness() } fn main() { - todo!("use NeuralNetwork along with other genomes for more complex organisms"); + let mut rng = rand::rng(); + + println!("Starting genetic NEAT simulation with physical traits"); + println!("Population: {} organisms", POPULATION_SIZE); + println!("Each has: Neural Network Brain + Physical Stats (Speed, Strength, Sense Range, Energy Capacity)\n"); + + let mut sim = GeneticSim::new( + Vec::gen_random(&mut rng, POPULATION_SIZE), 
+ FitnessEliminator::new_with_default(evaluate_organism), + CrossoverRepopulator::new(MUTATION_RATE, OrganismCtx::default()), + ); + + for generation in 0..=HIGHEST_GENERATION { + sim.next_generation(); + + let sample = &sim.genomes[0]; + let fitness = evaluate_organism(sample); + + println!( + "Gen {}: Sample fitness: {:.1} | Speed: {:.2}, Strength: {:.2}, Sense: {:.1}, Capacity: {:.1}", + generation, fitness, sample.stats.speed, sample.stats.strength, sample.stats.sense_range, sample.stats.energy_capacity + ); + } + + println!("\nSimulation complete!"); } From 23a1196fc0b51ab26b9800f2ba50ecd3879e2bc2 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 12 Feb 2026 12:52:38 +0000 Subject: [PATCH 53/60] add serde example --- .gitignore | 4 ++-- Cargo.toml | 5 +++++ examples/extra_genes.rs | 17 +++++++++++------ examples/serde.rs | 35 +++++++++++++++++++++++++++++++++++ 4 files changed, 53 insertions(+), 8 deletions(-) create mode 100644 examples/serde.rs diff --git a/.gitignore b/.gitignore index b2d8069..ee3dd5f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ /target/ /.vscode/ -best-agent.json -fitness-plot.svg \ No newline at end of file +fitness-plot.svg +network.json \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index ce1dad3..4c342f0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,11 @@ name = "extra_genes" path = "examples/extra_genes.rs" required-features = ["genetic-rs/derive"] +[[example]] +name = "serde" +path = "examples/serde.rs" +required-features = ["serde"] + [features] default = [] serde = ["dep:serde", "dep:serde-big-array"] diff --git a/examples/extra_genes.rs b/examples/extra_genes.rs index 782d893..d4e6f01 100644 --- a/examples/extra_genes.rs +++ b/examples/extra_genes.rs @@ -103,7 +103,9 @@ impl PhysicalStats { self.speed = self.speed.clamp(SPEED_MIN, SPEED_MAX); self.strength = self.strength.clamp(STRENGTH_MIN, STRENGTH_MAX); self.sense_range = self.sense_range.clamp(SENSE_RANGE_MIN, SENSE_RANGE_MAX); - self.energy_capacity = self.energy_capacity.clamp(ENERGY_CAPACITY_MIN, ENERGY_CAPACITY_MAX); + self.energy_capacity = self + .energy_capacity + .clamp(ENERGY_CAPACITY_MIN, ENERGY_CAPACITY_MAX); } } @@ -134,7 +136,8 @@ impl RandomlyMutable for PhysicalStats { self.sense_range += rng.random_range(-context.sense_range..context.sense_range); } if rng.random::() < context.capacity_prob { - self.energy_capacity += rng.random_range(-context.capacity_range..context.capacity_range); + self.energy_capacity += + rng.random_range(-context.capacity_range..context.capacity_range); } self.clamp(); } @@ -151,8 +154,10 @@ impl Crossover for PhysicalStats { rng: &mut impl rand::Rng, ) -> Self { let mut child = PhysicalStats { - speed: (self.speed + other.speed) / 2.0 + rng.random_range(-context.speed_range..context.speed_range), - strength: (self.strength + other.strength) / 2.0 + rng.random_range(-context.strength_range..context.strength_range), + speed: (self.speed + other.speed) / 2.0 + + rng.random_range(-context.speed_range..context.speed_range), + strength: (self.strength + other.strength) / 2.0 + + rng.random_range(-context.strength_range..context.strength_range), sense_range: (self.sense_range + other.sense_range) / 2.0 + rng.random_range(-context.sense_range..context.sense_range), energy_capacity: (self.energy_capacity + other.energy_capacity) / 2.0 @@ -172,7 +177,6 @@ struct OrganismGenome { stats: PhysicalStats, } - /// Running instance of an organism with current position and energy struct 
OrganismInstance { genome: OrganismGenome, @@ -285,7 +289,8 @@ impl OrganismInstance { let dist = (dx * dx + dy * dy).sqrt(); if dist < FOOD_DETECTION_DISTANCE { // ate food - self.energy += BASE_FOOD_ENERGY + (self.genome.stats.strength * STRENGTH_ENERGY_MULTIPLIER); + self.energy += + BASE_FOOD_ENERGY + (self.genome.stats.strength * STRENGTH_ENERGY_MULTIPLIER); self.energy = self.energy.min(self.genome.stats.energy_capacity); self.food_eaten += 1; false diff --git a/examples/serde.rs b/examples/serde.rs new file mode 100644 index 0000000..70e3e3d --- /dev/null +++ b/examples/serde.rs @@ -0,0 +1,35 @@ +use neat::{activation::register_activation, *}; + +const OUTPUT_PATH: &str = "network.json"; + +fn magic_activation(x: f32) -> f32 { + // just a random activation function to show that it gets serialized and deserialized correctly. + (x * 2.0).sin() +} + +fn main() { + // custom activation functions must be registered before deserialization, since the network needs to know how to deserialize them. + register_activation(activation_fn!(magic_activation)); + + let mut rng = rand::rng(); + let mut net = NeuralNetwork::<10, 10>::new(&mut rng); + + println!("Mutating network..."); + + for _ in 0..100 { + net.mutate(&MutationSettings::default(), 0.25, &mut rng); + } + + let file = std::fs::File::create(OUTPUT_PATH).expect("Failed to create file for network output"); + serde_json::to_writer_pretty(file, &net).expect("Failed to write network to file"); + + println!("Network saved to {OUTPUT_PATH}"); + + // reopen because for some reason io hates working properly with both read and write + // (even when using OpenOptions) + let file = std::fs::File::open(OUTPUT_PATH).expect("Failed to open network file for reading"); + let net2: NeuralNetwork<10, 10> = + serde_json::from_reader(file).expect("Failed to parse network from file"); + assert_eq!(net, net2); + println!("Network successfully loaded from file and matches original!"); +} From a467b3f6cbd5a34040d0b275401aa2fc2c8a5ba4 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 12 Feb 2026 13:53:33 +0000 Subject: [PATCH 54/60] add some documentation to README --- README.md | 67 ++++++++++++++++++++++++++++++++++++++++++++++- examples/serde.rs | 3 ++- src/lib.rs | 40 +++++++++++++++++++++++----- src/neuralnet.rs | 24 +---------------- 4 files changed, 103 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index 4e9828b..d477ecd 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,72 @@ Implementation of the NEAT algorithm using `genetic-rs`. *Do you like this crate and want to support it? If so, leave a ⭐* # How To Use -TODO +The `NeuralNetwork` struct is the main type exported by this crate. The `I` is the number of input neurons, and `O` is the number of output neurons. It implements `GenerateRandom`, `RandomlyMutable`, `Mitosis`, and `Crossover`, with a lot of customizability. This means that you can use it standalone as your organism's entire genome: +```rust +use neat::*; + +fn fitness(net: &NeuralNetwork<5, 6>) -> f32 { + // ideally you'd test multiple times for consistency, + // but this is just a simple example. 
+ // it's also generally good to normalize your inputs between -1..1, + // but NEAT is usually flexible enough to still work anyways + let inputs = [1.0, 2.0, 3.0, 4.0, 5.0]; + let outputs = net.predict(inputs); + + todo!("test output"); +} + +fn main() { + let mut rng = rand::rng(); + let mut sim = GeneticSim::new( + Vec::gen_random(&mut rng, 100), + FitnessEliminator::new_with_default(fitness), + CrossoverRepopulator::new(0.25, CrossoverSettings::default()), + ); + + sim.perform_generations(100); +} +``` + +Or just a part of a more complex genome: +```rust +use neat::*; + +#[derive(Clone, Debug)] +struct PhysicalStats { + strength: f32, + speed: f32, + // ... +} + +// ... implement `RandomlyMutable`, `GenerateRandom`, `Crossover`, etc. + +#[derive(Clone, Debug, GenerateRandom, RandomlyMutable, Mitosis, Crossover)] +#[randmut(create_context = MyGenomeCtx)] +#[crossover(with_context = MyGenomeCtx)] +struct MyGenome { + brain: NeuralNetwork<4, 2>, + stats: PhysicalStats, +} + +fn fitness(genome: &MyGenome) -> f32 { + todo!("test using both brain and stats"); +} + +// main is the exact same as before +fn main() { + let mut rng = rand::rng(); + let mut sim = GeneticSim::new( + Vec::gen_random(&mut rng, 100), + FitnessEliminator::new_with_default(fitness), + CrossoverRepopulator::new(0.25, CrossoverSettings::default()), + ); + + sim.perform_generations(100); +} +``` + +If you want more in-depth examples, look at the [examples](https://github.com/HyperCodec/neat/tree/main/examples). You can also check out the [genetic-rs docs](https://docs.rs/genetic_rs) to see what other options you have to customize your genetic simulation. ### License This crate falls under the `MIT` license diff --git a/examples/serde.rs b/examples/serde.rs index 70e3e3d..90b4e81 100644 --- a/examples/serde.rs +++ b/examples/serde.rs @@ -20,7 +20,8 @@ fn main() { net.mutate(&MutationSettings::default(), 0.25, &mut rng); } - let file = std::fs::File::create(OUTPUT_PATH).expect("Failed to create file for network output"); + let file = + std::fs::File::create(OUTPUT_PATH).expect("Failed to create file for network output"); serde_json::to_writer_pretty(file, &net).expect("Failed to write network to file"); println!("Network saved to {OUTPUT_PATH}"); diff --git a/src/lib.rs b/src/lib.rs index 6c93cd4..67bbdcd 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,8 +1,4 @@ -//! A crate implementing NeuroEvolution of Augmenting Topologies (NEAT) using a directed acyclic graph. -//! It provides an easy-to-use [`NeuralNetwork`] type that -//! integrates directly into the [`genetic-rs`](https://crates.io/crates/genetic-rs) ecosystem. -//! -//! Look at the README, docs, or examples to learn how to use this crate. +#![doc = include_str!("../README.md")] #![warn(missing_docs)] @@ -16,5 +12,37 @@ pub use neuralnet::*; pub use genetic_rs::{self, prelude::*}; +/// A trait for getting the index of the maximum element. +pub trait MaxIndex { + /// Returns the index of the maximum element. 
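+    /// Returns `None` if the iterator is empty.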
+    fn max_index(self) -> Option<usize>;
+}
+
+impl<T: PartialOrd, I: Iterator<Item = T>> MaxIndex for I {
+    fn max_index(self) -> Option<usize> {
+        // enumerate now so we don't accidentally
+        // skip the index of the first element
+        let mut iter = self.enumerate();
+
+        let mut max_i = 0;
+
+        let first = iter.next();
+        if first.is_none() {
+            return None;
+        }
+
+        let mut max_v = first.unwrap().1;
+
+        for (i, v) in iter {
+            if v > max_v {
+                max_v = v;
+                max_i = i;
+            }
+        }
+
+        Some(max_i)
+    }
+}
+
 #[cfg(test)]
-mod tests;
+mod tests;
\ No newline at end of file
diff --git a/src/neuralnet.rs b/src/neuralnet.rs
index 1d0d2e5..cbc3c64 100644
--- a/src/neuralnet.rs
+++ b/src/neuralnet.rs
@@ -1265,26 +1265,4 @@ impl<const I: usize, const O: usize> From<&NeuralNetwork<I, O>> for NeuralNetCac
             output_layer,
         }
     }
-}
-
-/// A trait for getting the index of the maximum element.
-pub trait MaxIndex {
-    /// Returns the index of the maximum element.
-    fn max_index(self) -> usize;
-}
-
-impl<'a, T: PartialOrd + 'a, I: Iterator<Item = &'a T>> MaxIndex for I {
-    fn max_index(self) -> usize {
-        let mut max_i = 0;
-        let mut max_v = None;
-
-        for (i, v) in self.enumerate() {
-            if max_v.is_none() || v > max_v.unwrap() {
-                max_i = i;
-                max_v = Some(v);
-            }
-        }
-
-        max_i
-    }
-}
+}
\ No newline at end of file

From e4488e42b36af91adaaa5f8c5454bef53df319ae Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Thu, 12 Feb 2026 13:55:46 +0000
Subject: [PATCH 55/60] fix fmt and clippy warnings

---
 src/lib.rs       | 13 +++----------
 src/neuralnet.rs |  2 +-
 2 files changed, 4 insertions(+), 11 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 67bbdcd..572c7af 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,5 +1,4 @@
 #![doc = include_str!("../README.md")]
-
 #![warn(missing_docs)]
 
 /// Contains the types surrounding activation functions.
@@ -23,15 +22,9 @@ impl<T: PartialOrd, I: Iterator<Item = T>> MaxIndex for I {
         // enumerate now so we don't accidentally
         // skip the index of the first element
         let mut iter = self.enumerate();
-
-        let mut max_i = 0;
-
-        let first = iter.next();
-        if first.is_none() {
-            return None;
-        }
-
-        let mut max_v = first.unwrap().1;
+        let mut max_i = 0;
+        let mut max_v = iter.next()?.1;
 
         for (i, v) in iter {
             if v > max_v {
@@ -45,4 +38,4 @@ impl<T: PartialOrd, I: Iterator<Item = T>> MaxIndex for I {
 }
 
 #[cfg(test)]
-mod tests;
\ No newline at end of file
+mod tests;
diff --git a/src/neuralnet.rs b/src/neuralnet.rs
index cbc3c64..ba45d22 100644
--- a/src/neuralnet.rs
+++ b/src/neuralnet.rs
@@ -1265,4 +1265,4 @@ impl<const I: usize, const O: usize> From<&NeuralNetwork<I, O>> for NeuralNetCac
             output_layer,
         }
     }
-}
\ No newline at end of file
+}

From 93fb1d1328397776d954b6310b3de051a4ebf92c Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Thu, 12 Feb 2026 14:15:43 +0000
Subject: [PATCH 56/60] ignore problematic doctest and give the other one a
 simple placeholder fitness function so it passes

---
 README.md | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index d477ecd..0d2d3f8 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,9 @@ fn fitness(net: &NeuralNetwork<5, 6>) -> f32 {
     let inputs = [1.0, 2.0, 3.0, 4.0, 5.0];
     let outputs = net.predict(inputs);
 
-    todo!("test output");
+    // simple fitness: sum of outputs
+    // you should replace this with a real fitness test
+    outputs.iter().sum()
 }
 
 fn main() {
@@ -39,7 +41,7 @@ fn main() {
 ```
 
 Or just a part of a more complex genome:
-```rust
+```rust,ignore
 use neat::*;
 
 #[derive(Clone, Debug)]
@@ -60,7 +62,10 @@ struct MyGenome {
 }
 
 fn fitness(genome: &MyGenome) -> f32 {
-    todo!("test using both brain and stats");
+    let inputs = [1.0, 2.0, 3.0, 4.0];
+    let
+    let outputs = genome.brain.predict(inputs);
+    // fitness uses both brain output and stats
+    outputs.iter().sum::<f32>() + genome.stats.strength + genome.stats.speed
 }
 
 // main is the exact same as before

From 998fa930923bdd784de2a77c8908b6e6c5120afc Mon Sep 17 00:00:00 2001
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com>
Date: Thu, 12 Feb 2026 14:49:52 +0000
Subject: [PATCH 57/60] rework settings/context for qol and extra customizability

---
 README.md         | 13 +++++++--
 examples/basic.rs |  2 +-
 src/neuralnet.rs  | 70 +++++++++++++++++++++++++++++++++++++----------
 src/tests.rs      |  2 +-
 4 files changed, 68 insertions(+), 19 deletions(-)

diff --git a/README.md b/README.md
index 0d2d3f8..52d2741 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,7 @@ fn main() {
     let mut sim = GeneticSim::new(
         Vec::gen_random(&mut rng, 100),
         FitnessEliminator::new_with_default(fitness),
-        CrossoverRepopulator::new(0.25, CrossoverSettings::default()),
+        CrossoverRepopulator::new(0.25, ReproductionSettings::default()),
     );
 
     sim.perform_generations(100);
@@ -61,6 +61,15 @@ struct MyGenome {
     stats: PhysicalStats,
 }
 
+impl Default for MyGenomeCtx {
+    fn default() -> Self {
+        Self {
+            brain: ReproductionSettings::default(),
+            stats: PhysicalStats::default(),
+        }
+    }
+}
+
 fn fitness(genome: &MyGenome) -> f32 {
     let inputs = [1.0, 2.0, 3.0, 4.0];
     let outputs = genome.brain.predict(inputs);
@@ -74,7 +83,7 @@ fn main() {
     let mut sim = GeneticSim::new(
         Vec::gen_random(&mut rng, 100),
         FitnessEliminator::new_with_default(fitness),
-        CrossoverRepopulator::new(0.25, CrossoverSettings::default()),
+        CrossoverRepopulator::new(0.25, MyGenomeCtx::default()),
     );
 
     sim.perform_generations(100);
diff --git a/examples/basic.rs b/examples/basic.rs
index d32b865..74dbe91 100644
--- a/examples/basic.rs
+++ b/examples/basic.rs
@@ -27,7 +27,7 @@ fn main() {
     let mut sim = GeneticSim::new(
         Vec::gen_random(&mut rng, 250),
         FitnessEliminator::new_with_default(fitness),
-        CrossoverRepopulator::new(0.25, CrossoverSettings::default()),
+        CrossoverRepopulator::new(0.25, ReproductionSettings::default()),
     );
 
     for i in 0..=150 {
diff --git a/src/neuralnet.rs b/src/neuralnet.rs
index ba45d22..d35fe03 100644
--- a/src/neuralnet.rs
+++ b/src/neuralnet.rs
@@ -8,6 +8,7 @@ use std::{
 };
 
 use atomic_float::AtomicF32;
+use bitflags::bitflags;
 use genetic_rs::prelude::*;
 use rand::Rng;
 use replace_with::replace_with_or_abort;
@@ -694,19 +695,19 @@ impl NeuralNetwork {
     ) {
         // TODO maybe allow specifying probability
         // for each type of mutation
-        if rng.random_bool(rate as f64) {
+        if settings.allowed_mutations.contains(GraphMutations::SPLIT_CONNECTION) && rng.random_bool(rate as f64) {
             // split connection
             if let Some(conn) = self.get_random_connection(settings.max_split_retries, rng) {
                 self.split_connection(conn, rng);
             }
         }
 
-        if rng.random_bool(rate as f64) {
+        if settings.allowed_mutations.contains(GraphMutations::ADD_CONNECTION) && rng.random_bool(rate as f64) {
             // add connection
             self.add_random_connection(settings.max_add_retries, rng);
         }
 
-        if rng.random_bool(rate as f64) {
+        if settings.allowed_mutations.contains(GraphMutations::REMOVE_CONNECTION) && rng.random_bool(rate as f64) {
             // remove connection
             self.remove_random_connection(settings.max_remove_retries, rng);
         }
@@ -792,6 +793,11 @@ pub struct MutationSettings {
 
     /// The maximum number of retries for splitting connections.
     pub max_split_retries: usize,
+
+    /// The types of graph mutations to allow during mutation.
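+    /// All mutation types are allowed by default.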
+    /// Graph mutations are mutations that modify the structure of the neural network,
+    /// such as adding/removing connections and adding neurons.
+    pub allowed_mutations: GraphMutations,
 }
 
 impl Default for MutationSettings {
@@ -803,10 +809,52 @@ impl Default for MutationSettings {
             max_add_retries: 10,
             max_remove_retries: 10,
             max_split_retries: 10,
+            allowed_mutations: GraphMutations::default(),
         }
     }
 }
 
+bitflags! {
+    /// The types of graph mutations to allow during mutation.
+    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+    pub struct GraphMutations: u8 {
+        /// Mutation that splits an existing connection into two via a hidden neuron.
+        const SPLIT_CONNECTION = 0b00000001;
+        /// Mutation that adds a new connection between neurons.
+        const ADD_CONNECTION = 0b00000010;
+        /// Mutation that removes an existing connection.
+        const REMOVE_CONNECTION = 0b00000100;
+    }
+}
+
+impl Default for GraphMutations {
+    fn default() -> Self {
+        Self::all()
+    }
+}
+
+#[cfg(feature = "serde")]
+impl Serialize for GraphMutations {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        self.bits().serialize(serializer)
+    }
+}
+
+#[cfg(feature = "serde")]
+impl<'de> Deserialize<'de> for GraphMutations {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let bits = u8::deserialize(deserializer)?;
+        GraphMutations::from_bits(bits)
+            .ok_or_else(|| serde::de::Error::custom("invalid bit pattern for GraphMutations"))
+    }
+}
+
 impl RandomlyMutable for NeuralNetwork {
     type Context = MutationSettings;
 
@@ -854,21 +902,13 @@ impl Mitosis for NeuralNetwork {
     }
 }
 
-/// The settings used for [`NeuralNetwork`] crossover.
-#[derive(Debug, Default, Clone, PartialEq)]
-pub struct CrossoverSettings {
-    /// The reproduction settings to use during crossover, which will be applied to the child after crossover.
-    pub repr: ReproductionSettings,
-    // TODO other crossover settings.
-} - impl Crossover for NeuralNetwork { - type Context = CrossoverSettings; + type Context = ReproductionSettings; fn crossover( &self, other: &Self, - settings: &CrossoverSettings, + settings: &ReproductionSettings, rate: f32, rng: &mut impl rand::Rng, ) -> Self { @@ -920,8 +960,8 @@ impl Crossover for NeuralNetwork { child.reset_input_counts(); child.prune_hanging_neurons(); - for _ in 0..settings.repr.mutation_passes { - child.mutate(&settings.repr.mutation, rate, rng); + for _ in 0..settings.mutation_passes { + child.mutate(&settings.mutation, rate, rng); } child diff --git a/src/tests.rs b/src/tests.rs index acd13e3..b1345a2 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -301,7 +301,7 @@ fn crossover() { let mut net2 = NeuralNetwork::<10, 10>::new(rng); assert_network_invariants(&net2); - let settings = CrossoverSettings::default(); + let settings = ReproductionSettings::default(); for _ in 0..NUM_MUTATIONS { let a = net1.crossover(&net2, &settings, MUTATION_RATE, rng); From d933e5db2d989fb1ffe2a4c96e93f6ef3f214a84 Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 12 Feb 2026 14:50:45 +0000 Subject: [PATCH 58/60] cargo fmt --- src/neuralnet.rs | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/src/neuralnet.rs b/src/neuralnet.rs index d35fe03..1aa174b 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -695,19 +695,31 @@ impl NeuralNetwork { ) { // TODO maybe allow specifying probability // for each type of mutation - if settings.allowed_mutations.contains(GraphMutations::SPLIT_CONNECTION) && rng.random_bool(rate as f64) { + if settings + .allowed_mutations + .contains(GraphMutations::SPLIT_CONNECTION) + && rng.random_bool(rate as f64) + { // split connection if let Some(conn) = self.get_random_connection(settings.max_split_retries, rng) { self.split_connection(conn, rng); } } - if settings.allowed_mutations.contains(GraphMutations::ADD_CONNECTION) && rng.random_bool(rate as f64) { + if settings + .allowed_mutations + .contains(GraphMutations::ADD_CONNECTION) + && rng.random_bool(rate as f64) + { // add connection self.add_random_connection(settings.max_add_retries, rng); } - if settings.allowed_mutations.contains(GraphMutations::REMOVE_CONNECTION) && rng.random_bool(rate as f64) { + if settings + .allowed_mutations + .contains(GraphMutations::REMOVE_CONNECTION) + && rng.random_bool(rate as f64) + { // remove connection self.remove_random_connection(settings.max_remove_retries, rng); } From 35301fe9e5d4724f98e1a69dc4c041bc7de7a35d Mon Sep 17 00:00:00 2001 From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 12 Feb 2026 15:00:38 +0000 Subject: [PATCH 59/60] bump neat version number for update --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1d675b0..0a8db66 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -172,7 +172,7 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "neat" -version = "0.5.1" +version = "1.0.0" dependencies = [ "atomic_float", "bitflags", diff --git a/Cargo.toml b/Cargo.toml index 4c342f0..455b99c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "neat" description = "Crate for working with NEAT in rust" -version = "0.5.1" +version = "1.0.0" edition = "2021" authors = ["HyperCodec"] repository = "https://github.com/HyperCodec/neat" From 85ea80df728619c2e9b853324ff1ce590c2698b7 Mon Sep 17 00:00:00 2001 
From: Tristan Murphy <72839119+HyperCodec@users.noreply.github.com> Date: Thu, 12 Feb 2026 15:06:38 +0000 Subject: [PATCH 60/60] prevent CI-CD from double checking --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index fd849f4..fef7ca6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -2,7 +2,7 @@ name: CI-CD on: push: - branches: [main, dev] + branches: [main] pull_request: jobs: