From 05c7dcbe1132bed107e430d6e00ad1e4c40eb879 Mon Sep 17 00:00:00 2001
From: vandomej
Date: Fri, 5 Apr 2024 14:04:45 -0700
Subject: [PATCH] debugging merge nodes

---
 gemla/Cargo.toml                              |  3 +-
 gemla/src/bin/bin.rs                          |  2 +
 gemla/src/bin/fighter_nn/fighter_context.rs   |  2 +-
 gemla/src/bin/fighter_nn/mod.rs               | 58 +++++++-------
 .../bin/fighter_nn/neural_network_utility.rs  | 78 +++++++++++--------
 gemla/src/core/mod.rs                         |  9 +--
 6 files changed, 82 insertions(+), 70 deletions(-)

diff --git a/gemla/Cargo.toml b/gemla/Cargo.toml
index 2dd14c4..4208653 100644
--- a/gemla/Cargo.toml
+++ b/gemla/Cargo.toml
@@ -26,10 +26,11 @@ rand = "0.8.5"
 log = "0.4.21"
 env_logger = "0.11.3"
 futures = "0.3.30"
-tokio = { version = "1.36.0", features = ["full"] }
+tokio = { version = "1.37.0", features = ["full"] }
 num_cpus = "1.16.0"
 easy-parallel = "3.3.1"
 fann = "0.1.8"
 async-trait = "0.1.78"
 async-recursion = "1.1.0"
 lerp = "0.5.0"
+console-subscriber = "0.2.0"
diff --git a/gemla/src/bin/bin.rs b/gemla/src/bin/bin.rs
index 464f1aa..f032e96 100644
--- a/gemla/src/bin/bin.rs
+++ b/gemla/src/bin/bin.rs
@@ -32,6 +32,8 @@ struct Args {
 /// TODO
 fn main() -> Result<()> {
     env_logger::init();
+    // console_subscriber::init();
+
     info!("Starting");
 
     let now = Instant::now();
diff --git a/gemla/src/bin/fighter_nn/fighter_context.rs b/gemla/src/bin/fighter_nn/fighter_context.rs
index 503bbf8..a87706b 100644
--- a/gemla/src/bin/fighter_nn/fighter_context.rs
+++ b/gemla/src/bin/fighter_nn/fighter_context.rs
@@ -3,7 +3,7 @@ use std::sync::Arc;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use tokio::sync::Semaphore;
 
-const SHARED_SEMAPHORE_CONCURRENCY_LIMIT: usize = 20;
+const SHARED_SEMAPHORE_CONCURRENCY_LIMIT: usize = 50;
 
 
 #[derive(Debug, Clone)]
diff --git a/gemla/src/bin/fighter_nn/mod.rs b/gemla/src/bin/fighter_nn/mod.rs
index e781a9d..d4683b9 100644
--- a/gemla/src/bin/fighter_nn/mod.rs
+++ b/gemla/src/bin/fighter_nn/mod.rs
@@ -3,15 +3,15 @@ extern crate fann;
 pub mod neural_network_utility;
 pub mod fighter_context;
 
-use std::{cmp::max, fs::{self, File}, io::{self, BufRead, BufReader}, ops::Range, path::{Path, PathBuf}};
+use std::{cmp::max, collections::{HashSet, VecDeque}, fs::{self, File}, io::{self, BufRead, BufReader}, ops::Range, panic::{catch_unwind, AssertUnwindSafe}, path::{Path, PathBuf}, sync::{Arc, Mutex}, time::Duration};
 use fann::{ActivationFunc, Fann};
-use futures::future::join_all;
+use futures::{executor::block_on, future::{join, join_all, select_all}, stream::FuturesUnordered, FutureExt, StreamExt};
 use gemla::{core::genetic_node::{GeneticNode, GeneticNodeContext}, error::Error};
 use lerp::Lerp;
 use rand::prelude::*;
 use serde::{Deserialize, Serialize};
 use anyhow::Context;
-use tokio::process::Command;
+use tokio::{process::Command, sync::{mpsc, Semaphore}, task, time::{sleep, timeout, Sleep}};
 use uuid::Uuid;
 use std::collections::HashMap;
 use async_trait::async_trait;
@@ -21,7 +21,7 @@ use self::neural_network_utility::{crossbreed, major_mutation};
 
 const BASE_DIR: &str = "F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations";
 const POPULATION: usize = 50;
-const NEURAL_NETWORK_INPUTS: usize = 14;
+const NEURAL_NETWORK_INPUTS: usize = 18;
 const NEURAL_NETWORK_OUTPUTS: usize = 8;
 const NEURAL_NETWORK_HIDDEN_LAYERS_MIN: usize = 1;
 const NEURAL_NETWORK_HIDDEN_LAYERS_MAX: usize = 10;
@@ -239,9 +239,9 @@ impl GeneticNode for FighterNN {
             let mut connections = new_fann.get_connections(); // Vector of connections
             for c in &mut connections {
                 if thread_rng().gen_range(0.0..1.0) < self.minor_mutation_rate {
-                    debug!("Minor mutation on connection {:?}", c);
+                    trace!("Minor mutation on connection {:?}", c);
                     c.weight += thread_rng().gen_range(self.weight_initialization_range.clone());
-                    debug!("New weight: {}", c.weight);
+                    trace!("New weight: {}", c.weight);
                 }
             }
 
@@ -413,7 +413,7 @@ async fn run_1v1_simulation(nn_path_1: &PathBuf, nn_path_2: &PathBuf) -> Result<
         let opposing_score = read_score_from_file(&score_file, &nn_2_id).await
             .with_context(|| format!("Failed to read score from file: {:?}", score_file))?;
 
-        trace!("{} scored {}, while {} scored {}", nn_1_id, round_score, nn_2_id, opposing_score);
+        debug!("{} scored {}, while {} scored {}", nn_1_id, round_score, nn_2_id, opposing_score);
 
         return Ok((round_score, opposing_score));
 
@@ -428,7 +428,7 @@ async fn run_1v1_simulation(nn_path_1: &PathBuf, nn_path_2: &PathBuf) -> Result<
         let opposing_score = read_score_from_file(&opposite_score_file, &nn_2_id).await
            .with_context(|| format!("Failed to read score from file: {:?}", opposite_score_file))?;
 
-        trace!("{} scored {}, while {} scored {}", nn_1_id, round_score, nn_2_id, opposing_score);
+        debug!("{} scored {}, while {} scored {}", nn_1_id, round_score, nn_2_id, opposing_score);
 
         return Ok((round_score, opposing_score));
     }
@@ -438,24 +438,28 @@
     let config2_arg = format!("-NN2Config=\"{}\"", nn_path_2.to_str().unwrap());
     let disable_unreal_rendering_arg = "-nullrhi".to_string();
 
-    while !score_file.exists() {
-        let _output = if thread_rng().gen_range(0..100) < 1 {
-            Command::new(GAME_EXECUTABLE_PATH)
-                .arg(&config1_arg)
-                .arg(&config2_arg)
-                .output()
-                .await
-                .expect("Failed to execute game")
-        } else {
-            Command::new(GAME_EXECUTABLE_PATH)
-                .arg(&config1_arg)
-                .arg(&config2_arg)
-                .arg(&disable_unreal_rendering_arg)
-                .output()
-                .await
-                .expect("Failed to execute game")
-        };
-    }
+    // debug!("the following command {} {} {} {}", GAME_EXECUTABLE_PATH, config1_arg, config2_arg, disable_unreal_rendering_arg);
+
+    trace!("Running simulation for {} vs {}", nn_1_id, nn_2_id);
+
+    let _output = if thread_rng().gen_range(0..100) < 1 {
+        Command::new(GAME_EXECUTABLE_PATH)
+            .arg(&config1_arg)
+            .arg(&config2_arg)
+            .output()
+            .await
+            .expect("Failed to execute game")
+    } else {
+        Command::new(GAME_EXECUTABLE_PATH)
+            .arg(&config1_arg)
+            .arg(&config2_arg)
+            .arg(&disable_unreal_rendering_arg)
+            .output()
+            .await
+            .expect("Failed to execute game")
+    };
+
+    trace!("Simulation completed for {} vs {}: {}", nn_1_id, nn_2_id, score_file.exists());
 
     // Read the score from the file
     if score_file.exists() {
@@ -465,7 +469,7 @@ async fn run_1v1_simulation(nn_path_1: &PathBuf, nn_path_2: &PathBuf) -> Result<
         let opposing_score = read_score_from_file(&score_file, &nn_2_id).await
             .with_context(|| format!("Failed to read score from file: {:?}", score_file))?;
 
-        trace!("{} scored {}, while {} scored {}", nn_1_id, round_score, nn_2_id, opposing_score);
+        debug!("{} scored {}, while {} scored {}", nn_1_id, round_score, nn_2_id, opposing_score);
 
         return Ok((round_score, opposing_score))
 
diff --git a/gemla/src/bin/fighter_nn/neural_network_utility.rs b/gemla/src/bin/fighter_nn/neural_network_utility.rs
index 1dcc5ae..a3e5bc1 100644
--- a/gemla/src/bin/fighter_nn/neural_network_utility.rs
+++ b/gemla/src/bin/fighter_nn/neural_network_utility.rs
@@ -171,17 +171,17 @@ pub fn consolidate_old_connections(primary: &Fann, secondary: &Fann, new_shape:
                 if *is_primary {
                     let original_from_neuron = to_non_bias_network_id(connection.from_neuron, &primary_shape);
                     let original_to_neuron = to_non_bias_network_id(connection.to_neuron, &primary_shape);
-                    debug!("Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", previous_new_id, new_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, previous_neuron_id, neuron_id);
+                    trace!("Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", previous_new_id, new_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, previous_neuron_id, neuron_id);
                 } else {
                     let original_from_neuron = to_non_bias_network_id(connection.from_neuron, &secondary_shape);
                     let original_to_neuron = to_non_bias_network_id(connection.to_neuron, &secondary_shape);
-                    debug!("Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", previous_new_id, new_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, previous_neuron_id, neuron_id);
+                    trace!("Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", previous_new_id, new_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, previous_neuron_id, neuron_id);
                 }
                 let translated_from = to_bias_network_id(previous_new_id, &new_shape);
                 let translated_to = to_bias_network_id(new_id, &new_shape);
                 new_fann.set_weight(translated_from, translated_to, connection.weight);
             } else {
-                debug!("Connection not found for ({}, {}) -> ({}, {})", previous_new_id, new_id, previous_neuron_id, neuron_id);
+                trace!("Connection not found for ({}, {}) -> ({}, {})", previous_new_id, new_id, previous_neuron_id, neuron_id);
             }
         }
     }
@@ -193,11 +193,13 @@
         for (neuron_id, is_primary, _, new_id) in current_layer_connections.iter() {
            let translated_neuron_id = to_bias_network_id(new_id, &new_shape);
 
-            let mut connection;
+            let mut connection = None;
             let mut found_in_primary = false;
             if *is_primary {
-                let primary_bias_neuron = get_bias_neuron_for_layer(layer, &primary_shape).unwrap();
-                connection = primary_connections.iter()
+                let primary_bias_neuron = get_bias_neuron_for_layer(layer, &primary_shape);
+                if let Some(primary_bias_neuron) = primary_bias_neuron
+                {
+                    connection = primary_connections.iter()
                     .find(|connection| {
                         let to_neuron = to_non_bias_network_id(connection.to_neuron, &primary_shape);
 
@@ -207,9 +209,29 @@
                         if let Some(to_neuron) = to_neuron {
                             connection.from_neuron == primary_bias_neuron && to_neuron == *neuron_id
                         } else {
                             false
                         }
                     });
+                }
+
 
                 if let None = connection {
-                    let secondary_bias_neuron = get_bias_neuron_for_layer(layer, &secondary_shape).unwrap();
+                    let secondary_bias_neuron = get_bias_neuron_for_layer(layer, &secondary_shape);
+                    if let Some(secondary_bias_neuron) = secondary_bias_neuron {
+                        connection = secondary_connections.iter()
+                            .find(|connection| {
+                                let to_neuron = to_non_bias_network_id(connection.to_neuron, &secondary_shape);
+
+                                if let Some(to_neuron) = to_neuron {
+                                    connection.from_neuron == secondary_bias_neuron && to_neuron == *neuron_id
+                                } else {
+                                    false
+                                }
+                            });
+                    }
+                } else {
+                    found_in_primary = true;
+                }
+            } else {
+                let secondary_bias_neuron = get_bias_neuron_for_layer(layer, &secondary_shape);
+                if let Some(secondary_bias_neuron) = secondary_bias_neuron {
                     connection = secondary_connections.iter()
                         .find(|connection| {
                             let to_neuron = to_non_bias_network_id(connection.to_neuron, &secondary_shape);
@@ -220,34 +242,22 @@
                                 false
                             }
                         });
-                } else {
-                    found_in_primary = true;
                 }
-            } else {
-                let secondary_bias_neuron = get_bias_neuron_for_layer(layer, &secondary_shape).unwrap();
-                connection = secondary_connections.iter()
-                    .find(|connection| {
-                        let to_neuron = to_non_bias_network_id(connection.to_neuron, &secondary_shape);
-
-                        if let Some(to_neuron) = to_neuron {
-                            connection.from_neuron == secondary_bias_neuron && to_neuron == *neuron_id
-                        } else {
-                            false
-                        }
-                    });
 
                 if let None = connection {
-                    let primary_bias_neuron = get_bias_neuron_for_layer(layer, &primary_shape).unwrap();
-                    connection = primary_connections.iter()
-                        .find(|connection| {
-                            let to_neuron = to_non_bias_network_id(connection.to_neuron, &primary_shape);
+                    let primary_bias_neuron = get_bias_neuron_for_layer(layer, &primary_shape);
+                    if let Some(primary_bias_neuron) = primary_bias_neuron {
+                        connection = primary_connections.iter()
+                            .find(|connection| {
+                                let to_neuron = to_non_bias_network_id(connection.to_neuron, &primary_shape);
 
-                            if let Some(to_neuron) = to_neuron {
-                                connection.from_neuron == primary_bias_neuron && to_neuron == *neuron_id
-                            } else {
-                                false
-                            }
-                        });
+                                if let Some(to_neuron) = to_neuron {
+                                    connection.from_neuron == primary_bias_neuron && to_neuron == *neuron_id
+                                } else {
+                                    false
+                                }
+                            });
+                    }
                 } else {
                     found_in_primary = true;
                 }
@@ -257,15 +267,15 @@
             if *is_primary {
                 let original_from_neuron = to_non_bias_network_id(connection.from_neuron, &primary_shape);
                 let original_to_neuron = to_non_bias_network_id(connection.to_neuron, &primary_shape);
-                debug!("Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", bias_neuron, translated_neuron_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, bias_neuron, neuron_id);
+                trace!("Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", bias_neuron, translated_neuron_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, bias_neuron, neuron_id);
             } else {
                 let original_from_neuron = to_non_bias_network_id(connection.from_neuron, &secondary_shape);
                 let original_to_neuron = to_non_bias_network_id(connection.to_neuron, &secondary_shape);
-                debug!("Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", bias_neuron, translated_neuron_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, bias_neuron, neuron_id);
+                trace!("Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", bias_neuron, translated_neuron_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, bias_neuron, neuron_id);
             }
             new_fann.set_weight(bias_neuron, translated_neuron_id, connection.weight);
         } else {
-            debug!("Connection not found for bias ({}, {}) -> ({}, {}) primary: {}", bias_neuron, neuron_id, bias_neuron, translated_neuron_id, is_primary);
+            trace!("Connection not found for bias ({}, {}) -> ({}, {}) primary: {}", bias_neuron, neuron_id, bias_neuron, translated_neuron_id, is_primary);
         }
     }
 }
diff --git a/gemla/src/core/mod.rs b/gemla/src/core/mod.rs
index 9d831a3..4c2b85d 100644
--- a/gemla/src/core/mod.rs
+++ b/gemla/src/core/mod.rs
@@ -6,7 +6,7 @@ pub mod genetic_node;
 use crate::{error::Error, tree::Tree};
 use async_recursion::async_recursion;
 use file_linked::{constants::data_format::DataFormat, FileLinked};
-use futures::{executor::block_on, future};
+use futures::{executor::{block_on, LocalPool}, future, task::{LocalFutureObj, LocalSpawn, LocalSpawnExt}, FutureExt};
 use genetic_node::{GeneticNode, GeneticNodeWrapper, GeneticState};
 use log::{info, trace, warn};
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
@@ -334,12 +334,7 @@ where
 
         node.process_node(gemla_context.clone()).await?;
 
-        if node.state() == GeneticState::Simulate
-        {
-            node.process_node(gemla_context.clone()).await?;
-        }
-
-        trace!(
+        info!(
            "{:?} completed in {:?} for {}",
            node_state,
            node_state_time.elapsed(),