Final adjustments for round 2

This commit is contained in:
vandomej 2024-04-06 09:55:18 -07:00
parent 7a1f82ac63
commit a11def630a
6 changed files with 209 additions and 103 deletions

View file

@ -36,7 +36,7 @@ def hierarchy_pos(G, root=None, width=1., vert_gap=0.2, vert_loc=0, xcenter=0.5)
return _hierarchy_pos(G, root, width, vert_gap, vert_loc, xcenter)
# Simplified JSON data for demonstration
with open('gemla/test.json', 'r') as file:
with open('gemla/round2.json', 'r') as file:
simplified_json_data = json.load(file)
# Function to traverse the tree and create a graph

View file

@ -1,5 +1,5 @@
/// On-disk serialization formats understood by the file-linked wrapper.
/// (The diff had left both the old `Json` and new `Json,` lines in place,
/// producing a duplicated variant; only one is kept here.)
#[derive(Debug)]
pub enum DataFormat {
    /// Compact binary encoding (read elsewhere via `bincode::deserialize_from`).
    Bincode,
    /// Human-readable encoding (read elsewhere via `serde_json::from_reader`).
    Json,
}

View file

@ -1,19 +1,21 @@
//! A wrapper around an object that ties it to a physical file
pub mod error;
pub mod constants;
pub mod error;
use anyhow::{anyhow, Context};
use constants::data_format::DataFormat;
use error::Error;
use log::info;
use serde::{de::DeserializeOwned, Serialize};
use tokio::sync::RwLock;
use std::{
fs::{copy, remove_file, File}, io::{ErrorKind, Write}, path::{Path, PathBuf}, sync::Arc, thread::{self, JoinHandle}
fs::{copy, remove_file, File},
io::{ErrorKind, Write},
path::{Path, PathBuf},
sync::Arc,
thread::{self, JoinHandle},
};
use tokio::sync::RwLock;
/// A wrapper around an object `T` that ties the object to a physical file
#[derive(Debug)]
@ -146,7 +148,7 @@ where
path: path.to_path_buf(),
temp_file_path,
file_thread: None,
data_format
data_format,
};
result.write_data().await?;
@ -341,7 +343,6 @@ where
self.write_data().await?;
Ok(result)
}
}
@ -417,16 +418,16 @@ where
.ok_or_else(|| anyhow!("Unable to get filename for tempfile {}", path.display()))?
));
match File::open(path).map_err(Error::from).and_then(|file| {
match data_format {
match File::open(path)
.map_err(Error::from)
.and_then(|file| match data_format {
DataFormat::Bincode => bincode::deserialize_from::<File, T>(file)
.with_context(|| format!("Unable to deserialize file {}", path.display()))
.map_err(Error::from),
DataFormat::Json => serde_json::from_reader(file)
.with_context(|| format!("Unable to deserialize file {}", path.display()))
.map_err(Error::from),
}
}) {
}) {
Ok(val) => Ok(FileLinked {
val: Arc::new(RwLock::new(val)),
path: path.to_path_buf(),
@ -457,7 +458,11 @@ where
}
}
fn from_temp_file(temp_file_path: &Path, path: &Path, data_format: &DataFormat) -> Result<T, Error> {
fn from_temp_file(
temp_file_path: &Path,
path: &Path,
data_format: &DataFormat,
) -> Result<T, Error> {
let file = File::open(temp_file_path)
.with_context(|| format!("Unable to open file {}", temp_file_path.display()))?;
@ -505,7 +510,7 @@ mod tests {
pub async fn run<F, Fut>(&self, op: F) -> ()
where
F: FnOnce(PathBuf) -> Fut,
Fut: std::future::Future<Output = ()>
Fut: std::future::Future<Output = ()>,
{
op(self.path.clone()).await
}
@ -523,132 +528,169 @@ mod tests {
// Checks that `readonly()` hands back a shared view reflecting the stored
// value. (Reconstructed: the diff had interleaved the old and new bodies.)
async fn test_readonly() {
    let path = PathBuf::from("test_readonly");
    let cleanup = CleanUp::new(&path);
    cleanup
        .run(|p| async move {
            let val = vec!["one", "two", ""];

            let linked_object = FileLinked::new(val.clone(), &p, DataFormat::Json)
                .await
                .expect("Unable to create file linked object");
            let linked_object_arc = linked_object.readonly();
            let linked_object_ref = linked_object_arc.read().await;
            assert_eq!(*linked_object_ref, val);
        })
        .await;
}
#[tokio::test]
// Verifies that `FileLinked::new` with `DataFormat::Bincode` writes a file
// that bincode can read back to the original value. (Reconstructed: the diff
// had interleaved the old and new bodies.)
async fn test_new() {
    let path = PathBuf::from("test_new");
    let cleanup = CleanUp::new(&path);
    cleanup
        .run(|p| async move {
            let val = "test";

            FileLinked::new(val, &p, DataFormat::Bincode)
                .await
                .expect("Unable to create file linked object");

            let file = File::open(&p).expect("Unable to open file");
            let result: String =
                bincode::deserialize_from(file).expect("Unable to deserialize from file");
            assert_eq!(result, val);
        })
        .await;
}
#[tokio::test]
// Exercises `mutate` twice (append, then in-place update) and checks the
// value visible through `readonly()` after each step. The read guard is
// dropped before each mutation to release the lock. (Reconstructed: the diff
// had interleaved the old and new bodies.)
async fn test_mutate() {
    let path = PathBuf::from("test_mutate");
    let cleanup = CleanUp::new(&path);
    cleanup
        .run(|p| async move {
            let list = vec![1, 2, 3, 4];
            let mut file_linked_list = FileLinked::new(list, &p, DataFormat::Json)
                .await
                .expect("Unable to create file linked object");
            let file_linked_list_arc = file_linked_list.readonly();
            let file_linked_list_ref = file_linked_list_arc.read().await;

            assert_eq!(*file_linked_list_ref, vec![1, 2, 3, 4]);

            drop(file_linked_list_ref);
            file_linked_list
                .mutate(|v1| v1.push(5))
                .await
                .expect("Error mutating file linked object");
            let file_linked_list_arc = file_linked_list.readonly();
            let file_linked_list_ref = file_linked_list_arc.read().await;

            assert_eq!(*file_linked_list_ref, vec![1, 2, 3, 4, 5]);

            drop(file_linked_list_ref);
            file_linked_list
                .mutate(|v1| v1[1] = 1)
                .await
                .expect("Error mutating file linked object");
            let file_linked_list_arc = file_linked_list.readonly();
            let file_linked_list_ref = file_linked_list_arc.read().await;

            assert_eq!(*file_linked_list_ref, vec![1, 1, 3, 4, 5]);

            drop(file_linked_list);
        })
        .await;
}
#[tokio::test]
// Exercises `mutate_async`: the closure takes a write lock, performs both
// edits, and returns `Ok`; the outer/inner `expect`s unwrap the task result
// and the closure's own `Result`. (Reconstructed: the diff had interleaved
// the old and new bodies.)
async fn test_async_mutate() {
    let path = PathBuf::from("test_async_mutate");
    let cleanup = CleanUp::new(&path);
    cleanup
        .run(|p| async move {
            let list = vec![1, 2, 3, 4];
            let mut file_linked_list = FileLinked::new(list, &p, DataFormat::Json)
                .await
                .expect("Unable to create file linked object");
            let file_linked_list_arc = file_linked_list.readonly();
            let file_linked_list_ref = file_linked_list_arc.read().await;

            assert_eq!(*file_linked_list_ref, vec![1, 2, 3, 4]);

            drop(file_linked_list_ref);
            file_linked_list
                .mutate_async(|v1| async move {
                    let mut v = v1.write().await;
                    v.push(5);
                    v[1] = 1;
                    Ok::<(), Error>(())
                })
                .await
                .expect("Error mutating file linked object")
                .expect("Error mutating file linked object");

            let file_linked_list_arc = file_linked_list.readonly();
            let file_linked_list_ref = file_linked_list_arc.read().await;

            assert_eq!(*file_linked_list_ref, vec![1, 1, 3, 4, 5]);

            drop(file_linked_list);
        })
        .await;
}
#[tokio::test]
// Verifies `replace` swaps the wrapped value wholesale: the view reads
// `val1` before and `val2` after. (Reconstructed: the diff had interleaved
// the old and new bodies.)
async fn test_replace() {
    let path = PathBuf::from("test_replace");
    let cleanup = CleanUp::new(&path);
    cleanup
        .run(|p| async move {
            let val1 = String::from("val1");
            let val2 = String::from("val2");
            let mut file_linked_list = FileLinked::new(val1.clone(), &p, DataFormat::Bincode)
                .await
                .expect("Unable to create file linked object");
            let file_linked_list_arc = file_linked_list.readonly();
            let file_linked_list_ref = file_linked_list_arc.read().await;

            assert_eq!(*file_linked_list_ref, val1);

            file_linked_list
                .replace(val2.clone())
                .await
                .expect("Error replacing file linked object");
            let file_linked_list_arc = file_linked_list.readonly();
            let file_linked_list_ref = file_linked_list_arc.read().await;

            assert_eq!(*file_linked_list_ref, val2);

            drop(file_linked_list);
        })
        .await;
}
#[tokio::test]
async fn test_from_file(){
async fn test_from_file() {
let path = PathBuf::from("test_from_file");
let cleanup = CleanUp::new(&path);
cleanup.run(|p| async move {
let value: Vec<f64> = vec![2.0, 3.0, 5.0];
let file = File::create(&p).expect("Unable to create file");
cleanup
.run(|p| async move {
let value: Vec<f64> = vec![2.0, 3.0, 5.0];
let file = File::create(&p).expect("Unable to create file");
bincode::serialize_into(&file, &value).expect("Unable to serialize into file");
drop(file);
bincode::serialize_into(&file, &value).expect("Unable to serialize into file");
drop(file);
let linked_object: FileLinked<Vec<f64>> = FileLinked::from_file(&p, DataFormat::Bincode).expect("Unable to create file linked object");
let linked_object_arc = linked_object.readonly();
let linked_object_ref = linked_object_arc.read().await;
let linked_object: FileLinked<Vec<f64>> =
FileLinked::from_file(&p, DataFormat::Bincode)
.expect("Unable to create file linked object");
let linked_object_arc = linked_object.readonly();
let linked_object_ref = linked_object_arc.read().await;
assert_eq!(*linked_object_ref, value);
assert_eq!(*linked_object_ref, value);
drop(linked_object);
}).await;
drop(linked_object);
})
.await;
}
}

View file

@ -1,19 +1,23 @@
use std::sync::Arc;
use serde::ser::SerializeTuple;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use tokio::sync::Semaphore;
const SHARED_SEMAPHORE_CONCURRENCY_LIMIT: usize = 50;
const VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT: usize = 1;
/// Shared concurrency state threaded through the genetic-node simulations.
/// Cloning is cheap: both fields are `Arc`-wrapped tokio semaphores, so
/// clones share the same permit pools.
#[derive(Debug, Clone)]
pub struct FighterContext {
/// Caps how many simulations run concurrently
/// (`SHARED_SEMAPHORE_CONCURRENCY_LIMIT` = 50 permits by default).
pub shared_semaphore: Arc<Semaphore>,
/// Caps how many simulations may run with a visible display
/// (`VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT` = 1 permit by default).
pub visible_simulations: Arc<Semaphore>,
}
impl Default for FighterContext {
    /// Builds a context whose semaphores carry the module-level default
    /// permit counts (shared simulation limit and visible-simulation limit).
    fn default() -> Self {
        let shared = Arc::new(Semaphore::new(SHARED_SEMAPHORE_CONCURRENCY_LIMIT));
        let visible = Arc::new(Semaphore::new(VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT));
        Self {
            shared_semaphore: shared,
            visible_simulations: visible,
        }
    }
}
@ -28,19 +32,47 @@ impl Serialize for FighterContext {
// This part is tricky since Semaphore does not expose its initial permits.
// You might need to store the concurrency limit as a separate field if this assumption doesn't hold.
let concurrency_limit = SHARED_SEMAPHORE_CONCURRENCY_LIMIT;
serializer.serialize_u64(concurrency_limit as u64)
let visible_concurrency_limit = VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT;
// serializer.serialize_u64(concurrency_limit as u64)
// Serialize the concurrency limit as a tuple
let mut state = serializer.serialize_tuple(2)?;
state.serialize_element(&concurrency_limit)?;
state.serialize_element(&visible_concurrency_limit)?;
state.end()
}
}
// Custom deserialization to reconstruct the FighterContext from a concurrency limit.
impl<'de> Deserialize<'de> for FighterContext {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
fn deserialize<D>(_: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let concurrency_limit = u64::deserialize(deserializer)?;
// Deserialize the tuple
Ok(FighterContext {
shared_semaphore: Arc::new(Semaphore::new(concurrency_limit as usize)),
shared_semaphore: Arc::new(Semaphore::new(SHARED_SEMAPHORE_CONCURRENCY_LIMIT)),
visible_simulations: Arc::new(Semaphore::new(VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT)),
})
}
}
#[cfg(test)]
mod tests {
use super::*;
// Round-trips a default context through serde_json and checks that the
// reconstructed semaphores report the same number of available permits.
// NOTE(review): this only compares permit counts, not any permits currently
// held — serialization intentionally captures the configured limits only.
#[test]
fn test_serialization() {
let context = FighterContext::default();
let serialized = serde_json::to_string(&context).unwrap();
let deserialized: FighterContext = serde_json::from_str(&serialized).unwrap();
assert_eq!(
context.shared_semaphore.available_permits(),
deserialized.shared_semaphore.available_permits()
);
assert_eq!(
context.visible_simulations.available_permits(),
deserialized.visible_simulations.available_permits()
);
}
}

View file

@ -162,6 +162,7 @@ impl GeneticNode for FighterNN {
for i in 0..self.population_size {
let self_clone = self.clone();
let semaphore_clone = context.gemla_context.shared_semaphore.clone();
let display_simulation_semaphore = context.gemla_context.visible_simulations.clone();
let task = async move {
let nn = self_clone
@ -176,6 +177,7 @@ impl GeneticNode for FighterNN {
let folder = self_clone.folder.clone();
let generation = self_clone.generation;
let semaphore_clone = semaphore_clone.clone();
let display_simulation_semaphore = display_simulation_semaphore.clone();
let random_nn = folder
.join(format!("{}", generation))
@ -188,7 +190,19 @@ impl GeneticNode for FighterNN {
.await
.with_context(|| "Failed to acquire semaphore permit")?;
let (score, _) = run_1v1_simulation(&nn_clone, &random_nn).await?;
let display_simulation =
match display_simulation_semaphore.try_acquire_owned() {
Ok(s) => Some(s),
Err(_) => None,
};
let (score, _) = if let Some(display_simulation) = display_simulation {
let result = run_1v1_simulation(&nn_clone, &random_nn, true).await?;
drop(display_simulation);
result
} else {
run_1v1_simulation(&nn_clone, &random_nn, false).await?
};
drop(permit);
@ -206,7 +220,7 @@ impl GeneticNode for FighterNN {
Ok(scores) => scores.into_iter().sum::<f32>() / SIMULATION_ROUNDS as f32,
Err(e) => return Err(e), // Return the error if results collection failed
};
trace!("NN {:06}_fighter_nn_{} scored {}", self_clone.id, i, score);
debug!("NN {:06}_fighter_nn_{} scored {}", self_clone.id, i, score);
Ok((i, score))
};
@ -366,6 +380,7 @@ impl GeneticNode for FighterNN {
.join(right.generation.to_string())
.join(right.get_individual_id(right_nn_id));
let semaphore_clone = gemla_context.shared_semaphore.clone();
let display_simulation_semaphore = gemla_context.visible_simulations.clone();
let future = async move {
let permit = semaphore_clone
@ -373,8 +388,19 @@ impl GeneticNode for FighterNN {
.await
.with_context(|| "Failed to acquire semaphore permit")?;
let (left_score, right_score) =
run_1v1_simulation(&left_nn_path, &right_nn_path).await?;
let display_simulation = match display_simulation_semaphore.try_acquire_owned() {
Ok(s) => Some(s),
Err(_) => None,
};
let (left_score, right_score) = if let Some(display_simulation) = display_simulation
{
let result = run_1v1_simulation(&left_nn_path, &right_nn_path, true).await?;
drop(display_simulation);
result
} else {
run_1v1_simulation(&left_nn_path, &right_nn_path, false).await?
};
drop(permit);
@ -398,6 +424,8 @@ impl GeneticNode for FighterNN {
// Use the sigmoid function to determine lerp amount
let lerp_amount = 1.0 / (1.0 + (-score_difference).exp());
debug!("Lerp amount: {}", lerp_amount);
let mut nn_shapes = HashMap::new();
// Function to copy NNs from a source FighterNN to the new folder.
@ -517,7 +545,11 @@ impl FighterNN {
}
}
async fn run_1v1_simulation(nn_path_1: &Path, nn_path_2: &Path) -> Result<(f32, f32), Error> {
async fn run_1v1_simulation(
nn_path_1: &Path,
nn_path_2: &Path,
display_simulation: bool,
) -> Result<(f32, f32), Error> {
// Construct the score file path
let base_folder = nn_path_1.parent().unwrap();
let nn_1_id = nn_path_1.file_stem().unwrap().to_str().unwrap();
@ -574,7 +606,7 @@ async fn run_1v1_simulation(nn_path_1: &Path, nn_path_2: &Path) -> Result<(f32,
trace!("Running simulation for {} vs {}", nn_1_id, nn_2_id);
let _output = if thread_rng().gen_range(0..100) < 1 {
let _output = if display_simulation {
Command::new(GAME_EXECUTABLE_PATH)
.arg(&config1_arg)
.arg(&config2_arg)