Final adjustments for round 2
commit a11def630a (parent 7a1f82ac63)
6 changed files with 209 additions and 103 deletions
@@ -36,7 +36,7 @@ def hierarchy_pos(G, root=None, width=1., vert_gap=0.2, vert_loc=0, xcenter=0.5)
     return _hierarchy_pos(G, root, width, vert_gap, vert_loc, xcenter)
 
 # Simplified JSON data for demonstration
-with open('gemla/test.json', 'r') as file:
+with open('gemla/round2.json', 'r') as file:
     simplified_json_data = json.load(file)
 
 # Function to traverse the tree and create a graph
@@ -1,5 +1,5 @@
 #[derive(Debug)]
 pub enum DataFormat {
     Bincode,
-    Json
+    Json,
 }
@@ -1 +1 @@
-pub mod data_format;
+pub mod data_format;
@@ -1,19 +1,21 @@
 //! A wrapper around an object that ties it to a physical file
 
-pub mod error;
 pub mod constants;
+pub mod error;
 
 use anyhow::{anyhow, Context};
 use constants::data_format::DataFormat;
 use error::Error;
 use log::info;
 use serde::{de::DeserializeOwned, Serialize};
-use tokio::sync::RwLock;
 use std::{
-    fs::{copy, remove_file, File}, io::{ErrorKind, Write}, path::{Path, PathBuf}, sync::Arc, thread::{self, JoinHandle}
+    fs::{copy, remove_file, File},
+    io::{ErrorKind, Write},
+    path::{Path, PathBuf},
+    sync::Arc,
+    thread::{self, JoinHandle},
 };
+use tokio::sync::RwLock;
 
 /// A wrapper around an object `T` that ties the object to a physical file
 #[derive(Debug)]
@@ -146,7 +148,7 @@ where
             path: path.to_path_buf(),
             temp_file_path,
             file_thread: None,
-            data_format
+            data_format,
         };
 
         result.write_data().await?;
@@ -341,7 +343,6 @@ where
 
         self.write_data().await?;
 
-
         Ok(result)
     }
 }
@@ -417,16 +418,16 @@ where
                 .ok_or_else(|| anyhow!("Unable to get filename for tempfile {}", path.display()))?
         ));
 
-        match File::open(path).map_err(Error::from).and_then(|file| {
-            match data_format {
+        match File::open(path)
+            .map_err(Error::from)
+            .and_then(|file| match data_format {
                 DataFormat::Bincode => bincode::deserialize_from::<File, T>(file)
                     .with_context(|| format!("Unable to deserialize file {}", path.display()))
                     .map_err(Error::from),
                 DataFormat::Json => serde_json::from_reader(file)
                     .with_context(|| format!("Unable to deserialize file {}", path.display()))
                     .map_err(Error::from),
-            }
-        }) {
+            }) {
             Ok(val) => Ok(FileLinked {
                 val: Arc::new(RwLock::new(val)),
                 path: path.to_path_buf(),
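Note: the hunk above only reformats the `DataFormat` dispatch into rustfmt-style chaining; behavior is unchanged. For readers outside the crate, a self-contained sketch of the same open-then-dispatch idea (the `load_as` helper and its anyhow-based error type are illustrative, not part of this repository):

use std::fs::File;
use std::path::Path;

use anyhow::{Context, Result};
use serde::de::DeserializeOwned;

// Illustrative mirror of the crate's DataFormat enum.
enum DataFormat {
    Bincode,
    Json,
}

// Open `path` and deserialize it according to `format`.
fn load_as<T: DeserializeOwned>(path: &Path, format: &DataFormat) -> Result<T> {
    let file = File::open(path)
        .with_context(|| format!("Unable to open file {}", path.display()))?;
    match format {
        DataFormat::Bincode => bincode::deserialize_from(file)
            .with_context(|| format!("Unable to deserialize file {}", path.display())),
        DataFormat::Json => serde_json::from_reader(file)
            .with_context(|| format!("Unable to deserialize file {}", path.display())),
    }
}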
@@ -457,7 +458,11 @@ where
         }
     }
 
-    fn from_temp_file(temp_file_path: &Path, path: &Path, data_format: &DataFormat) -> Result<T, Error> {
+    fn from_temp_file(
+        temp_file_path: &Path,
+        path: &Path,
+        data_format: &DataFormat,
+    ) -> Result<T, Error> {
         let file = File::open(temp_file_path)
             .with_context(|| format!("Unable to open file {}", temp_file_path.display()))?;
 
@@ -505,7 +510,7 @@ mod tests {
        pub async fn run<F, Fut>(&self, op: F) -> ()
        where
            F: FnOnce(PathBuf) -> Fut,
-            Fut: std::future::Future<Output = ()>
+            Fut: std::future::Future<Output = ()>,
        {
            op(self.path.clone()).await
        }
@@ -523,132 +528,169 @@ mod tests {
     async fn test_readonly() {
         let path = PathBuf::from("test_readonly");
         let cleanup = CleanUp::new(&path);
-        cleanup.run(|p| async move {
-            let val = vec!["one", "two", ""];
+        cleanup
+            .run(|p| async move {
+                let val = vec!["one", "two", ""];
 
-            let linked_object = FileLinked::new(val.clone(), &p, DataFormat::Json).await.expect("Unable to create file linked object");
-            let linked_object_arc = linked_object.readonly();
-            let linked_object_ref = linked_object_arc.read().await;
-            assert_eq!(*linked_object_ref, val);
-        }).await;
+                let linked_object = FileLinked::new(val.clone(), &p, DataFormat::Json)
+                    .await
+                    .expect("Unable to create file linked object");
+                let linked_object_arc = linked_object.readonly();
+                let linked_object_ref = linked_object_arc.read().await;
+                assert_eq!(*linked_object_ref, val);
+            })
+            .await;
     }
 
     #[tokio::test]
     async fn test_new() {
         let path = PathBuf::from("test_new");
         let cleanup = CleanUp::new(&path);
-        cleanup.run(|p| async move {
-            let val = "test";
+        cleanup
+            .run(|p| async move {
+                let val = "test";
 
-            FileLinked::new(val, &p, DataFormat::Bincode).await.expect("Unable to create file linked object");
+                FileLinked::new(val, &p, DataFormat::Bincode)
+                    .await
+                    .expect("Unable to create file linked object");
 
                 let file = File::open(&p).expect("Unable to open file");
                 let result: String =
                     bincode::deserialize_from(file).expect("Unable to deserialize from file");
                 assert_eq!(result, val);
-        }).await;
+            })
+            .await;
     }
 
     #[tokio::test]
     async fn test_mutate() {
         let path = PathBuf::from("test_mutate");
         let cleanup = CleanUp::new(&path);
-        cleanup.run(|p| async move {
-            let list = vec![1, 2, 3, 4];
-            let mut file_linked_list = FileLinked::new(list, &p, DataFormat::Json).await.expect("Unable to create file linked object");
-            let file_linked_list_arc = file_linked_list.readonly();
-            let file_linked_list_ref = file_linked_list_arc.read().await;
+        cleanup
+            .run(|p| async move {
+                let list = vec![1, 2, 3, 4];
+                let mut file_linked_list = FileLinked::new(list, &p, DataFormat::Json)
+                    .await
+                    .expect("Unable to create file linked object");
+                let file_linked_list_arc = file_linked_list.readonly();
+                let file_linked_list_ref = file_linked_list_arc.read().await;
 
                 assert_eq!(*file_linked_list_ref, vec![1, 2, 3, 4]);
 
                 drop(file_linked_list_ref);
-            file_linked_list.mutate(|v1| v1.push(5)).await.expect("Error mutating file linked object");
-            let file_linked_list_arc = file_linked_list.readonly();
-            let file_linked_list_ref = file_linked_list_arc.read().await;
+                file_linked_list
+                    .mutate(|v1| v1.push(5))
+                    .await
+                    .expect("Error mutating file linked object");
+                let file_linked_list_arc = file_linked_list.readonly();
+                let file_linked_list_ref = file_linked_list_arc.read().await;
 
                 assert_eq!(*file_linked_list_ref, vec![1, 2, 3, 4, 5]);
 
                 drop(file_linked_list_ref);
-            file_linked_list.mutate(|v1| v1[1] = 1).await.expect("Error mutating file linked object");
-            let file_linked_list_arc = file_linked_list.readonly();
-            let file_linked_list_ref = file_linked_list_arc.read().await;
+                file_linked_list
+                    .mutate(|v1| v1[1] = 1)
+                    .await
+                    .expect("Error mutating file linked object");
+                let file_linked_list_arc = file_linked_list.readonly();
+                let file_linked_list_ref = file_linked_list_arc.read().await;
 
                 assert_eq!(*file_linked_list_ref, vec![1, 1, 3, 4, 5]);
 
                 drop(file_linked_list);
-        }).await;
+            })
+            .await;
     }
 
     #[tokio::test]
     async fn test_async_mutate() {
         let path = PathBuf::from("test_async_mutate");
         let cleanup = CleanUp::new(&path);
-        cleanup.run(|p| async move {
-            let list = vec![1, 2, 3, 4];
-            let mut file_linked_list = FileLinked::new(list, &p, DataFormat::Json).await.expect("Unable to create file linked object");
-            let file_linked_list_arc = file_linked_list.readonly();
-            let file_linked_list_ref = file_linked_list_arc.read().await;
+        cleanup
+            .run(|p| async move {
+                let list = vec![1, 2, 3, 4];
+                let mut file_linked_list = FileLinked::new(list, &p, DataFormat::Json)
+                    .await
+                    .expect("Unable to create file linked object");
+                let file_linked_list_arc = file_linked_list.readonly();
+                let file_linked_list_ref = file_linked_list_arc.read().await;
 
                 assert_eq!(*file_linked_list_ref, vec![1, 2, 3, 4]);
 
                 drop(file_linked_list_ref);
-            file_linked_list.mutate_async(|v1| async move {
-                let mut v = v1.write().await;
-                v.push(5);
-                v[1] = 1;
-                Ok::<(), Error>(())
-            }).await.expect("Error mutating file linked object").expect("Error mutating file linked object");
+                file_linked_list
+                    .mutate_async(|v1| async move {
+                        let mut v = v1.write().await;
+                        v.push(5);
+                        v[1] = 1;
+                        Ok::<(), Error>(())
+                    })
+                    .await
+                    .expect("Error mutating file linked object")
+                    .expect("Error mutating file linked object");
 
                 let file_linked_list_arc = file_linked_list.readonly();
                 let file_linked_list_ref = file_linked_list_arc.read().await;
 
                 assert_eq!(*file_linked_list_ref, vec![1, 1, 3, 4, 5]);
 
                 drop(file_linked_list);
-        }).await;
+            })
+            .await;
     }
 
     #[tokio::test]
     async fn test_replace() {
         let path = PathBuf::from("test_replace");
         let cleanup = CleanUp::new(&path);
-        cleanup.run(|p| async move {
-            let val1 = String::from("val1");
-            let val2 = String::from("val2");
-            let mut file_linked_list = FileLinked::new(val1.clone(), &p, DataFormat::Bincode).await.expect("Unable to create file linked object");
-            let file_linked_list_arc = file_linked_list.readonly();
-            let file_linked_list_ref = file_linked_list_arc.read().await;
+        cleanup
+            .run(|p| async move {
+                let val1 = String::from("val1");
+                let val2 = String::from("val2");
+                let mut file_linked_list = FileLinked::new(val1.clone(), &p, DataFormat::Bincode)
+                    .await
+                    .expect("Unable to create file linked object");
+                let file_linked_list_arc = file_linked_list.readonly();
+                let file_linked_list_ref = file_linked_list_arc.read().await;
 
                 assert_eq!(*file_linked_list_ref, val1);
 
-            file_linked_list.replace(val2.clone()).await.expect("Error replacing file linked object");
-            let file_linked_list_arc = file_linked_list.readonly();
-            let file_linked_list_ref = file_linked_list_arc.read().await;
+                file_linked_list
+                    .replace(val2.clone())
+                    .await
+                    .expect("Error replacing file linked object");
+                let file_linked_list_arc = file_linked_list.readonly();
+                let file_linked_list_ref = file_linked_list_arc.read().await;
 
                 assert_eq!(*file_linked_list_ref, val2);
 
                 drop(file_linked_list);
-        }).await;
+            })
+            .await;
     }
 
     #[tokio::test]
-    async fn test_from_file(){
+    async fn test_from_file() {
         let path = PathBuf::from("test_from_file");
         let cleanup = CleanUp::new(&path);
-        cleanup.run(|p| async move {
-            let value: Vec<f64> = vec![2.0, 3.0, 5.0];
-            let file = File::create(&p).expect("Unable to create file");
+        cleanup
+            .run(|p| async move {
+                let value: Vec<f64> = vec![2.0, 3.0, 5.0];
+                let file = File::create(&p).expect("Unable to create file");
 
                 bincode::serialize_into(&file, &value).expect("Unable to serialize into file");
                 drop(file);
 
-            let linked_object: FileLinked<Vec<f64>> = FileLinked::from_file(&p, DataFormat::Bincode).expect("Unable to create file linked object");
-            let linked_object_arc = linked_object.readonly();
-            let linked_object_ref = linked_object_arc.read().await;
+                let linked_object: FileLinked<Vec<f64>> =
+                    FileLinked::from_file(&p, DataFormat::Bincode)
+                        .expect("Unable to create file linked object");
+                let linked_object_arc = linked_object.readonly();
+                let linked_object_ref = linked_object_arc.read().await;
 
                 assert_eq!(*linked_object_ref, value);
 
                 drop(linked_object);
-        }).await;
+            })
+            .await;
     }
 }
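Note: the tests above all go through a small `CleanUp` fixture whose `run` method hands the body a `PathBuf` (its signature is visible in the `-505` hunk). A rough sketch of such a guard, assuming it removes the test file on drop (the `Drop` impl is not shown in this diff, so that part is an assumption):

use std::fs::remove_file;
use std::path::{Path, PathBuf};

// Illustrative test fixture: hands the test body a path and removes the
// file when the fixture goes out of scope. Only `run` appears in the diff;
// the Drop-based cleanup is an assumption.
pub struct CleanUp {
    path: PathBuf,
}

impl CleanUp {
    pub fn new(path: &Path) -> Self {
        CleanUp {
            path: path.to_path_buf(),
        }
    }

    pub async fn run<F, Fut>(&self, op: F)
    where
        F: FnOnce(PathBuf) -> Fut,
        Fut: std::future::Future<Output = ()>,
    {
        op(self.path.clone()).await
    }
}

impl Drop for CleanUp {
    fn drop(&mut self) {
        // Ignore the error if the test never created the file.
        let _ = remove_file(&self.path);
    }
}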
@@ -1,19 +1,23 @@
 use std::sync::Arc;
 
+use serde::ser::SerializeTuple;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use tokio::sync::Semaphore;
 
 const SHARED_SEMAPHORE_CONCURRENCY_LIMIT: usize = 50;
+const VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT: usize = 1;
 
 #[derive(Debug, Clone)]
 pub struct FighterContext {
     pub shared_semaphore: Arc<Semaphore>,
+    pub visible_simulations: Arc<Semaphore>,
 }
 
 impl Default for FighterContext {
     fn default() -> Self {
         FighterContext {
             shared_semaphore: Arc::new(Semaphore::new(SHARED_SEMAPHORE_CONCURRENCY_LIMIT)),
+            visible_simulations: Arc::new(Semaphore::new(VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT)),
         }
     }
 }
@@ -28,19 +32,47 @@ impl Serialize for FighterContext {
         // This part is tricky since Semaphore does not expose its initial permits.
         // You might need to store the concurrency limit as a separate field if this assumption doesn't hold.
         let concurrency_limit = SHARED_SEMAPHORE_CONCURRENCY_LIMIT;
-        serializer.serialize_u64(concurrency_limit as u64)
+        let visible_concurrency_limit = VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT;
+        // serializer.serialize_u64(concurrency_limit as u64)
+
+        // Serialize the concurrency limit as a tuple
+        let mut state = serializer.serialize_tuple(2)?;
+        state.serialize_element(&concurrency_limit)?;
+        state.serialize_element(&visible_concurrency_limit)?;
+        state.end()
     }
 }
 
 // Custom deserialization to reconstruct the FighterContext from a concurrency limit.
 impl<'de> Deserialize<'de> for FighterContext {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    fn deserialize<D>(_: D) -> Result<Self, D::Error>
     where
         D: Deserializer<'de>,
     {
-        let concurrency_limit = u64::deserialize(deserializer)?;
+        // Deserialize the tuple
         Ok(FighterContext {
-            shared_semaphore: Arc::new(Semaphore::new(concurrency_limit as usize)),
+            shared_semaphore: Arc::new(Semaphore::new(SHARED_SEMAPHORE_CONCURRENCY_LIMIT)),
+            visible_simulations: Arc::new(Semaphore::new(VISIBLE_SIMULATIONS_CONCURRENCY_LIMIT)),
         })
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_serialization() {
+        let context = FighterContext::default();
+        let serialized = serde_json::to_string(&context).unwrap();
+        let deserialized: FighterContext = serde_json::from_str(&serialized).unwrap();
+        assert_eq!(
+            context.shared_semaphore.available_permits(),
+            deserialized.shared_semaphore.available_permits()
+        );
+        assert_eq!(
+            context.visible_simulations.available_permits(),
+            deserialized.visible_simulations.available_permits()
+        );
+    }
+}
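Note: the new `Serialize` impl writes the two limits with `serialize_tuple(2)`, while the new `Deserialize` impl discards its input and rebuilds both semaphores from the constants (which is why the added round-trip test only compares available permits). If the serialized values were meant to be honored instead, a matching tuple read could look like the sketch below; this is illustrative, not what the commit does:

use std::sync::Arc;

use serde::{Deserialize, Deserializer};
use tokio::sync::Semaphore;

pub struct FighterContext {
    pub shared_semaphore: Arc<Semaphore>,
    pub visible_simulations: Arc<Semaphore>,
}

impl<'de> Deserialize<'de> for FighterContext {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Read back the (shared, visible) limits written by serialize_tuple(2).
        let (shared_limit, visible_limit) = <(usize, usize)>::deserialize(deserializer)?;
        Ok(FighterContext {
            shared_semaphore: Arc::new(Semaphore::new(shared_limit)),
            visible_simulations: Arc::new(Semaphore::new(visible_limit)),
        })
    }
}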
@@ -162,6 +162,7 @@ impl GeneticNode for FighterNN {
         for i in 0..self.population_size {
             let self_clone = self.clone();
             let semaphore_clone = context.gemla_context.shared_semaphore.clone();
+            let display_simulation_semaphore = context.gemla_context.visible_simulations.clone();
 
             let task = async move {
                 let nn = self_clone
@@ -176,6 +177,7 @@ impl GeneticNode for FighterNN {
                 let folder = self_clone.folder.clone();
                 let generation = self_clone.generation;
                 let semaphore_clone = semaphore_clone.clone();
+                let display_simulation_semaphore = display_simulation_semaphore.clone();
 
                 let random_nn = folder
                     .join(format!("{}", generation))
@@ -188,7 +190,19 @@ impl GeneticNode for FighterNN {
                     .await
                     .with_context(|| "Failed to acquire semaphore permit")?;
 
-                let (score, _) = run_1v1_simulation(&nn_clone, &random_nn).await?;
+                let display_simulation =
+                    match display_simulation_semaphore.try_acquire_owned() {
+                        Ok(s) => Some(s),
+                        Err(_) => None,
+                    };
+
+                let (score, _) = if let Some(display_simulation) = display_simulation {
+                    let result = run_1v1_simulation(&nn_clone, &random_nn, true).await?;
+                    drop(display_simulation);
+                    result
+                } else {
+                    run_1v1_simulation(&nn_clone, &random_nn, false).await?
+                };
 
                 drop(permit);
 
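Note: the change above layers two semaphores. The shared permit is awaited so overall simulation concurrency stays at the configured 50, while the display permit is taken with the non-blocking `try_acquire_owned`, so at most one task at a time runs with rendering enabled and everyone else falls back to the head-less path. A condensed, self-contained sketch of that gating pattern (`run_sim` stands in for `run_1v1_simulation`; the numbers mirror the constants in the diff):

use std::sync::Arc;

use tokio::sync::Semaphore;

// Stand-in for run_1v1_simulation.
async fn run_sim(id: usize, rendered: bool) {
    println!("simulation {id} rendered={rendered}");
}

#[tokio::main]
async fn main() {
    let shared = Arc::new(Semaphore::new(50)); // overall work concurrency
    let visible = Arc::new(Semaphore::new(1)); // at most one rendered run

    let mut tasks = Vec::new();
    for i in 0..4 {
        let shared = shared.clone();
        let visible = visible.clone();
        tasks.push(tokio::spawn(async move {
            // Wait for a work slot.
            let permit = shared.acquire_owned().await.expect("semaphore closed");
            // Render only if a display slot is free right now; never wait for it.
            let display = visible.try_acquire_owned().ok();
            run_sim(i, display.is_some()).await;
            drop(display); // release the display slot, if held
            drop(permit); // release the work slot
        }));
    }
    for t in tasks {
        t.await.expect("task panicked");
    }
}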
@@ -206,7 +220,7 @@ impl GeneticNode for FighterNN {
                 Ok(scores) => scores.into_iter().sum::<f32>() / SIMULATION_ROUNDS as f32,
                 Err(e) => return Err(e), // Return the error if results collection failed
             };
-            trace!("NN {:06}_fighter_nn_{} scored {}", self_clone.id, i, score);
+            debug!("NN {:06}_fighter_nn_{} scored {}", self_clone.id, i, score);
             Ok((i, score))
         };
 
@@ -366,6 +380,7 @@ impl GeneticNode for FighterNN {
                 .join(right.generation.to_string())
                 .join(right.get_individual_id(right_nn_id));
             let semaphore_clone = gemla_context.shared_semaphore.clone();
+            let display_simulation_semaphore = gemla_context.visible_simulations.clone();
 
             let future = async move {
                 let permit = semaphore_clone
@@ -373,8 +388,19 @@ impl GeneticNode for FighterNN {
                     .await
                     .with_context(|| "Failed to acquire semaphore permit")?;
 
-                let (left_score, right_score) =
-                    run_1v1_simulation(&left_nn_path, &right_nn_path).await?;
+                let display_simulation = match display_simulation_semaphore.try_acquire_owned() {
+                    Ok(s) => Some(s),
+                    Err(_) => None,
+                };
+
+                let (left_score, right_score) = if let Some(display_simulation) = display_simulation
+                {
+                    let result = run_1v1_simulation(&left_nn_path, &right_nn_path, true).await?;
+                    drop(display_simulation);
+                    result
+                } else {
+                    run_1v1_simulation(&left_nn_path, &right_nn_path, false).await?
+                };
 
                 drop(permit);
 
@@ -398,6 +424,8 @@ impl GeneticNode for FighterNN {
         // Use the sigmoid function to determine lerp amount
         let lerp_amount = 1.0 / (1.0 + (-score_difference).exp());
 
+        debug!("Lerp amount: {}", lerp_amount);
+
         let mut nn_shapes = HashMap::new();
 
         // Function to copy NNs from a source FighterNN to the new folder.
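Note: the added `debug!` makes the blend factor visible. The sigmoid maps the score difference onto (0, 1): equal scores give 0.5, and a gap of ±2 already pushes the lerp weight to roughly 0.88 / 0.12. A tiny illustration of that mapping (the `lerp` helper is illustrative; the actual weight-blending code is outside this hunk):

// Sigmoid of the score difference: 0.0 -> 0.50, +2.0 -> ~0.88, -2.0 -> ~0.12.
fn lerp_amount(score_difference: f32) -> f32 {
    1.0 / (1.0 + (-score_difference).exp())
}

// Linear interpolation between two weights using that amount.
fn lerp(a: f32, b: f32, t: f32) -> f32 {
    a + (b - a) * t
}

fn main() {
    for diff in [-2.0_f32, 0.0, 2.0] {
        let t = lerp_amount(diff);
        println!("diff={diff:+.1} lerp_amount={t:.3} blended={:.3}", lerp(0.0, 1.0, t));
    }
}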
@@ -517,7 +545,11 @@ impl FighterNN {
     }
 }
 
-async fn run_1v1_simulation(nn_path_1: &Path, nn_path_2: &Path) -> Result<(f32, f32), Error> {
+async fn run_1v1_simulation(
+    nn_path_1: &Path,
+    nn_path_2: &Path,
+    display_simulation: bool,
+) -> Result<(f32, f32), Error> {
     // Construct the score file path
     let base_folder = nn_path_1.parent().unwrap();
     let nn_1_id = nn_path_1.file_stem().unwrap().to_str().unwrap();
@@ -574,7 +606,7 @@ async fn run_1v1_simulation(nn_path_1: &Path, nn_path_2: &Path) -> Result<(f32,
 
     trace!("Running simulation for {} vs {}", nn_1_id, nn_2_id);
 
-    let _output = if thread_rng().gen_range(0..100) < 1 {
+    let _output = if display_simulation {
         Command::new(GAME_EXECUTABLE_PATH)
             .arg(&config1_arg)
             .arg(&config2_arg)