Formatting code
parent 5ab3c2382e
commit 95fdad1034
4 changed files with 103 additions and 60 deletions
@@ -3,7 +3,7 @@ extern crate fann;
 pub mod fighter_context;
 pub mod neural_network_utility;

-use anyhow::{anyhow, Context};
+use anyhow::{Context, anyhow};
 use async_trait::async_trait;
 use fann::{ActivationFunc, Fann};
 use futures::future::join_all;
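The import reordering above (Context ahead of anyhow) is consistent with rustfmt's 2024 style edition, which sorts use items so that names beginning with uppercase letters come before lowercase ones. As a minimal sketch of how a sweep like this is usually produced, assuming the project is formatted with cargo fmt and a rustfmt.toml along these lines (no such file appears in this commit):

# rustfmt.toml (hypothetical)
style_edition = "2024"

With that configuration, running cargo fmt across the workspace reflows long statements and re-sorts imports in the way the remaining hunks show.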
@@ -114,8 +114,10 @@ impl GeneticNode for FighterNN {
 })?;

 let mut nn_shapes = HashMap::new();
-let weight_initialization_amplitude = rng().random_range(0.0..NEURAL_NETWORK_INITIAL_WEIGHT_MAX);
-let weight_initialization_range = -weight_initialization_amplitude..weight_initialization_amplitude;
+let weight_initialization_amplitude =
+rng().random_range(0.0..NEURAL_NETWORK_INITIAL_WEIGHT_MAX);
+let weight_initialization_range =
+-weight_initialization_amplitude..weight_initialization_amplitude;

 // Create the first generation in this folder
 for i in 0..POPULATION {
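The two statements reflowed above draw a random amplitude and build a symmetric weight range. A self-contained sketch of that computation, assuming the rand 0.9-style API (rng(), random_range) that appears verbatim in the diff; WEIGHT_MAX is a stand-in for NEURAL_NETWORK_INITIAL_WEIGHT_MAX, whose value is not shown:

use rand::Rng;
use std::ops::Range;

// Stand-in for NEURAL_NETWORK_INITIAL_WEIGHT_MAX; the real value is not shown in the diff.
const WEIGHT_MAX: f32 = 1.0;

// Draw a random amplitude and build the symmetric range used to seed connection weights.
fn initial_weight_range() -> Range<f32> {
    let amplitude = rand::rng().random_range(0.0..WEIGHT_MAX);
    -amplitude..amplitude
}

fn main() {
    let range = initial_weight_range();
    println!("initial weights will be drawn from {:?}", range);
}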
@@ -201,7 +203,6 @@ impl GeneticNode for FighterNN {
 i
 };

-
 let secondary_id = loop {
 if allotted_simulations.is_empty() || allotted_simulations.len() == 1 {
 // Select a random id
@@ -243,7 +244,8 @@ impl GeneticNode for FighterNN {
 let task = {
 let self_clone = self.clone();
 let semaphore_clone = context.gemla_context.shared_semaphore.clone();
-let display_simulation_semaphore = context.gemla_context.visible_simulations.clone();
+let display_simulation_semaphore =
+context.gemla_context.visible_simulations.clone();

 let folder = self_clone.folder.clone();
 let generation = self_clone.r#generation;
@@ -260,12 +262,16 @@ impl GeneticNode for FighterNN {

 // Introducing a new scope for acquiring permits and running simulations
 let simulation_result = async move {
-let permit = semaphore_clone.acquire_owned().await
+let permit = semaphore_clone
+.acquire_owned()
+.await
 .with_context(|| "Failed to acquire semaphore permit")?;

 let display_simulation = display_simulation_semaphore.try_acquire_owned().ok();

-let (primary_score, secondary_score) = if let Some(display_simulation) = display_simulation {
+let (primary_score, secondary_score) = if let Some(display_simulation) =
+display_simulation
+{
 let result = run_1v1_simulation(&primary_nn, &secondary_nn, true).await?;
 drop(display_simulation); // Explicitly dropping resources no longer needed
 result
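The permit handling reformatted above follows tokio's owned-semaphore pattern: an Arc<Semaphore> hands out an OwnedSemaphorePermit that can be moved into an async block and is released when dropped. A minimal sketch under those assumptions; the concurrency limit and the task body are placeholders, not values from the diff:

use std::sync::Arc;
use anyhow::Context;
use tokio::sync::Semaphore;

// Placeholder limit; in the diff the semaphore comes from context.gemla_context.shared_semaphore.
const MAX_CONCURRENT_SIMULATIONS: usize = 4;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let semaphore = Arc::new(Semaphore::new(MAX_CONCURRENT_SIMULATIONS));

    // Acquire an owned permit so it can outlive the borrow of the semaphore.
    let permit = semaphore
        .clone()
        .acquire_owned()
        .await
        .with_context(|| "Failed to acquire semaphore permit")?;

    // ... run one simulation while holding the permit ...

    drop(permit); // releasing the permit lets the next queued simulation start
    Ok(())
}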
@@ -300,7 +306,8 @@ impl GeneticNode for FighterNN {
 // resolve results for any errors
 let mut scores = HashMap::new();
 for result in results.into_iter() {
-let (primary_id, primary_score, secondary_id, secondary_score) = result.with_context(|| "Failed to run simulation")?;
+let (primary_id, primary_score, secondary_id, secondary_score) =
+result.with_context(|| "Failed to run simulation")?;

 // If score exists, add the new score to the existing score
 if let Some((existing_score, count)) = scores.get_mut(&(primary_id as u64)) {
@@ -480,8 +487,9 @@ impl GeneticNode for FighterNN {
 .with_context(|| format!("Failed to create directory {:?}", folder.join("0")))?;

 let get_highest_scores = |fighter: &FighterNN| -> Vec<(u64, f32)> {
-let mut sorted_scores: Vec<_> =
-fighter.scores[fighter.r#generation as usize].iter().collect();
+let mut sorted_scores: Vec<_> = fighter.scores[fighter.r#generation as usize]
+.iter()
+.collect();
 sorted_scores.sort_by(|a, b| b.1.partial_cmp(a.1).unwrap());
 sorted_scores
 .iter()
@@ -537,7 +545,10 @@ impl GeneticNode for FighterNN {
 run_1v1_simulation(&left_nn_path, &right_nn_path, false).await?
 };

-debug!("{} vs {} -> {} vs {}", left_nn_id, right_nn_id, left_score, right_score);
+debug!(
+"{} vs {} -> {} vs {}",
+left_nn_id, right_nn_id, left_score, right_score
+);

 drop(permit);

@@ -734,7 +745,11 @@ fn should_continue(scores: &[HashMap<u64, f32>], lenience: u64) -> Result<bool,

 debug!(
 "Highest Q3 value: {} at generation {}, Highest Median value: {} at generation {}, Continuing? {}",
-highest_q3_value, generation_with_highest_q3 + 1, highest_median, generation_with_highest_median + 1, result
+highest_q3_value,
+generation_with_highest_q3 + 1,
+highest_median,
+generation_with_highest_median + 1,
+result
 );

 Ok(result)
@@ -826,10 +841,7 @@ async fn run_1v1_simulation(

 trace!(
 "Executing the following command {} {} {} {}",
-GAME_EXECUTABLE_PATH,
-config1_arg,
-config2_arg,
-disable_unreal_rendering_arg
+GAME_EXECUTABLE_PATH, config1_arg, config2_arg, disable_unreal_rendering_arg
 );

 trace!("Running simulation for {} vs {}", nn_1_id, nn_2_id);
@@ -906,8 +918,7 @@ async fn read_score_from_file(file_path: &Path, nn_id: &str) -> Result<f32, io::
 "NN ID not found in scores file",
 ));
 }
-Err(_) =>
-{
+Err(_) => {
 if attempts >= 2 {
 // Attempt 5 times before giving up.
 return Ok(-100.0);
@@ -1,12 +1,13 @@
-use std::{cmp::min, cmp::Ordering, collections::HashMap, ops::Range};
+use std::{cmp::Ordering, cmp::min, collections::HashMap, ops::Range};

 use anyhow::Context;
 use fann::{ActivationFunc, Fann};
 use gemla::error::Error;
 use rand::{
+Rng,
 distr::{Distribution, Uniform},
+rng,
 seq::IteratorRandom,
-rng, Rng,
 };

 use super::{
@@ -208,13 +209,37 @@ pub fn consolidate_old_connections(
 to_non_bias_network_id(connection.from_neuron, &primary_shape);
 let original_to_neuron =
 to_non_bias_network_id(connection.to_neuron, &primary_shape);
-trace!("Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", previous_new_id, new_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, previous_neuron_id, neuron_id);
+trace!(
+"Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]",
+previous_new_id,
+new_id,
+original_from_neuron,
+original_to_neuron,
+connection.weight,
+found_in_primary,
+connection.from_neuron,
+connection.to_neuron,
+previous_neuron_id,
+neuron_id
+);
 } else {
 let original_from_neuron =
 to_non_bias_network_id(connection.from_neuron, &secondary_shape);
 let original_to_neuron =
 to_non_bias_network_id(connection.to_neuron, &secondary_shape);
-trace!("Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", previous_new_id, new_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, previous_neuron_id, neuron_id);
+trace!(
+"Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]",
+previous_new_id,
+new_id,
+original_from_neuron,
+original_to_neuron,
+connection.weight,
+found_in_primary,
+connection.from_neuron,
+connection.to_neuron,
+previous_neuron_id,
+neuron_id
+);
 }
 let translated_from = to_bias_network_id(previous_new_id, &new_shape);
 let translated_to = to_bias_network_id(new_id, &new_shape);
@@ -222,10 +247,7 @@ pub fn consolidate_old_connections(
 } else {
 trace!(
 "Connection not found for ({}, {}) -> ({}, {})",
-previous_new_id,
-new_id,
-previous_neuron_id,
-neuron_id
+previous_new_id, new_id, previous_neuron_id, neuron_id
 );
 }
 }
@@ -317,23 +339,43 @@ pub fn consolidate_old_connections(
 to_non_bias_network_id(connection.from_neuron, &primary_shape);
 let original_to_neuron =
 to_non_bias_network_id(connection.to_neuron, &primary_shape);
-trace!("Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", bias_neuron, translated_neuron_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, bias_neuron, neuron_id);
+trace!(
+"Primary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]",
+bias_neuron,
+translated_neuron_id,
+original_from_neuron,
+original_to_neuron,
+connection.weight,
+found_in_primary,
+connection.from_neuron,
+connection.to_neuron,
+bias_neuron,
+neuron_id
+);
 } else {
 let original_from_neuron =
 to_non_bias_network_id(connection.from_neuron, &secondary_shape);
 let original_to_neuron =
 to_non_bias_network_id(connection.to_neuron, &secondary_shape);
-trace!("Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]", bias_neuron, translated_neuron_id, original_from_neuron, original_to_neuron, connection.weight, found_in_primary, connection.from_neuron, connection.to_neuron, bias_neuron, neuron_id);
+trace!(
+"Secondary: Adding connection from ({} -> {}) translated to ({:?} -> {:?}) with weight {} for primary:{} [{} -> {}] [{} -> {}]",
+bias_neuron,
+translated_neuron_id,
+original_from_neuron,
+original_to_neuron,
+connection.weight,
+found_in_primary,
+connection.from_neuron,
+connection.to_neuron,
+bias_neuron,
+neuron_id
+);
 }
 new_fann.set_weight(bias_neuron, translated_neuron_id, connection.weight);
 } else {
 trace!(
 "Connection not found for bias ({}, {}) -> ({}, {}) primary: {}",
-bias_neuron,
-neuron_id,
-bias_neuron,
-translated_neuron_id,
-is_primary
+bias_neuron, neuron_id, bias_neuron, translated_neuron_id, is_primary
 );
 }
 }
@@ -388,8 +430,7 @@ pub fn crossbreed_neuron_arrays(
 if neuron_id >= &segment.0 && neuron_id <= &segment.1 {
 // We need to do something different depending on whether the neuron layer is, lower, higher or equal to the target layer

-match layer.cmp(&current_layer)
-{
+match layer.cmp(&current_layer) {
 Ordering::Equal => {
 new_neurons.push((*neuron_id, is_primary, current_layer, 0));

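The match reformatted above branches on the std::cmp::Ordering value returned by layer.cmp(&current_layer). A small self-contained illustration of that pattern; the return strings are placeholders, not messages from the project:

use std::cmp::Ordering;

fn compare_layers(layer: u32, current_layer: u32) -> &'static str {
    // cmp returns Ordering::Less, Equal, or Greater, which the match then dispatches on.
    match layer.cmp(&current_layer) {
        Ordering::Less => "below the target layer",
        Ordering::Equal => "at the target layer",
        Ordering::Greater => "above the target layer",
    }
}

fn main() {
    assert_eq!(compare_layers(2, 2), "at the target layer");
    println!("{}", compare_layers(3, 2));
}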
@@ -455,7 +496,12 @@ pub fn crossbreed_neuron_arrays(
 .filter(|(id, l)| id > &highest_id.0 && *l == layer - 1)
 .collect::<Vec<_>>();
 for (neuron_id, _) in neurons_to_add {
-new_neurons.push((*neuron_id, is_primary, current_layer, 0));
+new_neurons.push((
+*neuron_id,
+is_primary,
+current_layer,
+0,
+));

 if is_primary {
 primary_last_layer = current_layer;
@@ -100,19 +100,5 @@ def visualize_fann_network(network_file):
 plt.show()

 # Path to the FANN network file
-fann_path = 'F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_4f2be613-ab26-4384-9a65-450e043984ea\\6\\4f2be613-ab26-4384-9a65-450e043984ea_fighter_nn_0.net'
-# fann_path = "F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_fc294503-7b2a-40f8-be59-ccc486eb3f79\\0\\fc294503-7b2a-40f8-be59-ccc486eb3f79_fighter_nn_0.net"
-# fann_path = 'F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_99c30a7f-40ab-4faf-b16a-b44703fdb6cd\\0\\99c30a7f-40ab-4faf-b16a-b44703fdb6cd_fighter_nn_0.net'
-# Has a 4 layer network
-# # Generation 1
-# fann_path = "F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98\\1\\16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98_fighter_nn_0.net"
-# # Generation 5
-# fann_path = "F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98\\5\\16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98_fighter_nn_0.net"
-# # Generation 10
-# fann_path = "F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98\\10\\16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98_fighter_nn_0.net"
-# # Generation 20
-# fann_path = "F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98\\20\\16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98_fighter_nn_0.net"
-# # Generation 32
-# fann_path = "F:\\\\vandomej\\Projects\\dootcamp-AI-Simulation\\Simulations\\fighter_nn_16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98\\32\\16dfa1b4-03c7-45a6-84b4-22fe3c8e2d98_fighter_nn_0.net"
 fann_path = select_file()
 visualize_fann_network(fann_path)