11 changes: 3 additions & 8 deletions act/analysis.py
@@ -144,14 +144,9 @@ def save_mse_corr(


def print_run_stats(config: SimulationConfig):
    output_folder = utils.get_output_folder_name(config)
    model_data_dir = utils.get_last_model_data_folder_name(config)

    target_params = config["optimization_parameters"].get("target_params")
    if(config["run_mode"] == "segregated"):
        segregation_index = utils.get_segregation_index(config)
        segregation_dir = f"seg_module_{segregation_index+1}/"
        model_data_dir = os.path.join(output_folder, segregation_dir)
    else:
        model_data_dir = output_folder + "model_data/"
    pred_passive_json_path = model_data_dir + "pred_passive_properties.json"
    metrics = pd.read_csv(model_data_dir + "metrics.csv")
    preds_df = pd.read_csv(model_data_dir + "pred.csv", index_col=0)
@@ -169,7 +164,7 @@ def print_run_stats(config: SimulationConfig):
    ]

    preds = np.array(preds_df)
    print(output_folder)
    print(model_data_dir)
    print(f"Med MSE: {metrics['mse'].median():.4f} ({metrics['mse'].std():.4f})")
    print(f"Med Corr: {metrics['corr'].median():.4f} ({metrics['corr'].std():.4f})")
    print()
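For reference (not part of the diff): print_run_stats now delegates all path handling to the helper added in act/utils.py below. A minimal usage sketch, assuming the config object used by the simulation scripts in this PR:

import pandas as pd
from act import utils
from simulation_configs import selected_config  # config source used by the simulation scripts

# Resolves to .../model_data/<seed>-seed/ (plus module_<i>/ in segregated runs), per the utils.py changes below
model_data_dir = utils.get_last_model_data_folder_name(selected_config)
metrics = pd.read_csv(model_data_dir + "metrics.csv")  # same files as before, new location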
33 changes: 16 additions & 17 deletions act/simulator.py
@@ -72,12 +72,12 @@ def _run_generate_target_traces(config: SimulationConfig, ignore_segregation=Fal


def _run(config: SimulationConfig):

if config["optimization_parameters"]["num_epochs"] < 1:
raise ValueError("Number of epochs is expected to be >= 1.")

output_folder = utils.get_output_folder_name(config) + "_temp_/"
if not os.path.exists(output_folder):
os.mkdir(output_folder)

output_folder = utils.create_model_data_folder(config)
segregation_index = utils.get_segregation_index(config)

# if there is a target_cell specified then use it too
os.mkdir(temp_modfiles_dir)
@@ -88,7 +88,6 @@ def _run(config: SimulationConfig):
os.system(f"nrnivmodl {temp_modfiles_dir}")
ltohto = False
logger = ACTLogger()
segregation_index = utils.get_segregation_index(config) # if needed
if config["run_mode"] == "segregated" and config["segregation"][
segregation_index
].get("use_lto_amps", False):
@@ -374,20 +373,20 @@ def _run(config: SimulationConfig):
    learned_params = {param: predict for param, predict in zip(params, predictions)}
    if config["run_mode"] == "segregated":
        # save a copy of the outputs for future development
        base_output_folder = utils.get_output_folder_name(config)
        run_output_folder_name = f"{config['run_mode']}"
        seg_folder = os.path.join(
            base_output_folder, f"seg_module_{segregation_index+1}"
        )
        shutil.move(output_folder, seg_folder)
        #base_output_folder = utils.get_output_folder_name(config)
        #run_output_folder_name = f"{config['run_mode']}"
        #seg_folder = os.path.join(
        #base_output_folder, f"{random_seed}-seed_module_{segregation_index+1}"
        #)
        #shutil.move(output_folder, seg_folder)
        utils.update_segregation(config, learned_params)
    else:
        base_output_folder = utils.get_output_folder_name(config)
        orig_folder = os.path.join(
            base_output_folder, "model_data"
        )
        shutil.move(output_folder, orig_folder)
        utils.save_learned_params(learned_params)
        #base_output_folder = utils.get_output_folder_name(config)
        #orig_folder = os.path.join(
        #base_output_folder, f"{random_seed}-seed_model_data"
        #)
        #shutil.move(output_folder, orig_folder)
        utils.save_learned_params(learned_params, config)


def run_generate_target_traces(
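A condensed sketch (not part of the diff) of how the output handling in _run changes: the per-seed, per-module folder is created up front by utils.create_model_data_folder, so the old _temp_ directory and the end-of-run shutil.move bookkeeping are commented out, and save_learned_params now receives the config to locate the seed-specific parameter file. params and predictions come from the training step omitted here:

output_folder = utils.create_model_data_folder(config)   # .../model_data/<seed>-seed/[module_<i+1>/]
segregation_index = utils.get_segregation_index(config)

# ... simulation and training run here, writing results into output_folder ...

learned_params = {param: predict for param, predict in zip(params, predictions)}
if config["run_mode"] == "segregated":
    utils.update_segregation(config, learned_params)
else:
    utils.save_learned_params(learned_params, config)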
103 changes: 84 additions & 19 deletions act/utils.py
@@ -23,7 +23,6 @@
pc = h.ParallelContext() # object to access MPI methods
MPI_RANK = int(pc.id())


def create_output_folder(config: SimulationConfig, overwrite=True) -> str:
if(config["output"]["auto_structure"] == True):
print("AUTO STRUCTURED")
@@ -45,14 +44,81 @@ def create_output_folder(config: SimulationConfig, overwrite=True) -> str:
def get_output_folder_name(config: SimulationConfig) -> str:
cell_name = config["cell"]["name"]
num_slices = f"{config['optimization_parameters']['parametric_distribution']['n_slices']}"
random_seed = f"{config['optimization_parameters']['random_seed']}"
run_mode = f"{config['run_mode']}" #"segregated" "origin
if(run_mode == "segregated"):
run_mode_name = "seg"
else:
run_mode_name = "orig"

return f"./output/{cell_name}_{run_mode_name}_{num_slices}-slice_{random_seed}-seed/"
return f"./output/{cell_name}_{run_mode_name}_{num_slices}-slice/"

def get_sim_data_folder_name(config: SimulationConfig) -> str:
    segregation_index = get_segregation_index(config)
    if config["run_mode"] == "segregated":
        sim_dir = get_output_folder_name(config) + "sim_data/" + f"module_{segregation_index+1}/"
    else:
        sim_dir = get_output_folder_name(config) + "sim_data/"
    return sim_dir

def get_param_values_file(config: SimulationConfig) -> str:
    random_seed = f"{config['optimization_parameters']['random_seed']}"
    return get_output_folder_name(config) + "sim_data/" + f"parameter_values_{random_seed}-seed.json"

def get_sim_output_folder_name(config: SimulationConfig) -> str:
    return get_sim_data_folder_name(config) + "output/"

def create_model_data_folder(config: SimulationConfig) -> str:
    segregation_index = get_segregation_index(config)
    random_seed = f"{config['optimization_parameters']['random_seed']}"
    if config["run_mode"] == "segregated":

        output_folder = get_output_folder_name(config) + "model_data/"

        if not os.path.exists(output_folder):
            os.mkdir(output_folder)

        output_folder = output_folder + f"{random_seed}-seed/"

        if not os.path.exists(output_folder):
            os.mkdir(output_folder)

        output_folder = output_folder + f"module_{segregation_index+1}/"

        if not os.path.exists(output_folder):
            os.mkdir(output_folder)
    else:
        output_folder = get_output_folder_name(config) + "model_data/"

        if not os.path.exists(output_folder):
            os.mkdir(output_folder)

        output_folder = output_folder + f"{random_seed}-seed/"

        if not os.path.exists(output_folder):
            os.mkdir(output_folder)

    return output_folder

def get_model_data_folder_name(config: SimulationConfig) -> str:
    output_dir = get_output_folder_name(config)
    random_seed = f"{config['optimization_parameters']['random_seed']}"
    segregation_index = get_segregation_index(config)
    if(config["run_mode"] == "segregated"):
        model_data_dir = output_dir + "model_data/" + f"{random_seed}-seed/" + f"module_{segregation_index+1}/"
    else:
        model_data_dir = output_dir + "model_data/" + f"{random_seed}-seed/"
    return model_data_dir

def get_last_model_data_folder_name(config: SimulationConfig) -> str:
    # This is used for the plotting scripts because the module # was incremented, so we need the "last run"
    output_dir = get_output_folder_name(config)
    random_seed = f"{config['optimization_parameters']['random_seed']}"
    segregation_index = get_segregation_index(config)
    if(config["run_mode"] == "segregated"):
        model_data_dir = output_dir + "model_data/" + f"{random_seed}-seed/" + f"module_{segregation_index}/"
    else:
        model_data_dir = output_dir + "model_data/" + f"{random_seed}-seed/"
    return model_data_dir

def set_cell_parameters(cell, parameter_list: list, parameter_values: list) -> None:
    for sec in cell.all:
@@ -198,8 +264,7 @@ def cleanup_simulation():


def get_segregation_index(config: SimulationConfig):
    output_dir = get_output_folder_name(config)
    parameter_values_file = output_dir + "sim_data" + "parameter_values.json"
    parameter_values_file = get_param_values_file(config)

    if config["run_mode"] != "segregated":
        return -1
@@ -215,7 +280,7 @@ def load_preset_params(config: SimulationConfig):
def load_preset_params(config: SimulationConfig):
    # Returns a dict of learned params from segregation
    # if segregation is not used then returns an empty dict
    parameter_values_file = "parameter_values.json"
    parameter_values_file = get_param_values_file(config)

    if config["run_mode"] != "segregated":
        return {}
@@ -239,7 +304,7 @@ def load_preset_params(config: SimulationConfig):
def load_learned_params(config: SimulationConfig):
    # Returns a dict of learned params from segregation
    # if segregation is not used then returns an empty dict
    parameter_values_file = "parameter_values.json"
    parameter_values_file = get_param_values_file(config)

    if not os.path.exists(parameter_values_file):
        return {}
@@ -250,7 +315,7 @@ def load_learned_params(config: SimulationConfig):


def get_learned_variability(config: SimulationConfig):
parameter_values_file = "parameter_values.json"
parameter_values_file = get_param_values_file(config)
lv = 0
if os.path.exists(parameter_values_file):
with open(parameter_values_file, "r") as fp:
@@ -261,7 +326,7 @@ def get_learned_variability(config: SimulationConfig):


def get_learned_variability_params(config: SimulationConfig):
parameter_values_file = "parameter_values.json"
parameter_values_file = get_param_values_file(config)
lvp = []
if os.path.exists(parameter_values_file):
with open(parameter_values_file, "r") as fp:
@@ -278,8 +343,8 @@ def update_segregation(config: SimulationConfig, learned_params):
    # And updates the parameter_values.json if that parameter was
    # in the current segregation index
    # learned_params = {'channel'(str):value(float),}
    output_dir = get_output_folder_name(config) + "sim_data/"
    parameter_values_file = output_dir + "parameter_values.json"
    output_dir = get_sim_data_folder_name(config)
    parameter_values_file = get_param_values_file(config)
    if os.path.exists(parameter_values_file):
        print(f"Updating {parameter_values_file} for learned parameters")
        with open(parameter_values_file, "r") as fp:
@@ -346,8 +411,8 @@ def update_segregation(config: SimulationConfig, learned_params):
    )


def save_learned_params(learned_params):
    parameter_values_file = "parameter_values.json"
def save_learned_params(learned_params, config: SimulationConfig):
    parameter_values_file = get_param_values_file(config)
    if os.path.exists(parameter_values_file):
        print(f"Updating {parameter_values_file} for learned parameters")
        with open(parameter_values_file, "r") as fp:
@@ -360,9 +425,9 @@ def save_learned_params(learned_params):


def build_parametric_network(config: SimulationConfig):
output_dir = get_output_folder_name(config) + "sim_data/"
output_dir = get_sim_data_folder_name(config)
config_file = output_dir + "simulation_act_simulation_config.json"
parameter_values_file = output_dir + "parameter_values.json"
parameter_values_file = get_param_values_file(config)

params = [p["channel"] for p in config["optimization_parameters"]["params"]]

@@ -615,10 +680,10 @@ def generate_parametric_traces(config: SimulationConfig):
    traces for a large collection of cells and generates an h5
    file for ingestion later.
    """
    output_dir = get_output_folder_name(config) + "sim_data/"
    output_dir = get_sim_data_folder_name(config)
    passive_properties = config.get("cell", {}).get("passive_properties", None)
    config_file = output_dir + "simulation_act_simulation_config.json"
    parameter_values_file = output_dir + "parameter_values.json"
    parameter_values_file = get_param_values_file(config)
    with open(parameter_values_file) as f:
        param_dict = json.load(f)
    params = param_dict["parameters"]
@@ -673,8 +738,8 @@ def load_parametric_traces(config: SimulationConfig, drop_ramp=False):
"""
Return a torch tensor of all traces in the specified h5 file
"""
output_dir = get_output_folder_name(config) + "sim_data/"
parameter_values_file = output_dir + "parameter_values.json"
output_dir = get_sim_data_folder_name(config)
parameter_values_file = get_param_values_file(config)
traces_file = output_dir + "output/v_report.h5"

if not os.path.exists(parameter_values_file) or not os.path.exists(traces_file):
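Taken together, the new helpers imply the on-disk layout sketched below (illustrative, inferred from the functions above; names in angle brackets are placeholders, bracketed segments appear only in segregated runs). Note that the random seed moves out of the top-level output folder name and into the model_data subfolder and the parameter-values filename:

# output/<cell>_<seg|orig>_<n>-slice/
#     sim_data/
#         parameter_values_<seed>-seed.json        # get_param_values_file
#         [module_<i+1>/]output/v_report.h5        # get_sim_output_folder_name; read by load_parametric_traces
#         [module_<i+1>/]output/arima_stats.json   # written by generate_arma_stats.py below
#     model_data/<seed>-seed/[module_<i+1>/]       # create_model_data_folder / get_model_data_folder_name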
8 changes: 5 additions & 3 deletions simulation/analyze_res.py
@@ -7,11 +7,13 @@
from simulation_configs import selected_config

from act import analysis
import meta_sweep


def main():
def main(config):
    analysis.print_run_stats(selected_config)


if __name__ == "__main__":
    main()
    if '--sweep' in sys.argv:
        selected_config = meta_sweep.get_meta_params_for_sweep()
    main(selected_config)
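The '--sweep' dispatch used here (and again in generate_arma_stats.py below) follows one pattern. A standalone sketch, assuming meta_sweep.get_meta_params_for_sweep() returns the SimulationConfig for the current sweep point (its internals are not shown in this diff):

import sys

import meta_sweep
from simulation_configs import selected_config

config = selected_config
if "--sweep" in sys.argv:
    # assumed to build the config for the current sweep job
    config = meta_sweep.get_meta_params_for_sweep()
# downstream code then operates on config rather than the static selected_config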
18 changes: 18 additions & 0 deletions simulation/batch_generate_traces_sweep.sh
@@ -0,0 +1,18 @@
#!/bin/bash

#SBATCH -N 1
#SBATCH -n 48
#SBATCH -W
#SBATCH --qos=normal
#SBATCH --job-name=act
#SBATCH --output=output/bmtk_sim.out
#SBATCH --time 0-12:00

START=$(date)
mpiexec nrniv -mpi -python generate_traces.py --sweep
#mpiexec ./components_homogenous/mechanisms/x86_64/special -mpi run_network.py simulation_configECP_base_homogenous.json
END=$(date)

printf "Start: $START \nEnd: $END\n"

echo "Done running model at $(date)"
48 changes: 35 additions & 13 deletions simulation/generate_arma_stats.py
@@ -4,20 +4,42 @@
from act import utils
from simulation_configs import selected_config
import warnings
import os.path
import meta_sweep


warnings.filterwarnings("ignore")

if __name__ == "__main__":
    traces, params, amps = utils.load_parametric_traces(selected_config)
    segregation_index = utils.get_segregation_index(selected_config)

    arima_order = (10, 0, 10)
    if selected_config.get("summary_features", {}).get("arima_order"):
        arima_order = tuple(selected_config["summary_features"]["arima_order"])
    if selected_config["run_mode"] == "segregated" and selected_config["segregation"][segregation_index].get("arima_order",None):
        print(f"custom arima order for segregation set")
        arima_order = tuple(selected_config["segregation"][segregation_index]["arima_order"])
    print(f"ARIMA order set to {arima_order}")

    output_dir = utils.get_output_folder_name(selected_config) + "sim_data/output/arima_stats.json"
    utils.arima_coefs_proc_map(traces, output_file=output_dir, arima_order=arima_order)
    if '--sweep' in sys.argv:
        selected_config = meta_sweep.get_meta_params_for_sweep()

    output_dir = utils.get_sim_output_folder_name(selected_config)
    arma_stats_file = output_dir + "arima_stats.json"
    arma_stats_exists = os.path.exists(arma_stats_file)
    generate_arma = selected_config["optimization_parameters"]["generate_arma"]

    if (arma_stats_exists):
        print("--------------------------------------------------------------------")
        print(f"ARMA STATS ALREADY GENERATED - Using stats from: {arma_stats_file}")
        print("--------------------------------------------------------------------")
    elif (not generate_arma):
        print("-------------------------------------------------")
        print("ARMA STATS TURNED OFF IN SIMULATION CONFIGURATION")
        print("-------------------------------------------------")
    else:
        traces, params, amps = utils.load_parametric_traces(selected_config)
        segregation_index = utils.get_segregation_index(selected_config)

        arima_order = (10, 0, 10)
        if selected_config.get("summary_features", {}).get("arima_order"):
            arima_order = tuple(selected_config["summary_features"]["arima_order"])
        if selected_config["run_mode"] == "segregated" and selected_config["segregation"][segregation_index].get("arima_order",None):
            print(f"custom arima order for segregation set")
            arima_order = tuple(selected_config["segregation"][segregation_index]["arima_order"])
        print(f"ARIMA order set to {arima_order}")

        print(output_dir)

        utils.arima_coefs_proc_map(traces, output_file=arma_stats_file, arima_order=arima_order)

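The skip logic above consults two spots in the config. An illustrative fragment (key names taken from this diff, values made up) showing where they would live in a SimulationConfig:

config_fragment = {
    "optimization_parameters": {
        "generate_arma": True,          # new flag read above; set False to skip ARMA stats entirely
    },
    "summary_features": {
        "arima_order": [10, 0, 10],     # optional override of the default (10, 0, 10) order
    },
    # per-module override, only consulted when run_mode == "segregated":
    "segregation": [
        {"arima_order": [10, 0, 10]},
    ],
}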