result images
@@ -20,10 +20,11 @@ def main():
     # data_mean_freq_step_stimulus_examples()
     # data_mean_freq_step_stimulus_with_detections()
     # data_fi_curve()
-    # p_unit_example()
-    # fi_point_detection()

+    p_unit_example()
+    fi_point_detection()
     p_unit_heterogeneity()
+
     # test_fi_curve_colors()
     pass
@@ -227,7 +228,7 @@ def fi_point_detection():
     f_baseline = fi.get_f_baseline_frequencies()[-1]
     f_base_idx = fi.indices_f_baseline[-1]

-    axes[0].plot(f_trace_times[-1][:idx], f_traces[-1][:idx])
+    axes[0].plot(f_trace_times[-1][:idx], f_traces[-1][:idx], color=consts.COLOR_DATA)
     axes[0].plot([f_trace_times[-1][idx] for idx in f_zero_idx], (f_zero, ), ",", marker=consts.f0_marker, color=consts.COLOR_DATA_f0)
     axes[0].plot([f_trace_times[-1][idx] for idx in f_inf_idx], (f_inf, f_inf), color=consts.COLOR_DATA_finf, linewidth=4)
     axes[0].plot([f_trace_times[-1][idx] for idx in f_base_idx], (f_baseline, f_baseline), color="grey", linewidth=4)
@@ -1,23 +1,135 @@
 import numpy as np
 import matplotlib.pyplot as plt
-from analysis import get_fit_info, get_behaviour_values, calculate_percent_errors
+import matplotlib.gridspec as gridspec
+from analysis import get_fit_info, get_behaviour_values, get_parameter_values, behaviour_correlations, parameter_correlations
 from ModelFit import get_best_fit
 from Baseline import BaselineModel, BaselineCellData
 import Figure_constants as consts


+parameter_titles = {"input_scaling": r"$\alpha$", "delta_a": r"$\Delta_A$",
+                    "mem_tau": r"$\tau_m$", "noise_strength": r"$\sqrt{2D}$",
+                    "refractory_period": "$t_{ref}$", "tau_a": r"$\tau_A$",
+                    "v_offset": r"$I_{Bias}$", "dend_tau": r"$\tau_{dend}$"}
+
+behaviour_titles = {"baseline_frequency": "base rate", "Burstiness": "Burst", "coefficient_of_variation": "CV",
+                    "serial_correlation": "SC", "vector_strength": "VS",
+                    "f_inf_slope": r"$f_{\infty}$ Slope", "f_zero_slope": r"$f_0$ Slope",
+                    "f_zero_middle": r"$f_0$ middle"}


 def main():
     dir_path = "results/final_2/"
     fits_info = get_fit_info(dir_path)

     # cell_behaviour, model_behaviour = get_behaviour_values(fits_info)
-    # behaviour_overview_pairs(cell_behaviour, model_behaviour)
+    # plot_cell_model_comp_baseline(cell_behaviour, model_behaviour)
+    # plot_cell_model_comp_adaption(cell_behaviour, model_behaviour)
+    # plot_cell_model_comp_burstiness(cell_behaviour, model_behaviour)
+    #
+    behaviour_correlations_plot(fits_info)
+    #
+    # labels, corr_values, corrected_p_values = parameter_correlations(fits_info)
+    # par_labels = [parameter_titles[l] for l in labels]
+    # fig, ax = plt.subplots(1, 1)
+    # create_correlation_plot(ax, par_labels, corr_values, corrected_p_values, "", colorbar=True)
+    # plt.savefig(consts.SAVE_FOLDER + "parameter_correlations.png")
+    # plt.close()
+
+    # create_parameter_distributions(get_parameter_values(fits_info))
+
     # errors = calculate_percent_errors(fits_info)
     # create_boxplots(errors)

-    example_good_hist_fits(dir_path)
+    # example_good_hist_fits(dir_path)



+def create_parameter_distributions(par_values):
+
+    fig, axes = plt.subplots(4, 2)
+
+    if len(par_values.keys()) != 8:
+        print("not eight parameters")
+
+    labels = ["input_scaling", "v_offset", "mem_tau", "noise_strength",
+              "tau_a", "delta_a", "dend_tau", "refractory_period"]
+    axes_flat = axes.flatten()
+    for i, l in enumerate(labels):
+        min_v = min(par_values[l]) * 0.95
+        max_v = max(par_values[l]) * 1.05
+        step = (max_v - min_v) / 20
+        bins = np.arange(min_v, max_v+step, step)
+        axes_flat[i].hist(par_values[l], bins=bins, color=consts.COLOR_MODEL, alpha=0.75)
+        axes_flat[i].set_title(parameter_titles[l])
+
+    plt.tight_layout()
+    plt.savefig(consts.SAVE_FOLDER + "parameter_distributions.png")
+    plt.close()
+
+
+def behaviour_correlations_plot(fits_info):
+
+    fig = plt.figure(tight_layout=True, figsize=consts.FIG_SIZE_MEDIUM_WIDE)
+    gs = gridspec.GridSpec(2, 2, width_ratios=(1, 1), height_ratios=(5, 1), hspace=0.025, wspace=0.05)
+    # fig, axes = plt.subplots(1, 2, figsize=consts.FIG_SIZE_MEDIUM_WIDE)
+
+    keys, corr_values, corrected_p_values = behaviour_correlations(fits_info, model_values=False)
+    labels = [behaviour_titles[k] for k in keys]
+    img = create_correlation_plot(fig.add_subplot(gs[0, 0]), labels, corr_values, corrected_p_values, "Data")
+
+    keys, corr_values, corrected_p_values = behaviour_correlations(fits_info, model_values=True)
+    labels = [behaviour_titles[k] for k in keys]
+    img = create_correlation_plot(fig.add_subplot(gs[0, 1]), labels, corr_values, corrected_p_values, "Model", y_label=False)
+
+    ax_col = fig.add_subplot(gs[1, :])
+    data = [np.arange(-1, 1.001, 0.01)] * 10
+    ax_col.set_xticks([0, 25, 50, 75, 100, 125, 150, 175, 200])
+    ax_col.set_xticklabels([-1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1])
+    ax_col.set_yticks([])
+    ax_col.imshow(data)
+    ax_col.set_xlabel("Correlation Coefficients")
+    plt.tight_layout()
+    plt.savefig(consts.SAVE_FOLDER + "behaviour_correlations.png")
+    plt.close()
+
+
+def create_correlation_plot(ax, labels, correlations, p_values, title, y_label=True):
+
+    cleaned_cors = np.zeros(correlations.shape)
+
+    for i in range(correlations.shape[0]):
+        for j in range(correlations.shape[1]):
+            if abs(p_values[i, j]) < 0.05:
+                cleaned_cors[i, j] = correlations[i, j]
+
+    im = ax.imshow(cleaned_cors, vmin=-1, vmax=1)
+
+    # We want to show all ticks...
+    ax.set_xticks(np.arange(len(labels)))
+    ax.set_xticklabels(labels)
+
+    # ... and label them with the respective list entries
+    if y_label:
+        ax.set_yticks(np.arange(len(labels)))
+        ax.set_yticklabels(labels)
+    else:
+        ax.set_yticklabels([])
+    ax.set_title(title)
+
+    # Rotate the tick labels and set their alignment.
+    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
+             rotation_mode="anchor")
+
+    # Loop over data dimensions and create text annotations.
+    for i in range(len(labels)):
+        for j in range(len(labels)):
+            text = ax.text(j, i, "{:.2f}".format(correlations[i, j]),
+                           ha="center", va="center", color="w")
+
+    return im
+
+
 def example_good_hist_fits(dir_path):
     strong_bursty_cell = "2018-05-08-ac-invivo-1"
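The new behaviour_correlations_plot builds its colorbar by hand: instead of calling fig.colorbar it imshow-s a horizontal gradient into a narrow bottom axes and relabels pixel positions with correlation values. A minimal standalone sketch of that trick (the data and figure here are made up for illustration, not taken from the repository):

import numpy as np
import matplotlib.pyplot as plt

fig, (ax_mat, ax_cbar) = plt.subplots(2, 1, gridspec_kw={"height_ratios": (5, 1)})

corr = np.random.uniform(-1, 1, size=(5, 5))    # placeholder correlation matrix
ax_mat.imshow(corr, vmin=-1, vmax=1)

# a 10-row image spanning the value range -1..1 acts as the colorbar strip
gradient = [np.arange(-1, 1.001, 0.01)] * 10    # 201 columns
ax_cbar.imshow(gradient, aspect="auto")
ax_cbar.set_yticks([])
ax_cbar.set_xticks([0, 50, 100, 150, 200])      # pixel columns of the 201-wide strip
ax_cbar.set_xticklabels([-1, -0.5, 0, 0.5, 1])
ax_cbar.set_xlabel("Correlation Coefficients")
plt.show()

Both images end up spanning −1 to 1, so the strip's colors match the matrix plot above it; the repository's version labels nine ticks instead of five.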
@@ -66,98 +178,245 @@ def create_boxplots(errors):
     plt.close()


-def behaviour_overview_pairs(cell_behaviour, model_behaviour):
-    # behaviour_keys = ["Burstiness", "coefficient_of_variation", "serial_correlation",
-    #                   "vector_strength", "f_inf_slope", "f_zero_slope", "baseline_frequency"]

-    pairs = [("baseline_frequency", "vector_strength", "serial_correlation"),
-             ("Burstiness", "coefficient_of_variation"),
-             ("f_inf_slope", "f_zero_slope")]
+def plot_cell_model_comp_baseline(cell_behavior, model_behaviour):
+    fig = plt.figure(figsize=(12, 6))
+    # ("baseline_frequency", "vector_strength", "serial_correlation")

-    for pair in pairs:
-        cell = []
-        model = []
-        for behaviour in pair:
-            cell.append(cell_behaviour[behaviour])
-            model.append(model_behaviour[behaviour])
-        overview_pair(cell, model, pair)
+    # Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
+    # the size of the marginal axes and the main axes in both directions.
+    # Also adjust the subplot parameters for a square plot.
+    gs = fig.add_gridspec(2, 3, width_ratios=[5, 5, 5], height_ratios=[3, 7],
+                          left=0.1, right=0.9, bottom=0.1, top=0.9,
+                          wspace=0.25, hspace=0.05)
+    num_of_bins = 20
+    # baseline freq plot:
+    i = 0
+    cell = cell_behavior["baseline_frequency"]
+    model = model_behaviour["baseline_frequency"]
+    minimum = min(min(cell), min(model))
+    maximum = max(max(cell), max(model))
+    step = (maximum - minimum) / num_of_bins
+    bins = np.arange(minimum, maximum + step, step)
+
+    ax = fig.add_subplot(gs[1, i])
+    ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+    scatter_hist(cell, model, ax, ax_histx, behaviour_titles["baseline_frequency"], bins)
+    i += 1
+
+    cell = cell_behavior["vector_strength"]
+    model = model_behaviour["vector_strength"]
+    minimum = min(min(cell), min(model))
+    maximum = max(max(cell), max(model))
+    step = (maximum - minimum) / num_of_bins
+    bins = np.arange(minimum, maximum + step, step)
+
+    ax = fig.add_subplot(gs[1, i])
+    ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+    scatter_hist(cell, model, ax, ax_histx, behaviour_titles["vector_strength"], bins)
+    i += 1
+
+    cell = cell_behavior["serial_correlation"]
+    model = model_behaviour["serial_correlation"]
+    minimum = min(min(cell), min(model))
+    maximum = max(max(cell), max(model))
+    step = (maximum - minimum) / num_of_bins
+    bins = np.arange(minimum, maximum + step, step)
+
+    ax = fig.add_subplot(gs[1, i])
+    ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+    scatter_hist(cell, model, ax, ax_histx, behaviour_titles["serial_correlation"], bins)
+    i += 1
+
+    plt.tight_layout()
+    plt.savefig(consts.SAVE_FOLDER + "fit_baseline_comparison.png", transparent=True)
+    plt.close()


-def overview_pair(cell, model, titles):
+def plot_cell_model_comp_adaption(cell_behavior, model_behaviour):
     fig = plt.figure(figsize=(8, 6))

-    columns = len(cell)
+    # ("f_inf_slope", "f_zero_slope")

     # Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
     # the size of the marginal axes and the main axes in both directions.
     # Also adjust the subplot parameters for a square plot.
-    gs = fig.add_gridspec(2, columns, width_ratios=[5] * columns, height_ratios=[3, 7],
+    gs = fig.add_gridspec(2, 2, width_ratios=[5, 5], height_ratios=[3, 7],
                           left=0.1, right=0.9, bottom=0.1, top=0.9,
-                          wspace=0.2, hspace=0.05)
+                          wspace=0.25, hspace=0.05)
+    num_of_bins = 20

-    for i in range(len(cell)):
-        if titles[i] == "f_zero_slope":
-            length_before = len(cell[i])
-            idx = np.array(cell[i]) < 30000
-            cell[i] = np.array(cell[i])[idx]
-            model[i] = np.array(model[i])[idx]
-
-            idx = np.array(model[i]) < 30000
-            cell[i] = np.array(cell[i])[idx]
-            model[i] = np.array(model[i])[idx]
-            print("removed {} values from f_zero_slope plot.".format(length_before - len(cell[i])))
-        ax = fig.add_subplot(gs[1, i])
-        ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
-        scatter_hist(cell[i], model[i], ax, ax_histx, titles[i])
-
-    # plt.tight_layout()
-    plt.show()
+    # baseline freq plot:
+    i = 0
+    cell = cell_behavior["f_inf_slope"]
+    model = model_behaviour["f_inf_slope"]
+    minimum = min(min(cell), min(model))
+    maximum = max(max(cell), max(model))
+    step = (maximum - minimum) / num_of_bins
+    bins = np.arange(minimum, maximum + step, step)
+
+    ax = fig.add_subplot(gs[1, i])
+    ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+    scatter_hist(cell, model, ax, ax_histx, behaviour_titles["f_inf_slope"], bins)
+    i += 1
+
+    cell = cell_behavior["f_zero_slope"]
+    model = model_behaviour["f_zero_slope"]
+    length_before = len(cell)
+    idx = np.array(cell) < 25000
+    cell = np.array(cell)[idx]
+    model = np.array(model)[idx]
+
+    idx = np.array(model) < 25000
+    cell = np.array(cell)[idx]
+    model = np.array(model)[idx]
+    print("removed {} values from f_zero_slope plot.".format(length_before - len(cell)))
+
+    minimum = min(min(cell), min(model))
+    maximum = max(max(cell), max(model))
+    step = (maximum - minimum) / num_of_bins
+    bins = np.arange(minimum, maximum + step, step)
+
+    ax = fig.add_subplot(gs[1, i])
+    ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+    scatter_hist(cell, model, ax, ax_histx, behaviour_titles["f_zero_slope"], bins)
+
+    plt.tight_layout()
+    plt.savefig(consts.SAVE_FOLDER + "fit_adaption_comparison.png", transparent=True)
+    plt.close()


-def grouped_error_overview_behaviour_dist(cell_behaviours, model_behaviours):
-    # start with a square Figure
-    fig = plt.figure(figsize=(12, 12))
+def plot_cell_model_comp_burstiness(cell_behavior, model_behaviour):
+    fig = plt.figure(figsize=(8, 6))

-    rows = 4
-    columns = 2
+    # ("Burstiness", "coefficient_of_variation")

     # Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
     # the size of the marginal axes and the main axes in both directions.
     # Also adjust the subplot parameters for a square plot.
-    gs = fig.add_gridspec(rows*2, columns, width_ratios=[5]*columns, height_ratios=[3, 7] * rows,
+    gs = fig.add_gridspec(2, 2, width_ratios=[5, 5], height_ratios=[3, 7],
                           left=0.1, right=0.9, bottom=0.1, top=0.9,
-                          wspace=0.2, hspace=0.5)
+                          wspace=0.25, hspace=0.05)
+    num_of_bins = 20

-    for i, behaviour in enumerate(sorted(cell_behaviours.keys())):
-        col = int(np.floor(i / rows))
-        row = i - rows*col
-        ax = fig.add_subplot(gs[row*2 + 1, col])
-        ax_histx = fig.add_subplot(gs[row*2, col])
-
-        # use the previously defined function
-        scatter_hist(cell_behaviours[behaviour], model_behaviours[behaviour], ax, ax_histx, behaviour)
+    # baseline freq plot:
+    i = 0
+    cell = cell_behavior["Burstiness"]
+    model = model_behaviour["Burstiness"]
+    minimum = min(min(cell), min(model))
+    maximum = max(max(cell), max(model))
+    step = (maximum - minimum) / num_of_bins
+    bins = np.arange(minimum, maximum + step, step)
+
+    ax = fig.add_subplot(gs[1, i])
+    ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+    scatter_hist(cell, model, ax, ax_histx, behaviour_titles["Burstiness"], bins)
+    i += 1
+
+    cell = cell_behavior["coefficient_of_variation"]
+    model = model_behaviour["coefficient_of_variation"]
+
+    minimum = min(min(cell), min(model))
+    maximum = max(max(cell), max(model))
+    step = (maximum - minimum) / num_of_bins
+    bins = np.arange(minimum, maximum + step, step)
+
+    ax = fig.add_subplot(gs[1, i])
+    ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+    scatter_hist(cell, model, ax, ax_histx, behaviour_titles["coefficient_of_variation"], bins)

     plt.tight_layout()
-    plt.show()
+    plt.savefig(consts.SAVE_FOLDER + "fit_burstiness_comparison.png", transparent=True)
+    plt.close()


-def scatter_hist(cell_values, model_values, ax, ax_histx, behaviour, ax_histy=None):
+# def behaviour_overview_pairs(cell_behaviour, model_behaviour):
+#     # behaviour_keys = ["Burstiness", "coefficient_of_variation", "serial_correlation",
+#     #                   "vector_strength", "f_inf_slope", "f_zero_slope", "baseline_frequency"]
+#
+#     pairs = [("baseline_frequency", "vector_strength", "serial_correlation"),
+#              ("Burstiness", "coefficient_of_variation"),
+#              ("f_inf_slope", "f_zero_slope")]
+#
+#     for pair in pairs:
+#         cell = []
+#         model = []
+#         for behaviour in pair:
+#             cell.append(cell_behaviour[behaviour])
+#             model.append(model_behaviour[behaviour])
+#         overview_pair(cell, model, pair)
+#
+#
+# def overview_pair(cell, model, titles):
+#     fig = plt.figure(figsize=(8, 6))
+#
+#     columns = len(cell)
+#
+#     # Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
+#     # the size of the marginal axes and the main axes in both directions.
+#     # Also adjust the subplot parameters for a square plot.
+#     gs = fig.add_gridspec(2, columns, width_ratios=[5] * columns, height_ratios=[3, 7],
+#                           left=0.1, right=0.9, bottom=0.1, top=0.9,
+#                           wspace=0.2, hspace=0.05)
+#
+#     for i in range(len(cell)):
+#         if titles[i] == "f_zero_slope":
+#             length_before = len(cell[i])
+#             idx = np.array(cell[i]) < 30000
+#             cell[i] = np.array(cell[i])[idx]
+#             model[i] = np.array(model[i])[idx]
+#
+#             idx = np.array(model[i]) < 30000
+#             cell[i] = np.array(cell[i])[idx]
+#             model[i] = np.array(model[i])[idx]
+#             print("removed {} values from f_zero_slope plot.".format(length_before - len(cell[i])))
+#         ax = fig.add_subplot(gs[1, i])
+#         ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
+#         scatter_hist(cell[i], model[i], ax, ax_histx, titles[i])
+#
+#     # plt.tight_layout()
+#     plt.show()
+#
+#
+# def grouped_error_overview_behaviour_dist(cell_behaviours, model_behaviours):
+#     # start with a square Figure
+#     fig = plt.figure(figsize=(12, 12))
+#
+#     rows = 4
+#     columns = 2
+#     # Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
+#     # the size of the marginal axes and the main axes in both directions.
+#     # Also adjust the subplot parameters for a square plot.
+#     gs = fig.add_gridspec(rows*2, columns, width_ratios=[5]*columns, height_ratios=[3, 7] * rows,
+#                           left=0.1, right=0.9, bottom=0.1, top=0.9,
+#                           wspace=0.2, hspace=0.5)
+#
+#     for i, behaviour in enumerate(sorted(cell_behaviours.keys())):
+#         col = int(np.floor(i / rows))
+#         row = i - rows*col
+#         ax = fig.add_subplot(gs[row*2 + 1, col])
+#         ax_histx = fig.add_subplot(gs[row*2, col])
+#
+#         # use the previously defined function
+#         scatter_hist(cell_behaviours[behaviour], model_behaviours[behaviour], ax, ax_histx, behaviour)
+#
+#     plt.tight_layout()
+#     plt.show()
+#

+def scatter_hist(cell_values, model_values, ax, ax_histx, behaviour, bins, ax_histy=None):
     # copied from matplotlib

     # no labels
     ax_histx.tick_params(axis="cell", labelbottom=False)
     # ax_histy.tick_params(axis="model_values", labelleft=False)
     # the scatter plot:
-    ax.scatter(cell_values, model_values)

     minimum = min(min(cell_values), min(model_values))
     maximum = max(max(cell_values), max(model_values))
     ax.plot((minimum, maximum), (minimum, maximum), color="grey")
+    ax.scatter(cell_values, model_values, color="black")

     ax.set_xlabel("cell")
     ax.set_ylabel("model")

-    ax_histx.hist(cell_values, color="blue", alpha=0.5)
-    ax_histx.hist(model_values, color="orange", alpha=0.5)
+    ax_histx.hist(cell_values, bins=bins, color=consts.COLOR_DATA, alpha=0.75)
+    ax_histx.hist(model_values, bins=bins, color=consts.COLOR_MODEL, alpha=0.75)
     ax_labels = ax.get_xticklabels()
     ax_histx.set_xticklabels([])
     ax.set_xticklabels(ax_labels)
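All three plot_cell_model_comp_* functions repeat the same scatter-plus-marginal-histogram layout that scatter_hist draws into: a two-row gridspec whose top axes shares the x-axis with the main axes. A reduced sketch of that layout with made-up data (names and numbers are illustrative only, not from the repository):

import numpy as np
import matplotlib.pyplot as plt

cell_values = np.random.normal(100, 20, 50)               # placeholder "cell" values
model_values = cell_values + np.random.normal(0, 5, 50)   # placeholder "model" values

fig = plt.figure(figsize=(4, 4))
gs = fig.add_gridspec(2, 1, height_ratios=[3, 7], hspace=0.05)
ax = fig.add_subplot(gs[1, 0])
ax_histx = fig.add_subplot(gs[0, 0], sharex=ax)

bins = np.linspace(min(cell_values.min(), model_values.min()),
                   max(cell_values.max(), model_values.max()), 20)
ax_histx.hist(cell_values, bins=bins, alpha=0.75)
ax_histx.hist(model_values, bins=bins, alpha=0.75)
ax_histx.tick_params(axis="x", labelbottom=False)   # hide the shared x tick labels on top

# identity line plus cell-vs-model scatter below
lo, hi = bins[0], bins[-1]
ax.plot((lo, hi), (lo, hi), color="grey")
ax.scatter(cell_values, model_values, color="black")
ax.set_xlabel("cell")
ax.set_ylabel("model")
plt.show()

Note that the repository's scatter_hist calls ax_histx.tick_params(axis="cell", labelbottom=False); tick_params only accepts "x", "y" or "both" for axis, so that call is silently ignored on older matplotlib versions and rejected on newer ones. Presumably axis="x" was intended.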
analysis.py
@@ -42,7 +42,7 @@ def main():
     # labels, corr_values, corrected_p_values = parameter_correlations(fits_info)
     # create_correlation_plot(labels, corr_values, corrected_p_values)

-    # create_parameter_distributions(get_parameter_values(fits_info))
+    create_parameter_distributions(get_parameter_values(fits_info))
     cell_b, model_b = get_behaviour_values(fits_info)
     create_behaviour_distributions(cell_b, model_b)
     pass
@@ -115,7 +115,8 @@ def behaviour_correlations(fits_info, model_values=True):
     else:
         behaviour_values = bv_cell

-    labels = sorted(behaviour_values.keys())
+    labels = ["baseline_frequency", "serial_correlation", "vector_strength", "coefficient_of_variation", "Burstiness",
+              "f_inf_slope", "f_zero_slope"]
     corr_values = np.zeros((len(labels), len(labels)))
     p_values = np.ones((len(labels), len(labels)))

@@ -133,7 +134,8 @@ def behaviour_correlations(fits_info, model_values=True):
 def parameter_correlations(fits_info):
     parameter_values = get_parameter_values(fits_info)

-    labels = sorted(parameter_values.keys())
+    labels = ["input_scaling", "v_offset", "mem_tau", "noise_strength",
+              "tau_a", "delta_a", "dend_tau", "refractory_period"]
     corr_values = np.zeros((len(labels), len(labels)))
     p_values = np.ones((len(labels), len(labels)))

@@ -148,64 +150,6 @@ def parameter_correlations(fits_info):
     return labels, corr_values, corrected_p_values


-def create_correlation_plot(labels, correlations, p_values):
-
-    cleaned_cors = np.zeros(correlations.shape)
-
-    for i in range(correlations.shape[0]):
-        for j in range(correlations.shape[1]):
-            if abs(p_values[i, j]) < 0.05:
-                cleaned_cors[i, j] = correlations[i, j]
-
-    fig, ax = plt.subplots()
-    im = ax.imshow(cleaned_cors, vmin=-1, vmax=1)
-
-    cbar = ax.figure.colorbar(im, ax=ax)
-    cbar.ax.set_ylabel("Correlation coefficient", rotation=-90, va="bottom")
-
-    # We want to show all ticks...
-    ax.set_xticks(np.arange(len(labels)))
-    ax.set_yticks(np.arange(len(labels)))
-    # ... and label them with the respective list entries
-    ax.set_xticklabels(labels)
-    ax.set_yticklabels(labels)
-
-    # Rotate the tick labels and set their alignment.
-    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
-             rotation_mode="anchor")
-
-    # Loop over data dimensions and create text annotations.
-    for i in range(len(labels)):
-        for j in range(len(labels)):
-            text = ax.text(j, i, "{:.2f}".format(correlations[i, j]),
-                           ha="center", va="center", color="w")
-
-    fig.tight_layout()
-    plt.show()
-
-
-def create_parameter_distributions(par_values):
-
-    fig, axes = plt.subplots(4, 2)
-
-    if len(par_values.keys()) != 8:
-        print("not eight parameters")
-
-    labels = sorted(par_values.keys())
-    axes_flat = axes.flatten()
-    for i, l in enumerate(labels):
-        min_v = min(par_values[l]) * 0.95
-        max_v = max(par_values[l]) * 1.05
-        step = (max_v - min_v) / 15
-        bins = np.arange(min_v, max_v+step, step)
-        axes_flat[i].hist(par_values[l], bins=bins)
-        axes_flat[i].set_title(l)
-
-    plt.tight_layout()
-    plt.show()
-    plt.close()


 def create_behaviour_distributions(cell_b_values, model_b_values):
     fig, axes = plt.subplots(4, 2)
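behaviour_correlations and parameter_correlations fill symmetric matrices of pairwise correlation coefficients and corrected p-values over the fixed label orders above. A minimal sketch of that pattern, assuming Pearson correlations and a Bonferroni correction (the actual test and correction used in analysis.py are not shown in this diff):

import numpy as np
from scipy.stats import pearsonr

def pairwise_correlations(values, labels):
    # values: dict mapping each label to a 1-D array of per-cell values
    n = len(labels)
    corr_values = np.zeros((n, n))
    p_values = np.ones((n, n))
    for i, a in enumerate(labels):
        for j, b in enumerate(labels):
            corr_values[i, j], p_values[i, j] = pearsonr(values[a], values[b])
    # Bonferroni over the unique off-diagonal pairs (an assumption, not the repository's method)
    n_tests = n * (n - 1) / 2
    corrected_p_values = np.minimum(p_values * n_tests, 1.0)
    return labels, corr_values, corrected_p_values

create_correlation_plot then zeroes out every entry whose corrected p-value is 0.05 or larger before drawing the matrix with imshow, so only significant correlations remain colored.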
@@ -161,13 +161,13 @@ When the fish's EOD is unperturbed P units fire every few EOD periods but they h


 \begin{figure}[H]
-{\caption{\label{fig:heterogeneity_isi_hist} \textbf{A--C} 100\,ms of cell membrane voltage and \textbf{D--F} interspike interval histograms, each for three different cells. Showing the variability between cells of spiking behavior of P units in baseline conditions. \textbf{A} and \textbf{D}: A non bursting cell with a baseline firing rate of 133\,Hz (EODf: 806\,Hz), \textbf{B} and \textbf{E}: A cell with some bursts and a baseline firing rate of 235\,Hz (EODf: 682\,Hz) and \textbf{C} and \textbf{F}: A strongly bursting cell with longer breaks between bursts. Baseline rate of 153\,Hz and EODf of 670\,Hz }}
-{\includegraphics[width=1\textwidth]{figures/isi_hist_heterogeneity.png}}
+{\caption{\label{fig:heterogeneity_isi_hist} Variability in the spiking behavior of P units under baseline conditions. \textbf{A--C} 100\,ms of cell membrane voltage and \textbf{D--F} interspike interval histograms, each for three different cells. \textbf{A} and \textbf{D}: a non-bursting cell with a baseline firing rate of 133\,Hz (EODf: 806\,Hz); \textbf{B} and \textbf{E}: a cell with some bursts and a baseline firing rate of 235\,Hz (EODf: 682\,Hz); \textbf{C} and \textbf{F}: a strongly bursting cell with longer breaks between bursts, baseline rate of 153\,Hz (EODf: 670\,Hz).}}
+{\includegraphics[width=\textwidth]{figures/isi_hist_heterogeneity.png}}
 \end{figure}

 \todo{heterogeneity more}

-Furthermore show P units a pronounced heterogeneity in their spiking behavior (fig.~\ref{fig:heterogeneity_isi_hist}, \cite{gussin2007limits}). This is an important aspect one needs to consider when trying to understand what and how information is encoded in the spike trains of the neuron. A single neuron might be an independent unit from all other neurons but through different tuning curves a full picture of the stimulus can be encoded in the population while a single neuron only encodes a small feature space. This type of encoding is ubiquitous in the nervous system and is used in (EXAMPLE) for (EXAMPLE feature) PLUS MORE... \todo{refs}. Even though P units were already modelled based on a simple leaky integrate-and-fire neuron \citep{chacron2001simple} and conductance based \citep{kashimori1996model} and well studied (\todo{refS}), but up to this point there no model that tries to cover the full breadth of heterogeneity of the P unit population. Having such a model could help shed light into the population code used in the electric sense, allow researchers gain a better picture how higher brain areas might process the information and get one step closer to the full path between sensory input and behavioural output.
+Furthermore, P units show a pronounced heterogeneity in their spiking behavior (fig.~\ref{fig:heterogeneity_isi_hist}, \cite{gussin2007limits}). This is an important aspect to consider when trying to understand what information is encoded in the spike trains of these neurons and how. A single neuron may be an independent unit from all other neurons, but through different tuning curves a full picture of the stimulus can be encoded in the population, while a single neuron only encodes a small part of the feature space. This type of encoding is ubiquitous in the nervous system and is used, for example, in the visual sense for color vision, PLUS MORE... \todo{refs}. Even though P units have already been modelled with a simple leaky integrate-and-fire neuron \citep{chacron2001simple} and with conductance-based models \citep{kashimori1996model}, and are well studied (\todo{refS}), up to this point there is no model that tries to cover the full breadth of heterogeneity of the P-unit population. Having such a model could help shed light on the population code used in the electric sense, allow researchers to gain a better picture of how higher brain areas might process the information, and get one step closer to the full path between sensory input and behavioural output.



@@ -371,7 +371,7 @@ Together this results in the dynamics seen in equations \ref{eq:full_model_dynam
 parameter & explanation & unit \\
 \hline
 $\alpha$ & stimulus scaling factor & [cm] \\
-$tau_m$ & membrane time constant & [ms]\\
+$\tau_m$ & membrane time constant & [ms]\\
 $I_{Bias}$ & bias current & [mV] \\
 $\sqrt{2D}$ & noise strength & [mV$\sqrt{\text{s}}$]\\
 $\tau_A$ & adaption time constant & [ms] \\
@@ -447,13 +447,43 @@ All errors were then summed up for the full error. The fits were done with the N

 \section{Results}

+\begin{figure}[H]
+\includegraphics[scale=0.5]{figures/fit_baseline_comparison.png}
+\caption{\label{fig:comp_baseline} }
+\end{figure}
+
+\begin{figure}[H]
+\includegraphics[scale=0.5]{figures/fit_adaption_comparison.png}
+\caption{\label{fig:comp_adaption} Excluded 8 value pairs from the onset slope as they had slopes higher than 30000.}
+\end{figure}
+
-\section{Discussion}
+\begin{figure}[H]
+\includegraphics[scale=0.5]{figures/fit_burstiness_comparison.png}
+\caption{\label{fig:comp_burstiness} }
+\end{figure}
+
+
+\begin{figure}[H]
+\includegraphics[scale=0.6]{figures/behaviour_correlations.png}
+\caption{\label{fig:behavior_correlations} The additional $f_{\infty}$ correlation is presumably related to the Burstiness--baseline frequency correlation: the higher the firing rate, the higher the chance of bursting, and bursts have a more strongly negative SC.}
+\end{figure}
+
+\begin{figure}[H]
+\includegraphics[scale=0.6]{figures/parameter_distributions.png}
+\caption{\label{fig:parameter_distributions} }
+\end{figure}
+
+\begin{figure}[H]
+\includegraphics[scale=0.6]{figures/parameter_correlations.png}
+\caption{\label{fig:parameter_correlations} }
+\end{figure}
+
+
+\section{Discussion}



+\newpage
 \bibliography{citations}
 \bibliographystyle{apalike}
New binary files:
thesis/figures/behaviour_correlations.png (62 KiB)
thesis/figures/fit_adaption_comparison.png (34 KiB)
thesis/figures/fit_baseline_comparison.png (46 KiB)
thesis/figures/fit_burstiness_comparison.png (30 KiB)
thesis/figures/parameter_correlations.png (59 KiB)
thesis/figures/parameter_distributions.png (22 KiB)
One existing figure was replaced (28 KiB before and after).