figuresgit push origin master ! and fiigure descriptions of results
@ -8,6 +8,7 @@ FIG_SIZE_MEDIUM = (6, 6)
|
||||
FIG_SIZE_LARGE = (8, 8)
|
||||
FIG_SIZE_SMALL_WIDE = (6, 4)
|
||||
FIG_SIZE_MEDIUM_WIDE = (8, 6)
|
||||
FIG_SIZE_MEDIUM_HIGH = (6, 8)
|
||||
|
||||
|
||||
|
||||
@ -17,12 +18,12 @@ colors_muted = ptc.colors_muted
|
||||
|
||||
|
||||
COLOR_DATA = ptc.colors_vivid["blue"]
|
||||
COLOR_DATA_f0 = ptc.colors_vivid["red"]
|
||||
COLOR_DATA_finf = ptc.colors_vivid["green"]
|
||||
COLOR_DATA_f0 = colors_muted["red"]
|
||||
COLOR_DATA_finf = ptc.colors_vivid["blue"]
|
||||
|
||||
COLOR_MODEL = colors_muted["orange"]
|
||||
COLOR_MODEL_f0 = colors_muted["red"]
|
||||
COLOR_MODEL_finf = colors_muted["green"]
|
||||
COLOR_MODEL_f0 = colors_muted["orange"]
|
||||
COLOR_MODEL_finf = ptc.colors_vivid["green"]
|
||||
|
||||
f0_marker = (3, 0, 0) # "^"
|
||||
finf_marker = (4, 0, 0) # "s"
|
||||
|
@ -89,7 +89,7 @@ def p_unit_heterogeneity():
|
||||
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "isi_hist_heterogeneity.png", transparent=True)
|
||||
plt.savefig(consts.SAVE_FOLDER + "isi_hist_heterogeneity.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
@ -203,7 +203,7 @@ def p_unit_example():
|
||||
plt.tight_layout()
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=2.2)
|
||||
fig.label_axes()
|
||||
plt.savefig("thesis/figures/p_unit_example.png", transparent=True)
|
||||
plt.savefig("thesis/figures/p_unit_example.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
@ -268,7 +268,7 @@ def fi_point_detection():
|
||||
plt.tight_layout()
|
||||
consts.set_figure_labels(xoffset=-2.5)
|
||||
fig.label_axes()
|
||||
plt.savefig("thesis/figures/f_point_detection.png", transparent=True)
|
||||
plt.savefig("thesis/figures/f_point_detection.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
@ -410,6 +410,5 @@ def test_fi_curve_colors():
|
||||
plt.close()
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
main()
|
||||
|
@ -129,6 +129,10 @@ def stimulus_development():
|
||||
# axes[2].set_ylim((0, 1.05))
|
||||
|
||||
plt.tight_layout()
|
||||
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=1.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "stimulus_development.pdf")
|
||||
plt.close()
|
||||
|
||||
|
@ -55,6 +55,10 @@ def am_generation():
|
||||
axes[2].set_xlim((time[0], time[-1]))
|
||||
axes[2].set_xticks([-25, 0, 25, 50, 75])
|
||||
plt.tight_layout()
|
||||
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=1.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig("thesis/figures/amGeneration.pdf")
|
||||
plt.close()
|
||||
|
||||
@ -111,6 +115,10 @@ def stimuli_protocol_examples():
|
||||
|
||||
axes[2].set_xlim((time_start, time_end))
|
||||
plt.tight_layout()
|
||||
|
||||
consts.set_figure_labels(xoffset=-2.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig("thesis/figures/stimuliExamples.pdf")
|
||||
plt.close()
|
||||
|
||||
|
@ -4,6 +4,9 @@ import matplotlib.gridspec as gridspec
|
||||
from analysis import get_filtered_fit_info, get_behaviour_values, get_parameter_values, behaviour_correlations, parameter_correlations
|
||||
from ModelFit import get_best_fit
|
||||
from Baseline import BaselineModel, BaselineCellData
|
||||
from FiCurve import FICurveModel, FICurveCellData
|
||||
from CellData import CellData
|
||||
import functions as fu
|
||||
import Figure_constants as consts
|
||||
|
||||
|
||||
@ -12,65 +15,118 @@ parameter_titles = {"input_scaling": r"$\alpha$", "delta_a": r"$\Delta_A$",
|
||||
"refractory_period": "$t_{ref}$", "tau_a": r"$\tau_A$",
|
||||
"v_offset": r"$I_{Bias}$", "dend_tau": r"$\tau_{dend}$"}
|
||||
|
||||
behaviour_titles = {"baseline_frequency": "base rate", "Burstiness": "Burst", "coefficient_of_variation": "CV",
|
||||
parameter_xlabels = {"input_scaling": "cm", "delta_a": r"$\Delta_A$",
|
||||
"mem_tau": r"$\tau_m$", "noise_strength": r"$\sqrt{2D}$",
|
||||
"refractory_period": "$t_{ref}$", "tau_a": r"$\tau_A$",
|
||||
"v_offset": r"$I_{Bias}$", "dend_tau": r"$\tau_{dend}$"}
|
||||
|
||||
behaviour_titles = {"baseline_frequency": "Base Rate", "Burstiness": "Burst", "coefficient_of_variation": "CV",
|
||||
"serial_correlation": "SC", "vector_strength": "VS",
|
||||
"f_inf_slope": r"$f_{\infty}$ Slope", "f_zero_slope": r"$f_0$ Slope",
|
||||
"f_zero_middle": r"$f_0$ middle"}
|
||||
"f_zero_middle": r"$f_0$ middle", "eodf": "EODf"}
|
||||
|
||||
|
||||
def main():
|
||||
dir_path = "results/final_2/"
|
||||
fits_info = get_filtered_fit_info(dir_path)
|
||||
print("Cells left:", len(fits_info))
|
||||
cell_behaviour, model_behaviour = get_behaviour_values(fits_info)
|
||||
plot_cell_model_comp_baseline(cell_behaviour, model_behaviour)
|
||||
plot_cell_model_comp_adaption(cell_behaviour, model_behaviour)
|
||||
plot_cell_model_comp_burstiness(cell_behaviour, model_behaviour)
|
||||
#
|
||||
behaviour_correlations_plot(fits_info)
|
||||
#
|
||||
labels, corr_values, corrected_p_values = parameter_correlations(fits_info)
|
||||
par_labels = [parameter_titles[l] for l in labels]
|
||||
fig, ax = plt.subplots(1, 1)
|
||||
#ax, labels, correlations, p_values, title, y_label=True
|
||||
create_correlation_plot(ax, par_labels, corr_values, corrected_p_values, "")
|
||||
plt.savefig(consts.SAVE_FOLDER + "parameter_correlations.png")
|
||||
plt.close()
|
||||
|
||||
# create_parameter_distributions(get_parameter_values(fits_info))
|
||||
|
||||
|
||||
# dend_tau_and_ref_effect()
|
||||
# quit()
|
||||
dir_path = "results/final_2/"
|
||||
fits_info = get_filtered_fit_info(dir_path, filter=True)
|
||||
# print("Cells left:", len(fits_info))
|
||||
# cell_behaviour, model_behaviour = get_behaviour_values(fits_info)
|
||||
# plot_cell_model_comp_baseline(cell_behaviour, model_behaviour)
|
||||
# plot_cell_model_comp_adaption(cell_behaviour, model_behaviour)
|
||||
# plot_cell_model_comp_burstiness(cell_behaviour, model_behaviour)
|
||||
# #
|
||||
# behaviour_correlations_plot(fits_info)
|
||||
# #
|
||||
parameter_correlation_plot(fits_info)
|
||||
# #
|
||||
create_parameter_distributions(get_parameter_values(fits_info))
|
||||
# create_parameter_distributions(get_parameter_values(fits_info, scaled=True, goal_eodf=800), "scaled_to_800_")
|
||||
# errors = calculate_percent_errors(fits_info)
|
||||
# create_boxplots(errors)
|
||||
|
||||
# example_good_hist_fits(dir_path)
|
||||
# example_bad_hist_fits(dir_path)
|
||||
# example_good_fi_fits(dir_path)
|
||||
# example_bad_fi_fits(dir_path)
|
||||
|
||||
|
||||
def dend_tau_and_ref_effect():
|
||||
cells = ["2012-12-21-am-invivo-1", "2014-03-19-ad-invivo-1", "2018-05-08-ac-invivo-1"]
|
||||
cell_type = ["no burster", "burster", "strong burster"]
|
||||
folders = ["results/ref_and_tau/no_dend_tau/", "results/ref_and_tau/no_ref_period/", "results/final_2/"]
|
||||
title = [r"without $\tau_{dend}$", r"without $t_{ref}$", "with both"]
|
||||
|
||||
def create_parameter_distributions(par_values):
|
||||
fig, axes = plt.subplots(len(cells), 3, figsize=(12, 9), sharey="row", sharex="all")
|
||||
|
||||
fig, axes = plt.subplots(4, 2)
|
||||
for i, cell in enumerate(cells):
|
||||
cell_data = CellData("data/final/" + cell)
|
||||
cell_baseline = BaselineCellData(cell_data)
|
||||
cell_baseline.load_values(cell_data.get_data_path())
|
||||
eodf = cell_data.get_eod_frequency()
|
||||
for j, folder in enumerate(folders):
|
||||
fit = get_best_fit(folder + cell)
|
||||
model_baseline = BaselineModel(fit.get_model(), eodf)
|
||||
cell_isis = cell_baseline.get_interspike_intervals() * eodf
|
||||
model_isis = model_baseline.get_interspike_intervals() * eodf
|
||||
bins = np.arange(0, 0.025, 0.0001) * eodf
|
||||
|
||||
axes[i, j].hist(cell_isis, density=True, bins=bins, color=consts.COLOR_DATA, alpha=0.5)
|
||||
axes[i, j].hist(model_isis, density=True, bins=bins, color=consts.COLOR_MODEL, alpha=0.75)
|
||||
if j == 0:
|
||||
axes[i, j].set_ylabel(cell_type[i])
|
||||
axes[i, j].set_yticklabels([])
|
||||
if i == 0:
|
||||
axes[0, j].set_title(title[j])
|
||||
plt.xlim(0, 17.5)
|
||||
fig.text(0.5, 0.04, 'Time in EOD periods', ha='center', va='center') # shared x label
|
||||
fig.text(0.06, 0.5, 'ISI Density', ha='center', va='center', rotation='vertical') # shared y label
|
||||
|
||||
fig.text(0.135, 0.9, 'A', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.4075, 0.9, 'B', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.68, 0.9, 'C', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.11, 0.86, '1', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.11, 0.59, '2', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.11, 0.32, '3', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "dend_ref_effect.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
def create_parameter_distributions(par_values, prefix=""):
|
||||
fig, axes = plt.subplots(4, 2, gridspec_kw={"left": 0.1, "hspace": 0.5}, figsize=consts.FIG_SIZE_MEDIUM_HIGH)
|
||||
|
||||
if len(par_values.keys()) != 8:
|
||||
print("not eight parameters")
|
||||
|
||||
labels = ["input_scaling", "v_offset", "mem_tau", "noise_strength",
|
||||
"tau_a", "delta_a", "dend_tau", "refractory_period"]
|
||||
x_labels = ["[cm]", "[mV]", "[ms]", r"[mV$\sqrt{s}$]", "[ms]", "[mVms]", "[ms]", "[ms]"]
|
||||
axes_flat = axes.flatten()
|
||||
for i, l in enumerate(labels):
|
||||
min_v = min(par_values[l]) * 0.95
|
||||
max_v = max(par_values[l]) * 1.05
|
||||
step = (max_v - min_v) / 20
|
||||
bins = np.arange(min_v, max_v+step, step)
|
||||
if "ms" in x_labels[i]:
|
||||
bins *= 1000
|
||||
par_values[l] = np.array(par_values[l]) * 1000
|
||||
axes_flat[i].hist(par_values[l], bins=bins, color=consts.COLOR_MODEL, alpha=0.75)
|
||||
axes_flat[i].set_title(parameter_titles[l])
|
||||
|
||||
# axes_flat[i].set_title(parameter_titles[l])
|
||||
axes_flat[i].set_xlabel(parameter_titles[l] + " " + x_labels[i])
|
||||
fig.text(0.01, 0.5, 'Count', ha='center', va='center', rotation='vertical') # shared y label
|
||||
plt.tight_layout()
|
||||
plt.savefig(consts.SAVE_FOLDER + "parameter_distributions.png")
|
||||
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=1.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + prefix + "parameter_distributions.pdf")
|
||||
plt.close()
|
||||
|
||||
|
||||
def behaviour_correlations_plot(fits_info):
|
||||
|
||||
fig = plt.figure(tight_layout=True, figsize=consts.FIG_SIZE_MEDIUM_WIDE)
|
||||
gs = gridspec.GridSpec(2, 2, width_ratios=(1, 1), height_ratios=(5, 1), hspace=0.025, wspace=0.05)
|
||||
# fig, axes = plt.subplots(1, 2, figsize=consts.FIG_SIZE_MEDIUM_WIDE)
|
||||
@ -91,12 +147,22 @@ def behaviour_correlations_plot(fits_info):
|
||||
ax_col.imshow(data)
|
||||
ax_col.set_xlabel("Correlation Coefficients")
|
||||
plt.tight_layout()
|
||||
plt.savefig(consts.SAVE_FOLDER + "behaviour_correlations.png")
|
||||
plt.savefig(consts.SAVE_FOLDER + "behaviour_correlations.pdf")
|
||||
plt.close()
|
||||
|
||||
|
||||
def create_correlation_plot(ax, labels, correlations, p_values, title, y_label=True):
|
||||
def parameter_correlation_plot(fits_info):
|
||||
labels, corr_values, corrected_p_values = parameter_correlations(fits_info)
|
||||
par_labels = [parameter_titles[l] for l in labels]
|
||||
fig, ax = plt.subplots(1, 1)
|
||||
# ax, labels, correlations, p_values, title, y_label=True
|
||||
im = create_correlation_plot(ax, par_labels, corr_values, corrected_p_values, "")
|
||||
fig.colorbar(im, ax=ax)
|
||||
plt.savefig(consts.SAVE_FOLDER + "parameter_correlations.pdf")
|
||||
plt.close()
|
||||
|
||||
|
||||
def create_correlation_plot(ax, labels, correlations, p_values, title, y_label=True):
|
||||
cleaned_cors = np.zeros(correlations.shape)
|
||||
|
||||
for i in range(correlations.shape[0]):
|
||||
@ -105,12 +171,17 @@ def create_correlation_plot(ax, labels, correlations, p_values, title, y_label=T
|
||||
cleaned_cors[i, j] = correlations[i, j]
|
||||
else:
|
||||
cleaned_cors[i, j] = np.NAN
|
||||
|
||||
if j >= i:
|
||||
cleaned_cors[i, j] = np.NAN
|
||||
im = ax.imshow(cleaned_cors, vmin=-1, vmax=1)
|
||||
|
||||
# We want to show all ticks...
|
||||
ax.set_xticks(np.arange(len(labels)))
|
||||
ax.set_xticklabels(labels)
|
||||
|
||||
# remove frame:
|
||||
ax.spines['top'].set_visible(False)
|
||||
ax.spines['right'].set_visible(False)
|
||||
# ... and label them with the respective list entries
|
||||
if y_label:
|
||||
ax.set_yticks(np.arange(len(labels)))
|
||||
@ -126,21 +197,28 @@ def create_correlation_plot(ax, labels, correlations, p_values, title, y_label=T
|
||||
# Loop over data dimensions and create text annotations.
|
||||
for i in range(len(labels)):
|
||||
for j in range(len(labels)):
|
||||
if p_values[i][j] < 0.0001:
|
||||
text = ax.text(j, i, "***", ha="center", va="center", color="b")
|
||||
elif p_values[i][j] < 0.001:
|
||||
text = ax.text(j, i, "**", ha="center", va="center", color="b")
|
||||
elif p_values[i][j] < 0.05:
|
||||
text = ax.text(j, i, "*", ha="center", va="center", color="b")
|
||||
if j >= i:
|
||||
continue
|
||||
|
||||
if cleaned_cors[i, j] != np.NAN:
|
||||
text = ax.text(j, i, "{:.2f}".format(cleaned_cors[i, j]), ha="center", va="center", color="w")
|
||||
|
||||
# if p_values[i][j] < 0.0001:
|
||||
# text = ax.text(j, i, "***", ha="center", va="center", color="b")
|
||||
# elif p_values[i][j] < 0.001:
|
||||
# text = ax.text(j, i, "**", ha="center", va="center", color="b")
|
||||
# elif p_values[i][j] < 0.05:
|
||||
# text = ax.text(j, i, "*", ha="center", va="center", color="b")
|
||||
|
||||
return im
|
||||
|
||||
|
||||
def example_good_hist_fits(dir_path):
|
||||
strong_bursty_cell = "2018-05-08-ac-invivo-1"
|
||||
bursty_cell = "2014-03-19-ad-invivo-1"
|
||||
non_bursty_cell = "2012-12-21-am-invivo-1"
|
||||
|
||||
fig, axes = plt.subplots(1, 3, sharex="all", figsize=consts.FIG_SIZE_MEDIUM_WIDE)
|
||||
fig, axes = plt.subplots(1, 3, sharex="all", figsize=(8, 4))
|
||||
|
||||
for i, cell in enumerate([non_bursty_cell, bursty_cell, strong_bursty_cell]):
|
||||
fit_dir = dir_path + cell + "/"
|
||||
@ -156,20 +234,144 @@ def example_good_hist_fits(dir_path):
|
||||
cell_isi = BaselineCellData(cell_data).get_interspike_intervals() * eodf
|
||||
|
||||
bins = np.arange(0, 0.025, 0.0001) * eodf
|
||||
axes[i].hist(model_isi, bins=bins, density=True, alpha=0.75, color=consts.COLOR_MODEL)
|
||||
axes[i].hist(cell_isi, bins=bins, density=True, alpha=0.5, color=consts.COLOR_DATA)
|
||||
axes[i].hist(model_isi, bins=bins, density=True, alpha=0.5, color=consts.COLOR_MODEL)
|
||||
axes[i].set_xlabel("ISI in EOD periods")
|
||||
axes[0].set_ylabel("Density")
|
||||
plt.tight_layout()
|
||||
consts.set_figure_labels(xoffset=-2.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "example_good_isi_hist_fits.png", transparent=True)
|
||||
plt.savefig(consts.SAVE_FOLDER + "example_good_isi_hist_fits.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
def create_boxplots(errors):
|
||||
def example_bad_hist_fits(dir_path):
|
||||
strong_bursty_cell = "2018-05-08-ab-invivo-1"
|
||||
bursty_cell = "2014-06-06-ag-invivo-1"
|
||||
extra_structure_cell = "2014-12-11-ad-invivo-1"
|
||||
|
||||
fig, axes = plt.subplots(1, 3, sharex="all", figsize=(8, 4))
|
||||
|
||||
for i, cell in enumerate([bursty_cell, strong_bursty_cell, extra_structure_cell]):
|
||||
fit_dir = dir_path + cell + "/"
|
||||
fit = get_best_fit(fit_dir)
|
||||
|
||||
cell_data = fit.get_cell_data()
|
||||
eodf = cell_data.get_eod_frequency()
|
||||
|
||||
model = fit.get_model()
|
||||
baseline_model = BaselineModel(model, eodf, trials=5)
|
||||
|
||||
model_isi = np.array(baseline_model.get_interspike_intervals()) * eodf
|
||||
cell_isi = BaselineCellData(cell_data).get_interspike_intervals() * eodf
|
||||
|
||||
bins = np.arange(0, 0.025, 0.0001) * eodf
|
||||
axes[i].hist(model_isi, bins=bins, density=True, alpha=0.75, color=consts.COLOR_MODEL)
|
||||
axes[i].hist(cell_isi, bins=bins, density=True, alpha=0.5, color=consts.COLOR_DATA)
|
||||
|
||||
axes[i].set_xlabel("ISI in EOD periods")
|
||||
axes[0].set_ylabel("Density")
|
||||
plt.tight_layout()
|
||||
consts.set_figure_labels(xoffset=-2.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "example_bad_isi_hist_fits.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
def example_good_fi_fits(dir_path):
|
||||
fig, axes = plt.subplots(1, 3, figsize=(8, 4), sharey="all")
|
||||
for i, cell in enumerate(["2012-12-21-am-invivo-1", "2013-02-21-ag-invivo-1", "2014-03-19-ae-invivo-1"]):
|
||||
fit_dir = dir_path + cell + "/"
|
||||
fit = get_best_fit(fit_dir)
|
||||
|
||||
cell_data = fit.get_cell_data()
|
||||
eodf = cell_data.get_eod_frequency()
|
||||
|
||||
model = fit.get_model()
|
||||
contrasts = cell_data.get_fi_contrasts()
|
||||
fi_curve_data = FICurveCellData(cell_data, contrasts, save_dir=cell_data.get_data_path())
|
||||
contrasts = fi_curve_data.stimulus_values
|
||||
x_values = np.arange(min(contrasts), max(contrasts), 0.001)
|
||||
fi_curve_model = FICurveModel(model, contrasts, eodf, trials=10)
|
||||
|
||||
f_zero_fit = fi_curve_data.f_zero_fit
|
||||
f_inf_fit = fi_curve_data.f_inf_fit
|
||||
|
||||
# f zero response
|
||||
axes[i].plot(contrasts, fi_curve_data.get_f_zero_frequencies(), ',',
|
||||
marker=consts.f0_marker, alpha=0.75, color=consts.COLOR_DATA_f0)
|
||||
axes[i].plot(x_values, fu.full_boltzmann(x_values, f_zero_fit[0], f_zero_fit[1], f_zero_fit[2], f_zero_fit[3]),
|
||||
color=consts.COLOR_DATA_f0, alpha=0.75)
|
||||
axes[i].plot(contrasts, fi_curve_model.get_f_zero_frequencies(), ',',
|
||||
marker=consts.f0_marker, alpha=0.75, color=consts.COLOR_MODEL_f0)
|
||||
|
||||
# f inf response
|
||||
axes[i].plot(contrasts, fi_curve_data.get_f_inf_frequencies(), ',',
|
||||
marker=consts.finf_marker, alpha=0.5, color=consts.COLOR_DATA_finf)
|
||||
axes[i].plot(x_values, fu.clipped_line(x_values, f_inf_fit[0], f_inf_fit[1]),
|
||||
color=consts.COLOR_DATA_finf, alpha=0.5)
|
||||
axes[i].plot(contrasts, fi_curve_model.get_f_inf_frequencies(), ',',
|
||||
marker=consts.finf_marker, alpha=0.75, color=consts.COLOR_MODEL_finf)
|
||||
|
||||
axes[i].set_xlabel("Contrast")
|
||||
axes[0].set_ylabel("Frequency [Hz]")
|
||||
plt.tight_layout()
|
||||
consts.set_figure_labels(xoffset=-2.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "example_good_fi_fits.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
def example_bad_fi_fits(dir_path):
|
||||
fig, axes = plt.subplots(1, 2, figsize=(8, 4))
|
||||
# "2013-01-08-aa-invivo-1" candidate cell
|
||||
for i, cell in enumerate(["2012-12-13-ao-invivo-1", "2014-01-23-ab-invivo-1"]):
|
||||
fit_dir = dir_path + cell + "/"
|
||||
fit = get_best_fit(fit_dir)
|
||||
|
||||
cell_data = fit.get_cell_data()
|
||||
eodf = cell_data.get_eod_frequency()
|
||||
|
||||
model = fit.get_model()
|
||||
contrasts = cell_data.get_fi_contrasts()
|
||||
fi_curve_data = FICurveCellData(cell_data, contrasts, save_dir=cell_data.get_data_path())
|
||||
contrasts = fi_curve_data.stimulus_values
|
||||
x_values = np.arange(min(contrasts), max(contrasts), 0.001)
|
||||
fi_curve_model = FICurveModel(model, contrasts, eodf, trials=10)
|
||||
|
||||
f_zero_fit = fi_curve_data.f_zero_fit
|
||||
f_inf_fit = fi_curve_data.f_inf_fit
|
||||
|
||||
# f zero response
|
||||
axes[i].plot(contrasts, fi_curve_data.get_f_zero_frequencies(), ',',
|
||||
marker=consts.f0_marker, alpha=0.75, color=consts.COLOR_DATA_f0)
|
||||
axes[i].plot(x_values, fu.full_boltzmann(x_values, f_zero_fit[0], f_zero_fit[1], f_zero_fit[2], f_zero_fit[3]),
|
||||
color=consts.COLOR_DATA_f0, alpha=0.75)
|
||||
axes[i].plot(contrasts, fi_curve_model.get_f_zero_frequencies(), ',',
|
||||
marker=consts.f0_marker, alpha=0.75, color=consts.COLOR_MODEL_f0)
|
||||
|
||||
# f inf response
|
||||
axes[i].plot(contrasts, fi_curve_data.get_f_inf_frequencies(), ',',
|
||||
marker=consts.finf_marker, alpha=0.5, color=consts.COLOR_DATA_finf)
|
||||
axes[i].plot(x_values, fu.clipped_line(x_values, f_inf_fit[0], f_inf_fit[1]),
|
||||
color=consts.COLOR_DATA_finf, alpha=0.5)
|
||||
axes[i].plot(contrasts, fi_curve_model.get_f_inf_frequencies(), ',',
|
||||
marker=consts.finf_marker, alpha=0.75, color=consts.COLOR_MODEL_finf)
|
||||
|
||||
axes[i].set_xlabel("Contrast")
|
||||
axes[0].set_ylabel("Frequency [Hz]")
|
||||
plt.tight_layout()
|
||||
consts.set_figure_labels(xoffset=-2.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "example_bad_fi_fits.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
def create_boxplots(errors):
|
||||
labels = ["{}_n:{}".format(k, len(errors[k])) for k in sorted(errors.keys())]
|
||||
for k in sorted(errors.keys()):
|
||||
print("{}: median %-error: {:.2f}".format(k, np.median(errors[k])))
|
||||
@ -182,17 +384,11 @@ def create_boxplots(errors):
|
||||
plt.close()
|
||||
|
||||
|
||||
|
||||
def plot_cell_model_comp_baseline(cell_behavior, model_behaviour):
|
||||
fig = plt.figure(figsize=(12, 6))
|
||||
# ("baseline_frequency", "vector_strength", "serial_correlation")
|
||||
|
||||
# Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
|
||||
# the size of the marginal axes and the main axes in both directions.
|
||||
# Also adjust the subplot parameters for a square plot.
|
||||
gs = fig.add_gridspec(2, 3, width_ratios=[5, 5, 5], height_ratios=[3, 7],
|
||||
left=0.1, right=0.9, bottom=0.1, top=0.9,
|
||||
wspace=0.25, hspace=0.05)
|
||||
wspace=0.25, hspace=0.2)
|
||||
num_of_bins = 20
|
||||
# baseline freq plot:
|
||||
i = 0
|
||||
@ -206,6 +402,9 @@ def plot_cell_model_comp_baseline(cell_behavior, model_behaviour):
|
||||
ax = fig.add_subplot(gs[1, i])
|
||||
ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
scatter_hist(cell, model, ax, ax_histx, behaviour_titles["baseline_frequency"], bins)
|
||||
ax.set_xlabel(r"Cell [Hz]")
|
||||
ax.set_ylabel(r"Model [Hz]")
|
||||
ax_histx.set_ylabel("Count")
|
||||
i += 1
|
||||
|
||||
cell = cell_behavior["vector_strength"]
|
||||
@ -218,6 +417,9 @@ def plot_cell_model_comp_baseline(cell_behavior, model_behaviour):
|
||||
ax = fig.add_subplot(gs[1, i])
|
||||
ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
scatter_hist(cell, model, ax, ax_histx, behaviour_titles["vector_strength"], bins)
|
||||
ax.set_xlabel(r"Cell")
|
||||
ax.set_ylabel(r"Model")
|
||||
ax_histx.set_ylabel("Count")
|
||||
i += 1
|
||||
|
||||
cell = cell_behavior["serial_correlation"]
|
||||
@ -230,10 +432,16 @@ def plot_cell_model_comp_baseline(cell_behavior, model_behaviour):
|
||||
ax = fig.add_subplot(gs[1, i])
|
||||
ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
scatter_hist(cell, model, ax, ax_histx, behaviour_titles["serial_correlation"], bins)
|
||||
ax.set_xlabel(r"Cell")
|
||||
ax.set_ylabel(r"Model")
|
||||
fig.text(0.09, 0.925, 'A', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.375, 0.925, 'B', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.6625, 0.925, 'C', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
ax_histx.set_ylabel("Count")
|
||||
i += 1
|
||||
|
||||
plt.tight_layout()
|
||||
plt.savefig(consts.SAVE_FOLDER + "fit_baseline_comparison.png", transparent=True)
|
||||
plt.savefig(consts.SAVE_FOLDER + "fit_baseline_comparison.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
@ -246,7 +454,7 @@ def plot_cell_model_comp_adaption(cell_behavior, model_behaviour):
|
||||
# Also adjust the subplot parameters for a square plot.
|
||||
gs = fig.add_gridspec(2, 2, width_ratios=[5, 5], height_ratios=[3, 7],
|
||||
left=0.1, right=0.9, bottom=0.1, top=0.9,
|
||||
wspace=0.25, hspace=0.05)
|
||||
wspace=0.3, hspace=0.2)
|
||||
num_of_bins = 20
|
||||
# baseline freq plot:
|
||||
i = 0
|
||||
@ -260,6 +468,9 @@ def plot_cell_model_comp_adaption(cell_behavior, model_behaviour):
|
||||
ax = fig.add_subplot(gs[1, i])
|
||||
ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
scatter_hist(cell, model, ax, ax_histx, behaviour_titles["f_inf_slope"], bins)
|
||||
ax.set_xlabel(r"Cell [Hz]")
|
||||
ax.set_ylabel(r"Model [Hz]")
|
||||
ax_histx.set_ylabel("Count")
|
||||
i += 1
|
||||
|
||||
cell = cell_behavior["f_zero_slope"]
|
||||
@ -282,9 +493,16 @@ def plot_cell_model_comp_adaption(cell_behavior, model_behaviour):
|
||||
ax = fig.add_subplot(gs[1, i])
|
||||
ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
scatter_hist(cell, model, ax, ax_histx, behaviour_titles["f_zero_slope"], bins)
|
||||
ax.set_xlabel("Cell [Hz]")
|
||||
ax.set_ylabel("Model [Hz]")
|
||||
ax_histx.set_ylabel("Count")
|
||||
|
||||
plt.tight_layout()
|
||||
plt.savefig(consts.SAVE_FOLDER + "fit_adaption_comparison.png", transparent=True)
|
||||
|
||||
fig.text(0.085, 0.925, 'A', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.54, 0.925, 'B', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "fit_adaption_comparison.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
@ -297,7 +515,7 @@ def plot_cell_model_comp_burstiness(cell_behavior, model_behaviour):
|
||||
# Also adjust the subplot parameters for a square plot.
|
||||
gs = fig.add_gridspec(2, 2, width_ratios=[5, 5], height_ratios=[3, 7],
|
||||
left=0.1, right=0.9, bottom=0.1, top=0.9,
|
||||
wspace=0.25, hspace=0.05)
|
||||
wspace=0.25, hspace=0.2)
|
||||
num_of_bins = 20
|
||||
# baseline freq plot:
|
||||
i = 0
|
||||
@ -309,7 +527,10 @@ def plot_cell_model_comp_burstiness(cell_behavior, model_behaviour):
|
||||
bins = np.arange(minimum, maximum + step, step)
|
||||
|
||||
ax = fig.add_subplot(gs[1, i])
|
||||
ax.set_xlabel("Cell [%ms]")
|
||||
ax.set_ylabel("Model [%ms]")
|
||||
ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
ax_histx.set_ylabel("Count")
|
||||
scatter_hist(cell, model, ax, ax_histx, behaviour_titles["Burstiness"], bins)
|
||||
i += 1
|
||||
|
||||
@ -325,111 +546,32 @@ def plot_cell_model_comp_burstiness(cell_behavior, model_behaviour):
|
||||
ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
scatter_hist(cell, model, ax, ax_histx, behaviour_titles["coefficient_of_variation"], bins)
|
||||
|
||||
ax.set_xlabel("Cell")
|
||||
ax.set_ylabel("Model")
|
||||
ax_histx.set_ylabel("Count")
|
||||
|
||||
plt.tight_layout()
|
||||
plt.savefig(consts.SAVE_FOLDER + "fit_burstiness_comparison.png", transparent=True)
|
||||
|
||||
fig.text(0.085, 0.925, 'A', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
fig.text(0.53, 0.925, 'B', ha='center', va='center', rotation='horizontal', size=16, family='serif')
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "fit_burstiness_comparison.pdf", transparent=True)
|
||||
plt.close()
|
||||
|
||||
|
||||
# def behaviour_overview_pairs(cell_behaviour, model_behaviour):
|
||||
# # behaviour_keys = ["Burstiness", "coefficient_of_variation", "serial_correlation",
|
||||
# # "vector_strength", "f_inf_slope", "f_zero_slope", "baseline_frequency"]
|
||||
#
|
||||
# pairs = [("baseline_frequency", "vector_strength", "serial_correlation"),
|
||||
# ("Burstiness", "coefficient_of_variation"),
|
||||
# ("f_inf_slope", "f_zero_slope")]
|
||||
#
|
||||
# for pair in pairs:
|
||||
# cell = []
|
||||
# model = []
|
||||
# for behaviour in pair:
|
||||
# cell.append(cell_behaviour[behaviour])
|
||||
# model.append(model_behaviour[behaviour])
|
||||
# overview_pair(cell, model, pair)
|
||||
#
|
||||
#
|
||||
# def overview_pair(cell, model, titles):
|
||||
# fig = plt.figure(figsize=(8, 6))
|
||||
#
|
||||
# columns = len(cell)
|
||||
#
|
||||
# # Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
|
||||
# # the size of the marginal axes and the main axes in both directions.
|
||||
# # Also adjust the subplot parameters for a square plot.
|
||||
# gs = fig.add_gridspec(2, columns, width_ratios=[5] * columns, height_ratios=[3, 7],
|
||||
# left=0.1, right=0.9, bottom=0.1, top=0.9,
|
||||
# wspace=0.2, hspace=0.05)
|
||||
#
|
||||
# for i in range(len(cell)):
|
||||
# if titles[i] == "f_zero_slope":
|
||||
# length_before = len(cell[i])
|
||||
# idx = np.array(cell[i]) < 30000
|
||||
# cell[i] = np.array(cell[i])[idx]
|
||||
# model[i] = np.array(model[i])[idx]
|
||||
#
|
||||
# idx = np.array(model[i]) < 30000
|
||||
# cell[i] = np.array(cell[i])[idx]
|
||||
# model[i] = np.array(model[i])[idx]
|
||||
# print("removed {} values from f_zero_slope plot.".format(length_before - len(cell[i])))
|
||||
# ax = fig.add_subplot(gs[1, i])
|
||||
# ax_histx = fig.add_subplot(gs[0, i], sharex=ax)
|
||||
# scatter_hist(cell[i], model[i], ax, ax_histx, titles[i])
|
||||
#
|
||||
# # plt.tight_layout()
|
||||
# plt.show()
|
||||
#
|
||||
#
|
||||
# def grouped_error_overview_behaviour_dist(cell_behaviours, model_behaviours):
|
||||
# # start with a square Figure
|
||||
# fig = plt.figure(figsize=(12, 12))
|
||||
#
|
||||
# rows = 4
|
||||
# columns = 2
|
||||
# # Add a gridspec with two rows and two columns and a ratio of 2 to 7 between
|
||||
# # the size of the marginal axes and the main axes in both directions.
|
||||
# # Also adjust the subplot parameters for a square plot.
|
||||
# gs = fig.add_gridspec(rows*2, columns, width_ratios=[5]*columns, height_ratios=[3, 7] * rows,
|
||||
# left=0.1, right=0.9, bottom=0.1, top=0.9,
|
||||
# wspace=0.2, hspace=0.5)
|
||||
#
|
||||
# for i, behaviour in enumerate(sorted(cell_behaviours.keys())):
|
||||
# col = int(np.floor(i / rows))
|
||||
# row = i - rows*col
|
||||
# ax = fig.add_subplot(gs[row*2 + 1, col])
|
||||
# ax_histx = fig.add_subplot(gs[row*2, col])
|
||||
#
|
||||
# # use the previously defined function
|
||||
# scatter_hist(cell_behaviours[behaviour], model_behaviours[behaviour], ax, ax_histx, behaviour)
|
||||
#
|
||||
# plt.tight_layout()
|
||||
# plt.show()
|
||||
#
|
||||
|
||||
def scatter_hist(cell_values, model_values, ax, ax_histx, behaviour, bins, ax_histy=None):
|
||||
def scatter_hist(cell_values, model_values, ax, ax_histx, behaviour, bins):
|
||||
# copied from matplotlib
|
||||
|
||||
# no labels
|
||||
ax_histx.tick_params(axis="cell", labelbottom=False)
|
||||
# ax_histy.tick_params(axis="model_values", labelleft=False)
|
||||
# the scatter plot:
|
||||
minimum = min(min(cell_values), min(model_values))
|
||||
maximum = max(max(cell_values), max(model_values))
|
||||
ax.plot((minimum, maximum), (minimum, maximum), color="grey")
|
||||
ax.scatter(cell_values, model_values, color="black")
|
||||
|
||||
ax.set_xlabel("cell")
|
||||
ax.set_ylabel("model")
|
||||
|
||||
ax_histx.hist(cell_values, bins=bins, color=consts.COLOR_DATA, alpha=0.75)
|
||||
ax_histx.hist(model_values, bins=bins, color=consts.COLOR_MODEL, alpha=0.75)
|
||||
ax_labels = ax.get_xticklabels()
|
||||
ax_histx.set_xticklabels([])
|
||||
ax.set_xticklabels(ax_labels)
|
||||
ax_histx.set_xticks(ax.get_xticks())
|
||||
ax_histx.set_xlim(ax.get_xlim())
|
||||
ax_histx.set_title(behaviour)
|
||||
|
||||
# ax_histy.hist(y, bins=bins, orientation='horizontal')
|
||||
ax_histx.hist(cell_values, bins=bins, color=consts.COLOR_DATA, alpha=0.50)
|
||||
|
||||
ax_histx.set_title(behaviour)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
@ -237,5 +237,5 @@ class ModelFit:
|
||||
plt.show()
|
||||
else:
|
||||
plt.savefig(save_path + cell.get_cell_name() + "_master_plot.pdf")
|
||||
|
||||
plt.close()
|
||||
|
||||
|
61
analysis.py
@ -9,6 +9,7 @@ from ModelFit import get_best_fit
|
||||
from Baseline import BaselineModel
|
||||
from FiCurve import FICurveModel
|
||||
from CellData import CellData
|
||||
from models.LIFACnoise import LifacNoiseModel
|
||||
from stimuli.SinusoidalStepStimulus import SinusoidalStepStimulus
|
||||
|
||||
|
||||
@ -42,13 +43,13 @@ def main():
|
||||
# labels, corr_values, corrected_p_values = parameter_correlations(fits_info)
|
||||
# create_correlation_plot(labels, corr_values, corrected_p_values)
|
||||
|
||||
create_parameter_distributions(get_parameter_values(fits_info))
|
||||
# create_parameter_distributions(get_parameter_values(fits_info))
|
||||
cell_b, model_b = get_behaviour_values(fits_info)
|
||||
create_behaviour_distributions(cell_b, model_b)
|
||||
pass
|
||||
|
||||
|
||||
def get_filtered_fit_info(folder):
|
||||
def get_filtered_fit_info(folder, filter=True):
|
||||
fits_info = {}
|
||||
|
||||
for item in os.listdir(folder):
|
||||
@ -58,21 +59,20 @@ def get_filtered_fit_info(folder):
|
||||
cell_behaviour, model_behaviour = results.get_behaviour_values()
|
||||
|
||||
# filter:
|
||||
if cell_behaviour["f_zero_slope"] > 50000:
|
||||
print("cell with too high f_zero_slope. cell_folder:", cell_folder)
|
||||
if model_behaviour["f_zero_slope"] > 50000 or cell_behaviour["f_zero_slope"] > 50000:
|
||||
print("f_zero_slope used to filter a fit")
|
||||
continue
|
||||
# if (abs(model_behaviour["f_inf_slope"] - cell_behaviour["f_inf_slope"]) / cell_behaviour["f_inf_slope"]) > 0.25:
|
||||
# print("f_inf_slope used to filter a fit")
|
||||
# print((abs(model_behaviour["f_inf_slope"] - cell_behaviour["f_inf_slope"]) / cell_behaviour["f_inf_slope"]))
|
||||
# continue
|
||||
if abs((model_behaviour["coefficient_of_variation"] - cell_behaviour["coefficient_of_variation"]) /
|
||||
cell_behaviour["coefficient_of_variation"]) > 0.25:
|
||||
print("CV used to filter a fit")
|
||||
continue
|
||||
|
||||
fits_info[item] = [results.get_final_parameters(), model_behaviour, cell_behaviour]
|
||||
if filter:
|
||||
if model_behaviour["f_zero_slope"] > 50000 or cell_behaviour["f_zero_slope"] > 50000:
|
||||
print("f_zero_slope used to filter a fit, ", cell_folder)
|
||||
continue
|
||||
# if (abs(model_behaviour["f_inf_slope"] - cell_behaviour["f_inf_slope"]) / cell_behaviour["f_inf_slope"]) > 0.25:
|
||||
# print("f_inf_slope used to filter a fit")
|
||||
# print((abs(model_behaviour["f_inf_slope"] - cell_behaviour["f_inf_slope"]) / cell_behaviour["f_inf_slope"]))
|
||||
# continue
|
||||
if abs((model_behaviour["coefficient_of_variation"] - cell_behaviour["coefficient_of_variation"]) /
|
||||
cell_behaviour["coefficient_of_variation"]) > 0.33:
|
||||
print("CV used to filter a fit, ", cell_folder)
|
||||
continue
|
||||
eodf = results.get_cell_data().get_eod_frequency()
|
||||
fits_info[item] = [results.get_final_parameters(), model_behaviour, cell_behaviour, eodf]
|
||||
|
||||
return fits_info
|
||||
|
||||
@ -95,16 +95,23 @@ def calculate_percent_errors(fits_info):
|
||||
return errors
|
||||
|
||||
|
||||
def get_parameter_values(fits_info):
|
||||
def get_parameter_values(fits_info, scaled=False, goal_eodf=800):
|
||||
par_keys = sorted(["input_scaling", "delta_a", "mem_tau", "noise_strength",
|
||||
"refractory_period", "tau_a", "v_offset", "dend_tau"])
|
||||
parameter_values = {}
|
||||
for cell in sorted(fits_info.keys()):
|
||||
|
||||
final_parameters = fits_info[cell][0]
|
||||
if scaled:
|
||||
factor = goal_eodf / fits_info[cell][3]
|
||||
final_parameters = LifacNoiseModel(final_parameters).get_eodf_scaled_parameters(factor)
|
||||
|
||||
for par in par_keys:
|
||||
|
||||
if par not in parameter_values.keys():
|
||||
parameter_values[par] = []
|
||||
|
||||
parameter_values[par].append(fits_info[cell][0][par])
|
||||
parameter_values[par].append(final_parameters[par])
|
||||
|
||||
return parameter_values
|
||||
|
||||
@ -126,19 +133,27 @@ def get_behaviour_values(fits_info):
|
||||
|
||||
def behaviour_correlations(fits_info, model_values=True):
|
||||
bv_cell, bv_model = get_behaviour_values(fits_info)
|
||||
eod_frequencies = [fits_info[cell][3] for cell in sorted(fits_info.keys())]
|
||||
if model_values:
|
||||
behaviour_values = bv_model
|
||||
else:
|
||||
behaviour_values = bv_cell
|
||||
|
||||
labels = ["baseline_frequency", "serial_correlation", "vector_strength", "coefficient_of_variation", "Burstiness",
|
||||
labels = ["eodf", "baseline_frequency", "serial_correlation", "vector_strength", "coefficient_of_variation", "Burstiness",
|
||||
"f_inf_slope", "f_zero_slope"]
|
||||
corr_values = np.zeros((len(labels), len(labels)))
|
||||
p_values = np.ones((len(labels), len(labels)))
|
||||
|
||||
for i in range(len(labels)):
|
||||
for j in range(len(labels)):
|
||||
c, p = pearsonr(behaviour_values[labels[i]], behaviour_values[labels[j]])
|
||||
if i == j and labels[j] == "eodf":
|
||||
c, p = pearsonr(eod_frequencies, eod_frequencies)
|
||||
elif labels[i] == "eodf":
|
||||
c, p = pearsonr(eod_frequencies, behaviour_values[labels[j]])
|
||||
elif labels[j] == "eodf":
|
||||
c, p = pearsonr(behaviour_values[labels[i]], eod_frequencies)
|
||||
else:
|
||||
c, p = pearsonr(behaviour_values[labels[i]], behaviour_values[labels[j]])
|
||||
corr_values[i, j] = c
|
||||
p_values[i, j] = p
|
||||
|
||||
@ -148,7 +163,7 @@ def behaviour_correlations(fits_info, model_values=True):
|
||||
|
||||
|
||||
def parameter_correlations(fits_info):
|
||||
parameter_values = get_parameter_values(fits_info)
|
||||
parameter_values = get_parameter_values(fits_info, scaled=True)
|
||||
|
||||
labels = ["input_scaling", "v_offset", "mem_tau", "noise_strength",
|
||||
"tau_a", "delta_a", "dend_tau", "refractory_period"]
|
||||
@ -188,8 +203,6 @@ def create_behaviour_distributions(cell_b_values, model_b_values):
|
||||
plt.show()
|
||||
plt.close()
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def sensitivity_analysis(dir_path, par_range=(0.5, 1.6, 0.1), contrast_range=(-0.3, 0.4, 0.1), parameters=None, behaviours=None, max_models=None):
|
||||
models = []
|
||||
|
@ -182,6 +182,15 @@ class LifacNoiseModel(AbstractModel):
|
||||
def get_model_copy(self):
|
||||
return LifacNoiseModel(self.parameters)
|
||||
|
||||
def get_eodf_scaled_parameters(self, factor):
|
||||
scaled_parameters = self.parameters.copy()
|
||||
time_param_keys = ["refractory_period", "tau_a", "mem_tau", "dend_tau"]
|
||||
|
||||
for key in time_param_keys:
|
||||
scaled_parameters[key] = self.parameters[key] / factor
|
||||
|
||||
return scaled_parameters
|
||||
|
||||
def find_v_offset(self, goal_baseline_frequency, base_stimulus, threshold=2, border=50000):
|
||||
test_model = self.get_model_copy()
|
||||
simulation_length = 6
|
||||
|
397
random_models.py
@ -2,45 +2,381 @@
|
||||
import numpy as np
|
||||
import os
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.gridspec as gridspec
|
||||
from scipy.optimize import curve_fit
|
||||
from scipy.stats import multivariate_normal, pearsonr
|
||||
|
||||
from analysis import get_parameter_values, get_filtered_fit_info, parameter_correlations, get_behaviour_values
|
||||
from ModelFit import get_best_fit
|
||||
import functions as fu
|
||||
from Baseline import BaselineModel
|
||||
from FiCurve import FICurveModel
|
||||
from models.LIFACnoise import LifacNoiseModel
|
||||
from Figures_results import create_correlation_plot
|
||||
import Figure_constants as consts
|
||||
|
||||
LOG_TRANSFORM = {"v_offset": False, 'input_scaling': True, 'dend_tau': True, 'tau_a': True, 'delta_a': True,
|
||||
'refractory_period': False, 'noise_strength': True, 'mem_tau': True}
|
||||
|
||||
behaviour_titles = {"baseline_frequency": "Base Rate", "Burstiness": "Burst", "coefficient_of_variation": "CV",
|
||||
"serial_correlation": "SC", "vector_strength": "VS",
|
||||
"f_inf_slope": r"$f_{\infty}$ Slope", "f_zero_slope": r"$f_0$ Slope"}
|
||||
|
||||
parameter_titles = {"input_scaling": r"$\alpha$", "delta_a": r"$\Delta_A$",
|
||||
"mem_tau": r"$\tau_m$", "noise_strength": r"$\sqrt{2D}$",
|
||||
"refractory_period": "$t_{ref}$", "tau_a": r"$\tau_A$",
|
||||
"v_offset": r"$I_{Bias}$", "dend_tau": r"$\tau_{dend}$"}
|
||||
|
||||
|
||||
recalculate = False
|
||||
num_of_models = 100
|
||||
|
||||
|
||||
def main():
|
||||
folder = "results/final_2/"
|
||||
fit_infos = get_filtered_fit_info(folder, filter=True)
|
||||
goal_eodf = 800
|
||||
param_values = get_parameter_values(fit_infos, scaled=True, goal_eodf=goal_eodf)
|
||||
|
||||
# plots 1
|
||||
keys, means, cov_matrix = calculate_means_and_covariances(param_values)
|
||||
par_list = draw_random_models(1000, keys, means, cov_matrix, seed=1)
|
||||
parameter_correlation_plot(par_list, fit_infos)
|
||||
plot_distributions_with_set_fits(param_values)
|
||||
|
||||
if recalculate:
|
||||
keys, means, cov_matrix = calculate_means_and_covariances(param_values)
|
||||
par_list = draw_random_models(num_of_models, keys, means, cov_matrix)
|
||||
|
||||
behaviour = model_behaviour_distributions(par_list, eodf=goal_eodf)
|
||||
|
||||
save_behaviour(behaviour, par_list)
|
||||
else:
|
||||
behaviour, par_list = load_behavior()
|
||||
create_behaviour_distributions(behaviour, fit_infos)
|
||||
compare_distribution_random_vs_fitted_params(par_list, param_values)
|
||||
|
||||
|
||||
def compare_distribution_random_vs_fitted_params(par_list, scaled_param_values):
|
||||
labels = ["input_scaling", "v_offset", "mem_tau", "noise_strength",
|
||||
"tau_a", "delta_a", "dend_tau", "refractory_period"]
|
||||
x_labels = ["[cm]", "[mV]", "[ms]", r"[mV$\sqrt{s}$]", "[ms]", "[mVms]", "[ms]", "[ms]"]
|
||||
model_parameter_values = {}
|
||||
for l in labels:
|
||||
model_parameter_values[l] = []
|
||||
|
||||
for params in par_list:
|
||||
for l in labels:
|
||||
model_parameter_values[l].append(params[l])
|
||||
|
||||
fig, axes = plt.subplots(4, 2, gridspec_kw={"left": 0.1, "hspace":0.5}, figsize=consts.FIG_SIZE_MEDIUM_HIGH)
|
||||
axes_flat = axes.flatten()
|
||||
for i, l in enumerate(labels):
|
||||
rand_model_values = model_parameter_values[l]
|
||||
fitted_model_values = scaled_param_values[l]
|
||||
|
||||
if "ms" in x_labels[i]:
|
||||
rand_model_values = np.array(rand_model_values) * 1000
|
||||
fitted_model_values = np.array(fitted_model_values) * 1000
|
||||
|
||||
min_v = min(min(rand_model_values), min(fitted_model_values)) * 0.95
|
||||
max_v = max(max(rand_model_values), max(fitted_model_values)) * 1.05
|
||||
# limit = 50000
|
||||
# if max_v > limit:
|
||||
# print("For {} the max value was limited to {}, {} values were excluded!".format(l, limit, np.sum(np.array(cell_b_values[l]) > limit)))
|
||||
# max_v = limit
|
||||
step = (max_v - min_v) / 30
|
||||
bins = np.arange(min_v, max_v + step, step)
|
||||
axes_flat[i].hist(fitted_model_values, bins=bins, alpha=0.5, density=True)
|
||||
axes_flat[i].hist(rand_model_values, bins=bins, alpha=0.5, density=True)
|
||||
axes_flat[i].set_xlabel(parameter_titles[l] + " " + x_labels[i])
|
||||
axes_flat[i].set_yticks([])
|
||||
axes_flat[i].set_yticklabels([])
|
||||
|
||||
fig.text(0.01, 0.5, 'Density', ha='center', va='center', rotation='vertical') # shared y label
|
||||
plt.tight_layout()
|
||||
|
||||
param_values = get_parameter_distributions(folder)
|
||||
plot_distributions(param_values)
|
||||
get_gauss_fits(param_values)
|
||||
pass
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=0)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "compare_parameter_dist_random_models.pdf")
|
||||
plt.close()
|
||||
|
||||
def get_gauss_fits(param_values):
|
||||
gauss_fits = {}
|
||||
num_of_bins = 30
|
||||
|
||||
# keys = ['delta_a', 'dend_tau', 'input_scaling', 'mem_tau', 'noise_strength', 'refractory_period', 'tau_a', 'v_offset']
|
||||
def parameter_correlation_plot(par_list, fits_info):
|
||||
|
||||
key = 'v_offset'
|
||||
fig = plt.figure(tight_layout=True, figsize=consts.FIG_SIZE_MEDIUM_WIDE)
|
||||
gs = gridspec.GridSpec(2, 2, width_ratios=(1, 1), height_ratios=(5, 1), hspace=0.025, wspace=0.05)
|
||||
# fig, axes = plt.subplots(1, 2, figsize=consts.FIG_SIZE_MEDIUM_WIDE)
|
||||
|
||||
values = param_values[key]
|
||||
rand_labels, rand_corr_values, rand_corrected_p_values = parameter_correlations_from_par_list(par_list)
|
||||
par_labels = [parameter_titles[l] for l in rand_labels]
|
||||
img = create_correlation_plot(fig.add_subplot(gs[0, 1]), par_labels, rand_corr_values, rand_corrected_p_values * 10e50, "Drawn Models")
|
||||
|
||||
bins = calculate_bins(values, num_of_bins)
|
||||
normal, n_bins, patches = plt.hist(values, bins=bins, density=True)
|
||||
labels, corr_values, corrected_p_values = parameter_correlations(fits_info)
|
||||
par_labels = [parameter_titles[l] for l in labels]
|
||||
img = create_correlation_plot(fig.add_subplot(gs[0, 0]), par_labels, corr_values, corrected_p_values, "Fitted Models",
|
||||
y_label=False)
|
||||
|
||||
ax_col = fig.add_subplot(gs[1, :])
|
||||
data = [np.arange(-1, 1.001, 0.01)] * 10
|
||||
ax_col.set_xticks([0, 25, 50, 75, 100, 125, 150, 175, 200])
|
||||
ax_col.set_xticklabels([-1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1])
|
||||
ax_col.set_yticks([])
|
||||
ax_col.imshow(data)
|
||||
ax_col.set_xlabel("Correlation Coefficients")
|
||||
plt.tight_layout()
|
||||
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=1.5)
|
||||
fig.label_axes()
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "rand_parameter_correlations_comparison.pdf")
|
||||
plt.close()
|
||||
|
||||
# fit gauss:
|
||||
bin_width = np.mean(np.diff(n_bins))
|
||||
middle_of_bins = n_bins + bin_width / 2
|
||||
|
||||
# try:
|
||||
# n_gauss_pars = fit_gauss(middle_of_bins[:-1], normal)
|
||||
# x = np.arange(min(param_values[key]), max(param_values[key]),
|
||||
# (max(param_values[key]) - min(param_values[key])) / 100)
|
||||
# axes[i, 0].plot(x, fu.gauss(x, n_gauss_pars[0], n_gauss_pars[1], n_gauss_pars[2]))
|
||||
# pass
|
||||
def parameter_correlations_from_par_list(par_list):
|
||||
labels = ["input_scaling", "v_offset", "mem_tau", "noise_strength",
|
||||
"tau_a", "delta_a", "dend_tau", "refractory_period"]
|
||||
|
||||
parameter_values = {}
|
||||
for l in labels:
|
||||
parameter_values[l] = []
|
||||
|
||||
for params in par_list:
|
||||
for l in labels:
|
||||
parameter_values[l].append(params[l])
|
||||
|
||||
corr_values = np.zeros((len(labels), len(labels)))
|
||||
p_values = np.ones((len(labels), len(labels)))
|
||||
|
||||
for i in range(len(labels)):
|
||||
for j in range(len(labels)):
|
||||
c, p = pearsonr(parameter_values[labels[i]], parameter_values[labels[j]])
|
||||
corr_values[i, j] = c
|
||||
p_values[i, j] = p
|
||||
|
||||
corrected_p_values = p_values * sum(range(len(labels)))
|
||||
|
||||
return labels, corr_values, corrected_p_values
|
||||
|
||||
|
||||
def model_behaviour_distributions(par_list, eodf=800):
|
||||
behaviour = {}
|
||||
|
||||
for key in behaviour_titles.keys():
|
||||
behaviour[key] = []
|
||||
|
||||
for i, parset in enumerate(par_list):
|
||||
|
||||
model = LifacNoiseModel(parset)
|
||||
baseline = BaselineModel(model, eodf)
|
||||
|
||||
behaviour["baseline_frequency"].append(baseline.get_baseline_frequency())
|
||||
behaviour["Burstiness"].append(baseline.get_burstiness())
|
||||
behaviour["coefficient_of_variation"].append(baseline.get_coefficient_of_variation())
|
||||
behaviour["serial_correlation"].append(baseline.get_serial_correlation(1)[0])
|
||||
behaviour["vector_strength"].append(baseline.get_vector_strength())
|
||||
|
||||
fi_curve = FICurveModel(model, np.arange(-0.3, 0.301, 0.1), eodf)
|
||||
behaviour["f_inf_slope"].append(fi_curve.f_inf_fit[0])
|
||||
behaviour["f_zero_slope"].append(fi_curve.get_f_zero_fit_slope_at_straight())
|
||||
|
||||
print("{:} of {:}".format(i + 1, len(par_list)))
|
||||
|
||||
return behaviour
|
||||
|
||||
|
||||
def save_behaviour(behaviour, par_list):
|
||||
# save behaviour:
|
||||
keys = np.array(sorted(behaviour.keys()))
|
||||
data_points = len(behaviour[keys[0]])
|
||||
data = np.zeros((len(keys), data_points))
|
||||
for i, k in enumerate(keys):
|
||||
k_data = np.array(behaviour[k])
|
||||
data[i, :] = k_data
|
||||
|
||||
np.save("data/random_model_behaviour_data.npy", data)
|
||||
np.save("data/random_model_behaviour_keys.npy", keys)
|
||||
|
||||
# save parameter list:
|
||||
|
||||
par_keys = np.array(sorted(par_list[0].keys()))
|
||||
num_models = len(par_list)
|
||||
|
||||
pars_data = np.zeros((num_models, len(par_keys)))
|
||||
|
||||
for i, params in enumerate(par_list):
|
||||
params_array = np.array([params[k] for k in par_keys])
|
||||
pars_data[i, :] = params_array
|
||||
|
||||
np.save("data/random_model_parameter_data.npy", pars_data)
|
||||
np.save("data/random_model_parameter_keys.npy", par_keys)
|
||||
|
||||
|
||||
def load_behavior():
|
||||
data = np.load("data/random_model_behaviour_data.npy")
|
||||
keys = np.load("data/random_model_behaviour_keys.npy")
|
||||
behaviour = {}
|
||||
for i, k in enumerate(keys):
|
||||
behaviour[k] = data[i, :]
|
||||
|
||||
pars_data = np.load("data/random_model_parameter_data.npy")
|
||||
par_keys = np.load("data/random_model_parameter_keys.npy")
|
||||
par_list = []
|
||||
|
||||
for i in range(len(pars_data[:, 0])):
|
||||
param_dict = {}
|
||||
for j, k in enumerate(par_keys):
|
||||
param_dict[k] = pars_data[i, j]
|
||||
par_list.append(param_dict)
|
||||
|
||||
return behaviour, par_list
|
||||
|
||||
|
||||
def create_behaviour_distributions(drawn_model_behaviour, fits_info):
|
||||
fig, axes = plt.subplots(4, 2, gridspec_kw={"left": 0.1, "hspace":0.5}, figsize=consts.FIG_SIZE_MEDIUM_HIGH)
|
||||
cell_behaviour, fitted_model_behaviour = get_behaviour_values(fits_info)
|
||||
labels = ['Burstiness', 'baseline_frequency', 'coefficient_of_variation', 'f_inf_slope', 'f_zero_slope', 'serial_correlation', 'vector_strength']
|
||||
unit = ["[%ms]", "[Hz]", "", "[Hz]", "[Hz]", "", ""]
|
||||
|
||||
axes_flat = axes.flatten()
|
||||
for i, l in enumerate(labels):
|
||||
min_v = min(drawn_model_behaviour[l]) * 0.95
|
||||
max_v = max(drawn_model_behaviour[l]) * 1.05
|
||||
# limit = 50000
|
||||
# if max_v > limit:
|
||||
# print("For {} the max value was limited to {}, {} values were excluded!".format(l, limit, np.sum(np.array(cell_b_values[l]) > limit)))
|
||||
# max_v = limit
|
||||
step = (max_v - min_v) / 15
|
||||
bins = np.arange(min_v, max_v + step, step)
|
||||
axes_flat[i].hist(drawn_model_behaviour[l], bins=bins, alpha=0.75, density=False, color=consts.COLOR_MODEL)
|
||||
axes_flat[i].hist(cell_behaviour[l], bins=bins, alpha=0.5, density=False, color=consts.COLOR_DATA)
|
||||
axes_flat[i].set_xlabel(behaviour_titles[l] + " " + unit[i])
|
||||
axes_flat[i].set_yticks([])
|
||||
axes_flat[i].set_yticklabels([])
|
||||
axes_flat[-1].set_visible(False)
|
||||
|
||||
plt.tight_layout()
|
||||
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=0)
|
||||
fig.label_axes()
|
||||
fig.text(0.02, 0.5, 'Density', ha='center', va='center', rotation='vertical') # shared y label
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "random_models_behaviour_dist.pdf")
|
||||
plt.close()
|
||||
|
||||
|
||||
def test_plot_models(par_list, eodf):
|
||||
|
||||
for pars in par_list:
|
||||
baseline = BaselineModel(LifacNoiseModel(pars), eodf)
|
||||
baseline.plot_interspike_interval_histogram()
|
||||
|
||||
fi_curve = FICurveModel(LifacNoiseModel(pars), np.arange(-0.3, 0.31, 0.1), eodf)
|
||||
fi_curve.plot_fi_curve()
|
||||
|
||||
|
||||
def calculate_means_and_covariances(param_values):
|
||||
transformed_values = {}
|
||||
keys = sorted(param_values.keys())
|
||||
for key in keys:
|
||||
if LOG_TRANSFORM[key]:
|
||||
transformed_values[key] = np.log(np.array(param_values[key]))
|
||||
else:
|
||||
transformed_values[key] = np.array(param_values[key])
|
||||
transformed_fits = get_gauss_fits()
|
||||
means = np.array([transformed_fits[k][1] for k in keys])
|
||||
|
||||
cov_matrix = np.zeros((len(keys), len(keys)))
|
||||
|
||||
for i, k1 in enumerate(keys):
|
||||
for j, k2 in enumerate(keys):
|
||||
cor, p = pearsonr(transformed_values[k1], transformed_values[k2])
|
||||
cov_matrix[i, j] = cor * transformed_fits[k1][2] * transformed_fits[k2][2]
|
||||
|
||||
return keys, means, cov_matrix
|
||||
|
||||
|
||||
def draw_random_models(num_of_models, keys, means, cov_matrix, seed=None):
|
||||
if seed is not None:
|
||||
transformed_model_params = multivariate_normal.rvs(means, cov_matrix, num_of_models, seed)
|
||||
else:
|
||||
transformed_model_params = multivariate_normal.rvs(means, cov_matrix, num_of_models)
|
||||
|
||||
drawn_parameters = []
|
||||
|
||||
for par_set in transformed_model_params:
|
||||
retransformed_parameters = {}
|
||||
|
||||
for i, k in enumerate(keys):
|
||||
if LOG_TRANSFORM[k]:
|
||||
retransformed_parameters[k] = np.exp(par_set[i])
|
||||
else:
|
||||
retransformed_parameters[k] = par_set[i]
|
||||
|
||||
drawn_parameters.append(retransformed_parameters)
|
||||
|
||||
return drawn_parameters
|
||||
|
||||
|
||||
def get_gauss_fits():
|
||||
# TODO NOT NORMED TO INTEGRAL OF 1 !!!!!!!
|
||||
transformed_gauss_fits = {}
|
||||
# fit parameter: amplitude, mean, sigma
|
||||
transformed_gauss_fits["delta_a"] = [0.52555418, -2.17583514, 0.658713652] # tweak
|
||||
transformed_gauss_fits["dend_tau"] = [0.90518987, -5.509343763, 0.3593178] # good
|
||||
transformed_gauss_fits["mem_tau"] = [0.85176348, -6.2468377, 0.42126255] # good
|
||||
transformed_gauss_fits["input_scaling"] = [0.57239028, 5., 0.6] # [0.37239028, 5.92264105, 1.77342945] # tweak
|
||||
transformed_gauss_fits["noise_strength"] = [0.62216977, -3.49622807, 0.58081673]# good
|
||||
transformed_gauss_fits["tau_a"] = [0.82351638, -2.39879173, 0.45725644] # good
|
||||
transformed_gauss_fits["v_offset"] = [1.33749859e-02, -1.91220096e+01, 1.71068108e+01] # good
|
||||
transformed_gauss_fits["refractory_period"] = [1.370406256, 9.14715386e-04, 2.33470418e-04]
|
||||
|
||||
return transformed_gauss_fits
|
||||
|
||||
|
||||
def plot_distributions_with_set_fits(param_values):
|
||||
|
||||
fig, axes = plt.subplots(4, 2, gridspec_kw={"left": 0.1, "hspace":0.5}, figsize=consts.FIG_SIZE_MEDIUM_HIGH)
|
||||
|
||||
gauss_fits = get_gauss_fits()
|
||||
bin_number = 30
|
||||
|
||||
labels = ["input_scaling", "v_offset", "mem_tau", "noise_strength",
|
||||
"tau_a", "delta_a", "dend_tau", "refractory_period"]
|
||||
x_labels = ["[ln(cm)]", "[mV]", "[ln(s)]", r"[ln(mV$\sqrt{s}$)]", "[ln(s)]", "[ln(mVs)]", "[ln(s)]", "[ms]"]
|
||||
for i, key in enumerate(labels):
|
||||
k = i % 2
|
||||
m = int(i/2)
|
||||
|
||||
values = param_values[key]
|
||||
if LOG_TRANSFORM[key]:
|
||||
values = np.log(np.array(param_values[key]))
|
||||
|
||||
x = np.arange(min(values), max(values), (max(values) - min(values)) / 100)
|
||||
plot_x = np.arange(min(values), max(values), (max(values) - min(values)) / 100)
|
||||
if "ms" in x_labels[i]:
|
||||
values = np.array(values) * 1000
|
||||
plot_x *= 1000
|
||||
|
||||
gauss_param = gauss_fits[key]
|
||||
|
||||
bins = calculate_bins(values, bin_number)
|
||||
|
||||
axes[m, k].hist(values, bins=bins, density=True, alpha=0.75, color=consts.COLOR_MODEL)
|
||||
axes[m, k].plot(plot_x, fu.gauss(x, gauss_param[0], gauss_param[1], gauss_param[2]), color="black")
|
||||
axes[m, k].set_xlabel(parameter_titles[key] + " " + x_labels[i])
|
||||
axes[m, k].set_yticklabels([])
|
||||
axes[m, k].set_yticks([])
|
||||
|
||||
plt.tight_layout()
|
||||
|
||||
consts.set_figure_labels(xoffset=-2.5, yoffset=0)
|
||||
fig.label_axes()
|
||||
fig.text(0.02, 0.5, 'Density', ha='center', va='center', rotation='vertical') # shared y label
|
||||
|
||||
plt.savefig(consts.SAVE_FOLDER + "parameter_distribution_with_gauss_fits.pdf")
|
||||
plt.close()
|
||||
|
||||
|
||||
def calculate_bins(values, num_of_bins):
|
||||
@ -49,34 +385,30 @@ def calculate_bins(values, num_of_bins):
|
||||
step = (maximum - minimum) / (num_of_bins-1)
|
||||
|
||||
bins = np.arange(minimum-0.5*step, maximum + step, step)
|
||||
print(minimum, maximum)
|
||||
print(bins)
|
||||
return bins
|
||||
|
||||
|
||||
def plot_distributions(param_values):
|
||||
bin_number = 30
|
||||
fig, axes = plt.subplots(len(param_values.keys()), 2)
|
||||
print(sorted(param_values.keys()))
|
||||
for i, key in enumerate(sorted(param_values.keys())):
|
||||
|
||||
# normal hist:
|
||||
values = param_values[key]
|
||||
bins = calculate_bins(values, bin_number)
|
||||
print("bins:", len(bins))
|
||||
normal, n_bins, patches = axes[i, 0].hist(values, bins=calculate_bins(values, bin_number), density=True)
|
||||
axes[i, 0].set_title(key)
|
||||
|
||||
# fit gauss:
|
||||
bin_width = np.mean(np.diff(n_bins))
|
||||
middle_of_bins = n_bins + bin_width / 2
|
||||
print("mid_bins:", len(middle_of_bins))
|
||||
axes[i, 0].plot(middle_of_bins[:-1], normal, 'o')
|
||||
try:
|
||||
n_gauss_pars = fit_gauss(middle_of_bins[:-1], normal)
|
||||
x = np.arange(min(param_values[key]), max(param_values[key]),
|
||||
(max(param_values[key]) - min(param_values[key])) / 100)
|
||||
axes[i, 0].plot(x, fu.gauss(x, n_gauss_pars[0], n_gauss_pars[1], n_gauss_pars[2]))
|
||||
print(key, ": normal:", n_gauss_pars)
|
||||
except RuntimeError as e:
|
||||
pass
|
||||
|
||||
@ -92,15 +424,10 @@ def plot_distributions(param_values):
|
||||
x = np.arange(min(log_values), max(log_values),
|
||||
(max(log_values) - min(log_values)) / 100)
|
||||
axes[i, 1].plot(x, fu.gauss(x, l_gauss_pars[0], l_gauss_pars[1], l_gauss_pars[2]))
|
||||
print(key, ": log:", l_gauss_pars)
|
||||
except RuntimeError as e:
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
plt.tight_layout()
|
||||
plt.show()
|
||||
plt.close()
|
||||
@ -134,11 +461,5 @@ def get_parameter_distributions(folder, param_keys=None):
|
||||
return parameter_values
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
@ -16,6 +16,9 @@ import multiprocessing as mp
|
||||
|
||||
SAVE_DIRECTORY = "./results/final_3"
|
||||
SAVE_DIRECTORY_BEST = "./results/final_3_best/"
|
||||
|
||||
# SAVE_DIRECTORY = "./results/ref_and_tau/no_dend_tau/"
|
||||
# SAVE_DIRECTORY_BEST = "./results/ref_and_tau/ndt_best/"
|
||||
# [vs, sc, cv, isi_hist, bursty, f_inf, f_inf_slope, f_zero, f_zero_slope, f0_curve]
|
||||
ERROR_WEIGHTS = (1, 1, 1, 1, 1, 1, 1, 1, 0, 1)
|
||||
|
||||
@ -109,7 +112,6 @@ def fit_cell_parallel(cell_data, start_parameters):
|
||||
best_fit.generate_master_plot(SAVE_DIRECTORY)
|
||||
|
||||
|
||||
|
||||
def iget_start_parameters():
|
||||
# mem_tau, input_scaling, noise_strength, dend_tau,
|
||||
# expand by tau_a, delta_a ?
|
||||
|
59
test.py
@ -32,25 +32,42 @@ from matplotlib import gridspec
|
||||
# g = gaussian_kernel(kernel_width, dt)
|
||||
# rate = np.convolve(binary, g, mode='same')
|
||||
|
||||
fit = ModelFit("results/final_2/2012-07-12-ag-invivo-1/start_parameter_4_err_6.11/")
|
||||
fit = get_best_fit("results/final_2/2012-12-21-am-invivo-1/")
|
||||
model = fit.get_model()
|
||||
print(model.parameters)
|
||||
eodf = fit.get_cell_data().get_eod_frequency()
|
||||
baseline = BaselineModel(model, eod_frequency=eodf)
|
||||
print("model BF: {:.2f}".format(baseline.get_baseline_frequency()))
|
||||
|
||||
model_no_dend = LifacNoiseModel({'step_size': 5e-05, 'mem_tau': 0.0018888888888888887, 'v_base': 0, 'v_zero': 0, 'threshold': 1, 'v_offset': -175.9718894958496, 'input_scaling': 364.44444444444446, 'delta_a': 0.03, 'tau_a': 0.1511111111111111, 'a_zero': 4.402004449113477, 'noise_strength': 0, 'dend_tau': 5e-05, 'refractory_period': 0.0007944444444444448}
|
||||
)
|
||||
|
||||
baseline = BaselineModel(model_no_dend, eod_frequency=eodf)
|
||||
|
||||
print("no dend BF: {:.2f}".format(baseline.get_baseline_frequency()))
|
||||
# baseline.plot_interspike_interval_histogram()
|
||||
|
||||
for i in range(10):
|
||||
v1, spikes = model.simulate(SinusoidalStepStimulus(eodf, 0, 0), 30)
|
||||
print("last spikes:", max(spikes))
|
||||
print(len(spikes) / 30)
|
||||
time, freq = hF.calculate_time_and_frequency_trace(spikes, 5e-05)
|
||||
plt.plot(time, freq)
|
||||
plt.show()
|
||||
cell_data = fit.get_cell_data()
|
||||
eodf = cell_data.get_eod_frequency()
|
||||
parameters = model.parameters
|
||||
|
||||
time_param_keys = ["refractory_period", "tau_a", "mem_tau", "dend_tau"]
|
||||
contrasts = np.arange(-0.3, 0.3, 0.05)
|
||||
baseline_normal = BaselineModel(model, eodf)
|
||||
fi_curve_normal = FICurveModel(model, contrasts, eodf)
|
||||
fi_curve_normal.plot_fi_curve()
|
||||
normal_isis = baseline_normal.get_interspike_intervals() * eodf
|
||||
normal_bins = np.arange(0, 0.05, 0.0001) * eodf
|
||||
|
||||
factor = 1.1
|
||||
scaled_eodf = eodf * factor
|
||||
scaled_model = model.get_model_copy()
|
||||
|
||||
for key in time_param_keys:
|
||||
scaled_model.parameters[key] = parameters[key] / factor
|
||||
|
||||
baseline_scaled = BaselineModel(scaled_model, scaled_eodf)
|
||||
fi_curve_scaled = FICurveModel(scaled_model, contrasts, scaled_eodf)
|
||||
fi_curve_scaled.plot_fi_curve()
|
||||
scaled_isis = np.array(baseline_scaled.get_interspike_intervals()) * scaled_eodf
|
||||
scaled_bins = np.arange(0, 0.05, 0.0001) * scaled_eodf
|
||||
|
||||
# plt.hist(normal_isis, bins=normal_bins, alpha=0.5, label="normal")
|
||||
# plt.hist(scaled_isis, bins=scaled_bins, alpha=0.5, label="scaled")
|
||||
# plt.legend()
|
||||
# plt.show()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -5,6 +5,12 @@ Babineau, D., Lewis, J.~E., and Longtin, A. (2007).
|
||||
\newblock Spatial acuity and prey detection in weakly electric fish.
|
||||
\newblock {\em PLoS Computational Biology}, 3(3):e38.
|
||||
|
||||
\bibitem[Barlow et~al., 1961]{barlow1961possible}
|
||||
Barlow, H.~B. et~al. (1961).
|
||||
\newblock Possible principles underlying the transformation of sensory
|
||||
messages.
|
||||
\newblock {\em Sensory communication}, 1:217--234.
|
||||
|
||||
\bibitem[Bastian, 1981]{bastian1981electrolocation}
|
||||
Bastian, J. (1981).
|
||||
\newblock Electrolocation.
|
||||
|
@ -3,44 +3,44 @@ Capacity: max_strings=35307, hash_size=35307, hash_prime=30011
|
||||
The top-level auxiliary file: Masterthesis.aux
|
||||
The style file: apalike.bst
|
||||
Database file #1: citations.bib
|
||||
You've used 15 entries,
|
||||
You've used 16 entries,
|
||||
1935 wiz_defined-function locations,
|
||||
580 strings with 6551 characters,
|
||||
and the built_in function-call counts, 6198 in all, are:
|
||||
= -- 623
|
||||
> -- 238
|
||||
< -- 5
|
||||
+ -- 80
|
||||
- -- 78
|
||||
* -- 574
|
||||
:= -- 1075
|
||||
add.period$ -- 45
|
||||
call.type$ -- 15
|
||||
change.case$ -- 113
|
||||
chr.to.int$ -- 15
|
||||
cite$ -- 15
|
||||
duplicate$ -- 208
|
||||
empty$ -- 441
|
||||
format.name$ -- 99
|
||||
if$ -- 1187
|
||||
586 strings with 6698 characters,
|
||||
and the built_in function-call counts, 6592 in all, are:
|
||||
= -- 665
|
||||
> -- 251
|
||||
< -- 6
|
||||
+ -- 84
|
||||
- -- 82
|
||||
* -- 606
|
||||
:= -- 1143
|
||||
add.period$ -- 48
|
||||
call.type$ -- 16
|
||||
change.case$ -- 119
|
||||
chr.to.int$ -- 16
|
||||
cite$ -- 16
|
||||
duplicate$ -- 222
|
||||
empty$ -- 470
|
||||
format.name$ -- 105
|
||||
if$ -- 1265
|
||||
int.to.chr$ -- 1
|
||||
int.to.str$ -- 0
|
||||
missing$ -- 14
|
||||
newline$ -- 78
|
||||
num.names$ -- 45
|
||||
pop$ -- 80
|
||||
missing$ -- 15
|
||||
newline$ -- 83
|
||||
num.names$ -- 48
|
||||
pop$ -- 83
|
||||
preamble$ -- 1
|
||||
purify$ -- 114
|
||||
purify$ -- 120
|
||||
quote$ -- 0
|
||||
skip$ -- 155
|
||||
skip$ -- 167
|
||||
stack$ -- 0
|
||||
substring$ -- 540
|
||||
swap$ -- 15
|
||||
substring$ -- 577
|
||||
swap$ -- 16
|
||||
text.length$ -- 0
|
||||
text.prefix$ -- 0
|
||||
top$ -- 0
|
||||
type$ -- 90
|
||||
type$ -- 96
|
||||
warning$ -- 0
|
||||
while$ -- 57
|
||||
while$ -- 61
|
||||
width$ -- 0
|
||||
write$ -- 197
|
||||
write$ -- 210
|
||||
|
@ -3,7 +3,9 @@
|
||||
\usepackage{graphicx}
|
||||
\usepackage{amsmath}
|
||||
\usepackage{natbib}
|
||||
\usepackage[breaklinks=true,bookmarks=true,bookmarksopen=true,pdfpagemode=UseNone,pdfstartview=FitH,colorlinks=false,citecolor=blue]{hyperref}
|
||||
\usepackage{xcolor}
|
||||
\usepackage[breaklinks=true,colorlinks=true,citecolor=blue!30!black,urlcolor=blue!30!black,linkcolor=blue!30!black]{hyperref}
|
||||
|
||||
|
||||
\usepackage[utf8x]{inputenc}
|
||||
\usepackage[english]{babel}
|
||||
@ -97,6 +99,7 @@ Außerdem erkläre ich, dass die eingereichte Arbeit weder vollständig noch in
|
||||
\item make plot labels consistent (Units: in mV vs [mV])
|
||||
|
||||
\item update number of cells / fish etc
|
||||
|
||||
\end{itemize}
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
@ -142,9 +145,9 @@ Außerdem erkläre ich, dass die eingereichte Arbeit weder vollständig noch in
|
||||
\end{enumerate}
|
||||
\newpage
|
||||
|
||||
The environment of an organism holds important information that it needs to survive. Information about predators to avoid, food to find and potential mates. That means that the ability to sense and process this information is of vital importance for any organism. At the same time the environment also contains a lot of information that is irrelevant to an organism. \todo{ref} suggested already that the sensory systems of an organism should be specialized to extract the information it needs while filtering out the noise and irrelevant information, to efficiently use the limited coding capacity of the sensory systems.
|
||||
The environment of an organism holds important information that it needs to survive. Information about predators to avoid, food to find and potential mates. That means that the ability to sense and process this information is of vital importance for any organism. At the same time the environment also contains a lot of information that is irrelevant to an organism. \cite{barlow1961possible} suggested already that the sensory systems of an organism should be specialized to extract the information it needs while filtering out the noise and irrelevant information, to efficiently use the limited coding capacity of the sensory systems.
|
||||
|
||||
One interesting model system for questions of perception and adaptive signal processing is the electric fish \AptLepto (Brown ghost knife fish).
|
||||
One interesting model system for questions of adaptive signal processing is the electric fish \AptLepto (Brown ghost knife fish).
|
||||
\lepto generate a sinusoidal electric field with the electric organ in their tail enabling them to use active electroreception which they use to find prey and communicate with each other (\cite{maciver2001prey}, \cite{zupanc2006electric}). The different use cases of this electric organ discharge (EOD) come with the necessity to detect a wide range of different amplitude modulations (AMs). Electrolocation of objects in the surrounding water like small prey or rocks causes small low frequency AMs \citep{babineau2007spatial}. At the same time other electric fish can cause stronger and higher frequency AMs through interference between the electric fields and their communication signals like chirps, short increases in their EOD frequency \citep{zupanc2006electric}. This means that the electroreceptors need to be able to encode a wide range of changes in EOD amplitude, in speed as well as strength.
|
||||
The EOD and its AMs are encoded by electroreceptor organs in the skin. \lepto have two kinds of tuberous electrosensory organs: the T and P type units \citep{scheich1973coding}. The T units (time coder) are strongly phase locked to the EOD and fire regularly once every EOD period. They encode the phase of the EOD in their spike timing. The P units (probability coders) on the other hand do not fire every EOD period. Instead they fire irregularly with a certain probability that depends on the EOD amplitude. That way they encode information about the EOD amplitude in their firing probability \citep{scheich1973coding}. An example of the firing behavior of a P unit is shown in figure~\ref{fig:p_unit_example}.
|
||||
When the fish's EOD is unperturbed P units fire every few EOD periods but they have a certain variability in their firing (fig. \ref{fig:p_unit_example} B) and show negative correlation between successive interspike intervals (ISIs)(fig. \ref{fig:p_unit_example} C). When presented with a step increase in EOD amplitude P units show strong adaption behavior. After a strong increase in firing rate reacting to the onset of the step, the firing rate quickly decays back to a steady state (fig. \ref{fig:p_unit_example} D). When using different sizes of steps both the onset and the steady state response scale with the size and direction of the step (fig. \ref{fig:p_unit_example} E).
|
||||
@ -153,8 +156,8 @@ When the fish's EOD is unperturbed P units fire every few EOD periods but they h
|
||||
%
|
||||
|
||||
\begin{figure}[H]
|
||||
{\caption{\label{fig:p_unit_example} Example behavior of a p-unit with a high baseline firing rate and an EODf of 744\,Hz. \textbf{A}: A 100\,ms voltage trace of the baseline recording with spikes marked by the black lines. \textbf{B}: The histogram of the ISI with the x-axis in EOD periods, showing the phase locking of the firing. \textbf{C}: The serial correlation of the ISI showing a negative correlation for lags one and two. \textbf{D}: The response of the p-unit to a step increase in EOD amplitude. In \todo{color} the averaged frequency over 10 trials. The p-unit strongly reacts to the onset of the stimulus but very quickly adapts to the new stimulus and then shows a steady state response. \textbf{E}: The fi-curve visualizes the onset and steady-state response of the neuron for different step sizes (contrasts). In \todo{color} the detected onset responses and the fitted Boltzmann, in \todo{color} the detected steady-state response and the linear fit.}}
|
||||
{\includegraphics[width=1\textwidth]{figures/p_unit_example.png}}
|
||||
{\caption{\label{fig:p_unit_example} Example behavior of a p-unit with a high baseline firing rate and an EODf of 744\,Hz. \textbf{A}: A 100\,ms voltage trace of the baseline recording with spikes marked by the black lines. \textbf{B}: The histogram of the ISI with the x-axis in EOD periods, showing the phase locking of the firing. \textbf{C}: The serial correlation of the ISI showing a negative correlation for lags one and two. \textbf{D}: The response of the p-unit to a step increase in EOD amplitude. In \todo{color} the averaged frequency over 10 trials. The P-unit strongly reacts to the onset of the stimulus but very quickly adapts to the new stimulus and then shows a steady state response. \textbf{E}: The f-I curve visualizes the onset and steady-state response of the neuron for different step sizes (contrasts). In \todo{color} the detected onset responses and the fitted Boltzmann, in \todo{color} the detected steady-state response and the linear fit.}}
|
||||
{\includegraphics[width=1\textwidth]{figures/p_unit_example.pdf}}
|
||||
\end{figure}
|
||||
\newpage
|
||||
|
||||
@ -162,10 +165,10 @@ When the fish's EOD is unperturbed P units fire every few EOD periods but they h
|
||||
|
||||
\begin{figure}[H]
|
||||
{\caption{\label{fig:heterogeneity_isi_hist} Variability in spiking behavior between P units under baseline conditions. \textbf{A--C} 100\,ms of cell membrane voltage and \textbf{D--F} interspike interval histograms, each for three different cells. \textbf{A} and \textbf{D}: A non bursting cell with a baseline firing rate of 133\,Hz (EODf: 806\,Hz), \textbf{B} and \textbf{E}: A cell with some bursts and a baseline firing rate of 235\,Hz (EODf: 682\,Hz) and \textbf{C} and \textbf{F}: A strongly bursting cell with longer breaks between bursts. Baseline rate of 153\,Hz and EODf of 670\,Hz }}
|
||||
{\includegraphics[width=\textwidth]{figures/isi_hist_heterogeneity.png}}
|
||||
{\includegraphics[width=\textwidth]{figures/isi_hist_heterogeneity.pdf}}
|
||||
\end{figure}
|
||||
|
||||
\todo{heterogeneity more}
|
||||
\todo{heterogeneity more, bursts important for coding in other systems}
|
||||
|
||||
Furthermore, P units show a pronounced heterogeneity in their spiking behavior (fig.~\ref{fig:heterogeneity_isi_hist}, \cite{gussin2007limits}). This is an important aspect one needs to consider when trying to understand what and how information is encoded in the spike trains of the neuron. A single neuron might be an independent unit from all other neurons but through different tuning curves a full picture of the stimulus can be encoded in the population even when a single neuron only encodes a small feature space. This type of encoding is ubiquitous in the nervous system and is used in the visual sense for color vision, PLUS MORE... \todo{refs}. Even though P units were already modelled, based on a simple leaky integrate-and-fire neuron \citep{chacron2001simple} and conductance based \citep{kashimori1996model}, and are well studied (\cite{bastian1981electrolocation}, \cite{ratnam2000nonrenewal}, \cite{benda2005spike}), there is up to this point no model that tries to cover the full breadth of heterogeneity of the P unit population. Having such a model could help shed light on the population code used in the electric sense, allow researchers to gain a better picture of how higher brain areas might process the information and get one step closer to the full path between sensory input and behavioral output.
|
||||
|
||||
@ -278,7 +281,7 @@ Finally the ISI-histogram was calculated within a range of 0--50\,ms and a bin s
|
||||
% trim={<left> <lower> <right> <upper>}
|
||||
%\parbox[c][0mm][t]{80mm}{\hspace{-10.5mm}\large\sffamily A\hspace{50.5mm} \large\sffamily B}
|
||||
%\raisebox{70mm}[10]{\large\sffamily A)}
|
||||
\includegraphics[trim={10mm 5mm 10mm 5mm}, scale=0.8]{figures/f_point_detection.png}
|
||||
\includegraphics[trim={10mm 5mm 10mm 5mm}, scale=0.8]{figures/f_point_detection.pdf}
|
||||
|
||||
\caption{\label{fig:f_point_detection} \textbf{A}: The averaged response of a cell to a step in EOD amplitude. The step of the stimulus is marked by the black bar. The detected values for the onset ($f_0$) and steady-state ($f_{\infty}$) response are marked in \todo{color}. $f_0$ is detected as the highest deviation from the mean frequency before the stimulus while $f_{\infty}$ is the average frequency in the 0.1\,s time window, 25\,ms before the end of the stimulus. \textbf{B}: The fi-curve visualizes the onset and steady-state response of the neuron for different stimulus contrasts. In \todo{color} the detected onset responses and the fitted Boltzmann, in \todo{color} the detected steady-state response and the linear fit.}
|
||||
\end{figure}
|
||||
@ -361,7 +364,7 @@ Together this results in the dynamics seen in equations \ref{eq:full_model_dynam
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/stimulus_development.pdf}
|
||||
\caption{\label{fig:stim_development} The stimulus modification in the model. The fish's EOD is simulated with a sin wave. It is rectified at the synapse and then further low-pass filtered in the dendrite.}
|
||||
\caption{\label{fig:stim_development} The stimulus modification in the model. The fish's EOD is simulated with a sin wave. It is rectified at the synapse and then low-pass filtered in the dendrite.}
|
||||
\end{figure}
|
||||
|
||||
|
||||
@ -398,23 +401,23 @@ The error function was constructed from both the baseline characteristics: VS, C
|
||||
The error of the VS, CV, SC, and burstiness was calculated as the scaled absolute difference:
|
||||
|
||||
\begin{equation}
|
||||
err_i = |x^M_i - x^C_i| * c_i
|
||||
err_i = c_i |x^M_i - x^C_i|
|
||||
\end{equation}
|
||||
with $x^M_i$ the model value for the characteristic $i$, $x^C_i$ the corresponding cell value and $c_i$ a scaling factor that is the same for all cells but different between characteristics. The scaling factor was used to make all errors a similar size. They are listed in table \ref{tab:scaling_factors}.
|
||||
|
||||
The error for the slope of the $f_{inf}$ fit was the scaled relative difference:
|
||||
|
||||
\begin{equation}
|
||||
err_i = |1 - ((x^M_i - x^C_i) / x^C_i)| * c_i
|
||||
err_i = c_i|1 - ((x^M_i - x^C_i) / x^C_i)|
|
||||
\end{equation}
|
||||
|
||||
For the $f_{inf}$ and $f_0$ responses the average scaled difference of all contrasts was taken and finally the error for the ISI-histogram and the step-response was calculated with a mean-square error. For the histogram it was computed over all bins, while for the step response only the first 50\,ms after stimulus onset were used as an error for the adaption time constant.
|
||||
|
||||
\begin{equation}
|
||||
err_i = (\langle (x^M_i - x^C_i)²\rangle) * c_i
|
||||
err_i = c_i (\langle (x^M_i - x^C_i)²\rangle)
|
||||
\end{equation}
|
||||
|
||||
All errors were then summed up for the full error. The fits were done with the Nelder-Mead algorithm of scipy minimize \citep{gao2012implementing}. All model variables listed above in table \ref{tab:parameter_explanation} were fit at the same time except for $I_{Bias}$. $I_{Bias}$ was determined before each fitting iteration and set to a value giving the correct baseline frequency.
|
||||
All errors were then summed up for the full error. The fits were done with the Nelder-Mead algorithm of scipy minimize \citep{gao2012implementing}. All model variables listed above in table \ref{tab:parameter_explanation} were fit at the same time except for $I_{Bias}$. $I_{Bias}$ was determined before each fitting iteration and set to a value giving the correct baseline frequency within 2\,Hz.
|
||||
|
||||
|
||||
\begin{table}[H]
|
||||
@ -433,55 +436,127 @@ All errors were then summed up for the full error. The fits were done with the N
|
||||
\end{tabular}
|
||||
\caption{\label{tab:scaling_factors} Scaling factors for fitting errors.}
|
||||
\end{table}
|
||||
% errors
|
||||
%[error_vs, error_sc, error_cv, error_hist, error_bursty, error_f_inf, error_f_inf_slope, error_f_zero, error_f_zero_slope_at_straight, error_f0_curve]
|
||||
|
||||
% differences for baseline characteristics
|
||||
% rms of ISI bins, f_0 response frequency trace
|
||||
% abs diff of vs, sc, cv, burstiness
|
||||
|
||||
% fi-curve difference between f_inf slope , mean relative error between f_points
|
||||
|
||||
% baseline frequency "set" with I_Bias
|
||||
\todo{explain removal of bad models! Filter criteria how many filtered etc.}
|
||||
|
||||
\todo{explain how to draw random models from fitted model parameters! here probably?}
|
||||
|
||||
\section{Results}
|
||||
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.5]{figures/fit_baseline_comparison.png}
|
||||
\caption{\label{fig:comp_baseline} }
|
||||
\centering
|
||||
\includegraphics[width=\textwidth]{figures/dend_ref_effect.pdf}
|
||||
\caption{\label{fig:dend_ref_effect} Effect of the addition of $\tau_{dend}$ and $t_{ref}$ to the model. Rows \textbf{1--3} different cells: not bursting, bursting or strongly bursting in \todo{color} the cell and in \todo{color} the model. The fits in each column (\textbf{A--C}) were done with different parameters to show their effect on the model. Column \textbf{A}: The cells were fit without $\tau_{dend}$. This causes the model to be unable to fit the vector strength correctly and the models are too strongly locked to the EOD phase. \textbf{B}: The models were fit without $t_{ref}$, so the model cannot match the burstiness of the cell. This is visible in the missing high peak at the first EOD period. In column \textbf{C} the model was fit with all parameters. It can match the full spiking behavior of the cells for the different strengths of bursting.}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.5]{figures/fit_adaption_comparison.png}
|
||||
\caption{\label{fig:comp_adaption} Excluded 8 value pairs from Onset Slope as they had slopes higher than 30000}
|
||||
\includegraphics[scale=0.6]{figures/example_bad_isi_hist_fits.pdf}
|
||||
\caption{\label{fig:example_bad_isi_fits} \todo{Add pointer arrows in plot?} Problem cases in which the model ISI histogram wasn't fit correctly to the cell. \textbf{A--C} ISI histograms of different cells (\todo{color}) and their corresponding model (\todo{color}). \textbf{A}: Strongly bursting cell with large pauses between bursts, where the model doesn't manage to reproduce the long pauses. \textbf{B}: Bursting cell with a high probability of firing in the first and second following EOD period. Here the model can't reproduce the high probability on the second following EOD period. \textbf{C}: Cell with a higher order structure \todo{??} in its ISI histogram. It only has a high firing probability every second EOD period which is also not represented in the model.}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.5]{figures/fit_burstiness_comparison.png}
|
||||
\caption{\label{fig:comp_burstiness} }
|
||||
\includegraphics[scale=0.6]{figures/example_good_fi_fits.pdf}
|
||||
\caption{\label{fig:example_good_fi_fits} Good fit examples of the f-I curve. \textbf{A--C}: Three cells with different response patterns which are all well matched by their models. \todo{Color explanation} }
|
||||
\end{figure}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/example_bad_fi_fits.pdf}
|
||||
\caption{\label{fig:example_bad_fi_fits} Examples of bad fits of the f-I curve. \textbf{A--C}: Different cells. \todo{Color explanation}. \textbf{A}: model that did not fit the negative contrast responses of the $f_0$ response well but was successful in the positive half. It also was not successful in the $f_\infty$ response and shows a wrong slope. \textbf{B}: A fit that was successful for the lower $f_0$ response but overshoots the limit of the cell and fires too fast for high positive contrasts. It also has a slightly wrong $f_\infty$ response slope.}
|
||||
\end{figure}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/behaviour_correlations.png}
|
||||
\caption{\label{fig:behavior_correlations} Additional $f_{\infty}$ correlation hängt vermutlich mit der bursty-baseline freq correlation zusammen. Je höher die feuerrate umso höher chance zu bursten und bursts haben eine stärker negative SC.}
|
||||
\includegraphics[scale=0.5]{figures/fit_baseline_comparison.pdf}
|
||||
\caption{\label{fig:comp_baseline} Comparison of the cell behavior and the behavior of the corresponding fit: \textbf{A} baseline firing rate, \textbf{B} vector strength (VS) and \textbf{C} serial correlation (SC). The histograms compare the distributions of the cell (\todo{color}) and the model (\todo{color}). Below, \todo{what is this plot called} in grey the line on which cell and model values are equal. \textbf{A}: The baseline firing rate of the cell and the model. The base rate agrees near perfectly as it is set to be equal within a margin of 2\,Hz during the fitting process. \textbf{B}: The vector strength agrees well for most cells but if the cells have a vector strength above 0.8 the probability increases for the models to often show a weaker VS than the cell. \textbf{C}: Comparison of the SC with lag 1. Here the models cluster more strongly and don't show quite the same range as the cells do. Models of cells with a strongly negative SC often have a weaker negative SC while the models in the opposite case show too strong negative correlations. In general, the fitting of the SC is a lot more variable than the precise fitting of the VS.}
|
||||
\end{figure}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/parameter_distributions.png}
|
||||
\caption{\label{fig:parameter_distributions} }
|
||||
\includegraphics[scale=0.5]{figures/fit_adaption_comparison.pdf}
|
||||
\caption{\label{fig:comp_adaption} Comparison of the cell behavior and the behavior of the corresponding fit: \textbf{A} steady state $f_\infty$ and \textbf{B} onset $f_0$ response slope. In grey the line on which cell and model values are equal. Excluded \todo{how many} value pairs from $f_0$ slope as they had slopes higher than \todo{}. In \textbf{A} the $f_\infty$ slope pairs. Cells and models show good agreement with a low scattering in both directions. \textbf{B} The $f_0$ values show a higher spread and for steeper slopes the models more often have too flat slopes.}
|
||||
\end{figure}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/parameter_correlations.png}
|
||||
\caption{\label{fig:parameter_correlations} }
|
||||
\includegraphics[scale=0.5]{figures/fit_burstiness_comparison.pdf}
|
||||
\caption{\label{fig:comp_burstiness} Comparison of the cell behavior and the behavior of the corresponding fit: \textbf{A} burstiness, \textbf{B} coefficient of variation (CV). \textbf{A}: The model values for the burstiness agree well with the values of the cells but again show a tendency that the higher the value of the cell, the more the model value falls below it. \textbf{B}: The CV also shows the problem of the burstiness but the values drift apart more slowly starting around 0.6.}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/behaviour_correlations.pdf}
|
||||
\caption{\label{fig:behavior_correlations} Significant correlations between the behavior variables in the data and the fitted models $p < 0.05$ (Bonferroni corrected). The models contain all the same correlations as the data except for the correlation between the baseline firing rate and the VS, but they also show four additional correlations not seen within the cells: bursting - base rate, SC - $f_\infty$ slope, $f_0$ slope - base rate, SC - base rate.}
|
||||
\end{figure}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/parameter_distributions.pdf}
|
||||
\caption{\label{fig:parameter_distributions} Distributions of all eight model parameters. \textbf{A}: input scaling $\alpha$, \textbf{B}: Bias current $I_{Bias}$, \textbf{C}: membrane time constant $\tau_m$, \textbf{D}: noise strength $\sqrt{2D}$, \textbf{E}: adaption time constant $\tau_A$, \textbf{F}: adaption strength $\Delta_A$, \textbf{G}: time constant of the dendritic low pass filter $\tau_{dend}$, \textbf{H}: refractory period $t_{ref}$}
|
||||
\end{figure}
|
||||
|
||||
\todo{image with rescaled time parameters to 800\,Hz, add to above figure?}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/parameter_correlations.pdf}
|
||||
\caption{\label{fig:parameter_correlations} Significant correlations between model parameters $p < 0.05$ (Bonferroni corrected).}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/parameter_distribution_with_gauss_fits.pdf}
|
||||
\caption{\label{fig:parameter_dist_with_gauss_fits} Gauss fits used as approximations for the parameter distribution. In black the Gaussian fit used. All parameters except for $t_{ref}$ and $I_{Bias}$ were log transformed to get a more Gaussian distribution. \textbf{A}: Log input scaling $\alpha$, \textbf{B}: bias current $I_{Bias}$, \textbf{C}: Log membrane time constant $\tau_m$, \textbf{D}: Log noise strength $\sqrt{2D}$, \textbf{E}: Log adaption time constant $\tau_A$, \textbf{F}: Log adaption strength $\Delta_A$, \textbf{G}: Log time constant of the dendritic low pass filter $\tau_{dend}$, \textbf{H}: refractory period $t_{ref}$}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/compare_parameter_dist_random_models.pdf}
|
||||
\caption{\label{fig:drawn_parameter_dist} Parameter distribution between randomly drawn models \todo{color}orange and the fitted ones blue\todo{color}. \textbf{A}: input scaling $\alpha$, \textbf{B}: Bias current $I_{Bias}$, \textbf{C}: membrane time constant $\tau_m$, \textbf{D}: noise strength $\sqrt{2D}$, \textbf{E}: adaption time constant $\tau_A$, \textbf{F}: adaption strength $\Delta_A$, \textbf{G}: time constant of the dendritic low pass filter $\tau_{dend}$, \textbf{H}: refractory period $t_{ref}$}
|
||||
\end{figure}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/rand_parameter_correlations_comparison.pdf}
|
||||
\caption{\label{fig:drawn_parameter_corr} Parameter correlation comparison between the fitted parameters and the ones drawn from the multivariant normal distribution. There are four correlations that do not agree between the two, but those are inconsistent in the drawn models (see discussion).}
|
||||
\end{figure}
|
||||
|
||||
\begin{figure}[H]
|
||||
\includegraphics[scale=0.6]{figures/random_models_behaviour_dist.pdf}
|
||||
\caption{\label{fig:drawn_behavior_dist} Behavior distribution of the randomly drawn models \todo{color}(orange) and the original cells \todo{color}(blue). The distributions of the seven behavior characteristics agree well for the most part, but especially the vector strength (VS) in \textbf{G} is offset from the distribution seen in the cells.}
|
||||
\end{figure}
|
||||
|
||||
|
||||
\section{Discussion}
|
||||
|
||||
|
||||
\subsection*{Fitting quality}
|
||||
|
||||
\begin{itemize}
|
||||
\item many bursty cells not well fitted
|
||||
\item bursties might need more data to be well defined (high variance in rate traces even with a mean of 7-10 trials)
|
||||
\item different fitting routine / weights might also improve consistency of the fitting
|
||||
\item Model CAN fit most types of cells but some with weird structure in ISI hist are not possible
|
||||
\end{itemize}
|
||||
|
||||
\subsection*{Behavior correlations}
|
||||
|
||||
\begin{itemize}
|
||||
\item Data correlation between base rate and VS unexpected and missing in the model.
|
||||
\item data correlation between base rate and $f_0$ expected as $f_0$ ~ $f_\infty$ and $f_\infty$ ~ base rate (appeared in the model)
|
||||
|
||||
\item Model correlation between base rate and burstiness probably error because of the problems to "fit the gap" between burst and other ISIs. (appeared in model)
|
||||
\item above probably causes base rate - SC correlation and this causes the additional SC - $f_\infty$ correlation over $f_\infty$ ~ base rate
|
||||
|
||||
\end{itemize}
|
||||
|
||||
\subsection*{random models}
|
||||
|
||||
\begin{itemize}
|
||||
\item Gauss fits are in some cases questionable at best
|
||||
\item resulting parameter distributions as such also not really that good
|
||||
|
||||
\item Parameter correlations have 4--5 correlations whose significance is strongly inconsistent/random even when using 1000 drawn models (while compensating for higher power): thus acceptable result??
|
||||
\item behavior distribution not perfect by any means but quite alright except for the VS. Which definitely needs improvement! Maybe possible with more tweaking of the gauss fits.
|
||||
|
||||
\end{itemize}
|
||||
|
||||
\newpage
|
||||
\bibliography{citations}
|
||||
|
@ -264,5 +264,15 @@
|
||||
year={1973}
|
||||
}
|
||||
|
||||
@article{barlow1961possible,
|
||||
title={Possible principles underlying the transformation of sensory messages},
|
||||
author={Barlow, Horace B and others},
|
||||
journal={Sensory communication},
|
||||
volume={1},
|
||||
pages={217--234},
|
||||
year={1961}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
BIN
thesis/figures/behaviour_correlations.pdf
Normal file
Before Width: | Height: | Size: 32 KiB |
BIN
thesis/figures/compare_parameter_dist_random_models.pdf
Normal file
BIN
thesis/figures/dend_ref_effect.pdf
Normal file
BIN
thesis/figures/example_bad_fi_fits.pdf
Normal file
BIN
thesis/figures/example_bad_isi_hist_fits.pdf
Normal file
BIN
thesis/figures/example_good_fi_fits.pdf
Normal file
BIN
thesis/figures/example_good_isi_hist_fits.pdf
Normal file
Before Width: | Height: | Size: 17 KiB |
BIN
thesis/figures/f_point_detection.pdf
Normal file
BIN
thesis/figures/fit_adaption_comparison.pdf
Normal file
Before Width: | Height: | Size: 29 KiB |
BIN
thesis/figures/fit_baseline_comparison.pdf
Normal file
Before Width: | Height: | Size: 39 KiB |
BIN
thesis/figures/fit_burstiness_comparison.pdf
Normal file
Before Width: | Height: | Size: 25 KiB |
BIN
thesis/figures/isi_hist_heterogeneity.pdf
Normal file
BIN
thesis/figures/p_unit_example.pdf
Normal file
BIN
thesis/figures/parameter_correlations.pdf
Normal file
Before Width: | Height: | Size: 15 KiB |
BIN
thesis/figures/parameter_distribution_with_gauss_fits.pdf
Normal file
BIN
thesis/figures/parameter_distributions.pdf
Normal file
Before Width: | Height: | Size: 22 KiB |
BIN
thesis/figures/png_versions/behaviour_correlations.png
Normal file
After Width: | Height: | Size: 41 KiB |
After Width: | Height: | Size: 21 KiB |
BIN
thesis/figures/png_versions/dend_ref_effect.png
Normal file
After Width: | Height: | Size: 25 KiB |
BIN
thesis/figures/png_versions/example_bad_fi_fits.png
Normal file
After Width: | Height: | Size: 37 KiB |
BIN
thesis/figures/png_versions/example_bad_isi_hist_fits.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
thesis/figures/png_versions/example_good_fi_fits.png
Normal file
After Width: | Height: | Size: 35 KiB |
BIN
thesis/figures/png_versions/example_good_isi_hist_fits.png
Normal file
After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 28 KiB After Width: | Height: | Size: 28 KiB |
BIN
thesis/figures/png_versions/fit_adaption_comparison.png
Normal file
After Width: | Height: | Size: 38 KiB |
BIN
thesis/figures/png_versions/fit_baseline_comparison.png
Normal file
After Width: | Height: | Size: 54 KiB |
BIN
thesis/figures/png_versions/fit_burstiness_comparison.png
Normal file
After Width: | Height: | Size: 35 KiB |
Before Width: | Height: | Size: 68 KiB After Width: | Height: | Size: 68 KiB |
Before Width: | Height: | Size: 50 KiB After Width: | Height: | Size: 50 KiB |
BIN
thesis/figures/png_versions/parameter_correlations.png
Normal file
After Width: | Height: | Size: 30 KiB |
After Width: | Height: | Size: 43 KiB |
BIN
thesis/figures/png_versions/parameter_distributions.png
Normal file
After Width: | Height: | Size: 25 KiB |
After Width: | Height: | Size: 42 KiB |
After Width: | Height: | Size: 30 KiB |
BIN
thesis/figures/png_versions/random_models_behaviour_dist.png
Normal file
After Width: | Height: | Size: 24 KiB |
After Width: | Height: | Size: 26 KiB |