commit 804f94ad0b (parent 40bb5ddd2b)

    stuff
@@ -94,6 +94,9 @@ class CellData:
     def get_cell_name(self):
         return os.path.basename(self.data_path)
 
+    def has_sam_recordings(self):
+        return self.parser.has_sam_recordings()
+
     def get_baseline_length(self):
         return self.parser.get_baseline_length()
 
@@ -19,6 +19,9 @@ class AbstractParser:
     def get_baseline_length(self):
         raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")
 
+    def has_sam_recordings(self):
+        raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")
+
     def get_fi_curve_contrasts(self):
         raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")
 
@@ -70,6 +73,9 @@ class DatParser(AbstractParser):
         self.fi_recording_times = []
         self.sampling_interval = -1
 
+    def has_sam_recordings(self):
+        return exists(self.sam_file)
+
     def get_baseline_length(self):
         lengths = []
         for metadata, key, data in Dl.iload(self.baseline_file):
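Note: the three hunks above thread a single capability check through the stack: CellData.has_sam_recordings() delegates to its parser, AbstractParser declares the method as abstract, and DatParser answers by testing whether the SAM data file exists on disk. A minimal usage sketch of the call chain as the diff defines it (the cell path and caller name are placeholders, not from this commit):

    cell_data = CellData("invivo_data/2011-10-25-ad-invivo-1/")  # hypothetical path
    if cell_data.has_sam_recordings():  # -> DatParser.has_sam_recordings() -> exists(self.sam_file)
        analyze_sam(cell_data)          # hypothetical caller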
lines_of_code.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+import os
+
+
+def count_lines_folder(folder):
+    lines_of_code = 0
+    files = 0
+    for file in os.listdir(folder):
+
+        if os.path.isdir(file):
+            continue
+        if not file.endswith(".py"):
+            continue
+        # print(file)
+        files += 1
+        with open(os.path.join(folder, file)) as file:
+            lines_of_code += len(file.readlines())
+    return lines_of_code, files
+
+
+total_lines = 0
+total_files = 0
+
+folders = [".", "tests/", "models/", "introduction/", "stimuli/"]
+
+for folder in folders:
+    lines, files = count_lines_folder(folder)
+    print(folder, files, lines)
+    total_lines += lines
+    total_files += files
+
+print("Total lines of code:", total_lines)
+print("Total files with code:", total_files)
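Note: two quirks in count_lines_folder() above. os.path.isdir(file) tests the bare filename against the current working directory rather than against folder, so subdirectories of the listed folders are in practice only skipped by the ".py" suffix filter; and the with statement rebinds the loop variable file to the open file handle. A sketch of the safer form (an editor's suggestion, not part of this commit):

    for name in os.listdir(folder):
        path = os.path.join(folder, name)
        if os.path.isdir(path) or not name.endswith(".py"):
            continue  # skip directories inside `folder` and non-Python files
        with open(path) as f:
            lines_of_code += len(f.readlines())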
@@ -9,10 +9,13 @@ import helperFunctions as hF
 from CellData import CellData
 from ModelFit import ModelFit, get_best_fit
 import os
+import shutil
 
 
 def main():
-    sam_analysis("results/final_2/2011-10-25-ad-invivo-1/")
+    run_sam_analysis_for_all_cells("results/final_2")
+
+    # sam_analysis("results/final_2/2011-10-25-ad-invivo-1/")
 
     # plot_traces_with_spiketimes()
     # plot_mean_of_cuts()
@@ -27,6 +30,21 @@ def main():
     test_model_response(model, eod_freq, 0.1, np.arange(5, 2500, 5))
 
 
+def run_sam_analysis_for_all_cells(folder):
+    count = 0
+    for item in os.listdir(folder):
+        cell_folder = os.path.join(folder, item)
+        fit = get_best_fit(cell_folder, use_comparable_error=False)
+        cell_data = fit.get_cell_data()
+
+        if cell_data.has_sam_recordings():
+            count += 1
+            # print("Fit quality:", fit.get_fit_routine_error())
+            sam_analysis(cell_folder)
+    print(count)
+
+
+
 def test_model_response(model: LifacNoiseModel, eod_freq, contrast, modulation_frequencies):
 
     stds = []
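Note: run_sam_analysis_for_all_cells() above hands every directory entry to get_best_fit(), but os.listdir() also returns plain files. If the results folder ever contains one, a guard like the following would skip it (a hardening suggestion, not in this commit):

    if not os.path.isdir(cell_folder):  # hypothetical guard before get_best_fit()
        continue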
@@ -182,11 +200,11 @@ def sam_analysis(fit_path):
         # TODO problem of cutting the pdf as in some cases the pdf is shorter than 1 modulation frequency period!
         # length info wrong ? always at least one period?
 
-        if 1/mod_freq > durations[0] / 4:
-            print("skipped mod_freq: {}".format(mod_freq))
-            print("Duration: {} while mod_freq period: {:.2f}".format(durations[0], 1/mod_freq))
-            print("Maybe long enough duration? unique durations:", u_durations)
-            continue
+        # if 1/mod_freq > durations[0] / 4:
+        #     print("skipped mod_freq: {}".format(mod_freq))
+        #     print("Duration: {} while mod_freq period: {:.2f}".format(durations[0], 1/mod_freq))
+        #     print("Maybe long enough duration? unique durations:", u_durations)
+        #     continue
         mfreq_data = {}
         cell_means = []
         model_means = []
@@ -196,24 +214,32 @@ def sam_analysis(fit_path):
         for i in range(len(delta_freqs)):
             if delta_freqs[i] != mod_freq:
                 continue
+            if len(spiketimes[i]) == 0:
+                print("No spiketimes found at index!")
+                continue
             if len(spiketimes[i]) > 1:
                 print("There are more spiketimes in one 'point'! Only the first was used! ")
+
 
             spikes = spiketimes[i][0]
 
             cell_pdf = spiketimes_calculate_pdf(spikes, step_size)
 
-            cell_cuts = cut_pdf_into_periods(cell_pdf, 1/mod_freq, step_size, factor=1.0)
+            cell_cuts = cut_pdf_into_periods(cell_pdf, 1/mod_freq, step_size)
             cell_mean = np.mean(cell_cuts, axis=0)
             cell_means.append(cell_mean)
 
             stimulus = SAM(eod_freq, contrasts[i] / 100, mod_freq)
-            v1, spikes_model = model.simulate(stimulus, durations[i] * 4)
+            v1, spikes_model = model.simulate(stimulus, 10)
             model_pdf = spiketimes_calculate_pdf(spikes_model, step_size)
-            model_cuts = cut_pdf_into_periods(model_pdf, 1/mod_freq, step_size, factor=1.0)
+            model_cuts = cut_pdf_into_periods(model_pdf, 1/mod_freq, step_size)
             model_mean = np.mean(model_cuts, axis=0)
             model_means.append(model_mean)
 
+        min_length = min(min([len(cm) for cm in cell_means]), min([len(mm) for mm in model_means]))
+        for i in range(len(cell_means)):
+            cell_means[i] = cell_means[i][:min_length]
+            model_means[i] = model_means[i][:min_length]
         final_cell_mean = np.mean(cell_means, axis=0)
         final_model_mean = np.mean(model_means, axis=0)
         cell_stds.append(np.std(final_cell_mean))
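Note: the added min_length truncation keeps np.mean(cell_means, axis=0) well defined. With the factor argument dropped, cut_pdf_into_periods() can fall back to returning the whole pdf, and the model is now simulated for a fixed 10 s rather than the cell's duration, so the per-trial means can differ in length; averaging a ragged list of arrays would fail in NumPy. A toy illustration of the truncation idea, not code from the commit:

    import numpy as np
    means = [np.ones(100), np.ones(98), np.ones(99)]          # ragged lengths
    min_length = min(len(m) for m in means)
    final = np.mean([m[:min_length] for m in means], axis=0)  # shape (98,)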
@@ -225,53 +251,53 @@ def sam_analysis(fit_path):
         final_model_mean_phase_corrected = np.roll(final_model_mean, approx_offset)
 
         # PLOT EVERY MOD FREQ
-        fig, axes = plt.subplots(1, 5, figsize=(15, 5), sharex=True)
-        for c in cell_means:
-            axes[0].plot(c, color="grey", alpha=0.2)
-        axes[0].plot(np.mean(cell_means, axis=0), color="black")
-        axes[0].set_title("Cell response")
-        axis_cell = axes[0].axis()
-
-        for m in model_means:
-            axes[1].plot(m, color="grey", alpha=0.2)
-        axes[1].plot(np.mean(model_means, axis=0), color="black")
-        axes[1].set_title("Model response")
-        axis_model = axes[1].axis()
-        ylim_top = max(axis_cell[3], axis_model[3])
-        axes[1].set_ylim(0, ylim_top)
-        axes[0].set_ylim(0, ylim_top)
-        axes[2].set_ylim(0, ylim_top)
-
-        axes[2].plot(final_cell_mean, label="cell")
-        axes[2].plot(final_model_mean, label="model")
-        axes[2].plot(final_model_mean_phase_corrected, label="model p-cor")
-        axes[2].legend()
-        axes[2].set_title("cell-model overlapped")
-        axes[3].plot((final_model_mean - final_cell_mean) / final_cell_mean, label="normal")
-        axes[3].plot((final_model_mean_phase_corrected- final_cell_mean) / final_cell_mean, label="phase cor")
-        axes[3].set_title("rel. error")
-        axes[3].legend()
-        axes[4].plot(final_model_mean - final_cell_mean, label="normal")
-        axes[4].plot(final_model_mean_phase_corrected - final_cell_mean, label="phase cor")
-        axes[4].set_title("abs. error (Hz)")
-        axes[4].legend()
-
-        fig.suptitle("modulation frequency: {}".format(mod_freq))
-
-        # plt.tight_layout()
-        plt.show()
-        plt.close()
+        # fig, axes = plt.subplots(1, 5, figsize=(15, 5), sharex=True)
+        # for c in cell_means:
+        #     axes[0].plot(c, color="grey", alpha=0.2)
+        # axes[0].plot(np.mean(cell_means, axis=0), color="black")
+        # axes[0].set_title("Cell response")
+        # axis_cell = axes[0].axis()
+        #
+        # for m in model_means:
+        #     axes[1].plot(m, color="grey", alpha=0.2)
+        # axes[1].plot(np.mean(model_means, axis=0), color="black")
+        # axes[1].set_title("Model response")
+        # axis_model = axes[1].axis()
+        # ylim_top = max(axis_cell[3], axis_model[3])
+        # axes[1].set_ylim(0, ylim_top)
+        # axes[0].set_ylim(0, ylim_top)
+        # axes[2].set_ylim(0, ylim_top)
+        #
+        # axes[2].plot(final_cell_mean, label="cell")
+        # axes[2].plot(final_model_mean, label="model")
+        # axes[2].plot(final_model_mean_phase_corrected, label="model p-cor")
+        # axes[2].legend()
+        # axes[2].set_title("cell-model overlapped")
+        # axes[3].plot((final_model_mean - final_cell_mean) / final_cell_mean, label="normal")
+        # axes[3].plot((final_model_mean_phase_corrected- final_cell_mean) / final_cell_mean, label="phase cor")
+        # axes[3].set_title("rel. error")
+        # axes[3].legend()
+        # axes[4].plot(final_model_mean - final_cell_mean, label="normal")
+        # axes[4].plot(final_model_mean_phase_corrected - final_cell_mean, label="phase cor")
+        # axes[4].set_title("abs. error (Hz)")
+        # axes[4].legend()
+        #
+        # fig.suptitle("modulation frequency: {}".format(mod_freq))
+        #
+        # # plt.tight_layout()
+        # # plt.show()
+        # plt.close()
 
 
     fig, ax = plt.subplots(1, 1)
 
-    ax.plot(u_delta_freqs, cell_stds, label="cell stds")
-    ax.plot(u_delta_freqs, model_stds, label="model stds")
+    ax.plot(u_delta_freqs[-len(cell_stds):], cell_stds, label="cell stds")
+    ax.plot(u_delta_freqs[-len(model_stds):], model_stds, label="model stds")
     ax.set_title("response modulation depth")
     ax.set_xlabel("Modulation frequency")
     ax.set_ylabel("STD")
     ax.legend()
-    plt.show()
+    plt.savefig("figures/sam/" + cell_data.get_cell_name() + ".png")
+    # plt.show()
     plt.close()
 
 
@@ -335,14 +361,16 @@ def approximate_axon_delay_in_idx(cell_data, model):
 
         cell_pdf = spiketimes_calculate_pdf(spikes, step_size)
 
-        cell_cuts = cut_pdf_into_periods(cell_pdf, 1/mod_freq, step_size, factor=1.0)
+        cell_cuts = cut_pdf_into_periods(cell_pdf, 1/mod_freq, step_size)
+        if len(cell_cuts) == 0:
+            continue
         cell_mean = np.mean(cell_cuts, axis=0)
         cell_means.append(cell_mean)
 
         stimulus = SAM(eod_freq, contrasts[i] / 100, mod_freq)
         v1, spikes_model = model.simulate(stimulus, durations[i] * 4)
         model_pdf = spiketimes_calculate_pdf(spikes_model, step_size)
-        model_cuts = cut_pdf_into_periods(model_pdf, 1/mod_freq, step_size, factor=1.0)
+        model_cuts = cut_pdf_into_periods(model_pdf, 1/mod_freq, step_size)
         model_mean = np.mean(model_cuts, axis=0)
         model_means.append(model_mean)
 
@@ -355,7 +383,10 @@ def approximate_axon_delay_in_idx(cell_data, model):
         axon_delays.append(offset)
 
     mean_delay = np.mean(axon_delays)
-    return int(round(mean_delay))
+    if np.isnan(mean_delay):
+        return 0
+    else:
+        return int(round(mean_delay))
 
 
 def generate_pdf(model, stimulus, trials=4, sim_length=3, kernel_width=0.005):
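Note: np.mean([]) evaluates to nan (with a "Mean of empty slice" RuntimeWarning), and int(round(nan)) raises ValueError, so the new branch maps the no-delays case (every mod_freq skipped by the cell_cuts guard above) to a delay of 0:

    import numpy as np
    mean_delay = np.mean([])  # nan, with a RuntimeWarning
    # int(round(mean_delay))  # would raise: cannot convert float NaN to integer
    delay = 0 if np.isnan(mean_delay) else int(round(mean_delay))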
@@ -393,25 +424,27 @@ def spiketimes_calculate_pdf(spikes, step_size, kernel_width=0.001):
     return rate
 
 
-def cut_pdf_into_periods(pdf, period, step_size, factor=1.5):
+def cut_pdf_into_periods(pdf, period, step_size, factor=0.0):
 
     if period < 0:
-        print("cut_pdf_into_periods(): Period was negative! Absolute value taken to continue")
+        # print("cut_pdf_into_periods(): Period was negative! Absolute value taken to continue")
         period = abs(period)
 
-    if period / step_size > len(pdf):
-        return [pdf]
-
-    idx_period_length = int(period/float(step_size))
-    offset_per_step = period/float(step_size) - idx_period_length
-    cut_length = int(period / float(step_size) * factor)
-    num_of_cuts = int(len(pdf) / (idx_period_length+offset_per_step))
+    idx_period_length = int(period / float(step_size))
+    offset_per_step = period / float(step_size) - idx_period_length
+    cut_length = idx_period_length + int(factor * idx_period_length)
+    num_of_cuts = int(len(pdf) / (idx_period_length + offset_per_step))
 
     if len(pdf) - (num_of_cuts * idx_period_length + (num_of_cuts * offset_per_step)) < cut_length - idx_period_length:
         num_of_cuts -= 1
 
-    if num_of_cuts <= 1:
-        raise RuntimeError("Probability density function to short to cut.")
+    if idx_period_length * 0.9 > len(pdf):
+        return []
+        # raise RuntimeError("SAM stimulus is too short for the given mod freq period.")
+
+    if cut_length > len(pdf) or num_of_cuts < 1:
+        return [pdf]
 
     cuts = np.zeros((num_of_cuts-1, cut_length))
     for i in np.arange(1, num_of_cuts, 1):
         offset_correction = int(offset_per_step * i)
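Note: with the new default factor=0.0, cut_length is exactly one period in samples, and cut_pdf_into_periods() now degrades gracefully instead of raising: a pdf much shorter than one period yields [], a pdf shorter than a single cut is returned whole as [pdf], and otherwise roughly len(pdf)/period minus one period-length slices come back (the diff cuts off before the fill loop). A quick sketch of the three regimes, with toy numbers not taken from the commit:

    pdf = np.zeros(1000)                   # 1000 samples at step_size = 0.001 s
    cut_pdf_into_periods(pdf, 0.1, 0.001)  # 100-sample period -> ~9 cuts
    cut_pdf_into_periods(pdf, 2.0, 0.001)  # 2000-sample period, far too long -> []
    cut_pdf_into_periods(pdf, 1.1, 0.001)  # 1100-sample cut, just too long -> [pdf]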