add functions to read SAM behaviour of data cells
parent 55516b64d2
commit 539d8605bd

CellData.py  44
CellData.py
@@ -41,6 +41,15 @@ class CellData:
         self.time_axes = None
         # self.metadata = None

+        self.sam_spiketimes = None
+        self.sam_contrasts = None
+        self.sam_delta_fs = None
+        self.sam_eod_freqs = None
+        self.sam_durations = None
+        self.sam_trans_amplitudes = None
+
+
+
         self.sampling_interval = self.parser.get_sampling_interval()
         self.recording_times = self.parser.get_recording_times()

@@ -100,6 +109,30 @@ class CellData:

         return contrast

+    def get_sam_spiketimes(self):
+        self.__read_sam_info__()
+        return self.sam_spiketimes
+
+    def get_sam_contrasts(self):
+        self.__read_sam_info__()
+        return self.sam_contrasts
+
+    def get_sam_delta_frequencies(self):
+        self.__read_sam_info__()
+        return self.sam_delta_fs
+
+    def get_sam_durations(self):
+        self.__read_sam_info__()
+        return self.sam_durations
+
+    def get_sam_eod_frequencies(self):
+        self.__read_sam_info__()
+        return self.sam_eod_freqs
+
+    def get_sam_trans_amplitudes(self):
+        self.__read_sam_info__()
+        return self.sam_trans_amplitudes
+
     def get_mean_fi_curve_isi_frequencies(self):
         if self.mean_isi_frequencies is None:
             self.time_axes, self.mean_isi_frequencies = hf.all_calculate_mean_isi_frequency_traces(
@@ -172,6 +205,17 @@ class CellData:
             self.fi_intensities, self.fi_spiketimes, self.fi_trans_amplitudes = hf.merge_similar_intensities(
                 intensities, spiketimes, trans_amplitudes)

+    def __read_sam_info__(self):
+        if self.sam_spiketimes is None:
+            spiketimes, contrasts, delta_fs, eod_freqs, durations, trans_amplitudes = self.parser.get_sam_info()
+
+            self.sam_spiketimes = spiketimes
+            self.sam_contrasts = contrasts
+            self.sam_delta_fs = delta_fs
+            self.sam_eod_freqs = eod_freqs
+            self.sam_durations = durations
+            self.sam_trans_amplitudes = trans_amplitudes
+
     # def get_metadata(self):
     #     self.__read_metadata__()
     #     return self.metadata
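A minimal usage sketch of the new accessors; how a CellData object is constructed and the directory path below are assumptions not shown in this diff, only the getter names and the lazy caching come from the commit:

# Hypothetical usage; CellData's constructor is not part of this commit.
cell = CellData("data/some_recording")

spiketimes = cell.get_sam_spiketimes()       # per-stimulus lists of spike-time arrays
contrasts = cell.get_sam_contrasts()         # contrast in percent
delta_fs = cell.get_sam_delta_frequencies()  # stimulus delta f
durations = cell.get_sam_durations()         # stimulus duration in seconds
eod_freqs = cell.get_sam_eod_frequencies()   # EOD frequency in Hz

# Every getter goes through __read_sam_info__(), which calls
# parser.get_sam_info() once and caches the six result lists.
for contrast, delta_f, spikes in zip(contrasts, delta_fs, spiketimes):
    print("contrast {}%, delta f {}: {} trials".format(contrast, delta_f, len(spikes)))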
@@ -13,8 +13,8 @@ MODEL = 2

 class AbstractParser:

-    def cell_get_metadata(self):
-        raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")
+    # def cell_get_metadata(self):
+    #     raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")

     def get_baseline_traces(self):
         raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")
@@ -31,6 +31,9 @@ class AbstractParser:
     def get_fi_frequency_traces(self):
         raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")

+    def get_sam_info(self):
+        raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")
+
     def get_sampling_interval(self):
         raise NotImplementedError("NOT YET OVERRIDDEN FROM ABSTRACT CLASS")

@@ -53,6 +56,7 @@ class DatParser(AbstractParser):
         self.base_path = dir_path
         self.fi_file = self.base_path + "/fispikes1.dat"
         self.baseline_file = self.base_path + "/basespikes1.dat"
+        self.sam_file = self.base_path + "/samallspikes1.dat"
         self.stimuli_file = self.base_path + "/stimuli.dat"
         self.__test_data_file_existence__()

@@ -156,6 +160,58 @@ class DatParser(AbstractParser):

         return trans_amplitudes, intensities, spiketimes

+    def get_sam_info(self):
+        contrasts = []
+        delta_fs = []
+        spiketimes = []
+        durations = []
+        eod_freqs = []
+        trans_amplitudes = []
+        index = -1
+        for metadata, key, data in Dl.iload(self.sam_file):
+            factor = 1
+            if key[0][0] == 'time':
+                if key[1][0] == 'ms':
+                    factor = 1/1000
+                elif key[1][0] == 's':
+                    factor = 1
+                else:
+                    print("DataParser Dat: Unknown time notation:", key[1][0])
+
+            if len(metadata) != 0:
+                stimulus_dict = metadata[0]["----- Stimulus -------------------------------------------------------"]
+                analysis_dict = metadata[0]["----- Analysis -------------------------------------------------------"]
+                eod_freq = float(metadata[0]["EOD rate"][:-2])  # in Hz
+                trans_amplitude = metadata[0]["trans. amplitude"][:-2]  # in mV
+
+                duration = float(stimulus_dict["duration"][:-2]) * factor  # normally saved in ms, so convert with the factor
+                contrast = float(stimulus_dict["contrast"][:-1])  # in percent
+                delta_f = float(stimulus_dict["deltaf"][:-2])
+
+                # delta_f = metadata[0]["true deltaf"]
+                # contrast = metadata[0]["true contrast"]
+
+                contrasts.append(contrast)
+                delta_fs.append(delta_f)
+                durations.append(duration)
+                eod_freqs.append(eod_freq)
+                trans_amplitudes.append(trans_amplitude)
+                spiketimes.append([])
+                index += 1
+
+            if data.shape[1] != 1:
+                raise RuntimeError("DatParser:get_sam_info():\n read data has more than one dimension!")
+
+            spike_time_data = data[:, 0] * factor  # saved in ms, so use the factor to convert it
+            if len(spike_time_data) < 10:
+                continue
+            if spike_time_data[-1] < 0.1:
+                print("# ignoring spike train that ends before one tenth of a second.")
+                continue
+            spiketimes[index].append(spike_time_data)
+
+        return spiketimes, contrasts, delta_fs, eod_freqs, durations, trans_amplitudes
+
     def __get_traces__(self, repro):
         time_traces = []
         v1_traces = []
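The unit handling above relies on the key tuples yielded by Dl.iload: the code checks key[0][0] for the column name ('time') and key[1][0] for its unit. A standalone sketch of the same conversion, using a hypothetical helper name that is not part of the commit:

def time_factor(unit):
    # Hypothetical helper mirroring the factor logic in get_sam_info:
    # values stored in milliseconds are scaled to seconds.
    if unit == 'ms':
        return 1 / 1000
    if unit == 's':
        return 1
    print("DataParser Dat: Unknown time notation:", unit)
    return 1

# Example: a spike recorded at 2500 ms becomes 2500 * time_factor('ms') = 2.5 s.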
@@ -242,7 +298,8 @@ class DatParser(AbstractParser):
             raise RuntimeError(self.stimuli_file + " file doesn't exist!")
         if not exists(self.fi_file):
             raise RuntimeError(self.fi_file + " file doesn't exist!")
+        if not exists(self.sam_file):
+            raise RuntimeError(self.sam_file + " file doesn't exist!")


 # MODEL PARSER: ------------------------------
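For reference, a sketch of calling the new parser method directly; the data directory is made up and the constructor argument is inferred from the __init__ hunk above:

# Hypothetical direct use of DatParser.get_sam_info().
parser = DatParser("data/some_recording")
spiketimes, contrasts, delta_fs, eod_freqs, durations, trans_amplitudes = parser.get_sam_info()

# One entry per SAM presentation; spiketimes[i] holds the trials that passed
# the length filters, as arrays of spike times in seconds.
for i, delta_f in enumerate(delta_fs):
    print("presentation {}: delta f {}, {} trials kept".format(i, delta_f, len(spiketimes[i])))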