Change the spike-detection approach

This commit is contained in:
a.ott 2020-05-20 15:22:55 +02:00
parent e84ac53657
commit f7ffdcc66f

View File

@ -313,7 +313,7 @@ def calculate_vector_strength_from_v1_trace(times, eods, v1_traces):
print("-----LENGTH OF TIMES = 0")
for recording in range(len(times)):
spiketime_idices = detect_spikes_indices(v1_traces[recording])
spiketime_idices = detect_spike_indices(v1_traces[recording])
rel_spikes, eod_durs = eods_around_spikes(times[recording], eods[recording], spiketime_idices)
relative_spike_times.extend(rel_spikes)
eod_durations.extend(eod_durs)
@ -332,7 +332,7 @@ def calculate_vector_strength_from_spiketimes(time, eod, spiketimes, sampling_in
return __vector_strength__(rel_spikes, eod_durs)
def detect_spikes_indices(v1, split=20, threshold=3):
def detect_spike_indices(v1, split=20, threshold=3.0):
total = len(v1)
all_peaks = []
@ -350,19 +350,86 @@ def detect_spikes_indices(v1, split=20, threshold=3):
return all_peaks
def detect_spiketimes(time, v1, split=20, threshold=3):
all_peak_indicies = detect_spikes_indices(v1, split, threshold)
def detect_spike_indices_automatic_split(v1, min_length=3000, threshold=3.0):
    """Detect spike peak indices in the voltage trace ``v1``.

    The trace is first partitioned into segments of roughly stationary
    amplitude, so the peak threshold can adapt to slow drifts of the
    signal. Within each segment, peaks are detected with a threshold of
    ``threshold`` times that segment's standard deviation.

    :param v1: 1-d voltage trace (indexable numeric sequence).
    :param min_length: minimum number of samples per segment.
    :param threshold: peak threshold in units of the segment's std.
    :return: np.ndarray of peak indices into ``v1``.
    """
    split_start = 0
    step_size = 250
    # relative change of the window min/max that triggers a new segment
    break_threshold = 0.1
    splits = []

    if len(v1) < min_length:
        # trace too short to split: treat it as one segment
        splits = [(0, len(v1))]
    else:
        last_max = max(v1[0:min_length])
        last_min = min(v1[0:min_length])
        idx = min_length
        while idx < len(v1):
            if idx + step_size > len(v1):
                # remaining tail is shorter than one step: close the last segment
                splits.append((split_start, len(v1)))
                break

            # NOTE(review): assumes last_max/last_min are non-zero — a
            # zero-mean flat segment would divide by zero; confirm with data.
            max_similar = abs((max(v1[idx:idx + step_size]) / last_max) - 1) < break_threshold
            min_similar = abs((min(v1[idx:idx + step_size]) / last_min) - 1) < break_threshold

            if not max_similar or not min_similar:
                # amplitude changed: end the segment near a local minimum
                # around idx (offset relative to idx lies in [-20, 20])
                end_idx = np.argmin(v1[idx - 20:idx + 21]) - 20
                splits.append((split_start, idx + end_idx))
                split_start = idx + end_idx
                last_max = max(v1[split_start:split_start + min_length])
                last_min = min(v1[split_start:split_start + min_length])
                idx = split_start + min_length
                continue

            idx += step_size

    # Ensure the tail of the trace is covered. The `not splits` guard fixes
    # an IndexError: the while loop can exit without appending anything when
    # len(v1) - min_length is an exact multiple of step_size and no
    # amplitude break occurred.
    if not splits or splits[-1][1] != len(v1):
        splits.append((split_start, len(v1)))

    all_peaks = []
    for s in splits:
        first_index = s[0]
        last_index = s[1]
        std = np.std(v1[first_index:last_index])
        # detect_peaks: project-level helper (defined elsewhere in this
        # codebase); returns (peak_indices, trough_indices).
        peaks, _ = detect_peaks(v1[first_index:last_index], std * threshold)
        peaks = peaks + first_index
        all_peaks.extend(peaks)

    all_peaks = np.array(all_peaks)
    return all_peaks
def detect_spiketimes(time, v1, split=80, threshold=2.8):
    """Return the spike times of trace ``v1`` on the given time axis.

    Peak indices are taken from detect_spike_indices_automatic_split();
    ``split`` is kept for backward compatibility but is not forwarded.
    """
    spike_indices = detect_spike_indices_automatic_split(v1, threshold=threshold)
    return [time[idx] for idx in spike_indices]
return phase_times
# def calculate_phases(relative_spike_times, eod_durations):
# phase_times = np.zeros(len(relative_spike_times))
#
# for i in range(len(relative_spike_times)):
# phase_times[i] = (relative_spike_times[i] / eod_durations[i]) * 2 * np.pi
#
# return phase_times
def eods_around_spikes(time, eod, spiketime_idices):
@ -396,48 +463,48 @@ def eods_around_spikes(time, eod, spiketime_idices):
return np.array(relative_spike_times), np.array(eod_durations)
def search_eod_start_and_end_times(time, eod, index):
    """Find the start and end times of the EOD cycle containing ``index``.

    Walks outward from ``index`` to the nearest negative-to-positive zero
    crossings of ``eod`` on both sides and linearly interpolates the
    crossing times between the two bracketing samples.

    :param time: time axis, same length as ``eod``.
    :param eod: EOD waveform samples.
    :param index: sample index inside the cycle of interest.
    :return: (start_time, end_time) of the enclosing EOD cycle.
    """
    # TODO might break if a spike is in the cut off first or last eod!

    # search start_time: walk backwards to the preceding upward zero crossing
    previous = index
    working_idx = index - 1
    while True:
        if eod[working_idx] < 0 < eod[previous]:
            first_value = eod[working_idx]
            second_value = eod[previous]

            dif = second_value - first_value
            # fraction of the inter-sample interval at which eod crosses zero
            part = np.abs(first_value / dif)

            time_dif = np.abs(time[previous] - time[working_idx])
            start_time = time[working_idx] + time_dif * part

            break

        previous = working_idx
        working_idx -= 1

    # search end_time: walk forwards to the following upward zero crossing
    previous = index
    working_idx = index + 1
    while True:
        if eod[previous] < 0 < eod[working_idx]:
            first_value = eod[previous]
            second_value = eod[working_idx]

            dif = second_value - first_value
            part = np.abs(first_value / dif)

            time_dif = np.abs(time[previous] - time[working_idx])
            # BUGFIX: the crossing lies between time[previous] and
            # time[working_idx]; offsetting from time[working_idx] (as the
            # old code did) placed it one sample interval too late, outside
            # the bracketing interval.
            end_time = time[previous] + time_dif * part

            break

        previous = working_idx
        working_idx += 1

    return start_time, end_time
# def search_eod_start_and_end_times(time, eod, index):
# # TODO might break if a spike is in the cut off first or last eod!
#
# # search start_time:
# previous = index
# working_idx = index-1
# while True:
# if eod[working_idx] < 0 < eod[previous]:
# first_value = eod[working_idx]
# second_value = eod[previous]
#
# dif = second_value - first_value
# part = np.abs(first_value/dif)
#
# time_dif = np.abs(time[previous] - time[working_idx])
# start_time = time[working_idx] + time_dif*part
#
# break
#
# previous = working_idx
# working_idx -= 1
#
# # search end_time
# previous = index
# working_idx = index + 1
# while True:
# if eod[previous] < 0 < eod[working_idx]:
# first_value = eod[previous]
# second_value = eod[working_idx]
#
# dif = second_value - first_value
# part = np.abs(first_value / dif)
#
# time_dif = np.abs(time[previous] - time[working_idx])
# end_time = time[working_idx] + time_dif * part
#
# break
#
# previous = working_idx
# working_idx += 1
#
# return start_time, end_time
def __vector_strength__(relative_spike_times: np.ndarray, eod_durations: np.ndarray):
@ -455,12 +522,12 @@ def __vector_strength__(relative_spike_times: np.ndarray, eod_durations: np.ndar
def detect_f_zero_in_frequency_trace(time, frequency, stimulus_start, sampling_interval, peak_buffer_percent=0.05, buffer=0.025):
freq_before = frequency[int(time[0]+buffer/sampling_interval):int((stimulus_start - time[0] - buffer) / sampling_interval)]
if len(freq_before) < 3:
print("Length of reference frequency before the stimulus too short (< 3 points)")
if time[0] + 2*buffer > stimulus_start:
print("F_zero detection: Not enough frequency trace before start of the stimulus.")
return 0
freq_before = frequency[int(time[0]+buffer/sampling_interval):int((stimulus_start - time[0] - buffer) / sampling_interval)]
min_before = min(freq_before)
max_before = max(freq_before)
mean_before = np.mean(freq_before)
@ -468,6 +535,10 @@ def detect_f_zero_in_frequency_trace(time, frequency, stimulus_start, sampling_i
# time where the f-zero is searched in
start_idx, end_idx = time_window_detect_f_zero(time[0], stimulus_start, sampling_interval, buffer)
if start_idx < 0:
raise ValueError("Time window to detect f_zero starts in an negative index!")
min_during_start_of_stim = min(frequency[start_idx:end_idx])
max_during_start_of_stim = max(frequency[start_idx:end_idx])