From 625ab29dfdd48bbdb8eaf86ca1740ce0fbef426f Mon Sep 17 00:00:00 2001
From: Carolin Sachgau
Date: Thu, 22 Nov 2018 11:59:52 +0100
Subject: [PATCH] cutoff to factor for pause at end of trial applied

---
 analysis_graphs_new.py | 44 ++++++++++++++++++++++++++++++++++++++++++++----------
 icr_analysis.py        | 38 ++++++++++++++++++++++++++++--------------
 open_nixio.py          |  2 +-
 open_nixio_new.py      |  4 +++-
 4 files changed, 62 insertions(+), 26 deletions(-)

diff --git a/analysis_graphs_new.py b/analysis_graphs_new.py
index 631d5d8..9498822 100644
--- a/analysis_graphs_new.py
+++ b/analysis_graphs_new.py
@@ -21,14 +21,39 @@ cell_name = sys.argv[1].split('/')[-2]
 
 # Open Nixio File
 intervals_dict = open_nixio_new(sys.argv[1])
+
 
 # Kernel Density estimator: gaussian fit
 t = np.arange(-sigma*4, sigma*4, 1/sampling_rate)
 fxn = np.exp(-0.5*(t/sigma)**2) / np.sqrt(2*np.pi) / sigma  # gaussian function
 
-for (repro, speed, direct, pos, comb) in intervals_dict:
-    spike_train = intervals_dict[speed, direct, pos, comb]
-    avg_convolve_spikes = gaussian_convolve(spike_train, fxn, sampling_rate)
+# for (rep, speed, direct, pos, comb) in intervals_dict:
+#     spike_train = intervals_dict[rep, speed, direct, pos, comb]
+#     avg_convolve_spikes = gaussian_convolve(spike_train, fxn, sampling_rate)
+#     p, freq, std_four, mn_four = fourier_psd(avg_convolve_spikes, sampling_rate)
+#
+#     # Graphing
+#     fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)
+#
+#     # Firing Rate Graph
+#     firing_times = np.arange(0, len(avg_convolve_spikes))
+#     ax1.plot((firing_times / sampling_rate), avg_convolve_spikes)
+#     ax1.set_title('Firing Rate of trial ' + str((speed, direct)) + ' comb = ' + str(comb) + '\n')
+#     ax1.set_xlabel('Time (s)')
+#     ax1.set_ylabel('Firing rate (Hz)')
+#
+#     # Fourier Graph
+#     ax2.semilogy(freq[freq < 200], p[freq < 200])
+#     ax2.axhline(y=(mn_four+std_four), xmin=0, xmax=1, linestyle='--', color='red')
+#     # ax2.axvline(x=max_four,linestyle='--', color='green')
+#
+#     plt.savefig(('nonavg_' + '_' + str(rep) + '_' + str(cell_name) + '_' + str(speed) + '_' + str(pos)
+#                  + '_' + str(comb) + '_' + str(direct) + '.png'))
+#     plt.close(fig)
+
+for (rep, time, speed, direct, comb) in intervals_dict:
+    spike_train = intervals_dict[(rep, time, speed, direct, comb)]
+    avg_convolve_spikes = gaussian_convolve(spike_train, fxn, sampling_rate, time)
     p, freq, std_four, mn_four = fourier_psd(avg_convolve_spikes, sampling_rate)
 
     # Graphing
@@ -37,17 +62,16 @@ for (repro, speed, direct, pos, comb) in intervals_dict:
     # Firing Rate Graph
     firing_times = np.arange(0, len(avg_convolve_spikes))
     ax1.plot((firing_times / sampling_rate), avg_convolve_spikes)
-    ax1.set_title('Firing Rate of trial ' + str((speed, pos)) + ' comb = ' + str(comb) + '\n')
+    ax1.set_title('Firing Rate of trial ' + str((speed, direct)) + ' comb = ' + str(comb) + '\n')
     ax1.set_xlabel('Time (s)')
     ax1.set_ylabel('Firing rate (Hz)')
 
     # Fourier Graph
-    ax2.semilogy(freq[freq < 200], p[freq < 200])
-    ax2.axhline(y=(mn_four+std_four), xmin=0, xmax=1, linestyle='--', color='red')
-    # ax2.axvline(x=max_four,linestyle='--', color='green')
-
-    plt.savefig(('nonavg_' + str(repro) +'_' + str(cell_name) + '_' + str(speed) + '_' + str(pos)
-                 + '_' + str(comb) + '_' + str(direct) + '.png'))
+    ax2.semilogy(freq[freq < 400], p[freq < 400])
+    ax2.axhline(y=(mn_four + std_four), xmin=0, xmax=1, linestyle='--', color='red')
+    plt.tight_layout()
+    plt.savefig((str(rep) + '_''avg_' + '_' + str(cell_name) + '_' + str(speed) + '_' + str(comb)
+                 + '_' + str(direct) + '.png'))
     plt.close(fig)
 
 # ----------------------------------------------------------------------------------------------------------------------
diff --git a/icr_analysis.py b/icr_analysis.py
index 3d061ab..940063c 100644
--- a/icr_analysis.py
+++ b/icr_analysis.py
@@ -23,21 +23,31 @@ def avgNestedLists(nested_vals):
     return output
 
 
-def gaussian_convolve(spike_train, fxn, sampling_rate):
-
+def gaussian_convolve(all_spike_trains, fxn, sampling_rate, time):
+    """
+    Takes an array of spike trains of different sizes,
+    convolves it with a gaussian, returns the average gaussian convolve spikes
+    """
     all_convolve_spikes = []
-    trial_length = int((spike_train[-1] - spike_train[0]) * sampling_rate)
-    spike_train = spike_train - spike_train[0]  # changing spike train to start at 0 (subtracting baseline)
-    trial_time = np.arange(0, (trial_length + 1), 1)
-    trial_bool = np.zeros(len(trial_time))
-    # Boolean list in length of trial length, where 1 means spike happened, 0 means no spike
-    spike_indx = (spike_train * sampling_rate).astype(np.int)
-    trial_bool[spike_indx] = 1
-    # trial_bool = trial_bool[30000:(len(trial_bool)-30000)]
-    convolve_spikes = np.asarray(
-        [convolve(trial_bool, fxn, mode='valid')])  # convolve gaussian with boolean spike list
-    all_convolve_spikes.append(convolve_spikes[0, :])
-    avg_convolve_spikes = avgNestedLists(all_convolve_spikes)
+    for spike_train in all_spike_trains:
+        time_cutoff = time * sampling_rate
+        trial_length = int((spike_train[-1] - spike_train[0]) * sampling_rate)
+        spike_train = spike_train - spike_train[0]  # changing spike train to start at 0 (subtracting baseline)
+        trial_time = np.arange(0, (trial_length + 1), 1)
+        trial_bool = np.zeros(len(trial_time))
+        #Boolean list in length of trial length, where 1 means spike happened, 0 means no spike
+        spike_indx = (spike_train * sampling_rate).astype(np.int)
+        trial_bool[spike_indx] = 1
+
+        # trial_bool = trial_bool[30000:(len(trial_bool)-30000)]
+        convolve_spikes = np.asarray([convolve(trial_bool, fxn, mode='valid')])  # convolve gaussian with boolean spike list
+        all_convolve_spikes.append(convolve_spikes[0, :][:time_cutoff])
+    #
+    # cutoff = min([len(i) for i in all_convolve_spikes])
+    # for ix, convolved in enumerate(all_convolve_spikes):
+    #     all_convolve_spikes[ix] = all_convolve_spikes[ix][:cutoff]
+    #avg_convolve_spikes = avgNestedLists(all_convolve_spikes)
+    avg_convolve_spikes = np.mean(all_convolve_spikes, 0)
     return avg_convolve_spikes
 
 
diff --git a/open_nixio.py b/open_nixio.py
index 1d92f14..e213f34 100644
--- a/open_nixio.py
+++ b/open_nixio.py
@@ -33,7 +33,7 @@ def open_nixio(nix_file, avg_opt):
             curr_spikes = spikes[(spikes < comb_pos[idx + 1]) & (spikes > comb_pos[idx])]
 
             if avg_opt == 'average':
-                intervals_dict[(curr_speed, curr_dir, curr_comb)].append((curr_pos, curr_spikes))
+                intervals_dict[(curr_speed, curr_dir, curr_comb)].append(curr_spikes)
             else:
                 intervals_dict.update({(curr_speed, curr_dir, curr_pos, curr_comb): curr_spikes})
 
diff --git a/open_nixio_new.py b/open_nixio_new.py
index 6e0c328..05dc896 100644
--- a/open_nixio_new.py
+++ b/open_nixio_new.py
@@ -24,6 +24,7 @@ def open_nixio_new(nix_file):
         if tag.name.startswith('Baseline'):
             continue
         curr_comb = tag.metadata["RePro-Info"]["settings"]["object"]
+        travel_dist = tag.metadata["RePro-Info"]["settings"]["traveldist"]
         repro_pos, = tag.position
         repro_ext, = tag.extent
         idx_qry = np.logical_and(repro_pos < comb_pos, comb_pos < (repro_pos + repro_ext))
@@ -34,9 +35,10 @@ def open_nixio_new(nix_file):
             if idx == (len(comb_pos)-1):
                 break
             curr_speed = feature_dict['speed'][idx]
+            travel_time = travel_dist/curr_speed
             curr_pos = comb_pos[idx]
             curr_dir = feature_dict['direction'][idx]
             curr_spikes = spikes[(spikes < comb_pos[idx + 1]) & (spikes > comb_pos[idx])]
-            intervals_dict.update({(tag.name, curr_speed, curr_dir, curr_pos, curr_comb): curr_spikes})
+            intervals_dict[(tag.name, travel_time, curr_speed, curr_dir, curr_comb)].append(curr_spikes)
 
     return intervals_dict
\ No newline at end of file
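
Note on the cutoff this patch applies: each trial's convolved firing rate is now trimmed to the object's travel time (traveldist / speed, carried through the intervals_dict key) before averaging, so the stationary pause at the end of a trial no longer dilutes the mean rate. A rough, self-contained sketch of that cutoff-and-average step follows; the sampling rate, kernel width and the two spike trains are made-up placeholders rather than values from the recordings, and np.convolve with mode='same' stands in for the mode='valid' convolution used in icr_analysis.py.

    import numpy as np

    sampling_rate = 20000   # Hz, placeholder
    sigma = 0.001           # s, kernel width, placeholder
    travel_time = 0.5       # s, traveldist / speed for this trial type, placeholder

    # Gaussian kernel, same form as in analysis_graphs_new.py
    t = np.arange(-sigma * 4, sigma * 4, 1 / sampling_rate)
    kernel = np.exp(-0.5 * (t / sigma) ** 2) / np.sqrt(2 * np.pi) / sigma

    # Two made-up trials of unequal length (spike times in seconds)
    trials = [np.array([0.01, 0.05, 0.32, 0.49, 0.61]),
              np.array([0.02, 0.07, 0.30, 0.52])]

    cutoff = int(travel_time * sampling_rate)       # samples to keep per trial
    rates = []
    for spikes in trials:
        spikes = spikes - spikes[0]                           # start each trial at t = 0
        n_samples = int(spikes[-1] * sampling_rate) + 1
        binary = np.zeros(n_samples)
        binary[(spikes * sampling_rate).astype(int)] = 1      # 1 where a spike occurred
        rate = np.convolve(binary, kernel, mode='same')       # smoothed firing rate
        rates.append(rate[:cutoff])                           # drop the end-of-trial pause

    avg_rate = np.mean(rates, 0)   # trials now have equal length, so a plain mean works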