Initial Commit
This commit is contained in:
commit
01320b8458
80
analysis_graphs.py
Normal file
80
analysis_graphs.py
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
# ---------------------------------------------------------------------------------------------------------------------
# Name:        Firing Rate and Fourier Script (moving comb repro)
# Purpose:     Takes nixio spike data from moving comb repro and plots firing rate and power spectrum density graph
# Usage:       python3 analysis_graphs.py average
# Author:      Carolin Sachgau, University of Tuebingen
# Created:     20/09/2018
# ---------------------------------------------------------------------------------------------------------------------

import matplotlib.pyplot as plt
from IPython import embed
import sys
from icr_analysis import *
from open_nixio import *

# Parameters
sampling_rate = 20000
sigma = 0.01  # for Gaussian
delay = 1.5  # delay in seconds after comb reaches one end, before commencing movement again
cell_name = sys.argv[1].split('/')[-2]

# Open Nixio File
curr_comb, intervals_dict = open_nixio(sys.argv[1], sys.argv[2])

# Kernel Density estimator: gaussian fit, sampled on the recording's time grid
t = np.arange(-sigma * 4, sigma * 4, 1 / sampling_rate)
fxn = np.exp(-0.5 * (t / sigma) ** 2) / np.sqrt(2 * np.pi) / sigma  # gaussian function

if sys.argv[2] == 'average':
    # Keys are (speed, direction, comb); each value is a list of (position, spike train) trials.
    for (speed, direct, comb) in intervals_dict:
        for trial in intervals_dict[(speed, direct, comb)]:
            spike_train = trial[1]
            pos = trial[0]
            avg_convolve_spikes = gaussian_convolve(spike_train, fxn, sampling_rate)
            p, freq, std_four, mn_four = fourier_psd(avg_convolve_spikes, sampling_rate)

            # Graphing
            fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)

            # Firing Rate Graph
            firing_times = np.arange(0, len(avg_convolve_spikes))
            ax1.plot((firing_times / sampling_rate), avg_convolve_spikes)
            ax1.set_title('Firing Rate of trial ' + str((speed, direct)) + ' comb = ' + str(comb) + '\n')
            ax1.set_xlabel('Time (s)')
            ax1.set_ylabel('Firing rate (Hz)')

            # Fourier Graph (power spectrum below 400 Hz, with mean+std threshold line)
            ax2.semilogy(freq[freq < 400], p[freq < 400])
            ax2.axhline(y=(mn_four + std_four), xmin=0, xmax=1, linestyle='--', color='red')
            plt.tight_layout()
            # NOTE(review): reconstructed nesting — the file name carries no trial
            # index, so repeated trials of one (speed, comb, direction) key
            # overwrite each other's PNG.  Confirm against original intent.
            plt.savefig(('avg_' + '_' + str(cell_name) + '_' + str(speed) + '_' + str(comb)
                         + '_' + str(direct) + '.png'))
            plt.close(fig)

elif sys.argv[2] == 'nonaverage':
    # Keys are (speed, direction, position, comb); each value is one spike train.
    for (speed, direct, pos, comb) in intervals_dict:
        spike_train = intervals_dict[speed, direct, pos, comb]
        avg_convolve_spikes = gaussian_convolve(spike_train, fxn, sampling_rate)
        p, freq, std_four, mn_four = fourier_psd(avg_convolve_spikes, sampling_rate)

        # Graphing
        fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)

        # Firing Rate Graph
        firing_times = np.arange(0, len(avg_convolve_spikes))
        ax1.plot((firing_times / sampling_rate), avg_convolve_spikes)
        ax1.set_title('Firing Rate of trial ' + str((speed, pos)) + ' comb = ' + str(comb) + '\n')
        ax1.set_xlabel('Time (s)')
        ax1.set_ylabel('Firing rate (Hz)')

        # Fourier Graph (power spectrum below 200 Hz, with mean+std threshold line)
        ax2.semilogy(freq[freq < 200], p[freq < 200])
        ax2.axhline(y=(mn_four + std_four), xmin=0, xmax=1, linestyle='--', color='red')
        # ax2.axvline(x=max_four,linestyle='--', color='green')

        plt.savefig(('nonavg_' + '_' + str(cell_name) + '_' + str(speed) + '_' + str(pos)
                     + '_' + str(comb) + '_' + str(direct) + '.png'))
        plt.close(fig)

# ---------------------------------------------------------------------------------------------------------------------
|
50
icr_analysis.py
Normal file
50
icr_analysis.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
import numpy as np
|
||||||
|
from IPython import embed
|
||||||
|
from scipy.signal import convolve
|
||||||
|
import matplotlib.mlab as mlab
|
||||||
|
|
||||||
|
|
||||||
|
def avgNestedLists(nested_vals):
    """Average a ragged 2-D array column-wise into a 1-D list.

    Output element i is the nanmean of element i of every inner list that
    is long enough to have one; shorter rows simply contribute nothing
    past their own length.  An empty input yields an empty list.
    """
    longest = max((len(row) for row in nested_vals), default=0)
    return [
        np.nanmean([row[col] for row in nested_vals if col < len(row)])
        for col in range(longest)
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def gaussian_convolve(spike_train, fxn, sampling_rate):
    """Turn a spike train into a smoothed firing-rate trace.

    Parameters
    ----------
    spike_train : 1-D array of spike times in seconds (non-decreasing).
    fxn : 1-D smoothing kernel (here a Gaussian) sampled at 1/sampling_rate.
    sampling_rate : samples per second used to discretise the train.

    Returns
    -------
    list : the 'valid'-mode convolution of the binarised spike train with
        the kernel (one value per retained sample).
    """
    # Length of the trial in samples, then shift times so the train starts at 0
    # (subtracting baseline).
    trial_length = int((spike_train[-1] - spike_train[0]) * sampling_rate)
    spike_train = spike_train - spike_train[0]

    # Boolean vector over the trial: 1 where a spike happened, 0 elsewhere.
    trial_bool = np.zeros(trial_length + 1)
    # BUG FIX: .astype(np.int) — the np.int alias was deprecated in NumPy 1.20
    # and removed in 1.24, raising AttributeError; the builtin int is the
    # documented replacement.
    spike_indx = (spike_train * sampling_rate).astype(int)
    trial_bool[spike_indx] = 1

    # Convolve the gaussian with the boolean spike list.  The original wrapped
    # the single result in a one-element list and ran it through
    # avgNestedLists, which for one trial is the identity — return the
    # convolution directly (still as a list, matching the old return type).
    convolve_spikes = convolve(trial_bool, fxn, mode='valid')
    return list(convolve_spikes)
|
||||||
|
|
||||||
|
|
||||||
|
def fourier_psd(avg_convolve_spikes, sampling_rate):
    """Power spectral density of a firing-rate trace.

    Uses mlab.psd with 3-second FFT windows, 50 % overlap and mean
    detrending.

    Returns
    -------
    (p, freq, std_four, mn_four) : power values, their frequencies, and
        the std / mean of the power — callers draw the significance
        threshold line at mn_four + std_four on the power axis.
    """
    p, freq = mlab.psd(avg_convolve_spikes, NFFT=sampling_rate * 3, noverlap=sampling_rate * 1.5,
                       Fs=sampling_rate,
                       detrend=mlab.detrend_mean)
    # BUG FIX: the statistics were previously taken over the frequency axis
    # (np.std(freq[5:]) / np.mean(freq)), which only describes the FFT bin
    # spacing, not the signal.  The callers plot axhline(mn_four + std_four)
    # on the POWER axis, so the threshold must come from the power values.
    # The first bins are skipped for the std as before (DC / lowest bins).
    std_four = np.std(p[5:])
    mn_four = np.mean(p)
    return p, freq, std_four, mn_four
|
44
open_nixio.py
Normal file
44
open_nixio.py
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
import nixio as nix
|
||||||
|
from IPython import embed
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
|
||||||
|
def open_nixio(nix_file, avg_opt):
    """Open a nixio recording and bucket its spikes by comb-movement interval.

    Parameters
    ----------
    nix_file : path of the .nix recording to open (read-only).
    avg_opt : 'average' groups trials per (speed, direction, comb) key as
        (position, spikes) tuples; any other value stores one spike train
        per (speed, direction, position, comb) key.

    Returns
    -------
    (curr_comb, intervals_dict) : the comb object named in the repro
        settings, and the dictionary of spike data described above.
    """
    # Opening file using nixio
    f = nix.File.open(nix_file, nix.FileMode.ReadOnly)
    b = f.blocks[0]  # first block
    meta = b.metadata
    mt = b.multi_tags['moving object-1']
    tags = b.tags["MovingObjects_1"]

    # Important parts of nixio file
    curr_comb = tags.metadata["RePro-Info"]["settings"]["object"]

    comb_pos = mt.positions[:]
    spikes = b.data_arrays["Spikes-1"][:]  # spikes for all trials in one recording

    # All feature tags: ['GlobalEField', 'GlobalEFieldAM', 'LocalEField', 'I', 'EOD Rate', 'EOD Amplitude',
    # 'AmplifierMode', 'abs_time', 'delay', 'amplitude', 'speed', 'lateral position', 'direction']
    # The leading 16 characters of each feature name are stripped to leave the bare tag.
    feature_dict = {}
    for feat in mt.features:
        feature_dict[feat.data.name[16:]] = mt.features[feat.data.name].data[:]

    # Spike data in intervals of comb position + speed.  Each interval runs
    # from one recorded comb position to the next, so the last position only
    # serves as the final boundary.
    intervals_dict = defaultdict(list)
    for idx in range(len(comb_pos) - 1):
        curr_speed = feature_dict['speed'][idx]
        curr_pos = comb_pos[idx]
        curr_dir = feature_dict['direction'][idx]
        curr_spikes = spikes[(spikes < comb_pos[idx + 1]) & (spikes > comb_pos[idx])]

        if avg_opt == 'average':
            intervals_dict[(curr_speed, curr_dir, curr_comb)].append((curr_pos, curr_spikes))
        else:
            intervals_dict[(curr_speed, curr_dir, curr_pos, curr_comb)] = curr_spikes

    # Spike data at baseline
    # comb_baseline = spikes[(spikes < comb_pos[0])]

    return curr_comb, intervals_dict
|
48
raster_plot.py
Normal file
48
raster_plot.py
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
# ---------------------------------------------------------------------------------------------------------------------
# Name:        Raster Plot Script (moving comb repro)
# Purpose:     Takes nixio spike data from moving comb repro and draws a raster (event) plot of the spike trains
# Usage:       python3 raster_plot.py <nix file> average
# Author:      Carolin Sachgau, University of Tuebingen
# Created:     20/09/2018
# ---------------------------------------------------------------------------------------------------------------------

from open_nixio import *
import numpy as np
import matplotlib.pyplot as plt
from IPython import embed
import sys

# Distinct RGB colours, available for colour-coding trial groups in the plot.
colorCodes = np.array([[0, 1, 1],
                       [1, 0, 0],
                       [0, 1, 0],
                       [0, 0, 1],
                       [1, 0, 1]])

# Parameters
sampling_rate = 20000
sigma = 0.01  # for Gaussian
delay = 1.5  # delay in seconds after comb reaches one end, before commencing movement again
cell_name = sys.argv[1].split('/')[-2]

curr_comb, intervals_dict = open_nixio(sys.argv[1], sys.argv[2])

# Collect every trial's spike train, re-zeroed to its first spike.
spike_repeats = []
for (speed, direct, comb) in intervals_dict:
    s_trials = intervals_dict[(speed, direct, comb)]

    for trial in intervals_dict[(speed, direct, comb)]:
        spike_train = trial[1]
        spike_train = spike_train - spike_train[0]  # changing spike train to start at 0 (subtracting baseline)
        spike_repeats.append(spike_train)

# NOTE(review): reconstructed nesting — one event plot is drawn after all keys
# are collected, so the title shows the loop variables of the final iteration.
# Confirm this matches the original layout.
fig, ax = plt.subplots()
plt.eventplot(spike_repeats, linelengths=0.05, lineoffsets=0.05)
ax.set_title('Raster Plot of ' + str((speed, direct)) + ' comb = ' + str(comb) + '\n')
ax.set_xlabel('Time (s)')
ax.set_ylabel('Trials')
plt.show()
|
Loading…
Reference in New Issue
Block a user