Merge branch 'master' of https://whale.am28.uni-tuebingen.de/git/jgrewe/gp_neurobio
commit 4ea45b4ba6
@@ -1,4 +1,5 @@
 from read_chirp_data import *
+from utility import *
 #import nix_helpers as nh
 import matplotlib.pyplot as plt
 import numpy as np
@@ -15,26 +16,14 @@ data = ("2018-11-09-ad-invivo-1", "2018-11-09-ae-invivo-1", "2018-11-09-ag-inviv
 #for dataset in data:
 eod = read_chirp_eod(os.path.join(data_dir, dataset))
 times = read_chirp_times(os.path.join(data_dir, dataset))
+df_map = map_keys(eod)


-df_map = {}  #keys are returned sorted by df
-for k in eod.keys():
-    df = k[1]
-    ch = k[3]
-    if df in df_map.keys():
-        df_map[df].append(k)
-    else:
-        df_map[df] = [k]
-
-print(ch)  #print the chirp size to determine whether "Chirps" or "Chirps large" was used
-
-
 #the outer loop runs over all keys and thus over all dfs
-#the inner loop plots the 16 repetitions of one frequency in 4 subplots
-for idx in df_map.keys():
-    freq = list(df_map[idx])
+#the inner loop plots the 16 repetitions of one frequency
+for i in df_map.keys():
+    freq = list(df_map[i])
     fig,axs = plt.subplots(2, 2, sharex = True, sharey = True)

     for idx, k in enumerate(freq):
@@ -58,18 +47,37 @@ for idx in df_map.keys():


     fig.suptitle('EOD for chirps', fontsize = 16)
-    plt.show()
+    axs[0,0].set_ylabel('Amplitude [mV]')
+    axs[0,1].set_xlabel('Amplitude [mV]')
+    axs[1,0].set_xlabel('Time [ms]')
+    axs[1,1].set_xlabel('Time [ms]')


-#Problem: axs has no label function, so axes would have to be defined again; at the moment the labels only appear on one of the subplots
-#ax = plt.gca()
-#ax.set_ylabel('Time [ms]')
-#ax.set_xlabel('Amplitude [mV]')
-#ax.label_outer()
-
-#next step: compute the relative amplitude modulation, find the max and min of the amplitude, match EOD and chirps, compute the difference
+#for i in df_map.keys():
+
+freq = list(df_map['-50Hz'])
+ls_mod = []
+beat_mods = []
+for k in freq:
+    e1 = eod[k]
+    zeit = np.asarray(e1[0])
+    ampl = np.asarray(e1[1])
+
+    ct = times[k]
+    for chirp in ct:
+        time_cut = zeit[(zeit > chirp-10) & (zeit < chirp+10)]
+        eods_cut = ampl[(zeit > chirp-10) & (zeit < chirp+10)]
+        beat_cut = ampl[(zeit > chirp-55) & (zeit < chirp-10)]
+
+        chirp_mod = np.std(eods_cut)  #std of the window around the chirp
+        beat_mod = np.std(beat_cut)  #std of the window before the chirp
+        ls_mod.append(chirp_mod)
+        beat_mods.append(beat_mod)
+
+#length of the modulation list is 160: 16 repetitions times 10 chirps per trial
+#use the std as the measure of amplitude modulation?
+
+
+#assign chirps to a phase - plot them together?
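Note: the loop above collects the standard deviation of the EOD in the window around each chirp (ls_mod) and in the beat window before it (beat_mods). The removed comment asks for a relative amplitude modulation; a minimal sketch under the assumption that the two lists stay aligned per chirp (the names below are illustrative, not part of the commit):

    chirp_stds = np.asarray(ls_mod)     # std of the EOD in the +/-10 ms window around each chirp
    beat_stds = np.asarray(beat_mods)   # std of the EOD in the beat window before each chirp
    rel_mod = chirp_stds / beat_stds    # relative modulation: chirp window vs. preceding beat
    print(np.mean(rel_mod), np.std(rel_mod))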
@@ -1,16 +1,19 @@
 from read_baseline_data import *
+from read_chirp_data import *
+from utility import *
 #import nix_helpers as nh
 import matplotlib.pyplot as plt
 import numpy as np
 from IPython import embed #import functions


 data_dir = "../data"
-dataset = "2018-11-09-aa-invivo-1"
+dataset = "2018-11-09-ad-invivo-1"
 #data = ("2018-11-09-aa-invivo-1", "2018-11-09-ab-invivo-1", "2018-11-09-ac-invivo-1", "2018-11-09-ad-invivo-1", "2018-11-13-aa-invivo-1", "2018-11-13-ab-invivo-1", "2018-11-13-ad-invivo-1", "2018-11-09-af-invivo-1", "2018-11-09-ag-invivo-1", "2018-11-13-ah-invivo-1", "2018-11-13-ai-invivo-1", "2018-11-13-aj-invivo-1", "2018-11-13-ak-invivo-1", "2018-11-13-al-invivo-1", "2018-11-14-aa-invivo-1", "2018-11-14-ab-invivo-1", "2018-11-14-ac-invivo-1", "2018-11-14-ad-invivo-1", "2018-11-14-ae-invivo-1", "2018-11-14-af-invivo-1", "2018-11-14-ag-invivo-1", "2018-11-14-aa-invivo-1", "2018-11-14-aj-invivo-1", "2018-11-14-ak-invivo-1", "2018-11-14-al-invivo-1", "2018-11-14-am-invivo-1", "2018-11-14-an-invivo-1")
-spike_times = read_baseline_spikes(os.path.join(data_dir, dataset))


-#spike_frequency = len(spike_times) / spike_times[-1]
+spike_times = read_baseline_spikes(os.path.join(data_dir, dataset))
 #inst_frequency = 1. / np.diff(spike_times)
 spike_rate = np.diff(spike_times)

@@ -21,7 +24,6 @@ plt.hist(spike_rate,x)
 mu = np.mean(spike_rate)
 sigma = np.std(spike_rate)
 cv = sigma/mu
-print(cv)

 plt.title('A.lepto ISI Histogramm', fontsize = 14)
 plt.xlabel('duration ISI[ms]', fontsize = 12)
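For context, the coefficient of variation computed above is the ratio of the ISI standard deviation to the ISI mean. A self-contained example with synthetic spike times (values are illustrative only, not data from the repository):

    import numpy as np

    spike_times = np.cumsum(np.random.exponential(10., size=1000))  # Poisson-like spiking, mean ISI 10 ms
    isis = np.diff(spike_times)
    cv = np.std(isis) / np.mean(isis)  # ~1 for Poisson firing, < 1 for more regular firing
    print(cv)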
@@ -32,3 +34,24 @@ plt.yticks(fontsize = 12)
 plt.show()


+
+#Nyquist-Theorem Plot:
+
+chirp_spikes = read_chirp_spikes(os.path.join(data_dir, dataset))
+df_map = map_keys(chirp_spikes)
+
+
+for i in df_map.keys():
+    freq = list(df_map[i])
+    for k in freq:
+        spikes = chirp_spikes[k]
+        phase_map = map_keys(spikes)
+        for p in phase_map:
+            spike_rate = 1./ np.diff(p)
+
+            print(spike_rate)
+
+#
+# plt.plot(spikes, rate)
+# plt.show()
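The inner loop above applies np.diff to the phase keys themselves. If the intention is the instantaneous firing rate of the spikes recorded in each phase, a sketch assuming chirp_spikes[k][p] holds spike times (as the other analysis scripts use it) could look like this; it is an assumption, not the committed code:

    for k in freq:
        spikes_per_phase = chirp_spikes[k]
        for p in spikes_per_phase:
            st = np.asarray(spikes_per_phase[p])   # assumed: spike times of one chirp presentation
            if len(st) > 1:
                inst_rate = 1. / np.diff(st)       # instantaneous rate between successive spikes
                print(p, np.mean(inst_rate))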
@@ -1,5 +1,7 @@
 import numpy as np
 import os
+import nixio as nix
+from IPython import embed


 def read_chirp_spikes(dataset):
@@ -85,10 +87,37 @@ def read_chirp_times(dataset):
     return chirp_times


+def read_chirp_stimulus(dataset):
+    base = dataset.split(os.path.sep)[-1] + ".nix"
+    nix_file = nix.File.open(os.path.join(dataset, base), nix.FileMode.ReadOnly)
+    b = nix_file.blocks[0]
+    data = {}
+    for t in b.tags:
+        if "Chirps" in t.name:
+            stims = []
+            index = int(t.name.split("_")[-1])
+            df = t.metadata["RePro-Info"]["settings"]["deltaf"]
+            cs = t.metadata["RePro-Info"]["settings"]["chirpsize"]
+            stim_da = t.references["GlobalEFieldStimulus"]
+            si = stim_da.dimensions[0].sampling_interval
+            for mt in b.multi_tags:
+                if mt.positions[0] >= t.position[0] and \
+                   mt.positions[0] < (t.position[0] + t.extent[0]):
+                    break
+            for i in range(len(mt.positions)):
+                start_index = int(mt.positions[i] / si)
+                end_index = int((mt.positions[i] + mt.extents[i]) / si) - 1
+                stim = stim_da[start_index:end_index]
+                time = stim_da.dimensions[0].axis(len(stim)) + mt.positions[i]
+                stims.append((time, stim))
+            data[(index, df, cs)] = stims
+    nix_file.close()
+    return data
+
+
 if __name__ == "__main__":
     data_dir = "../data"
-    dataset = "2018-11-09-ad-invivo-1"
-    spikes = load_chirp_spikes(os.path.join(data_dir, dataset))
-    chirp_times = load_chirp_times(os.path.join(data_dir, dataset))
-    chirp_eod = load_chirp_eod(os.path.join(data_dir, dataset))
+    dataset = "2018-11-20-ad-invivo-1"
+    #spikes = load_chirp_spikes(os.path.join(data_dir, dataset))
+    #chirp_times = load_chirp_times(os.path.join(data_dir, dataset))
+    #chirp_eod = load_chirp_eod(os.path.join(data_dir, dataset))
+    stim = read_chirp_stimulus(os.path.join(data_dir, dataset))
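read_chirp_stimulus returns a dictionary keyed by (stimulus index, delta f, chirp size); each value is a list of (time, stimulus) segments. A minimal usage sketch for plotting the first segment (the variable names are illustrative and the time units depend on the recording):

    import matplotlib.pyplot as plt

    stim = read_chirp_stimulus(os.path.join(data_dir, dataset))
    (index, df, cs), segments = next(iter(stim.items()))
    time, trace = segments[0]
    plt.plot(time, trace)
    plt.title("stimulus %d, deltaf %s, chirpsize %s" % (index, str(df), str(cs)))
    plt.xlabel("time")
    plt.ylabel("stimulus amplitude")
    plt.show()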
@@ -4,13 +4,21 @@ from read_chirp_data import *
 from utility import *
 from IPython import embed

+# define sampling rate and data path
+sampling_rate = 40 #kHz
 data_dir = "../data"
 dataset = "2018-11-09-ad-invivo-1"
+# parameters for binning, smoothing and plotting
+num_bin = 12
+window = sampling_rate
+time_axis = np.arange(-50, 50, 1/sampling_rate)

+# read data from files
 spikes = read_chirp_spikes(os.path.join(data_dir, dataset))
 eod = read_chirp_eod(os.path.join(data_dir, dataset))
-times = read_chirp_times(os.path.join(data_dir, dataset))
+chirp_times = read_chirp_times(os.path.join(data_dir, dataset))

+# make a delta f map for the rather complicated keys
 df_map = {}
 for k in spikes.keys():
     df = k[1]
@@ -19,33 +27,55 @@ for k in spikes.keys():
     else:
         df_map[df] = [k]

-# make phases together, 12 phases
-spikes_mat = {}
+# differentiate between phases
+phase_vec = np.arange(0, 1+1/num_bin, 1/num_bin)
+cut_range = np.arange(-50*sampling_rate, 50*sampling_rate, 1)
+
+# make dictionaries for spiketimes
+df_phase_time = {}
+df_phase_binary = {}

+# iterate over delta f, repetition, phases and a single chirp
 for deltaf in df_map.keys():
+    df_phase_time[deltaf] = {}
+    df_phase_binary[deltaf] = {}
     for rep in df_map[deltaf]:
         for phase in spikes[rep]:
-            #print(phase)
-            spikes_one_chirp = spikes[rep][phase]
-            if deltaf == '-50Hz' and phase == (9, 0.54):
-                spikes_mat[deltaf, rep, phase] = spikes_one_chirp
-
-plot_spikes = spikes[(0, '-50Hz', '20%', '100Hz')][(0, 0.789)]
-
-mu = 1
-sigma = 1
-time_gauss = np.arange(-4, 4, 1)
-gauss = gaussian(time_gauss, mu, sigma)
-# spikes during time vec (00010000001)?
-smoothed_spikes = np.convolve(plot_spikes, gauss, 'same')
-window = np.mean(np.diff(plot_spikes))
-time_vec = np.arange(plot_spikes[0], plot_spikes[-1]+window, window)
-
-fig, ax = plt.subplots()
-ax.scatter(plot_spikes, np.ones(len(plot_spikes))*10, marker='|', color='k')
-ax.plot(time_vec, smoothed_spikes)
-plt.show()
-
-#embed()
-#exit()
-#hist_data = plt.hist(plot_spikes, bins=np.arange(-200, 400, 20))
-#ax.plot(hist_data[1][:-1], hist_data[0])
+            for idx in np.arange(num_bin):
+                # check the phase
+                if phase[1] > phase_vec[idx] and phase[1] < phase_vec[idx+1]:
+
+                    # get spikes between 50 ms before and after the chirp
+                    spikes_to_cut = np.asarray(spikes[rep][phase])
+                    spikes_cut = spikes_to_cut[(spikes_to_cut > -50) & (spikes_to_cut < 50)]
+                    spikes_idx = np.round(spikes_cut*sampling_rate)
+                    # also save as binary, 0 no spike, 1 spike
+                    binary_spikes = np.isin(cut_range, spikes_idx)*1
+
+                    # add the spikes to the dictionaries with the correct df and phase
+                    if idx in df_phase_time[deltaf].keys():
+                        df_phase_time[deltaf][idx].append(spikes_cut)
+                        df_phase_binary[deltaf][idx] = np.vstack((df_phase_binary[deltaf][idx], binary_spikes))
+                    else:
+                        df_phase_time[deltaf][idx] = [spikes_cut]
+                        df_phase_binary[deltaf][idx] = binary_spikes
+
+# for plotting iterate over delta f and phases
+for df in df_phase_time.keys():
+    for phase in df_phase_time[df].keys():
+        plot_trials = df_phase_time[df][phase]
+        plot_trials_binary = np.mean(df_phase_binary[df][phase], axis=0)
+
+        smoothed_spikes = smooth(plot_trials_binary, window)
+
+        fig, ax = plt.subplots(2, 1)
+        for i, trial in enumerate(plot_trials):
+            ax[0].scatter(trial, np.ones(len(trial))+i, marker='|', color='k')
+        ax[1].plot(time_axis, smoothed_spikes)
+
+        ax[0].set_title(df)
+        ax[0].set_ylabel('repetition', fontsize=12)
+
+        ax[1].set_xlabel('time [ms]', fontsize=12)
+        ax[1].set_ylabel('firing rate [?]', fontsize=12)
+        plt.show()
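One way to resolve the 'firing rate [?]' axis label: if the smoothing kernel is normalized to unit area, the trial-averaged binary trace is a spike probability per sample, and multiplying by the sampling rate converts it to Hz. A sketch under that assumption (sampling_rate is in kHz, i.e. samples per millisecond), which would sit inside the plotting loop; it is not part of the commit:

    sigma = window                                  # kernel width in samples, as in smooth()
    t_kernel = np.arange(-4 * sigma, 4 * sigma, 1)
    kernel = gaussian(t_kernel, 0, sigma)           # gaussian() from utility
    kernel /= np.sum(kernel)                        # normalize to unit area
    rate_hz = np.convolve(plot_trials_binary, kernel, 'same') * sampling_rate * 1000.
    ax[1].plot(time_axis, rate_hz)
    ax[1].set_ylabel('firing rate [Hz]', fontsize=12)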
@@ -1,4 +1,5 @@
 import numpy as np
+from IPython import embed


 def zero_crossing(eod, time):
@@ -23,3 +24,25 @@ def gaussian(x, mu, sig):
     y = np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.)))
     return y


+def smooth(data, window):
+    mu = 1
+    sigma = window
+    time_gauss = np.arange(-4 * sigma, 4 * sigma, 1)
+    gauss = gaussian(time_gauss, mu, sigma)
+    gauss_norm = gauss/(np.sum(gauss)/len(gauss))
+    smoothed_data = np.convolve(data, gauss_norm, 'same')
+    return smoothed_data
+
+
+def map_keys(input):
+    df_map = {}
+    for k in input.keys():
+        df = k[1]
+        #ch = k[3]
+        if df in df_map.keys():
+            df_map[df].append(k)
+        else:
+            df_map[df] = [k]
+    return df_map
+    #print(ch)
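A small usage sketch for the two new helpers, with made-up data (not part of the commit); it assumes utility.py is importable from the working directory:

    import numpy as np
    from utility import *

    binary = np.zeros(1000)
    binary[::100] = 1                      # a regular spike train: one spike every 100 samples
    smoothed = smooth(binary, 40)          # Gaussian window of 40 samples, as in the analysis scripts
    print(len(smoothed), smoothed.max())

    keys = {(0, '-50Hz', '20%', '100Hz'): [], (1, '-50Hz', '20%', '100Hz'): []}
    print(map_keys(keys))                  # -> {'-50Hz': [(0, ...), (1, ...)]}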