Merge branch 'master' of https://whale.am28.uni-tuebingen.de/git/jgrewe/gp_neurobio
commit b33a051fd1
@@ -58,7 +58,7 @@ axs[1,1].set_xlabel('Time [ms]')
freq = list(df_map['-50Hz'])
ls_mod = []
beat_mods = []
ls_beat = []
for k in freq:
    e1 = eod[k]
    zeit = np.asarray(e1[0])
@@ -69,14 +69,19 @@ for k in freq:
    time_cut = zeit[(zeit > chirp-10) & (zeit < chirp+10)]
    eods_cut = ampl[(zeit > chirp-10) & (zeit < chirp+10)]
    beat_cut = ampl[(zeit > chirp-55) & (zeit < chirp-10)]

    chirp_mod = np.std(eods_cut)  # std of the window around the chirp
    beat_mod = np.std(beat_cut)   # std of the window before the chirp
    ls_mod.append(chirp_mod)      # bring these into the right order?
    beat_mods.append(beat_mod)
    # currently not sorted by chirp position, but appended one after another

    # first bring the beat_cuts to the same length!
    ls_beat.append(beat_cut)

# ls_mod has length 160: 16 repetitions times 10 chirps per trial
# use the std as the measure of amplitude modulation?
# beat_mod = np.std(ls_beat)  # std of the window before the chirp
plt.figure()
plt.scatter(np.arange(0, len(ls_mod), 1), ls_mod)
plt.scatter(np.arange(0, len(ls_mod), 1), np.ones(len(ls_mod))/2, color='violet')
plt.show()
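As a sanity check on this std-based modulation measure, here is a minimal, self-contained sketch of the same computation on a synthetic amplitude envelope (not part of the diff): the envelope shape, chirp time, and modulation depth are made up; only the analysis windows (±10 ms around the chirp, -55 to -10 ms before it) come from the snippet above.

import numpy as np

# synthetic amplitude envelope: a slow beat plus a brief excursion at the "chirp"
t = np.arange(0, 200, 0.1)                    # time in ms
ampl = 1 + 0.3 * np.sin(2*np.pi * 0.05 * t)   # 50 Hz beat envelope (0.05 cycles/ms)
chirp = 120.0                                 # assumed chirp time in ms
ampl[(t > chirp-5) & (t < chirp+5)] += 0.4    # chirp-induced amplitude step

# same measure as above: std in the window around vs. before the chirp
chirp_mod = np.std(ampl[(t > chirp-10) & (t < chirp+10)])
beat_mod = np.std(ampl[(t > chirp-55) & (t < chirp-10)])
print(chirp_mod, beat_mod)  # the chirp window shows the larger modulation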
@@ -4,7 +4,7 @@ from utility import *
#import nix_helpers as nh
import matplotlib.pyplot as plt
import numpy as np
from IPython import embed  # import functions


data_dir = "../data"
@@ -40,18 +40,44 @@ plt.show()

chirp_spikes = read_chirp_spikes(os.path.join(data_dir, dataset))
df_map = map_keys(chirp_spikes)


ls_rate = {}
for i in df_map.keys():
    freq = list(df_map[i])
    ls_rate[i] = []
    for k in freq:
        spikes = chirp_spikes[k]
        phase_map = map_keys(spikes)
        for p in phase_map:
            spike_rate = 1. / np.diff(p)  # p is a key from map_keys; an ISI-based rate probably needs the spike times instead
        for phase in chirp_spikes[k]:
            spikes = chirp_spikes[k][phase]
            rate = len(spikes) / 1.2  # spike count over the trial window (1.2 s?)
            ls_rate[i].append(rate)


plt.figure()
sort_df = sorted(df_map.keys(), reverse=False)
print(sort_df)

for i in sort_df:
    plt.plot(np.arange(0, len(ls_rate[i]), 1), ls_rate[i], label=i)

for x in (10, 30, 50, 70, 90, 110, 130, 150):
    plt.vlines(x, ymin=200, ymax=300)
plt.legend()
plt.show()


print(spike_rate)
#
# plt.plot(spikes, rate)
# plt.show()

# mean firing rate per frequency, plotted against frequency

plt.figure()
ls_mean = []
for i in sort_df:
    mean = np.mean(ls_rate[i])
    ls_mean.append(mean)

plt.plot(np.arange(0, len(ls_mean), 1), ls_mean)
plt.show()
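The loop above mixes two rate estimates: an ISI-based instantaneous rate (1/np.diff of spike times) and a count-based per-trial rate (spike count over the 1.2 s trial). A short sketch contrasting the two on synthetic spike times (the 1.2 s duration comes from the snippet; the random spike train is made up):

import numpy as np

rng = np.random.default_rng(1)
duration = 1.2                                        # trial duration in s (from the script)
spike_times = np.sort(rng.uniform(0, duration, 300))  # ~250 Hz irregular spike train

rate_count = len(spike_times) / duration  # one number per trial
rate_isi = 1. / np.diff(spike_times)      # one value per interspike interval

print(rate_count, np.mean(rate_isi))  # the mean of 1/ISI is biased upward for irregular trains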
@@ -10,8 +10,10 @@ data_dir = "../data"
dataset = "2018-11-09-ad-invivo-1"
# parameters for binning, smoothing and plotting
num_bin = 12
window = 1
time_axis = np.arange(-50, 50, 1/sampling_rate)
bin_size = 1
spike_bins = np.arange(-50, 50+bin_size, bin_size)

# read data from files
spikes = read_chirp_spikes(os.path.join(data_dir, dataset))
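The step 1/sampling_rate in time_axis suggests sampling_rate is given in samples per millisecond. A sketch of how the millisecond bin edges map onto sample indices of an array aligned to time_axis (the value of 20 samples/ms, i.e. 20 kHz, is an assumption):

import numpy as np

sampling_rate = 20.0  # assumed: samples per ms (20 kHz)
bin_size = 1          # ms
time_axis = np.arange(-50, 50, 1/sampling_rate)
spike_bins = np.arange(-50, 50 + bin_size, bin_size)

# bin edges in ms -> indices into an array aligned to time_axis;
# the +50 ms offset maps the axis start (-50 ms) onto index 0
bin_idx = ((spike_bins + 50) * sampling_rate).astype(int)
print(bin_idx[:3], len(time_axis))  # [0 20 40] 2000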
@@ -60,22 +62,43 @@ for deltaf in df_map.keys():
        df_phase_time[deltaf][idx] = [spikes_cut]
        df_phase_binary[deltaf][idx] = binary_spikes

# for plotting and calculating iterate over delta f and phases
for df in df_phase_time.keys():
    for phase in df_phase_time[df].keys():

        # plot
        plot_trials = df_phase_time[df][phase]
        plot_trials_binary = np.mean(df_phase_binary[df][phase], axis=0)

        # calculation
        overall_spikerate = (np.sum(plot_trials_binary)/len(plot_trials_binary))*sampling_rate*1000

        '''
        spike_rate = np.zeros(len(spike_bins)-1)
        for idx in range(len(spike_bins)-1):
            # note: spike_bins starts at -50 ms, so these products are negative
            # for the first half; an offset (e.g. +50 ms) is needed for valid indices
            bin_start = spike_bins[idx]*sampling_rate
            bin_end = spike_bins[idx+1]*sampling_rate
            spike_rate[idx] = np.sum(plot_trials_binary[bin_start:bin_end])/bin_size*sampling_rate

        print(np.std(spike_rate))
        plt.plot(spike_rate)
        plt.show()

        embed()
        exit()
        '''

        smoothed_spikes = smooth(plot_trials_binary, window, 1./sampling_rate)

        fig, ax = plt.subplots(2, 1, sharex=True)
        for i, trial in enumerate(plot_trials):
            ax[0].scatter(trial, np.ones(len(trial))+i, marker='|', color='k')
        ax[1].plot(time_axis, smoothed_spikes*1000)

        ax[0].set_title(df)
        ax[0].set_ylabel('repetition', fontsize=12)

        ax[1].set_xlabel('time [ms]', fontsize=12)
        ax[1].set_ylabel('firing rate [Hz]', fontsize=12)
        print(overall_spikerate)
        plt.show()
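This loop estimates the firing rate in two ways: an overall rate from the trial-averaged binary train, and a time-resolved rate via the Gaussian smoothing defined in utility below. A minimal check of the overall-rate arithmetic on a synthetic binary train (sampling_rate in samples per ms as suggested by time_axis; the value 20 and the 200 Hz target are made up):

import numpy as np

sampling_rate = 20.0          # assumed: samples per ms
dt = 1.0 / sampling_rate      # ms per sample
n = int(100 * sampling_rate)  # 100 ms of samples

rng = np.random.default_rng(2)
binary = (rng.random(n) < 0.2 * dt).astype(float)  # ~200 Hz Bernoulli spike train

# as in the script: mean spikes/sample -> spikes/ms -> Hz
overall = np.sum(binary) / len(binary) * sampling_rate * 1000
print(overall)  # close to 200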
@@ -20,18 +20,16 @@ def vector_strength(spike_times, eod_durations):
    return vs


def gaussian(x, sig):
    y = np.exp(-0.5 * (x/sig)**2) / np.sqrt(2*np.pi)/sig
    return y


def smooth(data, window, dt):
    sigma = window
    time_gauss = np.arange(-4 * sigma, 4 * sigma, dt)
    gauss = gaussian(time_gauss, sigma)
    smoothed_data = np.convolve(data, gauss, 'same')
    return smoothed_data
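The rewritten gaussian is a proper probability density, so the kernel sampled at step dt integrates to ~1 and the convolution preserves the spike count; since dt is in milliseconds, the smoothed values come out in spikes per ms, which is why the plotting script multiplies by 1000 to label the axis in Hz. A quick check under assumed values (sigma = 1 ms, dt = 0.05 ms):

import numpy as np

def gaussian(x, sig):
    return np.exp(-0.5 * (x/sig)**2) / np.sqrt(2*np.pi) / sig

sigma, dt = 1.0, 0.05  # assumed kernel width and sample step, both in ms
time_gauss = np.arange(-4*sigma, 4*sigma, dt)
gauss = gaussian(time_gauss, sigma)
print(np.sum(gauss) * dt)  # ~1.0: the density integrates to one

binary = np.zeros(2000)
binary[1000] = 1.0  # a single spike
smoothed = np.convolve(binary, gauss, 'same')
print(np.sum(smoothed) * dt)  # ~1.0: smoothing preserves the spike count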
BIN papers/Dunlap_etal_HBehv_2002.pdf (new file, binary not shown)
BIN papers/Henninger_etal_JNeurosci_2018.pdf (new file, binary not shown)
BIN papers/Hupe_Lewis_JEB_2008.pdf (new file, binary not shown)
BIN papers/Hupe_Lewis_JExpBiol_2008.pdf (new file, binary not shown)
BIN papers/Hupe_etal_JPhysiol_2008.pdf (new file, binary not shown)
BIN papers/Zakon_Dunlap_BrainBehavEvol_1999.pdf (new file, binary not shown)