This commit is contained in:
efish 2018-11-29 10:46:15 +01:00
commit 509be03d92
5 changed files with 189 additions and 169 deletions

View File

@@ -7,31 +7,45 @@ from IPython import embed
# plot and data values
inch_factor = 2.54
sampling_rate = 40000
data_dir = '../data'
#dataset = '2018-11-09-ad-invivo-1'
dataset = '2018-11-14-al-invivo-1'
dataset = '2018-11-09-ad-invivo-1'
#dataset = '2018-11-13-aa-invivo-1'
# read eod and time of baseline
time, eod = read_baseline_eod(os.path.join(data_dir, dataset))
eod_norm = eod - np.mean(eod)
# calculate eod times and indices by zero crossings
threshold = 0
shift_eod = np.roll(eod_norm, 1)
eod_times = time[(eod_norm >= threshold) & (shift_eod < threshold)]
eod_duration = eod_times[2]- eod_times[1] #time in s
eod_duration = eod_times[2]- eod_times[1]
# read spikes during baseline activity
spikes = read_baseline_spikes(os.path.join(data_dir, dataset))
spikes = read_baseline_spikes(os.path.join(data_dir, dataset)) #spikes in s
# calculate interspike intervals and plot them
interspikeintervals = np.diff(spikes)
interspikeintervals = np.diff(spikes)/eod_duration
fig, ax = plt.subplots(figsize=(20/inch_factor, 10/inch_factor))
plt.hist(interspikeintervals, bins=np.arange(0, np.max(interspikeintervals), 0.0001), color='royalblue')
plt.xlabel("time [ms]", fontsize = 22)
plt.hist(interspikeintervals, bins=np.arange(0, np.max(interspikeintervals), 0.1), color='royalblue')
plt.xlabel("eod cycles", fontsize = 22)
plt.xticks(fontsize = 18)
plt.ylabel("number of \n interspikeintervals", fontsize = 22)
plt.yticks(fontsize = 18)
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)
fig.tight_layout()
plt.show()
plt.show()
#plt.show()
#plt.savefig('isis.pdf')
exit()
plt.savefig('isis.png')
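
The hunk above normalizes the interspike intervals by the EOD period obtained from upward zero crossings, so the histogram is binned in EOD cycles rather than milliseconds. A minimal, self-contained sketch of that normalization, using a synthetic sine as a stand-in for the EOD returned by read_baseline_eod and hypothetical spike times:

import numpy as np

# Stand-in for the recorded baseline EOD: an 800 Hz sine sampled at 40 kHz.
sampling_rate = 40000
time = np.arange(0, 1.0, 1.0 / sampling_rate)
eod = np.sin(2 * np.pi * 800.0 * time)
eod_norm = eod - np.mean(eod)

# Upward zero crossings: previous sample below threshold, current sample at or above it.
threshold = 0
shift_eod = np.roll(eod_norm, 1)
crossings = (eod_norm >= threshold) & (shift_eod < threshold)
crossings[0] = False                              # np.roll wraps around, drop the first sample
eod_times = time[crossings]
eod_duration = np.median(np.diff(eod_times))      # duration of one EOD cycle in s

# Hypothetical spike times in s; express the interspike intervals in EOD cycles.
spikes = np.array([0.0100, 0.0125, 0.0150, 0.0187, 0.0200])
interspikeintervals = np.diff(spikes) / eod_duration
print(eod_duration, interspikeintervals)
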
@@ -85,10 +99,10 @@ plt.yticks(fontsize=18)
ax1.spines['top'].set_visible(False)
ax2 = ax1.twinx()
ax2.fill_between(time_axis, mu_eod+std_eod, mu_eod-std_eod, color='navy', alpha=0.5)
ax2.fill_between(time_axis, mu_eod+std_eod, mu_eod-std_eod, color='royalblue', alpha=0.5)
ax2.plot(time_axis, mu_eod, color='black', lw=2)
ax2.set_ylabel('voltage [mV]', fontsize=22)
ax2.tick_params(axis='y', labelcolor='navy')
ax2.tick_params(axis='y', labelcolor='royalblue')
ax2.spines['top'].set_visible(False)
plt.yticks(fontsize=18)

View File

@@ -54,16 +54,17 @@ for deltaf in df_map.keys():
# get spikes between 60 ms before and after the chirp
spikes_to_cut = np.asarray(spikes[rep][phase])
spikes_cut = spikes_to_cut[(spikes_to_cut > -cut_window) & (spikes_to_cut < cut_window)]
spikes_raster = spikes_to_cut[(spikes_to_cut > -cut_window+5) & (spikes_to_cut < cut_window-5)]
spikes_idx = np.round(spikes_cut*sampling_rate)
# also save as binary, 0 no spike, 1 spike
binary_spikes = np.isin(cut_range, spikes_idx)*1
# add the spikes to the dictionaries with the correct df and phase
if idx in df_phase_time[deltaf].keys():
df_phase_time[deltaf][idx].append(spikes_cut)
df_phase_time[deltaf][idx].append(spikes_raster)
df_phase_binary[deltaf][idx] = np.vstack((df_phase_binary[deltaf][idx], binary_spikes))
else:
df_phase_time[deltaf][idx] = [spikes_cut]
df_phase_time[deltaf][idx] = [spikes_raster]
df_phase_binary[deltaf][idx] = binary_spikes
@@ -80,15 +81,13 @@ for df in df_phase_time.keys():
smoothed_spikes = smooth(plot_trials_binary, window, 1./sampling_rate)
fig, ax = plt.subplots(2, 1, sharex=True, figsize=(20/inch_factor, 15/inch_factor))
fig, ax = plt.subplots(2, 1, sharex=True, figsize=(18/inch_factor, 13/inch_factor))
for i, trial in enumerate(plot_trials):
ax[0].scatter(trial, np.ones(len(trial))+i, marker='|', color='k')
ax[1].plot(time_axis, smoothed_spikes*1000, color='royalblue', lw = 2)
ax[1].plot(time_axis[0+5*sampling_rate:-5*sampling_rate], smoothed_spikes[0+5*sampling_rate:-5*sampling_rate]*1000, color='royalblue', lw = 2)
ax[0].set_title('df = %s Hz' %(df))
ax[0].set_title('df = %s Hz' %(df), fontsize = 18)
ax[0].set_ylabel('repetition', fontsize=22)
ax[0].yaxis.set_label_coords(-0.1, 0.5)
ax[0].set_yticks(np.arange(1, len(plot_trials)+1,2))
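
The two hunks above trim 5 ms from either side of the raster and of the plotted firing rate, so edge artifacts of the smoothing kernel stay out of the figure. A minimal sketch of the underlying raster/PSTH pipeline, with a boxcar filter standing in for the project's smooth() helper (whose kernel is not shown in the diff) and hypothetical spike times:

import numpy as np
import matplotlib.pyplot as plt

sampling_rate = 40.0            # kHz, so one sample lasts 1/40 ms
cut_window = 60.0               # ms around the chirp
cut_range = np.arange(-cut_window * sampling_rate, cut_window * sampling_rate, 1)
time_axis = cut_range / sampling_rate

# Hypothetical spike times (ms, relative to the chirp) for five repetitions.
rng = np.random.default_rng(0)
plot_trials = [np.sort(rng.uniform(-cut_window, cut_window, 40)) for _ in range(5)]

def smooth(binary, width_ms, dt_ms):
    # boxcar-smoothed spike probability per sample; stand-in for the project's smooth()
    n = int(width_ms / dt_ms)
    return np.convolve(binary, np.ones(n) / n, mode='same')

binary = np.vstack([np.isin(cut_range, np.round(t * sampling_rate)) * 1 for t in plot_trials])
rate = smooth(np.mean(binary, axis=0), 1.0, 1.0 / sampling_rate) * sampling_rate * 1000  # Hz

fig, ax = plt.subplots(2, 1, sharex=True)
for i, trial in enumerate(plot_trials):
    ax[0].scatter(trial, np.ones(len(trial)) + i, marker='|', color='k')
margin = int(5 * sampling_rate)                     # drop 5 ms at both edges of the rate
ax[1].plot(time_axis[margin:-margin], rate[margin:-margin], color='royalblue', lw=2)
ax[0].set_ylabel('repetition')
ax[1].set_xlabel('time [ms]')
ax[1].set_ylabel('firing rate [Hz]')
plt.show()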

View File

@@ -13,7 +13,7 @@ cut_range = np.arange(-cut_window * sampling_rate, 0, 1)
window = 1
# norm: -150, 150, 300 aa, #ac, aj??
data = ["2018-11-13-al-invivo-1"]#, "2018-11-13-ad-invivo-1", "2018-11-13-ah-invivo-1", "2018-11-13-ai-invivo-1",
data = ["2018-11-13-aa-invivo-1"]#, "2018-11-13-ad-invivo-1", "2018-11-13-ah-invivo-1", "2018-11-13-ai-invivo-1",
#"2018-11-13-ak-invivo-1", "2018-11-13-al-invivo-1"]
'''
@@ -67,10 +67,10 @@ for dataset in data:
binary_spikes = np.isin(cut_range, spikes_idx) * 1
smoothed_data = smooth(binary_spikes, window, 1 / sampling_rate)
train = smoothed_data[window:beat_window+window]
norm_train = train*1000#/spikerate
norm_train = train*1000/spikerate
rep_rates.append(np.std(norm_train))#/spikerate)
break
df_rate = np.median(rep_rates)/spikerate
df_rate = np.mean(rep_rates)
#embed()
#exit()
if df in rates.keys():
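
The change above normalizes each smoothed train by the cell's baseline spike rate before taking the standard deviation over one beat window, and averages over repetitions instead of dividing the median by the spike rate afterwards. A rough sketch of that modulation measure, with a boxcar smoother as a stand-in for smooth() and synthetic binary trains; the parameter names follow the script, but the unit handling is an assumption:

import numpy as np

def beat_modulation(binary_trains, spikerate, beat_window, smooth_samples, sampling_rate_khz):
    # Std of the baseline-normalized smoothed response over one beat window,
    # averaged across repetitions; the boxcar filter is a stand-in for smooth().
    kernel = np.ones(smooth_samples) / smooth_samples
    rep_rates = []
    for binary in binary_trains:
        smoothed = np.convolve(binary, kernel, mode='same')        # spike probability per sample
        rate = smoothed * sampling_rate_khz * 1000.0                # firing rate in Hz
        train = rate[smooth_samples:beat_window + smooth_samples]   # skip the smoothing edge
        rep_rates.append(np.std(train / spikerate))                 # modulation relative to baseline
    return np.mean(rep_rates)

# Synthetic example: five repetitions, 20 ms of samples at 40 kHz, 10 ms beat window.
rng = np.random.default_rng(1)
trains = (rng.random((5, 800)) < 0.01).astype(int)
print(beat_modulation(trains, spikerate=200.0, beat_window=400, smooth_samples=40, sampling_rate_khz=40.0))
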

View File

@@ -8,7 +8,8 @@ from IPython import embed
# define sampling rate and data path
sampling_rate = 40 #kHz
data_dir = "../data"
#dataset = "2018-11-13-al-invivo-1"
dataset = "2018-11-13-ah-invivo-1"
'''
data = ["2018-11-09-ad-invivo-1", "2018-11-09-ae-invivo-1", "2018-11-09-ag-invivo-1", "2018-11-13-aa-invivo-1",
"2018-11-13-ac-invivo-1", "2018-11-13-ad-invivo-1", "2018-11-13-ah-invivo-1", "2018-11-13-ai-invivo-1",
@@ -18,9 +19,10 @@ data = ["2018-11-09-ad-invivo-1", "2018-11-09-ae-invivo-1", "2018-11-09-ag-inviv
"2018-11-20-aa-invivo-1", "2018-11-20-ab-invivo-1", "2018-11-20-ac-invivo-1", "2018-11-20-ad-invivo-1",
"2018-11-20-ae-invivo-1", "2018-11-20-af-invivo-1", "2018-11-20-ag-invivo-1", "2018-11-20-ah-invivo-1",
"2018-11-20-ai-invivo-1"]
'''
data = ["2018-11-13-aa-invivo-1", "2018-11-13-ac-invivo-1", "2018-11-13-ad-invivo-1", "2018-11-13-ah-invivo-1",
"2018-11-13-ai-invivo-1", "2018-11-13-aj-invivo-1", "2018-11-13-ak-invivo-1", "2018-11-13-al-invivo-1"]
'''
# parameters for binning, smoothing and plotting
cut_window = 20
@@ -53,147 +55,152 @@ df_phase_binary = {}
#embed()
#exit()
for dataset in data:
spikes = read_chirp_spikes(os.path.join(data_dir, dataset))
df_map = map_keys(spikes)
print(dataset)
# iterate over delta f, repetition, phases and a single chirp
for deltaf in df_map.keys():
df_phase_time[deltaf] = {}
df_phase_binary[deltaf] = {}
for rep in df_map[deltaf]:
chirp_size = int(rep[-1].strip('Hz'))
#print(chirp_size)
if chirp_size == 150:
continue
for phase in spikes[rep]:
for idx in np.arange(number_bins):
# check the phase
if phase[1] > phase_vec[idx] and phase[1] < phase_vec[idx+1]:
# get spikes between 40 ms before and after the chirp
spikes_to_cut = np.asarray(spikes[rep][phase])
spikes_cut = spikes_to_cut[(spikes_to_cut > -cut_window*2) & (spikes_to_cut < cut_window*2)]
spikes_idx = np.round(spikes_cut*sampling_rate)
# also save as binary, 0 no spike, 1 spike
binary_spikes = np.isin(cut_range, spikes_idx)*1
# add the spikes to the dictionaries with the correct df and phase
if idx in df_phase_time[deltaf].keys():
df_phase_time[deltaf][idx].append(spikes_cut)
df_phase_binary[deltaf][idx] = np.vstack((df_phase_binary[deltaf][idx], binary_spikes))
else:
df_phase_time[deltaf][idx] = [spikes_cut]
df_phase_binary[deltaf][idx] = binary_spikes
# make dictionaries for csi and beat
csi_trains = {}
csi_rates = {}
beat = {}
# for plotting and calculating iterate over delta f and phases
for df in df_phase_time.keys():
csi_trains[df] = []
csi_rates[df] = []
beat[df] = []
beat_duration = int(abs(1/df*1000)*sampling_rate) #steps
beat_window = 0
# beat window is at most 20 ms long, multiples of beat_duration
while beat_window+beat_duration <= cut_window*sampling_rate:
beat_window = beat_window+beat_duration
for phase in df_phase_time[df].keys():
# csi calculation
# trains for synchrony and rate
trials_binary = df_phase_binary[df][phase]
train_chirp = []
train_beat = []
#csi_spikerate = []
for i, trial in enumerate(trials_binary):
smoothed_trial = smooth(trial, window, 1/sampling_rate)
train_chirp.append(smoothed_trial[chirp_start:chirp_end])
train_beat.append(smoothed_trial[chirp_start-beat_window:chirp_start])
#std_chirp = np.std(smoothed_trial[chirp_start:chirp_end])
#std_beat = np.std(smoothed_trial[chirp_start-beat_window:chirp_start])
#csi = (std_chirp - std_beat)/(std_chirp + std_beat)
#csi_spikerate.append(csi)
std_chirp = np.std(np.mean(train_chirp, axis=0))
std_beat = np.std(np.mean(train_beat, axis=0))
beat[df].append(std_beat)
csi_spikerate = (std_chirp - std_beat) / (std_chirp + std_beat)
rcs = []
rbs = []
for i, train in enumerate(train_chirp):
for j, train2 in enumerate(train_chirp):
if i >= j:
continue
#for dataset in data:
spikes = read_chirp_spikes(os.path.join(data_dir, dataset))
df_map = map_keys(spikes)
print(dataset)
# iterate over delta f, repetition, phases and a single chirp
for deltaf in df_map.keys():
df_phase_time[deltaf] = {}
df_phase_binary[deltaf] = {}
for rep in df_map[deltaf]:
chirp_size = int(rep[-1].strip('Hz'))
#print(chirp_size)
if chirp_size == 150:
continue
for phase in spikes[rep]:
for idx in np.arange(number_bins):
# check the phase
if phase[1] > phase_vec[idx] and phase[1] < phase_vec[idx+1]:
# get spikes between 40 ms before and after the chirp
spikes_to_cut = np.asarray(spikes[rep][phase])
spikes_cut = spikes_to_cut[(spikes_to_cut > -cut_window*2) & (spikes_to_cut < cut_window*2)]
spikes_idx = np.round(spikes_cut*sampling_rate)
# also save as binary, 0 no spike, 1 spike
binary_spikes = np.isin(cut_range, spikes_idx)*1
# add the spikes to the dictionaries with the correct df and phase
if idx in df_phase_time[deltaf].keys():
df_phase_time[deltaf][idx].append(spikes_cut)
df_phase_binary[deltaf][idx] = np.vstack((df_phase_binary[deltaf][idx], binary_spikes))
else:
rc, _ = ss.pearsonr(train, train2)
rb, _ = ss.pearsonr(train_beat[i], train_beat[j])
rcs.append(rc)
rbs.append(rb)
r_train_chirp = np.mean(rcs)
r_train_beat = np.mean(rbs)
csi_train = (r_train_chirp - r_train_beat) / (r_train_chirp + r_train_beat)
# add the csi to the dictionaries with the correct df and phase
csi_trains[df].append(csi_train)
csi_rates[df].append(np.mean(csi_spikerate))
'''
# plot
plot_trials = df_phase_time[df][phase]
plot_trials_binary = np.mean(df_phase_binary[df][phase], axis=0)
# calculation
#overall_spikerate = (np.sum(plot_trials_binary)/len(plot_trials_binary))*sampling_rate*1000
smoothed_spikes = smooth(plot_trials_binary, window, 1./sampling_rate)
fig, ax = plt.subplots(2, 1, sharex=True)
for i, trial in enumerate(plot_trials):
ax[0].scatter(trial, np.ones(len(trial))+i, marker='|', color='k')
ax[1].plot(time_axis, smoothed_spikes*1000)
ax[0].set_title(df)
ax[0].set_ylabel('repetition', fontsize=12)
ax[1].set_xlabel('time [ms]', fontsize=12)
ax[1].set_ylabel('firing rate [Hz]', fontsize=12)
plt.show()
'''
'''
fig, ax = plt.subplots()
for i, k in enumerate(sorted(csi_rates.keys())):
ax.scatter(np.ones(len(csi_rates[k]))*i, csi_rates[k], s=20)
#ax.plot(i, np.mean(csi_rates[k]), 'o', markersize=15)
ax.legend(sorted(csi_rates.keys()), loc='upper left', bbox_to_anchor=(1.04, 1))
ax.plot(np.arange(-1, len(csi_rates.keys())+1), np.zeros(len(csi_rates.keys())+2), 'silver', linewidth=2, linestyle='--')
#ax.set_xticklabels(sorted(csi_rates.keys()))
fig.tight_layout()
plt.show()
fig, ax = plt.subplots()
for i, k in enumerate(sorted(csi_trains.keys())):
ax.plot(np.ones(len(csi_trains[k]))*i, csi_trains[k], 'o')
#ax.plot(i, np.mean(csi_trains[k]), 'o', markersize=15)
ax.legend(sorted(csi_trains.keys()), loc='upper left', bbox_to_anchor=(1.04, 1))
ax.plot(np.arange(-1, len(csi_trains.keys())+1), np.zeros(len(csi_trains.keys())+2), 'silver', linewidth=2, linestyle='--')
#ax.set_xticklabels(sorted(csi_trains.keys()))
fig.tight_layout()
plt.show()
'''
fig, ax = plt.subplots()
for i, k in enumerate(sorted(beat.keys())):
ax.plot(np.ones(len(beat[k]))*i, beat[k], 'o')
ax.legend(sorted(beat.keys()), loc='upper left', bbox_to_anchor=(1.04, 1))
#ax.set_xticklabels(sorted(csi_trains.keys()))
fig.tight_layout()
plt.show()
df_phase_time[deltaf][idx] = [spikes_cut]
df_phase_binary[deltaf][idx] = binary_spikes
# make dictionaries for csi and beat
csi_trains = {}
csi_rates = {}
beat = {}
# for plotting and calculating iterate over delta f and phases
for df in df_phase_time.keys():
csi_trains[df] = []
csi_rates[df] = []
beat[df] = []
beat_duration = int(abs(1/df*1000)*sampling_rate) #steps
beat_window = 0
# beat window is at most 20 ms long, multiples of beat_duration
while beat_window+beat_duration <= cut_window*sampling_rate:
beat_window = beat_window+beat_duration
for phase in df_phase_time[df].keys():
# csi calculation
# trains for synchrony and rate
trials_binary = df_phase_binary[df][phase]
train_chirp = []
train_beat = []
#csi_spikerate = []
for i, trial in enumerate(trials_binary):
smoothed_trial = smooth(trial, window, 1/sampling_rate)
train_chirp.append(smoothed_trial[chirp_start:chirp_end])
train_beat.append(smoothed_trial[chirp_start-beat_window:chirp_start])
#std_chirp = np.std(smoothed_trial[chirp_start:chirp_end])
#std_beat = np.std(smoothed_trial[chirp_start-beat_window:chirp_start])
#csi = (std_chirp - std_beat)/(std_chirp + std_beat)
#csi_spikerate.append(csi)
std_chirp = np.std(np.mean(train_chirp, axis=0))
std_beat = np.std(np.mean(train_beat, axis=0))
beat[df].append(std_beat)
csi_spikerate = (std_chirp - std_beat) / (std_chirp + std_beat)
rcs = []
rbs = []
for i, train in enumerate(train_chirp):
for j, train2 in enumerate(train_chirp):
if i >= j:
continue
else:
rc, _ = ss.pearsonr(train, train2)
rb, _ = ss.pearsonr(train_beat[i], train_beat[j])
rcs.append(rc)
rbs.append(rb)
r_train_chirp = np.mean(rcs)
r_train_beat = np.mean(rbs)
csi_train = (r_train_chirp - r_train_beat) / (r_train_chirp + r_train_beat)
# add the csi to the dictionaries with the correct df and phase
csi_trains[df].append(csi_train)
csi_rates[df].append(np.mean(csi_spikerate))
'''
# plot
plot_trials = df_phase_time[df][phase]
plot_trials_binary = np.mean(df_phase_binary[df][phase], axis=0)
# calculation
#overall_spikerate = (np.sum(plot_trials_binary)/len(plot_trials_binary))*sampling_rate*1000
smoothed_spikes = smooth(plot_trials_binary, window, 1./sampling_rate)
fig, ax = plt.subplots(2, 1, sharex=True)
for i, trial in enumerate(plot_trials):
ax[0].scatter(trial, np.ones(len(trial))+i, marker='|', color='k')
ax[1].plot(time_axis, smoothed_spikes*1000)
ax[0].set_title(df)
ax[0].set_ylabel('repetition', fontsize=12)
ax[1].set_xlabel('time [ms]', fontsize=12)
ax[1].set_ylabel('firing rate [Hz]', fontsize=12)
plt.show()
'''
upper_limit = np.max(sorted(csi_rates.keys()))+30
lower_limit = np.min(sorted(csi_rates.keys()))-30
fig, ax = plt.subplots()
for i, k in enumerate(sorted(csi_rates.keys())):
ax.scatter(np.ones(len(csi_rates[k]))*k, csi_rates[k], s=20)
#ax.plot(i, np.mean(csi_rates[k]), 'o', markersize=15)
#ax.legend(sorted(csi_rates.keys()), loc='upper left', bbox_to_anchor=(1.04, 1))
ax.plot([lower_limit, upper_limit], np.zeros(2), 'silver', linewidth=2, linestyle='--')
#ax.set_xticklabels(sorted(csi_rates.keys()))
fig.tight_layout()
plt.show()
'''
fig, ax = plt.subplots()
for i, k in enumerate(sorted(csi_trains.keys())):
ax.plot(np.ones(len(csi_trains[k]))*i, csi_trains[k], 'o')
#ax.plot(i, np.mean(csi_trains[k]), 'o', markersize=15)
ax.legend(sorted(csi_trains.keys()), loc='upper left', bbox_to_anchor=(1.04, 1))
ax.plot(np.arange(-1, len(csi_trains.keys())+1), np.zeros(len(csi_trains.keys())+2), 'silver', linewidth=2, linestyle='--')
#ax.set_xticklabels(sorted(csi_trains.keys()))
fig.tight_layout()
plt.show()
'''
'''
fig, ax = plt.subplots()
for i, k in enumerate(sorted(beat.keys())):
ax.plot(np.ones(len(beat[k]))*i, beat[k], 'o')
ax.legend(sorted(beat.keys()), loc='upper left', bbox_to_anchor=(1.04, 1))
#ax.set_xticklabels(sorted(csi_trains.keys()))
fig.tight_layout()
plt.show()
'''
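
The re-indented block computes two chirp-selectivity measures per delta f and phase bin: a rate-based contrast from the standard deviation of the trial-averaged smoothed train in the chirp window versus one beat window before it, and a synchrony-based contrast from the mean pairwise Pearson correlation of the single-trial trains in the same two windows. A compact sketch of both, assuming the smoothed per-trial trains have already been cut into chirp and beat segments:

import numpy as np
import scipy.stats as ss

def csi_rate(train_chirp, train_beat):
    # (std_chirp - std_beat) / (std_chirp + std_beat) of the trial-averaged smoothed trains
    std_chirp = np.std(np.mean(train_chirp, axis=0))
    std_beat = np.std(np.mean(train_beat, axis=0))
    return (std_chirp - std_beat) / (std_chirp + std_beat)

def csi_correlation(train_chirp, train_beat):
    # same contrast, but on the mean pairwise Pearson correlation across trials
    rcs, rbs = [], []
    for i in range(len(train_chirp)):
        for j in range(i + 1, len(train_chirp)):
            rcs.append(ss.pearsonr(train_chirp[i], train_chirp[j])[0])
            rbs.append(ss.pearsonr(train_beat[i], train_beat[j])[0])
    r_chirp, r_beat = np.mean(rcs), np.mean(rbs)
    return (r_chirp - r_beat) / (r_chirp + r_beat)

# Hypothetical smoothed trains: 5 trials, 400 samples each for chirp and beat windows.
rng = np.random.default_rng(2)
chirp = rng.random((5, 400)) + np.sin(np.linspace(0, 2 * np.pi, 400))  # shared modulation
beat = rng.random((5, 400))
print(csi_rate(chirp, beat), csi_correlation(chirp, beat))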

View File

@@ -35,13 +35,13 @@ for k, t in enumerate(time):
p += f * stepsize
signal[k] = a * np.sin(6.28318530717959 * p)
fig = plt.figure(figsize = (20/inch_factor, 15/inch_factor))
fig = plt.figure(figsize = (20/inch_factor, 12/inch_factor))
ax1 = fig.add_subplot(211)
plt.yticks(fontsize=18)
ax2 = fig.add_subplot(212, sharex=ax1)
plt.setp(ax1.get_xticklabels(), visible=False)
ax1.plot(time*1000, signal, color = 'midnightblue', lw = 1)
ax2.plot(time*1000, freq, color = 'midnightblue', lw = 3)
ax1.plot(time*1000, signal, color = 'royalblue', lw = 1)
ax2.plot(time*1000, freq, color = 'royalblue', lw = 3)
ax1.set_ylabel("field [mV]", fontsize = 22)