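"""Plotting routines for the combined model/data second-order susceptibility figure.

model_full() assembles the figure: the second-order susceptibility matrix of a model
cell together with power spectra of simulated (and optionally recorded) responses to
two superimposed beat stimuli. The helper functions below simulate the model, compute
the power spectra and load the saved susceptibility matrices.
"""
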
import numpy as np
import pandas as pd
from IPython import embed
from matplotlib import gridspec, pyplot as plt
import matplotlib.mlab as ml
from scipy.ndimage import gaussian_filter

from threefish.utils0 import find_code_vs_not, load_folder_name
from plotstyle import plot_style
from threefish.utils0 import resave_small_files, ISI_frequency, colorbar_outside, cr_spikes_mat, create_stimulus_SAM, \
    default_figsize, eod_fish_r_generation, extract_am, find_base_fr, find_base_fr2, plt_RAM_perc, \
    remove_xticks, remove_yticks, save_visualization, simulate, time
from threefish.utils1_suscept import all_damping_variants, calc_ps, check_var_substract_method, chose_certain_group, \
    colors_suscept_paper_dots, convert_csv_str_to_float, create_full_matrix2, default_model0, deltaf1_label, \
    deltaf2_label, deltat_choice, diagonal_points, diff_label, exclude_file_name_short, \
    extract_waves, fbasename_small, find_all_dir_cells, gaussian_intro, get_arrays_for_three, get_axis_on_full_matrix, \
    get_cutoffs_nr, get_file_names_exclude, get_mat_susept, get_phaseshifts, get_psds_ROC, labels_didactic2, \
    load_cells_three, log_calc_psd, nonlin_title, outputmodel, perc_model_full, plt_model_big, \
    plt_peaks_several, plt_psds_ROC, predefine_grouping_frame, rescale_colorbar_and_values, restrict_cell_type, \
    save_arrays_susept, sum_label, title_motivation, two_deltaf1_label, two_deltaf2_label, version_final
from threefish.utils0 import eod_fish_e_generation, join_y, load_b_public, set_clim_same, set_xlabel_arrow, \
    set_ylabel_arrow, chose_mat_max_value
from threefish.utils1_suscept import load_data_susept, restrict_punits  # test_spikes_clusters,

def model_full(c1=10, mult_type='_multsorted2_', devs=['05'], end='all', chose_score='mean_nrs',
               detections=['MeanTrialsIndexPhaseSort'], sorted_on='LocalReconst0.2NormAm', dfs=['m1', 'm2']):
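    """Assemble the combined susceptibility/power-spectrum figure.

    The left panel shows the second-order susceptibility matrix of the model cell
    '2012-07-03-ak-invivo-1' (plt_model_big); the right panels show power spectra of
    the model response for four (DF1, DF2) combinations given in multiples of the
    baseline rate (plt_model_full_model2). Letters B-E mark the corresponding
    positions in the matrix. The data-based panels can be switched on via
    data_extra / data_square_extra. The figure is saved with save_visualization().
    """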
    plot_style()
    default_figsize(column=2, length=2.7)
    grid = gridspec.GridSpec(1, 4, wspace=0.15, bottom=0.2, width_ratios=[3, 1, 1.5, 1.5], hspace=0.15,
                             top=0.85, left=0.075, right=0.98)

    axes = []

    ##################################
    # model part
    ls = '--'
    lw = 0.5
    ax = plt.subplot(grid[0])
    axes.append(ax)

    cell = '2012-07-03-ak-invivo-1'
    perc, im, stack_final, stack_saved = plt_model_big(ax, ls=ls, lw=0.75, cell=cell, lines=True)
    fr_waves = 139
    fr_noise = 120
    f1 = 33
    f2 = 139
    color01, color012, color01_2, color02, color0_burst, color0 = colors_suscept_paper_dots()

    ###############################
    # data part
    data_square_extra = False
    if data_square_extra:
        ax = plt.subplot(grid[0])
        axes.append(ax)
        cell = '2012-07-03-ak-invivo-1'
        mat_rev, stack_final_rev = load_stack_data_susept(cell, redo=True, save_name=version_final(),
                                                          end='_revQuadrant_')
        mat, stack = load_stack_data_susept(cell, save_name=version_final(), end='')

        new_keys, stack_plot = convert_csv_str_to_float(stack)
        # mat = RAM_norm_data(stack['isf'].iloc[0], stack_plot,
        #                     stack['snippets'].unique()[0], stack_here=stack)
        new_keys, stack_plot = convert_csv_str_to_float(stack_final_rev)
        # mat_rev = RAM_norm_data(stack_final_rev['isf'].iloc[0], stack_plot,
        #                         stack_final_rev['snippets'].unique()[0], stack_here=stack_final_rev)

        mat, add_nonlin_title, resize_val = rescale_colorbar_and_values(mat)
        mat_rev, add_nonlin_title, resize_val = rescale_colorbar_and_values(mat_rev, resize_val=resize_val)

        full_matrix = create_full_matrix2(np.array(mat), np.array(mat_rev))
        stack_final = get_axis_on_full_matrix(full_matrix, mat)

        abs_matrix = np.abs(stack_final)
        abs_matrix, add_nonlin_title, resize_val = rescale_colorbar_and_values(abs_matrix, add_nonlin_title='k')

        ax.axhline(0, color='white', linestyle=ls, linewidth=lw)
        ax.axvline(0, color='white', linestyle=ls, linewidth=lw)
        im = plt_RAM_perc(ax, perc, abs_matrix)
        cbar, left, bottom, width, height = colorbar_outside(ax, im, add=5, width=0.01)

        set_clim_same([im], mats=[abs_matrix], lim_type='up', nr_clim='perc', clims='', percnr=perc_model_full())
        # todo: change clim values with different Hz values
        cbar.set_label(nonlin_title(add_nonlin_title=' [' + add_nonlin_title), rotation=90, labelpad=8)

        set_ylabel_arrow(ax, xpos=-0.07, ypos=0.97)
        set_xlabel_arrow(ax, xpos=1, ypos=-0.07)
        # eod_fr, stack_spikes = plt_data_suscept_single(ax, cbar_label, cell, cells, f, fig, file_names_exclude,
        #                                                lp, title, width)
        cbar, left, bottom, width, height = colorbar_outside(ax, im, add=5, width=0.01)

    #################
    # power spectra of the data
    log = ''  # 'log'
    ylim_log = (-14.2, 3)
    nfft = 20000  # 2 ** 13
    xlim_psd = [0, 300]

    DF1_desired_orig = [133, 166]
    DF2_desired_orig = [-33, 53]

    markers = ['s', 'o']
    ms = 14
    data_extra = False
    if data_extra:
        DF1_desired, DF2_desired, fr, eod_fr, arrays_len = plt_data_full_model(c1, chose_score, detections, devs,
                                                                               dfs, end, grid[3], mult_type,
                                                                               sorted_on, ms=ms, markers=['s', 'o'],
                                                                               clip_on=True,
                                                                               DF2_desired=DF2_desired_orig,
                                                                               DF1_desired=DF1_desired_orig,
                                                                               alpha=[1, 1], log=log,
                                                                               ylim_log=ylim_log, nfft=nfft,
                                                                               xlim_psd=xlim_psd)
        fr_mult = fr / eod_fr

        multwise = False
        if multwise:
            DF1_frmult = np.abs((np.array(DF1_desired) - 1) / fr_mult)
            DF2_frmult = np.abs((np.array(DF2_desired) - 1) / fr_mult)
        else:
            DF1_frmult = np.array(DF1_desired_orig) / fr
            DF2_frmult = np.array(DF2_desired_orig) / fr
        DF1_frmult[0] = 1
        DF2_frmult[1] = 0.32339256
        print(DF1_frmult)
        print(DF2_frmult)

        grid0 = gridspec.GridSpecFromSubplotSpec(len(DF1_desired), 1, wspace=0.15, hspace=0.35,
                                                 subplot_spec=grid[2])
    else:
        markers = ['o', 'o', 'o', 'o']
        DF1_frmult = [0.28, 0.28, 1, 0.81]
        DF2_frmult = [1 - 0.28, 1.28, 0.28, 1.18]

        grid0 = gridspec.GridSpecFromSubplotSpec(2, 2, wspace=0.15, hspace=0.4,
                                                 subplot_spec=grid[2::])
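
    # DF1_frmult / DF2_frmult give the two beat frequencies in multiples of the baseline
    # firing rate; plt_model_full_model2 converts them back into absolute frequencies via
    # the simulated baseline rate, and the letters below are drawn at
    # (fr_noise * DF1_frmult, fr_noise * DF2_frmult) in the susceptibility matrix.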

    #################
    # power spectra of the model
    ylim_log = (-20, 5)

    #########################
    # points that belong to the second row
    diagonal = 'line'
    combis = diagonal_points()
    freq1_ratio = 1 / 2
    freq2_ratio = 2 / 3
    diagonal = 'diagonal1'  # 'vertical3', 'diagonal2', 'diagonal3', 'inside', 'horizontal', 'vertical'
    diagonal = 'test_data_cell_2022-01-05-aa-invivo-1'
    diagonal = 'diagonal1'

    freq1_ratio = combis[diagonal][0]
    freq2_ratio = combis[diagonal][1]
    diagonal = ''
    freq1_ratio = 1.17
    freq2_ratio = 0.37
    freq1_ratio = 1.2
    freq2_ratio = 0.7
    plus_q = 'plus'  # 'minus'
    way = ''  # 'mult' / 'absolut'
    ways = ['mult_minimum_1', 'absolut', 'mult_env_3', 'mult_f1_3', 'mult_f2_3', 'mult_minimum_3', 'mult_env_1',
            'mult_f1_1', 'mult_f2_1']
    length = 2 * 40  # stimulus length in seconds
    reshuffled = ''

    alphas = [1, 0.5]
    a_size = 0.0125
    a_size = 0.0085

    # ATTENTION: this cell ('2012-07-03-ak-invivo-1') needs longer stimulus segments, at least 5 seconds,
    # otherwise the power spectrum is too noisy!
    fr_noise, eod_fr_mm, axes2 = plt_model_full_model2(grid0, stack_final=stack_final, reshuffled=reshuffled,
                                                       dev=0.0005, a_f1s=[a_size], af_2=a_size,
                                                       stimulus_length=length, plus_q=plus_q, stack_saved=stack_saved,
                                                       diagonal=diagonal, runs=1, nfft=nfft, xlim_psd=xlim_psd,
                                                       ylim_log=ylim_log, cells=[cell], dev_spikes='05',
                                                       markers=markers, DF1_frmult=DF1_frmult, DF2_frmult=DF2_frmult,
                                                       log=log, ms=ms, clip_on=True, array_len=[1, 1, 1, 1, 1])

    if data_extra:
        DF2_hz = np.abs(np.array(DF1_desired) * eod_fr - eod_fr)
        DF1_hz = np.abs(np.array(DF2_desired) * eod_fr - eod_fr)
        for f in range(len(DF1_hz)):
            ax.plot(fr_noise * DF1_hz[f] / fr_waves, fr_noise * DF2_hz[f] / fr_waves, markers[f], ms=5,
                    markeredgecolor=color012, markerfacecolor="None")
            ax.plot(-fr_noise * DF1_hz[f] / fr_waves, fr_noise * DF2_hz[f] / fr_waves, markers[f], ms=5,
                    markeredgecolor=color01_2, markerfacecolor="None")
    else:
        letters_plot = True
        if letters_plot:
            letters = ['B', 'C', 'D', 'E']
            for f in range(len(DF1_frmult)):
                ax.text(fr_noise * DF1_frmult[f], fr_noise * DF2_frmult[f] - 1, letters[f], color=color012,
                        ha='center', va='center')
                ax.text(fr_noise * DF1_frmult[f] - 1, -(fr_noise * DF2_frmult[f] - 1), letters[f], color=color01_2,
                        ha='center', va='center')
        else:
            for f in range(len(DF1_frmult)):
                ax.plot(fr_noise * DF1_frmult[f], fr_noise * DF2_frmult[f] - 1, markers[f], ms=5,
                        markeredgecolor=color012, markerfacecolor="None")
                ax.plot(-(fr_noise * DF1_frmult[f] - 1), fr_noise * DF2_frmult[f] - 1, markers[f], ms=5,
                        markeredgecolor=color01_2, markerfacecolor="None")

    fig = plt.gcf()
    fig.tag([axes[0], axes2[0], axes2[1], axes2[2], axes2[3]], xoffs=-4.5, yoffs=1.2)

    save_visualization()


def plt_model_full_model(axp, min=0.2, cells=[], a_f2=0.1, perc=0.05, alpha=1, trials_nr=15,
                         single_waves=['_SingleWave_', '_SeveralWave_'], cell_start=13,
                         several_peaks_nr=2, a_f1s=[0, 0.005, 0.01, 0.05, 0.1, 0.2], a_frs=[1],
                         add_half=0, log='log', xlim=[0, 350], freqs_mult1=None, freqs_mult2=None,
                         nfft=int(2 ** 15), gain=1, us_name=''):
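    """Plot the power spectrum of simulated responses to one or two beats into axp.

    For every model cell and amplitude combination the stimulus is built from the
    receiver EOD plus one ('_SingleWave_') or two ('_SeveralWave_') additional
    sinusoids, the model is simulated trials_nr times, and the trial-averaged
    spike-train power spectrum is plotted with peaks highlighted at the baseline
    rate and at the beat frequencies, their harmonics or their combinations.

    Returns the baseline firing rate of the last simulated cell.
    """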
    model_cells = pd.read_csv(load_folder_name('calc_model_core') + "/models_big_fit_d_right.csv")
    if len(cells) < 1:
        cells = model_cells.cell.loc[range(cell_start, len(model_cells))]

    plot_style()
    fr = float('nan')
    for cell in cells:
        ###########################################
        ####### VARY HERE: things to vary
        for single_wave in single_waves:
            if single_wave == '_SingleWave_':
                a_f2s = [0]
            else:
                a_f2s = [a_f2]
            for a_f2 in a_f2s:
                titles_amp = ['base eodf', 'baseline to Zero']
                for a, a_fr in enumerate(a_frs):
                    ax = {}
                    for aa, a_f1 in enumerate(a_f1s):
                        SAM, cell, damping, damping_type, deltat, eod_fish_r, eod_fr, f1, f2, freqs1, freqs2, \
                            model_params, offset, phase_right, phaseshift_fr, rate_adapted, rate_baseline_after, \
                            rate_baseline_before, sampling, spike_adapted, spikes, stimuli, stimulus_altered, \
                            stimulus_length, time_array, v_dent_output, v_mem_output = outputmodel(
                                a_fr, add_half, cell, model_cells, single_wave, trials_nr,
                                freqs_mult1=freqs_mult1, freqs_mult2=freqs_mult2)

                        axp.show_spines('lb')

                        base_cut, mat_base = find_base_fr(spike_adapted, deltat, stimulus_length, time_array)
                        fr = np.mean(base_cut)
                        frate, isis_diff = ISI_frequency(time_array, spike_adapted[0], fill=0.0)
                        isi = np.diff(spike_adapted[0])
                        cv0 = np.std(isi) / np.mean(isi)
                        cv1 = np.std(frate) / np.mean(frate)

                        fs = 11
                        for ff, freq1 in enumerate(freqs1):
                            freq1 = [freq1]
                            freq2 = [freqs2[ff]]
                            beat1 = freq1 - eod_fr
                            titles = False
                            if titles:
                                plt.suptitle('diverging from half fr by ' + str(add_half) + ' f1:' + str(
                                    np.round(freq1)[0]) + ' f2:' + str(np.round(freq2)[0]) + ' Hz \n' + str(
                                    beat1) + ' Hz Beat\n' + titles[ff] + titles_amp[a] + ' ' + cell + ' cv ' + str(
                                    np.round(cv0, 3)) + '_a_f0_' + str(a_fr) + '_a_f1_' + str(a_f1) + '_a_f2_' + str(
                                    a_f2) + ' tr_nr ' + str(trials_nr))

                            # create the second eod_fish1 array analogous to the eod_fish_r array
                            phaseshift_f1, phaseshift_f2 = get_phaseshifts(a_f1, a_f2, phase_right, phaseshift_fr)
                            eod_fish1, time_fish_e = eod_fish_e_generation(time_array, a_f1, freq1, f1)
                            eod_fish2, time_fish_j = eod_fish_e_generation(time_array, a_f2, freq2, f2)

                            eod_stimulus = eod_fish1 + eod_fish2

                            for t in range(trials_nr):
                                stimulus, eod_fish_sam = create_stimulus_SAM(SAM, eod_stimulus, eod_fish_r, freq1,
                                                                             f1, eod_fr, time_array, a_f1)
                                stimulus_orig = stimulus * 1

                                # damping variants
                                std_dump, max_dump, range_dump, stimulus, damping_output = all_damping_variants(
                                    stimulus, time_array, damping_type, eod_fr, gain, damping, us_name, plot=False,
                                    std_dump=0, max_dump=0, range_dump=0)

                                stimuli.append(stimulus)

                                cvs, adapt_output, baseline_after, _, rate_adapted[t], rate_baseline_before[t], \
                                    rate_baseline_after[t], spikes[t], stimulus_altered[t], v_dent_output[t], \
                                    offset_new, v_mem_output[t], noise_final = simulate(cell, offset, stimulus,
                                                                                        adaptation_yes_e=f1,
                                                                                        **model_params)

                            stimulus_altered_output = np.mean(stimulus_altered, axis=0)

                            test_stimulus_stability = False

                            spikes_mat = [[]] * len(spikes)
                            pps = [[]] * len(spikes)
                            for s in range(len(spikes)):
                                spikes_mat[s] = cr_spikes_mat(spikes[s], 1 / deltat, int(stimulus_length * 1 / deltat))
                                pps[s], f = ml.psd(spikes_mat[s] - np.mean(spikes_mat[s]), Fs=1 / deltat, NFFT=nfft,
                                                   noverlap=nfft // 2)
                            pp_mean = np.mean(pps, axis=0)
                            sampling_rate = 1 / deltat
                            smoothed05 = gaussian_filter(spikes_mat, sigma=gaussian_intro() * sampling_rate)
                            mat05 = np.mean(smoothed05, axis=0)

                            beat1 = (freq1 - eod_fr)[0]
                            beat2 = (freq2 - eod_fr)[0]

                            test = False
                            if test:
                                ax = plt.subplot(1, 1, 1)
                                ax.axvline(fr, color='blue')
                                ax.axvline(beat1, color='green', linestyle='-.')
                                ax.axvline(beat2, color='purple', linestyle='--')
                                ax.plot(f, pp_mean)
                                plt.show()

                            nr = 2
                            color_01, color_012, color01_2, color_02, color0_burst, color0 = colors_suscept_paper_dots()
                            if 'Several' in single_wave:
                                freqs_beat = [fr, np.abs(beat1), np.abs(beat2),
                                              np.abs(np.abs(beat2) + np.abs(beat1)),
                                              np.abs(np.abs(beat2) - np.abs(beat1))]
                                colors = [color0, color_01, color_02, color_012, color01_2]
                                labels = ['', '', '', '', '']
                                add_texts = [nr, nr + 0.35, nr + 0.2, nr + 0.2, nr + 0.2]
                                texts_left = [-7, -7, -7, -7, -7]
                            else:
                                freqs_beat = [np.abs(beat1), np.abs(beat1) * 2, np.abs(beat1 * 3), np.abs(beat1 * 4)]
                                colors = ['black', 'orange', 'blue', 'purple', 'black']
                                add_texts = [nr + 0.1, nr + 0.1, nr + 0.1, nr + 0.1]
                                texts_left = [3, 0, 0, 0]
                                labels = labels_didactic2()

                            if (np.mean(stimulus) != 0) & (np.mean(stimulus) != 1):
                                eod_interp, eod_norm = extract_am(stimulus, time_array, sampling=sampling_rate,
                                                                  eodf=eod_fr, emb=False, extract='', norm=False)

                            for l in range(len(spikes)):
                                spikes[l] = (spikes[l] - min) * 1000

                            pp, f = ml.psd(mat05 - np.mean(mat05), Fs=1 / deltat, NFFT=nfft, noverlap=nfft // 2)
                            if log:
                                pp_mean = 10 * np.log10(pp_mean / np.max(pp_mean))

                            print(freqs_beat)
                            print(labels)
                            plt_peaks_several(freqs_beat, pp_mean, axp, pp_mean, f, labels, 0, colors, ha='center',
                                              add_texts=add_texts, texts_left=texts_left, add_log=2.5,
                                              rots=[0, 0, 0, 0, 0], several_peaks_nr=several_peaks_nr, exact=False,
                                              text_extra=True, perc_peaksize=perc, rel='rel',
                                              alphas=[alpha] * len(colors), ms=14, clip_on=True,
                                              several_peaks=True, log=log)

                            axp.plot(f, pp_mean, color='black', zorder=0)
                            axp.set_xlim(xlim)

                            test = False
                            if test:
                                test_spikes_clusters(eod_fish_r, spikes, mat05, sampling, s_name='ms', resamp_fact=1000)

    if log == 'log':
        axp.set_ylabel('dB')
    else:
        axp.set_ylabel('Amplitude [Hz]')
    axp.set_xlabel('Frequency [Hz]')
    return fr


def plt_model_full_model2(grid0, reshuffled='reshuffled', af_2=0.1, dev=0.0005, a_f1s=[0.03],
                          printing=False, plus_q='minus', diagonal='diagonal', stack_final=[],
                          stimulus_length=0.5, runs=1, cells=[], stack_saved=[],
                          nfft=int(4096), beat='', nfft_for_morph=4096 * 4, gain=1,
                          DF2_frmult=[], DF1_frmult=[], array_len=[20, 20, 20, 20, 20],
                          xlim_psd=[], ylim_log=[], fish_jammer='Alepto', markers=[], clip_on=True,
                          ms=14, us_name='', dev_spikes='original', log=''):
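    """Simulate the model for several (DF1, DF2) combinations and plot their power spectra.

    For each entry of DF1_frmult / DF2_frmult (beat frequencies in multiples of the
    baseline rate) one subplot of grid0 is filled with the power spectrum of the
    simulated response to both beats. Peaks at DF1, DF2, their sum and difference,
    and the baseline rate are labelled. If stack_final (the susceptibility matrix)
    is given, the peak heights expected from the second-order susceptibility are
    added as open circles.

    Returns the baseline rate and EOD frequency of the last simulated cell and the
    list of subplot axes.
    """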
    plot_style()
    model_cells = resave_small_files("models_big_fit_d_right.csv", load_folder='calc_model_core', index_col=True)

    if len(cells) < 1:
        cells = model_cells.cell  # use all fitted cells if none are given

    axes = []
    ps = []
    results_diff = pd.DataFrame()
    for g in range(len(DF2_frmult)):
        freq2_ratio = DF2_frmult[g]
        freq1_ratio = DF1_frmult[g]
        ax = plt.subplot(grid0[g])
        axes.append(ax)
        try:
            trials_nr = array_len[g]
        except:
            print('array nr something')
            embed()

        for cell_here in cells:
            ###########################################
            single_waves = ['_SeveralWave_']  # , '_SingleWave_'
            for single_wave in single_waves:
                if single_wave == '_SingleWave_':
                    a_f2s = [0]
                else:
                    a_f2s = [af_2]
                for a_f2 in a_f2s:
                    for a_f1 in a_f1s:
                        a_frs = [1]
                        titles_amp = ['base eodf']  # , 'baseline to Zero'
                        for a, a_fr in enumerate(a_frs):
                            model_params = model_cells[model_cells['cell'] == cell_here].iloc[0]
                            eod_fr = model_params['EODf']
                            offset = model_params.pop('v_offset')
                            cell = model_params.pop('cell')
                            print(cell)
                            SAM, adapt_offset, cell_recording, constant_reduction, damping, damping_type, \
                                dent_tau_change, exponential, f1, f2, fish_emitter, fish_receiver, \
                                fish_morph_harmonics_var, lower_tol, mimick, n, phase_right, phaseshift_fr, \
                                sampling_factor, upper_tol, zeros = default_model0()

                            # in case a different sampling is wanted, it can be adjusted here
                            time_array, sampling, deltat = deltat_choice(model_params, sampling_factor, eod_fr,
                                                                         stimulus_length)

                            # generate eod_fish_r in the four mimick variants (copy, thunderfish, mimick, just sinus)
                            eod_fish_r, deltat, eod_fr, time_array = eod_fish_r_generation(
                                time_array, eod_fr, a_fr, stimulus_length, phaseshift_fr, cell_recording, zeros,
                                mimick, sampling, fish_receiver, deltat, nfft, nfft_for_morph,
                                fish_morph_harmonics_var=fish_morph_harmonics_var, beat=beat)
                            sampling = 1 / deltat

                            # prepare for adapting the offset due to baseline modification
                            spikes_base = [[]] * trials_nr
                            color01, color012, color01_2, color02, color0_burst, color0 = colors_suscept_paper_dots()
                            for run in range(runs):
                                print(run)
                                t1 = time.time()
                                for t in range(trials_nr):
                                    # get the baseline properties here
                                    stimulus = eod_fish_r
                                    stimulus_base = eod_fish_r
                                    power_here = 'sinz'

                                    cvs, adapt_output, baseline_after_b, _, rate_adapted_b, rate_baseline_before_b, \
                                        rate_baseline_after_b, spikes_base[t], _, _, offset_new, _, noise_final = \
                                        simulate(cell, offset, stimulus, deltat=deltat, **model_params)

                                    if t == 0:
                                        # here we record the changes in the offset due to the adaptation
                                        change_offset = offset - offset_new
                                        # and we subsequently reset the offset to the new adapted value for all
                                        # subsequent trials
                                        offset = offset_new * 1

                                if printing:
                                    print('Baseline time' + str(time.time() - t1))
                                base_cut, mat_base = find_base_fr(spikes_base, deltat, stimulus_length, time_array,
                                                                  dev=dev)
                                fr = np.mean(base_cut)
                                if 'diagonal' in diagonal:
                                    two_third_fr = fr * freq2_ratio
                                    freq1_ratio = (1 - freq2_ratio)
                                    third_fr = fr * freq1_ratio
                                else:
                                    two_third_fr = fr * freq2_ratio
                                    try:
                                        third_fr = fr * freq1_ratio
                                    except:
                                        print('freq1 something')
                                        embed()

                                if plus_q == 'minus':
                                    two_third_fr = -two_third_fr
                                    third_fr = -third_fr
                                freqs2 = [eod_fr + two_third_fr]
                                freqs1 = [eod_fr + third_fr]

                                sampling_rate = 1 / deltat
                                base_cut, mat_base, smoothed0, mat0 = find_base_fr2(spikes_base, deltat,
                                                                                    stimulus_length, dev=dev)
                                frate, isis_diff = ISI_frequency(time_array, spikes_base[0], fill=0.0)
                                isi = np.diff(spikes_base[0])
                                cv0 = np.std(isi) / np.mean(isi)
                                cv1 = np.std(frate) / np.mean(frate)

                                for ff, freq1 in enumerate(freqs1):
                                    freq1 = [freq1]
                                    freq2 = [freqs2[ff]]

                                    # create the two foreign eod arrays analogous to the eod_fish_r array
                                    t1 = time.time()
                                    phaseshift_f1, phaseshift_f2 = get_phaseshifts(a_f1, a_f2, phase_right,
                                                                                   phaseshift_fr)
                                    eod_fish1, time_fish_e = eod_fish_e_generation(
                                        time_array, a_f1, freq1, f1, phaseshift_f1, sampling, stimulus_length,
                                        nfft_for_morph, cell_recording, fish_morph_harmonics_var, zeros, mimick,
                                        fish_emitter, thistype='emitter')
                                    eod_fish2, time_fish_j = eod_fish_e_generation(
                                        time_array, a_f2, freq2, f2, phaseshift_f2, sampling, stimulus_length,
                                        nfft_for_morph, cell_recording, fish_morph_harmonics_var, zeros, mimick,
                                        fish_jammer, thistype='jammer')

                                    eod_stimulus = eod_fish1 + eod_fish2

                                    v_mems, offset_new, mat01, mat02, mat012, smoothed01, smoothed02, smoothed012, \
                                        stimulus_01, stimulus_02, stimulus_012, mat05_01, spikes_01, mat05_02, \
                                        spikes_02, mat05_012, spikes_012 = get_arrays_for_three(
                                            cell, a_f2, a_f1, SAM, eod_stimulus, eod_fish_r, freq2, eod_fish1,
                                            eod_fish2, stimulus_length, offset, model_params, n, 'sinz',
                                            adapt_offset, deltat, f2, trials_nr, time_array, f1, freq1, eod_fr,
                                            reshuffle=reshuffled, dev=dev)
                                    if printing:
                                        print('Generation process' + str(time.time() - t1))

                                    if 'f1' not in results_diff.keys():
                                        results_diff['f1'] = freq1
                                        results_diff['f2'] = freq2
                                        results_diff['f0'] = eod_fr
                                        results_diff['fr'] = fr
                                    else:
                                        results_diff.loc[g, 'f1'] = freq1[0]
                                        results_diff.loc[g, 'f2'] = freq2[0]
                                        results_diff.loc[g, 'f0'] = eod_fr
                                        results_diff.loc[g, 'fr'] = fr

                                    if run == 0:
                                        ##################################
                                        # power spectrum
                                        if dev_spikes == 'original':
                                            p0, p02, p01, p012, fs = calc_ps(nfft, [np.mean(mat012, axis=0)],
                                                                             [np.mean(mat01, axis=0)],
                                                                             [np.mean(mat02, axis=0)],
                                                                             [np.mean(mat0, axis=0)],
                                                                             sampling_rate=sampling_rate)
                                        else:
                                            p0, p02, p01, p012, fs = calc_ps(nfft, smoothed012, smoothed01,
                                                                             smoothed02, smoothed0,
                                                                             sampling_rate=sampling_rate)

                                        ps.append(p012)
                                        p0_means = []
                                        for j in range(len(ps)):
                                            sampling = 40000
                                            try:
                                                ax = axes[j]
                                            except:
                                                print('axes something thing')
                                                embed()

                                            if log == 'log':
                                                p012 = log_calc_psd(log, ps[j], np.max(ps))
                                            else:
                                                p012 = ps[j]

                                            p0_mean = p012[0]
                                            p0_means.append(p0_mean)
                                            ax.plot(fs, p0_mean, color='black', zorder=100)

                                            DF1 = np.abs(results_diff.f1.iloc[j] - results_diff.f0.iloc[j])
                                            DF2 = np.abs(results_diff.f2.iloc[j] - results_diff.f0.iloc[j])
                                            fr = results_diff.fr.iloc[j]

                                            freqs = [np.abs(DF1), np.abs(DF2),
                                                     np.abs(np.abs(DF1) - np.abs(DF2)),
                                                     np.abs(DF1) + np.abs(DF2), fr]
                                            colors = [color01, color02, color01_2, color012, color0]
                                            labels = [deltaf1_label(), deltaf2_label(), diff_label(), sum_label(),
                                                      fbasename_small()]
                                            marker = markers[j]
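
                                            # Peak heights expected from the second-order susceptibility: the value
                                            # of stack_final closest to (DF1, DF2) (and to (-DF1, DF2) for the
                                            # difference frequency) is converted into a power-spectrum peak via
                                            # (adjust_factor * a_f1 * a_f2 * |chi2|)**2 / 4; adjust_factor is an
                                            # empirical scaling used for this figure.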
                                            if len(stack_final) > 0:
                                                adjust_factor = 5
                                                pos_y = np.argmin(np.abs(stack_final.index - np.abs(DF2)))
                                                pos_x = np.argmin(np.abs(stack_final.keys() - np.abs(DF1)))
                                                xi2_sum = stack_final[stack_final.keys()[pos_x]].iloc[pos_y]
                                                sum_peak = ((adjust_factor * a_f1 * a_f2 * np.abs(xi2_sum)) ** 2) / 4

                                                pos_x_minus = np.argmin(np.abs(stack_final.keys() + np.abs(DF1)))
                                                vals = stack_final[stack_final.keys()[pos_x_minus]][
                                                    stack_final[stack_final.keys()[pos_x_minus]].index > 0]
                                                xi2_diff = stack_final[stack_final.keys()[pos_x_minus]].iloc[pos_y]
                                                diff_peak = ((adjust_factor * a_f1 * a_f2 * np.abs(xi2_diff)) ** 2) / 4

                                                ax.scatter(np.abs(DF1) + np.abs(DF2), sum_peak, color='None',
                                                           edgecolor='black')
                                                ax.scatter(np.abs(np.abs(DF1) - np.abs(DF2)), diff_peak, color='None',
                                                           edgecolor='black')

                                            if len(stack_saved) > 0:
                                                print('todo: transfer values not available yet')
                                                # f_axis, vals = get_transferfunction(stack_saved)

                                            p_passed = plt_peaks_several(freqs, p0_mean, ax, p0_mean, fs, labels, 0,
                                                                         colors, emb=False, marker=marker,
                                                                         add_log=2.5, exact=False, perc_peaksize=0,
                                                                         ms=ms, clip_on=clip_on, log=log,
                                                                         add_not_log=350)

                                            ax.set_xlim(0, 300)
                                            print('sumpeak' + str(np.abs(xi2_sum)) + 'diffpeak' + str(np.abs(xi2_diff)))

                                            if j < 2:
                                                remove_xticks(ax)
                                                ax.set_xlabel('')
                                            else:
                                                ax.set_xlabel('Frequency [Hz]')

                                            if j == 0:
                                                ax.legend(ncol=6, loc=(-0.2, 1.35))

                                            if log == 'log':
                                                ax.set_xlim(xlim_psd)
                                                ax.set_ylim(ylim_log)
                                                if j in [1, 3]:
                                                    remove_yticks(ax)
                                                else:
                                                    ax.set_ylabel('dB')
                                            else:
                                                if j in [1, 3]:
                                                    remove_yticks(ax)
                                                else:
                                                    ax.set_ylabel('Frequency [Hz]')
                                            join_y(axes)

    axes.append(ax)
    return fr, eod_fr, axes


def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_type, sorted_on, ms=14, log='log',
                        markers=['s', 'o'], clip_on=False, alpha=[], DF2_desired=[-33, -100],
                        DF1_desired=[133, 66], ylim_log=(-15, 3), nfft=2 ** 13, xlim_psd=[0, 235]):
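    """Plot power spectra of recorded P-unit responses for the desired (DF1, DF2) pairs.

    For cell '2021-08-03-ac-invivo-1' the trials are grouped by contrast and beat
    frequencies, the available (DF1, DF2) combinations closest to DF1_desired /
    DF2_desired are selected, and for each combination the power spectrum of the
    response to both beats is plotted into grid (plt_psds_ROC).

    Returns the selected DF1 and DF2 values, the firing rate, the EOD frequency and
    the number of trials per combination.
    """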
    # mean_type = '_MeanTrialsIndexPhaseSort_Min0.25sExcluded_'
    extract = ''
    datasets, data_dir = find_all_dir_cells()
    cells = ['2022-01-28-ah-invivo-1']  # , '2022-01-28-af-invivo-1', '2022-01-28-ab-invivo-1', '2022-01-27-ab-invivo-1'
    append_others = 'apend_others'
    autodefine = '_DFdesired_'
    autodefine = 'triangle_diagonal_fr'  # 'triangle_fr', 'triangle_df2_fr', 'triangle_df2_eodf', 'triangle_df1_eodf'
    # 167 (167, 133) (166, 249) (133, 265) (167, -33); (166, 83) is a strange point
    autodefine = '_dfchosen_closest_'
    autodefine = '_dfchosen_closest_first_'
    cells = ['2021-08-03-ac-invivo-1']  # '2021-08-03-ad-invivo-1'
    minsetting = 'Min0.25sExcluded'
    c2 = 10
    extend_trials = ''  # 'extended'
    eodftype = '_psdEOD_'
    concat = ''  # 'TrialsConcat'
    indices = ['_allindices_']
    chirps = ['']  # '_ChirpsDelete3_'
    extract = ''  # '_globalmax_'
    devs_savename = ['original', '05']
    if len(cells) < 1:
        data_dir, cells = load_cells_three(end, data_dir=data_dir, datasets=datasets)
        cells, p_units_cells, pyramidals = restrict_cell_type(cells, 'p-units')
    start = 'min'
    cells = ['2021-08-03-ac-invivo-1']
    tag_cells = []
    fr = float('nan')
    eod_fr = float('nan')
    arrays_len = []
    for c, cell in enumerate(cells):
        counter_pic = 0
        contrasts = [c2]
        tag_cell = []
        for c, contrast in enumerate(contrasts):
            contrast_small = 'c2'
            contrast_big = 'c1'
            contrasts1 = [c1]
            for contrast1 in contrasts1:
                for devname_orig in devs:
                    datapoints = [1000]
                    for d in datapoints:
                        ################################
                        # prepare the desired DF1, DF2 combinations:
                        # the selection has to match here; we do not want an automatic definition,
                        # but rather the diagonal combinations closest to the desired values
                        df1 = []
                        df2 = []
                        for df in range(len(DF1_desired)):
                            divergnce, fr, pivot_chosen, max_val, max_x, max_y, mult, DF1, DF2, min_y, min_x, \
                                min_val, diff_cut = chose_mat_max_value(
                                    DF1_desired[df], DF2_desired[df], '', mult_type, eodftype, indices, cell,
                                    contrast_small, contrast_big, contrast1, dfs, start, devname_orig, contrast,
                                    autodefine=autodefine, cut_matrix='cut', chose_score=chose_score)
                            df1.append(DF1[0])
                            df2.append(DF2[0])

                        DF1_desired = df1
                        DF2_desired = df2

                        #######################################
                        # ROC part
                        version_comp, subfolder, mod_name_slash, mod_name, subfolder_path = find_code_vs_not()
                        b = load_b_public(c, cell, data_dir)

                        mt_sorted = predefine_grouping_frame(b, eodftype=eodftype, cell_name=cell)

                        counter_waves = 0

                        mt_sorted = mt_sorted[(mt_sorted['c2'] == c2) & (mt_sorted['c1'] == c1)]

                        test = False
                        if test:
                            mt_sorted[['DF1, DF2', 'm1, m2']]
                            mt_sorted['DF1, DF2']
                        ax00s = []
                        p_means_all_all = {}
                        choices = [[[0, 1, 2, 3, 6]] * 6, [[0, 1, 2, 3, 4]] * 6]
                        for gg in range(len(DF1_desired)):
                            grid0 = gridspec.GridSpecFromSubplotSpec(len(DF1_desired), 1, wspace=0.15, hspace=0.35,
                                                                     subplot_spec=grid)
                            t3 = time.time()
                            ax_w = []

                            ###################
                            # all trials in one
                            grouped = mt_sorted.groupby(['c1', 'c2', 'm1, m2'], as_index=False)
                            grouped_mean = chose_certain_group(DF1_desired[gg], DF2_desired[gg], grouped,
                                                               several=True, emb=False, concat=True)

                            ###################
                            # groups sorted by repro tag
                            # todo: possibly convert the tuples from the csv right here
                            grouped = mt_sorted.groupby(['c1', 'c2', 'm1, m2', 'repro_tag_id'], as_index=False)
                            grouped_orig = chose_certain_group(DF1_desired[gg], DF2_desired[gg], grouped,
                                                               several=True)
                            gr_trials = len(grouped_orig)
                            ###################

                            groups_variants = [grouped_mean]
                            group_mean = [grouped_orig[0][0], grouped_mean]

                            for d, detection in enumerate(detections):
                                mean_type = '_' + detection  # + '_' + minsetting + '_' + extend_trials + concat

                                ##############################################################
                                # load the plotting arrays
                                arrays, arrays_original, spikes_pure = save_arrays_susept(
                                    data_dir, cell, c, chirps, devs, extract, group_mean, mean_type, plot_group=0,
                                    rocextra=False, sorted_on=sorted_on)

                                ####################################################
                                # check whether the standard-deviation subtraction method works for this point
                                try:
                                    check_var_substract_method(spikes_pure)
                                except:
                                    print('var checking not possible')

                                ##########################################################################
                                # part with the power spectra
                                xlim = [0, 100]

                                ###########################################
                                stimulus_length = 0.3
                                deltat = 1 / 40000
                                eodf = np.mean(group_mean[1].eodf)
                                eod_fr = eodf

                                a_fr = 1
                                eod_fe = eodf + np.mean(group_mean[1].DF2)
                                a_fe = group_mean[0][1] / 100
                                eod_fj = eodf + np.mean(group_mean[1].DF1)
                                a_fj = group_mean[0][0] / 100
                                variant_cell = 'no'  # 'receiver_emitter_jammer'
                                print('f0' + str(eod_fr))
                                print('f1' + str(eod_fe))
                                print('f2' + str(eod_fj))
                                eod_fish_j, time_array, time_fish_r, eod_fish_r, time_fish_e, eod_fish_e, \
                                    time_fish_sam, eod_fish_sam, stimulus_am, stimulus_sam = extract_waves(
                                        variant_cell, '', stimulus_length, deltat, eod_fr, a_fr, a_fe, [eod_fe], 0,
                                        eod_fj, a_fj)

                                jammer_name = 'female'
                                cocktail_names = False
                                if cocktail_names:
                                    titles = ['receiver ', '+' + 'intruder ', '+' + jammer_name,
                                              '+' + jammer_name + '+intruder', []]
                                else:
                                    titles = title_motivation()
                                gs = [0, 1, 2, 3, 4]
                                waves_presents = [['receiver', '', '', 'all'],
                                                  ['receiver', 'emitter', '', 'all'],
                                                  ['receiver', '', 'jammer', 'all'],
                                                  ['receiver', 'emitter', 'jammer', 'all']]
                                symbols = ['']  # '$+$', '$-$', '$-$', '$=$'
                                symbols = ['', '', '', '', '']

                                time_array = time_array * 1000

                                color0 = 'green'
                                color0_burst = 'darkgreen'
                                color01 = 'green'
                                color02 = 'red'
                                color012 = 'orange'
                                color01_2 = 'purple'
                                color01, color012, color01_2, color02, color0_burst, color0 = colors_suscept_paper_dots()
                                colors_am = ['black', 'black', 'black', 'black']
                                extracted = [False, True, True, True]
                                extracted2 = [False, False, False, False]

                                printing = True
                                if printing:
                                    print(time.time() - t3)

                                ##########################################
                                # spike response
                                array_chosen = 1
                                if d == 0:
                                    # plot the psds
                                    p_means_all = {}
                                    names = ['0', '02', '01', '012']
                                    for j in range(len(arrays)):
                                        ########################################
                                        # get the corresponding psds;
                                        # here one can choose which power spectrum to compute
                                        f, nfft = get_psds_ROC(array_chosen, arrays, arrays_original, j, mean_type,
                                                               names, p_means_all, nfft=nfft)

                                    ps = {}
                                    p_means = {}
                                    ax_ps = []

                                    names = ['0', '02', '01', '012']
                                    colors_p = [color0, color02, color01, color012, color02, color01, color01_2,
                                                color0_burst, color0_burst, color0, color0]
                                    ax00 = plt.subplot(grid0[gg])
                                    ax00s.append(ax00)
                                    if gg == 0:
                                        ax00.text(1, 1, 'Data', ha='right', va='top', transform=ax00.transAxes)
                                    # todo: do not plot all four on top of each other, that is the problem!
                                    choice = choices[gg]
                                    arrays_len.append(len(spikes_pure['012']))

                                    labels = ['$f_{base}$',
                                              deltaf1_label(),
                                              deltaf2_label(),
                                              sum_label(),
                                              two_deltaf1_label(),
                                              two_deltaf2_label(),
                                              diff_label(),
                                              'fr_bc',
                                              'fr_given_burst_corr_individual', 'highest_fr_burst_corr_individual',
                                              'fr', 'fr_given',
                                              'highest_fr']
                                    if len(alpha) > 0:
                                        alphas = [alpha[gg]] * len(labels)
                                    else:
                                        alphas = []
                                    ax00, fr_isi = plt_psds_ROC(arrays, ax00, ax_ps, cell, colors_p, f, grid0,
                                                                group_mean, nfft, p_means, p_means_all, ps, 4,
                                                                spikes_pure, time_array, range_plot=[3], names=names,
                                                                ax01=ax00, ms=ms, clip_on=clip_on, xlim_psd=xlim_psd,
                                                                alphas=alphas, marker=markers[gg], choice=choice,
                                                                labels=labels, ylim_log=ylim_log, log=log,
                                                                text_extra=False)
                                    ax00.show_spines('b')
                                    if gg == 0:
                                        ax00.legend(ncol=6, loc=(-1.16, 1.1))
                                    if gg != len(DF1_desired) - 1:
                                        remove_xticks(ax00)
                                        ax00.set_xlabel('')

    axes = []
    axes.append(ax_w)
    return DF1_desired, DF2_desired, fr, eod_fr, arrays_len


def load_stack_data_susept(cell, save_name, end='', redo=False):
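    """Load the saved susceptibility matrix of one cell.

    From the saved RAM-susceptibility results the recording with the highest cutoff
    frequency is chosen, restricted to the lowest stimulus amplitude, the longest
    stimulus and the highest trial number. Returns the susceptibility matrix and
    the corresponding stack DataFrame.
    """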
    load_name = load_folder_name('calc_RAM') + '/' + save_name + end
    add = '_cell' + cell + end
    stack_cell = load_data_susept(load_name + '_' + cell + '.pkl', load_name + '_' + cell, add=add,
                                  load_version='csv', redo=redo)
    file_names_exclude = get_file_names_exclude()
    stack_cell = stack_cell[~stack_cell['file_name'].isin(file_names_exclude)]
    file_names = stack_cell.file_name.unique()
    file_names = exclude_file_name_short(file_names)
    cut_off_nr = get_cutoffs_nr(file_names)
    try:
        maxs = list(map(float, cut_off_nr))
    except:
        embed()
    file_names = file_names[np.argmax(maxs)]

    stack_file = stack_cell[stack_cell['file_name'] == file_names]
    amps = [np.min(stack_file.amp.unique())]
    amps = restrict_punits(cell, amps)
    amp = np.min(amps)
    stack_amps = stack_file[stack_file['amp'] == amp]

    lengths = stack_amps.stimulus_length.unique()
    trial_nr_double = stack_amps.trial_nr.unique()
    trial_nr = np.max(trial_nr_double)
    stack_final = stack_amps[
        (stack_amps['stimulus_length'] == np.max(lengths)) & (stack_amps.trial_nr == trial_nr)]
    mat, new_keys = get_mat_susept(stack_final)
    return mat, stack_final


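# Running this file as a script builds the figure with the default settings of
# model_full() and saves it via save_visualization(); the displayed (DF1, DF2)
# combinations can be adjusted through DF1_frmult / DF2_frmult inside model_full().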
if __name__ == '__main__':
    model_full()