update zusammenarbeit

This commit is contained in:
saschuta 2024-04-11 13:21:46 +02:00
parent 354de49294
commit 506a387679
95 changed files with 348062 additions and 179534 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 95 KiB

After

Width:  |  Height:  |  Size: 101 KiB

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 142 KiB

After

Width:  |  Height:  |  Size: 146 KiB

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 142 KiB

After

Width:  |  Height:  |  Size: 147 KiB

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 80 KiB

After

Width:  |  Height:  |  Size: 80 KiB

View File

@ -7,7 +7,7 @@ import numpy as np
from utils_all_down import default_settings
from utils_suseptibility import colors_overview
from utils_suseptibility import default_figsize, NLI_scorename2,pearson_label, exclude_nans_for_corr, kernel_scatter, \
plt_burst_modulation_hists, \
scatter_with_marginals_colorcoded, \
version_final
from utils_all import update_cell_names, load_overview_susept, make_log_ticks, p_units_to_show, save_visualization, setting_overview_score
from scipy import stats
@ -114,7 +114,7 @@ def data_overview3():
#ax_here = []
#axd = plt.subplot(grid_lower_lower[0, c])
# embed()
#kernel_histogram(axk, colors[str(cell_type_here)], np.array(x_axis), norm=True, step=0.03, alpha=0.5)
#embed()
@ -166,11 +166,12 @@ def data_overview3():
xlimk = None
labelpad = 0.5#-1
cmap, _, y_axis = plt_burst_modulation_hists(axx, axy, var_item_names[v], axs, cell_type_here, x_axis[v],
frame_file, scores_here[v], ymin=ymin, xmin=xmin,
burst_fraction_reset=burst_corr_reset, var_item=var_type,
max_x=max_x[v], xlim=xlimk, x_pos=1, labelpad = labelpad,
burst_fraction=burst_fraction[c], ha='right')
cmap, _, y_axis = scatter_with_marginals_colorcoded(var_item_names[v], axs, cell_type_here, x_axis[v],
frame_file, scores_here[v], axy, axx, ymin=ymin,
xmin=xmin, burst_fraction_reset=burst_corr_reset,
var_item=var_type, labelpad=labelpad, max_x=max_x[v],
xlim=xlimk, x_pos=1, burst_fraction=burst_fraction[c],
ha='right')
print(cell_type_here + ' median '+scores_here[v]+''+str(np.nanmedian(frame_file[scores_here[v]])))
print(cell_type_here + ' max ' + x_axis[v] + '' + str(np.nanmax(frame_file[x_axis[v]])))
@ -410,7 +411,7 @@ def data_overview3():
#set_ylim_same()
#ax[1, 1].get_shared_y_axes().join(*ax[1, 1::])
# embed()
#counter += 1
#embed()
@ -448,7 +449,7 @@ def plt_specific_cells(axs, cell_type_here, cv_name, frame_file, score, marker =
cells_extra = frame_file[frame_file['cell'].isin(cells_plot2)].index
# ax = plt.subplot(grid[1, cv_n])
# todo: hier nur noch die kleinste und größte Amplitude nehmen
# embed()
if not marker:
axs.scatter(frame_file[cv_name].loc[cells_extra], frame_file[score].loc[cells_extra],
s=9, facecolor="None", edgecolor='black', alpha=0.7, clip_on=False) # colors[str(cell_type_here)]
@ -467,9 +468,9 @@ def plt_var_axis(ax_j, axls, axss,score_name, burst_fraction, cell_type_here, co
axk, axl, axs, axls, axss, ax_j = get_grid_4(ax_j, axls, axss, grid0[counter])
counter += 1
cmap, _, y_axis = plt_burst_modulation_hists(axk, axl, var_item_names[v], axs, cell_type_here, x_axis[v],
frame_file, scores_here[v], var_item=var_type,
burst_fraction=burst_fraction[v])
cmap, _, y_axis = scatter_with_marginals_colorcoded(var_item_names[v], axs, cell_type_here, x_axis[v],
frame_file, scores_here[v], axl, axk, var_item=var_type,
burst_fraction=burst_fraction[v])
axs.set_ylabel(score_name[v])
axs.set_xlabel(x_axis_names[v])
if v in [0, 1]:
@ -536,11 +537,10 @@ def species_with_both_cells(grid0, cell_types,ax_j, axls, axss, colors, cv_name_
if c in [0, 2]:
axk.set_title(species)
# embed()
if len(frame_file) > 0:
axs, x_axis = kernel_scatter(axl, cell_types, axk, axs, ax_j, c, cell_type_here, colors, cv_n, cv_name,
frame_file, grid0, max_val,
score, log = log)
axs, x_axis = kernel_scatter(axl, axk, axs, c, cell_type_here, colors, cv_name,
frame_file, score, log = log)
if log:
axl.set_yscale('log')
axl.set_yticks_blank()

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 48 KiB

After

Width:  |  Height:  |  Size: 54 KiB

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 113 KiB

After

Width:  |  Height:  |  Size: 111 KiB

View File

@ -49,7 +49,7 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
plot_style()
default_figsize(column=2, length=3.1) #.254.75 0.75
grid = gridspec.GridSpec(2, 5, wspace=0.95, bottom=0.09,
hspace=0.25, width_ratios = [1,0,1,1,1], left=0.09, right=0.93, top=0.9)
hspace=0.25, width_ratios = [2,0,2,2,2], left=0.09, right=0.93, top=0.9)
a = 0
maxs = []
@ -64,7 +64,7 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
ref_types, adapt_types, noises_added, level_extraction, receiver_contrast, contrasts, ]
nr = '2'
# embed()
# cell_contrasts = ["2013-01-08-aa-invivo-1"]
# cells_triangl_contrast = np.concatenate([cells_all,cell_contrasts])
# cells_triangl_contrast = 1
@ -84,14 +84,15 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
#################################
# data cells
# embed()
grid_data = gridspec.GridSpecFromSubplotSpec(1, 1, grid[0, 0],
hspace=hs)
fr_print = False#True
#ypos_x_modelanddata()
nr = 1
ax_data, stack_spikes_all, eod_frs = plt_data_susept(fig, grid_data, cells_all, cell_type='p-unit', width=width,
cbar_label=True, eod_metrice = eod_metrice, nr = nr, amp_given = 1,xlabel = False, lp=lp, title=True)
cbar_label=True, fr_print = fr_print, eod_metrice = eod_metrice, nr = nr, amp_given = 1,xlabel = False, lp=lp, title=True)
for ax_external in ax_data:
# ax.set_ylabel(F2_xlabel())
# remove_xticks(ax)
@ -123,14 +124,47 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
# for trial in trials:#.009
trial_nr = 1000000#1000000
save_names = [
'calc_RAM_model-2__nfft_whole_power_1_afe_0.009_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_afe_0.009_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_'+str(trial_nr)+'_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_afe_0.025_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_afe_0.025_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_' + str(
trial_nr) + '_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_' + str(
trial_nr) + '_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
]
save_names = [
'calc_RAM_model-2__nfft_whole_power_1_afe_0.023_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_afe_0.023_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_'+str(trial_nr)+'_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_'+str(trial_nr)+'_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
] #calc_RAM_model-2__nfft_whole_power_1_afe_2.6_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV
##########
# Erklärung
# Ich habe hier 0.009 und nicht 0.25 weil das Modell einen Fehler hat
# den Stimulus in den Daten habe ich überprüft der tatsächliche stimulus ist 2.3 Prozent
# sollte aber 2.5 Prozent sein
# Im Fall von 2.5 Prozent wäre das ein Fehler von 0.36 sonst von 0.39
# Hier werde ich nun mit dem Fehler von 0.36 verfahren
# das bedeutet aber das sich den Stimulus zwar mit 0.009 ins Modell reintue später für die
# Susceptiblitätsberechnung sollte ich ihn aber um den Faktor 0.36 teilen
bias_factor = 0.36
save_names = [
'calc_RAM_model-2__nfft_whole_power_1_afe_0.009_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_afe_0.009_RAM_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_' + str(
trial_nr) + '_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
'calc_RAM_model-2__nfft_whole_power_1_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_' + str(
trial_nr) + '_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
]
#'calc_RAM_model-2__nfft_whole_power_1_afe_0.009_RAM_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_11_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
#'calc_RAM_model-2__nfft_whole_power_1_afe_0.009_RAM_RAM_additiv_cv_adapt_factor_scaled_cNoise_0.1_cSig_0.9_cutoff1_300_cutoff2_300no_sinz_length1_TrialsStim_500000_a_fr_1__trans1s__TrialsNr_1_fft_o_forward_fft_i_forward_Hz_mV',
@ -139,7 +173,8 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
tr_name = trial_nr/1000000
if tr_name == 1:
tr_name = 1#'$c=1\,\%$','$c=0\,\%$'
cs = ['$c=1\,\%$','$c=0\,\%$']
c = 2.5
cs = ['$c=%.1f$' %(c)+'$\,\%$','$c=0\,\%$']
titles = ['Model\n$N=11$', 'Model\n$N=%s $' % (tr_name) +'\,million',
'Model\,('+noise_name().lower()+')' + '\n' + '$N=11$',
'Model\,('+noise_name().lower()+')' + '\n' + '$N=%s$' % (tr_name) + '\,million',
@ -148,6 +183,7 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
# 'Model\,('+noise_name().lower()+')' + '\n' + '$N=%s$' % (tr_name) + '\,million\n $c=1\,\%$ '
ax_model = []
for s, sav_name in enumerate(save_names):
try:
ax_external = plt.subplot(grid[nrs_s[s]])
@ -167,13 +203,13 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
# full_matrix = create_full_matrix2(np.array(stack), np.array(stack_rev))
# stack_final = get_axis_on_full_matrix(full_matrix, stack)
# im = plt_RAM_perc(ax, perc, np.abs(stack))
# embed()
stack = load_model_susept(path, cells_save, save_name.split(r'/')[-1] + cell_add)
if len(stack)> 0:
add_nonlin_title, cbar, fig, stack_plot, im = plt_single_square_modl(ax_external, cell, stack, perc, titles[s],
width, eod_metrice = eod_metrice, titles_plot=True,
resize=True, nr = nr)
resize=True,bias_factor = bias_factor, fr_print = fr_print, nr = nr)
# if s in [1,3,5]:
@ -202,14 +238,14 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
if len(cells) > 1:
a += 1
set_clim_same_here(ims, mats=mats, lim_type='up', nr_clim='perc', clims='', percnr=95)
set_clim_same(ims, mats=mats, lim_type='up', nr_clim='perc', clims='', percnr=perc_model_full())
#################################################
# Flowcharts
var_types = ['', 'additiv_cv_adapt_factor_scaled']#'additiv_cv_adapt_factor_scaled',
##additiv_cv_adapt_factor_scaled
a_fes = [0.009, 0]#, 0.009
a_fes = [c/100, 0]#, 0.009
eod_fe = [750, 750]#, 750
ylim = [-0.5, 0.5]
c_sigs = [0, 0.9]#, 0.9
@ -226,7 +262,7 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
eod_fr = model_params['EODf'] # .iloc[0]
deltat = model_params.pop("deltat") # .iloc[0]
v_offset = model_params.pop("v_offset") # .iloc[0]
# embed()eod_fr = stack.eod_fr.iloc[0]
eod_fr = stack.eod_fr.iloc[0]
print(var_types[g] + ' a_fe ' + str(a_fes[g]))
noise_final_c, spike_times, stimulus, stimulus_here, time, v_dent_output, v_mem_output, frame = get_flowchart_params(
a_fes, a_fr, g, c_sigs[g], cell, deltat, eod_fr, model_params, stimulus_length, v_offset, var_types,
@ -235,7 +271,7 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
if (len(np.unique(frame.RAM_afe)) > 1) & (len(np.unique(frame.RAM_noise)) > 1):
grid_lowpass2 = gridspec.GridSpecFromSubplotSpec(4, 1,
subplot_spec=grid_here,height_ratios=[1, 1,1, 0.1], hspace=0.2)
subplot_spec=grid_here,height_ratios=[1, 1,1, 0.1], hspace=0.05)
# if (np.unique(frame.RAM_afe) != 0):grid_left[g]
@ -291,11 +327,14 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
ax_external = plt.subplot(grid_lowpass[0])
ax_external.show_spines('l')
ax_intrinsic.show_spines('l')
ax_external.axhline(0, color='black', lw=0.5)
ax_external.axhline(0, color='red', lw=0.5)
join_x([ax_intrinsic,ax_external])
join_y([ax_intrinsic, ax_external])
vers = 'second'
ax_intrinsic.set_yticks_delta(6)
ax_external.set_yticks_delta(6)
ax_external.text(-0.6, 0.5, '$\%$', va='center', rotation=90, transform=ax_external.transAxes)
ax_intrinsic.text(-0.6, 0.5, '$\%$', va='center', rotation=90, transform=ax_intrinsic.transAxes)
remove_xticks(ax_intrinsic)
@ -305,7 +344,7 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
ax_ams.append(ax_external)
remove_xticks(ax_external)
# embed()
ax_n, ff, pp, ff_am, pp_am = plot_lowpass2([grid_lowpass[2]], time, noise_final_c, deltat, eod_fr,
extract=False, color1='grey', lw=1)
remove_yticks(ax_n)
@ -334,7 +373,7 @@ def model_and_data2(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1
transform=ax_intrinsic.transAxes)
#embed()
set_same_ylim(ax_ams, up='up')
# embed()
axes = np.concatenate([ax_data, ax_model])
axes = [ax_ams[0], axes[1], axes[2], ax_ams[1], axes[3], axes[4], ]#ax_ams[2], axes[5], axes[6],
@ -380,7 +419,7 @@ if __name__ == '__main__':
model = resave_small_files("models_big_fit_d_right.csv", load_folder='calc_model_core')
cells = model.cell.unique()
# embed()
params = {'cells': cells}
show = True

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,243 @@
,spikes
0,0.0091
1,0.01665
2,0.02315
3,0.0287
4,0.03725
5,0.046900000000000004
6,0.0631
7,0.07055
8,0.07385
9,0.08025
10,0.08575
11,0.09335
12,0.1051
13,0.11585000000000001
14,0.1245
15,0.13305
16,0.1374
17,0.1482
18,0.1525
19,0.16870000000000002
20,0.17725000000000002
21,0.1826
22,0.1903
23,0.19775
24,0.2031
25,0.21705000000000002
26,0.22360000000000002
27,0.23225
28,0.23875000000000002
29,0.2462
30,0.25365
31,0.2591
32,0.2721
33,0.2795
34,0.2861
35,0.29475
36,0.3022
37,0.3118
38,0.31825000000000003
39,0.33025000000000004
40,0.342
41,0.3463
42,0.3529
43,0.3668
44,0.371
45,0.37865000000000004
46,0.38955
47,0.39495
48,0.40555
49,0.41305000000000003
50,0.42400000000000004
51,0.4293
52,0.438
53,0.44225000000000003
54,0.453
55,0.4627
56,0.4746
57,0.47985
58,0.48425
59,0.49175
60,0.5004000000000001
61,0.5133
62,0.5176000000000001
63,0.52295
64,0.536
65,0.54035
66,0.5466500000000001
67,0.55545
68,0.5704
69,0.5800500000000001
70,0.58975
71,0.5961500000000001
72,0.60265
73,0.6103500000000001
74,0.6189
75,0.6264000000000001
76,0.635
77,0.6404000000000001
78,0.64895
79,0.66515
80,0.6695
81,0.6738000000000001
82,0.68665
83,0.69425
84,0.70065
85,0.7062
86,0.7202000000000001
87,0.73285
88,0.7372500000000001
89,0.7416
90,0.7512500000000001
91,0.75575
92,0.7675000000000001
93,0.7804500000000001
94,0.7867500000000001
95,0.79545
96,0.8041
97,0.81915
98,0.8246
99,0.8321000000000001
100,0.8376
101,0.84285
102,0.8460500000000001
103,0.859
104,0.86865
105,0.8786
106,0.8912500000000001
107,0.8979
108,0.9043
109,0.9118
110,0.91605
111,0.92145
112,0.9356500000000001
113,0.9409500000000001
114,0.9527
115,0.96345
116,0.9667
117,0.9807
118,0.9882000000000001
119,0.99595
120,1.0023
121,1.0098500000000001
122,1.01635
123,1.026
124,1.0379
125,1.0463500000000001
126,1.05175
127,1.06255
128,1.0712000000000002
129,1.07555
130,1.0842
131,1.09165
132,1.096
133,1.1099
134,1.11755
135,1.12405
136,1.1347
137,1.14765
138,1.1509
139,1.15845
140,1.17455
141,1.18005
142,1.18755
143,1.1918
144,1.1983000000000001
145,1.21005
146,1.2197500000000001
147,1.2273
148,1.23715
149,1.2457
150,1.2511
151,1.2576
152,1.2694
153,1.2780500000000001
154,1.28545
155,1.2941500000000001
156,1.2996
157,1.30725
158,1.3124500000000001
159,1.3190000000000002
160,1.3318
161,1.3447
162,1.3512
163,1.3598000000000001
164,1.36955
165,1.3751
166,1.38135
167,1.3922
168,1.39765
169,1.40735
170,1.4128
171,1.42025
172,1.4288500000000002
173,1.4353
174,1.44395
175,1.45365
176,1.4622000000000002
177,1.47295
178,1.48045
179,1.487
180,1.4998500000000001
181,1.5052
182,1.51295
183,1.51815
184,1.5322500000000001
185,1.53655
186,1.5409000000000002
187,1.5537
188,1.5645
189,1.5732000000000002
190,1.57955
191,1.58925
192,1.59695
193,1.60535
194,1.6108500000000001
195,1.6216000000000002
196,1.6269
197,1.6356000000000002
198,1.6464
199,1.6528500000000002
200,1.6657000000000002
201,1.6701000000000001
202,1.67655
203,1.6852
204,1.6938
205,1.70235
206,1.7088
207,1.72275
208,1.7334500000000002
209,1.7379
210,1.7432500000000002
211,1.7509000000000001
212,1.7628000000000001
213,1.77025
214,1.7821
215,1.78855
216,1.79495
217,1.80465
218,1.8166
219,1.82305
220,1.83275
221,1.83925
222,1.84345
223,1.8467
224,1.8597000000000001
225,1.8682
226,1.8780000000000001
227,1.89185
228,1.89735
229,1.90805
230,1.91235
231,1.9167
232,1.9220000000000002
233,1.9297000000000002
234,1.94255
235,1.9532500000000002
236,1.963
237,1.9672500000000002
238,1.98035
239,1.9878
240,1.9932
241,1.99855
1 spikes
2 0 0.0091
3 1 0.01665
4 2 0.02315
5 3 0.0287
6 4 0.03725
7 5 0.046900000000000004
8 6 0.0631
9 7 0.07055
10 8 0.07385
11 9 0.08025
12 10 0.08575
13 11 0.09335
14 12 0.1051
15 13 0.11585000000000001
16 14 0.1245
17 15 0.13305
18 16 0.1374
19 17 0.1482
20 18 0.1525
21 19 0.16870000000000002
22 20 0.17725000000000002
23 21 0.1826
24 22 0.1903
25 23 0.19775
26 24 0.2031
27 25 0.21705000000000002
28 26 0.22360000000000002
29 27 0.23225
30 28 0.23875000000000002
31 29 0.2462
32 30 0.25365
33 31 0.2591
34 32 0.2721
35 33 0.2795
36 34 0.2861
37 35 0.29475
38 36 0.3022
39 37 0.3118
40 38 0.31825000000000003
41 39 0.33025000000000004
42 40 0.342
43 41 0.3463
44 42 0.3529
45 43 0.3668
46 44 0.371
47 45 0.37865000000000004
48 46 0.38955
49 47 0.39495
50 48 0.40555
51 49 0.41305000000000003
52 50 0.42400000000000004
53 51 0.4293
54 52 0.438
55 53 0.44225000000000003
56 54 0.453
57 55 0.4627
58 56 0.4746
59 57 0.47985
60 58 0.48425
61 59 0.49175
62 60 0.5004000000000001
63 61 0.5133
64 62 0.5176000000000001
65 63 0.52295
66 64 0.536
67 65 0.54035
68 66 0.5466500000000001
69 67 0.55545
70 68 0.5704
71 69 0.5800500000000001
72 70 0.58975
73 71 0.5961500000000001
74 72 0.60265
75 73 0.6103500000000001
76 74 0.6189
77 75 0.6264000000000001
78 76 0.635
79 77 0.6404000000000001
80 78 0.64895
81 79 0.66515
82 80 0.6695
83 81 0.6738000000000001
84 82 0.68665
85 83 0.69425
86 84 0.70065
87 85 0.7062
88 86 0.7202000000000001
89 87 0.73285
90 88 0.7372500000000001
91 89 0.7416
92 90 0.7512500000000001
93 91 0.75575
94 92 0.7675000000000001
95 93 0.7804500000000001
96 94 0.7867500000000001
97 95 0.79545
98 96 0.8041
99 97 0.81915
100 98 0.8246
101 99 0.8321000000000001
102 100 0.8376
103 101 0.84285
104 102 0.8460500000000001
105 103 0.859
106 104 0.86865
107 105 0.8786
108 106 0.8912500000000001
109 107 0.8979
110 108 0.9043
111 109 0.9118
112 110 0.91605
113 111 0.92145
114 112 0.9356500000000001
115 113 0.9409500000000001
116 114 0.9527
117 115 0.96345
118 116 0.9667
119 117 0.9807
120 118 0.9882000000000001
121 119 0.99595
122 120 1.0023
123 121 1.0098500000000001
124 122 1.01635
125 123 1.026
126 124 1.0379
127 125 1.0463500000000001
128 126 1.05175
129 127 1.06255
130 128 1.0712000000000002
131 129 1.07555
132 130 1.0842
133 131 1.09165
134 132 1.096
135 133 1.1099
136 134 1.11755
137 135 1.12405
138 136 1.1347
139 137 1.14765
140 138 1.1509
141 139 1.15845
142 140 1.17455
143 141 1.18005
144 142 1.18755
145 143 1.1918
146 144 1.1983000000000001
147 145 1.21005
148 146 1.2197500000000001
149 147 1.2273
150 148 1.23715
151 149 1.2457
152 150 1.2511
153 151 1.2576
154 152 1.2694
155 153 1.2780500000000001
156 154 1.28545
157 155 1.2941500000000001
158 156 1.2996
159 157 1.30725
160 158 1.3124500000000001
161 159 1.3190000000000002
162 160 1.3318
163 161 1.3447
164 162 1.3512
165 163 1.3598000000000001
166 164 1.36955
167 165 1.3751
168 166 1.38135
169 167 1.3922
170 168 1.39765
171 169 1.40735
172 170 1.4128
173 171 1.42025
174 172 1.4288500000000002
175 173 1.4353
176 174 1.44395
177 175 1.45365
178 176 1.4622000000000002
179 177 1.47295
180 178 1.48045
181 179 1.487
182 180 1.4998500000000001
183 181 1.5052
184 182 1.51295
185 183 1.51815
186 184 1.5322500000000001
187 185 1.53655
188 186 1.5409000000000002
189 187 1.5537
190 188 1.5645
191 189 1.5732000000000002
192 190 1.57955
193 191 1.58925
194 192 1.59695
195 193 1.60535
196 194 1.6108500000000001
197 195 1.6216000000000002
198 196 1.6269
199 197 1.6356000000000002
200 198 1.6464
201 199 1.6528500000000002
202 200 1.6657000000000002
203 201 1.6701000000000001
204 202 1.67655
205 203 1.6852
206 204 1.6938
207 205 1.70235
208 206 1.7088
209 207 1.72275
210 208 1.7334500000000002
211 209 1.7379
212 210 1.7432500000000002
213 211 1.7509000000000001
214 212 1.7628000000000001
215 213 1.77025
216 214 1.7821
217 215 1.78855
218 216 1.79495
219 217 1.80465
220 218 1.8166
221 219 1.82305
222 220 1.83275
223 221 1.83925
224 222 1.84345
225 223 1.8467
226 224 1.8597000000000001
227 225 1.8682
228 226 1.8780000000000001
229 227 1.89185
230 228 1.89735
231 229 1.90805
232 230 1.91235
233 231 1.9167
234 232 1.9220000000000002
235 233 1.9297000000000002
236 234 1.94255
237 235 1.9532500000000002
238 236 1.963
239 237 1.9672500000000002
240 238 1.98035
241 239 1.9878
242 240 1.9932
243 241 1.99855

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 81 KiB

After

Width:  |  Height:  |  Size: 81 KiB

View File

@ -38,7 +38,7 @@ def model_full(c1=10, mult_type='_multsorted2_', devs=['05'], save=True, end='al
ax = plt.subplot(grid[0])
axes.append(ax)
cell = '2012-07-03-ak-invivo-1'
mat_rev,stack_final_rev = load_stack_data_susept(cell, save_name = version_final(), end = '_revQuadrant_')
mat_rev,stack_final_rev = load_stack_data_susept(cell, redo = True, save_name = version_final(), end = '_revQuadrant_')
mat, stack = load_stack_data_susept(cell, save_name=version_final(), end = '')
new_keys, stack_plot = convert_csv_str_to_float(stack)
@ -69,7 +69,7 @@ def model_full(c1=10, mult_type='_multsorted2_', devs=['05'], save=True, end='al
im = plt_RAM_perc(ax, perc, abs_matrix)
cbar, left, bottom, width, height = colorbar_outside(ax, im, add=5, width=0.01)
set_clim_same_here([im], mats=[abs_matrix], lim_type='up', nr_clim='perc', clims='', percnr=95)
set_clim_same([im], mats=[abs_matrix], lim_type='up', nr_clim='perc', clims='', percnr=perc_model_full())
#clim = im.get_clim()
#if clim[1]> 1000:
@ -145,7 +145,7 @@ def model_full(c1=10, mult_type='_multsorted2_', devs=['05'], save=True, end='al
diagonal = 'test_data_cell_2022-01-05-aa-invivo-1'
diagonal = 'diagonal1'
# embed()
freq1_ratio = combis[diagonal][0]
freq2_ratio = combis[diagonal][1]
diagonal = ''
@ -212,7 +212,7 @@ def plt_model_full_model(axp, min=0.2, cells=[], a_f2 = 0.1, perc = 0.05, alpha
model_cells = pd.read_csv(load_folder_name('calc_model_core') + "/models_big_fit_d_right.csv")
if len(cells) < 1:
cells = model_cells.cell.loc[range(cell_start, len(model_cells))]
# embed()
plot_style()
fr = float('nan')
for cell in cells:
@ -264,12 +264,12 @@ def plt_model_full_model(axp, min=0.2, cells=[], a_f2 = 0.1, perc = 0.05, alpha
isi = np.diff(spike_adapted[0])
cv0 = np.std(isi) / np.mean(isi)
cv1 = np.std(frate) / np.mean(frate)
# embed()
fs = 11
# for fff, freq2 in enumerate(freqs2):
# freq2 = [freq2]
# embed()
for ff, freq1 in enumerate(freqs1):
freq1 = [freq1]
freq2 = [freqs2[ff]]
@ -287,7 +287,7 @@ def plt_model_full_model(axp, min=0.2, cells=[], a_f2 = 0.1, perc = 0.05, alpha
# print(cell )
# f_corr = create_beat_corr(np.array([freq1[f1]]), np.array([eod_fr]))
# create the second eod_fish1 array analogous to the eod_fish_r array
# embed()
phaseshift_f1, phaseshift_f2 = get_phaseshifts(a_f1, a_f2, phase_right, phaseshift_fr)
eod_fish1, time_fish_e = eod_fish_e_generation(time_array, a_f1, freq1, f1)
eod_fish2, time_fish_j = eod_fish_e_generation(time_array, a_f2, freq2, f2)
@ -299,7 +299,7 @@ def plt_model_full_model(axp, min=0.2, cells=[], a_f2 = 0.1, perc = 0.05, alpha
eod_fr,
time_array, a_f1)
# embed()
stimulus_orig = stimulus * 1
# damping variants
@ -308,24 +308,24 @@ def plt_model_full_model(axp, min=0.2, cells=[], a_f2 = 0.1, perc = 0.05, alpha
std_dump=0, max_dump=0, range_dump=0)
stimuli.append(stimulus)
# embed()
cvs, adapt_output, baseline_after, _, rate_adapted[t], rate_baseline_before[t], \
rate_baseline_after[t], spikes[t], \
stimulus_altered[t], \
v_dent_output[t], offset_new, v_mem_output[t], noise_final = simulate(cell, offset,
stimulus, f1,
stimulus,
adaptation_yes_e=f1,
**model_params)
#embed()
stimulus_altered_output = np.mean(stimulus_altered, axis=0)
# time_var2 = time.time()
# embed()
test_stimulus_stability = False
# embed()
# time_model = time_var2 - time_var # 8
# embed()ax[0, ff]
spikes_mat = [[]] * len(spikes)
pps = [[]] * len(spikes)
@ -411,11 +411,11 @@ def plt_model_full_model(axp, min=0.2, cells=[], a_f2 = 0.1, perc = 0.05, alpha
# pp_mean = np.log
print(freqs_beat)
print(labels)
plt_peaks_several(labels, freqs_beat, pp_mean, 0,
axp, pp_mean, colors, f, add_log=2.5,
text_extra=True, ha='center', rel='rel', rot=0, several_peaks=True,
exact=False, texts_left=texts_left, add_texts=add_texts,several_peaks_nr=several_peaks_nr,
rots=[0, 0, 0, 0,0], ms=14, alphas = [alpha]*len(colors), perc=perc, log=log, clip_on=True) # True
plt_peaks_several(freqs_beat, pp_mean, axp, pp_mean, f, labels, 0, colors, ha='center',
add_texts=add_texts, texts_left=texts_left, add_log=2.5,
rots=[0, 0, 0, 0, 0], several_peaks_nr=several_peaks_nr, exact=False,
text_extra=True, perc_peaksize=perc, rel='rel', alphas=[alpha] * len(colors),
ms=14, clip_on=True, several_peaks=True, log=log) # True
axp.plot(f, pp_mean, color='black', zorder=0) # 0.45
axp.set_xlim(xlim)
@ -452,7 +452,7 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
ps = []
results_diff = pd.DataFrame()
for g in range(len(DF2_frmult)):
# embed()
freq2_ratio = DF2_frmult[g]
freq1_ratio = DF1_frmult[g]
#embed()
@ -492,7 +492,7 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
model_params = model_cells[model_cells['cell'] == cell_here].iloc[0]
# model_params = model_cells.iloc[cell_nr]
# embed()
eod_fr = model_params['EODf'] # .iloc[0]
offset = model_params.pop('v_offset')
@ -548,7 +548,7 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
color01, color012, color01_2, color02, color0_burst, color0 = colors_suscept_paper_dots()
#fig = plt.figure(figsize=(11.5, 5.4))
# embed()
for run in range(runs):
print(run)
t1 = time.time()
@ -563,23 +563,28 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
power_here = 'sinz'
cvs, adapt_output, baseline_after_b, _, rate_adapted_b, rate_baseline_before_b, rate_baseline_after_b, \
spikes_base[t], _, _, offset_new, _,noise_final = simulate(cell, offset, stimulus, f1,
nr=n,
spikes_base[t], _, _, offset_new, _,noise_final = simulate(cell, offset, stimulus,
deltat=deltat,
adaptation_variant=adapt_offset,
adaptation_yes_j=f2,
adaptation_yes_e=f1,
adaptation_yes_t=t,
adaptation_upper_tol=upper_tol,
adaptation_lower_tol=lower_tol,
power_variant=power_here,
adapt_offset=adapt_offset,
add=add, alpha=alpha,
power_alpha=alpha,
power_nr=n,
reshuffle=reshuffled,
lower_tol=lower_tol,
upper_tol=upper_tol,
v_exp=v_exp, exp_tau=exp_tau,
dent_tau_change=dent_tau_change,
alter_taus=constant_reduction,
exponential=exponential,
exponential_mult=1,
exponential_plus=plus,
exponential_slope=slope,
sig_val=sig_val, j=f2,
deltat=deltat, t=t,
tau_change_choice=constant_reduction,
tau_change_val=dent_tau_change,
sigmoidal_mult=1,
sigmoidal_plus=plus,
sigmoidal_slope=slope,
sigmoidal_add=add,
sigmoidal_sigmoidal_val=sig_val,
LIF_exponential=exponential,
LIF_exponential_tau=exp_tau,
LIF_expontential__v=v_exp,
**model_params)
if t == 0:
@ -617,8 +622,8 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
eod_fr + third_fr] # , eod_fr - two_third_fr, third_fr,two_third_fr,third_eodf, eod_fr - third_eodf,two_third_eodf, eod_fr - two_third_eodf, ]
#embed()
sampling_rate = 1 / deltat
base_cut, mat_base, smoothed0, mat0 = find_base_fr2(sampling_rate, spikes_base, deltat,
stimulus_length, time_array, dev=dev)
base_cut, mat_base, smoothed0, mat0 = find_base_fr2(spikes_base, deltat,
stimulus_length, dev=dev)
#fr = np.mean(base_cut)
frate, isis_diff = ISI_frequency(time_array, spikes_base[0], fill=0.0)
isi = np.diff(spikes_base[0])
@ -638,30 +643,35 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
t1 = time.time()
phaseshift_f1, phaseshift_f2 = get_phaseshifts(a_f1, a_f2, phase_right, phaseshift_fr)
eod_fish1, time_fish_e = eod_fish_e_generation(time_array, a_f1, freq1, f1,
nfft_for_morph, phaseshift_f1,
cell_recording, fish_morph_harmonics_var,
zeros, mimick, fish_emitter, sampling,
stimulus_length, thistype='emitter')
phaseshift_f1, sampling,
stimulus_length, nfft_for_morph,
cell_recording,
fish_morph_harmonics_var, zeros,
mimick, fish_emitter,
thistype='emitter')
eod_fish2, time_fish_j = eod_fish_e_generation(time_array, a_f2, freq2, f2,
nfft_for_morph, phaseshift_f2,
cell_recording, fish_morph_harmonics_var,
zeros, mimick, fish_jammer, sampling,
stimulus_length, thistype='jammer')
phaseshift_f2, sampling,
stimulus_length, nfft_for_morph,
cell_recording,
fish_morph_harmonics_var, zeros,
mimick, fish_jammer,
thistype='jammer')
eod_stimulus = eod_fish1 + eod_fish2
v_mems, offset_new, mat01, mat02, mat012, smoothed01, smoothed02, smoothed012, stimulus_01, stimulus_02, stimulus_012, mat05_01, spikes_01, mat05_02, spikes_02, mat05_012, spikes_012 = get_arrays_for_three(
cell, a_f2, a_f1,
SAM, eod_stimulus, eod_fish_r, freq2, eod_fish1, eod_fish_r,
eod_fish2, stimulus_length,
baseline_with_wave_damping, baseline_without_wave,
offset, model_params, n, variant, t, adapt_offset,
upper_tol, lower_tol, dent_tau_change, constant_reduction,
exponential, plus, slope, add,
deltat, alpha, sig_val, v_exp, exp_tau, f2,
trials_nr, time_array,
f1, freq1, damping_type,
gain, eod_fr, damping, us_name, dev=dev, reshuffle=reshuffled)
cell, a_f2, a_f1, SAM, eod_stimulus, eod_fish_r, freq2, eod_fish1, eod_fish2,
stimulus_length, offset, model_params, n, 'sinz', adapt_offset, add, deltat,
f2, trials_nr, time_array, f1, freq1, eod_fr, reshuffle=reshuffled, dev=dev)
#cell, a_f2, a_f1,
#SAM, eod_stimulus, eod_fish_r, freq2, eod_fish1, eod_fish2, stimulus_length,
#baseline_with_wave_damping, offset, model_params, n, variant, adapt_offset,
#upper_tol, lower_tol, dent_tau_change, constant_reduction,
#exponential, plus, slope, add,
#deltat, alpha, sig_val, v_exp, exp_tau, f2,
#trials_nr, time_array,
#f1, freq1, damping_type,
#gain, eod_fr, damping, us_name, dev=dev, reshuffle=reshuffled)
if printing:
print('Generation process' + str(time.time() - t1))
@ -683,21 +693,21 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
##################################
# power spectrum
# embed()
if dev_spikes == 'original':
#nfft = 2 ** 15
# embed()
p0, p02, p01, p012, fs = calc_ps(nfft, [np.mean(mat012, axis=0)],
[np.mean(mat01, axis=0)],
[np.mean(mat02, axis=0)],
[np.mean(mat0, axis=0)],
test=False, sampling_rate=sampling_rate)
sampling_rate=sampling_rate)
else:
#nfft = 2 ** 15
p0, p02, p01, p012, fs = calc_ps(nfft, smoothed012,
smoothed01, smoothed02, smoothed0,
test=False, sampling_rate=sampling_rate)
sampling_rate=sampling_rate)
# pp_mean = np.log
ps.append(p012)
@ -719,7 +729,7 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
if log == 'log':
#p012 = 10 * np.log10(ps[j] / np.max(ps))
#embed()
p012 = log_calc_psd(ax, 'green', [], log, ps[j], np.max(ps))
p012 = log_calc_psd(log, ps[j], np.max(ps))
else:
p012 = ps[j]
@ -749,8 +759,8 @@ def plt_model_full_model2(grid0, reshuffled='reshuffled',af_2 = 0.1, datapoints=
'DF2_H2', '|DF1-DF2|', '|DF1+DF2|', 'baseline']
marker = markers[j]#'DF1_H1','DF1_H4',
#embed()
plt_peaks_several(labels, freqs, p0_mean, 0, ax,
p0_mean , colors, fs, emb = False, exact = False, marker = marker, log = log, perc = 0.08, add_log = 2.5, ms = ms, clip_on = clip_on)
plt_peaks_several(freqs, p0_mean, ax, p0_mean, fs, labels, 0, colors, emb=False, marker=marker, add_log=2.5,
exact=False, perc_peaksize=0.08, ms=ms, clip_on=clip_on, log=log)
ax.set_xlim(0, 300)
#ax.set_ylim(0 - 20,
# np.max(np.max(ps)) + 70) # np.min(np.min(p0_means))
@ -859,7 +869,7 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
DF1_desired = df1 # [::-1]
DF2_desired = df2 # [::-1]
# embed()
#######################################
# ROC part
@ -883,7 +893,7 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
choices = [[[0, 1, 2,3, 6]] * 6, [[0, 1, 2, 3, 4]] * 6]
for gg in range(len(DF1_desired)):
# embed()
# try:
grid0 = gridspec.GridSpecFromSubplotSpec(len(DF1_desired), 1, wspace=0.15, hspace=0.35,
subplot_spec=grid)
@ -905,14 +915,14 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
concat=True)
# mt_sorted['m1, m2']
# embed()
# except:
# print('grouped thing')
# embed()
###################
# groups sorted by repro tag
# todo: evnetuell die tuples gleich hier umspeichern vom csv ''
# embed()
grouped = mt_sorted.groupby(
['c1', 'c2', 'm1, m2', 'repro_tag_id'],
as_index=False)
@ -932,13 +942,13 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
##############################################################
# load plotting arrays
arrays, arrays_original, spikes_pure = save_arrays_susept(
data_dir, cell, c, b, chirps, devs, extract, group_mean, mean_type, plot_group=0,
data_dir, cell, c, chirps, devs, extract, group_mean, mean_type, plot_group=0,
rocextra=False, sorted_on=sorted_on)
####################################################
####################################################
# hier checken wir ob für diesen einen Punkt das funkioniert mit der standardabweichung
# embed()
try:
check_var_substract_method(spikes_pure)
except:
@ -953,7 +963,7 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
xlim = [0, 100]
# plt.savefig(r'C:\Users\alexi\OneDrive - bwedu\Präsentations\latex\experimental_protocol.pdf')
# embed()
# fr_end = divergence_title_add_on(group_mean, fr[gg], autodefine)
###########################################
@ -961,9 +971,9 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
deltat = 1 / 40000
eodf = np.mean(group_mean[1].eodf)
eod_fr = eodf
# embed()
a_fr = 1
# embed()
eod_fe = eodf + np.mean(
group_mean[1].DF2) # data.eodf.iloc[0] + 10 # cell_model.eode.iloc[0]
a_fe = group_mean[0][1] / 100
@ -1018,13 +1028,13 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
if printing:
print(time.time() - t3)
# embed()
##########################################
# spike response
array_chosen = 1
if d == 0: #
# embed()
# plot the psds
p_means_all = {}
@ -1097,12 +1107,12 @@ def plt_data_full_model(c1, chose_score, detections, devs, dfs, end, grid, mult_
return DF1_desired, DF2_desired, fr, eod_fr, arrays_len
def load_stack_data_susept(cell, save_name, end = ''):
def load_stack_data_susept(cell, save_name, end = '', redo = False):
load_name = load_folder_name('calc_RAM') + '/' + save_name+end
add = '_cell' + cell +end# str(f) # + '_amp_' + str(amp)
#embed()
stack_cell = load_data_susept(load_name + '_' + cell + '.pkl', load_name + '_' + cell, add=add,
load_version='csv')
load_version='csv', redo = redo)
file_names_exclude = get_file_names_exclude()
stack_cell = stack_cell[~stack_cell['file_name'].isin(file_names_exclude)]
# if len(stack_cell):

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,91 +1,87 @@
,1,0
0,2.355000000535088,15.005000000250956
1,15.530000000581655,16.480000000278604
2,17.055000000255692,18.205000000357185
3,19.93000000038666,21.730000000438675
4,21.555000000262968,34.605000000787925
5,36.78000000036337,37.55500000084322
6,38.23000000011308,39.18000000071953
7,39.75500000069661,52.90500000051435
8,56.05500000001558,55.705000000720986
9,57.55500000032117,57.255000000672965
10,59.07999999999521,58.75500000006906
11,62.28000000010144,75.10500000085341
12,75.73000000047688,76.6050000002495
13,77.20500000050453,78.2800000006817
14,80.17999999992827,93.25500000073117
15,82.00500000020912,94.70500000048088
16,95.35500000038229,97.68000000081412
17,98.2550000007912,99.43000000026115
18,99.90500000003595,115.63000000028734
19,116.15500000061803,117.13000000059293
20,117.65500000001413,118.68000000054491
21,119.30500000016838,133.830000000721
22,123.98000000030225,135.2800000004707
23,135.68000000032117,136.78000000077628
24,137.23000000027315,139.93000000032663
25,140.1550000000505,154.7800000008054
26,141.8800000001291,156.55500000053036
27,155.33000000050453,159.3050000001811
28,158.23000000000394,172.9050000004052
29,159.8300000005118,175.93000000038484
30,176.23000000003304,177.45500000005887
31,177.73000000033863,179.155000000769
32,179.329999999937,195.230000000315
33,182.38000000019457,196.73000000062058
34,195.78000000001413,199.85500000080248
35,197.40499999989044,213.43000000074863
36,200.38000000022367,214.90500000077628
37,202.3050000007068,217.88000000020003
38,217.08000000035173,219.73000000075882
39,218.55500000037938,235.85500000086068
40,220.1300000006093,237.32999999997884
41,236.35500000000394,238.8800000008403
42,237.88000000058747,254.08000000066278
43,239.48000000018584,255.58000000005887
44,244.27999999989044,257.0550000000865
45,255.88000000061658,260.1800000002684
46,257.48000000021494,274.95500000082285
47,260.38000000062385,276.5550000004212
48,262.1300000000709,278.2800000004998
49,277.0800000007519,293.0300000007763
50,278.5300000005016,296.0050000002
51,280.05500000017565,297.555000000152
52,296.4550000006064,299.15500000065987
53,297.9550000000025,314.280000000558
54,299.5300000002324,315.75500000058565
55,314.7300000000549,317.1800000000574
56,316.28000000000685,321.7300000006206
57,317.85500000023677,333.7050000000589
58,320.73000000036774,335.20500000036446
59,335.579999999937,338.15500000041976
60,337.1550000001669,339.88000000049834
61,338.6550000004725,353.20500000039357
62,340.30500000062676,356.20500000009525
63,356.5300000000214,357.70500000040084
64,358.0799999999734,374.55500000037756
65,359.68000000048124,377.0549999999774
66,362.93000000023386,380.23000000071517
67,376.1050000002804,393.70500000045905
68,377.7800000007126,395.2300000001331
69,380.63000000056564,398.1800000001884
70,395.73000000018584,399.78000000069625
71,398.6050000003168,415.080000000721
72,400.2300000001931,417.6299999999672
73,415.35500000009125,419.255000000753
74,416.8300000001189,434.3550000003732
75,418.3300000004245,435.8050000001229
76,422.8050000001538,437.28000000015055
77,436.25500000052926,453.805000000152
78,437.7800000002033,455.3050000004576
79,440.70499999998066,458.23000000023495
80,442.5300000002615,459.8050000004649
81,455.955000000359,473.4800000006133
82,458.80500000021203,476.3050000001884
83,460.43000000008834,477.9300000000647
84,476.8300000005191,479.53000000057256
85,478.2800000002688,494.5550000002684
86,479.8550000004987,495.9800000006497
87,484.7800000006835,497.380000000753
88,495.0800000005991,
89,496.6300000005511,
,0,1,2
0,5.360000000375287,3.6000000003509633,5.439999999890446
1,6.9100000003272655,5.150000000302942,7.064999999766755
2,28.034999999628774,6.5749999997747075,26.89000000007671
3,29.509999999656422,8.299999999853288,28.31500000045797
4,31.210000000366556,12.724999999936234,29.739999999929736
5,34.28499999999257,29.175000000013355,34.21499999965907
6,36.98499999999694,30.750000000243276,35.765000000520544
7,43.310000000285065,35.04999999984601,37.4149999997653
8,58.38499999962732,36.59999999979799,58.16500000035465
9,61.23500000038984,42.7999999996059,59.6400000003823
10,64.23500000009153,59.30000000023891,61.34000000018294
11,65.88500000024578,60.89999999983728,65.69000000034156
12,88.18499999987762,65.2499999999959,67.26499999966198
13,89.70999999955166,66.75000000030148,88.31499999994865
14,91.45999999990818,68.64999999959717,89.76499999969836
15,94.73499999993874,89.34999999963064,91.39000000048416
16,97.10999999996784,90.89999999958262,94.7649999998075
17,118.16000000025451,95.32499999966556,97.59000000029208
18,119.73499999957494,96.8500000002491,117.14000000027318
19,121.30999999980486,98.49999999949385,118.63999999966927
20,124.36000000006243,119.37499999965394,120.06500000005053
21,126.01000000021668,120.90000000023747,123.1150000003081
22,127.68499999973938,124.00000000014143,125.96500000016113
23,148.43500000032873,125.65000000029568,147.1899999996649
24,150.06000000020504,127.14999999969177,148.66499999969255
25,154.46000000001004,134.67499999967868,150.06499999979587
26,156.03500000023996,149.6250000003597,154.69000000028336
27,157.58500000019194,151.32500000016034,156.2149999999574
28,178.48499999972046,155.5750000001167,174.49000000031538
29,180.03499999967244,157.07499999951278,178.6650000003474
30,184.48500000003332,163.14999999974998,180.1150000000971
31,186.0349999999853,179.650000000383,181.7900000005293
32,187.58499999993728,181.37499999955207,186.11500000040996
33,208.58499999966807,185.57499999986203,187.84000000048854
34,210.08499999997366,187.09999999953607,208.9400000004216
35,212.38500000007843,189.05000000029713,210.28999999996904
36,216.03499999973064,194.75000000000318,211.9400000001233
37,217.58499999968262,209.77499999969905,216.1649999998017
38,238.68499999961568,211.32499999965103,217.86500000051183
39,240.21000000019922,215.77500000001191,238.83999999996468
40,241.8849999997219,217.2750000003175,240.34000000027027
41,246.05999999975393,223.27499999972088,241.91500000050019
42,247.6099999997059,239.87499999964666,245.0399999997726
43,268.76000000019485,241.62500000000318,246.54000000007818
44,270.2849999998689,245.8499999996816,267.59000000036485
45,271.88500000037675,247.37500000026512,269.0650000003925
46,276.2100000002574,249.07500000006576,270.5150000001422
47,277.6850000002851,269.89999999966994,274.98999999987154
48,298.90999999978885,271.39999999997553,276.56500000010146
49,300.35999999953856,275.79999999978054,297.61500000038814
50,301.9600000000464,277.3499999997325,299.11499999978423
51,305.010000000304,279.10000000008904,300.5400000001655
52,306.56000000025597,300.0249999998955,303.86499999984244
53,308.1349999995764,301.54999999956954,306.6150000004027
54,329.00999999973646,306.02500000020837,308.3150000002033
55,330.51000000004206,307.52499999960446,329.16500000008546
56,335.1100000002516,309.24999999968304,330.61499999983516
57,336.58500000027925,330.09999999956517,332.2399999997115
58,338.1849999998776,331.6250000001487,335.4399999998177
59,359.2350000001643,334.6999999997747,337.0150000000476
60,360.8600000000406,336.3000000002826,359.1649999998308
61,365.0099999997947,337.7500000000323,360.6650000001364
62,366.55999999974665,344.0500000000425,362.31500000029064
63,368.10999999969863,360.2750000003466,365.3649999996387
64,374.3349999997845,361.8250000002986,366.9650000001466
65,389.4599999996826,366.0999999996234,388.01500000043325
66,392.159999999687,367.64999999957536,389.41500000053657
67,395.2849999998689,373.89999999993915,390.8650000002863
68,396.88500000037675,390.39999999966267,392.5150000004405
69,398.4100000000508,394.85000000002356,396.7899999997653
70,419.63499999955457,396.3749999996976,398.61500000004617
71,422.2599999996346,397.8750000000032,418.2399999999516
72,425.45999999974083,402.6250000000614,419.8649999998279
73,426.91000000040003,420.72500000029277,421.3150000004871
74,428.43500000007407,425.12500000009777,425.5899999998119
75,435.960000000061,426.62499999949387,427.09000000011747
76,450.8349999999082,428.07500000015307,449.4899999999516
77,452.4100000001381,433.97500000026366,450.9649999999792
78,455.51000000004206,435.9749999997616,452.66499999977987
79,457.18499999956475,454.9499999997165,455.6899999997595
80,458.68499999987034,456.4999999996685,457.215000000343
81,480.8100000002851,457.97499999969614,479.56500000053074
82,482.3849999996055,461.324999999651,481.06499999992684
83,485.3850000002167,,482.91500000048563
84,486.98499999981505,,
85,488.60999999969135,,

1 0 1 2
2 0 15.005000000250956 5.360000000375287 2.355000000535088 3.6000000003509633 5.439999999890446
3 1 16.480000000278604 6.9100000003272655 15.530000000581655 5.150000000302942 7.064999999766755
4 2 18.205000000357185 28.034999999628774 17.055000000255692 6.5749999997747075 26.89000000007671
5 3 21.730000000438675 29.509999999656422 19.93000000038666 8.299999999853288 28.31500000045797
6 4 34.605000000787925 31.210000000366556 21.555000000262968 12.724999999936234 29.739999999929736
7 5 37.55500000084322 34.28499999999257 36.78000000036337 29.175000000013355 34.21499999965907
8 6 39.18000000071953 36.98499999999694 38.23000000011308 30.750000000243276 35.765000000520544
9 7 52.90500000051435 43.310000000285065 39.75500000069661 35.04999999984601 37.4149999997653
10 8 55.705000000720986 58.38499999962732 56.05500000001558 36.59999999979799 58.16500000035465
11 9 57.255000000672965 61.23500000038984 57.55500000032117 42.7999999996059 59.6400000003823
12 10 58.75500000006906 64.23500000009153 59.07999999999521 59.30000000023891 61.34000000018294
13 11 75.10500000085341 65.88500000024578 62.28000000010144 60.89999999983728 65.69000000034156
14 12 76.6050000002495 88.18499999987762 75.73000000047688 65.2499999999959 67.26499999966198
15 13 78.2800000006817 89.70999999955166 77.20500000050453 66.75000000030148 88.31499999994865
16 14 93.25500000073117 91.45999999990818 80.17999999992827 68.64999999959717 89.76499999969836
17 15 94.70500000048088 94.73499999993874 82.00500000020912 89.34999999963064 91.39000000048416
18 16 97.68000000081412 97.10999999996784 95.35500000038229 90.89999999958262 94.7649999998075
19 17 99.43000000026115 118.16000000025451 98.2550000007912 95.32499999966556 97.59000000029208
20 18 115.63000000028734 119.73499999957494 99.90500000003595 96.8500000002491 117.14000000027318
21 19 117.13000000059293 121.30999999980486 116.15500000061803 98.49999999949385 118.63999999966927
22 20 118.68000000054491 124.36000000006243 117.65500000001413 119.37499999965394 120.06500000005053
23 21 133.830000000721 126.01000000021668 119.30500000016838 120.90000000023747 123.1150000003081
24 22 135.2800000004707 127.68499999973938 123.98000000030225 124.00000000014143 125.96500000016113
25 23 136.78000000077628 148.43500000032873 135.68000000032117 125.65000000029568 147.1899999996649
26 24 139.93000000032663 150.06000000020504 137.23000000027315 127.14999999969177 148.66499999969255
27 25 154.7800000008054 154.46000000001004 140.1550000000505 134.67499999967868 150.06499999979587
28 26 156.55500000053036 156.03500000023996 141.8800000001291 149.6250000003597 154.69000000028336
29 27 159.3050000001811 157.58500000019194 155.33000000050453 151.32500000016034 156.2149999999574
30 28 172.9050000004052 178.48499999972046 158.23000000000394 155.5750000001167 174.49000000031538
31 29 175.93000000038484 180.03499999967244 159.8300000005118 157.07499999951278 178.6650000003474
32 30 177.45500000005887 184.48500000003332 176.23000000003304 163.14999999974998 180.1150000000971
33 31 179.155000000769 186.0349999999853 177.73000000033863 179.650000000383 181.7900000005293
34 32 195.230000000315 187.58499999993728 179.329999999937 181.37499999955207 186.11500000040996
35 33 196.73000000062058 208.58499999966807 182.38000000019457 185.57499999986203 187.84000000048854
36 34 199.85500000080248 210.08499999997366 195.78000000001413 187.09999999953607 208.9400000004216
37 35 213.43000000074863 212.38500000007843 197.40499999989044 189.05000000029713 210.28999999996904
38 36 214.90500000077628 216.03499999973064 200.38000000022367 194.75000000000318 211.9400000001233
39 37 217.88000000020003 217.58499999968262 202.3050000007068 209.77499999969905 216.1649999998017
40 38 219.73000000075882 238.68499999961568 217.08000000035173 211.32499999965103 217.86500000051183
41 39 235.85500000086068 240.21000000019922 218.55500000037938 215.77500000001191 238.83999999996468
42 40 237.32999999997884 241.8849999997219 220.1300000006093 217.2750000003175 240.34000000027027
43 41 238.8800000008403 246.05999999975393 236.35500000000394 223.27499999972088 241.91500000050019
44 42 254.08000000066278 247.6099999997059 237.88000000058747 239.87499999964666 245.0399999997726
45 43 255.58000000005887 268.76000000019485 239.48000000018584 241.62500000000318 246.54000000007818
46 44 257.0550000000865 270.2849999998689 244.27999999989044 245.8499999996816 267.59000000036485
47 45 260.1800000002684 271.88500000037675 255.88000000061658 247.37500000026512 269.0650000003925
48 46 274.95500000082285 276.2100000002574 257.48000000021494 249.07500000006576 270.5150000001422
49 47 276.5550000004212 277.6850000002851 260.38000000062385 269.89999999966994 274.98999999987154
50 48 278.2800000004998 298.90999999978885 262.1300000000709 271.39999999997553 276.56500000010146
51 49 293.0300000007763 300.35999999953856 277.0800000007519 275.79999999978054 297.61500000038814
52 50 296.0050000002 301.9600000000464 278.5300000005016 277.3499999997325 299.11499999978423
53 51 297.555000000152 305.010000000304 280.05500000017565 279.10000000008904 300.5400000001655
54 52 299.15500000065987 306.56000000025597 296.4550000006064 300.0249999998955 303.86499999984244
55 53 314.280000000558 308.1349999995764 297.9550000000025 301.54999999956954 306.6150000004027
56 54 315.75500000058565 329.00999999973646 299.5300000002324 306.02500000020837 308.3150000002033
57 55 317.1800000000574 330.51000000004206 314.7300000000549 307.52499999960446 329.16500000008546
58 56 321.7300000006206 335.1100000002516 316.28000000000685 309.24999999968304 330.61499999983516
59 57 333.7050000000589 336.58500000027925 317.85500000023677 330.09999999956517 332.2399999997115
60 58 335.20500000036446 338.1849999998776 320.73000000036774 331.6250000001487 335.4399999998177
61 59 338.15500000041976 359.2350000001643 335.579999999937 334.6999999997747 337.0150000000476
62 60 339.88000000049834 360.8600000000406 337.1550000001669 336.3000000002826 359.1649999998308
63 61 353.20500000039357 365.0099999997947 338.6550000004725 337.7500000000323 360.6650000001364
64 62 356.20500000009525 366.55999999974665 340.30500000062676 344.0500000000425 362.31500000029064
65 63 357.70500000040084 368.10999999969863 356.5300000000214 360.2750000003466 365.3649999996387
66 64 374.55500000037756 374.3349999997845 358.0799999999734 361.8250000002986 366.9650000001466
67 65 377.0549999999774 389.4599999996826 359.68000000048124 366.0999999996234 388.01500000043325
68 66 380.23000000071517 392.159999999687 362.93000000023386 367.64999999957536 389.41500000053657
69 67 393.70500000045905 395.2849999998689 376.1050000002804 373.89999999993915 390.8650000002863
70 68 395.2300000001331 396.88500000037675 377.7800000007126 390.39999999966267 392.5150000004405
71 69 398.1800000001884 398.4100000000508 380.63000000056564 394.85000000002356 396.7899999997653
72 70 399.78000000069625 419.63499999955457 395.73000000018584 396.3749999996976 398.61500000004617
73 71 415.080000000721 422.2599999996346 398.6050000003168 397.8750000000032 418.2399999999516
74 72 417.6299999999672 425.45999999974083 400.2300000001931 402.6250000000614 419.8649999998279
75 73 419.255000000753 426.91000000040003 415.35500000009125 420.72500000029277 421.3150000004871
76 74 434.3550000003732 428.43500000007407 416.8300000001189 425.12500000009777 425.5899999998119
77 75 435.8050000001229 435.960000000061 418.3300000004245 426.62499999949387 427.09000000011747
78 76 437.28000000015055 450.8349999999082 422.8050000001538 428.07500000015307 449.4899999999516
79 77 453.805000000152 452.4100000001381 436.25500000052926 433.97500000026366 450.9649999999792
80 78 455.3050000004576 455.51000000004206 437.7800000002033 435.9749999997616 452.66499999977987
81 79 458.23000000023495 457.18499999956475 440.70499999998066 454.9499999997165 455.6899999997595
82 80 459.8050000004649 458.68499999987034 442.5300000002615 456.4999999996685 457.215000000343
83 81 473.4800000006133 480.8100000002851 455.955000000359 457.97499999969614 479.56500000053074
84 82 476.3050000001884 482.3849999996055 458.80500000021203 461.324999999651 481.06499999992684
85 83 477.9300000000647 485.3850000002167 460.43000000008834 482.91500000048563
86 84 479.53000000057256 486.98499999981505 476.8300000005191
87 85 494.5550000002684 488.60999999969135 478.2800000002688
86 495.9800000006497 479.8550000004987
87 497.380000000753 484.7800000006835
88 495.0800000005991
89 496.6300000005511

View File

@ -1,72 +1,71 @@
,1,0
0,0.09999999997489795,7.850000000416912
1,8.849999999938518,16.800000000785076
2,12.225000000171349,22.82500000046639
3,21.175000000539512,28.925000000072032
4,26.975000000447835,37.925000000086584
5,36.25000000079126,45.37500000014916
6,42.07500000006803,49.95000000008076
7,48.150000000305226,59.0000000006512
8,54.15000000061809,64.82500000083746
9,63.10000000007676,75.42500000045038
10,69.17500000031396,77.12500000025102
11,75.22500000027321,87.60000000029322
12,81.15000000066175,95.0000000007094
13,93.05000000017571,101.05000000066866
14,96.27500000055988,107.25000000047658
15,105.12500000072578,115.97500000016225
16,111.32500000053369,122.02500000012151
17,120.22500000034597,131.02500000013606
18,126.32499999995161,137.02500000044893
19,133.9500000001408,143.07500000040818
20,136.77500000062537,149.15000000064538
21,145.72500000008404,158.32500000078653
22,156.09999999992397,167.1000000001186
23,162.17500000016116,173.15000000007785
24,169.9000000005526,179.2000000000371
25,174.22500000043328,185.15000000070359
26,180.30000000067048,191.5250000006381
27,189.25000000012915,203.07500000080836
28,195.27500000071996,209.10000000048967
29,203.0000000002019,216.67500000012296
30,210.7499999999618,224.1000000008171
31,219.27500000015243,230.02500000029613
32,225.57500000016262,233.3000000003267
33,234.27500000047985,242.2750000000633
34,240.30000000016116,249.8500000006061
35,246.32500000075197,255.72500000043874
36,252.4250000003576,266.25000000012733
37,258.5000000005948,275.22500000077343
38,267.4750000003314,278.2250000004751
39,276.475000000346,284.3000000007123
40,282.5749999999516,290.4750000002423
41,288.4250000004158,297.8250000001026
42,294.5500000002994,308.32500000042273
43,303.3499999999094,311.5500000008069
44,306.45000000072287,317.225000000235
45,315.5500000000302,329.4250000003558
46,324.5750000003227,334.0000000002874
47,330.7000000002063,339.8500000007516
48,338.1750000005468,344.4250000006832
49,344.05000000037944,354.9500000003718
50,351.52500000071996,362.4250000007123
51,357.6250000003256,371.375000000171
52,366.4750000004915,376.0750000005828
53,368.2750000004944,383.40000000016516
54,378.6250000000564,389.6750000008069
55,387.6500000003489,397.00000000038926
56,393.6750000000302,407.47500000043146
57,399.7500000002674,410.65000000025975
58,411.4999999999327,419.45000000077926
59,413.2250000000113,422.8250000001026
60,423.5500000002048,433.05000000009386
61,426.6750000003867,434.95000000029904
62,435.6250000007549,446.5000000004693
63,446.20000000008986,449.7000000005755
64,451.200000000199,458.57500000010987
65,459.6500000004653,464.7250000002714
66,462.75000000036925,473.45000000086657
67,471.8500000005861,479.6500000006745
68,479.5000000001437,485.7250000000022
69,485.4000000002543,494.85000000049695
70,492.7500000001146,
,0,1,2
0,4.374999999527063,0.8000000000265572,3.375000000005457
1,12.3249999996915,9.94999999988977,9.649999999737702
2,19.900000000234286,15.899999999646752,17.150000000356158
3,25.649999999586726,22.12499999973261,24.5250000004944
4,31.525000000328873,31.100000000378714,33.44999999967513
5,40.77500000039436,38.499999999885404,39.524999999912325
6,46.55000000002474,44.67500000032487,48.62500000012915
7,52.64999999963038,50.700000000006185,53.24999999970714
8,61.674999999922875,58.0000000002201,62.050000000226646
9,72.19999999961146,65.67500000005566,72.6750000001175
10,79.62500000030559,73.2250000003205,78.69999999979882
11,85.72499999991123,82.09999999985484,87.6000000005206
12,94.62499999972351,88.25000000001637,90.75000000007094
13,99.79999999995925,95.87500000020555,96.7500000003838
14,106.87500000040018,104.67499999981555,108.67500000017571
15,115.67500000001019,106.3749999996162,111.65000000050895
16,117.57500000021537,115.27500000033797,120.67499999989195
17,127.87500000013097,121.27499999974134,126.7249999998512
18,133.74999999996362,130.59999999973115,132.8000000000884
19,142.74999999997817,136.39999999963948,138.95000000024993
20,146.09999999993306,142.549999999801,147.87500000034015
21,156.45000000040454,149.77500000009059,155.50000000052933
22,160.874999999578,160.600000000386,161.4999999999327
23,171.62499999994907,169.37499999971806,169.09999999984393
24,177.47500000041327,172.60000000010223,179.42500000003747
25,182.24999999983993,181.42499999999018,183.97499999969114
26,188.17500000022847,186.22499999969477,191.74999999972897
27,197.1000000003187,193.44999999998436,198.97500000001855
28,206.0750000000553,202.40000000035252,207.9750000000331
29,212.12500000001455,208.5250000002361,211.04999999965912
30,218.10000000004948,215.97500000029868,223.0749999996533
31,230.0999999997657,222.32499999995525,229.00000000004184
32,233.17500000030122,231.07499999991887,235.0500000000011
33,242.1249999997599,235.7249999997748,241.0749999996824
34,245.2499999999418,241.72500000008768,250.07499999969696
35,254.37499999952706,250.599999999622,256.32500000006075
36,263.1499999997686,253.92500000020846,265.2000000005046
37,264.9250000004031,265.800000000354,271.2250000001859
38,275.3999999995358,272.0750000000862,280.17499999964457
39,279.9750000003769,277.8499999997166,287.900000000036
40,291.7749999996886,285.3749999997035,296.699999999646
41,293.4750000003987,290.15000000003965,298.3250000004318
42,299.4749999998021,296.0249999998723,307.3250000004464
43,308.324999999968,306.7750000002434,313.27500000020336
44,315.87500000023283,316.850000000386,322.4999999999909
45,323.4499999998661,320.1000000001386,331.3999999998032
46,329.47499999954744,328.9500000003045,337.425000000394
47,335.62499999970896,335.09999999955653,343.47500000035325
48,346.3000000001557,344.124999999849,352.6500000004944
49,350.8249999995314,352.949999999737,358.4250000001248
50,359.69999999997526,360.69999999949687,361.70000000015534
51,367.2500000002401,365.19999999950414,372.12499999964166
52,371.6500000000451,374.2750000003525,379.70000000018445
53,382.09999999980937,381.8000000003394,387.34999999974207
54,386.55000000017026,389.0750000002754,394.62499999967804
55,392.57499999985157,395.17499999988104,405.05000000007385
56,398.92499999950815,404.1250000002492,406.5749999997479
57,409.1500000004089,413.17499999991014,412.7249999999094
58,415.29999999966094,419.175000000223,418.82500000042455
59,422.82499999964784,426.77500000013424,427.67499999968095
60,431.775000000016,435.6749999999465,436.92499999974643
61,437.8999999998996,437.4000000000251,445.97500000031687
62,445.34999999996217,447.8750000000673,454.7749999999269
63,452.8250000003027,455.37499999977626,461.29999999971005
64,461.9750000001659,461.67499999978645,468.4500000000753
65,470.97500000018044,470.34999999982574,475.87499999985994
66,475.4499999999098,476.37499999950705,482.0999999999458
67,481.4249999999447,482.32500000017353,489.72500000013497
68,487.5499999998283,488.52499999998145,496.95000000042455
69,497.90000000029977,496.09999999961474,

1 0 1 2
2 0 7.850000000416912 4.374999999527063 0.09999999997489795 0.8000000000265572 3.375000000005457
3 1 16.800000000785076 12.3249999996915 8.849999999938518 9.94999999988977 9.649999999737702
4 2 22.82500000046639 19.900000000234286 12.225000000171349 15.899999999646752 17.150000000356158
5 3 28.925000000072032 25.649999999586726 21.175000000539512 22.12499999973261 24.5250000004944
6 4 37.925000000086584 31.525000000328873 26.975000000447835 31.100000000378714 33.44999999967513
7 5 45.37500000014916 40.77500000039436 36.25000000079126 38.499999999885404 39.524999999912325
8 6 49.95000000008076 46.55000000002474 42.07500000006803 44.67500000032487 48.62500000012915
9 7 59.0000000006512 52.64999999963038 48.150000000305226 50.700000000006185 53.24999999970714
10 8 64.82500000083746 61.674999999922875 54.15000000061809 58.0000000002201 62.050000000226646
11 9 75.42500000045038 72.19999999961146 63.10000000007676 65.67500000005566 72.6750000001175
12 10 77.12500000025102 79.62500000030559 69.17500000031396 73.2250000003205 78.69999999979882
13 11 87.60000000029322 85.72499999991123 75.22500000027321 82.09999999985484 87.6000000005206
14 12 95.0000000007094 94.62499999972351 81.15000000066175 88.25000000001637 90.75000000007094
15 13 101.05000000066866 99.79999999995925 93.05000000017571 95.87500000020555 96.7500000003838
16 14 107.25000000047658 106.87500000040018 96.27500000055988 104.67499999981555 108.67500000017571
17 15 115.97500000016225 115.67500000001019 105.12500000072578 106.3749999996162 111.65000000050895
18 16 122.02500000012151 117.57500000021537 111.32500000053369 115.27500000033797 120.67499999989195
19 17 131.02500000013606 127.87500000013097 120.22500000034597 121.27499999974134 126.7249999998512
20 18 137.02500000044893 133.74999999996362 126.32499999995161 130.59999999973115 132.8000000000884
21 19 143.07500000040818 142.74999999997817 133.9500000001408 136.39999999963948 138.95000000024993
22 20 149.15000000064538 146.09999999993306 136.77500000062537 142.549999999801 147.87500000034015
23 21 158.32500000078653 156.45000000040454 145.72500000008404 149.77500000009059 155.50000000052933
24 22 167.1000000001186 160.874999999578 156.09999999992397 160.600000000386 161.4999999999327
25 23 173.15000000007785 171.62499999994907 162.17500000016116 169.37499999971806 169.09999999984393
26 24 179.2000000000371 177.47500000041327 169.9000000005526 172.60000000010223 179.42500000003747
27 25 185.15000000070359 182.24999999983993 174.22500000043328 181.42499999999018 183.97499999969114
28 26 191.5250000006381 188.17500000022847 180.30000000067048 186.22499999969477 191.74999999972897
29 27 203.07500000080836 197.1000000003187 189.25000000012915 193.44999999998436 198.97500000001855
30 28 209.10000000048967 206.0750000000553 195.27500000071996 202.40000000035252 207.9750000000331
31 29 216.67500000012296 212.12500000001455 203.0000000002019 208.5250000002361 211.04999999965912
32 30 224.1000000008171 218.10000000004948 210.7499999999618 215.97500000029868 223.0749999996533
33 31 230.02500000029613 230.0999999997657 219.27500000015243 222.32499999995525 229.00000000004184
34 32 233.3000000003267 233.17500000030122 225.57500000016262 231.07499999991887 235.0500000000011
35 33 242.2750000000633 242.1249999997599 234.27500000047985 235.7249999997748 241.0749999996824
36 34 249.8500000006061 245.2499999999418 240.30000000016116 241.72500000008768 250.07499999969696
37 35 255.72500000043874 254.37499999952706 246.32500000075197 250.599999999622 256.32500000006075
38 36 266.25000000012733 263.1499999997686 252.4250000003576 253.92500000020846 265.2000000005046
39 37 275.22500000077343 264.9250000004031 258.5000000005948 265.800000000354 271.2250000001859
40 38 278.2250000004751 275.3999999995358 267.4750000003314 272.0750000000862 280.17499999964457
41 39 284.3000000007123 279.9750000003769 276.475000000346 277.8499999997166 287.900000000036
42 40 290.4750000002423 291.7749999996886 282.5749999999516 285.3749999997035 296.699999999646
43 41 297.8250000001026 293.4750000003987 288.4250000004158 290.15000000003965 298.3250000004318
44 42 308.32500000042273 299.4749999998021 294.5500000002994 296.0249999998723 307.3250000004464
45 43 311.5500000008069 308.324999999968 303.3499999999094 306.7750000002434 313.27500000020336
46 44 317.225000000235 315.87500000023283 306.45000000072287 316.850000000386 322.4999999999909
47 45 329.4250000003558 323.4499999998661 315.5500000000302 320.1000000001386 331.3999999998032
48 46 334.0000000002874 329.47499999954744 324.5750000003227 328.9500000003045 337.425000000394
49 47 339.8500000007516 335.62499999970896 330.7000000002063 335.09999999955653 343.47500000035325
50 48 344.4250000006832 346.3000000001557 338.1750000005468 344.124999999849 352.6500000004944
51 49 354.9500000003718 350.8249999995314 344.05000000037944 352.949999999737 358.4250000001248
52 50 362.4250000007123 359.69999999997526 351.52500000071996 360.69999999949687 361.70000000015534
53 51 371.375000000171 367.2500000002401 357.6250000003256 365.19999999950414 372.12499999964166
54 52 376.0750000005828 371.6500000000451 366.4750000004915 374.2750000003525 379.70000000018445
55 53 383.40000000016516 382.09999999980937 368.2750000004944 381.8000000003394 387.34999999974207
56 54 389.6750000008069 386.55000000017026 378.6250000000564 389.0750000002754 394.62499999967804
57 55 397.00000000038926 392.57499999985157 387.6500000003489 395.17499999988104 405.05000000007385
58 56 407.47500000043146 398.92499999950815 393.6750000000302 404.1250000002492 406.5749999997479
59 57 410.65000000025975 409.1500000004089 399.7500000002674 413.17499999991014 412.7249999999094
60 58 419.45000000077926 415.29999999966094 411.4999999999327 419.175000000223 418.82500000042455
61 59 422.8250000001026 422.82499999964784 413.2250000000113 426.77500000013424 427.67499999968095
62 60 433.05000000009386 431.775000000016 423.5500000002048 435.6749999999465 436.92499999974643
63 61 434.95000000029904 437.8999999998996 426.6750000003867 437.4000000000251 445.97500000031687
64 62 446.5000000004693 445.34999999996217 435.6250000007549 447.8750000000673 454.7749999999269
65 63 449.7000000005755 452.8250000003027 446.20000000008986 455.37499999977626 461.29999999971005
66 64 458.57500000010987 461.9750000001659 451.200000000199 461.67499999978645 468.4500000000753
67 65 464.7250000002714 470.97500000018044 459.6500000004653 470.34999999982574 475.87499999985994
68 66 473.45000000086657 475.4499999999098 462.75000000036925 476.37499999950705 482.0999999999458
69 67 479.6500000006745 481.4249999999447 471.8500000005861 482.32500000017353 489.72500000013497
70 68 485.7250000000022 487.5499999998283 479.5000000001437 488.52499999998145 496.95000000042455
71 69 494.85000000049695 497.90000000029977 485.4000000002543 496.09999999961474
70 492.7500000001146

View File

@ -1,76 +1,79 @@
,0,1
0,3.7000000006628397,0.8999999999021384
1,6.925000000137516,8.125000000191722
2,14.3250000005537,12.849999999971988
3,20.250000000032742,18.875000000562796
4,26.27500000062355,26.17500000077671
5,36.87500000023647,30.800000000354704
6,42.650000000776345,38.575000000392535
7,44.35000000057698,48.825000000661746
8,54.70000000013897,54.89999999998945
9,60.825000000022555,60.75000000045365
10,68.10000000086802,66.82500000069085
11,73.05000000042128,73.17500000034742
12,80.17500000050859,80.35000000008112
13,84.95000000084474,86.37500000067193
14,92.17500000022483,96.87500000008258
15,100.00000000081855,102.8500000001175
16,108.92499999999927,109.1000000004813
17,115.05000000079235,116.37500000041727
18,122.37500000037471,118.07500000021791
19,130.07500000048822,128.37500000013353
20,134.30000000016662,134.65000000077526
21,140.30000000047949,140.4500000006836
22,146.25000000023647,150.95000000009424
23,152.37500000012005,152.92500000022375
24,162.8250000007938,163.05000000001274
25,164.57500000024083,170.57499999999965
26,170.4000000004271,175.10000000028487
27,176.40000000073996,182.5250000000695
28,186.85000000050422,193.3000000007185
29,188.60000000086075,199.00000000042456
30,199.00000000006912,205.07500000066176
31,205.02500000065993,211.14999999998946
32,212.4750000007225,218.65000000060792
33,218.42500000047949,224.70000000056717
34,224.65000000056534,230.87500000009715
35,234.95000000048094,241.10000000008841
36,241.0000000004402,242.87500000072288
37,248.42500000022483,253.1250000000826
38,253.02500000043437,259.1500000006734
39,259.2250000002423,265.1250000007083
40,265.2000000002772,271.27499999996036
41,272.4750000002132,283.1000000004595
42,278.500000000804,284.75000000061374
43,284.7500000002583,289.62500000024266
44,290.6000000007225,295.52500000035326
45,302.50000000023647,307.1750000007258
46,307.12500000072396,309.12500000057736
47,313.20000000005166,319.20000000071997
48,320.65000000011423,320.80000000031833
49,331.12500000015643,331.5500000006894
50,332.625000000462,337.35000000059773
51,340.45000000014625,343.32500000063266
52,343.22500000007494,349.6500000000113
53,352.4000000002161,355.5000000004755
54,361.2000000007356,367.27500000041874
55,368.80000000064683,368.94999999994144
56,373.4000000008564,379.4500000002616
57,379.27500000068903,385.4250000002965
58,386.7250000007516,391.3750000000535
59,397.35000000064247,397.525000000215
60,403.35000000004584,403.57500000017427
61,410.77500000073996,409.6250000001335
62,416.7500000007749,415.6250000004464
63,421.5000000008331,424.62500000046094
64,428.8000000001375,433.4999999999953
65,433.4750000002714,439.72500000008114
66,440.75000000020736,445.70000000011606
67,446.8500000007225,451.5250000003023
68,457.4999999999818,459.2000000001379
69,466.4750000006279,469.4500000004071
70,470.8750000004329,475.45000000071997
71,478.60000000082437,477.12500000024266
72,481.47500000004584,487.4500000004362
73,493.35000000019136,493.77500000072433
74,494.9250000004213,
,1,0,2
0,0.9749999995259713,5.37499999973079,6.675000000271757
1,6.624999999585635,6.87500000003638,12.875000000079671
2,14.17499999985048,13.200000000324508,14.525000000233922
3,15.949999999575448,20.599999999831198,21.774999999891953
4,21.650000000190992,22.149999999783176,29.40000000008113
5,29.575000000077488,31.124999999519787,36.77500000021937
6,38.50000000016771,37.04999999990832,43.17500000043183
7,44.59999999977335,44.574999999895226,51.82500000019318
8,51.899999999987266,52.05000000023574,53.574999999640205
9,59.299999999493956,58.2750000003216,59.6499999998774
10,61.15000000005275,67.00000000000728,67.12500000021791
11,68.89999999981265,73.44999999986612,74.5249999997246
12,80.52499999990724,80.87499999965075,81.89999999986284
13,81.97499999965694,89.5250000003216,88.02499999974643
14,89.42499999971952,91.34999999969295,89.74999999982501
15,90.99999999994944,97.22499999952561,97.65000000034306
16,97.37499999988395,103.5249999995358,104.60000000030377
17,104.72499999974426,112.19999999957508,112.05000000036634
18,112.05000000023611,118.37500000001455,113.8749999997377
19,119.50000000029868,127.1999999999025,125.74999999988322
20,121.24999999974571,133.37500000034197,127.2500000001888
21,133.37499999994216,135.00000000021828,134.6000000000491
22,136.19999999951725,142.39999999972497,136.40000000005202
23,142.250000000386,148.44999999968422,149.77500000050313
24,149.67500000017063,157.4000000000524,157.17500000000982
25,151.34999999969332,164.9250000000393,158.9000000000884
26,157.42499999993052,172.37500000010186,164.82500000047693
27,166.47499999959146,179.87499999981083,172.19999999970568
28,172.25000000013134,181.57499999961146,179.87500000045074
29,179.7000000001939,187.64999999984866,187.3750000001597
30,181.50000000019682,196.6499999998632,193.29999999963874
31,194.77499999953616,202.52499999969586,194.97500000007093
32,196.49999999961474,210.17500000016298,202.45000000041145
33,202.324999999801,217.49999999974534,210.15000000052495
34,208.5750000001648,225.09999999965657,217.40000000018298
35,217.29999999985048,226.92499999993743,225.07500000001855
36,224.775000000191,232.60000000027503,232.37500000023246
37,226.77499999968896,240.37500000031287,234.1249999996795
38,238.5499999996322,247.6249999999709,241.72500000050022
39,240.17499999950851,255.3250000000844,247.5000000001306
40,247.45000000035398,262.775000000147,253.8750000000651
41,253.67499999953034,268.72499999990396,262.5249999998265
42,262.42499999949393,270.34999999978027,270.22499999994
43,268.62500000021134,277.75000000019645,277.5749999998003
44,277.42499999982135,285.2999999995518,279.27500000051043
45,279.124999999622,292.699999999968,292.5250000004813
46,286.57499999968456,298.82499999985157,294.3000000002063
47,292.6249999996438,300.50000000028376,300.22499999968534
48,300.17499999990866,314.2250000000786,307.74999999967224
49,307.57500000032485,322.82500000019354,315.325000000215
50,315.19999999960453,328.84999999987485,322.7249999997217
51,321.3250000003976,330.5000000000291,330.3499999999109
52,329.0750000001575,337.8999999995358,336.4250000001481
53,337.60000000034813,345.7250000001295,337.97500000010007
54,339.30000000014877,352.8500000002168,351.5500000000462
55,346.7749999995798,359.0750000003027,353.0999999999982
56,352.87500000009493,368.02499999976135,360.47500000013645
57,360.34999999952595,369.8500000000422,367.8500000002747
58,367.7249999996642,381.67499999963184,374.07500000036055
59,375.3249999995754,383.20000000021537,377.0499999997843
60,382.79999999991594,389.3500000003769,383.1500000002994
61,390.4250000001051,398.0249999995067,390.8749999997814
62,397.8249999996118,404.1749999996682,397.92499999994436
63,399.6249999996147,406.04999999959546,405.9249999997552
64,405.4750000000789,413.1250000000364,413.15000000004477
65,407.3750000002841,420.7249999999476,420.525000000183
66,419.075000000303,428.14999999973224,428.0500000001699
67,427.94999999983736,435.6999999999971,434.24999999997783
68,429.649999999638,437.4500000003536,435.9000000001321
69,435.7249999998752,449.39999999951397,443.2499999999924
70,443.1499999996598,456.9000000001324,450.8000000002572
71,450.774999999849,458.34999999988213,458.1249999998396
72,458.02499999950703,465.82500000022264,465.75000000002876
73,465.5749999997719,473.2750000002852,473.20000000009134
74,473.15000000031466,480.92499999984284,480.7749999997246
75,479.4250000000469,488.27499999970314,482.6000000000055
76,488.1000000000862,494.4500000001426,488.3999999999138
77,489.87499999981117,,

1 1 0 2
2 0 0.8999999999021384 0.9749999995259713 3.7000000006628397 5.37499999973079 6.675000000271757
3 1 8.125000000191722 6.624999999585635 6.925000000137516 6.87500000003638 12.875000000079671
4 2 12.849999999971988 14.17499999985048 14.3250000005537 13.200000000324508 14.525000000233922
5 3 18.875000000562796 15.949999999575448 20.250000000032742 20.599999999831198 21.774999999891953
6 4 26.17500000077671 21.650000000190992 26.27500000062355 22.149999999783176 29.40000000008113
7 5 30.800000000354704 29.575000000077488 36.87500000023647 31.124999999519787 36.77500000021937
8 6 38.575000000392535 38.50000000016771 42.650000000776345 37.04999999990832 43.17500000043183
9 7 48.825000000661746 44.59999999977335 44.35000000057698 44.574999999895226 51.82500000019318
10 8 54.89999999998945 51.899999999987266 54.70000000013897 52.05000000023574 53.574999999640205
11 9 60.75000000045365 59.299999999493956 60.825000000022555 58.2750000003216 59.6499999998774
12 10 66.82500000069085 61.15000000005275 68.10000000086802 67.00000000000728 67.12500000021791
13 11 73.17500000034742 68.89999999981265 73.05000000042128 73.44999999986612 74.5249999997246
14 12 80.35000000008112 80.52499999990724 80.17500000050859 80.87499999965075 81.89999999986284
15 13 86.37500000067193 81.97499999965694 84.95000000084474 89.5250000003216 88.02499999974643
16 14 96.87500000008258 89.42499999971952 92.17500000022483 91.34999999969295 89.74999999982501
17 15 102.8500000001175 90.99999999994944 100.00000000081855 97.22499999952561 97.65000000034306
18 16 109.1000000004813 97.37499999988395 108.92499999999927 103.5249999995358 104.60000000030377
19 17 116.37500000041727 104.72499999974426 115.05000000079235 112.19999999957508 112.05000000036634
20 18 118.07500000021791 112.05000000023611 122.37500000037471 118.37500000001455 113.8749999997377
21 19 128.37500000013353 119.50000000029868 130.07500000048822 127.1999999999025 125.74999999988322
22 20 134.65000000077526 121.24999999974571 134.30000000016662 133.37500000034197 127.2500000001888
23 21 140.4500000006836 133.37499999994216 140.30000000047949 135.00000000021828 134.6000000000491
24 22 150.95000000009424 136.19999999951725 146.25000000023647 142.39999999972497 136.40000000005202
25 23 152.92500000022375 142.250000000386 152.37500000012005 148.44999999968422 149.77500000050313
26 24 163.05000000001274 149.67500000017063 162.8250000007938 157.4000000000524 157.17500000000982
27 25 170.57499999999965 151.34999999969332 164.57500000024083 164.9250000000393 158.9000000000884
28 26 175.10000000028487 157.42499999993052 170.4000000004271 172.37500000010186 164.82500000047693
29 27 182.5250000000695 166.47499999959146 176.40000000073996 179.87499999981083 172.19999999970568
30 28 193.3000000007185 172.25000000013134 186.85000000050422 181.57499999961146 179.87500000045074
31 29 199.00000000042456 179.7000000001939 188.60000000086075 187.64999999984866 187.3750000001597
32 30 205.07500000066176 181.50000000019682 199.00000000006912 196.6499999998632 193.29999999963874
33 31 211.14999999998946 194.77499999953616 205.02500000065993 202.52499999969586 194.97500000007093
34 32 218.65000000060792 196.49999999961474 212.4750000007225 210.17500000016298 202.45000000041145
35 33 224.70000000056717 202.324999999801 218.42500000047949 217.49999999974534 210.15000000052495
36 34 230.87500000009715 208.5750000001648 224.65000000056534 225.09999999965657 217.40000000018298
37 35 241.10000000008841 217.29999999985048 234.95000000048094 226.92499999993743 225.07500000001855
38 36 242.87500000072288 224.775000000191 241.0000000004402 232.60000000027503 232.37500000023246
39 37 253.1250000000826 226.77499999968896 248.42500000022483 240.37500000031287 234.1249999996795
40 38 259.1500000006734 238.5499999996322 253.02500000043437 247.6249999999709 241.72500000050022
41 39 265.1250000007083 240.17499999950851 259.2250000002423 255.3250000000844 247.5000000001306
42 40 271.27499999996036 247.45000000035398 265.2000000002772 262.775000000147 253.8750000000651
43 41 283.1000000004595 253.67499999953034 272.4750000002132 268.72499999990396 262.5249999998265
44 42 284.75000000061374 262.42499999949393 278.500000000804 270.34999999978027 270.22499999994
45 43 289.62500000024266 268.62500000021134 284.7500000002583 277.75000000019645 277.5749999998003
46 44 295.52500000035326 277.42499999982135 290.6000000007225 285.2999999995518 279.27500000051043
47 45 307.1750000007258 279.124999999622 302.50000000023647 292.699999999968 292.5250000004813
48 46 309.12500000057736 286.57499999968456 307.12500000072396 298.82499999985157 294.3000000002063
49 47 319.20000000071997 292.6249999996438 313.20000000005166 300.50000000028376 300.22499999968534
50 48 320.80000000031833 300.17499999990866 320.65000000011423 314.2250000000786 307.74999999967224
51 49 331.5500000006894 307.57500000032485 331.12500000015643 322.82500000019354 315.325000000215
52 50 337.35000000059773 315.19999999960453 332.625000000462 328.84999999987485 322.7249999997217
53 51 343.32500000063266 321.3250000003976 340.45000000014625 330.5000000000291 330.3499999999109
54 52 349.6500000000113 329.0750000001575 343.22500000007494 337.8999999995358 336.4250000001481
55 53 355.5000000004755 337.60000000034813 352.4000000002161 345.7250000001295 337.97500000010007
56 54 367.27500000041874 339.30000000014877 361.2000000007356 352.8500000002168 351.5500000000462
57 55 368.94999999994144 346.7749999995798 368.80000000064683 359.0750000003027 353.0999999999982
58 56 379.4500000002616 352.87500000009493 373.4000000008564 368.02499999976135 360.47500000013645
59 57 385.4250000002965 360.34999999952595 379.27500000068903 369.8500000000422 367.8500000002747
60 58 391.3750000000535 367.7249999996642 386.7250000007516 381.67499999963184 374.07500000036055
61 59 397.525000000215 375.3249999995754 397.35000000064247 383.20000000021537 377.0499999997843
62 60 403.57500000017427 382.79999999991594 403.35000000004584 389.3500000003769 383.1500000002994
63 61 409.6250000001335 390.4250000001051 410.77500000073996 398.0249999995067 390.8749999997814
64 62 415.6250000004464 397.8249999996118 416.7500000007749 404.1749999996682 397.92499999994436
65 63 424.62500000046094 399.6249999996147 421.5000000008331 406.04999999959546 405.9249999997552
66 64 433.4999999999953 405.4750000000789 428.8000000001375 413.1250000000364 413.15000000004477
67 65 439.72500000008114 407.3750000002841 433.4750000002714 420.7249999999476 420.525000000183
68 66 445.70000000011606 419.075000000303 440.75000000020736 428.14999999973224 428.0500000001699
69 67 451.5250000003023 427.94999999983736 446.8500000007225 435.6999999999971 434.24999999997783
70 68 459.2000000001379 429.649999999638 457.4999999999818 437.4500000003536 435.9000000001321
71 69 469.4500000004071 435.7249999998752 466.4750000006279 449.39999999951397 443.2499999999924
72 70 475.45000000071997 443.1499999996598 470.8750000004329 456.9000000001324 450.8000000002572
73 71 477.12500000024266 450.774999999849 478.60000000082437 458.34999999988213 458.1249999998396
74 72 487.4500000004362 458.02499999950703 481.47500000004584 465.82500000022264 465.75000000002876
75 73 493.77500000072433 465.5749999997719 493.35000000019136 473.2750000002852 473.20000000009134
76 74 473.15000000031466 494.9250000004213 480.92499999984284 480.7749999997246
77 75 479.4250000000469 488.27499999970314 482.6000000000055
78 76 488.1000000000862 494.4500000001426 488.3999999999138
79 77 489.87499999981117

View File

@ -1,90 +1,88 @@
,0,1
0,2.21500000002689,0.6200000005089019
1,12.515000000851984,13.89500000075774
2,14.09000000017241,15.445000000709719
3,15.66500000040233,17.020000000030144
4,18.79000000058423,32.02000000035756
5,32.16500000012584,33.52000000066315
6,33.690000000709375,35.02000000005925
7,36.59000000020879,37.970000000114545
8,51.59000000053621,53.070000000644235
9,53.16500000076613,54.57000000004033
10,54.715000000718106,56.11999999999231
11,57.61500000021752,59.34500000037648
12,72.7650000003936,73.09500000044923
13,75.56500000060024,75.54500000040267
14,77.3899999999716,77.21999999992536
15,92.36500000002107,92.42000000065732
16,93.84000000004872,95.02000000045942
17,96.6650000005333,96.64500000033573
18,111.69000000022916,98.47000000061658
19,113.29000000073702,113.19500000061512
20,114.81500000041106,114.69500000001122
21,117.76500000046636,116.29500000051908
22,132.81500000044016,119.67000000075191
23,134.31500000074575,134.170000000068
24,135.86500000069773,135.72000000001998
25,150.99000000059587,137.37000000017423
26,153.6650000003223,152.3950000007796
27,155.36500000012293,154.39500000027755
28,157.16500000012584,156.72000000066026
29,173.26500000085926,158.44500000073884
30,174.7900000005333,173.42000000078832
31,176.53999999998032,174.9700000007403
32,192.8400000002088,176.52000000069228
33,194.34000000051438,192.7950000006428
34,195.81500000054203,194.2950000000389
35,197.81500000004,195.87000000026882
36,212.79000000008946,197.5950000003474
37,215.31499999996723,212.44499999991666
38,217.01500000067736,215.22000000075485
39,232.09000000001961,216.94499999992394
40,234.84000000057986,233.32000000007673
41,236.4900000007341,234.84500000066026
42,238.29000000073702,236.39500000061224
43,252.99000000045763,251.5950000004347
44,254.5400000004096,254.34500000008546
45,256.26500000048816,256.07000000016404
46,271.290000000184,258.0200000000156
47,273.91500000026406,272.44500000031684
48,275.6650000006206,274.0449999999152
49,279.99000000050125,275.5950000007767
50,293.5650000004474,278.42000000035176
51,295.0150000001971,293.4200000006792
52,296.6900000006293,294.97000000063116
53,312.96500000057983,296.52000000058314
54,314.4900000002539,311.8449999999763
55,316.09000000076173,314.39500000013203
56,317.76500000028443,316.0700000005642
57,332.6650000004096,320.77000000006655
58,334.16500000071517,332.5950000005657
59,335.7900000005915,334.37000000029064
60,340.0400000005478,338.5200000000447
61,353.615000000494,353.4950000000942
62,355.11500000079957,355.0200000006777
63,356.9150000008025,356.59499999999815
64,373.09000000055073,359.77000000073593
65,374.61500000022477,373.14500000027755
66,376.1900000004547,374.6950000002295
67,391.61500000005014,376.5200000005104
68,394.16500000020585,392.59500000005636
69,395.8650000000065,394.1950000005642
70,397.41500000086796,395.76999999988465
71,412.3650000006395,400.39500000037214
72,415.1400000005682,413.6200000000651
73,416.79000000072244,415.195000000295
74,433.16499999996574,416.8200000001713
75,434.7150000008272,431.8700000001451
76,436.2650000007792,433.44500000037505
77,438.1150000004285,436.1950000000258
78,452.865000000705,437.9450000003823
79,454.34000000073263,452.7949999999516
80,456.0150000002553,454.2950000002572
81,472.3900000004081,455.79500000056277
82,473.86500000043577,458.84499999991084
83,475.3900000001098,473.8700000005162
84,478.31500000079666,475.4200000004682
85,493.2650000005682,476.9950000006981
86,494.8400000007981,492.12000000059624
87,496.4400000003965,494.72000000039833
88,499.44000000009817,496.4450000004769
,0,1,2
0,18.92999999987338,2.57499999995375,20.395000000194614
1,20.47999999982536,20.775000000387397,21.99499999979298
2,22.05500000005528,23.249999999709278,23.595000000300843
3,24.930000000186247,25.04999999971219,26.494999999800257
4,26.655000000264828,28.474999999591407,29.519999999779884
5,29.77999999953723,32.59999999997703,50.52000000042017
6,49.204999999947596,50.474999999525934,52.120000000018536
7,52.330000000129495,52.374999999731116,55.09500000035177
8,55.07999999978025,55.100000000013424,56.645000000303746
9,56.97999999998543,58.12499999999305,59.64500000000543
10,61.27999999958816,80.59999999975149,80.6950000002921
11,80.73000000027648,82.17499999998141,83.4700000002208
12,82.3049999995969,85.1250000000367,85.14499999974349
13,85.15500000035942,86.70000000026663,86.67000000032702
14,86.85500000016006,88.57500000019387,89.56999999982644
15,89.80500000021536,110.59999999949683,110.6699999997595
16,110.90500000014842,112.24999999965108,112.31999999991375
17,112.50499999974679,113.89999999980533,113.89500000014367
18,115.20499999975115,116.65000000036558,116.7449999999967
19,116.88000000018334,119.67500000034521,119.72000000032995
20,118.45500000041326,140.77500000027825,140.74500000033868
21,139.35499999994178,142.44999999980095,142.27000000001271
22,142.30499999999708,143.95000000010654,143.84500000024263
23,145.3299999999767,146.77499999968163,146.69500000009566
24,147.0050000004089,149.67500000009053,149.844999999646
25,150.22999999988357,152.99999999976748,171.04499999978134
26,171.2549999998923,173.69999999980095,173.7950000003416
27,173.90500000025028,175.3000000003088,175.369999999662
28,175.50499999984865,176.92500000018512,177.16999999966492
29,177.25500000020517,179.87500000024042,179.92000000022517
30,181.52999999952996,183.24999999956376,200.9949999998803
31,202.47999999961436,202.3750000002768,202.59500000038815
32,204.0050000001979,203.97499999987517,205.41999999996324
33,205.60499999979626,205.80000000015602,207.09500000039543
34,207.40499999979917,208.47499999988244,209.99499999989484
35,211.7050000003114,213.12499999973838,231.01999999990358
36,229.9300000001135,231.15000000004542,232.67000000005783
37,232.7050000000422,233.99999999989845,235.7200000003154
38,235.6049999995416,235.75000000025497,237.37000000046964
39,237.30500000025174,239.9499999996554,241.77000000027465
40,238.9049999998501,241.89999999950697,261.22000000005346
41,261.22999999975985,261.249999999993,262.89500000048565
42,262.73000000006544,264.2000000000483,265.8699999999094
43,265.6050000001964,267.04999999990133,267.6699999999123
44,267.2300000000727,268.89999999955063,270.3200000002703
45,268.9049999995954,290.09999999968596,291.1700000001524
46,290.23000000021096,292.92500000017054,292.7200000001044
47,292.70499999953284,295.7250000003772,294.2950000003343
48,294.405000000243,297.3500000002535,297.26999999975806
49,297.50500000014694,300.3000000003088,299.12000000031685
50,299.42999999972056,321.52499999981256,321.39499999967074
51,320.1050000003856,324.39999999994353,323.1199999997493
52,322.92999999996067,326.0750000003757,325.79500000038524
53,325.854999999738,329.0249999995215,327.5200000004638
54,327.55499999953867,330.6749999996758,330.3450000000389
55,329.22999999997086,351.67500000031606,351.3950000003256
56,332.35500000015276,354.27500000011815,352.99499999992395
57,352.95499999998395,355.94999999964085,354.6450000000782
58,354.5549999995823,358.95000000025203,357.4200000000069
59,356.12999999981224,361.09999999959865,359.1450000000855
60,359.07999999986754,381.47499999965686,381.64500000012185
61,362.20500000004944,383.09999999953317,383.14500000042744
62,381.9799999998035,384.7499999996874,384.89499999987447
63,384.5799999996056,387.5249999996161,387.6200000001568
64,386.28000000031574,390.6000000001516,390.5199999996562
65,389.13000000016876,411.5749999996045,392.47000000041726
66,392.330000000275,413.100000000188,413.2200000000971
67,413.1800000001571,414.69999999978637,414.69500000012476
68,414.7799999997555,417.57499999991734,417.5449999999778
69,416.2800000000611,420.6250000001749,419.27000000005637
70,417.9799999998617,441.90000000023457,421.04499999978134
71,422.22999999981806,444.6249999996074,441.7950000003707
72,441.8800000000014,446.3000000000396,444.6199999999458
73,443.380000000307,447.8249999997136,446.3700000003023
74,446.23000000016003,450.84999999969324,448.14500000002727
75,449.2299999998617,471.7749999994997,453.8950000002892
76,451.10499999978896,473.4999999995783,471.8950000003183
77,471.93000000030264,476.2999999997849,474.7949999998177
78,473.42999999969874,477.99999999958555,476.4700000002499
79,476.37999999975403,481.00000000019674,478.12000000040416
80,478.00499999963034,,481.07000000045946
81,482.45499999999123,,501.89500000006365
82,500.6549999995153,,
83,503.4799999999999,,
84,505.35499999992714,,
85,508.1550000001338,,
86,509.88000000021236,,

1 0 1 2
2 0 2.21500000002689 18.92999999987338 0.6200000005089019 2.57499999995375 20.395000000194614
3 1 12.515000000851984 20.47999999982536 13.89500000075774 20.775000000387397 21.99499999979298
4 2 14.09000000017241 22.05500000005528 15.445000000709719 23.249999999709278 23.595000000300843
5 3 15.66500000040233 24.930000000186247 17.020000000030144 25.04999999971219 26.494999999800257
6 4 18.79000000058423 26.655000000264828 32.02000000035756 28.474999999591407 29.519999999779884
7 5 32.16500000012584 29.77999999953723 33.52000000066315 32.59999999997703 50.52000000042017
8 6 33.690000000709375 49.204999999947596 35.02000000005925 50.474999999525934 52.120000000018536
9 7 36.59000000020879 52.330000000129495 37.970000000114545 52.374999999731116 55.09500000035177
10 8 51.59000000053621 55.07999999978025 53.070000000644235 55.100000000013424 56.645000000303746
11 9 53.16500000076613 56.97999999998543 54.57000000004033 58.12499999999305 59.64500000000543
12 10 54.715000000718106 61.27999999958816 56.11999999999231 80.59999999975149 80.6950000002921
13 11 57.61500000021752 80.73000000027648 59.34500000037648 82.17499999998141 83.4700000002208
14 12 72.7650000003936 82.3049999995969 73.09500000044923 85.1250000000367 85.14499999974349
15 13 75.56500000060024 85.15500000035942 75.54500000040267 86.70000000026663 86.67000000032702
16 14 77.3899999999716 86.85500000016006 77.21999999992536 88.57500000019387 89.56999999982644
17 15 92.36500000002107 89.80500000021536 92.42000000065732 110.59999999949683 110.6699999997595
18 16 93.84000000004872 110.90500000014842 95.02000000045942 112.24999999965108 112.31999999991375
19 17 96.6650000005333 112.50499999974679 96.64500000033573 113.89999999980533 113.89500000014367
20 18 111.69000000022916 115.20499999975115 98.47000000061658 116.65000000036558 116.7449999999967
21 19 113.29000000073702 116.88000000018334 113.19500000061512 119.67500000034521 119.72000000032995
22 20 114.81500000041106 118.45500000041326 114.69500000001122 140.77500000027825 140.74500000033868
23 21 117.76500000046636 139.35499999994178 116.29500000051908 142.44999999980095 142.27000000001271
24 22 132.81500000044016 142.30499999999708 119.67000000075191 143.95000000010654 143.84500000024263
25 23 134.31500000074575 145.3299999999767 134.170000000068 146.77499999968163 146.69500000009566
26 24 135.86500000069773 147.0050000004089 135.72000000001998 149.67500000009053 149.844999999646
27 25 150.99000000059587 150.22999999988357 137.37000000017423 152.99999999976748 171.04499999978134
28 26 153.6650000003223 171.2549999998923 152.3950000007796 173.69999999980095 173.7950000003416
29 27 155.36500000012293 173.90500000025028 154.39500000027755 175.3000000003088 175.369999999662
30 28 157.16500000012584 175.50499999984865 156.72000000066026 176.92500000018512 177.16999999966492
31 29 173.26500000085926 177.25500000020517 158.44500000073884 179.87500000024042 179.92000000022517
32 30 174.7900000005333 181.52999999952996 173.42000000078832 183.24999999956376 200.9949999998803
33 31 176.53999999998032 202.47999999961436 174.9700000007403 202.3750000002768 202.59500000038815
34 32 192.8400000002088 204.0050000001979 176.52000000069228 203.97499999987517 205.41999999996324
35 33 194.34000000051438 205.60499999979626 192.7950000006428 205.80000000015602 207.09500000039543
36 34 195.81500000054203 207.40499999979917 194.2950000000389 208.47499999988244 209.99499999989484
37 35 197.81500000004 211.7050000003114 195.87000000026882 213.12499999973838 231.01999999990358
38 36 212.79000000008946 229.9300000001135 197.5950000003474 231.15000000004542 232.67000000005783
39 37 215.31499999996723 232.7050000000422 212.44499999991666 233.99999999989845 235.7200000003154
40 38 217.01500000067736 235.6049999995416 215.22000000075485 235.75000000025497 237.37000000046964
41 39 232.09000000001961 237.30500000025174 216.94499999992394 239.9499999996554 241.77000000027465
42 40 234.84000000057986 238.9049999998501 233.32000000007673 241.89999999950697 261.22000000005346
43 41 236.4900000007341 261.22999999975985 234.84500000066026 261.249999999993 262.89500000048565
44 42 238.29000000073702 262.73000000006544 236.39500000061224 264.2000000000483 265.8699999999094
45 43 252.99000000045763 265.6050000001964 251.5950000004347 267.04999999990133 267.6699999999123
46 44 254.5400000004096 267.2300000000727 254.34500000008546 268.89999999955063 270.3200000002703
47 45 256.26500000048816 268.9049999995954 256.07000000016404 290.09999999968596 291.1700000001524
48 46 271.290000000184 290.23000000021096 258.0200000000156 292.92500000017054 292.7200000001044
49 47 273.91500000026406 292.70499999953284 272.44500000031684 295.7250000003772 294.2950000003343
50 48 275.6650000006206 294.405000000243 274.0449999999152 297.3500000002535 297.26999999975806
51 49 279.99000000050125 297.50500000014694 275.5950000007767 300.3000000003088 299.12000000031685
52 50 293.5650000004474 299.42999999972056 278.42000000035176 321.52499999981256 321.39499999967074
53 51 295.0150000001971 320.1050000003856 293.4200000006792 324.39999999994353 323.1199999997493
54 52 296.6900000006293 322.92999999996067 294.97000000063116 326.0750000003757 325.79500000038524
55 53 312.96500000057983 325.854999999738 296.52000000058314 329.0249999995215 327.5200000004638
56 54 314.4900000002539 327.55499999953867 311.8449999999763 330.6749999996758 330.3450000000389
57 55 316.09000000076173 329.22999999997086 314.39500000013203 351.67500000031606 351.3950000003256
58 56 317.76500000028443 332.35500000015276 316.0700000005642 354.27500000011815 352.99499999992395
59 57 332.6650000004096 352.95499999998395 320.77000000006655 355.94999999964085 354.6450000000782
60 58 334.16500000071517 354.5549999995823 332.5950000005657 358.95000000025203 357.4200000000069
61 59 335.7900000005915 356.12999999981224 334.37000000029064 361.09999999959865 359.1450000000855
62 60 340.0400000005478 359.07999999986754 338.5200000000447 381.47499999965686 381.64500000012185
63 61 353.615000000494 362.20500000004944 353.4950000000942 383.09999999953317 383.14500000042744
64 62 355.11500000079957 381.9799999998035 355.0200000006777 384.7499999996874 384.89499999987447
65 63 356.9150000008025 384.5799999996056 356.59499999999815 387.5249999996161 387.6200000001568
66 64 373.09000000055073 386.28000000031574 359.77000000073593 390.6000000001516 390.5199999996562
67 65 374.61500000022477 389.13000000016876 373.14500000027755 411.5749999996045 392.47000000041726
68 66 376.1900000004547 392.330000000275 374.6950000002295 413.100000000188 413.2200000000971
69 67 391.61500000005014 413.1800000001571 376.5200000005104 414.69999999978637 414.69500000012476
70 68 394.16500000020585 414.7799999997555 392.59500000005636 417.57499999991734 417.5449999999778
71 69 395.8650000000065 416.2800000000611 394.1950000005642 420.6250000001749 419.27000000005637
72 70 397.41500000086796 417.9799999998617 395.76999999988465 441.90000000023457 421.04499999978134
73 71 412.3650000006395 422.22999999981806 400.39500000037214 444.6249999996074 441.7950000003707
74 72 415.1400000005682 441.8800000000014 413.6200000000651 446.3000000000396 444.6199999999458
75 73 416.79000000072244 443.380000000307 415.195000000295 447.8249999997136 446.3700000003023
76 74 433.16499999996574 446.23000000016003 416.8200000001713 450.84999999969324 448.14500000002727
77 75 434.7150000008272 449.2299999998617 431.8700000001451 471.7749999994997 453.8950000002892
78 76 436.2650000007792 451.10499999978896 433.44500000037505 473.4999999995783 471.8950000003183
79 77 438.1150000004285 471.93000000030264 436.1950000000258 476.2999999997849 474.7949999998177
80 78 452.865000000705 473.42999999969874 437.9450000003823 477.99999999958555 476.4700000002499
81 79 454.34000000073263 476.37999999975403 452.7949999999516 481.00000000019674 478.12000000040416
82 80 456.0150000002553 478.00499999963034 454.2950000002572 481.07000000045946
83 81 472.3900000004081 482.45499999999123 455.79500000056277 501.89500000006365
84 82 473.86500000043577 500.6549999995153 458.84499999991084
85 83 475.3900000001098 503.4799999999999 473.8700000005162
86 84 478.31500000079666 505.35499999992714 475.4200000004682
87 85 493.2650000005682 508.1550000001338 476.9950000006981
88 86 494.8400000007981 509.88000000021236 492.12000000059624
87 496.4400000003965 494.72000000039833
88 499.44000000009817 496.4450000004769

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 47 KiB

After

Width:  |  Height:  |  Size: 50 KiB

View File

@ -126,7 +126,7 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
mt_sorted = mt_sorted[(mt_sorted['c2'] == c2) & (mt_sorted['c1'] == c1)]
for gg in range(len(DF1_desired)):
# embed()
# try:
t3 = time.time()
# except:
@ -150,7 +150,7 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
###################
# groups sorted by repro tag
# todo: evnetuell die tuples gleich hier umspeichern vom csv ''
# embed()
grouped = mt_sorted.groupby(
['c1', 'c2', 'm1, m2', 'repro_tag_id'],
as_index=False)
@ -170,13 +170,13 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
##############################################################
# load plotting arrays
arrays, arrays_original, spikes_pure = save_arrays_susept(
data_dir, cell, c, b, chirps, devs, extract, group_mean, mean_type, plot_group=0,
rocextra=False, sorted_on=sorted_on, base_several = True, dev_desired = dev_desired)
data_dir, cell, c, chirps, devs, extract, group_mean, mean_type, plot_group=0,
rocextra=False, sorted_on=sorted_on, dev_desired = dev_desired)
####################################################
####################################################
# hier checken wir ob für diesen einen Punkt das funkioniert mit der standardabweichung
# embed()
try:
check_var_substract_method(spikes_pure)
except:
@ -200,7 +200,7 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
coh = False
if coh:
ax_w, d, data_dir, devs = plt_coherences(ax_w, d, data_dir, devs, grid)
ax_w, d, data_dir, devs = plt_coherences(ax_w, d, devs, grid)
# ax_cohs = plt.subplot(grid[0,1])
@ -216,9 +216,9 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
deltat = 1 / 40000
eodf = np.mean(group_mean[1].eodf)
eod_fr = eodf
# embed()
a_fr = 1
# embed()
eod_fe = eodf + np.mean(
group_mean[1].DF2) # data.eodf.iloc[0] + 10 # cell_model.eode.iloc[0]
a_fe = group_mean[0][1] / 100
@ -264,11 +264,10 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
extracted2 = [False, False, False, False]
for i in range(len(waves_presents)):
ax = plot_shemes4(eod_fish_r, eod_fish_e, eod_fish_j, grid0, time_array,
g=gs[i], title_top=True, add=0.1, eod_fr=eod_fr,
g=gs[i], title_top=True, eod_fr=eod_fr,
waves_present=waves_presents[i], ylim=ylim,
xlim=xlim, jammer_label='f2', color_am=colors_am[i],
emitter_label='f1',color_am2 = color01_2, receiver_label='f0',
extracted=extracted[i],extracted2=extracted2[i],
xlim=xlim, color_am=colors_am[i],
color_am2 = color01_2, extracted=extracted[i], extracted2=extracted2[i],
title=titles[i]) # 'intruder','receiver'#jammer_name
ax_w.append(ax)
@ -285,7 +284,7 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
if printing:
print(time.time() - t3)
# embed()
##########################################
# spike response
@ -302,7 +301,7 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
frs.append(fr)
fr = np.mean(frs)
#embed()
# embed()
base_several = False
if base_several:
spikes_new = []
@ -335,15 +334,11 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
fr_isi, ax_ps, ax_as = plot_arrays_ROC_psd_single3(
[[smoothed_base], arrays[2], arrays[1], arrays[3]],
[[mat_base], arrays_original[2], arrays_original[1],
arrays_original[3]], spikes_pure, fr, cell, grid0, chirps, extract,
mean_type,
group_mean, b, devs,
xlim=xlim, row=1 + d * 3,
arrays_original[3]], spikes_pure, cell, grid0, mean_type,
group_mean, xlim=xlim, row=1 + d * 3,
array_chosen=array_chosen,
color0_burst=color0_burst, mean_types=[mean_type],
color01=color01, color02=color02,ylim_log=(-15, 3),
color012=color012,color012_minus = color01_2,color0=color0,
color01_2=color01_2)
color0_burst=color0_burst, color01=color01, color02=color02,ylim_log=(-15, 3),
color012=color012,color012_minus = color01_2,color0=color0)
##########################################################################
@ -352,13 +347,13 @@ def motivation_all_small(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1',
# save_all(individual_tag, show, counter_contrast=0, savename='')
# print('individual_tag')
# embed()
axes = []
axes.append(ax_w)
# axes.extend(np.transpose(ax_as))
# axes.append(np.transpose(ax_ps))
# np.transpose(axes)
# embed()
#fig.tag(ax_w[0:3], xoffs=-2.3, yoffs=1.7)
#fig.tag(ax_w[3::], xoffs=-1.9, yoffs=1.4)
fig.tag(ax_w, xoffs=-1.9, yoffs=1.4)

BIN
motivation_stim.pdf Normal file

Binary file not shown.

BIN
motivation_stim.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

371
motivation_stim.py Normal file
View File

@ -0,0 +1,371 @@
#from matplotlib import gridspec as gridspec, pyplot as plt
#from plotstyle import plot_style
#from utils_all import chose_mat_max_value, default_settings, find_code_vs_not, save_visualization
#from utils_susept import check_var_substract_method, chose_certain_group, divergence_title_add_on, extract_waves, \
# find_all_dir_cells, \
# load_b_public, \
# load_cells_three, \
# plot_arrays_ROC_psd_single3, plot_shemes4, plt_coherences, predefine_grouping_frame, \
# restrict_cell_type, save_arrays_susept
from utils_suseptibility import *
#sys.path.insert(0, '..')
#from utils_suseptibility import motivation_small
#from utils import divergence_title_add_on,chose_certain_group,predefine_grouping_frame,check_nix_fish,find_all_dir_cells,load_cells_three,restrict_cell_type,plot_shemes2,plot_arrays_ROC_psd_single
def motivation_all_small_stim(dev_desired = '1',ylim=[-1.25, 1.25], c1=10, dfs=['m1', 'm2'], mult_type='_multsorted2_', top=0.94, devs=['2'],
                              figsize=None, redo=False, save=True, end='0', cut_matrix='malefemale', chose_score='mean_nrs',
                              a_fr=1, restrict='modulation', adapt='adaptoffsetallall2', step=str(30),
                              detections=['AllTrialsIndex'], variant='no', sorted_on='LocalReconst0.2Norm'):
    """Build and save the 'motivation' stimulus-overview figure for one P-unit cell.

    For a hard-coded example cell ('2021-08-03-ac-invivo-1') and one hard-coded
    DF1/DF2 beat-frequency combination at contrast c1/c2 = 10 %, this figure shows:

    * the receiver / emitter (f1) / jammer (f2) wave schemes (``plot_shemes4``),
    * the spike responses and power spectra (``plot_arrays_ROC_psd_single4``),

    and saves the result via ``save_visualization``.

    Parameters
    ----------
    dev_desired : str
        Smoothing kernel width in ms (as a string) passed to ``save_arrays_susept``
        and to the optional base-rate smoothing below.
    ylim : list
        y-limits for the wave-scheme panels.
    c1 : int
        First contrast (percent) used to select trials from the recording.
    dfs, mult_type, chose_score : various
        Only used by the (disabled) ``chose_mat_max_value`` branch.
    top : float
        Top margin of the figure grid.
    devs : list of str
        Smoothing-kernel identifiers forwarded to ``save_arrays_susept``.
    figsize : tuple or None
        Explicit figure size; default figure size is used when None.
    save : bool
        Whether to save the figure at the end.
    end : str
        Forwarded to ``load_cells_three`` (only reached when ``cells`` is empty).
    detections : list of str
        Detection/averaging variants; each entry produces one plot pass.
    sorted_on : str
        Trial-sorting key forwarded to ``save_arrays_susept``.

    NOTE(review): ``redo``, ``cut_matrix``, ``a_fr`` (reassigned below),
    ``restrict``, ``adapt``, ``step`` and ``variant`` are currently unused in the
    body (or referenced only from commented-out code); they are kept for
    call-site compatibility.
    """
    autodefines = [
        'triangle_diagonal_fr']  # ['triangle_fr', 'triangle_diagonal_fr', 'triangle_df2_fr','triangle_df2_eodf''triangle_df1_eodf', ] # ,'triangle_df2_fr''triangle_df1_fr','_triangle_diagonal__fr',]
    cells = ['2021-08-03-ac-invivo-1']  ##'2021-08-03-ad-invivo-1',,[10, ][5 ]
    c1s = [10]  # 1, 10,
    c2s = [10]
    plot_style()
    default_figsize(column=2, length=3.3)  # 6.7 ts=12, ls=12, fs=12
    show = True
    # NOTE(review): DF1_desired/DF2_desired are reassigned several times below;
    # only the last assignment before use ([1.2]/[0.95]) is effective.
    DF2_desired = [0.8]
    DF1_desired = [0.87]
    DF2_desired = [-0.23]
    DF1_desired = [0.94]
    # mean_type = '_MeanTrialsIndexPhaseSort_Min0.25sExcluded_'
    extract = ''
    datasets, data_dir = find_all_dir_cells()
    cells = ['2022-01-28-ah-invivo-1']  # , '2022-01-28-af-invivo-1', '2022-01-28-ab-invivo-1',
    # '2022-01-27-ab-invivo-1', ] # ,'2022-01-28-ah-invivo-1', '2022-01-28-af-invivo-1', ]
    append_others = 'apend_others'  # '#'apend_others'#'apend_others'#'apend_others'##'apend_others'
    autodefine = '_DFdesired_'
    autodefine = 'triangle_diagonal_fr'  # ['triangle_fr', 'triangle_diagonal_fr', 'triangle_df2_fr','triangle_df2_eodf''triangle_df1_eodf', ] # ,'triangle_df2_fr''triangle_df1_fr','_triangle_diagonal__fr',]
    DF2_desired = [-33]
    DF1_desired = [133]
    autodefine = '_dfchosen_closest_'
    autodefine = '_dfchosen_closest_first_'
    cells = ['2021-08-03-ac-invivo-1']  ##'2021-08-03-ad-invivo-1',,[10, ][5 ]
    # c1s = [10] # 1, 10,
    # c2s = [10]
    minsetting = 'Min0.25sExcluded'
    c2 = 10
    # detections = ['MeanTrialsIndexPhaseSort'] # ['AllTrialsIndex'] # ,'MeanTrialsIndexPhaseSort''DetectionAnalysis''_MeanTrialsPhaseSort'
    # detections = ['AllTrialsIndex'] # ['_MeanTrialsIndexPhaseSort_Min0.25sExcluded_extended_eod_loc_synch']
    extend_trials = ''  # 'extended'#''#'extended'#''  (translated note: purpose of this setting is unclear)
    # phase_sorting = ''#'PhaseSort'
    eodftype = '_psdEOD_'
    concat = ''  # 'TrialsConcat'
    indices = ['_allindices_']
    chirps = [
        '']  # '_ChirpsDelete3_',,'_ChirpsDelete3_'','','',''#'_ChirpsDelete3_'#''#'_ChirpsDelete3_'#'#'_ChirpsDelete2_'#''#'_ChirpsDelete_'#''#'_ChirpsDelete_'#''#'_ChirpsDelete_'#''#'_ChirpsCache_'
    extract = ''  # '_globalmax_'
    devs_savename = ['original', '05']  # ['05']#####################
    # control = pd.read_pickle(
    #     load_folder_name(
    #         'calc_model') + '/modell_all_cell_no_sinz3_afe0.1__afr1__afj0.1__length1.5_adaptoffsetallall2___stepefish' + step + 'Hz_ratecorrrisidual35__modelbigfit_nfft4096.pkl')
    if len(cells) < 1:
        # fall back to discovering cells on disk when none were hard-coded
        data_dir, cells = load_cells_three(end, data_dir=data_dir, datasets=datasets)
        cells, p_units_cells, pyramidals = restrict_cell_type(cells, 'p-units')
    # default_settings(fs=8)
    start = 'min'  #
    cells = ['2021-08-03-ac-invivo-1']
    tag_cells = []
    for c, cell in enumerate(cells):
        counter_pic = 0
        contrasts = [c2]
        tag_cell = []
        # NOTE(review): this loop reuses the name `c` from the outer cell loop.
        for c, contrast in enumerate(contrasts):
            contrast_small = 'c2'
            contrast_big = 'c1'
            contrasts1 = [c1]
            for contrast1 in contrasts1:
                for devname_orig in devs:
                    datapoints = [1000]
                    for d in datapoints:
                        ################################
                        # prepare DF1 desired
                        # chose_score = 'auci02_012-auci_base_01'
                        # (translated) the selection has to be consistent here; we do
                        # not actually want an autodefine here but rather the
                        # diagonal-based choice.
                        extra_f_calculatoin = False
                        if extra_f_calculatoin:
                            divergnce, fr, pivot_chosen, max_val, max_x, max_y, mult, DF1_desired, DF2_desired, min_y, min_x, min_val, diff_cut = chose_mat_max_value(
                                DF1_desired, DF2_desired, '', mult_type, eodftype, indices, cell, contrast_small,
                                contrast_big, contrast1, dfs, start, devname_orig, contrast, autodefine=autodefine,
                                cut_matrix='cut', chose_score=chose_score)  # chose_score = 'auci02_012-auci_base_01'
                        DF1_desired = [1.2]  # DF1_desired # [::-1]
                        DF2_desired = [0.95]  # DF2_desired # [::-1]
                        # embed()
                        #######################################
                        # ROC part
                        # fr, celltype = get_fr_from_info(cell, data_dir[c])
                        version_comp, subfolder, mod_name_slash, mod_name, subfolder_path = find_code_vs_not()
                        b = load_b_public(c, cell, data_dir)
                        mt_sorted = predefine_grouping_frame(b, eodftype=eodftype, cell_name=cell)
                        counter_waves = 0
                        # restrict to the desired contrast combination
                        mt_sorted = mt_sorted[(mt_sorted['c2'] == c2) & (mt_sorted['c1'] == c1)]
                        for gg in range(len(DF1_desired)):
                            # try:
                            t3 = time.time()
                            # except:
                            #     print('time thing')
                            #     embed()
                            ax_w = []
                            ###################
                            # all trials in one
                            grouped = mt_sorted.groupby(
                                ['c1', 'c2', 'm1, m2'],
                                as_index=False)
                            # try:
                            grouped_mean = chose_certain_group(DF1_desired[gg],
                                                               DF2_desired[gg], grouped,
                                                               several=True, emb=False,
                                                               concat=True)
                            # except:
                            #     print('grouped thing')
                            #     embed()
                            ###################
                            # groups sorted by repro tag
                            # todo: possibly re-store the tuples right here from the csv ''
                            grouped = mt_sorted.groupby(
                                ['c1', 'c2', 'm1, m2', 'repro_tag_id'],
                                as_index=False)
                            grouped_orig = chose_certain_group(DF1_desired[gg],
                                                               DF2_desired[gg],
                                                               grouped,
                                                               several=True)
                            gr_trials = len(grouped_orig)
                            ###################
                            groups_variants = [grouped_mean]
                            group_mean = [grouped_orig[0][0], grouped_mean]
                            # NOTE(review): this loop rebinds `d` (the datapoints
                            # loop variable) to the detection index.
                            for d, detection in enumerate(detections):
                                mean_type = '_' + detection  # + '_' + minsetting + '_' + extend_trials + concat
                                ##############################################################
                                # load plotting arrays
                                arrays, arrays_original, spikes_pure = save_arrays_susept(
                                    data_dir, cell, c, chirps, devs, extract, group_mean, mean_type, plot_group=0,
                                    rocextra=False, sorted_on=sorted_on, dev_desired = dev_desired)
                                ####################################################
                                ####################################################
                                # (translated) check whether the standard-deviation
                                # based method works for this single point
                                try:
                                    check_var_substract_method(spikes_pure)
                                except:
                                    print('var checking not possible')
                                # fig = plt.figure()
                                # grid = gridspec.GridSpec(2, 1, wspace=0.7, left=0.05, top=0.95, bottom=0.15,
                                #                          right=0.98)
                                if figsize:
                                    fig = plt.figure(figsize=figsize)
                                else:
                                    fig = plt.figure()
                                grid = gridspec.GridSpec(1, 1, wspace=0.7, hspace=0.35, left=0.055, top=top,
                                                         bottom=0.15,
                                                         right=0.935)  # height_ratios=[1, 2], height_ratios = [1,6]bottom=0.25, top=0.8,
                                hr = [1, 0.35, 1.2, 0, 3, ]  # 1
                                ##########################################################################
                                # several coherence plot
                                # frame_psd = pd.read_pickle(load_folder_name('calc_RAM')+'/noise_data11_nfft1sec_original__StimPreSaved4__first__CutatBeginning_0.05_s_NeurDelay_0.005_s_2021-08-03-ab-invivo-1.pkl')
                                # frame_psd = pd.read_pickle(load_folder_name('calc_RAM') + '/noise_data11_nfft1sec_original__StimPreSaved4__first__CutatBeginning_0.05_s_NeurDelay_0.005_s_2021-08-03-ab-invivo-1.pkl')
                                coh = False
                                if coh:
                                    ax_w, d, data_dir, devs = plt_coherences(ax_w, d, devs, grid)
                                # ax_cohs = plt.subplot(grid[0,1])
                                ##########################################################################
                                # part with the power spectra
                                grid0 = gridspec.GridSpecFromSubplotSpec(5, 4, wspace=0.15, hspace=0.35,
                                                                         subplot_spec=grid[:, :],
                                                                         height_ratios=hr)
                                xlim = [0, 100]
                                ###########################################
                                # synthesize the receiver/emitter/jammer waveforms
                                # from the measured EOD frequency and contrasts
                                stimulus_length = 0.3
                                deltat = 1 / 40000
                                eodf = np.mean(group_mean[1].eodf)
                                eod_fr = eodf
                                a_fr = 1
                                eod_fe = eodf + np.mean(
                                    group_mean[1].DF2)  # data.eodf.iloc[0] + 10 # cell_model.eode.iloc[0]
                                a_fe = group_mean[0][1] / 100
                                eod_fj = eodf + np.mean(
                                    group_mean[1].DF1)  # data.eodf.iloc[0] + 50 # cell_model.eodj.iloc[0]
                                a_fj = group_mean[0][0] / 100
                                variant_cell = 'no'  # 'receiver_emitter_jammer'
                                print('f0' + str(eod_fr))
                                print('f1'+str(eod_fe))
                                print('f2' + str(eod_fj))
                                eod_fish_j, time_array, time_fish_r, eod_fish_r, time_fish_e, eod_fish_e, time_fish_sam, eod_fish_sam, stimulus_am, stimulus_sam = extract_waves(
                                    variant_cell, '',
                                    stimulus_length, deltat, eod_fr, a_fr, a_fe, [eod_fe], 0, eod_fj, a_fj)
                                jammer_name = 'female'
                                cocktail_names = False
                                if cocktail_names:
                                    titles = ['receiver ',
                                              '+' + 'intruder ',
                                              '+' + jammer_name,
                                              '+' + jammer_name + '+intruder',
                                              []]  ##'receiver + ' + 'receiver + receiver
                                else:
                                    titles = title_motivation()
                                gs = [0, 1, 2, 3, 4]
                                waves_presents = [['receiver', '', '', 'all'],
                                                  ['receiver', 'emitter', '', 'all'],
                                                  ['receiver', '', 'jammer', 'all'],
                                                  ['receiver', 'emitter', 'jammer', 'all'],
                                                  ]  # ['', '', '', ''],['receiver', '', '', 'all'],
                                # ['receiver', '', 'jammer', 'all'],
                                # ['receiver', 'emitter', '', 'all'],'receiver', 'emitter', 'jammer',
                                symbols = ['']  # '$+$', '$-$', '$-$', '$=$',
                                symbols = ['', '', '', '', '']
                                time_array = time_array * 1000  # seconds -> milliseconds
                                color01, color012, color01_2, color02, color0_burst, color0 = colors_suscept_paper_dots()
                                colors_am = ['black', 'black', 'black', 'black']  # color01, color02, color012]
                                extracted = [False, True, True, True]
                                extracted2 = [False, False, False, False]
                                # one scheme panel per wave combination
                                for i in range(len(waves_presents)):
                                    ax = plot_shemes4(eod_fish_r, eod_fish_e, eod_fish_j, grid0, time_array,
                                                      g=gs[i], title_top=True, eod_fr=eod_fr,
                                                      waves_present=waves_presents[i], ylim=ylim,
                                                      xlim=xlim, color_am=colors_am[i],
                                                      color_am2 = color01_2, extracted=extracted[i], extracted2=extracted2[i],
                                                      title=titles[i])  # 'intruder','receiver'#jammer_name
                                    ax_w.append(ax)
                                    if ax != []:
                                        ax.text(1.1, 0.45, symbols[i], fontsize=35, transform=ax.transAxes)
                                        bar = False
                                        if bar:
                                            if i == 0:
                                                # optional 20 ms scale bar on the first panel
                                                ax.plot([0, 20], [ylim[0] + 0.01, ylim[0] + 0.01], color='black')
                                                ax.text(0, -0.16, '20 ms', va='center', fontsize=10,
                                                        transform=ax.transAxes)
                                printing = True
                                if printing:
                                    print(time.time() - t3)
                                ##########################################
                                # spike response
                                array_chosen = 1
                                if d == 0:  #
                                    # embed()
                                    # mean baseline firing rate over trials, assuming
                                    # a 0.5 s baseline window per trial
                                    frs = []
                                    for i in range(len(spikes_pure['base_0'])):
                                        # duration = spikes_pure['base_0'][i][-1] / 1000
                                        duration = 0.5
                                        fr = len(spikes_pure['base_0'][i])/duration
                                        frs.append(fr)
                                    fr = np.mean(frs)
                                    # embed()
                                    base_several = False
                                    if base_several:
                                        # split baseline spike trains into 100 ms chunks
                                        # and recompute the smoothed rate from those
                                        spikes_new = []
                                        for i in range(len(spikes_pure['base_0'])):
                                            duration = 100
                                            duration_full = 101  # 501
                                            dur = np.arange(0, duration_full, duration)
                                            for d_nr in range(len(dur) - 1):
                                                # embed()
                                                spikes_new.append(np.array(spikes_pure['base_0'][i][
                                                                               (spikes_pure['base_0'][i] > dur[d_nr]) & (
                                                                                       spikes_pure['base_0'][i] < dur[
                                                                                   d_nr + 1])])/1000-dur[d_nr]/1000)
                                        # spikes_pure['base_0'] = spikes_new
                                        sampling_rate = 1/np.diff(time_array)
                                        sampling_rate = int(sampling_rate[0]*1000)
                                        spikes_mats = []
                                        smoothed05 = []
                                        for i in range(len(spikes_new)):
                                            spikes_mat = cr_spikes_mat(spikes_new[i], sampling_rate, int(sampling_rate*duration/1000))
                                            spikes_mats.append(spikes_mat)
                                            smoothed05.append(gaussian_filter(spikes_mat, sigma=(float(dev_desired)/1000) * sampling_rate))
                                        smoothed_base = np.mean(smoothed05, axis=0)
                                        mat_base = np.mean(spikes_mats, axis=0)
                                    else:
                                        # use the precomputed baseline arrays as-is
                                        smoothed_base = arrays[0][0]
                                        mat_base = arrays_original[0][0]
                                    # embed()#arrays[0]v
                                    fr_isi, ax_ps, ax_as = plot_arrays_ROC_psd_single4([[smoothed_base], arrays[2], arrays[1], arrays[3]],
                                                                                       [[smoothed_base], arrays[2], arrays[1], arrays[3]],
                                                                                       [[mat_base], arrays_original[2], arrays_original[1],
                                                                                        arrays_original[3]], spikes_pure, cell, grid0, mean_type,
                                                                                       group_mean, xlim=xlim, row=1 + d * 3,
                                                                                       array_chosen=array_chosen,
                                                                                       color0_burst=color0_burst, color01=color01, color02=color02, ylim_log=(-15, 3),
                                                                                       color012=color012, color012_minus = color01_2, color0=color0)
                                ##########################################################################
                                individual_tag = 'DF1' + str(DF1_desired[gg]) + 'DF2' + str(
                                    DF2_desired[gg]) + cell + '_c1_' + str(c1) + '_c2_' + str(c2) + mean_type
                                # save_all(individual_tag, show, counter_contrast=0, savename='')
                                # print('individual_tag')
                                axes = []
                                axes.append(ax_w)
                                # axes.extend(np.transpose(ax_as))
                                # axes.append(np.transpose(ax_ps))
                                # np.transpose(axes)
                                # fig.tag(ax_w[0:3], xoffs=-2.3, yoffs=1.7)
                                # fig.tag(ax_w[3::], xoffs=-1.9, yoffs=1.4)
                                fig.tag(ax_w, xoffs=-1.9, yoffs=1.4)
                                # ax_w, np.transpose(ax_as), ax_ps
                                if save:
                                    save_visualization(individual_tag=individual_tag, show=show, pdf=True)
                                # fig = plt.gcf()
                                # fig.savefig
                                # plt.show()
if __name__ == '__main__':
    # Script entry point: render and save the motivation stimulus figure with
    # the settings used for the manuscript version of this plot.
    motivation_all_small_stim(
        dev_desired='1',
        c1=10,
        devs=['05'],
        mult_type='_multsorted2_',
        detections=['MeanTrialsIndexPhaseSort'],
        sorted_on='LocalReconst0.2NormAm',
        chose_score='mean_nrs',
        cut_matrix='malefemale',
        restrict='modulation_no_classes',
        step='50',
        end='all',
        redo=True,
        save=True)

15506
motivation_stim_05_012.csv Normal file

File diff suppressed because it is too large Load Diff

15506
motivation_stim_05_base_0.csv Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,87 @@
,0,1,2
0,5.360000000375287,3.6000000003509633,5.439999999890446
1,6.9100000003272655,5.150000000302942,7.064999999766755
2,28.034999999628774,6.5749999997747075,26.89000000007671
3,29.509999999656422,8.299999999853288,28.31500000045797
4,31.210000000366556,12.724999999936234,29.739999999929736
5,34.28499999999257,29.175000000013355,34.21499999965907
6,36.98499999999694,30.750000000243276,35.765000000520544
7,43.310000000285065,35.04999999984601,37.4149999997653
8,58.38499999962732,36.59999999979799,58.16500000035465
9,61.23500000038984,42.7999999996059,59.6400000003823
10,64.23500000009153,59.30000000023891,61.34000000018294
11,65.88500000024578,60.89999999983728,65.69000000034156
12,88.18499999987762,65.2499999999959,67.26499999966198
13,89.70999999955166,66.75000000030148,88.31499999994865
14,91.45999999990818,68.64999999959717,89.76499999969836
15,94.73499999993874,89.34999999963064,91.39000000048416
16,97.10999999996784,90.89999999958262,94.7649999998075
17,118.16000000025451,95.32499999966556,97.59000000029208
18,119.73499999957494,96.8500000002491,117.14000000027318
19,121.30999999980486,98.49999999949385,118.63999999966927
20,124.36000000006243,119.37499999965394,120.06500000005053
21,126.01000000021668,120.90000000023747,123.1150000003081
22,127.68499999973938,124.00000000014143,125.96500000016113
23,148.43500000032873,125.65000000029568,147.1899999996649
24,150.06000000020504,127.14999999969177,148.66499999969255
25,154.46000000001004,134.67499999967868,150.06499999979587
26,156.03500000023996,149.6250000003597,154.69000000028336
27,157.58500000019194,151.32500000016034,156.2149999999574
28,178.48499999972046,155.5750000001167,174.49000000031538
29,180.03499999967244,157.07499999951278,178.6650000003474
30,184.48500000003332,163.14999999974998,180.1150000000971
31,186.0349999999853,179.650000000383,181.7900000005293
32,187.58499999993728,181.37499999955207,186.11500000040996
33,208.58499999966807,185.57499999986203,187.84000000048854
34,210.08499999997366,187.09999999953607,208.9400000004216
35,212.38500000007843,189.05000000029713,210.28999999996904
36,216.03499999973064,194.75000000000318,211.9400000001233
37,217.58499999968262,209.77499999969905,216.1649999998017
38,238.68499999961568,211.32499999965103,217.86500000051183
39,240.21000000019922,215.77500000001191,238.83999999996468
40,241.8849999997219,217.2750000003175,240.34000000027027
41,246.05999999975393,223.27499999972088,241.91500000050019
42,247.6099999997059,239.87499999964666,245.0399999997726
43,268.76000000019485,241.62500000000318,246.54000000007818
44,270.2849999998689,245.8499999996816,267.59000000036485
45,271.88500000037675,247.37500000026512,269.0650000003925
46,276.2100000002574,249.07500000006576,270.5150000001422
47,277.6850000002851,269.89999999966994,274.98999999987154
48,298.90999999978885,271.39999999997553,276.56500000010146
49,300.35999999953856,275.79999999978054,297.61500000038814
50,301.9600000000464,277.3499999997325,299.11499999978423
51,305.010000000304,279.10000000008904,300.5400000001655
52,306.56000000025597,300.0249999998955,303.86499999984244
53,308.1349999995764,301.54999999956954,306.6150000004027
54,329.00999999973646,306.02500000020837,308.3150000002033
55,330.51000000004206,307.52499999960446,329.16500000008546
56,335.1100000002516,309.24999999968304,330.61499999983516
57,336.58500000027925,330.09999999956517,332.2399999997115
58,338.1849999998776,331.6250000001487,335.4399999998177
59,359.2350000001643,334.6999999997747,337.0150000000476
60,360.8600000000406,336.3000000002826,359.1649999998308
61,365.0099999997947,337.7500000000323,360.6650000001364
62,366.55999999974665,344.0500000000425,362.31500000029064
63,368.10999999969863,360.2750000003466,365.3649999996387
64,374.3349999997845,361.8250000002986,366.9650000001466
65,389.4599999996826,366.0999999996234,388.01500000043325
66,392.159999999687,367.64999999957536,389.41500000053657
67,395.2849999998689,373.89999999993915,390.8650000002863
68,396.88500000037675,390.39999999966267,392.5150000004405
69,398.4100000000508,394.85000000002356,396.7899999997653
70,419.63499999955457,396.3749999996976,398.61500000004617
71,422.2599999996346,397.8750000000032,418.2399999999516
72,425.45999999974083,402.6250000000614,419.8649999998279
73,426.91000000040003,420.72500000029277,421.3150000004871
74,428.43500000007407,425.12500000009777,425.5899999998119
75,435.960000000061,426.62499999949387,427.09000000011747
76,450.8349999999082,428.07500000015307,449.4899999999516
77,452.4100000001381,433.97500000026366,450.9649999999792
78,455.51000000004206,435.9749999997616,452.66499999977987
79,457.18499999956475,454.9499999997165,455.6899999997595
80,458.68499999987034,456.4999999996685,457.215000000343
81,480.8100000002851,457.97499999969614,479.56500000053074
82,482.3849999996055,461.324999999651,481.06499999992684
83,485.3850000002167,,482.91500000048563
84,486.98499999981505,,
85,488.60999999969135,,
1 0 1 2
2 0 5.360000000375287 3.6000000003509633 5.439999999890446
3 1 6.9100000003272655 5.150000000302942 7.064999999766755
4 2 28.034999999628774 6.5749999997747075 26.89000000007671
5 3 29.509999999656422 8.299999999853288 28.31500000045797
6 4 31.210000000366556 12.724999999936234 29.739999999929736
7 5 34.28499999999257 29.175000000013355 34.21499999965907
8 6 36.98499999999694 30.750000000243276 35.765000000520544
9 7 43.310000000285065 35.04999999984601 37.4149999997653
10 8 58.38499999962732 36.59999999979799 58.16500000035465
11 9 61.23500000038984 42.7999999996059 59.6400000003823
12 10 64.23500000009153 59.30000000023891 61.34000000018294
13 11 65.88500000024578 60.89999999983728 65.69000000034156
14 12 88.18499999987762 65.2499999999959 67.26499999966198
15 13 89.70999999955166 66.75000000030148 88.31499999994865
16 14 91.45999999990818 68.64999999959717 89.76499999969836
17 15 94.73499999993874 89.34999999963064 91.39000000048416
18 16 97.10999999996784 90.89999999958262 94.7649999998075
19 17 118.16000000025451 95.32499999966556 97.59000000029208
20 18 119.73499999957494 96.8500000002491 117.14000000027318
21 19 121.30999999980486 98.49999999949385 118.63999999966927
22 20 124.36000000006243 119.37499999965394 120.06500000005053
23 21 126.01000000021668 120.90000000023747 123.1150000003081
24 22 127.68499999973938 124.00000000014143 125.96500000016113
25 23 148.43500000032873 125.65000000029568 147.1899999996649
26 24 150.06000000020504 127.14999999969177 148.66499999969255
27 25 154.46000000001004 134.67499999967868 150.06499999979587
28 26 156.03500000023996 149.6250000003597 154.69000000028336
29 27 157.58500000019194 151.32500000016034 156.2149999999574
30 28 178.48499999972046 155.5750000001167 174.49000000031538
31 29 180.03499999967244 157.07499999951278 178.6650000003474
32 30 184.48500000003332 163.14999999974998 180.1150000000971
33 31 186.0349999999853 179.650000000383 181.7900000005293
34 32 187.58499999993728 181.37499999955207 186.11500000040996
35 33 208.58499999966807 185.57499999986203 187.84000000048854
36 34 210.08499999997366 187.09999999953607 208.9400000004216
37 35 212.38500000007843 189.05000000029713 210.28999999996904
38 36 216.03499999973064 194.75000000000318 211.9400000001233
39 37 217.58499999968262 209.77499999969905 216.1649999998017
40 38 238.68499999961568 211.32499999965103 217.86500000051183
41 39 240.21000000019922 215.77500000001191 238.83999999996468
42 40 241.8849999997219 217.2750000003175 240.34000000027027
43 41 246.05999999975393 223.27499999972088 241.91500000050019
44 42 247.6099999997059 239.87499999964666 245.0399999997726
45 43 268.76000000019485 241.62500000000318 246.54000000007818
46 44 270.2849999998689 245.8499999996816 267.59000000036485
47 45 271.88500000037675 247.37500000026512 269.0650000003925
48 46 276.2100000002574 249.07500000006576 270.5150000001422
49 47 277.6850000002851 269.89999999966994 274.98999999987154
50 48 298.90999999978885 271.39999999997553 276.56500000010146
51 49 300.35999999953856 275.79999999978054 297.61500000038814
52 50 301.9600000000464 277.3499999997325 299.11499999978423
53 51 305.010000000304 279.10000000008904 300.5400000001655
54 52 306.56000000025597 300.0249999998955 303.86499999984244
55 53 308.1349999995764 301.54999999956954 306.6150000004027
56 54 329.00999999973646 306.02500000020837 308.3150000002033
57 55 330.51000000004206 307.52499999960446 329.16500000008546
58 56 335.1100000002516 309.24999999968304 330.61499999983516
59 57 336.58500000027925 330.09999999956517 332.2399999997115
60 58 338.1849999998776 331.6250000001487 335.4399999998177
61 59 359.2350000001643 334.6999999997747 337.0150000000476
62 60 360.8600000000406 336.3000000002826 359.1649999998308
63 61 365.0099999997947 337.7500000000323 360.6650000001364
64 62 366.55999999974665 344.0500000000425 362.31500000029064
65 63 368.10999999969863 360.2750000003466 365.3649999996387
66 64 374.3349999997845 361.8250000002986 366.9650000001466
67 65 389.4599999996826 366.0999999996234 388.01500000043325
68 66 392.159999999687 367.64999999957536 389.41500000053657
69 67 395.2849999998689 373.89999999993915 390.8650000002863
70 68 396.88500000037675 390.39999999966267 392.5150000004405
71 69 398.4100000000508 394.85000000002356 396.7899999997653
72 70 419.63499999955457 396.3749999996976 398.61500000004617
73 71 422.2599999996346 397.8750000000032 418.2399999999516
74 72 425.45999999974083 402.6250000000614 419.8649999998279
75 73 426.91000000040003 420.72500000029277 421.3150000004871
76 74 428.43500000007407 425.12500000009777 425.5899999998119
77 75 435.960000000061 426.62499999949387 427.09000000011747
78 76 450.8349999999082 428.07500000015307 449.4899999999516
79 77 452.4100000001381 433.97500000026366 450.9649999999792
80 78 455.51000000004206 435.9749999997616 452.66499999977987
81 79 457.18499999956475 454.9499999997165 455.6899999997595
82 80 458.68499999987034 456.4999999996685 457.215000000343
83 81 480.8100000002851 457.97499999969614 479.56500000053074
84 82 482.3849999996055 461.324999999651 481.06499999992684
85 83 485.3850000002167 482.91500000048563
86 84 486.98499999981505
87 85 488.60999999969135

View File

@ -0,0 +1,71 @@
,0,1,2
0,4.374999999527063,0.8000000000265572,3.375000000005457
1,12.3249999996915,9.94999999988977,9.649999999737702
2,19.900000000234286,15.899999999646752,17.150000000356158
3,25.649999999586726,22.12499999973261,24.5250000004944
4,31.525000000328873,31.100000000378714,33.44999999967513
5,40.77500000039436,38.499999999885404,39.524999999912325
6,46.55000000002474,44.67500000032487,48.62500000012915
7,52.64999999963038,50.700000000006185,53.24999999970714
8,61.674999999922875,58.0000000002201,62.050000000226646
9,72.19999999961146,65.67500000005566,72.6750000001175
10,79.62500000030559,73.2250000003205,78.69999999979882
11,85.72499999991123,82.09999999985484,87.6000000005206
12,94.62499999972351,88.25000000001637,90.75000000007094
13,99.79999999995925,95.87500000020555,96.7500000003838
14,106.87500000040018,104.67499999981555,108.67500000017571
15,115.67500000001019,106.3749999996162,111.65000000050895
16,117.57500000021537,115.27500000033797,120.67499999989195
17,127.87500000013097,121.27499999974134,126.7249999998512
18,133.74999999996362,130.59999999973115,132.8000000000884
19,142.74999999997817,136.39999999963948,138.95000000024993
20,146.09999999993306,142.549999999801,147.87500000034015
21,156.45000000040454,149.77500000009059,155.50000000052933
22,160.874999999578,160.600000000386,161.4999999999327
23,171.62499999994907,169.37499999971806,169.09999999984393
24,177.47500000041327,172.60000000010223,179.42500000003747
25,182.24999999983993,181.42499999999018,183.97499999969114
26,188.17500000022847,186.22499999969477,191.74999999972897
27,197.1000000003187,193.44999999998436,198.97500000001855
28,206.0750000000553,202.40000000035252,207.9750000000331
29,212.12500000001455,208.5250000002361,211.04999999965912
30,218.10000000004948,215.97500000029868,223.0749999996533
31,230.0999999997657,222.32499999995525,229.00000000004184
32,233.17500000030122,231.07499999991887,235.0500000000011
33,242.1249999997599,235.7249999997748,241.0749999996824
34,245.2499999999418,241.72500000008768,250.07499999969696
35,254.37499999952706,250.599999999622,256.32500000006075
36,263.1499999997686,253.92500000020846,265.2000000005046
37,264.9250000004031,265.800000000354,271.2250000001859
38,275.3999999995358,272.0750000000862,280.17499999964457
39,279.9750000003769,277.8499999997166,287.900000000036
40,291.7749999996886,285.3749999997035,296.699999999646
41,293.4750000003987,290.15000000003965,298.3250000004318
42,299.4749999998021,296.0249999998723,307.3250000004464
43,308.324999999968,306.7750000002434,313.27500000020336
44,315.87500000023283,316.850000000386,322.4999999999909
45,323.4499999998661,320.1000000001386,331.3999999998032
46,329.47499999954744,328.9500000003045,337.425000000394
47,335.62499999970896,335.09999999955653,343.47500000035325
48,346.3000000001557,344.124999999849,352.6500000004944
49,350.8249999995314,352.949999999737,358.4250000001248
50,359.69999999997526,360.69999999949687,361.70000000015534
51,367.2500000002401,365.19999999950414,372.12499999964166
52,371.6500000000451,374.2750000003525,379.70000000018445
53,382.09999999980937,381.8000000003394,387.34999999974207
54,386.55000000017026,389.0750000002754,394.62499999967804
55,392.57499999985157,395.17499999988104,405.05000000007385
56,398.92499999950815,404.1250000002492,406.5749999997479
57,409.1500000004089,413.17499999991014,412.7249999999094
58,415.29999999966094,419.175000000223,418.82500000042455
59,422.82499999964784,426.77500000013424,427.67499999968095
60,431.775000000016,435.6749999999465,436.92499999974643
61,437.8999999998996,437.4000000000251,445.97500000031687
62,445.34999999996217,447.8750000000673,454.7749999999269
63,452.8250000003027,455.37499999977626,461.29999999971005
64,461.9750000001659,461.67499999978645,468.4500000000753
65,470.97500000018044,470.34999999982574,475.87499999985994
66,475.4499999999098,476.37499999950705,482.0999999999458
67,481.4249999999447,482.32500000017353,489.72500000013497
68,487.5499999998283,488.52499999998145,496.95000000042455
69,497.90000000029977,496.09999999961474,
1 0 1 2
2 0 4.374999999527063 0.8000000000265572 3.375000000005457
3 1 12.3249999996915 9.94999999988977 9.649999999737702
4 2 19.900000000234286 15.899999999646752 17.150000000356158
5 3 25.649999999586726 22.12499999973261 24.5250000004944
6 4 31.525000000328873 31.100000000378714 33.44999999967513
7 5 40.77500000039436 38.499999999885404 39.524999999912325
8 6 46.55000000002474 44.67500000032487 48.62500000012915
9 7 52.64999999963038 50.700000000006185 53.24999999970714
10 8 61.674999999922875 58.0000000002201 62.050000000226646
11 9 72.19999999961146 65.67500000005566 72.6750000001175
12 10 79.62500000030559 73.2250000003205 78.69999999979882
13 11 85.72499999991123 82.09999999985484 87.6000000005206
14 12 94.62499999972351 88.25000000001637 90.75000000007094
15 13 99.79999999995925 95.87500000020555 96.7500000003838
16 14 106.87500000040018 104.67499999981555 108.67500000017571
17 15 115.67500000001019 106.3749999996162 111.65000000050895
18 16 117.57500000021537 115.27500000033797 120.67499999989195
19 17 127.87500000013097 121.27499999974134 126.7249999998512
20 18 133.74999999996362 130.59999999973115 132.8000000000884
21 19 142.74999999997817 136.39999999963948 138.95000000024993
22 20 146.09999999993306 142.549999999801 147.87500000034015
23 21 156.45000000040454 149.77500000009059 155.50000000052933
24 22 160.874999999578 160.600000000386 161.4999999999327
25 23 171.62499999994907 169.37499999971806 169.09999999984393
26 24 177.47500000041327 172.60000000010223 179.42500000003747
27 25 182.24999999983993 181.42499999999018 183.97499999969114
28 26 188.17500000022847 186.22499999969477 191.74999999972897
29 27 197.1000000003187 193.44999999998436 198.97500000001855
30 28 206.0750000000553 202.40000000035252 207.9750000000331
31 29 212.12500000001455 208.5250000002361 211.04999999965912
32 30 218.10000000004948 215.97500000029868 223.0749999996533
33 31 230.0999999997657 222.32499999995525 229.00000000004184
34 32 233.17500000030122 231.07499999991887 235.0500000000011
35 33 242.1249999997599 235.7249999997748 241.0749999996824
36 34 245.2499999999418 241.72500000008768 250.07499999969696
37 35 254.37499999952706 250.599999999622 256.32500000006075
38 36 263.1499999997686 253.92500000020846 265.2000000005046
39 37 264.9250000004031 265.800000000354 271.2250000001859
40 38 275.3999999995358 272.0750000000862 280.17499999964457
41 39 279.9750000003769 277.8499999997166 287.900000000036
42 40 291.7749999996886 285.3749999997035 296.699999999646
43 41 293.4750000003987 290.15000000003965 298.3250000004318
44 42 299.4749999998021 296.0249999998723 307.3250000004464
45 43 308.324999999968 306.7750000002434 313.27500000020336
46 44 315.87500000023283 316.850000000386 322.4999999999909
47 45 323.4499999998661 320.1000000001386 331.3999999998032
48 46 329.47499999954744 328.9500000003045 337.425000000394
49 47 335.62499999970896 335.09999999955653 343.47500000035325
50 48 346.3000000001557 344.124999999849 352.6500000004944
51 49 350.8249999995314 352.949999999737 358.4250000001248
52 50 359.69999999997526 360.69999999949687 361.70000000015534
53 51 367.2500000002401 365.19999999950414 372.12499999964166
54 52 371.6500000000451 374.2750000003525 379.70000000018445
55 53 382.09999999980937 381.8000000003394 387.34999999974207
56 54 386.55000000017026 389.0750000002754 394.62499999967804
57 55 392.57499999985157 395.17499999988104 405.05000000007385
58 56 398.92499999950815 404.1250000002492 406.5749999997479
59 57 409.1500000004089 413.17499999991014 412.7249999999094
60 58 415.29999999966094 419.175000000223 418.82500000042455
61 59 422.82499999964784 426.77500000013424 427.67499999968095
62 60 431.775000000016 435.6749999999465 436.92499999974643
63 61 437.8999999998996 437.4000000000251 445.97500000031687
64 62 445.34999999996217 447.8750000000673 454.7749999999269
65 63 452.8250000003027 455.37499999977626 461.29999999971005
66 64 461.9750000001659 461.67499999978645 468.4500000000753
67 65 470.97500000018044 470.34999999982574 475.87499999985994
68 66 475.4499999999098 476.37499999950705 482.0999999999458
69 67 481.4249999999447 482.32500000017353 489.72500000013497
70 68 487.5499999998283 488.52499999998145 496.95000000042455
71 69 497.90000000029977 496.09999999961474

View File

@ -0,0 +1,79 @@
,1,0,2
0,0.9749999995259713,5.37499999973079,6.675000000271757
1,6.624999999585635,6.87500000003638,12.875000000079671
2,14.17499999985048,13.200000000324508,14.525000000233922
3,15.949999999575448,20.599999999831198,21.774999999891953
4,21.650000000190992,22.149999999783176,29.40000000008113
5,29.575000000077488,31.124999999519787,36.77500000021937
6,38.50000000016771,37.04999999990832,43.17500000043183
7,44.59999999977335,44.574999999895226,51.82500000019318
8,51.899999999987266,52.05000000023574,53.574999999640205
9,59.299999999493956,58.2750000003216,59.6499999998774
10,61.15000000005275,67.00000000000728,67.12500000021791
11,68.89999999981265,73.44999999986612,74.5249999997246
12,80.52499999990724,80.87499999965075,81.89999999986284
13,81.97499999965694,89.5250000003216,88.02499999974643
14,89.42499999971952,91.34999999969295,89.74999999982501
15,90.99999999994944,97.22499999952561,97.65000000034306
16,97.37499999988395,103.5249999995358,104.60000000030377
17,104.72499999974426,112.19999999957508,112.05000000036634
18,112.05000000023611,118.37500000001455,113.8749999997377
19,119.50000000029868,127.1999999999025,125.74999999988322
20,121.24999999974571,133.37500000034197,127.2500000001888
21,133.37499999994216,135.00000000021828,134.6000000000491
22,136.19999999951725,142.39999999972497,136.40000000005202
23,142.250000000386,148.44999999968422,149.77500000050313
24,149.67500000017063,157.4000000000524,157.17500000000982
25,151.34999999969332,164.9250000000393,158.9000000000884
26,157.42499999993052,172.37500000010186,164.82500000047693
27,166.47499999959146,179.87499999981083,172.19999999970568
28,172.25000000013134,181.57499999961146,179.87500000045074
29,179.7000000001939,187.64999999984866,187.3750000001597
30,181.50000000019682,196.6499999998632,193.29999999963874
31,194.77499999953616,202.52499999969586,194.97500000007093
32,196.49999999961474,210.17500000016298,202.45000000041145
33,202.324999999801,217.49999999974534,210.15000000052495
34,208.5750000001648,225.09999999965657,217.40000000018298
35,217.29999999985048,226.92499999993743,225.07500000001855
36,224.775000000191,232.60000000027503,232.37500000023246
37,226.77499999968896,240.37500000031287,234.1249999996795
38,238.5499999996322,247.6249999999709,241.72500000050022
39,240.17499999950851,255.3250000000844,247.5000000001306
40,247.45000000035398,262.775000000147,253.8750000000651
41,253.67499999953034,268.72499999990396,262.5249999998265
42,262.42499999949393,270.34999999978027,270.22499999994
43,268.62500000021134,277.75000000019645,277.5749999998003
44,277.42499999982135,285.2999999995518,279.27500000051043
45,279.124999999622,292.699999999968,292.5250000004813
46,286.57499999968456,298.82499999985157,294.3000000002063
47,292.6249999996438,300.50000000028376,300.22499999968534
48,300.17499999990866,314.2250000000786,307.74999999967224
49,307.57500000032485,322.82500000019354,315.325000000215
50,315.19999999960453,328.84999999987485,322.7249999997217
51,321.3250000003976,330.5000000000291,330.3499999999109
52,329.0750000001575,337.8999999995358,336.4250000001481
53,337.60000000034813,345.7250000001295,337.97500000010007
54,339.30000000014877,352.8500000002168,351.5500000000462
55,346.7749999995798,359.0750000003027,353.0999999999982
56,352.87500000009493,368.02499999976135,360.47500000013645
57,360.34999999952595,369.8500000000422,367.8500000002747
58,367.7249999996642,381.67499999963184,374.07500000036055
59,375.3249999995754,383.20000000021537,377.0499999997843
60,382.79999999991594,389.3500000003769,383.1500000002994
61,390.4250000001051,398.0249999995067,390.8749999997814
62,397.8249999996118,404.1749999996682,397.92499999994436
63,399.6249999996147,406.04999999959546,405.9249999997552
64,405.4750000000789,413.1250000000364,413.15000000004477
65,407.3750000002841,420.7249999999476,420.525000000183
66,419.075000000303,428.14999999973224,428.0500000001699
67,427.94999999983736,435.6999999999971,434.24999999997783
68,429.649999999638,437.4500000003536,435.9000000001321
69,435.7249999998752,449.39999999951397,443.2499999999924
70,443.1499999996598,456.9000000001324,450.8000000002572
71,450.774999999849,458.34999999988213,458.1249999998396
72,458.02499999950703,465.82500000022264,465.75000000002876
73,465.5749999997719,473.2750000002852,473.20000000009134
74,473.15000000031466,480.92499999984284,480.7749999997246
75,479.4250000000469,488.27499999970314,482.6000000000055
76,488.1000000000862,494.4500000001426,488.3999999999138
77,489.87499999981117,,
1 1 0 2
2 0 0.9749999995259713 5.37499999973079 6.675000000271757
3 1 6.624999999585635 6.87500000003638 12.875000000079671
4 2 14.17499999985048 13.200000000324508 14.525000000233922
5 3 15.949999999575448 20.599999999831198 21.774999999891953
6 4 21.650000000190992 22.149999999783176 29.40000000008113
7 5 29.575000000077488 31.124999999519787 36.77500000021937
8 6 38.50000000016771 37.04999999990832 43.17500000043183
9 7 44.59999999977335 44.574999999895226 51.82500000019318
10 8 51.899999999987266 52.05000000023574 53.574999999640205
11 9 59.299999999493956 58.2750000003216 59.6499999998774
12 10 61.15000000005275 67.00000000000728 67.12500000021791
13 11 68.89999999981265 73.44999999986612 74.5249999997246
14 12 80.52499999990724 80.87499999965075 81.89999999986284
15 13 81.97499999965694 89.5250000003216 88.02499999974643
16 14 89.42499999971952 91.34999999969295 89.74999999982501
17 15 90.99999999994944 97.22499999952561 97.65000000034306
18 16 97.37499999988395 103.5249999995358 104.60000000030377
19 17 104.72499999974426 112.19999999957508 112.05000000036634
20 18 112.05000000023611 118.37500000001455 113.8749999997377
21 19 119.50000000029868 127.1999999999025 125.74999999988322
22 20 121.24999999974571 133.37500000034197 127.2500000001888
23 21 133.37499999994216 135.00000000021828 134.6000000000491
24 22 136.19999999951725 142.39999999972497 136.40000000005202
25 23 142.250000000386 148.44999999968422 149.77500000050313
26 24 149.67500000017063 157.4000000000524 157.17500000000982
27 25 151.34999999969332 164.9250000000393 158.9000000000884
28 26 157.42499999993052 172.37500000010186 164.82500000047693
29 27 166.47499999959146 179.87499999981083 172.19999999970568
30 28 172.25000000013134 181.57499999961146 179.87500000045074
31 29 179.7000000001939 187.64999999984866 187.3750000001597
32 30 181.50000000019682 196.6499999998632 193.29999999963874
33 31 194.77499999953616 202.52499999969586 194.97500000007093
34 32 196.49999999961474 210.17500000016298 202.45000000041145
35 33 202.324999999801 217.49999999974534 210.15000000052495
36 34 208.5750000001648 225.09999999965657 217.40000000018298
37 35 217.29999999985048 226.92499999993743 225.07500000001855
38 36 224.775000000191 232.60000000027503 232.37500000023246
39 37 226.77499999968896 240.37500000031287 234.1249999996795
40 38 238.5499999996322 247.6249999999709 241.72500000050022
41 39 240.17499999950851 255.3250000000844 247.5000000001306
42 40 247.45000000035398 262.775000000147 253.8750000000651
43 41 253.67499999953034 268.72499999990396 262.5249999998265
44 42 262.42499999949393 270.34999999978027 270.22499999994
45 43 268.62500000021134 277.75000000019645 277.5749999998003
46 44 277.42499999982135 285.2999999995518 279.27500000051043
47 45 279.124999999622 292.699999999968 292.5250000004813
48 46 286.57499999968456 298.82499999985157 294.3000000002063
49 47 292.6249999996438 300.50000000028376 300.22499999968534
50 48 300.17499999990866 314.2250000000786 307.74999999967224
51 49 307.57500000032485 322.82500000019354 315.325000000215
52 50 315.19999999960453 328.84999999987485 322.7249999997217
53 51 321.3250000003976 330.5000000000291 330.3499999999109
54 52 329.0750000001575 337.8999999995358 336.4250000001481
55 53 337.60000000034813 345.7250000001295 337.97500000010007
56 54 339.30000000014877 352.8500000002168 351.5500000000462
57 55 346.7749999995798 359.0750000003027 353.0999999999982
58 56 352.87500000009493 368.02499999976135 360.47500000013645
59 57 360.34999999952595 369.8500000000422 367.8500000002747
60 58 367.7249999996642 381.67499999963184 374.07500000036055
61 59 375.3249999995754 383.20000000021537 377.0499999997843
62 60 382.79999999991594 389.3500000003769 383.1500000002994
63 61 390.4250000001051 398.0249999995067 390.8749999997814
64 62 397.8249999996118 404.1749999996682 397.92499999994436
65 63 399.6249999996147 406.04999999959546 405.9249999997552
66 64 405.4750000000789 413.1250000000364 413.15000000004477
67 65 407.3750000002841 420.7249999999476 420.525000000183
68 66 419.075000000303 428.14999999973224 428.0500000001699
69 67 427.94999999983736 435.6999999999971 434.24999999997783
70 68 429.649999999638 437.4500000003536 435.9000000001321
71 69 435.7249999998752 449.39999999951397 443.2499999999924
72 70 443.1499999996598 456.9000000001324 450.8000000002572
73 71 450.774999999849 458.34999999988213 458.1249999998396
74 72 458.02499999950703 465.82500000022264 465.75000000002876
75 73 465.5749999997719 473.2750000002852 473.20000000009134
76 74 473.15000000031466 480.92499999984284 480.7749999997246
77 75 479.4250000000469 488.27499999970314 482.6000000000055
78 76 488.1000000000862 494.4500000001426 488.3999999999138
79 77 489.87499999981117

View File

@ -0,0 +1,88 @@
,0,1,2
0,18.92999999987338,2.57499999995375,20.395000000194614
1,20.47999999982536,20.775000000387397,21.99499999979298
2,22.05500000005528,23.249999999709278,23.595000000300843
3,24.930000000186247,25.04999999971219,26.494999999800257
4,26.655000000264828,28.474999999591407,29.519999999779884
5,29.77999999953723,32.59999999997703,50.52000000042017
6,49.204999999947596,50.474999999525934,52.120000000018536
7,52.330000000129495,52.374999999731116,55.09500000035177
8,55.07999999978025,55.100000000013424,56.645000000303746
9,56.97999999998543,58.12499999999305,59.64500000000543
10,61.27999999958816,80.59999999975149,80.6950000002921
11,80.73000000027648,82.17499999998141,83.4700000002208
12,82.3049999995969,85.1250000000367,85.14499999974349
13,85.15500000035942,86.70000000026663,86.67000000032702
14,86.85500000016006,88.57500000019387,89.56999999982644
15,89.80500000021536,110.59999999949683,110.6699999997595
16,110.90500000014842,112.24999999965108,112.31999999991375
17,112.50499999974679,113.89999999980533,113.89500000014367
18,115.20499999975115,116.65000000036558,116.7449999999967
19,116.88000000018334,119.67500000034521,119.72000000032995
20,118.45500000041326,140.77500000027825,140.74500000033868
21,139.35499999994178,142.44999999980095,142.27000000001271
22,142.30499999999708,143.95000000010654,143.84500000024263
23,145.3299999999767,146.77499999968163,146.69500000009566
24,147.0050000004089,149.67500000009053,149.844999999646
25,150.22999999988357,152.99999999976748,171.04499999978134
26,171.2549999998923,173.69999999980095,173.7950000003416
27,173.90500000025028,175.3000000003088,175.369999999662
28,175.50499999984865,176.92500000018512,177.16999999966492
29,177.25500000020517,179.87500000024042,179.92000000022517
30,181.52999999952996,183.24999999956376,200.9949999998803
31,202.47999999961436,202.3750000002768,202.59500000038815
32,204.0050000001979,203.97499999987517,205.41999999996324
33,205.60499999979626,205.80000000015602,207.09500000039543
34,207.40499999979917,208.47499999988244,209.99499999989484
35,211.7050000003114,213.12499999973838,231.01999999990358
36,229.9300000001135,231.15000000004542,232.67000000005783
37,232.7050000000422,233.99999999989845,235.7200000003154
38,235.6049999995416,235.75000000025497,237.37000000046964
39,237.30500000025174,239.9499999996554,241.77000000027465
40,238.9049999998501,241.89999999950697,261.22000000005346
41,261.22999999975985,261.249999999993,262.89500000048565
42,262.73000000006544,264.2000000000483,265.8699999999094
43,265.6050000001964,267.04999999990133,267.6699999999123
44,267.2300000000727,268.89999999955063,270.3200000002703
45,268.9049999995954,290.09999999968596,291.1700000001524
46,290.23000000021096,292.92500000017054,292.7200000001044
47,292.70499999953284,295.7250000003772,294.2950000003343
48,294.405000000243,297.3500000002535,297.26999999975806
49,297.50500000014694,300.3000000003088,299.12000000031685
50,299.42999999972056,321.52499999981256,321.39499999967074
51,320.1050000003856,324.39999999994353,323.1199999997493
52,322.92999999996067,326.0750000003757,325.79500000038524
53,325.854999999738,329.0249999995215,327.5200000004638
54,327.55499999953867,330.6749999996758,330.3450000000389
55,329.22999999997086,351.67500000031606,351.3950000003256
56,332.35500000015276,354.27500000011815,352.99499999992395
57,352.95499999998395,355.94999999964085,354.6450000000782
58,354.5549999995823,358.95000000025203,357.4200000000069
59,356.12999999981224,361.09999999959865,359.1450000000855
60,359.07999999986754,381.47499999965686,381.64500000012185
61,362.20500000004944,383.09999999953317,383.14500000042744
62,381.9799999998035,384.7499999996874,384.89499999987447
63,384.5799999996056,387.5249999996161,387.6200000001568
64,386.28000000031574,390.6000000001516,390.5199999996562
65,389.13000000016876,411.5749999996045,392.47000000041726
66,392.330000000275,413.100000000188,413.2200000000971
67,413.1800000001571,414.69999999978637,414.69500000012476
68,414.7799999997555,417.57499999991734,417.5449999999778
69,416.2800000000611,420.6250000001749,419.27000000005637
70,417.9799999998617,441.90000000023457,421.04499999978134
71,422.22999999981806,444.6249999996074,441.7950000003707
72,441.8800000000014,446.3000000000396,444.6199999999458
73,443.380000000307,447.8249999997136,446.3700000003023
74,446.23000000016003,450.84999999969324,448.14500000002727
75,449.2299999998617,471.7749999994997,453.8950000002892
76,451.10499999978896,473.4999999995783,471.8950000003183
77,471.93000000030264,476.2999999997849,474.7949999998177
78,473.42999999969874,477.99999999958555,476.4700000002499
79,476.37999999975403,481.00000000019674,478.12000000040416
80,478.00499999963034,,481.07000000045946
81,482.45499999999123,,501.89500000006365
82,500.6549999995153,,
83,503.4799999999999,,
84,505.35499999992714,,
85,508.1550000001338,,
86,509.88000000021236,,
1 0 1 2
2 0 18.92999999987338 2.57499999995375 20.395000000194614
3 1 20.47999999982536 20.775000000387397 21.99499999979298
4 2 22.05500000005528 23.249999999709278 23.595000000300843
5 3 24.930000000186247 25.04999999971219 26.494999999800257
6 4 26.655000000264828 28.474999999591407 29.519999999779884
7 5 29.77999999953723 32.59999999997703 50.52000000042017
8 6 49.204999999947596 50.474999999525934 52.120000000018536
9 7 52.330000000129495 52.374999999731116 55.09500000035177
10 8 55.07999999978025 55.100000000013424 56.645000000303746
11 9 56.97999999998543 58.12499999999305 59.64500000000543
12 10 61.27999999958816 80.59999999975149 80.6950000002921
13 11 80.73000000027648 82.17499999998141 83.4700000002208
14 12 82.3049999995969 85.1250000000367 85.14499999974349
15 13 85.15500000035942 86.70000000026663 86.67000000032702
16 14 86.85500000016006 88.57500000019387 89.56999999982644
17 15 89.80500000021536 110.59999999949683 110.6699999997595
18 16 110.90500000014842 112.24999999965108 112.31999999991375
19 17 112.50499999974679 113.89999999980533 113.89500000014367
20 18 115.20499999975115 116.65000000036558 116.7449999999967
21 19 116.88000000018334 119.67500000034521 119.72000000032995
22 20 118.45500000041326 140.77500000027825 140.74500000033868
23 21 139.35499999994178 142.44999999980095 142.27000000001271
24 22 142.30499999999708 143.95000000010654 143.84500000024263
25 23 145.3299999999767 146.77499999968163 146.69500000009566
26 24 147.0050000004089 149.67500000009053 149.844999999646
27 25 150.22999999988357 152.99999999976748 171.04499999978134
28 26 171.2549999998923 173.69999999980095 173.7950000003416
29 27 173.90500000025028 175.3000000003088 175.369999999662
30 28 175.50499999984865 176.92500000018512 177.16999999966492
31 29 177.25500000020517 179.87500000024042 179.92000000022517
32 30 181.52999999952996 183.24999999956376 200.9949999998803
33 31 202.47999999961436 202.3750000002768 202.59500000038815
34 32 204.0050000001979 203.97499999987517 205.41999999996324
35 33 205.60499999979626 205.80000000015602 207.09500000039543
36 34 207.40499999979917 208.47499999988244 209.99499999989484
37 35 211.7050000003114 213.12499999973838 231.01999999990358
38 36 229.9300000001135 231.15000000004542 232.67000000005783
39 37 232.7050000000422 233.99999999989845 235.7200000003154
40 38 235.6049999995416 235.75000000025497 237.37000000046964
41 39 237.30500000025174 239.9499999996554 241.77000000027465
42 40 238.9049999998501 241.89999999950697 261.22000000005346
43 41 261.22999999975985 261.249999999993 262.89500000048565
44 42 262.73000000006544 264.2000000000483 265.8699999999094
45 43 265.6050000001964 267.04999999990133 267.6699999999123
46 44 267.2300000000727 268.89999999955063 270.3200000002703
47 45 268.9049999995954 290.09999999968596 291.1700000001524
48 46 290.23000000021096 292.92500000017054 292.7200000001044
49 47 292.70499999953284 295.7250000003772 294.2950000003343
50 48 294.405000000243 297.3500000002535 297.26999999975806
51 49 297.50500000014694 300.3000000003088 299.12000000031685
52 50 299.42999999972056 321.52499999981256 321.39499999967074
53 51 320.1050000003856 324.39999999994353 323.1199999997493
54 52 322.92999999996067 326.0750000003757 325.79500000038524
55 53 325.854999999738 329.0249999995215 327.5200000004638
56 54 327.55499999953867 330.6749999996758 330.3450000000389
57 55 329.22999999997086 351.67500000031606 351.3950000003256
58 56 332.35500000015276 354.27500000011815 352.99499999992395
59 57 352.95499999998395 355.94999999964085 354.6450000000782
60 58 354.5549999995823 358.95000000025203 357.4200000000069
61 59 356.12999999981224 361.09999999959865 359.1450000000855
62 60 359.07999999986754 381.47499999965686 381.64500000012185
63 61 362.20500000004944 383.09999999953317 383.14500000042744
64 62 381.9799999998035 384.7499999996874 384.89499999987447
65 63 384.5799999996056 387.5249999996161 387.6200000001568
66 64 386.28000000031574 390.6000000001516 390.5199999996562
67 65 389.13000000016876 411.5749999996045 392.47000000041726
68 66 392.330000000275 413.100000000188 413.2200000000971
69 67 413.1800000001571 414.69999999978637 414.69500000012476
70 68 414.7799999997555 417.57499999991734 417.5449999999778
71 69 416.2800000000611 420.6250000001749 419.27000000005637
72 70 417.9799999998617 441.90000000023457 421.04499999978134
73 71 422.22999999981806 444.6249999996074 441.7950000003707
74 72 441.8800000000014 446.3000000000396 444.6199999999458
75 73 443.380000000307 447.8249999997136 446.3700000003023
76 74 446.23000000016003 450.84999999969324 448.14500000002727
77 75 449.2299999998617 471.7749999994997 453.8950000002892
78 76 451.10499999978896 473.4999999995783 471.8950000003183
79 77 471.93000000030264 476.2999999997849 474.7949999998177
80 78 473.42999999969874 477.99999999958555 476.4700000002499
81 79 476.37999999975403 481.00000000019674 478.12000000040416
82 80 478.00499999963034 481.07000000045946
83 81 482.45499999999123 501.89500000006365
84 82 500.6549999995153
85 83 503.4799999999999
86 84 505.35499999992714
87 85 508.1550000001338
88 86 509.88000000021236

View File

@ -22,7 +22,7 @@ from IPython import embed
def plot_style(ns=__main__):
print('## added imports plotstyle.py version')
# embed()
ns.palette = palettes['muted']
palette = ns.palette

View File

@ -4457,7 +4457,7 @@ We collected weakly electric gymnotoid fish in the vicinity of Manaus, Amazonas,
@Article{Meyer1982,
Title = {Androgens alter the tuning of electroreceptors.},
Author = {Meyer, J H and Zakon, H H},
Journal = Science,
Journal = {Science},
Year = {1982},
Pages = {635--637},
Volume = {217}
@ -4466,7 +4466,7 @@ We collected weakly electric gymnotoid fish in the vicinity of Manaus, Amazonas,
@ARTICLE{Meyer1982Impedances,
AUTHOR = {J. Harlan Meyer},
TITLE = {Behavioral responses of weakly electric fish to complex impedances.},
JOURNAL = JCompPhysiol,
JOURNAL = {JCompPhysiol},
YEAR = {1982},
VOLUME = {145},
PAGES = {459--470}
@ -4477,7 +4477,7 @@ We collected weakly electric gymnotoid fish in the vicinity of Manaus, Amazonas,
Author = {Meyer, J. Harlan
and Leong, Margaret
and Keller, Clifford H.},
Journal = JCompPhysiolA,
Journal = {JCompPhysiolA},
Year = {1987},
Number = {3},
Pages = {385--394},

BIN
susceptibility1.dvi Normal file

Binary file not shown.

View File

@ -522,9 +522,11 @@ The P-unit responses can be partially explained by simple linear filters. The li
\section*{Results}
Theoretical work shows that leaky-integrate-and-fire (LIF) model neurons show a distinct pattern of nonlinear stimulus encoding when the model is driven by two cosine signals. In the context of the weakly electric fish, such a setting is part of the animal's everyday life as the sinusoidal electric-organ discharges (EODs) of neighboring animals interfere with the own field and each lead to sinusoidal amplitude modulations (AMs) that are called beats and envelopes \cite{Middleton2006, Savard2011,Stamper2012Envelope}. The p-type electroreceptor afferents of the tuberous electrosensory system, i.e. the P-units, respond to such AMs of the underlying EOD carrier and their time-dependent firing rate carries information about the stimulus' time-course. P-units are heterogeneous in their baseline firing properties \cite{Grewe2017, Hladnik2023} and differ with respect to their baseline firing rates, the sensitivity and their noisiness. We here explore the nonlinear mechanism in different cells that exhibit distinctly different levels of noise, i.e. differences in the coefficient of variation (CV) of the interspike intervals (ISI). Low-CV P-units have a less noisy, more regular, firing pattern whereas high-CV P-units show a less regular firing pattern in their baseline activity.
Theoretical work \cite{Voronenko2017} shows that stochastic leaky integrate-and-fire (LIF) model neurons may show nonlinear stimulus encoding when the model is driven by two cosine signals with specific frequencies. In the context of weakly electric fish, such a setting is part of the animal's everyday life. The sinusoidal electric-organ discharges (EODs) of neighboring animals interfere and lead to amplitude modulations (AMs) that are called beats (two-fish interaction) and envelopes (multiple-fish interaction) \cite{Middleton2006, Savard2011,Stamper2012Envelope}. The p-type electroreceptor afferents of the tuberous electrosensory system, i.e. the P-units encode such AMs of the underlying EOD carrier in their time-dependent firing rates \cite{Bastian1981a,Walz2014}. We here explore nonlinear responses of different cells that exhibit distinctly different levels of output variability, quantified by the coefficient of variation (CV) of their interspike intervals (ISI). Low-CV P-units have a less noisy firing pattern that is closer to pacemaker firing, whereas high-CV P-units show a more irregular firing pattern that is closer to a Poisson process.
%P-units are heterogeneous in their baseline firing properties \cite{Grewe2017, Hladnik2023} and differ with respect to their baseline firing rates, the sensitivity and their noisiness.
\subsection*{Low-CV P-units exhibit nonlinear interactions} %frequency combinations withappearing when the input frequencies are related to \fbase{} are
\subsection*{Nonlinear signal transmission in low-CV P-units} %frequency combinations withappearing when the input frequencies are related to \fbase{} are
Second-order susceptibility is expected to be especially pronounced for low-CV cells \cite{Voronenko2017}. P-units fire action potentials probabilistically phase-locked to the self-generated EOD. Skipping of EOD cycles leads to the characteristic multimodal ISI distribution with maxima at integer multiples of the EOD period (\subfigrefb{fig:cells_suscept}{A}). In this example the ISI distribution has a CV of 0.2, which can be considered low among P-units \cite{Hladnik2023}. Spectral analysis of the baseline activity shows two major peaks: the first is located at the baseline firing rate (\fbase), the second is located at the discharge frequency of the electric organ (\feod{}) and is flanked by two smaller peaks at $\feod \pm \fbase{}$ (\subfigref{fig:cells_suscept}{B}).
@ -550,13 +552,15 @@ Irrespective of the CV, neither cell shows the complete proposed structure of no
}
\end{figure*}
\subsection*{Internal noise hides parts of the nonlinearity structure}
Traces of the proposed structure of second-order susceptibility are found in both ampullary and p-type electrosensory afferents. The nonlinearity seems to depend on the CV, i.e. the level of intrinsic noise in the cells and in the next step we address whether the level of intrinsic noise masks the expected structures. One option to reduce the impact of this intrinsic noise is to average it out over many repetitions of the same stimulus. In the electrophysiological experiments we only have a limited recording duration and hence the number of stimulus repetitions is limited. To overcome these limitations we compared an electrophysiologically recorded low-CV P-unit with its P-unit LIF model counterpart fitted to reproduce the P-unit behavior faithfully (see \figref{flowchart})\cite{Barayeu2023}. In the recording depicted in \subfigrefb{model_and_data}{A} the cell was stimulated with a weak RAM stimulus with a contrast of 1\,$\%$ (red, \subfigrefb{model_and_data}\,\panel[i]{B}). The across-trial average ($\n{}=11$) shows the diagonal band in second-order susceptibility at \fsumb{} (yellow diagonal in pink edges) and an additional nonlinear band appeared at \fsumehalf{} (\subfigrefb{model_and_data}{A}). The matched model reproduces the same diagonal at \fsumb{} (\subfigrefb{model_and_data}\,\panel[ii]{B}) but not the diagonal at \fsumehalf. By increasing the number of trials in the simulation, the signal-to-noise ratio and thus the estimation of the nonlinearity structures can be improved. Still, even with 1 million repetitions, no changes are observable in the nonlinearity structures (\subfigrefb{model_and_data}\,\panel[iii]{B}). The increased trial count, however, goes along with generally smaller second-order susceptibility values (\subfigrefb{model_and_data}\,\panel[iii]{B}). The decrease of the second-order susceptibility follows the relation $1/ \sqrt{N} $ (\figrefb{trialnr}).
\subsection*{High level of total noise hides parts of the nonlinearity structure}
Traces of the expected structure of second-order susceptibility are found in both ampullary and p-type electrosensory afferents. In the recordings shown above (\figrefb{fig:cells_suscept}, \figrefb{fig:ampullary}), the nonlinear response is strong whenever the two frequencies (\fone{}, \ftwo{}) fall onto the antidiagonal \fsumb{}, which is in line with theoretical expectations \cite{Voronenko2017}. However, a pronounced nonlinear response for frequencies with \foneb{} or \ftwob{}, although predicted by theory, cannot be observed. Here we investigate how these discrepancies can be understood.
%Note that this line doesn't appear in the susceptibility matrix of the model (\subfigrefb{model_and_data}{C})Each cell has an intrinsic noise level (\subfigrefb{model_and_data}{A}, bottom).
%The signal component (purple) compensates for this total noise reduction, is not a weak signal anymore and
Based on the Novikov-Furutsu Theorem \cite{Novikov1965, Furutsu1963} the intrinsic noise of a LIF model can be split up into several independent noise processes with the same correlation function. We make use of this and split the intrinsic noise $\xi$ into two parts: 90\% are then treated as signal ($\xi_{signal}$) while the remaining 10\% are treated as noise ($\xi_{noise}$, see methods for details). With this, the signal-to-noise ratio in the simulation can be arbitrarily varied and the combination of many repetitions and noise-split indeed reveals the triangular shape shown theoretically and for LIF models without carrier \cite{Voronenko2017} (\subfigrefb{model_and_data}\,\panel[iii]{C}).
In the electrophysiological experiments we only have a limited number of trials and this insufficient averaging may occlude the full nonlinear structure. This limitation can be overcome by using a computational model for the P-unit, a stochastic leaky integrate-and-fire model with adaptation current and dendritic preprocessing, with parameters fitted to the experimentally recorded P-unit (\figrefb{flowchart}) \cite{Barayeu2023}. The model faithfully reproduces the second-order susceptibility of a low-CV cell estimated from the same low number of repetitions ($\n{}=11$, compare \panel{A} and \panel[ii]{B} in \figrefb{model_and_data}). In the model we can increase the number of repetitions substantially but still do not observe the full nonlinear structure ($\n{}=10^6$, \subfigrefb{model_and_data}\,\panel[iii]{B}). A possible reason for this could be that by applying a broadband stimulus the effective input-noise level is increased and this may linearize the signal
transmission \cite{Longtin1993, Chialvo1997, Roddey2000, Voronenko2017}. Assuming that the intrinsic noise level in this P-unit is small enough, the full nonlinear structure should appear in the limit of weak AMs. Again, this cannot be done experimentally, because the problem of insufficient averaging becomes even more severe for weak AMs (low contrast). In the model, however, we know the time course of the intrinsic noise and can use this knowledge to determine the susceptibilities by input-output correlations via the Furutsu-Novikov theorem \cite{Furutsu1963, Novikov1965}. This theorem, in its simplest form, states that the cross-spectrum $S_{x\eta}(\omega)$ of a Gaussian noise $\eta(t)$ driving a nonlinear system and the system's output $x(t)$ is proportional to the linear susceptibility $S_{x\eta}(\omega)=\chi(\omega)S_{\eta\eta}(\omega)$. Here $\chi(\omega)$ characterizes the linear response to an infinitely weak signal $\varepsilon s(t)$ in the presence of the background noise $\eta(t)$. Likewise, the nonlinear susceptibility can be determined in an analogous fashion from higher-order input-output cross-spectra (see methods \eqref{eq:crosshigh}, \eqref{eq:susceptibility}) \cite{Egerland2020}. In line with an alternative derivation of the Furutsu-Novikov theorem \cite{Lindner2022}, we can split the total noise and consider a fraction of it as stimulus. This allows to calculate the susceptibility from the cross-spectrum between the output and this stimulus fraction of the noise. Adapting this approach to our P-unit model (see methods), we replace the intrinsic noise by an approximately equivalent RAM stimulus $s_\xi(t)$ and a weak remaining intrinsic noise ($\sqrt{2D \, c_{noise}} \cdot \xi(t)$, with $c_\text{noise} = 0.1$, see methods \eqref{eq:ram_split}, \eqref{eq:Noise_split_intrinsic}, \eqref{eq:Noise_split_intrinsic_dendrite}, \subfigrefb{model_and_data}\,\panel[i]{C}). 
We tune the amplitude / standard deviation of the RAM stimulus $s_\xi(t)$ such that the output firing rate and variability (CV) are the same as in the baseline activity (i.e. full intrinsic noise $\sqrt{2D}\xi(t)$ in the voltage equation but no RAM) and compute the second- and third-order cross-spectra between the RAM part of the noise $s_\xi(t)$ and the output spike train. This procedure has two consequences: (i) by means of the cross-spectrum between the output and $s_\xi(t)$, which is a large fraction of the noise, the signal-to-noise ratio of the measured susceptibilities is drastically improved; (ii) the total noise in the system has been reduced (by what was before the external RAM stimulus $s(t)$), which makes the system more nonlinear. For both reasons we now see the expected nonlinear features in the second-order susceptibility for a sufficient number of trials (\subfigrefb{model_and_data}\,\panel[iii]{B}), but not for a number of trials comparable to the experiment (\subfigrefb{model_and_data}\,\panel[ii]{B}). In addition to the strong response for \fsumb{}, we now also observe pronounced nonlinear responses at \foneb{} and \ftwob{} (vertical and horizontal lines, \subfigrefb{model_and_data}\,\panel[iii]{C}). Note that the increased number of repetitions goes along with a substantial reduction of second-order susceptibility values (\subfigrefb{model_and_data}\,\panel[iii]{B}), which saturate in their peak values for $N>10^5$ (\figrefb{trialnr}). This demonstrates the limited reliability of the statistics that is based on 11 trials. However, we would like to point out that already the limited number of trials as used in the experiments reveals key features of the nonlinear response.
%Note that this line doesn't appear in the susceptibility matrix of the model (\subfigrefb{model_and_data}{C})Each cell has an intrinsic noise level (\subfigrefb{model_and_data}{A}, bottom).
%The signal component (purple) compensates for this total noise reduction, is not a weak signal anymore
%Adding an additional, independent, RAM stimulus to the simulation does not heavily influence the qualitative observations but the nonlinearity becomes weaker (compare \subfigrefb{model_and_data}\,\panel[iii]{C} and \panel[iv]{C})
% Based on this a weak RAM signal as the input to the P-unit model (red in \subfigrefb{model_and_data}\,\panel[i]{A}) can be approximated by a model where no RAM stimulus is present (red) but instead the total noise of the model is split up into a reduced intrinsic noise component (gray) and a signal component (purple), maintaining the CV and \fbase{} as during baseline (\subfigrefb{model_and_data}\,\panel[i]{B}, see methods section \ref{intrinsicsplit_methods} for more details). This signal component (purple) can be used for the calculation of the second-order susceptibility. With the reduced noise component the signal-to-noise ratio increases and the number of stimulus realizations can be reduced. This noise split cannot be applied in experimentally measured cells. If this noise split is applied in the model with $\n{}=11$ stimulus realizations the nonlinearity at \fsumb{} is still present (\subfigrefb{model_and_data}\,\panel[iii]{B}). If instead, the RAM stimulus is drawn 1 million times the diagonal nonlinearity band is complemented by vertical and horizontal lines appearing at \foneb{} and \ftwob{} (\subfigrefb{model_and_data}\,\panel[iv]{B}). These nonlinear structures correspond to the ones observed in previous works \cite{Voronenko2017}. If now a weak RAM stimulus is added (\subfigrefb{model_and_data}\,\panel[i]{C}, red), simultaneously the noise is split up into a noise and signal component (gray and purple) and the calculation is performed on the sum of the signal component and the RAM (red plus purple) only the diagonal band is present with $\n{}=11$ (\subfigrefb{model_and_data}\,\panel[iii]{C}) but also with 1\,million stimulus realizations (\subfigrefb{model_and_data}\,\panel[iv]{C}).
@ -684,7 +688,11 @@ The baseline firing rate \fbase{} was calculated as the number of spikes divided
\paragraph{White noise analysis} \label{response_modulation}
In the stimulus driven case, the neuronal activity of the recorded cell is modulated around the average firing rate that is similar to \fbase{} and in that way encodes the time-course of the stimulus.
The time-dependent response of the neuron was estimated from the spiking activity $x_k(t) = \sum_i\delta(t-t_{k,i})$ recorded for each stimulus presentation, $k$, by kernel convolution with a Gaussian kernel
The time-dependent response of the neuron was estimated from the spiking activity
\begin{equation}\label{eq:spikes}
x_k(t) = \sum_i\delta(t-t_{k,i})
\end{equation}
recorded for each stimulus presentation, $k$, by kernel convolution with a Gaussian kernel
\begin{equation}
K(t) = \scriptstyle \frac{1}{\sigma\sqrt{2\pi}} e^{-\frac{t^2}{2\sigma^2}}
@ -699,42 +707,42 @@ where $*$ denotes the convolution. $r(t)$ is then calculated as the across-trial
r(t) = \left\langle r_k(t) \right\rangle _k.
\end{equation}
To quantify how strongly the neuron is driven by the stimulus we quantified the response modulation as the standard deviation $\sigma_{M} = \sqrt{\langle (r(t)-\langle r(t) \rangle)^2\rangle}$.
To quantify how strongly the neuron is driven by the stimulus we quantified the response modulation as the standard deviation $\sigma_{M} = \sqrt{\langle (r(t)-\langle r(t) \rangle_t)^2\rangle_t}$, where $\langle \cdot \rangle_t$ indicates averaging over time.
\paragraph{Spectral analysis}\label{susceptibility_methods}
The neuron is driven by the stimulus and thus the neuronal response $r(t)$ depends on the stimulus $s(t)$. To investigate the relation between stimulus and response we calculated the first- and second-order susceptibility of the neuron to the stimulus in the frequency domain. The Fourier transforms of $s(t)$ and $r(t)$ are denoted as $\tilde s(\omega)$ and $\tilde r(\omega)$ and were calculated according to $\tilde x(\omega) = \int_{0}^{T} \, x(t) \cdot e^{- i \omega t}\,dt$, with $T$ being the signal duration. Stimuli had a duration of 10\,s and spectra of stimulus and response were calculated in separate segments of 0.5\,s with no overlap resulting in a spectral resolution of 2\,Hz. $r(t)$ was estimated by kernel convolution with a box kernel that had a width matching the sampling interval to preserve temporal accuracy as far as possible.
The neuron is driven by the stimulus and thus the spiking response $x(t)$ (\eqref{eq:spikes}) depends on the stimulus $s(t)$. To investigate the relation between stimulus and response we calculated the first- and second-order susceptibility of the neuron to the stimulus in the frequency domain. The Fourier transforms of $s(t)$ and $x(t)$ are denoted as $\tilde s(\omega)$ and $\tilde x(\omega)$ and were calculated according to $\tilde x(\omega) = \int_{0}^{T} \, x(t) \cdot e^{- i \omega t}\,dt$, with $T$ being the signal duration. Stimuli had a duration of 10\,s and spectra of stimulus and response were calculated in separate segments of 0.5\,s with no overlap resulting in a spectral resolution of 2\,Hz.
The power spectrum was calculated as
The power spectrum of the stimulus $s(t)$ was calculated as
\begin{equation}
\label{powereq}
\begin{split}
S_{ss}(\omega) = \frac{\langle \tilde s(\omega) \tilde s^* (\omega)\rangle}{T}
\end{split}
\end{equation}
with $\tilde s^* $ being the complex conjugate and $\langle ... \rangle$ denoting averaging over the segments. The cross-spectrum $S_{rs}(\omega)$ was calculated according to
with $\tilde s^* $ being the complex conjugate and $\langle ... \rangle$ denoting averaging over the segments. The power spectrum of the spike trains $S_{xx}$ was calculated accordingly. The cross-spectrum $S_{xs}(\omega)$ between stimulus and evoked spike trains was calculated according to
\begin{equation}
\label{cross}
\begin{split}
S_{rs}(\omega) = \frac{\langle \tilde r(\omega) \tilde s^* (\omega)\rangle}{T}
S_{xs}(\omega) = \frac{\langle \tilde x(\omega) \tilde s^* (\omega)\rangle}{T}
\end{split}
\end{equation}
From $S_{rs}(\omega)$ and $ S_{ss}(\omega)$ we calculated the linear susceptibility (transfer function) as
From $S_{xs}(\omega)$ and $ S_{ss}(\omega)$ we calculated the linear susceptibility (transfer function) as
\begin{equation}
\label{linearencoding_methods}
\begin{split}
\chi_{1}(\omega) = \frac{S_{rs}(\omega) }{S_{ss}(\omega) }
\chi_{1}(\omega) = \frac{S_{xs}(\omega) }{S_{ss}(\omega) }
\end{split}
\end{equation}
The second-order cross-spectrum that depends on the two frequencies $\omega_1$ and $\omega_2$ was calculated according to
\begin{equation}
\label{eq:crosshigh}
S_{rss} (\omega_{1},\omega_{2}) = \frac{\langle \tilde r (\omega_{1}+\omega_{2}) \tilde s^* (\omega_{1})\tilde s^* (\omega_{2}) \rangle}{T}
S_{xss} (\omega_{1},\omega_{2}) = \frac{\langle \tilde x (\omega_{1}+\omega_{2}) \tilde s^* (\omega_{1})\tilde s^* (\omega_{2}) \rangle}{T}
\end{equation}
The second-order susceptibility was calculated by dividing the higher-order cross-spectrum by the spectral power at the respective frequencies.
\begin{equation}
\label{eq:susceptibility}
%\begin{split}
\chi_{2}(\omega_{1}, \omega_{2}) = \frac{S_{rss} (\omega_{1},\omega_{2})}{2S_{ss} (\omega_{1}) S_{ss} (\omega_{2})}
\chi_{2}(\omega_{1}, \omega_{2}) = \frac{S_{xss} (\omega_{1},\omega_{2})}{2S_{ss} (\omega_{1}) S_{ss} (\omega_{2})}
%\end{split}
\end{equation}
% Applying the Fourier transform this can be rewritten resulting in:
@ -750,7 +758,7 @@ The absolute value of a second-order susceptibility matrix is visualized in \fig
We expect to see nonlinear susceptibility when $\omega_1 + \omega_2 = \fbase{}$. To characterize this we calculated the peakedness of the nonlinearity (PNL) as
\begin{equation}
\label{eq:nli_equation}
\nli{} = \frac{ D(\max(\fbase{}-5\,\rm{Hz} \leq f \leq \fbase{}+5\,\rm{Hz}))}{\mathrm{med}(D(f))}
\nli{} = \frac{ \max D(\fbase{}-5\,\rm{Hz} \leq f \leq \fbase{}+5\,\rm{Hz})}{\mathrm{med}(D(f))}
\end{equation}
For this index, the second-order susceptibility matrix was projected onto the diagonal $D(f)$, by taking the mean of the anti-diagonals. The peakedness at the frequency $\fbase{}$ in $D(f)$ was quantified by finding the maximum of $D(f)$ in the range $\fbase{} \pm 5$\,Hz (\subfigrefb{fig:cells_suscept}{G}) and dividing it by the median of $D(f)$.
@ -829,9 +837,9 @@ The model neurons were driven with similar stimuli as the real neurons in the or
The random amplitude modulation (RAM) input to the model was created by drawing random amplitude and phases from Gaussian distributions for each frequency component in the range 0--300 Hz. An inverse Fourier transform was applied to get the final amplitude RAM time-course. The input to the model was then
\begin{equation}
\label{eq:ram_equation}
x(t) = (1+ RAM(t)) \cdot \cos(2\pi f_{EOD} t)
y(t) = (1+ s(t)) \cdot \cos(2\pi f_{EOD} t)
\end{equation}
\note{fix stimulus x and y notation and RAM}
From each simulation run, the first second was discarded and the analysis was based on the last second of the data. The resulting spectra thus have a spectral resolution of 1\,Hz.
% \subsection{Second-order susceptibility analysis of the model}
% %\subsubsection{Model second-order nonlinearity}
@ -839,24 +847,25 @@ From each simulation run, the first second was discarded and the analysis was ba
% The second-order susceptibility in the model was calculated with \Eqnref{eq:susceptibility}, resulting in matrices as in \figrefb{model_and_data} and \figrefb{fig:model_full}. For this, the model neuron was presented the input $x(t)$ for 2\,s, with the first second being dismissed as the transient. The second-order susceptibility calculation was performed on the last second, resulting in a frequency resolution of 1\,Hz.
\subsection*{Model noise split into a noise and a stimulus component}\label{intrinsicsplit_methods}
According to the Novikov-Furutsu Theorem \cite{Novikov1965, Furutsu1963} the total noise of a LIF model ($\xi$) can be split up into several independent noise processes with the same correlation function. Here we split the internal noise into two parts: (i) One part is treated as a driving input signal $\xi_{signal} = \sqrt{\rho \, 2D \,c_{signal}} \cdot \xi(t) $ and is used to calculate the cross-spectra in \Eqnref{eq:crosshigh} and (ii) the remaining noise $\xi_{noise} = \sqrt{2D \, c_{noise}} \cdot \xi(t)$ that is treated as pure noise. In this way the effective signal-to-noise ratio can be increased while maintaining the total noise in the system. $\rho$ is a scaling factor that compensates (see below) for the signal transformations the amplitude modulation stimulus undergoes in the model, i.e. the threshold and the dendritic lowpass. In our case the model has a carrier (the fish's self-generated EOD) and we thus want to drive the model with an amplitude modulation stimulus
According to the Novikov-Furutsu Theorem \cite{Novikov1965, Furutsu1963} the total noise of a LIF model ($\xi$) can be split up into several independent noise processes with the same correlation function. Here we split the internal noise into two parts: (i) One part is treated as a driving input signal $s_\xi(t)$ \note{hier umschreiben das xi muss ein RAM sein} and is used to calculate the cross-spectra in \Eqnref{eq:crosshigh} and (ii) the remaining noise $\sqrt{2D \, c_{noise}} \cdot \xi(t)$ that is treated as pure noise. In this way the effective signal-to-noise ratio can be increased while maintaining the total noise in the system. $\rho$ is a scaling factor that compensates (see below) for the signal transformations the amplitude modulation stimulus undergoes in the model, i.e. the threshold and the dendritic lowpass. In our case the model has a carrier (the fish's self-generated EOD) and we thus want to drive the model with an amplitude modulation stimulus
%\sqrt{\rho \, 2D \,c_{signal}} \cdot \xi(t)
%(1-c_{signal})\cdot\xi$c_{noise} = 1-c_{signal}$
%c_{signal} \cdot \xi
\begin{equation}
\label{eq:ram_split}
x(t) = (1+ \xi_{signal}) \cdot \cos(2\pi f_{EOD} t)
y(t) = (1+ s_\xi(t)) \cdot \cos(2\pi f_{EOD} t)
\end{equation}
\begin{equation}
\label{eq:Noise_split_intrinsic_dendrite}
\tau_{d} \frac{d V_{d}}{d t} = -V_{d}+ \lfloor x(t) \rfloor_{0}
\tau_{d} \frac{d V_{d}}{d t} = -V_{d}+ \lfloor y(t) \rfloor_{0}
\end{equation}
\begin{equation}
\label{eq:Noise_split_intrinsic}
\tau_{m} \frac{d V_{m}}{d t} = - V_{m} + \mu + \alpha V_{d} - A + \xi_{noise}
\tau_{m} \frac{d V_{m}}{d t} = - V_{m} + \mu + \alpha V_{d} - A + \sqrt{2D \, c_{noise}} \cdot \xi(t)
\end{equation}
% das stimmt so, das c kommt unter die Wurzel!
@ -907,7 +916,7 @@ A big portion of the total noise was assigned to the signal component ($c_{signa
\section*{Supporting information}
\%subsection*{High-CV P-units do not exhibit increased nonlinear interactions at \fsum}
%\subsection*{High-CV P-units do not exhibit increased nonlinear interactions at \fsum}

View File

@ -89,6 +89,9 @@
\usepackage{nameref}%,hyperref
\usepackage[breaklinks=true,colorlinks=true,citecolor=blue!30!black,urlcolor=blue!30!black,linkcolor=blue!30!black]{hyperref}
% \usepackage{natbib}%sort,comma,,round
% \setcitestyle{super,comma,sort&compress}%authoryear,
% line numbers
\usepackage[right]{lineno}
@ -138,8 +141,8 @@
\renewcommand{\figurename}{Fig}
% Use the PLoS provided BiBTeX style
\bibliographystyle{plos2015}
%\bibliographystyle{plos2015}
\bibliographystyle{apalike}%alpha}%}%alpha}%apalike}
% Remove brackets from numbering in List of References
\makeatletter
\renewcommand{\@biblabel}[1]{\quad#1.}
@ -425,12 +428,12 @@
\\
Alexandra Barayeu\textsuperscript{1},
Maria Schlungbaum\textsuperscript{2,3},
Benjamin Lindner\textsubscript{2,3},
Benjamin Lindner\textsuperscript{2,3},
Jan Benda\textsuperscript{1, 4}
Jan Grewe\textsuperscript{1, *}
\\
\bigskip
\textbf{1} Institute for Neurobiology, Eberhardt Karls Universit\"at T\"ubingen, 72076 T\"ubingen, Germany\\
\textbf{1} Institute for Neurobiology, Eberhard Karls Universit\"at T\"ubingen, 72076 T\"ubingen, Germany\\
\textbf{2} Bernstein Center for Computational Neuroscience Berlin, Berlin, Germany\\
\textbf{3} Department of Physics, Humboldt University Berlin, Berlin, Germany\\
\textbf{4} Bernstein Center for Computational Neuroscience Tübingen, Tübingen, 72076, Germany\\
@ -519,9 +522,11 @@ The P-unit responses can be partially explained by simple linear filters. The li
\section*{Results}
Theoretical work shows that leaky-integrate-and-fire (LIF) model neurons show a distinct pattern of nonlinear stimulus encoding when the model is driven by two cosine signals. In the context of the weakly electric fish, such a setting is part of the animal's everyday life as the sinusoidal electric-organ discharges (EODs) of neighboring animals interfere with the own field and each lead to sinusoidal amplitude modulations (AMs) that are called beats and envelopes \cite{Middleton2006, Savard2011,Stamper2012Envelope}. The p-type electroreceptor afferents of the tuberous electrosensory system, i.e. the P-units, respond to such AMs of the underlying EOD carrier and their time-dependent firing rate carries information about the stimulus' time-course. P-units are heterogeneous in their baseline firing properties \cite{Grewe2017, Hladnik2023} and differ with respect to their baseline firing rates, the sensitivity and their noisiness. We here explore the nonlinear mechanism in different cells that exhibit distinctly different levels of noise, i.e. differences in the coefficient of variation (CV) of the interspike intervals (ISI). Low-CV P-units have a less noisy, more regular, firing pattern whereas high-CV P-units show a less regular firing pattern in their baseline activity.
Theoretical work \cite{Voronenko2017} shows that stochastic leaky integrate-and-fire (LIF) model neurons may show nonlinear stimulus encoding when the model is driven by two cosine signals with specific frequencies. In the context of weakly electric fish, such a setting is part of the animal's everyday life. The sinusoidal electric-organ discharges (EODs) of neighboring animals interfere and lead to amplitude modulations (AMs) that are called beats (two-fish interaction) and envelopes (multiple-fish interaction) \cite{Middleton2006, Savard2011,Stamper2012Envelope}. The p-type electroreceptor afferents of the tuberous electrosensory system, i.e. the P-units encode such AMs of the underlying EOD carrier in their time-dependent firing rates \cite{Bastian1981a,Walz2014}. We here explore nonlinear responses of different cells that exhibit distinctly different levels of output variability, quantified by the coefficient of variation (CV) of their interspike intervals (ISI). Low-CV P-units have a less noisy firing pattern that is closer to pacemaker firing, whereas high-CV P-units show a more irregular firing pattern that is closer to a Poisson process.
%P-units are heterogeneous in their baseline firing properties \cite{Grewe2017, Hladnik2023} and differ with respect to their baseline firing rates, the sensitivity and their noisiness.
\subsection*{Low-CV P-units exhibit nonlinear interactions} %frequency combinations withappearing when the input frequencies are related to \fbase{} are
\subsection*{Nonlinear signal transmission in low-CV P-units} %frequency combinations withappearing when the input frequencies are related to \fbase{} are
Second-order susceptibility is expected to be especially pronounced for low-CV cells \cite{Voronenko2017}. P-units fire action potentials probabilistically phase-locked to the self-generated EOD. Skipping of EOD cycles leads to the characteristic multimodal ISI distribution with maxima at integer multiples of the EOD period (\subfigrefb{fig:cells_suscept}{A}). In this example the ISI distribution has a CV of 0.2, which can be considered low among P-units \cite{Hladnik2023}. Spectral analysis of the baseline activity shows two major peaks, the first is located at the baseline firing rate (\fbase), the second is located at the discharge frequency of the electric organ (\feod{}) and is flanked by two smaller peaks at $\feod \pm \fbase{}$ (\subfigref{fig:cells_suscept}{B}).
@ -547,8 +552,13 @@ Irrespective of the CV, neither cell shows the complete proposed structure of no
}
\end{figure*}
\subsection*{Internal noise hides parts of the nonlinearity structure}
Traces of the proposed structure of second-order susceptibility are found in both ampullary and p-type electrosensory afferents. The nonlinearity seems to depend on the CV, i.e. the level of intrinsic noise in the cells and in the next step we address whether the level of intrinsic noise masks the expected structures. One option to reduce the impact of this intrinsic noise is to average it out over many repetitions of the same stimulus. In the electrophysiological experiments we only have a limited recording duration and hence the number of stimulus repetitions is limited. To overcome these limitations we compared an electrophysiologically recorded low-CV P-unit with its P-unit LIF model counterpart fitted to reproduce the P-unit behavior faithfully (see \figref{flowchart})\cite{Barayeu2023}. In the recording depicted in \subfigrefb{model_and_data}{A} the cell was stimulated with a weak RAM stimulus with a contrast of 1\,$\%$ (red, \subfigrefb{model_and_data}\,\panel[i]{B}). The across-trial average ($\n{}=11$) shows the diagonal band in second-order susceptibility at \fsumb{} (yellow diagonal in pink edges) and an additional nonlinear band appeared at \fsumehalf{} (\subfigrefb{model_and_data}{A}). The matched model reproduces the same diagonal at \fsumb{} (\subfigrefb{model_and_data}\,\panel[ii]{B}) but not the diagonal at \fsumehalf. By increasing the number of trials in the simulation, the signal-to-noise ratio and thus the estimation of the nonlinearity structures can be improved. Still, even with 1 million repetitions, no changes are observable in the nonlinearity structures (\subfigrefb{model_and_data}\,\panel[iii]{B}). The increased trial count, however, goes along with generally smaller second-order susceptibility values (\subfigrefb{model_and_data}\,\panel[iii]{B}). The decrease of the second-order susceptibility follows the relation $1/ \sqrt{N} $ (\figrefb{trialnr}).
\subsection*{High level of total noise hides parts of the nonlinearity structure}
Traces of the expected structure of second-order susceptibility are found in both ampullary and p-type electrosensory afferents. In the recordings shown above (\figrefb{fig:cells_suscept}, \figrefb{fig:ampullary}), the nonlinear response is strong whenever the two frequencies (\fone{}, \ftwo{}) fall onto the antidiagonal \fsumb{}, which is in line with theoretical expectations \cite{Voronenko2017}. However, a pronounced nonlinear response for frequencies with \foneb{} or \ftwob{}, although predicted by theory, cannot be observed. Here we investigate how these discrepancies can be understood.
In the electrophysiological experiments we only have a limited number of trials and this insufficient averaging may occlude the full nonlinear structure. This limitation can be overcome by using a computational model for the P-unit, a stochastic leaky integrate-and-fire model with adaptation current and dendritic preprocessing, with parameters fitted to the experimentally recorded P-unit (\figrefb{flowchart}) \cite{Barayeu2023}. The model faithfully reproduces the second-order susceptibility of a low-CV cell estimated from the same low number of repetitions ($\n{}=11$, compare \panel{A} and \panel[ii]{B} in \figrefb{model_and_data}). In the model we can increase the number of repetitions substantially but still do not observe the full nonlinear structure ($\n{}=10^6$, \subfigrefb{model_and_data}\,\panel[iii]{B}). A possible reason for this could be that by applying a broadband stimulus the effective input-noise level is increased and this may linearize the signal
transmission \cite{Longtin1993, Chialvo1997, Roddey2000, Voronenko2017}. Assuming that the intrinsic noise level in this P-unit is small enough, the full nonlinear structure should appear in the limit of weak AMs. Again, this cannot be done experimentally, because the problem of insufficient averaging becomes even more severe for weak AMs (low contrast). In the model, however, we know the time course of the intrinsic noise and can use this knowledge to determine the susceptibilities by input-output correlations via the Furutsu-Novikov theorem \cite{Furutsu1963, Novikov1965}. This theorem, in its simplest form, states that the cross-spectrum $S_{x\eta}(\omega)$ of a Gaussian noise $\eta(t)$ driving a nonlinear system and the system's output $x(t)$ is proportional to the linear susceptibility $S_{x\eta}(\omega)=\chi(\omega)S_{\eta\eta}(\omega)$. Here $\chi(\omega)$ characterizes the linear response to an infinitely weak signal $\varepsilon s(t)$ in the presence of the background noise $\eta(t)$. Likewise, the nonlinear susceptibility can be determined in an analogous fashion from higher-order input-output cross-spectra (see methods \eqref{eq:crosshigh}, \eqref{eq:susceptibility}) \cite{Egerland2020}. In line with an alternative derivation of the Furutsu-Novikov theorem \cite{Lindner2022}, we can split the total noise and consider a fraction of it as stimulus. This allows to calculate the susceptibility from the cross-spectrum between the output and this stimulus fraction of the noise. Adapting this approach to our P-unit model (see methods), we replace the intrinsic noise by an approximately equivalent RAM stimulus $s_\xi(t)$ and a weak remaining intrinsic noise ($\sqrt{2D \, c_{noise}} \cdot \xi(t)$, with $c_\text{noise} = 0.1$, see methods \eqref{eq:ram_split}, \eqref{eq:Noise_split_intrinsic}, \eqref{eq:Noise_split_intrinsic_dendrite}, \subfigrefb{model_and_data}\,\panel[i]{C}). 
We tune the amplitude / standard deviation of the RAM stimulus $s_\xi(t)$ such that the output firing rate and variability (CV) are the same as in the baseline activity (i.e. full intrinsic noise $\sqrt{2D}\xi(t)$ in the voltage equation but no RAM) and compute the second- and third-order cross-spectra between the RAM part of the noise $s_\xi(t)$ and the output spike train. This procedure has two consequences: (i) by means of the cross-spectrum between the output and $s_\xi(t)$, which is a large fraction of the noise, the signal-to-noise ratio of the measured susceptibilities is drastically improved; (ii) the total noise in the system has been reduced (by what was before the external RAM stimulus $s(t)$), which makes the system more nonlinear. For both reasons we now see the expected nonlinear features in the second-order susceptibility for a sufficient number of trials (\subfigrefb{model_and_data}\,\panel[iii]{B}), but not for a number of trials comparable to the experiment (\subfigrefb{model_and_data}\,\panel[ii]{B}). In addition to the strong response for \fsumb{}, we now also observe pronounced nonlinear responses at \foneb{} and \ftwob{} (vertical and horizontal lines, \subfigrefb{model_and_data}\,\panel[iii]{C}). Note, that the increased number of repetitions goes along with a substantial reduction of second-order susceptibility values (\subfigrefb{model_and_data}\,\panel[iii]{B}), that saturate in its peak values for $N>10^5$ (\figrefb{trialnr}). This demonstrates the limited reliability of the statistics that is based on 11 trials. However, we would like to point out that already the limited number of trials as used in the experiments
The nonlinearity seems to depend on the CV, which is presumably related to the level of intrinsic noise in the cells. In the next step we address whether the level of intrinsic noise masks the expected structures. One option to reduce the impact of this intrinsic noise is to average it out over many repetitions of the same stimulus.
%Note that this line doesn't appear in the susceptibility matrix of the model (\subfigrefb{model_and_data}{C})Each cell has an intrinsic noise level (\subfigrefb{model_and_data}{A}, bottom).
%The signal component (purple) compensates for this total noise reduction, is not a weak signal anymore and
@ -681,7 +691,11 @@ The baseline firing rate \fbase{} was calculated as the number of spikes divided
\paragraph{White noise analysis} \label{response_modulation}
In the stimulus driven case, the neuronal activity of the recorded cell is modulated around the average firing rate that is similar to \fbase{} and in that way encodes the time-course of the stimulus.
The time-dependent response of the neuron was estimated from the spiking activity $x_k(t) = \sum_i\delta(t-t_{k,i})$ recorded for each stimulus presentation, $k$, by kernel convolution with a Gaussian kernel
The time-dependent response of the neuron was estimated from the spiking activity
\begin{equation}\label{eq:spikes}
x_k(t) = \sum_i\delta(t-t_{k,i})
\end{equation}
recorded for each stimulus presentation, $k$, by kernel convolution with a Gaussian kernel
\begin{equation}
K(t) = \scriptstyle \frac{1}{\sigma\sqrt{2\pi}} e^{-\frac{t^2}{2\sigma^2}}
@ -696,42 +710,42 @@ where $*$ denotes the convolution. $r(t)$ is then calculated as the across-trial
r(t) = \left\langle r_k(t) \right\rangle _k.
\end{equation}
To quantify how strongly the neuron is driven by the stimulus we quantified the response modulation as the standard deviation $\sigma_{M} = \sqrt{\langle (r(t)-\langle r(t) \rangle)^2\rangle}$.
To quantify how strongly the neuron is driven by the stimulus we quantified the response modulation as the standard deviation $\sigma_{M} = \sqrt{\langle (r(t)-\langle r(t) \rangle_t)^2\rangle_t}$, where $\langle \cdot \rangle_t$ indicates averaging over time.
\paragraph{Spectral analysis}\label{susceptibility_methods}
The neuron is driven by the stimulus and thus the neuronal response $r(t)$ depends on the stimulus $s(t)$. To investigate the relation between stimulus and response we calculated the first- and second-order susceptibility of the neuron to the stimulus in the frequency domain. The Fourier transforms of $s(t)$ and $r(t)$ are denoted as $\tilde s(\omega)$ and $\tilde r(\omega)$ and were calculated according to $\tilde x(\omega) = \int_{0}^{T} \, x(t) \cdot e^{- i \omega t}\,dt$, with $T$ being the signal duration. Stimuli had a duration of 10\,s and spectra of stimulus and response were calculated in separate segments of 0.5\,s with no overlap resulting in a spectral resolution of 2\,Hz. $r(t)$ was estimated by kernel convolution with a box kernel that had a width matching the sampling interval to preserve temporal accuracy as far as possible.
The neuron is driven by the stimulus and thus the spiking response $x(t)$ (\eqref{eq:spikes}) depends on the stimulus $s(t)$. To investigate the relation between stimulus and response we calculated the first- and second-order susceptibility of the neuron to the stimulus in the frequency domain. The Fourier transforms of $s(t)$ and $x(t)$ are denoted as $\tilde s(\omega)$ and $\tilde x(\omega)$ and were calculated according to $\tilde x(\omega) = \int_{0}^{T} \, x(t) \cdot e^{- i \omega t}\,dt$, with $T$ being the signal duration. Stimuli had a duration of 10\,s and spectra of stimulus and response were calculated in separate segments of 0.5\,s with no overlap resulting in a spectral resolution of 2\,Hz.
The power spectrum was calculated as
The power spectrum of the stimulus $s(t)$ was calculated as
\begin{equation}
\label{powereq}
\begin{split}
S_{ss}(\omega) = \frac{\langle \tilde s(\omega) \tilde s^* (\omega)\rangle}{T}
\end{split}
\end{equation}
with $\tilde s^* $ being the complex conjugate and $\langle ... \rangle$ denoting averaging over the segments. The cross-spectrum $S_{rs}(\omega)$ was calculated according to
with $\tilde s^* $ being the complex conjugate and $\langle ... \rangle$ denoting averaging over the segments. The power spectrum of the spike trains $S_{xx}$ was calculated accordingly. The cross-spectrum $S_{xs}(\omega)$ between stimulus and evoked spike trains was calculated according to
\begin{equation}
\label{cross}
\begin{split}
S_{rs}(\omega) = \frac{\langle \tilde r(\omega) \tilde s^* (\omega)\rangle}{T}
S_{xs}(\omega) = \frac{\langle \tilde x(\omega) \tilde s^* (\omega)\rangle}{T}
\end{split}
\end{equation}
From $S_{rs}(\omega)$ and $ S_{ss}(\omega)$ we calculated the linear susceptibility (transfer function) as
From $S_{xs}(\omega)$ and $ S_{ss}(\omega)$ we calculated the linear susceptibility (transfer function) as
\begin{equation}
\label{linearencoding_methods}
\begin{split}
\chi_{1}(\omega) = \frac{S_{rs}(\omega) }{S_{ss}(\omega) }
\chi_{1}(\omega) = \frac{S_{xs}(\omega) }{S_{ss}(\omega) }
\end{split}
\end{equation}
The second-order cross-spectrum that depends on the two frequencies $\omega_1$ and $\omega_2$ was calculated according to
\begin{equation}
\label{eq:crosshigh}
S_{rss} (\omega_{1},\omega_{2}) = \frac{\langle \tilde r (\omega_{1}+\omega_{2}) \tilde s^* (\omega_{1})\tilde s^* (\omega_{2}) \rangle}{T}
S_{xss} (\omega_{1},\omega_{2}) = \frac{\langle \tilde x (\omega_{1}+\omega_{2}) \tilde s^* (\omega_{1})\tilde s^* (\omega_{2}) \rangle}{T}
\end{equation}
The second-order susceptibility was calculated by dividing the higher-order cross-spectrum by the spectral power at the respective frequencies.
\begin{equation}
\label{eq:susceptibility}
%\begin{split}
\chi_{2}(\omega_{1}, \omega_{2}) = \frac{S_{rss} (\omega_{1},\omega_{2})}{2S_{ss} (\omega_{1}) S_{ss} (\omega_{2})}
\chi_{2}(\omega_{1}, \omega_{2}) = \frac{S_{xss} (\omega_{1},\omega_{2})}{2S_{ss} (\omega_{1}) S_{ss} (\omega_{2})}
%\end{split}
\end{equation}
% Applying the Fourier transform this can be rewritten resulting in:
@ -747,7 +761,7 @@ The absolute value of a second-order susceptibility matrix is visualized in \fig
We expect to see nonlinear susceptibility when $\omega_1 + \omega_2 = \fbase{}$. To characterize this we calculated the peakedness of the nonlinearity (PNL) as
\begin{equation}
\label{eq:nli_equation}
\nli{} = \frac{ D(\max(\fbase{}-5\,\rm{Hz} \leq f \leq \fbase{}+5\,\rm{Hz}))}{\mathrm{med}(D(f))}
\nli{} = \frac{ \max D(\fbase{}-5\,\rm{Hz} \leq f \leq \fbase{}+5\,\rm{Hz})}{\mathrm{med}(D(f))}
\end{equation}
For this index, the second-order susceptibility matrix was projected onto the diagonal $D(f)$, by taking the mean of the anti-diagonals. The peakedness at the frequency $\fbase{}$ in $D(f)$ was quantified by finding the maximum of $D(f)$ in the range $\fbase{} \pm 5$\,Hz (\subfigrefb{fig:cells_suscept}{G}) and dividing it by the median of $D(f)$.
@ -826,9 +840,9 @@ The model neurons were driven with similar stimuli as the real neurons in the or
The random amplitude modulation (RAM) input to the model was created by drawing random amplitude and phases from Gaussian distributions for each frequency component in the range 0--300 Hz. An inverse Fourier transform was applied to get the final amplitude RAM time-course. The input to the model was then
\begin{equation}
\label{eq:ram_equation}
x(t) = (1+ RAM(t)) \cdot \cos(2\pi f_{EOD} t)
y(t) = (1+ s(t)) \cdot \cos(2\pi f_{EOD} t)
\end{equation}
\note{fix stimulus x and y notation and RAM}
From each simulation run, the first second was discarded and the analysis was based on the last second of the data. The resulting spectra thus have a spectral resolution of 1\,Hz.
% \subsection{Second-order susceptibility analysis of the model}
% %\subsubsection{Model second-order nonlinearity}
@ -836,24 +850,25 @@ From each simulation run, the first second was discarded and the analysis was ba
% The second-order susceptibility in the model was calculated with \Eqnref{eq:susceptibility}, resulting in matrices as in \figrefb{model_and_data} and \figrefb{fig:model_full}. For this, the model neuron was presented the input $x(t)$ for 2\,s, with the first second being dismissed as the transient. The second-order susceptibility calculation was performed on the last second, resulting in a frequency resolution of 1\,Hz.
\subsection*{Model noise split into a noise and a stimulus component}\label{intrinsicsplit_methods}
According to the Novikov-Furutsu Theorem \cite{Novikov1965, Furutsu1963} the total noise of a LIF model ($\xi$) can be split up into several independent noise processes with the same correlation function. Here we split the internal noise into two parts: (i) One part is treated as a driving input signal $\xi_{signal} = \sqrt{\rho \, 2D \,c_{signal}} \cdot \xi(t) $ and used to calculate the cross-spectra in \Eqnref{eq:crosshigh} and (ii) the remaining noise $\xi_{noise} = \sqrt{2D \, c_{noise}} \cdot \xi(t)$ that is treated as pure noise. In this way the effective signal-to-noise ratio can be increased while maintaining the total noise in the system. $\rho$ a scaling factor that compensates (see below) for the signal transformations the amplitude modulation stimulus undergoes in the model, i.e. the threshold and the dendritic lowpass. In our case the model has a carrier (the fish's self-generated EOD) and we thus want to drive the model with an amplitude modulation stimulus
According to the Novikov-Furutsu Theorem \cite{Novikov1965, Furutsu1963} the total noise of a LIF model ($\xi$) can be split up into several independent noise processes with the same correlation function. Here we split the internal noise into two parts: (i) One part is treated as a driving input signal $s_\xi(t)$ \note{hier umschreiben das xi muss ein RAM sein} and used to calculate the cross-spectra in \Eqnref{eq:crosshigh} and (ii) the remaining noise $\sqrt{2D \, c_{noise}} \cdot \xi(t)$ that is treated as pure noise. In this way the effective signal-to-noise ratio can be increased while maintaining the total noise in the system. $\rho$ is a scaling factor that compensates (see below) for the signal transformations the amplitude modulation stimulus undergoes in the model, i.e. the threshold and the dendritic lowpass. In our case the model has a carrier (the fish's self-generated EOD) and we thus want to drive the model with an amplitude modulation stimulus
%\sqrt{\rho \, 2D \,c_{signal}} \cdot \xi(t)
%(1-c_{signal})\cdot\xi$c_{noise} = 1-c_{signal}$
%c_{signal} \cdot \xi
\begin{equation}
\label{eq:ram_split}
x(t) = (1+ \xi_{signal}) \cdot \cos(2\pi f_{EOD} t)
y(t) = (1+ s_\xi(t)) \cdot \cos(2\pi f_{EOD} t)
\end{equation}
\begin{equation}
\label{eq:Noise_split_intrinsic_dendrite}
\tau_{d} \frac{d V_{d}}{d t} = -V_{d}+ \lfloor x(t) \rfloor_{0}
\tau_{d} \frac{d V_{d}}{d t} = -V_{d}+ \lfloor y(t) \rfloor_{0}
\end{equation}
\begin{equation}
\label{eq:Noise_split_intrinsic}
\tau_{m} \frac{d V_{m}}{d t} = - V_{m} + \mu + \alpha V_{d} - A + \xi_{noise}
\tau_{m} \frac{d V_{m}}{d t} = - V_{m} + \mu + \alpha V_{d} - A + \sqrt{2D \, c_{noise}} \cdot \xi(t)
\end{equation}
% das stimmt so, das c kommt unter die Wurzel!
@ -895,13 +910,16 @@ A big portion of the total noise was assigned to the signal component ($c_{signa
% here. See http://journals.plos.org/plosone/s/latex for
% step-by-step instructions.
%
\bibliography{references}
%\bibliographystyle{apalike}%alpha}%}%alpha}%apalike}
\bibliography{journalsabbrv,references}
% \bibliographystyle{apalike} %or any other style you like
%\bibliography{references}
%\bibliography{journalsabbrv,references}
\newpage
\section*{Supporting information}
\%subsection*{High-CV P-units do not exhibit increased nonlinear interactions at \fsum}
%\subsection*{High-CV P-units do not exhibit increased nonlinear interactions at \fsum}
@ -924,12 +942,12 @@ CVs in P-units can range up to 1.5 \cite{Grewe2017, Hladnik2023}. We show the sa
}
\end{figure*}
\bibliographystyle{iscience}
% \bibliographystyle{iscience}
%\bibliographystyle{apalike}%alpha}%}%alpha}%apalike}
%\bibliographystyle{elsarticle-num-names}%elsarticle-num-names}
%\ExecuteBibliographyOptions{sorting=nty}
%\bibliographystyle{authordate2}
\bibliography{journalsabbrv,references}
%\bibliography{journalsabbrv,references}
%\begin{thebibliography}{00}
\end{document}

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 17 KiB

After

Width:  |  Height:  |  Size: 27 KiB

View File

@ -64,7 +64,7 @@ def trialnr(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1], cells
ref_types, adapt_types, noises_added, level_extraction, receiver_contrast, contrasts, ]
nr = '2'
# embed()
# cell_contrasts = ["2013-01-08-aa-invivo-1"]
# cells_triangl_contrast = np.concatenate([cells_all,cell_contrasts])
# cells_triangl_contrast = 1
@ -139,12 +139,12 @@ def trialnr(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1], cells
if len(stack)> 0:
model_show, stack_plot, stack_plot_wo_norm = get_stack(cell, stack)
stacks.append(stack_plot)
perc95.append(np.percentile(stack_plot,95))
perc05.append(np.percentile(stack_plot, 5))
perc95.append(np.percentile(stack_plot,99.9))
perc05.append(np.percentile(stack_plot, 0))
median.append(np.percentile(stack_plot, 50))
stacks_wo_norm.append(stack_plot_wo_norm)
perc95_wo_norm.append(np.percentile(stack_plot_wo_norm,95))
perc05_wo_norm.append(np.percentile(stack_plot_wo_norm, 5))
perc95_wo_norm.append(np.percentile(stack_plot_wo_norm,99.9))
perc05_wo_norm.append(np.percentile(stack_plot_wo_norm, 0))
median_wo_norm.append(np.percentile(stack_plot_wo_norm, 50))
else:
@ -158,18 +158,53 @@ def trialnr(eod_metrice = False, width=0.005, nffts=['whole'], powers=[1], cells
perc05_wo_norm.append(float('nan'))
median_wo_norm.append(float('nan'))
ax = plt.subplot(1,1,1)
ax.plot(trial_nrs_here, perc05, color = 'grey')
ax.plot(trial_nrs_here, perc95, color = 'grey')
ax.plot(trial_nrs_here, median, color = 'black', label = 'median')
ax.fill_between(trial_nrs_here, perc05, perc95, color='grey')
ax.set_xscale('log')
ax.set_yscale('log')
fig, ax = plt.subplots(2,2)
#ax.plot(trial_nrs_here, perc05, color = 'grey')
ax[0,0].plot(trial_nrs_here, perc95, color = 'grey')
#ax.plot(trial_nrs_here, median, color = 'black', label = 'median')
#ax.scatter(trial_nrs_here, perc05, color = 'grey')
ax[0,0].scatter(trial_nrs_here, perc95, color = 'black')
ax.set_xlabel('Trials [$N$]')
ax.set_ylabel('$\chi_{2}$\,[Hz]')
ax[0,1].plot(trial_nrs_here, perc95_wo_norm / trial_nrs_here, color = 'grey')
#ax.plot(trial_nrs_here, median, color = 'black', label = 'median')
#ax.scatter(trial_nrs_here, perc05, color = 'grey')
ax[0,1].scatter(trial_nrs_here, perc95_wo_norm / trial_nrs_here, color = 'black')
##################################
ax[1,0].plot(trial_nrs_here, median, color = 'grey')
#ax.plot(trial_nrs_here, median, color = 'black', label = 'median')
#ax.scatter(trial_nrs_here, perc05, color = 'grey')
ax[1,0].scatter(trial_nrs_here, median, color = 'black')
ax[1,1].plot(trial_nrs_here, median_wo_norm / trial_nrs_here, color = 'grey')
#ax.plot(trial_nrs_here, median, color = 'black', label = 'median')
#ax.scatter(trial_nrs_here, perc05, color = 'grey')
ax[1,1].scatter(trial_nrs_here, median_wo_norm / trial_nrs_here, color = 'black')
#ax[2].plot(trial_nrs_here, perc95, color = 'grey')
#ax.plot(trial_nrs_here, median, color = 'black', label = 'median')
#ax.scatter(trial_nrs_here, perc05, color = 'grey')
#ax[2].scatter(trial_nrs_here, perc95, color = 'black')
#ax.scatter(trial_nrs_here, median, color = 'black', label = 'median')
#ax.fill_between(trial_nrs_here, perc05, perc95, color='grey')
ax[0,0].set_xscale('log')
ax[0,0].set_yscale('log')
ax[0,1].set_xscale('log')
ax[0,1].set_yscale('log')
ax[1,0].set_xscale('log')
ax[1,0].set_yscale('log')
ax[1,1].set_xscale('log')
ax[1,1].set_yscale('log')
ax[0,0].set_xlabel('Trials [$N$]')
ax[0,0].set_ylabel('$\chi_{2}$\,[Hz]')
ax[1,0].set_xlabel('Trials [$N$]')
ax[1,0].set_ylabel('$\chi_{2}$\,[Hz]')
ax[0,1].set_xlabel('Trials [$N$]')
ax[0,1].set_ylabel('$cross$\,[Hz]')
ax[1,1].set_xlabel('Trials [$N$]')
ax[1,1].set_ylabel('$cross$\,[Hz]')
''' ax = plt.subplot(1,3,2)
ax.plot(trial_nrs_here, perc05_wo_norm, color = 'grey')
ax.plot(trial_nrs_here, perc95_wo_norm, color = 'grey')
@ -228,7 +263,7 @@ if __name__ == '__main__':
model = resave_small_files("models_big_fit_d_right.csv", load_folder='calc_model_core')
cells = model.cell.unique()
# embed()
params = {'cells': cells}
show = True

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -2,7 +2,7 @@ import os
try:
from numba import jit
# embed()
except ImportError:
def jit(nopython):
def decorator_jit(func):
@ -90,7 +90,7 @@ if cont_other_dir == False:
if ('code' not in inspect.stack()[-1][1]) | (not (('alex' in os.getlogin()) | ('rudnaya' in os.getlogin()))):
version = 'public' # für alle sollte version public sein!
# embed()
# this we do only in the develop mode
if version == 'develop': #
copy = True
@ -177,7 +177,7 @@ def plt_scatter_four(grid, frame, cell_types, cell_type_type, annotate, colors):
# frame_all = frame[(frame[cell_type_type] == cell_type)]
frame_g = frame[
(frame[cell_type_type] == cell_type_it) & ((frame.gwn == True) | (frame.fs == True))]
# embed()
plt_cv_fr(annotate, ax2, add[1], frame_g, colors, cell_type_it)