21.09

parent 042bdd7aa3
commit f1b7291ecf

apteronotus_code/sin_all_normal.py (Normal file, 64 lines)
@@ -0,0 +1,64 @@
import matplotlib.pyplot as plt
import numpy as np
import pylab
from IPython import embed
from scipy.optimize import curve_fit
from jar_functions import gain_curve_fit
from jar_functions import avgNestedLists


identifier = [#'2018lepto1',
              #'2018lepto4',
              #'2018lepto5',
              #'2018lepto76',
              '2018lepto98',
              '2019lepto03',
              #'2019lepto24',
              #'2019lepto27',
              #'2019lepto30',
              #'2020lepto04',
              #'2020lepto06',
              '2020lepto16',
              '2020lepto19',
              '2020lepto20'
              ]

tau = []
f_c = []
for ID in identifier:
    print(ID)
    amf = np.load('5Hz_amf_%s.npy' %ID)
    gain = np.load('5Hz_gain_%s.npy' %ID)

    sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
    #print('tau:', sinv[0])
    tau.append(sinv[0])
    f_cutoff = abs(1 / (2*np.pi*sinv[0]))
    print('f_cutoff:', f_cutoff)
    f_c.append(f_cutoff)


amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]

all = []

for ident in identifier:
    data = np.load('5Hz_gain_%s.npy' %ident)
    all.append(data)

av = avgNestedLists(all)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, av, 'o')
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_title('gaincurve_average_allfish_5Hz')
ax.set_ylabel('gain [Hz/(mV/cm)]')
ax.set_xlabel('envelope_frequency [Hz]')
ax.set_ylim(0.0008, )
ax.plot(f_c, np.full((len(identifier)), 0.0015), 'o', label = 'cutoff frequencies')
ax.legend()

plt.show()
embed()
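Note: gain_curve_fit comes from jar_functions, which is not part of this commit. Judging from the cutoff computation f_cutoff = abs(1 / (2*np.pi*sinv[0])), it presumably describes a first-order low-pass gain curve with time constant tau. A minimal sketch of such a fit function, under that assumption (the parameter names are placeholders, not the actual signature):

import numpy as np

def gain_curve_fit(amf, tau, alpha):
    # assumed first-order low-pass: gain drops off above f_c = 1 / (2*pi*tau)
    return alpha / np.sqrt(1 + (2 * np.pi * amf * tau)**2)

With this form, curve_fit returns the fitted tau as sinv[0], and abs(1 / (2*np.pi*sinv[0])) is the corresponding cutoff frequency, which is exactly what the loop above collects into f_c.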
97
apteronotus_code/sin_all_uniform.py
Normal file
97
apteronotus_code/sin_all_uniform.py
Normal file
@ -0,0 +1,97 @@
import matplotlib.pyplot as plt
import numpy as np
import pylab
from IPython import embed
from scipy.optimize import curve_fit
from jar_functions import gain_curve_fit
from jar_functions import avgNestedLists


identifier_uniform = ['2018lepto1',
                      # '2018lepto4',
                      # '2018lepto5',
                      #'2018lepto76',
                      '2018lepto98',
                      # '2019lepto03',
                      '2019lepto24',
                      #'2019lepto27',
                      # '2019lepto30',
                      '2020lepto04',
                      # '2020lepto06',
                      # '2020lepto16',
                      '2020lepto19',
                      # '2020lepto20'
                      ]
identifier = ['2018lepto1',
              '2018lepto4',
              '2018lepto5',
              '2018lepto76',
              '2018lepto98',
              '2019lepto03',
              '2019lepto24',
              '2019lepto27',
              '2019lepto30',
              '2020lepto04',
              '2020lepto06',
              '2020lepto16',
              '2020lepto19',
              '2020lepto20'
              ]

tau = []
f_c = []
for ID in identifier:
    print(ID)
    amf = np.load('amf_%s.npy' %ID)
    gain = np.load('gain_%s.npy' %ID)

    sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
    #print('tau:', sinv[0])
    tau.append(sinv[0])
    f_cutoff = abs(1 / (2*np.pi*sinv[0]))
    print('f_cutoff:', f_cutoff)
    f_c.append(f_cutoff)

tau_uniform = []
f_c_uniform = []
for ID in identifier_uniform:
    #print(ID)
    amf = np.load('amf_%s.npy' %ID)
    gain = np.load('gain_%s.npy' %ID)

    sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
    #print('tau:', sinv[0])
    tau_uniform.append(sinv[0])
    f_cutoff = abs(1 / (2*np.pi*sinv[0]))
    #print('f_cutoff:', f_cutoff)
    f_c_uniform.append(f_cutoff)
amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]

all = []
new_all = []
for ident in identifier:
    data = np.load('gain_%s.npy' %ident)
    all.append(data)
for ident in identifier_uniform:
    data = np.load('gain_%s.npy' % ident)
    new_all.append(data)

av = avgNestedLists(all)
new_av = avgNestedLists(new_all)
lim = 0.001
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, av, 'o', color = 'orange', label = 'normal')
ax.plot(amf, new_av, 'o', color = 'blue', label = 'uniformed')
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_title('gaincurve_average_allfish')
ax.set_ylabel('gain [Hz/(mV/cm)]')
ax.set_xlabel('envelope_frequency [Hz]')
ax.set_ylim(0.0008, )
ax.plot(f_c, np.full((len(identifier)), 0.0015), 'o', color = 'orange', label = 'all cutoff frequencies')
ax.plot(f_c_uniform, np.full((len(identifier_uniform)), 0.001), 'o', color = 'blue', label = 'uniformed cutoff frequencies')
ax.legend()

plt.show()
embed()
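Note: avgNestedLists is also taken from jar_functions and is not shown in this commit. Both scripts use it to average the per-fish gain arrays, which do not necessarily all have the same length. A rough sketch of what such a helper could do (an assumption, not the actual implementation):

import numpy as np

def avgNestedLists(nested):
    # assumed behaviour: element-wise mean over lists of unequal length,
    # treating missing entries at the end of shorter lists as absent
    longest = max(len(lst) for lst in nested)
    padded = np.full((len(nested), longest), np.nan)
    for i, lst in enumerate(nested):
        padded[i, :len(lst)] = lst
    return np.nanmean(padded, axis=0)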
@@ -43,12 +43,13 @@ for infodataset in datasets:
 
 for ID in identifier:
     base_path = 'D:\\jar_project\\JAR\\eigenmannia\\step\\%s' %ID
-    res_df = []
+    response = []
+    stim_ampl = []
     for idx, dataset in enumerate(os.listdir(base_path)):
         dataset = os.path.join(base_path, dataset, 'beats-eod.dat')
         print(dataset)
         #input of the function
-        frequency, time, amplitude, eodf, deltaf, stimulusf, duration, pause = parse_dataset(dataset)
+        frequency, time, amplitude, eodf, deltaf, stimulusf, stimulusamplitude, duration, pause = parse_dataset(dataset)
         dm = np.mean(duration)
         pm = np.mean(pause)
         timespan = dm + pm
@@ -57,20 +58,15 @@ for ID in identifier:
         if len(frequency) == 5:
             continue
 
-        norm, base, jar = norm_function(frequency, time, onset_point=dm - dm, offset_point=dm)  # dm-dm only works if onset = 0 sec
-        print(jar)
-        if jar[0] == 0.0:
-            continue
+        mf, tnew = mean_traces(start, stop, timespan, frequency, time)  # maybe fixed timespan/sampling rate
 
-        mf, tnew = mean_traces(start, stop, timespan, norm, time)  # maybe fixed timespan/sampling rate
 
         cf, ct = mean_noise_cut_eigen(mf, tnew, n=1250)
 
-        cf_arr = np.array(cf)
-        ct_arr = np.array(ct)
+        onset_point = dm - dm
+        offset_point = dm
+        onset_end = onset_point - 10
+        offset_start = offset_point - 10
 
-        freq_all.append(cf_arr)
-        time_all.append(ct_arr)
-
         b = []
         for index, i in enumerate(ct):
@@ -84,15 +80,37 @@ for ID in identifier:
                 print(h)
                 print(indexx)
                 print(cf[indexx])
 
+        ''' sounds good, doesn't work somehow: in norm division by 0 (jar) or the index doesn't fit
+        norm, base, jar = norm_function(frequency, time, onset_point=dm - dm,
+                                        offset_point=dm)  # dm-dm only works if onset = 0 sec
+        b = []
+        for index, i in enumerate(ct):
+            if i > -45 and i < -5:
+                b.append(cf[index])
+
+        j = []
+        for indexx, h in enumerate(ct):
+            if h > 195 and h < 145:
+                j.append(cf[indexx])
+                print(h)
+                print(indexx)
+                print(cf[indexx])
+        b = np.median(cf[(ct >= onset_end) & (ct < onset_point)])
+
+        j = np.median(cf[(ct >= offset_start) & (ct < offset_point)])
+
+        '''
+
         r = np.median(j) - np.median(b)
-        #response.append(r)
-        embed()
+        response.append(r)
+        stim_ampl.append(stimulusamplitude)
+    res_ampl = sorted(zip(stim_ampl, response))
     base_line = plt.axhline(y = 0, color = 'black', ls = 'dotted', linewidth = '1')
-    plt.xlim([-10,220])
-    plt.xlabel('time [s]')
-    plt.ylabel('rel. JAR magnitude')
-    plt.title('relative JAR')
+
+    plt.xlabel('Stimulusamplitude')
+    plt.ylabel('absolute JAR magnitude')
+    plt.title('absolute JAR')
     plt.savefig('relative JAR')
     plt.legend(loc = 'lower right')
     plt.show()
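Note on the commented-out block above: one likely reason it "doesn't work" is that cf and ct may be plain lists at this point (the np.array conversions cf_arr/ct_arr were removed in this commit), so the boolean-mask indexing cannot be applied. A short sketch of the intended windowed-median baseline, assuming the traces are converted to arrays first:

import numpy as np

cf_arr = np.array(cf)
ct_arr = np.array(ct)
# median frequency in the window just before stimulus onset and just before stimulus offset
b = np.median(cf_arr[(ct_arr >= onset_end) & (ct_arr < onset_point)])
j = np.median(cf_arr[(ct_arr >= offset_start) & (ct_arr < offset_point)])
r = j - b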
notes (12 changed lines)
@@ -1,10 +1,11 @@
-+ size/weight/dominance/temp into a csv, split it up and link it with the ID, or use pandas,
-extract EOD base frequency, scatter plot against cutoff frequency, ...
+- show: sin_all_uniform - sin_all_normal (i.e. 5Hz, leave out 0.001Hz?, gain_fit), eigenmannia_jar, plot_eigenmannia_jar (compare res_df_%s / res_df_%s_new),
+fish_properties (step_response_eigen does not work (norming/mean_trace)), phaseshift_all, weight/size from vanessa
+
++ size/weight/dominance/temp/EOD base frequency/..., scatter plot against cutoff frequency, ...
 - cutoff - dominance score
 - cutoff - basefrequency
-- gain - dominance_score: make a gain prediction per fish,
-do I have the right time constant from gain_fit for that?
-... since I do the prediction via sin and not via step anyway
+- gain - dominance_score: make a gain prediction per fish, does this time constant fit?
+- work through the data (fish_properties!
 + eigenmannia: make a specgram of pre_data next to the specgram of data to see whether it is an analysis error or an error in import_data
 - insight: I did not subtract the same mean for bm/jm..
 - res_df is better as such, but still relatively variable
@@ -13,6 +14,7 @@ extract EOD base frequency, scatter plot against cutoff frequency, ...
 + look at step eigen data
 - norming of data: what if in norm = ground / jar with jar == 0.0?
 + look at 5Hz data - compare
++ step response eigenmannia and eigenmannia response to deltaf: absolute JAR for now --> should I go to relative? (relative didn't work for me somehow)
 
 long term:
 - separate file with a script to display the finished data, use the fit code only for data processing
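For the first to-do item (fish properties in a csv linked by ID, scattered against cutoff frequency), a pandas-based sketch could look like the following; the file name fish_properties.csv and its column names are assumptions, not files that exist in this repository:

import pandas as pd
import matplotlib.pyplot as plt

props = pd.read_csv('fish_properties.csv')              # hypothetical: one row per fish with ID, size, weight, dominance, temp
cutoffs = pd.DataFrame({'ID': identifier, 'f_c': f_c})  # identifier and f_c as computed in sin_all_normal.py / sin_all_uniform.py
merged = props.merge(cutoffs, on='ID')

plt.scatter(merged['size'], merged['f_c'])
plt.xlabel('size')
plt.ylabel('cutoff frequency [Hz]')
plt.show()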