xaver 2020-09-21 20:12:16 +02:00
parent 042bdd7aa3
commit f1b7291ecf
4 changed files with 204 additions and 23 deletions

View File

@ -0,0 +1,64 @@
import matplotlib.pyplot as plt
import numpy as np
import pylab
from IPython import embed
from scipy.optimize import curve_fit
from jar_functions import gain_curve_fit
from jar_functions import avgNestedLists
identifier = [#'2018lepto1',
#'2018lepto4',
#'2018lepto5',
#'2018lepto76',
'2018lepto98',
'2019lepto03',
#'2019lepto24',
#'2019lepto27',
#'2019lepto30',
#'2020lepto04',
#'2020lepto06',
'2020lepto16',
'2020lepto19',
'2020lepto20'
]
tau = []
f_c = []
# fit the gain curve of every fish and derive the cutoff frequency from the fitted tau
for ID in identifier:
    print(ID)
    amf = np.load('5Hz_amf_%s.npy' %ID)
    gain = np.load('5Hz_gain_%s.npy' %ID)
    sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
    #print('tau:', sinv[0])
    tau.append(sinv[0])
    f_cutoff = abs(1 / (2*np.pi*sinv[0]))
    print('f_cutoff:', f_cutoff)
    f_c.append(f_cutoff)
amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]
all = []
for ident in identifier:
    data = np.load('5Hz_gain_%s.npy' %ident)
    all.append(data)
av = avgNestedLists(all)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, av, 'o')
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_title('gaincurve_average_allfish_5Hz')
ax.set_ylabel('gain [Hz/(mV/cm)]')
ax.set_xlabel('envelope_frequency [Hz]')
ax.set_ylim(0.0008, )
ax.plot(f_c, np.full((len(identifier)), 0.0015), 'o', label = 'cutoff frequencies')
ax.legend()
plt.show()
embed()
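Note: gain_curve_fit and avgNestedLists come from jar_functions and are not part of this commit. As a hedged reference only, a first-order low-pass model that would be consistent with the cutoff computation above (f_cutoff = 1 / (2*pi*tau)) could look like the following sketch; the parameter names tau and alpha are assumptions, not necessarily the real signature:

import numpy as np

def gain_curve_fit_sketch(amf, tau, alpha):
    # first-order low-pass gain: roughly constant for f << 1/(2*pi*tau),
    # rolling off above the cutoff frequency f_c = 1/(2*pi*tau)
    return alpha / np.sqrt(1 + (2 * np.pi * np.asarray(amf) * tau) ** 2)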

View File

@ -0,0 +1,97 @@
import matplotlib.pyplot as plt
import numpy as np
import pylab
from IPython import embed
from scipy.optimize import curve_fit
from jar_functions import gain_curve_fit
from jar_functions import avgNestedLists
identifier_uniform = ['2018lepto1',
# '2018lepto4',
# '2018lepto5',
#'2018lepto76',
'2018lepto98',
# '2019lepto03',
'2019lepto24',
#'2019lepto27',
# '2019lepto30',
'2020lepto04',
# '2020lepto06',
# '2020lepto16',
'2020lepto19',
# '2020lepto20'
]
identifier = ['2018lepto1',
'2018lepto4',
'2018lepto5',
'2018lepto76',
'2018lepto98',
'2019lepto03',
'2019lepto24',
'2019lepto27',
'2019lepto30',
'2020lepto04',
'2020lepto06',
'2020lepto16',
'2020lepto19',
'2020lepto20'
]
tau = []
f_c = []
# fit the gain curve of every fish and derive the cutoff frequency from the fitted tau
for ID in identifier:
    print(ID)
    amf = np.load('amf_%s.npy' %ID)
    gain = np.load('gain_%s.npy' %ID)
    sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
    #print('tau:', sinv[0])
    tau.append(sinv[0])
    f_cutoff = abs(1 / (2*np.pi*sinv[0]))
    print('f_cutoff:', f_cutoff)
    f_c.append(f_cutoff)
tau_uniform = []
f_c_uniform = []
for ID in identifier_uniform:
    #print(ID)
    amf = np.load('amf_%s.npy' %ID)
    gain = np.load('gain_%s.npy' %ID)
    sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
    #print('tau:', sinv[0])
    tau_uniform.append(sinv[0])
    f_cutoff = abs(1 / (2*np.pi*sinv[0]))
    #print('f_cutoff:', f_cutoff)
    f_c_uniform.append(f_cutoff)
amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]
all = []
new_all = []
for ident in identifier:
    data = np.load('gain_%s.npy' %ident)
    all.append(data)

for ident in identifier_uniform:
    data = np.load('gain_%s.npy' % ident)
    new_all.append(data)
av = avgNestedLists(all)
new_av = avgNestedLists(new_all)
lim = 0.001
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, av, 'o', color = 'orange', label = 'normal')
ax.plot(amf, new_av, 'o', color = 'blue', label = 'uniformed')
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_title('gaincurve_average_allfish')
ax.set_ylabel('gain [Hz/(mV/cm)]')
ax.set_xlabel('envelope_frequency [Hz]')
ax.set_ylim(0.0008, )
ax.plot(f_c, np.full((len(identifier)), 0.0015), 'o', color = 'orange', label = 'all cutoff frequencies')
ax.plot(f_c_uniform, np.full((len(identifier_uniform)), 0.001), 'o', color = 'blue', label = 'uniformed cutoff frequencies')
ax.legend()
plt.show()
embed()
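Note: avgNestedLists is also defined in jar_functions and not shown in this commit. Assuming it averages the per-fish gain lists element-wise even when their lengths differ, a minimal sketch could be:

import numpy as np

def avg_nested_lists_sketch(nested):
    # element-wise mean over lists of possibly different lengths;
    # shorter lists are NaN-padded and NaNs are ignored in the mean
    longest = max(len(lst) for lst in nested)
    padded = np.full((len(nested), longest), np.nan)
    for row, lst in enumerate(nested):
        padded[row, :len(lst)] = lst
    return np.nanmean(padded, axis=0)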

View File

@ -43,12 +43,13 @@ for infodataset in datasets:
for ID in identifier:
    base_path = 'D:\\jar_project\\JAR\\eigenmannia\\step\\%s' %ID
    res_df = []
    response = []
    stim_ampl = []
    for idx, dataset in enumerate(os.listdir(base_path)):
        dataset = os.path.join(base_path, dataset, 'beats-eod.dat')
        print(dataset)
        #input of the function
        frequency, time, amplitude, eodf, deltaf, stimulusf, duration, pause = parse_dataset(dataset)
        frequency, time, amplitude, eodf, deltaf, stimulusf, stimulusamplitude, duration, pause = parse_dataset(dataset)
        dm = np.mean(duration)
        pm = np.mean(pause)
        timespan = dm + pm
@ -57,20 +58,15 @@ for ID in identifier:
        if len(frequency) == 5:
            continue
        norm, base, jar = norm_function(frequency, time, onset_point=dm - dm, offset_point=dm)  # dm-dm only works if onset = 0 sec
        print(jar)
        if jar[0] == 0.0:
            continue
        mf, tnew = mean_traces(start, stop, timespan, norm, time)  # maybe fixed timespan/sampling rate
        mf, tnew = mean_traces(start, stop, timespan, frequency, time)  # maybe fixed timespan/sampling rate
        cf, ct = mean_noise_cut_eigen(mf, tnew, n=1250)
        cf_arr = np.array(cf)
        ct_arr = np.array(ct)
        onset_point = dm - dm
        offset_point = dm
        onset_end = onset_point - 10
        offset_start = offset_point - 10
        freq_all.append(cf_arr)
        time_all.append(ct_arr)
        b = []
        for index, i in enumerate(ct):
@ -84,15 +80,37 @@ for ID in identifier:
                print(h)
                print(indexx)
                print(cf[indexx])
        ''' sounds good, doesn't work somehow: division by 0 in norm (jar) or index doesn't fit
        norm, base, jar = norm_function(frequency, time, onset_point=dm - dm,
                                        offset_point=dm)  # dm-dm only works if onset = 0 sec
        b = []
        for index, i in enumerate(ct):
            if i > -45 and i < -5:
                b.append(cf[index])
        j = []
        for indexx, h in enumerate(ct):
            if h > 195 and h < 145:
                j.append(cf[indexx])
                print(h)
                print(indexx)
                print(cf[indexx])
        b = np.median(cf[(ct >= onset_end) & (ct < onset_point)])
        j = np.median(cf[(ct >= offset_start) & (ct < offset_point)])
        '''
        r = np.median(j) - np.median(b)
        #response.append(r)
        embed()
        response.append(r)
        stim_ampl.append(stimulusamplitude)
    res_ampl = sorted(zip(stim_ampl, response))
    base_line = plt.axhline(y = 0, color = 'black', ls = 'dotted', linewidth = 1)
    plt.xlim([-10,220])
    plt.xlabel('time [s]')
    plt.ylabel('rel. JAR magnitude')
    plt.title('relative JAR')
    plt.xlabel('Stimulusamplitude')
    plt.ylabel('absolute JAR magnitude')
    plt.title('absolute JAR')
    plt.savefig('relative JAR')
    plt.legend(loc = 'lower right')
    plt.show()
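Note: if cf and ct are plain lists, the boolean-mask indexing in the commented-out median-window variant above would fail, since masks only work on numpy arrays. A minimal sketch of that idea using the cf_arr / ct_arr arrays already built in the loop, shown only as a possible direction, not as the script's current method:

# median frequency in a window at the end of the baseline vs. a window before stimulus offset,
# using the numpy arrays instead of the plain lists cf / ct
onset_mask = (ct_arr >= onset_end) & (ct_arr < onset_point)
offset_mask = (ct_arr >= offset_start) & (ct_arr < offset_point)
r = np.median(cf_arr[offset_mask]) - np.median(cf_arr[onset_mask])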

notes
View File

@ -1,10 +1,11 @@
+ put size/weight/dominance/temp into a csv, split it and link it to the ID, or use pandas (a sketch follows after these notes),
extract eod base frequency, scatter plot against cutoff frequency, ...
- show: sin_all_uniform - sin_all_normal (i.e. 5Hz, leave out 0.001Hz?, gain_fit), eigenmannia_jar, plot_eigenmannia_jar (compare res_df_%s / res_df_%s_new),
fish_properties (step_response_eigen does not work (norming/mean_trace)), phaseshift_all, weight/size from vanessa
+ size/weight/dominance/temp/eod base frequency/... , scatter plot against cutoff frequency, ...
- cutoff - dominance score
- cutoff - base frequency
- gain - dominance_score: make a gain prediction per fish,
do I have the right time constant from gain_fit for that?
... since I do the prediction via sin and not via step
- gain - dominance_score: make a gain prediction per fish, does this time constant fit?
- look into the data (fish_properties!)
+ eigenmannia: make a specgram of pre_data next to a specgram of data to see whether it is an analysis error or an error in import_data
- insight: I did not subtract the same mean for bm/jm..
- res_df is better as such, but still relatively variable
@ -13,6 +14,7 @@ eod basefrequenz rausziehen, scatter plot gegen cutoff frequency, ...
+ look at step eigen data
- norming of data: what if in norm = ground / jar with jar == 0.0?
+ look at 5Hz data - compare
+ to the step response of eigenmannia and the eigenmannia response to deltaf, now as absolute JAR --> should I go to relative? (relative didn't work for me somehow)
long term:
- extra file with a script in it to display the finished data, use the fit code only for data processing
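For the first note (size/weight/dominance/temp/eod base frequency from a csv linked to the fish IDs, scattered against cutoff frequency), a minimal pandas sketch; the file name fish_properties.csv and its column names are assumptions, and identifier / f_c are meant to be the ID list and cutoff list computed in the sin_all script above:

import matplotlib.pyplot as plt
import pandas as pd

# hypothetical csv with one row per fish: ID, size, weight, dominance, temp, eod_basefrequency
props = pd.read_csv('fish_properties.csv')

# cutoff frequencies per fish (identifier and f_c assumed to come from the sin_all script)
cutoff = pd.DataFrame({'ID': identifier, 'f_cutoff': f_c})

merged = props.merge(cutoff, on='ID')
plt.scatter(merged['eod_basefrequency'], merged['f_cutoff'])
plt.xlabel('EOD base frequency [Hz]')
plt.ylabel('cutoff frequency [Hz]')
plt.show()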