10.09
commit ea00a01fd9 (parent 03dc051388)
@@ -12,7 +12,7 @@ from jar_functions import get_time_zeros
from jar_functions import import_data_eigen
from scipy.signal import savgol_filter

base_path = 'D:\\jar_project\\JAR\\eigenmannia'
base_path = 'D:\\jar_project\\JAR\\eigenmannia\\deltaf'

identifier = ['2013eigen13', '2015eigen16', '2015eigen17', '2015eigen19', '2020eigen22', '2020eigen32']

@@ -30,9 +30,18 @@ for ID in identifier:

# base with nh.read_eod
time, eod = nh.read_eod(datapath, duration = 2000) # instead of import_data with the tag 'manual jar' - then the onset should really be at 10 sec
dt = time[1] - time[0]
nfft = 2 **17
spec_0, freqs_0, times_0 = specgram(eod, Fs=1 / dt, detrend='mean', NFFT=nfft, noverlap=nfft * 0.95)
dbspec_0 = 10.0 * np.log10(spec_0) # in dB

plt.imshow(dbspec_0, cmap='jet', origin='lower', extent=(times_0[0], times_0[-1], 0, 1500), aspect='auto',
vmin=-80, vmax=-10)
plt.show()

zeropoints = get_time_zeros(time, eod, threshold=np.max(eod) * 0.1)

frequencies = 1 / np.diff(zeropoints)

window = np.ones(101) / 101
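The three lines above turn EOD zero-crossing times into a smoothed frequency trace. A minimal, self-contained sketch of that step (the zero-crossing times below are made up; in the script they come from get_time_zeros):

import numpy as np

# hypothetical zero-crossing times of a constant ~500 Hz EOD (seconds)
zeropoints = np.cumsum(np.full(2000, 1 / 500.0))

# instantaneous frequency: one value per interval between consecutive crossings
frequencies = 1 / np.diff(zeropoints)

# 101-point boxcar (moving average), the same window as in the script
window = np.ones(101) / 101
smoothed = np.convolve(frequencies, window, mode='same')
print(round(smoothed.mean()))  # ~500 Hz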
@@ -65,8 +74,10 @@ for ID in identifier:
jm = jar4 - np.mean(jar4) # data we take
cut_time_jar = times_0[:len(jar4)]

#plt.imshow(spec4_0, cmap='jet', origin='lower', extent=(times[0], times[-1], lim0, lim1), aspect='auto', vmin=-80, vmax=-10)
#plt.plot(cut_time_jar, jar4)
#plt.imshow(spec4, cmap='jet', origin='lower', extent=(times[0], times[-1], lim0, lim1), aspect='auto', vmin=-80,
#vmax=-10)
#plt.imshow(spec4_0, cmap='jet', origin='lower', extent=(times_0[0], times_0[-1], lim0_0, lim1_0), aspect='auto', vmin=-80, vmax=-10)
plt.plot(cut_time_jar, jm)
#plt.ylim(lim0_0, lim1_0)

# pre_data
@@ -75,8 +86,8 @@ for ID in identifier:
dbspec_1 = 10.0 * np.log10(spec_1) # in dB
power_1 = dbspec_1[:, 25]

fish_p_1 = power_1[(freqs_1 > 200) & (freqs_1 < 1000)]
fish_f_1 = freqs_1[(freqs_1 > 200) & (freqs_1 < 1000)]
fish_p_1 = power_1[(freqs_1 > 200) & (freqs_1 < 500)]
fish_f_1 = freqs_1[(freqs_1 > 200) & (freqs_1 < 500)]

index1 = np.argmax(fish_p_1)
eodf_1 = fish_f_1[index1]
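A short sketch of what the band-limited peak search above does (the power slice and frequency axis here are fabricated for illustration; the real ones come from the specgram call):

import numpy as np

# fabricated power slice (dB) with a peak near 430 Hz, standing in for power_1 / freqs_1
freqs_1 = np.linspace(0, 1500, 1501)
power_1 = -80 + 70 * np.exp(-0.5 * ((freqs_1 - 430) / 5) ** 2)

band = (freqs_1 > 200) & (freqs_1 < 500)   # restrict the search to the expected EOD band
fish_p_1 = power_1[band]
fish_f_1 = freqs_1[band]
eodf_1 = fish_f_1[np.argmax(fish_p_1)]     # frequency of the strongest peak, ~430 Hz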
@@ -94,13 +105,14 @@ for ID in identifier:
bm = base4 - np.mean(base4) # data we take
cut_time_base = times_1[:len(base4)] - times_1[-1]

#plt.plot(cut_time_base, base4)
plt.plot(cut_time_base, bm)

j = []
for idx, i in enumerate(times_0):
    if i > 45 and i < 55:
        j.append(jm[idx])

plt.plot(j)
plt.show()
r = np.median(j) - np.median(bm)

deltaf.append(df[0])
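The loop above collects the JAR trace between 45 s and 55 s and compares its median to the baseline median. An equivalent, vectorized sketch with a boolean mask (all arrays below are made-up stand-ins for times_0, jm and bm):

import numpy as np

# made-up stand-ins for the spectrogram time axis, JAR trace and baseline trace
times_0 = np.linspace(0, 200, 4001)
jm = np.sin(2 * np.pi * 0.01 * times_0)
bm = np.zeros(500)

j = jm[(times_0 > 45) & (times_0 < 55)]   # same selection as the explicit loop
r = np.median(j) - np.median(bm)          # response measure used in the script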
@@ -13,7 +13,7 @@ from jar_functions import import_data_eigen
from jar_functions import get_new_zero_crossings
from scipy.signal import savgol_filter

base_path = 'D:\\jar_project\\JAR\\eigenmannia'
base_path = 'D:\\jar_project\\JAR\\eigenmannia\\deltaf'

identifier = ['2013eigen13', '2015eigen16', '2015eigen17', '2015eigen19', '2020eigen22', '2020eigen32']

20  gain_fit.py
@@ -28,17 +28,21 @@ for ID in identifier:
amf = np.load('amf_%s.npy' %ID)
gain = np.load('gain_%s.npy' %ID)

'''fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, gain, 'o')
ax.set_yscale('log')
ax.set_xscale('log')
plt.show()
'''
sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
print('tau:', sinv[0])
tau.append(sinv[0])
f_cutoff = 1 / (2*np.pi*sinv[0])
print('f_cutoff:', f_cutoff)
f_c.append(f_cutoff)
# which time constant is this? what about the second one?

fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, gain, 'o')
amff = np.logspace(-3, 0, 200)
ax.plot(amff, gain_curve_fit(amff, *sinv))
ax.set_yscale('log')
ax.set_xscale('log')
plt.show()

# which time constant is this? what about the second one? --> more likely the second time constant, even though the values are so small?
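For reference, the cutoff frequency printed above follows from the fitted time constant via the first-order low-pass relation f_cutoff = 1 / (2*pi*tau). A quick check with a made-up value standing in for sinv[0]:

import numpy as np

tau_fit = 2.0                          # s, made-up example value for the fitted tau
f_cutoff = 1 / (2 * np.pi * tau_fit)   # ~0.08 Hz
print('f_cutoff:', f_cutoff)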
@@ -13,7 +13,7 @@ def sin_response(t, f, p, A):
    r_sin = A*np.sin(2*np.pi*t*f + p)
    return r_sin

def gain_curve_fit(tau, alpha, amf):
def gain_curve_fit(amf, tau, alpha):
    gain = alpha / np.sqrt(1 + (2*np.pi*amf*tau)**2)
    return gain
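The signature change above matters because scipy.optimize.curve_fit passes the independent variable as the first argument and the fit parameters after it. A minimal sketch under that assumption (the data values are synthetic, generated from tau = 2 s, alpha = 1):

import numpy as np
from scipy.optimize import curve_fit

def gain_curve_fit(amf, tau, alpha):
    # first-order low-pass gain; amf (the x values) must come first for curve_fit
    return alpha / np.sqrt(1 + (2 * np.pi * amf * tau) ** 2)

amf = np.array([0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1.0])
gain = gain_curve_fit(amf, 2.0, 1.0)           # synthetic "measurements"

sinv, sinc = curve_fit(gain_curve_fit, amf, gain)
print('tau:', sinv[0], 'alpha:', sinv[1])      # recovers ~2.0 and ~1.0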
12  notes
@@ -1,15 +1,19 @@
+ fit for the gain curves: via tau = 1/(cutoff_f * 2 * pi) --> how do I determine cutoff_f?
+ look at Natalie's eigenmannia data with + / - delta f to see whether there are differences
+ put size/weight/dominance/temp into a csv, split it up and link it to the ID, or use pandas;
extract the EOD base frequency, scatter plot against cutoff frequency, ...
+ extract the cutoff frequencies and plot them against gain_all, then sort out points so that they are uniformly
distributed, in order to show how Metzen & Chacron arrived at their result (hopefully);
use absolute values of cutoff and tau for this, since we take the square root in the formula
+ put the specgram of pre_data next to the specgram of data to see whether it is an analysis error or an error in import_data

long term:
- separate file with a script to display the finished data; use the fit code only for data processing
- presentation: specgram --> extracted JAR trace on top --> filtering --> fit and data shown together, all of this for different frequencies
+ add size/possibly weight, extract the EOD base frequency and assign it --> how can this be done quickly and not manually?
- make a list of the fishes' properties (dominance/size), measurement variables (temp/conductivity), EODf and possibly amplitude, so that these can be plotted
- run different nffts, possibly on another computer, to see whether there are differences

- phase in degrees: phase % (2*pi) - modulo 2*pi (see the sketch after this list)

(
- kick out traces with too high RMS: possibly kick out only a single trace if the RMS is too high only there
- 2019lepto27/30: 27 - 0.05Hz (7-27-af, first dat with len(dat)=1), 30 - 0.001Hz (7-30-ah with 0.005 instead of the intended 0.001Hz --> missing)
)
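A tiny sketch of the phase note in the list above (the phase value is made up):

import numpy as np

phase = -7.5                       # rad, made-up fitted phase
wrapped = phase % (2 * np.pi)      # modulo 2*pi, maps into [0, 2*pi)
phase_deg = np.degrees(wrapped)    # phase in degrees
print(phase_deg)                   # ~290 degrees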
@@ -7,7 +7,7 @@ from IPython import embed
identifier = ['2013eigen13', '2015eigen16', '2015eigen17', '2015eigen19', '2020eigen22', '2020eigen32']

for ID in identifier:
    res_df = np.load('res_df_%s_new.npy' %ID)
    res_df = np.load('res_df_%s.npy' %ID)

    mres = []
    mdf = []

11  sin_all.py
@@ -40,16 +40,21 @@ identifier = ['2018lepto4',
amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]

all = []
new_all = []
for ident in identifier:
    data = np.load('gain_%s.npy' %ident)
    max = np.max(data)
    new_data = data / max
    all.append(data)
    new_all.append(new_data)

av = avgNestedLists(all)
new_av = avgNestedLists(new_all)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, av, 'o')
ax.plot(amf, av, 'o', label = 'not normed')
ax.plot(amf, new_av, 'o', label = 'normed')
ax.legend()
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_title('gaincurve_average_allfish')
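A small sketch of the normalization added above: each fish's gain curve is divided by its own maximum before averaging. The curves below are invented, and np.mean stands in for avgNestedLists, which presumably exists to handle curves of unequal length; note that 'all' and 'max' in the script shadow Python builtins, so different names are used here:

import numpy as np

gain_curves = [np.array([2.0, 1.4, 0.9]), np.array([0.8, 0.6, 0.3])]   # invented gain curves

normed = [g / np.max(g) for g in gain_curves]   # each curve peaks at 1 after norming
new_av = np.mean(normed, axis=0)                # works when all curves have equal length
print(new_av)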
@@ -22,7 +22,7 @@ from jar_functions import average
from jar_functions import import_data
from jar_functions import import_amfreq

base_path = 'D:\\jar_project\\JAR\\sin\\2019lepto27'
base_path = 'D:\\jar_project\\JAR\\eigenmannia\\sin\\2015eigen8'

time_all = []
freq_all = []
@@ -38,15 +38,12 @@ for idx, dataset in enumerate(os.listdir(base_path)):
#print(datapath)

data, pre_data, dt = import_data(datapath)
embed()

nfft = 2**17

for d, dat in enumerate(data):
    if len(dat) == 1:
        print(datapath)
        embed()
    else:
        continue

file_name = []
ID = []
@@ -62,7 +59,7 @@ for idx, dataset in enumerate(os.listdir(base_path)):
file_name.append(ID[0])

amfreq = import_amfreq(datapath)
#print(amfreq)
print(amfreq)
file_name.append(str(amfreq))

file_name.append(str(d))
@@ -97,7 +94,9 @@ for idx, dataset in enumerate(os.listdir(base_path)):
jm = jar4 - np.mean(jar4) # data we take
cut_times = times[:len(jar4)]

# plt.imshow(spec4, cmap='jet', origin='lower', extent=(times[0], times[-1], lim0, lim1), aspect='auto', vmin=-80, vmax=-10)
plt.imshow(spec4, cmap='jet', origin='lower', extent=(times[0], times[-1], lim0, lim1), aspect='auto', vmin=-80, vmax=-10)
plt.plot(cut_times, jar4)
plt.show()

# save data
#np.save('%s time' % file_name, cut_times)
@@ -89,7 +89,7 @@ for idx, dataset in enumerate(datasets):

# fit for each trace
#plt.plot(ct_arr[ct_arr < dm], step_response(ct_arr[ct_arr < dm], *sv), label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))
#plt.plot(ft, step_response(ft, *sv), color='orange', label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))
plt.plot(ft, step_response(ft, *sv), color='orange', label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))

print('fish: a1, a2, tau1, tau2', values)
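step_response itself is not part of this diff; a plausible double-exponential form consistent with the fit parameters a1, a2, tau1, tau2 would be the following (an assumption, not necessarily the repository's implementation):

import numpy as np

def step_response(t, a1, a2, tau1, tau2):
    # assumed double-exponential step response; the actual jar_functions
    # implementation is not shown in this commit
    return a1 * (1 - np.exp(-t / tau1)) + a2 * (1 - np.exp(-t / tau2))

ft = np.linspace(0, 100, 200)      # made-up time axis
values = (1.0, 0.5, 5.0, 50.0)     # made-up a1, a2, tau1, tau2
print('fish: a1, a2, tau1, tau2', values)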