02.09
This commit is contained in:
parent
21d613222c
commit
fb14104325
70
eigenmannia_jar.py
Normal file
@@ -0,0 +1,70 @@
import matplotlib.pyplot as plt
import numpy as np
import os
import nix_helpers as nh
from IPython import embed
#from tqdm import tqdm
from jar_functions import parse_stimuli_dat
from jar_functions import norm_function_eigen
from jar_functions import mean_noise_cut_eigen
from jar_functions import get_time_zeros

base_path = 'D:\\jar_project\\JAR\\eigenmannia\\2015eigen16'

datasets = ['2020-07-08-aa', '2020-07-08-as']

response = []
deltaf = []

for dataset in os.listdir(base_path):
    datapath = os.path.join(base_path, dataset, '%s.nix' % dataset)
    print(datapath)
    stimuli_dat = os.path.join(base_path, dataset, 'manualjar-eod.dat')

    df, duration = parse_stimuli_dat(stimuli_dat)
    dur = int(duration[0][0:2])  # duration in seconds from the first two characters
    print(df)

    time, eod = nh.read_eod(datapath, duration=2000)

    zeropoints = get_time_zeros(time, eod, threshold=np.max(eod) * 0.1)

    # instantaneous EOD frequency from the intervals between zero crossings
    frequencies = 1 / np.diff(zeropoints)

    # norm, base, jar = norm_function_eigen(frequencies, zeropoints[:-1], onset_point=(dur - dur)+10, offset_point=dur+10)  # dm-dm only works if onset = 0 sec
    # cf, ct = mean_noise_cut_eigen(frequencies, zeropoints[:-1], n=200)

    # smooth the frequency trace with a 101-point running average
    window = np.ones(101) / 101
    freq = np.convolve(frequencies, window, mode='same')

    '''
    # plt.plot(ct, cf)
    plt.plot(zeropoints[:-1], freq)
    plt.ylabel('EOD_frequency [Hz]')
    plt.xlabel('time [s]')
    plt.xlim(1, 140)
    plt.ylim(np.median(freq) - 10, np.median(freq) + 10)
    plt.title('JAR_deltaf_%s' % deltaf)
    plt.show()
    '''

    # JAR response: median frequency during the stimulus window (20-80 s)
    # minus the median baseline frequency (first 20 s)
    j = []
    for idx, i in enumerate(zeropoints[:-1]):
        if 20 < i < 80:
            j.append(freq[idx])

    b = []
    for idx, i in enumerate(zeropoints[:-1]):
        if i < 20:
            b.append(freq[idx])

    r = np.median(j) - np.median(b)
    response.append(r)
    deltaf.append(df[0])

res_df1 = sorted(zip(deltaf[:32], response[:32]))
res_df2 = sorted(zip(deltaf[32:], response[32:]))
res_df = sorted(zip(deltaf, response))

np.save('res_df1', res_df1)
np.save('res_df2', res_df2)
np.save('res_df', res_df)
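As a sanity check for the zero-crossing frequency estimate used above, a minimal synthetic example (assuming jar_functions is importable; none of this is part of the commit): a pure sine of known frequency should be recovered by get_time_zeros plus np.diff.

import numpy as np
from jar_functions import get_time_zeros

# synthetic 500 Hz 'EOD', sampled at 20 kHz for one second
t = np.arange(0.0, 1.0, 1.0 / 20000.0)
eod = np.sin(2.0 * np.pi * 500.0 * t)

# one threshold crossing per period, so interval -> frequency
zp = get_time_zeros(t, eod, threshold=np.max(eod) * 0.1)
print(np.median(1.0 / np.diff(zp)))  # close to 500 Hz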
jar_functions.py
@@ -84,6 +84,25 @@ def parse_infodataset(dataset_name):
        identifier.append((l.split(':')[-1].strip()))
    return identifier

+def parse_stimuli_dat(dataset_name):
+    assert os.path.exists(dataset_name)  # make sure the data file exists
+    f = open(dataset_name, 'r')  # open the given data file
+    lines = f.readlines()  # read all lines
+    f.close()  # close the file handle
+
+    deltaf = []
+    duration = []
+
+    for i in range(len(lines)):
+        l = lines[i].strip()  # strip leading/trailing whitespace
+        if "#" in l and "Delta f" in l:
+            ll = (l.split(':')[-1].strip())
+            deltaf.append(float(ll.split('.')[0]))
+        if '#' in l and 'duration' in l:
+            duration.append((l.split(':')[-1].strip()))
+
+    return deltaf, duration
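For reference, a sketch of the kind of stimulus file parse_stimuli_dat expects; the exact header layout is an assumption for illustration, only the 'Delta f' and 'duration' keys are taken from the code.

from jar_functions import parse_stimuli_dat

# hypothetical manualjar-eod.dat header lines:
#   # Delta f: -10.0Hz
#   # duration: 60s
df, duration = parse_stimuli_dat('manualjar-eod.dat')
print(df)        # [-10.0]
print(duration)  # ['60s']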

def mean_traces(start, stop, timespan, frequencies, time):
    minimumt = min([len(time[k]) for k in range(len(time))])
@@ -98,7 +117,17 @@ def mean_traces(start, stop, timespan, frequencies, time):
    mf = np.mean(frequency, axis=0)
    return mf, tnew

-def mean_noise_cut(frequencies, time, n):
+def mean_noise_cut_eigen(frequencies, time, n):
    cutf = []
    cutt = []
    for k in np.arange(0, len(frequencies), n):
        t = time[k]
        f = np.mean(frequencies[k:k+n])
        cutf.append(f)
        cutt.append(t)
    return cutf, cutt

+def mean_noise_cut(frequencies, n):
+    cutf = np.zeros(len(frequencies))
+    for k in range(0, len(frequencies) - n):
+        kk = int(k)
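The renamed mean_noise_cut_eigen is a block average: one mean per block of n samples, so its output is shorter than the input by a factor of n, while the running mean used in eigenmannia_jar.py keeps one value per sample. A minimal comparison on invented data:

import numpy as np
from jar_functions import mean_noise_cut_eigen

x = 50.0 + np.random.randn(1000)   # noisy trace around 50
t = np.arange(len(x)) * 0.01

cutf, cutt = mean_noise_cut_eigen(x, t, n=100)            # 10 block means
smooth = np.convolve(x, np.ones(100) / 100, mode='same')  # still 1000 samples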
@@ -125,6 +154,20 @@ def norm_function(f, t, onset_point, offset_point):
        normed = ground / jar
        norm.append(normed)

    return norm, base, jar

+def norm_function_eigen(f, t, onset_point, offset_point):
+    onset_end = onset_point - 10
+    offset_start = offset_point - 10
+
+    # baseline: median frequency in the 10 s before stimulus onset
+    base = np.median(f[(t >= onset_end) & (t < onset_point)])
+
+    ground = f - base
+
+    # JAR size: median frequency shift in the 10 s before stimulus offset
+    jar = np.median(ground[(t >= offset_start) & (t < offset_point)])
+
+    norm = ground / jar
+    return norm
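A small worked example for norm_function_eigen (all numbers invented): with a 600 Hz baseline and a trace that has shifted by 6 Hz just before stimulus offset, base is 600, jar is 6, and the normed trace runs from about 0 to about 1.

import numpy as np
from jar_functions import norm_function_eigen

t = np.linspace(0.0, 70.0, 7000)                          # onset at 10 s, offset at 70 s
f = 600.0 + 6.0 * np.clip((t - 10.0) / 50.0, 0.0, 1.0)    # ramp from 600 to 606 Hz

norm = norm_function_eigen(f, t, onset_point=10, offset_point=70)
# norm is ~0 during the baseline and ~1 in the 10 s before offset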

def base_eod(frequencies, time, onset_point):
@@ -147,6 +190,34 @@ def JAR_eod(frequencies, time, offset_point):

    return jar_eod


+def get_time_zeros(time, ampl, threshold=0.0):
+    """
+    Determines the times of the zero crossings of the EOD trace.
+    param time: time axis of the recording
+    param eod: EOD trace of the recording
+    return zeropoints: list of zero crossings of the EOD trace
+    """
+    # Xaver's stuff
+    new_time = time[:-1]
+    if len(new_time) != (len(ampl[:-1]) | len(ampl[1:])):
+        new_time = time[:-2]
+    # downward threshold crossings between consecutive samples
+    zeropoints = new_time[(ampl[:-1] >= threshold) & (ampl[1:] < threshold)]
+    dx = np.mean(np.diff(new_time))
+
+    for index in range(len(zeropoints)):  # refine each crossing by linear interpolation
+        zeit_index = int(zeropoints[index] / dx)
+        if ampl[zeit_index] < threshold:
+            dy = ampl[zeit_index + 1] - ampl[zeit_index]
+        else:
+            dy = ampl[zeit_index] - ampl[zeit_index - 1]
+        m = dy / dx
+        x = (threshold - ampl[zeit_index]) / m
+        zeropoints[index] += x
+    return zeropoints
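The loop above refines each crossing with the line through the two samples around it: with slope m = dy/dx, the exact threshold time lies x = (threshold - ampl[i]) / m after sample i. A quick numeric check with invented values:

dx = 0.0001            # 10 kHz sampling
a0, a1 = 0.02, -0.03   # samples just before and after a downward crossing
threshold = 0.0

m = (a1 - a0) / dx         # slope of the connecting line
x = (threshold - a0) / m   # offset of the exact crossing after a0
print(x)                   # 4e-05 s, i.e. 40% of one sample interval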


def sort_values(values):
    a = values[:2]
    tau = np.array(sorted(values[2:], reverse=False))
13
notes
Normal file
@@ -0,0 +1,13 @@
- kick out traces with too high RMS: possibly drop only a single trace if only its RMS is too high
- look at 2019lepto27/30 again, a gain point is missing
- fit for the gain curves: via tau = 1/(2*pi*cutoff_f) --> how do I determine cutoff_f? (see the sketch below)
- look at Natalie's data on eigenmannia with + / - delta f to check for differences
- run different nffts on another machine, possibly to spot differences

long term:
- separate file with a script to display the finished data, use the fit code for data processing only
- presentation: specgram --> extracted JAR trace on top --> filtering --> fit and data plotted together, all of that for different frequencies
- add size/possibly weight, extract and assign EOD base frequency
- make a list of fish properties (dominance/size), measurement variables (temp/conductivity), EODf and possibly amplitude, so these can be plotted

- phase in degrees
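One way to answer the cutoff_f question above, assuming the gain curve behaves like a first-order low-pass, G(f) = G0 / sqrt(1 + (2*pi*f*tau)^2): read off the frequency where the gain has dropped to G0/sqrt(2), then tau = 1/(2*pi*cutoff_f). A sketch using the per-fish arrays saved by the fitting script (file names follow its np.save calls; the fish ID is just an example):

import numpy as np

gain = np.load('gain_2018lepto4.npy')   # per-fish gain curve
amf = np.load('amf2018lepto4.npy')      # matching envelope frequencies

g0 = np.max(gain)
below = amf[gain <= g0 / np.sqrt(2)]    # frequencies past the -3 dB point
cutoff_f = below[0]                     # first one, assuming gain falls with f
tau = 1.0 / (2.0 * np.pi * cutoff_f)
print(cutoff_f, tau)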
50
plot_eigenmannia_jar.py
Normal file
@@ -0,0 +1,50 @@
import matplotlib.pyplot as plt
import numpy as np
import os
import nix_helpers as nh
from IPython import embed

res_df1 = np.load('res_df1.npy')
res_df2 = np.load('res_df2.npy')
res_df = np.load('res_df.npy')

mres = []
mdf = []

currf = None
idxlist = []

for i, d in enumerate(res_df):
    if currf is None or currf == d[0]:
        currf = d[0]
        idxlist.append(i)
    else:  # currf != d[0]: average the completed group
        meanres = []  # lists to average over
        meandf = []
        for x in idxlist:
            meanres.append(res_df[x][1])
            meandf.append(res_df[x][0])
        meanedres = np.mean(meanres)
        meaneddf = np.mean(meandf)
        mres.append(meanedres)
        mdf.append(meaneddf)
        currf = d[0]  # reset for the next group
        idxlist = [i]

# average the last group, which the loop above never closes
meanres = []
meandf = []
for y in idxlist:
    meanres.append(res_df[y][1])
    meandf.append(res_df[y][0])
meanedres = np.mean(meanres)
meaneddf = np.mean(meandf)
mres.append(meanedres)
mdf.append(meaneddf)

plt.plot(mdf, mres, 'o')
plt.xlabel('deltaf [Hz]')
plt.ylabel('JAR_response [Hz]')
plt.axhline(0, color='grey', lw=1)
plt.axvline(0, color='grey', lw=1)
plt.title('JAR_response_to_deltaf_eigenmannia')
plt.show()
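The grouping loop above relies on res_df being sorted so that equal delta f values are adjacent; since the saved array is a plain (deltaf, response) table, the same means can be had more compactly, e.g. with np.unique (a sketch, not part of the commit):

import numpy as np

res_df = np.load('res_df.npy')
dfs, responses = res_df[:, 0], res_df[:, 1]

mdf = np.unique(dfs)
mres = [np.mean(responses[dfs == d]) for d in mdf]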
74
sin_all.py
Normal file
@@ -0,0 +1,74 @@
import matplotlib.pyplot as plt
import numpy as np
import pylab
from IPython import embed

def avgNestedLists(nested_vals):
    """
    Averages a 2-D array and returns a 1-D array of all of the columns
    averaged together, regardless of their dimensions.
    """
    output = []
    maximum = 0
    for lst in nested_vals:
        if len(lst) > maximum:
            maximum = len(lst)
    for index in range(maximum):  # go through each index of longest list
        temp = []
        for lst in nested_vals:  # go through each list
            if index < len(lst):  # if not an index error
                temp.append(lst[index])
        output.append(np.nanmean(temp))
    return output
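In other words, avgNestedLists is a column-wise mean over ragged rows: each output element averages whatever rows are long enough to contribute. A quick check with invented values:

print(avgNestedLists([[1.0, 2.0, 3.0], [3.0, 4.0]]))
# [2.0, 3.0, 3.0] -- the last column has a single contributor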
identifier = ['2018lepto4',
              '2018lepto1',
              '2018lepto5',
              '2018lepto76',
              '2018lepto98',
              '2019lepto03',
              '2019lepto24',
              '2019lepto27',
              '2019lepto30',
              '2020lepto04',
              '2020lepto06',
              '2020lepto16',
              '2020lepto19',
              '2020lepto20'
              ]

amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]

all = []

for ident in identifier:
    data = np.load('gain_%s.npy' % ident)
    all.append(data)

av = avgNestedLists(all)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(amf, av, 'o')
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_title('gaincurve_average_allfish')
ax.set_ylabel('gain [Hz/(mV/cm)]')
ax.set_xlabel('envelope_frequency [Hz]')
plt.show()
embed()

'''
len_arr = []
for a in all:
    len_arr.append(len(a))
max_a = np.max(len_arr)

arr = np.ma.empty((1, len(all), max_a))
arr.mask = True

for x, a in enumerate(all):
    arr[:a.shape[0], x] = arr[0][x]
embed()

print(arr.mean(axis=2))
embed()
'''
@@ -10,20 +10,20 @@ from jar_functions import mean_noise_cut

def take_second(elem):  # function for taking the names out of files
    return elem[1]

-identifier = ['2018lepto1',
-              '2018lepto4',
-              '2018lepto5',
-              '2018lepto76',
-              '2018lepto98',
-              '2019lepto03',
-              '2019lepto24',
-              '2019lepto27',
+identifier = [#'2018lepto1',
+              #'2018lepto4',
+              #'2018lepto5',
+              #'2018lepto76',
+              #'2018lepto98',
+              #'2019lepto03',
+              #'2019lepto24',
+              #'2019lepto27',
               '2019lepto30',
-              '2020lepto04',
-              '2020lepto06',
-              '2020lepto16',
-              '2020lepto19',
-              '2020lepto20'
+              #'2020lepto04',
+              #'2020lepto06',
+              #'2020lepto16',
+              #'2020lepto19',
+              #'2020lepto20'
               ]
for ident in identifier:

@@ -56,10 +56,10 @@ for ident in identifier:
        n = int(1/float(d[1])/dt)
        cutf = mean_noise_cut(jm, time, n = n)
        cutt = time
-       # plt.plot(time, jm-cutf, label='cut amfreq')
-       # plt.plot(time, jm, label='spec')
-       # plt.legend()
-       # plt.show()
+       #plt.plot(time, jm-cutf, label='cut amfreq')
+       #plt.plot(time, jm, label='spec')
+       #plt.legend()
+       #plt.show()

        sinv, sinc = curve_fit(sin_response, time, jm - cutf, [float(d[1]), 2, 0.5])  # fitting
        print('frequency, phaseshift, amplitude:', sinv)
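sin_response is imported from jar_functions but its definition is not part of this diff; judging from the fit label 'f=%f, p=%.2f, A=%.2f' and the start values [amfreq, 2, 0.5], it is presumably a plain sine parameterized by frequency, phase, and amplitude. A hedged sketch of such a function:

import numpy as np

def sin_response(t, f, p, A):
    # sine with frequency f [Hz], phase p [rad], and amplitude A
    return A * np.sin(2 * np.pi * f * t + p)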
@@ -73,12 +73,11 @@ for ident in identifier:

        # root mean square
        RMS = np.sqrt(np.mean(((jm - cutf) - sin_response(cutt, sinv[0], sinv[1], sinv[2]))**2))

        thresh = A / np.sqrt(2)

-       # plt.plot(time, sin_response(time, *sinv), label='fit: f=%f, p=%.2f, A=%.2f' % tuple(sinv))
-       # plt.legend()
-       # plt.show()
+       #plt.plot(time, sin_response(time, *sinv), label='fit: f=%f, p=%.2f, A=%.2f' % tuple(sinv))
+       #plt.legend()
+       #plt.show()

        # mean over same amfreqs for phase and gain
        if currf is None or currf == d[1]:
@@ -99,6 +98,7 @@ for ident in identifier:
            meanedp = np.mean(meanp)
            meanedrms = np.mean(meanrms)
+           meanedthresh = np.mean(meanthresh)

            mgain.append(meanedf)
            mphaseshift.append(meanedp)
            rootmeansquare.append(meanedrms)
@@ -118,6 +118,7 @@ for ident in identifier:
            meanedp = np.mean(meanp)
            meanedrms = np.mean(meanrms)
+           meanedthresh = np.mean(meanthresh)

            mgain.append(meanedf)
            mphaseshift.append(meanedp)
            rootmeansquare.append(meanedrms)
@@ -128,9 +129,18 @@ for ident in identifier:
        G = np.max(mgain) / np.sqrt(1 + (2*((np.pi*f*3.14)**2)))
        predict.append(G)

+   # as arrays
+   mgain_arr = np.array(mgain)
+   amfreq_arr = np.array(amfreq)
+   rootmeansquare_arr = np.array(rootmeansquare)
+   threshold_arr = np.array(threshold)
+
+   # condition needed to be fulfilled: RMS < threshold or RMS < mean(RMS)
+   idx_arr = (rootmeansquare_arr < threshold_arr) | (rootmeansquare_arr < np.mean(rootmeansquare_arr))

    fig = plt.figure()
    ax0 = fig.add_subplot(2, 1, 1)
-   ax0.plot(amfreq, mgain(RMS<threshold), 'o')
+   ax0.plot(amfreq_arr[idx_arr], mgain_arr[idx_arr], 'o')
    #ax0.plot(amf, predict)
    ax0.set_yscale('log')
    ax0.set_xscale('log')
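For reference, the prediction above appears to aim at the gain of a first-order low-pass, G(f) = Gmax / sqrt(1 + (2*pi*f*tau)^2); note that the committed line multiplies np.pi by a literal 3.14 as well, which looks like a duplicated pi. A hedged version of the formula (tau assumed, not taken from the diff):

import numpy as np

def lowpass_gain(f, g_max, tau):
    # first-order low-pass; gain is g_max/sqrt(2) at f = 1/(2*pi*tau)
    return g_max / np.sqrt(1.0 + (2.0 * np.pi * f * tau) ** 2)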
@@ -142,26 +152,14 @@ for ident in identifier:
    ax1 = fig.add_subplot(2, 1, 2, sharex = ax0)
    ax1.plot(amfreq, threshold, 'o-', label = 'threshold', color = 'b')
    ax1.set_xscale('log')
-   ax1.plot(amfreq, rootmeansquare, 'o-', label = 'RMS', color = 'orange')
+   ax1.plot(amfreq, rootmeansquare, 'o-', label = 'RMS', color ='orange')
    ax1.set_xscale('log')
    ax1.set_xlabel('envelope_frequency [Hz]')
-   ax1.set_ylabel('RMS')
+   ax1.set_ylabel('RMS [Hz]')
    plt.legend()
    pylab.show()

-   embed()
-
-   # on eigenmannia: each fish with amplitudes at max and min modulation depth and possibly 1 or 2 in between
-   # and then measure at the AM frequencies of apteronotus for 15 Hz delta f
-
-   # kick out traces with too high RMS: gain/rms < ... (?)
-   # save gain curves as arrays
-   # look at Natalie's data on eigenmannia with + / - delta f to check for differences
-   # run different nffts on another machine, possibly to see differences?
-
-   # long term: separate file with a script to display the finished data, use the code here for data processing only
-   # presentation: specgram --> extracted JAR trace on top --> filtering --> fit and data plotted together, all of that for different frequencies
-   # make a list of fish properties (dominance/size) and measurement variables (temp/conductivity) so these can be plotted
+   np.save('gain_%s' %ident, mgain_arr[idx_arr])
+   np.save('amf%s' %ident, amfreq_arr[idx_arr])

    # phase in degrees
    embed()
@@ -6,8 +6,9 @@ import os
import glob
import IPython
import numpy as np
-import DataLoader as dl
+#import DataLoader as dl
from IPython import embed
+from tqdm import tqdm
from scipy.optimize import curve_fit
from jar_functions import step_response
from jar_functions import sin_response
@@ -21,7 +22,7 @@ from jar_functions import average
from jar_functions import import_data
from jar_functions import import_amfreq

-base_path = 'D:\\jar_project\\JAR\\sin\\2019lepto03'
+base_path = 'D:\\jar_project\\JAR\\sin\\2019lepto30'

time_all = []
freq_all = []
@@ -30,7 +31,7 @@ amfrequencies = []
gains = []
files = []

-for idx, dataset in enumerate(os.listdir(base_path)):
+for idx, dataset in tqdm(enumerate(os.listdir(base_path))):
    if dataset == 'prerecordings':
        continue
    datapath = os.path.join(base_path, dataset, '%s.nix' % dataset)
@@ -42,7 +43,15 @@ for idx, dataset in enumerate(os.listdir(base_path)):
    for d, dat in enumerate(data):
        if len(dat) == 1:
            continue
+       '''if len(data) > 0:
+           print(datapath)
+
+           amfreq = import_amfreq(datapath)
+           print(amfreq)
+           continue
+       else:
+           embed()
+       '''
        file_name = []
        ID = []

@@ -57,7 +66,7 @@ for idx, dataset in enumerate(os.listdir(base_path)):
        file_name.append(ID[0])

        amfreq = import_amfreq(datapath)
-       print(amfreq)
+       #print(amfreq)
        file_name.append(str(amfreq))

        file_name.append(str(d))
@@ -100,8 +109,5 @@ for idx, dataset in enumerate(os.listdir(base_path)):

    # save filenames for this fish
    np.save('%s files' %ID[0], files)
-   print(ID)
-   embed()

    # running average over one AM period?

    embed()
@@ -72,7 +72,7 @@ for idx, dataset in enumerate(datasets):

    mf, tnew = mean_traces(start, stop, timespan, norm, time)  # maybe fixed timespan/sampling rate

-   cf, ct = mean_noise_cut(mf, tnew, n=1250)
+   cf, ct = mean_noise_cut(mf, n=1250)

    cf_arr = np.array(cf)
    ct_arr = np.array(ct)