xaver 2020-07-09 18:15:08 +02:00
parent cf22967f48
commit d4f534316a
2 changed files with 142 additions and 11 deletions


@@ -49,9 +49,9 @@ def parse_dataset(dataset_name):
if '#Key' in l:
if len(time) != 0: #empty on the first pass, so this branch is skipped then
- times.append(time) #from the 2nd loop on, store the previous loop's
- amplitudes.append(ampl) #times, amplitudes and frequencies
- frequencies.append(freq)
+ times.append(np.array(time)) #from the 2nd loop on, store the previous loop's
+ amplitudes.append(np.array(ampl)) #times, amplitudes and frequencies
+ frequencies.append(np.array(freq))
time = [] #reset the temporary lists with the same names as before
ampl = [] #so they are empty again
@@ -63,9 +63,9 @@ def parse_dataset(dataset_name):
freq.append(temporary[1])
ampl.append(temporary[2])
- times.append(time) #append the data of the last loop to the final lists,
- amplitudes.append(ampl) #since the temporary lists get overwritten
- frequencies.append(freq)
+ times.append(np.array(time)) #append the data of the last loop to the final lists,
+ amplitudes.append(np.array(ampl)) #since the temporary lists get overwritten
+ frequencies.append(np.array(freq))
return frequencies, times, amplitudes, eodfs, deltafs, stimulusfs, duration, pause #output of the function
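The switch from plain lists to np.array(...) is what makes the boolean masking in mean_traces (next hunk) possible: element-wise comparison and boolean indexing only work on NumPy arrays, not on Python lists. A minimal, standalone illustration (not part of the commit):

import numpy as np

freq_list = [-20.0, -3.0, 1.5, 2.0]   # plain Python list, as appended before this commit
freq_arr = np.array(freq_list)        # NumPy array, as appended after this commit

print(freq_arr[freq_arr > -5])        # [-3.   1.5  2. ] -- the mask works on arrays
# freq_list[freq_list > -5]           # TypeError: '>' not supported between list and int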
@@ -83,16 +83,17 @@ def parse_infodataset(dataset_name):
return identifier
def mean_traces(start, stop, timespan, frequencies, time):
- minimumt = min(len(time[0]), len(time[1]))
+ minimumt = min([len(time[k]) for k in range(len(time))])
# new time axis with the desired timespan, since it varies between loops
tnew = np.arange(start, stop, timespan / minimumt) # 3rd argument is the step size:
# the total measuring time divided by the total number of data points
# interpolation
- f0 = np.interp(tnew, time[0], frequencies[0])
- f1 = np.interp(tnew, time[1], frequencies[1])
- #new array with frequencies of both loops as two lists put together
- frequency = np.array([f0, f1])
+ frequency = np.zeros((len(frequencies), len(tnew)))
+ for k in range(len(frequencies)):
+     ft = time[k][frequencies[k] > -5]
+     fn = frequencies[k][frequencies[k] > -5]
+     frequency[k,:] = np.interp(tnew, ft, fn)
#average over the loops along axis 0 (axis=1 would average along the time axis)
mf = np.mean(frequency, axis=0)
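Putting the hunks together, mean_traces now interpolates every loop (not just the first two) onto a common time axis and discards samples at or below the -5 cutoff before averaging. A sketch of the whole function after this commit, assembled from the hunks above; the (mf, tnew) return value is assumed from how step_response.py below unpacks it:

import numpy as np

def mean_traces(start, stop, timespan, frequencies, time):
    # the shortest trace sets the number of points on the common time axis
    minimumt = min([len(time[k]) for k in range(len(time))])
    # new time axis with the desired timespan (step = total time / number of points)
    tnew = np.arange(start, stop, timespan / minimumt)
    # interpolate every loop onto tnew, dropping samples below the -5 cutoff
    frequency = np.zeros((len(frequencies), len(tnew)))
    for k in range(len(frequencies)):
        ft = time[k][frequencies[k] > -5]
        fn = frequencies[k][frequencies[k] > -5]
        frequency[k, :] = np.interp(tnew, ft, fn)
    # average over all loops (axis 0 is the loop dimension)
    mf = np.mean(frequency, axis=0)
    return mf, tnew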

step_response.py (new file, +130 lines)

@@ -0,0 +1,130 @@
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.colors import ListedColormap, LinearSegmentedColormap
import os
import glob
import IPython
import numpy as np
from IPython import embed
from scipy.optimize import curve_fit
from jar_functions import parse_dataset
from jar_functions import parse_infodataset
from jar_functions import mean_traces
from jar_functions import mean_noise_cut
from jar_functions import norm_function
from jar_functions import step_response
from jar_functions import sort_values
base_path = 'D:\\jar_project\\JAR'
#exclude: -5Hz delta f, 19-aa, 22-ae, 22-ad (?)
datasets = [#'2020-06-19-aa', #-5Hz delta f, horrible fit
#(os.path.join('D:\\jar_project\\JAR\\2020-06-19-ab\\beats-eod.dat')), #-5Hz delta f, bad fit
#(os.path.join('D:\\jar_project\\JAR\\2020-06-22-aa\\beats-eod.dat')), #-5Hz delta f, bad fit
#(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ab\\beats-eod.dat')), #-5Hz delta f, bad fit
'2020-06-22-ac', #-15Hz delta f, good fit
#(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ad\\beats-eod.dat')), #-15Hz delta f, horrible fit
#(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ae\\beats-eod.dat')), #-15Hz delta f, maxfev way too high so horrible
#(os.path.join('D:\\jar_project\\JAR\\2020-06-22-af\\beats-eod.dat')) #-15Hz delta f, good fit
]
#dat = glob.glob('D:\\jar_project\\JAR\\2020*\\beats-eod.dat')
#infodat = glob.glob('D:\\jar_project\\JAR\\2020*\\info.dat')
infodatasets = [(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ac\\info.dat')),
(os.path.join('D:\\jar_project\\JAR\\2020-06-22-af\\info.dat'))]
time_all = []
freq_all = []
ID = []
col = ['darkgrey', 'lightgrey']
for infodataset in infodatasets:
    i = parse_infodataset(infodataset)
    identifier = i[0]
    ID.append(identifier)
for idx, dataset in enumerate(datasets):
    dataset = os.path.join(base_path, dataset, 'beats-eod.dat')
    #input of the function
    frequency, time, amplitude, eodf, deltaf, stimulusf, duration, pause = parse_dataset(dataset)
    dm = np.mean(duration)
    pm = np.mean(pause)
    timespan = dm + pm
    start = np.mean([t[0] for t in time])
    stop = np.mean([t[-1] for t in time])
    mf, tnew = mean_traces(start, stop, timespan, frequency, time) # maybe fixed timespan/sampling rate
    #for i in range(len(mf)):
    cf, ct = mean_noise_cut(mf, tnew, n=1250)
    cf_arr = np.array(cf)
    ct_arr = np.array(ct)
    norm = norm_function(cf_arr, ct_arr, onset_point = dm - dm, offset_point = dm) #dm-dm only works if onset = 0 sec
    freq_all.append(norm)
    time_all.append(ct_arr)
    #plt.plot(ct_arr, norm) #, color = col[idx], label='fish=%s' % ID[idx])
    # fit function
    ft = ct_arr[ct_arr < dm]
    fn = norm[ct_arr < dm]
    ft = ft[fn > -5]
    fn = fn[fn > -5]
    sv, sc = curve_fit(step_response, ft, fn, [1.0, 1.0, 5.0, 50.0], bounds=(0.0, np.inf)) #step_values and step_cov
    # sorted a and tau
    values = sort_values(sv)
    '''
    # fit for each trace
    plt.plot(ct_arr[ct_arr < 100], step_response(ct_arr, *sv)[ct_arr < 100], color='orange',
             label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))
    '''
    print('fish: a1, a2, tau1, tau2', values)
# average over all fish
mf_all, tnew_all = mean_traces(start, stop, timespan, freq_all, time_all)
plt.plot(tnew_all, mf_all, color = 'b', label = 'average', ls = 'dashed')
# fit for average
sv_all, sc_all = curve_fit(step_response, tnew_all[tnew_all < dm], mf_all[tnew_all < dm], bounds=(0.0, np.inf)) #step_values and step_cov
values_all = sort_values(sv_all)
plt.plot(tnew_all[tnew_all < 100], step_response(tnew_all, *sv_all)[tnew_all < 100], color='orange',
label='average_fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values_all))
print('average: a1, a2, tau1, tau2', values_all)
const_line = plt.axhline(y = 0.632)
stimulus_duration = plt.hlines(y = -0.25, xmin = 0, xmax = 100, color = 'r', label = 'stimulus_duration')
base_line = plt.axhline(y = 0, color = 'black', ls = 'dotted', linewidth = '1')
plt.xlim([-10,220])
plt.xlabel('time [s]')
plt.ylabel('rel. JAR magnitude')
plt.title('relative JAR')
plt.legend(loc = 'lower right')
plt.savefig('relative JAR')
plt.show()
embed()
# normalize before mean_traces so that the cutoff of -5 applies
# averaging over all fish is actually not needed for now, move it out
# only measure at -15 Hz
# measure at different amplitudes (see Tim)
# ask Natalie whether she can measure at different amplitudes (see Tim)
# Questions:
# how to choose the offset point if the start is not at 0 sec? via the time data point? or simply always onset at 0..?
# how do I zip the ID list together with the plot (for a for loop)?
# which stimulus intensity?
# start/stop/timespan ok?
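step_response itself is imported from jar_functions and is not part of this commit. Judging from the fit parameters (a1, a2, tau1, tau2), the non-negative bounds, and the 0.632 reference line, it is presumably a sum of two saturating exponentials; the sketch below is an assumption, not the committed implementation:

import numpy as np

def step_response(t, a1, a2, tau1, tau2):
    # double-exponential onset: each term reaches 63.2 % of its amplitude at t = tau,
    # which is what plt.axhline(y = 0.632) marks for the normalized JAR
    return a1 * (1.0 - np.exp(-t / tau1)) + a2 * (1.0 - np.exp(-t / tau2))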