commit f2d539ae60 (parent d4f534316a)
xaver, 2020-07-10 13:36:56 +02:00
2 changed files with 50 additions and 53 deletions

File 1 of 2:

@@ -1,6 +1,8 @@
 import os  # compatibility with windows
 from IPython import embed
 import numpy as np
+import matplotlib.pyplot as plt
+from scipy.optimize import curve_fit
 
 def step_response(t, a1, a2, tau1, tau2):
     r_step = (a1*(1 - np.exp(-t/tau1))) + (a2*(1 - np.exp(-t/tau2)))
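The two added imports support the plotting and curve fitting that the new average() helper further down in this file performs. As a quick orientation, here is a minimal, self-contained sketch of fitting this double-exponential step response with scipy's curve_fit; the synthetic trace and its parameter values are illustrative only and not part of the commit.

import numpy as np
from scipy.optimize import curve_fit

def step_response(t, a1, a2, tau1, tau2):
    return (a1*(1 - np.exp(-t/tau1))) + (a2*(1 - np.exp(-t/tau2)))

# synthetic trace with true parameters a1=0.4, a2=0.6, tau1=2 s, tau2=30 s plus noise
t = np.arange(0.0, 100.0, 0.1)
y = step_response(t, 0.4, 0.6, 2.0, 30.0) + np.random.normal(0.0, 0.02, len(t))

# non-negative bounds keep amplitudes and time constants physical, as in the script
popt, pcov = curve_fit(step_response, t, y, p0=[1.0, 1.0, 5.0, 50.0], bounds=(0.0, np.inf))
print('fitted a1, a2, tau1, tau2:', popt)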
@@ -79,25 +81,21 @@ def parse_infodataset(dataset_name):
     for i in range(len(lines)):
         l = lines[i].strip()  # all lines of textdata, exclude all empty lines (empty () default for spacebar)
         if "#" in l and "Identifier" in l:
-            identifier.append((l.split(':')[-1].strip()[1:12]))
+            identifier.append((l.split(':')[-1].strip()))
     return identifier
 
 def mean_traces(start, stop, timespan, frequencies, time):
     minimumt = min([len(time[k]) for k in range(len(time))])
-    # new time with wished timespan because it varies for different loops
-    tnew = np.arange(start, stop, timespan / minimumt)  # 3rd input is step spacing:
-    # in case complete measuring time divided by total number of datapoints
-    # interpolation
-    # new array with frequencies of both loops as two lists put together
+    tnew = np.arange(start, stop, timespan / minimumt)
     frequency = np.zeros((len(frequencies), len(tnew)))
     for k in range(len(frequencies)):
         ft = time[k][frequencies[k] > -5]
         fn = frequencies[k][frequencies[k] > -5]
         frequency[k,:] = np.interp(tnew, ft, fn)
-    # mean over both loops along axis 0 (averaged in y direction; axis=1 would average over the x axis)
     mf = np.mean(frequency, axis=0)
     return mf, tnew
 
 def mean_noise_cut(frequencies, time, n):
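For reference, the identifier parsing now keeps the full string after "Identifier:" instead of the hard-coded [1:12] slice, which silently dropped the opening quote and truncated longer names. A small illustration with a made-up info.dat line (the exact metadata format is an assumption here, not taken from the commit):

# hypothetical metadata line as it might appear in info.dat
l = '#       Identifier: "2020-albi-01"'

old = l.split(':')[-1].strip()[1:12]   # old behaviour: drops the opening quote and cuts after 11 characters
new = l.split(':')[-1].strip()         # new behaviour: keep the full (still quoted) identifier
print(repr(old), '|', repr(new))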
@@ -110,17 +108,21 @@ def mean_noise_cut(frequencies, time, n):
         cutt.append(t)
     return cutf, cutt
 
-def norm_function(cf_arr, ct_arr, onset_point, offset_point):
+def norm_function(f, t, onset_point, offset_point):
     onset_end = onset_point - 10
     offset_start = offset_point - 10
-    base = np.median(cf_arr[(ct_arr >= onset_end) & (ct_arr < onset_point)])
-    ground = cf_arr - base
-    jar = np.median(ground[(ct_arr >= offset_start) & (ct_arr < offset_point)])
-    norm = ground / jar
+    norm = []
+    for j in range(len(f)):
+        base = np.median(f[j][(t[j] >= onset_end) & (t[j] < onset_point)])
+        ground = f[j] - base
+        jar = np.median(ground[(t[j] >= offset_start) & (t[j] < offset_point)])
+        normed = ground / jar
+        norm.append(normed)
     return norm
 
 def base_eod(frequencies, time, onset_point):
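The rewritten norm_function now takes lists of per-loop frequency and time arrays instead of a single cut trace, and returns one baseline-subtracted, JAR-normalized trace per loop; this matches the reordering in the analysis script below, where normalization now happens before averaging. A minimal usage sketch with dummy data (shapes and numbers are illustrative only, and jar_functions is assumed to be importable):

import numpy as np
from jar_functions import norm_function

# two dummy loops: frequency rises from a ~500 Hz baseline by ~10 Hz after stimulus onset at t = 0
t = [np.arange(-10.0, 200.0, 0.5) for _ in range(2)]
f = [500.0 + 10.0*(1 - np.exp(-np.maximum(tt, 0.0)/20.0)) for tt in t]

# onset at 0 s, offset (end of the delta-f step) at 100 s, as with dm in the script
norm = norm_function(f, t, onset_point=0, offset_point=100)
print(len(norm), norm[0][-1])  # one normalized trace per loop, plateau close to 1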
@@ -150,7 +152,20 @@ def sort_values(values):
     values_flat = values.flatten()
     return values_flat
 
+def average(freq_all, time_all, start, stop, timespan, dm):
+    mf_all, tnew_all = mean_traces(start, stop, timespan, freq_all, time_all)
+
+    plt.plot(tnew_all, mf_all, color='b', label='average', ls='dashed')
+
+    # fit for average
+    sv_all, sc_all = curve_fit(step_response, tnew_all[tnew_all < dm], mf_all[tnew_all < dm],
+                               bounds=(0.0, np.inf))  # step_values and step_cov
+
+    values_all = sort_values(sv_all)
+
+    plt.plot(tnew_all[tnew_all < 100], step_response(tnew_all, *sv_all)[tnew_all < 100], color='g',
+             label='average_fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values_all))
+
+    print('average: a1, a2, tau1, tau2', values_all)
+
+    return mf_all, tnew_all, values_all
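This new helper bundles the across-fish averaging, the fit of the averaged trace, and the corresponding plotting that previously lived inline in the analysis script. The intended call site (currently commented out at the end of the second file) is roughly:

# freq_all, time_all: one normalized frequency trace and time axis per fish
# start, stop, timespan: common time base; dm: stimulus duration in seconds
mf_all, tnew_all, values_all = average(freq_all, time_all, start, stop, timespan, dm)

Note that the helper draws the dashed average trace and the green fit onto the current matplotlib figure as a side effect, so it has to be called before the figure is finalized.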

File 2 of 2:

@@ -14,33 +14,33 @@ from jar_functions import mean_noise_cut
 from jar_functions import norm_function
 from jar_functions import step_response
 from jar_functions import sort_values
+from jar_functions import average
 
 base_path = 'D:\\jar_project\\JAR'
 
 # not: -5Hz delta f, 19-aa, 22-ae, 22-ad (?)
 datasets = [#'2020-06-19-aa', #-5Hz delta f, horrible fit
-            #(os.path.join('D:\\jar_project\\JAR\\2020-06-19-ab\\beats-eod.dat')), #-5Hz delta f, bad fit
-            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-aa\\beats-eod.dat')), #-5Hz delta f, bad fit
-            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ab\\beats-eod.dat')), #-5Hz delta f, bad fit
+            #'2020-06-19-ab', #-5Hz delta f, bad fit
+            #'2020-06-22-aa', #-5Hz delta f, bad fit
+            #'2020-06-22-ab', #-5Hz delta f, bad fit
             '2020-06-22-ac', #-15Hz delta f, good fit
-            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ad\\beats-eod.dat')), #-15Hz delta f, horrible fit
-            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ae\\beats-eod.dat')), #-15Hz delta f, maxfev way too high so horrible
-            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-af\\beats-eod.dat')) #-15Hz delta f, good fit
+            '2020-06-22-ad', #-15Hz delta f, horrible fit
+            '2020-06-22-ae', #-15Hz delta f, maxfev way too high so horrible
+            '2020-06-22-af' #-15Hz delta f, good fit
             ]
 
 #dat = glob.glob('D:\\jar_project\\JAR\\2020*\\beats-eod.dat')
 #infodat = glob.glob('D:\\jar_project\\JAR\\2020*\\info.dat')
 
-infodatasets = [(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ac\\info.dat')),
-                (os.path.join('D:\\jar_project\\JAR\\2020-06-22-af\\info.dat'))]
 
 time_all = []
 freq_all = []
 
 ID = []
-col = ['darkgrey', 'lightgrey']
-labels = zip(ID, datasets)
+col = ['dimgrey', 'grey', 'darkgrey', 'silver', 'lightgrey', 'gainsboro', 'whitesmoke']
 
-for infodataset in infodatasets:
+for infodataset in datasets:
+    infodataset = os.path.join(base_path, infodataset, 'info.dat')
     i = parse_infodataset(infodataset)
     identifier = i[0]
     ID.append(identifier)
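With the dataset list reduced to bare recording names, full paths are now assembled on demand from base_path, so the same list can serve both the metadata and, presumably, the trace files hinted at by the commented-out glob lines. The pattern, sketched with a hypothetical use of the beats-eod.dat traces:

import os

base_path = 'D:\\jar_project\\JAR'
datasets = ['2020-06-22-ac', '2020-06-22-af']  # recording names as in the list above

for dataset in datasets:
    info_path = os.path.join(base_path, dataset, 'info.dat')        # as in the new loop above
    trace_path = os.path.join(base_path, dataset, 'beats-eod.dat')  # assumed analogue for the trace files
    print(info_path, trace_path)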
@@ -55,54 +55,36 @@ for idx, dataset in enumerate(datasets):
     timespan = dm + pm
     start = np.mean([t[0] for t in time])
     stop = np.mean([t[-1] for t in time])
 
-    mf , tnew = mean_traces(start, stop, timespan, frequency, time)  # maybe fixed timespan/sampling rate
-    #for i in range(len(mf)):
+    norm = norm_function(frequency, time, onset_point=dm - dm, offset_point=dm)  # dm-dm only works if onset = 0 sec
+    mf , tnew = mean_traces(start, stop, timespan, norm, time)  # maybe fixed timespan/sampling rate
 
     cf, ct = mean_noise_cut(mf, tnew, n=1250)
 
     cf_arr = np.array(cf)
     ct_arr = np.array(ct)
 
-    norm = norm_function(cf_arr, ct_arr, onset_point = dm - dm, offset_point = dm)  # dm-dm only works if onset = 0 sec
-    freq_all.append(norm)
+    freq_all.append(cf_arr)
     time_all.append(ct_arr)
 
-    #plt.plot(ct_arr, norm) #, color = col[idx], label='fish=%s' % ID[idx])
-
-    # fit function
-    ft = ct_arr[ct_arr < dm]
-    fn = norm[ct_arr < dm]
-    ft = ft[fn > -5]
-    fn = fn[fn > -5]
-
-    sv, sc = curve_fit(step_response, ft, fn, [1.0, 1.0, 5.0, 50.0], bounds=(0.0, np.inf))  # step_values and step_cov
+    plt.plot(ct_arr, cf_arr, color = col[idx], label='fish=%s' % datasets[idx])
+    sv, sc = curve_fit(step_response, ct_arr[ct_arr < dm], cf_arr[ct_arr < dm], [1.0, 1.0, 5.0, 50.0], bounds=(0.0, np.inf))  # step_values and step_cov
 
     # sorted a and tau
     values = sort_values(sv)
 
-    '''
     # fit for each trace
-    plt.plot(ct_arr[ct_arr < 100], step_response(ct_arr, *sv)[ct_arr < 100], color='orange',
-             label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))
-    '''
+    plt.plot(ct_arr[ct_arr < dm], step_response(ct_arr[ct_arr < dm], *sv), label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))
+    #plt.plot(ft, step_response(ft, *sv), color='orange', label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))
 
     print('fish: a1, a2, tau1, tau2', values)
 
-# average over all fish
-mf_all , tnew_all = mean_traces(start, stop, timespan, freq_all, time_all)
-plt.plot(tnew_all, mf_all, color = 'b', label = 'average', ls = 'dashed')
-
-# fit for average
-sv_all, sc_all = curve_fit(step_response, tnew_all[tnew_all < dm], mf_all[tnew_all < dm], bounds=(0.0, np.inf))  # step_values and step_cov
-values_all = sort_values(sv_all)
-
-plt.plot(tnew_all[tnew_all < 100], step_response(tnew_all, *sv_all)[tnew_all < 100], color='orange',
-         label='average_fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values_all))
-print('average: a1, a2, tau1, tau2', values_all)
+'''# average over all fish
+mf_all, tnew_all, values_all = average(freq_all, time_all, start, stop, timespan, dm)
+'''
 
 const_line = plt.axhline(y = 0.632)
 stimulus_duration = plt.hlines(y = -0.25, xmin = 0, xmax = 100, color = 'r', label = 'stimulus_duration')
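For context on the reference line: y = 0.632 is the level a normalized single-exponential rise reaches after exactly one time constant, since 1 - exp(-1) ≈ 0.632, so the axhline presumably serves as a visual read-off for tau on the normalized JAR traces.

import numpy as np
print(1 - np.exp(-1))  # 0.632..., the fraction reached at t = tau for a single-exponential rise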