import matplotlib.pyplot as plt
import os
import glob
import numpy as np
from IPython import embed
from scipy.optimize import curve_fit
from jar_functions import parse_dataset
from jar_functions import parse_infodataset
from jar_functions import mean_traces
from jar_functions import mean_noise_cut
from jar_functions import norm_function
from jar_functions import step_response
from jar_functions import sort_values

# not: 19-aa, 22-ae, 22-ad (?)

datasets = [#(os.path.join('D:\\jar_project\\JAR\\2020-06-19-aa\\beats-eod.dat')),  # -5 Hz delta f, horrible fit
            #(os.path.join('D:\\jar_project\\JAR\\2020-06-19-ab\\beats-eod.dat')),  # -5 Hz delta f, bad fit
            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-aa\\beats-eod.dat')),  # -5 Hz delta f, bad fit
            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ab\\beats-eod.dat')),  # -5 Hz delta f, bad fit
            (os.path.join('D:\\jar_project\\JAR\\2020-06-22-ac\\beats-eod.dat')),   # -15 Hz delta f, good fit
            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ad\\beats-eod.dat')),  # -15 Hz delta f, horrible fit
            #(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ae\\beats-eod.dat')),  # -15 Hz delta f, maxfev way too high, so horrible fit
            (os.path.join('D:\\jar_project\\JAR\\2020-06-22-af\\beats-eod.dat'))]   # -15 Hz delta f, good fit

#np.array(sorted(glob.glob('D:\\jar_project\\JAR\\2020*\\beats-eod.dat')))

infodatasets = [(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ac\\info.dat')),
                (os.path.join('D:\\jar_project\\JAR\\2020-06-22-af\\info.dat'))]

time_all = []
freq_all = []

# fish identifiers, one per info.dat
ID = []
for infodataset in infodatasets:
    i = parse_infodataset(infodataset)
    identifier = i[0]
    ID.append(identifier)

for idx, dataset in enumerate(datasets):
    # input of the function
    frequency, time, amplitude, eodf, deltaf, stimulusf, duration, pause = parse_dataset(dataset)
    dm = np.mean(duration)
    pm = np.mean(pause)
    timespan = dm + pm
    start = (time[0][0] + time[1][0]) / 2
    stop = (time[0][-1] + time[1][-1]) / 2

    # average the frequency traces of the repeated trials onto a common time axis
    mf, tnew = mean_traces(start, stop, timespan, frequency, time)  # maybe fixed timespan/sampling rate

    # smooth the mean trace
    cf, ct = mean_noise_cut(mf, tnew, n=1250)

    cf_arr = np.array(cf)
    ct_arr = np.array(ct)

    # normalize to the frequency excursion between stimulus onset and offset
    norm = norm_function(cf_arr, ct_arr, onset_point=dm - dm, offset_point=dm)  # dm - dm only works if onset = 0 s

    freq_all.append(norm.tolist())
    time_all.append(ct_arr)

    plt.plot(ct_arr, norm, color='grey', label='fish=%s' % ID[idx])

    # fit function: only fit the part while the stimulus is on (t < dm)
    sv, sc = curve_fit(step_response, ct_arr[ct_arr < dm], norm[ct_arr < dm])  # step_values and step_cov

    # sorted a and tau
    values = sort_values(sv)

    '''
    plt.plot(ct_arr[ct_arr < 100], step_response(ct_arr, *sv)[ct_arr < 100], color='orange',
             label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values))
    '''
    print('fish: a1, a2, tau1, tau2', values)

# average over all fish (start/stop/timespan taken from the last dataset)
mf_all, tnew_all = mean_traces(start, stop, timespan, freq_all, time_all)

plt.plot(tnew_all, mf_all, color='b', label='average', ls='dashed')

# fit for average
sv_all, sc_all = curve_fit(step_response, tnew_all[tnew_all < dm], mf_all[tnew_all < dm])  # step_values and step_cov

values_all = sort_values(sv_all)

plt.plot(tnew_all[tnew_all < 100], step_response(tnew_all, *sv_all)[tnew_all < 100], color='orange',
         label='average_fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(values_all))

print('average: a1, a2, tau1, tau2', values_all)

const_line = plt.axhline(y=0.632)  # 63.2 % level (1 - 1/e) for reading off the time constant
stimulus_duration = plt.hlines(y=-0.25, xmin=0, xmax=100, color='r', label='stimulus_duration')
base_line = plt.axhline(y=0, color='black', ls='dotted', linewidth=1)
plt.xlim([-10, 220])
plt.xlabel('time [s]')
plt.ylabel('rel. JAR magnitude')
plt.title('relative JAR')
plt.legend(loc='lower right')  # add the legend before saving so it appears in the saved figure
plt.savefig('relative JAR')
plt.show()
embed()

# questions:
# how to set the offset point if the stimulus does not start at 0 s? via a time data point? or simply always onset at 0?
# how do I zip the ID list with the plot (into a single for loop)?
# which stimulus intensity?
# start/stop/timespan ok?
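
# The fitted model step_response() comes from jar_functions and is not shown in this
# script. Judging from the fit labels (a1, a2, tau1, tau2), it is presumably a sum of
# two exponential onsets; below is a minimal sketch of such a model, as an assumption
# for reference only (the real implementation in jar_functions may differ):
def _step_response_sketch(t, a1, a2, tau1, tau2):
    # each term rises from 0 and saturates at a_i with time constant tau_i
    return a1 * (1 - np.exp(-t / tau1)) + a2 * (1 - np.exp(-t / tau2))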