03.07

commit 4022fff994 (parent 9a6d29073e)
@@ -63,14 +63,14 @@ def mean_noise_cut(frequencies, time, n):
         f = np.mean(frequencies[k:k+n])
         cutf.append(f)
         cutt.append(t)
 
     return cutf, cutt
 
-def step_response(t, a1, a2, tau1, tau2):
-    r_step = a1*(1 - np.exp(-t/tau1)) + a2*(1- np.exp(-t/tau2))
-    return r_step
-# plot with manual values for a1, ...
-# also try setting a1 or a2 to zero
 
+def step_response(t, a1, a2, tau1, tau2):
+    r_step = (a1*(1 - np.exp(-t/tau1))) + (a2*(1 - np.exp(-t/tau2)))
+    r_step[t<0] = 0
+    return r_step
+
+
 def base_eod(frequencies, time, onset_point):
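Note: the reworked step_response is a double exponential, r(t) = a1·(1 − exp(−t/tau1)) + a2·(1 − exp(−t/tau2)), clamped to zero before stimulus onset. A minimal sketch of how it behaves; the time axis and parameter values below are made up for illustration:

    import numpy as np

    def step_response(t, a1, a2, tau1, tau2):
        # sum of two saturating exponentials, zero before onset (t < 0)
        r_step = (a1 * (1 - np.exp(-t / tau1))) + (a2 * (1 - np.exp(-t / tau2)))
        r_step[t < 0] = 0
        return r_step

    t = np.linspace(-10, 200, 500)                   # seconds (hypothetical axis)
    r = step_response(t, a1=0.5, a2=0.5, tau1=10.0, tau2=60.0)
    print(r.max())                                   # approaches a1 + a2 for t >> tau2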
@@ -82,6 +82,7 @@ def base_eod(frequencies, time, onset_point):
     base_eod.append(base)
     return base_eod
 
+
 def JAR_eod(frequencies, time, offset_point):
     jar_eod = []
 
@@ -90,4 +91,35 @@ def JAR_eod(frequencies, time, offset_point):
     jar = np.mean(frequencies[(time >= offset_start) & (time < offset_point)])
     jar_eod.append(jar)
 
     return jar_eod
+
+
+def mean_loops(start, stop, timespan, frequencies, time):
+    minimumt = min(len(time[0]), len(time[1]))
+    # new time with the desired timespan, because it varies between loops
+    tnew = np.arange(start, stop, timespan / minimumt)  # 3rd input is the step spacing:
+                                                        # i.e. the complete measuring time divided by the total number of data points
+    # interpolation
+    f0 = np.interp(tnew, time[0], frequencies[0])
+    f1 = np.interp(tnew, time[1], frequencies[1])
+
+    #new array with the frequencies of both loops put together as two lists
+    frequency = np.array([f0, f1])
+
+    #mean over both loops along axis 0 (averaged in the y direction; axis=1 would average along the x axis)
+    mf = np.mean(frequency, axis=0)
+    return mf, tnew
+
+
+def norm_function(cf_arr, ct_arr, onset_point, offset_point):
+    onset_end = onset_point - 10
+    offset_start = offset_point - 10
+
+    base = np.mean(cf_arr[(ct_arr >= onset_end) & (ct_arr < onset_point)])
+
+    ground = cf_arr - base
+
+    jar = np.mean(cf_arr[(ct_arr >= offset_start) & (ct_arr < offset_point)])
+
+    norm = ground / jar
+    return norm
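Note: mean_loops resamples the two stimulus loops onto one common time axis with np.interp and averages them sample by sample; norm_function then subtracts the mean frequency in the 10 s before onset_point and divides by the mean frequency in the 10 s before offset_point. A minimal usage sketch with toy traces; the 8 Hz jump, the loop lengths and the onset/offset values are invented for illustration:

    import numpy as np
    from jar_functions import mean_loops, norm_function

    # two toy loops with slightly different sampling and an 8 Hz frequency jump at t = 60 s
    time = [np.linspace(0, 210, 420), np.linspace(0, 210, 400)]
    frequencies = [700.0 + 8.0 * (tt > 60) for tt in time]

    mf, tnew = mean_loops(0, 210, 210, frequencies, time)             # common axis, loop average
    norm = norm_function(mf, tnew, onset_point=10, offset_point=210)  # baseline-subtracted, scaled
    print(mf.shape, tnew.shape, norm[-1])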
@@ -4,11 +4,14 @@ import glob
 import IPython
 import numpy as np
 from IPython import embed
+from scipy.optimize import curve_fit
 from jar_functions import parse_dataset
 from jar_functions import mean_noise_cut
 from jar_functions import step_response
 from jar_functions import JAR_eod
 from jar_functions import base_eod
+from jar_functions import mean_loops
+from jar_functions import norm_function
 
 
 datasets = [(os.path.join('D:\\jar_project\\JAR\\2020-06-22-ac\\beats-eod.dat'))]
@@ -27,58 +30,30 @@ timespan = 210
 
 for dataset in datasets:
     #input of the function
-    t, f, a, e, d, s= parse_dataset(dataset)
-    minimumt = min(len(t[0]), len(t[1]))
-    # new time with the desired timespan, because it varies between loops
-    tnew = np.arange(start, stop, timespan / minimumt)  # 3rd input is the step spacing:
-                                                        # i.e. the complete measuring time divided by the total number of data points
-    # interpolation
-    f0 = np.interp(tnew, t[0], f[0])
-    f1 = np.interp(tnew, t[1], f[1])
-
-    #new array with the frequencies of both loops put together as two lists
-    frequency = np.array([f0, f1])
-
-    #mean over both loops along axis 0 (averaged in the y direction; axis=1 would average along the x axis)
-    mf = np.mean(frequency, axis=0)
-
-    #appending data
-    eodf.append(e)
-    deltaf.append(d)
-    stimulusf.append(s)
-    amplitude.append(a)
-    frequency_mean.append(mf)
-    time.append(tnew)
-
-"""
-    for a in [0, 1, 2]:
-        for b in [0, 1, 2]:
-            r_step = step_response(t = ct_arr, a1 = a, a2 = b, tau1 = 30, tau2 = 60)
-"""
-
-for i in range(len(frequency_mean)):
-    for n in [100, 500, 1000]:
-        cf, ct = mean_noise_cut(frequency_mean[i], time[i], n=n)
-
-
-        ct_arr = np.array(ct)
+    t, f, a, e, d, s = parse_dataset(dataset)
+    mf , tnew = mean_loops(start, stop, timespan, f, t)
+embed()
+
+for i in range(len(mf)):
+    for n in [500, 1000, 1500]:
+        cf, ct = mean_noise_cut(mf[i], time[i], n=n)
+
         cf_arr = np.array(cf)
+        ct_arr = np.array(ct)
 
-        base = base_eod(cf_arr, ct_arr, onset_point = 0)
-        ground = cf_arr - base
-        jar = JAR_eod(ground, ct_arr, offset_point = 100)
-        norm = ground / jar
+        norm = norm_function(cf_arr, ct_arr, onset_point = 0, offset_point = 100)
 
         plt.plot(ct_arr, norm, label='n=%d' % n)
 
-for n in [1480]:
-    cf, ct = mean_noise_cut(frequency_mean[i], time[i], n=n)
-    ct_arr = np.array(ct)
-    cf_arr = np.array(cf)
-    r_step = step_response(t=ct_arr + 10, a1=0.55, a2=0.89, tau1=11.2, tau2= 280)
-    plt.plot(r_step, label='fit: n=%d' % n)
+        #r_step = step_response(t=ct_arr, a1=0.58, a2=0.47, tau1=11.7, tau2=60)
+        #plt.plot(ct_arr[ct_arr < 100], r_step[ct_arr < 100], label='fit: n=%d' % n)
+
+        step_values, step_cov = curve_fit(step_response, ct_arr[ct_arr < 100], norm [ct_arr < 100])
+
+        plt.plot(ct_arr [ct_arr < 100], step_response(ct_arr, *step_values)[ct_arr < 100], 'r-',  label='fit: a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(step_values))
+        print(step_values)
+const_line = plt.axhline(y=0.632)
 
 'plotting'
 plt.xlim([-10,220])
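Note: the script now fits step_response to the smoothed, normalized trace with scipy.optimize.curve_fit, restricted to t < 100 s, and the horizontal line at 0.632 marks the 1 − 1/e level used to read off a time constant. A minimal sketch of the same fit on synthetic data; the parameter values, noise level and initial guess p0 are assumptions:

    import numpy as np
    from scipy.optimize import curve_fit
    from jar_functions import step_response

    # synthetic "measured" response: known parameters plus a little noise
    t = np.linspace(0, 100, 500)
    noisy = step_response(t, 0.6, 0.4, 12.0, 60.0) + np.random.normal(0, 0.02, t.size)

    p0 = [0.5, 0.5, 10.0, 50.0]                      # rough initial guess (assumed)
    step_values, step_cov = curve_fit(step_response, t, noisy, p0=p0)
    print('a1=%.2f, a2=%.2f, tau1=%.2f, tau2=%.2f' % tuple(step_values))
    # 0.632 = 1 - 1/e: the level a single exponential reaches after one time constant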
@@ -87,7 +62,14 @@ plt.xlabel('time [s]')
 plt.ylabel('rel. JAR magnitude')
 #plt.title('fit_function(a1=0)')
 #plt.savefig('fit_function(a1=0)')
-plt.legend()
+plt.legend(loc = 'lower right')
 plt.show()
 embed()
-# time constant: from second 0 up to 63% of the relative JAR?
+
+# move even more into functions (so the script is basically just reading in functions and plotting)
+# sort time constants into large and small
+# read out the onset duration
+# read the ID from info.dat
+# read in all data with one big for loop (also average over all fish?)
+# check the fit for individual fish
+