commit 5c17008909 (parent 2841457261): 17.08
@@ -102,8 +102,8 @@ def mean_noise_cut(frequencies, time, n):
     cutf = []
     cutt = []
     for k in np.arange(0, len(frequencies), n):
-        t = time[k]
         f = np.mean(frequencies[k:k+n])
+        t = time[k]
         cutf.append(f)
         cutt.append(t)
     return cutf, cutt
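Reviewer note: a minimal toy call of mean_noise_cut as defined in the hunk above, just to make its return values concrete. The numbers are invented for illustration and do not come from the repository.

import numpy as np

frequencies = np.array([600., 601., 602., 650., 651., 652.])
time = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5])

# average every n = 3 samples; one (mean frequency, block start time) pair per block
cutf, cutt = mean_noise_cut(frequencies, time, 3)
# cutf == [601.0, 651.0]
# cutt == [0.0, 0.3]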
@@ -6,11 +6,14 @@ from IPython import embed
 from scipy.optimize import curve_fit
 from jar_functions import sin_response
 
-def take_second(elem):
+def take_second(elem): # function for taking the names out of files
     return elem[1]
 
+predict = []
 
+rootmeansquare = []
+threshold = []
 
 gain = []
 mgain = []
 phaseshift = []
@@ -21,35 +24,38 @@ amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]
 currf = None
 idxlist = []
 
-data = sorted(np.load('files.npy'), key = take_second)
+data = sorted(np.load('files.npy'), key = take_second) # list with filenames in it
 
 for i, d in enumerate(data):
     dd = list(d)
-    jar = np.load('%s.npy' %dd)
-    jm = jar - np.mean(jar)
+    jar = np.load('%s.npy' %dd) # load data for every file name
+    jm = jar - np.mean(jar) # low-pass filtering by subtracting mean
     print(dd)
 
-    time = np.load('time: %s.npy' %dd)
+    time = np.load('%s time.npy' %dd) # time file
 
-    b, a = signal.butter(4, (float(d[1]) / 2) / 10000, 'high', analog=True)
+    b, a = signal.butter(4, (float(d[1]) / 2) / 10000, 'high', analog=True) # high pass filtering so our fit gets a bit better
     y = signal.filtfilt(b, a, jm)
     #plt.plot(time, y)
     #plt.plot(time, jar)
 
-    sinv, sinc = curve_fit(sin_response, time, y, [float(d[1]), 2, 0.5])
+    sinv, sinc = curve_fit(sin_response, time, y, [float(d[1]), 2, 0.5]) # fitting
     print('frequency, phaseshift, amplitude:', sinv)
     plt.show()
     p = np.sqrt(sinv[1]**2)
     A = np.sqrt(sinv[2] ** 2)
     f = float(d[1])
-    phaseshift.append(p)
-    gain.append(A)
-    amfreq.append(f)
 
+    phaseshift.append(np.sqrt(sinv[1]**2))
+    gain.append(np.sqrt(sinv[2]**2))
+    amfreq.append(d[1])
+    # root mean square
 
     Rs = []
     for ix, t in enumerate(time):
         R = (jm[ix] - sin_response(t, float(d[1]), np.sqrt(sinv[1]**2), np.sqrt(sinv[2]**2)))**2
         Rs.append(R)
+    RMS = np.sqrt(np.mean((jm - sin_response(time, sinv[0], sinv[1], sinv[2]))**2))
+    rootmeansquare.append(RMS)
+    thresh = A / np.sqrt(2)
+    threshold.append(thresh)
 
     sigma = sum(Rs)
     rms = np.sqrt((1/len(time)) * sigma)
     #plt.plot(time, sin_response(time, *sinv), label='fit: f=%f, p=%.2f, A=%.2f' % tuple(sinv))
 
 #mean over same amfreqs for phase and gain
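Reviewer note: sin_response is imported from jar_functions and its definition is not part of this diff. Judging from the initial guess [float(d[1]), 2, 0.5] and the print label 'frequency, phaseshift, amplitude', it is presumably a plain sine of the form sketched below. This is an assumption, not the repository's actual definition, and the sign convention of the phase is a guess.

import numpy as np

def sin_response(t, f, p, A):
    # assumed form: sine with AM-frequency f [Hz], phase shift p [rad] and amplitude A
    return A * np.sin(2 * np.pi * f * t + p)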
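Reviewer note: in the hunk above, signal.butter is called with analog=True, but its coefficients are then passed to signal.filtfilt, which performs digital filtering, so the filter that is actually applied does not correspond to the intended analog design. A hedged sketch of the usual digital variant follows; fs is an assumed sampling rate (its value is not visible in this diff) and fc stands for the cutoff (float(d[1]) / 2 in the script).

from scipy import signal

fs = 10000.0                                  # assumed sampling rate in Hz, not confirmed by this diff
fc = 0.05                                     # example cutoff in Hz, i.e. half of a 0.1 Hz AM frequency
b, a = signal.butter(4, fc, 'high', fs=fs)    # digital design; analog=False is the default
# y = signal.filtfilt(b, a, jm)               # zero-phase filtering of the mean-subtracted trace jm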
@@ -80,6 +86,7 @@ meanedp = np.mean(meanp)
 mgain.append(meanedf)
 mphaseshift.append(meanedp)
 
+# predict of gain
 for f in amf:
     G = np.max(mgain) / np.sqrt(1 + (2*((np.pi*f*3.14)**2)))
     predict.append(G)
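Reviewer note: the prediction loop above has the shape of a first-order low-pass gain, G(f) = G_max / sqrt(1 + (2*pi*f*tau)^2). Writing it that way makes the role of the literal 3.14 explicit (a time constant tau in seconds, or possibly pi entered a second time); note that the script's expression, sqrt(1 + 2*(pi*f*3.14)**2), differs from this standard form by a factor of 2 inside the root. A sketch under that assumption, with lowpass_gain, g_max and tau as hypothetical names:

import numpy as np

def lowpass_gain(f, g_max, tau):
    # first-order low-pass gain; tau is the assumed time constant in seconds
    return g_max / np.sqrt(1 + (2 * np.pi * f * tau) ** 2)

amf = [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1]
predict = [lowpass_gain(f, g_max=1.0, tau=3.14) for f in amf]   # g_max and tau are placeholders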
@@ -89,16 +96,24 @@ for f in amf:
 fig = plt.figure()
 ax = fig.add_subplot(1, 1, 1)
 ax.plot(amf, mgain, 'o')
-ax.plot(amf, predict)
+#ax.plot(amf, predict)
 ax.set_yscale('log')
 ax.set_xscale('log')
-ax.set_title('2018lepto98')
+ax.set_title('%s' % data[0][0])
 ax.set_ylabel('gain [Hz/(mV/cm)]')
 ax.set_xlabel('AM-frequency [Hz]')
-#plt.savefig('2018lepto98_gain')
+#plt.savefig('%s gain' % data[0][0])
 pylab.show()
+plt.plot(threshold, label = 'threshold')
+plt.plot(rootmeansquare, label = 'RMS')
+plt.legend()
+plt.show()
+embed()
 
 #phase in degree
 # Q10 / conductivity
 # AM-frequency / envelope-frequency scale title?
 # AM-frequency / envelope-frequency scale title?
 
+# before the fit, additionally filter with 15 Hz so that the AM modulation is filtered out and only the envelope remains.
+# for that, use a running average with window n, where n is determined from the sampling rate and delta f
+# the sampling rate via the overlap must, however, be larger than the sampling rate that remains after filtering with the delta f frequency
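Reviewer note: the (translated) notes above describe smoothing the trace with a roughly 15 Hz running average before fitting, with the window length n derived from the sampling rate; the call mean_noise_cut(jar4, cut_times, 40000 * (1/15)) in the last hunk of this commit matches that plan for a 40 kHz trace. A minimal sketch of that windowing logic, assuming the 40 kHz sampling rate and 15 Hz cutoff taken from the code; running_average is a hypothetical helper, shown as a convolution-based alternative to the block-wise mean_noise_cut:

import numpy as np

samplerate = 40000                  # Hz, as in the mean_noise_cut call below
f_envelope = 15                     # Hz, the cutoff mentioned in the notes above
n = int(samplerate / f_envelope)    # samples per averaging window (about 2667)

def running_average(x, n):
    # moving average over n samples; smooths out the AM carrier, keeps the envelope
    kernel = np.ones(n) / n
    return np.convolve(x, kernel, mode='same')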
@@ -21,23 +21,22 @@ from jar_functions import average
 from jar_functions import import_data
 from jar_functions import import_amfreq
 
-base_path = 'D:\\jar_project\\JAR\\sin\\2018lepto98'
+base_path = 'D:\\jar_project\\JAR\\sin\\2020lepto16'
 
 #not: -5Hz delta f, 19-aa, 22-ae, 22-ad (?)
 
 #dat = glob.glob('D:\\jar_project\\JAR\\2020*\\beats-eod.dat')
 #infodat = glob.glob('D:\\jar_project\\JAR\\2020*\\info.dat')
-datasets = ['2020-07-21-ak',
-            '2020-07-21-al',
-            '2020-07-21-am',
-            '2020-07-21-an',
-            '2020-07-21-ao',
-            '2020-07-22-ai',
-            '2020-07-22-aj',
-            '2020-07-22-ak',
-            '2020-07-22-al',
-            '2020-07-22-am',
-            ]
+datasets = ['2020-08-04-ab',
+            '2020-08-04-ac',
+            '2020-08-04-ad',
+            '2020-08-04-ae',
+            '2020-08-04-af',
+            '2020-08-05-ab',
+            '2020-08-05-ac',
+            '2020-08-05-ad',
+            '2020-08-05-ae',
+            '2020-08-05-af']
 
 time_all = []
 freq_all = []
@@ -106,11 +105,16 @@ for idx, dataset in enumerate(datasets):
     freq4 = freqs[ix0:ix1]
     jar4 = freq4[np.argmax(spec4, axis=0)]  # all freqs at max specs over axis 0
     jar = jar4 / 4
     jm = jar4 - np.mean(jar4)
+    jm = jar4 - np.mean(jar4)  # data we take
 
     cut_times = times[:len(jar4)]
-    np.save('time: %s.npy' % file_name, cut_times)
-    np.save('%s.npy' % file_name, jar4)
+    np.save('%s time' % file_name, cut_times)
+    np.save('%s' % file_name, jar4)
 
+    cutf, cutt = mean_noise_cut(jar4, cut_times, 40000 * (1/15))
+    plt.plot(cutt, cutf)
+    plt.show()
+    embed()
 
     #plt.plot(cut_times, jm, '-k')
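Reviewer note: spec4, freqs and times come from earlier in this file and are not part of the diff. The line jar4 = freq4[np.argmax(spec4, axis=0)] picks, for every time bin of a spectrogram, the frequency with the highest power, which yields the frequency trace that is then saved and smoothed. A self-contained sketch of that peak-tracking pattern with invented inputs:

import numpy as np
from scipy import signal

fs = 1000                                   # Hz, illustrative sampling rate
t = np.arange(0, 2, 1 / fs)
x = np.sin(2 * np.pi * 100 * t)             # toy 100 Hz tone

freqs, times, spec = signal.spectrogram(x, fs=fs, nperseg=256, noverlap=128)

# for every time bin, take the frequency with maximal spectral power
peak_freq = freqs[np.argmax(spec, axis=0)]  # same pattern as jar4 = freq4[np.argmax(spec4, axis=0)]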