# jar_project/eigenmannia_jar_savgol.py
# (file header from web scrape: 2020-09-10 17:38:04 +02:00, 91 lines, 3.4 KiB, Python)

import matplotlib.pyplot as plt
import numpy as np
import os
import nix_helpers as nh
from IPython import embed
from matplotlib.mlab import specgram
# from tqdm import tqdm
from jar_functions import parse_stimuli_dat
from jar_functions import norm_function_eigen
from jar_functions import mean_noise_cut_eigen
from jar_functions import get_time_zeros
from jar_functions import import_data_eigen
from jar_functions import get_new_zero_crossings
from scipy.signal import savgol_filter
# Root folder of the eigenmannia delta-f recordings (Windows layout).
base_path = 'D:\\jar_project\\JAR\\eigenmannia\\deltaf'

# One entry per fish; each has a subdirectory per recorded dataset.
identifier = ['2013eigen13', '2015eigen16', '2015eigen17', '2015eigen19', '2020eigen22', '2020eigen32']

# Collected across datasets: JAR response magnitude and the stimulus delta-f.
response = []
deltaf = []

for ID in identifier:
    for dataset in os.listdir(os.path.join(base_path, ID)):
        datapath = os.path.join(base_path, ID, dataset, '%s.nix' % dataset)
        print(datapath)

        # Stimulus metadata: delta-f and stimulus duration for this dataset.
        stimuli_dat = os.path.join(base_path, ID, dataset, 'manualjar-eod.dat')
        df, duration = parse_stimuli_dat(stimuli_dat)
        print(df)

        data, pre_data, dt = import_data_eigen(datapath)

        # --- stimulus phase: extract the JAR trace from a spectrogram ---
        nfft = 2 ** 17
        spec_0, freqs_0, times_0 = specgram(data[0], Fs=1 / dt, detrend='mean',
                                            NFFT=nfft, noverlap=nfft * 0.95)
        dbspec_0 = 10.0 * np.log10(spec_0)  # power in dB
        power_0 = dbspec_0[:, 25]           # single time slice used to locate the EOD peak

        # Restrict the peak search to the plausible EOD fundamental band.
        band = (freqs_0 > 200) & (freqs_0 < 1000)
        fish_p_0 = power_0[band]
        fish_f_0 = freqs_0[band]

        eodf_0 = fish_f_0[np.argmax(fish_p_0)]  # fundamental EOD frequency
        eodf4_0 = eodf_0 * 4                    # track the 4th harmonic instead

        # +/- 20 Hz window around the 4th harmonic, converted back to bin indices.
        df_0 = freqs_0[1] - freqs_0[0]
        ix0_0 = int(np.floor((eodf4_0 - 20) / df_0))
        ix1_0 = int(np.ceil((eodf4_0 + 20) / df_0))

        spec4_0 = dbspec_0[ix0_0:ix1_0, :]
        freq4_0 = freqs_0[ix0_0:ix1_0]
        jar4 = freq4_0[np.argmax(spec4_0, axis=0)]  # peak frequency per time bin
        jm = jar4 - np.mean(jar4)                   # mean-subtracted JAR trace

        # --- baseline phase: EOD frequency from zero crossings of the raw trace ---
        # read_eod is used instead of importing via the 'manual jar' tag, so the
        # stimulus onset should really be at 10 s.
        time, eod = nh.read_eod(datapath, duration=2000)

        # ~1 ms Savitzky-Golay window (in samples), 5th-order polynomial.
        wl = int(0.001 / (time[1] - time[0]) + 1)
        filtered_eod = savgol_filter(eod, wl, 5, deriv=0, delta=time[1] - time[0])

        zero_line_threshold = np.mean(eod)
        time_zero, zero_idx = get_new_zero_crossings(time, filtered_eod, threshold=zero_line_threshold)

        eod_interval = np.diff(time_zero)
        time_zero = time_zero[:-1]
        frequencies = 1 / eod_interval  # instantaneous EOD frequency per cycle

        # Stimulus-phase response: JAR trace between 45 s and 55 s.
        j = [jm[idx] for idx, t in enumerate(times_0) if 45 < t < 55]

        # Baseline: instantaneous frequencies before stimulus onset (< 10 s).
        b = [frequencies[idx] for idx, t in enumerate(time_zero) if t < 10]
        bm = b - np.mean(b)

        # Response = median JAR shift relative to the (mean-subtracted) baseline.
        r = np.median(j) - np.median(bm)

        # BUGFIX: response/deltaf were never appended, so res_df was always
        # saved empty; store the per-dataset result here.
        response.append(r)
        deltaf.append(df)

        # embed()  # leftover debug breakpoint removed so the script runs unattended

    # NOTE(review): response/deltaf accumulate across fish IDs but the file is
    # saved per ID, so later files also contain earlier fish — confirm whether
    # the lists should be reset at the top of the ID loop.
    res_df = sorted(zip(deltaf, response))
    np.save('res_df_%s_new' % ID, res_df)
# Problem: the raw data (data, pre_data) cannot be savgol-filtered because of their 1D-array structure.
# I only get them via specgram in the form of freq/time traces, which can no longer be savgol-filtered.
# However, I could still simply read the response out of jar4, although it would then be less filtered.