diff --git a/do_calc_activity.py b/do_calc_activity.py
new file mode 100644
index 0000000..56a0714
--- /dev/null
+++ b/do_calc_activity.py
@@ -0,0 +1,251 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib.dates as mdates
+import matplotlib.colors as mcolors
+import matplotlib.gridspec as gridspec
+from IPython import embed
+from scipy import stats
+import math
+import os
+import datetime
+import helper_functions as hf
+from params import *
+import itertools
+
+
+
+def fill_cbf_plot(matrix, x, t, i, time_point1, time_point2):
+    matrix[i].extend(x[(t >= time_point1) & (t <= time_point2)])
+    return matrix
+
+
+def plot_changes_for_swim_through(x, y, dur, f):
+    plt.plot(x, y)
+    plt.title('dur: %.1f, %.1f' % (dur, f))
+    ax = plt.gca()
+    ax.beautimechannelaxis()  # project-specific axis helpers, not standard matplotlib
+    ax.timeaxis()
+    plt.show()
+
+
+def chances_per_bin_filler(time_bins, x, t, cbf, cbf_counter):
+    for idx, tb in enumerate(time_bins[:-1]):
+        xx = x[(t >= tb) & (t <= time_bins[idx + 1])]
+        if np.any(xx):
+            cbf[0, idx, cbf_counter] = len(xx[xx < 0])
+            cbf[1, idx, cbf_counter] = len(xx[xx > 0])
+            cbf[2, idx, cbf_counter] = len(xx[xx == 0])
+
+    cbf_counter += 1
+
+    return cbf, cbf_counter
+
+def make_trajectory(x, y, kernel):
+    smooth_y = np.convolve(y, kernel, 'valid')
+    smooth_x = x[int(np.ceil(kernel_size / 2)):-int(np.floor(kernel_size / 2) - 1)]
+
+    # interpolate onto the common time grid (kernel_size, flat_x and x_tickses are module globals set in __main__)
+    fish_x = flat_x[np.where(flat_x == x_tickses[0])[0][0]:np.where(flat_x == x_tickses[-1])[0][0] + 1]
+    trajectory = np.round(np.interp(fish_x, smooth_x, smooth_y))
+
+    return fish_x, trajectory
+
+if __name__ == '__main__':
+    ###################################################################################################################
+    # parameters and variables
+    # plot params
+    inch = 2.54  # cm per inch
+    save_path = '../../thesis/Figures/Results/'
+    kernel_size = 100
+    kernel = np.ones(kernel_size) / kernel_size
+    c = 0
+    cc = 1
+
+    all_first = []
+    all_last = []
+
+    # time_bins in seconds
+    time_factor = 60*60
+    time_bins = np.arange(0, 24 * time_factor + 1, bin_len)
+    time_bins_recordings = np.zeros(len(time_bins)-1)
+
+    # changes per bin of bin_len seconds for all fish
+    cbf = np.full([3, len(time_bins) - 1, 166], np.nan)
+    cbf_counter = 0
+
+    # percent roaming
+    pr = []
+    swim_trough_list = []
+    trial_durs = []
+    transit_data = []
+
+    # average speed
+    speed = []
+
+    # trajectory
+    trajectories = []
+    trajec_x = []
+    std_trajecs = []
+
+    # for frequency vs. activity
+    f = []
+    eigen = []
+    males = []
+    females = []
+    n = []
+    all_changes = []
+    ###################################################################################################################
+    # load data
+    ###################################################################################################################
+    # load all the data of one day
+    for index, filename_idx in enumerate([0, 1, 2, 3]):
+        filename = sorted(os.listdir('../data/'))[filename_idx]
+        aifl = np.load('../data/' + filename + '/aifl2.npy', allow_pickle=True)
+
+        all_xticks = np.load('../data/' + filename + '/all_xtickses.npy', allow_pickle=True)
+        all_Ctime_v = np.load('../data/' + filename + '/all_Ctime_v.npy', allow_pickle=True)
+        all_max_ch_means = np.load('../data/' + filename + '/all_max_ch.npy', allow_pickle=True)
+        power_means = np.load('../data/' + filename + '/power_means.npy', allow_pickle=True)
+        freq = np.load('../data/' + filename + '/fish_freq_q10.npy', allow_pickle=True)
+        all_hms = np.load('../data/' + filename + '/all_hms.npy', allow_pickle=True)
+        names = np.load('../data/' + filename + '/fish_species.npy', allow_pickle=True)
+        ###############################################################################################################
+        # lists
+        flat_x = np.unique(np.hstack(all_xticks))
+
+        # in which bins does this recording lie; fill time_bins_recordings
+        flat_hms = np.unique(np.hstack(all_hms))
+        for idx, tb in enumerate(time_bins[:-1]):
+            if np.any((flat_hms >= tb) & (flat_hms <= time_bins[idx + 1])):
+                time_bins_recordings[idx] += 1
+
+        ###############################################################################################################
+        # analysis of the changes and trajectories
+        for fish_number in range(len(power_means)):
+            if power_means[fish_number] >= -90.0:
+
+                x_tickses = all_xticks[fish_number]
+                max_ch_mean = all_max_ch_means[fish_number]
+
+                # make trajectory
+                fish_x, trajectory = make_trajectory(x_tickses, max_ch_mean, kernel)
+
+                # trial duration in minutes
+                t_s = datetime.timedelta(np.diff([fish_x[0], fish_x[-1]])[0])
+                trial_dur = t_s.total_seconds() / 60
+                trial_durs.append(trial_dur)
+                changes = np.sum(np.abs(np.diff(trajectory)) > 0.5) / trial_dur
+                if changes > 6:  # skip traces with more than 6 electrode changes per minute
+                    continue
+
+                # average changes per trial
+                all_changes.append(changes)
+                f.append(freq[fish_number, 2])
+                n.append(names[fish_number])
+
+                # percent roaming
+                m = np.median(trajectory)
+                percent = (len(trajectory[trajectory > m + 1]) + len(trajectory[trajectory < m - 1])) / len(trajectory)
+                pr.append(percent)
+                std_trajecs.append(np.std(trajectory))
+
+
+                # trajectories
+                trajectories.append(trajectory)
+                trajec_x.append(all_hms[fish_number])
+
+                # average speed
+                speed.append(np.sum(np.abs(np.diff(trajectory))) / trial_dur)
+
+                # activity vs. time
+                t = all_hms[fish_number][:-1]
+                x = np.diff(trajectory)
+                if names[fish_number] == 'Eigenmannia':  # both species branches currently fill the same cbf array
+                    cbf, cbf_counter = chances_per_bin_filler(time_bins, x, t, cbf, cbf_counter)
+
+                elif names[fish_number] == 'Apteronotus':
+                    cbf, cbf_counter = chances_per_bin_filler(time_bins, x, t, cbf, cbf_counter)
+
+
+                # swim through
+                first = fish_x[0]
+                last = fish_x[-1]
+
+                start15 = mdates.date2num(mdates.num2date(first) + datetime.timedelta(seconds=2 * 60))  # first 2 min
+                stop15 = mdates.date2num(mdates.num2date(last) - datetime.timedelta(seconds=2 * 60))  # last 2 min
+
+                if np.any(trajectory[fish_x < start15] >= 13) and np.any(trajectory[fish_x > stop15] <= 2):
+                    c += 1
+                    swim_trough_list.append(True)
+                    transit_data.append(np.array([first, last, trial_dur]))
+                    # print(names[fish_number])
+                    # plot_changes_for_swim_through(fish_x, trajectory, trial_dur, freq[fish_number, 2])
+
+                elif np.any(trajectory[fish_x < start15] <= 2) and np.any(trajectory[fish_x > stop15] >= 13):
+                    c += 1
+                    swim_trough_list.append(True)
+                    transit_data.append(np.array([first, last, trial_dur]))
+                    # print(names[fish_number])
+                    # plot_changes_for_swim_through(fish_x, trajectory, trial_dur, freq[fish_number, 2])
+
+                elif np.any(trajectory[fish_x < start15] <= 1) \
+                        and np.any(trajectory[fish_x > stop15] >= 6) \
+                        and np.any(trajectory[fish_x > stop15] <= 7):
+                    c += 1
+                    swim_trough_list.append(True)
+                    transit_data.append(np.array([first, last, trial_dur]))
+                    # print(names[fish_number])
+                    # plot_changes_for_swim_through(fish_x, trajectory, trial_dur, freq[fish_number, 2])
+
+                elif np.any(trajectory[fish_x < start15] >= 6) \
+                        and np.any(trajectory[fish_x < start15] <= 7) \
+                        and np.any(trajectory[fish_x > stop15] <= 1):
+                    c += 1
+                    swim_trough_list.append(True)
+                    transit_data.append(np.array([first, last, trial_dur]))
+                    # print(names[fish_number])
+                    # plot_changes_for_swim_through(fish_x, trajectory, trial_dur, freq[fish_number, 2])
+
+                elif np.any(trajectory[fish_x < start15] >= 14) \
+                        and np.any(trajectory[fish_x > stop15] >= 8) \
+                        and np.any(trajectory[fish_x > stop15] <= 9):
+                    c += 1
+                    swim_trough_list.append(True)
+                    transit_data.append(np.array([first, last, trial_dur]))
+                    # print(names[fish_number])
+                    # plot_changes_for_swim_through(fish_x, trajectory, trial_dur, freq[fish_number, 2])
+
+                elif np.any(trajectory[fish_x < start15] >= 8) \
+                        and np.any(trajectory[fish_x < start15] <= 9) \
+                        and np.any(trajectory[fish_x > stop15] >= 14):
+                    c += 1
+                    swim_trough_list.append(True)
+                    transit_data.append(np.array([first, last, trial_dur]))
+                    # print(names[fish_number])
+                    # plot_changes_for_swim_through(fish_x, trajectory, trial_dur, freq[fish_number, 2])
+
+                else:
+                    cc += 1
+                    swim_trough_list.append(False)
+
+
+    # np.save('../data/pr.npy', pr)
+    # np.save('../data/stl.npy', swim_trough_list)
+    # np.save('../data/speed.npy', speed)
+    # np.save('../data/trial_dur.npy', trial_durs)
+    # np.save('../data/trajectories'+str(bin_len)+'.npy', trajectories)
+    # np.save('../data/trajec_x'+str(bin_len)+'.npy', trajec_x)
+    # np.save('../data/std_trajec.npy', std_trajecs)
+    #
+    # np.save('../data/f.npy', f)
+    # np.save('../data/all_changes.npy', all_changes)
+    # np.save('../data/n.npy', n)
+    #
+    # np.save('../data/cbf_e'+str(bin_len)+'.npy', cbf_e)
+    # np.save('../data/cbf_a'+str(bin_len)+'.npy', cbf_a)
+    np.save('../data/cbf'+str(bin_len)+'.npy', cbf)
+
+    embed()
+    quit()
+
+
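A minimal standalone sketch of the smoothing and interpolation step used in make_trajectory above, with toy data and a small kernel; in the script itself kernel_size, flat_x and x_tickses are module-level globals set in __main__:

import numpy as np

kernel_size = 5
kernel = np.ones(kernel_size) / kernel_size
x = np.linspace(0, 10, 50)                        # time stamps of one fish
y = np.random.randint(0, 16, 50).astype(float)    # electrode with maximal power per time stamp

smooth_y = np.convolve(y, kernel, 'valid')                                         # moving average
smooth_x = x[int(np.ceil(kernel_size / 2)):-int(np.floor(kernel_size / 2) - 1)]    # matching time stamps
grid = np.linspace(0, 10, 200)                    # stands in for flat_x, the common time grid of all fish
trajectory = np.round(np.interp(grid, smooth_x, smooth_y))                         # electrode position per grid point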
diff --git a/do_calc_eod_times.py b/do_calc_eod_times.py
new file mode 100644
index 0000000..47eb8cd
--- /dev/null
+++ b/do_calc_eod_times.py
@@ -0,0 +1,107 @@
+import sys
+import os
+import numpy as np
+from tqdm import tqdm
+import matplotlib.pyplot as plt
+from thunderfish.dataloader import open_data
+from thunderfish.eodanalysis import eod_waveform
+from IPython import embed
+
+
+def load_tracker_data(foldername):
+    '''
+    Load the tracker data of one recording.
+
+    :param foldername: folder in which the data are saved
+    :return: ident_v: identity vector
+             power: power vector
+             freq_v: frequency vector
+             timeidx: time vector in indices
+             realtime: time vector in real time relative to the beginning of the recording
+             aifl: all identity fish list
+    '''
+    ident_v = np.load('../../../data/' + foldername + '/all_ident_v.npy', allow_pickle=True)
+    power = np.load('../../../data/' + foldername + '/all_sign_v.npy', allow_pickle=True)
+    freq_v = np.load('../../../data/' + foldername + '/all_fund_v.npy', allow_pickle=True)
+    timeidx = np.load('../../../data/' + foldername + '/all_idx_v.npy', allow_pickle=True)
+    realtime = np.load('../../../data/' + foldername + '/all_times.npy', allow_pickle=True)
+    aifl = np.load('../data/' + foldername + '/aifl2.npy', allow_pickle=True)
+
+    return ident_v, power, freq_v, timeidx, realtime, aifl
+
+
+def extract_times_freq_for_plotting(aifl, ident_v, freq_v, power_v, timeidx, realtime, fish_number):
+
+    t = []
+    f = []
+    time_res = np.diff(realtime)[0]
+    dt = 10  # in seconds
+    for channel in tqdm(range(16)):
+        for id in aifl[channel, fish_number][~np.isnan(aifl[channel, fish_number])]:
+            i_df_power_channel = []
+            itr = np.arange(len(ident_v[channel]))[ident_v[channel] == id]
+            for i0 in itr[np.array(np.round(np.linspace(0, len(itr)*.9-1, 100)), dtype=int)]:
+                freqs_of_interest = freq_v[channel][(np.arange(len(ident_v[channel])) >= i0) &
+                                                    (np.arange(len(ident_v[channel])) <= i0 + dt / time_res) &
+                                                    (ident_v[channel] == id)]
+
+                df = np.max(freqs_of_interest) - np.min(freqs_of_interest)
+                mean_f = np.mean(freqs_of_interest)
+
+                # if df <= 0.5:
+                sign_of_interest = power_v[channel][(np.arange(len(ident_v[channel])) >= i0) &
+                                                    (np.arange(len(ident_v[channel])) <= i0 + dt / time_res) &
+                                                    (ident_v[channel] == id)]
+                max_channel = np.argmax(sign_of_interest, axis=1)
+                power = list(map(lambda x: np.max(x), sign_of_interest))
+
+                i_df_power_channel.append([i0, mean_f, np.min(power), max_channel[0], df])
+
+            # pick the window with the largest frequency excursion of this identity
+            i0, mean_f, power, c, df = i_df_power_channel[np.nanargmax(np.array(i_df_power_channel)[:, 4])]
+            t.append(realtime[timeidx[channel][i0]])
+            f.append(mean_f)
+    return t, f
+
+
+def main(filename):
+    '''
+    Extracts the time points and corresponding frequencies at which the EOD waveforms should be calculated.
+
+    :param filename: folder name of the recording
+    :return: all_t: list; all time points of each fish where the eod should be extracted
+             all_f: list; all frequencies of each fish where the eod should be extracted
+             all_fish: list; fish_number of the aifl
+    '''
+    # load data
+    ident_v, power_v, freq_v, timeidx, realtime, aifl = load_tracker_data(filename)
+
+    fish_in_aifl = list(np.unique(np.where(~np.isnan(aifl[:, :, 0]))[1]))
+    all_t = []
+    all_f = []
+    all_fish = []
+    for fish_number in fish_in_aifl:
+        print(fish_number)
+        # if 400 <= all_q10[fish_number, 2] <= 650 and power_means[fish_number] >= -90.0:
+        times, freqs = extract_times_freq_for_plotting(aifl, ident_v, freq_v, power_v, timeidx, realtime, fish_number)
+
+        all_t.append(times)
+        all_f.append(freqs)
+        all_fish.append(fish_number)
+
+    return all_t, all_f, all_fish
+
+
+
+if __name__ == '__main__':
+
+    for index, filename_idx in enumerate([0, 2, 3, 5]):
+        filename = sorted(os.listdir('../../../data/mount_data/sanmartin/softgrid_1x16/'))[filename_idx]
+        print('new file: ' + filename)
+        all_t, all_f, all_fish = main(filename)
+
+        #############################################################################################################
+        # save data
+        if filename not in os.listdir('../data/'):
+            os.mkdir('../data/' + filename)
+
+        np.save('../data/' + filename + '/eod_times_new_new.npy', all_t)
+        np.save('../data/' + filename + '/eod_freq_new_new.npy', all_f)
+        np.save('../data/' + filename + '/eod_fishnumber_new_new.npy', all_fish)
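The three arrays saved above hold per-fish lists of unequal length, so they typically end up as object arrays and are read back with allow_pickle=True. A small consumer sketch; the folder name here is made up:

import numpy as np

filename = '2019-10-07-10_25'  # hypothetical recording folder under ../data/
all_t = np.load('../data/' + filename + '/eod_times_new_new.npy', allow_pickle=True)
all_f = np.load('../data/' + filename + '/eod_freq_new_new.npy', allow_pickle=True)
all_fish = np.load('../data/' + filename + '/eod_fishnumber_new_new.npy', allow_pickle=True)

for fish, t, f in zip(all_fish, all_t, all_f):
    print('fish {}: {} extraction windows, mean EODf ~{:.1f} Hz'.format(fish, len(t), np.mean(f)))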
diff --git a/do_check_for_overlap.py b/do_check_for_overlap.py
new file mode 100644
index 0000000..8db97a6
--- /dev/null
+++ b/do_check_for_overlap.py
@@ -0,0 +1,196 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib.dates as mdates
+import matplotlib.colors as mcolors
+from IPython import embed
+import helper_functions as hf
+import os
+import datetime
+import itertools
+import matplotlib.gridspec as gridspec
+from params import *
+
+
+def plot_overlap_figure(fig, ax, aifl, emptyN1, emptyN2, emptyN3, emptyN4, color_v):
+    # plot lines in figure
+    for plot_cidx in range(16):
+        fish1 = aifl[plot_cidx, emptyN1, ~np.isnan(aifl[plot_cidx, emptyN1])]
+        fish2 = aifl[plot_cidx, emptyN2, ~np.isnan(aifl[plot_cidx, emptyN2])]
+        for eN1_idx in range(len(fish1)):
+            ax.plot(timeidx[plot_cidx][ident[plot_cidx] == fish1[eN1_idx]],
+                    freq[plot_cidx][ident[plot_cidx] == fish1[eN1_idx]],
+                    linewidth=3, label='1', color=color_v[0], alpha=0.3)
+        for eN2_idx in range(len(fish2)):
+            ax.plot(timeidx[plot_cidx][ident[plot_cidx] == fish2[eN2_idx]],
+                    freq[plot_cidx][ident[plot_cidx] == fish2[eN2_idx]],
+                    linewidth=3, label='2', color=color_v[1], alpha=0.3)
+        if emptyN3 is not None:
+            fish3 = aifl[plot_cidx, emptyN3, ~np.isnan(aifl[plot_cidx, emptyN3])]
+            for eN3_idx in range(len(fish3)):
+                ax.plot(timeidx[plot_cidx][ident[plot_cidx] == fish3[eN3_idx]],
+                        freq[plot_cidx][ident[plot_cidx] == fish3[eN3_idx]],
+                        linewidth=3, label='3', color=color_v[4], alpha=0.3)
+        if emptyN4 is not None:
+            fish4 = aifl[plot_cidx, emptyN4, ~np.isnan(aifl[plot_cidx, emptyN4])]
+            for eN4_idx in range(len(fish4)):
+                ax.plot(timeidx[plot_cidx][ident[plot_cidx] == fish4[eN4_idx]],
+                        freq[plot_cidx][ident[plot_cidx] == fish4[eN4_idx]],
+                        linewidth=3, label='4', color=color_v[5], alpha=0.3)
+    # plot the trace that has to be assigned to one of the emptyN fish
+    # (timeidx, ident, freq, channel_idx, false_fish and ff_idx are module globals set in __main__)
+    ax.plot(timeidx[channel_idx][ident[channel_idx] == false_fish[ff_idx]],
+            freq[channel_idx][ident[channel_idx] == false_fish[ff_idx]],
+            linewidth=1, color='red')
+
+    # create legend without duplicated labels
+    hf.legend_without_duplicate_labels(fig, ax)
+    plt.show()
+
+def get_list_of_fishN_with_overlap(aifl, fish_in_aifl, time, identity):
+    liste = []
+    for fishN in fish_in_aifl:
+        for Ch_idx in range(16):
+            fishs_idxs = aifl[Ch_idx, fishN, ~np.isnan(aifl[Ch_idx, fishN])]
+            len_f_idxs = len(fishs_idxs)
+            if len_f_idxs >= 2:
+                time_traces = []
+                for f_idx in range(len_f_idxs):
+                    time_traces.append(time[Ch_idx][identity[Ch_idx] == fishs_idxs[f_idx]])
+
+                for subset in itertools.combinations(fishs_idxs, 2):
+                    r1 = set(time[Ch_idx][identity[Ch_idx] == subset[0]])
+                    result = r1.intersection(time[Ch_idx][identity[Ch_idx] == subset[1]])
+                    if bool(result):
+                        print('overlap -- new sorting')
+                        liste.append(fishN)
+    liste = np.unique(liste)
+    return liste
+
+
+def assigne_ID_to_fishN_in_aifl(aifl, CH_idx, false_fish, i, empty_counter, fishN, N1, N2, N3, N4):
+    # in_str and empty_fishNs are module globals set in __main__
+    if in_str == '1':
+        aifl = hf.add_id_to_aifl(aifl, CH_idx, false_fish[i], [[N1]])
+    elif in_str == '2':
+        aifl = hf.add_id_to_aifl(aifl, CH_idx, false_fish[i], [[N2]])
+    elif in_str == '3':
+        if N3 is None:
+            N3 = int(empty_fishNs[empty_counter])
+            empty_counter += 1
+            aifl = hf.add_id_to_aifl(aifl, CH_idx, false_fish[i], [[N3]])
+            print(fishN, N1, N2, N3)
+        else:
+            aifl = hf.add_id_to_aifl(aifl, CH_idx, false_fish[i], [[N3]])
+    elif in_str == '4':
+        if N4 is None:
+            N4 = int(empty_fishNs[empty_counter])
+            empty_counter += 1
+            aifl = hf.add_id_to_aifl(aifl, CH_idx, false_fish[i], [[N4]])
+            print(fishN, N1, N2, N3, N4)
+        else:
+            aifl = hf.add_id_to_aifl(aifl, CH_idx, false_fish[i], [[N4]])
+    else:
+        print('trace was not assigned to any fish -- trace Ch+ID:', CH_idx, false_fish[i])
+
+    return aifl, empty_counter, N3, N4
+
+
+if __name__ == '__main__':
+
+    ###################################################################################################################
+    # load data
+    ###################################################################################################################
+    # load all the data of one day
+    filename = sorted(os.listdir('../../../data/'))[6]
+
+    ident = np.load('../../../data/' + filename + '/all_ident_v.npy', allow_pickle=True)
+    freq = np.load('../../../data/' + filename + '/all_fund_v.npy', allow_pickle=True)
+    timeidx = np.load('../../../data/' + filename + '/all_idx_v.npy', allow_pickle=True)
+
+    aifl = np.load('../data/' + filename + '/aifl2.npy', allow_pickle=True)
+    faifl = np.load('../data/' + filename + '/faifl2.npy', allow_pickle=True)
+    # faifl = np.delete(faifl, [0], axis=0)
+
+    ###################################################################################################################
+    # params
+    # color_v, font_size, _, _, _, _ = params.params()
+
+    # variables
+    empty_counter = 0
+
+    # lists
+    fish_in_aifl = list(np.unique(np.where(~np.isnan(aifl[:, :, 0]))[1]))
+    fish_in_faifl = list(np.unique(faifl[:, [1, 3]]))
+    ###################################################################################################################
+    # get the fish numbers with overlapping traces
+    new_sorting = get_list_of_fishN_with_overlap(aifl, fish_in_aifl, timeidx, ident)
+
+    ###################################################################################################################
+    # get the fish numbers without any assigned fish ids
+    empty_fishNs = []
+    for i in range(len(aifl[0])):
+        if np.all(np.isnan(aifl[:, i])):
+            empty_fishNs = np.append(empty_fishNs, i)
+
+    ###################################################################################################################
+    # resort the fish numbers with overlapping traces
+    for fish_number in fish_in_aifl:
+
+        emptyN3 = None
+        emptyN4 = None
+        ax = hf.plot_together(timeidx, freq, ident, aifl, int(fish_number), color_vec[0])
+
+        if fish_number not in new_sorting:
+            correct_str = input('Do we have to correct the fish? [y/n]')
+        else:
+            correct_str = 'y'
+
+        if correct_str == 'n':
+            continue
+        else:
+            emptyN1 = int(empty_fishNs[empty_counter])
+            empty_counter += 1
+            emptyN2 = int(empty_fishNs[empty_counter])
+            empty_counter += 1
+            print(fish_number, emptyN1, emptyN2)
+            for channel_idx in range(16):
+                false_fish = aifl[channel_idx, int(fish_number), ~np.isnan(aifl[channel_idx, int(fish_number)])]
+                for ff_idx in range(len(false_fish)):
+                    # __________________________________________________________________________________________________
+                    # make figure
+                    fig1 = plt.figure(figsize=(16, 14))
+                    gs = gridspec.GridSpec(1, 1, left=0.125, right=0.95, top=0.90, bottom=0.15, wspace=0.15, hspace=0.15)
+                    ax1 = fig1.add_subplot(gs[0, 0])
+                    ax1.set_xlim(ax.get_xlim())
+                    ax1.set_ylim(ax.get_ylim())
+                    fig1.suptitle('Ch {}, ID {}'.format(channel_idx, false_fish[ff_idx]), fontsize=fs)
+
+                    for i in range(16):
+                        fish1 = aifl[i, fish_number, ~np.isnan(aifl[i, fish_number])]
+                        r1 = len(fish1)
+                        # print(fish1)
+                        for len_idx1 in range(r1):
+                            plt.plot(timeidx[i][ident[i] == fish1[len_idx1]],
+                                     freq[i][ident[i] == fish1[len_idx1]],
+                                     linewidth=1, color='gray', alpha=0.2)
+
+                    plot_overlap_figure(fig1, ax1, aifl, emptyN1, emptyN2, emptyN3, emptyN4, color_vec)
+                    # __________________________________________________________________________________________________
+                    # where does the new red line belong? -- fill aifl at that fish_number with the ID of the red line
+                    in_str = input('Where does the trace belong? [1/2/3/4]')
+
+                    aifl, empty_counter, emptyN3, emptyN4 = assigne_ID_to_fishN_in_aifl(aifl, channel_idx, false_fish,
+                                                                                        ff_idx, empty_counter,
+                                                                                        fish_number, emptyN1, emptyN2,
+                                                                                        emptyN3, emptyN4)
+
+            aifl[:, fish_number, :] = np.nan
+    ###################################################################################################################
+    # save
+    if filename not in os.listdir('../data/'):
+        os.mkdir('../data/' + filename)
+
+    np.save('../data/' + filename + '/aifl2.npy', aifl)
+    np.save('../data/' + filename + '/faifl2.npy', faifl)
+
+    embed()
+    quit()
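The overlap test in get_list_of_fishN_with_overlap boils down to a set intersection of the time indices of two identity traces on the same channel; traces that share time points cannot belong to the same fish on that electrode. A toy illustration of the same check:

import numpy as np

time_id_a = np.array([10, 11, 12, 13, 14])   # time indices of identity A on one channel
time_id_b = np.array([13, 14, 15, 16])       # time indices of identity B on the same channel

overlap = set(time_id_a).intersection(time_id_b)
if bool(overlap):
    print('overlap -- new sorting', sorted(overlap))   # -> overlap at indices 13 and 14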
diff --git a/do_connect_fish_id.py b/do_connect_fish_id.py
new file mode 100644
index 0000000..eef34c4
--- /dev/null
+++ b/do_connect_fish_id.py
@@ -0,0 +1,185 @@
+import numpy as np
+import matplotlib.pyplot as plt
+from IPython import embed
+from scipy import stats
+from helper_functions import fill_aifl
+import os
+
+
+# from tqdm import tqdm
+
+def connect_fish(aifl, faifl, uni_ident_list, matrix_fish_counter, jumper):
+    ''' Connects all ids of each channel with those of a following channel;
+    the channel distance that is checked can be varied via jumper.
+
+    Parameters
+    ----------
+    aifl: 3-D array
+        all identity fish list; 1_D: channel; 2_D: fish number; 3_D: fish identities
+    faifl: 2-D array
+        false all identity fish list; 1_D: number of false entries in the aifl;
+        2_D: channels and fish numbers that are falsely connected;
+        4 columns: Ch, fishN0, Ch_connect, fishN1
+    uni_ident_list: 2-D array
+        array of all unique identities per channel; 1_D: channel; 2_D: identities
+    matrix_fish_counter: int
+        counter where the new fish should be registered
+    jumper: int
+        defines which channel should be compared with which next channel; channel + jumper
+
+    Returns
+    -------
+    aifl: 3-D array
+        updated aifl
+    faifl: 2-D array
+        updated faifl
+    matrix_fish_counter: int
+        updated counter
+
+    '''
+
+    # note: timeidx, freq and ident are module globals set in __main__
+    for channel in range(len(uni_ident_list) - jumper):
+        print(channel)
+        connect_channel = channel + jumper
+
+        # find for each identity the first and last time stamp in the given channel and the next
+        # channel 0
+        t_Ch0 = np.empty((len(uni_ident_list[channel]), 2))
+        for index in range(len(uni_ident_list[channel])):
+            id0 = uni_ident_list[channel][index]
+            t = timeidx[channel][ident[channel] == id0]
+            t_Ch0[index][0] = t[0]
+            t_Ch0[index][1] = t[-1]
+
+        # channel 1
+        t_Ch1 = np.empty((len(uni_ident_list[connect_channel]), 2))
+        for index in range(len(uni_ident_list[connect_channel])):
+            id1 = uni_ident_list[connect_channel][index]
+            t1 = timeidx[connect_channel][ident[connect_channel] == id1]
+            t_Ch1[index][0] = t1[0]
+            t_Ch1[index][1] = t1[-1]
+
+        # parameters
+        jitter_time = 61
+        tolerance_time = jitter_time*2
+
+        false_count = 0
+        true_count = 0
+
+        for i in range(len(t_Ch0)):
+            for j in range(len(t_Ch1)):
+                id0 = uni_ident_list[channel][i]
+                t0 = timeidx[channel][ident[channel] == id0]
+                f0 = freq[channel][ident[channel] == id0]
+                id1 = uni_ident_list[connect_channel][j]
+
+                t1 = timeidx[connect_channel][ident[connect_channel] == id1]
+                f1 = freq[connect_channel][ident[connect_channel] == id1]
+
+                t00 = t_Ch0[i][0] - jitter_time
+                t0n = t_Ch0[i][1] + jitter_time
+
+                t10 = t_Ch1[j][0] - jitter_time
+                t1n = t_Ch1[j][1] + jitter_time
+
+
+
+                # time sorting: do the two identities overlap in time (within the jitter)?
+                sort_it = False
+                if (t00 <= t10) and (t00 <= t1n) and (t0n >= t1n):
+                    sort_it = True
+                elif (t10 <= t00) and (t10 <= t0n) and (t1n >= t0n):
+                    sort_it = True
+                elif (t00 <= t10) and (t0n >= t10) and (t0n <= t1n):
+                    sort_it = True
+                elif (t10 <= t00) and (t1n >= t00) and (t1n <= t0n):
+                    sort_it = True
+                else:
+                    false_count += 1
+                    pass
+
+                # frequency sorting
+                window_times = sorted(np.array([t00, t0n, t10, t1n]))[1:3]  # only where they are overlapping
+
+                if sort_it:
+                    true_count += 1
+                    if window_times[0] < 0:
+                        window_times[0] = 0.0
+
+                    f0_box_min = np.median(f0[np.isclose(t0, window_times[0], atol=tolerance_time)])
+                    f0_box_max = np.median(f0[np.isclose(t0, window_times[1], atol=tolerance_time)])
+                    f1_box_min = np.median(f1[np.isclose(t1, window_times[0], atol=tolerance_time)])
+                    f1_box_max = np.median(f1[np.isclose(t1, window_times[1], atol=tolerance_time)])
+                    if f0_box_min.size > 0 and f1_box_min.size > 0:
+                        fdiff0 = abs(f0_box_min - f1_box_min)
+                        if fdiff0 <= 2:
+                            matrix_fish_counter, aifl, faifl = fill_aifl(id0, id1, aifl, channel, connect_channel,
+                                                                         matrix_fish_counter,
+                                                                         timeidx, freq, ident, faifl)
+                        else:
+                            continue
+                    elif f0_box_max.size > 0 and f1_box_max.size > 0:
+                        fdiff1 = abs(f0_box_max - f1_box_max)
+                        if fdiff1 <= 2:
+                            matrix_fish_counter, aifl, faifl = fill_aifl(id0, id1, aifl, channel, connect_channel,
+                                                                         matrix_fish_counter,
+                                                                         timeidx, freq, ident, faifl)
+                        else:
+                            continue
+    return aifl, faifl, matrix_fish_counter
+
+
+if __name__ == '__main__':
+    ###################################################################################################################
+    # load data
+    ###################################################################################################################
+    # load all the data of one day
+    filename = sorted(os.listdir('../../../data/'))[6]
+
+    ident = np.load('../../../data/' + filename + '/all_ident_v.npy', allow_pickle=True)
+    freq = np.load('../../../data/' + filename + '/all_fund_v.npy', allow_pickle=True)
+    timeidx = np.load('../../../data/' + filename + '/all_idx_v.npy', allow_pickle=True)
+
+    ###################################################################################################################
+    # Parameters
+    ###################################################################################################################
+    # find for each channel the unique identities
+    uni_ident_list = []
+    for index in range(len(ident)):
+        uni_ident = np.unique(ident[index][~np.isnan(ident[index])])
+        uni_ident_list.append(uni_ident)
+
+    # all_identity_fish_list (aifl)
+    # dimensions: channel x fish number x identity slots
+    aifl = np.empty([16, 500, 100])
+    aifl.fill(np.nan)
+
+    faifl = np.empty([1, 4])
+    faifl.fill(np.nan)
+
+    matrix_fish_counter = 0
+
+    for jump in [1, 2, 3]:
+        print('Jump ', jump)
+        aifl, faifl, matrix_fish_counter = connect_fish(aifl, faifl, uni_ident_list, matrix_fish_counter, jump)
+
+    # collect all ids that were not assigned to any fish (oofl)
+    oofl = []
+    counter = 0
+    for idx in range(len(uni_ident_list)):
+        for j in range(len(uni_ident_list[idx])):
+            if not np.any(aifl[idx] == uni_ident_list[idx][j]):
+                oofl.append([idx, uni_ident_list[idx][j]])
+                counter = counter + 1
+
+    ###################################################################################################################
+    # save
+    if filename not in os.listdir('../data/'):
+        os.mkdir('../data/' + filename)
+
+    np.save('../data/' + filename + '/aifl.npy', aifl)
+    np.save('../data/' + filename + '/faifl.npy', faifl)
+    np.save('../data/' + filename + '/oofl.npy', oofl)
+    embed()
+    quit()
\ No newline at end of file
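For reference, the aifl array built above is indexed as channel x fish number x identity slot; hf.fill_aifl does the actual bookkeeping, but the layout itself can be illustrated with made-up ids:

import numpy as np

aifl = np.full([16, 500, 100], np.nan)   # all-identity-fish-list, all slots empty

fish_number = 0
aifl[3, fish_number, 0] = 12345.0        # id of this fish on channel 3
aifl[4, fish_number, 0] = 67890.0        # id of the same fish on channel 4

ids_on_ch3 = aifl[3, fish_number, ~np.isnan(aifl[3, fish_number])]
print(ids_on_ch3)                        # -> [12345.]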
diff --git a/do_correct_for_Q10.py b/do_correct_for_Q10.py
new file mode 100644
index 0000000..5e568a2
--- /dev/null
+++ b/do_correct_for_Q10.py
@@ -0,0 +1,135 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib.dates as mdates
+import matplotlib.colors as mcolors
+import matplotlib.gridspec as gridspec
+import math
+from IPython import embed
+from scipy import stats
+import os
+from params import *
+import datetime
+import itertools
+import pandas as pd
+
+if __name__ == '__main__':
+
+    ###################################################################################################################
+    # load data
+    ###################################################################################################################
+    # load all the data of one day
+    filename = sorted(os.listdir('../../../data/'))[6]
+
+    ident = np.load('../../../data/' + filename + '/all_ident_v.npy', allow_pickle=True)
+    freq = np.load('../../../data/' + filename + '/all_fund_v.npy', allow_pickle=True)
+    timeidx = np.load('../../../data/' + filename + '/all_idx_v.npy', allow_pickle=True)
+    realtime = np.load('../../../data/' + filename + '/all_times.npy', allow_pickle=True)
+    temp = pd.read_csv('../../../data/' + filename + '/temperatures.csv', sep=';')
+    temp_l = np.array(temp.values.tolist())
+
+    aifl = np.load('../data/' + filename + '/aifl2.npy', allow_pickle=True)
+
+    ###################################################################################################################
+    # parameters
+    # color_vec, fs, fs_ticks, lw, a, h_bins = params.params()
+
+    # variables
+    mean_bins = temp_l[:, 0] - 150
+    mean_bins[0] = 0
+    all_freq_mean = []
+    literature_q10 = 1.4
+
+
+    # lists
+    fish_in_aifl = list(np.unique(np.where(~np.isnan(aifl[:, :, 0]))[1]))
+    fish_q10 = np.full([len(fish_in_aifl), 4], np.nan)
+    ###################################################################################################################
+    # analysis
+    for counter, fish_number in enumerate(fish_in_aifl):
+        q10_werte = []
+        afm = []
+        corr_freq_v = []
+
+        for channel in range(16):
+            fish_IDs = aifl[channel, fish_number, ~np.isnan(aifl[channel, fish_number])]
+
+            for len_idx1 in range(len(fish_IDs)):
+
+                ID = fish_IDs[len_idx1]  # current ID
+                t = timeidx[channel][ident[channel] == ID]  # timeidx vector of this ID
+                f = freq[channel][ident[channel] == ID]  # freq vector of this ID
+
+                # Cfreq: current frequency vector
+                # interpol_freq_vec: interpolated frequency vector
+                Cfreq = np.full([len(realtime)], np.nan)
+                interpol_freq_vec = np.full([len(realtime)], np.nan)
+
+                #######################################################################################################
+                # fill interpolated frequency vector
+                Cfreq[t] = f  # frequency points on the realtime points
+                Ctime = realtime[~np.isnan(Cfreq[:])]  # time points on the realtime points
+                all_Ctime = realtime[t[0]:t[-1]+1]  # the realtime points
+                try:
+                    interpol_freq_vec[np.arange(t[0], t[-1]+1)] = np.interp(all_Ctime, Ctime, f)
+                except:
+                    continue
+                #######################################################################################################
+                # frequency mean in bins matching the sampling of the temperature vector
+                freq_mean = np.full([len(mean_bins)], np.nan)
+
+                for i in range(len(mean_bins) - 1):
+                    idx_low = np.where(realtime >= mean_bins[i])[0][0]
+                    idx_up = np.where(realtime <= int(mean_bins[i + 1]))[0][-1]
+                    if i == len(mean_bins) - 2:  # last bin: take everything from idx_low onwards
+                        freq_mean[i] = np.nanmedian(interpol_freq_vec[idx_low:])
+                    else:
+                        freq_mean[i] = np.nanmedian(interpol_freq_vec[idx_low:idx_up])
+                afm.append(freq_mean)
+
+                #######################################################################################################
+                # Q10 calculation
+                # every value in freq_mean with every other value
+                for subset in itertools.combinations(freq_mean[~np.isnan(freq_mean)], 2):
+                    f1 = subset[0]
+                    f2 = subset[1]
+                    T1 = temp_l[np.where(freq_mean == subset[0])[0][0]][1]
+                    T2 = temp_l[np.where(freq_mean == subset[1])[0][0]][1]
+                    if T1 == T2:
+                        continue
+                    else:
+                        q10_werte.append((f2/f1)**(10/(T2-T1)))
+
+        fish_q10[counter, 0] = fish_number
+        fish_q10[counter, 1] = np.nanmedian(q10_werte)
+
+        # correct all binned frequencies to 25 degC with the fish's own Q10, or with the literature Q10 if none could be estimated
+        for i in range(len(afm)):
+            for k in range(len(afm[i])):
+                if math.isnan(afm[i][k]):
+                    continue
+                else:
+                    if math.isnan(fish_q10[counter, 1]):
+                        corr_f = afm[i][k] * literature_q10 ** ((25 - temp_l[k, 1]) / 10)
+                    else:
+                        corr_f = afm[i][k] * fish_q10[counter, 1]**((25-temp_l[k, 1])/10)
+                    corr_freq_v.append(corr_f)
+
+        fish_q10[counter, 2] = np.nanmedian(corr_freq_v)
+        fish_q10[counter, 3] = np.nanmax(corr_freq_v) - np.nanmin(corr_freq_v)
+
+        # print(np.nanmedian(corr_freq_v), np.nanmax(corr_freq_v) - np.nanmin(corr_freq_v))
+        all_freq_mean.append(np.nanmean(np.array(afm), axis=0))
+
+
+    if filename not in os.listdir('../data/'):
+        os.mkdir('../data/' + filename)
+
+    np.save('../data/' + filename + '/fish_freq_q10.npy', fish_q10)
+    np.save('../data/' + filename + '/all_freq_mean.npy', all_freq_mean)
+
+    embed()
+    quit()
+
+
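For reference, the Q10 relation implemented above, as a worked example with made-up numbers: Q10 = (f2/f1)**(10/(T2 - T1)), and a frequency measured at temperature T is normalized to 25 degC via f_corr = f * Q10**((25 - T)/10).

f1, T1 = 610.0, 24.0   # EODf (Hz) of one fish at 24 degC
f2, T2 = 622.0, 26.0   # EODf of the same fish at 26 degC

q10 = (f2 / f1) ** (10.0 / (T2 - T1))        # ~1.10
f_corr = f1 * q10 ** ((25.0 - T1) / 10.0)    # ~616 Hz; both measurements map to the same value at 25 degC
print(q10, f_corr)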