diff --git a/plot_material_eod.py b/plot_material_eod.py new file mode 100644 index 0000000..84e2601 --- /dev/null +++ b/plot_material_eod.py @@ -0,0 +1,125 @@ +import sys +import os +import numpy as np +from tqdm import tqdm +import matplotlib.pyplot as plt +from thunderfish.dataloader import open_data +from thunderfish.eodanalysis import eod_waveform +from IPython import embed +import matplotlib.gridspec as gridspec +from params import * + + +def unfilter(data, samplerate, cutoff): + """ + Apply inverse high-pass filter on data. + + Assumes high-pass filter \\[ \\tau \\dot y = -y + \\tau \\dot x \\] has + been applied on the original data \\(x\\), where \\(\tau=(2\\pi + f_{cutoff})^{-1}\\) is the time constant of the filter. To recover \\(x\\) + the ODE \\[ \\tau \\dot x = y + \\tau \\dot y \\] is applied on the + filtered data \\(y\\). + + Parameters: + ----------- + data: ndarray + High-pass filtered original data. + samplerate: float + Sampling rate of `data` in Hertz. + cutoff: float + Cutoff frequency \\(f_{cutoff}\\) of the high-pass filter in Hertz. + + Returns: + -------- + data: ndarray + Recovered original data. + """ + tau = 0.5 / np.pi / cutoff + fac = tau * samplerate + data -= np.mean(data) + d0 = data[0] + x = d0 + for k in range(len(data)): + d1 = data[k] + x += (d1 - d0) + d0 / fac + data[k] = x + d0 = d1 + return data + + +def calc_mean_eod(t0, f, data, dt=10, unfilter=0): + channel_list = np.arange(data.channels) + samplerate = data.samplerate + + start_i = t0 * samplerate + end_i = t0 * samplerate + dt * samplerate + 1 + t = np.arange(0, dt, 1 / f) + + mean_EODs = [] + for c in channel_list: + mean_eod, eod_times = eod_waveform(data[start_i:end_i, c], samplerate, t, unfilter_cutoff=unfilter) + mean_EODs.append(mean_eod) + + max_size = list(map(lambda x: np.max(x.T[1]) - np.min(x.T[1]), mean_EODs)) + EOD = mean_EODs[np.argmax(max_size)] + + return EOD, samplerate + + +def main(folder, filename): + # folder = path_to_files + data = open_data(os.path.join(folder, 'traces-grid1.raw'), -1, 60.0, 10.0) + + power_means = np.load('../data/' + filename + '/power_means.npy', allow_pickle=True) + all_q10 = np.load('../data/' + filename + '/fish_freq_q10.npy', allow_pickle=True) + all_t = np.load('../data/' + filename + '/eod_times_new_new.npy', allow_pickle=True) + all_f = np.load('../data/' + filename + '/eod_freq_new_new.npy', allow_pickle=True) + + plot_pannel = [16, 0] + cutoff_value = [200, 0] + y_ticks = [[-0.001, 0, 0.001, 0.0015], [-0.002, 0, 0.002]] + + ################################################################################################################## + # figure + fig = plt.figure(constrained_layout=True, figsize=[15 / inch, 6 / inch]) + gs = gridspec.GridSpec(ncols=2, nrows=1, figure=fig, hspace=0.05, wspace=0.0, + left=0.1, bottom=0.15, right=0.95, top=0.98) + + ax2 = fig.add_subplot(gs[0, 1]) + ax1 = fig.add_subplot(gs[0, 0], sharey=ax2) + + for fn_idx, fish_number, ax in zip([0, 1], [15, 22], [ax1, ax2]): + print(all_q10[fish_number, 2], fish_number) + + t = all_t[fish_number][plot_pannel[fn_idx]] + f = all_f[fish_number][plot_pannel[fn_idx]] + EOD, samplingrate = calc_mean_eod(t, f, data, unfilter=cutoff_value[fn_idx]) + + ############################################################################################################## + # plot + ax.plot(EOD.T[0], EOD.T[1], color=color_efm[fn_idx], lw=2) + ax.fill_between(EOD.T[0], EOD.T[1] + EOD.T[2], EOD.T[1] - EOD.T[2], + color=color_efm[fn_idx], alpha=0.7) + ax.make_nice_ax() + + ax.text(-0.12, 0.95, 
chr(ord('A') + fn_idx), transform=ax.transAxes, fontsize='large') + ax.text(0.8, 0.95, str(np.round(all_q10[fish_number, 2], 1))+' Hz', transform=ax.transAxes, fontsize=10) + + ax.set_xlabel('Time') + ax.set_yticks([0]) + ax.set_xticks([]) + # fig.suptitle(all_q10[fish_number, 2]) + + ax1.set_ylabel('Amplitude') + fig.savefig(save_path + 'eod_waves.pdf') + + plt.show() + + +if __name__ == '__main__': + + for index, filename_idx in enumerate([2]): + filename = sorted(os.listdir('../../../data/mount_data/sanmartin/softgrid_1x16/'))[filename_idx] + folder = '../../../data/mount_data/sanmartin/softgrid_1x16/' + filename + print('new file: ' + filename) + main(folder, filename) diff --git a/plot_pancake.py b/plot_pancake.py new file mode 100644 index 0000000..1819d7d --- /dev/null +++ b/plot_pancake.py @@ -0,0 +1,72 @@ +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +import matplotlib.gridspec as gridspec + +from IPython import embed +import helper_functions as hf +from params import * +import os +import datetime + +if __name__ == '__main__': + ################################################################################################################### + # parameter and variables + # plot params + inch = 2.45 + save_path = '../../thesis/Figures/Results/' + kernel_size = 100 + + ################################################################################################################### + # load all the data of one day + # for filename_idx in [1, 4, 6]: + for filename_idx in [1]: + filename = sorted(os.listdir('../data/'))[filename_idx] + + all_max_ch_means = np.load('../data/' + filename + '/all_max_ch.npy', allow_pickle=True) + all_xticks = np.load('../data/' + filename + '/all_xtickses.npy', allow_pickle=True) + all_ipp = np.load('../data/' + filename + '/all_ipp.npy', allow_pickle=True) + power_means = np.load('../data/' + filename + '/power_means.npy', allow_pickle=True) + freq = np.load('../data/' + filename + '/fish_freq_q10.npy', allow_pickle=True) + ############################################################################################################### + # get fish + # for fish_number in range(len(power_means)): + for fish_number in [14]: + if power_means[fish_number] >= -90.0: + ipp = all_ipp[fish_number] + x_tickses = all_xticks[fish_number] + max_ch_mean = all_max_ch_means[fish_number] + + # smoothing of max channel mean + kernel = np.ones(kernel_size) / kernel_size + smooth_mcm = np.convolve(max_ch_mean, kernel, 'valid') + + try: + smooth_x = x_tickses[int(np.ceil(kernel_size/2)):-int(np.floor(kernel_size/2))] + except: + embed() + quit() + ##################################################################################################### + # plot traces + fig1, ax1 = plt.subplots(1, 1, figsize=(13 / inch, 8 / inch)) + fig1.subplots_adjust(left=0.12, bottom=0.15, right=0.99, top=0.99) + + ax1.imshow(ipp[::20].T[::-1], vmin=-100, vmax=-50, aspect='auto', interpolation='gaussian', + extent=[x_tickses[0], x_tickses[-1], -0.5, 15.5]) + + ax1.plot(smooth_x[::20], smooth_mcm[::20], '.', color=color2[4]) + + # ax1.set_title('freq: %.1f, power: %.1f' %(freq[:,2][fish_number], power_means[fish_number]), fontsize=fs + 2) + # ax1.set_title('freq: %.1f, Nr: %.1f' %(freq[:,2][fish_number], fish_number), fontsize=fs + 2) + ax1.set_xticks(smooth_x[::350]) + ax1.beautimechannelaxis() + ax1.timeaxis() + # fig1.autofmt_xdate() + + fig1.savefig(save_path + 'trajectory_'+str(fish_number)+'.pdf') + # 
fig1.savefig('../../../goettingen2021_poster/pictures/trajectory_'+ str(fish_number)+'.pdf') + print(fish_number, freq[fish_number,2]) + plt.show() + embed() + + diff --git a/plot_pie.py b/plot_pie.py new file mode 100644 index 0000000..d06cec6 --- /dev/null +++ b/plot_pie.py @@ -0,0 +1,170 @@ +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +import matplotlib.colors as mcolors +import matplotlib.gridspec as gridspec +from mpl_toolkits.axes_grid1.inset_locator import inset_axes +from IPython import embed +from scipy import stats, optimize +import pandas as pd +import math +import os +from IPython import embed + +from eventdetection import threshold_crossings, merge_events +import helper_functions as hf +from params import * +from statisitic_functions import significance_bar, cohen_d +import itertools + + +def get_recording_number_in_time_bins(time_bins): + """ + Calculates the number of the recordings in the time bins + + :param time_bins: numpy array with borders of the time bins + :return: time_bins_recording: numpy array with the number of recordings to that specific time bin + """ + # variables + time_bins_recordings = np.zeros(len(time_bins) - 1) + + # load data + for index, filename_idx in enumerate([0, 1, 2, 3]): + filename = sorted(os.listdir('../data/'))[filename_idx] + time_points = np.load('../data/' + filename + '/all_hms.npy', allow_pickle=True) + + # in which bins is this recording, fill time_bins_recordings + unique_time_points = np.unique(np.hstack(time_points)) + for idx, tb in enumerate(time_bins[:-1]): + if np.any((unique_time_points >= tb) & (unique_time_points <= time_bins[idx + 1])): + time_bins_recordings[idx] += 1 + + return time_bins_recordings + + +def func(x, a, tau, c): + return a * np.exp(-x / tau) + c + + +def calc_movement(cbf, i): + movement = cbf[0, :, i] + cbf[1, :, i] + movement[np.isnan(movement)] = 0 + re_mov = cbf[0, :, i] - cbf[1, :, i] + re_mov[np.isnan(re_mov)] = 0 + + return movement, re_mov + + +if __name__ == '__main__': + ################################################################################################################### + # parameter and variables + # plot params + inch = 2.45 + + c = 0 + cat_v1 = [0, 0, 750, 0] + cat_v2 = [750, 750, 1000, 1000] + cat_n = ['Eigenmannia', 'Apteronotus', 'Apteronotus'] + + ################################################################################################################### + # load data + ################################################################################################################### + # load all the data of one day + cbf2 = np.load('../data/cbf15.npy', allow_pickle=True) + stl = np.load('../data/stl.npy', allow_pickle=True) + freq = np.load('../data/f.npy', allow_pickle=True) + names = np.load('../data/n.npy', allow_pickle=True) + speed_average = np.load('../data/speed.npy', allow_pickle=True) + + ################################################################################################################### + # pie chart + ################################################################################################################### + + label = ['Transit', 'Stationary', + '$\it{Eigenmannia\,sp.}$', '$\it{A.\,macrostomus}\,\u2640$', '$\it{A.\,macrostomus}\,\u2642$'] + size = 0.3 + + true = freq[(names != 'unknown') & (stl)] + false = freq[(names != 'unknown') & (stl == False)] + + true_per = len(true) + false_per = len(false) + + fracs = [true_per, false_per] + + fracs2 = [] + fracs3 = [] + + for p in range(3): + true = 
freq[(names == cat_n[p]) & (freq >= cat_v1[p]) & (freq < cat_v2[p]) & (stl)] + false = freq[(names == cat_n[p]) & (freq >= cat_v1[p]) & (freq < cat_v2[p]) & (stl == False)] + + fracs2.append(len(true)) + fracs3.append(len(false)) + + ################################################################################################################### + # figure 2 + # fig2, ax2 = plt.subplots(figsize=[9 / inch, 7.8 / inch], subplot_kw=dict(aspect="equal")) + # + # vals = np.array([fracs2, fracs3]) + # + # ax2.pie(vals.sum(axis=0), colors=['#2e4053', '#ab1717', '#004d8d'], labels=label[2:], + # startangle=90, radius=0.7, wedgeprops=dict(width=size, edgecolor='w', linewidth=3), + # labeldistance=1.6, textprops=dict(ha='center')) + # + # wedges = ax2.pie(vals.flatten('F'), colors=['#425364', '#818c97', '#b32e2e', '#cc7373', '#195e98', '#6694ba'], + # startangle=90, radius=0.7+size, wedgeprops=dict(width=size, edgecolor='w', alpha=0.82)) + # + # ax2.pie(vals.sum(axis=0), colors=['#2e4053', '#ab1717', '#004d8d'], + # startangle=90, radius=0.7+size, wedgeprops=dict(width=size+size, edgecolor='w', linewidth=3, fill=False)) + # + # ax2.legend(wedges[0][:2], label[:2], + # loc="center left", + # bbox_to_anchor=(0.64, -0.46, 0.5, 1)) + # + # centre_circle = plt.Circle((0, 0), 0.2, color='black', fc='white', linewidth=0) + # fig2.gca().add_artist(centre_circle) + # + # plt.axis('equal') + # plt.tight_layout() + # fig2.savefig(save_path + 'pie.pdf') + # # fig2.savefig('../../../goettingen2021_poster/pictures/pie.pdf') + # + # plt.show() + # + # embed() + # quit() + ################################################################################################################### + # figure 2 + fig2, ax2 = plt.subplots(figsize=[9 / inch, 7.8 / inch], subplot_kw=dict(aspect="equal")) + + vals = np.array([fracs2, fracs3]) + + ax2.pie(vals.sum(axis=1), colors=['#1b2631', '#5d6d7e'], labels=label[:2], + startangle=180, radius=0.7, wedgeprops=dict(width=size, edgecolor='w', linewidth=3), + labeldistance=1.9, textprops=dict(ha='center')) + + wedges = ax2.pie(vals.flatten(), colors=['#3B4A5A', '#b32e2e', '#195e98', + '#818c97', '#cc7373', '#6694ba'], + startangle=180, radius=0.7 + size, wedgeprops=dict(width=size, edgecolor='w', alpha=0.82)) + + ax2.pie(vals.sum(axis=1), colors=color_diffdays[:2], + startangle=180, radius=0.7 + size, + wedgeprops=dict(width=size + size, edgecolor='w', linewidth=3, fill=False)) + + ax2.legend(wedges[0][:3], label[2:], + loc="center left", + bbox_to_anchor=(0.5, -0.35, 0.5, 0.5)) + + centre_circle = plt.Circle((0, 0), 0.2, color='black', fc='white', linewidth=0) + fig2.gca().add_artist(centre_circle) + + plt.axis('equal') + plt.tight_layout() + fig2.savefig(save_path + 'pie.pdf') + # fig2.savefig('../../../goettingen2021_poster/pictures/pie.pdf') + + plt.show() + embed() + + diff --git a/plot_power_mean.py b/plot_power_mean.py new file mode 100644 index 0000000..6428fc4 --- /dev/null +++ b/plot_power_mean.py @@ -0,0 +1,58 @@ +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +import matplotlib.gridspec as gridspec + +from IPython import embed +import helper_functions as hf +from params import * +import os +import datetime + +if __name__ == '__main__': + ################################################################################################################### + # parameter and variables + # plot params + inch = 2.45 + all_power_means = [] + 
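+    # The two histograms below are built manually with np.histogram() and drawn
+    # with ax.bar(); they show raw counts. If a probability density were wanted
+    # instead (as hinted by the commented normalisation lines further down), a
+    # minimal sketch would be:
+    #   n, bin_edges = np.histogram(all_power_means, bins=20)
+    #   n = n / np.sum(n) / (bin_edges[1] - bin_edges[0])  # area sums to 1
+    # or, equivalently, matplotlib's own ax.hist(all_power_means, bins=20, density=True).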
################################################################################################################### + # load all the data of one day + for filename_idx in [1, 4, 6]: + filename = sorted(os.listdir('../../../data/'))[filename_idx] + + power_means = np.load('../data/' + filename + '/power_means.npy', allow_pickle=True) + all_power_means.extend(power_means) + + ########################################################################################################### + # plot power_mean histogram + fig0= plt.figure(figsize=[16 / inch, 12 / inch]) + spec = gridspec.GridSpec(ncols=1, nrows=1, figure=fig0, hspace=0.5, wspace=0.5) + ax0 = fig0.add_subplot(spec[0, 0]) + + n, bin_edges = np.histogram(power_means, bins=20) + # n3 = n3 / np.sum(n3) / (bin_edges3[1] - bin_edges3[0]) + + ax0.bar(bin_edges[:-1] + (bin_edges[1] - bin_edges[0]) / 2, n, width=0.9 * (bin_edges[1] - bin_edges[0])) + + ax0.set_xlabel('Power', fontsize=fs) + ax0.set_ylabel('n', fontsize=fs) + ax0.make_nice_ax() + + ########################################################################################################### + # plot power_mean histogram + fig1 = plt.figure(figsize=[16 / inch, 12 / inch]) + spec = gridspec.GridSpec(ncols=1, nrows=1, figure=fig1, hspace=0.5, wspace=0.5) + ax1 = fig1.add_subplot(spec[0, 0]) + + n, bin_edges = np.histogram(all_power_means, bins=20) + # n3 = n3 / np.sum(n3) / (bin_edges3[1] - bin_edges3[0]) + + ax1.bar(bin_edges[:-1] + (bin_edges[1] - bin_edges[0]) / 2, n, width=0.9 * (bin_edges[1] - bin_edges[0])) + + ax1.set_xlabel('Power', fontsize=fs) + ax1.set_ylabel('n', fontsize=fs) + ax1.make_nice_ax() + + plt.show() + + diff --git a/plot_roaming_events.py b/plot_roaming_events.py new file mode 100644 index 0000000..1ab46c8 --- /dev/null +++ b/plot_roaming_events.py @@ -0,0 +1,512 @@ +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +import matplotlib.colors as mcolors +import matplotlib.gridspec as gridspec +from mpl_toolkits.axes_grid1.inset_locator import inset_axes +from IPython import embed +from scipy import stats, optimize +import pandas as pd +import math +import os +from IPython import embed + +from eventdetection import threshold_crossings, merge_events +import helper_functions as hf +from params import * +from statisitic_functions import significance_bar, cohen_d +import itertools + + +def get_recording_number_in_time_bins(time_bins): + """ + Calculates the number of the recordings in the time bins + + :param time_bins: numpy array with borders of the time bins + :return: time_bins_recording: numpy array with the number of recordings to that specific time bin + """ + # variables + time_bins_recordings = np.zeros(len(time_bins) - 1) + + # load data + for index, filename_idx in enumerate([0, 1, 2, 3]): + filename = sorted(os.listdir('../data/'))[filename_idx] + time_points = np.load('../data/' + filename + '/all_hms.npy', allow_pickle=True) + + # in which bins is this recording, fill time_bins_recordings + unique_time_points = np.unique(np.hstack(time_points)) + for idx, tb in enumerate(time_bins[:-1]): + if np.any((unique_time_points >= tb) & (unique_time_points <= time_bins[idx + 1])): + time_bins_recordings[idx] += 1 + + return time_bins_recordings + + +def func(x, a, tau, c): + return a * np.exp(-x / tau) + c + + +def calc_movement(cbf, i): + movement = cbf[0, :, i] + cbf[1, :, i] + movement[np.isnan(movement)] = 0 + re_mov = cbf[0, :, i] - cbf[1, :, i] + re_mov[np.isnan(re_mov)] = 0 + + return movement, re_mov + +def gauss(t, 
shift, sigma, size, norm = False): + if not hasattr(shift, '__len__'): + g = np.exp(-((t - shift) / sigma) ** 2 / 2) * size + if norm: + g = g / (np.sum(g) * (t[1] - t[0])) + return g + else: + t = np.array([t, ] * len(shift)) + res = np.exp(-((t.transpose() - shift).transpose() / sigma) ** 2 / 2) * size + return res + + +if __name__ == '__main__': + ################################################################################################################### + # parameter and variables + # plot params + inch = 2.45 + + c = 0 + cat_v1 = [0, 0, 750, 0] + cat_v2 = [750, 750, 1000, 1000] + cat_n = ['Eigenmannia', 'Apteronotus', 'Apteronotus'] + + # time + # time_bins 5 min + time_factor = 60 * 60 + time_bins = np.arange(0, 24 * time_factor + 1, bin_len) + + # percent roaming + re = [] + roaming_events = [] + roaming_threshold = 2.1 + roaming_merge = 20 # in minutes + roaming_exclusion = 0.25 + + iai = [] + dauer = [] + wann = [] + max_move = [] + distances = [] + percent = [] + speeds = [] + speed_transit = [] + max_move_boxes = [[], [], [], []] + fish_names = [] + roam_dist = [] + roam_start = [] + conv_arrays = [] + + time_edges = np.array([4.5, 6.5, 16.5, 18.5]) * time_factor + day = time_bins[:-1][(time_bins[:-1] >= time_edges[1]) & (time_bins[:-1] <= time_edges[2])] + dusk = time_bins[:-1][(time_bins[:-1] >= time_edges[2]) & (time_bins[:-1] <= time_edges[3])] + night = time_bins[:-1][(time_bins[:-1] <= time_edges[0]) | (time_bins[:-1] >= time_edges[3])] + dawn = time_bins[:-1][(time_bins[:-1] >= time_edges[0]) & (time_bins[:-1] <= time_edges[1])] + + ################################################################################################################### + # load data + ################################################################################################################### + # load all the data of one day + cbf2 = np.load('../data/cbf15.npy', allow_pickle=True) + stl = np.load('../data/stl.npy', allow_pickle=True) + freq = np.load('../data/f.npy', allow_pickle=True) + names = np.load('../data/n.npy', allow_pickle=True) + speed_average = np.load('../data/speed.npy', allow_pickle=True) + trial_dur = np.load('../data/trial_dur.npy', allow_pickle=True) + trajectories = np.load('../data/trajectories.npy', allow_pickle=True) + trajec_x = np.load('../data/trajec_x.npy', allow_pickle=True) + + ############################################################################################################### + # variables + for index, filename_idx in enumerate([0]): + filename = sorted(os.listdir('../data/'))[filename_idx] + all_Ctime_v = np.load('../data/' + filename + '/all_Ctime_v.npy', allow_pickle=True) + sampling_rate = 1 / np.diff(all_Ctime_v[0])[0] # in sec + + cbf_counter = 0 + ################################################################################################################### + # analysis + for i in range(len(trajectories)): + if names[i] == 'unknown': + continue + + movement, re_mov = calc_movement(cbf2, cbf_counter) + cbf_counter += 1 + + up_times, down_times = threshold_crossings(movement, roaming_threshold) + u, d = merge_events(up_times, down_times, 4 * roaming_merge) + roam_dur = np.diff(np.array([u, d]), axis=0)[0] + ausschlag = [] + distance = [] + for a_idx in range(len(u)): + ausschlag.append(np.nansum(movement[u[a_idx]:u[a_idx] + roam_dur[a_idx] + 1])) + distance.append(np.max(re_mov[u[a_idx]:u[a_idx] + roam_dur[a_idx] + 1])) + ausschlag = np.array(ausschlag) + distance = np.array(distance) + + speed = np.array(ausschlag / (roam_dur / 4)) 
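+        # Units: the movement trace appears to be binned in 15-s intervals
+        # (4 bins per minute), so roam_dur / 4 gives the event duration in
+        # minutes and the merge gap of 4 * roaming_merge bins corresponds to
+        # roaming_merge minutes.
+        # A minimal numpy sketch of how the two imported helpers are used here
+        # (an assumption about the local eventdetection module, not a copy of
+        # its implementation):
+        #   def threshold_crossings(x, thresh):
+        #       above = x > thresh
+        #       up = np.where(~above[:-1] & above[1:])[0] + 1    # upward crossings
+        #       down = np.where(above[:-1] & ~above[1:])[0] + 1  # downward crossings
+        #       return up, down
+        #   def merge_events(onsets, offsets, min_gap):
+        #       # drop boundaries whose inter-event gap is at most min_gap bins
+        #       keep = (onsets[1:] - offsets[:-1]) > min_gap
+        #       return (np.append(onsets[0], onsets[1:][keep]),
+        #               np.append(offsets[:-1][keep], offsets[-1]))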
+ + # roaming events append only for the roaming fish + if not stl[i]: + if np.any(movement > roaming_threshold): + c += 1 + re.append(np.array([up_times, down_times])) + roaming_events.append(np.array([u * 3, d * 3])) # * 3 because than in 5 s intervals + iai.extend(np.diff(sorted(np.hstack([u, d])))) + + # distance + for dist_i in range(len(u)): + try: + roam_traj = trajectories[i][ + (trajec_x[i] >= u[dist_i] * 15) & (trajec_x[i] < d[dist_i] * 15 + 15)] + + di = np.max(roam_traj) - np.min(roam_traj) + roam_dist.append(di) + roam_start.append(roam_traj[0]) + except: + plt.plot(trajec_x[i], trajectories[i]) + plt.plot(np.arange(0, len(movement)) * 5 * 3, movement) + plt.plot(u * 5 * 3, np.ones_like(u), 'o') + plt.plot(d * 5 * 3 + 15, np.ones_like(d), 'o') + embed() + quit() + + # append + dauer.extend(roam_dur / 4) + wann.extend(u * 3) # * 3 because than in 5 s intervals + max_move.extend(ausschlag) + distances.append(distance) + speeds.extend(speed) + + # percent + trial_dur = np.diff([np.where(~np.isnan(cbf2[2, :, cbf_counter - 1]))[0][0], + np.where(~np.isnan(cbf2[2, :, cbf_counter - 1]))[0][-1]])[0] + if names[i] == 'Eigenmannia': + print(np.round(trial_dur / 4, 2), np.round(np.sum(roam_dur) / 4, 2), + np.round(np.sum(roam_dur) / trial_dur * 100, 2)) + + percent.append( + np.array([trial_dur / 4, np.sum(roam_dur) / 4, (np.sum(roam_dur) / 4) / trial_dur * 100])) + else: + speed_transit.append(speed_average[i]) + + print(c) + # embed() + # quit() + ############################################################################################################### + # correction + wann = np.hstack(np.array(wann)) + dauer = np.hstack(np.array(dauer)) + max_move = np.hstack(np.array(max_move)) + speeds = np.hstack(np.array(speeds)) + roam_dist = np.array(roam_dist) + roam_start = np.array(roam_start) + + # all roaming intervals less than 30 seconds excluded + wann = wann[dauer > roaming_exclusion] + max_move = max_move[dauer > roaming_exclusion] + speeds = speeds[dauer > roaming_exclusion] + roam_dist = roam_dist[dauer > roaming_exclusion] + roam_start = roam_start[dauer > roaming_exclusion] + dauer = dauer[dauer > roaming_exclusion] + print('median dauer: ', np.median(dauer), ' 25, 75: ', np.percentile(dauer, [25, 75])) + ############################################################################################################### + # statistic + n, bin_edges = np.histogram(iai, bins=np.arange(0, np.max(iai) + 1, 1)) + b, a, r, p, std = stats.linregress(dauer, max_move) + + ############################################################################################################### + # roll time axis + start = [] + stop = [] + for j in range(len(roaming_events)): + start.extend(roaming_events[j][0]) + stop.extend(roaming_events[j][1]) + + N_rec_time_bins = get_recording_number_in_time_bins(time_bins[::int((60 / bin_len) * 60)]) + + # rolled time axis for nicer plot midnight in the middle start noon + N_start, bin_edges = np.histogram(np.array(start) * 5, bins=time_bins[::int((60 / bin_len) * 60)]) + N_stop, bin_edges2 = np.histogram(np.array(stop) * 5, bins=time_bins[::int((60 / bin_len) * 60)]) + rolled_start = np.roll(N_start / N_rec_time_bins, int(len(N_start) / 2)) + rolled_stop = np.roll(N_stop / N_rec_time_bins, int(len(N_stop) / 2)) + rolled_bins = (bin_edges[:-1] / time_factor) + 0.5 + + ############################################################################################################### + # figure 1: max_channel_changes per time zone and per duration of the roaming event 
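+    # In the per-zone loop below, speed vs. event duration is fitted with the
+    # exponential model func(x) = a * exp(-x / tau) + c defined above; the
+    # reported uncertainty of tau is the 1-sigma estimate from the covariance
+    # matrix, i.e.
+    #   popt, pcov = optimize.curve_fit(func, x_dauer, y_speed)
+    #   a_fit, tau_fit, c_fit = popt
+    #   perr = np.sqrt(np.diag(pcov))   # perr[1] is the standard error of tau
+    # The distance PDFs drawn on ax6 are kernel density estimates: each roaming
+    # distance contributes a Gaussian (sigma = 1 m via gauss()) and the summed
+    # curve is renormalised to unit area.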
+ fig = plt.figure(constrained_layout=True, figsize=[15 / inch, 17 / inch]) + gs = gridspec.GridSpec(ncols=6, nrows=4, figure=fig, hspace=0.01, wspace=0.01, + height_ratios=[1, 1, 1, 1], width_ratios=[1, 1, 1, 1, 1, 1], left=0.1, bottom=0.15, right=0.95, + top=0.95) + + ax0 = fig.add_subplot(gs[0, :]) + ax1 = fig.add_subplot(gs[1, :3]) + ax2 = fig.add_subplot(gs[1, 3:], sharex=ax1) + ax3 = fig.add_subplot(gs[2, :2], sharey=ax2) + ax4 = fig.add_subplot(gs[2, 2:4], sharey=ax2) + ax5 = fig.add_subplot(gs[2, 4:]) + ax6 = fig.add_subplot(gs[3, :]) + + # axins = inset_axes(ax1, width='30%', height='60%') + + # bar plot + ax0.bar(rolled_bins, rolled_start, color=color2[4]) + print('bar plot') + print('day: mean ', np.round(np.mean([rolled_start[:6], rolled_start[18:]]), 2), + ' std: ', np.round(np.std([rolled_start[:6], rolled_start[18:]]), 2)) + + print('night: mean ', np.round(np.mean(rolled_start[6:18]), 2), + ' std: ', np.round(np.std(rolled_start[6:18]), 2)) + + ax0.plot([16.5, 6.5], [20, 20], color=color_diffdays[0], lw=7) + ax0.plot([16.5, 18.5], [20, 20], color=color_diffdays[3], lw=7) + ax0.plot([4.5, 6.5], [20, 20], color=color_diffdays[3], lw=7) + + ############################################################################################################### + # curve_fit: tau, std, n + curvefit_stat = [] + + xdata = np.linspace(0.0, 10., 500) + y_speeds = [] + for plot_zone, color_zone, day_zone, pos_zone in \ + zip([day, dusk, night, dawn], [0, 1, 2, 3], ['day', 'dusk', 'night', 'dawn'], [1, 2, 3, 4]): + # boxplot ax1 + props_e = dict(linewidth=2, color=color_dadunida[color_zone]) + bp = ax1.boxplot(dauer[np.in1d(wann * 5, plot_zone)], positions=[pos_zone], widths=0.7, + showfliers=False, vert=False, + boxprops=props_e, medianprops=props_e, capprops=props_e, whiskerprops=props_e) + + x_n = [item.get_xdata() for item in bp['whiskers']][1][1] + n = len(dauer[np.in1d(wann * 5, plot_zone)]) + ax1.text(x_n + 2, pos_zone, str(n), ha='left', va='center') + print('dauer: ', day_zone, np.median(dauer[np.in1d(wann * 5, plot_zone)]), + ' 25, 75: ', np.percentile(dauer[np.in1d(wann * 5, plot_zone)], [25, 75])) + + # curve fit + x_dauer = dauer[dauer <= 10][np.in1d(wann[dauer <= 10] * 5, plot_zone)] + y_speed = speeds[dauer <= 10][np.in1d(wann[dauer <= 10] * 5, plot_zone)] + y_speeds.append(y_speed) + + popt, pcov = optimize.curve_fit(func, x_dauer, y_speed) + perr = np.sqrt(np.diag(pcov)) + print(day_zone, popt, 'perr', perr[1]) + curvefit_stat.append(np.array([popt[1], perr[1], n])) + + # plot dauer vs speed + x_dauer = dauer[dauer <= 100][np.in1d(wann[dauer <= 100] * 5, plot_zone)] + y_speed = speeds[dauer <= 100][np.in1d(wann[dauer <= 100] * 5, plot_zone)] + ax2.plot(x_dauer, y_speed, 'o', alpha=0.3, color=color_dadunida[color_zone]) + + ax3.plot(x_dauer, y_speed, 'o', alpha=0.3, color=color_dadunida[color_zone]) + + # plot curve fit + ax4.plot(xdata, func(xdata, *popt), '-', color=color_dadunida[color_zone], label=day_zone) + ax4.set_ylim(ax2.get_ylim()) + + # distance + pdf_x_dist = np.arange(0, 15, 0.1) + conv_array = np.zeros(len(pdf_x_dist)) + + for e in roam_dist[np.in1d(wann * 5, plot_zone)]: + conv_array += gauss(pdf_x_dist, e, 1, 0.2, norm=True) + + conv_array = conv_array / np.sum(conv_array) / 0.1 + conv_arrays.append(conv_array) + ax6.plot(pdf_x_dist, conv_array, color=color_dadunida[color_zone], label=day_zone) + ax6.plot([pdf_x_dist[np.cumsum(conv_array) < 5][-1], pdf_x_dist[np.cumsum(conv_array) < 5][-1]], + [-1.0, conv_array[np.cumsum(conv_array) < 5][-1]], 
color=color_dadunida[color_zone]) + # print(day_zone, '50', pdf_x_dist[np.cumsum(conv_array) < 5][-1], + # '25', pdf_x_dist[np.cumsum(conv_array) < 2.5][-1], '75', pdf_x_dist[np.cumsum(conv_array) < 7.5][-1]) + + + curvefit_stat = np.array(curvefit_stat) + # plot std of tau + ax5.bar([0, 1, 2, 3], curvefit_stat[:, 0], yerr=curvefit_stat[:, 1], color=color2[4]) + + ############################################################################################################### + # statistic + day_group = [day, dusk, night, dawn] + for subset in itertools.combinations([0, 1, 2, 3], 2): + mean1, std1, n1 = curvefit_stat[subset[0]] + mean2, std2, n2 = curvefit_stat[subset[1]] + t, p = stats.ttest_ind_from_stats(mean1, std1, n1, mean2, std2, n2) + d = cohen_d(y_speeds[subset[0]], y_speeds[subset[1]]) + print(['day', 'dusk', 'night', 'dawn'][subset[0]], ['day', 'dusk', 'night', 'dawn'][subset[1]], 't: ', + np.round(t, 2), 'p: ', np.round(p, 4), 'd: ', d) + + print(stats.mannwhitneyu(dauer[dauer <= 100][np.in1d(wann[dauer <= 100] * 5, day_group[subset[0]])], + dauer[dauer <= 100][np.in1d(wann[dauer <= 100] * 5, day_group[subset[1]])])) + + print(np.round(stats.ks_2samp(np.cumsum(conv_arrays[subset[0]]), np.cumsum(conv_arrays[subset[1]])), 3)) + + if subset[0] == 0 and subset[1] == 2: + significance_bar(ax5, p, None, subset[0], subset[1], 3.) + + ############################################################################################################### + # labels + ax0.set_ylabel('# Roaming Events', fontsize=fs) + ax0.set_xticks([0, 6, 12, 18, 24]) + ax0.set_xticklabels(['12:00', '18:00', '00:00', '06:00', '12:00']) + ax0.set_xlabel('Time', fontsize=fs) + + ax1.set_yticks([1, 2, 3, 4]) + ax1.set_yticklabels(['day', 'dusk', 'night', 'dawn']) + ax1.set_xlabel('Duration [min]', fontsize=fs) + ax1.invert_yaxis() + + ax2.set_xlabel('Duration [min]', fontsize=fs) + ax2.set_ylabel('Speed [m/min]', fontsize=fs) + ax2.set_ylim([0, 27]) + + ax3.set_ylabel('Speed [m/min]', fontsize=fs) + ax3.set_xlabel('Duration [min]', fontsize=fs) + ax3.set_xlim([0, 10]) + + ax4.set_xlabel('Duration [min]', fontsize=fs) + ax4.set_xlim([0, 10]) + + ax5.set_xticks([0, 1, 2, 3]) + ax5.set_xticklabels(['day', 'dusk', 'night', 'dawn'], rotation=45) + ax5.set_ylabel(r'$\tau$') + + ax6.set_xlabel('Distance [m]') + ax6.set_ylabel('PDF') + ax6.set_xlim([0, 15]) + ax6.set_ylim([-0.01, 0.3]) + + tagx = [-0.05, -0.07, -0.07, -0.17, -0.17, -0.17, -0.05] + for idx, ax in enumerate([ax0, ax1, ax2, ax3, ax4, ax5, ax6]): + ax.make_nice_ax() + ax.text(tagx[idx], 1.05, chr(ord('A') + idx), transform=ax.transAxes, fontsize='large') + + # fig.savefig(save_path + 'roaming_events.pdf') + # fig.savefig(save_path_pres + 'roaming_events.pdf') + + plt.show() + + # df = pd.DataFrame({'duration': dauer, 'speed': speeds, 'distance': roam_dist}) + ############################################################################################################### + # figure supplements + fig = plt.figure(constrained_layout=True, figsize=[15 / inch, 12 / inch]) + gs = gridspec.GridSpec(ncols=2, nrows=2, figure=fig, hspace=0.01, wspace=0.01, + height_ratios=[1, 1], width_ratios=[1, 1], left=0.1, bottom=0.15, right=0.95, + top=0.95) + + ax0 = fig.add_subplot(gs[0, 0]) + ax1 = fig.add_subplot(gs[0, 1]) + ax2 = fig.add_subplot(gs[1, 0]) + ax3 = fig.add_subplot(gs[1, 1]) + + for plot_zone, color_zone, day_zone, pos_zone, ax in \ + zip([day, dusk, night, dawn], [0, 1, 2, 3], ['day', 'dusk', 'night', 'dawn'], [1, 2, 3, 4], [ax0, ax1, ax2, ax3]): + + x_dauer = 
dauer[dauer <= 10][np.in1d(wann[dauer <= 10] * 5, plot_zone)] + y_speed = speeds[dauer <= 10][np.in1d(wann[dauer <= 10] * 5, plot_zone)] + + popt, pcov = optimize.curve_fit(func, x_dauer, y_speed) + + # plot dauer vs speed + ax.plot(xdata, func(xdata, *popt), '-', color=color_dadunida[color_zone], label=day_zone) + ax.plot(x_dauer, y_speed, 'o', alpha=0.3, color=color_dadunida[color_zone]) + print(len(x_dauer)) + ax.set_ylabel('Speed [m/min]', fontsize=fs) + ax.set_xlabel('Duration [min]', fontsize=fs) + ax.make_nice_ax() + ax.text(-0.218, 0.9, chr(ord('A') + color_zone), transform=ax.transAxes, fontsize='large') + ax.text(0.8, 0.9, day_zone, transform=ax.transAxes, fontsize='large') + ax.set_ylim([0, 30]) + # fig.savefig(save_path + 'supplements_roaming.pdf') + + embed() + quit() + + + + + ############################################################################################################### + # figure 1: max_channel_changes per time zone and per duration of the roaming event + fig = plt.figure(constrained_layout=True, figsize=[15 / inch, 15 / inch]) + gs = gridspec.GridSpec(ncols=2, nrows=3, figure=fig, hspace=0.01, wspace=0.01, + height_ratios=[1, 1, 1], width_ratios=[1, 1], left=0.1, bottom=0.15, right=0.95, + top=0.95) + + ax0 = fig.add_subplot(gs[0, :]) + ax1 = fig.add_subplot(gs[1, 0]) + ax2 = fig.add_subplot(gs[1, 1], sharex=ax1) + ax6 = fig.add_subplot(gs[2, :]) + + # bar plot + ax0.bar(rolled_bins, rolled_start, color=color2[4]) + + ax0.plot([16.5, 6.5], [20, 20], color=color_diffdays[0], lw=7) + ax0.plot([16.5, 18.5], [20, 20], color=color_diffdays[3], lw=7) + ax0.plot([4.5, 6.5], [20, 20], color=color_diffdays[3], lw=7) + + ############################################################################################################### + # curve_fit: tau, std, n + for plot_zone, color_zone, day_zone, pos_zone in \ + zip([day, dusk, night, dawn], [0, 1, 2, 3], ['day', 'dusk', 'night', 'dawn'], [1, 2, 3, 4]): + # boxplot ax1 + props_e = dict(linewidth=2, color=color_dadunida[color_zone]) + bp = ax1.boxplot(dauer[np.in1d(wann * 5, plot_zone)], positions=[pos_zone], widths=0.7, + showfliers=False, vert=False, + boxprops=props_e, medianprops=props_e, capprops=props_e, whiskerprops=props_e) + + x_n = [item.get_xdata() for item in bp['whiskers']][1][1] + n = len(dauer[np.in1d(wann * 5, plot_zone)]) + ax1.text(x_n + 2, pos_zone, str(n), ha='left', va='center') + + # plot dauer vs speed + x_dauer = dauer[dauer <= 100][np.in1d(wann[dauer <= 100] * 5, plot_zone)] + y_speed = speeds[dauer <= 100][np.in1d(wann[dauer <= 100] * 5, plot_zone)] + ax2.plot(x_dauer, y_speed, 'o', alpha=0.3, color=color_dadunida[color_zone]) + + pdf_x_dist = np.arange(0, 15, 0.1) + conv_array = np.zeros(len(pdf_x_dist)) + + for e in roam_dist[np.in1d(wann * 5, plot_zone)]: + conv_array += gauss(pdf_x_dist, e, 1, 0.2, norm=True) + + conv_array = conv_array / np.sum(conv_array) / 0.1 + conv_arrays.append(conv_array) + print(day_zone, 'percentil 25,50,75:', np.round(np.percentile(conv_array, [25,50,75]), 4)) + + ax6.plot(pdf_x_dist, conv_array, color=color_dadunida[color_zone], label=day_zone) + ############################################################################################################### + # labels + ax0.set_ylabel('# Roaming Events', fontsize=fs) + ax0.set_xticks([0, 6, 12, 18, 24]) + ax0.set_xticklabels(['12:00', '18:00', '00:00', '06:00', '12:00']) + ax0.set_xlabel('Time', fontsize=fs) + + ax1.set_yticks([1, 2, 3, 4]) + ax1.set_yticklabels(['day', 'dusk', 'night', 'dawn']) + 
ax1.set_xlabel('Duration [min]', fontsize=fs) + ax1.invert_yaxis() + + ax2.set_xlabel('Duration [min]', fontsize=fs) + ax2.set_ylabel('Speed [m/min]', fontsize=fs) + ax2.set_ylim([0, 27]) + + ax6.set_xlabel('Distance [m]') + ax6.set_ylabel('PDF') + # ax6.set_xscale('symlog') + # ax6.set_xlim([0, 15]) + + tagx = [-0.05, -0.1, -0.1, -0.05] + for idx, ax in enumerate([ax0, ax1, ax2, ax6]): + ax.make_nice_ax() + ax.text(tagx[idx], 1.05, chr(ord('A') + idx), transform=ax.transAxes, fontsize='large') + + fig.savefig('../../../goettingen2021_poster/pictures/roaming_events.pdf') + + plt.show() + + embed() + quit() \ No newline at end of file