diff --git a/.gitignore b/.gitignore index 4ec96d9..3c2ae5f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,10 @@ -# Created by https://www.toptal.com/developers/gitignore/api/python,visualstudiocode +# Created by https://www.toptal.com/developers/gitignore/api/python,visualstudiocode # Edit at https://www.toptal.com/developers/gitignore?templates=python,visualstudiocode # Own stuff data env +output # Mac Stuff *.DS_Store @@ -13,6 +14,7 @@ env __pycache__/ *.py[cod] *$py.class +poster/main.pdf # C extensions *.so diff --git a/README.md b/README.md index 24a0519..959d09a 100644 --- a/README.md +++ b/README.md @@ -1,64 +1,248 @@ -# Chirp detection - GP2023 -## Git-Repository and commands - -- Go to the [Bendalab Git-Server](https://whale.am28.uni-tuebingen.de/git/) (https://whale.am28.uni-tuebingen.de/git/) -- Create your own account (and tell me ;D) - * I'll invite you the repository -- Clone the repository -- -```sh -git clone https://whale.am28.uni-tuebingen.de/git/raab/GP2023_chirp_detection.git -``` - -## Basic git commands - -- pull changes in git -```shell -git pull origin -``` -- commit chances -```shell -git commit -m '' file # commit one file -git commit -a -m '' # commit all files -``` -- push commits -```shell -git push origin -``` - -## Branches -Use branches to work on specific topics (e.g. 'algorithm', 'analysis', 'writing', ore even more specific ones) and merge -them into Master-Branch when it works are up to your expectations. - -The "master" branch should always contain a working/correct version of your project. 
- -- Create/change into branches -```shell -# list all branches (highlight active branch) -git banch -a -# switch into existing -git checkout -# switch into new branch -git checkout master -git checkout -b -``` - - -- Re-merging with master branch -1) get current version of master and implement it into branch -```shell -git checkout master -git pull origin master -git checkout -git rebase master -``` -This resets you branch to the fork-point, executes all commits of the current master before adding the commits of you -branch. You may have to resolve potential conflicts. Afterwards commit the corrected version and push it to your branch. - -2) Update master branch master -- correct way: Create -```shell -git checkout master -git merge -git push origin master -``` + + + + + + + + + + + + + + + + + + +
+
+ + Logo + + +

chirpdetector

+ +

+ An algorithm to detect the chirps of weakly electric fish. +
+ Explore the docs » +
+
+ View Demo + · + Report Bug + · + Request Feature +

+
+ + + + +
+ Table of Contents +
    +
  1. + About The Project + +
  2. +
  3. + Getting Started + +
  4. +
  5. Usage
  6. +
  7. Roadmap
  8. +
  9. Contributing
  10. +
  11. License
  12. +
  13. Contact
  14. +
  15. Acknowledgments
  16. +
+
+ + + + +## About The Project + +[![Product Name Screen Shot][product-screenshot]](https://example.com) + +Here's a blank template to get started: To avoid retyping too much info. Do a search and replace with your text editor for the following: `github_username`, `repo_name`, `twitter_handle`, `linkedin_username`, `email_client`, `email`, `project_title`, `project_description` + +

(back to top)

+ + + + + + + + + + + + + + +

(back to top)

+ + + + +## Getting Started + +This is an example of how you may give instructions on setting up your project locally. +To get a local copy up and running follow these simple example steps. + + + + + + + + + +### Installation + +1. Get a free API Key at [https://example.com](https://example.com) +2. Clone the repo + ```sh + git clone https://github.com/github_username/repo_name.git + ``` +3. Install NPM packages + ```sh + npm install + ``` +4. Enter your API in `config.js` + ```js + const API_KEY = 'ENTER YOUR API'; + ``` + +

(back to top)

+ + + + +## Usage + +Use this space to show useful examples of how a project can be used. Additional screenshots, code examples and demos work well in this space. You may also link to more resources. + +_For more examples, please refer to the [Documentation](https://example.com)_ + +

(back to top)

+ + + + +## Roadmap + +- [ ] Feature 1 +- [ ] Feature 2 +- [ ] Feature 3 + - [ ] Nested Feature + +See the [open issues](https://github.com/github_username/repo_name/issues) for a full list of proposed features (and known issues). + +

(back to top)

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Contact + +Your Name - [@twitter_handle](https://twitter.com/twitter_handle) - email@email_client.com + +Project Link: [https://github.com/github_username/repo_name](https://github.com/github_username/repo_name) + +

(back to top)

+ + + + +## Acknowledgments + +* []() +* []() +* []() + +

(back to top)

+ + + + + +[contributors-shield]: https://img.shields.io/github/contributors/github_username/repo_name.svg?style=for-the-badge +[contributors-url]: https://github.com/github_username/repo_name/graphs/contributors +[forks-shield]: https://img.shields.io/github/forks/github_username/repo_name.svg?style=for-the-badge +[forks-url]: https://github.com/github_username/repo_name/network/members +[stars-shield]: https://img.shields.io/github/stars/github_username/repo_name.svg?style=for-the-badge +[stars-url]: https://github.com/github_username/repo_name/stargazers +[issues-shield]: https://img.shields.io/github/issues/github_username/repo_name.svg?style=for-the-badge +[issues-url]: https://github.com/github_username/repo_name/issues +[license-shield]: https://img.shields.io/github/license/github_username/repo_name.svg?style=for-the-badge +[license-url]: https://github.com/github_username/repo_name/blob/master/LICENSE.txt +[linkedin-shield]: https://img.shields.io/badge/-LinkedIn-black.svg?style=for-the-badge&logo=linkedin&colorB=555 +[linkedin-url]: https://linkedin.com/in/linkedin_username +[product-screenshot]: images/screenshot.png +[Next.js]: https://img.shields.io/badge/next.js-000000?style=for-the-badge&logo=nextdotjs&logoColor=white +[Next-url]: https://nextjs.org/ +[React.js]: https://img.shields.io/badge/React-20232A?style=for-the-badge&logo=react&logoColor=61DAFB +[React-url]: https://reactjs.org/ +[Vue.js]: https://img.shields.io/badge/Vue.js-35495E?style=for-the-badge&logo=vuedotjs&logoColor=4FC08D +[Vue-url]: https://vuejs.org/ +[Angular.io]: https://img.shields.io/badge/Angular-DD0031?style=for-the-badge&logo=angular&logoColor=white +[Angular-url]: https://angular.io/ +[Svelte.dev]: https://img.shields.io/badge/Svelte-4A4A55?style=for-the-badge&logo=svelte&logoColor=FF3E00 +[Svelte-url]: https://svelte.dev/ +[Laravel.com]: https://img.shields.io/badge/Laravel-FF2D20?style=for-the-badge&logo=laravel&logoColor=white +[Laravel-url]: https://laravel.com 
+[Bootstrap.com]: https://img.shields.io/badge/Bootstrap-563D7C?style=for-the-badge&logo=bootstrap&logoColor=white +[Bootstrap-url]: https://getbootstrap.com +[JQuery.com]: https://img.shields.io/badge/jQuery-0769AD?style=for-the-badge&logo=jquery&logoColor=white +[JQuery-url]: https://jquery.com + diff --git a/README1.md b/README1.md new file mode 100644 index 0000000..24a0519 --- /dev/null +++ b/README1.md @@ -0,0 +1,64 @@ +# Chirp detection - GP2023 +## Git-Repository and commands + +- Go to the [Bendalab Git-Server](https://whale.am28.uni-tuebingen.de/git/) (https://whale.am28.uni-tuebingen.de/git/) +- Create your own account (and tell me ;D) + * I'll invite you the repository +- Clone the repository +- +```sh +git clone https://whale.am28.uni-tuebingen.de/git/raab/GP2023_chirp_detection.git +``` + +## Basic git commands + +- pull changes in git +```shell +git pull origin +``` +- commit chances +```shell +git commit -m '' file # commit one file +git commit -a -m '' # commit all files +``` +- push commits +```shell +git push origin +``` + +## Branches +Use branches to work on specific topics (e.g. 'algorithm', 'analysis', 'writing', ore even more specific ones) and merge +them into Master-Branch when it works are up to your expectations. + +The "master" branch should always contain a working/correct version of your project. + +- Create/change into branches +```shell +# list all branches (highlight active branch) +git banch -a +# switch into existing +git checkout +# switch into new branch +git checkout master +git checkout -b +``` + + +- Re-merging with master branch +1) get current version of master and implement it into branch +```shell +git checkout master +git pull origin master +git checkout +git rebase master +``` +This resets you branch to the fork-point, executes all commits of the current master before adding the commits of you +branch. You may have to resolve potential conflicts. Afterwards commit the corrected version and push it to your branch. 
+ +2) Update master branch master +- correct way: Create +```shell +git checkout master +git merge +git push origin master +``` diff --git a/assets/logo.png b/assets/logo.png new file mode 100644 index 0000000..234652f Binary files /dev/null and b/assets/logo.png differ diff --git a/assets/logo.svg b/assets/logo.svg new file mode 100644 index 0000000..b34ed6c --- /dev/null +++ b/assets/logo.svg @@ -0,0 +1,1184 @@ + + + + diff --git a/code/behavior.py b/code/behavior.py index da02838..71c0926 100644 --- a/code/behavior.py +++ b/code/behavior.py @@ -1,16 +1,21 @@ -from pathlib import Path +import os +import os import numpy as np +import matplotlib.pyplot as plt + from IPython import embed from pandas import read_csv +from modules.logger import makeLogger +from scipy.ndimage import gaussian_filter1d - - +logger = makeLogger(__name__) class Behavior: """Load behavior data from csv file as class attributes Attributes ---------- + behavior: 0: chasing onset, 1: chasing offset, 2: physical contact behavior_type: behavioral_category: comment_start: @@ -20,23 +25,37 @@ class Behavior: media_file: observation_date: observation_id: - start_s: - stop_s: + start_s: start time of the event in seconds + stop_s: stop time of the event in seconds total_length: """ - def __init__(self, datapath: str) -> None: - csv_file = str(sorted(Path(datapath).glob('**/*.csv'))[0]) - self.dataframe = read_csv(csv_file, delimiter=',') - for key in self.dataframe: - if ' ' in key: - new_key = key.replace(' ', '_') - if '(' in new_key: - new_key = new_key.replace('(', '') - new_key = new_key.replace(')', '') - new_key = new_key.lower() - setattr(self, new_key, np.array(self.dataframe[key])) + def __init__(self, folder_path: str) -> None: + + + LED_on_time_BORIS = np.load(os.path.join(folder_path, 'LED_on_time.npy'), allow_pickle=True) + self.time = np.load(os.path.join(folder_path, "times.npy"), allow_pickle=True) + csv_filename = [f for f in os.listdir(folder_path) if f.endswith('.csv')][0] # 
check if there are more than one csv file + self.dataframe = read_csv(os.path.join(folder_path, csv_filename)) + self.chirps = np.load(os.path.join(folder_path, 'chirps.npy'), allow_pickle=True) + self.chirps_ids = np.load(os.path.join(folder_path, 'chirps_ids.npy'), allow_pickle=True) + for k, key in enumerate(self.dataframe.keys()): + key = key.lower() + if ' ' in key: + key = key.replace(' ', '_') + if '(' in key: + key = key.replace('(', '') + key = key.replace(')', '') + setattr(self, key, np.array(self.dataframe[self.dataframe.keys()[k]])) + + last_LED_t_BORIS = LED_on_time_BORIS[-1] + real_time_range = self.time[-1] - self.time[0] + factor = 1.034141 + shift = last_LED_t_BORIS - real_time_range * factor + self.start_s = (self.start_s - shift) / factor + self.stop_s = (self.stop_s - shift) / factor + """ 1 - chasing onset 2 - chasing offset @@ -64,12 +83,191 @@ temporal encpding needs to be corrected ... not exactly 25FPS. behavior = data['Behavior'] """ +def correct_chasing_events( + category: np.ndarray, + timestamps: np.ndarray + ) -> tuple[np.ndarray, np.ndarray]: + + onset_ids = np.arange( + len(category))[category == 0] + offset_ids = np.arange( + len(category))[category == 1] + + # Check whether on- or offset is longer and calculate length difference + if len(onset_ids) > len(offset_ids): + len_diff = len(onset_ids) - len(offset_ids) + longer_array = onset_ids + shorter_array = offset_ids + logger.info(f'Onsets are greater than offsets by {len_diff}') + elif len(onset_ids) < len(offset_ids): + len_diff = len(offset_ids) - len(onset_ids) + longer_array = offset_ids + shorter_array = onset_ids + logger.info(f'Offsets are greater than offsets by {len_diff}') + elif len(onset_ids) == len(offset_ids): + logger.info('Chasing events are equal') + return category, timestamps + + # Correct the wrong chasing events; delete double events + wrong_ids = [] + for i in range(len(longer_array)-(len_diff+1)): + if (shorter_array[i] > longer_array[i]) & 
(shorter_array[i] < longer_array[i+1]): + pass + else: + wrong_ids.append(longer_array[i]) + longer_array = np.delete(longer_array, i) + + category = np.delete( + category, wrong_ids) + timestamps = np.delete( + timestamps, wrong_ids) + return category, timestamps + + +def event_triggered_chirps( + event: np.ndarray, + chirps:np.ndarray, + time_before_event: int, + time_after_event: int + )-> tuple[np.ndarray, np.ndarray]: + + + event_chirps = [] # chirps that are in specified window around event + centered_chirps = [] # timestamps of chirps around event centered on the event timepoint + + for event_timestamp in event: + start = event_timestamp - time_before_event # timepoint of window start + stop = event_timestamp + time_after_event # timepoint of window ending + chirps_around_event = [c for c in chirps if (c >= start) & (c <= stop)] # get chirps that are in a -5 to +5 sec window around event + event_chirps.append(chirps_around_event) + if len(chirps_around_event) == 0: + continue + else: + centered_chirps.append(chirps_around_event - event_timestamp) + centered_chirps = np.concatenate(centered_chirps, axis=0) # convert list of arrays to one array for plotting + + return event_chirps, centered_chirps + + def main(datapath: str): - # behabvior is pandas dataframe with all the data - behavior = Behavior(datapath) + + # behavior is pandas dataframe with all the data + bh = Behavior(datapath) + + # chirps are not sorted in time (presumably due to prior groupings) + # get and sort chirps and corresponding fish_ids of the chirps + chirps = bh.chirps[np.argsort(bh.chirps)] + chirps_fish_ids = bh.chirps_ids[np.argsort(bh.chirps)] + category = bh.behavior + timestamps = bh.start_s + + # Correct for doubles in chasing on- and offsets to get the right on-/offset pairs + # Get rid of tracking faults (two onsets or two offsets after another) + category, timestamps = correct_chasing_events(category, timestamps) + + # split categories + chasing_onset = timestamps[category == 0] 
+ chasing_offset = timestamps[category == 1] + physical_contact = timestamps[category == 2] + + # First overview plot + fig1, ax1 = plt.subplots() + ax1.scatter(chirps, np.ones_like(chirps), marker='*', color='royalblue', label='Chirps') + ax1.scatter(chasing_onset, np.ones_like(chasing_onset)*2, marker='.', color='forestgreen', label='Chasing onset') + ax1.scatter(chasing_offset, np.ones_like(chasing_offset)*2.5, marker='.', color='firebrick', label='Chasing offset') + ax1.scatter(physical_contact, np.ones_like(physical_contact)*3, marker='x', color='black', label='Physical contact') + plt.legend() + # plt.show() + plt.close() + + # Get fish ids + fish_ids = np.unique(chirps_fish_ids) + + ##### Chasing triggered chirps CTC ##### + # Evaluate how many chirps were emitted in specific time window around the chasing onset events + + # Iterate over chasing onsets (later over fish) + time_around_event = 5 # time window around the event in which chirps are counted, 5 = -5 to +5 sec around event + #### Loop crashes at concatenate in function #### + # for i in range(len(fish_ids)): + # fish = fish_ids[i] + # chirps = chirps[chirps_fish_ids == fish] + # print(fish) + + chasing_chirps, centered_chasing_chirps = event_triggered_chirps(chasing_onset, chirps, time_around_event, time_around_event) + physical_chirps, centered_physical_chirps = event_triggered_chirps(physical_contact, chirps, time_around_event, time_around_event) + + # Kernel density estimation ??? 
+ # centered_chasing_chirps_convolved = gaussian_filter1d(centered_chasing_chirps, 5) + + # centered_chasing = chasing_onset[0] - chasing_onset[0] ## get the 0 timepoint for plotting; set one chasing event to 0 + offsets = [0.5, 1] + fig4, ax4 = plt.subplots(figsize=(20 / 2.54, 12 / 2.54), constrained_layout=True) + ax4.eventplot(np.array([centered_chasing_chirps, centered_physical_chirps]), lineoffsets=offsets, linelengths=0.25, colors=['g', 'r']) + ax4.vlines(0, 0, 1.5, 'tab:grey', 'dashed', 'Timepoint of event') + # ax4.plot(centered_chasing_chirps_convolved) + ax4.set_yticks(offsets) + ax4.set_yticklabels(['Chasings', 'Physical \n contacts']) + ax4.set_xlabel('Time[s]') + ax4.set_ylabel('Type of event') + plt.show() + + # Associate chirps to inidividual fish + fish1 = chirps[chirps_fish_ids == fish_ids[0]] + fish2 = chirps[chirps_fish_ids == fish_ids[1]] + fish = [len(fish1), len(fish2)] + + ### Plots: + # 1. All recordings, all fish, all chirps + # One CTC, one PTC + # 2. All recordings, only winners + # One CTC, one PTC + # 3. 
All recordings, all losers + # One CTC, one PTC + + #### Chirp counts per fish general ##### + fig2, ax2 = plt.subplots() + x = ['Fish1', 'Fish2'] + width = 0.35 + ax2.bar(x, fish, width=width) + ax2.set_ylabel('Chirp count') + # plt.show() + plt.close() + + + ##### Count chirps emitted during chasing events and chirps emitted out of chasing events ##### + chirps_in_chasings = [] + for onset, offset in zip(chasing_onset, chasing_offset): + chirps_in_chasing = [c for c in chirps if (c > onset) & (c < offset)] + chirps_in_chasings.append(chirps_in_chasing) + + # chirps out of chasing events + counts_chirps_chasings = 0 + chasings_without_chirps = 0 + for i in chirps_in_chasings: + if i: + chasings_without_chirps += 1 + else: + counts_chirps_chasings += 1 + + # chirps in chasing events + fig3 , ax3 = plt.subplots() + ax3.bar(['Chirps in chasing events', 'Chasing events without Chirps'], [counts_chirps_chasings, chasings_without_chirps], width=width) + plt.ylabel('Count') + # plt.show() + plt.close() + + # comparison between chasing events with and without chirps + + + + embed() + exit() + if __name__ == '__main__': # Path to the data - datapath = '../data/mount_data/2020-03-13-10_00/' + datapath = '../data/mount_data/2020-05-13-10_00/' + datapath = '../data/mount_data/2020-05-13-10_00/' main(datapath) diff --git a/code/chirpdetection.py b/code/chirpdetection.py old mode 100644 new mode 100755 index ad733da..541015d --- a/code/chirpdetection.py +++ b/code/chirpdetection.py @@ -1,67 +1,313 @@ -import os +from itertools import compress +from dataclasses import dataclass import numpy as np from IPython import embed import matplotlib.pyplot as plt +import matplotlib.gridspec as gr from scipy.signal import find_peaks -from scipy.ndimage import gaussian_filter1d -from thunderfish.dataloader import DataLoader from thunderfish.powerspectrum import spectrogram, decibel from sklearn.preprocessing import normalize from modules.filters import bandpass_filter, envelope, 
highpass_filter -from modules.filehandling import ConfLoader, LoadData +from modules.filehandling import ConfLoader, LoadData, make_outputdir from modules.plotstyle import PlotStyle +from modules.logger import makeLogger +from modules.datahandling import ( + flatten, + purge_duplicates, + group_timestamps, + instantaneous_frequency, + minmaxnorm +) + +logger = makeLogger(__name__) ps = PlotStyle() -def instantaneos_frequency( - signal: np.ndarray, samplerate: int -) -> tuple[np.ndarray, np.ndarray]: - """ - Compute the instantaneous frequency of a signal. - - Parameters - ---------- - signal : np.ndarray - Signal to compute the instantaneous frequency from. - samplerate : int - Samplerate of the signal. - - Returns - ------- - tuple[np.ndarray, np.ndarray] +@dataclass +class ChirpPlotBuffer: """ - # calculate instantaneos frequency with zero crossings - roll_signal = np.roll(signal, shift=1) - time_signal = np.arange(len(signal)) / samplerate - period_index = np.arange(len(signal))[( - roll_signal < 0) & (signal >= 0)][1:-1] - - upper_bound = np.abs(signal[period_index]) - lower_bound = np.abs(signal[period_index - 1]) - upper_time = np.abs(time_signal[period_index]) - lower_time = np.abs(time_signal[period_index - 1]) - - # create ratio - lower_ratio = lower_bound / (lower_bound + upper_bound) + Buffer to save data that is created in the main detection loop + and plot it outside the detecion loop. 
+ """ - # appy to time delta - time_delta = upper_time - lower_time - true_zero = lower_time + lower_ratio * time_delta + config: ConfLoader + t0: float + dt: float + track_id: float + electrode: int + data: LoadData + + time: np.ndarray + baseline: np.ndarray + baseline_envelope_unfiltered: np.ndarray + baseline_envelope: np.ndarray + baseline_peaks: np.ndarray + search_frequency: float + search: np.ndarray + search_envelope_unfiltered: np.ndarray + search_envelope: np.ndarray + search_peaks: np.ndarray + + frequency_time: np.ndarray + frequency: np.ndarray + frequency_filtered: np.ndarray + frequency_peaks: np.ndarray + + def plot_buffer(self, chirps: np.ndarray, plot: str) -> None: + + logger.debug("Starting plotting") + + # make data for plotting + + # get index of track data in this time window + window_idx = np.arange(len(self.data.idx))[ + (self.data.ident == self.track_id) + & (self.data.time[self.data.idx] >= self.t0) + & (self.data.time[self.data.idx] <= (self.t0 + self.dt)) + ] + + # get tracked frequencies and their times + freq_temp = self.data.freq[window_idx] + # time_temp = self.data.time[ + # self.data.idx[self.data.ident == self.track_id]][ + # (self.data.time >= self.t0) + # & (self.data.time <= (self.t0 + self.dt)) + # ] + + # remake the band we filtered in + q25, q50, q75 = np.percentile(freq_temp, [25, 50, 75]) + search_upper, search_lower = ( + q50 + self.search_frequency + self.config.minimal_bandwidth / 2, + q50 + self.search_frequency - self.config.minimal_bandwidth / 2, + ) + print(search_upper, search_lower) + + # get indices on raw data + start_idx = (self.t0 - 5) * self.data.raw_rate + window_duration = (self.dt + 10) * self.data.raw_rate + stop_idx = start_idx + window_duration + + # get raw data + data_oi = self.data.raw[start_idx:stop_idx, self.electrode] + + self.time = self.time - self.t0 + self.frequency_time = self.frequency_time - self.t0 + if len(chirps) > 0: + chirps = np.asarray(chirps) - self.t0 + self.t0_old = self.t0 + 
self.t0 = 0 + + fig = plt.figure( + figsize=(14 * ps.cm, 18 * ps.cm) + ) + + gs0 = gr.GridSpec( + 3, 1, figure=fig, height_ratios=[1, 1, 1] + ) + gs1 = gs0[0].subgridspec(1, 1) + gs2 = gs0[1].subgridspec(3, 1, hspace=0.4) + gs3 = gs0[2].subgridspec(3, 1, hspace=0.4) + # gs4 = gs0[5].subgridspec(1, 1) + + ax6 = fig.add_subplot(gs3[2, 0]) + ax0 = fig.add_subplot(gs1[0, 0], sharex=ax6) + ax1 = fig.add_subplot(gs2[0, 0], sharex=ax6) + ax2 = fig.add_subplot(gs2[1, 0], sharex=ax6) + ax3 = fig.add_subplot(gs2[2, 0], sharex=ax6) + ax4 = fig.add_subplot(gs3[0, 0], sharex=ax6) + ax5 = fig.add_subplot(gs3[1, 0], sharex=ax6) + # ax7 = fig.add_subplot(gs4[0, 0], sharex=ax0) + + # ax_leg = fig.add_subplot(gs0[1, 0]) + + waveform_scaler = 1000 + lw = 1.5 + + # plot spectrogram + _ = plot_spectrogram( + ax0, + data_oi, + self.data.raw_rate, + self.t0 - 5, + [np.min(self.frequency) - 300, np.max(self.frequency) + 300] + ) + ax0.set_ylim(np.min(self.frequency) - 100, + np.max(self.frequency) + 200) + + for track_id in self.data.ids: + + t0_track = self.t0_old - 5 + dt_track = self.dt + 10 + window_idx = np.arange(len(self.data.idx))[ + (self.data.ident == track_id) + & (self.data.time[self.data.idx] >= t0_track) + & (self.data.time[self.data.idx] <= (t0_track + dt_track)) + ] - # create new time array - inst_freq_time = true_zero[:-1] + 0.5 * np.diff(true_zero) + # get tracked frequencies and their times + f = self.data.freq[window_idx] + # t = self.data.time[ + # self.data.idx[self.data.ident == self.track_id]] + # tmask = (t >= t0_track) & (t <= (t0_track + dt_track)) + t = self.data.time[self.data.idx[window_idx]] + if track_id == self.track_id: + ax0.plot(t-self.t0_old, f, lw=lw, + zorder=10, color=ps.gblue1) + else: + ax0.plot(t-self.t0_old, f, lw=lw, + zorder=10, color=ps.black) + + # ax0.fill_between( + # np.arange(self.t0, self.t0 + self.dt, 1 / self.data.raw_rate), + # q50 - self.config.minimal_bandwidth / 2, + # q50 + self.config.minimal_bandwidth / 2, + # color=ps.gblue1, 
+ # lw=1, + # ls="dashed", + # alpha=0.5, + # ) + + # ax0.fill_between( + # np.arange(self.t0, self.t0 + self.dt, 1 / self.data.raw_rate), + # search_lower, + # search_upper, + # color=ps.gblue2, + # lw=1, + # ls="dashed", + # alpha=0.5, + # ) + + ax0.axhline(q50 - self.config.minimal_bandwidth / 2, + color=ps.gblue1, lw=1, ls="dashed") + ax0.axhline(q50 + self.config.minimal_bandwidth / 2, + color=ps.gblue1, lw=1, ls="dashed") + ax0.axhline(search_lower, color=ps.gblue2, lw=1, ls="dashed") + ax0.axhline(search_upper, color=ps.gblue2, lw=1, ls="dashed") + + # ax0.axhline(q50, spec_times[0], spec_times[-1], + # color=ps.gblue1, lw=2, ls="dashed") + # ax0.axhline(q50 + self.search_frequency, + # spec_times[0], spec_times[-1], + # color=ps.gblue2, lw=2, ls="dashed") + + if len(chirps) > 0: + for chirp in chirps: + ax0.scatter( + chirp, np.median(self.frequency), c=ps.red, marker=".", + edgecolors=ps.red, + facecolors=ps.red, + zorder=10, + s=70, + ) - # compute frequency - inst_freq = gaussian_filter1d(1 / np.diff(true_zero), 5) + # plot waveform of filtered signal + ax1.plot(self.time, self.baseline * waveform_scaler, + c=ps.gray, lw=lw, alpha=0.5) + ax1.plot(self.time, self.baseline_envelope_unfiltered * + waveform_scaler, c=ps.gblue1, lw=lw, label="baseline envelope") + + # plot waveform of filtered search signal + ax2.plot(self.time, self.search * waveform_scaler, + c=ps.gray, lw=lw, alpha=0.5) + ax2.plot(self.time, self.search_envelope_unfiltered * + waveform_scaler, c=ps.gblue2, lw=lw, label="search envelope") + + # plot baseline instantaneous frequency + ax3.plot(self.frequency_time, self.frequency, + c=ps.gblue3, lw=lw, label="baseline inst. 
freq.") + + # plot filtered and rectified envelope + ax4.plot(self.time, self.baseline_envelope * + waveform_scaler, c=ps.gblue1, lw=lw) + ax4.scatter( + (self.time)[self.baseline_peaks], + (self.baseline_envelope*waveform_scaler)[self.baseline_peaks], + edgecolors=ps.red, + facecolors=ps.red, + zorder=10, + marker=".", + s=70, + # facecolors="none", + ) + + # plot envelope of search signal + ax5.plot(self.time, self.search_envelope * + waveform_scaler, c=ps.gblue2, lw=lw) + ax5.scatter( + (self.time)[self.search_peaks], + (self.search_envelope*waveform_scaler)[self.search_peaks], + edgecolors=ps.red, + facecolors=ps.red, + zorder=10, + marker=".", + s=70, + # facecolors="none", + ) + + # plot filtered instantaneous frequency + ax6.plot(self.frequency_time, + self.frequency_filtered, c=ps.gblue3, lw=lw) + ax6.scatter( + self.frequency_time[self.frequency_peaks], + self.frequency_filtered[self.frequency_peaks], + edgecolors=ps.red, + facecolors=ps.red, + zorder=10, + marker=".", + s=70, + # facecolors="none", + ) + + ax0.set_ylabel("frequency [Hz]") + ax1.set_ylabel(r"$\mu$V") + ax2.set_ylabel(r"$\mu$V") + ax3.set_ylabel("Hz") + ax4.set_ylabel(r"$\mu$V") + ax5.set_ylabel(r"$\mu$V") + ax6.set_ylabel("Hz") + ax6.set_xlabel("time [s]") + + plt.setp(ax0.get_xticklabels(), visible=False) + plt.setp(ax1.get_xticklabels(), visible=False) + plt.setp(ax2.get_xticklabels(), visible=False) + plt.setp(ax3.get_xticklabels(), visible=False) + plt.setp(ax4.get_xticklabels(), visible=False) + plt.setp(ax5.get_xticklabels(), visible=False) + + # ps.letter_subplots([ax0, ax1, ax4], xoffset=-0.21) + + # ax7.set_xticks(np.arange(0, 5.5, 1)) + # ax7.spines.bottom.set_bounds((0, 5)) + + ax0.set_xlim(0, self.config.window) + plt.subplots_adjust(left=0.165, right=0.975, + top=0.98, bottom=0.074, hspace=0.2) + fig.align_labels() + + if plot == "show": + plt.show() + elif plot == "save": + make_outputdir(self.config.outputdir) + out = make_outputdir( + self.config.outputdir + 
self.data.datapath.split("/")[-2] + "/" + ) - return inst_freq_time, inst_freq, true_zero + plt.savefig(f"{out}{self.track_id}_{self.t0_old}.pdf") + plt.savefig(f"{out}{self.track_id}_{self.t0_old}.svg") + plt.close() -def plot_spectrogram(axis, signal: np.ndarray, samplerate: float, t0: float) -> None: +def plot_spectrogram( + axis, + signal: np.ndarray, + samplerate: float, + window_start_seconds: float, + ylims: list[float] +) -> np.ndarray: """ Plot a spectrogram of a signal. @@ -73,28 +319,46 @@ def plot_spectrogram(axis, signal: np.ndarray, samplerate: float, t0: float) -> Signal to plot the spectrogram from. samplerate : float Samplerate of the signal. - t0 : float + window_start_seconds : float Start time of the signal. """ + + logger.debug("Plotting spectrogram") + # compute spectrogram spec_power, spec_freqs, spec_times = spectrogram( signal, ratetime=samplerate, - freq_resolution=50, - overlap_frac=0.2, + freq_resolution=10, + overlap_frac=0.5, ) - axis.pcolormesh( - spec_times + t0, - spec_freqs, - decibel(spec_power), + fmask = np.zeros(spec_freqs.shape, dtype=bool) + fmask[(spec_freqs > ylims[0]) & (spec_freqs < ylims[1])] = True + + axis.imshow( + decibel(spec_power[fmask, :]), + extent=[ + spec_times[0] + window_start_seconds, + spec_times[-1] + window_start_seconds, + spec_freqs[fmask][0], + spec_freqs[fmask][-1], + ], + aspect="auto", + origin="lower", + interpolation="gaussian", + # alpha=0.6, ) + # axis.use_sticky_edges = False + return spec_times - axis.set_ylim(200, 1200) - -def double_bandpass( - data: DataLoader, samplerate: int, freqs: np.ndarray, search_freq: float +def extract_frequency_bands( + raw_data: np.ndarray, + samplerate: int, + baseline_track: np.ndarray, + searchband_center: float, + minimal_bandwidth: float, ) -> tuple[np.ndarray, np.ndarray]: """ Apply a bandpass filter to the baseline of a signal and a second bandpass @@ -102,14 +366,16 @@ def double_bandpass( Parameters ---------- - data : DataLoader + raw_data : np.ndarray 
Data to apply the filter to. samplerate : int Samplerate of the signal. - freqs : np.ndarray + baseline_track : np.ndarray Tracked fundamental frequencies of the signal. - search_freq : float + searchband_center: float Frequency to search for above or below the baseline. + minimal_bandwidth : float + Minimal bandwidth of the filter. Returns ------- @@ -117,420 +383,742 @@ def double_bandpass( """ # compute boundaries to filter baseline - q25, q75 = np.percentile(freqs, [25, 75]) + q25, q50, q75 = np.percentile(baseline_track, [25, 50, 75]) # check if percentile delta is too small - if q75 - q25 < 5: - median = np.median(freqs) - q25, q75 = median - 2.5, median + 2.5 + if q75 - q25 < 10: + q25, q75 = q50 - minimal_bandwidth / 2, q50 + minimal_bandwidth / 2 # filter baseline - filtered_baseline = bandpass_filter(data, samplerate, lowf=q25, highf=q75) + filtered_baseline = bandpass_filter( + raw_data, samplerate, lowf=q25, highf=q75 + ) # filter search area filtered_search_freq = bandpass_filter( - data, samplerate, lowf=q25 + search_freq, highf=q75 + search_freq + raw_data, + samplerate, + lowf=searchband_center + q50 - minimal_bandwidth / 2, + highf=searchband_center + q50 + minimal_bandwidth / 2, ) - return (filtered_baseline, filtered_search_freq) + return filtered_baseline, filtered_search_freq -def main(datapath: str) -> None: +def window_median_all_track_ids( + data: LoadData, window_start_seconds: float, window_duration_seconds: float +) -> tuple[list[tuple[float, float, float]], list[int]]: + """ + Calculate the median and quantiles of the frequency of all fish in a + given time window. 
- # load raw file - file = os.path.join(datapath, "traces-grid1.raw") - # data = DataLoader(file, 60.0, 0, channel=-1) + Iterate over all track ids and calculate the 25, 50 and 75 percentile + in a given time window to pass this data to 'find_searchband' function, + which then determines whether other fish in the current window fall + within the searchband of the current fish and then determine the + gaps that are outside of the percentile ranges. - data = LoadData(datapath) + Parameters + ---------- + data : LoadData + Data to calculate the median frequency from. + window_start_seconds : float + Start time of the window. + window_duration_seconds : float + Duration of the window. + + Returns + ------- + tuple[list[tuple[float, float, float]], list[int]] + + """ + + frequency_percentiles = [] + track_ids = [] + + for _, track_id in enumerate(np.unique(data.ident[~np.isnan(data.ident)])): + + # the window index combines the track id and the time window + window_idx = np.arange(len(data.idx))[ + (data.ident == track_id) + & (data.time[data.idx] >= window_start_seconds) + & ( + data.time[data.idx] + <= (window_start_seconds + window_duration_seconds) + ) + ] + + if len(data.freq[window_idx]) > 0: + frequency_percentiles.append( + np.percentile(data.freq[window_idx], [25, 50, 75])) + track_ids.append(track_id) + + # convert to numpy array + frequency_percentiles = np.asarray(frequency_percentiles) + track_ids = np.asarray(track_ids) + + return frequency_percentiles, track_ids + + +def array_center(array: np.ndarray) -> float: + """ + Return the center value of an array. + If the array length is even, returns + the mean of the two center values. + + Parameters + ---------- + array : np.ndarray + Array to calculate the center from. 
+ + Returns + ------- + float + + """ + if len(array) % 2 == 0: + return np.mean(array[int(len(array) / 2) - 1:int(len(array) / 2) + 1]) + else: + return array[int(len(array) / 2)] + + +def find_searchband( + current_frequency: np.ndarray, + percentiles_ids: np.ndarray, + frequency_percentiles: np.ndarray, + config: ConfLoader, + data: LoadData, +) -> float: + """ + Find the search frequency band for each fish by checking which fish EODs + are above the current EOD and finding a gap in them. + + Parameters + ---------- + current_frequency : np.ndarray + Current EOD frequency array / the current fish of interest. + percentiles_ids : np.ndarray + Array of track IDs of the medians of all other fish in the current + window. + frequency_percentiles : np.ndarray + Array of percentiles frequencies of all other fish in the current window. + config : ConfLoader + Configuration file. + data : LoadData + Data to find the search frequency from. + + Returns + ------- + float + + """ + # frequency window where second filter filters is potentially allowed + # to filter. This is the search window, in which we want to find + # a gap in the other fish's EODs. 
+ current_median = np.median(current_frequency) + search_window = np.arange( + current_median + config.search_df_lower, + current_median + config.search_df_upper, + config.search_res, + ) + + # search window in boolean + bool_lower = np.ones_like(search_window, dtype=bool) + bool_upper = np.ones_like(search_window, dtype=bool) + search_window_bool = np.ones_like(search_window, dtype=bool) + + # make seperate arrays from the qartiles + q25 = np.asarray([i[0] for i in frequency_percentiles]) + q75 = np.asarray([i[2] for i in frequency_percentiles]) + + # get tracks that fall into search window + check_track_ids = percentiles_ids[ + (q25 > current_median) & ( + q75 < search_window[-1]) + ] + + # iterate through theses tracks + if check_track_ids.size != 0: + + for j, check_track_id in enumerate(check_track_ids): + + q25_temp = q25[percentiles_ids == check_track_id] + q75_temp = q75[percentiles_ids == check_track_id] + + bool_lower[search_window > q25_temp - config.search_res] = False + bool_upper[search_window < q75_temp + config.search_res] = False + search_window_bool[(bool_lower == False) & + (bool_upper == False)] = False + + # find gaps in search window + search_window_indices = np.arange(len(search_window)) + + # get search window gaps + # taking the diff of a boolean array gives non zero values where the + # array changes from true to false or vice versa + + search_window_gaps = np.diff(search_window_bool, append=np.nan) + nonzeros = search_window_gaps[np.nonzero(search_window_gaps)[0]] + nonzeros = nonzeros[~np.isnan(nonzeros)] + + if len(nonzeros) == 0: + return config.default_search_freq + + # if the first value is -1, the array starst with true, so a gap + if nonzeros[0] == -1: + stops = search_window_indices[search_window_gaps == -1] + starts = np.append( + 0, search_window_indices[search_window_gaps == 1] + ) + + # if the last value is -1, the array ends with true, so a gap + if nonzeros[-1] == 1: + stops = np.append( + 
search_window_indices[search_window_gaps == -1], + len(search_window) - 1, + ) - # load wavetracker files - # time = np.load(datapath + "times.npy", allow_pickle=True) - # freq = np.load(datapath + "fund_v.npy", allow_pickle=True) - # powers = np.load(datapath + "sign_v.npy", allow_pickle=True) - # idx = np.load(datapath + "idx_v.npy", allow_pickle=True) - # ident = np.load(datapath + "ident_v.npy", allow_pickle=True) + # else it starts with false, so no gap + if nonzeros[0] == 1: + stops = search_window_indices[search_window_gaps == -1] + starts = search_window_indices[search_window_gaps == 1] + + # if the last value is -1, the array ends with true, so a gap + if nonzeros[-1] == 1: + stops = np.append( + search_window_indices[search_window_gaps == -1], + len(search_window), + ) + + # get the frequency ranges of the gaps + search_windows = [search_window[x:y] for x, y in zip(starts, stops)] + search_windows_lens = [len(x) for x in search_windows] + longest_search_window = search_windows[np.argmax(search_windows_lens)] + + # the center of the search frequency band is then the center of + # the longest gap + + search_freq = array_center(longest_search_window) - current_median + + return search_freq + + return config.default_search_freq + + +def chirpdetection(datapath: str, plot: str, debug: str = 'false') -> None: + + assert plot in [ + "save", + "show", + "false", + ], "plot must be 'save', 'show' or 'false'" + + assert debug in [ + "false", + "electrode", + "fish", + ], "debug must be 'false', 'electrode' or 'fish'" + + if debug != "false": + assert plot == "show", "debug mode only runs when plot is 'show'" + + # load raw file + print('datapath', datapath) + data = LoadData(datapath) # load config file config = ConfLoader("chirpdetector_conf.yml") - # set time window # <------------------------ Iterate through windows here + # set time window window_duration = config.window * data.raw_rate window_overlap = config.overlap * data.raw_rate window_edge = config.edge * 
data.raw_rate - # check if window duration is even + # check if window duration and window ovelap is even, otherwise the half + # of the duration or window overlap would return a float, thus an + # invalid index + if window_duration % 2 == 0: window_duration = int(window_duration) else: raise ValueError("Window duration must be even.") - # check if window ovelap is even if window_overlap % 2 == 0: window_overlap = int(window_overlap) else: raise ValueError("Window overlap must be even.") + # make time array for raw data raw_time = np.arange(data.raw.shape[0]) / data.raw_rate # good chirp times for data: 2022-06-02-10_00 - t0 = (3 * 60 * 60 + 6 * 60 + 43.5) * data.raw_rate - dt = 60 * data.raw_rate + # window_start_index = (3 * 60 * 60 + 6 * 60 + 43.5) * data.raw_rate + # window_duration_index = 60 * data.raw_rate - window_starts = np.arange( - t0, - t0 + dt, + # t0 = 0 + # dt = data.raw.shape[0] + # window_start_seconds = (23495 + ((28336-23495)/3)) * data.raw_rate + # window_duration_seconds = (28336 - 23495) * data.raw_rate + + window_start_index = 0 + window_duration_index = data.raw.shape[0] + + # generate starting points of rolling window + window_start_indices = np.arange( + window_start_index, + window_start_index + window_duration_index, window_duration - (window_overlap + 2 * window_edge), - dtype=int + dtype=int, ) - # ask how many windows should be calulated - nwindows = int( - input("How many windows should be calculated (integer number)? 
")) - for start_index in window_starts[:nwindows]: + # ititialize lists to store data + multiwindow_chirps = [] + multiwindow_ids = [] - # make t0 and dt - t0 = start_index / data.raw_rate - dt = window_duration / data.raw_rate + for st, window_start_index in enumerate(window_start_indices[3175:]): - # set index window - stop_index = start_index + window_duration - - # t0 = 3 * 60 * 60 + 6 * 60 + 43.5 - # dt = 60 - # start_index = t0 * data.raw_rate - # stop_index = (t0 + dt) * data.raw_rate - - # calucate frequencies in wndow - median_freq = [] - track_ids = [] - for i, track_id in enumerate(np.unique(data.ident[~np.isnan(data.ident)])): - window_index = np.arange(len(data.idx))[ - (data.ident == track_id) & (data.time[data.idx] >= t0) & ( - data.time[data.idx] <= (t0 + dt)) - ] - median_freq.append(np.median(data.freq[window_index])) - track_ids.append(track_id) - median_freq = np.asarray(median_freq) - track_ids = np.asarray(track_ids) + logger.info(f"Processing window {st+1} of {len(window_start_indices)}") - # iterate through all fish - for i, track_id in enumerate(np.unique(data.ident[~np.isnan(data.ident)])): + window_start_seconds = window_start_index / data.raw_rate + window_duration_seconds = window_duration / data.raw_rate + + # set index window + window_stop_index = window_start_index + window_duration - print(f"Track ID: {track_id}") + # calucate median of fish frequencies in window + median_freq, median_ids = window_median_all_track_ids( + data, window_start_seconds, window_duration_seconds + ) - window_index = np.arange(len(data.idx))[ - (data.ident == track_id) & (data.time[data.idx] >= t0) & ( - data.time[data.idx] <= (t0 + dt)) + # iterate through all fish + for tr, track_id in enumerate( + np.unique(data.ident[~np.isnan(data.ident)]) + ): + + logger.debug(f"Processing track {tr} of {len(data.ids)}") + + # get index of track data in this time window + track_window_index = np.arange(len(data.idx))[ + (data.ident == track_id) + & 
(data.time[data.idx] >= window_start_seconds) + & ( + data.time[data.idx] + <= (window_start_seconds + window_duration_seconds) + ) ] # get tracked frequencies and their times - freq_temp = data.freq[window_index] - powers_temp = data.powers[window_index, :] + current_frequencies = data.freq[track_window_index] + current_powers = data.powers[track_window_index, :] + + # approximate sampling rate to compute expected durations if there + # is data available for this time window for this fish id - # time_temp = time[idx[window_index]] - track_samplerate = np.mean(1 / np.diff(data.time)) - expected_duration = ((t0 + dt) - t0) * track_samplerate +# track_samplerate = np.mean(1 / np.diff(data.time)) +# expected_duration = ( +# (window_start_seconds + window_duration_seconds) +# - window_start_seconds +# ) * track_samplerate # check if tracked data available in this window - if len(freq_temp) < expected_duration * 0.9: + if len(current_frequencies) < 3: + logger.warning( + f"Track {track_id} has no data in window {st}, skipping." + ) continue - fig, axs = plt.subplots( - 7, - config.electrodes, - figsize=(20 / 2.54, 12 / 2.54), - constrained_layout=True, - sharex=True, - sharey='row', - ) - # get best electrode - best_electrodes = np.argsort(np.nanmean( - powers_temp, axis=0))[-config.electrodes:] - - # frequency where second filter filters - search_window = np.arange(np.median(freq_temp)+config.search_df_lower, np.median( - freq_temp)+config.search_df_upper, config.search_res) + # check if there are powers available in this window + nanchecker = np.unique(np.isnan(current_powers)) + if (len(nanchecker) == 1) and nanchecker[0] is True: + logger.warning( + f"No powers available for track {track_id} window {st}," + "skipping." 
+ ) + continue - # search window in boolean - search_window_bool = np.ones(len(search_window), dtype=bool) + # find the strongest electrodes for the current fish in the current + # window - # get tracks that fall into search window - check_track_ids = track_ids[(median_freq > search_window[0]) & ( - median_freq < search_window[-1])] + best_electrode_index = np.argsort( + np.nanmean(current_powers, axis=0) + )[-config.number_electrodes:] - # iterate through theses tracks - if check_track_ids.size != 0: + # find a frequency above the baseline of the current fish in which + # no other fish is active to search for chirps there - for j, check_track_id in enumerate(check_track_ids): + search_frequency = find_searchband( + config=config, + current_frequency=current_frequencies, + percentiles_ids=median_ids, + data=data, + frequency_percentiles=median_freq, + ) - q1, q2 = np.percentile( - data.freq[data.ident == check_track_id], config.search_freq_percentiles) + # add all chirps that are detected on mulitple electrodes for one + # fish fish in one window to this list - search_window_bool[(search_window > q1) & ( - search_window < q2)] = False + multielectrode_chirps = [] - # find gaps in search window - search_window_indices = np.arange(len(search_window)) + # iterate through electrodes + for el, electrode_index in enumerate(best_electrode_index): - # get search window gaps - search_window_gaps = np.diff(search_window_bool, append=np.nan) - nonzeros = search_window_gaps[np.nonzero( - search_window_gaps)[0]] - nonzeros = nonzeros[~np.isnan(nonzeros)] + logger.debug( + f"Processing electrode {el+1} of " + f"{len(best_electrode_index)}" + ) - # if the first value is -1, the array starst with true, so a gap - if nonzeros[0] == -1: - stops = search_window_indices[search_window_gaps == -1] - starts = np.append( - 0, search_window_indices[search_window_gaps == 1]) + # LOAD DATA FOR CURRENT ELECTRODE AND CURRENT FISH ------------ - # if the last value is -1, the array ends with 
true, so a gap - if nonzeros[-1] == 1: - stops = np.append( - search_window_indices[search_window_gaps == -1], len(search_window) - 1) + # load region of interest of raw data file + current_raw_data = data.raw[ + window_start_index:window_stop_index, electrode_index + ] + current_raw_time = raw_time[ + window_start_index:window_stop_index + ] - # else it starts with false, so no gap - if nonzeros[0] == 1: - stops = search_window_indices[search_window_gaps == -1] - starts = search_window_indices[search_window_gaps == 1] + # EXTRACT FEATURES -------------------------------------------- - # if the last value is -1, the array ends with true, so a gap - if nonzeros[-1] == 1: - stops = np.append( - search_window_indices[search_window_gaps == -1], len(search_window)) + # filter baseline and above + baselineband, searchband = extract_frequency_bands( + raw_data=current_raw_data, + samplerate=data.raw_rate, + baseline_track=current_frequencies, + searchband_center=search_frequency, + minimal_bandwidth=config.minimal_bandwidth, + ) - # get the frequency ranges of the gaps - search_windows = [search_window[x:y] - for x, y in zip(starts, stops)] - search_windows_lens = [len(x) for x in search_windows] - longest_search_window = search_windows[np.argmax( - search_windows_lens)] + # compute envelope of baseline band to find dips + # in the baseline envelope - search_freq = ( - longest_search_window[1] - longest_search_window[0]) / 2 + baseline_envelope_unfiltered = envelope( + signal=baselineband, + samplerate=data.raw_rate, + cutoff_frequency=config.baseline_envelope_cutoff, + ) - else: - search_freq = config.default_search_freq + # highpass filter baseline envelope to remove slower + # fluctuations e.g. 
due to motion envelope - print(f"Search frequency: {search_freq}") + baseline_envelope = bandpass_filter( + signal=baseline_envelope_unfiltered, + samplerate=data.raw_rate, + lowf=config.baseline_envelope_bandpass_lowf, + highf=config.baseline_envelope_bandpass_highf, + ) - for i, electrode in enumerate(best_electrodes): + # highbass filter introduced filter effects, i.e. oscillations + # around peaks. Compute the envelope of the highpass filtered + # and inverted baseline envelope to remove these oscillations - # load region of interest of raw data file - data_oi = data.raw[start_index:stop_index, :] - time_oi = raw_time[start_index:stop_index] + baseline_envelope = -baseline_envelope - # plot wavetracker tracks to spectrogram - # for track_id in np.unique(ident): # <---------- Find freq gaps later - # here + # baseline_envelope = envelope( + # signal=baseline_envelope, + # samplerate=data.raw_rate, + # cutoff_frequency=config.baseline_envelope_envelope_cutoff, + # ) - # # get indices for time array in time window - # window_index = np.arange(len(idx))[ - # (ident == track_id) & - # (time[idx] >= t0) & - # (time[idx] <= (t0 + dt)) - # ] + # compute the envelope of the search band. Peaks in the search + # band envelope correspond to troughs in the baseline envelope + # during chirps - # freq_temp = freq[window_index] - # time_temp = time[idx[window_index]] + search_envelope_unfiltered = envelope( + signal=searchband, + samplerate=data.raw_rate, + cutoff_frequency=config.search_envelope_cutoff, + ) + search_envelope = search_envelope_unfiltered + + # compute instantaneous frequency of the baseline band to find + # anomalies during a chirp, i.e. a frequency jump upwards or + # sometimes downwards. We do not fully understand why the + # instantaneous frequency can also jump downwards during a + # chirp. This phenomenon is only observed on chirps on a narrow + # filtered baseline such as the one we are working with. 
+ + ( + baseline_frequency_time, + baseline_frequency, + ) = instantaneous_frequency( + signal=baselineband, + samplerate=data.raw_rate, + smoothing_window=config.baseline_frequency_smoothing, + ) - # axs[0].plot(time_temp-t0, freq_temp, lw=2) - # axs[0].set_ylim(500, 1000) + # bandpass filter the instantaneous frequency to remove slow + # fluctuations. Just as with the baseline envelope, we then + # compute the envelope of the signal to remove the oscillations + # around the peaks - # track_id = ids + # baseline_frequency_samplerate = np.mean( + # np.diff(baseline_frequency_time) + # ) - # filter baseline and above - baseline, search = double_bandpass( - data_oi[:, electrode], data.raw_rate, freq_temp, search_freq + baseline_frequency_filtered = np.abs( + baseline_frequency - np.median(baseline_frequency) ) - # compute instantaneous frequency on broad signal - broad_baseline = bandpass_filter( - data_oi[:, electrode], - data.raw_rate, - lowf=np.mean(freq_temp)-5, - highf=np.mean(freq_temp)+100 - ) + # baseline_frequency_filtered = highpass_filter( + # signal=baseline_frequency_filtered, + # samplerate=baseline_frequency_samplerate, + # cutoff=config.baseline_frequency_highpass_cutoff, + # ) - # compute instantaneous frequency on narrow signal - baseline_freq_time, baseline_freq, true_zero = instantaneos_frequency( - baseline, data.raw_rate - ) + # baseline_frequency_filtered = envelope( + # signal=-baseline_frequency_filtered, + # samplerate=baseline_frequency_samplerate, + # cutoff_frequency=config.baseline_frequency_envelope_cutoff, + # ) - # compute envelopes - baseline_envelope_unfiltered = envelope( - baseline, data.raw_rate, config.envelope_cutoff) - search_envelope = envelope( - search, data.raw_rate, config.envelope_cutoff) - - # highpass filter envelopes - baseline_envelope = highpass_filter( - baseline_envelope_unfiltered, - data.raw_rate, - config.envelope_highpass_cutoff - ) + # CUT OFF OVERLAP --------------------------------------------- - # 
baseline_envelope = np.abs(baseline_envelope) - # search_envelope = highpass_filter( - # search_envelope, - # data.raw_rate, - # config.envelope_highpass_cutoff - # ) + # cut off snippet at start and end of each window to remove + # filter effects - # envelopes of filtered envelope of filtered baseline - baseline_envelope = envelope( - np.abs(baseline_envelope), - data.raw_rate, - config.envelope_envelope_cutoff + # get arrays with raw samplerate without edges + no_edges = np.arange( + int(window_edge), len(baseline_envelope) - int(window_edge) + ) + current_raw_time = current_raw_time[no_edges] + baselineband = baselineband[no_edges] + baseline_envelope_unfiltered = baseline_envelope_unfiltered[no_edges] + searchband = searchband[no_edges] + baseline_envelope = baseline_envelope[no_edges] + search_envelope_unfiltered = search_envelope_unfiltered[no_edges] + search_envelope = search_envelope[no_edges] + + # get instantaneous frequency withoup edges + no_edges_t0 = int(window_edge) / data.raw_rate + no_edges_t1 = baseline_frequency_time[-1] - ( + int(window_edge) / data.raw_rate + ) + no_edges = (baseline_frequency_time >= no_edges_t0) & ( + baseline_frequency_time <= no_edges_t1 ) -# search_envelope = bandpass_filter( -# search_envelope, data.raw_rate, lowf=lowf, highf=highf) - - # bandpass filter the instantaneous - inst_freq_filtered = bandpass_filter( - baseline_freq, - data.raw_rate, - lowf=config.instantaneous_lowf, - highf=config.instantaneous_highf + baseline_frequency_filtered = baseline_frequency_filtered[ + no_edges + ] + baseline_frequency = baseline_frequency[no_edges] + baseline_frequency_time = ( + baseline_frequency_time[no_edges] + window_start_seconds ) - # test taking the log of the envelopes - # baseline_envelope = np.log(baseline_envelope) - # search_envelope = np.log(search_envelope) + # NORMALIZE --------------------------------------------------- - # CUT OFF OVERLAP ------------------------------------------------- + # normalize all three 
feature arrays to the same range to make + # peak detection simpler - # cut off first and last 0.5 * overlap at start and end - valid = np.arange( - int(window_edge), len(baseline_envelope) - - int(window_edge) - ) - baseline_envelope_unfiltered = baseline_envelope_unfiltered[valid] - baseline_envelope = baseline_envelope[valid] - search_envelope = search_envelope[valid] + # baseline_envelope = minmaxnorm([baseline_envelope])[0] + # search_envelope = minmaxnorm([search_envelope])[0] + # baseline_frequency_filtered = minmaxnorm( + # [baseline_frequency_filtered] + # )[0] - # get inst freq valid snippet - valid_t0 = int(window_edge) / data.raw_rate - valid_t1 = baseline_freq_time[-1] - \ - (int(window_edge) / data.raw_rate) + # PEAK DETECTION ---------------------------------------------- - inst_freq_filtered = inst_freq_filtered[(baseline_freq_time >= valid_t0) & ( - baseline_freq_time <= valid_t1)] + # detect peaks baseline_enelope + baseline_peak_indices, _ = find_peaks( + baseline_envelope, prominence=config.baseline_prominence + ) + # detect peaks search_envelope + search_peak_indices, _ = find_peaks( + search_envelope, prominence=config.search_prominence + ) + # detect peaks inst_freq_filtered + frequency_peak_indices, _ = find_peaks( + baseline_frequency_filtered, prominence=config.frequency_prominence + ) - baseline_freq = baseline_freq[(baseline_freq_time >= valid_t0) & ( - baseline_freq_time <= valid_t1)] + # DETECT CHIRPS IN SEARCH WINDOW ------------------------------ - baseline_freq_time = baseline_freq_time[(baseline_freq_time >= valid_t0) & ( - baseline_freq_time <= valid_t1)] + t0 - true_zero = true_zero + t0 - # overwrite raw time to valid region - time_oi = time_oi[valid] - baseline = baseline[valid] - broad_baseline = broad_baseline[valid] - search = search[valid] + # get the peak timestamps from the peak indices + baseline_peak_timestamps = current_raw_time[ + baseline_peak_indices + ] + search_peak_timestamps = current_raw_time[ + 
search_peak_indices] - # NORMALIZE ---------------------------------------------------- + frequency_peak_timestamps = baseline_frequency_time[ + frequency_peak_indices + ] - baseline_envelope = normalize([baseline_envelope])[0] - search_envelope = normalize([search_envelope])[0] - inst_freq_filtered = normalize([inst_freq_filtered])[0] + # check if one list is empty and if so, skip to the next + # electrode because a chirp cannot be detected if one is empty - # PEAK DETECTION ----------------------------------------------- + one_feature_empty = ( + len(baseline_peak_timestamps) == 0 + or len(search_peak_timestamps) == 0 + or len(frequency_peak_timestamps) == 0 + ) - # detect peaks baseline_enelope - prominence = np.percentile( - baseline_envelope, config.baseline_prominence_percentile) - baseline_peaks, _ = find_peaks( - np.abs(baseline_envelope), prominence=prominence) + if one_feature_empty and (debug == 'false'): + continue - # detect peaks search_envelope - prominence = np.percentile( - search_envelope, config.search_prominence_percentile) - search_peaks, _ = find_peaks( - search_envelope, prominence=prominence) + # group peak across feature arrays but only if they + # occur in all 3 feature arrays - # detect peaks inst_freq_filtered - prominence = np.percentile( - inst_freq_filtered, config.instantaneous_prominence_percentile) - inst_freq_peaks, _ = find_peaks( - np.abs(inst_freq_filtered), prominence=prominence) - - # PLOT ------------------------------------------------------------ - - # plot spectrogram - plot_spectrogram( - axs[0, i], data_oi[:, electrode], data.raw_rate, t0) - - # plot baseline instantaneos frequency - axs[1, i].plot(baseline_freq_time, baseline_freq - - np.median(baseline_freq)) - - # plot waveform of filtered signal - axs[2, i].plot(time_oi, baseline, c=ps.green) - axs[2, i].scatter( - true_zero, np.zeros_like(true_zero), c=ps.red) - - # plot broad filtered baseline - axs[2, i].plot( - time_oi, - broad_baseline, - ) + sublists = [ + 
list(baseline_peak_timestamps), + list(search_peak_timestamps), + list(frequency_peak_timestamps), + ] - # plot narrow filtered baseline envelope - axs[2, i].plot( - time_oi, - baseline_envelope_unfiltered, - c=ps.red + singleelectrode_chirps = group_timestamps( + sublists=sublists, + at_least_in=3, + difference_threshold=config.chirp_window_threshold, ) - # plot waveform of filtered search signal - axs[3, i].plot(time_oi, search) + # check it there are chirps detected after grouping, continue + # with the loop if not + + if (len(singleelectrode_chirps) == 0) and (debug == 'false'): + continue + + # append chirps from this electrode to the multilectrode list + multielectrode_chirps.append(singleelectrode_chirps) + + # only initialize the plotting buffer if chirps are detected + chirp_detected = (el == (config.number_electrodes - 1) + & (plot in ["show", "save"]) + ) + + if chirp_detected or (debug != 'elecrode'): + + logger.debug("Detected chirp, ititialize buffer ...") + + # save data to Buffer + buffer = ChirpPlotBuffer( + config=config, + t0=window_start_seconds, + dt=window_duration_seconds, + electrode=electrode_index, + track_id=track_id, + data=data, + time=current_raw_time, + baseline_envelope_unfiltered=baseline_envelope_unfiltered, + baseline=baselineband, + baseline_envelope=baseline_envelope, + baseline_peaks=baseline_peak_indices, + search_frequency=search_frequency, + search=searchband, + search_envelope_unfiltered=search_envelope_unfiltered, + search_envelope=search_envelope, + search_peaks=search_peak_indices, + frequency_time=baseline_frequency_time, + frequency=baseline_frequency, + frequency_filtered=baseline_frequency_filtered, + frequency_peaks=frequency_peak_indices, + ) + + logger.debug("Buffer initialized!") + + if debug == "electrode": + logger.info(f'Plotting electrode {el} ...') + buffer.plot_buffer( + chirps=singleelectrode_chirps, plot=plot) + + logger.debug( + f"Processed all electrodes for fish {track_id} for this" + "window, sorting 
chirps ..." + ) - # plot envelope of search signal - axs[3, i].plot( - time_oi, - search_envelope, - c=ps.red - ) + # check if there are chirps detected in multiple electrodes and + # continue the loop if not - # plot filtered and rectified envelope - axs[4, i].plot(time_oi, baseline_envelope) - axs[4, i].scatter( - (time_oi)[baseline_peaks], - baseline_envelope[baseline_peaks], - c=ps.red, - ) + if (len(multielectrode_chirps) == 0) and (debug == 'false'): + continue - # plot envelope of search signal - axs[5, i].plot(time_oi, search_envelope) - axs[5, i].scatter( - (time_oi)[search_peaks], - search_envelope[search_peaks], - c=ps.red, - ) + # validate multielectrode chirps, i.e. check if they are + # detected in at least 'config.min_electrodes' electrodes - # plot filtered instantaneous frequency - axs[6, i].plot(baseline_freq_time, np.abs(inst_freq_filtered)) - axs[6, i].scatter( - baseline_freq_time[inst_freq_peaks], - np.abs(inst_freq_filtered)[inst_freq_peaks], - c=ps.red, - ) + multielectrode_chirps_validated = group_timestamps( + sublists=multielectrode_chirps, + at_least_in=config.minimum_electrodes, + difference_threshold=config.chirp_window_threshold, + ) - axs[6, i].set_xlabel("Time [s]") - axs[0, i].set_title("Spectrogram") - axs[1, i].set_title("Fitered baseline instanenous frequency") - axs[2, i].set_title("Fitered baseline") - axs[3, i].set_title("Fitered above") - axs[4, i].set_title("Filtered envelope of baseline envelope") - axs[5, i].set_title("Search envelope") - axs[6, i].set_title( - "Filtered absolute instantaneous frequency") + # add validated chirps to the list that tracks chirps across there + # rolling time windows - plt.show() + multiwindow_chirps.append(multielectrode_chirps_validated) + multiwindow_ids.append(track_id) + + logger.info( + f"Found {len(multielectrode_chirps_validated)}" + f" chirps for fish {track_id} in this window!" 
+ ) + # if chirps are detected and the plot flag is set, plot the + # chirps, otheswise try to delete the buffer if it exists + + if debug == "fish": + logger.info(f'Plotting fish {track_id} ...') + buffer.plot_buffer(multielectrode_chirps_validated, plot) + + if ((len(multielectrode_chirps_validated) > 0) & + (plot in ["show", "save"]) & (debug == 'false')): + try: + buffer.plot_buffer(multielectrode_chirps_validated, plot) + del buffer + except NameError: + pass + else: + try: + del buffer + except NameError: + pass + + # flatten list of lists containing chirps and create + # an array of fish ids that correspond to the chirps + + multiwindow_chirps_flat = [] + multiwindow_ids_flat = [] + for track_id in np.unique(multiwindow_ids): + + # get chirps for this fish and flatten the list + current_track_bool = np.asarray(multiwindow_ids) == track_id + current_track_chirps = flatten( + list(compress(multiwindow_chirps, current_track_bool)) + ) + + # add flattened chirps to the list + multiwindow_chirps_flat.extend(current_track_chirps) + multiwindow_ids_flat.extend( + list(np.ones_like(current_track_chirps) * track_id) + ) + + # purge duplicates, i.e. 
chirps that are very close to each other + # duplites arise due to overlapping windows + + purged_chirps = [] + purged_ids = [] + for track_id in np.unique(multiwindow_ids_flat): + tr_chirps = np.asarray(multiwindow_chirps_flat)[ + np.asarray(multiwindow_ids_flat) == track_id + ] + if len(tr_chirps) > 0: + tr_chirps_purged = purge_duplicates( + tr_chirps, config.chirp_window_threshold + ) + purged_chirps.extend(list(tr_chirps_purged)) + purged_ids.extend(list(np.ones_like(tr_chirps_purged) * track_id)) + + # sort chirps by time + purged_chirps = np.asarray(purged_chirps) + purged_ids = np.asarray(purged_ids) + purged_ids = purged_ids[np.argsort(purged_chirps)] + purged_chirps = purged_chirps[np.argsort(purged_chirps)] + + # save them into the data directory + np.save(datapath + "chirps.npy", purged_chirps) + np.save(datapath + "chirp_ids.npy", purged_ids) if __name__ == "__main__": + # datapath = "/home/weygoldt/Data/uni/chirpdetection/GP2023_chirp_detection/data/mount_data/2020-05-13-10_00/" datapath = "../data/2022-06-02-10_00/" - main(datapath) + # datapath = "/home/weygoldt/Data/uni/efishdata/2016-colombia/fishgrid/2016-04-09-22_25/" + # datapath = "/home/weygoldt/Data/uni/chirpdetection/GP2023_chirp_detection/data/mount_data/2020-03-13-10_00/" + chirpdetection(datapath, plot="save", debug="false") diff --git a/code/chirpdetector_conf.yml b/code/chirpdetector_conf.yml old mode 100644 new mode 100755 index dd3d285..058448d --- a/code/chirpdetector_conf.yml +++ b/code/chirpdetector_conf.yml @@ -1,42 +1,41 @@ -# Duration and overlap of the analysis window in seconds -window: 5 -overlap: 1 -edge: 0.25 +# Path setup ------------------------------------------------------------------ -# Number of electrodes to go over -electrodes: 3 +dataroot: "../data/" # path to data +outputdir: "../output/" # path to save plots to -# Boundary for search frequency in Hz -search_boundary: 100 +# Rolling window parameters --------------------------------------------------- -# Cutoff 
frequency for envelope estimation by lowpass filter -envelope_cutoff: 25 +window: 5 # rolling window length in seconds +overlap: 1 # window overlap in seconds +edge: 0.25 # window edge cufoffs to mitigate filter edge effects -# Cutoff frequency for envelope highpass filter -envelope_highpass_cutoff: 3 +# Electrode iteration parameters ---------------------------------------------- -# Cutoff frequency for envelope of envelope -envelope_envelope_cutoff: 5 +number_electrodes: 2 # number of electrodes to go over +minimum_electrodes: 1 # mimumun number of electrodes a chirp must be on -# Instantaneous frequency bandpass filter cutoff frequencies -instantaneous_lowf: 15 -instantaneous_highf: 8000 +# Feature extraction parameters ----------------------------------------------- -# Baseline envelope peak detection parameters -baseline_prominence_percentile: 90 +search_df_lower: 20 # start searching this far above the baseline +search_df_upper: 100 # stop searching this far above the baseline +search_res: 1 # search window resolution +default_search_freq: 60 # search here if no need for a search frequency +minimal_bandwidth: 10 # minimal bandpass filter width for baseline +search_bandwidth: 10 # minimal bandpass filter width for search frequency +baseline_frequency_smoothing: 10 # instantaneous frequency smoothing -# Search envelope peak detection parameters -search_prominence_percentile: 75 +# Feature processing parameters ----------------------------------------------- -# Instantaneous frequency peak detection parameters -instantaneous_prominence_percentile: 90 +baseline_envelope_cutoff: 25 # envelope estimation cutoff +baseline_envelope_bandpass_lowf: 2 # envelope badpass lower cutoff +baseline_envelope_bandpass_highf: 100 # envelope bandbass higher cutoff +search_envelope_cutoff: 10 # search envelope estimation cufoff -# search freq parameter -search_df_lower: 25 -search_df_upper: 100 -search_res: 1 -search_freq_percentiles: - - 5 - - 95 -default_search_freq: 50 +# Peak 
detecion parameters ---------------------------------------------------- +baseline_prominence: 0.00005 # peak prominence threshold for baseline envelope +search_prominence: 0.000004 # peak prominence threshold for search envelope +frequency_prominence: 2 # peak prominence threshold for baseline freq + +# Classify events as chirps if they are less than this time apart +chirp_window_threshold: 0.02 diff --git a/code/eventchirpsplots.py b/code/eventchirpsplots.py new file mode 100644 index 0000000..4ebaa66 --- /dev/null +++ b/code/eventchirpsplots.py @@ -0,0 +1,593 @@ +import os + +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt + +from tqdm import tqdm +from IPython import embed +from pandas import read_csv +from modules.logger import makeLogger +from modules.plotstyle import PlotStyle +from modules.datahandling import causal_kde1d, acausal_kde1d, flatten + +logger = makeLogger(__name__) +ps = PlotStyle() + + +class Behavior: + """Load behavior data from csv file as class attributes + Attributes + ---------- + behavior: 0: chasing onset, 1: chasing offset, 2: physical contact + behavior_type: + behavioral_category: + comment_start: + comment_stop: + dataframe: pandas dataframe with all the data + duration_s: + media_file: + observation_date: + observation_id: + start_s: start time of the event in seconds + stop_s: stop time of the event in seconds + total_length: + """ + + def __init__(self, folder_path: str) -> None: + print(f'{folder_path}') + LED_on_time_BORIS = np.load(os.path.join( + folder_path, 'LED_on_time.npy'), allow_pickle=True) + self.time = np.load(os.path.join( + folder_path, "times.npy"), allow_pickle=True) + csv_filename = [f for f in os.listdir(folder_path) if f.endswith( + '.csv')][0] # check if there are more than one csv file + self.dataframe = read_csv(os.path.join(folder_path, csv_filename)) + self.chirps = np.load(os.path.join( + folder_path, 'chirps.npy'), allow_pickle=True) + self.chirps_ids = np.load(os.path.join( + 
folder_path, 'chirp_ids.npy'), allow_pickle=True) + + for k, key in enumerate(self.dataframe.keys()): + key = key.lower() + if ' ' in key: + key = key.replace(' ', '_') + if '(' in key: + key = key.replace('(', '') + key = key.replace(')', '') + setattr(self, key, np.array( + self.dataframe[self.dataframe.keys()[k]])) + + last_LED_t_BORIS = LED_on_time_BORIS[-1] + real_time_range = self.time[-1] - self.time[0] + factor = 1.034141 + shift = last_LED_t_BORIS - real_time_range * factor + self.start_s = (self.start_s - shift) / factor + self.stop_s = (self.stop_s - shift) / factor + + +""" +1 - chasing onset +2 - chasing offset +3 - physical contact event + +temporal encoding needs to be corrected ... not exactly 25FPS. + +### corresponding python code ### + + factor = 1.034141 + LED_on_time_BORIS = np.load(os.path.join(folder_path, 'LED_on_time.npy'), allow_pickle=True) + last_LED_t_BORIS = LED_on_time_BORIS[-1] + real_time_range = times[-1] - times[0] + shift = last_LED_t_BORIS - real_time_range * factor + + data = pd.read_csv(os.path.join(folder_path, file[1:-7] + '.csv')) + boris_times = data['Start (s)'] + data_times = [] + + for Cevent_t in boris_times: + Cevent_boris_times = (Cevent_t - shift) / factor + data_times.append(Cevent_boris_times) + + data_times = np.array(data_times) + behavior = data['Behavior'] +""" + + +def correct_chasing_events( + category: np.ndarray, + timestamps: np.ndarray +) -> tuple[np.ndarray, np.ndarray]: + + onset_ids = np.arange( + len(category))[category == 0] + offset_ids = np.arange( + len(category))[category == 1] + + wrong_bh = np.arange(len(category))[ + category != 2][:-1][np.diff(category[category != 2]) == 0] + if onset_ids[0] > offset_ids[0]: + offset_ids = np.delete(offset_ids, 0) + help_index = offset_ids[0] + wrong_bh = np.append(wrong_bh, help_index) + + category = np.delete(category, wrong_bh) + timestamps = np.delete(timestamps, wrong_bh) + + # Check whether on- or offset is longer and calculate length difference + if
len(onset_ids) > len(offset_ids): + len_diff = len(onset_ids) - len(offset_ids) + logger.info(f'Onsets are greater than offsets by {len_diff}') + elif len(onset_ids) < len(offset_ids): + len_diff = len(offset_ids) - len(onset_ids) + logger.info(f'Offsets are greater than onsets by {len_diff}') + elif len(onset_ids) == len(offset_ids): + logger.info('Chasing events are equal') + + return category, timestamps + + +def event_triggered_chirps( + event: np.ndarray, + chirps: np.ndarray, + time_before_event: int, + time_after_event: int, + dt: float, + width: float, +) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + + event_chirps = [] # chirps that are in specified window around event + # timestamps of chirps around event centered on the event timepoint + centered_chirps = [] + + for event_timestamp in event: + start = event_timestamp - time_before_event + stop = event_timestamp + time_after_event + chirps_around_event = [c for c in chirps if (c >= start) & (c <= stop)] + event_chirps.append(chirps_around_event) + if len(chirps_around_event) == 0: + continue + else: + centered_chirps.append(chirps_around_event - event_timestamp) + + time = np.arange(-time_before_event, time_after_event, dt) + + # Kernel density estimation with some if's + if len(centered_chirps) == 0: + centered_chirps = np.array([]) + centered_chirps_convolved = np.zeros(len(time)) + else: + # convert list of arrays to one array for plotting + centered_chirps = np.concatenate(centered_chirps, axis=0) + centered_chirps_convolved = (acausal_kde1d( + centered_chirps, time, width)) / len(event) + + return event_chirps, centered_chirps, centered_chirps_convolved + + +def main(datapath: str): + + foldernames = [ + datapath + x + '/' for x in os.listdir(datapath) if os.path.isdir(datapath + x)] + + nrecording_chirps = [] + nrecording_chirps_fish_ids = [] + nrecording_chasing_onsets = [] + nrecording_chasing_offsets = [] + nrecording_physicals = [] + + # Iterate over all recordings and save chirp- and 
event-timestamps + for folder in foldernames: + # exclude folder with empty LED_on_time.npy + if folder == '../data/mount_data/2020-05-12-10_00/': + continue + + bh = Behavior(folder) + + # Chirps are already sorted + category = bh.behavior + timestamps = bh.start_s + chirps = bh.chirps + nrecording_chirps.append(chirps) + chirps_fish_ids = bh.chirps_ids + nrecording_chirps_fish_ids.append(chirps_fish_ids) + fish_ids = np.unique(chirps_fish_ids) + + # Correct for doubles in chasing on- and offsets to get the right on-/offset pairs + # Get rid of tracking faults (two onsets or two offsets after another) + category, timestamps = correct_chasing_events(category, timestamps) + + # Split categories + chasing_onsets = timestamps[category == 0] + nrecording_chasing_onsets.append(chasing_onsets) + chasing_offsets = timestamps[category == 1] + nrecording_chasing_offsets.append(chasing_offsets) + physical_contacts = timestamps[category == 2] + nrecording_physicals.append(physical_contacts) + + # Define time window for chirps around event analysis + time_before_event = 30 + time_after_event = 60 + dt = 0.01 + width = 1.5 # width of kernel for all recordings, currently gaussian kernel + recording_width = 2 # width of kernel for each recording + time = np.arange(-time_before_event, time_after_event, dt) + + ##### Chirps around events, all fish, all recordings ##### + # Centered chirps per event type + nrecording_centered_onset_chirps = [] + nrecording_centered_offset_chirps = [] + nrecording_centered_physical_chirps = [] + # Bootstrapped chirps per recording and per event: 27[1000[n]] 27 recs, 1000 shuffles, n chirps + nrecording_shuffled_convolved_onset_chirps = [] + nrecording_shuffled_convolved_offset_chirps = [] + nrecording_shuffled_convolved_physical_chirps = [] + + nbootstrapping = 100 + + for i in range(len(nrecording_chirps)): + chirps = nrecording_chirps[i] + chasing_onsets = nrecording_chasing_onsets[i] + chasing_offsets = nrecording_chasing_offsets[i] + 
physical_contacts = nrecording_physicals[i] + + # Chirps around chasing onsets + _, centered_chasing_onset_chirps, cc_chasing_onset_chirps = event_triggered_chirps( + chasing_onsets, chirps, time_before_event, time_after_event, dt, recording_width) + # Chirps around chasing offsets + _, centered_chasing_offset_chirps, cc_chasing_offset_chirps = event_triggered_chirps( + chasing_offsets, chirps, time_before_event, time_after_event, dt, recording_width) + # Chirps around physical contacts + _, centered_physical_chirps, cc_physical_chirps = event_triggered_chirps( + physical_contacts, chirps, time_before_event, time_after_event, dt, recording_width) + + nrecording_centered_onset_chirps.append(centered_chasing_onset_chirps) + nrecording_centered_offset_chirps.append( + centered_chasing_offset_chirps) + nrecording_centered_physical_chirps.append(centered_physical_chirps) + + ## Shuffled chirps ## + nshuffled_onset_chirps = [] + nshuffled_offset_chirps = [] + nshuffled_physical_chirps = [] + + # for j in tqdm(range(nbootstrapping)): + # # Calculate interchirp intervals; add first chirp timestamp in beginning to get equal lengths + # interchirp_intervals = np.append(np.array([chirps[0]]), np.diff(chirps)) + # np.random.shuffle(interchirp_intervals) + # shuffled_chirps = np.cumsum(interchirp_intervals) + # # Shuffled chasing onset chirps + # _, _, cc_shuffled_onset_chirps = event_triggered_chirps(chasing_onsets, shuffled_chirps, time_before_event, time_after_event, dt, recording_width) + # nshuffled_onset_chirps.append(cc_shuffled_onset_chirps) + # # Shuffled chasing offset chirps + # _, _, cc_shuffled_offset_chirps = event_triggered_chirps(chasing_offsets, shuffled_chirps, time_before_event, time_after_event, dt, recording_width) + # nshuffled_offset_chirps.append(cc_shuffled_offset_chirps) + # # Shuffled physical contact chirps + # _, _, cc_shuffled_physical_chirps = event_triggered_chirps(physical_contacts, shuffled_chirps, time_before_event, time_after_event, dt, 
recording_width) + # nshuffled_physical_chirps.append(cc_shuffled_physical_chirps) + + # rec_shuffled_q5_onset, rec_shuffled_median_onset, rec_shuffled_q95_onset = np.percentile( + # nshuffled_onset_chirps, (5, 50, 95), axis=0) + # rec_shuffled_q5_offset, rec_shuffled_median_offset, rec_shuffled_q95_offset = np.percentile( + # nshuffled_offset_chirps, (5, 50, 95), axis=0) + # rec_shuffled_q5_physical, rec_shuffled_median_physical, rec_shuffled_q95_physical = np.percentile( + # nshuffled_physical_chirps, (5, 50, 95), axis=0) + + # #### Recording plots #### + # fig, ax = plt.subplots(1, 3, figsize=(28*ps.cm, 16*ps.cm, ), constrained_layout=True, sharey='all') + # ax[0].set_xlabel('Time[s]') + + # # Plot chasing onsets + # ax[0].set_ylabel('Chirp rate [Hz]') + # ax[0].plot(time, cc_chasing_onset_chirps, color=ps.yellow, zorder=2) + # ax0 = ax[0].twinx() + # ax0.eventplot(centered_chasing_onset_chirps, linelengths=0.2, colors=ps.gray, alpha=0.25, zorder=1) + # ax0.vlines(0, 0, 1.5, ps.white, 'dashed') + # ax[0].set_zorder(ax0.get_zorder()+1) + # ax[0].patch.set_visible(False) + # ax0.set_yticklabels([]) + # ax0.set_yticks([]) + # ######## median - q5, median + q95 + # ax[0].fill_between(time, rec_shuffled_q5_onset, rec_shuffled_q95_onset, color=ps.gray, alpha=0.5) + # ax[0].plot(time, rec_shuffled_median_onset, color=ps.black) + + # # Plot chasing offets + # ax[1].set_xlabel('Time[s]') + # ax[1].plot(time, cc_chasing_offset_chirps, color=ps.orange, zorder=2) + # ax1 = ax[1].twinx() + # ax1.eventplot(centered_chasing_offset_chirps, linelengths=0.2, colors=ps.gray, alpha=0.25, zorder=1) + # ax1.vlines(0, 0, 1.5, ps.white, 'dashed') + # ax[1].set_zorder(ax1.get_zorder()+1) + # ax[1].patch.set_visible(False) + # ax1.set_yticklabels([]) + # ax1.set_yticks([]) + # ax[1].fill_between(time, rec_shuffled_q5_offset, rec_shuffled_q95_offset, color=ps.gray, alpha=0.5) + # ax[1].plot(time, rec_shuffled_median_offset, color=ps.black) + + # # Plot physical contacts + # 
ax[2].set_xlabel('Time[s]') + # ax[2].plot(time, cc_physical_chirps, color=ps.maroon, zorder=2) + # ax2 = ax[2].twinx() + # ax2.eventplot(centered_physical_chirps, linelengths=0.2, colors=ps.gray, alpha=0.25, zorder=1) + # ax2.vlines(0, 0, 1.5, ps.white, 'dashed') + # ax[2].set_zorder(ax2.get_zorder()+1) + # ax[2].patch.set_visible(False) + # ax2.set_yticklabels([]) + # ax2.set_yticks([]) + # ax[2].fill_between(time, rec_shuffled_q5_physical, rec_shuffled_q95_physical, color=ps.gray, alpha=0.5) + # ax[2].plot(time, rec_shuffled_median_physical, ps.black) + # fig.suptitle(f'Recording: {i}') + # # plt.show() + # plt.close() + + # nrecording_shuffled_convolved_onset_chirps.append(nshuffled_onset_chirps) + # nrecording_shuffled_convolved_offset_chirps.append(nshuffled_offset_chirps) + # nrecording_shuffled_convolved_physical_chirps.append(nshuffled_physical_chirps) + + #### New shuffle approach #### + bootstrap_onset = [] + bootstrap_offset = [] + bootstrap_physical = [] + + # New bootstrapping approach + for n in range(nbootstrapping): + diff_onset = np.diff( + np.sort(flatten(nrecording_centered_onset_chirps))) + diff_offset = np.diff( + np.sort(flatten(nrecording_centered_offset_chirps))) + diff_physical = np.diff( + np.sort(flatten(nrecording_centered_physical_chirps))) + + np.random.shuffle(diff_onset) + shuffled_onset = np.cumsum(diff_onset) + np.random.shuffle(diff_offset) + shuffled_offset = np.cumsum(diff_offset) + np.random.shuffle(diff_physical) + shuffled_physical = np.cumsum(diff_physical) + + kde_onset = (acausal_kde1d(shuffled_onset, time, width))/(27*100) + kde_offset = (acausal_kde1d(shuffled_offset, time, width))/(27*100) + kde_physical = (acausal_kde1d(shuffled_physical, time, width))/(27*100) + + bootstrap_onset.append(kde_onset) + bootstrap_offset.append(kde_offset) + bootstrap_physical.append(kde_physical) + + # New shuffle approach q5, q50, q95 + onset_q5, onset_median, onset_q95 = np.percentile( + bootstrap_onset, [5, 50, 95], axis=0) + offset_q5, 
offset_median, offset_q95 = np.percentile( + bootstrap_offset, [5, 50, 95], axis=0) + physical_q5, physical_median, physical_q95 = np.percentile( + bootstrap_physical, [5, 50, 95], axis=0) + + # vstack um 1. Dim zu cutten + # nrecording_shuffled_convolved_onset_chirps = np.vstack(nrecording_shuffled_convolved_onset_chirps) + # nrecording_shuffled_convolved_offset_chirps = np.vstack(nrecording_shuffled_convolved_offset_chirps) + # nrecording_shuffled_convolved_physical_chirps = np.vstack(nrecording_shuffled_convolved_physical_chirps) + + # shuffled_q5_onset, shuffled_median_onset, shuffled_q95_onset = np.percentile( + # nrecording_shuffled_convolved_onset_chirps, (5, 50, 95), axis=0) + # shuffled_q5_offset, shuffled_median_offset, shuffled_q95_offset = np.percentile( + # nrecording_shuffled_convolved_offset_chirps, (5, 50, 95), axis=0) + # shuffled_q5_physical, shuffled_median_physical, shuffled_q95_physical = np.percentile( + # nrecording_shuffled_convolved_physical_chirps, (5, 50, 95), axis=0) + + # Flatten all chirps + all_chirps = np.concatenate(nrecording_chirps).ravel() # not centered + + # Flatten event timestamps + all_onsets = np.concatenate( + nrecording_chasing_onsets).ravel() # not centered + all_offsets = np.concatenate( + nrecording_chasing_offsets).ravel() # not centered + all_physicals = np.concatenate( + nrecording_physicals).ravel() # not centered + + # Flatten all chirps around events + all_onset_chirps = np.concatenate( + nrecording_centered_onset_chirps).ravel() # centered + all_offset_chirps = np.concatenate( + nrecording_centered_offset_chirps).ravel() # centered + all_physical_chirps = np.concatenate( + nrecording_centered_physical_chirps).ravel() # centered + + # Convolute all chirps + # Divide by total number of each event over all recordings + all_onset_chirps_convolved = (acausal_kde1d( + all_onset_chirps, time, width)) / len(all_onsets) + all_offset_chirps_convolved = (acausal_kde1d( + all_offset_chirps, time, width)) / len(all_offsets) 
+ all_physical_chirps_convolved = (acausal_kde1d( + all_physical_chirps, time, width)) / len(all_physicals) + + # Plot all events with all shuffled + fig, ax = plt.subplots(1, 3, figsize=( + 28*ps.cm, 16*ps.cm, ), constrained_layout=True, sharey='all') + # offsets = np.arange(1,28,1) + ax[0].set_xlabel('Time[s]') + + # Plot chasing onsets + ax[0].set_ylabel('Chirp rate [Hz]') + ax[0].plot(time, all_onset_chirps_convolved, color=ps.yellow, zorder=2) + ax0 = ax[0].twinx() + nrecording_centered_onset_chirps = np.asarray( + nrecording_centered_onset_chirps, dtype=object) + ax0.eventplot(np.array(nrecording_centered_onset_chirps), + linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1) + ax0.vlines(0, 0, 1.5, ps.white, 'dashed') + ax[0].set_zorder(ax0.get_zorder()+1) + ax[0].patch.set_visible(False) + ax0.set_yticklabels([]) + ax0.set_yticks([]) + # ax[0].fill_between(time, shuffled_q5_onset, shuffled_q95_onset, color=ps.gray, alpha=0.5) + # ax[0].plot(time, shuffled_median_onset, color=ps.black) + ax[0].fill_between(time, onset_q5, onset_q95, color=ps.gray, alpha=0.5) + ax[0].plot(time, onset_median, color=ps.black) + + # Plot chasing offets + ax[1].set_xlabel('Time[s]') + ax[1].plot(time, all_offset_chirps_convolved, color=ps.orange, zorder=2) + ax1 = ax[1].twinx() + nrecording_centered_offset_chirps = np.asarray( + nrecording_centered_offset_chirps, dtype=object) + ax1.eventplot(np.array(nrecording_centered_offset_chirps), + linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1) + ax1.vlines(0, 0, 1.5, ps.white, 'dashed') + ax[1].set_zorder(ax1.get_zorder()+1) + ax[1].patch.set_visible(False) + ax1.set_yticklabels([]) + ax1.set_yticks([]) + # ax[1].fill_between(time, shuffled_q5_offset, shuffled_q95_offset, color=ps.gray, alpha=0.5) + # ax[1].plot(time, shuffled_median_offset, color=ps.black) + ax[1].fill_between(time, offset_q5, offset_q95, color=ps.gray, alpha=0.5) + ax[1].plot(time, offset_median, color=ps.black) + + # Plot physical contacts + 
ax[2].set_xlabel('Time[s]') + ax[2].plot(time, all_physical_chirps_convolved, color=ps.maroon, zorder=2) + ax2 = ax[2].twinx() + nrecording_centered_physical_chirps = np.asarray( + nrecording_centered_physical_chirps, dtype=object) + ax2.eventplot(np.array(nrecording_centered_physical_chirps), + linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1) + ax2.vlines(0, 0, 1.5, ps.white, 'dashed') + ax[2].set_zorder(ax2.get_zorder()+1) + ax[2].patch.set_visible(False) + ax2.set_yticklabels([]) + ax2.set_yticks([]) + # ax[2].fill_between(time, shuffled_q5_physical, shuffled_q95_physical, color=ps.gray, alpha=0.5) + # ax[2].plot(time, shuffled_median_physical, ps.black) + ax[2].fill_between(time, physical_q5, physical_q95, + color=ps.gray, alpha=0.5) + ax[2].plot(time, physical_median, ps.black) + fig.suptitle('All recordings') + plt.show() + plt.close() + + embed() + + # chasing_durations = [] + # # Calculate chasing duration to evaluate a nice time window for kernel density estimation + # for onset, offset in zip(chasing_onsets, chasing_offsets): + # duration = offset - onset + # chasing_durations.append(duration) + + # fig, ax = plt.subplots() + # ax.boxplot(chasing_durations) + # plt.show() + # plt.close() + + # # Associate chirps to individual fish + # fish1 = chirps[chirps_fish_ids == fish_ids[0]] + # fish2 = chirps[chirps_fish_ids == fish_ids[1]] + # fish = [len(fish1), len(fish2)] + + # Convolution over all recordings + # Rasterplot for each recording + + # #### Chirps around events, winner VS loser, one recording #### + # # Load file with fish ids and winner/loser info + # meta = pd.read_csv('../data/mount_data/order_meta.csv') + # current_recording = meta[meta.index == 43] + # fish1 = current_recording['rec_id1'].values + # fish2 = current_recording['rec_id2'].values + # # Implement check if fish_ids from meta and chirp detection are the same??? 
+ # winner = current_recording['winner'].values + + # if winner == fish1: + # loser = fish2 + # elif winner == fish2: + # loser = fish1 + + # winner_chirps = chirps[chirps_fish_ids == winner] + # loser_chirps = chirps[chirps_fish_ids == loser] + + # # Event triggered winner chirps + # _, winner_centered_onset, winner_cc_onset = event_triggered_chirps(chasing_onsets, winner_chirps, time_before_event, time_after_event, dt, width) + # _, winner_centered_offset, winner_cc_offset = event_triggered_chirps(chasing_offsets, winner_chirps, time_before_event, time_after_event, dt, width) + # _, winner_centered_physical, winner_cc_physical = event_triggered_chirps(physical_contacts, winner_chirps, time_before_event, time_after_event, dt, width) + + # # Event triggered loser chirps + # _, loser_centered_onset, loser_cc_onset = event_triggered_chirps(chasing_onsets, loser_chirps, time_before_event, time_after_event, dt, width) + # _, loser_centered_offset, loser_cc_offset = event_triggered_chirps(chasing_offsets, loser_chirps, time_before_event, time_after_event, dt, width) + # _, loser_centered_physical, loser_cc_physical = event_triggered_chirps(physical_contacts, loser_chirps, time_before_event, time_after_event, dt, width) + + # ########## Winner VS Loser plot ########## + # fig, ax = plt.subplots(2, 3, figsize=(50 / 2.54, 15 / 2.54), constrained_layout=True, sharey='row') + # offset = [1.35] + # ax[1][0].set_xlabel('Time[s]') + # ax[1][1].set_xlabel('Time[s]') + # ax[1][2].set_xlabel('Time[s]') + # # Plot winner chasing onsets + # ax[0][0].set_ylabel('Chirp rate [Hz]') + # ax[0][0].plot(time, winner_cc_onset, color='tab:blue', zorder=100) + # ax0 = ax[0][0].twinx() + # ax0.eventplot(np.array([winner_centered_onset]), lineoffsets=offset, linelengths=0.1, colors=['tab:green'], alpha=0.25, zorder=-100) + # ax0.set_ylabel('Event') + # ax0.vlines(0, 0, 1.5, 'tab:grey', 'dashed') + # ax[0][0].set_zorder(ax0.get_zorder()+1) + # ax[0][0].patch.set_visible(False) + # 
ax0.set_yticklabels([]) + # ax0.set_yticks([]) + # # Plot winner chasing offets + # ax[0][1].plot(time, winner_cc_offset, color='tab:blue', zorder=100) + # ax1 = ax[0][1].twinx() + # ax1.eventplot(np.array([winner_centered_offset]), lineoffsets=offset, linelengths=0.1, colors=['tab:purple'], alpha=0.25, zorder=-100) + # ax1.vlines(0, 0, 1.5, 'tab:grey', 'dashed') + # ax[0][1].set_zorder(ax1.get_zorder()+1) + # ax[0][1].patch.set_visible(False) + # ax1.set_yticklabels([]) + # ax1.set_yticks([]) + # # Plot winner physical contacts + # ax[0][2].plot(time, winner_cc_physical, color='tab:blue', zorder=100) + # ax2 = ax[0][2].twinx() + # ax2.eventplot(np.array([winner_centered_physical]), lineoffsets=offset, linelengths=0.1, colors=['tab:red'], alpha=0.25, zorder=-100) + # ax2.vlines(0, 0, 1.5, 'tab:grey', 'dashed') + # ax[0][2].set_zorder(ax2.get_zorder()+1) + # ax[0][2].patch.set_visible(False) + # ax2.set_yticklabels([]) + # ax2.set_yticks([]) + # # Plot loser chasing onsets + # ax[1][0].set_ylabel('Chirp rate [Hz]') + # ax[1][0].plot(time, loser_cc_onset, color='tab:blue', zorder=100) + # ax3 = ax[1][0].twinx() + # ax3.eventplot(np.array([loser_centered_onset]), lineoffsets=offset, linelengths=0.1, colors=['tab:green'], alpha=0.25, zorder=-100) + # ax3.vlines(0, 0, 1.5, 'tab:grey', 'dashed') + # ax[1][0].set_zorder(ax3.get_zorder()+1) + # ax[1][0].patch.set_visible(False) + # ax3.set_yticklabels([]) + # ax3.set_yticks([]) + # # Plot loser chasing offsets + # ax[1][1].plot(time, loser_cc_offset, color='tab:blue', zorder=100) + # ax4 = ax[1][1].twinx() + # ax4.eventplot(np.array([loser_centered_offset]), lineoffsets=offset, linelengths=0.1, colors=['tab:purple'], alpha=0.25, zorder=-100) + # ax4.vlines(0, 0, 1.5, 'tab:grey', 'dashed') + # ax[1][1].set_zorder(ax4.get_zorder()+1) + # ax[1][1].patch.set_visible(False) + # ax4.set_yticklabels([]) + # ax4.set_yticks([]) + # # Plot loser physical contacts + # ax[1][2].plot(time, loser_cc_physical, color='tab:blue', 
zorder=100) + # ax5 = ax[1][2].twinx() + # ax5.eventplot(np.array([loser_centered_physical]), lineoffsets=offset, linelengths=0.1, colors=['tab:red'], alpha=0.25, zorder=-100) + # ax5.vlines(0, 0, 1.5, 'tab:grey', 'dashed') + # ax[1][2].set_zorder(ax5.get_zorder()+1) + # ax[1][2].patch.set_visible(False) + # ax5.set_yticklabels([]) + # ax5.set_yticks([]) + # plt.show() + # plt.close() + + # for i in range(len(fish_ids)): + # fish = fish_ids[i] + # chirps_temp = chirps[chirps_fish_ids == fish] + # print(fish) + + #### Chirps around events, only losers, one recording #### + + +if __name__ == '__main__': + # Path to the data + datapath = '../data/mount_data/' + main(datapath) diff --git a/code/extract_chirps.py b/code/extract_chirps.py new file mode 100644 index 0000000..77e3e8d --- /dev/null +++ b/code/extract_chirps.py @@ -0,0 +1,58 @@ +import os +import pandas as pd +import numpy as np +from chirpdetection import chirpdetection +from IPython import embed + +# check rec ../data/mount_data/2020-03-25-10_00/ starting at 3175 + + +def get_valid_datasets(dataroot): + + datasets = sorted([name for name in os.listdir(dataroot) if os.path.isdir( + os.path.join(dataroot, name))]) + + valid_datasets = [] + for dataset in datasets: + + path = os.path.join(dataroot, dataset) + csv_name = '-'.join(dataset.split('-')[:3]) + '.csv' + + if os.path.exists(os.path.join(path, csv_name)) is False: + continue + + if os.path.exists(os.path.join(path, 'ident_v.npy')) is False: + continue + + ident = np.load(os.path.join(path, 'ident_v.npy')) + number_of_fish = len(np.unique(ident[~np.isnan(ident)])) + if number_of_fish != 2: + continue + + valid_datasets.append(dataset) + + datapaths = [os.path.join(dataroot, dataset) + + '/' for dataset in valid_datasets] + + return datapaths, valid_datasets + + +def main(datapaths): + + for path in datapaths: + chirpdetection(path, plot='show') + + +if __name__ == '__main__': + + dataroot = '../data/mount_data/' + + + datapaths, valid_datasets= 
get_valid_datasets(dataroot) + + recs = pd.DataFrame(columns=['recording'], data=valid_datasets) + recs.to_csv('../recs.csv', index=False) + # datapaths = ['../data/mount_data/2020-03-25-10_00/'] + main(datapaths) + +# window 1524 + 244 in dataset index 4 is nice example diff --git a/code/get_behaviour.py b/code/get_behaviour.py new file mode 100644 index 0000000..36311ca --- /dev/null +++ b/code/get_behaviour.py @@ -0,0 +1,35 @@ +import os +from paramiko import SSHClient +from scp import SCPClient +from IPython import embed +from pandas import read_csv + +ssh = SSHClient() +ssh.load_system_host_keys() + +ssh.connect(hostname='kraken', + username='efish', + password='fwNix4U', + ) + + +# SCPCLient takes a paramiko transport as its only argument +scp = SCPClient(ssh.get_transport()) + +data = read_csv('../recs.csv') +foldernames = data['recording'].values + +directory = f'/Users/acfw/Documents/uni_tuebingen/chirpdetection/GP2023_chirp_detection/data/mount_data/' +for foldername in foldernames: + + if not os.path.exists(directory+foldername): + os.makedirs(directory+foldername) + + files = [('-').join(foldername.split('-')[:3])+'.csv','chirp_ids.npy', 'chirps.npy', 'fund_v.npy', 'ident_v.npy', 'idx_v.npy', 'times.npy', 'spec.npy', 'LED_on_time.npy', 'sign_v.npy'] + + + for f in files: + scp.get(f'/home/efish/behavior/2019_tube_competition/{foldername}/{f}', + directory+foldername) + +scp.close() diff --git a/code/modules/behaviour_handling.py b/code/modules/behaviour_handling.py new file mode 100644 index 0000000..94a0ca1 --- /dev/null +++ b/code/modules/behaviour_handling.py @@ -0,0 +1,169 @@ +import numpy as np +import os +from IPython import embed + +from pandas import read_csv +from modules.logger import makeLogger +from modules.datahandling import causal_kde1d, acausal_kde1d, flatten + + +logger = makeLogger(__name__) + + +class Behavior: + """Load behavior data from csv file as class attributes + Attributes + ---------- + behavior: 0: chasing onset, 1: chasing 
offset, 2: physical contact + behavior_type: + behavioral_category: + comment_start: + comment_stop: + dataframe: pandas dataframe with all the data + duration_s: + media_file: + observation_date: + observation_id: + start_s: start time of the event in seconds + stop_s: stop time of the event in seconds + total_length: + """ + + def __init__(self, folder_path: str) -> None: + + LED_on_time_BORIS = np.load(os.path.join( + folder_path, 'LED_on_time.npy'), allow_pickle=True) + + csv_filename = os.path.split(folder_path[:-1])[-1] + csv_filename = '-'.join(csv_filename.split('-')[:-1]) + '.csv' + # embed() + + # csv_filename = [f for f in os.listdir( + # folder_path) if f.endswith('.csv')][0] + # logger.info(f'CSV file: {csv_filename}') + self.dataframe = read_csv(os.path.join(folder_path, csv_filename)) + + self.chirps = np.load(os.path.join( + folder_path, 'chirps.npy'), allow_pickle=True) + self.chirps_ids = np.load(os.path.join( + folder_path, 'chirp_ids.npy'), allow_pickle=True) + + self.ident = np.load(os.path.join( + folder_path, 'ident_v.npy'), allow_pickle=True) + self.idx = np.load(os.path.join( + folder_path, 'idx_v.npy'), allow_pickle=True) + self.freq = np.load(os.path.join( + folder_path, 'fund_v.npy'), allow_pickle=True) + self.time = np.load(os.path.join( + folder_path, "times.npy"), allow_pickle=True) + self.spec = np.load(os.path.join( + folder_path, "spec.npy"), allow_pickle=True) + + for k, key in enumerate(self.dataframe.keys()): + key = key.lower() + if ' ' in key: + key = key.replace(' ', '_') + if '(' in key: + key = key.replace('(', '') + key = key.replace(')', '') + setattr(self, key, np.array( + self.dataframe[self.dataframe.keys()[k]])) + + last_LED_t_BORIS = LED_on_time_BORIS[-1] + real_time_range = self.time[-1] - self.time[0] + factor = 1.034141 + shift = last_LED_t_BORIS - real_time_range * factor + self.start_s = (self.start_s - shift) / factor + self.stop_s = (self.stop_s - shift) / factor + + +def correct_chasing_events( + category: 
np.ndarray, + timestamps: np.ndarray +) -> tuple[np.ndarray, np.ndarray]: + + onset_ids = np.arange( + len(category))[category == 0] + offset_ids = np.arange( + len(category))[category == 1] + + wrong_bh = np.arange(len(category))[ + category != 2][:-1][np.diff(category[category != 2]) == 0] + + if category[category != 2][-1] == 0: + wrong_bh = np.append( + wrong_bh, + np.arange(len(category))[category != 2][-1]) + + if onset_ids[0] > offset_ids[0]: + offset_ids = np.delete(offset_ids, 0) + help_index = offset_ids[0] + wrong_bh = np.append(wrong_bh, help_index) + + category = np.delete(category, wrong_bh) + timestamps = np.delete(timestamps, wrong_bh) + + new_onset_ids = np.arange( + len(category))[category == 0] + new_offset_ids = np.arange( + len(category))[category == 1] + + # Check whether on- or offset is longer and calculate length difference + + if len(new_onset_ids) > len(new_offset_ids): + embed() + logger.warning('Onsets are greater than offsets') + elif len(new_onset_ids) < len(new_offset_ids): + logger.warning('Offsets are greater than onsets') + elif len(new_onset_ids) == len(new_offset_ids): + # logger.info('Chasing events are equal') + pass + + return category, timestamps + + +def center_chirps( + events: np.ndarray, + chirps: np.ndarray, + time_before_event: int, + time_after_event: int, + # dt: float, + # width: float, +) -> tuple[np.ndarray, np.ndarray, np.ndarray]: + + event_chirps = [] # chirps that are in specified window around event + # timestamps of chirps around event centered on the event timepoint + centered_chirps = [] + + for event_timestamp in events: + + start = event_timestamp - time_before_event + stop = event_timestamp + time_after_event + chirps_around_event = [c for c in chirps if (c >= start) & (c <= stop)] + + if len(chirps_around_event) == 0: + continue + + centered_chirps.append(chirps_around_event - event_timestamp) + event_chirps.append(chirps_around_event) + + centered_chirps = np.sort(flatten(centered_chirps)) + 
event_chirps = np.sort(flatten(event_chirps)) + + if len(centered_chirps) != len(event_chirps): + raise ValueError( + 'Non centered chirps and centered chirps are not equal') + + # time = np.arange(-time_before_event, time_after_event, dt) + + # # Kernel density estimation with some if's + # if len(centered_chirps) == 0: + # centered_chirps = np.array([]) + # centered_chirps_convolved = np.zeros(len(time)) + # else: + # # convert list of arrays to one array for plotting + # centered_chirps = np.concatenate(centered_chirps, axis=0) + # centered_chirps_convolved = (acausal_kde1d( + # centered_chirps, time, width)) / len(event) + + return centered_chirps diff --git a/code/modules/datahandling.py b/code/modules/datahandling.py new file mode 100644 index 0000000..f375d44 --- /dev/null +++ b/code/modules/datahandling.py @@ -0,0 +1,338 @@ +import numpy as np +from typing import List, Any +from scipy.ndimage import gaussian_filter1d +from scipy.stats import gamma, norm + + +def minmaxnorm(data): + """ + Normalize data to [0, 1] + + Parameters + ---------- + data : np.ndarray + Data to normalize. + + Returns + ------- + np.ndarray + Normalized data. + + """ + return (data - np.min(data)) / (np.max(data) - np.min(data)) + + +def instantaneous_frequency( + signal: np.ndarray, + samplerate: int, + smoothing_window: int, +) -> tuple[np.ndarray, np.ndarray]: + """ + Compute the instantaneous frequency of a signal that is approximately + sinusoidal and symmetric around 0. + + Parameters + ---------- + signal : np.ndarray + Signal to compute the instantaneous frequency from. + samplerate : int + Samplerate of the signal. + smoothing_window : int + Window size for the gaussian filter. 
+ + Returns + ------- + tuple[np.ndarray, np.ndarray] + + """ + # calculate instantaneous frequency with zero crossings + roll_signal = np.roll(signal, shift=1) + time_signal = np.arange(len(signal)) / samplerate + period_index = np.arange(len(signal))[(roll_signal < 0) & (signal >= 0)][ + 1:-1 + ] + + upper_bound = np.abs(signal[period_index]) + lower_bound = np.abs(signal[period_index - 1]) + upper_time = np.abs(time_signal[period_index]) + lower_time = np.abs(time_signal[period_index - 1]) + + # create ratio + lower_ratio = lower_bound / (lower_bound + upper_bound) + + # appy to time delta + time_delta = upper_time - lower_time + true_zero = lower_time + lower_ratio * time_delta + + # create new time array + instantaneous_frequency_time = true_zero[:-1] + 0.5 * np.diff(true_zero) + + # compute frequency + instantaneous_frequency = gaussian_filter1d( + 1 / np.diff(true_zero), smoothing_window + ) + + return instantaneous_frequency_time, instantaneous_frequency + + +def purge_duplicates( + timestamps: List[float], threshold: float = 0.5 +) -> List[float]: + """ + Compute the mean of groups of timestamps that are closer to the previous + or consecutive timestamp than the threshold, and return all timestamps that + are further apart from the previous or consecutive timestamp than the + threshold in a single list. + + Parameters + ---------- + timestamps : List[float] + A list of sorted timestamps + threshold : float, optional + The threshold to group the timestamps by, default is 0.5 + + Returns + ------- + List[float] + A list containing a list of timestamps that are further apart than + the threshold and a list of means of the groups of timestamps that + are closer to the previous or consecutive timestamp than the threshold. 
+ """ + # Initialize an empty list to store the groups of timestamps that are + # closer to the previous or consecutive timestamp than the threshold + groups = [] + + # initialize the first group with the first timestamp + group = [timestamps[0]] + + for i in range(1, len(timestamps)): + + # check the difference between current timestamp and previous + # timestamp is less than the threshold + if timestamps[i] - timestamps[i - 1] < threshold: + # add the current timestamp to the current group + group.append(timestamps[i]) + else: + # if the difference is greater than the threshold + # append the current group to the groups list + groups.append(group) + + # start a new group with the current timestamp + group = [timestamps[i]] + + # after iterating through all the timestamps, add the last group to the + # groups list + groups.append(group) + + # get the mean of each group and only include the ones that have more + # than 1 timestamp + means = [np.mean(group) for group in groups if len(group) > 1] + + # get the timestamps that are outliers, i.e. the ones that are alone + # in a group + outliers = [ts for group in groups for ts in group if len(group) == 1] + + # return the outliers and means in a single list + return outliers + means + + +def group_timestamps( + sublists: List[List[float]], at_least_in: int, difference_threshold: float +) -> List[float]: + """ + Groups timestamps that are less than `threshold` milliseconds apart from + at least `n` other sublists. + Returns a list of the mean of each group. + If any of the sublists is empty, it will be ignored. + + Parameters + ---------- + sublists : List[List[float]] + a list of sublists, each containing timestamps + n : int + minimum number of sublists that a timestamp must be close to in order + to be grouped + threshold : float + the maximum difference in milliseconds between timestamps to be + considered a match + + Returns + ------- + List[float] + a list of the mean of each group. 
+ + """ + # Flatten the sublists and sort the timestamps + timestamps = [ + timestamp for sublist in sublists if sublist for timestamp in sublist + ] + timestamps.sort() + + if len(timestamps) == 0: + return [] + + groups = [] + current_group = [timestamps[0]] + + # Group timestamps that are less than threshold milliseconds apart + for i in range(1, len(timestamps)): + if timestamps[i] - timestamps[i - 1] < difference_threshold: + current_group.append(timestamps[i]) + else: + groups.append(current_group) + current_group = [timestamps[i]] + + groups.append(current_group) + + # Retain only groups that contain at least n timestamps + final_groups = [] + for group in groups: + if len(group) >= at_least_in: + final_groups.append(group) + + # Calculate the mean of each group + means = [np.mean(group) for group in final_groups] + + return means + + +def flatten(list: List[List[Any]]) -> List: + """ + Flattens a list / array of lists. + + Parameters + ---------- + l : array or list of lists + The list to be flattened + + Returns + ------- + list + The flattened list + """ + return [item for sublist in list for item in sublist] + + +def causal_kde1d(spikes, time, width, shape=2): + """ + causalkde computes a kernel density estimate using a causal kernel (i.e. exponential or gamma distribution). + A shape of 1 turns the gamma distribution into an exponential. 
+ + Parameters + ---------- + spikes : array-like + spike times + time : array-like + sampling time + width : float + kernel width + shape : int, optional + shape of gamma distribution, by default 1 + + Returns + ------- + rate : array-like + instantaneous firing rate + """ + + # compute dt + dt = time[1] - time[0] + + # time on which to compute kernel: + tmax = 10 * width + + # kernel not wider than time + if 2 * tmax > time[-1] - time[0]: + tmax = 0.5 * (time[-1] - time[0]) + + # kernel time + ktime = np.arange(-tmax, tmax, dt) + + # gamma kernel centered in ktime: + kernel = gamma.pdf( + x=ktime, + a=shape, + loc=0, + scale=width, + ) + + # indices of spikes in time array: + indices = np.asarray((spikes - time[0]) / dt, dtype=int) + + # binary spike train: + brate = np.zeros(len(time)) + brate[indices[(indices >= 0) & (indices < len(time))]] = 1.0 + + # convolution with kernel: + rate = np.convolve(brate, kernel, mode="same") + + return rate + + +def acausal_kde1d(spikes, time, width): + """ + causalkde computes a kernel density estimate using a causal kernel (i.e. exponential or gamma distribution). + A shape of 1 turns the gamma distribution into an exponential. 
+ + Parameters + ---------- + spikes : array-like + spike times + time : array-like + sampling time + width : float + kernel width + shape : int, optional + shape of gamma distribution, by default 1 + + Returns + ------- + rate : array-like + instantaneous firing rate + """ + + # compute dt + dt = time[1] - time[0] + + # time on which to compute kernel: + tmax = 10 * width + + # kernel not wider than time + if 2 * tmax > time[-1] - time[0]: + tmax = 0.5 * (time[-1] - time[0]) + + # kernel time + ktime = np.arange(-tmax, tmax, dt) + + # gamma kernel centered in ktime: + kernel = norm.pdf( + x=ktime, + loc=0, + scale=width, + ) + + # indices of spikes in time array: + indices = np.asarray((spikes - time[0]) / dt, dtype=int) + + # binary spike train: + brate = np.zeros(len(time)) + brate[indices[(indices >= 0) & (indices < len(time))]] = 1.0 + + # convolution with kernel: + rate = np.convolve(brate, kernel, mode="same") + + return rate + + +if __name__ == "__main__": + + timestamps = [ + [1.2, 1.5, 1.3], + [], + [1.21, 1.51, 1.31], + [1.19, 1.49, 1.29], + [1.22, 1.52, 1.32], + [1.2, 1.5, 1.3], + ] + print(group_timestamps(timestamps, 2, 0.05)) + print(purge_duplicates([1, 2, 3, 4, 5, 6, 6.02, 7, 8, 8.02], 0.05)) diff --git a/code/modules/filehandling.py b/code/modules/filehandling.py index d25018d..c3c71f2 100644 --- a/code/modules/filehandling.py +++ b/code/modules/filehandling.py @@ -3,6 +3,7 @@ import os import yaml import numpy as np from thunderfish.dataloader import DataLoader +import matplotlib.pyplot as plt class ConfLoader: @@ -36,6 +37,7 @@ class LoadData: def __init__(self, datapath: str) -> None: # load raw data + self.datapath = datapath self.file = os.path.join(datapath, "traces-grid1.raw") self.raw = DataLoader(self.file, 60.0, 0, channel=-1) self.raw_rate = self.raw.samplerate @@ -53,3 +55,23 @@ class LoadData: def __str__(self) -> str: return f"LoadData({self.file})" + + +def make_outputdir(path: str) -> str: + """ + Creates a new directory where the 
path leads if it does not already exist. + + Parameters + ---------- + path : string + path to the new output directory + + Returns + ------- + string + path of the newly created output directory + """ + + if os.path.isdir(path) == False: + os.mkdir(path) + return path diff --git a/code/modules/filters.py b/code/modules/filters.py index 5192cdc..e6d9896 100644 --- a/code/modules/filters.py +++ b/code/modules/filters.py @@ -3,8 +3,8 @@ import numpy as np def bandpass_filter( - data: np.ndarray, - rate: float, + signal: np.ndarray, + samplerate: float, lowf: float, highf: float, ) -> np.ndarray: @@ -12,7 +12,7 @@ def bandpass_filter( Parameters ---------- - data : np.ndarray + signal : np.ndarray The data to be filtered rate : float The sampling rate @@ -26,21 +26,22 @@ def bandpass_filter( np.ndarray The filtered data """ - sos = butter(2, (lowf, highf), "bandpass", fs=rate, output="sos") - fdata = sosfiltfilt(sos, data) - return fdata + sos = butter(2, (lowf, highf), "bandpass", fs=samplerate, output="sos") + filtered_signal = sosfiltfilt(sos, signal) + + return filtered_signal def highpass_filter( - data: np.ndarray, - rate: float, + signal: np.ndarray, + samplerate: float, cutoff: float, ) -> np.ndarray: """Highpass filter a signal. Parameters ---------- - data : np.ndarray + signal : np.ndarray The data to be filtered rate : float The sampling rate @@ -52,14 +53,15 @@ def highpass_filter( np.ndarray The filtered data """ - sos = butter(2, cutoff, "highpass", fs=rate, output="sos") - fdata = sosfiltfilt(sos, data) - return fdata + sos = butter(2, cutoff, "highpass", fs=samplerate, output="sos") + filtered_signal = sosfiltfilt(sos, signal) + + return filtered_signal def lowpass_filter( - data: np.ndarray, - rate: float, + signal: np.ndarray, + samplerate: float, cutoff: float ) -> np.ndarray: """Lowpass filter a signal. 
@@ -78,21 +80,25 @@ def lowpass_filter( np.ndarray The filtered data """ - sos = butter(2, cutoff, "lowpass", fs=rate, output="sos") - fdata = sosfiltfilt(sos, data) - return fdata + sos = butter(2, cutoff, "lowpass", fs=samplerate, output="sos") + filtered_signal = sosfiltfilt(sos, signal) + return filtered_signal -def envelope(data: np.ndarray, rate: float, freq: float) -> np.ndarray: + +def envelope(signal: np.ndarray, + samplerate: float, + cutoff_frequency: float + ) -> np.ndarray: """Calculate the envelope of a signal using a lowpass filter. Parameters ---------- - data : np.ndarray + signal : np.ndarray The signal to calculate the envelope of - rate : float + samplingrate : float The sampling rate of the signal - freq : float + cutoff_frequency : float The cutoff frequency of the lowpass filter Returns @@ -100,6 +106,7 @@ def envelope(data: np.ndarray, rate: float, freq: float) -> np.ndarray: np.ndarray The envelope of the signal """ - sos = butter(2, freq, "lowpass", fs=rate, output="sos") - envelope = np.sqrt(2) * sosfiltfilt(sos, np.abs(data)) + sos = butter(2, cutoff_frequency, "lowpass", fs=samplerate, output="sos") + envelope = np.sqrt(2) * sosfiltfilt(sos, np.abs(signal)) + return envelope diff --git a/code/modules/logger.py b/code/modules/logger.py new file mode 100644 index 0000000..5dabf80 --- /dev/null +++ b/code/modules/logger.py @@ -0,0 +1,41 @@ +import logging + + +def makeLogger(name: str): + + # create logger formats for file and terminal + file_formatter = logging.Formatter( + "[ %(levelname)s ] ~ %(asctime)s ~ %(module)s.%(funcName)s: %(message)s") + console_formatter = logging.Formatter( + "[ %(levelname)s ] in %(module)s.%(funcName)s: %(message)s") + + # create logging file if loglevel is debug + file_handler = logging.FileHandler(f"gridtools_log.log", mode="w") + file_handler.setLevel(logging.WARN) + file_handler.setFormatter(file_formatter) + + # create stream handler for terminal output + console_handler = logging.StreamHandler() + 
console_handler.setFormatter(console_formatter) + console_handler.setLevel(logging.INFO) + + # create script specific logger + logger = logging.getLogger(name) + logger.addHandler(file_handler) + logger.addHandler(console_handler) + logger.setLevel(logging.INFO) + + return logger + + +if __name__ == "__main__": + + # initiate logger + mylogger = makeLogger(__name__) + + # test logger levels + mylogger.debug("This is for debugging!") + mylogger.info("This is an info.") + mylogger.warning("This is a warning.") + mylogger.error("This is an error.") + mylogger.critical("This is a critical error!") diff --git a/code/modules/plotstyle.py b/code/modules/plotstyle.py index 9e382a7..61aedcc 100644 --- a/code/modules/plotstyle.py +++ b/code/modules/plotstyle.py @@ -23,17 +23,21 @@ def PlotStyle() -> None: sky = "#89dceb" teal = "#94e2d5" green = "#a6e3a1" - yellow = "#f9e2af" - orange = "#fab387" - maroon = "#eba0ac" - red = "#f38ba8" - purple = "#cba6f7" - pink = "#f5c2e7" + yellow = "#f9d67f" + orange = "#faa472" + maroon = "#eb8486" + red = "#f37588" + purple = "#d89bf7" + pink = "#f59edb" lavender = "#b4befe" + gblue1 = "#89b4fa" + gblue2 = "#89dceb" + gblue3 = "#a6e3a1" @classmethod def lims(cls, track1, track2): - """Helper function to get frequency y axis limits from two fundamental frequency tracks. + """Helper function to get frequency y axis limits from two + fundamental frequency tracks. 
Args: track1 (array): First track @@ -91,12 +95,23 @@ def PlotStyle() -> None: ax.tick_params(left=False, labelleft=False) ax.patch.set_visible(False) + @classmethod + def hide_xax(cls, ax): + ax.xaxis.set_visible(False) + ax.spines["bottom"].set_visible(False) + + @classmethod + def hide_yax(cls, ax): + ax.yaxis.set_visible(False) + ax.spines["left"].set_visible(False) + @classmethod def set_boxplot_color(cls, bp, color): plt.setp(bp["boxes"], color=color) - plt.setp(bp["whiskers"], color=color) - plt.setp(bp["caps"], color=color) - plt.setp(bp["medians"], color=color) + plt.setp(bp["whiskers"], color=white) + plt.setp(bp["caps"], color=white) + plt.setp(bp["medians"], color=white) + @classmethod def label_subplots(cls, labels, axes, fig): @@ -215,9 +230,9 @@ def PlotStyle() -> None: plt.rc("legend", fontsize=SMALL_SIZE) # legend fontsize plt.rc("figure", titlesize=BIGGER_SIZE) # fontsize of the figure title - plt.rcParams["image.cmap"] = 'cmo.haline' - # plt.rcParams["axes.xmargin"] = 0.1 - # plt.rcParams["axes.ymargin"] = 0.15 + plt.rcParams["image.cmap"] = "cmo.haline" + plt.rcParams["axes.xmargin"] = 0.05 + plt.rcParams["axes.ymargin"] = 0.1 plt.rcParams["axes.titlelocation"] = "left" plt.rcParams["axes.titlesize"] = BIGGER_SIZE # plt.rcParams["axes.titlepad"] = -10 @@ -230,9 +245,9 @@ def PlotStyle() -> None: plt.rcParams["legend.borderaxespad"] = 0.5 plt.rcParams["legend.fancybox"] = False - # specify the custom font to use - plt.rcParams["font.family"] = "sans-serif" - plt.rcParams["font.sans-serif"] = "Helvetica Now Text" + # # specify the custom font to use + # plt.rcParams["font.family"] = "sans-serif" + # plt.rcParams["font.sans-serif"] = "Helvetica Now Text" # dark mode modifications plt.rcParams["boxplot.flierprops.color"] = white @@ -247,31 +262,33 @@ def PlotStyle() -> None: # plt.rcParams["axes.grid"] = True # display grid or not # plt.rcParams["axes.grid.axis"] = "y" # which axis the grid is applied to plt.rcParams["axes.labelcolor"] = white - 
plt.rcParams["axes.axisbelow"] = True # draw axis gridlines and ticks: + plt.rcParams["axes.axisbelow"] = True # draw axis gridlines and ticks: plt.rcParams["axes.spines.left"] = True # display axis spines plt.rcParams["axes.spines.bottom"] = True plt.rcParams["axes.spines.top"] = False plt.rcParams["axes.spines.right"] = False plt.rcParams["axes.prop_cycle"] = cycler( - 'color', [ - '#b4befe', - '#89b4fa', - '#74c7ec', - '#89dceb', - '#94e2d5', - '#a6e3a1', - '#f9e2af', - '#fab387', - '#eba0ac', - '#f38ba8', - '#cba6f7', - '#f5c2e7', - ]) + "color", + [ + "#b4befe", + "#89b4fa", + "#74c7ec", + "#89dceb", + "#94e2d5", + "#a6e3a1", + "#f9e2af", + "#fab387", + "#eba0ac", + "#f38ba8", + "#cba6f7", + "#f5c2e7", + ], + ) plt.rcParams["xtick.color"] = gray # color of the ticks plt.rcParams["ytick.color"] = gray # color of the ticks plt.rcParams["grid.color"] = dark_gray # grid color - plt.rcParams["figure.facecolor"] = black # figure face color - plt.rcParams["figure.edgecolor"] = "#555169" # figure edge color + plt.rcParams["figure.facecolor"] = black # figure face color + plt.rcParams["figure.edgecolor"] = black # figure edge color plt.rcParams["savefig.facecolor"] = black # figure face color when saving return style @@ -281,12 +298,11 @@ if __name__ == "__main__": s = PlotStyle() - import matplotlib.pyplot as plt + import matplotlib.cbook as cbook import matplotlib.cm as cm import matplotlib.pyplot as plt - import matplotlib.cbook as cbook - from matplotlib.path import Path from matplotlib.patches import PathPatch + from matplotlib.path import Path # Fixing random state for reproducibility np.random.seed(19680801) @@ -294,14 +310,20 @@ if __name__ == "__main__": delta = 0.025 x = y = np.arange(-3.0, 3.0, delta) X, Y = np.meshgrid(x, y) - Z1 = np.exp(-X**2 - Y**2) - Z2 = np.exp(-(X - 1)**2 - (Y - 1)**2) + Z1 = np.exp(-(X**2) - Y**2) + Z2 = np.exp(-((X - 1) ** 2) - (Y - 1) ** 2) Z = (Z1 - Z2) * 2 fig1, ax = plt.subplots() - im = ax.imshow(Z, interpolation='bilinear', 
cmap=cm.RdYlGn, - origin='lower', extent=[-3, 3, -3, 3], - vmax=abs(Z).max(), vmin=-abs(Z).max()) + im = ax.imshow( + Z, + interpolation="bilinear", + cmap=cm.RdYlGn, + origin="lower", + extent=[-3, 3, -3, 3], + vmax=abs(Z).max(), + vmin=-abs(Z).max(), + ) plt.show() @@ -314,22 +336,21 @@ if __name__ == "__main__": all_data = [np.random.normal(0, std, 100) for std in range(6, 10)] # plot violin plot - axs[0].violinplot(all_data, - showmeans=False, - showmedians=True) - axs[0].set_title('Violin plot') + axs[0].violinplot(all_data, showmeans=False, showmedians=True) + axs[0].set_title("Violin plot") # plot box plot axs[1].boxplot(all_data) - axs[1].set_title('Box plot') + axs[1].set_title("Box plot") # adding horizontal grid lines for ax in axs: ax.yaxis.grid(True) - ax.set_xticks([y + 1 for y in range(len(all_data))], - labels=['x1', 'x2', 'x3', 'x4']) - ax.set_xlabel('Four separate samples') - ax.set_ylabel('Observed values') + ax.set_xticks( + [y + 1 for y in range(len(all_data))], labels=["x1", "x2", "x3", "x4"] + ) + ax.set_xlabel("Four separate samples") + ax.set_ylabel("Observed values") plt.show() @@ -341,24 +362,42 @@ if __name__ == "__main__": theta = np.linspace(0.0, 2 * np.pi, N, endpoint=False) radii = 10 * np.random.rand(N) width = np.pi / 4 * np.random.rand(N) - colors = cmo.cm.haline(radii / 10.) 
+ colors = cmo.cm.haline(radii / 10.0) - ax = plt.subplot(projection='polar') + ax = plt.subplot(projection="polar") ax.bar(theta, radii, width=width, bottom=0.0, color=colors, alpha=0.5) plt.show() - methods = [None, 'none', 'nearest', 'bilinear', 'bicubic', 'spline16', - 'spline36', 'hanning', 'hamming', 'hermite', 'kaiser', 'quadric', - 'catrom', 'gaussian', 'bessel', 'mitchell', 'sinc', 'lanczos'] + methods = [ + None, + "none", + "nearest", + "bilinear", + "bicubic", + "spline16", + "spline36", + "hanning", + "hamming", + "hermite", + "kaiser", + "quadric", + "catrom", + "gaussian", + "bessel", + "mitchell", + "sinc", + "lanczos", + ] # Fixing random state for reproducibility np.random.seed(19680801) grid = np.random.rand(4, 4) - fig, axs = plt.subplots(nrows=3, ncols=6, figsize=(9, 6), - subplot_kw={'xticks': [], 'yticks': []}) + fig, axs = plt.subplots( + nrows=3, ncols=6, figsize=(9, 6), subplot_kw={"xticks": [], "yticks": []} + ) for ax, interp_method in zip(axs.flat, methods): ax.imshow(grid, interpolation=interp_method) diff --git a/code/plot_chirp_bodylegth(old).py b/code/plot_chirp_bodylegth(old).py new file mode 100644 index 0000000..68d31bd --- /dev/null +++ b/code/plot_chirp_bodylegth(old).py @@ -0,0 +1,277 @@ +import numpy as np +from extract_chirps import get_valid_datasets + +import os + +import numpy as np +import matplotlib.pyplot as plt +from thunderfish.powerspectrum import decibel + +from IPython import embed +from pandas import read_csv +from modules.logger import makeLogger +from modules.plotstyle import PlotStyle +from modules.behaviour_handling import Behavior, correct_chasing_events + +ps = PlotStyle() + +logger = makeLogger(__name__) + + +def get_chirp_winner_loser(folder_name, Behavior, order_meta_df): + + foldername = folder_name.split('/')[-2] + winner_row = order_meta_df[order_meta_df['recording'] == foldername] + winner = winner_row['winner'].values[0].astype(int) + winner_fish1 = winner_row['fish1'].values[0].astype(int) + 
winner_fish2 = winner_row['fish2'].values[0].astype(int) + + if winner > 0: + if winner == winner_fish1: + winner_fish_id = winner_row['rec_id1'].values[0] + loser_fish_id = winner_row['rec_id2'].values[0] + + elif winner == winner_fish2: + winner_fish_id = winner_row['rec_id2'].values[0] + loser_fish_id = winner_row['rec_id1'].values[0] + + chirp_winner = len( + Behavior.chirps[Behavior.chirps_ids == winner_fish_id]) + chirp_loser = len( + Behavior.chirps[Behavior.chirps_ids == loser_fish_id]) + + return chirp_winner, chirp_loser + else: + return np.nan, np.nan + + +def get_chirp_size(folder_name, Behavior, order_meta_df, id_meta_df): + + foldername = folder_name.split('/')[-2] + folder_row = order_meta_df[order_meta_df['recording'] == foldername] + fish1 = folder_row['fish1'].values[0].astype(int) + fish2 = folder_row['fish2'].values[0].astype(int) + + groub = folder_row['group'].values[0].astype(int) + size_fish1_row = id_meta_df[(id_meta_df['group'] == groub) & ( + id_meta_df['fish'] == fish1)] + size_fish2_row = id_meta_df[(id_meta_df['group'] == groub) & ( + id_meta_df['fish'] == fish2)] + + size_winners = [size_fish1_row[col].values[0] + for col in ['l1', 'l2', 'l3']] + mean_size_winner = np.nanmean(size_winners) + + size_losers = [size_fish2_row[col].values[0] for col in ['l1', 'l2', 'l3']] + mean_size_loser = np.nanmean(size_losers) + + if mean_size_winner > mean_size_loser: + size_diff = mean_size_winner - mean_size_loser + winner_fish_id = folder_row['rec_id1'].values[0] + loser_fish_id = folder_row['rec_id2'].values[0] + + elif mean_size_winner < mean_size_loser: + size_diff = mean_size_loser - mean_size_winner + winner_fish_id = folder_row['rec_id2'].values[0] + loser_fish_id = folder_row['rec_id1'].values[0] + + else: + size_diff = np.nan + winner_fish_id = np.nan + loser_fish_id = np.nan + + chirp_diff = len(Behavior.chirps[Behavior.chirps_ids == winner_fish_id]) - len( + Behavior.chirps[Behavior.chirps_ids == loser_fish_id]) + + return size_diff, 
chirp_diff + + +def get_chirp_freq(folder_name, Behavior, order_meta_df): + + foldername = folder_name.split('/')[-2] + folder_row = order_meta_df[order_meta_df['recording'] == foldername] + fish1 = folder_row['rec_id1'].values[0].astype(int) + fish2 = folder_row['rec_id2'].values[0].astype(int) + chirp_freq_fish1 = np.nanmedian( + Behavior.freq[Behavior.ident == fish1]) + chirp_freq_fish2 = np.nanmedian( + Behavior.freq[Behavior.ident == fish2]) + + if chirp_freq_fish1 > chirp_freq_fish2: + freq_diff = chirp_freq_fish1 - chirp_freq_fish2 + winner_fish_id = folder_row['rec_id1'].values[0] + loser_fish_id = folder_row['rec_id2'].values[0] + + elif chirp_freq_fish1 < chirp_freq_fish2: + freq_diff = chirp_freq_fish2 - chirp_freq_fish1 + winner_fish_id = folder_row['rec_id2'].values[0] + loser_fish_id = folder_row['rec_id1'].values[0] + + chirp_diff = len(Behavior.chirps[Behavior.chirps_ids == winner_fish_id]) - len( + Behavior.chirps[Behavior.chirps_ids == loser_fish_id]) + + return freq_diff, chirp_diff + + +def main(datapath: str): + + foldernames = [ + datapath + x + '/' for x in os.listdir(datapath) if os.path.isdir(datapath+x)] + + foldernames, _ = get_valid_datasets(datapath) + path_order_meta = ( + '/').join(foldernames[0].split('/')[:-2]) + '/order_meta.csv' + order_meta_df = read_csv(path_order_meta) + order_meta_df['recording'] = order_meta_df['recording'].str[1:-1] + path_id_meta = ( + '/').join(foldernames[0].split('/')[:-2]) + '/id_meta.csv' + id_meta_df = read_csv(path_id_meta) + + chirps_winner = [] + size_diffs = [] + size_chirps_diffs = [] + chirps_loser = [] + freq_diffs = [] + freq_chirps_diffs = [] + + for foldername in foldernames: + # behabvior is pandas dataframe with all the data + if foldername == '../data/mount_data/2020-05-12-10_00/': + continue + bh = Behavior(foldername) + # chirps are not sorted in time (presumably due to prior groupings) + # get and sort chirps and corresponding fish_ids of the chirps + category = bh.behavior + 
timestamps = bh.start_s + # Correct for doubles in chasing on- and offsets to get the right on-/offset pairs + # Get rid of tracking faults (two onsets or two offsets after another) + category, timestamps = correct_chasing_events(category, timestamps) + + # winner_chirp, loser_chirp = get_chirp_winner_loser( + # foldername, bh, order_meta_df) + # chirps_winner.append(winner_chirp) + # chirps_loser.append(loser_chirp) + # size_diff, chirp_diff = get_chirp_size( + # foldername, bh, order_meta_df, id_meta_df) + # size_diffs.append(size_diff) + # size_chirps_diffs.append(chirp_diff) + + # freq_diff, freq_chirps_diff = get_chirp_freq( + # foldername, bh, order_meta_df) + # freq_diffs.append(freq_diff) + # freq_chirps_diffs.append(freq_chirps_diff) + + folder_name = foldername.split('/')[-2] + winner_row = order_meta_df[order_meta_df['recording'] == folder_name] + winner = winner_row['winner'].values[0].astype(int) + winner_fish1 = winner_row['fish1'].values[0].astype(int) + winner_fish2 = winner_row['fish2'].values[0].astype(int) + + groub = winner_row['group'].values[0].astype(int) + size_rows = id_meta_df[id_meta_df['group'] == groub] + + if winner == winner_fish1: + winner_fish_id = winner_row['rec_id1'].values[0] + loser_fish_id = winner_row['rec_id2'].values[0] + + size_winners = [] + for l in ['l1', 'l2', 'l3']: + size_winner = size_rows[size_rows['fish'] + == winner_fish1][l].values[0] + size_winners.append(size_winner) + mean_size_winner = np.nanmean(size_winners) + + size_losers = [] + for l in ['l1', 'l2', 'l3']: + size_loser = size_rows[size_rows['fish'] + == winner_fish2][l].values[0] + size_losers.append(size_loser) + mean_size_loser = np.nanmean(size_losers) + + size_diffs.append(mean_size_winner - mean_size_loser) + + elif winner == winner_fish2: + winner_fish_id = winner_row['rec_id2'].values[0] + loser_fish_id = winner_row['rec_id1'].values[0] + + size_winners = [] + for l in ['l1', 'l2', 'l3']: + size_winner = size_rows[size_rows['fish'] + == 
winner_fish2][l].values[0] + size_winners.append(size_winner) + mean_size_winner = np.nanmean(size_winners) + + size_losers = [] + for l in ['l1', 'l2', 'l3']: + size_loser = size_rows[size_rows['fish'] + == winner_fish1][l].values[0] + size_losers.append(size_loser) + mean_size_loser = np.nanmean(size_losers) + + size_diffs.append(mean_size_winner - mean_size_loser) + else: + continue + + print(foldername) + all_fish_ids = np.unique(bh.chirps_ids) + chirp_winner = len(bh.chirps[bh.chirps_ids == winner_fish_id]) + chirp_loser = len(bh.chirps[bh.chirps_ids == loser_fish_id]) + + freq_winner = np.nanmedian(bh.freq[bh.ident == winner_fish_id]) + freq_loser = np.nanmedian(bh.freq[bh.ident == loser_fish_id]) + + chirps_winner.append(chirp_winner) + chirps_loser.append(chirp_loser) + + size_chirps_diffs.append(chirp_winner - chirp_loser) + freq_diffs.append(freq_winner - freq_loser) + + fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=( + 22*ps.cm, 12*ps.cm), width_ratios=[1.5, 1, 1]) + plt.subplots_adjust(left=0.098, right=0.945, top=0.94, wspace=0.343) + scatterwinner = 1.15 + scatterloser = 1.85 + chirps_winner = np.asarray(chirps_winner)[~np.isnan(chirps_winner)] + chirps_loser = np.asarray(chirps_loser)[~np.isnan(chirps_loser)] + + bplot1 = ax1.boxplot(chirps_winner, positions=[ + 1], showfliers=False, patch_artist=True) + bplot2 = ax1.boxplot(chirps_loser, positions=[ + 2], showfliers=False, patch_artist=True) + ax1.scatter(np.ones(len(chirps_winner)) * + scatterwinner, chirps_winner, color='r') + ax1.scatter(np.ones(len(chirps_loser)) * + scatterloser, chirps_loser, color='r') + ax1.set_xticklabels(['winner', 'loser']) + ax1.text(0.1, 0.9, f'n = {len(chirps_winner)}', + transform=ax1.transAxes, color=ps.white) + + for w, l in zip(chirps_winner, chirps_loser): + ax1.plot([scatterwinner, scatterloser], [w, l], + color='r', alpha=0.5, linewidth=0.5) + ax1.set_ylabel('Chirps [n]', color=ps.white) + + colors1 = ps.red + ps.set_boxplot_color(bplot1, colors1) + colors1 
= ps.orange + ps.set_boxplot_color(bplot2, colors1) + ax2.scatter(size_diffs, size_chirps_diffs, color='r') + ax2.set_xlabel('Size difference [mm]') + ax2.set_ylabel('Chirps [n]') + + ax3.scatter(freq_diffs, size_chirps_diffs, color='r') + # ax3.scatter(freq_diffs, freq_chirps_diffs, color='r') + ax3.set_xlabel('Frequency difference [Hz]') + ax3.set_yticklabels([]) + ax3.set + + plt.savefig('../poster/figs/chirps_winner_loser.pdf') + plt.show() + + +if __name__ == '__main__': + + # Path to the data + datapath = '../data/mount_data/' + + main(datapath) diff --git a/code/plot_chirp_size.py b/code/plot_chirp_size.py new file mode 100644 index 0000000..bcd36f4 --- /dev/null +++ b/code/plot_chirp_size.py @@ -0,0 +1,307 @@ +import numpy as np +from extract_chirps import get_valid_datasets + +import os + +import numpy as np +import matplotlib.pyplot as plt +from scipy.stats import pearsonr, spearmanr, wilcoxon +from thunderfish.powerspectrum import decibel + +from IPython import embed +from pandas import read_csv +from modules.logger import makeLogger +from modules.plotstyle import PlotStyle +from modules.behaviour_handling import Behavior, correct_chasing_events + + +ps = PlotStyle() + +logger = makeLogger(__name__) + + +def get_chirp_winner_loser(folder_name, Behavior, order_meta_df): + + foldername = folder_name.split('/')[-2] + winner_row = order_meta_df[order_meta_df['recording'] == foldername] + winner = winner_row['winner'].values[0].astype(int) + winner_fish1 = winner_row['fish1'].values[0].astype(int) + winner_fish2 = winner_row['fish2'].values[0].astype(int) + + if winner > 0: + if winner == winner_fish1: + winner_fish_id = winner_row['rec_id1'].values[0] + loser_fish_id = winner_row['rec_id2'].values[0] + + elif winner == winner_fish2: + winner_fish_id = winner_row['rec_id2'].values[0] + loser_fish_id = winner_row['rec_id1'].values[0] + + chirp_winner = len( + Behavior.chirps[Behavior.chirps_ids == winner_fish_id]) + chirp_loser = len( + 
Behavior.chirps[Behavior.chirps_ids == loser_fish_id]) + + return chirp_winner, chirp_loser + else: + return np.nan, np.nan + + +def get_chirp_size(folder_name, Behavior, order_meta_df, id_meta_df): + + foldername = folder_name.split('/')[-2] + folder_row = order_meta_df[order_meta_df['recording'] == foldername] + fish1 = folder_row['fish1'].values[0].astype(int) + fish2 = folder_row['fish2'].values[0].astype(int) + winner = folder_row['winner'].values[0].astype(int) + + groub = folder_row['group'].values[0].astype(int) + size_fish1_row = id_meta_df[(id_meta_df['group'] == groub) & ( + id_meta_df['fish'] == fish1)] + size_fish2_row = id_meta_df[(id_meta_df['group'] == groub) & ( + id_meta_df['fish'] == fish2)] + + size_winners = [size_fish1_row[col].values[0] + for col in ['l1', 'l2', 'l3']] + size_fish1 = np.nanmean(size_winners) + + size_losers = [size_fish2_row[col].values[0] for col in ['l1', 'l2', 'l3']] + size_fish2 = np.nanmean(size_losers) + if winner == fish1: + if size_fish1 > size_fish2: + size_diff_bigger = size_fish1 - size_fish2 + size_diff_smaller = size_fish2 - size_fish1 + + elif size_fish1 < size_fish2: + size_diff_bigger = size_fish1 - size_fish2 + size_diff_smaller = size_fish2 - size_fish1 + else: + size_diff_bigger = np.nan + size_diff_smaller = np.nan + winner_fish_id = np.nan + loser_fish_id = np.nan + return size_diff_bigger, size_diff_smaller, winner_fish_id, loser_fish_id + + winner_fish_id = folder_row['rec_id1'].values[0] + loser_fish_id = folder_row['rec_id2'].values[0] + + elif winner == fish2: + if size_fish2 > size_fish1: + size_diff_bigger = size_fish2 - size_fish1 + size_diff_smaller = size_fish1 - size_fish2 + + elif size_fish2 < size_fish1: + size_diff_bigger = size_fish2 - size_fish1 + size_diff_smaller = size_fish1 - size_fish2 + else: + size_diff_bigger = np.nan + size_diff_smaller = np.nan + winner_fish_id = np.nan + loser_fish_id = np.nan + return size_diff_bigger, size_diff_smaller, winner_fish_id, loser_fish_id + + 
winner_fish_id = folder_row['rec_id2'].values[0] + loser_fish_id = folder_row['rec_id1'].values[0] + else: + size_diff_bigger = np.nan + size_diff_smaller = np.nan + winner_fish_id = np.nan + loser_fish_id = np.nan + return size_diff_bigger, size_diff_smaller, winner_fish_id, loser_fish_id + + chirp_winner = len( + Behavior.chirps[Behavior.chirps_ids == winner_fish_id]) + chirp_loser = len( + Behavior.chirps[Behavior.chirps_ids == loser_fish_id]) + + return size_diff_bigger, chirp_winner, size_diff_smaller, chirp_loser + + +def get_chirp_freq(folder_name, Behavior, order_meta_df): + + foldername = folder_name.split('/')[-2] + folder_row = order_meta_df[order_meta_df['recording'] == foldername] + fish1 = folder_row['fish1'].values[0].astype(int) + fish2 = folder_row['fish2'].values[0].astype(int) + + fish1_freq = folder_row['rec_id1'].values[0].astype(int) + fish2_freq = folder_row['rec_id2'].values[0].astype(int) + winner = folder_row['winner'].values[0].astype(int) + chirp_freq_fish1 = np.nanmedian( + Behavior.freq[Behavior.ident == fish1_freq]) + chirp_freq_fish2 = np.nanmedian( + Behavior.freq[Behavior.ident == fish2_freq]) + + if winner == fish1: + if chirp_freq_fish1 > chirp_freq_fish2: + freq_diff_higher = chirp_freq_fish1 - chirp_freq_fish2 + freq_diff_lower = chirp_freq_fish2 - chirp_freq_fish1 + + elif chirp_freq_fish1 < chirp_freq_fish2: + freq_diff_higher = chirp_freq_fish1 - chirp_freq_fish2 + freq_diff_lower = chirp_freq_fish2 - chirp_freq_fish1 + else: + freq_diff_higher = np.nan + freq_diff_lower = np.nan + winner_fish_id = np.nan + loser_fish_id = np.nan + + winner_fish_id = folder_row['rec_id1'].values[0] + loser_fish_id = folder_row['rec_id2'].values[0] + + elif winner == fish2: + if chirp_freq_fish2 > chirp_freq_fish1: + freq_diff_higher = chirp_freq_fish2 - chirp_freq_fish1 + freq_diff_lower = chirp_freq_fish1 - chirp_freq_fish2 + + elif chirp_freq_fish2 < chirp_freq_fish1: + freq_diff_higher = chirp_freq_fish2 - chirp_freq_fish1 + 
freq_diff_lower = chirp_freq_fish1 - chirp_freq_fish2 + else: + freq_diff_higher = np.nan + freq_diff_lower = np.nan + winner_fish_id = np.nan + loser_fish_id = np.nan + + winner_fish_id = folder_row['rec_id2'].values[0] + loser_fish_id = folder_row['rec_id1'].values[0] + else: + freq_diff_higher = np.nan + freq_diff_lower = np.nan + winner_fish_id = np.nan + loser_fish_id = np.nan + + chirp_winner = len( + Behavior.chirps[Behavior.chirps_ids == winner_fish_id]) + chirp_loser = len( + Behavior.chirps[Behavior.chirps_ids == loser_fish_id]) + + return freq_diff_higher, chirp_winner, freq_diff_lower, chirp_loser + + +def main(datapath: str): + + foldernames = [ + datapath + x + '/' for x in os.listdir(datapath) if os.path.isdir(datapath+x)] + foldernames, _ = get_valid_datasets(datapath) + path_order_meta = ( + '/').join(foldernames[0].split('/')[:-2]) + '/order_meta.csv' + order_meta_df = read_csv(path_order_meta) + order_meta_df['recording'] = order_meta_df['recording'].str[1:-1] + path_id_meta = ( + '/').join(foldernames[0].split('/')[:-2]) + '/id_meta.csv' + id_meta_df = read_csv(path_id_meta) + + chirps_winner = [] + + size_diffs_winner = [] + size_diffs_loser = [] + size_chirps_winner = [] + size_chirps_loser = [] + + freq_diffs_higher = [] + freq_diffs_lower = [] + freq_chirps_winner = [] + freq_chirps_loser = [] + + chirps_loser = [] + freq_diffs = [] + freq_chirps_diffs = [] + + for foldername in foldernames: + # behabvior is pandas dataframe with all the data + if foldername == '../data/mount_data/2020-05-12-10_00/': + continue + bh = Behavior(foldername) + # chirps are not sorted in time (presumably due to prior groupings) + # get and sort chirps and corresponding fish_ids of the chirps + category = bh.behavior + timestamps = bh.start_s + # Correct for doubles in chasing on- and offsets to get the right on-/offset pairs + # Get rid of tracking faults (two onsets or two offsets after another) + category, timestamps = correct_chasing_events(category, 
timestamps) + + winner_chirp, loser_chirp = get_chirp_winner_loser( + foldername, bh, order_meta_df) + chirps_winner.append(winner_chirp) + chirps_loser.append(loser_chirp) + + size_diff_bigger, chirp_winner, size_diff_smaller, chirp_loser = get_chirp_size( + foldername, bh, order_meta_df, id_meta_df) + + freq_diff_higher, chirp_freq_winner, freq_diff_lower, chirp_freq_loser = get_chirp_freq( + foldername, bh, order_meta_df) + + freq_diffs_higher.append(freq_diff_higher) + freq_diffs_lower.append(freq_diff_lower) + freq_chirps_winner.append(chirp_freq_winner) + freq_chirps_loser.append(chirp_freq_loser) + + if np.isnan(size_diff_bigger): + continue + size_diffs_winner.append(size_diff_bigger) + size_diffs_loser.append(size_diff_smaller) + size_chirps_winner.append(chirp_winner) + size_chirps_loser.append(chirp_loser) + + size_winner_pearsonr = pearsonr(size_diffs_winner, size_chirps_winner) + size_loser_pearsonr = pearsonr(size_diffs_loser, size_chirps_loser) + + fig, (ax1, ax2) = plt.subplots(1, 2, figsize=( + 13*ps.cm, 10*ps.cm), sharey=True) + plt.subplots_adjust(left=0.098, right=0.945, top=0.94, wspace=0.343) + scatterwinner = 1.15 + scatterloser = 1.85 + chirps_winner = np.asarray(chirps_winner)[~np.isnan(chirps_winner)] + chirps_loser = np.asarray(chirps_loser)[~np.isnan(chirps_loser)] + + stat = wilcoxon(chirps_winner, chirps_loser) + print(stat) + + bplot1 = ax1.boxplot(chirps_winner, positions=[ + 0.9], showfliers=False, patch_artist=True) + + bplot2 = ax1.boxplot(chirps_loser, positions=[ + 2.1], showfliers=False, patch_artist=True) + ax1.scatter(np.ones(len(chirps_winner)) * + scatterwinner, chirps_winner, color=ps.red) + ax1.scatter(np.ones(len(chirps_loser)) * + scatterloser, chirps_loser, color=ps.orange) + ax1.set_xticklabels(['winner', 'loser']) + ax1.text(0.1, 0.9, f'n = {len(chirps_winner)}', + transform=ax1.transAxes, color=ps.white) + + for w, l in zip(chirps_winner, chirps_loser): + ax1.plot([scatterwinner, scatterloser], [w, l], + 
color=ps.white, alpha=1, linewidth=0.5) + ax1.set_ylabel('chirps [n]', color=ps.white) + ax1.set_xlabel('outcome', color=ps.white) + + colors1 = ps.red + ps.set_boxplot_color(bplot1, colors1) + colors1 = ps.orange + ps.set_boxplot_color(bplot2, colors1) + + ax2.scatter(size_diffs_winner, size_chirps_winner, + color=ps.red, label='winner') + ax2.scatter(size_diffs_loser, size_chirps_loser, + color=ps.orange, label='loser') + + ax2.set_xlabel('size difference [cm]') + # ax2.set_xticks(np.arange(-10, 10.1, 2)) + + handles, labels = ax2.get_legend_handles_labels() + fig.legend(handles, labels, loc='upper center', ncol=2) + plt.subplots_adjust(left=0.162, right=0.97, top=0.85, bottom=0.176) + + # pearson r + plt.savefig('../poster/figs/chirps_winner_loser.pdf') + plt.show() + + +if __name__ == '__main__': + + # Path to the data + datapath = '../data/mount_data/' + + main(datapath) diff --git a/code/plot_chirps_in_chasing.py b/code/plot_chirps_in_chasing.py new file mode 100644 index 0000000..98894d8 --- /dev/null +++ b/code/plot_chirps_in_chasing.py @@ -0,0 +1,81 @@ +import numpy as np + +import os + +import numpy as np +import matplotlib.pyplot as plt +from scipy.stats import pearsonr, spearmanr +from thunderfish.powerspectrum import decibel + +from IPython import embed +from pandas import read_csv +from modules.logger import makeLogger +from modules.plotstyle import PlotStyle +from modules.behaviour_handling import Behavior, correct_chasing_events +from modules.datahandling import flatten + + +ps = PlotStyle() + +logger = makeLogger(__name__) + + +def main(datapath: str): + + foldernames = [ + datapath + x + '/' for x in os.listdir(datapath) if os.path.isdir(datapath+x)] + time_precents = [] + chirps_percents = [] + for foldername in foldernames: + # behabvior is pandas dataframe with all the data + if foldername == '../data/mount_data/2020-05-12-10_00/': + continue + bh = Behavior(foldername) + + category = bh.behavior + timestamps = bh.start_s + # Correct for 
doubles in chasing on- and offsets to get the right on-/offset pairs + # Get rid of tracking faults (two onsets or two offsets after another) + category, timestamps = correct_chasing_events(category, timestamps) + + chasing_onset = timestamps[category == 0] + chasing_offset = timestamps[category == 1] + if len(chasing_onset) != len(chasing_offset): + embed() + + chirps_in_chasings = [] + for onset, offset in zip(chasing_onset, chasing_offset): + chirps_in_chasing = [c for c in bh.chirps if (c > onset) & (c < offset)] + chirps_in_chasings.append(chirps_in_chasing) + + try: + time_chasing = np.sum(chasing_offset[chasing_offset<3*60*60] - chasing_onset[chasing_onset<3*60*60]) + except: + time_chasing = np.sum(chasing_offset[chasing_offset<3*60*60] - chasing_onset[chasing_onset<3*60*60][:-1]) + + + time_chasing_percent = (time_chasing/(3*60*60))*100 + chirps_chasing = np.asarray(flatten(chirps_in_chasings)) + chirps_chasing_new = chirps_chasing[chirps_chasing<3*60*60] + chirps_percent = (len(chirps_chasing_new)/len(bh.chirps))*100 + + time_precents.append(time_chasing_percent) + chirps_percents.append(chirps_percent) + + fig, ax = plt.subplots(1, 1, figsize=(14*ps.cm, 10*ps.cm)) + + ax.boxplot([time_precents, chirps_percents]) + ax.set_xticklabels(['Time Chasing', 'Chirps in Chasing']) + ax.set_ylabel('Percent') + ax.scatter(np.ones(len(time_precents))*1.25, time_precents, color=ps.white) + ax.scatter(np.ones(len(chirps_percents))*1.75, chirps_percents, color=ps.white) + plt.savefig('../poster/figs/chirps_in_chasing.pdf') + plt.show() + + +if __name__ == '__main__': + # Path to the data + datapath = '../data/mount_data/' + main(datapath) + + diff --git a/code/plot_event_timeline.py b/code/plot_event_timeline.py new file mode 100644 index 0000000..0b35484 --- /dev/null +++ b/code/plot_event_timeline.py @@ -0,0 +1,123 @@ +import numpy as np + +import os + +import numpy as np +import matplotlib.pyplot as plt +from thunderfish.powerspectrum import decibel + +from IPython 
import embed +from pandas import read_csv +from modules.logger import makeLogger +from modules.plotstyle import PlotStyle +from modules.behaviour_handling import Behavior, correct_chasing_events + +from extract_chirps import get_valid_datasets +ps = PlotStyle() + +logger = makeLogger(__name__) + + +def main(datapath: str): + + foldernames = [ + datapath + x + '/' for x in os.listdir(datapath) if os.path.isdir(datapath+x)] + foldernames, _ = get_valid_datasets(datapath) + for foldername in foldernames[1:2]: + # foldername = foldernames[0] + if foldername == '../data/mount_data/2020-05-12-10_00/': + continue + # behabvior is pandas dataframe with all the data + bh = Behavior(foldername) + # 2020-06-11-10 + category = bh.behavior + timestamps = bh.start_s + # Correct for doubles in chasing on- and offsets to get the right on-/offset pairs + # Get rid of tracking faults (two onsets or two offsets after another) + category, timestamps = correct_chasing_events(category, timestamps) + + # split categories + chasing_onset = (timestamps[category == 0] / 60) / 60 + chasing_offset = (timestamps[category == 1] / 60) / 60 + physical_contact = (timestamps[category == 2] / 60) / 60 + + all_fish_ids = np.unique(bh.chirps_ids) + fish1_id = all_fish_ids[0] + fish2_id = all_fish_ids[1] + # Associate chirps to inidividual fish + fish1 = (bh.chirps[bh.chirps_ids == fish1_id] / 60) / 60 + fish2 = (bh.chirps[bh.chirps_ids == fish2_id] / 60) / 60 + fish1_color = ps.purple + fish2_color = ps.lavender + + fig, ax = plt.subplots(5, 1, figsize=( + 21*ps.cm, 10*ps.cm), height_ratios=[0.5, 0.5, 0.5, 0.2, 6], sharex=True) + # marker size + s = 80 + ax[0].scatter(physical_contact, np.ones( + len(physical_contact)), color=ps.maroon, marker='|', s=s) + ax[1].scatter(chasing_onset, np.ones(len(chasing_onset)), + color=ps.orange, marker='|', s=s) + ax[2].scatter(fish1, np.ones(len(fish1))-0.25, + color=fish1_color, marker='|', s=s) + ax[2].scatter(fish2, np.zeros(len(fish2))+0.25, + 
color=fish2_color, marker='|', s=s) + + freq_temp = bh.freq[bh.ident == fish1_id] + time_temp = bh.time[bh.idx[bh.ident == fish1_id]] + ax[4].plot((time_temp / 60) / 60, freq_temp, color=fish1_color) + + freq_temp = bh.freq[bh.ident == fish2_id] + time_temp = bh.time[bh.idx[bh.ident == fish2_id]] + ax[4].plot((time_temp / 60) / 60, freq_temp, color=fish2_color) + + # ax[3].imshow(decibel(bh.spec), extent=[bh.time[0]/60/60, bh.time[-1]/60/60, 0, 2000], aspect='auto', origin='lower') + + # Hide grid lines + ax[0].grid(False) + ax[0].set_frame_on(False) + ax[0].set_xticks([]) + ax[0].set_yticks([]) + ps.hide_ax(ax[0]) + ax[0].yaxis.set_label_coords(-0.1, 0.5) + + ax[1].grid(False) + ax[1].set_frame_on(False) + ax[1].set_xticks([]) + ax[1].set_yticks([]) + ps.hide_ax(ax[1]) + + ax[2].grid(False) + ax[2].set_frame_on(False) + ax[2].set_yticks([]) + ax[2].set_xticks([]) + ps.hide_ax(ax[2]) + + ax[4].axvspan(3, 6, 0, 5, facecolor='grey', alpha=0.5) + ax[4].set_xticks(np.arange(0, 6.1, 0.5)) + ps.hide_ax(ax[3]) + + labelpad = 30 + fsize = 12 + ax[0].set_ylabel('contact', rotation=0, + labelpad=labelpad, fontsize=fsize) + ax[1].set_ylabel('chasing', rotation=0, + labelpad=labelpad, fontsize=fsize) + ax[2].set_ylabel('chirps', rotation=0, + labelpad=labelpad, fontsize=fsize) + ax[4].set_ylabel('EODf') + + ax[4].set_xlabel('time [h]') + # ax[0].set_title(foldername.split('/')[-2]) + # 2020-03-31-9_59 + plt.subplots_adjust(left=0.158, right=0.987, top=0.918) + # plt.savefig('../poster/figs/timeline.pdf') + plt.show() + + # plot chirps + + +if __name__ == '__main__': + # Path to the data + datapath = '../data/mount_data/' + main(datapath) diff --git a/code/plot_introduction_specs.py b/code/plot_introduction_specs.py new file mode 100644 index 0000000..20fb562 --- /dev/null +++ b/code/plot_introduction_specs.py @@ -0,0 +1,121 @@ +import numpy as np +import matplotlib.pyplot as plt +from thunderfish.powerspectrum import spectrogram, decibel + +from modules.filehandling import 
LoadData +from modules.datahandling import instantaneous_frequency +from modules.filters import bandpass_filter +from modules.plotstyle import PlotStyle + +ps = PlotStyle() + + +def main(): + + # Load data + datapath = "../data/2022-06-02-10_00/" + data = LoadData(datapath) + + # good chirp times for data: 2022-06-02-10_00 + window_start_seconds = 3 * 60 * 60 + 6 * 60 + 43.5 + 9 + 6.25 + window_start_index = window_start_seconds * data.raw_rate + window_duration_seconds = 0.2 + window_duration_index = window_duration_seconds * data.raw_rate + + timescaler = 1000 + + raw = data.raw[window_start_index:window_start_index + + window_duration_index, 10] + + fig, (ax1, ax2, ax3) = plt.subplots( + 3, 1, figsize=(12 * ps.cm, 10*ps.cm), sharex=True, sharey=True) + + # plot instantaneous frequency + filtered1 = bandpass_filter( + signal=raw, lowf=750, highf=1200, samplerate=data.raw_rate) + filtered2 = bandpass_filter( + signal=raw, lowf=550, highf=700, samplerate=data.raw_rate) + + freqtime1, freq1 = instantaneous_frequency( + filtered1, data.raw_rate, smoothing_window=3) + freqtime2, freq2 = instantaneous_frequency( + filtered2, data.raw_rate, smoothing_window=3) + + ax1.plot(freqtime1*timescaler, freq1, color=ps.red, + lw=2, label=f"fish 1, {np.median(freq1):.0f} Hz") + ax1.plot(freqtime2*timescaler, freq2, color=ps.orange, + lw=2, label=f"fish 2, {np.median(freq2):.0f} Hz") + ax1.legend(bbox_to_anchor=(0, 1.02, 1, 0.2), loc="lower center", + mode="normal", borderaxespad=0, ncol=2) + ps.hide_xax(ax1) + + # plot fine spectrogram + spec_power, spec_freqs, spec_times = spectrogram( + raw, + ratetime=data.raw_rate, + freq_resolution=150, + overlap_frac=0.2, + ) + + ylims = [300, 1200] + fmask = np.zeros(spec_freqs.shape, dtype=bool) + fmask[(spec_freqs > ylims[0]) & (spec_freqs < ylims[1])] = True + + ax2.imshow( + decibel(spec_power[fmask, :]), + extent=[ + spec_times[0]*timescaler, + spec_times[-1]*timescaler, + spec_freqs[fmask][0], + spec_freqs[fmask][-1], + ], + 
aspect="auto", + origin="lower", + interpolation="gaussian", + alpha=1, + ) + ps.hide_xax(ax2) + + # plot coarse spectrogram + spec_power, spec_freqs, spec_times = spectrogram( + raw, + ratetime=data.raw_rate, + freq_resolution=10, + overlap_frac=0.3, + ) + fmask = np.zeros(spec_freqs.shape, dtype=bool) + fmask[(spec_freqs > ylims[0]) & (spec_freqs < ylims[1])] = True + ax3.imshow( + decibel(spec_power[fmask, :]), + extent=[ + spec_times[0]*timescaler, + spec_times[-1]*timescaler, + spec_freqs[fmask][0], + spec_freqs[fmask][-1], + ], + aspect="auto", + origin="lower", + interpolation="gaussian", + alpha=1, + ) + # ps.hide_xax(ax3) + + ax3.set_xlabel("time [ms]") + ax2.set_ylabel("frequency [Hz]") + + ax1.set_yticks(np.arange(400, 1201, 400)) + ax1.spines.left.set_bounds((400, 1200)) + ax2.set_yticks(np.arange(400, 1201, 400)) + ax2.spines.left.set_bounds((400, 1200)) + ax3.set_yticks(np.arange(400, 1201, 400)) + ax3.spines.left.set_bounds((400, 1200)) + + plt.subplots_adjust(left=0.17, right=0.98, top=0.9, + bottom=0.14, hspace=0.35) + + plt.savefig('../poster/figs/introplot.pdf') + plt.show() + + +if __name__ == '__main__': + main() diff --git a/code/plot_kdes.py b/code/plot_kdes.py new file mode 100644 index 0000000..5fd9cad --- /dev/null +++ b/code/plot_kdes.py @@ -0,0 +1,471 @@ +from extract_chirps import get_valid_datasets +import os + +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt + +from tqdm import tqdm +from IPython import embed +from pandas import read_csv +from modules.logger import makeLogger +from modules.datahandling import flatten, causal_kde1d, acausal_kde1d +from modules.behaviour_handling import ( + Behavior, correct_chasing_events, center_chirps) +from modules.plotstyle import PlotStyle + +logger = makeLogger(__name__) +ps = PlotStyle() + + +def bootstrap(data, nresamples, kde_time, kernel_width, event_times, time_before, time_after): + + bootstrapped_kdes = [] + data = data[data <= 3*60*60] # only night time + + # 
diff_data = np.diff(np.sort(data), prepend=0) + # if len(data) != 0: + # mean_chirprate = (len(data) - 1) / (data[-1] - data[0]) + + for i in tqdm(range(nresamples)): + + # np.random.shuffle(diff_data) + + # bootstrapped_data = np.cumsum(diff_data) + bootstrapped_data = data + np.random.randn(len(data)) * 10 + + bootstrap_data_centered = center_chirps( + bootstrapped_data, event_times, time_before, time_after) + + bootstrapped_kde = acausal_kde1d( + bootstrap_data_centered, time=kde_time, width=kernel_width) + + # bootstrapped_kdes = list(np.asarray( + # bootstrapped_kdes) / len(event_times)) + + bootstrapped_kdes.append(bootstrapped_kde) + + return bootstrapped_kdes + + +def jackknife(data, nresamples, subsetsize, kde_time, kernel_width, event_times, time_before, time_after): + + jackknife_kdes = [] + data = data[data <= 3*60*60] # only night time + subsetsize = int(len(data) * subsetsize) + + diff_data = np.diff(np.sort(data), prepend=0) + + for i in tqdm(range(nresamples)): + + bootstrapped_data = np.random.sample(data, subsetsize, replace=False) + + bootstrapped_data = np.cumsum(diff_data) + + bootstrap_data_centered = center_chirps( + bootstrapped_data, event_times, time_before, time_after) + + bootstrapped_kde = acausal_kde1d( + bootstrap_data_centered, time=kde_time, width=kernel_width) + + # bootstrapped_kdes = list(np.asarray( + # bootstrapped_kdes) / len(event_times)) + + jackknife_kdes.append(bootstrapped_kde) + + return jackknife_kdes + + +def get_chirp_winner_loser(folder_name, Behavior, order_meta_df): + + foldername = folder_name.split('/')[-2] + winner_row = order_meta_df[order_meta_df['recording'] == foldername] + winner = winner_row['winner'].values[0].astype(int) + winner_fish1 = winner_row['fish1'].values[0].astype(int) + winner_fish2 = winner_row['fish2'].values[0].astype(int) + + if winner > 0: + if winner == winner_fish1: + winner_fish_id = winner_row['rec_id1'].values[0] + loser_fish_id = winner_row['rec_id2'].values[0] + + elif winner == 
winner_fish2: + winner_fish_id = winner_row['rec_id2'].values[0] + loser_fish_id = winner_row['rec_id1'].values[0] + + chirp_winner = Behavior.chirps[Behavior.chirps_ids == winner_fish_id] + chirp_loser = Behavior.chirps[Behavior.chirps_ids == loser_fish_id] + + return chirp_winner, chirp_loser + return None, None + + +def main(dataroot): + + foldernames, _ = get_valid_datasets(dataroot) + plot_all = True + time_before = 60 + time_after = 60 + dt = 0.001 + kernel_width = 1 + kde_time = np.arange(-time_before, time_after, dt) + nbootstraps = 2 + + meta_path = ( + '/').join(foldernames[0].split('/')[:-2]) + '/order_meta.csv' + meta = pd.read_csv(meta_path) + meta['recording'] = meta['recording'].str[1:-1] + + winner_onsets = [] + winner_offsets = [] + winner_physicals = [] + + loser_onsets = [] + loser_offsets = [] + loser_physicals = [] + + winner_onsets_boot = [] + winner_offsets_boot = [] + winner_physicals_boot = [] + + loser_onsets_boot = [] + loser_offsets_boot = [] + loser_physicals_boot = [] + + onset_count = 0 + offset_count = 0 + physical_count = 0 + + # Iterate over all recordings and save chirp- and event-timestamps + for folder in tqdm(foldernames): + + foldername = folder.split('/')[-2] + # logger.info('Loading data from folder: {}'.format(foldername)) + + broken_folders = ['../data/mount_data/2020-05-12-10_00/'] + if folder in broken_folders: + continue + + bh = Behavior(folder) + category, timestamps = correct_chasing_events(bh.behavior, bh.start_s) + + category = category[timestamps < 3*60*60] # only night time + timestamps = timestamps[timestamps < 3*60*60] # only night time + + winner, loser = get_chirp_winner_loser(folder, bh, meta) + + if winner is None: + continue + + onsets = (timestamps[category == 0]) + offsets = (timestamps[category == 1]) + physicals = (timestamps[category == 2]) + + onset_count += len(onsets) + offset_count += len(offsets) + physical_count += len(physicals) + + winner_onsets.append(center_chirps( + winner, onsets, 
time_before, time_after)) + winner_offsets.append(center_chirps( + winner, offsets, time_before, time_after)) + winner_physicals.append(center_chirps( + winner, physicals, time_before, time_after)) + + loser_onsets.append(center_chirps( + loser, onsets, time_before, time_after)) + loser_offsets.append(center_chirps( + loser, offsets, time_before, time_after)) + loser_physicals.append(center_chirps( + loser, physicals, time_before, time_after)) + + # bootstrap + # chirps = [winner, winner, winner, loser, loser, loser] + + winner_onsets_boot.append(bootstrap( + winner, + nresamples=nbootstraps, + kde_time=kde_time, + kernel_width=kernel_width, + event_times=onsets, + time_before=time_before, + time_after=time_after)) + winner_offsets_boot.append(bootstrap( + winner, + nresamples=nbootstraps, + kde_time=kde_time, + kernel_width=kernel_width, + event_times=offsets, + time_before=time_before, + time_after=time_after)) + winner_physicals_boot.append(bootstrap( + winner, + nresamples=nbootstraps, + kde_time=kde_time, + kernel_width=kernel_width, + event_times=physicals, + time_before=time_before, + time_after=time_after)) + + loser_onsets_boot.append(bootstrap( + loser, + nresamples=nbootstraps, + kde_time=kde_time, + kernel_width=kernel_width, + event_times=onsets, + time_before=time_before, + time_after=time_after)) + loser_offsets_boot.append(bootstrap( + loser, + nresamples=nbootstraps, + kde_time=kde_time, + kernel_width=kernel_width, + event_times=offsets, + time_before=time_before, + time_after=time_after)) + loser_physicals_boot.append(bootstrap( + loser, + nresamples=nbootstraps, + kde_time=kde_time, + kernel_width=kernel_width, + event_times=physicals, + time_before=time_before, + time_after=time_after)) + + if plot_all: + + winner_onsets_conv = acausal_kde1d( + winner_onsets[-1], kde_time, kernel_width) + winner_offsets_conv = acausal_kde1d( + winner_offsets[-1], kde_time, kernel_width) + winner_physicals_conv = acausal_kde1d( + winner_physicals[-1], kde_time, 
kernel_width) + + loser_onsets_conv = acausal_kde1d( + loser_onsets[-1], kde_time, kernel_width) + loser_offsets_conv = acausal_kde1d( + loser_offsets[-1], kde_time, kernel_width) + loser_physicals_conv = acausal_kde1d( + loser_physicals[-1], kde_time, kernel_width) + + fig, ax = plt.subplots(2, 3, figsize=( + 21*ps.cm, 10*ps.cm), sharey=True, sharex=True) + ax[0, 0].set_title( + f"{foldername}, onsets {len(onsets)}, offsets {len(offsets)}, physicals {len(physicals)},winner {len(winner)}, looser {len(loser)} , onsets") + ax[0, 0].plot(kde_time, winner_onsets_conv/len(onsets)) + ax[0, 1].plot(kde_time, winner_offsets_conv/len(offsets)) + ax[0, 2].plot(kde_time, winner_physicals_conv/len(physicals)) + ax[1, 0].plot(kde_time, loser_onsets_conv/len(onsets)) + ax[1, 1].plot(kde_time, loser_offsets_conv/len(offsets)) + ax[1, 2].plot(kde_time, loser_physicals_conv/len(physicals)) + + # # plot bootstrap lines + for kde in winner_onsets_boot[-1]: + ax[0, 0].plot(kde_time, kde/len(onsets), + color='gray') + for kde in winner_offsets_boot[-1]: + ax[0, 1].plot(kde_time, kde/len(offsets), + color='gray') + for kde in winner_physicals_boot[-1]: + ax[0, 2].plot(kde_time, kde/len(physicals), + color='gray') + for kde in loser_onsets_boot[-1]: + ax[1, 0].plot(kde_time, kde/len(onsets), + color='gray') + for kde in loser_offsets_boot[-1]: + ax[1, 1].plot(kde_time, kde/len(offsets), + color='gray') + for kde in loser_physicals_boot[-1]: + ax[1, 2].plot(kde_time, kde/len(physicals), + color='gray') + + # plot bootstrap percentiles + # ax[0, 0].fill_between( + # kde_time, + # np.percentile(winner_onsets_boot[-1], 5, axis=0), + # np.percentile(winner_onsets_boot[-1], 95, axis=0), + # color='gray', + # alpha=0.5) + # ax[0, 1].fill_between( + # kde_time, + # np.percentile(winner_offsets_boot[-1], 5, axis=0), + # np.percentile( + # winner_offsets_boot[-1], 95, axis=0), + # color='gray', + # alpha=0.5) + # ax[0, 2].fill_between( + # kde_time, + # np.percentile( + # 
winner_physicals_boot[-1], 5, axis=0), + # np.percentile( + # winner_physicals_boot[-1], 95, axis=0), + # color='gray', + # alpha=0.5) + # ax[1, 0].fill_between( + # kde_time, + # np.percentile(loser_onsets_boot[-1], 5, axis=0), + # np.percentile(loser_onsets_boot[-1], 95, axis=0), + # color='gray', + # alpha=0.5) + # ax[1, 1].fill_between( + # kde_time, + # np.percentile(loser_offsets_boot[-1], 5, axis=0), + # np.percentile(loser_offsets_boot[-1], 95, axis=0), + # color='gray', + # alpha=0.5) + # ax[1, 2].fill_between( + # kde_time, + # np.percentile( + # loser_physicals_boot[-1], 5, axis=0), + # np.percentile( + # loser_physicals_boot[-1], 95, axis=0), + # color='gray', + # alpha=0.5) + + # ax[0, 0].plot(kde_time, np.median(winner_onsets_boot[-1], axis=0), + # color='black', linewidth=2) + # ax[0, 1].plot(kde_time, np.median(winner_offsets_boot[-1], axis=0), + # color='black', linewidth=2) + # ax[0, 2].plot(kde_time, np.median(winner_physicals_boot[-1], axis=0), + # color='black', linewidth=2) + # ax[1, 0].plot(kde_time, np.median(loser_onsets_boot[-1], axis=0), + # color='black', linewidth=2) + # ax[1, 1].plot(kde_time, np.median(loser_offsets_boot[-1], axis=0), + # color='black', linewidth=2) + # ax[1, 2].plot(kde_time, np.median(loser_physicals_boot[-1], axis=0), + # color='black', linewidth=2) + + ax[0, 0].set_xlim(-30, 30) + plt.show() + + winner_onsets = np.sort(flatten(winner_onsets)) + winner_offsets = np.sort(flatten(winner_offsets)) + winner_physicals = np.sort(flatten(winner_physicals)) + loser_onsets = np.sort(flatten(loser_onsets)) + loser_offsets = np.sort(flatten(loser_offsets)) + loser_physicals = np.sort(flatten(loser_physicals)) + + winner_onsets_conv = acausal_kde1d( + winner_onsets, kde_time, kernel_width) + winner_offsets_conv = acausal_kde1d( + winner_offsets, kde_time, kernel_width) + winner_physicals_conv = acausal_kde1d( + winner_physicals, kde_time, kernel_width) + loser_onsets_conv = acausal_kde1d( + loser_onsets, kde_time, 
kernel_width) + loser_offsets_conv = acausal_kde1d( + loser_offsets, kde_time, kernel_width) + loser_physicals_conv = acausal_kde1d( + loser_physicals, kde_time, kernel_width) + + winner_onsets_conv = winner_onsets_conv / onset_count + winner_offsets_conv = winner_offsets_conv / offset_count + winner_physicals_conv = winner_physicals_conv / physical_count + loser_onsets_conv = loser_onsets_conv / onset_count + loser_offsets_conv = loser_offsets_conv / offset_count + loser_physicals_conv = loser_physicals_conv / physical_count + + winner_onsets_boot = np.concatenate( + winner_onsets_boot) + winner_offsets_boot = np.concatenate( + winner_offsets_boot) + winner_physicals_boot = np.concatenate( + winner_physicals_boot) + loser_onsets_boot = np.concatenate( + loser_onsets_boot) + loser_offsets_boot = np.concatenate( + loser_offsets_boot) + loser_physicals_boot = np.concatenate( + loser_physicals_boot) + + percs = [5, 50, 95] + winner_onsets_boot_quarts = np.percentile( + winner_onsets_boot, percs, axis=0) + winner_offsets_boot_quarts = np.percentile( + winner_offsets_boot, percs, axis=0) + winner_physicals_boot_quarts = np.percentile( + winner_physicals_boot, percs, axis=0) + loser_onsets_boot_quarts = np.percentile( + loser_onsets_boot, percs, axis=0) + loser_offsets_boot_quarts = np.percentile( + loser_offsets_boot, percs, axis=0) + loser_physicals_boot_quarts = np.percentile( + loser_physicals_boot, percs, axis=0) + + fig, ax = plt.subplots(2, 3, figsize=( + 21*ps.cm, 10*ps.cm), sharey=True, sharex=True) + + ax[0, 0].plot(kde_time, winner_onsets_conv) + ax[0, 1].plot(kde_time, winner_offsets_conv) + ax[0, 2].plot(kde_time, winner_physicals_conv) + ax[1, 0].plot(kde_time, loser_onsets_conv) + ax[1, 1].plot(kde_time, loser_offsets_conv) + ax[1, 2].plot(kde_time, loser_physicals_conv) + + ax[0, 0].plot(kde_time, winner_onsets_boot_quarts[1], c=ps.black) + ax[0, 1].plot(kde_time, winner_offsets_boot_quarts[1], c=ps.black) + ax[0, 2].plot(kde_time, 
winner_physicals_boot_quarts[1], c=ps.black) + ax[1, 0].plot(kde_time, loser_onsets_boot_quarts[1], c=ps.black) + ax[1, 1].plot(kde_time, loser_offsets_boot_quarts[1], c=ps.black) + ax[1, 2].plot(kde_time, loser_physicals_boot_quarts[1], c=ps.black) + + # for kde in winner_onsets_boot: + # ax[0, 0].plot(kde_time, kde, + # color='gray') + # for kde in winner_offsets_boot: + # ax[0, 1].plot(kde_time, kde, + # color='gray') + # for kde in winner_physicals_boot: + # ax[0, 2].plot(kde_time, kde, + # color='gray') + # for kde in loser_onsets_boot: + # ax[1, 0].plot(kde_time, kde, + # color='gray') + # for kde in loser_offsets_boot: + # ax[1, 1].plot(kde_time, kde, + # color='gray') + # for kde in loser_physicals_boot: + # ax[1, 2].plot(kde_time, kde, + # color='gray') + + ax[0, 0].fill_between(kde_time, + winner_onsets_boot_quarts[0], + winner_onsets_boot_quarts[2], + color=ps.gray, + alpha=0.5) + + ax[0, 1].fill_between(kde_time, + winner_offsets_boot_quarts[0], + winner_offsets_boot_quarts[2], + color=ps.gray, + alpha=0.5) + + ax[0, 2].fill_between(kde_time, + loser_physicals_boot_quarts[0], + loser_physicals_boot_quarts[2], + color=ps.gray, + alpha=0.5) + + ax[1, 0].fill_between(kde_time, + loser_onsets_boot_quarts[0], + loser_onsets_boot_quarts[2], + color=ps.gray, + alpha=0.5) + + ax[1, 1].fill_between(kde_time, + loser_offsets_boot_quarts[0], + loser_offsets_boot_quarts[2], + color=ps.gray, + alpha=0.5) + + ax[1, 2].fill_between(kde_time, + loser_physicals_boot_quarts[0], + loser_physicals_boot_quarts[2], + color=ps.gray, + alpha=0.5) + + plt.show() + + +if __name__ == '__main__': + main('../data/mount_data/') diff --git a/poster/figs/algorithm.pdf b/poster/figs/algorithm.pdf new file mode 100644 index 0000000..359d7e6 Binary files /dev/null and b/poster/figs/algorithm.pdf differ diff --git a/poster/figs/algorithm1.pdf b/poster/figs/algorithm1.pdf new file mode 100644 index 0000000..3ac4dfb Binary files /dev/null and b/poster/figs/algorithm1.pdf differ diff --git 
a/poster/figs/chirps_in_chasing.pdf b/poster/figs/chirps_in_chasing.pdf new file mode 100644 index 0000000..836ca8e Binary files /dev/null and b/poster/figs/chirps_in_chasing.pdf differ diff --git a/poster/figs/chirps_winner_loser.pdf b/poster/figs/chirps_winner_loser.pdf new file mode 100644 index 0000000..5650732 Binary files /dev/null and b/poster/figs/chirps_winner_loser.pdf differ diff --git a/poster/figs/efishlogo.pdf b/poster/figs/efishlogo.pdf new file mode 100644 index 0000000..95372e3 Binary files /dev/null and b/poster/figs/efishlogo.pdf differ diff --git a/poster/figs/introplot.pdf b/poster/figs/introplot.pdf new file mode 100644 index 0000000..7fef6fa Binary files /dev/null and b/poster/figs/introplot.pdf differ diff --git a/poster/figs/logo_all.pdf b/poster/figs/logo_all.pdf new file mode 100644 index 0000000..f54eedc --- /dev/null +++ b/poster/figs/logo_all.pdf @@ -0,0 +1,529 @@ +%PDF-1.5 % +1 0 obj <>/OCGs[15 0 R 18 0 R 21 0 R 25 0 R 35 0 R 36 0 R 37 0 R 38 0 R]>>/Pages 2 0 R/Type/Catalog>> endobj 34 0 obj <>stream + + + + + application/pdf + + + EKUT_WortBildMarke_W_RGB + + + + + Adobe Illustrator CS3 + 2010-07-08T15:07+02:00 + 2010-09-21T13:10:33+02:00 + 2010-09-21T13:10:33+02:00 + + + + 256 + 68 + JPEG + /9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA AQBIAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgARAEAAwER AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE 1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy 
obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp 0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo +DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A9U4q7FXYq7FXmPm/RNIu /wAwrs3HliLzAZdEjaWLjAJARcSJzDSlW5cRxBQ8qDbtkCN3a6fLIYRU+D1+fcl/5kaM15511RbD SF1DUp/LLGJk9NJo5RclEuI2biTJEPs0YE0pXBIbtmiycOKNyqPifo5fFV8w/oPXvMXkXUYtLj8y 297puoMEnjg5zqiWxRnFxxTkrOxoTsScJ3pGHjxwyRMuAiUe/bn3KHmODydp3m7TrjWtDX9EweVp PVsjbi5a3SC4txGHoGp6K1XnX4fHAatOE5ZYyIS9Xic7q9j9679B32nzflrDeW8epanb3F+YRPIr sIvq801vGbikgJhThuKjktR2ONcl8USGYg8MSI/eAdvNOtSinuPzV8p3OoaZBa3AtNU4TK6zO3BY ONW4IRw5mn+scPVogQNPMRJIuP6UDqvmae3/ADIttaEk/wCiLe6/w5cJ6Mv1bjOAzXHr8fRDLecI SOdfhO2JO7ZjwA4DDbiI4+e+3Suf07qfnSLybbeedZn1/SlureXRrAmdIEdknku7mFH9Ugek7Hgo kJHQVO2Jq06Y5TiiISo8cuvSgfj7l0nlcR65+W9h5ihg1LUYbS+t72WZFn5mK1UqrM4+PgTsT337 41yQM/ozGFxjYrp1TeTTI/LH5gaBBof+i6Try3cF/pMZItke2gM8dxDF9mI/D6bcaA1HfDVFpE/F wyM95Qqj13NUe/vS782L+HRfMvlrXUs45bq3jvUa9eP1Pq0TCJTcOq/G6QCRn4r79Kk4Jc23QQM8 c4Xsa27+e3xTDW9M0XRPKbWWmF5LrzTOtvc6lBG091c/WqyXVz+5VmdhbiV14rxG1KDCdg14pynk uXLGOXICuQ386SS51L9MfkVrdtqKerf6NaXNheCeMq/r2S0jlKSAMrMnCTcAiuD+FujDg1cTHlIg j3H8UmHkvT9HufNs1zoVmNP0+20/6l5gsGjECTXcwilhb6ttuIuRaQrRgwAr8VEc2vUzkMdTNkyu J57b3v7+iN8g6LG35XyadpZXTprxdRjjuIVClJZJ5o0l+HulFp7AYYjZhq8v+EcUvVXD9wb/AC1k 0yOe802bRItC80adBBBqcEChYp4QX9G4hZQBIjnlufiU7HGK60SIEhLjxyJry7wWd5J17sVdirsV dirsVdirsVYB56/NW18v6n+h7T6uLxVBur2+do7S3Z0aSONuAZnkdVqF+EbirCoyJlTsdLoDkjxG 67hzP7GO6F/zkFpq2sU/mFrUxT2n1pJ9NMr+lMFY/U54pRVJmMT8Dy4t7AgmIm5OXsiV1C+db/eP JCN+ePmyfU5rS20eCK5SJrhNHaK8urwIpUgTNbIyxmRWDL8JA6NTu8ZZ/wAl4xGzI137AfC2feQv zM8u+cbOM2r/AFXVDH6txpUxpMi148lqF9RPBl+mh2yUZW67V6KeE77x72XZJw3Yq7FXYqx7UPJ/ 1vX5dch1i/sbuW0FgUtjbGMRK5kBVZoJTz5MTyrgpyYaiocBjEi73v8AQVl15Jjl14a3DrGoWl4t iNNAia2dfQD+pU+tBKxctuWLVxpMdTUOAxiRd9f0FSb8vNNjn0SXTr670xfL9tJaafFbfV2QRzBR IX9eGYszemu9e3zwcKfzkiJcQEuM2bv9BCq3kWxl1SHUbu+u7x00+TSp4ZzA8c9vMwaX1qRByzso 
J4sPYUw0j80RHhAA9XF12PzULX8vLW3bQyNX1GQeXWdtNEjWzUWSMw+m7ehyZRExUb1964OFlLWE 8Xpj6+fP396P1byrFqOvWGtfpC6tLrTYZ4LZIPq5jpchRIzCWGUljwXvTbp1qSGrHn4YGFAiVd/T 4qB8iaU/k2bypPcXNxZTrJ6l1I0f1kvLKZzLzWNU9QStyDcOu+NbUy/NS8XxABf2dylN+X9hdXl5 c6hqF7fi/wBOGlXcE5t/Te3UuVP7uGNuYaVm5A9Tg4WQ1ZAAiAKlxdefzWyeQuU+j3I13UTd6Gkq WMz/AFRyVmUIwlrb/H8C8a7H3rvjwqNXtIcManz5/rTLTvLMFtqZ1a7up9S1T0zBHc3JQCKJiGZI Y4ljjQMVHI05GgqTTDTXPOTHhAEY+X6WtV8r2upa5p2rT3M6tp0c8SWa+kbeVLkBZRKrxuzVCgbM P14kLjzmMDEAeqt+uyA0b8vdM0jUrS6tb69a108XA07S5Xie2t1uac1jrH61Bx+Csh4ioGxpgEWz JrJTiQQLNWepr419i3Vfy803UZNeb6/eWsXmOKOLU7eAwemfTT0uaepDIVdk+FjXfExXHrJR4dge Dlz/AFou18nW1prC6tb393HeG1js7wgwcLpIa+k8yelx9RORoycdtumGmEtSTHhIFXY57e7dAaf+ XSWGlJpVv5g1UWMZndIuVmCHuC7s5dbZXPF5WdQTSvUECmDhbJ6zilxGMb27+nxTzTdDSzvJ7+e5 kvdQuI44XuZljUiKIsVjQRJGAvJ2Y9yT8qGmieWwABQCZYWp2KuxV2KuxV2KuxVAeYNVGkaDqWrM nMafaz3RT+YQxtJT6eOAlsw4+OYj3kB8tXc2lx6kb6/eHzBrk4a7nlt2kuoLiUejct9bhVXX6vEp eALG32kJNKfDS9ZESMaHojy7q5jY955796j5u0mW4tFXWLcW99HaGexuxEYXjtYWZPTntVjgkJkk ZWWSjcUPQgFsSE6fIAfSbF7+/wAjZ+Xe28es6vbC208JbaZHZzNp9y0RE8quI0u5o1grct6v1Wbk 8nMMocUHTFQYwNy3lYvu60N9trHKuiY6VeHQNXtZtSjMEllcacbbUF/cJYelcBNQtkgMknKJ1uGa T06cmZWC8GDYRs1ZI+JEiPUS2/nbek3XPba/dzZ/oGi2i/8AOQ+rWCyTjT7CwXULWzM0hhWdvQWv Akig9ZmA7H5ZID1OvzZT+SidrMqvy3/U9nmhjmheGVQ8UilHU9CrChGWOkBo28r/ACe0q2g80+dm DzS/UNTezshNLJKIoAWbggcn2367ZCPMu27RyE48fnGz72MflX+Z2q+X5bPSPNzkeX9XeZtA1iQ8 lRo52ieCRz+wrgjf7G37BBEYyrm5Wv0Mclyx/XGuIfDn+Ofve83NtBcwPBOgkhkHF0PQg9stefjI g2HnX5TaLZpJ5yWUNN6euX2nxepJJIEs1WJkhXmxoBzyERzdl2hlP7v+oD8d90h/JfSLWTzX58hu HmuotN1CTT7KO4lklEdv6syFQGY7lUAJ64IDcuR2lkIx4iNuKNn37KAs08mfn1YwSvI2g+YIGXTU llkeOC5agKpzY7+olB4CSmPKTLi8fRk/xwO/mPx9z2e+ntbexuLi7YJaQxPJcO3QRqpLk+3EZY6S AJIA5vOfyh8q29z5en8xanC7TeYJZ7m2tZZHZLexmc+jCiliqgp8VR2IyEQ7LtHORMQj/BQ956li /wCVvk7Rda8xedrHVWvLu30bVHtNOU315GY4llmXjWKVOWyDrgiObl67UyhDGY0DKNn0jy8nqWhe R9M0vQrzRHeW7sLueWYCaSRpFSSnFPVLF/g47NWuTAdTl1UpzE+RAeb/AJK+UtH8xeUbu61s3V9d 
R39xapO95dKyxRqgVRwkUClTkICw7PtPUSx5AIUBwg8h+pNfI135h8v/AJpap5Gu9QuNV0VrIalp k145lnhXmq8DI1WK1Zl38ARSpwjY01aqMMmnjlAEZXRrkiNbvLPXPzeXypr07ppMGmrc2GmiR4o7 u5d/iaTgUMnBAeKGo2JxO5pjiicem8SA9XFRPcGU6D5F0vQNeuNQ0gNbWd3brFNYh5Gj9VHqJVV2 YKSvwmmSApxMuqlkgIy3IPNgOm6Ha/8AQw19aLJMLC10xdTisvVkMIuC0UdQnLjT94Wp0rkK9TsJ 5T+SB6mVX5bsz/5Vf5en1O+1XUZby5v76d5mkjvLu1VEPwxxolvLGKLGFWp6/hkuFwvz0xERjQAH cD94YT+XXlyx1jzT52sdRudQntdI1BbfTo/0lfp6cRMnw1SdWb7I3Yk5GI3LnazMYY8ZiI3KO/pj 5eT0Lyx5Is/LqapBZXVy1tqLq6etM8skNI+B4SSFm6/EMmBTrs+qOThJAseTyWXWdX03z/ZaX+lb 5/K0izeUf0vJNyuPrnFZXn5kcRIk8yxq3gh8Mrvd24xxlhMuGPif3ldK7vkL+L1bW/INjrVro9jq N5dzWOkgll9Z0muJBGI0eWeMo+w5E0pUn75mLqcWrMDIxAuX2fBgH5ieWtP0bzR5I0/TLjULez1b UDbX8Q1K/YSRAxgLV52K/aP2aZGQ3DsNHmM8eSUhEmMbHpj5+TNm/LDRIdV07VNPub63ubCdJSst 7dXMciCvJHW4ll612p3yXC4P56ZiYyAII7gPuDMck4TsVdiqF1XT4tS0u806Y0ivYJLeQjeiyoUP 4HEs8c+GQkOhfHXlSa2W0uz6U1/e3cElrbPHBNza5nrBFAJkZWPKFWcLUdCDyFVzHD2mcGx0AN8x yG917/wE+8822o6jIZ/M8DWurXUbSNHNbxxS28UckcdoYJYKfWllQ+lwVfgJZm6DCfNx9LKMdsZu I8+fO7v6a5+aYaLZSahp0t2I4r3T7HjFqNlqCtaWKRCP0lpIjpI8iyep9Xj+MInxfCz4Q15ZcMq5 E8iNz+OXEdrPeAh7qx0x/NbaVa6farqxuJ7WHS7eO54SQ6lBJC8vpzrE0bWv97vHUqwo3FQcerKM peHxEnhoG9v4TfTv5c3p+jfu/wDnJXXefw+toiel70a2/wCaDkx9Tqsm+hj/AF/1vWsm6d5x+U8b f4i8/wAv7Da7KoPuoqf+JDIR6uz7QPoxf1EB5F8naN5u/Jey0fVErG8t80M6gepDKL6fjJGT0I/E bYxFhs1Wplh1RlH+j8fSEJ+X3nLW/KOvr+XfneQlwQvl7WX/ALu4i6JGzn7lqdj8B7YImtiz1emh mh4+L/OHd+P2so/K3+/86f8AgTX3/JqDJR6uJruWP/hY/Sx78lP+Uw/Mr/ttyf8AJ+4wQ5lye0/7 rD/U/QEf+fvlyfUfJY1mxquqeXJl1C2lX7QjUj1qHwAAk/2GMxs19k5hHLwn6Z7fq/V8VnmTzKPO Hkjy5pmnOUuPOzRwz+mfihtoh6moEf6gQxn/AFsSbHvThweDlnKXLF9/8P63pVvbw21vFbwII4IU WOKNdgqKKKo9gBk3Vkkmy8Q/LXR9e1Dzl+YZ0rXptF9PW5hKsNvbXAkrPPQn6xHIRx9sriNy73W5 IRxYuKPF6O8joO5635S0/W7DRUttbvm1LUVmuDJeMFUyRmdzCeCAKn7rj8I6HJh0+onCUrgOGO23 w3+14v8Alf5q81eWvy51zVbPRINU0uyv7qaR/rbQTrQIJP3XoyhlQfETzG1criSA7zXYMeXPGJkY yMR0sfe9J/LjTbe/jbz3cXS3+reYbeI+pGvpxW1uoqLWJeTn4HHxkmpYdsnHvdZrZmP7kCowPzPe 
t/M/8r7PznawXNvOdO8w6f8AFpupJUEEHkEcr8XHluCN1O47gso2uh1xwEgi4HmEt/KXz15hv73U PJ3m6L0vNGiKGebb/SIKgCQ02qOa/ENmDA+OCJ6Ft7Q0sIgZcf8Ady+wqGmf+tHav/4Dy/8AJ+DH +JlP/Eo/1/0F6lk3UvLfyj/5Tf8AMj/trL+ubIR5l23aH91h/q/qZ35v8wweXfLGp63NQrY27yop 2DSUpGn+zchfpyRNB1+nwnJkEB1LxrzB5c81Tfk8miy+WL5NSsT+lZNUa4sT/pXNp55eK3BmqVkc ABeXTbKyDTu8ObGNTxCceE+mqly5DpT1r8vPNKeaPJul60CPWuIQt0o/ZnjPCUU7fGpI9qZZE2HT 6zB4WWUO77mH/m5/ym/5b/8AbWb9cORlzDm9n/3Wb+r+t6lk3UuxV2KuxV2KvCfMf5N6r5YsrjVP L0Ntr8UbSFdCurUzBI2WUI0atN8boZa0pSpLKtdjWYU7/D2lHKRGdw/pA+7yeN61pPm3TfL1rq90 7jStSmeCzklP77/RQYiAjfHGlFoF/wAkeAysgu7xZMcpmI+qPP4orSLHX5fLWmailtbXGnW2oSvD Hey/uZJ7eMTTB4SURg8fENyJYhKCgrVHJhklAZDGzxGPTuO3P8c3on5LaJBrPmHS9Qhi9dtDeaaT WObqotmWWG2shEWYK3Nmk+LcRhRUdMnAOt7TymEJRP8AF089iT+j329G8+eUPMcPnHS/PnlWFLzU rGI2uo6U7iL61bGv2Hb4Q45nr7eFDIjew63SaiBxSw5NoncHuKZP59165tzFpflDVf0owCol8kVt bIx6tJOZGBVf8gEnDxeTUNJAG5ZI8PlufkivJvl248o+WJluS+p6tdTy6hqj26jlNd3LAyemHKCg 2A5EbCu2IFBhqcwzZNvTEChfQBKvyZsdc0ryoukazpc+n3cM1xNWQxtGyzSmReLRu2/x03HbGHJu 7SlCeTijIEUE78++RNH856FJpmoKElWr2V6orJBLTZ16VH8y9x9BwyFtGk1UsE+KPxHekX5QeX/N Hlzy/rVv5kVp9ROpzTrMhEhuYhbQIkiHavP06fFQ1674Ighv7RzY8s4mH08Py3KUflLpvmnSvNfm y41fQrmytfMN+19azs8LqgMkr8JODkg0kG4BwRu27tCeOeOAjIEwjXXyeqzwQ3EEkE6CSGZWjljb cMrCjA/MZN1IJBsPK/ye/LXVfLWsavPqrM9tp8s1h5dWTelrLIJ5Jl7fvPgHzDDIRjTtu0dbHLGI jzO8vfyr4PVZZPTieTiz8FLcEFWagrRRtucm6kC3jn5eDzl5a8webdQ1HylqMltr1899a/VntHdA ZJX4Orzx9pBuCfllcbFu61nhZYQjGcbhGt78vJ6DoGv6/fWOpanf6Lc2CRycNP0uT0zdSIkalnNG 4AvIxABboMmC67LihEiIkD3nowz8kNA8waRoOr6D5k0Sa1S/vJroPI0UkLxTxJG0TcHY1/d+G9cj AOb2plhOcZwldAD5K/5baT5r8j3Go+WbvTLi+8ui5efRNTt2ifhFId45UZ1dfHZTvXtTGII2Y63J jzgZAQJ16gnb+afMmj+YtXtdS0LUL/TJpo5dIv8AT0W4T0jBGrROnMNGRIjGtKEk/Mm2gYIThExl ESrcHbqpeU9B1W8876n541WybS2urRNN03T5GRpxbo4keWf0yyBnZRRakgdcQN7TqMsY4hiieKjZ PS/JILaz83Rfnfd+ZW0G7GhT2X6NFyrQliF4N6hj9Tlx9SP5039sG925MpYzpRj4hx3fV6vK5jid wrOVUtwXdmoK0FabnJuoAeVflXY+bdM84+bLnWdDubSy8wXpu7O5ZoWEaiSUhJQjkj4JF3A7b5CN 
27bXyxyxQEZAmAo8/JMvzMi8wa1f6Lo1pot3c6HDqEF3rd0nohZIYGDLEitIrOpbdtu21cZNWiMI CUjICfCRHn1eikAggioOxBybrXlH5X6N5o8ma/r2gy6RcyeVrm9efR72NomWIMeNHUyc+JQLvTt0 3yEQQ7fXZMeeEZ8Q8QDcN/m3pvmzUfNnlK60XRbi+tdAvBe3k8bxIHHqRN6cfN1NeMbVqO+Mrtez 54445iUgDMUPtepW8xmgSUxvEXAJikADr7MASK/Tk3UkUVTFDsVdirsVdirwz/nKz/lH9C/5i5f+ TeV5Hfdg/XL3Ir8gNC0nXfylu9M1W2S6s7i+uFeORQaExRjkpP2XWvwsNx2xgLDHtbLLHqRKJo8I etaHoOjaDpsem6PaR2VlF9iGMbVPVmJqzMe7Ma5MCnT5csskuKRso/C1uxVh02ufmRNqeoLpmhWU mlwTmGznvLqS2lkEYAd+Ail+EyV4nbbx65Gy5oxYBEcUjxVvQv8ASkHl78yfP+vaxrek2Pl/Txda BMkF8ZL+VULuXA9Mi3PIfuj1AwCRLkZtFhxxjIylU+Xp/an1v5l89LpeuTaholra3+lKJbWAXDvD cRemzsVmCbGqkCq/Pxw2XHOHFxREZEiXly+DX5Z+eNY84eW5NeudMjs4Gd0s4IpvVkl9LZieaxqt W+EVOMTadbpY4Z8AN97fmPzpr+l+Qv8AFKaKIriCMTX2k3k3CWNeXE0eMOpI+1Q028DtiTtaMOmh PN4fFt0Ibu/NnmSD8u/8Vrp1q90ln+kZtP8AXcKLf0xKQJeG7rHU/ZpXb3xva1jp4HN4dmrq66pG fzR802/kuz86XOg202hzqst1Ha3btdQxO3AScJIUR6HqA/4VIHEatv8AyOM5TiEjx+Y2P2syvvME svlJvMGhpHehrT69axzMYlkj9P1AOQDcWK+I69clezhRw1k4J7b0lf5YedNU85eXRrt3YxWFvNI8 drFHK0rMIzxZmqqAfECAMETbdrtNHDPgBsobyp+aOmaz5t1rypdBbTVtMuporVeXw3MMbEckr+2o HxL9I70RLemWo0MoY45BvGQ+TN358G4U50PHl0r2rTJOAGH+WfN+v6p5w13y/eWNrbpoH1f6xcRT SSGT63EZYuCtGlNh8VTkQd3Nz6eEMUZgk8d9O5U1TzzO+uzeXvLOn/pjV7UKdQleT0LK05/ZE83G Ri56hEQn5Y33IhpRwceQ8MTy6k+4LbmT82ok9WCHQrggEm253cbHwCyEMCfmox3WI0x58Y+SI8me Z9c8waPfTX2lLpGrWN3JZSWck3rLziRG5F1Vdjz2pXbepriDbHU4IY5ACXFEi7Yz5U/Mbz55lvta srLQ9NguNBuTaXq3F9OAZQzqfTMdtICKxnc0wCRLlZ9HhxCJMpVMWPSP1sz8p6nruo6fPJrdimnX sNzLB6ETmRCkZAV1dgvIN1BoMkHC1EIRI4DYpIfyy/NLTPOtvcwFVtNZsXZbqy5V5IG4iWKu5Q9/ 5Tt4EiMrcjW6GWAg84nqzS4+seg/1bgZ6fuxJUJX3pvknBFXuxbyF5u1jzJLrX1yyt7OLR9QuNKb 0ZXlaSa2K83HJEASjbd8jE25er08cXDRJ4oiXzZbknDdirsVdirsVdirEvOv5p+TPJxEWr3ZN6yh 0sLdfVnKnoxWoVQe3JhXImQDmabQZc30jbv6PnX86vzSuPOcum20emy6bplspubb6x/eziYALKQP hC0X4aE998qnK3pOzNCMFm+KR29yffkF+bmh+WbK48ua6Wt7a4uDc2l8qvIA8iojROiBmAPAFSB4 1wwlTR2t2fPKROG5qqfSFjqVhfxNJZzpOqNwkCn4kelSki/aRxXdWAI75c8xOBjzCJxYuxV2KvJf 
yf8A/Jj/AJnf9tCD/k5dZCPMu47R/uMP9U/716V5i/5R/U/+YSf/AJNtki6vD9Y94YH/AM4+LI35 TWCxsEkMl2EcjkAfXehptWmRhydh2v8A4wfh9yM/MS11a1/JrWLfV7xdQ1GKxK3N6kYhEjcx8XAE gbf5jDLkw0conVRMRUb5K+o/+SOuv/AZf/unnH+FjD/Gx/wz/fPPl8x6rD+Q9hpo0G/Fnd2YtrnW FWCaCK2dyJZxHFM83wpWgdE375C/S7HwYnVmXFGwbre77uVfe9Q06bQJPy1A8v3C3OjxaW8NnMvU pFAU+KoBD/D8QIrXrk+jqpiYz+sVLi3+aQf847/+Sr03/jLdf8n3wQ5OR2x/jB+H3MWf8uz5th80 6hpkgsPNWjeZb1tI1BSUJ4rDKI5CO3JvhP7J+mo4bcsazwTCMt8csYsfNmP5WfmYfMkU2ia5H9R8 4aXWPULJxwMnA8TLGv8AxIDofYjJRlbha/ReF64b45cipaJMbD8xfzLvkHN47bSrjg3QmOzmoNu3 wYBzKco4sGEecvvCj/zj0gk/L0anKxkvtUvbq6vp23Z5TJwqT8kGMOTLtc/vuHpEAB6Lf39rYWr3 V0xSCPd3Cs9ATStEDHJuthAyNBKvLfmvylrsl0dAvIbxgwku3gU05lQgLtxA5cVA61oMAILdm0+T HXGK7nlf5Y6/JpXnL8xQmlX+perrUxrYxJIF4zz7NzeOhNdshE7l22uw8eLF6ox9HX3B615W8wL5 g0aPVFs57BZJZ4vqt0Ak6G3meFhIgJ4tyjO1dsmDbp8+Hw5cNg8uXLcW8R0X8vNTvvIWiedPKTm3 846ZLeEBTQXcUd7OPTcHYsF+EV6r8J2pSsR2sO9y6yMc0sWTfHKvh6R+Ptep/ln+ZGneddH9ZQLX WLX4NT00k8opAacgG34NTbw6HcZOMrdTrdFLBKucTyKG/KtVWbzqFAA/xPfGg23MUBJ+/GPVlr+W P/hcf0s6yTgOxV2KuxV2KuxV8r/nHZ6xqXmbUFaWOC1S4V7mCGotLUsPhe+uD8D3TJv6aciF+FSa ccplzes7OlGOMd9fE/1R/N83mWo3jS2dtbLyktrOSeO0uXFCYmYOI/8AYsxeni+QdrCNEnqav8fj kmHly8a1t5LiJ9TsUhcfX9R02U0WNxSIPF+735cxyMoBrSleqGvNGzR4T3A/j9D3b8gL7UpEVLSZ bjR1jMP1WS9aSeAc2kEjQJCLdWdidufPrXlTLYPP9rRj12l7tj8bv9D2/LHROxV2KvIvydmhb8yP zNCyKxbUISoBBqFkuQxHyJocrhzLue0QfAw/1f1PWbqBLi2lt3+xMjRtUV2YUOx+eWOniaNvKfyB 1AaZpmo+RtUYW+vaLeTVtHNGeB6MJI6/aXkSduxB75CHc7ftaHFIZY7wkPtTz85NQWTypL5Zs6T6 95haOz0+yU1ch5AZJWHURxorFm6DDPlTj9mwrJ4h+iG5Ka+eIIrD8r9etTJ+7t9GuoFdqCtLVo1+ ljieTVpTxaiJ75j70B+Uk9ifyn0SSeSP6olmVuWkK+mFVmEgcnYAb1rjHk2doA/mJVztiX5QafJY fl15tuVLR6Hc3F/No/qVUG2WIqJQW/ZYL+GRjycztGfFngP4wI3706/5x1kjb8rNPVWDMk9yrgGp U+szUPgaEHDDk0dsD/CD7h9yM/Km5R7/AM8QqQfT8yXjFga/aSNaU9jHhj1Ya+O2M/7WP0qP5o/l pd61NB5n8sTfo/zjpnx206UQXCqP7qQnatNlLbU+FtjsJR6hOh1ogDjyb45fYk35N+YLrzT5n87X uq2JsrySLTLXULFwQFlhjuIpRRviAJXoenTBA2S39pYRix4xE2PUQfklv5e6yfyu16/8j+aXNto1 
1cNc6BrEu0Dq9FKO/wBlagKT2Vq12IOMTWxbdZj/ADUBlx7yAqQ6vbFubdoPrCyo1uV5iYMCnGle XLpSnfLHRcJuury7/nG9YR5EvTCFCNqtyVK9COMYH4ZDHydt2zfjC/5o/Sh/yRngk84/mQEkVy+s vIoVgaoZ7ijCnb3wQ5ll2mD4WH+p+gPULDUdNuZ7y2snV2spAl0I6cVlkUSkbftEOGPzyx1M4SAB PVhn5Ez+r+XFoOQZYrq9VaU2BupH7f61cjDk53aorOfcPuSr8yPIGs2GsL5+8iAQ+YLYE6lYIPhv oti/wDZnIHxL+11HxgVEo9Q26LVxlHwc30Hke78fjZFfkRrK63o3mPV1ha3F/r1zcGBjUoZLe3JS vfidq4wLHtXHwShHnUB95el5N1bsVdirsVdirsVeT/mT+Vl3r2u2lxDapd6JEjST2SymOT12lluJ 2WpX4rhvSj5VqFr02yEo27fRa8Y4EE1Lv8tgPluXneo/kz5tv9Tg0UaZIqNOqTa84hS0ihSpkW1t o2/dR8mqC1XkND8NWrDgLsodpY4xMr6fTvfxPX7h5rbD8kPPM9zS0t5ND1i2T0Ly4aRDp92g25pI haQeoAOaGJgTuafZDwFM+08QG54onl/OH477ew/lv+Xl3oAF/rM8lxqioYreL1xLbW6PQv6MSQ2y IzkfE3Ekjv1yyMadLrdYMm0RUfduffuWe5J17sVWTen6Mnq/3fE8+v2ab9MUjmwry5/yqH9Mn/Dv 6M/TG/P6lx9f7Qrz4fFTlSvLvkRTnZvzPD6+Lh82cZJwGE+fP+VU/Wrf/F31L9I7fVuXL65Su3D0 P3/Gv0ZGVdXO0n5ij4d19n27L/I//KsPrlz/AIW+q/pOh+t15/XeNf2/rH7/AI18dsRXRGq8eh4l 8P2fZsnnmr/DH6Hk/wATfV/0RyX1vrdPRrX4edfh69K98Jrq0YPE4v3d8XkwWz/6F3+tt9X/AELz qnqfY9Cv7HLl+569K98j6XYS/O1vx/p/WzzXP8N/oKT9MfV/0HwHq+rT6v6dNuVPh4U+jJF1+Lj4 /TfH9qT+VP8AlWv1W8/wt9R+q8H+u/UKenSg5cvT+Hlxp74BXRuz+PY8S76Wt8q/8qx/Sc3+GPqH 6RqfrP1KnPl8VfV4d/tfaxFdFz+Pw/vOLh82X5Jw0ssf8Pfp3U/qP1f9M8Lf9L+lT1eNH+r+tTvx 5ca709qYG2fHwC74d6/TS3zN/hf9FP8A4m+p/ouvx/pD0/R5dv734eXh3xNdU4PE4v3d8Xk8w/6x rq1Pqno8vi4/XPq9fo/c5D0u0/w7z+y/1vTND/wp+gf9wP1T9B8Xp9Q4ehSnx09Havj3yYp1eXxO P13xef7Xn97/ANC27ev+hK1NfT48q9+Xp7/fkfS7GP57pxsq8sf8q2/wrd/4d+qf4c9Vvrv1evpe rROXqd68eFa9qdsIqnEz+P4g474+lq3kv/lXf77/AAf9S4b+v+j+PCu32uHw16e+IrojU+N/lb+L KMk4iV6H/h3nqX6F9Cv11/0p9XpT67wT1OfHb1OPDl+O+ANuXj2475bX3JphanYq7FXYq7FXYq7F XYq7FXYq/wD/2Q== + + + + + + uuid:4843E9BF348CDF11938FC449556742DA + uuid:b47a17de-e854-6642-a62a-d66d7f6f8213 + proof:pdf + + uuid:4743E9BF348CDF11938FC449556742DA + uuid:4643E9BF348CDF11938FC449556742DA + + + + 1 + False + False + + 194.999984 + 50.000049 + Millimeters + + + + Cyan + Magenta + Yellow + Black + + + + + + Standard-Farbfeldgruppe + 0 + + + + R=165 G=30 B=55 + 
PROCESS + 100.000000 + RGB + 165 + 29 + 54 + + + + + + + + + Adobe PDF library 8.00 + + + + + + + + + + + + + + + + + + + + + + + + + endstream endobj 2 0 obj <> endobj 40 0 obj <>/Resources<>/Properties<>>>/Thumb 44 0 R/TrimBox[0.0 0.0 552.756 141.732]/Type/Page>> endobj 41 0 obj <>stream +HdWn- ܟ8}%RY "``&@?U(]ZHǯ?˟~^?_?|)tr*wzx+ݭ+9ܮ?~{ۿ__Jɗzϙ[|,ߦ/(ok*wUj~WK^U5t;ghwN]U}CXS J_(nƂh g7Es +R%+U%xƥ)㿓㥸3vҽ%5n=rB{$._# UUN„/p]Ҿ.mpn> +a}EMxikQՏ9lLd9pjL=ƇY(y&_ER$EJ2,)uX*Y\l d| vSݔMeCb= iV?tǂܴ9>F(qiI^K= +O9LyEGa>Ĺ|9p~ +bL0Ge^3U=7&)ꛭqa+{SO5,?\QУq\$X(cxHnC2fvH<^6cЬ5?CdI J͔I$I.fugwRW0Moǽ.}>K'v =mBGrDF]Ώ' 3DXُCoݒo3n=qIGzMgE103Gsj̥HN[^R .zW@nEFh[v:V؋gd y|v8|J݆,YxC242ufk À],m)os҆ !G5hmuy+F]FynBn(̾78i}xxhs(w N:ua֦,v`Zu GjzD/!i+J,&by^ܭ _qt L%=}t1TxAݦ)` #A ĚeC㹟}}#c tPCEz9fS ~ϗ8czi~xa+:ȥcw63X Tj5b k7|㾠hU4xj+"ʭ }IV3|h`@j!o66TQy ^-FR$VjZf +pX&*R | F u?Ç@ ,Wy-#NKUsGء~n1 u9;,7M\Ap\{lm+0pDTZ54Ai<e۳(֎mݎ  +8#Yz`1Ŝ3s`߂ tJ:(_}0"Rh"9?&=_RLޔS{*z -eڞ"Hk&[cVhXao_!N㬨S|_UWHӧ$g9aܳlsz&م90^cUz}#WxVaQ+vq;KѰ +f]&z> e^KZ?|êq||N'b9 &-D%?gޟhIcӁ'iKY{ no'm1,j˨Jդ` [b"cf +b򨈇m%sdI;Z?ms$YI| tL" +5_sAZGiʊ եV,-Ndq6YХcb?2)|1{!*Z(RY߳xFCf ?K`}=n6k,[[gO/6ƫxc:֘eҦ +3QQ]5wպ!KӴ{ĸvВtNQYx+e<+aKnk0 ަ= qk ']T\X.Țgt(Z1FXŜ5}6A 6}!cӍo>SQ/;6L)cFl~h__|#'I=}P'>լԑϝh@j?{--J4-BE>6n3~ RvVCW1`aŚ z8< b|q0vqv#YPVdP޾ZXǮ9sy$l#Y:':GYki"\O +ڳB8 +Y}XT޹!tx3Qtj<;ac$nXx>:*vJKeg0}i[yxRV :^|!pDK`EV4{1D-B&<dbdbt?eœw Xe+Ӭ݊[{25w/J~c-m8QKvcK0NQ'}9O;cwnbxIB E@Uur5C;( d߸6M,RB~|f^Xnu8l3("kMƺ%A_|+#/!w0\St=mYhTqu'H}Qw 3w 6+gzMoQa;C0wQ~&PafP˧&*J|IF =4EHYR(B"_2Oq#d8+{9Γ!m(3Y3ԞYj͎X:p?`;c +ćtلS#:܍ K@+-P QT.)8X9GdTlxӟ`׋cm-RF2|$ߴtWw1K%Ӈe$?ھYrGίBdy4w Gt,e:%N+lyotNdO+2>tntB=+//|Z9"QDY+=>G=2\vG&Oʵ$y fuews)ā"i|BSqk䯊C_%HhRx~ҡNiSDiqQճjH{v?R'YvAe\E=!y&)1x`QJ̥d +m񱣨~bu&Y> įLx??ř^efoT-c{99*}FP3-.*JN. 
;*Pp|'GJ1qmq҈M}0Z7/wS߈m{#tբ ws%rF"| uA4 +aTwfCS G - +Ptd#l(P*1\Sn; ,^6rDWD:B1r4i2%kL&'eLpF!AN /;P dlJ{,,U:\p8 l2"uԧ /;FtkTo̶-5훮k P]E],7vW|&7;3lBScC7uEW.Otז>#P+b)#NΥܜ#a ^3ӓ#w'UEb9}.l3qYbo ?{Da89nZÚ!exY7g /Ct25[9µs1+|x13ݸέPD\ɷq6޿m׼./NFiFɀ0/rEF]5LxhKp d4mE5>M4o +cƙIp&+Qt+by!l]9l%cP1մ(L*yԧ6XHIv}fzI"E|SEw1MiVi"L%]3^9+ $LYBē% ~ G׏ c.כϐ1~6teu3Ziٌ̡^\ab #0fagRz2{ uͭ:$!n OZC1?7ס|mS5|Pl7]>vч~b r9LF:%ܶ+goc87#=s7Rwe]y?B hczSZҼvW &CQ w2qz++v:=–Όܚr|",3%-z?}GC|5Yߝ{5q@KUTna5k9ư$ȏ"B u/6'!wl\c}HY/8Zϣ GkE²r=fo&\#@+_o74CB ߰v)gZ<\q59*Buz*~ΌX`1y<Ƞ2z}E©^=#mzFXiF!Zs&,~Sjkɲѵ 7yklDz`yS`c:%'- lIJS^gIy%1Zi&oґdnbfRb\J&\Ю8ԝUQS{P&nϣIXeFB= 7NטsfU3ŧ'o?? LI4nI=pI6{λ488q_VީϘWxdPK.nnWO]D<$'W>_75 HcH0xcDWa+Aj~ԶW\xGJ'@q_[iXpr' +ܣB^:ߤxHXSݪ& ez4+\ ;]C +3\y2]md +O-# wtIc_}J섈mnYԙ.FAîSVCkb${cT ,0'RdkUҢ'FE +-]AyAߺX G\KBU{$Oa + +Zz +N!?~ Q}@2̖:0]W6,>s=&JiQ7KYOأ OJW;/[ (IGkt/*"Ps`Ii ~[-AeIDL)fj%uszoX1ҽyF5'(lk`bϕƜ+m'_|Y7$NJ=' (Ld fyl1DX[vy7& +/Dߢ6z FU< wį7 +;(~ 5>MiS}ʅ.u-k.EzZh4-9mxBsg64Ӛi ۛ}%O܇q=}O7Hqߊ϶/hAC;RPo3gC#}E Z=Cu@ޟ SDi%Vx- kOh'H׽^[5\>;y(S] +k\Fcȗو{?[ +-M{Z {Ti4TfGƁxTC_J-QY-0FaGp6#{q`pB]ɀvAVq Z>F^1kPy -1]Ƀ9u)1kO\{ZdDkٙ(I͈18P1P~Vv6^$a6`e[- lY>3~tf*qA- lYR6z˕!'#-+<6{`b.&|z[͙Al_ ^xJlV;CΈ- PؒҋñX4MumJlLl:+MGZ +'[#p #d%E-|ƀLSϑ- X*-&Ij@6ڃi&$oFdpaF]bb1׀`6_=t`6c6ge6o4V#d`6cF + dyΎ2uud{r8w>d3Ͻ׉OGwb9ynMgd 2d ې-l!ouٞJꃲ%A9h2O8*I`߷9o#C KVH^B؞z@:J̧R<bRsfк\eVc˵bA+e2٬K?zS5szR.Xn5og ')B==oz\WRnz< 9k4VyCWp8BT1 M +tA1{Sr,dKa^Vg*eVt$˒,8-*N'N9[14Jto5[87Yʶm?cQJ$f( +wMƜL)t*gTxC2PcQ"%fr@0 Ud?ۨ-KO"rGrM:Sl($vpMN'K."@ eFM­>9Xڠ9T)RL3ecTgZ>4=zhHvUR"73,nN`cQ!Mek6zetRn72F#'1g1d!tbLI1p.KWCZi i䆩C$$XfJʑ`7NtVy$kA.ؚߊ$@U]P1nC3˵Xf}/yH1oVxc_ +s]CX̗i :nsvT^А0GwΊVMAX*$si9a)*PgcW9&h hKVCibjkM#dTRjqyaV &xw|s,(=u0MW3;^8>Q#uh KJ̥(\\tA6הGjMqOEd|l56k*^%BٛEGK2gP6XT%Rl^/KX&=<$sgG?Ў +_,V e u bhqu' MA8 ]kVlDYǮDCC 5ٕY;>f"cXm.J!_(U얡EN\c.aI5G_ԸCE'I*K\@d98TM Ҩ\p#oZoBUm,j.uɶpTH=q\(+Sw{k^gߩ:,Ddؽܒk>pQ $G3.k:XzҚx!pT흺ma]'FR)Ӕ%WŠcغ]oIhLG&sC#q~$Mwqud*'sQtmlR_V?rW$S=MUxppӮÏc5(☀aayɊF bF*eG +p4ϳ,$]%G0x>O6`W[ !ּ@(Ȟy7~bMhjzϷz D vRƻyD#{Vh2β+OCjn;gu)< 
{^`[y3eLB_܌~OCEgT##rT<5G H%`}&yxwj@j@ēM{널/A4 ACL^ zQZn51C&TT]` <;MkP9Zݗv1Lu#*N^\w95@i\˺9+Dk0~ 5s]Ʊqty0rTͪE1]jeEE<'\s 8wJli)2؟pyr`!pwY Pf94_jbbEwfMX e"8tS` 28_DW_vܣ˾P_x:|揍<:u;kzA5kUmxƵ]yi|"Ӷ/‡`]ē$^ %?O*N#Sn.v9n fXfl=nrnha72ܾY#\r Si^Il`KsFIFKsSX:RVԙ@u}ɨ_ +vR@-C(^1]N݂w?Aj%B} ME-f;ajˤ=bf!'[?c،"u1|ך^* oo!Vu?lns'1_-˝仾9q "+' We`Zrn@sNX[o[PUʼ`[TAaVr2:"Siؖ:jC5{=}L#-&ŘɎgp['7.1 +  `$`@۾cpr5<} WW3JUR-tFAiXK3dl( w3{6`OB36{lrL{IEa[+79?D.6C6;|k`FMfkW P?pUXo#Olm@{$PoYb4[$ĮnE;V':Rr"iSkXꁸo `d%;Ҋ!V>H%e5 բ;HT_PLOhKq9܁6&= +Pd'b겆cKdcܙ4LYݎ=BEwǦ@z=k{C4$8:x=lh:(d S̠\C5+Q)dz7 4"xȭB7[j9UU4tDn%2'Y A,~{Mx7e~NhGL Ef \?oWlθEno˂P-i4>dG+~(ǡbm-)9Ffث gPm@#]%$9rᥭ24(~ f`j=u ,oLsgwlkjSԙ6AFm9XϽQB*y:-y +h}P~}eb5S Y T33 $|f^ Hdw[kD3Ug +s:O k͡h.uVNT7ё\ƈf\@)tr4Fr!jH X!C6,V~fT:f?(rc2Be90L5՛7ln}֘ĠmE€eڝ-NMfr@4\Ж NP$&>KcӵGS5<ʷD_vA@Wg +cQv/,T'nӌҥZf.{LXQc3a/p +ˢ&KP7qo+!Pǀ*VOU%б +B^0CIrjVBmmq`?$J:ޞ$j?bu>8m=)ޓ +x& }[ԇ;J [sO\h0|b˧XAXRn'37i  Ȑ}#$vhϝsve\Ȳ$a>\~<X^u_RI |dZÀ[p}:Uk9pRpSƋ!J  )"Yx䀦pR"OI DT@lPy &V0~M&1uG:}.g- {ɲ MC_$l} 6MSlߙ)aSЧnp+Ic34Bxӯm375`=c\vRPqǺ)T]K6厵K'7tr6УexpPE +"?,-g`f>\ugfZUb;haBM#win tʒTDevWX*3\ #W>6Mv=Pҕ!~O&-0I,d3G\)Ӻ6Ż6i|ك˴MuK'W ޻QPQS Z$mrAu =\Z!~fK5ΐ*]-6S5d: @K7: ]T']ôV2/s +c:L)ZӴB +EZLl3lllH_Dك.%pEE-bu`ʙ5 +@nA +Xo\Jҋ.|Og'uuB`h?cM|$-3R]'ElyAYGme6 P%)#҃XNRo^8L%<-('c9IC3HgDN3S + \ QUN&0r5) TF4ZUzl'7FUv^Ah2Gd 0|0tI!+0$d7MA2zu"StC6+9\T-L\nee`(>^|4.G˶ymsX6V +I:bW-#hQso&%L:݄xU~MsԠc+=Իl`Y,/؝^;ӧF~?+EaOVA0 +OAJzy#ws&ֲWzuIքJ!L 8Ti+)֧vw9: ֋PCOI'J32IHMI7TphGB;f#íg>@\`Z7Qaה<J Gz,ö:Kv$΀}L{j܁{lC[KT݇\;L G~hʌc^pNLM5-tXIUmv$L  ^c>i$/Cz8:&#ޝV@]rIN;dֶze恄'Hty/aZ43c 6&DYب;D^83_֬-ςJVU D"(]cI`LA,I}|ٮmpD8TU f6wXz +K 3[LRWXRܴ;L +pBc.?Nvd55:ᦦY:4Tǿ[x̣֟h_ixeɀ AsQ;lُj)%f,C#](Lpކ%y4Lɤaj/3@m5GuX@U=kYƬvO'NRWO`0 "P|Wyʇ-d21"ߗD$!<){XQnKݨ?Z^QQ0Ӽe lY' ' _PI^KA?XػU#sS -[\Bo#=>2j;BPTHCo@wI8Hat;A{&{!*2Oƫ@8GC`ڄrnf̪wiCť]uVRy}-+߄2p^ P̎J;tEEg&Q<3ae[3{~\ylQ?fxZe2׳:Jy|=mH3l'w5RdȪr2<^w|+7OD2gU7UJq+W{q% PWF2͆-]2Ð<c>+AB3׊eC8 +ⴟcv"R{!7ӅsJ~zaPB<|4}'̾^ # +"ULxixމd&ݯpMqө <@Oo"YrISfũZUͪyq*jX^̪:ojdsyzqG_z-z/DU]99Vսz9.nv 
+LU,͍QKc^f0knՏ~oog+?d{7Cg!'|z3};sЂiZ mvL^xTrmFAB1.tTf!<@"ȟ|{'A_ˡ +=ErHp٤A-Uy: +aQ(<ؒW-=sb0|tIC8K։w6uEC@,tyBŅ<"+1ƄV(§ځ~>9hЧs4DTz#0+ >fWRx~bl.F,30#] 9Yw}m%d:vUᎊϮ"D'5cGQq +a8q464D>-^f~~1ws9i<*GIW|q0XVxL8F8sZ;8/]FOn"@(%1Q |&F e!|Qbԉ& F_n0O{ѱRl?Ya2-R%y0]+}35\ᏆP_wn +B` \Z$ +MRl}ߪOC(w`BQU5?LqY2n4΁q[t}C֜=%DїZVUQ$ψR="`s4z1]]z Ke鞟ndFaWr ɏLM/v\a$,]A$ +7jpbx휙:Dִigk(drbex\[̖[SJI'鍯)™Y?(ɴG=u؏P;!Gg>Ra3\cdY{a iT c3gWia ٿ]\ʰCVLTad2H9'cnMV i׭,.}S7fu*JM*ݨ7+@J w2,4j(qlޣmA.OcFI1WaCzdX(m?%sRX5c%Gq&@n$AK$oR] 6FCxCjv0J(;]B+I-AڌRZVS*%W@\Ze(ṙT,ge6R!imɴ 4V'eF&BZjQ~ZOw7.L#D6k=aT]&a6ȰQ-qƦéAɬpE?|'00^7gs!cy&D`6ᐊ vB8=i.!TplY|Ye>+xoQ??PY2 $3: +Ĥ3@]<^EH+ö⪅3~YڥDpSF{2Ujls? 4RxIn2^4׍9GM|\[ᅢMJbwm2^3RKBUYg>P64O=!Ĭ>^ _G6oňi0DZc-V)f(qsݛ3eID#QA.]]PclDwu8-ba})dLJ0n⠐ +U +G8i$d7G{?Z+Jd\?_ ϯ)jJ V Ec1/0ͰϥV#"{2zE%v}"&~XaK]"A +wӼ8ʦ~`.N>Z{v "n#cESi|u*;-h͚g@@9h'WE{Yc}˵_-bxpdCe+X؀S'o91]umzMBlz(aY쩀=z(`+5H5.pVyטZ&qgF'IsNw (Yuze`xgs"ja6 %\U|˓`<٨L`|2Fᠹ/r4R*PK׭n&BBaOI͕;Ѕ&=C] +Z9 zAt0E+aD[ 5/LvR*!^.K'k04;ySuB!U">[^}c@,LNFxHm ZeKnr1!ߢZzaex_yH."͚~j6 5Wnw(Uӟs]&Wv:+ +%`#6^pyjq PbjG{: (fJM{6 Y*ʀ#gN[Rw{=v,˫QW5!5~~;˯i9=Ӏ^˹$[pEsRYGU3LHE9 ?1,P}*&l/:ԭ .R9^\7 +MG篃yϸ L|I!CV6$Zoi|.g3ηr^ #˴V%?k^h$[vZk۴,CJ3M2PzY!}`\iu8]21$KTMSY{cY-aT +v@vhovcH!뱞&'}w8EX1߷ʪy1`ittJo{V,U;cU34 1V #ۢ + 鎣L!k!X]<{]Odz=`i0uq9ֈdT4[SҧZ*)]4ʬoԤ«z-pmVȇȶ'#W鬷)qW<,`p":ڬ *{J"9N1/Qp!Vİ4HF䉺*4=SQ kMqx_|^x=EMh$f>\R_ <]b`jT-wjE +cA2:+W&Spku&U)mNij>wh/÷LJ[{Y|uIIgdsI?z +lht$Q'Ըj(34*TӦWF7rY>1n4B}g1jp-)s23! 
~ɬrIAw:1b"aW Es5hQ%:pr}:jc3_[fi.M#OtZ }^pkU6+A l mU2)o^[!LG#}s(_kT˚g|ӄ-̾$x~ts^v`4o!)h/ }yfrx_>פBfrQf|%DRHLYn{'<.n6XA)Q\ ޳?n [/mMB~aQ"~F}c8FUI?/y +oV e^)]MRS3"{7ʓI췕FCqgfbpѺ0\Uq~9L)sX KWi>W|F?qi凉xFU P i j/ä8 U1uR8Mb?1c@@?XW_*F]&}}EHlOH~;)/4{62M, 6gz4Jfl*ˠ*<[Rg  oT!P$g ^m;m)`ix2gSSvA$Ru +;L xwF oj~8uD]u,2sTNUܵ}J|//Mdǟ[uTR6VKujԆ<2'^d4Pj4E j?VEcط6`ϡ?&U;5j--(GNE.:X{ヨhV(Hh5qMQT4\"КN"=| "g5rU>T,5Ej+]u5Z#\;몶vhrGASuzٱC.S~$kaN$GMCf5{Zړå( JY0NX,)I߆KHG8w&#~~t%$vH[^T%6=5 +WtXz`_0 (@FBأn󑈣zhr4).o$;-\lz<@}]}S.ҩ+դh..@)1+!h WVn ܙ@nAZby;‘pXP%vNVl;gn18nK3$TN1mϸTF@B ASP&rHbע [Aj"Z];3ƫjapfl>nA4zc'LauR\̈́s2$r0QDj h-"@SN}`W|tFD}6ӱlCC*j2'myw ȲzTԮ=]qZ^&y|e]')TMGQR0Nm ]u6~ +~VUr7O{E噄 j|5xg̬YgJaAp}ӞW4▲o~P>.IV;jxkMto:B,AuwQ2]7 h&lOŬP*D`Y(]#}c&t]6[HKM}3M +E@FX#8p!kehiwL'iw=^3kCnMϋ]8@Q(;Z9s~PQ$﮴'HZ8c5CɼкZ+x*#Ezbh El4Ho:S֬FUZcSzB-9wZyV#.'Ҁt'DZw9F6a1~'u.nDaEKqᅴfbW;2݋6kU*!"o/^ +cHIN/=0yk^~[ZGn{>P۲/Ka+=/u[j ʂ'!3S\>Rlxa t~ΈvE'٣Y'[DcFSloX}ߗ h˱XCyťi:`Twd1̈UJD·߯*gT,ϷүW~R8aMBcɟ] 312:)D֕oyIuC^GkC}mل[NF=o +<[j:ʿ(Wz `.d2֫Ҙ@J5*lu>#گ?l"O{b:|cUkedm71kJ݇r{ +O4,SJc{4:NG˓` 2hЉ h!PjZ2k'Q;}hٛ~딞W! $ #W{߸_Emhe1ҀmXiZ~m\y2vZs1m!T 7:v(tY4Vۣ] :Po p#*,ɟFGDbH^pр_P9*^N.c70@h *_[,(K|SUNoj]3^f-v-PKKݙ)l]c(Cfˈɨ_x5vrGSEPÔ,.9>;`(X9Uyn ͬk8E +qqYe:bp  +($4bjoF *iNkΆ ꋫS˶=a+cڬ̞:%=lJc]0U*. 
+ve~.1Gt?SN*P$H,"vrZ#+4NݰV5^ [T~u%$ l-=895FZcՄCV +y}YJ4Z z[ĈS4.XIDYOCnRڟC̥5#CeBzZ7^ksk?u^4h6|36ueC}8Z5uޱ +ۖyhzʖ@cPZVyC/UwQj>65QAz5?CTlHEC?&FFmԧ.g}/4פ+%&[fһn4'Z .HDsC*,#5 +Bh\[Qj]ʯi1G*NddBCrӣj1Hʮp +L#`$M(OaG/ GXauTe`y5X jK4Rdu0"DyTWxzB&~94{yJf۫{w53ŶտֈVG=Ϲ^WI^*o[ϩ[c +6(J +#h; +hH5:Y)}Ci:DJ-B B$Ğ,B>ڃzj*}-W0l|9"I W6F TaυJ%EDWڲguA%e-bÞƐ@m΢2fT6j ;?]j xLBh7v,Eݐn+THLu2(̪|*:݊r_D>|yKIH c)3Uw<`n2K4$YgW˪sC9ʒN'K/zYI6;+Ov; }!ּt!)CCJWbӬ +7;s'@8ZDݮ-̕ker;֕f]-ǣȹYI@ʃ1s~WQ3G;vRSNCO"C k|a?X 3 ++#6n7&H] 5R¶we;"Vo pzn +<ԢdĴ +W4dEQȡ{\y@aJo ̚[t*:<~;$I4*Ҹ(RO/!4s9{?„h0ډߗH訧ז/CZH}۵qUjz`t'{WM|;T6ym#"BY*}"%xMׄX3) +ӻ]P9/R;S@,"C/gRfS ze+0eJ4_jA4-2q׊9őcuW1Y[\Tۻ (8p9dҋç_vg-Ss)2,꿭QH>; ~ֱqyLX@)XY}IUuCy.faÀU7T&,Q;;CLJ)ªurt}t߾+TA endstream endobj 44 0 obj <>stream +8;V_Y>EV>s#Xhb<(Q1"0lg9Wte*=mj;oE;_%SZp]+80K)eUqr82V.>W\QesHU!`f+ +GLT29fPkb`F8i\%HOh4Um;Z#fO6K;*^EYtA]Z24j4m.eIK8^L$P/4uT]R)E#1,9\U +]R$4*4iYsW`QaQLm&Pu/4NTOeF(+\jB[dOY\3uo&%>;]f\Y%8YBr!j/eBEc8<5^#K +`7pQ,0e#lUaXbQ[8G270&B"nXM +-mk&o#AUsJZ$dcQQU+WVf34GeWi?c+b(Xj<3u.Xb-V4Z~> endstream endobj 45 0 obj [/Indexed/DeviceRGB 255 46 0 R] endobj 46 0 obj <>stream +8;X]O>EqN@%''O_@%e@?J;%+8(9e>X=MR6S?i^YgA3=].HDXF.R$lIL@"pJ+EP(%0 +b]6ajmNZn*!='OQZeQ^Y*,=]?C.B+\Ulg9dhD*"iC[;*=3`oP1[!S^)?1)IZ4dup` +E1r!/,*0[*9.aFIR2&b-C#soRZ7Dl%MLY\.?d>Mn +6%Q2oYfNRF$$+ON<+]RUJmC0InDZ4OTs0S!saG>GGKUlQ*Q?45:CI&4J'_2j$XKrcYp0n+Xl_nU*O( +l[$6Nn+Z_Nq0]s7hs]`XX1nZ8&94a\~> endstream endobj 35 0 obj <> endobj 36 0 obj <> endobj 37 0 obj <> endobj 38 0 obj <> endobj 53 0 obj [/View/Design] endobj 54 0 obj <>>> endobj 51 0 obj [/View/Design] endobj 52 0 obj <>>> endobj 49 0 obj [/View/Design] endobj 50 0 obj <>>> endobj 47 0 obj [/View/Design] endobj 48 0 obj <>>> endobj 43 0 obj <> endobj 42 0 obj <> endobj 55 0 obj <> endobj 56 0 obj <>stream +%!PS-Adobe-3.0 %%Creator: Adobe Illustrator(R) 14.0 %%AI8_CreatorVersion: 14.0.0 %%For: (Christian Zander) () %%Title: (UT_WBMW_Rot_RGB.pdf) %%CreationDate: 21.09.10 13:10 %%Canvassize: 16383 %%BoundingBox: 0 -142 553 0 
%%HiResBoundingBox: 0 -141.7334 552.7559 0 %%DocumentProcessColors: Cyan Magenta Yellow Black %AI5_FileFormat 10.0 %AI12_BuildNumber: 367 %AI3_ColorUsage: Color %AI7_ImageSettings: 0 %%RGBProcessColor: 0.647059 0.117647 0.215686 (R=165 G=30 B=55) %%+ 0 0 0 ([Passermarken]) %AI3_Cropmarks: 0 -141.7324 552.7559 0 %AI3_TemplateBox: 276.5 -71.5 276.5 -71.5 %AI3_TileBox: -126.6221 -350.3662 656.3779 208.6338 %AI3_DocumentPreview: None %AI5_ArtSize: 14400 14400 %AI5_RulerUnits: 1 %AI9_ColorModel: 1 %AI5_ArtFlags: 0 0 0 1 0 0 1 0 0 %AI5_TargetResolution: 300 %AI5_NumLayers: 4 %AI9_OpenToView: 216.9478 83.0522 2.69 1336 999 26 1 0 43 129 0 0 0 1 1 0 1 1 0 %AI5_OpenViewLayers: 7777 %%PageOrigin:128.8359 128.2852 %AI7_GridSettings: 72 8 72 8 1 0 0.8 0.8 0.8 0.9 0.9 0.9 %AI9_Flatten: 1 %AI12_CMSettings: 00.MS %%EndComments endstream endobj 57 0 obj <>stream +%%BoundingBox: 0 -142 553 0 %%HiResBoundingBox: 0 -141.7334 552.7559 0 %AI7_Thumbnail: 128 36 8 %%BeginData: 7440 Hex Bytes %0000330000660000990000CC0033000033330033660033990033CC0033FF %0066000066330066660066990066CC0066FF009900009933009966009999 %0099CC0099FF00CC0000CC3300CC6600CC9900CCCC00CCFF00FF3300FF66 %00FF9900FFCC3300003300333300663300993300CC3300FF333300333333 %3333663333993333CC3333FF3366003366333366663366993366CC3366FF %3399003399333399663399993399CC3399FF33CC0033CC3333CC6633CC99 %33CCCC33CCFF33FF0033FF3333FF6633FF9933FFCC33FFFF660000660033 %6600666600996600CC6600FF6633006633336633666633996633CC6633FF %6666006666336666666666996666CC6666FF669900669933669966669999 %6699CC6699FF66CC0066CC3366CC6666CC9966CCCC66CCFF66FF0066FF33 %66FF6666FF9966FFCC66FFFF9900009900339900669900999900CC9900FF %9933009933339933669933999933CC9933FF996600996633996666996699 %9966CC9966FF9999009999339999669999999999CC9999FF99CC0099CC33 %99CC6699CC9999CCCC99CCFF99FF0099FF3399FF6699FF9999FFCC99FFFF %CC0000CC0033CC0066CC0099CC00CCCC00FFCC3300CC3333CC3366CC3399 %CC33CCCC33FFCC6600CC6633CC6666CC6699CC66CCCC66FFCC9900CC9933 
%CC9966CC9999CC99CCCC99FFCCCC00CCCC33CCCC66CCCC99CCCCCCCCCCFF %CCFF00CCFF33CCFF66CCFF99CCFFCCCCFFFFFF0033FF0066FF0099FF00CC %FF3300FF3333FF3366FF3399FF33CCFF33FFFF6600FF6633FF6666FF6699 %FF66CCFF66FFFF9900FF9933FF9966FF9999FF99CCFF99FFFFCC00FFCC33 %FFCC66FFCC99FFCCCCFFCCFFFFFF33FFFF66FFFF99FFFFCC110000001100 %000011111111220000002200000022222222440000004400000044444444 %550000005500000055555555770000007700000077777777880000008800 %000088888888AA000000AA000000AAAAAAAABB000000BB000000BBBBBBBB %DD000000DD000000DDDDDDDDEE000000EE000000EEEEEEEE0000000000FF %00FF0000FFFFFF0000FF00FFFFFF00FFFFFF %524C45FD11FF9A9A9AFF9A9B9AFFA8709ACAA19A9AA1FF70A8FFA870FFFF %A2A1FFFF76A176CACA70A1A19AFFFFFFA894FFA19BFFA1CAFFFF70A176CA %CB94A8FFA2A1A1FD43FFA2A8FFFFFD04A1FF76FFFFA8A1FF6FFF76CBFFFF %76FFFFA16FFFFFA1A8FF70FF76FFFFA170FFFFFF9BA8A1FFFF9A76FFFF9B %A8CA76FF76FFFFA1A1FD26FFA8A1CAFFFFFFCAA1A8FD15FFA1A1A1FFA19B %76FFFF9AA1FFA1A1CAA1FF9BA1CAA19BFFFFA19BA8FF9BCAA8A2FF9BFFFF %FF9ACAFFFF9A9AFFFFFFA19BA8FF9ACAA1CAFFA1CAFFCA70A1FD23FFA89B %9A9A76CACA766F9B6F9BFD14FFA19BCAFFA177A876FF70CAA8A8709AA8FF %70CAFFCA70FFA1A176A1FFA1769AFFFF70FFFFFF70FFFFFF7076A8FFA1A1 %70A1FF9B709AFFFF76FFFFFFA870A8FD22FF9A6F9A6F9B6F7070706F9B44 %9AA8FD12FFA1CAFFFFA1A2FF9BCA9BFFFFFD04A1FF9ACAFFCA9BFFA1CAFF %70FF9BCA9ACAFF9AFFFFA1A1FFFFCAA1A19ACAA1FFCB70FF9ACA76FFFF9A %CAFFA8FFA1CAFD21FFCA69946FA1706F69FD049B9A7070CAFD11FFA176A1 %A8A176A1A8CA70A1A1A176FF76A876CAFFCA70A876FFFFA1A19BA1CA76CA %70A2A1A1A8FFFFCA70FFA19A76FFFF9BA19BA2A876FF76A19BA1A1A8FD22 %FF6F9A70A19AA1706976A1706F7076A1A1FD33FFCAFD3BFFA19A9AA1769A %70A1709B69706F9A709BA1FD6FFFA176A16F7076A16F9A697676A1769A6F %76CAA2CACAFFFFFFCACAA2CBFFCBFD06FFA8CAA2FFCACAA2CACACAA2CAFD %05FFCAA1CAA8CAA8A2A1CACAFFA2CAA8CAA1FD07FFA1A1A1FFFFCAA2CACA %FFA1A8A2CACACAA2CAA8FD08FFCAA1CAA8CBA8CAA2CACAFD0FFFA1706F70 %76FF9B70699AA1A1769A709A6FA1696FFD05FF6F6FA8FF6FA2FD06FF6F70 %FFFF6F69A2FF9B6876FD05FF706FA8FF6F6F9BA16FA2FFA8449AA1A1699A 
%FFFFFFCA6FA1A176A1FFA86F6FFFA1709BA1706F9BA1709AFFCA76A176A1 %9BFF709A9BA1699AA1A16FA2FD0FFFA2446F769BA19A7069709A70A29B9B %6FA1FF69A1FD05FFA16FFFFF7044CAFD05FFA176FFFFA169FFFFFF6F9AFD %05FF9AA1FFFFA169FFFFFFA8FFFF6FA1FFFFCA44A1FFFF70A1FFFFA8CAFF %FF6F9BFFCACAFFFFA269FFFFFFA8FFFFFF6F6FCAFFFFCAFFFFFF70A1FFFF %FFA8FD0FFFCA6F6FA19B6F769A70FD049AA176A176FF6F76FD05FFA170FF %FF706F44CBFD04FF9B9BFFFF766FCAFFFFA144CBFFFFFFCA6FFFFFFF766F %A8FD05FF69A1FFFFFF7070FFFF7076FD06FF7076FD05FFA16FA8FFFFFFCB %FFA87044CAFD06FF69A1FD14FF6F6F6F706FA1709A6F9AA1A19B70709AFF %69A1FD05FFA170FFFF9ACA706FFD04FFA19AFFFFA169FFFFFFCA69A1FFFF %FFA1A1FFFFFFA16FFD06FF70A1FFFFFF769BFFFF9A6FA8FD05FF6FA1FD05 %FFA26FFD06FFA1A19B76FD06FF70A1FD14FF6F6F9A70CB9B9A707076CA76 %A16F69A8FF6F76FD05FFA170FFFF6FFFA8696FFFFFFF9B9BFFFF9A6FFD04 %FF6F70FFFFFF76A8FFFFFF9B69A1CAA1FFFFFF6FA1FFFFCA6FA2FFFFFF45 %69A2FD04FF7076FD05FFA16FFD06FF70FFA16FCAFD05FF69A1FD14FF6F6F %76A86F706F6F699B699A709A69CBFF6FA1FD05FFA170FFFF9ACBFFA86970 %FFFFA19BFFFFA16FFD04FFA844CBFFCA70FD04FFA169A17670CAFFFF7076 %CAA29BA1FD05FF6969A2FFFFFF6FA1FD05FFA26FFD05FFA29AFFFF44A2FD %05FF70A1FD14FFCA769A706F686F69FD046F70A1A1A8CA6F76FD05FF9B76 %FFFF6FFFFFFFA16876FF77A1FFFF766FFD04FFCA6976FF76A1FD04FF766F %FFFFA8FFFFFF699A9B6976FD07FF6969A8FFFF7076FD05FFA16FFD05FF9B %6FA1766F6FFD05FF69A1FD16FFA169449A696944766F706F7076FFFF6F9B %FD05FFA176FFFF9ACAFFFFFFA169A1CA9BFFFFA16FFD05FF7670FF9ACAFD %04FFA16FFD06FF70A1FF769AFD08FF69A1FFFF6FA1FD05FFC46FFD05FF6F %CBFFFFA16FCAFD04FF70A1FD17FFA89BFF9A446FA1A2706F70FFFFFF9A6F %FD05FF70A1FFFF6FCAFD04FF766970A1FFFF766FFD05FFCA449A76FD05FF %766FFD06FF69A1FFCA44CAFFFFCAFD04FF9A76FFFF7076FD05FFA16FFD04 %FFA170FD04FF44A1FD04FF6FA1FD1AFF706944A8FF766FFD04FFA869A1FF %FFFFA870A8FFFF70A8FD05FF76689BFFFFA169FD06FF7044FD06FFA169FF %FFFFA1FFFF6F9AFFFF706FFFFFCA9BFFFFFF6FCAFFFF6F9BFD05FFA169FD %04FF9BA1FD04FF9B69FD04FF6F9BFD1AFF9A6869A1FD08FF776F76A27770 %A1FFFFCA45A1A8FD05FF6F9BFFFF6F69A1FD05FF7670FD06FF6F6976A170 
%9BFFCA4470A8FFA16976FFA86F76A8769BFFFFA26969CAFD04FF6F68A1FF %FFA244A1FD04FFA16876FFFFA24470A8FD19FF706944A1FD09FFCAA19BA2 %CAFFFFFFA8CAA2FD07FFA8FFCACAA8CAFD06FFA8FD05FFCACAA8CAA1A1CA %FFCACAA2FFFFFFA8CACAFFA8A1A1CAFFFFFFCBA2CACAFFFFFFCBCAA8CAFF %FFA8CAA8FD05FFA8CACAFFA8CAA8FD1AFF9A6969A1FD7CFF706944A1FD0F %FFCAFFCAFFFFFFCAFD11FFCBFD1BFFA8CAA8FFFFFFCBFFFFFFCAFD0BFFCA %FD22FF6F4469A1FD0EFFA870769A7070769A6F9ACA9B69A1FFCA77FFA16F %70FFA8706FA170A1FFFFFF9B69A1FFFFA19BFD06FF7770A1FFFFFF9B9A76 %A1767669A8FFCA6F70769B6FA8FFFF70CAFD05FFCA709AA8FD1CFFCAA170 %4469446FA8FD0DFFCAA1FFFFA168FFCACBA1FFA169A8FFA8A1FFFF6FA1FF %FF7694FFCA44CAFFFFA169A8FFFFCA44CAFD06FF69FFFFFF769ACAFD04FF %9AA1FFFF9B69FFFFC4A8FFFF706FFD06FF9B76FD1BFFCA9B696F696F696F %696F9AFD0CFFCAFFFFFFA16FFD05FFA844FD06FF70A1FFFFA16FFFFF7670 %FFFFA844CBFFFFA16F44A8FD04FFA86FCAFF7770FD06FFA8CAFFFF766FFD %06FF6F696FFD05FF769BFD1AFFA8A16F686F9B4469446F6F9A70CAFD0EFF %A269FD05FFA86FA8FFCAA1FFFF70A1FFFF9B70FFFF76A1FFFFA86FA8FFFF %A1A19A69CBFD04FF6FFFFF6FA1FD0AFFA169FD06FF9AA16976FD04FFA19A %FD1BFF766F696F766F696F6F946F7076FD0EFFA169A8FD04FFCA44CAFFCA %76FFFF70A1FFFFA169FFA19AFFFFFFCA44CAFFFF9BA1FF6F44FFFFFFCA70 %A8CA44CAFD0AFF766FA8FFA8FFFFFF6FFFA16876FFFFFF9B9BFD1AFFA170 %696F6F6F456944706F9B707077FD0DFFA269FD05FFA86FA8FD05FF70A1FF %FF9B6FA19A6FCAFFFFA86FA8FFFFA19AFFCB6F6FFFFFFF70FFA86FCAFD05 %FFA8CAA8FFFFA169A19A70CAFFFF9ACBFFA169A1FFFFA19AFD1AFFCB9A94 %7670696F696F69706F6F9BCAFD0DFFA169A8FD04FFCA69CAFD05FF9AA1FF %FFA16FFFFF9B44CAFFCA69CAFFFF9B9BFFFFA8696FFFFF70CACA44CAFD05 %FFCA4476CAFF9A6FA8FFA1FFFFFF6FFFFFFF7668A1FF9B9BFD1AFFA89B76 %6F6970446F696976706FA1A1FD0DFFA269FD05FFA86FA8FD05FF70CAFFFF %9A9AFFFFFF6F76FFA26FCAFFFFA19BFFFFFFA26976FF70FFCA69A1FD06FF %6FA1FFFFA16FFD06FF9ACAFFFFFF7069CAA29BFD1BFFA1706F9B709A449A %6F6F709B70A1FD0DFFA169CAFD05FF44A2FD05FF6FA8FFFF9B6FFFFFFF70 %70FFA844CAFFFF769BFD04FF9B68769AA8FF6F6FFD06FF69A1FFFF766FFD %06FF6FCAFD04FF696F76A1FD1BFFA16FA170A16F696FA16F696FA1FD0EFF 
%CA69FD06FF9A70FD04FFCA6FFFFFFF9B9AFFFFFF9A9BFFA86FCAFFFFA276 %FD05FFA1696FFFFFFF689AFD05FF70A1FFFFA16FFFFFFFCAFFFF9ACAFD05 %FF6F69A1FD1BFFCBA2A1CA7070A8A170CAA2CAFD0FFF7668A2FD05FFA86F %76FFA8A86FCAFFFFFF7044CAA8A16FFFFFA144A1FFFF7670FD06FF7669CA %FFFFCA696FA1FFA8A244A2FFFF6F69A1CAA1A1FFCB44A2FD05FFCB44A1FD %1EFFA8FFA8FFFFCAFD12FFA176CAFD06FFA89B769B76CBFD04FFA1A176A1 %A1FFFFFFA1A1A1FFFFA19ACAFD06FFA1FD05FFA19A769B76A1CAFFCAA19A %9B769AA2FFA8A1A1FD06FFCAA1FD42FFCAFD0DFFCAFFFFFFCAFD13FFA8FD %0BFFCAFFFFFFCAFD30FFFF %%EndData endstream endobj 58 0 obj <>stream +Hr:z_wNwl1`90a!yd<ؾ~m0ݞi&Dw~։ĘIB^˃HyD+#՜|Cd,j~۠0R$Iz ,`O˜G]û +8RFV1wwM).٘'a>^Vust6!Q +TZpHY F֚Zӄ3\Z^;޾l}ڋ3"#QIJp׽Ѯ,f6X>5-sLp9n`렚ۖ. +6rx^+Y)?F̧Y ~3 eDTꎾjgNa?e|op( Rer+$@72W1uhP w]ZA P FJC^ +wL槹b! $P~-A mn'o%VitQ+1Y_,g(ExHE)lb'f:")@ +21`˳yg/)<+Y +RjP% ]PAWp >L"Nn`$(y,>!P+doiLD)>'LA?C 3̱D_θ"):G3̌1xXM%K;|´;S>2A:m$4k!R0& 9D?!W* ) ҅1aDxJo)yTZF-Yll> .n/To8%;nWA1 |rvo&hg=[ՂR{nqia}>BK<#e>FWqy}o0 0{n[`CeF+ǃ ;áOo\tөF7 x_5F *FfTΉ^UP;7Vu_:1oE V`Mphm~Tms2#桲C`m4Os[;i6a-e/tWAR0[roѱ3PVr_k`q\b\y1ϐzr=\_®(~1toIǩ"0U_jԎ_ +Н^+4oG-bAhe +:Ò:,-p}$kO?w8&;5Q:1b}e?( g'1#$ިL~UhJdzH'_b>]bAc`8'JRxg<Ĵm`7 IW@Z8 '2"vPR5SN.V(7(ȣuEJEs;ʢ\W\e,2GuˎZ [#K(({gD纷udFd&anpЛP?dO;b*r.+Cu3Ư9=IXTr}XZaގ6q"rSm^Z )E)! rJ \VpC<$dV\ݺ}uݶ2ìzb2X;qy'?WV!xC \ H n I*i=y;*P qjf\s9w- + U #i8̥1AL8”8O5ē^ ICL Թ01WXL8ʿd5pJTHS<)tԵp!E0z߅tZ2Qdj2A/o#57XY> @7$} <ׄF4)RKhA]5[b(AUM}n-MTW(]>7c@E"e "_nϣىL)h(X1f4鴀b1܁P0Ê9!HA,E>؂OpXi%"uK)P*av! D @d +1 gYTieA3FGD2 K :һ~O$Eu 6|xOq!/Y#;Y +-!7Q@LŜe@*y +EMdbn@(0SAb~Hq+ <"x$"`ќn\6B |"1:xfuhLE3F U]:'Ա- 4 "gBzO 09NaoItb!GQ3v2a =Gw%b `fi"./Y|Cڠ wGAp@Alq ]I趹.UE"Zr[F&ݐ WU$_Lʍ}2ԚXາi!hdg(k +ȂneoMk; ȹ mur]nQYNS aunvdIGy%:dX]G!:͝jBPx0w,"0K21E8Ҽ"'CD4+Ub3QEi̿džZt(Y7ʹ=T/ڨ",K87U&*7-p#F7HˍZ~09HsFВ;0AoAJIyi#K-G*Ak!mtQ0§"/Gk>[mEa/k.#}"^)Z4=D̰L*:2TfcK1A"+[C?FKJʱf`nu#y +%ƉGI!an])N7@ /CU)+6pk22?t49ڕ]6ݿ0{Dr[= A$h}6%;ClɅ:hb@LTF#4{Į;0_ء_]UdYukQ@ ؇blczQ6Ӣ¬0pO[rzJDYp]CV$yJD *!YT>d 2ׅˁ܅]Ͼr=(fNѼ$?B? 
c +&NNq)ϕZ@`957N~wj翪lZ 4K屠/l5@bmK)]*V>EDA0Q)wK'5wW`{@scC@$z!x1/G*=Pq28Ҭ0S7sx_X"bDpb&T!t-tS]3;Dh*D@Jޭ ~> 9J%o,i֤0<ĨňLmsR r%6!Dѳilct-l+qPW@ɼCR|[yCrh2q5qVL4Nw3G~nri<˵αP'q]ǒJ<8Ŷc +|zb m}ƒ|+˧(^}Yi$w^ʕ +Qr_ʕ8B'by׸vvOJGw_PՎRf\Ԧ,D%f!R:xl6tk?õ jf?7F2k386i\Tg]y'ze֋/{eѺgŶ&_츤+qo{[4;-&jmPX;׬]5Ao|}/⡭'6b;O?”fsܡ()x1{kBcUIi^_HZ ^5]R7mZl5mrLR\~^{֣-Y@,Rde[,Rq|T׻}t&?5|]JZo-mʣvKciuBi,ҢY<'|nsܷI/Ƶ>Nj=J^mI[;H $=V|@ƔWɧk2IOY+|{聤uoF,trVfޝ;,RɄl8)?{lLRR2_:Il|"?ؤmX)PڋBKki +f}hNu'inXW1WZL\DJ^(G> IvMA'>4ixMӁ2n4zшaXދ"0z9Z5 $"}WnU,n+l +r.΀4 jF(i[dTtudZQcl'/-h4~k߶+hnXPnA>R1 [ԟǛ&(J塔0e'CAx_*I֗9!ի_nKk +mt{STz8|c/Fw靟!}ᏼߤ zܧ2r(B! K8~KkEïG=R~".Mŭ9*T?C&]S6'(w@ʣDlg Sd $Hz1 OCw: .o@*}]zﭜ~]N3ej72/;=^z~wwW ޟ9>R.ԟ_S93,W?~pwAWnqsISv*3uo6{zox1{d+Gf17 %mTZt +d&Web7-'lgNoc2!^(r_0K$vےnl0`Ʊ͢ہa\vF!/œX#a|JdUkE$*Kk V݇)17=n4Lhc!F|-A~+_%fY}KqCx&(cRޕgRQ O +2rNǸUШy~jOjh!cooBr^U,=wYF_3wBeWg=ftəlY%CBOa[ x+ȅMIWX)NeaZ];A;0B 2CS|7-zOˑt<Q{ *(0: 0a>!>Xǧsusҥ +f|&X[7d䰅\Ł2*0u`IbAS_9;C<./1j)$JD ^PPS{ciވ}-)/ó*s])eFdWK$kBO 򱖿XLvŏ .;ڹgOOxdb.]{,vJ}yY.iﴂLJhX(U;eq/NpKG~]i_'f G4Sƾ e8*L6m&BKDZe&M +eߙʿ.[~WrXʎF)4S0bIo P`͋2Yx#*SbN!Q9#ܞM uEe3ǃ.ñg[CSuѺ?NuLd;I +A>0%Npԑif_haŗI)&=52y`׾ni*0 ~a$t>ȀY%){`~tęgv%d2S[b!( EJ5RO{PDDϝ\Y{3^W|ɨhz5@eU d=87$:>mEE%T=XD5*zWͼA\T;U9RtDcn$t-wCQ@nU}-©=9-5Wra>s@7CN",' d!=f27 f ⻩OON&(j3C9WހXz B>R{r.e{e](y!a~5pvilc(sr>2w!vYeg73z1-.*3ԃ.v +䭠L È/9gTKI ;R<̌҆$A:=[*>+(U:gvߛtH.\!y?O3,]꣪e~"\tw + #u"ƈ@..X ?f!1Z+ҐobA6YxjMZiSiљ cǝR д{ƥVBU~UWBc}ц t:9JR,j2d,8Z¸+2<7MSјUy'IYI"Aۓ.YcVdo=Vh-FǞ(uYJΌ֩qAeH$ۺJ cWlznb<ҫpqd"tbzb"+Ph36XQEVwuekpoξeq՞*VZ8TxPctmPC'Vn\bEKsa*}dSsg~UGT*f#ھ[rcEx]Z|lOўV:Of٫saԆ p2;c=>۽.D$"g]@R1ӀV Y9aDU-z:ͨIiE;;uTm5}S2SO\&Rg SQT*77_[wl+ 'D˭1o.Mlpzbۅ!Q&(m`mN&uq|!2E 'Ԝ%rȄ˽LBxi>tʅ.ZDjEޞԊQnj$ˢE=C"M!y4CfAL&;L03b7H_i##y#҃x1h1#ojDL:( 񙣟u0 F[<9:wGcBF'صn_=qigω7˞JPz[kkg_ֽ}bw1אXB腵CUY5 o1v1PlXKiPLvXC +XB+o_CI[ƪӢ*b]o`:}tqq!㈀ ap? 
8\2.4ǜd&~BC G܎HvMcvΊ* 3KVǍi~wZWsqT]fpj>ԂoͻZ9v8RkV-56^*SnoY[ݔh ,AJD=XRR0OYT oRN2's?hNB" s 'i7pj RU/rlzt^M.PZqq(Aꩲt0HyTLzGT,s#S=tv +>zN!&cv9i3Ӵ^j=d}SBi;k_//GcZN'ȓDSI*Vi)GM1%`AYMo\\yT5\\"R(t-v u +H9J I +U],l+ O-tkGYa6࿌7DHtsQD\{n`u[}vЃxn abw/XiG~$߿8FPCbAMVߴ/Ӧem2堪_ATmX]Hl&^6#Fɖo":nWAcǝ3RKle%Z͗P*澳ň/RcEA'mXKx PtcKPl{F6v* ,D}~J <7L3ӄXc8I$a{/Xoۛ2q LQډJaŻH^[3A a'lCdQ`;##*NHCWx[U[1 S1z@`W=)3˪)dzWK:+# #X8[ܐ/7w~uH%ni)5\$6I!V&vɸސbiO8b95-ݷ}`rԖ~ح#c*GScyC>BnE!L;+t߆B }(x+Y#U nJHX5FuwyT-R,W‚U1j9yv$XOl9-/qKeCydjӑO ^ٷ*KXTTةRba_`|$\o\I˅0E6ʡ}M—yT~ISX.xʗYcx + KcNm$5 +1)?s Yች0ƃl-^DEdWB^@[v^ޕM|օwe,LJ>c.r<*ja$/0MY{Т$4r%ɬF ++V\rB1qvȧ +&:NEܕ /f +U^begT懕ܨ5QEs ЊQV@{ƮN>¯PDU8 *Ss]uvWnѸ)H#Э  U2п3?QANQ%V3[nNI݊lY[k* /NXco~]8ʾ$v+ήHP\۞.gwH, uHzbhzMtrp9:~Kw%[* *DHa,:iw< $I|W9APK|ycݕzv]I~^$U$hA6e,GQiC6Ds'b~lk_m*vĘU㏽!bPbm8P^cLUo+ E }dWSX[ UA>Ҟey` `t ŒяFASJ1NEU^+%tee"#t`-P|ZσO,:.NJ/o8hv +X7t5Kk]>IŬyV n%D'g'<3[ b޷0iÑ6t5z 3`22b榩[bH2 )9? 2BDaRRP1+X˗}ܕ(] T5<} CTc)WM@!' u-؊ v01g9W=@;bp~uMpCyѵo2,n$UAmarq ,ޚAgxPWf.7Y0?d5;]6&Ęx=ͤkϞL7RV=O"zmP>VMm֋G}eݻ4Gл曖^5Ï׉c49zdg/E5Ӄo?ڌqT1QإKijgsU zzgc0#hfjrgTQBP׹Ŋy)p1j,%14 ,h6zF]VEOOvB0Y=O@޷11:Lm1F#dt3.{#LrR5B)dGR9HZre&⠸}/oRZGttky" p {>'cY /Rn_ة endstream endobj 59 0 obj <>stream +HW^L }>H)P JE6Ap,EZxo-ݧW$'9I!SYc>,us7"'|C%2z Q;N=Z82ގ)wp/z X\U!C钺W:z#|R,NɺDNeNIa4'S{۶nE.۶T($r\JfǖdvI&nZtؐ+MA?<Cl-xVu[MàOa`tTQ8\E%6 rdGreQ60(tα* 6fӫ]:x_4웆U2Up> 8<oKE4jeR/j.1 oeyThZrY䣸t65BAιX飊}{@dڻv 2$>mXt3`lcuVz^_z^%RgUobdJ 3w\\9@e) ^c{jwr +&H_;`W?fEDL$a +fn̺a.5 b[{] j՜Soa]4*/wPj5kdјR![ } `?<Ųes'"- +v!н\jYJkq,f. urv+X,&'lVð# ɺ7j;:=*obرвv1@Y(Ґnznh#W+y.4BC@R9 [$6 !s}L'ǬSi8Gjpoڬ &+Z;͎nSfiFǟS`|4J=uV*> R<7!?œx1`L:'ʜ-ABI @;6թfV~rZ(*a CV]הx p#ߊÃa.7hݒi0zOi8rM8uvQEI JM]Xz͘%I!fEv}W\/\c5ls6ݥGyVQ,~tc\aH~Ұ䐩cxLBT}[ USMXm"H0`BcQc\g OTqxdtUS ! 
_MCm8@ fE M)ot1iչ]`7T*]1ܿ5n ^5scն>iρȴÉL&e:1|:ۆ~Id?BIR*kdCCeHgV }5!$s@YlR|I_UX`wUV<[߲bv)Iń_(a G$]V^L$_ pmHZ^&$]zݬ/iVN࿿oqMWVa3CtpAQ8>SS>Nd,HwI`?X!JT7̍bY=Z2=ژ.ֹ4\Ⱥg$)3n1wɋ!C鷃X;8Jmr[LżOiIh98cF'3oFczPsH7Zw8y3u8fп +h,B;AH]ʔO=Sb*2[.iẀrYIݝ4A:١zsaBg$FSl f&]:fG 8^2X>j.tX;8P +g;0g/>viy&/ƛBСӻB /8!X:Az׵ 2Y|tc.cw]>F񅋩4w2Pqbڰ3};?b:zxI­$.tαTέԱYғ:jgΥۻ쫳k\ =>R]].+ѵ\˕˙>WԸ[xˡ\j+m +D[6P+b +( $n#a l $:mi2ƆZʪqS)M`XOhiʕ28|u V "<^PJh \\8C$-9>d(BVO2JT6A4l0ؓtuș&#=]&ylsj +Osj>bbd{&8>UYm HBFT˾|e~!:kf;5s2V48l0 6(h~4J~2^{BMUњKX]Os)KQ ؀Nl+R]a2|X?|c'#7 ߎ@P.Fr1+)J/{ٮxP~y~|X\vW_jdZ+ ܝesOQibQ|,9ǕU_u=*d(5+fz)VhJ yՐs3JaɇǨ jߜrI%ɇa졢By(r]`#DwU$ L 3IPlRBb$D1tL1e[*HKB?K~(#m>14x'D{DiM?Jd&JO=~(q,Q#aLDi ;) dLaͧ0su?[G"X!~zI9^ +n7۫ђ9]"zX" +# 0n\2;mg|@R1MD%.wSmpUO*d  v DH5p + \#4"H,,orU6ֻE_[ڢmN^mx9!MTk"Vp0>a]W2"ǭb! wA&KD$:{ut1[qBXxɺc#=FC'E烋XŔNXe] V3l9ǟQ6iûv e"EwAUbL<H_s ++Bwp=oUm3n@V%_Q1zɤw]2 g~ Ł(,܍_GzT>`9(6С>,noA.&ڐ#\yҮ+ wy@ieP1FPi@2(>W|o>5HOq:u {b2gwbw>~Q|ߗ l/$vν^eo~w^ x2qv/ȃqZ0*8۫"*|/;V݋1i^]w84%eh.Kb +o ~vm?S&\pk(s0%^\nbeF&6]H#/7.ذ 齵t麡=p<:8X+4ek_ۗ y (OGZGQ>4G[]zypU 8(O;{eVgE&uzIcE-g t,ZtgUIsA5StD;-־scqq~A˧vҴ|^a~|Soz<,tuQ#uP>m&!5Kqand˝%h:gWb ŧUJr} +N2;yJWn^ Ob>=ƊIx :ڣqaLKguW[ll{yg;wѭxD5߼eKJo;_r~Ñwh~$?#}'wsy>tzx6y|8Ë Z~=Z:>^]|:;X춑lx:;>?ۿܭJ7\[[l!c:n =eWɛ'gLJgٿen?wڧYk#x|zoia?RQiZEpmvWݽUɺUýRJ?㏿~Tg wGq㻱G8Nr"m'익0J XٍRd֖S{bc@/m 3V\øSpɰ9I@u`u| n6q҄ +rL`\DHIGHg&WpxHF(Ply{d Z +"B]7RUEr$Aن@z 3R+2#%iL)%"΂hd2ʝT08D ԃCG4C-&#Ui1JC Ԓ5A4X& !nⅵ/ U8ȋxlę φ(;1,8Zua(4 P% +T/bep䅰"Ki\IWFl /iomNܝe$yi^BT,D^B@ٮ?"qQbR4C1HL KNT#1 e#14/m$FWf,1k$`+ـRg+J q*1*QKQ4ZB*DF_hBv eJ&pծ"Yb*%SE|N(9\HL+15hPbx&Jxj^C&0oZ8*Rb1 +@M)R l +Z(}VҀSV9lChL+ =X*GCxkȘz2B--b1d'7cN'|ϝaPYkb 0L`t=.B=+-RQ(E\f[݊o"a̭Hr j@ ^Pe P BJv/󔤮y +WXsV||$@HjcG/z}+9S{$NPwBxyF=T|tez.l6 `~_"vО-fmO6whg/Y-4i'*#< +x)"/?Tˆ]㳥bVjJ27D5[DFKt|.ocbƖ CͦeL mD. ު?Q)qӱ+/06{t?5w6M*pTV}pZtfBT뼈ήclB,/>C?,'`4;,N(5E. jARD`N|cG<"G[ f;p9PdFLgͰjApTٹ))PQ R`H[mۍL^qzbPb\]-"O'FM FеH ]Gg$݈Щ7=%AȄmxɼ0߹㢹)/CE(|Em߹@ScUPkY0A/Ik59"` l›o_!:h'nk "-Z{ZC ?2λOx]zLQ@*U]l?hs4E5GANLxcmlT1)さ&eՔCZ\@^'u14^=O=bB YYdy?1Jܣv͒0$r! 
Qbpw]~'eڌSdݬyDd4#Ƌ [<ŢX6⟙DHnJE>QY9٘2ג#%U Nj/ցZ"*r nLE7VYa? EKpЉ{9YۈQ 5Q@2mݵ=Sv:s)Y=JףG^WK&^_vd؁qéSͥ8!dڧUdGk,\0wKý {wTJQ8B3 sÕ[уW@osS$ 6M sF;:e. Ez'/|l-g̃mU&px%z!)L +\ux)FW +3(KCT@JC*ЎwHOn/ƔЙl2Vٚ!#o7L%3͜alel;!3ڍghh U@V$ νmhETT%3z:m6Eba+; ѺoB**$y|FwWh_dw;2 dM#.n-e~}g(UBP^<wy?kVh\7+Yi1͐pHT8&0ʝ7>lYqi%t'uTo!5u})Uϛ<3"2!/*M?GEhڤ/ðbBʶkڰfkCe'.<Լ}i!3-L MPgsN._nhhQܝJehS"g4ngOMLðP< vg0 JnsEDZEٮ,RCm }08ś&d>6I6%3jl'}ȴztḙ2|c;d쳘FDҿNoȎx!ƈEd+cOXGٙZ: +"弻u `QM'R83@KXbjun砘Xm g'T9ب|[GJ8/,O2a\\%˓M@YS&ŘAIykzE^Oq5k();nI9r"4?3Wɫ::ƢӅa U9y yPR$d=B#QW1ِ[y>+"&0+Ѹ?&u 58#{(|f}VW)Ԋ/stMû#Eb{(R=idFBh9Ěh<hi5NjI_ T:K0B Ía. }6O4}3~id:7bks΀txRem##E |rp@ԪaƚZ_%cQL00LlV s&>,.Lh^}'ζmyBQR#5^τ H껚V;z̩ +d%RYP<${a +Ze=u8zμ>0? +>phd=&Z.! 4=Ii#G)q6={;aǤwW=΅t0 Ea ,?'=hсS,n!3[.P"5&L!e{9XZ. \P8Ա +u/ѝFe5.$`wtSF=f:Fmv@QtJ +;YОvF+룳MG[7FBj> ,_oF JmiXy">!dG!W-pd]K-*uGh(ǩD[A09MYb$Ld;E1A#opG 2Ue Y7Q1Mn/N!Pw8h̳"ןE s +d=Add +yxJE9o Y/5Y}FNKa5]oR )6,F,V]g@]V%r:;); wr-„=5Zc+LG6i7pYZS?O lM=ĉB*| mTA?)< m/jnޞ'YNְ9\‚0hcg{aHy&ԙݻrfahCB&~>C͵-q^R:; 1þTk/j]TmwH ,Sbw443yX:˜a g|<9 {>&ٶk xh[ t{$l YTԸQ( )ڞ<}QQ %fbrENY~w,y+b.bݠ&͐|D֫(ƑgY{9BtTڜpPiu!`FxONp~gfZnbS" bٛd\09ž=wѥX9R´&c\+'+gNyZ WYVI6)K2`i]c\PmUP&I`ɛo"+~:RDr"Mu3[]Mg[,/!_*d6&PƳ`@#N3O"l4chEya2NGTl +1>_-)/f#tcȘq`H%sl*s+poVEґI퀐cumfv"7$"l# +Xnۤ=^لa +}.:+.<4!k2v#;5B}cȋz6jE·P ;M+~~.}VA,brwb<v8@xa^ .B+lnw:Y"Of)O.y^"ݜՒgV=t}s1LLOds1ϠO l')p_)ODfdB;ȆCq`T6.2[\} ރImziJ>VWUX *j{>O2TWC,'K8DGL 'Қ"nxnj UD w[ +d;|LLL;6I @% )YfPL3r3ߋ,si40Y=2zaݐNUwK]b]*xjk{{ +lK0C[k!zo^n/-D|QA"-Lu]nCZ]|Ex͕,>]a𲑈$4J[B22ʦ85܈`[_Fam??i5 NZPQD4iD?`S9m u@ lY8ʈ%!8AZ#TQY7}@29 3:]pH(K`|SK0_! 
j~>*EvZ9{p4|I-~¾̻p"U z]B jb;.>3,* f\ + LGI@t|k&ǻJQ9|y4KYםxklw@2}1aIJЄ+g39?LzK!0# +-Gۀ!t)6|i*U%petdz˖?p( or533G@f7|( f* S)"J;WAMwZfW @0h@i)uRpRW'θ^vxPDBD;J@jO%@ n!+K / R%SNl&\@-+_SyQG?8 ;-ae.VLC Y FR c3u +2)2 -}uB]Ζ``C*dZqː0\טMmj@&?{?[xLK{ {B9g-?[=" =.wYĚAWS FL3cgXEЗ2B%}- zyNBshp"yD.GX;TlY}R +7=%9ѹzj \;g +4a/ x[M2cF +80@r ,s+؟VeLY1|ys#wUj+]»_բۊ v σqO񲡡 "߾yHd3 UCҟFKPd_rP7z Z ?:^d _3Ecq +$#` u La ]~Pz:1-1je FWj7WioN!eVro$H3B`8|?0!3`"w%erTe{tPq߲ŷ0G +2[{"Yfeyc/^h%-MBa! gҕ[n/E^LxI uBU) +{K`,QiRgIh tN7 [YO46ёy؄ 49\ :$gʈAuUt\ori/r᫟UҝICx-mC6F'{jrin|!5slLpCUMVϷ2#T򶪞T!}FxfΌ/Y: Xt?F}s25晱NS7 qt ߫p'BY@يbPw3!W3h qk]Efų5ZL":l*;k٠% #=߉,# U0:VmaIy@S Bp0Am'YW`6rXJ/lF!xLBЄS5*Zɔ=<+56Ɖ,ޱªbR<8ජ?/gh[ld0*.r6iٖ}KbuNcG/DTh׹`^%tY$9G 3y9oS\gN˯ DA(xM}VZINUȚw\`TA|JgKU-]>{xy83@Z6UkeKw!OʨLos %vM!\q]SayF]t{EJ%Lкsiۓ: qD0j>?Ջ 3n4D'g hzg']^ê֗5FloXΠ*,I&>D$Dip+;lM!q)Buh зmv:؈2Հ)ijhZN  (?[d!xQ8h؇=8ن+Ui)_,]|tuQ@^x \i8L~!Ii}U`1s<(oJ\bM"c,aXa*'fŤ;H[g3xu QƵ Y`{n:1e8GA:$3F]5ɌuPϺ1o cu~lLAp!""䂳Xk~? iIК8duZK7qЇb묦\#ӄ˙[Sisᾜ͋P h 6G ЖbHsF!Ng%ۅ%r:K 3׽̆4NaiU Xl՚#iN8|@%ۉNTm'e-+[O||Ӊ⇞#@og~6FKKk袊dp] )Si}m[P l6tpSfiOwjm&JSA pa˱xإuP%)QG/he;J<8`hKYc{ ,hG{,h +C3G"5"P9`f'7<~T눇6ud~!W`UH!nw~5GwB㑀ڰƭ.M  [`#ftSyuRz8wb^<{hZ-=sTW@^9$` ӕhG~Ck`2VD C߄QɆA7]Ȭx,:xvK -~@kpy}0:3<$/=‰H>HN[!9=>R;1:q:VZL?ӜI{Aswҳ ]ZI;W &Ŭ" [5uJxљAB4TqpxMEh5n ߬ fVc3b@,''˖|;M3ωNԋB]F4RXQ"_:c\E1MJzr\oVGje +/R0e62HD:b vς*׸4zGXPI +n$zR#rR Ys3$U\;9/㐞YwU^wDj)Վc`ngڌnhxv\1!RЋw"yW٥">~損KiIk?dDqJfY%"ӽA9wp3yRb.nY50`%WaJ*U?oa&Ъ?Dp^w\f\k}Ӻ`4.>jyF4si W>4qE~M dd<. b!&iHQqٚxwBP}]Nj~L-:>J,irC+4W_rg3L?% E V4-D I67Y`*dҢHd,$ 0ŞlRV_ҋ-8_C`YEUT^6^->)-_C +{oUIU'coߵ)gs5=N}5Uz̉< TVtqr8f9 u8aMil(Q@&s gևiXkeχ=,#buȮ#;B rU^>(IL74K=*8[Fn:r@UϷytv]Ufp? 
½Z]rxF4;J.yNY>+\C!o{g>=;ApDW-<YbS:R¹#g4}ߙ7KXj|ƾxOkQ;`il +mT>+gq ޶:yge:yIAKN\ amDZjv(Q8&iH߹w "mT'nJQ6XTsfY +\Ԩ|dmM.~;H8HdFy"eh,+ǯ C..*-p}|܂:7kNAUV Gͻv払AYMq+ԎMֺj|tU.ˤz QcD)2)T޶BQmR#rFdF30+jTG]DU/eP1P[*Ov>2gS #pjtl uKi6>m,0j`.AsU#Ϣ(]n3)$D)ď 3kO,̖1kQ)C}?OrA$[eDžMlA50F21D-Egݞs"c<Ԝ՝/;}\uχN9}9'9-/hq/ϛ 9rc̑9R#7.aDc֪D֞6pH`Vr،Ϋ#)57 )C*gC EOB^g{c #7vR=ZZ{(#7N +Fnؓ?3K8 endstream endobj 60 0 obj <>stream +HlMe +GsȳD]5X2}=*FV*EiF+}~-E>,F=Vz)I"ԟRwS:=\У HyT}wx OGϯ5q"^3'iql?S:Q\-/*}6 \8JէYŏIy'z}=xi:֫0$Gؼ8 3MӘUSjVVrE k-BPW>V;~O *T "8@wU>FPi{,$. CJAL򰹊{ 5P9Y-~@ `BE<\%F:[?FSܡ_H*R?ck,4-q#A#4)D$TK8i&5`3rxjGGuq ӌtydv~ˬ-BI3j~N?=d%w.ԠIށ ʨd ;]lVUJ5)PN +2}j[ " dFfܱYuyQ^;̯etmZiW6SZqr~ڌ=?ht-:,v]tCF:5oaGWQN]~FSc;߇|<văMQ9*\4PY!߉<=>kmD쥴 @**=}ч,bæd_:΋h&)@RDK[<QrPfKWyJ,j9Z;lI2vk[| B]WQخlj]*@е =ɮZ F?c]ː,WUH\4جjvĴVrjLsF=L)0)(G9,&[LeզK<Jku]k@$r^\u/]2مpk٥C^U2{暋ɪo)5Qm/mZ@ aBnJ)[d_:lWq H{h1[, +X)"[hZb~^L22[aA$石j IKk:ǭ&e8ʟv}SP> %g!$LrdcrY]p 1>1fWK%k ,O8χxM/cA'9C"y,] +}^xH0ś{g:^kjo͊n-(@1|rM(DzoIcwD={Kl2gW!fΩ?Ǻ5DY"3K$fح!{ V4Ay! <6ID$Z ' dOM&{$MBxؒ&[aRJ6'Wp*Jp}]x5f~JKT +2Y#Y +Asʬ< ]iK)%չc,쩆yAz*}(` נ~ .bA0v30X*I-kXCa")J衴ᐣcRZmS ø|t }|HgiBW=d: +o| !P4Yc-E^N4ٜ#z[8bm6NC):>kB;1b؆j +슭YGf&NR&JYο?DĸYfSd H7{[+]i/KcUiLcy`۝Fb(q_T9\iѝ|XCMp4 `2!&Z>1h%6jQjC +!tG-Q^3[,#^ q6*-!;_ODцkQy4wVE\d1xp, Р ob)z%dLP靐Rt%$nlx'޵AނfeSK F|X>;&!I ! ++Ri%͙f\蟯օ0 + iiw6F(Xߐ %"kX-E Eo |=TLC#jC^. SD":0X%QYgQ +D}ב鰼n [:3%,퐸YjO2v%lRϩRv?4^S;pnx%h'fk?n]Y*ǓN/.@R@F]"ɈhvG-kD"xzl{;RJ ;i0o0ҧWe+ۃw-.8~!zK69_ l&`&>07%X2.FjJ c5*9&p-% `' +Dp")LgӺ5C7݊pGȃ!vĴPGY7Ѿ¬LC cEv ȍaU"r5)ɕ͚(Wtl8",HH .Ơ0gPݡEKiL&WS7ݨbÖ օ% -yvgNKE8>EW{L?sPR}FPVi?eeX" z |"amz(*a!0Z3a#Ч%m jMl]Rbb 'qOMjœ= ]N ;B^Y=IƻI{qqL5qW{0"ep1:c={hO$rr/B.cA2ߥ!=(a3ss K-ƽEmb@$ߢM.ZNݧ-l%}ю|Dg`A.|(s~L}%|[P߈]STbCD`,m1w-E!t35VQӝ9LFONeKҀDz/8yAT%/sō7؁ӍP O~iR@22U[B%eWED6"f$ew tz Xml.vi{ojdԷqf2h. 
܅ +HY RJ^0#ӏ7,.#%zojBʸyQf-MeegmcjtI\7íC4i>VuMw ՠXoOCCif6HYnR@F!"&x*BٶAT%RYs\SުăUߪq4]PNBxGq]I JgK?0BV9e(APlZQ&XWbfZ`G1̀Th@#sU[0CCC~.Iηh( d%>m(~kL#-/ 2= mBVǪ)tLJbmuZQnED̤[J%GՑFEi^+Unb=Mgc=Puyh3:`:3: l_SiIȁVHҌ1(VS>=<Y lU-9uߓOecѩ +~K֨ +`'O.Q)bߊkf1XRJb=뿞U4ٞYZ{Gy{Bpg-{d){9&[-Zu=K9UguXGJlα%b0C6iwDznќy8*4l0T񸠯 b(,,NUKh|0 +V ,(1J;4V1.PW?'zQF[# j0۪ +Yce>lrXR/Hqc\鯱k""S5"Sf"8K W]gydK$[{-zÿ:"%rqv>*"eP@"0I,Xb5iG#x!DNRD"8zDfH@F Hʫ8.Ge9b~;X +GIVC#J""j`*x;%QX3娉NduKAL.WKß oq#51J @{"1t=0y6'{x5Y:{KBԚ&پVj"dRj$Q/u%Q-h}oICe\Sʶ$V;2$}⫈jV|+t6#4R,VP"y+;IXMYyɥV`QLR,XHFK"Ýh80p=. +UdLPRZsÔ6tP|ΒTD}]B5Yᖱo UAmce&bPz-؋A<_ǍM9SG^&*j9@v7}8pFiǪwwx>s ^pceW'8lB46tA"罦 57Yjn]tlU ܲr:Z +DOQ4";RZm6JeWZ6".Ž.3YMWrq\ʀNpVGH*ԡ'F{8 Mӟ;j<"G-duE.^^%a4sI;u֌9wUM3Ut}'-YIRZq dEt V%@ HXm5l#|؟$wĐjoK(<|,YOMMeȃdGVnM'Ֆ_vSCmH=6.]j"uiIuB"#ks0]wf dz:Әۨ3S@"f@DJh‰y qGs/!-؛&c說AV PekF.jɑ,7b';@Buy[Y7ɐzBL}B#}ns7*S *N{sXb jd5R=0[ؖ:^%l`]D8adW +l-Kj A{i2J52E-6}0![mbkJ1ڕV +UG$V +b5Do1JREi l,:,.{0ufH٪,XST(d7&ǡ16Qę+YI} *@"JÑ^=C!TM>U4AŢ$cJn츉@/0 caYPj.V M6`ØS pdQ`7 .T*f+C!/s.|00] vVk[ EY ͌AM?]o"8SŻžKr[`.a@2]*?50(v5KzBXuzMj9F`(?DI,1 r9O=A2Has2-2X]a_F>?^\RHI=}SK{hSIjo@zCVr,ayZӦԨ+od6.{N;S΅):MIv*hmd%`--eej^/#Lb |Dfn}vϗ$d&~QLGiܫko贁 \:YټU7lU7_Kuoh* +B_˜4lKzuQ tz%4bR;Fwm$F?H [ɦ"`4"/I[*mrThN"ǿ" c,/B~aCC棜h"6J{a[:}&uv}2oiqR}}@uk 4cͷjup"h`˄9k`OJz?Hme슒5/3n j H>*[Zrr%OrvF*ls2:-@[𬑃Ip@cr\QB>jvUɅ.5D+qP"Km>|%ʄUBJLB0ҜlaPTи݉A0x`MSZLIgn&KNE2XݧkrVMKC@$vAf[w68o[)kY:3>"(U׺| =p%jA2d|LYpB&jM*y6.~R>@9x l~8ᇃ~@${6?%1иe~ NݭH$8^4z YkX f+ ً N=#29>ڵ#S W{"gnAvc"xtΗ+iBͷ'6I9v+5 + ܮ_i ޹os3N|c(^laxpb=i @B]K D-hjM:Âi{[#Zؐ@!9T&N΅=FD?GP갩_&]q&a2x%.$`BoV;@Z-AnQW4X?n W!5Wd;!.Q\H*dَ:d ~RB bRA:b8KN'$ bM_LSǫr1Yo}ܫ*HU UYfP^_,,6n 2](4"6)l}kw cD+UNo7]e~}G惘_?^{C"zhز|& fJZUA?plj|žP6 nE2P5g.&nn붼_HຎP AEB~6a[@NQ[5dZP8;U\0y#G[G/U.o jDZ@ |EJse]G{(>P hP3gT5l*qhte; w1<6DlkjGq"di gS{hw]0l4꯰{i_Zm?tʚ6DlK[%ǪX\Ccgg/}omMᘲRyxHʓj&18G;Lv̥Ljuj[r"oa5~ ]F5yLP) Lq\6T24a̧RܗHWDCLbgT!3:x(u((ޚJ18C=j԰!' 
+i(Z|/};_[~R懼%$ex*^8QDBI%D3R>3,#zJA`U3Ppe dW9r]܊7`ISTzb?=Ȓn&  HA7`pl?n68tJ_ݧ_A,Q#x"?}Ieڣ|B[OUꄛR˨q2(CIc .H$pL!#rN#jN:F#1*BExoFR=!lO9j>&ㄇ. yw0HI~l.\CbrGu(pkyGF!Nhu@c/=*!FR;B,US{QNg>/ڏ"m{TYp)P{SpRKM&ey_UM5腰N#FY] $Aآ]23rX,A?`=-*FbH6 K)yAvdu`ݯc+ * `p/&NVP(ꗟӭm,C9dS MR4,یM(L!v j1;HHaڥA,W?&bd~@\% Vߢ bd01h z-J}iRݲ 1,^qehS7AC8x5UhDoX26jXDD_94X03 Y]oJS—8/ +[J2ɥ.5k +yNa Y0Q:95Ie ;{zR(E~rN%H-F ~FCx"awL3#=ԟErcz=gVH@dpK:Mn(:[}DQs]yX¬rPTyGaEȵ#bl<ݎs>3VRpM,ְ2i46b8ciUl +`J$~=S3;m[bGի*rNW#Bг,b]2FNyyG$~0ç:ՈoE,h9Y̨ağa*WpCF[i￯)epɏ:y@g},bbDȪvu¿LiƂ,1f&&~ԤkIvL`Ks{mNa$ M(=K]kj]r[ף̹&>H3v1!<Ӣ + J!ʼ;`:2}mp(6OLWB@+׋;q=K ^R ń$"/N5 ~RАYTqG!,2bM&rNnR[Uz[.E>QNg0U<:bF7\pE,p]`H pELXa>]) л#O!`HӰ}&D㏕$3c&t3 PE̵NSU\䣈b/f4wwq +꡹Xݔ3*yӫ{QǼw)!KŨ;ja8p+bwU %x8ǞwaW2@A(Ͻp:oNב|举p9^(58.sZn{RmŊ5CbEcl}<ژdn]BO:Uɑjh Bb'q]MEmI+!@e_\"FbJ1N]rP򮘊Ӆܪ|*#nN/8OTFb謶?3bn<228- b^xp9`2'홪h!9uTMZ|GwCMly_kw58Ҫ]57ng,J#Gk1mz10rSyc +RYbzaV--:F{♺ݔi{:/,zgǾ6KN =ق_B#uDֺМ$N,N7#@Їڗjv>$fh$*ъDH??S?f$B=KWvޟ9y5ud(9iE:ȼ i_86n$hq- +'L_P/&;L_77mߓ⑰mM4Vٰ7d\IVC rxM]} |c"+^NI}Ä<*s7M\QĦu6=D M+kH4TYD;q. 
z0'dIJaA.'v㑕,6 +@z>6sqD+"STϝv6p@Q-X7MR"xdlBٺ.ns6+;YES=9MU8A:LGAh2 !TLH^qy1rC]<_6I5IX͈/i~4bgMU򋪯溜uCCuYܲܲy +3Omtntg^FvPFDT(ʽ0$$c)ğ"]3C]3fU*0iQ%92rjڣN&O"]sHn.*s +Zl(MEh4k8Q|T<3L#I~d8ШCU`Zv#\U^8׵-J.~j '*KYrēKheEha)ݚܪ <ţ%DF]lc@8< م.qL#1 jA==D;'t}2|0#7}"]@7}}`gY<\}|/7 M ( N8"0o\=(#<(R| c"FlpoΦs ) goՙE} zJM +!nݗp 8T'`(8Wns+Sx\Dbt/3(T}ERsD# Y.7yLy1<$c_4 6<3adS{"V0L|^y:P9(A)=]L8n1;wj#wAL1{0[_ +z2gj$/{8C Ű}#G v!Ҷ =le'exG>#PgOIcbx6IHLTbkim!V< o8\(ع~PO Iϩk&5X"woK h`GSK *kǡ!Apve;]oIz QBR_kY&QpoŚx0`$*s&a-!as"sM!۴Xki ĉ#d;bTCBI,җ7KP2+Pt5{G& DWT*{9Xۑ3ds[!3,0b$`?%Tu4"I-@v5[ѽt4A<$;H}mo(B8s<&EţIT;0_~V+E.QAIv 'x < L`[]Ym3%/Sc w9Z1[?ID^.lckĽnشclQ<]+4M*ߺ7%=rd +DaX MeHDfh@JO +CJ14j)7 U[Kfq}zPts@GQԨvTkO]7=C2o.Dz{@̡bݏ}[G09H +EJ\)r<( +1>KdZKg JpM<HLE_k]a[|vPƒZ3#$~9~mδLcd[P-=wcqAؔᖎ`b71Ŗ@.U +@EsWm:%yRd~0PX:Rn@͢ rDK+#& MtrW`\ZiȆƾaAn؞AdXUXr_z.ġd1:9LNԏ5o Ddf"Q׋k@@hɽQrהrjXo1 ~ڒ5"Wc\*(\+!4sb'+rZ&RS!-8kx# V-Ư/$EkBRަ<rr%ӱ*4"|tث}V0햱HE;b%Ʊ/IDBܭ "PwOҶ +b;7>a1d%i.Mc(ƶ'HTmT-o;I/扬{mY/ZbXˢ#@>&!mY@@BTkMW3[ ۂ LJ1Paj~6ݬpNk, i_#u=̌) H,BgYFK+'t;wwZpz158~`b[wA"TsS*"sϵ=- i6wdD*1rJ U<34#(J32 $"0-eys XNC^=Vo/ecAR[o@} L=(_-/3z4%+GtVg>J %9SP aw F`dU5ϠzNꜙ.5ltSY$>\RFDKx$~LYD ICAtytO1R2ȭrɹT'|լМGC \hRВ [7й$wDM#RX ʤ!"-[ft _F?GM'0&C`mxOw_ KG0zf%9qA؂ڪ8 +2==um?9o8bTֽmNAXKHC՜X <#1 ȕXiB"AVd!^?Vk-K)Vex<ɂGlsњϰktiP#,K=ˑW{O$ֈ瞴 @)#(Xln;mhx1aiD Hdh`tSو=X| +:ިxb`RM˼)OzJ1gal?L\i=]BLEUD{,\ۓM|gTҭQNڒUVlSH׋ +}l ۮ7 SmS=#"1WhAlz? 
I{ؕvt1BYsdkP_dW;v;\ 9 O xԓL@PUC1;@ȫ0F@MNl=Ճ70ߋo5e]QVhwTS nJ$}H}OGO]qCМ88wDW8Rj5?BP wuYA UdVy.eGLfw{M$l҉Z#F]*E-[fJUQʼnǯY6lA"g O)UmI`V]Hl7R*' Ij$:׆= E;\*zB[qKl9ri=P#GkP5)ŏ}#*sI.@H%Eu"V1]mI!sM?3Yu@ƶ>c7*.t?Ku|u;%s b6UT.vBVIt{@;C9.:i>qw>B :Ssl5(ֹ~~ב8`Pڮ;Bޅ_˴GQ-ԓ3#Rөx(O{}z7G('J tj"sq,JNJT{}%-U5A[YUo>̢  +קtVw&c q$1oQ*;f=ȮdR"h)1H8"d1ͰTlZ~Bd] $P]_pG&BsB39mF#s{ 4.+C3 Nǧ=u*hjP9>Q:(oH ʐe ; Bs|䠐))ye#)liJQ8ORv-)45mPBhWk ՞2 gHo*,*{ٔ2_" Bh 8utԬAWg.&5kJ>K& rΊ 7sݎ#ΖUd~Φ7mtDM:0~o\' їy0BPK◐v%OӇ2U:k% ]euA ?b2b?K,87,b9RLTWW WXjU1v7^`5{hT ]{H_@QR]d"z,HON)tcDf"P&Y\A9z}(u0F%,nдIv~!%nCFaV1L`Em*jf;RѤt &nB*5Na)ml{IAj m6A"#Yr]= +>>iR;Z-J`<ɲFj o(߈577o-'t$M F -y%^T=De54{7듽> ^5C_:+ͼA3G@R.z)ƇRCp΢2ət̅ H0mQ~ d aȻ:E$Uk_aG/Ic2r>C>fH +CryV6)܇TLYzzjcjT͓f> ʱK^&A|r/~fR?#ܴT8{kȰr][1א,ec幗2)-wMFoܪ x/D0A U!GRRe@[D]F_9Gs(>-E>F 0rҧ2}N+;b^v77F-Њ-\]_;p^%r<}7ގXDq]P*a" &i[SΤ` :742;A1mg=D3{ӂVgrETQQ +&C'۩ݙۤ ݫjzR~ #xT|QFE532uD-HMl\F$E0.=C1FdL&ARĢ+|z6hVn༳_)P BlfI6ii3T+"C6PdcLcSC?RP%**GHvg_$n HhH)ƹͶXd`_VV$|PmLfa'(+B𝢌j*YCŹ5TGi(Ap HO`ҫWuIT \Co \ an׳Q7\Řf +ne9M@(8t G*-IYǘuG.\NDmXK;Fi~=nb )L%q5i?NGjju=RX,fb>|Wibz("N3:Kb#t#1u+Gq#(An D +1(UK1I52h@ji*7ʆ_HIE"z`!j HN?ÍQa>d t"n':"ЪKyAb eN'"1l$^k(! 
f"īƈMRVWw@>MI [CY.҇I]9,EH^AC* )]Wc&܍,xZRHo$u$Gż&!rZ w`BX(ﰧ2{kJppv}uZCW/ $[ +D 0|/ο}]\~u~{7\t϶Gcx?޾Ģg}9~G;xyxxow??i'FZ~v껯 0i endstream endobj 15 0 obj <> endobj 18 0 obj <> endobj 21 0 obj <> endobj 25 0 obj <> endobj 26 0 obj [/View/Design] endobj 27 0 obj <>>> endobj 22 0 obj [/View/Design] endobj 23 0 obj <>>> endobj 19 0 obj [/View/Design] endobj 20 0 obj <>>> endobj 16 0 obj [/View/Design] endobj 17 0 obj <>>> endobj 39 0 obj [38 0 R 37 0 R 36 0 R 35 0 R] endobj 61 0 obj <> endobj xref 0 62 0000000003 65535 f +0000000016 00000 n +0000021147 00000 n +0000000004 00000 f +0000000005 00000 f +0000000013 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000117691 00000 n +0000118341 00000 n +0000118372 00000 n +0000117766 00000 n +0000118225 00000 n +0000118256 00000 n +0000117850 00000 n +0000118109 00000 n +0000118140 00000 n +0000000000 00000 f +0000117925 00000 n +0000117993 00000 n +0000118024 00000 n +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000000 00000 f +0000000251 00000 n +0000052594 00000 n +0000052669 00000 n +0000052753 00000 n +0000052828 00000 n +0000118457 00000 n +0000021199 00000 n +0000021574 00000 n +0000053473 00000 n +0000053360 00000 n +0000051557 00000 n +0000052033 00000 n +0000052081 00000 n +0000053244 00000 n +0000053275 00000 n +0000053128 00000 n +0000053159 00000 n +0000053012 00000 n +0000053043 00000 n +0000052896 00000 n +0000052927 00000 n +0000053547 00000 n +0000053755 00000 n +0000054851 00000 n +0000062476 00000 n +0000075641 00000 n +0000096091 00000 n +0000118503 00000 n +trailer <<24AEDBD9D06A452B99A853D33D1917D3>]>> startxref 118691 %%EOF \ No newline at end of file diff --git a/poster/figs/timeline.pdf b/poster/figs/timeline.pdf new file mode 100644 index 0000000..b5a6833 Binary files /dev/null and 
b/poster/figs/timeline.pdf differ diff --git a/poster/main.pdf b/poster/main.pdf index 06827b3..0ec415f 100644 Binary files a/poster/main.pdf and b/poster/main.pdf differ diff --git a/poster/main.tex b/poster/main.tex index da4bff1..c4f1818 100644 --- a/poster/main.tex +++ b/poster/main.tex @@ -1,4 +1,4 @@ -\documentclass[25pt, a0paper, landscape, margin=0mm, innermargin=20mm, +\documentclass[25pt, a0paper, portrait, margin=0mm, innermargin=20mm, blockverticalspace=2mm, colspace=20mm, subcolspace=0mm]{tikzposter} %Default values for poster format options. \input{packages} @@ -7,77 +7,98 @@ blockverticalspace=2mm, colspace=20mm, subcolspace=0mm]{tikzposter} %Default val \begin{document} \renewcommand{\baselinestretch}{1} -\title{\parbox{1900pt}{A dark template to make colorful figures pop}} -\author{Sina Prause, Alexander Wendt, Patrick Weygoldt} -\institute{Supervised by Till Raab \& Jan Benda} +\title{\parbox{1500pt}{Bypassing time-frequency uncertainty in the detection of transient communication signals in weakly electric fish}} +\author{Sina Prause, Alexander Wendt, and Patrick Weygoldt} +\institute{Supervised by Till Raab \& Jan Benda, Neuroethology Lab, University of Tuebingen} \usetitlestyle[]{sampletitle} \maketitle \renewcommand{\baselinestretch}{1.4} \begin{columns} -\column{0.3} -\myblock[TranspBlock]{Introduction}{ - \lipsum[1][1-5] +\column{0.4} +\myblock[GrayBlock]{Introduction}{ + The time-frequency tradeoff makes reliable signal detecion and simultaneous + sender identification by simple Fourier decomposition in freely interacting + weakly electric fish impossible. This profoundly limits our current + understanding of chirps to experiments + with single - or physically separated - individuals. 
+ % \begin{tikzfigure}[] + % \label{griddrawing} + % \includegraphics[width=0.8\linewidth]{figs/introplot} + % \end{tikzfigure} +} +\myblock[TranspBlock]{Chirp detection}{ \begin{tikzfigure}[] - \label{griddrawing} - \includegraphics[width=\linewidth]{example-image-a} + \label{fig:alg1} + \includegraphics[width=0.9\linewidth]{figs/algorithm1} \end{tikzfigure} -} - -\myblock[TranspBlock]{Methods}{ + \vspace{2cm} \begin{tikzfigure}[] - \label{detector} - \includegraphics[width=\linewidth]{example-image-b} + \label{fig:alg2} + \includegraphics[width=1\linewidth]{figs/algorithm} \end{tikzfigure} + \vspace{0cm} } -\column{0.4} -\myblock[TranspBlock]{Results}{ - \lipsum[3][1-5] +\column{0.6} +\myblock[TranspBlock]{Chirps during competition}{ \begin{tikzfigure}[] - \label{modulations} - \includegraphics[width=\linewidth]{example-image-c} + \label{fig:example_b} + \includegraphics[width=\linewidth]{figs/timeline.pdf} \end{tikzfigure} -} + \noindent + \begin{itemize} + \setlength\itemsep{0.5em} + \item Two fish compete for one hidding place in one tank, + \item Experiment had a 3 hour long darkphase and a 3 hour long light phase. + \end{itemize} -\myblock[TranspBlock]{More Stuff}{ - \lipsum[3][1-9] + \noindent + \begin{minipage}[c]{0.7\linewidth} + \begin{tikzfigure}[] + \label{fig:example_b} + \includegraphics[width=\linewidth]{figs/chirps_winner_loser.pdf} + \end{tikzfigure} + \end{minipage} % no space if you would like to put them side by side + \begin{minipage}[c]{0.2\linewidth} + \begin{itemize} + \setlength\itemsep{0.5em} + \item Fish who won the competition chirped more often than the fish who lost. 
+ \item + \end{itemize} + \end{minipage} } -\column{0.3} -\myblock[TranspBlock]{More Results}{ +\myblock[TranspBlock]{Interactions at modulations}{ + \vspace{-1.2cm} \begin{tikzfigure}[] - \label{results} - \includegraphics[width=\linewidth]{example-image-a} + \label{fig:example_c} + \includegraphics[width=0.5\linewidth]{example-image-c} \end{tikzfigure} - \begin{multicols}{2} - \lipsum[5][1-8] - \end{multicols} - \vspace{-1cm} + } -\myblock[TranspBlock]{Conclusion}{ +\myblock[GrayBlock]{Conclusion}{ \begin{itemize} \setlength\itemsep{0.5em} - \item \lipsum[1][1] - \item \lipsum[1][1] - \item \lipsum[1][1] + \item Our analysis is the first to indicate that \textit{A. leptorhynchus} uses long, diffuse and synchronized EOD$f$ signals to communicate in addition to chirps and rises. + \item The recorded fish do not exhibit jamming avoidance behavior while close during synchronous modulations. + \item Synchronous signals \textbf{initiate} spatio-temporal interactions. \end{itemize} \vspace{0.2cm} } -\end{columns} + \end{columns} -\node[ - above right, +\node [above right, text=white, outer sep=45pt, minimum width=\paperwidth, align=center, draw, fill=boxes, - color=boxes, -] at (-0.51\paperwidth,-43.5) { - \textcolor{text}{\normalsize Contact: name.surname@student.uni-tuebingen.de}}; + color=boxes] at (-43.6,-61) { + \textcolor{white}{ + \normalsize Contact: \{name\}.\{surname\}@student.uni-tuebingen.de}}; \end{document} diff --git a/poster/packages.tex b/poster/packages.tex index 82f951e..50d9f0e 100644 --- a/poster/packages.tex +++ b/poster/packages.tex @@ -1,11 +1,10 @@ \usepackage[utf8]{inputenc} \usepackage[scaled]{helvet} -\renewcommand\familydefault{\sfdefault} +\renewcommand\familydefault{\sfdefault} \usepackage[T1]{fontenc} \usepackage{wrapfig} \usepackage{setspace} \usepackage{multicol} \setlength{\columnsep}{1.5cm} \usepackage{xspace} -\usepackage{tikz} -\usepackage{lipsum} +\usepackage{tikz} \ No newline at end of file diff --git a/poster/style.tex 
b/poster/style.tex index ac800ce..da09710 100644 --- a/poster/style.tex +++ b/poster/style.tex @@ -16,10 +16,11 @@ \colorlet{notefgcolor}{background} \colorlet{notebgcolor}{background} + % Title setup \settitle{ % Rearrange the order of the minipages to e.g. center the title between the logos -\begin{minipage}[c]{0.6\paperwidth} +\begin{minipage}[c]{0.8\paperwidth} % \centering \vspace{2.5cm}\hspace{1.5cm} \color{text}{\Huge{\textbf{\@title}} \par} @@ -30,26 +31,28 @@ \vspace{2.5cm} \end{minipage} \begin{minipage}[c]{0.2\paperwidth} -% \centering - \vspace{1cm}\hspace{1cm} - \includegraphics[scale=1]{example-image-a} -\end{minipage} -\begin{minipage}[c]{0.2\paperwidth} -% \vspace{1cm}\hspace{1cm} \centering - \includegraphics[scale=1]{example-image-a} + % \vspace{1cm} + \hspace{-10cm} + \includegraphics[width=0.8\linewidth]{figs/efishlogo.pdf} \end{minipage}} +% \begin{minipage}[c]{0.2\paperwidth} +% \vspace{1cm}\hspace{1cm} +% \centering +% \includegraphics[width=\linewidth]{example-image-a} +% \end{minipage}} -% definie title style with background box +% define title style with background box (currently white) \definetitlestyle{sampletitle}{ - width=1189mm, + width=841mm, roundedcorners=0, linewidth=0pt, innersep=15pt, titletotopverticalspace=0mm, titletoblockverticalspace=5pt }{ - \begin{scope}[line width=\titlelinewidth, rounded corners=\titleroundedcorners] + \begin{scope}[line width=\titlelinewidth, + rounded corners=\titleroundedcorners] \draw[fill=text, color=boxes] (\titleposleft,\titleposbottom) rectangle diff --git a/poster_old/figs/Untitled.png b/poster_old/figs/Untitled.png new file mode 100644 index 0000000..3259ce2 Binary files /dev/null and b/poster_old/figs/Untitled.png differ diff --git a/poster_old/figs/algorithm.pdf b/poster_old/figs/algorithm.pdf new file mode 100644 index 0000000..2e2c453 Binary files /dev/null and b/poster_old/figs/algorithm.pdf differ diff --git a/poster_old/figs/introplot.pdf b/poster_old/figs/introplot.pdf new file mode 
100644 index 0000000..cbead3e Binary files /dev/null and b/poster_old/figs/introplot.pdf differ diff --git a/poster_old/figs/logo.png b/poster_old/figs/logo.png new file mode 100644 index 0000000..234652f Binary files /dev/null and b/poster_old/figs/logo.png differ diff --git a/poster_old/figs/logo.svg b/poster_old/figs/logo.svg new file mode 100644 index 0000000..b34ed6c --- /dev/null +++ b/poster_old/figs/logo.svg @@ -0,0 +1,1184 @@ + + + + diff --git a/poster_old/figs/placeholder1.png b/poster_old/figs/placeholder1.png new file mode 100644 index 0000000..2dc3349 Binary files /dev/null and b/poster_old/figs/placeholder1.png differ diff --git a/poster_old/main.pdf b/poster_old/main.pdf new file mode 100644 index 0000000..4c1a7c1 Binary files /dev/null and b/poster_old/main.pdf differ diff --git a/poster_old/main.tex b/poster_old/main.tex new file mode 100644 index 0000000..ca20bb3 --- /dev/null +++ b/poster_old/main.tex @@ -0,0 +1,119 @@ +\documentclass[25pt, a0paper, landscape, margin=0mm, innermargin=20mm, +blockverticalspace=2mm, colspace=20mm, subcolspace=0mm]{tikzposter} %Default values for poster format options. + +\input{packages} +\input{style} + +\begin{document} + +\renewcommand{\baselinestretch}{1} +\title{\parbox{1900pt}{Pushing the limits of time-frequency uncertainty in the +detection of transient communication signals in weakly electric fish}} +\author{Sina Prause, Alexander Wendt, Patrick Weygoldt} +\institute{Supervised by Till Raab \& Jan Benda, Neurothology Group, +University of Tübingen} +\usetitlestyle[]{sampletitle} +\maketitle +\renewcommand{\baselinestretch}{1.4} + +\begin{columns} +\column{0.5} +\myblock[TranspBlock]{Introduction}{ + \begin{minipage}[t]{0.55\linewidth} + The time-frequency tradeoff makes reliable signal detecion and simultaneous + sender identification of freely interacting individuals impossible. 
+ This profoundly limits our current understanding of chirps to experiments + with single - or physically separated - individuals. + \end{minipage} \hfill + \begin{minipage}[t]{0.40\linewidth} + \vspace{-1.5cm} + \begin{tikzfigure}[] + \label{tradeoff} + \includegraphics[width=\linewidth]{figs/introplot} + \end{tikzfigure} + \end{minipage} +} + +\myblock[TranspBlock]{A chirp detection algorithm}{ + \begin{tikzfigure}[] + \label{modulations} + \includegraphics[width=\linewidth]{figs/algorithm} + \end{tikzfigure} +} + +\column{0.5} +\myblock[TranspBlock]{Chirps and diadic competitions}{ + \begin{minipage}[t]{0.7\linewidth} + \begin{tikzfigure}[] + \label{modulations} + \includegraphics[width=\linewidth]{figs/placeholder1} + \end{tikzfigure} + \end{minipage} \hfill + \begin{minipage}[t]{0.25\linewidth} + \lipsum[3][1-3] + \end{minipage} + + \begin{minipage}[t]{0.7\linewidth} + \begin{tikzfigure}[] + \label{modulations} + \includegraphics[width=\linewidth]{figs/placeholder1} + \end{tikzfigure} + \end{minipage} \hfill + \begin{minipage}[t]{0.25\linewidth} + \lipsum[3][1-3] + \end{minipage} + + \begin{minipage}[t]{0.7\linewidth} + \begin{tikzfigure}[] + \label{modulations} + \includegraphics[width=\linewidth]{figs/placeholder1} + \end{tikzfigure} + \end{minipage} \hfill + \begin{minipage}[t]{0.25\linewidth} + \lipsum[3][1-3] + \end{minipage} + + +} + +\myblock[TranspBlock]{Conclusion}{ + \lipsum[3][1-9] +} + +% \column{0.3} +% \myblock[TranspBlock]{More Results}{ +% \begin{tikzfigure}[] +% \label{results} +% \includegraphics[width=\linewidth]{example-image-a} +% \end{tikzfigure} + +% \begin{multicols}{2} +% \lipsum[5][1-8] +% \end{multicols} +% \vspace{-1cm} +% } + +% \myblock[TranspBlock]{Conclusion}{ +% \begin{itemize} +% \setlength\itemsep{0.5em} +% \item \lipsum[1][1] +% \item \lipsum[1][1] +% \item \lipsum[1][1] +% \end{itemize} +% \vspace{0.2cm} +% } +\end{columns} + +\node[ + above right, + text=white, + outer sep=45pt, + minimum width=\paperwidth, + align=center, 
+ draw, + fill=boxes, + color=boxes, +] at (-0.51\paperwidth,-43.5) { +\textcolor{text}{\normalsize Contact: \{name\}.\{surname\}@student.uni-tuebingen.de}}; + +\end{document} diff --git a/poster_old/packages.tex b/poster_old/packages.tex new file mode 100644 index 0000000..82f951e --- /dev/null +++ b/poster_old/packages.tex @@ -0,0 +1,11 @@ +\usepackage[utf8]{inputenc} +\usepackage[scaled]{helvet} +\renewcommand\familydefault{\sfdefault} +\usepackage[T1]{fontenc} +\usepackage{wrapfig} +\usepackage{setspace} +\usepackage{multicol} +\setlength{\columnsep}{1.5cm} +\usepackage{xspace} +\usepackage{tikz} +\usepackage{lipsum} diff --git a/poster_old/style.tex b/poster_old/style.tex new file mode 100644 index 0000000..ac800ce --- /dev/null +++ b/poster_old/style.tex @@ -0,0 +1,119 @@ +\tikzposterlatexaffectionproofoff +\usetheme{Default} + +\definecolor{text}{HTML}{e0e4f7} +\definecolor{background}{HTML}{111116} +\definecolor{boxes}{HTML}{2a2a32} +\definecolor{unired}{HTML}{a51e37} + +\colorlet{blocktitlefgcolor}{text} +\colorlet{backgroundcolor}{background} +\colorlet{blocktitlebgcolor}{background} +\colorlet{blockbodyfgcolor}{text} +\colorlet{innerblocktitlebgcolor}{background} +\colorlet{innerblocktitlefgcolor}{text} +\colorlet{notefrcolor}{text} +\colorlet{notefgcolor}{background} +\colorlet{notebgcolor}{background} + +% Title setup +\settitle{ +% Rearrange the order of the minipages to e.g. 
center the title between the logos +\begin{minipage}[c]{0.6\paperwidth} +% \centering + \vspace{2.5cm}\hspace{1.5cm} + \color{text}{\Huge{\textbf{\@title}} \par} + \vspace*{2em}\hspace{1.5cm} + \color{text}{\LARGE \@author \par} + \vspace*{2em}\hspace{1.5cm} + \color{text}{\Large \@institute} + \vspace{2.5cm} +\end{minipage} +\begin{minipage}[c]{0.2\paperwidth} +% \centering + \vspace{1cm}\hspace{1cm} + \includegraphics[scale=1]{example-image-a} +\end{minipage} +\begin{minipage}[c]{0.2\paperwidth} +% \vspace{1cm}\hspace{1cm} + \centering + \includegraphics[scale=1]{example-image-a} +\end{minipage}} + +% definie title style with background box +\definetitlestyle{sampletitle}{ + width=1189mm, + roundedcorners=0, + linewidth=0pt, + innersep=15pt, + titletotopverticalspace=0mm, + titletoblockverticalspace=5pt +}{ + \begin{scope}[line width=\titlelinewidth, rounded corners=\titleroundedcorners] + \draw[fill=text, color=boxes] + (\titleposleft,\titleposbottom) + rectangle + (\titleposright,\titlepostop); + \end{scope} +} + +% define coustom block style for visible blocks +\defineblockstyle{GrayBlock}{ + titlewidthscale=1, + bodywidthscale=1, + % titlecenter, + titleleft, + titleoffsetx=0pt, + titleoffsety=-30pt, + bodyoffsetx=0pt, + bodyoffsety=-40pt, + bodyverticalshift=0mm, + roundedcorners=25, + linewidth=1pt, + titleinnersep=20pt, + bodyinnersep=38pt +}{ + \draw[rounded corners=\blockroundedcorners, inner sep=\blockbodyinnersep, + line width=\blocklinewidth, color=background, + top color=boxes, bottom color=boxes, + ] + (blockbody.south west) rectangle (blockbody.north east); % + \ifBlockHasTitle% + \draw[rounded corners=\blockroundedcorners, inner sep=\blocktitleinnersep, + top color=background, bottom color=background, + line width=2, color=background, %fill=blocktitlebgcolor + ] + (blocktitle.south west) rectangle (blocktitle.north east); % + \fi% +} +\newcommand\myblock[3][GrayBlock]{\useblockstyle{#1}\block{#2}{#3}\useblockstyle{Default}} + +% Define blockstyle 
for tranparent block +\defineblockstyle{TranspBlock}{ + titlewidthscale=0.99, + bodywidthscale=0.99, + titleleft, + titleoffsetx=15pt, + titleoffsety=-40pt, + bodyoffsetx=0pt, + bodyoffsety=-40pt, + bodyverticalshift=0mm, + roundedcorners=25, + linewidth=1pt, + titleinnersep=20pt, + bodyinnersep=38pt +}{ + \draw[rounded corners=\blockroundedcorners, inner sep=\blockbodyinnersep, + line width=\blocklinewidth, color=background, + top color=background, bottom color=background, + ] + (blockbody.south west) rectangle (blockbody.north east); % + \ifBlockHasTitle% + \draw[rounded corners=\blockroundedcorners, inner sep=\blocktitleinnersep, + top color=background, bottom color=background, + line width=2, color=background, %fill=blocktitlebgcolor + ] + (blocktitle.south west) rectangle (blocktitle.north east); % + \fi% +} +\renewcommand\myblock[3][TranspBlock]{\useblockstyle{#1}\block{#2}{#3}\useblockstyle{Default}} diff --git a/recs.csv b/recs.csv new file mode 100644 index 0000000..92b9a0a --- /dev/null +++ b/recs.csv @@ -0,0 +1,29 @@ +recording +2020-03-13-10_00 +2020-03-16-10_00 +2020-03-19-10_00 +2020-03-20-10_00 +2020-03-23-09_58 +2020-03-24-10_00 +2020-03-25-10_00 +2020-03-31-09_59 +2020-05-11-10_00 +2020-05-12-10_00 +2020-05-13-10_00 +2020-05-14-10_00 +2020-05-15-10_00 +2020-05-18-10_00 +2020-05-19-10_00 +2020-05-21-10_00 +2020-05-25-10_00 +2020-05-27-10_00 +2020-05-28-10_00 +2020-05-29-10_00 +2020-06-02-10_00 +2020-06-03-10_10 +2020-06-04-10_00 +2020-06-05-10_00 +2020-06-08-10_00 +2020-06-09-10_00 +2020-06-10-10_00 +2020-06-11-10_00