merging diverged

commit a6b7ed2c6c (parent 0b109e8c5e)
@@ -11,8 +11,88 @@ from modules.logger import makeLogger
 from modules.plotstyle import PlotStyle
 from modules.datahandling import flatten
 from modules.behaviour_handling import Behavior, correct_chasing_events, event_triggered_chirps
+from extract_chirps import get_valid_datasets

 logger = makeLogger(__name__)
 ps = PlotStyle()

-#### Goal: CTC & PTC for each winner and loser and for all winners and loser ####
+def get_chirp_winner_loser(folder_name, Behavior, order_meta_df):
+
+    foldername = folder_name.split('/')[-2]
+    winner_row = order_meta_df[order_meta_df['recording'] == foldername]
+    winner = winner_row['winner'].values[0].astype(int)
+    winner_fish1 = winner_row['fish1'].values[0].astype(int)
+    winner_fish2 = winner_row['fish2'].values[0].astype(int)
+
+    if winner > 0:
+        if winner == winner_fish1:
+            winner_fish_id = winner_row['rec_id1'].values[0]
+            loser_fish_id = winner_row['rec_id2'].values[0]
+
+        elif winner == winner_fish2:
+            winner_fish_id = winner_row['rec_id2'].values[0]
+            loser_fish_id = winner_row['rec_id1'].values[0]
+
+        chirp_winner = Behavior.chirps[Behavior.chirps_ids == winner_fish_id]
+        chirp_loser = Behavior.chirps[Behavior.chirps_ids == loser_fish_id]
+
+        return chirp_winner, chirp_loser
+    else:
+        return None, None
+
+
+def main(dataroot):
+
+    foldernames, _ = get_valid_datasets(dataroot)
+
+    meta_path = (
+        '/').join(foldernames[0].split('/')[:-2]) + '/order_meta.csv'
+    meta = pd.read_csv(meta_path)
+    meta['recording'] = meta['recording'].str[1:-1]
+
+    winner_chirps = []
+    loser_chirps = []
+    onsets = []
+    offsets = []
+    physicals = []
+
+    # Iterate over all recordings and save chirp- and event-timestamps
+    for folder in foldernames:
+
+        logger.info('Loading data from folder: {}'.format(folder))
+
+        time_before = 30
+        time_after = 60
+        dt = 0.1
+        kernel_width = 2
+        kde_time = np.arange(-time_before, time_after, dt)
+
+        broken_folders = ['../data/mount_data/2020-05-12-10_00/']
+        if folder in broken_folders:
+            continue
+
+        bh = Behavior(folder)
+        winner, loser = get_chirp_winner_loser(folder, bh, meta)
+
+        if winner is None:
+            continue
+
+        # Chirps are already sorted
+        winner_chirps.append(bh.chirps)
+        loser_chirps.append(bh.chirps)
+
+        # Correct for doubles in chasing on- and offsets to get the right on-/offset pairs
+        # Get rid of tracking faults (two onsets or two offsets after another)
+        category, timestamps = correct_chasing_events(bh.behavior, bh.start_s)
+
+        # Split categories
+        onsets.append(timestamps[category == 0])
+        offsets.append(timestamps[category == 1])
+        physicals.append(timestamps[category == 2])
+
+    # center chirps around events
+
+
+if __name__ == '__main__':
+    main('../data/mount_data/')
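The new script ends at the `# center chirps around events` placeholder. A minimal sketch of that missing step, reusing the imported `event_triggered_chirps` helper at that point inside `main()` (the list names `onset_rates` and `mean_onset_rate` are assumptions, not part of the commit):

    # Sketch only: onset-centered chirp rates, one curve per recording.
    onset_rates = []
    for rec_chirps, rec_onsets in zip(winner_chirps, onsets):
        _, _, rate = event_triggered_chirps(
            rec_onsets, rec_chirps, time_before, time_after, dt, kernel_width)
        onset_rates.append(rate)
    mean_onset_rate = np.mean(onset_rates, axis=0)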
@@ -14,6 +14,7 @@ from modules.datahandling import causal_kde1d, acausal_kde1d, flatten
 logger = makeLogger(__name__)
 ps = PlotStyle()

+
 class Behavior:
     """Load behavior data from csv file as class attributes
     Attributes
@@ -35,12 +36,17 @@ class Behavior:

     def __init__(self, folder_path: str) -> None:
         print(f'{folder_path}')
-        LED_on_time_BORIS = np.load(os.path.join(folder_path, 'LED_on_time.npy'), allow_pickle=True)
-        self.time = np.load(os.path.join(folder_path, "times.npy"), allow_pickle=True)
-        csv_filename = [f for f in os.listdir(folder_path) if f.endswith('.csv')][0]  # check if there are more than one csv file
+        LED_on_time_BORIS = np.load(os.path.join(
+            folder_path, 'LED_on_time.npy'), allow_pickle=True)
+        self.time = np.load(os.path.join(
+            folder_path, "times.npy"), allow_pickle=True)
+        csv_filename = [f for f in os.listdir(folder_path) if f.endswith(
+            '.csv')][0]  # check if there are more than one csv file
         self.dataframe = read_csv(os.path.join(folder_path, csv_filename))
-        self.chirps = np.load(os.path.join(folder_path, 'chirps.npy'), allow_pickle=True)
-        self.chirps_ids = np.load(os.path.join(folder_path, 'chirp_ids.npy'), allow_pickle=True)
+        self.chirps = np.load(os.path.join(
+            folder_path, 'chirps.npy'), allow_pickle=True)
+        self.chirps_ids = np.load(os.path.join(
+            folder_path, 'chirp_ids.npy'), allow_pickle=True)

         for k, key in enumerate(self.dataframe.keys()):
             key = key.lower()
@@ -49,7 +55,8 @@ class Behavior:
             if '(' in key:
                 key = key.replace('(', '')
                 key = key.replace(')', '')
-            setattr(self, key, np.array(self.dataframe[self.dataframe.keys()[k]]))
+            setattr(self, key, np.array(
+                self.dataframe[self.dataframe.keys()[k]]))

         last_LED_t_BORIS = LED_on_time_BORIS[-1]
         real_time_range = self.time[-1] - self.time[0]
@@ -58,6 +65,7 @@ class Behavior:
         self.start_s = (self.start_s - shift) / factor
         self.stop_s = (self.stop_s - shift) / factor

+
 """
 1 - chasing onset
 2 - chasing offset
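The `shift`/`factor` rescaling above maps the BORIS annotation times (frame-based, not exactly 25 FPS) onto the recording clock. The committed definitions of `shift` and `factor` lie outside this hunk; one plausible definition, purely for orientation and not taken from the repository, would be:

        # Assumption, not the committed code: align on the first LED flash and
        # rescale by the ratio of the BORIS LED time span to the real time span.
        shift = LED_on_time_BORIS[0]
        factor = (last_LED_t_BORIS - LED_on_time_BORIS[0]) / real_time_range
        # With these, (self.start_s - shift) / factor converts BORIS seconds
        # into seconds on the recording clock.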
@@ -89,14 +97,15 @@ temporal encoding needs to be corrected ... not exactly 25FPS.
 def correct_chasing_events(
     category: np.ndarray,
     timestamps: np.ndarray
 ) -> tuple[np.ndarray, np.ndarray]:

     onset_ids = np.arange(
         len(category))[category == 0]
     offset_ids = np.arange(
         len(category))[category == 1]

-    wrong_bh = np.arange(len(category))[category!=2][:-1][np.diff(category[category!=2])==0]
+    wrong_bh = np.arange(len(category))[
+        category != 2][:-1][np.diff(category[category != 2]) == 0]
     if onset_ids[0] > offset_ids[0]:
         offset_ids = np.delete(offset_ids, 0)
         help_index = offset_ids[0]
@@ -105,7 +114,6 @@ def correct_chasing_events(
     category = np.delete(category, wrong_bh)
     timestamps = np.delete(timestamps, wrong_bh)
-

     # Check whether on- or offset is longer and calculate length difference
     if len(onset_ids) > len(offset_ids):
         len_diff = len(onset_ids) - len(offset_ids)
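A toy illustration of what this cleanup does (made-up arrays, not from the data): a doubled onset or a doubled offset is treated as a tracking fault, and the first event of the doubled pair is dropped so that onsets and offsets alternate again.

# Illustration only: 0 = chasing onset, 1 = chasing offset, 2 = physical contact.
category = np.array([0, 0, 1, 2, 0, 1, 1])
timestamps = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
category, timestamps = correct_chasing_events(category, timestamps)
# The doubled onset at t=1.0 and the doubled offset at t=6.0 are flagged by
# wrong_bh and removed, leaving alternating on-/offset pairs plus the contact.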
@@ -121,15 +129,16 @@ def correct_chasing_events(

 def event_triggered_chirps(
     event: np.ndarray,
-    chirps:np.ndarray,
+    chirps: np.ndarray,
     time_before_event: int,
     time_after_event: int,
     dt: float,
     width: float,
-)-> tuple[np.ndarray, np.ndarray, np.ndarray]:
+) -> tuple[np.ndarray, np.ndarray, np.ndarray]:

     event_chirps = []  # chirps that are in specified window around event
-    centered_chirps = []  # timestamps of chirps around event centered on the event timepoint
+    # timestamps of chirps around event centered on the event timepoint
+    centered_chirps = []

     for event_timestamp in event:
         start = event_timestamp - time_before_event
@@ -148,15 +157,18 @@ def event_triggered_chirps(
         centered_chirps = np.array([])
         centered_chirps_convolved = np.zeros(len(time))
     else:
-        centered_chirps = np.concatenate(centered_chirps, axis=0)  # convert list of arrays to one array for plotting
-        centered_chirps_convolved = (acausal_kde1d(centered_chirps, time, width)) / len(event)
+        # convert list of arrays to one array for plotting
+        centered_chirps = np.concatenate(centered_chirps, axis=0)
+        centered_chirps_convolved = (acausal_kde1d(
+            centered_chirps, time, width)) / len(event)

     return event_chirps, centered_chirps, centered_chirps_convolved


 def main(datapath: str):

-    foldernames = [datapath + x + '/' for x in os.listdir(datapath) if os.path.isdir(datapath + x)]
+    foldernames = [
+        datapath + x + '/' for x in os.listdir(datapath) if os.path.isdir(datapath + x)]

     nrecording_chirps = []
     nrecording_chirps_fish_ids = []
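For orientation, a call to the helper defined above for a single recording could look like this (not part of the diff; the first two arguments are arrays of event and chirp times in seconds):

# Illustrative call only: chirps falling 30 s before to 60 s after each chasing
# onset, their onset-centered times, and a kernel-density chirp rate per event.
window_chirps, centered, rate = event_triggered_chirps(
    chasing_onsets, chirps, time_before_event=30,
    time_after_event=60, dt=0.01, width=1.5)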
@@ -193,14 +205,12 @@ def main(datapath: str):
         physical_contacts = timestamps[category == 2]
         nrecording_physicals.append(physical_contacts)

-
-
     # Define time window for chirps around event analysis
     time_before_event = 30
     time_after_event = 60
     dt = 0.01
     width = 1.5  # width of kernel for all recordings, currently gaussian kernel
     recording_width = 2  # width of kernel for each recording
     time = np.arange(-time_before_event, time_after_event, dt)

     ##### Chirps around events, all fish, all recordings #####
@@ -222,14 +232,18 @@ def main(datapath: str):
         physical_contacts = nrecording_physicals[i]

         # Chirps around chasing onsets
-        _, centered_chasing_onset_chirps, cc_chasing_onset_chirps = event_triggered_chirps(chasing_onsets, chirps, time_before_event, time_after_event, dt, recording_width)
+        _, centered_chasing_onset_chirps, cc_chasing_onset_chirps = event_triggered_chirps(
+            chasing_onsets, chirps, time_before_event, time_after_event, dt, recording_width)
         # Chirps around chasing offsets
-        _, centered_chasing_offset_chirps, cc_chasing_offset_chirps = event_triggered_chirps(chasing_offsets, chirps, time_before_event, time_after_event, dt, recording_width)
+        _, centered_chasing_offset_chirps, cc_chasing_offset_chirps = event_triggered_chirps(
+            chasing_offsets, chirps, time_before_event, time_after_event, dt, recording_width)
         # Chirps around physical contacts
-        _, centered_physical_chirps, cc_physical_chirps = event_triggered_chirps(physical_contacts, chirps, time_before_event, time_after_event, dt, recording_width)
+        _, centered_physical_chirps, cc_physical_chirps = event_triggered_chirps(
+            physical_contacts, chirps, time_before_event, time_after_event, dt, recording_width)

         nrecording_centered_onset_chirps.append(centered_chasing_onset_chirps)
-        nrecording_centered_offset_chirps.append(centered_chasing_offset_chirps)
+        nrecording_centered_offset_chirps.append(
+            centered_chasing_offset_chirps)
         nrecording_centered_physical_chirps.append(centered_physical_chirps)

         ## Shuffled chirps ##
@@ -252,7 +266,6 @@ def main(datapath: str):
         # _, _, cc_shuffled_physical_chirps = event_triggered_chirps(physical_contacts, shuffled_chirps, time_before_event, time_after_event, dt, recording_width)
         # nshuffled_physical_chirps.append(cc_shuffled_physical_chirps)
-

         # rec_shuffled_q5_onset, rec_shuffled_median_onset, rec_shuffled_q95_onset = np.percentile(
         #     nshuffled_onset_chirps, (5, 50, 95), axis=0)
         # rec_shuffled_q5_offset, rec_shuffled_median_offset, rec_shuffled_q95_offset = np.percentile(
@@ -260,7 +273,6 @@ def main(datapath: str):
         # rec_shuffled_q5_physical, rec_shuffled_median_physical, rec_shuffled_q95_physical = np.percentile(
         #     nshuffled_physical_chirps, (5, 50, 95), axis=0)
-

         # #### Recording plots ####
         # fig, ax = plt.subplots(1, 3, figsize=(28*ps.cm, 16*ps.cm, ), constrained_layout=True, sharey='all')
         # ax[0].set_xlabel('Time[s]')
@@ -319,9 +331,12 @@ def main(datapath: str):

     # New bootstrapping approach
     for n in range(nbootstrapping):
-        diff_onset = np.diff(np.sort(flatten(nrecording_centered_onset_chirps)))
-        diff_offset = np.diff(np.sort(flatten(nrecording_centered_offset_chirps)))
-        diff_physical = np.diff(np.sort(flatten(nrecording_centered_physical_chirps)))
+        diff_onset = np.diff(
+            np.sort(flatten(nrecording_centered_onset_chirps)))
+        diff_offset = np.diff(
+            np.sort(flatten(nrecording_centered_offset_chirps)))
+        diff_physical = np.diff(
+            np.sort(flatten(nrecording_centered_physical_chirps)))

         np.random.shuffle(diff_onset)
         shuffled_onset = np.cumsum(diff_onset)
@@ -339,10 +354,12 @@ def main(datapath: str):
         bootstrap_physical.append(kde_physical)

     # New shuffle approach q5, q50, q95
-    onset_q5, onset_median, onset_q95 = np.percentile(bootstrap_onset, [5, 50, 95], axis=0)
-    offset_q5, offset_median, offset_q95 = np.percentile(bootstrap_offset, [5, 50, 95], axis=0)
-    physical_q5, physical_median, physical_q95 = np.percentile(bootstrap_physical, [5, 50, 95], axis=0)
+    onset_q5, onset_median, onset_q95 = np.percentile(
+        bootstrap_onset, [5, 50, 95], axis=0)
+    offset_q5, offset_median, offset_q95 = np.percentile(
+        bootstrap_offset, [5, 50, 95], axis=0)
+    physical_q5, physical_median, physical_q95 = np.percentile(
+        bootstrap_physical, [5, 50, 95], axis=0)

     # vstack to cut the first dimension
     # nrecording_shuffled_convolved_onset_chirps = np.vstack(nrecording_shuffled_convolved_onset_chirps)
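The bootstrap loop above builds the null band plotted later: sorting all event-centered chirp times, shuffling their successive differences, and taking the cumulative sum yields surrogate chirp trains with the same inter-chirp-interval statistics but no relation to the events. Condensed to one event type, and with the KDE/normalization step (which is not visible in these hunks) treated as an assumption:

# Sketch of the interval-shuffling surrogate for onset-triggered chirps
# (illustration only; nbootstrapping and the exact KDE call are assumptions).
bootstrap_onset = []
for n in range(nbootstrapping):
    diff_onset = np.diff(np.sort(flatten(nrecording_centered_onset_chirps)))
    np.random.shuffle(diff_onset)
    shuffled_onset = np.cumsum(diff_onset)
    bootstrap_onset.append(acausal_kde1d(shuffled_onset, time, width))
onset_q5, onset_median, onset_q95 = np.percentile(
    bootstrap_onset, [5, 50, 95], axis=0)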
@@ -360,23 +377,33 @@ def main(datapath: str):
     all_chirps = np.concatenate(nrecording_chirps).ravel()  # not centered

     # Flatten event timestamps
-    all_onsets = np.concatenate(nrecording_chasing_onsets).ravel()  # not centered
-    all_offsets = np.concatenate(nrecording_chasing_offsets).ravel()  # not centered
-    all_physicals = np.concatenate(nrecording_physicals).ravel()  # not centered
+    all_onsets = np.concatenate(
+        nrecording_chasing_onsets).ravel()  # not centered
+    all_offsets = np.concatenate(
+        nrecording_chasing_offsets).ravel()  # not centered
+    all_physicals = np.concatenate(
+        nrecording_physicals).ravel()  # not centered

     # Flatten all chirps around events
-    all_onset_chirps = np.concatenate(nrecording_centered_onset_chirps).ravel()  # centered
-    all_offset_chirps = np.concatenate(nrecording_centered_offset_chirps).ravel()  # centered
-    all_physical_chirps = np.concatenate(nrecording_centered_physical_chirps).ravel()  # centered
+    all_onset_chirps = np.concatenate(
+        nrecording_centered_onset_chirps).ravel()  # centered
+    all_offset_chirps = np.concatenate(
+        nrecording_centered_offset_chirps).ravel()  # centered
+    all_physical_chirps = np.concatenate(
+        nrecording_centered_physical_chirps).ravel()  # centered

     # Convolute all chirps
     # Divide by total number of each event over all recordings
-    all_onset_chirps_convolved = (acausal_kde1d(all_onset_chirps, time, width)) / len(all_onsets)
-    all_offset_chirps_convolved = (acausal_kde1d(all_offset_chirps, time, width)) / len(all_offsets)
-    all_physical_chirps_convolved = (acausal_kde1d(all_physical_chirps, time, width)) / len(all_physicals)
+    all_onset_chirps_convolved = (acausal_kde1d(
+        all_onset_chirps, time, width)) / len(all_onsets)
+    all_offset_chirps_convolved = (acausal_kde1d(
+        all_offset_chirps, time, width)) / len(all_offsets)
+    all_physical_chirps_convolved = (acausal_kde1d(
+        all_physical_chirps, time, width)) / len(all_physicals)

     # Plot all events with all shuffled
-    fig, ax = plt.subplots(1, 3, figsize=(28*ps.cm, 16*ps.cm, ), constrained_layout=True, sharey='all')
+    fig, ax = plt.subplots(1, 3, figsize=(
+        28*ps.cm, 16*ps.cm, ), constrained_layout=True, sharey='all')
     # offsets = np.arange(1,28,1)
     ax[0].set_xlabel('Time[s]')

@@ -384,8 +411,10 @@ def main(datapath: str):
     ax[0].set_ylabel('Chirp rate [Hz]')
     ax[0].plot(time, all_onset_chirps_convolved, color=ps.yellow, zorder=2)
     ax0 = ax[0].twinx()
-    nrecording_centered_onset_chirps = np.asarray(nrecording_centered_onset_chirps, dtype=object)
-    ax0.eventplot(np.array(nrecording_centered_onset_chirps), linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1)
+    nrecording_centered_onset_chirps = np.asarray(
+        nrecording_centered_onset_chirps, dtype=object)
+    ax0.eventplot(np.array(nrecording_centered_onset_chirps),
+                  linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1)
     ax0.vlines(0, 0, 1.5, ps.white, 'dashed')
     ax[0].set_zorder(ax0.get_zorder()+1)
     ax[0].patch.set_visible(False)
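The twin-axis block above (repeated for each panel below) overlays a raster of the individual event-centered chirp times behind the convolved rate: `eventplot` draws on a second y-axis, and the zorder/patch calls lift the rate curve above it without hiding the raster. A stripped-down version of the same pattern, purely for illustration (axis name `raster_ax` and the plain color string are assumptions):

# Minimal sketch of the rate-over-raster overlay used in each panel.
fig, ax = plt.subplots()
ax.plot(time, all_onset_chirps_convolved, zorder=2)   # chirp rate on the left axis
raster_ax = ax.twinx()                                # second y-axis for the raster
raster_ax.eventplot(np.array(nrecording_centered_onset_chirps),
                    linelengths=0.5, colors='gray', alpha=0.25, zorder=1)
ax.set_zorder(raster_ax.get_zorder() + 1)             # draw the rate on top ...
ax.patch.set_visible(False)                           # ... while keeping the raster visible
raster_ax.set_yticks([])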
@@ -400,8 +429,10 @@ def main(datapath: str):
     ax[1].set_xlabel('Time[s]')
     ax[1].plot(time, all_offset_chirps_convolved, color=ps.orange, zorder=2)
     ax1 = ax[1].twinx()
-    nrecording_centered_offset_chirps = np.asarray(nrecording_centered_offset_chirps, dtype=object)
-    ax1.eventplot(np.array(nrecording_centered_offset_chirps), linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1)
+    nrecording_centered_offset_chirps = np.asarray(
+        nrecording_centered_offset_chirps, dtype=object)
+    ax1.eventplot(np.array(nrecording_centered_offset_chirps),
+                  linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1)
     ax1.vlines(0, 0, 1.5, ps.white, 'dashed')
     ax[1].set_zorder(ax1.get_zorder()+1)
     ax[1].patch.set_visible(False)
@@ -416,8 +447,10 @@ def main(datapath: str):
     ax[2].set_xlabel('Time[s]')
     ax[2].plot(time, all_physical_chirps_convolved, color=ps.maroon, zorder=2)
     ax2 = ax[2].twinx()
-    nrecording_centered_physical_chirps = np.asarray(nrecording_centered_physical_chirps, dtype=object)
-    ax2.eventplot(np.array(nrecording_centered_physical_chirps), linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1)
+    nrecording_centered_physical_chirps = np.asarray(
+        nrecording_centered_physical_chirps, dtype=object)
+    ax2.eventplot(np.array(nrecording_centered_physical_chirps),
+                  linelengths=0.5, colors=ps.gray, alpha=0.25, zorder=1)
     ax2.vlines(0, 0, 1.5, ps.white, 'dashed')
     ax[2].set_zorder(ax2.get_zorder()+1)
     ax[2].patch.set_visible(False)
@@ -425,7 +458,8 @@ def main(datapath: str):
     ax2.set_yticks([])
     # ax[2].fill_between(time, shuffled_q5_physical, shuffled_q95_physical, color=ps.gray, alpha=0.5)
     # ax[2].plot(time, shuffled_median_physical, ps.black)
-    ax[2].fill_between(time, physical_q5, physical_q95, color=ps.gray, alpha=0.5)
+    ax[2].fill_between(time, physical_q5, physical_q95,
+                       color=ps.gray, alpha=0.5)
     ax[2].plot(time, physical_median, ps.black)
     fig.suptitle('All recordings')
     plt.show()
@@ -444,7 +478,6 @@ def main(datapath: str):
     # plt.show()
     # plt.close()

-
     # # Associate chirps to individual fish
     # fish1 = chirps[chirps_fish_ids == fish_ids[0]]
     # fish2 = chirps[chirps_fish_ids == fish_ids[1]]
@@ -453,7 +486,6 @@ def main(datapath: str):
     # Convolution over all recordings
     # Rasterplot for each recording

-
     # #### Chirps around events, winner VS loser, one recording ####
     # # Load file with fish ids and winner/loser info
     # meta = pd.read_csv('../data/mount_data/order_meta.csv')
@@ -547,7 +579,6 @@ def main(datapath: str):
     # plt.show()
     # plt.close()

-
     # for i in range(len(fish_ids)):
     # fish = fish_ids[i]
     # chirps_temp = chirps[chirps_fish_ids == fish]
@@ -556,7 +587,6 @@ def main(datapath: str):
     #### Chirps around events, only losers, one recording ####


-

 if __name__ == '__main__':
     # Path to the data
     datapath = '../data/mount_data/'
@@ -7,21 +7,12 @@ from IPython import embed
 # check rec ../data/mount_data/2020-03-25-10_00/ starting at 3175


-def main(datapaths):
-
-    for path in datapaths:
-        chirpdetection(path, plot='show')
-
-
-if __name__ == '__main__':
-
-    dataroot = '../data/mount_data/'
-
+def get_valid_datasets(dataroot):

     datasets = sorted([name for name in os.listdir(dataroot) if os.path.isdir(
         os.path.join(dataroot, name))])

     valid_datasets = []

     for dataset in datasets:

         path = os.path.join(dataroot, dataset)
@@ -43,9 +34,25 @@ if __name__ == '__main__':
     datapaths = [os.path.join(dataroot, dataset) +
                  '/' for dataset in valid_datasets]

+    return datapaths, valid_datasets
+
+
+def main(datapaths):
+
+    for path in datapaths:
+        chirpdetection(path, plot='show')
+
+
+if __name__ == '__main__':
+
+    dataroot = '../data/mount_data/'
+
+    datapaths, valid_datasets = get_valid_datasets(dataroot)
+
     recs = pd.DataFrame(columns=['recording'], data=valid_datasets)
     recs.to_csv('../recs.csv', index=False)
-    datapaths = ['../data/mount_data/2020-03-25-10_00/']
+    # datapaths = ['../data/mount_data/2020-03-25-10_00/']
     main(datapaths)

 # window 1524 + 244 in dataset index 4 is nice example
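With this refactor other scripts can reuse the dataset discovery instead of re-listing the directory; the new plotting script above already calls `foldernames, _ = get_valid_datasets(dataroot)`. A small usage sketch (return values shown are illustrative, using a folder name that appears elsewhere in this commit):

# Illustration only: discover valid recordings under the data root.
from extract_chirps import get_valid_datasets

datapaths, valid_datasets = get_valid_datasets('../data/mount_data/')
# datapaths      -> e.g. ['../data/mount_data/2020-03-25-10_00/', ...]  (trailing slash)
# valid_datasets -> e.g. ['2020-03-25-10_00', ...]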
@@ -2,7 +2,6 @@ import numpy as np

 import os

-import numpy as np
 from IPython import embed


@@ -35,20 +34,33 @@ class Behavior:

     def __init__(self, folder_path: str) -> None:

-        LED_on_time_BORIS = np.load(os.path.join(folder_path, 'LED_on_time.npy'), allow_pickle=True)
-        csv_filename = [f for f in os.listdir(folder_path) if f.endswith('.csv')][0]
+        LED_on_time_BORIS = np.load(os.path.join(
+            folder_path, 'LED_on_time.npy'), allow_pickle=True)
+
+        csv_filename = os.path.split(folder_path[:-1])[-1]
+        csv_filename = '-'.join(csv_filename.split('-')[:-1]) + '.csv'
+        # embed()
+
+        # csv_filename = [f for f in os.listdir(
+        #     folder_path) if f.endswith('.csv')][0]
         logger.info(f'CSV file: {csv_filename}')
         self.dataframe = read_csv(os.path.join(folder_path, csv_filename))

-        self.chirps = np.load(os.path.join(folder_path, 'chirps.npy'), allow_pickle=True)
-        self.chirps_ids = np.load(os.path.join(folder_path, 'chirp_ids.npy'), allow_pickle=True)
-        self.ident = np.load(os.path.join(folder_path, 'ident_v.npy'), allow_pickle=True)
-        self.idx = np.load(os.path.join(folder_path, 'idx_v.npy'), allow_pickle=True)
-        self.freq = np.load(os.path.join(folder_path, 'fund_v.npy'), allow_pickle=True)
-        self.time = np.load(os.path.join(folder_path, "times.npy"), allow_pickle=True)
-        self.spec = np.load(os.path.join(folder_path, "spec.npy"), allow_pickle=True)
+        self.chirps = np.load(os.path.join(
+            folder_path, 'chirps.npy'), allow_pickle=True)
+        self.chirps_ids = np.load(os.path.join(
+            folder_path, 'chirp_ids.npy'), allow_pickle=True)
+        self.ident = np.load(os.path.join(
+            folder_path, 'ident_v.npy'), allow_pickle=True)
+        self.idx = np.load(os.path.join(
+            folder_path, 'idx_v.npy'), allow_pickle=True)
+        self.freq = np.load(os.path.join(
+            folder_path, 'fund_v.npy'), allow_pickle=True)
+        self.time = np.load(os.path.join(
+            folder_path, "times.npy"), allow_pickle=True)
+        self.spec = np.load(os.path.join(
+            folder_path, "spec.npy"), allow_pickle=True)

         for k, key in enumerate(self.dataframe.keys()):
             key = key.lower()
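The CSV filename is now derived from the recording folder name instead of taking the first `.csv` found in the folder. A worked example of that string handling (the folder name is one mentioned elsewhere in this commit; the intermediate variable `name` is only for illustration):

# Illustration of the new filename derivation:
folder_path = '../data/mount_data/2020-03-25-10_00/'
name = os.path.split(folder_path[:-1])[-1]               # '2020-03-25-10_00'
csv_filename = '-'.join(name.split('-')[:-1]) + '.csv'   # '2020-03-25.csv'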
@@ -57,7 +69,8 @@ class Behavior:
             if '(' in key:
                 key = key.replace('(', '')
                 key = key.replace(')', '')
-            setattr(self, key, np.array(self.dataframe[self.dataframe.keys()[k]]))
+            setattr(self, key, np.array(
+                self.dataframe[self.dataframe.keys()[k]]))

         last_LED_t_BORIS = LED_on_time_BORIS[-1]
         real_time_range = self.time[-1] - self.time[0]
@@ -70,14 +83,15 @@ class Behavior:
 def correct_chasing_events(
     category: np.ndarray,
     timestamps: np.ndarray
 ) -> tuple[np.ndarray, np.ndarray]:

     onset_ids = np.arange(
         len(category))[category == 0]
     offset_ids = np.arange(
         len(category))[category == 1]

-    wrong_bh = np.arange(len(category))[category!=2][:-1][np.diff(category[category!=2])==0]
+    wrong_bh = np.arange(len(category))[
+        category != 2][:-1][np.diff(category[category != 2]) == 0]
     if onset_ids[0] > offset_ids[0]:
         offset_ids = np.delete(offset_ids, 0)
         help_index = offset_ids[0]
@@ -101,15 +115,16 @@ def correct_chasing_events(

 def event_triggered_chirps(
     event: np.ndarray,
-    chirps:np.ndarray,
+    chirps: np.ndarray,
     time_before_event: int,
     time_after_event: int,
     dt: float,
     width: float,
-)-> tuple[np.ndarray, np.ndarray, np.ndarray]:
+) -> tuple[np.ndarray, np.ndarray, np.ndarray]:

     event_chirps = []  # chirps that are in specified window around event
-    centered_chirps = []  # timestamps of chirps around event centered on the event timepoint
+    # timestamps of chirps around event centered on the event timepoint
+    centered_chirps = []

     for event_timestamp in event:
         start = event_timestamp - time_before_event
@@ -128,8 +143,9 @@ def event_triggered_chirps(
         centered_chirps = np.array([])
         centered_chirps_convolved = np.zeros(len(time))
     else:
-        centered_chirps = np.concatenate(centered_chirps, axis=0)  # convert list of arrays to one array for plotting
-        centered_chirps_convolved = (acausal_kde1d(centered_chirps, time, width)) / len(event)
+        # convert list of arrays to one array for plotting
+        centered_chirps = np.concatenate(centered_chirps, axis=0)
+        centered_chirps_convolved = (acausal_kde1d(
+            centered_chirps, time, width)) / len(event)

     return event_chirps, centered_chirps, centered_chirps_convolved
BIN poster/main.pdf (binary file not shown)
@@ -21,10 +21,10 @@ blockverticalspace=2mm, colspace=20mm, subcolspace=0mm]{tikzposter} %Default values
 sender identification of freely interacting individuals impossible.
 This profoundly limits our current understanding of chirps to experiments
 with single - or physically separated - individuals.
-% \begin{tikzfigure}[]
-% \label{griddrawing}
-% \includegraphics[width=1\linewidth]{figs/introplot}
-% \end{tikzfigure}
+\begin{tikzfigure}[]
+\label{griddrawing}
+\includegraphics[width=1\linewidth]{figs/introplot}
+\end{tikzfigure}
 }
 \myblock[TranspBlock]{Chirp detection}{
 \begin{tikzfigure}[]