Seriously, no idea. Wild amount of changes. Good luck.

This commit is contained in:
j-hartling
2026-04-17 17:19:30 +02:00
parent 36ac504efa
commit 3b4b7f2161
40 changed files with 2067 additions and 672 deletions

View File

@@ -0,0 +1,52 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
search_path = '../data/inv/full/'
save_path = '../data/inv/full/collected/'
# EXECUTION:
for i, species in enumerate(target_species):
print(f'Processing {species}')
# Fetch all species-specific song files:
all_paths = search_files(species, ext='npz', dir=search_path)
if not all_paths:
continue
# Run through files:
for j, path in enumerate(all_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
if j == 0:
# Prepare species-specific storage:
species_data = dict(scales=data['scales'])
for stage in stages:
mkey = f'measure_{stage}'
shape = data[mkey].shape + (len(all_paths),)
species_data[mkey] = np.zeros(shape, dtype=float)
# Log file data:
for stage in stages:
mkey = f'measure_{stage}'
species_data[mkey][..., j] = data[mkey]
# Save collected file data:
save_name = save_path + species
save_data(save_name, species_data, config, overwrite=True)
print('Done.')
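Note: each collected archive stores the shared 'scales' axis plus one 'measure_<stage>' array per stage, with the per-file measures stacked along a new last axis. Below is a minimal read-back sketch, assuming load_data returns a (data, config) pair as used above and that save_data writes '<save_path><species>.npz' (path, extension, and the averaging line are illustrative assumptions, not part of this commit):

from thunderhopper.modeltools import load_data

# Hypothetical read of a collected species archive (path/extension assumed):
data, config = load_data('../data/inv/full/collected/Chorthippus_biguttulus.npz',
                         'scales', 'measure')
scales = data['scales']            # shared scale axis
feat = data['measure_feat']        # original measure shape + (n_files,) as last axis
mean_feat = feat.mean(axis=-1)     # average across song files (illustrative only)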

View File

@@ -0,0 +1,46 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
search_path = '../data/inv/log_hp/'
save_path = '../data/inv/log_hp/collected/'
# EXECUTION:
for i, species in enumerate(target_species):
print(f'Processing {species}')
# Fetch all species-specific song files:
all_paths = search_files(species, incl='noise', ext='npz', dir=search_path)
if not all_paths:
continue
# Run through files:
for j, path in enumerate(all_paths):
# Load invariance data:
data, config = load_data(path, ['scales', 'measure_inv'])
scales, measure = data['scales'], data['measure_inv']
if j == 0:
# Prepare species-specific storage:
spec_data = np.zeros(measure.shape + (len(all_paths),), dtype=float)
# Log file data:
spec_data[..., j] = measure
# Save collected file data:
save_name = save_path + species
archive = dict(scales=scales, measure_inv=spec_data)
save_data(save_name, archive, config, overwrite=True)
print('Done.')

View File

@@ -0,0 +1,52 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
stages = ['filt', 'env', 'conv', 'feat']
search_path = '../data/inv/short/'
save_path = '../data/inv/short/collected/'
# EXECUTION:
for i, species in enumerate(target_species):
print(f'Processing {species}')
# Fetch all species-specific song files:
all_paths = search_files(species, ext='npz', dir=search_path)
if not all_paths:
continue
# Run through files:
for j, path in enumerate(all_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
if j == 0:
# Prepare species-specific storage:
species_data = dict(scales=data['scales'])
for stage in stages:
mkey = f'measure_{stage}'
shape = data[mkey].shape + (len(all_paths),)
species_data[mkey] = np.zeros(shape, dtype=float)
# Log file data:
for stage in stages:
mkey = f'measure_{stage}'
species_data[mkey][..., j] = data[mkey]
# Save collected file data:
save_name = save_path + species
save_data(save_name, species_data, config, overwrite=True)
print('Done.')

View File

@@ -0,0 +1,52 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
search_path = '../data/inv/thresh_lp/'
save_path = '../data/inv/thresh_lp/collected/'
# ANALYSIS SETTINGS:
with_noise = False
# EXECUTION:
for i, species in enumerate(target_species):
print(f'Processing {species}')
# Fetch all species-specific song files:
incl = 'noise' if with_noise else 'pure'
all_paths = search_files(species, incl=incl, ext='npz', dir=search_path)
if not all_paths:
continue
# Run through files:
for j, path in enumerate(all_paths):
# Load invariance data:
data, config = load_data(path, ['scales', 'measure_feat', 'thresh_rel'])
measure = data['measure_feat']
if j == 0:
# Prepare species-specific storage:
spec_data = np.zeros(measure.shape + (len(all_paths),), dtype=float)
# Log file data:
spec_data[..., j] = measure
# Save collected file data:
save_name = save_path + species + f'_{incl}'
archive = dict(
scales=data['scales'],
measure_feat=spec_data,
thresh_rel=data['thresh_rel'])
save_data(save_name, archive, config)
print('Done.')

View File

@@ -0,0 +1,87 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
sources = [
'BM04',
'BM93',
'DJN',
'GBC',
'FTN'
]
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
search_path = '../data/inv/full/'
save_path = '../data/inv/full/condensed/'
# ANALYSIS SETTINGS:
compute_ratios = False
# EXECUTION:
for i, species in enumerate(target_species):
print(f'Processing {species}')
# Fetch all species-specific song files:
all_paths = search_files(species, ext='npz', dir=search_path)
if not all_paths:
continue
# Sort song files by recording (one or more per source):
sorted_paths = sort_files_by_rec(all_paths, sources)
# Condense across song files per recording:
for j, rec_paths in enumerate(sorted_paths):
for k, path in enumerate(rec_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
if k == 0:
# Prepare song file-specific storage:
file_data = {}
for stage in stages:
shape = data[f'measure_{stage}'].shape + (len(rec_paths),)
file_data[stage] = np.zeros(shape, dtype=float)
if j == 0:
# Prepare recording-specific storage:
rec_mean, rec_sd = {}, {}
for stage in stages:
shape = data[f'measure_{stage}'].shape + (len(sorted_paths),)
rec_mean[f'mean_{stage}'] = np.zeros(shape, dtype=float)
rec_sd[f'sd_{stage}'] = np.zeros(shape, dtype=float)
# Log song file data:
for stage in stages:
mkey = f'measure_{stage}'
if compute_ratios:
data[mkey] /= data[mkey][0]
file_data[stage][..., k] = data[mkey]
# Get recording statistics:
for stage in stages:
rec_mean[f'mean_{stage}'][..., j] = np.nanmean(file_data[stage], axis=-1)
rec_sd[f'sd_{stage}'][..., j] = np.nanstd(file_data[stage], axis=-1)
# Save condensed recording data:
save_name = save_path + species
if compute_ratios:
save_name += '_normed'
else:
save_name += '_raw'
archive = dict(scales=data['scales'])
archive.update(rec_mean)
archive.update(rec_sd)
save_data(save_name, archive, config, overwrite=True)
print('Done.')

View File

@@ -1,8 +1,8 @@
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import shorten_species
from misc_functions import shorten_species, sort_files_by_rec
from IPython import embed
# GENERAL SETTINGS:
@@ -23,7 +23,6 @@ sources = [
'FTN'
]
search_path = '../data/inv/log_hp/'
ref_path = '../data/inv/log_hp/ref_measures.npz'
save_path = '../data/inv/log_hp/condensed/'
# ANALYSIS SETTINGS:
@@ -31,14 +30,12 @@ compute_ratios = True
plot_overview = True
# PREPARATION:
if compute_ratios:
ref_measure = np.load(ref_path)['inv']
if plot_overview:
fig, axes = plt.subplots(3, len(target_species), figsize=(16, 9),
sharex=True, sharey=True, layout='constrained')
axes[0, 0].set_ylabel('songs')
axes[1, 0].set_ylabel('recordings\n(mean ± SD)')
axes[2, 0].set_ylabel('total\n(mean ± SEM)')
axes[2, 0].set_ylabel('total\n(mean ± SD)')
# EXECUTION:
for i, species in enumerate(target_species):
@@ -48,49 +45,21 @@ for i, species in enumerate(target_species):
# Fetch all species-specific song files:
all_paths = search_files(species, incl='noise', ext='npz', dir=search_path)
# Separate by source:
sorted_paths = {}
for source in sources:
# Check for any source-specific song files:
source_paths = [path for path in all_paths if source in path]
if not source_paths:
continue
# Separate by recording:
sorted_paths[source] = [[]]
for path, name in zip(source_paths, crop_paths(source_paths)):
# Find numerical ID behind source tag:
id_ind = name.find(source) + len(source) + 1
# Check if ID is followed by sub-ID:
sub_id = name[id_ind:].split('-')[1]
if 's' in sub_id:
# Single (time stamp in next spot):
sorted_paths[source][0].append(path)
continue
sub_id = int(sub_id)
# Multiple (sub-ID in next spot):
if sub_id > len(sorted_paths[source]):
# Open new recording-specific slot:
sorted_paths[source].append([])
sorted_paths[source][sub_id - 1].append(path)
# Re-sort song files only by recording (discarding source separation):
sorted_paths = [path for paths in sorted_paths.values() for path in paths]
# Sort song files by recording (one or more per source):
sorted_paths = sort_files_by_rec(all_paths, sources)
# Condense across song files per recording:
for j, rec_paths in enumerate(sorted_paths):
for k, path in enumerate(rec_paths):
# Load invariance data:
data, _ = load_data(path, ['scales', 'measure_inv'])
data, config = load_data(path, ['scales', 'measure_inv'])
scales, measure = data['scales'], data['measure_inv']
# Relate to noise:
if compute_ratios:
measure /= ref_measure
measure /= measure[0]
if k == 0:
# Prepare song file-specific storage:
@@ -116,7 +85,8 @@ for i, species in enumerate(target_species):
rec_mean[:, j] + rec_sd[:, j], color='k', alpha=0.2)
# Save condensed recording data for current species:
np.savez(save_path + species, scales=scales, mean=rec_mean, sd=rec_sd)
archive = dict(scales=scales, mean_inv=rec_mean, sd_inv=rec_sd)
save_data(save_path + species, archive, config, overwrite=True)
if plot_overview:
spec_mean = rec_mean.mean(axis=1)
@@ -128,9 +98,7 @@ for i, species in enumerate(target_species):
print('Done.')
if plot_overview:
axes[0, 0].set_xlim(scales[0], scales[-1])
axes[0, 0].set_xscale('log')
axes[0, 0].set_yscale('log')
plt.show()
axes[0, 0].set_xlim(scales[1], scales[-1])
plt.show()
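Note: the sort_files_by_rec helper imported from misc_functions is not included in this commit. The sketch below is a plausible reconstruction from the inline sorting logic removed above (crop_paths usage and the ID/sub-ID parsing are taken from that block; the actual implementation may differ):

# Hypothetical reconstruction of misc_functions.sort_files_by_rec, mirroring
# the inline logic replaced above; not necessarily the actual implementation.
from thunderhopper.filetools import crop_paths


def sort_files_by_rec(all_paths, sources):
    sorted_paths = {}
    for source in sources:
        # Check for any source-specific song files:
        source_paths = [path for path in all_paths if source in path]
        if not source_paths:
            continue
        # Separate by recording:
        sorted_paths[source] = [[]]
        for path, name in zip(source_paths, crop_paths(source_paths)):
            # Find numerical ID behind source tag:
            id_ind = name.find(source) + len(source) + 1
            # Check if ID is followed by sub-ID:
            sub_id = name[id_ind:].split('-')[1]
            if 's' in sub_id:
                # Single recording (time stamp in next spot):
                sorted_paths[source][0].append(path)
                continue
            sub_id = int(sub_id)
            # Multiple recordings (sub-ID in next spot):
            if sub_id > len(sorted_paths[source]):
                # Open new recording-specific slot:
                sorted_paths[source].append([])
            sorted_paths[source][sub_id - 1].append(path)
    # Flatten to a list of per-recording path lists (discarding source separation):
    return [rec_paths for recs in sorted_paths.values() for rec_paths in recs]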

View File

@@ -0,0 +1,87 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
sources = [
'BM04',
'BM93',
'DJN',
'GBC',
'FTN'
]
stages = ['filt', 'env', 'conv', 'feat']
search_path = '../data/inv/short/'
save_path = '../data/inv/short/condensed/'
# ANALYSIS SETTINGS:
compute_ratios = False
# EXECUTION:
for i, species in enumerate(target_species):
print(f'Processing {species}')
# Fetch all species-specific song files:
all_paths = search_files(species, ext='npz', dir=search_path)
if not all_paths:
continue
# Sort song files by recording (one or more per source):
sorted_paths = sort_files_by_rec(all_paths, sources)
# Condense across song files per recording:
for j, rec_paths in enumerate(sorted_paths):
for k, path in enumerate(rec_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
if k == 0:
# Prepare song file-specific storage:
file_data = {}
for stage in stages:
shape = data[f'measure_{stage}'].shape + (len(rec_paths),)
file_data[stage] = np.zeros(shape, dtype=float)
if j == 0:
# Prepare recording-specific storage:
rec_mean, rec_sd = {}, {}
for stage in stages:
shape = data[f'measure_{stage}'].shape + (len(sorted_paths),)
rec_mean[f'mean_{stage}'] = np.zeros(shape, dtype=float)
rec_sd[f'sd_{stage}'] = np.zeros(shape, dtype=float)
# Log song file data:
for stage in stages:
mkey = f'measure_{stage}'
if compute_ratios:
data[mkey] /= data[mkey][0]
file_data[stage][..., k] = data[mkey]
# Get recording statistics:
for stage in stages:
rec_mean[f'mean_{stage}'][..., j] = np.nanmean(file_data[stage], axis=-1)
rec_sd[f'sd_{stage}'][..., j] = np.nanstd(file_data[stage], axis=-1)
# Save condensed recording data:
save_name = save_path + species
if compute_ratios:
save_name += '_normed'
else:
save_name += '_raw'
archive = dict(scales=data['scales'])
archive.update(rec_mean)
archive.update(rec_sd)
save_data(save_name, archive, config)
print('Done.')

View File

@@ -0,0 +1,135 @@
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import shorten_species, sort_files_by_rec
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
sources = [
'BM04',
'BM93',
'DJN',
'GBC',
'FTN'
]
search_path = '../data/inv/thresh_lp/'
save_path = '../data/inv/thresh_lp/condensed/'
# ANALYSIS SETTINGS:
with_noise = True
plot_overview = True
thresh_rel = np.array([0.5, 1, 3])
# PREPARATION:
if plot_overview:
kern_colors = ['r', 'g', 'b']
all_figs, all_axes = {}, {}
for thresh in thresh_rel:
fig, axes = plt.subplots(3, len(target_species), figsize=(16, 9),
sharex=True, sharey=True, layout='constrained')
fig.suptitle(f'rel. thresh: {thresh}')
axes[0, 0].set_ylim(0, 1)
axes[0, 0].set_ylabel('songs')
axes[1, 0].set_ylabel('recordings\n(mean ± SD)')
axes[2, 0].set_ylabel('total\n(mean ± SD)')
all_figs[thresh] = fig
all_axes[thresh] = axes
# EXECUTION:
for i, species in enumerate(target_species):
print(f'Processing {species}')
if plot_overview:
for thresh in thresh_rel:
all_axes[thresh][0, i].set_title(shorten_species(species))
# Fetch all species-specific song files:
incl = 'noise' if with_noise else 'pure'
all_paths = search_files(species, incl=incl, ext='npz', dir=search_path)
if not all_paths:
continue
# Sort song files by recording (one or more per source):
sorted_paths = sort_files_by_rec(all_paths, sources)
# Condense across song files per recording:
for j, rec_paths in enumerate(sorted_paths):
for k, path in enumerate(rec_paths):
# Load invariance data:
data, config = load_data(path, ['scales', 'measure_feat'])
scales, measure = data['scales'], data['measure_feat']
if k == 0:
# Prepare song file-specific storage:
shape = measure.shape + (len(rec_paths),)
file_data = np.zeros(shape, dtype=float)
if j == 0:
# Prepare recording-specific storage:
shape = measure.shape + (len(sorted_paths),)
rec_mean = np.zeros(shape, dtype=float)
rec_sd = np.zeros(shape, dtype=float)
# Log song file data:
file_data[..., k] = measure
if plot_overview:
for l, thresh in enumerate(thresh_rel):
axes = all_axes[thresh]
for m, c in enumerate(kern_colors):
axes[0, i].plot(scales, measure[:, m, l], c=c, alpha=0.5)
# Get recording statistics:
rec_mean[..., j] = file_data.mean(axis=-1)
rec_sd[..., j] = file_data.std(axis=-1)
if plot_overview:
for l, thresh in enumerate(thresh_rel):
axes = all_axes[thresh]
for m, c in enumerate(kern_colors):
axes[1, i].plot(scales, rec_mean[:, m, l, j], c=c)
spread = (rec_mean[:, m, l, j] - rec_sd[:, m, l, j],
rec_mean[:, m, l, j] + rec_sd[:, m, l, j])
axes[1, i].fill_between(scales, *spread, color=c, alpha=0.2)
# Save condensed recording data:
save_name = save_path + species
if with_noise:
save_name += '_noise'
else:
save_name += '_pure'
archive = dict(
scales=scales,
mean_feat=rec_mean,
sd_feat=rec_sd,
thresh_rel=thresh_rel,)
save_data(save_name, archive, config)
if plot_overview:
spec_mean = rec_mean.mean(axis=-1)
spec_sd = rec_mean.std(axis=-1)
for l, thresh in enumerate(thresh_rel):
axes = all_axes[thresh]
for m, c in enumerate(kern_colors):
axes[2, i].plot(scales, spec_mean[:, m, l], c=c)
spread = (spec_mean[:, m, l] - spec_sd[:, m, l],
spec_mean[:, m, l] + spec_sd[:, m, l])
axes[2, i].fill_between(scales, *spread, color=c, alpha=0.2)
print('Done.')
if plot_overview:
for thresh in thresh_rel:
axes = all_axes[thresh]
axes[0, 0].set_xscale('log')
axes[0, 0].set_xlim(scales[1], scales[-1])
plt.show()

View File

@@ -0,0 +1,4 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files

View File

@@ -5,7 +5,7 @@ from plot_functions import xlabel, ylabel, strip_zeros, letter_subplots
# GENERAL SETTINGS:
data_path = '../data/inv/noise_env/sd_conversion.npz'
save_path = '../figures/fig_noise_env_sd_conversion.pdf'
save_path = '../figures/fig_noise_env_sd_conversion_appendix.pdf'
# PLOT SETTINGS:
fig_kwargs = dict(

View File

@@ -1,13 +1,13 @@
import plotstyle_plt
import glob
import numpy as np
import matplotlib.pyplot as plt
from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from misc_functions import get_saturation
from color_functions import load_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, title_subplot,\
plot_line, plot_barcode, strip_zeros, time_bar,\
plot_line, strip_zeros, time_bar,\
letter_subplot, letter_subplots
from IPython import embed
@@ -17,11 +17,6 @@ def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
plot_line(ax, time, snippets[:, ..., i], ymin=ymin, ymax=ymax, **kwargs)
return None
def plot_bi_snippets(axes, time, snippets, **kwargs):
for i, ax in enumerate(axes):
plot_barcode(ax, time, snippets[:, ..., i], **kwargs)
return None
def plot_curves(ax, scales, measures, fill_kwargs={}, **kwargs):
if measures.ndim == 1:
ax.plot(scales, measures, **kwargs)[0]
@@ -39,8 +34,28 @@ def show_saturation(ax, scales, measures, high=0.95, **kwargs):
marker='o', ms=10, zorder=6, clip_on=False, **kwargs)
# GENERAL SETTINGS:
target = 'Omocestus_rufipes'
data_paths = glob.glob(f'../data/inv/full/{target}*.npz')
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][5]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
raw_path = search_files(target_species, incl='raw', dir='../data/inv/full/condensed/')[0]
norm_path = search_files(target_species, incl='norm', dir='../data/inv/full/condensed/')[0]
snip_path = search_files(example_file, dir='../data/inv/full/')[0]
trace_path = search_files(target_species, dir='../data/inv/full/collected/')[0]
ref_path = '../data/inv/full/ref_measures.npz'
save_path = '../figures/fig_invariance_full.pdf'
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
@@ -105,9 +120,9 @@ lw = dict(
log=0.25,
inv=0.25,
conv=0.25,
bi=0,
feat=1,
big=3
big=3,
plateau=1.5,
)
xlabels = dict(
big='scale $\\alpha$',
@@ -118,7 +133,6 @@ ylabels = dict(
log='$x_{\\text{db}}$',
inv='$x_{\\text{inv}}$',
conv='$c_i$',
bi='$b_i$',
feat='$f_i$',
big=['intensity', 'rel. intensity', 'norm. intensity']
)
@@ -187,121 +201,160 @@ bar_kwargs = dict(
va='center',
)
)
# PREPARATION:
ref_data = dict(np.load(ref_path))
plateau_settings = dict(
low=0.05,
high=0.95,
first=True,
last=True,
condense=None,
)
plateau_line_kwargs = dict(
lw=lw['plateau'],
ls='--',
zorder=1,
)
plateau_dot_kwargs = dict(
marker='o',
markersize=8,
markeredgewidth=1,
clip_on=False,
)
# EXECUTION:
for data_path in data_paths:
print(f'Processing {data_path}')
# Load invariance data:
data, config = load_data(data_path, **load_kwargs)
t_full = np.arange(data['snip_filt'].shape[0]) / config['rate']
# Load invariance data:
raw_data, config = load_data(raw_path, files='scales', keywords='mean')
norm_data, _ = load_data(norm_path, files='scales', keywords='mean')
scales = raw_data['scales']
# Adjust grid parameters:
snip_grid_kwargs['ncols'] = data['example_scales'].size
# Load snippet data:
snip, _ = load_data(snip_path, files='example_scales', keywords='snip')
t_full = np.arange(snip['snip_filt'].shape[0]) / config['rate']
snip_scales = snip['example_scales']
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Adjust grid parameters:
snip_grid_kwargs['ncols'] = snip_scales.size
# Prepare stage-specific snippet axes:
snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs)
snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
ax = snip_subfig.add_subplot(snip_grid[i, j])
ax.set_xlim(t_full[0], t_full[-1])
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
hide_axis(ax, 'bottom')
if i == 0:
title = title_subplot(ax, f'$\\alpha={strip_zeros(data["example_scales"][j])}$',
ref=snip_subfig, **title_kwargs)
if j == 0:
ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
else:
hide_axis(ax, 'left')
snip_axes[i, j] = ax
time_bar(snip_axes[-1, -1], **bar_kwargs)
letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i in range(big_grid.ncols):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(data['scales'][0], data['scales'][-1])
ax.set_xscale('symlog', linthresh=data['scales'][1], linscale=0.5)
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
xlabel(ax, xlabels['big'], transform=big_subfig, **xlab_big_kwargs)
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
big_axes[i] = ax
letter_subplots(big_axes, 'bc', **letter_big_kwargs)
# Prepare stage-specific snippet axes:
snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs)
snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
ax = snip_subfig.add_subplot(snip_grid[i, j])
ax.set_xlim(t_full[0], t_full[-1])
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
hide_axis(ax, 'bottom')
if i == 0:
title = title_subplot(ax, f'$\\alpha={strip_zeros(snip_scales[j])}$',
ref=snip_subfig, **title_kwargs)
if j == 0:
ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
else:
hide_axis(ax, 'left')
snip_axes[i, j] = ax
time_bar(snip_axes[-1, -1], **bar_kwargs)
letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
# # Plot filtered snippets:
# plot_snippets(snip_axes[0, :], t_full, data['snip_filt'],
# c=colors['filt'], lw=lw['filt'])
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i in range(big_grid.ncols):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(scales[0], scales[-1])
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
xlabel(ax, xlabels['big'], transform=big_subfig, **xlab_big_kwargs)
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
big_axes[i] = ax
letter_subplots(big_axes, 'bc', **letter_big_kwargs)
# # Plot envelope snippets:
# plot_snippets(snip_axes[1, :], t_full, data['snip_env'],
# ymin=0, c=colors['env'], lw=lw['env'])
if False:
# Plot filtered snippets:
plot_snippets(snip_axes[0, :], t_full, snip['snip_filt'],
c=colors['filt'], lw=lw['filt'])
# # Plot logarithmic snippets:
# plot_snippets(snip_axes[2, :], t_full, data['snip_log'],
# c=colors['log'], lw=lw['log'])
# Plot envelope snippets:
plot_snippets(snip_axes[1, :], t_full, snip['snip_env'],
ymin=0, c=colors['env'], lw=lw['env'])
# # Plot invariant snippets:
# plot_snippets(snip_axes[3, :], t_full, data['snip_inv'],
# c=colors['inv'], lw=lw['inv'])
# Plot logarithmic snippets:
plot_snippets(snip_axes[2, :], t_full, snip['snip_log'],
c=colors['log'], lw=lw['log'])
# # Plot kernel response snippets:
# plot_snippets(snip_axes[4, :], t_full, data['snip_conv'],
# c=colors['conv'], lw=lw['conv'])
# Plot invariant snippets:
plot_snippets(snip_axes[3, :], t_full, snip['snip_inv'],
c=colors['inv'], lw=lw['inv'])
# # Plot feature snippets:
# plot_snippets(snip_axes[5, :], t_full, data['snip_feat'],
# ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
# Plot kernel response snippets:
plot_snippets(snip_axes[4, :], t_full, snip['snip_conv'],
c=colors['conv'], lw=lw['conv'])
# Analysis results:
scales_rel = data['scales'] - data['scales'][0]
scales_rel /= scales_rel[-1]
for stage in stages:
measure = data[f'measure_{stage}']
# Plot feature snippets:
plot_snippets(snip_axes[5, :], t_full, snip['snip_feat'],
ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
# Plot unmodified intensity measures:
curve = plot_curves(big_axes[0], data['scales'], measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
if stage in ['log', 'inv', 'conv', 'feat']:
show_saturation(big_axes[0], data['scales'], curve, c=colors[stage])
# Plot analysis results:
for stage in stages:
# Get average unnormed measure across recordings:
raw_measure = raw_data[f'mean_{stage}'].mean(axis=-1)
# # Relate to pure-noise reference:
# norm_measure = measure / ref_data[stage]
# Plot unmodified intensity measures:
curve = plot_curves(big_axes[0], scales, raw_measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
ind = get_saturation(curve, **plateau_settings)[1]
scale = scales[ind]
big_axes[0].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].vlines(scale, big_axes[0].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
# # Plot noise-related intensity measures:
# big_axes[1].plot(data['scales'], norm_measure, c=colors[stage], lw=lw['big'])
# Get average noise-related measure across recordings:
norm_measure = norm_data[f'mean_{stage}'].mean(axis=-1)
# Normalize measure to [0, 1]:
min_measure = measure.min(axis=0)
max_measure = measure.max(axis=0)
norm_measure = (measure - min_measure) / (max_measure - min_measure)
# Plot noise-related intensity measure:
curve = plot_curves(big_axes[1], scales, norm_measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Plot normalized intensity measures:
curve = plot_curves(big_axes[1], data['scales'], norm_measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
if stage in ['log', 'inv', 'conv', 'feat']:
show_saturation(big_axes[1], data['scales'], curve, c=colors[stage])
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
big_axes[1].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].vlines(scale, big_axes[1].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
# # Plot over relative scales:
# plot_curves(big_axes[2], scales_rel, norm_measure, c=colors[stage], lw=lw['big'],
# fill_kwargs=dict(color=colors[stage], alpha=0.25))
# scales_rel = curve - curve.min()
# scales_rel /= scales_rel.max()
# Normalize measure to [0, 1]:
min_measure = raw_measure.min(axis=0)
max_measure = raw_measure.max(axis=0)
norm_measure = (raw_measure - min_measure) / (max_measure - min_measure)
if save_path is not None:
fig.savefig(save_path)
plt.show()
# Plot range-normalized intensity measure:
curve = plot_curves(big_axes[2], scales, norm_measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
if save_path is not None:
fig.savefig(save_path)
plt.show()
print('Done.')
embed()

View File

@@ -0,0 +1,326 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from misc_functions import get_saturation
from color_functions import load_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, title_subplot,\
plot_line, plot_barcode, strip_zeros, time_bar,\
letter_subplot, letter_subplots
from IPython import embed
def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
ymin, ymax = ylimits(snippets, minval=ymin, maxval=ymax, pad=0.05)
for i, ax in enumerate(axes):
plot_line(ax, time, snippets[:, ..., i], ymin=ymin, ymax=ymax, **kwargs)
return None
def plot_bi_snippets(axes, time, snippets, **kwargs):
for i, ax in enumerate(axes):
plot_barcode(ax, time, snippets[:, ..., i], **kwargs)
return None
def plot_curves(ax, scales, measures, fill_kwargs={}, **kwargs):
if measures.ndim == 1:
ax.plot(scales, measures, **kwargs)[0]
return measures
median_measure = np.median(measures, axis=1)
spread_measure = [np.percentile(measures, 25, axis=1),
np.percentile(measures, 75, axis=1)]
ax.plot(scales, median_measure, **kwargs)[0]
ax.fill_between(scales, *spread_measure, **fill_kwargs)
return median_measure
def show_saturation(ax, scales, measures, high=0.95, **kwargs):
high_ind = get_saturation(measures, high=high)[1]
return ax.plot(scales[high_ind], 0, transform=ax.get_xaxis_transform(),
marker='o', ms=10, zorder=6, clip_on=False, **kwargs)
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][5]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
data_paths = search_files(target_species, dir='../data/inv/full/condensed/')
snip_paths = search_files(example_file, dir='../data/inv/full/')
ref_path = '../data/inv/full/ref_measures.npz'
save_path = '../figures/fig_invariance_full.pdf'
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
load_kwargs = dict(
files=stages,
keywords=['scales', 'snip', 'measure']
)
# GRAPH SETTINGS:
fig_kwargs = dict(
figsize=(32/2.54, 20/2.54),
)
super_grid_kwargs = dict(
nrows=2,
ncols=1,
wspace=0,
hspace=0,
left=0,
right=1,
bottom=0,
top=1,
height_ratios=[3, 2]
)
subfig_specs = dict(
snip=(0, 0),
big=(1, 0),
)
snip_grid_kwargs = dict(
nrows=len(stages),
ncols=None,
wspace=0.1,
hspace=0.4,
left=0.08,
right=0.95,
bottom=0.08,
top=0.95
)
big_grid_kwargs = dict(
nrows=1,
ncols=3,
wspace=0.2,
hspace=0,
left=snip_grid_kwargs['left'],
right=0.96,
bottom=0.2,
top=0.95
)
# PLOT SETTINGS:
fs = dict(
lab_norm=16,
lab_tex=20,
letter=22,
tit_norm=16,
tit_tex=20,
bar=16,
)
colors = load_colors('../data/stage_colors.npz')
lw = dict(
filt=0.25,
env=0.25,
log=0.25,
inv=0.25,
conv=0.25,
bi=0,
feat=1,
big=3
)
xlabels = dict(
big='scale $\\alpha$',
)
ylabels = dict(
filt='$x_{\\text{filt}}$',
env='$x_{\\text{env}}$',
log='$x_{\\text{db}}$',
inv='$x_{\\text{inv}}$',
conv='$c_i$',
bi='$b_i$',
feat='$f_i$',
big=['intensity', 'rel. intensity', 'norm. intensity']
)
xlab_big_kwargs = dict(
y=0,
fontsize=fs['lab_norm'],
ha='center',
va='bottom',
)
ylab_snip_kwargs = dict(
x=0,
fontsize=fs['lab_tex'],
rotation=0,
ha='left',
va='center'
)
ylab_big_kwargs = dict(
x=-0.12,
fontsize=fs['lab_norm'],
ha='center',
va='bottom',
)
yloc = dict(
filt=3000,
env=1000,
log=50,
inv=20,
conv=2,
feat=1,
)
title_kwargs = dict(
x=0.5,
yref=1,
ha='center',
va='top',
fontsize=fs['tit_norm'],
)
letter_snip_kwargs = dict(
x=0,
yref=0.5,
ha='left',
va='center',
fontsize=fs['letter'],
)
letter_big_kwargs = dict(
x=0,
y=1,
ha='left',
va='bottom',
fontsize=fs['letter'],
)
bar_time = 5
bar_kwargs = dict(
dur=bar_time,
y0=-0.25,
y1=-0.1,
xshift=1,
color='k',
lw=0,
clip_on=False,
text_pos=(-0.1, 0.5),
text_str=f'${bar_time}\\,\\text{{s}}$',
text_kwargs=dict(
fontsize=fs['bar'],
ha='right',
va='center',
)
)
# PREPARATION:
ref_data = dict(np.load(ref_path))
# EXECUTION:
for data_path in data_paths:
print(f'Processing {data_path}')
# Load invariance data:
data, config = load_data(data_path, **load_kwargs)
t_full = np.arange(data['snip_filt'].shape[0]) / config['rate']
# Adjust grid parameters:
snip_grid_kwargs['ncols'] = data['example_scales'].size
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Prepare stage-specific snippet axes:
snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs)
snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
ax = snip_subfig.add_subplot(snip_grid[i, j])
ax.set_xlim(t_full[0], t_full[-1])
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
hide_axis(ax, 'bottom')
if i == 0:
title = title_subplot(ax, f'$\\alpha={strip_zeros(data["example_scales"][j])}$',
ref=snip_subfig, **title_kwargs)
if j == 0:
ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
else:
hide_axis(ax, 'left')
snip_axes[i, j] = ax
time_bar(snip_axes[-1, -1], **bar_kwargs)
letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i in range(big_grid.ncols):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(data['scales'][0], data['scales'][-1])
ax.set_xscale('symlog', linthresh=data['scales'][1], linscale=0.5)
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
xlabel(ax, xlabels['big'], transform=big_subfig, **xlab_big_kwargs)
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
big_axes[i] = ax
letter_subplots(big_axes, 'bc', **letter_big_kwargs)
plt.show()
# # Plot filtered snippets:
# plot_snippets(snip_axes[0, :], t_full, data['snip_filt'],
# c=colors['filt'], lw=lw['filt'])
# # Plot envelope snippets:
# plot_snippets(snip_axes[1, :], t_full, data['snip_env'],
# ymin=0, c=colors['env'], lw=lw['env'])
# # Plot logarithmic snippets:
# plot_snippets(snip_axes[2, :], t_full, data['snip_log'],
# c=colors['log'], lw=lw['log'])
# # Plot invariant snippets:
# plot_snippets(snip_axes[3, :], t_full, data['snip_inv'],
# c=colors['inv'], lw=lw['inv'])
# # Plot kernel response snippets:
# plot_snippets(snip_axes[4, :], t_full, data['snip_conv'],
# c=colors['conv'], lw=lw['conv'])
# # Plot feature snippets:
# plot_snippets(snip_axes[5, :], t_full, data['snip_feat'],
# ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
# Analysis results:
scales_rel = data['scales'] - data['scales'][0]
scales_rel /= scales_rel[-1]
for stage in stages:
measure = data[f'measure_{stage}']
# Plot unmodified intensity measures:
curve = plot_curves(big_axes[0], data['scales'], measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
if stage in ['log', 'inv', 'conv', 'feat']:
show_saturation(big_axes[0], data['scales'], curve, c=colors[stage])
# # Relate to pure-noise reference:
# norm_measure = measure / ref_data[stage]
# # Plot noise-related intensity measures:
# big_axes[1].plot(data['scales'], norm_measure, c=colors[stage], lw=lw['big'])
# Normalize measure to [0, 1]:
min_measure = measure.min(axis=0)
max_measure = measure.max(axis=0)
norm_measure = (measure - min_measure) / (max_measure - min_measure)
# Plot normalized intensity measures:
curve = plot_curves(big_axes[1], data['scales'], norm_measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
if stage in ['log', 'inv', 'conv', 'feat']:
show_saturation(big_axes[1], data['scales'], curve, c=colors[stage])
# # Plot over relative scales:
# plot_curves(big_axes[2], scales_rel, norm_measure, c=colors[stage], lw=lw['big'],
# fill_kwargs=dict(color=colors[stage], alpha=0.25))
# scales_rel = curve - curve.min()
# scales_rel /= scales_rel.max()
if save_path is not None:
fig.savefig(save_path)
plt.show()
print('Done.')
embed()

View File

@@ -4,7 +4,7 @@ import matplotlib.pyplot as plt
from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from misc_functions import shorten_species, get_kde, get_saturation
from misc_functions import shorten_species, get_saturation
from color_functions import load_colors
from plot_functions import hide_axis, ylimits, super_xlabel, ylabel, hide_ticks,\
plot_line, strip_zeros, time_bar, zoom_inset,\
@@ -27,18 +27,9 @@ def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
handles.extend(plot_line(ax, time, snippet, ymin=ymin, ymax=ymax, **kwargs))
return handles
# def zalpha(handles, background='w', down=1):
# twins = []
# for handle in handles:
# twin = handle.copy()
# twin.set(color=background, alpha=1)
# twin.set_zorder(handle.get_zorder() - down)
# twins.append(twin)
# return twins
# GENERAL SETTINGS:
target = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
data_paths = search_files(target, excl='noise', dir='../data/inv/log_hp/')
data_path = search_files(target, excl='noise', dir='../data/inv/log_hp/')[0]
ref_path = '../data/inv/log_hp/ref_measures.npz'
save_path = '../figures/fig_invariance_log_hp.pdf'
target_species = [
@@ -56,6 +47,7 @@ load_kwargs = dict(
keywords=['scales', 'snip', 'measure']
)
compute_ratios = True
exclude_zero = True
show_diag = True
show_plateaus = True
@@ -275,169 +267,180 @@ plateau_dot_kwargs = dict(
)
# PREPARATION:
if compute_ratios:
ref_measures = dict(np.load(ref_path))
species_measures = {}
thresh_inds = np.zeros((len(target_species),), dtype=int)
for i, species in enumerate(target_species):
spec_path = search_files(species, dir='../data/inv/log_hp/condensed/')[0]
spec_data = dict(np.load(spec_path))
measure = spec_data['mean'].mean(axis=1)
measure = spec_data['mean_inv'].mean(axis=-1)
if exclude_zero:
measure = measure[spec_data['scales'] > 0]
species_measures[species] = measure
thresh_inds[i] = get_saturation(measure, **plateau_settings)[1]
# EXECUTION:
for data_path in data_paths:
print(f'Processing {data_path}')
print(f'Processing {data_path}')
# Load invariance data:
pure_data, config = load_data(data_path, **load_kwargs)
noise_data, _ = load_data(data_path.replace('.npz', '_noise.npz'), **load_kwargs)
pure_scales, noise_scales = pure_data['scales'], noise_data['scales']
t_full = np.arange(pure_data['snip_env'].shape[0]) / config['env_rate']
# Load invariance data:
pure_data, config = load_data(data_path, **load_kwargs)
noise_data, _ = load_data(data_path.replace('pure', 'noise'), **load_kwargs)
pure_scales, noise_scales = pure_data['scales'], noise_data['scales']
t_full = np.arange(pure_data['snip_env'].shape[0]) / config['env_rate']
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
fig.canvas.draw()
if compute_ratios:
# Relate pure-song measures to near-zero scale:
pure_data['measure_env'] /= pure_data['measure_env'][1]
pure_data['measure_log'] /= pure_data['measure_log'][1]
pure_data['measure_inv'] /= pure_data['measure_inv'][1]
# Relate noise-song measures to zero scale:
noise_data['measure_env'] /= noise_data['measure_env'][0]
noise_data['measure_log'] /= noise_data['measure_log'][0]
noise_data['measure_inv'] /= noise_data['measure_inv'][0]
# Prepare pure-song snippet axes:
pure_grid_kwargs['ncols'] = pure_data['example_scales'].size
pure_subfig = fig.add_subfigure(super_grid[subfig_specs['pure']])
pure_axes = add_snip_axes(pure_subfig, pure_grid_kwargs)
for ax, stage in zip(pure_axes[:, 0], stages):
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage]))
ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
transform=pure_subfig.transSubfigure)
for ax, scale in zip(pure_axes[0, :], pure_data['example_scales']):
pure_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs)
letter_subplot(pure_subfig, 'a', ref=pure_title, **letter_snip_kwargs)
pure_inset = pure_axes[0, 0].inset_axes(zoom_inset_bounds)
pure_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
pure_inset.tick_params(**inset_tick_kwargs)
hide_ticks(pure_inset, 'bottom', ticks=False)
if exclude_zero:
# Exclude zero scales:
inds = pure_scales > 0
pure_scales = pure_scales[inds]
pure_data['measure_env'] = pure_data['measure_env'][inds]
pure_data['measure_log'] = pure_data['measure_log'][inds]
pure_data['measure_inv'] = pure_data['measure_inv'][inds]
inds = noise_scales > 0
noise_scales = noise_scales[inds]
noise_data['measure_env'] = noise_data['measure_env'][inds]
noise_data['measure_log'] = noise_data['measure_log'][inds]
noise_data['measure_inv'] = noise_data['measure_inv'][inds]
# Prepare noise-song snippet axes:
noise_grid_kwargs['ncols'] = noise_data['example_scales'].size
noise_subfig = fig.add_subfigure(super_grid[subfig_specs['noise']])
noise_axes = add_snip_axes(noise_subfig, noise_grid_kwargs)
for ax, stage in zip(noise_axes[:, 0], stages):
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage]))
ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
transform=noise_subfig.transSubfigure)
for ax, scale in zip(noise_axes[0, :], noise_data['example_scales']):
noise_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs)
letter_subplot(noise_subfig, 'b', ref=noise_title, **letter_snip_kwargs)
noise_inset = noise_axes[0, 0].inset_axes(zoom_inset_bounds)
noise_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
noise_inset.tick_params(**inset_tick_kwargs)
hide_ticks(noise_inset, 'bottom', ticks=False)
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
fig.canvas.draw()
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i, scales in enumerate([pure_scales, noise_scales, noise_scales]):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(scales[0], scales[-1])
ax.set_ylim(scales[0], scales[-1])
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_yscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_aspect(**anchor_kwargs)
if i > 0:
hide_ticks(ax, 'left')
big_axes[i] = ax
ylabel(big_axes[0], ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'cde', **letter_big_kwargs)
# Prepare pure-song snippet axes:
pure_grid_kwargs['ncols'] = pure_data['example_scales'].size
pure_subfig = fig.add_subfigure(super_grid[subfig_specs['pure']])
pure_axes = add_snip_axes(pure_subfig, pure_grid_kwargs)
for ax, stage in zip(pure_axes[:, 0], stages):
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage]))
ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
transform=pure_subfig.transSubfigure)
for ax, scale in zip(pure_axes[0, :], pure_data['example_scales']):
pure_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs)
letter_subplot(pure_subfig, 'a', ref=pure_title, **letter_snip_kwargs)
pure_inset = pure_axes[0, 0].inset_axes(zoom_inset_bounds)
pure_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
pure_inset.tick_params(**inset_tick_kwargs)
hide_ticks(pure_inset, 'bottom', ticks=False)
# Plot pure-song envelope snippets:
handle = plot_snippets(pure_axes[0, :], t_full, pure_data['snip_env'],
ymin=0, c=colors['env'], lw=lw['snip'])[0]
zoom_inset(pure_axes[0, 0], pure_inset, handle, transform=pure_axes[0, 0].transAxes, **zoom_kwargs)
# Prepare noise-song snippet axes:
noise_grid_kwargs['ncols'] = noise_data['example_scales'].size
noise_subfig = fig.add_subfigure(super_grid[subfig_specs['noise']])
noise_axes = add_snip_axes(noise_subfig, noise_grid_kwargs)
for ax, stage in zip(noise_axes[:, 0], stages):
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage]))
ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
transform=noise_subfig.transSubfigure)
for ax, scale in zip(noise_axes[0, :], noise_data['example_scales']):
noise_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs)
letter_subplot(noise_subfig, 'b', ref=noise_title, **letter_snip_kwargs)
noise_inset = noise_axes[0, 0].inset_axes(zoom_inset_bounds)
noise_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
noise_inset.tick_params(**inset_tick_kwargs)
hide_ticks(noise_inset, 'bottom', ticks=False)
# Plot pure-song logarithmic snippets:
plot_snippets(pure_axes[1, :], t_full, pure_data['snip_log'],
c=colors['log'], lw=lw['snip'])
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i, scales in enumerate([pure_scales, noise_scales, noise_scales]):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(scales[0], scales[-1])
ax.set_ylim(scales[0], scales[-1])
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_yscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_aspect(**anchor_kwargs)
if i > 0:
hide_ticks(ax, 'left')
big_axes[i] = ax
ylabel(big_axes[0], ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'cde', **letter_big_kwargs)
# Plot pure-song invariant snippets:
plot_snippets(pure_axes[2, :], t_full, pure_data['snip_inv'],
c=colors['inv'], lw=lw['snip'])
# Plot pure-song envelope snippets:
handle = plot_snippets(pure_axes[0, :], t_full, pure_data['snip_env'],
ymin=0, c=colors['env'], lw=lw['snip'])[0]
zoom_inset(pure_axes[0, 0], pure_inset, handle, transform=pure_axes[0, 0].transAxes, **zoom_kwargs)
# Plot noise-song envelope snippets:
ymin, ymax = pure_axes[0, 0].get_ylim()
handle = plot_snippets(noise_axes[0, :], t_full, noise_data['snip_env'],
ymin, ymax, c=colors['env'], lw=lw['snip'])[0]
zoom_inset(noise_axes[0, 0], noise_inset, handle, transform=noise_axes[0, 0].transAxes, **zoom_kwargs)
# Plot pure-song logarithmic snippets:
plot_snippets(pure_axes[1, :], t_full, pure_data['snip_log'],
c=colors['log'], lw=lw['snip'])
# Plot noise-song logarithmic snippets:
ymin, ymax = pure_axes[1, 0].get_ylim()
plot_snippets(noise_axes[1, :], t_full, noise_data['snip_log'],
ymin, ymax, c=colors['log'], lw=lw['snip'])
# Plot pure-song invariant snippets:
plot_snippets(pure_axes[2, :], t_full, pure_data['snip_inv'],
c=colors['inv'], lw=lw['snip'])
# Plot noise-song invariant snippets:
ymin, ymax = pure_axes[2, 0].get_ylim()
plot_snippets(noise_axes[2, :], t_full, noise_data['snip_inv'],
ymin, ymax, c=colors['inv'], lw=lw['snip'])
# Plot noise-song envelope snippets:
ymin, ymax = pure_axes[0, 0].get_ylim()
handle = plot_snippets(noise_axes[0, :], t_full, noise_data['snip_env'],
ymin, ymax, c=colors['env'], lw=lw['snip'])[0]
zoom_inset(noise_axes[0, 0], noise_inset, handle, transform=noise_axes[0, 0].transAxes, **zoom_kwargs)
# Indicate time scale:
time_bar(noise_axes[-1, -1], **bar_kwargs)
# Plot noise-song logarithmic snippets:
ymin, ymax = pure_axes[1, 0].get_ylim()
plot_snippets(noise_axes[1, :], t_full, noise_data['snip_log'],
ymin, ymax, c=colors['log'], lw=lw['snip'])
if compute_ratios:
# Relate pure-song measures to zero scale:
pure_data['measure_env'] /= ref_measures['env']
pure_data['measure_log'] /= ref_measures['log']
pure_data['measure_inv'] /= ref_measures['inv']
# Relate noise-song measures to zero scale:
noise_data['measure_env'] /= ref_measures['env']
noise_data['measure_log'] /= ref_measures['log']
noise_data['measure_inv'] /= ref_measures['inv']
# Plot noise-song invariant snippets:
ymin, ymax = pure_axes[2, 0].get_ylim()
plot_snippets(noise_axes[2, :], t_full, noise_data['snip_inv'],
ymin, ymax, c=colors['inv'], lw=lw['snip'])
# Plot pure-song measures (ideal):
big_axes[0].plot(pure_scales, pure_data['measure_env'], c=colors['env'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_inv'], c=colors['inv'], lw=lw['big'])
# Indicate time scale:
time_bar(noise_axes[-1, -1], **bar_kwargs)
# Plot noise-song measures (limited):
big_axes[1].plot(noise_scales, noise_data['measure_env'], c=colors['env'], lw=lw['big'])
big_axes[1].plot(noise_scales, noise_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[1].plot(noise_scales, noise_data['measure_inv'], c=colors['inv'], lw=lw['big'])
# Plot pure-song measures (ideal):
big_axes[0].plot(pure_scales, pure_data['measure_env'], c=colors['env'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_inv'], c=colors['inv'], lw=lw['big'])
if show_diag:
# Indicate diagonal:
big_axes[0].plot(pure_scales, pure_scales, **diag_kwargs)
big_axes[1].plot(noise_scales, noise_scales, **diag_kwargs)
# Plot noise-song measures (limited):
big_axes[1].plot(noise_scales, noise_data['measure_env'], c=colors['env'], lw=lw['big'])
big_axes[1].plot(noise_scales, noise_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[1].plot(noise_scales, noise_data['measure_inv'], c=colors['inv'], lw=lw['big'])
if show_plateaus:
# Indicate low and high plateaus of noise invariance curve:
low_ind, high_ind = get_saturation(noise_data['measure_inv'], **plateau_settings)
big_axes[1].axvspan(noise_scales[0], noise_scales[low_ind],
fc=noise_colors[0], **plateau_rect_kwargs)
big_axes[1].axvspan(noise_scales[low_ind], noise_scales[high_ind],
fc=noise_colors[1], **plateau_rect_kwargs)
if show_diag:
# Indicate diagonal:
big_axes[0].plot(pure_scales, pure_scales, **diag_kwargs)
big_axes[1].plot(noise_scales, noise_scales, **diag_kwargs)
# Plot species-specific noise-song invariance curves:
for i, (species, measure) in enumerate(species_measures.items()):
# Plot invariance curve:
color = species_colors[species]
big_axes[2].plot(noise_scales, measure, label=shorten_species(species),
c=color, lw=lw['spec'])
# Indicate saturation:
ind = thresh_inds[i]
scale = noise_scales[ind]
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=color, mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], measure[ind],
color=color, **plateau_line_kwargs)
legend = big_axes[2].legend(**leg_kwargs)
[h.set_lw(lw['legend']) for h in legend.legend_handles]
if show_plateaus:
# Indicate low and high plateaus of noise invariance curve:
low_ind, high_ind = get_saturation(noise_data['measure_inv'], **plateau_settings)
big_axes[1].axvspan(noise_scales[0], noise_scales[low_ind],
fc=noise_colors[0], **plateau_rect_kwargs)
big_axes[1].axvspan(noise_scales[low_ind], noise_scales[high_ind],
fc=noise_colors[1], **plateau_rect_kwargs)
if save_path is not None:
fig.savefig(save_path, bbox_inches='tight')
plt.show()
# Plot species-specific noise-song invariance curves:
for i, (species, measure) in enumerate(species_measures.items()):
# Plot invariance curve:
color = species_colors[species]
big_axes[2].plot(noise_scales, measure, label=shorten_species(species),
c=color, lw=lw['spec'])
# Indicate saturation:
ind = thresh_inds[i]
scale = noise_scales[ind]
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=color, mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], measure[ind],
color=color, **plateau_line_kwargs)
legend = big_axes[2].legend(**leg_kwargs)
[h.set_lw(lw['legend']) for h in legend.legend_handles]
if save_path is not None:
fig.savefig(save_path, bbox_inches='tight')
plt.show()
print('Done.')
embed()

View File

@@ -2,7 +2,7 @@ import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from plot_functions import ylabel, super_xlabel, letter_subplots, title_subplot
from plot_functions import ylabel, super_xlabel, title_subplot
from color_functions import load_colors
from misc_functions import shorten_species
@@ -17,7 +17,10 @@ target_species = [
'Pseudochorthippus_parallelus',
]
data_path = '../data/inv/log_hp/condensed/'
save_path = '../figures/fig_invariance_log-hp_species.pdf'
save_path = '../figures/fig_invariance_log-hp_appendix.pdf'
# ANALYSIS SETTINGS:
exclude_zero = True
# GRAPH SETTINGS:
fig_kwargs = dict(
@@ -45,6 +48,22 @@ fill_kwargs = dict(
alpha=0.3,
zorder=1,
)
mean_kwargs = dict(
# c=(0.5,) * 3,
lw=2,
alpha=1,
zorder=3,
ls='--'
)
mean_colors = {
'Chorthippus_biguttulus': (1,) * 3,
'Chorthippus_mollis': (0,) * 3,
'Chrysochraon_dispar': (0,) * 3,
'Euchorthippus_declivus': (0,) * 3,
'Gomphocerippus_rufus': (0,) * 3,
'Omocestus_rufipes': (0,) * 3,
'Pseudochorthippus_parallelus': (0,) * 3,
}
xlab = 'scale $\\alpha$'
ylab = '$\\sigma_{\\alpha}\\,/\\,\\sigma_{\\eta}$'
xlab_kwargs = dict(
@@ -82,7 +101,6 @@ axes[0].set_xscale('log')
axes[0].set_yscale('log')
super_xlabel(xlab, fig, axes[0], axes[-1], **xlab_kwargs)
ylabel(axes[0], ylab, **ylab_kwargs, transform=fig.transFigure)
# letter_subplots(axes, **letter_kwargs)
# Run through species:
for species, ax in zip(target_species, axes):
@@ -93,14 +111,24 @@ for species, ax in zip(target_species, axes):
path = search_files(species, dir=data_path)[0]
data = dict(np.load(path))
scales = data['scales']
means = data['mean']
sds = data['sd']
means = data['mean_inv']
sds = data['sd_inv']
if exclude_zero:
# Exclude zero scale:
inds = scales > 0
scales = scales[inds]
means = means[inds, :]
sds = sds[inds, :]
# Plot recording-specific traces:
for mean, sd in zip(means.T, sds.T):
ax.plot(scales, mean, c=color, **line_kwargs)
ax.fill_between(scales, mean - sd, mean + sd, color=color, **fill_kwargs)
# Plot species mean trace:
ax.plot(scales, means.mean(axis=-1), c=mean_colors[species], **mean_kwargs)
# Save graph:
fig.savefig(save_path)
plt.show()

View File

@@ -0,0 +1,191 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from plot_functions import ylabel, ylimits, super_xlabel, title_subplot, time_bar
from color_functions import load_colors, shade_colors
from misc_functions import shorten_species
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
data_path = '../data/inv/thresh_lp/condensed/'
save_path = '../figures/fig_invariance_thresh-lp_appendix.pdf'
# ANALYSIS SETTINGS:
exclude_zero = True
# SUBSET SETTINGS:
thresh_rel = np.array([0.5, 1, 3])[0]
kern_specs = np.array([
[1, 0.008],
[2, 0.004],
[3, 0.002],
])[np.array([0, 1, 2])]
n_kernels = kern_specs.shape[0]
# GRAPH SETTINGS:
fig_kwargs = dict(
figsize=(32/2.54, 16/2.54),
nrows=n_kernels,
ncols=len(target_species),
sharex=True,
sharey=True,
gridspec_kw=dict(
wspace=0.4,
hspace=0.2,
left=0.07,
right=0.98,
bottom=0.1,
top=0.95,
)
)
# PLOT SETTINGS:
species_colors = load_colors('../data/species_colors.npz')
kern_shades = [0, 0.75]
kern_colors = shade_colors((0., 0., 0.), np.linspace(*kern_shades, n_kernels))
line_kwargs = dict(
lw=2,
alpha=0.5,
zorder=2,
)
fill_kwargs = dict(
alpha=0.3,
zorder=1,
)
mean_kwargs = dict(
# c=(0.5,) * 3,
lw=2,
alpha=1,
zorder=3,
ls='--'
)
mean_colors = {
'Chorthippus_biguttulus': (1,) * 3,
'Chorthippus_mollis': (0,) * 3,
'Chrysochraon_dispar': (0,) * 3,
'Euchorthippus_declivus': (0,) * 3,
'Gomphocerippus_rufus': (0,) * 3,
'Omocestus_rufipes': (0,) * 3,
'Pseudochorthippus_parallelus': (1,) * 3,
}
kern_kwargs = dict(
lw=2,
)
inset_bounds = [0.05, 0.6, 0.3, 0.25]
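# Inset position and size (x0, y0, width, height) in parent-axes coordinates: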
kern_bar_time = 0.05
kern_bar_kwargs = dict(
dur=kern_bar_time,
y0=0.1,
y1=0.2,
color='k',
lw=0,
clip_on=False,
text_pos=(0.5, -1),
text_str=f'${int(kern_bar_time * 1000)}\\,\\text{{ms}}$',
text_kwargs=dict(
fontsize=12,
ha='center',
va='top',
)
)
xlab = 'scale $\\alpha$'
ylabs = [f'$\\mu_{{f_{i}}}$' for i in range(1, n_kernels + 1)]
xlab_kwargs = dict(
y=0,
fontsize=16,
ha='center',
va='bottom',
)
ylab_kwargs = dict(
x=0,
fontsize=20,
ha='center',
va='top',
)
title_kwargs = dict(
x=0.5,
yref=0.99,
ha='center',
va='top',
fontsize=16,
fontstyle='italic',
)
letter_kwargs = dict(
x=0.005,
y=0.99,
fontsize=22,
ha='left',
va='top',
)
# Prepare graph:
fig, axes = plt.subplots(**fig_kwargs)
axes[0, 0].set_xscale('log')
axes[0, 0].set_ylim(0, 1)
axes[0, 0].yaxis.set_major_locator(plt.MultipleLocator(0.5))
super_xlabel(xlab, fig, axes[-1, 0], axes[-1, -1], **xlab_kwargs)
insets = []
for ax, ylab in zip(axes[:, 0], ylabs):
ylabel(ax, ylab, **ylab_kwargs, transform=fig.transFigure)
insets.append(ax.inset_axes(inset_bounds))
# Run through species:
for i, (species, spec_axes) in enumerate(zip(target_species, axes.T)):
title_subplot(spec_axes[0], shorten_species(species), ref=fig, **title_kwargs)
# Load species data:
path = search_files(species, dir=data_path)[0]
data, config = load_data(path, files=['scales', 'mean_feat', 'sd_feat', 'thresh_rel'])
scales = data['scales']
means = data['mean_feat']
sds = data['sd_feat']
# Reduce to single threshold:
ind = np.nonzero(data['thresh_rel'] == thresh_rel)[0][0]
means = means[:, :, ind, :]
sds = sds[:, :, ind, :]
if exclude_zero:
# Exclude zero scale:
inds = scales > 0
scales = scales[inds]
means = means[inds, :, :]
sds = sds[inds, :, :]
# Run through kernels:
for j, (ax, inset) in enumerate(zip(spec_axes, insets)):
if i == 0:
# Indicate kernel waveform:
inset.plot(config['k_times'], config['kernels'][:, j],
c=kern_colors[j], **kern_kwargs)
inset.set_xlim(config['k_times'][[0, -1]])
ylimits(config['kernels'], inset, pad=0.05)
inset.set_title(rf'$k_{{{j+1}}}$', fontsize=15)
if j == 0:
time_bar(inset, **kern_bar_kwargs)
inset.axis('off')
# Plot recording-specific traces:
for k in range(means.shape[-1]):
ax.plot(scales, means[:, j, k], c=species_colors[species], **line_kwargs)
spread = (means[:, j, k] - sds[:, j, k], means[:, j, k] + sds[:, j, k])
ax.fill_between(scales, *spread, color=species_colors[species], **fill_kwargs)
# Plot kernel-specific mean trace:
ax.plot(scales, means[:, j, :].mean(axis=-1), c=mean_colors[species], **mean_kwargs)
# Save graph:
fig.savefig(save_path)
plt.show()

View File

@@ -58,19 +58,19 @@ def side_distributions(axes, snippets, inset_bounds, thresh, nbins=1000,
# GENERAL SETTINGS:
target = 'Omocestus_rufipes'
data_paths = search_files(target, incl='noise', dir='../data/inv/thresh_lp/')
example_file = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
data_path = search_files(example_file, incl='noise', dir='../data/inv/thresh_lp/')[0]
stages = ['conv', 'bi', 'feat']
load_kwargs = dict(
files=stages,
keywords=['scales', 'snip', 'measure', 'thresh']
)
save_path = None#'../figures/fig_invariance_thresh_lp_single.pdf'
save_path = '../figures/fig_invariance_thresh_lp_single.pdf'
exclude_zero = True
# GRAPH SETTINGS:
fig_kwargs = dict(
figsize=(32/2.54, 16/2.54),
figsize=(32/2.54, 32/2.54),
)
super_grid_kwargs = dict(
nrows=None,
@@ -140,6 +140,8 @@ lw = dict(
bi=0.1,
feat=3,
big=4,
kern=2.5,
plateau=1.5,
)
xlabels = dict(
alpha='scale $\\alpha$',
@@ -216,6 +218,10 @@ letter_big_kwargs = dict(
va='top',
fontsize=fs['letter'],
)
kern_kwargs = dict(
c='k',
lw=lw['kern'],
)
dist_kwargs = dict(
c='k',
lw=1,
@@ -257,171 +263,198 @@ plateau_settings = dict(
last=True,
condense=None,
)
plateau_line_kwargs = dict(
lw=lw['plateau'],
ls='--',
zorder=1,
)
plateau_dot_kwargs = dict(
marker='o',
markersize=8,
markeredgewidth=1,
clip_on=False,
)
zoom_rel = np.array([0.5, 0.515])
# SUBSET SETTINGS:
kern_specs = np.array([
[1, 0.008],
[2, 0.004],
[3, 0.002],
])[np.array([1])]
zoom_rel = np.array([0.5, 0.515])
# EXECUTION:
for data_path in data_paths:
print(f'Processing {data_path}')
print(f'Processing {data_path}')
# Load invariance data:
noise_data, config = load_data(data_path, **load_kwargs)
pure_data, _ = load_data(data_path.replace('noise', 'pure'), **load_kwargs)
# Load invariance data:
noise_data, config = load_data(data_path, **load_kwargs)
pure_data, _ = load_data(data_path.replace('noise', 'pure'), **load_kwargs)
# Unpack shared variables:
scales = noise_data['scales']
plot_scales = noise_data['example_scales']
thresh_rel = noise_data['thresh_rel']
thresh_abs = noise_data['thresh_abs']
# Unpack shared variables:
scales = noise_data['scales']
plot_scales = noise_data['example_scales']
thresh_rel = noise_data['thresh_rel']
thresh_abs = noise_data['thresh_abs']
# Reduce to kernel subset and crop to zoom frame:
t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate']
zoom_abs = zoom_rel * t_full[-1]
zoom_inds = (t_full >= zoom_abs[0]) & (t_full <= zoom_abs[1])
kern_ind = find_kern_specs(config['k_specs'], kerns=kern_specs)[0]
noise_data['snip_inv'] = noise_data['snip_inv'][zoom_inds, :]
noise_data['snip_conv'] = noise_data['snip_conv'][zoom_inds, kern_ind, :]
noise_data['snip_bi'] = noise_data['snip_bi'][zoom_inds, kern_ind, :, :]
noise_data['snip_feat'] = noise_data['snip_feat'][zoom_inds, kern_ind, :, :]
noise_data['measure_feat'] = noise_data['measure_feat'][:, kern_ind, :]
pure_data['measure_feat'] = pure_data['measure_feat'][:, kern_ind, :]
thresh_abs = thresh_abs[:, kern_ind]
t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate']
if exclude_zero:
# Reduce to nonzero scales:
nonzero_inds = scales > 0
scales = scales[nonzero_inds]
noise_data['measure_inv'] = noise_data['measure_inv'][nonzero_inds]
noise_data['measure_feat'] = noise_data['measure_feat'][nonzero_inds, :]
pure_data['measure_feat'] = pure_data['measure_feat'][nonzero_inds, :]
# Reduce to kernel subset and crop to zoom frame:
t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate']
zoom_abs = zoom_rel * t_full[-1]
zoom_inds = (t_full >= zoom_abs[0]) & (t_full <= zoom_abs[1])
kern_ind = find_kern_specs(config['k_specs'], kerns=kern_specs)[0]
noise_data['snip_inv'] = noise_data['snip_inv'][zoom_inds, :]
noise_data['snip_conv'] = noise_data['snip_conv'][zoom_inds, kern_ind, :]
noise_data['snip_bi'] = noise_data['snip_bi'][zoom_inds, kern_ind, :, :]
noise_data['snip_feat'] = noise_data['snip_feat'][zoom_inds, kern_ind, :, :]
noise_data['measure_feat'] = noise_data['measure_feat'][:, kern_ind, :]
pure_data['measure_feat'] = pure_data['measure_feat'][:, kern_ind, :]
config['kernels'] = config['kernels'][:, kern_ind]
thresh_abs = thresh_abs[:, kern_ind]
t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate']
# Get threshold-specific colors:
factors = np.linspace(*shade_factors, thresh_rel.size)
shaded = dict(
conv=shade_colors(colors['conv'], factors),
bi=shade_colors(colors['bi'], factors),
feat=shade_colors(colors['feat'], factors),
)
if exclude_zero:
# Exclude zero scale:
inds = scales > 0
scales = scales[inds]
noise_data['measure_inv'] = noise_data['measure_inv'][inds]
noise_data['measure_feat'] = noise_data['measure_feat'][inds, :]
pure_data['measure_feat'] = pure_data['measure_feat'][inds, :]
# Adjust grid parameters to loaded data:
super_grid_kwargs['nrows'] = snip_rows * thresh_rel.size + input_rows
input_grid_kwargs['ncols'] = plot_scales.size
snip_grid_kwargs['ncols'] = plot_scales.size
# Get threshold-specific colors:
factors = np.linspace(*shade_factors, thresh_rel.size)
shaded = dict(
conv=shade_colors(colors['conv'], factors),
bi=shade_colors(colors['bi'], factors),
feat=shade_colors(colors['feat'], factors),
)
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Adjust grid parameters to loaded data:
super_grid_kwargs['nrows'] = snip_rows * thresh_rel.size + input_rows
input_grid_kwargs['ncols'] = plot_scales.size
snip_grid_kwargs['ncols'] = plot_scales.size
# Prepare input snippet axes:
input_subfig = fig.add_subfigure(super_grid[subfig_specs['input']])
input_axes = add_snip_axes(input_subfig, input_grid_kwargs).ravel()
input_axes[0].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][0]))
input_axes[1].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][1]))
ylabel(input_axes[0], ylabels['inv'], transform=input_subfig.transSubfigure, **ylab_snip_kwargs)
for ax, scale in zip(input_axes, plot_scales):
title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', ref=input_subfig, **title_kwargs)
letter_subplot(input_subfig, 'a', **letter_snip_kwargs)
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Prepare snippet axes:
snip_subfigs, snip_axes = [], []
for i in range(thresh_rel.size):
subfig_spec = subfig_specs['snip'].copy()
subfig_spec[0] = slice(*(subfig_spec[0] + i * snip_rows))
snip_subfig = fig.add_subfigure(super_grid[*subfig_spec])
axes = add_snip_axes(snip_subfig, snip_grid_kwargs)
[hide_axis(ax, 'left') for ax in axes[1:, 1]]
super_ylabel(f'$\\Theta={strip_zeros(thresh_rel[i])}\\cdot\\sigma_{{\\eta}}$',
snip_subfig, axes[-1, 0], axes[0, 0], **ylab_super_kwargs)
for (ax1, ax2), stage in zip(axes[:, :2], stages):
ax1.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][0]))
ax2.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][1]))
ylabel(ax1, ylabels[stage], transform=snip_subfig.transSubfigure, **ylab_snip_kwargs)
if i == thresh_rel.size - 1:
axes[-1, -1].set_xlim(t_full[0], t_full[-1])
time_bar(axes[-1, -1], **bar_kwargs)
snip_subfigs.append(snip_subfig)
snip_axes.append(axes)
letter_subplots(snip_subfigs, 'bcd', **letter_snip_kwargs)
# Prepare input snippet axes:
input_subfig = fig.add_subfigure(super_grid[subfig_specs['input']])
input_axes = add_snip_axes(input_subfig, input_grid_kwargs).ravel()
input_axes[0].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][0]))
input_axes[1].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][1]))
ylabel(input_axes[0], ylabels['inv'], transform=input_subfig.transSubfigure, **ylab_snip_kwargs)
for ax, scale in zip(input_axes, plot_scales):
title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', ref=input_subfig, **title_kwargs)
letter_subplot(input_subfig, 'a', **letter_snip_kwargs)
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
# Prepare snippet axes:
snip_subfigs, snip_axes = [], []
for i in range(thresh_rel.size):
subfig_spec = subfig_specs['snip'].copy()
subfig_spec[0] = slice(*(subfig_spec[0] + i * snip_rows))
snip_subfig = fig.add_subfigure(super_grid[*subfig_spec])
axes = add_snip_axes(snip_subfig, snip_grid_kwargs)
[hide_axis(ax, 'left') for ax in axes[1:, 1]]
super_ylabel(f'$\\Theta={strip_zeros(thresh_rel[i])}\\cdot\\sigma_{{\\eta}}$',
snip_subfig, axes[-1, 0], axes[0, 0], **ylab_super_kwargs)
for (ax1, ax2), stage in zip(axes[:, :2], stages):
ax1.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][0]))
ax2.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][1]))
ylabel(ax1, ylabels[stage], transform=snip_subfig.transSubfigure, **ylab_snip_kwargs)
if i == thresh_rel.size - 1:
axes[-1, -1].set_xlim(t_full[0], t_full[-1])
time_bar(axes[-1, -1], **bar_kwargs)
snip_subfigs.append(snip_subfig)
snip_axes.append(axes)
letter_subplots(snip_subfigs, 'bcd', **letter_snip_kwargs)
alpha_ax = big_subfig.add_subplot(big_grid[0, 0])
alpha_ax.set_xlim(scales[0], scales[-1])
alpha_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5)
ylimits(pure_data['measure_feat'], alpha_ax, minval=0, pad=ypad['big'])
alpha_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
xlabel(alpha_ax, xlabels['alpha'], **xlab_alpha_kwargs)
ylabel(alpha_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
sigma_ax = big_subfig.add_subplot(big_grid[1, 0])
sigma_ax.set_xlim(noise_data['measure_inv'].min(), noise_data['measure_inv'].max())
# sigma_ax.set_xscale('log')
sigma_ax.set_xlim(scales[0], scales[-1])
sigma_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5)
ylimits(pure_data['measure_feat'], sigma_ax, minval=0, pad=ypad['big'])
sigma_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
xlabel(sigma_ax, xlabels['sigma'], **xlab_sigma_kwargs)
ylabel(sigma_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
alpha_ax = big_subfig.add_subplot(big_grid[0, 0])
alpha_ax.set_xlim(scales[0], scales[-1])
alpha_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5)
ylimits(pure_data['measure_feat'], alpha_ax, minval=0, pad=ypad['big'])
alpha_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
xlabel(alpha_ax, xlabels['alpha'], **xlab_alpha_kwargs)
ylabel(alpha_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
# Plot intensity-adapted snippets:
plot_snippets(input_axes, t_full, noise_data['snip_inv'],
ypad=ypad['inv'], c=colors['inv'], lw=lw['inv'])
ylimits(noise_data['snip_inv'][:, 0], input_axes[0], pad=ypad['inv'])
sigma_ax = big_subfig.add_subplot(big_grid[1, 0])
sigma_ax.set_xlim(noise_data['measure_inv'].min(), noise_data['measure_inv'].max())
sigma_ax.set_xlim(scales[0], scales[-1])
sigma_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5)
ylimits(pure_data['measure_feat'], sigma_ax, minval=0, pad=ypad['big'])
sigma_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
xlabel(sigma_ax, xlabels['sigma'], **xlab_sigma_kwargs)
ylabel(sigma_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
# Plot representation snippets per threshold:
for i, (subfig, axes) in enumerate(zip(snip_subfigs, snip_axes)):
dist_fill_kwargs['color'] = shaded['bi'][i]
# Plot intensity-adapted snippets:
plot_snippets(input_axes, t_full, noise_data['snip_inv'],
ypad=ypad['inv'], c=colors['inv'], lw=lw['inv'])
ylimits(noise_data['snip_inv'][:, 0], input_axes[0], pad=ypad['inv'])
# Plot kernel response snippets:
plot_snippets(axes[0, :], t_full, noise_data['snip_conv'], thresh=thresh_abs[i],
ypad=ypad['conv'], fill_kwargs=dist_fill_kwargs, c=shaded['conv'][i], lw=lw['conv'])
ylimits(noise_data['snip_conv'][:, 0], axes[0, 0], pad=ypad['conv'])
# Indicate kernel waveform over 1st intensity-adapted snippet:
input_axes[0].plot(config['k_times'] + 0.5 * t_full[-1], config['kernels'], **kern_kwargs)
# Plot kernel response distributions:
side_distributions(axes[0, :1], noise_data['snip_conv'][:, :1], dist_inset_bounds,
thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs)
side_distributions(axes[0, 1:], noise_data['snip_conv'][:, 1:], dist_inset_bounds,
thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs)
# Plot representation snippets per threshold:
for i, (subfig, axes) in enumerate(zip(snip_subfigs, snip_axes)):
dist_fill_kwargs['color'] = shaded['bi'][i]
# Plot binary snippets:
plot_bi_snippets(axes[1, :], t_full, noise_data['snip_bi'][:, :, i],
color=shaded['bi'][i], lw=lw['bi'])
# Plot kernel response snippets:
plot_snippets(axes[0, :], t_full, noise_data['snip_conv'], thresh=thresh_abs[i],
ypad=ypad['conv'], fill_kwargs=dist_fill_kwargs, c=shaded['conv'][i], lw=lw['conv'])
ylimits(noise_data['snip_conv'][:, 0], axes[0, 0], pad=ypad['conv'])
# Plot feature snippets:
handles = plot_snippets(axes[2, :], t_full, noise_data['snip_feat'][:, :, i],
ymin=0, ymax=1, c=shaded['feat'][i], lw=lw['feat'])
[set_clip_box(h[0], ax, bounds=[[0, -0.05], [1, 1.05]]) for h, ax in zip(handles, axes[2, :])]
# Plot kernel response distributions:
side_distributions(axes[0, :1], noise_data['snip_conv'][:, :1], dist_inset_bounds,
thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs)
side_distributions(axes[0, 1:], noise_data['snip_conv'][:, 1:], dist_inset_bounds,
thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs)
# Get threshold-specific saturation:
for i in range(thresh_rel.size):
ind = get_saturation(noise_data['measure_feat'][:, i], **plateau_settings)[1]
# Plot binary snippets:
plot_bi_snippets(axes[1, :], t_full, noise_data['snip_bi'][:, :, i],
color=shaded['bi'][i], lw=lw['bi'])
# Plot analysis results:
for ax, x in zip([alpha_ax, sigma_ax], [scales, noise_data['measure_inv']]):
# Plot pure-song analysis results:
handles = ax.plot(x, pure_data['measure_feat'], lw=lw['big'], ls='dotted')
[h.set_color(c) for h, c in zip(handles, shaded['feat'])]
# Plot feature snippets:
handles = plot_snippets(axes[2, :], t_full, noise_data['snip_feat'][:, :, i],
ymin=0, ymax=1, c=shaded['feat'][i], lw=lw['feat'])
[set_clip_box(h[0], ax, bounds=[[0, -0.05], [1, 1.05]]) for h, ax in zip(handles, axes[2, :])]
# Plot noise-song analysis results:
handles = ax.plot(x, noise_data['measure_feat'], lw=lw['big'])
[h.set_color(c) for h, c in zip(handles, shaded['feat'])]
# Get saturation:
saturation_inds = []
for i in range(thresh_rel.size):
ind = get_saturation(noise_data['measure_feat'][:, i], **plateau_settings)[1]
saturation_inds.append(ind)
# Add proxy legend:
if ax == alpha_ax:
h1 = ax.plot([], [], c='k', lw=lw['big'], label='$\\alpha\\cdot s(t) + \\eta(t)$')[0]
h2 = ax.plot([], [], c='k', lw=lw['big'], ls='dotted', label='$\\alpha\\cdot s(t)$')[0]
ax.legend(handles=[h1, h2], **leg_kwargs)
# Plot analysis results:
for ax, x in zip([alpha_ax, sigma_ax], [scales, noise_data['measure_inv']]):
# Plot pure-song analysis results:
handles = ax.plot(x, pure_data['measure_feat'], lw=lw['big'], ls='dotted')
[h.set_color(c) for h, c in zip(handles, shaded['feat'])]
if save_path is not None:
fig.savefig(save_path)
plt.show()
# Plot noise-song analysis results:
handles = ax.plot(x, noise_data['measure_feat'], lw=lw['big'])
[h.set_color(c) for h, c in zip(handles, shaded['feat'])]
# Indicate threshold-specific saturation:
for i, ind in enumerate(saturation_inds):
color = shaded['feat'][i]
ax.plot(x[ind], 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=ax.get_xaxis_transform())
ax.plot(x[ind], 0, mfc=color, mec='k', alpha=0.75, zorder=6,
**plateau_dot_kwargs, transform=ax.get_xaxis_transform())
ax.vlines(x[ind], ax.get_ylim()[0], noise_data['measure_feat'][ind, i],
color=color, **plateau_line_kwargs)
# Add proxy legend:
if ax == alpha_ax:
h1 = ax.plot([], [], c='k', lw=lw['big'], label='$\\alpha\\cdot s(t) + \\eta(t)$')[0]
h2 = ax.plot([], [], c='k', lw=lw['big'], ls='dotted', label='$\\alpha\\cdot s(t)$')[0]
ax.legend(handles=[h1, h2], **leg_kwargs)
if save_path is not None:
fig.savefig(save_path)
plt.show()
print('Done.')
embed()

View File

@@ -162,20 +162,30 @@ def add_cross_axes(fig, n, long='col', fill='row', **grid_kwargs):
# GENERAL SETTINGS:
target_species = [
'Omocestus_rufipes',
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
]
example_files = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}
n_species = len(target_species)
load_kwargs = dict(
keywords=['scales', 'measure', 'thresh']
keywords=['scales', 'mean', 'thresh']
)
save_path = '../figures/fig_invariance_thresh_lp_species.pdf'
exclude_zero = True
show_floor = True
show_floor = False
# SUBSET SETTINGS:
thresh_rel = np.array([0.5, 1, 3])[0]
@@ -266,14 +276,6 @@ lw = dict(
kern=2.5,
plateau=3,
)
zorder = dict(
Omocestus_rufipes=2,
Chorthippus_biguttulus=2.5,
Chorthippus_mollis=2.4,
Chrysochraon_dispar=2,
Gomphocerippus_rufus=2,
Pseudochorthippus_parallelus=2,
)
space_kwargs = dict(
s=30,
)
@@ -357,21 +359,27 @@ letter_space_kwargs = dict(
va='center',
fontsize=fs['letter'],
)
song_bar_time = 1.0
spec_bar_times = dict(
Chorthippus_biguttulus=1,
Chorthippus_mollis=10,
Chrysochraon_dispar=1,
Euchorthippus_declivus=0.25,
Gomphocerippus_rufus=5,
Omocestus_rufipes=5,
Pseudochorthippus_parallelus=1,
)
song_bar_kwargs = dict(
dur=song_bar_time,
y0=-0.1,
y1=0,
xshift=0,
xshift=0.5,
color='k',
lw=0,
clip_on=False,
text_pos=(1.25, 0.5),
text_str=f'${int(song_bar_time)}\\,\\text{{s}}$',
text_pos=(0.5, -0.1),
text_kwargs=dict(
fontsize=fs['bar'],
ha='left',
va='center',
ha='center',
va='top',
)
)
kern_bar_time = 0.05
@@ -382,7 +390,7 @@ kern_bar_kwargs = dict(
color='k',
lw=0,
clip_on=False,
text_pos=(0.6, -1),
text_pos=(0.7, -1),
text_str=f'${int(kern_bar_time * 1000)}\\,\\text{{ms}}$',
text_kwargs=dict(
fontsize=fs['bar'],
@@ -502,7 +510,7 @@ for i, species in enumerate(target_species):
print(f'Processing {species}')
# Fetch species-specific recording file:
song_path = search_files(species, dir='../data/processed/')[0]
song_path = search_files(example_files[species], dir='../data/processed/')[0]
# Load song data:
song_data, _ = load_data(song_path, files='filt')
@@ -513,16 +521,18 @@ for i, species in enumerate(target_species):
time = np.arange(song.shape[0]) / rate
plot_line(song_ax, time, song, ypad=0.05, c='k', lw=lw['song'])
title_subplot(song_ax, shorten_species(species), ref=song_subfig, **title_kwargs)
time_bar(song_ax, **song_bar_kwargs)
song_bar_kwargs['text_pos'] = None
time_bar(song_ax, dur=spec_bar_times[species], **song_bar_kwargs,
text_str=f'${spec_bar_times[species]}\\,\\text{{s}}$')
# Fetch species-specific invariance files:
pure_path = search_files(species, incl='pure', dir='../data/inv/thresh_lp/')[0]
noise_path = search_files(species, incl='noise', dir='../data/inv/thresh_lp/')[0]
pure_path = search_files(species, incl='pure', dir='../data/inv/thresh_lp/condensed/')[0]
noise_path = search_files(species, incl='noise', dir='../data/inv/thresh_lp/condensed/')[0]
# Load invariance data:
pure_data, config = load_data(pure_path, **load_kwargs)
noise_data, _ = load_data(noise_path, **load_kwargs)
pure_measure = pure_data['mean_feat'].mean(axis=-1)
noise_measure = noise_data['mean_feat'].mean(axis=-1)
scales = pure_data['scales']
# Reduce to kernel subset and a single threshold:
@@ -530,8 +540,8 @@ for i, species in enumerate(target_species):
kern_inds = find_kern_specs(config['k_specs'], kerns=kern_specs)
config['k_specs'] = config['k_specs'][kern_inds]
config['kernels'] = config['kernels'][:, kern_inds]
pure_measure = pure_data['measure_feat'][:, kern_inds, thresh_ind]
noise_measure = noise_data['measure_feat'][:, kern_inds, thresh_ind]
pure_measure = pure_measure[:, kern_inds, thresh_ind]
noise_measure = noise_measure[:, kern_inds, thresh_ind]
if exclude_zero:
# Reduce to nonzero scales:
nonzero_inds = scales > 0
@@ -564,7 +574,6 @@ for i, species in enumerate(target_species):
inset.plot(config['k_times'], kern, c=c, lw=lw['kern'])
inset.set_xlim(xlims)
inset.set_ylim(ylims)
# time_bar(insets[0], parent=feat_axes[0, 0], **kern_bar_kwargs)
time_bar(insets[0], **kern_bar_kwargs)
# Plot invariance curves in feature space:
@@ -572,13 +581,11 @@ for i, species in enumerate(target_species):
for ind, (pure_ax, noise_ax) in enumerate(zip(pure_axes, noise_axes)):
irow, icol = row_inds[ind], col_inds[ind]
pure_handle = pure_ax.scatter(pure_measure[:, icol], pure_measure[:, irow],
c=scales, cmap=scale_cmap, norm=norm,
zorder=zorder[species], **space_kwargs)
c=scales, cmap=scale_cmap, norm=norm, **space_kwargs)
pure_space_handles[pure_ax].append(pure_handle)
noise_handle = noise_ax.scatter(noise_measure[:, icol], noise_measure[:, irow],
c=scales, cmap=scale_cmap, norm=norm,
zorder=zorder[species], **space_kwargs)
c=scales, cmap=scale_cmap, norm=norm, **space_kwargs)
noise_space_handles[noise_ax].append(noise_handle)
# Indicate scale color code in pure subfigure:

View File

@@ -1,5 +1,6 @@
import numpy as np
from scipy.stats import gaussian_kde
from thunderhopper.filetools import crop_paths
def shorten_species(name):
genus, species = name.split('_')
@@ -9,6 +10,44 @@ def unsort_unique(array):
values, inds = np.unique(array, return_index=True)
return values[np.argsort(inds)]
def draw_noise_segment(noise, n):
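# Draw a random contiguous segment of n samples from the noise array (along its first axis):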
rng = np.random.default_rng()
start = rng.integers(0, noise.shape[0] - n, endpoint=True)
return np.take(noise, np.arange(start, start + n), axis=0)
def sort_files_by_rec(paths, sources=['BM04', 'BM93', 'DJN', 'GBC', 'FTN']):
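# Group file paths into per-recording lists, using the source tag and optional sub-ID in each file name: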
# Separate by source:
sorted_paths = {}
for source in sources:
# Check for any source-specific song files:
source_paths = [path for path in paths if source in path]
if not source_paths:
continue
# Separate by recording:
sorted_paths[source] = [[]]
for path, name in zip(source_paths, crop_paths(source_paths)):
# Find numerical ID behind source tag:
id_ind = name.find(source) + len(source) + 1
# Get segment where sub-ID would be:
sub_id = name[id_ind:].split('-')[1]
if 's' in sub_id:
# Found time stamp (single recording):
sorted_paths[source][0].append(path)
continue
sub_id = int(sub_id)
# Found sub-ID (multiple recordings):
while sub_id > len(sorted_paths[source]):
# Open new recording-specific slots up to this sub-ID:
sorted_paths[source].append([])
sorted_paths[source][sub_id - 1].append(path)
# Re-sort song files by recording only (discarding source separation):
sorted_paths = [path for paths in sorted_paths.values() for path in paths]
return sorted_paths
def get_kde(data, sigma, axis=None, n=1000, pad=10):
if axis is None:
axis = np.linspace(data.min() - pad * sigma, data.max() + pad * sigma, n)

View File

@@ -1,24 +1,42 @@
import glob
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import crop_paths
from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filtertools import find_kern_specs
from thunderhopper.model import process_signal, convolve_kernels
from thunderhopper.model import process_signal
from misc_functions import draw_noise_segment
from IPython import embed
# GENERAL SETTINGS:
target = 'Omocestus_rufipes'
data_paths = glob.glob(f'../data/processed/{target}*.npz')
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][0]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
data_paths = search_files(target_species, dir='../data/processed/')
noise_path = '../data/processed/white_noise_sd-1.npz'
ref_path = '../data/inv/full/ref_measures.npz'
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
save_path = '../data/inv/full/'
# ANALYSIS SETTINGS:
example_scales = np.array([0.1, 1, 10, 30, 100, 300])
scales = np.geomspace(0.01, 10000, 100)
scales = np.unique(np.concatenate((scales, example_scales)))
thresh_rel = 3
scales = np.geomspace(0.01, 10000, 500)
scales = np.unique(np.concatenate(([0], scales, example_scales)))
thresh_rel = 0.5
# SUBSET SETTINGS:
kernels = np.array([
@@ -34,11 +52,14 @@ types = None#np.array([-1])
sigmas = None#np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# PREPARATION:
noise_data = np.load(noise_path)
pure_noise = noise_data['raw']
pure_noise = np.load(noise_path)['raw']
if thresh_rel is not None:
# Get threshold values from pure-noise response SD:
thresh_abs = np.load(ref_path)['conv'] * thresh_rel
# EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)):
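# Detailed snippet storage is only enabled for the designated example recording: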
save_detailed = example_file in name
print(f'Processing {name}')
# Get song recording (prior to anything):
@@ -46,8 +67,8 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
song, rate = data['raw'], config['rate']
if thresh_rel is not None:
# Get noise-bound kernel-specific thresholds:
config['feat_thresh'] = noise_data['conv'].std(axis=0) * thresh_rel
# Set kernel-specific thresholds:
config['feat_thresh'] = thresh_abs
# Reduce to kernel subset:
if any(var is not None for var in [kernels, types, sigmas]):
@@ -66,22 +87,10 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
song /= song[segment].std(axis=0)
# Get normalized noise component:
noise = pure_noise[:song.shape[0]]
noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std()
# Prepare snippet storage:
shape_low = (song.shape[0], example_scales.size)
shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size)
snippets = dict(
snip_filt=np.zeros(shape_low, dtype=float),
snip_env=np.zeros(shape_low, dtype=float),
snip_log=np.zeros(shape_low, dtype=float),
snip_inv=np.zeros(shape_low, dtype=float),
snip_conv=np.zeros(shape_high, dtype=float),
snip_feat=np.zeros(shape_high, dtype=float)
)
# Prepare measure storage:
# Prepare storage:
shape_low = (scales.size,)
shape_high = (scales.size, config['k_specs'].shape[0])
measures = dict(
@@ -91,6 +100,18 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
measure_inv=np.zeros(shape_low, dtype=float),
measure_conv=np.zeros(shape_high, dtype=float),
measure_feat=np.zeros(shape_high, dtype=float)
)
if save_detailed:
# Prepare optional storage:
shape_low = (song.shape[0], example_scales.size)
shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size)
snippets = dict(
snip_filt=np.zeros(shape_low, dtype=float),
snip_env=np.zeros(shape_low, dtype=float),
snip_log=np.zeros(shape_low, dtype=float),
snip_inv=np.zeros(shape_low, dtype=float),
snip_conv=np.zeros(shape_high, dtype=float),
snip_feat=np.zeros(shape_high, dtype=float)
)
# Execute piecewise:
@@ -105,18 +126,17 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
signal=scaled, rate=rate)
# Store results:
for stage in stages:
mkey, skey = f'measure_{stage}', f'snip_{stage}'
# Log snippet data:
if scale in example_scales:
scale_ind = np.nonzero(example_scales == scale)[0][0]
snippets[skey][:, ..., scale_ind] = signals[stage]
# Log intensity measure per stage (excluding binary):
if stage in ['raw', 'filt', 'env', 'log', 'inv', 'conv']:
measures[mkey][i] = signals[stage][segment, ...].std(axis=0)
elif stage == 'feat':
# Log intensity measures:
mkey = f'measure_{stage}'
if stage == 'feat':
measures[mkey][i] = signals[stage][segment, :].mean(axis=0)
else:
measures[mkey][i] = signals[stage][segment, ...].std(axis=0)
# Log optional snippet data:
if save_detailed and scale in example_scales:
scale_ind = np.nonzero(example_scales == scale)[0][0]
snippets[f'snip_{stage}'][:, ..., scale_ind] = signals[stage]
# Save analysis results:
if save_path is not None:
@@ -124,8 +144,9 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
scales=scales,
example_scales=example_scales,
)
data.update(snippets)
data.update(measures)
if save_detailed:
data.update(snippets)
save_data(save_path + name, data, config, overwrite=True)
print('Done.')
embed()

View File

@@ -2,6 +2,7 @@ import numpy as np
from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filters import decibel, sosfilter
from misc_functions import draw_noise_segment
from IPython import embed
# GENERAL SETTINGS:
@@ -12,17 +13,18 @@ noise_path = '../data/processed/white_noise_sd-1.npz'
save_path = '../data/inv/log_hp/'
# ANALYSIS SETTINGS:
add_noise = search_target == '*' or False
save_detailed = search_target == example_file
add_noise = search_target == '*'
example_scales = np.array([0.1, 1, 10, 30, 100, 300])
scales = np.geomspace(0.01, 10000, 1000)
scales = np.unique(np.concatenate((scales, example_scales)))
scales = np.unique(np.concatenate(([0], scales, example_scales)))
# PREPARATION:
pure_noise = np.load(noise_path)['filt']
if add_noise:
pure_noise = np.load(noise_path)['filt']
# EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)):
save_detailed = example_file in name
print(f'Processing {name}')
# Get filtered song (prior to envelope extraction):
@@ -38,7 +40,7 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
song /= song[segment].std()
if add_noise:
# Get normalized noise component:
noise = pure_noise[:song.shape[0]]
noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std()
# Prepare storage:
@@ -93,10 +95,12 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
snip_log=snip_log,
snip_inv=snip_inv,
)
file_name = save_path + name
save_name = save_path + name
if add_noise:
file_name += '_noise'
save_data(file_name, archive, config, overwrite=True)
save_name += '_noise'
else:
save_name += '_pure'
save_data(save_name, archive, config, overwrite=True)
print('Done.')
embed()

View File

@@ -0,0 +1,157 @@
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filtertools import find_kern_specs
from thunderhopper.filters import sosfilter
from thunderhopper.model import convolve_kernels, process_signal
from misc_functions import draw_noise_segment
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][5]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
data_paths = search_files(target_species, dir='../data/processed/')
noise_path = '../data/processed/white_noise_sd-1.npz'
ref_path = '../data/inv/short/ref_measures.npz'
pre_stages = ['filt', 'env']
stages = pre_stages + ['conv', 'feat']
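# The 'short' pipeline variant omits the 'log' and 'inv' stages that the full pipeline applies between envelope and convolution.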
save_path = '../data/inv/short/'
# ANALYSIS SETTINGS:
example_scales = np.array([0.1, 1, 10, 30, 100, 300])
scales = np.geomspace(0.01, 10000, 500)
scales = np.unique(np.concatenate(([0], scales, example_scales)))
thresh_rel = 0.5
# SUBSET SETTINGS:
kernels = np.array([
[1, 0.002],
[-1, 0.002],
[2, 0.004],
[-2, 0.004],
[3, 0.032],
[-3, 0.032]
])
kernels = None
types = None#np.array([-1])
sigmas = None#np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# PREPARATION:
pure_noise = np.load(noise_path)['raw']
if thresh_rel is not None:
# Get threshold values from pure-noise response SD:
thresh_abs = np.load(ref_path)['conv'] * thresh_rel
# EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)):
save_detailed = example_file in name
print(f'Processing {name}')
# Get song recording (prior to anything):
data, config = load_data(data_path, files='raw')
song, rate = data['raw'], config['rate']
if thresh_rel is not None:
# Set kernel-specific thresholds:
config['feat_thresh'] = thresh_abs
# Reduce to kernel subset:
if any(var is not None for var in [kernels, types, sigmas]):
kern_inds = find_kern_specs(config['k_specs'], kernels, types, sigmas)
config['kernels'] = config['kernels'][:, kern_inds]
config['k_specs'] = config['k_specs'][kern_inds, :]
config['k_props'] = [config['k_props'][i] for i in kern_inds]
config['feat_thresh'] = config['feat_thresh'][kern_inds]
# Get song segment to be analyzed:
time = np.arange(song.shape[0]) / rate
start, end = data['songs_0'].ravel()
segment = (time >= start) & (time <= end)
# Normalize song component:
song /= song[segment].std(axis=0)
# Get normalized noise component:
noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std()
# Prepare storage:
shape_low = (scales.size,)
shape_high = (scales.size, config['k_specs'].shape[0])
measures = dict(
measure_filt=np.zeros(shape_low, dtype=float),
measure_env=np.zeros(shape_low, dtype=float),
measure_conv=np.zeros(shape_high, dtype=float),
measure_feat=np.zeros(shape_high, dtype=float)
)
if save_detailed:
# Prepare optional storage:
shape_low = (song.shape[0], example_scales.size)
shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size)
snippets = dict(
snip_filt=np.zeros(shape_low, dtype=float),
snip_env=np.zeros(shape_low, dtype=float),
snip_conv=np.zeros(shape_high, dtype=float),
snip_feat=np.zeros(shape_high, dtype=float)
)
# Execute piecewise:
for i, scale in enumerate(scales):
print('Simulating scale ', scale)
# Rescale song and add noise:
scaled = song * scale + noise
# Process mixture:
signals, rates = process_signal(config, returns=pre_stages,
signal=scaled, rate=rate)
# Process mixture further:
signals['conv'] = convolve_kernels(signals['env'], config['kernels'], config['k_specs'])
signals['feat'] = sosfilter((signals['conv'] > config['feat_thresh']).astype(float),
config['env_rate'], config['feat_fcut'], 'lp',
padtype='fixed', padlen=config['padlen'])
# Store results:
for stage in stages:
# Log intensity measures:
mkey = f'measure_{stage}'
if stage == 'feat':
measures[mkey][i] = signals[stage][segment, :].mean(axis=0)
else:
measures[mkey][i] = signals[stage][segment, ...].std(axis=0)
# Log optional snippet data:
if save_detailed and scale in example_scales:
scale_ind = np.nonzero(example_scales == scale)[0][0]
snippets[f'snip_{stage}'][:, ..., scale_ind] = signals[stage]
# Save analysis results:
if save_path is not None:
data = dict(
scales=scales,
example_scales=example_scales,
)
data.update(measures)
if save_detailed:
data.update(snippets)
save_data(save_path + name, data, config, overwrite=True)
print('Done.')
embed()

View File

@@ -5,21 +5,23 @@ from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filters import sosfilter
from thunderhopper.filtertools import find_kern_specs
from thunderhopper.model import convolve_kernels
from misc_functions import draw_noise_segment
from IPython import embed
# GENERAL SETTINGS:
target = ['Omocestus_rufipes', '*'][0]
data_paths = search_files(target, excl='noise', dir='../data/processed/')
example_file = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
search_target = ['*', example_file][0]
data_paths = search_files(search_target, excl='noise', dir='../data/processed/')
noise_path = '../data/processed/white_noise_sd-1.npz'
ref_path = '../data/inv/thresh_lp/ref_measures.npz'
save_path = '../data/inv/thresh_lp/'
# ANALYSIS SETTINGS:
add_noise = False
save_snippets = add_noise and (target == 'Omocestus_rufipes')
plot_results = False
example_scales = np.array([0, 1, 10, 30, 100])
scales = np.geomspace(0.01, 10000, 100)
scales = np.unique(np.concatenate((scales, example_scales)))
scales = np.geomspace(0.01, 10000, 1000)
scales = np.unique(np.concatenate(([0], scales, example_scales)))
thresh_rel = np.array([0.5, 1, 3])
kern_specs = np.array([
[1, 0.008],
@@ -28,12 +30,15 @@ kern_specs = np.array([
])
# PREPARATION:
pure_noise = np.load(noise_path)['inv']
if add_noise:
pure_noise = np.load(noise_path)['inv']
# Define kernel-specific threshold values based on pure-noise response SD:
thresh_abs = np.load(ref_path)['conv'][None, :] * thresh_rel[:, None]
# EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)):
save_detailed = example_file in name
print(f'Processing {name}')
save_name = save_path + name
# Get adapted envelope (prior to convolution):
data, config = load_data(data_path, files='inv')
@@ -44,28 +49,25 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
start, end = data['songs_0'].ravel()
segment = (time >= start) & (time <= end)
# Normalize song component:
song /= song[segment].std()
# Reduce to kernel subset:
kern_inds = find_kern_specs(config['k_specs'], kerns=kern_specs)
config['kernels'] = config['kernels'][:, kern_inds]
config['k_specs'] = config['k_specs'][kern_inds, :]
config['k_props'] = [config['k_props'][i] for i in kern_inds]
# Get normalized noise component:
noise = pure_noise[:song.shape[0]]
if add_noise:
# Get normalized noise component:
noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std()
# Normalize both components:
song /= song[segment].std()
noise /= noise[segment].std()
# Define kernel-specific threshold values based on pure-noise response SD:
ref_conv = convolve_kernels(noise, config['kernels'], config['k_specs'])
thresh_abs = ref_conv[segment, :].std(axis=0, keepdims=True) * thresh_rel[:, None]
# Prepare measure storage:
measure_inv = np.zeros((scales.size,), dtype=float)
# Prepare storage:
measure_feat = np.zeros((scales.size, kern_specs.shape[0], thresh_rel.size), dtype=float)
if save_snippets:
# Prepare snippet storage:
if save_detailed:
# Prepare optional storage:
measure_inv = np.zeros((scales.size,), dtype=float)
snip_inv = np.zeros((song.size, example_scales.size), dtype=float)
shape = (song.size, kern_specs.shape[0], example_scales.size, thresh_rel.size)
snip_conv = np.zeros(shape[:-1], dtype=float)
@@ -82,20 +84,21 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
# Add noise:
scaled_song += noise
# Log input intensity measure:
measure_inv[i] = scaled_song[segment].std()
if save_detailed:
# Log input intensity measure:
measure_inv[i] = scaled_song[segment].std()
# Process mixture:
scaled_conv = convolve_kernels(scaled_song, config['kernels'], config['k_specs'])
# Log threshold-independent snippet data:
if save_snippets and scale in example_scales:
if save_detailed and scale in example_scales:
save_ind = np.nonzero(example_scales == scale)[0][0]
snip_inv[:, save_ind] = scaled_song
snip_conv[:, :, save_ind] = scaled_conv
# Execute piecewise again:
for j, thresholds in enumerate(thresh_abs):
for j, thresholds in enumerate(thresh_abs[:, kern_inds]):
# Process mixture further:
scaled_bi = (scaled_conv > thresholds).astype(float)
@@ -103,11 +106,11 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
padtype='fixed', padlen=config['padlen'])
# Log threshold-dependent snippet data:
if save_snippets and scale in example_scales:
if save_detailed and scale in example_scales:
snip_bi[:, :, save_ind, j] = scaled_bi
snip_feat[:, :, save_ind, j] = scaled_feat
# Log intensity measure:
# Log output intensity measure:
measure_feat[i, :, j] = scaled_feat[segment, :].mean(axis=0)
# Overview plot:
@@ -133,18 +136,19 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
data = dict(
scales=scales,
example_scales=example_scales,
measure_inv=measure_inv,
measure_feat=measure_feat,
thresh_rel=thresh_rel,
thresh_abs=thresh_abs,
)
if save_snippets:
if save_detailed:
data.update(dict(
measure_inv=measure_inv,
snip_inv=snip_inv,
snip_conv=snip_conv,
snip_bi=snip_bi,
snip_feat=snip_feat,
))
save_name = save_path + name
if add_noise:
save_name += '_noise'
else:

View File

@@ -9,7 +9,7 @@ from IPython import embed
save_path = '../data/processed/white_noise'
stages = ['raw', 'filt', 'env', 'log', 'inv', 'conv', 'bi', 'feat']
sds = [1]
dur = 60
dur = 180
# Interactivity:
reload_saved = False
@@ -45,6 +45,7 @@ for sd in sds:
# Generate white noise signal:
noise = rng.normal(loc=0, scale=sd, size=n_samples)
print('Got your no(i)se!')
# Fetch and store representations:
save = None if save_path is None else save_path + f'_sd-{sd}.npz'

View File

@@ -7,7 +7,7 @@ from IPython import embed
## SETTINGS:
# General:
mode = ['log_hp', 'thresh_lp', 'full'][2]
mode = ['log_hp', 'thresh_lp', 'full', 'short'][3]
noise_path = '../data/processed/white_noise_sd-1.npz'
save_path = '../data/inv/'
pad = np.array([0.1, 0.9])
@@ -15,7 +15,8 @@ pad = np.array([0.1, 0.9])
stages = dict(
log_hp=['filt', 'env', 'log', 'inv'],
thresh_lp=['inv', 'conv', 'feat'],
full=['raw', 'filt', 'env', 'log', 'inv', 'conv', 'feat']
full=['raw', 'filt', 'env', 'log', 'inv', 'conv', 'feat'],
short=['raw', 'filt', 'env', 'conv', 'feat']
)[mode]
# PROCESSING:
@@ -49,6 +50,12 @@ elif mode == 'thresh_lp':
padtype='fixed', padlen=config['padlen'])
elif mode == 'full':
data = process_signal(config, stages, signal=starter, rate=config['rate'])[0]
elif mode == 'short':
data = process_signal(config, ['raw', 'filt', 'env'], signal=starter, rate=config['rate'])[0]
data['conv'] = convolve_kernels(data['env'], config['kernels'], config['k_specs'])
data['feat'] = sosfilter((data['conv'] > config['feat_thresh']).astype(float),
config['env_rate'], config['feat_fcut'], 'lp',
padtype='fixed', padlen=config['padlen'])
# Get measures:
measures = {}