Added units to the axis labels of nearly all graphs.

Overhauled fig_invariance_full.pdf.
Added legends to some of the figures.
j-hartling
2026-04-28 19:43:05 +02:00
parent 7e1aa8721a
commit e70d100655
40 changed files with 965 additions and 471 deletions

View File

@@ -5,13 +5,13 @@ from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
# 'Chorthippus_biguttulus',
# 'Chorthippus_mollis',
# 'Chrysochraon_dispar',
# 'Euchorthippus_declivus',
# 'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
# 'Pseudochorthippus_parallelus',
]
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
search_path = '../data/inv/full/'
@@ -30,11 +30,15 @@ for i, species in enumerate(target_species):
for j, path in enumerate(all_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
data, config = load_data(path, 'scales', ['measure', 'thresh'])
if j == 0:
# Prepare species-specific storage:
species_data = dict(scales=data['scales'])
species_data = dict(
scales=data['scales'],
thresh_rel=data['thresh_rel'],
thresh_abs=data['thresh_abs']
)
for stage in stages:
mkey = f'measure_{stage}'
shape = data[mkey].shape + (len(all_paths),)
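The condensing scripts stack each measure array along a new trailing axis, one slice per song file, before averaging. A minimal NumPy sketch of that storage pattern (shapes are made up; load_data and the real keys are not reproduced here):

import numpy as np

n_scales, n_kernels, n_files = 50, 12, 4            # hypothetical dimensions
measure = np.random.rand(n_scales, n_kernels)       # stand-in for one file's data['measure_feat']

# Preallocate with an extra trailing file axis, as in shape = data[mkey].shape + (len(all_paths),):
file_data = np.full(measure.shape + (n_files,), np.nan)
for k in range(n_files):
    # In the actual script each slice comes from a different song file.
    file_data[..., k] = measure

# Aggregate across the file axis, ignoring missing slices:
print(np.nanmean(file_data, axis=-1).shape)         # (50, 12)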

View File

@@ -5,13 +5,13 @@ from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
# 'Chorthippus_biguttulus',
# 'Chorthippus_mollis',
# 'Chrysochraon_dispar',
# 'Euchorthippus_declivus',
# 'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
# 'Pseudochorthippus_parallelus',
]
stages = ['filt', 'env', 'inv', 'conv', 'feat']
search_path = '../data/inv/short/'
@@ -30,11 +30,15 @@ for i, species in enumerate(target_species):
for j, path in enumerate(all_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
data, config = load_data(path, 'scales', ['measure', 'thresh'])
if j == 0:
# Prepare species-specific storage:
species_data = dict(scales=data['scales'])
species_data = dict(
scales=data['scales'],
thresh_rel=data['thresh_rel'],
thresh_abs=data['thresh_abs']
)
for stage in stages:
mkey = f'measure_{stage}'
shape = data[mkey].shape + (len(all_paths),)
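load_data now receives a list of keywords (['measure', 'thresh']) instead of a single 'measure', so the relative and absolute thresholds travel along with the measures. load_data itself belongs to thunderhopper and is not shown here; a rough NumPy-only analogue of the keyword-selection idea (file name and keys are invented):

import numpy as np

np.savez('demo.npz',
         scales=np.arange(5),
         measure_feat=np.random.rand(5, 3),
         thresh_rel=np.array([0.0, 0.5, 1.0]),
         thresh_abs=np.array([10.0, 20.0, 30.0]))
archive = np.load('demo.npz')
keywords = ['measure', 'thresh']
# Keep 'scales' plus everything whose key starts with one of the requested keywords:
data = {key: archive[key] for key in archive.files
        if key == 'scales' or any(key.startswith(kw) for kw in keywords)}
print(sorted(data))    # ['measure_feat', 'scales', 'thresh_abs', 'thresh_rel']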

View File

@@ -1,6 +1,7 @@
import numpy as np
from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.modeltools import load_data, save_data
from misc_functions import divide_by_zero
from IPython import embed
def sort_files_by_rec(paths, sources=['JJ', 'SLO']):
@@ -58,7 +59,7 @@ if mode == 'song':
'max',
'base',
'range'
][1]
][4]
suffix = dict(
none='_unnormed',
min='_norm-min',
@@ -108,19 +109,22 @@ for i, species in enumerate(target_species):
if normalization == 'min':
# Minimum normalization:
data[mkey] /= data[mkey].min(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].min(axis=0))
# data[mkey] /= data[mkey].min(axis=0, keepdims=True)
elif normalization == 'max':
# Maximum normalization:
data[mkey] /= data[mkey].max(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].max(axis=0))
# data[mkey] /= data[mkey].max(axis=0, keepdims=True)
elif normalization == 'base':
# Noise baseline normalization:
data[mkey] /= ref_data[stage]
data[mkey] = divide_by_zero(data[mkey], data[mkey][0])
# data[mkey] /= data[mkey][0]
elif normalization == 'range':
# Min-max normalization:
min_measure = data[mkey].min(axis=0, keepdims=True)
max_measure = data[mkey].max(axis=0, keepdims=True)
data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
data[mkey] = divide_by_zero(data[mkey] - min_measure, max_measure - min_measure)
# data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
file_data[stage][..., k] = data[mkey]
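All normalization branches now route through divide_by_zero from misc_functions, so that zero denominators (e.g. a flat measure in min-max normalization) yield NaN instead of warnings or inf. The helper itself is not part of this diff; a minimal sketch of what such a function could look like (an assumption, not the project's actual implementation):

import numpy as np

def divide_by_zero(numerator, denominator):
    # Hypothetical helper: element-wise division that leaves NaN wherever
    # the denominator is zero, without emitting divide-by-zero warnings.
    numerator = np.asarray(numerator, dtype=float)
    denominator = np.asarray(denominator, dtype=float)
    out = np.full(np.broadcast(numerator, denominator).shape, np.nan)
    np.divide(numerator, denominator, out=out, where=denominator != 0)
    return out

# Example: min-max normalization with one constant column:
measure = np.array([[1.0, 2.0], [1.0, 4.0], [1.0, 6.0]])
span = measure.max(axis=0) - measure.min(axis=0)     # [0.0, 4.0]
print(divide_by_zero(measure - measure.min(axis=0), span))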

View File

@@ -1,18 +1,18 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from misc_functions import sort_files_by_rec, divide_by_zero
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
# 'Chorthippus_biguttulus',
# 'Chorthippus_mollis',
# 'Chrysochraon_dispar',
# 'Euchorthippus_declivus',
# 'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
# 'Pseudochorthippus_parallelus',
]
sources = [
'BM04',
@@ -32,7 +32,7 @@ normalization = [
'max',
'base',
'range',
][2]
][4]
suffix = dict(
none='_unnormed',
min='_norm-min',
@@ -58,7 +58,7 @@ for i, species in enumerate(target_species):
for k, path in enumerate(rec_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
data, config = load_data(path, 'scales', ['measure', 'thresh'])
if k == 0:
# Prepare song file-specific storage:
@@ -80,18 +80,22 @@ for i, species in enumerate(target_species):
if normalization == 'min':
# Minimum normalization:
data[mkey] /= data[mkey].min(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].min(axis=0))
# data[mkey] /= data[mkey].min(axis=0, keepdims=True)
elif normalization == 'max':
# Maximum normalization:
data[mkey] /= data[mkey].max(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].max(axis=0))
# data[mkey] /= data[mkey].max(axis=0, keepdims=True)
elif normalization == 'base':
# Noise baseline normalization:
data[mkey] /= data[mkey][0]
data[mkey] = divide_by_zero(data[mkey], data[mkey][0])
# data[mkey] /= data[mkey][0]
elif normalization == 'range':
# Min-max normalization:
min_measure = data[mkey].min(axis=0, keepdims=True)
max_measure = data[mkey].max(axis=0, keepdims=True)
data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
data[mkey] = divide_by_zero(data[mkey] - min_measure, max_measure - min_measure)
# data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
file_data[stage][..., k] = data[mkey]
@@ -101,7 +105,11 @@ for i, species in enumerate(target_species):
rec_sd[f'sd_{stage}'][..., j] = np.nanstd(file_data[stage], axis=-1)
# Save condensed recording data:
archive = dict(scales=data['scales'])
archive = dict(
scales=data['scales'],
thresh_rel=data['thresh_rel'],
thresh_abs=data['thresh_abs'],
)
archive.update(rec_mean)
archive.update(rec_sd)
save_data(save_path + species + suffix, archive, config, overwrite=True)
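Because the divide_by_zero normalization can leave NaNs in individual song files, the per-recording statistics are taken with NaN-aware reductions (np.nanmean / np.nanstd), so a single undefined value no longer blanks out the whole curve. A short self-contained illustration (synthetic shapes):

import numpy as np

n_scales, n_files = 6, 3
file_data = np.random.rand(n_scales, n_files)
file_data[2, 1] = np.nan                   # one file has an undefined value

print(file_data.mean(axis=-1))             # plain mean: NaN at index 2
print(np.nanmean(file_data, axis=-1))      # NaN-aware: finite everywhere
print(np.nanstd(file_data, axis=-1))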

View File

@@ -2,7 +2,7 @@ import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import shorten_species, sort_files_by_rec
from misc_functions import shorten_species, sort_files_by_rec, divide_by_zero
from IPython import embed
# GENERAL SETTINGS:
@@ -41,7 +41,7 @@ suffix = dict(
base='_norm-base',
range='_norm-range'
)[normalization]
plot_overview = True
plot_overview = False
# PREPARATION:
if plot_overview:
@@ -59,7 +59,7 @@ for i, species in enumerate(target_species):
# Fetch all species-specific song files:
all_paths = search_files(species, incl=mode, ext='npz', dir=search_path)
# Sort song files by recording (one or more per source):
sorted_paths = sort_files_by_rec(all_paths, sources)
@@ -82,18 +82,22 @@ for i, species in enumerate(target_species):
# Log song file data:
if normalization == 'min':
# Minimum normalization:
measure /= measure.min(axis=0, keepdims=True)
measure = divide_by_zero(measure, measure.min(axis=0, keepdims=True))
# measure /= measure.min(axis=0, keepdims=True)
elif normalization == 'max':
# Maximum normalization:
measure /= measure.max(axis=0, keepdims=True)
measure = divide_by_zero(measure, measure.max(axis=0, keepdims=True))
# measure /= measure.max(axis=0, keepdims=True)
elif normalization == 'base':
# Noise baseline normalization:
measure /= measure[0]
measure = divide_by_zero(measure, measure[0])
# measure /= measure[0]
elif normalization == 'range':
# Min-max normalization:
min_measure = measure.min(axis=0, keepdims=True)
max_measure = measure.max(axis=0, keepdims=True)
measure = (measure - min_measure) / (max_measure - min_measure)
measure = divide_by_zero(measure - min_measure, max_measure - min_measure)
# measure = (measure - min_measure) / (max_measure - min_measure)
file_data[:, k] = measure
@@ -101,8 +105,8 @@ for i, species in enumerate(target_species):
axes[0, i].plot(scales, measure, c='k', alpha=0.5)
# Get recording statistics:
rec_mean[:, j] = file_data.mean(axis=1)
rec_sd[:, j] = file_data.std(axis=1)
rec_mean[:, j] = np.nanmean(file_data, axis=1)
rec_sd[:, j] = np.nanstd(file_data, axis=1)
if plot_overview:
axes[1, i].plot(scales, rec_mean[:, j], c='k')

View File

@@ -1,7 +1,7 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from misc_functions import sort_files_by_rec, divide_by_zero
from IPython import embed
# GENERAL SETTINGS:
@@ -26,14 +26,14 @@ search_path = '../data/inv/rect_lp/'
save_path = '../data/inv/rect_lp/condensed/'
# ANALYSIS SETTINGS:
mode = ['pure', 'noise'][1]
mode = ['pure', 'noise'][0]
normalization = [
'none',
'min',
'max',
'base',
'range',
][0]
][4]
suffix = dict(
none='_unnormed',
min='_norm-min',
@@ -79,18 +79,22 @@ for i, species in enumerate(target_species):
if normalization == 'min':
# Minimum normalization:
data[mkey] /= data[mkey].min(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].min(axis=0))
# data[mkey] /= data[mkey].min(axis=0, keepdims=True)
elif normalization == 'max':
# Maximum normalization:
data[mkey] /= data[mkey].max(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].max(axis=0))
# data[mkey] /= data[mkey].max(axis=0, keepdims=True)
elif normalization == 'base':
# Noise baseline normalization:
data[mkey] /= data[mkey][0]
data[mkey] = divide_by_zero(data[mkey], data[mkey][0])
# data[mkey] /= data[mkey][0]
elif normalization == 'range':
# Min-max normalization:
min_measure = data[mkey].min(axis=0, keepdims=True)
max_measure = data[mkey].max(axis=0, keepdims=True)
data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
data[mkey] = divide_by_zero(data[mkey] - min_measure, max_measure - min_measure)
# data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
file_data[stage][..., k] = data[mkey]

View File

@@ -1,18 +1,18 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from misc_functions import sort_files_by_rec, divide_by_zero
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
# 'Chorthippus_biguttulus',
# 'Chorthippus_mollis',
# 'Chrysochraon_dispar',
# 'Euchorthippus_declivus',
# 'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
# 'Pseudochorthippus_parallelus',
]
sources = [
'BM04',
@@ -32,7 +32,7 @@ normalization = [
'max',
'base',
'range',
][2]
][4]
suffix = dict(
none='_unnormed',
min='_norm-min',
@@ -58,7 +58,7 @@ for i, species in enumerate(target_species):
for k, path in enumerate(rec_paths):
# Load invariance data:
data, config = load_data(path, 'scales', 'measure')
data, config = load_data(path, 'scales', ['measure', 'thresh'])
if k == 0:
# Prepare song file-specific storage:
@@ -80,18 +80,22 @@ for i, species in enumerate(target_species):
if normalization == 'min':
# Minimum normalization:
data[mkey] /= data[mkey].min(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].min(axis=0))
# data[mkey] /= data[mkey].min(axis=0, keepdims=True)
elif normalization == 'max':
# Maximum normalization:
data[mkey] /= data[mkey].max(axis=0, keepdims=True)
data[mkey] = divide_by_zero(data[mkey], data[mkey].max(axis=0))
# data[mkey] /= data[mkey].max(axis=0, keepdims=True)
elif normalization == 'base':
# Noise baseline normalization:
data[mkey] /= data[mkey][0]
data[mkey] = divide_by_zero(data[mkey], data[mkey][0])
# data[mkey] /= data[mkey][0]
elif normalization == 'range':
# Min-max normalization:
min_measure = data[mkey].min(axis=0, keepdims=True)
max_measure = data[mkey].max(axis=0, keepdims=True)
data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
data[mkey] = divide_by_zero(data[mkey] - min_measure, max_measure - min_measure)
# data[mkey] = (data[mkey] - min_measure) / (max_measure - min_measure)
file_data[stage][..., k] = data[mkey]
@@ -101,7 +105,11 @@ for i, species in enumerate(target_species):
rec_sd[f'sd_{stage}'][..., j] = np.nanstd(file_data[stage], axis=-1)
# Save condensed recording data:
archive = dict(scales=data['scales'])
archive = dict(
scales=data['scales'],
thresh_rel=data['thresh_rel'],
thresh_abs=data['thresh_abs']
)
archive.update(rec_mean)
archive.update(rec_sd)
save_data(save_path + species + suffix, archive, config)
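The condensed archive now also stores thresh_rel and thresh_abs next to the per-stage means and SDs, so downstream figure scripts can select thresholds without reloading the raw files. Independent of thunderhopper's save_data, the assembly is plain dictionary composition; a rough NumPy-only sketch (keys and shapes are invented):

import numpy as np

rec_mean = {'mean_feat': np.random.rand(6, 3, 2)}
rec_sd = {'sd_feat': np.random.rand(6, 3, 2)}
archive = dict(
    scales=np.linspace(0, 1, 6),
    thresh_rel=np.array([0, 0.5, 1, 1.5, 2, 2.5, 3]),
    thresh_abs=np.random.rand(7),
)
archive.update(rec_mean)
archive.update(rec_sd)
# save_data(save_path + species + suffix, archive, config) in the actual script;
# a plain-NumPy equivalent of writing such an archive would be:
np.savez('condensed_demo.npz', **archive)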

View File

@@ -2,7 +2,7 @@ import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import shorten_species, sort_files_by_rec
from misc_functions import shorten_species, sort_files_by_rec, divide_by_zero
from IPython import embed
# GENERAL SETTINGS:
@@ -91,20 +91,24 @@ for i, species in enumerate(target_species):
rec_sd = np.zeros(shape, dtype=float)
# Log song file data:
if normalization == 'min':
# Minimum normalization:
measure /= measure.min(axis=0, keepdims=True)
elif normalization == 'max':
# Maximum normalization:
measure /= measure.max(axis=0, keepdims=True)
elif normalization == 'base':
# Noise baseline normalization:
measure /= measure[0]
elif normalization == 'range':
# Min-max normalization:
min_measure = measure.min(axis=0, keepdims=True)
max_measure = measure.max(axis=0, keepdims=True)
measure = (measure - min_measure) / (max_measure - min_measure)
if normalization == 'min':
# Minimum normalization:
measure = divide_by_zero(measure, measure.min(axis=0))
# measure /= measure.min(axis=0, keepdims=True)
elif normalization == 'max':
# Maximum normalization:
measure = divide_by_zero(measure, measure.max(axis=0))
# measure /= measure.max(axis=0, keepdims=True)
elif normalization == 'base':
# Noise baseline normalization:
measure = divide_by_zero(measure, measure[0])
# measure /= measure[0]
elif normalization == 'range':
# Min-max normalization:
min_measure = measure.min(axis=0, keepdims=True)
max_measure = measure.max(axis=0, keepdims=True)
measure = divide_by_zero(measure - min_measure, max_measure - min_measure)
# measure = (measure - min_measure) / (max_measure - min_measure)
file_data[..., k] = measure
@@ -115,8 +119,8 @@ for i, species in enumerate(target_species):
axes[0, i].plot(scales, measure[:, m, l], c=c, alpha=0.5)
# Get recording statistics:
rec_mean[..., j] = file_data.mean(axis=-1)
rec_sd[..., j] = file_data.std(axis=-1)
rec_mean[..., j] = np.nanmean(file_data, axis=-1)
rec_sd[..., j] = np.nanstd(file_data, axis=-1)
if plot_overview:
for l, thresh in enumerate(thresh_rel):

View File

@@ -144,11 +144,11 @@ xlabels = dict(
big='distance [cm]',
)
ylabels = dict(
filt='$x_{\\text{filt}}$',
env='$x_{\\text{env}}$',
log='$x_{\\text{db}}$',
inv='$x_{\\text{adapt}}$',
conv='$c_i$',
filt='$x_{\\text{filt}}$\n$[\\text{a.u.}]$',
env='$x_{\\text{env}}$\n$[\\text{a.u.}]$',
log='$x_{\\text{log}}$\n$[\\text{dB}]$',
inv='$x_{\\text{adapt}}$\n$[\\text{dB}]$',
conv='$c_i$\n$[\\text{dB}]$',
feat='$f_i$',
big=['measure', 'rel. measure', 'norm. measure']
)
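The stage labels now carry their unit on a second line via a newline embedded in the label string. A standalone Matplotlib sketch of the same idea (using \mathrm, which plain mathtext accepts; the scripts above rely on \text through their own plot style):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
# Two-line y-label: quantity symbol on top, unit in brackets underneath.
ax.set_ylabel('$x_{\\mathrm{env}}$\n$[\\mathrm{a.u.}]$',
              rotation=0, labelpad=30, va='center')
plt.show()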

View File

@@ -0,0 +1,294 @@
import plotstyle_plt
import string
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from thunderhopper.filtertools import find_kern_specs
from color_functions import load_colors
from plot_functions import hide_ticks, ylabel, super_xlabel, letter_subplots,\
ylimits, title_subplot
from misc_functions import exclude_zero_scale, reduce_kernel_set
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][5]
modes = [
'unnormed',
'norm-base',
'norm-min',
'norm-max',
]
full_folder = '../data/inv/full/condensed/'
short_folder = '../data/inv/short/condensed/'
save_path = '../figures/fig_invariance_full_short.pdf'
load_kwargs = dict(
files=['scales', 'mean_feat', 'sd_feat'],
keywords=['thresh'],
)
# ANALYSIS SETTINGS:
exclude_zero = True
scale_subset_kwargs = dict(
combis=[['mean', 'sd'], ['feat']],
)
kern_subset_kwargs = dict(
combis=[['mean', 'sd'], ['feat']],
keys=['thresh_abs'],
)
thresh_rel = np.array([0, 0.5, 1, 1.5, 2, 2.5, 3])
percentiles = np.array([
[25, 75],
# [0, 100],
])
# SUBSET SETTINGS:
types = np.array([1, 2, 3])
# types = [1, -1, 2, -2, 3, -3, 4, -4, 5, -5, 6, -6, 7, -7, 8, -8, 9, -9, 10, -10]
sigmas = np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# sigmas = [0.001, 0.002, 0.004, 0.008, 0.016, 0.032]
kernels = None
reduce_kernels = any(var is not None for var in [kernels, types, sigmas])
# GRAPH SETTINGS:
fig_kwargs = dict(
figsize=(32/2.54, 32/2.54),
)
super_grid_kwargs = dict(
nrows=1,
ncols=2,
wspace=0,
hspace=0,
left=0,
right=1,
bottom=0,
top=1,
)
subfig_specs = dict(
full=(0, 0),
short=(0, 1),
)
col_width = 0.85
col_rest = 1 - col_width
full_grid_kwargs = dict(
nrows=len(modes),
ncols=1,
wspace=0,
hspace=0.1,
left=col_rest,
right=1,
bottom=0.05,
top=0.9
)
short_grid_kwargs = dict(
nrows=len(modes),
ncols=1,
wspace=full_grid_kwargs['wspace'],
hspace=full_grid_kwargs['hspace'],
left=col_rest / 2,
right=1 - col_rest / 2,
bottom=full_grid_kwargs['bottom'],
top=full_grid_kwargs['top']
)
# PLOT SETTINGS:
fs = dict(
lab_norm=16,
lab_tex=20,
letter=22,
tit_norm=16,
tit_tex=20,
bar=16,
)
colors = load_colors('../data/stage_colors.npz')
feat_colors = load_colors('../data/feat_colors_all.npz')
lw = dict(
feat=3,
plateau=1.5
)
line_kwargs = dict(
lw=lw['feat']
)
fill_kwargs = dict(
alpha=0.15
)
xlabels = dict(
super='scale $\\alpha$',
)
ylabels = {
'unnormed': '$\\mu_{f_i}$',
'norm-base': '$\\mu_{f_i}\\,/\\,\\mu_{f_i}\\,[\\,\\eta\\,]$',
'norm-min': '$\\mu_{f_i}\\,/\\,\\min\\,[\\,\\mu_{f_i}\\,]$',
'norm-max': '$\\mu_{f_i}\\,/\\,\\max\\,[\\,\\mu_{f_i}\\,]$'
}
xlab_kwargs = dict(
y=0,
fontsize=fs['lab_norm'],
ha='center',
va='bottom',
)
ylab_kwargs = dict(
x=0,
fontsize=fs['lab_tex'],
ha='center',
va='top',
)
ylims = {
'unnormed': [0, 1],
'norm-base': [0, None],
'norm-min': [0, None],
'norm-max': [0, 1]
}
yloc = {
'unnormed': 0.5,
'norm-base': 0.5,
'norm-min': 0.5,
'norm-max': 0.5
}
title_kwargs = dict(
x=0.5,
y=1,
ha='center',
va='bottom',
fontsize=fs['tit_norm'],
)
titles = dict(
full='Including $x_\\text{dB}$',
short='Excluding $x_\\text{dB}$',
)
letter_kwargs = dict(
xref=0.01,
y=1,
ha='left',
va='center',
fontsize=fs['letter'],
)
letters = dict(
full=string.ascii_lowercase[0::2],
short=string.ascii_lowercase[1::2],
)
plateau_settings = dict(
low=0.05,
high=0.95,
first=True,
last=True,
condense=None,
)
plateau_line_kwargs = dict(
lw=lw['plateau'],
ls='--',
zorder=1,
)
plateau_dot_kwargs = dict(
marker='o',
markersize=8,
markeredgewidth=1,
clip_on=False,
)
# EXECUTION:
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Prepare full analysis axes:
full_subfig = fig.add_subfigure(super_grid[*subfig_specs['full']])
full_grid = full_subfig.add_gridspec(**full_grid_kwargs)
full_axes = np.zeros((len(modes),), dtype=object)
for i, mode in enumerate(modes):
full_axes[i] = full_subfig.add_subplot(full_grid[i, 0])
# full_axes[i].yaxis.set_major_locator(plt.MultipleLocator(yloc[mode]))
full_axes[i].set_xscale('symlog', linthresh=0.01, linscale=0.5)
ylabel(full_axes[i], ylabels[mode], transform=full_subfig.transSubfigure, **ylab_kwargs)
if i == 0:
title_subplot(full_axes[i], titles['full'], **title_kwargs)
if i < full_grid_kwargs['nrows'] - 1:
hide_ticks(full_axes[i], 'bottom')
letter_subplots(full_axes, letters['full'], ref=full_subfig, **letter_kwargs)
# Prepare short analysis axes:
short_subfig = fig.add_subfigure(super_grid[*subfig_specs['short']])
short_grid = short_subfig.add_gridspec(**short_grid_kwargs)
short_axes = np.zeros((len(modes),), dtype=object)
for i, mode in enumerate(modes):
short_axes[i] = short_subfig.add_subplot(short_grid[i, 0])
# short_axes[i].yaxis.set_major_locator(plt.MultipleLocator(yloc[mode]))
short_axes[i].set_xscale('symlog', linthresh=0.01, linscale=0.5)
hide_ticks(short_axes[i], 'left')
if i == 0:
title_subplot(short_axes[i], titles['short'], **title_kwargs)
if i < short_grid_kwargs['nrows'] - 1:
hide_ticks(short_axes[i], 'bottom')
letter_subplots(short_axes, letters['short'], ref=short_subfig, **letter_kwargs)
super_xlabel(xlabels['super'], fig, full_axes[-1], short_axes[-1],
left_fig=full_subfig, right_fig=short_subfig, **xlab_kwargs)
# Run through normalization modes:
for mode, full_ax, short_ax in zip(modes, full_axes, short_axes):
# Load invariance data:
full_path = search_files(target_species, incl=mode, dir=full_folder)[0]
short_path = search_files(target_species, incl=mode, dir=short_folder)[0]
full_data, config = load_data(full_path, **load_kwargs)
short_data, _ = load_data(short_path, **load_kwargs)
# Reduce datasets:
if reduce_kernels:
kern_inds = find_kern_specs(config['k_specs'], kernels, types, sigmas)
full_data = reduce_kernel_set(full_data, kern_inds, **kern_subset_kwargs)
short_data = reduce_kernel_set(short_data, kern_inds, **kern_subset_kwargs)
config['k_specs'] = config['k_specs'][kern_inds, :]
config['kernels'] = config['kernels'][:, kern_inds]
if exclude_zero:
full_data = exclude_zero_scale(full_data, **scale_subset_kwargs)
short_data = exclude_zero_scale(short_data, **scale_subset_kwargs)
# Average over recordings:
full_measure = full_data['mean_feat'].mean(axis=-1)
short_measure = short_data['mean_feat'].mean(axis=-1)
# Condense over kernels:
full_median = np.nanmedian(full_measure, axis=1)
full_spread = np.nanpercentile(full_measure, percentiles, axis=1)
short_median = np.nanmedian(short_measure, axis=1)
short_spread = np.nanpercentile(short_measure, percentiles, axis=1)
# Determine shared ylims:
if None in ylims[mode]:
min_val, max_val = ylims[mode]
full_limits = ylimits(full_median, minval=min_val, maxval=max_val)
short_limits = ylimits(short_median, minval=min_val, maxval=max_val)
ylims[mode] = [min(full_limits[0], short_limits[0]),
max(full_limits[1], short_limits[1])]
if np.inf in ylims[mode]:
embed()
# Plot full analysis results:
for i, thresh in enumerate(thresh_rel):
full_ax.plot(full_data['scales'], full_median[:, i], lw=lw['feat'])
for spread in full_spread[:, :, :, i]:
full_ax.fill_between(full_data['scales'], *spread, **fill_kwargs)
full_ax.set_xlim(full_data['scales'][0], full_data['scales'][-1])
full_ax.set_ylim(ylims[mode])
# Plot short analysis results:
for i, thresh in enumerate(thresh_rel):
short_ax.plot(short_data['scales'], short_median[:, i], lw=lw['feat'])
for spread in short_spread[:, :, :, i]:
short_ax.fill_between(short_data['scales'], *spread, **fill_kwargs)
short_ax.set_xlim(short_data['scales'][0], short_data['scales'][-1])
short_ax.set_ylim(ylims[mode])
plt.show()
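The new figure condenses each normalization mode over kernels into a median curve with an interquartile band, drawn on a symlog-scaled x-axis. Stripped of the thunderhopper loaders and the subfigure layout, the plotting core reduces to roughly this (synthetic data, percentile pair as in percentiles above):

import numpy as np
import matplotlib.pyplot as plt

scales = np.logspace(-2, 1, 40)                        # stand-in for data['scales']
measure = np.random.rand(scales.size, 20)              # (scales, kernels), synthetic

median = np.nanmedian(measure, axis=1)
lower, upper = np.nanpercentile(measure, [25, 75], axis=1)

fig, ax = plt.subplots()
ax.set_xscale('symlog', linthresh=0.01, linscale=0.5)  # linear near zero, logarithmic elsewhere
ax.plot(scales, median, lw=3)
ax.fill_between(scales, lower, upper, alpha=0.15)
ax.set_xlim(scales[0], scales[-1])
plt.show()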

View File

@@ -5,7 +5,8 @@ from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from thunderhopper.filtertools import find_kern_specs
from misc_functions import get_saturation
from misc_functions import get_saturation, reduce_kernel_set, exclude_zero_scale,\
divide_by_zero
from color_functions import load_colors
from plot_functions import hide_axis, reorder_by_sd, ylimits, super_xlabel, ylabel, title_subplot,\
plot_line, strip_zeros, time_bar, assign_colors,\
@@ -20,30 +21,15 @@ def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
ymin=ymin, ymax=ymax, **kwargs))
return handles
def plot_curves(ax, scales, measures, fill_kwargs={}, **kwargs):
if measures.ndim == 1:
ax.plot(scales, measures, **kwargs)[0]
return measures
median_measure = np.median(measures, axis=1)
spread_measure = [np.percentile(measures, 25, axis=1),
np.percentile(measures, 75, axis=1)]
ax.plot(scales, median_measure, **kwargs)[0]
ax.fill_between(scales, *spread_measure, **fill_kwargs)
return median_measure
def exclude_zero_scale(data, stages):
inds = data['scales'] > 0
data['scales'] = data['scales'][inds]
for stage in stages:
data[f'mean_{stage}'] = data[f'mean_{stage}'][inds, ...]
return data
def reduce_kernel_set(data, inds, keyword, stages=['conv', 'feat']):
for stage in stages:
key = f'{keyword}_{stage}'
data[key] = data[key][:, inds, ...]
return data
def plot_curves(ax, scales, measures, fill_kwargs={}, compress=False, **kwargs):
if not compress or measures.ndim == 1:
handles = ax.plot(scales, measures, **kwargs)
return handles, measures
median_measure = np.nanmedian(measures, axis=1)
spread_measure = np.nanpercentile(measures, [25, 75], axis=1)
line_handle = ax.plot(scales, median_measure, **kwargs)[0]
fill_handle = ax.fill_between(scales, *spread_measure, **fill_kwargs)
return [line_handle, fill_handle], median_measure
# GENERAL SETTINGS:
target_species = [
@@ -65,29 +51,28 @@ example_file = {
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
raw_path = search_files(target_species, incl='unnormed', dir='../data/inv/full/condensed/')[0]
base_path = search_files(target_species, incl='base', dir='../data/inv/full/condensed/')[0]
range_path = search_files(target_species, incl='range', dir='../data/inv/full/condensed/')[0]
snip_path = search_files(example_file, dir='../data/inv/full/')[0]
data_path = search_files(example_file, dir='../data/inv/full/')[0]
save_path = '../figures/fig_invariance_full.pdf'
# ANALYSIS SETTINGS:
exclude_zero = True
compress_kernels = True
thresh_rel = np.array([0, 0.5, 1, 1.5, 2, 2.5, 3])[4]
scale_subset_kwargs = dict(
combis=[['measure'], stages],
)
kern_subset_kwargs = dict(
combis=[['measure', 'snip'], ['conv', 'feat']],
keys=['thresh_abs'],
)
# SUBSET SETTINGS:
types = np.array([1, -1, 2, -2, 3, -3, 4, -4, 5, -5, 6, -6, 7, -7, 8, -8, 9, -9, 10, -10])
sigmas = np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# types = [1, -1, 2, -2, 3, -3, 4, -4, 5, -5, 6, -6, 7, -7, 8, -8, 9, -9, 10, -10]
sigmas = np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# sigmas = [0.001, 0.002, 0.004, 0.008, 0.016, 0.032]
kernels = np.array([
[1, 0.002],
[-1, 0.002],
[2, 0.004],
[-2, 0.004],
[3, 0.032],
[-3, 0.032]
])
kernels = None
reduce_kernels = any(var is not None for var in [kernels, types, sigmas])
# GRAPH SETTINGS:
fig_kwargs = dict(
@@ -113,7 +98,7 @@ snip_grid_kwargs = dict(
ncols=None,
wspace=0.1,
hspace=0.4,
left=0.11,
left=0.13,
right=0.98,
bottom=0.08,
top=0.95
@@ -138,9 +123,11 @@ fs = dict(
tit_tex=20,
bar=16,
)
colors = load_colors('../data/stage_colors.npz')
conv_colors = load_colors('../data/conv_colors_all.npz')
feat_colors = load_colors('../data/feat_colors_all.npz')
stage_colors = load_colors('../data/stage_colors.npz')
kern_colors = dict(
conv=load_colors('../data/conv_colors_all.npz'),
feat=load_colors('../data/feat_colors_all.npz')
)
lw = dict(
filt=0.25,
env=0.25,
@@ -150,16 +137,17 @@ lw = dict(
feat=1,
big=3,
plateau=1.5,
legend=5,
)
xlabels = dict(
big='scale $\\alpha$',
)
ylabels = dict(
filt='$x_{\\text{filt}}$',
env='$x_{\\text{env}}$',
log='$x_{\\text{db}}$',
inv='$x_{\\text{adapt}}$',
conv='$c_i$',
filt='$x_{\\text{filt}}$\n$[\\text{a.u.}]$',
env='$x_{\\text{env}}$\n$[\\text{a.u.}]$',
log='$x_{\\text{log}}$\n$[\\text{dB}]$',
inv='$x_{\\text{adapt}}$\n$[\\text{dB}]$',
conv='$c_i$\n$[\\text{dB}]$',
feat='$f_i$',
big=['measure', 'rel. measure', 'norm. measure']
)
@@ -170,11 +158,12 @@ xlab_big_kwargs = dict(
va='bottom',
)
ylab_snip_kwargs = dict(
x=0,
x=0.03,
fontsize=fs['lab_tex'],
rotation=0,
ha='left',
va='center'
ha='center',
va='center',
ma='center'
)
ylab_big_kwargs = dict(
x=-0.2,
@@ -228,6 +217,29 @@ bar_kwargs = dict(
va='center',
)
)
leg_kwargs = dict(
ncols=1,
loc='upper left',
bbox_to_anchor=(0.05, 0.5, 0.5, 0.5),
frameon=False,
prop=dict(
size=20,
),
borderpad=0,
borderaxespad=0,
handlelength=1,
columnspacing=1,
handletextpad=0.5,
labelspacing=0.1
)
leg_labels = dict(
filt='$x_{\\text{filt}}$',
env='$x_{\\text{env}}$',
log='$x_{\\text{log}}$',
inv='$x_{\\text{adapt}}$',
conv='$c_i$',
feat='$f_i$'
)
plateau_settings = dict(
low=0.05,
high=0.95,
@@ -249,26 +261,30 @@ plateau_dot_kwargs = dict(
# EXECUTION:
# Load raw (unnormed) invariance data:
data, config = load_data(raw_path, files='scales', keywords='mean')
if exclude_zero:
data = exclude_zero_scale(data, stages)
scales = data['scales']
# Load invariance data:
data, config = load_data(data_path, keywords=['snip', 'scales', 'measure', 'thresh'])
t_full = np.arange(data['snip_filt'].shape[0]) / config['rate']
# Load snippet data:
snip, _ = load_data(snip_path, files='example_scales', keywords='snip')
t_full = np.arange(snip['snip_filt'].shape[0]) / config['rate']
snip_scales = snip['example_scales']
# Optional kernel subset:
reduce_kernels = False
if any(var is not None for var in [kernels, types, sigmas]):
# Reduce kernels:
if reduce_kernels:
kern_inds = find_kern_specs(config['k_specs'], kernels, types, sigmas)
data = reduce_kernel_set(data, kern_inds, keyword='mean')
snip = reduce_kernel_set(snip, kern_inds, keyword='snip')
data = reduce_kernel_set(data, kern_inds, **kern_subset_kwargs)
config['k_specs'] = config['k_specs'][kern_inds, :]
config['kernels'] = config['kernels'][:, kern_inds]
reduce_kernels = True
# Reduce thresholds:
thresh_ind = np.nonzero(data['thresh_rel'] == thresh_rel)[0][0]
data['measure_feat'] = data['measure_feat'][:, :, thresh_ind]
data['snip_feat'] = data['snip_feat'][:, :, :, thresh_ind]
# Remember pure-noise reference measures:
ref_data = {stage: data[f'measure_{stage}'][0, ...] for stage in stages}
# Reduce scales:
if exclude_zero:
data = exclude_zero_scale(data, **scale_subset_kwargs)
scales = data['scales']
snip_scales = data['example_scales']
# Adjust grid parameters:
snip_grid_kwargs['ncols'] = snip_scales.size
@@ -307,114 +323,125 @@ for i in range(big_grid.ncols):
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
if i < (big_grid.ncols - 1):
ax.set_ylim(scales[0], scales[-1])
else:
ax.set_ylim(0, 1)
# if i < (big_grid.ncols - 1):
# ax.set_ylim(scales[0], scales[-1])
# else:
# ax.set_ylim(0, 1)
big_axes[i] = ax
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'bcd', **letter_big_kwargs)
if True:
# Plot filtered snippets:
plot_snippets(snip_axes[0, :], t_full, snip['snip_filt'],
c=colors['filt'], lw=lw['filt'])
plot_snippets(snip_axes[0, :], t_full, data['snip_filt'],
c=stage_colors['filt'], lw=lw['filt'])
# Plot envelope snippets:
plot_snippets(snip_axes[1, :], t_full, snip['snip_env'],
ymin=0, c=colors['env'], lw=lw['env'])
plot_snippets(snip_axes[1, :], t_full, data['snip_env'],
ymin=0, c=stage_colors['env'], lw=lw['env'])
# Plot logarithmic snippets:
plot_snippets(snip_axes[2, :], t_full, snip['snip_log'],
c=colors['log'], lw=lw['log'])
plot_snippets(snip_axes[2, :], t_full, data['snip_log'],
c=stage_colors['log'], lw=lw['log'])
# Plot invariant snippets:
plot_snippets(snip_axes[3, :], t_full, snip['snip_inv'],
c=colors['inv'], lw=lw['inv'])
plot_snippets(snip_axes[3, :], t_full, data['snip_inv'],
c=stage_colors['inv'], lw=lw['inv'])
# Plot kernel response snippets:
all_handles = plot_snippets(snip_axes[4, :], t_full, snip['snip_conv'],
c=colors['conv'], lw=lw['conv'])
all_handles = plot_snippets(snip_axes[4, :], t_full, data['snip_conv'],
c=stage_colors['conv'], lw=lw['conv'])
for i, handles in enumerate(all_handles):
assign_colors(handles, config['k_specs'][:, 0], conv_colors)
reorder_by_sd(handles, snip['snip_conv'][..., i])
assign_colors(handles, config['k_specs'][:, 0], kern_colors['conv'])
reorder_by_sd(handles, data['snip_conv'][..., i])
# Plot feature snippets:
all_handles = plot_snippets(snip_axes[5, :], t_full, snip['snip_feat'],
ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
all_handles = plot_snippets(snip_axes[5, :], t_full, data['snip_feat'],
ymin=0, ymax=1, c=stage_colors['feat'], lw=lw['feat'])
for i, handles in enumerate(all_handles):
assign_colors(handles, config['k_specs'][:, 0], feat_colors)
reorder_by_sd(handles, snip['snip_feat'][..., i])
del snip
assign_colors(handles, config['k_specs'][:, 0], kern_colors['feat'])
reorder_by_sd(handles, data['snip_feat'][..., i])
# Remember saturation points:
# Plot analysis results:
crit_inds, crit_scales = {}, {}
# Unnormed measures:
leg_handles = []
for stage in stages:
# Plot average intensity measure across recordings:
curve = plot_curves(big_axes[0], scales, data[f'mean_{stage}'].mean(axis=-1),
c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
mkey = f'measure_{stage}'
measure = data[mkey]
color = stage_colors[stage]
fill_kwargs = dict(color=color, alpha=0.25)
# Plot raw intensity measure curve(s):
handles, curve = plot_curves(big_axes[0], scales, measure, fill_kwargs,
compress_kernels, c=color, lw=lw['big'])
if not compress_kernels and stage in ['conv', 'feat']:
assign_colors(handles, config['k_specs'][:, 0], kern_colors[stage])
# Add stage-specific proxy legend artist:
leg_handles.append(big_axes[0].plot([], [], c=color, lw=lw['big'],
label=leg_labels[stage])[0])
# Indicate saturation point(s):
if stage in ['log', 'inv', 'conv', 'feat']:
ind = get_saturation(curve, **plateau_settings)[1]
scale = scales[ind]
big_axes[0].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].vlines(scale, big_axes[0].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
# Log saturation point:
crit_inds[stage] = ind
crit_scales[stage] = scale
del data
if compress_kernels or stage in ['log', 'inv']:
scale = scales[ind]
crit_scales[stage] = scale
big_axes[0].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].plot(scale, 0, mfc=color, mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].vlines(scale, big_axes[0].get_ylim()[0], curve[ind],
color=color, **plateau_line_kwargs)
# Noise baseline-related measures:
data, _ = load_data(base_path, files='scales', keywords='mean')
if exclude_zero:
data = exclude_zero_scale(data, stages)
if reduce_kernels:
data = reduce_kernel_set(data, kern_inds, keyword='mean')
for stage in stages:
# Plot average intensity measure across recordings:
curve = plot_curves(big_axes[1], scales, data[f'mean_{stage}'].mean(axis=-1),
c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
# Relate to noise baseline:
measure = divide_by_zero(data[mkey], ref_data[stage])
# Plot baseline-normalized intensity measure curve(s):
handles, curve = plot_curves(big_axes[1], scales, measure, fill_kwargs,
compress_kernels, c=color, lw=lw['big'])
if not compress_kernels and stage in ['conv', 'feat']:
assign_colors(handles, config['k_specs'][:, 0], kern_colors[stage])
# Indicate saturation point(s):
if stage in ['log', 'inv', 'conv', 'feat']:
ind, scale = crit_inds[stage], crit_scales[stage]
big_axes[1].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].vlines(scale, big_axes[1].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
del data
ind = crit_inds[stage]
scale = crit_scales[stage]
if compress_kernels or stage in ['log', 'inv']:
big_axes[1].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].plot(scale, 0, mfc=color, mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].vlines(scale, big_axes[1].get_ylim()[0], curve[ind],
color=color, **plateau_line_kwargs)
if stage in ['filt', 'env']:
continue
# Min-max normalized measures:
data, _ = load_data(range_path, files='scales', keywords='mean')
if exclude_zero:
data = exclude_zero_scale(data, stages)
if reduce_kernels:
data = reduce_kernel_set(data, kern_inds, keyword='mean')
for stage in ['log', 'inv', 'conv', 'feat']:
# Plot average intensity measure across recordings:
curve = plot_curves(big_axes[2], scales, data[f'mean_{stage}'].mean(axis=-1),
c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Relate to curve maximum:
measure = data[mkey] / np.nanmax(data[mkey], axis=0)
# Indicate saturation point:
# Plot max-normalized intensity measure curve(s):
handles, curve = plot_curves(big_axes[2], scales, measure, fill_kwargs,
compress_kernels, c=color, lw=lw['big'])
if not compress_kernels and stage in ['conv', 'feat']:
assign_colors(handles, config['k_specs'][:, 0], kern_colors[stage])
# Indicate saturation point(s):
if stage in ['log', 'inv', 'conv', 'feat']:
ind, scale = crit_inds[stage], crit_scales[stage]
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
del data
ind = crit_inds[stage]
scale = crit_scales[stage]
if compress_kernels or stage in ['log', 'inv']:
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=color, mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], curve[ind],
color=color, **plateau_line_kwargs)
# Add legend to first analysis axis:
legend = big_axes[0].legend(handles=leg_handles, **leg_kwargs)
[handle.set_lw(lw['legend']) for handle in legend.get_lines()]
# Save graph:
if save_path is not None:
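Since each analysis axis can hold either one compressed curve per stage or many per-kernel curves, the new legend is built from proxy artists: empty plot calls that carry only a color and a label, whose legend lines are then thickened. A standalone sketch of that pattern (colors and data are invented):

import numpy as np
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
x = np.linspace(0, 1, 50)
stage_colors = {'conv': 'tab:blue', 'feat': 'tab:orange'}   # hypothetical stage colors
leg_handles = []
for i, (stage, color) in enumerate(stage_colors.items()):
    for j in range(5):
        # Many thin per-kernel curves per stage...
        ax.plot(x, np.sin(x * (j + 1) + i), c=color, lw=1, alpha=0.5)
    # ...but only one proxy legend entry each (no data, just color and label):
    leg_handles.append(ax.plot([], [], c=color, lw=3, label=stage)[0])
legend = ax.legend(handles=leg_handles, frameon=False)
# Thicken only the legend lines, leaving the plotted curves untouched:
[handle.set_lw(5) for handle in legend.get_lines()]
plt.show()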

View File

@@ -4,23 +4,21 @@ import matplotlib.pyplot as plt
from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from thunderhopper.filtertools import find_kern_specs
from misc_functions import get_saturation
from color_functions import load_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, title_subplot,\
plot_line, plot_barcode, strip_zeros, time_bar,\
from plot_functions import hide_axis, reorder_by_sd, ylimits, super_xlabel, ylabel, title_subplot,\
plot_line, strip_zeros, time_bar, assign_colors,\
letter_subplot, letter_subplots
from IPython import embed
def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
ymin, ymax = ylimits(snippets, minval=ymin, maxval=ymax, pad=0.05)
handles = []
for i, ax in enumerate(axes):
plot_line(ax, time, snippets[:, ..., i], ymin=ymin, ymax=ymax, **kwargs)
return None
def plot_bi_snippets(axes, time, snippets, **kwargs):
for i, ax in enumerate(axes):
plot_barcode(ax, time, snippets[:, ..., i], **kwargs)
return None
handles.append(plot_line(ax, time, snippets[:, ..., i],
ymin=ymin, ymax=ymax, **kwargs))
return handles
def plot_curves(ax, scales, measures, fill_kwargs={}, **kwargs):
if measures.ndim == 1:
@@ -33,10 +31,19 @@ def plot_curves(ax, scales, measures, fill_kwargs={}, **kwargs):
ax.fill_between(scales, *spread_measure, **fill_kwargs)
return median_measure
def show_saturation(ax, scales, measures, high=0.95, **kwargs):
high_ind = get_saturation(measures, high=high)[1]
return ax.plot(scales[high_ind], 0, transform=ax.get_xaxis_transform(),
marker='o', ms=10, zorder=6, clip_on=False, **kwargs)
def exclude_zero_scale(data, stages):
inds = data['scales'] > 0
data['scales'] = data['scales'][inds]
for stage in stages:
data[f'mean_{stage}'] = data[f'mean_{stage}'][inds, ...]
return data
def reduce_kernel_set(data, inds, keyword, stages=['conv', 'feat']):
for stage in stages:
key = f'{keyword}_{stage}'
data[key] = data[key][:, inds, ...]
return data
# GENERAL SETTINGS:
target_species = [
@@ -57,19 +64,34 @@ example_file = {
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
data_paths = search_files(target_species, dir='../data/inv/full/condensed/')
snip_paths = search_files(example_file, dir='../data/inv/full/')
ref_path = '../data/inv/full/ref_measures.npz'
save_path = '../figures/fig_invariance_full.pdf'
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
load_kwargs = dict(
files=stages,
keywords=['scales', 'snip', 'measure']
)
raw_path = search_files(target_species, incl='unnormed', dir='../data/inv/full/condensed/')[0]
base_path = search_files(target_species, incl='base', dir='../data/inv/full/condensed/')[0]
range_path = search_files(target_species, incl='range', dir='../data/inv/full/condensed/')[0]
snip_path = search_files(example_file, dir='../data/inv/full/')[0]
save_path = '../figures/fig_invariance_full.pdf'
# ANALYSIS SETTINGS:
exclude_zero = True
# SUBSET SETTINGS:
types = np.array([1, -1, 2, -2, 3, -3, 4, -4, 5, -5, 6, -6, 7, -7, 8, -8, 9, -9, 10, -10])
sigmas = np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# types = [1, -1, 2, -2, 3, -3, 4, -4, 5, -5, 6, -6, 7, -7, 8, -8, 9, -9, 10, -10]
# sigmas = [0.001, 0.002, 0.004, 0.008, 0.016, 0.032]
kernels = np.array([
[1, 0.002],
[-1, 0.002],
[2, 0.004],
[-2, 0.004],
[3, 0.032],
[-3, 0.032]
])
kernels = None
# GRAPH SETTINGS:
fig_kwargs = dict(
figsize=(32/2.54, 20/2.54),
figsize=(32/2.54, 32/2.54),
)
super_grid_kwargs = dict(
nrows=2,
@@ -91,20 +113,20 @@ snip_grid_kwargs = dict(
ncols=None,
wspace=0.1,
hspace=0.4,
left=0.08,
right=0.95,
left=0.11,
right=0.98,
bottom=0.08,
top=0.95
)
big_grid_kwargs = dict(
nrows=1,
ncols=3,
wspace=0.2,
wspace=0.4,
hspace=0,
left=snip_grid_kwargs['left'],
right=0.96,
bottom=0.2,
top=0.95
right=snip_grid_kwargs['right'],
bottom=0.13,
top=0.98
)
# PLOT SETTINGS:
@@ -117,15 +139,17 @@ fs = dict(
bar=16,
)
colors = load_colors('../data/stage_colors.npz')
conv_colors = load_colors('../data/conv_colors_all.npz')
feat_colors = load_colors('../data/feat_colors_all.npz')
lw = dict(
filt=0.25,
env=0.25,
log=0.25,
inv=0.25,
conv=0.25,
bi=0,
feat=1,
big=3
big=3,
plateau=1.5,
)
xlabels = dict(
big='scale $\\alpha$',
@@ -134,11 +158,10 @@ ylabels = dict(
filt='$x_{\\text{filt}}$',
env='$x_{\\text{env}}$',
log='$x_{\\text{db}}$',
inv='$x_{\\text{inv}}$',
inv='$x_{\\text{adapt}}$',
conv='$c_i$',
bi='$b_i$',
feat='$f_i$',
big=['intensity', 'rel. intensity', 'norm. intensity']
big=['measure', 'rel. measure', 'norm. measure']
)
xlab_big_kwargs = dict(
y=0,
@@ -154,7 +177,7 @@ ylab_snip_kwargs = dict(
va='center'
)
ylab_big_kwargs = dict(
x=-0.12,
x=-0.2,
fontsize=fs['lab_norm'],
ha='center',
va='bottom',
@@ -164,7 +187,7 @@ yloc = dict(
env=1000,
log=50,
inv=20,
conv=2,
conv=1,
feat=1,
)
title_kwargs = dict(
@@ -205,122 +228,199 @@ bar_kwargs = dict(
va='center',
)
)
# PREPARATION:
ref_data = dict(np.load(ref_path))
plateau_settings = dict(
low=0.05,
high=0.95,
first=True,
last=True,
condense=None,
)
plateau_line_kwargs = dict(
lw=lw['plateau'],
ls='--',
zorder=1,
)
plateau_dot_kwargs = dict(
marker='o',
markersize=8,
markeredgewidth=1,
clip_on=False,
)
# EXECUTION:
for data_path in data_paths:
print(f'Processing {data_path}')
# Load invariance data:
data, config = load_data(data_path, **load_kwargs)
t_full = np.arange(data['snip_filt'].shape[0]) / config['rate']
# Load raw (unnormed) invariance data:
data, config = load_data(raw_path, files='scales', keywords='mean')
if exclude_zero:
data = exclude_zero_scale(data, stages)
scales = data['scales']
# Adjust grid parameters:
snip_grid_kwargs['ncols'] = data['example_scales'].size
# Load snippet data:
snip, _ = load_data(snip_path, files='example_scales', keywords='snip')
t_full = np.arange(snip['snip_filt'].shape[0]) / config['rate']
snip_scales = snip['example_scales']
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Optional kernel subset:
reduce_kernels = False
if any(var is not None for var in [kernels, types, sigmas]):
kern_inds = find_kern_specs(config['k_specs'], kernels, types, sigmas)
data = reduce_kernel_set(data, kern_inds, keyword='mean')
snip = reduce_kernel_set(snip, kern_inds, keyword='snip')
config['k_specs'] = config['k_specs'][kern_inds, :]
config['kernels'] = config['kernels'][:, kern_inds]
reduce_kernels = True
# Prepare stage-specific snippet axes:
snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs)
snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
ax = snip_subfig.add_subplot(snip_grid[i, j])
ax.set_xlim(t_full[0], t_full[-1])
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
hide_axis(ax, 'bottom')
if i == 0:
title = title_subplot(ax, f'$\\alpha={strip_zeros(data["example_scales"][j])}$',
ref=snip_subfig, **title_kwargs)
if j == 0:
ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
else:
hide_axis(ax, 'left')
snip_axes[i, j] = ax
time_bar(snip_axes[-1, -1], **bar_kwargs)
letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
# Adjust grid parameters:
snip_grid_kwargs['ncols'] = snip_scales.size
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i in range(big_grid.ncols):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(data['scales'][0], data['scales'][-1])
ax.set_xscale('symlog', linthresh=data['scales'][1], linscale=0.5)
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
xlabel(ax, xlabels['big'], transform=big_subfig, **xlab_big_kwargs)
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
big_axes[i] = ax
letter_subplots(big_axes, 'bc', **letter_big_kwargs)
plt.show()
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# # Plot filtered snippets:
# plot_snippets(snip_axes[0, :], t_full, data['snip_filt'],
# c=colors['filt'], lw=lw['filt'])
# Prepare stage-specific snippet axes:
snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs)
snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
ax = snip_subfig.add_subplot(snip_grid[i, j])
ax.set_xlim(t_full[0], t_full[-1])
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
hide_axis(ax, 'bottom')
if i == 0:
title = title_subplot(ax, f'$\\alpha={strip_zeros(snip_scales[j])}$',
ref=snip_subfig, **title_kwargs)
if j == 0:
ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
else:
hide_axis(ax, 'left')
snip_axes[i, j] = ax
time_bar(snip_axes[-1, -1], **bar_kwargs)
letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
# # Plot envelope snippets:
# plot_snippets(snip_axes[1, :], t_full, data['snip_env'],
# ymin=0, c=colors['env'], lw=lw['env'])
# Prepare analysis axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i in range(big_grid.ncols):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(scales[0], scales[-1])
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
if i < (big_grid.ncols - 1):
ax.set_ylim(scales[0], scales[-1])
else:
ax.set_ylim(0, 1)
big_axes[i] = ax
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'bcd', **letter_big_kwargs)
# # Plot logarithmic snippets:
# plot_snippets(snip_axes[2, :], t_full, data['snip_log'],
# c=colors['log'], lw=lw['log'])
if True:
# Plot filtered snippets:
plot_snippets(snip_axes[0, :], t_full, snip['snip_filt'],
c=colors['filt'], lw=lw['filt'])
# # Plot invariant snippets:
# plot_snippets(snip_axes[3, :], t_full, data['snip_inv'],
# c=colors['inv'], lw=lw['inv'])
# Plot envelope snippets:
plot_snippets(snip_axes[1, :], t_full, snip['snip_env'],
ymin=0, c=colors['env'], lw=lw['env'])
# # Plot kernel response snippets:
# plot_snippets(snip_axes[4, :], t_full, data['snip_conv'],
# c=colors['conv'], lw=lw['conv'])
# Plot logarithmic snippets:
plot_snippets(snip_axes[2, :], t_full, snip['snip_log'],
c=colors['log'], lw=lw['log'])
# # Plot feature snippets:
# plot_snippets(snip_axes[5, :], t_full, data['snip_feat'],
# ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
# Plot invariant snippets:
plot_snippets(snip_axes[3, :], t_full, snip['snip_inv'],
c=colors['inv'], lw=lw['inv'])
# Analysis results:
scales_rel = data['scales'] - data['scales'][0]
scales_rel /= scales_rel[-1]
for stage in stages:
measure = data[f'measure_{stage}']
# Plot kernel response snippets:
all_handles = plot_snippets(snip_axes[4, :], t_full, snip['snip_conv'],
c=colors['conv'], lw=lw['conv'])
for i, handles in enumerate(all_handles):
assign_colors(handles, config['k_specs'][:, 0], conv_colors)
reorder_by_sd(handles, snip['snip_conv'][..., i])
# Plot unmodified intensity measures:
curve = plot_curves(big_axes[0], data['scales'], measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
if stage in ['log', 'inv', 'conv', 'feat']:
show_saturation(big_axes[0], data['scales'], curve, c=colors[stage])
# Plot feature snippets:
all_handles = plot_snippets(snip_axes[5, :], t_full, snip['snip_feat'],
ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
for i, handles in enumerate(all_handles):
assign_colors(handles, config['k_specs'][:, 0], feat_colors)
reorder_by_sd(handles, snip['snip_feat'][..., i])
del snip
# # Relate to pure-noise reference:
# norm_measure = measure / ref_data[stage]
# Remember saturation points:
crit_inds, crit_scales = {}, {}
# # Plot noise-related intensity measures:
# big_axes[1].plot(data['scales'], norm_measure, c=colors[stage], lw=lw['big'])
# Unnormed measures:
for stage in stages:
# Plot average intensity measure across recordings:
curve = plot_curves(big_axes[0], scales, data[f'mean_{stage}'].mean(axis=-1),
c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
ind = get_saturation(curve, **plateau_settings)[1]
scale = scales[ind]
big_axes[0].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].vlines(scale, big_axes[0].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
# Log saturation point:
crit_inds[stage] = ind
crit_scales[stage] = scale
del data
# Normalize measure to [0, 1]:
min_measure = measure.min(axis=0)
max_measure = measure.max(axis=0)
norm_measure = (measure - min_measure) / (max_measure - min_measure)
# Noise baseline-related measures:
data, _ = load_data(base_path, files='scales', keywords='mean')
if exclude_zero:
data = exclude_zero_scale(data, stages)
if reduce_kernels:
data = reduce_kernel_set(data, kern_inds, keyword='mean')
for stage in stages:
# Plot average intensity measure across recordings:
curve = plot_curves(big_axes[1], scales, data[f'mean_{stage}'].mean(axis=-1),
c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
ind, scale = crit_inds[stage], crit_scales[stage]
big_axes[1].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].vlines(scale, big_axes[1].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
del data
# Plot normalized intensity measures:
curve = plot_curves(big_axes[1], data['scales'], norm_measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
if stage in ['log', 'inv', 'conv', 'feat']:
show_saturation(big_axes[1], data['scales'], curve, c=colors[stage])
# Min-max normalized measures:
data, _ = load_data(range_path, files='scales', keywords='mean')
if exclude_zero:
data = exclude_zero_scale(data, stages)
if reduce_kernels:
data = reduce_kernel_set(data, kern_inds, keyword='mean')
for stage in ['log', 'inv', 'conv', 'feat']:
# Plot average intensity measure across recordings:
curve = plot_curves(big_axes[2], scales, data[f'mean_{stage}'].mean(axis=-1),
c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# # Plot over relative scales:
# plot_curves(big_axes[2], scales_rel, norm_measure, c=colors[stage], lw=lw['big'],
# fill_kwargs=dict(color=colors[stage], alpha=0.25))
# scales_rel = curve - curve.min()
# scales_rel /= scales_rel.max()
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
ind, scale = crit_inds[stage], crit_scales[stage]
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
del data
if save_path is not None:
fig.savefig(save_path)
plt.show()
# Save graph:
if save_path is not None:
file_name = save_path.replace('.pdf', f'_{target_species}.pdf')
fig.savefig(file_name)
plt.show()
print('Done.')
embed()
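The saturation markers in these panels sit on the x-axis independently of the y-limits because they are drawn with a blended transform (x in data coordinates, y in axes coordinates), together with a dashed vline up to the curve. A minimal sketch of the marker pattern; the saturation index itself comes from get_saturation, which is not reproduced here, so a simple threshold crossing stands in for it:

import numpy as np
import matplotlib.pyplot as plt

scales = np.linspace(0.01, 10, 100)
curve = 1 - np.exp(-scales)                         # synthetic saturating curve
ind = int(np.argmax(curve >= 0.95))                 # stand-in for get_saturation()[1]

fig, ax = plt.subplots()
ax.plot(scales, curve, c='tab:green', lw=3)
# Dashed guide line from the bottom of the axes up to the curve value:
ax.vlines(scales[ind], ax.get_ylim()[0], curve[ind], color='tab:green', ls='--', lw=1.5)
# Marker pinned to the x-axis: x in data coordinates, y in axes-fraction coordinates.
ax.plot(scales[ind], 0, marker='o', markersize=8, mfc='tab:green', mec='k',
        clip_on=False, zorder=6, transform=ax.get_xaxis_transform())
plt.show()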

View File

@@ -95,7 +95,7 @@ noise_grid_kwargs = dict(
top=edge_padding + block_height,
height_ratios=[1, 2, 1]
)
big_col_shift = -0.12
big_col_shift = -0.04
big_grid_kwargs = dict(
nrows=1,
ncols=3,
@@ -135,11 +135,10 @@ xlabels = dict(
big='scale $\\alpha$',
)
ylabels = dict(
env='$x_{\\text{env}}$',
log='$x_{\\text{dB}}$',
inv='$x_{\\text{adapt}}$',
env='$x_{\\text{env}}$\n$[\\text{a.u.}]$',
log='$x_{\\text{log}}$\n$[\\text{dB}]$',
inv='$x_{\\text{adapt}}$\n$[\\text{dB}]$',
big_pure='$\\sigma_x$',
big_log='$\\sigma_x\\,[\\text{dB}]$',
big_noise='$\\sigma_x\\,/\\,\\sigma_{\\eta}$' if relate_to_noise else None,
)
xlab_big_kwargs = dict(
@@ -148,23 +147,17 @@ xlab_big_kwargs = dict(
ha='center',
va='bottom',
)
ylab_big_left_kwargs = dict(
ylab_big_kwargs = dict(
x=-0.2,
fontsize=fs['lab_tex'],
ha='center',
va='bottom'
)
ylab_big_right_kwargs = dict(
x=1.2,
fontsize=fs['lab_tex'],
ha='center',
va='top'
)
ylab_snip_kwargs = dict(
x=0,
x=0.03,
fontsize=fs['lab_tex'],
rotation=0,
ha='left',
ha='center',
va='center',
)
yloc = dict(
@@ -231,19 +224,41 @@ bar_kwargs = dict(
va='center',
)
)
leg_kwargs = dict(
stage_leg_kwargs = dict(
ncols=1,
loc='upper left',
bbox_to_anchor=(0.05, 0.5, 0.5, 0.5),
frameon=False,
prop=dict(
size=20,
),
borderpad=0,
borderaxespad=0,
handlelength=1,
columnspacing=1,
handletextpad=0.5,
labelspacing=0
)
stage_leg_labels = dict(
env='$x_{\\text{env}}$',
log='$x_{\\text{log}}$',
inv='$x_{\\text{adapt}}$',
)
spec_leg_kwargs = dict(
ncols=2,
loc='upper right',
bbox_to_anchor=(0, 0.6, 1, 0.4),
frameon=False,
prop=dict(
size=12,
size=13.5,
style='italic',
),
borderpad=0,
borderaxespad=0,
handlelength=0.5,
columnspacing=1,
handletextpad=0.5,
labelspacing=0.25,
)
diag_kwargs = dict(
c=(0.3,) * 3,
@@ -374,17 +389,8 @@ for i, scales in enumerate([pure_scales, noise_scales, noise_scales]):
ax.set_ylim(0.9, 30)
big_axes[i] = ax
shift_subplot(big_axes[0], dx=big_col_shift)
ylabel(big_axes[0], ylabels['big_pure'], transform=big_axes[0].transAxes, **ylab_big_left_kwargs)
ylabel(big_axes[1], ylabels['big_noise'], transform=big_axes[1].transAxes, **ylab_big_left_kwargs)
big_twin = big_axes[0].twinx()
hide_axis(big_twin, 'left')
big_twin.spines['right'].set_visible(True)
big_twin.set_position(big_axes[0].get_position().bounds)
big_twin.set_ylim(scales[0], scales[-1])
big_twin.set_yscale('symlog', linthresh=scales[1], linscale=0.5)
ylabel(big_twin, ylabels['big_log'], transform=big_twin.transAxes, **ylab_big_right_kwargs)
color_axis(big_axes[0], colors['env'], side='left')
color_axis(big_twin, colors['log'], side='right')
ylabel(big_axes[0], ylabels['big_pure'], transform=big_axes[0].transAxes, **ylab_big_kwargs)
ylabel(big_axes[1], ylabels['big_noise'], transform=big_axes[1].transAxes, **ylab_big_kwargs)
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'cde', **letter_big_kwargs)
@@ -421,9 +427,11 @@ plot_snippets(noise_axes[2, :], t_full, noise_data['snip_inv'],
time_bar(noise_axes[-1, -1], **bar_kwargs)
# Plot pure-song measures (ideal):
big_axes[0].plot(pure_scales, pure_data['measure_env'], c=colors['env'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_inv'], c=colors['inv'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_env'], c=colors['env'], lw=lw['big'], label=stage_leg_labels['env'])
big_axes[0].plot(pure_scales, pure_data['measure_log'], c=colors['log'], lw=lw['big'], label=stage_leg_labels['log'])
big_axes[0].plot(pure_scales, pure_data['measure_inv'], c=colors['inv'], lw=lw['big'], label=stage_leg_labels['inv'])
legend = big_axes[0].legend(**stage_leg_kwargs)
[h.set_lw(lw['legend']) for h in legend.legend_handles]
# Plot noise-song measures (limited):
big_axes[1].plot(noise_scales, noise_data['measure_env'], c=colors['env'], lw=lw['big'])
@@ -458,7 +466,7 @@ for i, (species, measure) in enumerate(species_measures.items()):
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], measure[ind],
color=color, **plateau_line_kwargs)
legend = big_axes[2].legend(**leg_kwargs)
legend = big_axes[2].legend(**spec_leg_kwargs)
[h.set_lw(lw['legend']) for h in legend.legend_handles]
if save_path is not None:

View File

@@ -83,8 +83,8 @@ pure_grid_kwargs = dict(
ncols=None,
wspace=0.1,
hspace=0.15,
left=0.08 - snip_col_shift,
right=0.95,
left=0.1 - snip_col_shift,
right=0.98,
bottom=1 - block_height - edge_padding,
top=1 - edge_padding,
height_ratios=[1, 1]
@@ -132,23 +132,22 @@ species_colors = load_colors('../data/species_colors.npz')
lw = dict(
snip=0.5,
big=3,
spec=2,
spec=1.5,
legend=5,
)
dash_cycle = 6 # points
ls_env = [
(0, np.array((0.2, 0.8)) * dash_cycle),
(0, np.array((0.1,) * 10) * dash_cycle),
(0, np.array((0.6, 0.1, 0.2, 0.1)) * dash_cycle),
(0, np.array((0.5, 0.5)) * dash_cycle),
(0, np.array((0.3, 0.2, 0.3, 0.2)) * dash_cycle),
'solid',
] # [np.nan, 2500, 250, 25]
xlabels = dict(
big='scale $\\alpha$',
)
ylabels = dict(
raw='$x$',
filt='$x_{\\text{filt}}$',
env='$x_{\\text{env}}$',
filt='$x_{\\text{filt}}$\n$[\\text{a.u.}]$',
env='$x_{\\text{env}}$\n$[\\text{a.u.}]$',
big_pure='$\\sigma_x$',
big_noise='$\\sigma_x\\,/\\,\\sigma_{\\eta}$' if relate_to_noise else None,
)
@@ -159,23 +158,17 @@ xlab_big_kwargs = dict(
va='bottom',
)
ylab_snip_kwargs = dict(
x=0,
x=0.025,
fontsize=fs['lab_tex'],
rotation=0,
ha='left',
ha='center',
va='center',
)
ylab_pure_kwargs = dict(
x=0,
ylab_big_kwargs = dict(
x=-0.2,
fontsize=fs['lab_tex'],
ha='center',
va='top',
)
ylab_noise_kwargs = dict(
y=0.5,
fontsize=fs['lab_tex'],
ha='center',
va='top',
va='bottom',
)
ylim_zoom_factor = 0.03
yloc = dict(
@@ -346,9 +339,8 @@ for i, scales in enumerate([pure_scales, noise_scales, noise_scales]):
ax.set_ylim(0.1, 100)
big_axes[i] = ax
shift_subplot(big_axes[0], dx=big_col_shift)
ylabel(big_axes[0], ylabels['big_pure'], transform=big_subfig.transSubfigure, **ylab_pure_kwargs)
ylabel(big_axes[1], ylabels['big_noise'], transform=big_axes[1].transAxes, **ylab_noise_kwargs,
x=(big_subfig.transSubfigure + big_axes[0].transAxes.inverted()).transform((ylab_pure_kwargs['x'], 0))[0])
ylabel(big_axes[0], ylabels['big_pure'], transform=big_axes[0].transAxes, **ylab_big_kwargs)
ylabel(big_axes[1], ylabels['big_noise'], transform=big_axes[1].transAxes, **ylab_big_kwargs)
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'cde', **letter_big_kwargs)

View File

@@ -154,10 +154,10 @@ xlabels = dict(
big='scale $\\alpha$',
)
ylabels = dict(
filt='$x_{\\text{filt}}$',
env='$x_{\\text{env}}$',
inv='$x_{\\text{adapt}}$',
conv='$c_i$',
filt='$x_{\\text{filt}}$\n$[\\text{a.u.}]$',
env='$x_{\\text{env}}$\n$[\\text{a.u.}]$',
inv='$x_{\\text{adapt}}$\n$[\\text{dB}]$',
conv='$c_i$\n$[\\text{dB}]$',
feat='$f_i$',
big=['measure', 'rel. measure', 'norm. measure']
)

View File

@@ -9,7 +9,7 @@ from misc_functions import shorten_species
from IPython import embed
# GENERAL SETTINGS:
mode = ['pure', 'noise'][1]
mode = ['pure', 'noise'][0]
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',

View File

@@ -98,7 +98,7 @@ snip_grid_kwargs = dict(
ncols=None,
wspace=0.3,
hspace=0,
left=0.2 - snip_col_shift,
left=0.23 - snip_col_shift,
right=0.93,
bottom=0.15,
top=0.95,
@@ -119,7 +119,7 @@ big_grid_kwargs = dict(
ncols=1,
wspace=0,
hspace=0.15,
left=0.17,
left=0.2,
right=0.96,
bottom=0.05,
top=0.99
@@ -152,11 +152,11 @@ xlabels = dict(
sigma='$\\sigma_{\\text{adapt}}$',
)
ylabels = dict(
inv='$x_{\\text{adapt}}$',
conv='$c_i$',
inv='$x_{\\text{adapt}}$\n$[\\text{dB}]$',
conv='$c_i$\n$[\\text{dB}]$',
bi='$b_i$',
feat='$f_i$',
big='$\\mu_f$',
big='$\\mu_{f_i}$',
)
xlab_alpha_kwargs = dict(
y=0.5,
@@ -171,7 +171,7 @@ xlab_sigma_kwargs = dict(
va='bottom',
)
ylab_snip_kwargs = dict(
x=0.08,
x=0.1,
fontsize=fs['lab_tex'],
rotation=0,
ha='center',
@@ -179,7 +179,7 @@ ylab_snip_kwargs = dict(
)
ylab_super_kwargs = dict(
x=0,
fontsize=fs['lab_norm'],
fontsize=fs['lab_tex'],
ha='left',
va='center',
)
@@ -366,7 +366,7 @@ for i in range(thresh_rel.size):
low_box = axes[-1, 0].get_position()
high_box = axes[0, 0].get_position()
[hide_axis(ax, 'left') for ax in axes[1:, 1]]
super_ylabel(f'$\\Theta={strip_zeros(thresh_rel[i])}\\cdot\\sigma_{{\\eta}}$',
super_ylabel(f'$\\Theta_i={strip_zeros(thresh_rel[i])}\\cdot\\sigma_{{\\eta}}$',
snip_subfig, axes[-1, 0], axes[0, 0], **ylab_super_kwargs)
for (ax1, ax2), stage in zip(axes[:, :2], stages):
ax1.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][0]))

View File

@@ -2,6 +2,7 @@ import plotstyle_plt
import glob
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from color_functions import load_colors
from plot_functions import hide_axis, letter_subplots,\
@@ -10,8 +11,8 @@ from plot_functions import hide_axis, letter_subplots,\
from IPython import embed
# GENERAL SETTINGS:
target = 'Omocestus_rufipes'
data_paths = glob.glob(f'../data/processed/{target}*.npz')
example_file = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
data_paths = search_files(example_file, dir='../data/processed/')
stages = ['filt', 'env', 'log', 'inv', 'conv', 'bi', 'feat']
save_path = '../figures/'
@@ -25,7 +26,7 @@ fig_kwargs = dict(
gridspec_kw=dict(
wspace=0.15,
hspace=0.3,
left=0.1,
left=0.12,
right=0.99,
bottom=0.08,
top=0.95
@@ -61,13 +62,13 @@ xlabels = dict(
super='time [s]',
)
ylabels = dict(
filt=r'$x_{\text{filt}}$',
env=r'$x_{\text{env}}$',
log=r'$x_{\text{dB}}$',
inv=r'$x_{\text{adapt}}$',
conv=r'$c_i$',
bi=r'$b_i$',
feat=r'$f_i$'
filt='$x_{\\text{filt}}$\n$[\\text{a.u.}]$',
env='$x_{\\text{env}}$\n$[\\text{a.u.}]$',
log='$x_{\\text{log}}$\n$[\\text{dB}]$',
inv='$x_{\\text{adapt}}$\n$[\\text{dB}]$',
conv='$c_i$\n$[\\text{dB}]$',
bi='$b_i$',
feat='$f_i$',
)
xlab_kwargs = dict(
y=0,
@@ -76,9 +77,9 @@ xlab_kwargs = dict(
fontsize=fs['lab_norm'],
)
ylab_kwargs = dict(
x=0,
x=0.03,
rotation=0,
ha='left',
ha='center',
va='center',
fontsize=fs['lab_tex'],
)
@@ -89,16 +90,16 @@ xloc = dict(
yloc_full = dict(
filt=0.2,
env=0.1,
log=40,
log=50,
inv=20,
conv=2,
conv=1,
feat=1
)
yloc_zoom = dict(
filt=0.1,
env=0.02,
log=40,
inv=20,
log=50,
inv=10,
conv=0.5,
feat=1
)
@@ -202,7 +203,10 @@ for data_path in data_paths:
# Update parameters:
fig_kwargs['gridspec_kw'].update(
left=0.06,
left=0.09,
)
ylab_kwargs.update(
x=0.02,
)
# PART II: FEATURE EXTRACTION STAGE:

View File

@@ -1,5 +1,6 @@
import numpy as np
from scipy.stats import gaussian_kde
from itertools import product
from thunderhopper.filetools import crop_paths
from IPython import embed
@@ -16,6 +17,34 @@ def draw_noise_segment(noise, n):
start = rng.integers(0, noise.shape[0] - n, endpoint=True)
return np.take(noise, np.arange(start, start + n), axis=0)
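# Element-wise division that silences divide-by-zero/invalid warnings and maps non-finite results (inf, nan) to 'replace':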
def divide_by_zero(num, denom, replace=np.nan):
with np.errstate(divide='ignore', invalid='ignore'):
result = np.true_divide(num, denom)
result[~np.isfinite(result)] = replace
return result
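# Drops the zero-scale entry from 'scales' and from each listed array along its first axis.
# 'combis' holds groups of key parts whose product is joined into compound keys like 'mean_filt':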
def exclude_zero_scale(data, keys=None, combis=None):
inds = np.nonzero(data['scales'] > 0)[0]
data['scales'] = data['scales'][inds]
if keys is not None:
for key in keys:
data[key] = data[key][inds, ...]
if combis is not None:
for key1, key2 in product(*combis):
key = f'{key1}_{key2}'
data[key] = data[key][inds, ...]
return data
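# Restricts each listed array to the kernel subset at 'inds' along its second axis; key handling mirrors exclude_zero_scale: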
def reduce_kernel_set(data, inds, keys=None, combis=None):
if keys is not None:
for key in keys:
data[key] = data[key][:, inds, ...]
if combis is not None:
for key1, key2 in product(*combis):
key = f'{key1}_{key2}'
data[key] = data[key][:, inds, ...]
return data
def sort_files_by_rec(paths, sources=['BM04', 'BM93', 'DJN', 'GBC', 'FTN']):
# Separate by source:
sorted_paths = {}

View File

@@ -104,9 +104,9 @@ def ylimits(signal, ax=None, minval=None, maxval=None, pad=0.05):
return [minval, maxval]
limits = [minval, maxval]
if minval is None:
limits[0] = signal.min()
limits[0] = np.nanmin(signal)
if maxval is None:
limits[1] = signal.max()
limits[1] = np.nanmax(signal)
span = limits[1] - limits[0]
if pad and minval is None:
limits[0] -= span * pad

View File

@@ -16,7 +16,7 @@ target_species = [
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][5]
][6]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',

View File

@@ -17,7 +17,7 @@ target_species = [
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][5]
][6]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',