Nearly finished 1st draft of species-specific Thresh-LP invariance figure (WIP).

This commit is contained in:
j-hartling
2026-03-13 17:15:03 +01:00
parent 4f5054c8fd
commit 1516fe6090
19 changed files with 735 additions and 239 deletions

Binary file not shown.

Binary file not shown.

View File

@@ -251,9 +251,9 @@
\@writefile{lof}{\contentsline {figure}{\numberline {4}{\ignorespaces \textbf {Intensity invariance by logarithmic compression and adaptation is restricted by the noise floor.} Envelope $x_{\text {env}}(t)$ is transformed into logarihmically compressed envelope $x_{\text {dB}}(t)$ and further into intensity-adapted envelope $x_{\text {adapt}}(t)$. Indicated time scale is $5\,$s for both \textbf {a} and \textbf {b} (black bars). \textbf {a}:~Ideally, if $x_{\text {env}}(t)$ consists only of song component $s(t)$ rescaled by $\alpha $, then $x_{\text {adapt}}(t)$ is fully intensity-invariant across all $\alpha $. \textbf {b}:~In practice, $x_{\text {env}}(t)$ also contains fixed-scale noise component $\eta (t)$, which limits the effective intensity invariance of $x_{\text {adapt}}(t)$ to sufficiently large $\alpha $. \textbf {c}:~Ratios of the SD of each representation in \textbf {b} at a given $\alpha $ relative to the SD of the representation for $\alpha =0$ (solid lines). The same ratios for the ideal $x_{\text {adapt}}(t)$ in \textbf {a} are shown for comparison (dashed line). }}{12}{}\protected@file@percent } \@writefile{lof}{\contentsline {figure}{\numberline {4}{\ignorespaces \textbf {Intensity invariance by logarithmic compression and adaptation is restricted by the noise floor.} Envelope $x_{\text {env}}(t)$ is transformed into logarihmically compressed envelope $x_{\text {dB}}(t)$ and further into intensity-adapted envelope $x_{\text {adapt}}(t)$. Indicated time scale is $5\,$s for both \textbf {a} and \textbf {b} (black bars). \textbf {a}:~Ideally, if $x_{\text {env}}(t)$ consists only of song component $s(t)$ rescaled by $\alpha $, then $x_{\text {adapt}}(t)$ is fully intensity-invariant across all $\alpha $. \textbf {b}:~In practice, $x_{\text {env}}(t)$ also contains fixed-scale noise component $\eta (t)$, which limits the effective intensity invariance of $x_{\text {adapt}}(t)$ to sufficiently large $\alpha $. 
\textbf {c}:~Ratios of the SD of each representation in \textbf {b} at a given $\alpha $ relative to the SD of the representation for $\alpha =0$ (solid lines). The same ratios for the ideal $x_{\text {adapt}}(t)$ in \textbf {a} are shown for comparison (dashed line). }}{12}{}\protected@file@percent }
\newlabel{fig:inv_log-hp}{{4}{12}{}{}{}} \newlabel{fig:inv_log-hp}{{4}{12}{}{}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Thresholding nonlinearity \& temporal averaging}{12}{}\protected@file@percent } \@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Thresholding nonlinearity \& temporal averaging}{12}{}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5}{\ignorespaces \textbf {Intensity invariance by thresholding and temporal averaging depends on the threshold value.} Kernel response $c_i(t)$ is rescaled by $\alpha $ and transformed into binary response $b_i(t)$ and further into feature $f_i(t)$. Threshold value $\Theta _i$ is set to different percentiles of the the distribution of $c_i(t)$ at $\alpha =1$. Darker colors indicate higher values of $\Theta _i$. Indicated time scale of $500\,$ms is the same for \textbf {a}-\textbf {c} (black bar). \textbf {a}:~50th percentile. \textbf {b}:~75th percentile. \textbf {c}:~100th percentile. \textbf {d}:~Average value of $f_i(t)$ during the song for the different $\Theta _i$ in \textbf {a}-\textbf {c}. }}{13}{}\protected@file@percent } \@writefile{lof}{\contentsline {figure}{\numberline {5}{\ignorespaces \textbf {Intensity invariance by thresholding and temporal averaging depends on the threshold value with regard to variable range but not saturation level.} Kernel response $c_i(t)$ is rescaled by $\alpha $ and transformed into binary response $b_i(t)$ and further into feature $f_i(t)$. Threshold value $\Theta _i$ is set to different percentiles of the the distribution of $c_i(t)$ at $\alpha =1$. Darker colors indicate higher values of $\Theta _i$. Indicated time scale of $500\,$ms is the same for \textbf {a}-\textbf {c} (black bar). \textbf {a}:~50th percentile. \textbf {b}:~75th percentile. \textbf {c}:~100th percentile. \textbf {d}:~Average value of $f_i(t)$ during the song for the different $\Theta _i$ in \textbf {a}-\textbf {c}. }}{13}{}\protected@file@percent }
\newlabel{fig:inv_thresh-lp_single}{{5}{13}{}{}{}} \newlabel{fig:inv_thresh-lp_single}{{5}{13}{}{}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {6}{\ignorespaces \textbf {Intensity invariance by thresholding and temporal averaging depends on noise.} Kernel response $c_i(t)$ is rescaled by $\alpha $, mixed with fixed-scale noise component $\eta (t)$, and transformed into binary response $b_i(t)$ and further into feature $f_i(t)$. Threshold value $\Theta _i$ is set to different percentiles of the the distribution of $c_i(t)$ at $\alpha =1$. Darker colors indicate higher values of $\Theta _i$. Indicated time scale of $500\,$ms is the same for \textbf {a}-\textbf {c} (black bar). \textbf {a}:~50th percentile. \textbf {b}:~75th percentile. \textbf {c}:~100th percentile. \textbf {d}:~Average value of $f_i(t)$ during the song for the different $\Theta _i$ in \textbf {a}-\textbf {c}. }}{14}{}\protected@file@percent } \@writefile{lof}{\contentsline {figure}{\numberline {6}{\ignorespaces \textbf {Intensity invariance by thresholding and temporal averaging depends on noise with regard to variable range but not saturation level.} Kernel response $c_i(t)$ is rescaled by $\alpha $, mixed with fixed-scale noise component $\eta (t)$, and transformed into binary response $b_i(t)$ and further into feature $f_i(t)$. Threshold value $\Theta _i$ is set to different percentiles of the the distribution of $c_i(t)$ at $\alpha =1$. Darker colors indicate higher values of $\Theta _i$. Indicated time scale of $500\,$ms is the same for \textbf {a}-\textbf {c} (black bar). \textbf {a}:~50th percentile. \textbf {b}:~75th percentile. \textbf {c}:~100th percentile. \textbf {d}:~Average value of $f_i(t)$ during the song for the different $\Theta _i$ in \textbf {a}-\textbf {c}. }}{14}{}\protected@file@percent }
\newlabel{fig:inv_thresh-lp_single_noise}{{6}{14}{}{}{}} \newlabel{fig:inv_thresh-lp_single_noise}{{6}{14}{}{}{}}
\newlabel{eq:pdf_split}{{15}{15}{}{}{}} \newlabel{eq:pdf_split}{{15}{15}{}{}{}}
\newlabel{eq:feat_avg}{{16}{15}{}{}{}} \newlabel{eq:feat_avg}{{16}{15}{}{}{}}

View File

@@ -1,14 +1,14 @@
# Fdb version 4 # Fdb version 4
["biber main"] 1773238445.97958 "main.bcf" "main.bbl" "main" 1773238449.24151 0 ["biber main"] 1773238445.97958 "main.bcf" "main.bbl" "main" 1773392777.69214 0
"cite.bib" 1770904753.08918 27483 4290db0c91f7b5055e25472ef913f6b4 "" "cite.bib" 1770904753.08918 27483 4290db0c91f7b5055e25472ef913f6b4 ""
"main.bcf" 1773238449.18184 112931 2a478116d80ebb1ada7083a24facd6e3 "pdflatex" "main.bcf" 1773392777.63678 112931 2a478116d80ebb1ada7083a24facd6e3 "pdflatex"
(generated) (generated)
"main.bbl" "main.bbl"
"main.blg" "main.blg"
(rewritten before read) (rewritten before read)
["pdflatex"] 1773238448.36992 "/home/hartling/phd/paper/paper_2025/main.tex" "main.pdf" "main" 1773238449.24172 0 ["pdflatex"] 1773392776.7873 "/home/hartling/phd/paper/paper_2025/main.tex" "main.pdf" "main" 1773392777.69235 0
"/etc/texmf/web2c/texmf.cnf" 1761560044.43676 475 c0e671620eb5563b2130f56340a5fde8 "" "/etc/texmf/web2c/texmf.cnf" 1761560044.43676 475 c0e671620eb5563b2130f56340a5fde8 ""
"/home/hartling/phd/paper/paper_2025/main.tex" 1773238449.19584 47479 96e40a52b29dbb10a3ed8b868cb9724c "" "/home/hartling/phd/paper/paper_2025/main.tex" 1773312783.44267 47631 f6b9c9e8caccb02783f29486663acfa3 ""
"/usr/share/texlive/texmf-dist/fonts/map/fontname/texfonts.map" 1577235249 3524 cb3e574dea2d1052e39280babc910dc8 "" "/usr/share/texlive/texmf-dist/fonts/map/fontname/texfonts.map" 1577235249 3524 cb3e574dea2d1052e39280babc910dc8 ""
"/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex7.tfm" 1246382020 1004 54797486969f23fa377b128694d548df "" "/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex7.tfm" 1246382020 1004 54797486969f23fa377b128694d548df ""
"/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex8.tfm" 1246382020 988 bdf658c3bfc2d96d3c8b02cfc1c94c20 "" "/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex8.tfm" 1246382020 988 bdf658c3bfc2d96d3c8b02cfc1c94c20 ""
@@ -153,14 +153,14 @@
"/var/lib/texmf/web2c/pdftex/pdflatex.fmt" 1761648508 8213325 7fd20752ab46ff9aa583e4973d7433df "" "/var/lib/texmf/web2c/pdftex/pdflatex.fmt" 1761648508 8213325 7fd20752ab46ff9aa583e4973d7433df ""
"figures/fig_auditory_pathway.pdf" 1771593904.14638 1153923 3df8539421fd21dc866cc8d320bd9b1d "" "figures/fig_auditory_pathway.pdf" 1771593904.14638 1153923 3df8539421fd21dc866cc8d320bd9b1d ""
"figures/fig_feat_stages.pdf" 1771841347.81651 8600157 89f9276167cc096f9adce052152edd70 "" "figures/fig_feat_stages.pdf" 1771841347.81651 8600157 89f9276167cc096f9adce052152edd70 ""
"figures/fig_invariance_log_hp.pdf" 1773227453.7878 479810 ed6614c5cd8907e3df0d9491fa880b7e "" "figures/fig_invariance_log_hp.pdf" 1773239234.07343 478578 b583c316e5e715032fa338984f274dd8 ""
"figures/fig_invariance_thresh_lp_single.pdf" 1773227716.51231 993717 4a3f16eb87f842fe8a5cddf2b365b665 "" "figures/fig_invariance_thresh_lp_single.pdf" 1773392682.53371 931926 00b6c0232c328feb78271f3a7ad10bd9 ""
"figures/fig_invariance_thresh_lp_single_noise.pdf" 1773227704.85542 1512337 7854a80cfe876190279163e0f7e5104f "" "figures/fig_invariance_thresh_lp_single_noise.pdf" 1773392714.27939 1401197 6c69dc364ff57747e40f8a0ab4353fa4 ""
"figures/fig_pre_stages.pdf" 1771841345.77353 440442 263f9bd4a3bca8e0653ac0a4c4a8da2c "" "figures/fig_pre_stages.pdf" 1771841345.77353 440442 263f9bd4a3bca8e0653ac0a4c4a8da2c ""
"main.aux" 1773238449.17684 14728 db185b2df9ec693fd5b8b1e57e05bff8 "pdflatex" "main.aux" 1773392777.63078 14838 a4d8e057160ee865618aebf9c55cc9ce "pdflatex"
"main.bbl" 1773238446.59287 91039 1380dc8c93d2855fdb132cc5a40ad52f "biber main" "main.bbl" 1773238446.59287 91039 1380dc8c93d2855fdb132cc5a40ad52f "biber main"
"main.run.xml" 1773238449.18284 2335 a049bc26a7f032e842ce55de5bc38328 "pdflatex" "main.run.xml" 1773392777.63678 2335 a049bc26a7f032e842ce55de5bc38328 "pdflatex"
"main.tex" 1773238449.19584 47479 96e40a52b29dbb10a3ed8b868cb9724c "" "main.tex" 1773312783.44267 47631 f6b9c9e8caccb02783f29486663acfa3 ""
(generated) (generated)
"main.aux" "main.aux"
"main.bcf" "main.bcf"

View File

@@ -1,4 +1,4 @@
This is pdfTeX, Version 3.141592653-2.6-1.40.25 (TeX Live 2023/Debian) (preloaded format=pdflatex 2025.10.28) 11 MAR 2026 15:14 This is pdfTeX, Version 3.141592653-2.6-1.40.25 (TeX Live 2023/Debian) (preloaded format=pdflatex 2025.10.28) 13 MAR 2026 10:06
entering extended mode entering extended mode
restricted \write18 enabled. restricted \write18 enabled.
file:line:error style messages enabled. file:line:error style messages enabled.
@@ -775,10 +775,10 @@ File: figures/fig_invariance_thresh_lp_single.pdf Graphic file (type pdf)
Package pdftex.def Info: figures/fig_invariance_thresh_lp_single.pdf used on input line 654. Package pdftex.def Info: figures/fig_invariance_thresh_lp_single.pdf used on input line 654.
(pdftex.def) Requested size: 483.69687pt x 241.84782pt. (pdftex.def) Requested size: 483.69687pt x 241.84782pt.
[12 <./figures/fig_invariance_log_hp.pdf>] [12 <./figures/fig_invariance_log_hp.pdf>]
<figures/fig_invariance_thresh_lp_single_noise.pdf, id=1209, 910.48819pt x 455.2441pt> <figures/fig_invariance_thresh_lp_single_noise.pdf, id=1206, 910.48819pt x 455.2441pt>
File: figures/fig_invariance_thresh_lp_single_noise.pdf Graphic file (type pdf) File: figures/fig_invariance_thresh_lp_single_noise.pdf Graphic file (type pdf)
<use figures/fig_invariance_thresh_lp_single_noise.pdf> <use figures/fig_invariance_thresh_lp_single_noise.pdf>
Package pdftex.def Info: figures/fig_invariance_thresh_lp_single_noise.pdf used on input line 676. Package pdftex.def Info: figures/fig_invariance_thresh_lp_single_noise.pdf used on input line 677.
(pdftex.def) Requested size: 483.69687pt x 241.84782pt. (pdftex.def) Requested size: 483.69687pt x 241.84782pt.
[13 <./figures/fig_invariance_thresh_lp_single.pdf>] [14 <./figures/fig_invariance_thresh_lp_single_noise.pdf>] [15 [13 <./figures/fig_invariance_thresh_lp_single.pdf>] [14 <./figures/fig_invariance_thresh_lp_single_noise.pdf>] [15
@@ -800,9 +800,9 @@ Here is how much of TeX's memory you used:
1143 hyphenation exceptions out of 8191 1143 hyphenation exceptions out of 8191
94i,19n,93p,1214b,1732s stack positions out of 10000i,1000n,20000p,200000b,200000s 94i,19n,93p,1214b,1732s stack positions out of 10000i,1000n,20000p,200000b,200000s
</usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbxti10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmex10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr17.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmti12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/symbols/msbm10.pfb> 
</usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbxti10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmex10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr17.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmti12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/symbols/msbm10.pfb>
Output written on main.pdf (17 pages, 13334435 bytes). Output written on main.pdf (17 pages, 13160382 bytes).
PDF statistics: PDF statistics:
1393 PDF objects out of 1440 (max. 8388607) 1394 PDF objects out of 1440 (max. 8388607)
802 compressed objects within 9 object streams 802 compressed objects within 9 object streams
0 named destinations out of 1000 (max. 500000) 0 named destinations out of 1000 (max. 500000)
43 words of extra memory for PDF output out of 10000 (max. 10000000) 43 words of extra memory for PDF output out of 10000 (max. 10000000)

BIN
main.pdf

Binary file not shown.

Binary file not shown.

View File

@@ -653,7 +653,8 @@ the signal for reliable song recognition.
\centering \centering
\includegraphics[width=\textwidth]{figures/fig_invariance_thresh_lp_single.pdf} \includegraphics[width=\textwidth]{figures/fig_invariance_thresh_lp_single.pdf}
\caption{\textbf{Intensity invariance by thresholding and temporal \caption{\textbf{Intensity invariance by thresholding and temporal
averaging depends on the threshold value.} averaging depends on the threshold value with regard to
variable range but not saturation level.}
Kernel response $c_i(t)$ is rescaled by $\sca$ and Kernel response $c_i(t)$ is rescaled by $\sca$ and
transformed into binary response $b_i(t)$ and further into transformed into binary response $b_i(t)$ and further into
feature $f_i(t)$. Threshold value $\thr$ is set to feature $f_i(t)$. Threshold value $\thr$ is set to
@@ -675,7 +676,8 @@ the signal for reliable song recognition.
\centering \centering
\includegraphics[width=\textwidth]{figures/fig_invariance_thresh_lp_single_noise.pdf} \includegraphics[width=\textwidth]{figures/fig_invariance_thresh_lp_single_noise.pdf}
\caption{\textbf{Intensity invariance by thresholding and temporal \caption{\textbf{Intensity invariance by thresholding and temporal
averaging depends on noise.} averaging depends on noise with regard to variable range
but not saturation level.}
Kernel response $c_i(t)$ is rescaled by $\sca$, mixed with Kernel response $c_i(t)$ is rescaled by $\sca$, mixed with
fixed-scale noise component $\noc(t)$, and transformed fixed-scale noise component $\noc(t)$, and transformed
into binary response $b_i(t)$ and further into feature into binary response $b_i(t)$ and further into feature

View File

@@ -1,31 +1,13 @@
import plotstyle_plt import plotstyle_plt
import glob
import numpy as np import numpy as np
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from matplotlib.transforms import BboxTransformTo
from itertools import product from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data from thunderhopper.modeltools import load_data
from color_functions import load_colors from color_functions import load_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, plot_line, plot_barcode, strip_zeros from plot_functions import hide_axis, ylimits, xlabel, ylabel, plot_line, plot_barcode, strip_zeros
from IPython import embed from IPython import embed
def time_bar(ax, dur, y0=0.9, y1=0.95, xshift=0.5, parent=None, transform=None, **kwargs):
    """ Draw a horizontal time-scale bar of duration dur (in data x-units of ax).

    The bar's horizontal placement is controlled by xshift: 0 puts its left
    edge at the axis' left limit, 1 at the right limit, 0.5 centers it. The
    bar is added as a Rectangle spanning y0..y1 in the coordinate system of
    the given transform (default: BboxTransformTo(parent.bbox), i.e.
    fractional coordinates of the parent's bounding box). Extra kwargs are
    forwarded to plt.Rectangle. Returns None.
    """
    # Bar endpoints in data coordinates of ax:
    t0, t1 = ax.get_xlim()
    offset = (t1 - t0 - dur) * xshift
    x0 = t0 + offset
    x1 = x0 + dur
    if parent is None:
        parent = ax
    if transform is None:
        # Default target coordinates: unit square mapped onto parent's bbox.
        transform = BboxTransformTo(parent.bbox)
    if transform is not ax.transData:
        # Convert endpoints from data coordinates into the target transform's
        # coordinate system (data -> display -> target):
        trans = ax.transData + transform.inverted()
        x0 = trans.transform((x0, 0))[0]
        x1 = trans.transform((x1, 0))[0]
    parent.add_artist(plt.Rectangle((x0, y0), x1 - x0, y1 - y0,
                                    transform=transform, **kwargs))
    return None
def add_snip_axes(fig, grid_kwargs): def add_snip_axes(fig, grid_kwargs):
grid = fig.add_gridspec(**grid_kwargs) grid = fig.add_gridspec(**grid_kwargs)
axes = np.zeros((grid.nrows, grid.ncols), dtype=object) axes = np.zeros((grid.nrows, grid.ncols), dtype=object)
@@ -49,7 +31,7 @@ def plot_bi_snippets(axes, time, binary, **kwargs):
# GENERAL SETTINGS: # GENERAL SETTINGS:
target = 'Omocestus_rufipes' target = 'Omocestus_rufipes'
data_paths = search_files(target, excl='noise', dir='../data/inv/full/') data_paths = glob.glob(f'../data/processed/{target}*.npz')
stages = ['filt', 'env', 'log', 'inv', 'conv', 'bi', 'feat'] stages = ['filt', 'env', 'log', 'inv', 'conv', 'bi', 'feat']
load_kwargs = dict( load_kwargs = dict(
files=stages, files=stages,
@@ -62,8 +44,8 @@ fig_kwargs = dict(
figsize=(32/2.54, 16/2.54), figsize=(32/2.54, 16/2.54),
) )
super_grid_kwargs = dict( super_grid_kwargs = dict(
nrows=2, nrows=len(stages),
ncols=2, ncols=3,
wspace=0, wspace=0,
hspace=0, hspace=0,
left=0, left=0,
@@ -72,31 +54,20 @@ super_grid_kwargs = dict(
top=1 top=1
) )
subfig_specs = dict( subfig_specs = dict(
pure=(0, 0), **{stage: (slice(0, -1), i) for i, stage in enumerate(stages)},
noise=(1, 0), big=(slice(None), -1)
analysis=(slice(None), 1)
) )
pure_grid_kwargs = dict( stage_grid_kwargs = dict(
nrows=len(stages), nrows=1,
ncols=None, ncols=None,
wspace=0.05, wspace=0.05,
hspace=0.1, hspace=0,
left=0.07, left=0.07,
right=0.95, right=0.95,
bottom=0.15, bottom=0.15,
top=0.9 top=0.9
) )
noise_grid_kwargs = dict( big_grid_kwargs = dict(
nrows=len(stages),
ncols=None,
wspace=0.05,
hspace=0.1,
left=0.07,
right=0.95,
bottom=0.15,
top=0.9
)
analysis_grid_kwargs = dict(
nrows=1, nrows=1,
ncols=1, ncols=1,
wspace=0, wspace=0,
@@ -106,19 +77,20 @@ analysis_grid_kwargs = dict(
bottom=0.1, bottom=0.1,
top=0.95 top=0.95
) )
snip_specs = dict(
conv=(0, slice(None)),
bi=(1, slice(None)),
feat=(2, slice(None))
)
# PLOT SETTINGS: # PLOT SETTINGS:
colors = load_colors('../data/stage_colors.npz') colors = load_colors('../data/stage_colors.npz')
lw_snippets = dict( lw_snippets = dict(
raw=0.25,
filt=0.25,
env=0.5,
log=0.5,
inv=0.5,
conv=0.5, conv=0.5,
bi=0.01,
feat=2 feat=2
) )
lw_analysis = 3 lw_big = 3
xlabels = dict( xlabels = dict(
analysis='scale $\\alpha$', analysis='scale $\\alpha$',
) )
@@ -148,38 +120,38 @@ ylab_analysis_kwargs = dict(
ha='center', ha='center',
va='top', va='top',
) )
xloc = dict( # xloc = dict(
analysis=10, # analysis=10,
) # )
letter_snip_kwargs = dict( # letter_snip_kwargs = dict(
x=0.02, # x=0.02,
y=1, # y=1,
ha='left', # ha='left',
va='top', # va='top',
fontsize=22, # fontsize=22,
fontweight='bold' # fontweight='bold'
) # )
letter_analysis_kwargs = dict( # letter_analysis_kwargs = dict(
x=0, # x=0,
y=1, # y=1,
ha='left', # ha='left',
va='top', # va='top',
fontsize=22, # fontsize=22,
fontweight='bold' # fontweight='bold'
) # )
bar_time = 5 # bar_time = 5
bar_kwargs = dict( # bar_kwargs = dict(
y0=0.7, # y0=0.7,
y1=0.8, # y1=0.8,
color='k', # color='k',
lw=0, # lw=0,
) # )
spread_kwargs = dict( # spread_kwargs = dict(
alpha=0.3, # alpha=0.3,
lw=0, # lw=0,
zorder=0 # zorder=0
) # )
kernel_ind = 0 # kernel_ind = 0
# EXECUTION: # EXECUTION:
for data_path in data_paths: for data_path in data_paths:

View File

@@ -124,9 +124,6 @@ ylab_analysis_kwargs = dict(
ha='center', ha='center',
va='top', va='top',
) )
xloc = dict(
analysis=200,
)
letter_snip_kwargs = dict( letter_snip_kwargs = dict(
x=0.02, x=0.02,
y=0.97, y=0.97,
@@ -189,7 +186,7 @@ for data_path in data_paths:
analysis_grid = analysis_subfig.add_gridspec(**analysis_grid_kwargs) analysis_grid = analysis_subfig.add_gridspec(**analysis_grid_kwargs)
analysis_ax = analysis_subfig.add_subplot(analysis_grid[0, 0]) analysis_ax = analysis_subfig.add_subplot(analysis_grid[0, 0])
analysis_ax.set_xlim(noise_data['scales'].min(), noise_data['scales'].max()) analysis_ax.set_xlim(noise_data['scales'].min(), noise_data['scales'].max())
analysis_ax.xaxis.set_major_locator(plt.MultipleLocator(xloc['analysis'])) analysis_ax.set_xscale('symlog', linthresh=pure_data['scales'][1], linscale=0.5)
xlabel(analysis_ax, xlabels['analysis'], **xlab_analysis_kwargs, xlabel(analysis_ax, xlabels['analysis'], **xlab_analysis_kwargs,
transform=analysis_subfig.transSubfigure) transform=analysis_subfig.transSubfigure)
analysis_ax.set_yscale('log') analysis_ax.set_yscale('log')
@@ -240,7 +237,7 @@ for data_path in data_paths:
analysis_ax.plot(noise_data['scales'], measure_env, c=colors['env'], lw=lw_analysis) analysis_ax.plot(noise_data['scales'], measure_env, c=colors['env'], lw=lw_analysis)
analysis_ax.plot(noise_data['scales'], measure_log, c=colors['log'], lw=lw_analysis) analysis_ax.plot(noise_data['scales'], measure_log, c=colors['log'], lw=lw_analysis)
analysis_ax.plot(noise_data['scales'], measure_inv, c=colors['inv'], lw=lw_analysis) analysis_ax.plot(noise_data['scales'], measure_inv, c=colors['inv'], lw=lw_analysis)
analysis_ax.set_ylim(0.1, measure_env.max()) analysis_ax.set_ylim(0.9, measure_env.max())
if save_path is not None: if save_path is not None:
fig.savefig(save_path) fig.savefig(save_path)

View File

@@ -52,7 +52,7 @@ def side_distributions(axes, snippets, inset_bounds, thresh, nbins=50,
# GENERAL SETTINGS: # GENERAL SETTINGS:
with_noise = False with_noise = True
target = 'Omocestus_rufipes' target = 'Omocestus_rufipes'
search_kwargs = dict( search_kwargs = dict(
incl=['subset', 'noise'] if with_noise else 'subset', incl=['subset', 'noise'] if with_noise else 'subset',
@@ -186,9 +186,10 @@ bar_kwargs = dict(
lw=0, lw=0,
) )
kernel = np.array([ kernel = np.array([
[2, 0.008], [1, 0.008],
[4, 0.008], [2, 0.004],
])[:1] [3, 0.002],
])[np.array([1])]
zoom_rel = np.array([0.5, 0.525]) zoom_rel = np.array([0.5, 0.525])
@@ -207,13 +208,11 @@ for data_path in data_paths:
data['snip_conv'] = data['snip_conv'][zoom_inds, kern_ind, ...] data['snip_conv'] = data['snip_conv'][zoom_inds, kern_ind, ...]
data['snip_bi'] = data['snip_bi'][zoom_inds, kern_ind, ...] data['snip_bi'] = data['snip_bi'][zoom_inds, kern_ind, ...]
data['snip_feat'] = data['snip_feat'][zoom_inds, kern_ind, ...] data['snip_feat'] = data['snip_feat'][zoom_inds, kern_ind, ...]
data['measure_conv'] = data['measure_conv'][:, kern_ind, :]
data['measure_feat'] = data['measure_feat'][:, kern_ind, :] data['measure_feat'] = data['measure_feat'][:, kern_ind, :]
data['threshs'] = data['threshs'][:, kern_ind]
t_full = np.arange(data['snip_conv'].shape[0]) / config['env_rate'] t_full = np.arange(data['snip_conv'].shape[0]) / config['env_rate']
# Get threshold-specific colors: # Get threshold-specific colors:
factors = np.linspace(*color_factors, data['thresh_perc'].size) factors = np.linspace(*color_factors, data['threshs'].size)
colors = dict( colors = dict(
conv=shade_colors(colors['conv'], factors), conv=shade_colors(colors['conv'], factors),
bi=shade_colors(colors['bi'], factors), bi=shade_colors(colors['bi'], factors),
@@ -221,7 +220,7 @@ for data_path in data_paths:
) )
# Adjust grid parameters: # Adjust grid parameters:
super_grid_kwargs['nrows'] = data['thresh_perc'].size super_grid_kwargs['nrows'] = data['threshs'].size
snip_grid_kwargs['ncols'] = data['example_scales'].size snip_grid_kwargs['ncols'] = data['example_scales'].size
# Prepare overall graph: # Prepare overall graph:
@@ -230,13 +229,13 @@ for data_path in data_paths:
# Prepare snippet axes: # Prepare snippet axes:
snip_axes = {} snip_axes = {}
for i in range(data['thresh_perc'].size): for i in range(data['threshs'].size):
subfig_specs['snip'] = (i, subfig_specs['snip'][1]) subfig_specs['snip'] = (i, subfig_specs['snip'][1])
snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']]) snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
axes = add_snip_axes(snip_subfig, snip_grid_kwargs) axes = add_snip_axes(snip_subfig, snip_grid_kwargs)
snip_axes[snip_subfig] = axes snip_axes[snip_subfig] = axes
super_ylabel(f'{data["thresh_perc"][i]}%', snip_subfig, super_ylabel(f'{strip_zeros(100 * data["thresh_perc"][i])}%',
axes[0, 0], axes[-1, 0], **ylab_super_kwargs) snip_subfig, axes[-1, 0], axes[0, 0], **ylab_super_kwargs)
for ax, stage in zip(axes[:, 0], stages): for ax, stage in zip(axes[:, 0], stages):
ylabel(ax, ylabels[stage], **ylab_snip_kwargs, ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
transform=snip_subfig.transSubfigure) transform=snip_subfig.transSubfigure)

View File

@@ -0,0 +1,400 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from thunderhopper.filtertools import find_kern_specs
from color_functions import load_colors, shade_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, super_ylabel,\
plot_line, plot_barcode, strip_zeros, time_bar,\
letter_subplot, letter_subplots, hide_ticks,\
super_xlabel, super_ylabel, assign_colors
from IPython import embed
def force_sequence(*variables, skip_None=False, equal_size=False):
    """ Ensures single-loop compatibility of one or several input variables.

    Uses np.ndim() to separate sequence-likes (tuples, lists, >=1D arrays)
    and scalar inputs (int, float, bool, 0D arrays, strings, dicts, None).
    Scalar variables are promoted to 1D sequences by either tuple wrapping
    or expanding by one array dimension (only 0D arrays). All single-entry
    sequences can be repeated to match the length of the longest sequence.
    Input variables that are None can be excluded from these treatments.

    Parameters
    ----------
    *variables : tuple (m,) of inputs (any type)
        Input variables to be checked, promoted, and equalized as required.
    skip_None : bool, optional
        If True, None inputs fall through unmodified. The default is False.
    equal_size : bool, optional
        If True, counts the number of elements in each passed or promoted
        sequence (using len(), meaning that elements are defined as entries
        along the first sequence axis) and repeats single-element sequences to
        match the maximum count. Arrays with shape[0] == 1 are not tiled but
        tuple-wrapped and repeated to avoid deep copies. The default is False.

    Returns
    -------
    variables : array-like or None or list (m,) of array-likes or Nones
        Treated output variables, each either a >=1D sequence-like or None.
        Single variables are returned without list wrapper. Calling with no
        variables at all returns an empty list.

    Raises
    ------
    ValueError
        Breaks if equal_size is True and a sequence has incompatible length,
        i.e. any number of elements other than 1, 0 (Nones) or the maximum.
    """
    # Renamed from *vars to avoid shadowing the builtin vars().
    # Enforce input iterability:
    variables, sizes = list(variables), []
    for i, var in enumerate(variables):
        if skip_None and var is None:
            # Maintain None (size 0 so the validation below ignores it):
            sizes.append(0)
            continue
        if np.ndim(var) == 0:
            # Make each input variable at least 1D sequence-like (expand 0D
            # arrays by one dimension, tuple-wrap every other scalar):
            variables[i] = var[None] if isinstance(var, np.ndarray) else (var,)
        # Count sequence elements:
        sizes.append(len(variables[i]))
    # Check early exits:
    if not variables:
        # Guard: max() below would raise an opaque error on an empty call.
        return variables
    if len(variables) == 1:
        return variables[0]
    target = max(sizes)
    if not equal_size or target <= 1 or all(n == target for n in sizes):
        return variables
    # Validate compatibility of element counts:
    if not all(n in (0, 1, target) for n in sizes):
        msg = f'Given a maximum sequence length of {target}, all variables '\
              f'must either have 1 or {target} elements or be None: {sizes}'
        raise ValueError(msg)
    # Equalize sequence length across input variables:
    for i, (var, size) in enumerate(zip(variables, sizes)):
        if size == 1:
            # Tuple-wrap arrays so repetition copies references, not data:
            variables[i] = ((var,) if isinstance(var, np.ndarray) else var) * target
    return variables
def split_subplot(ax, side='right', size=10, pad=10):
    """ Divides the given parent subplot into two or more separate subplots.

    Opens an axes divider on the area of the parent axes and appends one
    child axes per entry in side/size/pad, shrinking the parent in the
    process. Size and pad values are percentages of the parent's remaining
    width (for 'left'/'right' splits) or height (for 'top'/'bottom' splits)
    after splitting: size=100 makes child and parent equally large, pad=100
    makes the gap as large as the split parent. Scalar arguments produce a
    single child; sequence-like arguments (lengths equalized via
    force_sequence) produce one child per entry, appended in given order so
    that earlier children from the same side sit closest to the parent.
    Repeated calls on the same parent open a fresh divider each time, so
    size/pad percentages are not comparable between calls.

    Parameters
    ----------
    ax : matplotlib axes
        Parent subplot to be divided.
    side : str or 1D array-like of str (m,)
        Sides of the parent where new subplots are appended. Options are
        'bottom', 'left', 'top', 'right'. The default is 'right'.
    size : int or float or 1D array-like of ints or floats (m,), optional
        Extent of each child axes as percentage of the split parent.
        The default is 10.
    pad : int or float or 1D array-like of ints or floats (m,), optional
        Gap between each child axes and the parent as percentage of the
        split parent. The default is 10.

    Returns
    -------
    matplotlib axes or list of matplotlib axes (m,)
        One or multiple newly appended child subplots.
    """
    divider = make_axes_locatable(ax)
    # Scalar-only call: perform a single split, return the lone child.
    if all(np.ndim(arg) == 0 for arg in (side, size, pad)):
        return divider.append_axes(side, size=f'{size}%', pad=f'{pad}%')
    # Sequence call: equalize argument lengths, then split once per entry.
    children = []
    for s, n, p in zip(*force_sequence(side, size, pad, equal_size=True)):
        children.append(divider.append_axes(s, f'{n}%', f'{p}%'))
    return children
# GENERAL SETTINGS:
# Species shown in the figure (commented-out entries are candidates
# not included in the current draft):
targets = [
    'Omocestus_rufipes',
    'Chorthippus_biguttulus',
    # 'Chorthippus_mollis',
    # 'Chrysochraon_dispar',
    'Gomphocerippus_rufus',
    # 'Pseudochorthippus_parallelus',
]
# Pure-song subset result files ('_noise' counterparts are derived later):
pure_paths = search_files(targets, incl='subset', excl='noise', dir='../data/inv/thresh_lp/')
# Variables to read from each result file:
load_kwargs = dict(
    keywords=['scales', 'measure', 'thresh']
)
save_path = '../figures/fig_invariance_thresh_lp_species.pdf'
# SUBSET SETTINGS:
# Single threshold proportion to display (first of the simulated set):
thresh_percent = np.array([0.6, 0.75, 0.999])[0]
# Kernel specifications to display (first two rows of the simulated set):
kernels = np.array([
    [1, 0.008],
    [2, 0.004],
    [3, 0.002],
])[np.array([0, 1])]
# GRAPH SETTINGS:
# Figure size in inches (values given in cm):
fig_kwargs = dict(
    figsize=(32/2.54, 16/2.54),
)
n_species = len(targets)
# Outermost grid holding the two subfigures:
super_grid_kwargs = dict(
    nrows=2,
    ncols=n_species + 2,
    wspace=0,
    hspace=0,
    left=0,
    right=1,
    bottom=0,
    top=1
)
# Super-grid slices occupied by each subfigure
# (species panels on the left, feature spaces on the right):
subfig_specs = dict(
    spec=(slice(None), slice(0, n_species)),
    big=(slice(None), slice(n_species, None))
)
# Grid of species-specific invariance panels (one column per species):
spec_grid_kwargs = dict(
    nrows=2,
    ncols=n_species,
    wspace=0.25,
    hspace=0.1,
    left=0.1,
    right=0.97,
    bottom=0.1,
    top=0.94
)
# Grid of the two feature-space panels, vertically aligned with the above:
big_grid_kwargs = dict(
    nrows=2,
    ncols=1,
    wspace=0,
    hspace=0.2,
    left=0,
    right=1,
    bottom=spec_grid_kwargs['bottom'],
    top=spec_grid_kwargs['top']
)
# Square aspect for feature-space panels, anchored left of center:
anchor_kwargs = dict(
    aspect='equal',
    adjustable='box',
    anchor=(0.3, 0.5)
)
# Position and size of kernel-waveform insets (axes coordinates):
inset_kwargs = dict(
    y0=0.7,
    w=0.3,
    h=0.2,
)
# PLOT SETTINGS:
# Base line color of the feature stage from the shared stage palette:
base_color = load_colors('../data/stage_colors.npz')['feat']
# One sequential colormap per species for the scatter color code:
spec_cmaps = [
    'Reds',
    'Greens',
    'Blues',
]
# Line widths for invariance curves and kernel waveforms:
lw = dict(
    spec=2,
    kern=3
)
# Marker size of feature-space scatter points:
space_kwargs = dict(
    s=30,
)
# Axis labels for species panels ('spec') and feature spaces ('big'):
xlabs = dict(
    spec='scale $\\alpha$',
    big='$\\mu_{f_1}$'
)
ylabs = dict(
    spec='$\\mu_f$',
    big='$\\mu_{f_2}$',
)
xlab_spec_kwargs = dict(
    y=0.005,
    fontsize=16,
    ha='center',
    va='bottom',
)
ylab_spec_kwargs = dict(
    x=0,
    fontsize=20,
    ha='left',
    va='center',
)
xlab_big_kwargs = dict(
    y=0.005,
    fontsize=20,
    ha='center',
    va='bottom',
)
ylab_big_kwargs = dict(
    x=0.03,
    fontsize=20,
    ha='center',
    va='center',
)
# Major tick spacing per panel type:
xloc = dict(
    big=0.5,
)
yloc = dict(
    spec=0.5,
    big=0.5
)
# Subplot letter placement for species panels and feature spaces:
spec_letter_kwargs = dict(
    x=0,
    y=1.03,
    ha='center',
    va='bottom',
    fontsize=22,
)
big_letter_kwargs = dict(
    x=0,
    yref=spec_letter_kwargs['y'],
    ha='center',
    va='bottom',
    fontsize=22,
)
# Time scale bar drawn into the first kernel inset (dur presumably in
# seconds of kernel time — confirm against time_bar()):
time_bar_kwargs = dict(
    dur=0.05,
    y0=inset_kwargs['y0'],
    y1=inset_kwargs['y0'] + 0.03,
    color='k',
    lw=0
)
# Bounds [x0, y0, w, h] of the colorbar area, spanning the full height
# of the feature-space grid:
cbar_bounds = [
    0.8,
    big_grid_kwargs['bottom'],
    0.15,
    big_grid_kwargs['top'] - big_grid_kwargs['bottom']
]
# Shading range for scale-dependent color variants
# (only referenced by commented-out code below):
shade_factors = [0.9, -0.9]
# EXECUTION:
# Prepare overall graph:
fig = plt.figure(**fig_kwargs)
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Prepare species-specific axes (rows: pure/noise, columns: species):
spec_subfig = fig.add_subfigure(super_grid[subfig_specs['spec']])
spec_grid = spec_subfig.add_gridspec(**spec_grid_kwargs)
spec_axes = np.zeros((spec_grid_kwargs['nrows'], n_species), dtype=object)
for i, j in product(range(spec_grid_kwargs['nrows']), range(n_species)):
    ax = spec_subfig.add_subplot(spec_grid[i, j])
    # Symlog keeps the alpha=0 reference on an otherwise log-scaled axis:
    ax.set_xscale('symlog', linthresh=0.1, linscale=0.5)
    ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['spec']))
    ax.set_ylim(0, 1)
    spec_axes[i, j] = ax
super_xlabel(xlabs['spec'], spec_subfig, spec_axes[-1, 0], spec_axes[-1, -1], **xlab_spec_kwargs)
super_ylabel(ylabs['spec'], spec_subfig, spec_axes[-1, 0], spec_axes[0, 0], **ylab_spec_kwargs)
# Plain loops instead of throw-away list comprehensions (side effects only):
for ax in spec_axes[0, :]:
    hide_ticks(ax, side='bottom')
for ax in spec_axes[:, 1:].ravel():
    hide_ticks(ax, side='left')
letter_subplots(spec_axes[0, :], labels='abc', **spec_letter_kwargs)
# Prepare kernel insets, evenly spread over the first species panel:
x0 = np.linspace(0, 1, kernels.shape[0] + 1)[:-1] + 1 / kernels.shape[0] / 2
x0 -= inset_kwargs['w'] / 2
insets = []
for i in range(kernels.shape[0]):
    bounds = [x0[i], inset_kwargs['y0'], inset_kwargs['w'], inset_kwargs['h']]
    inset = spec_axes[0, 0].inset_axes(bounds)
    inset.set_title(rf'$k_{{{i+1}}}$', fontsize=20)
    inset.axis('off')
    insets.append(inset)
# Prepare feature space axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
# Sized by the grid actually indexed below (was super_grid_kwargs['nrows'],
# which only coincidentally matches big_grid_kwargs['nrows']):
big_axes = np.zeros(big_grid_kwargs['nrows'], dtype=object)
for i in range(big_axes.size):
    ax = big_subfig.add_subplot(big_grid[i, 0])
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    ax.xaxis.set_major_locator(plt.MultipleLocator(xloc['big']))
    ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
    ax.set_aspect(**anchor_kwargs)
    ylabel(ax, ylabs['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
    big_axes[i] = ax
super_xlabel(xlabs['big'], big_subfig, big_axes[1], big_axes[1], **xlab_big_kwargs)
hide_ticks(big_axes[0], side='bottom')
letter_subplot(big_axes[0], 'd', ref=spec_axes[0, 0], **big_letter_kwargs)
# Prepare colorbars (one per species, split side by side):
bar_ax = big_subfig.add_axes(cbar_bounds)
# One additional split per species beyond the first (was hard-coded to
# two extra axes, breaking the bar_axes[i] lookup in the plotting loop
# whenever the number of targets changes):
bar_axes = split_subplot(bar_ax, side=['right'] * (n_species - 1), size=100, pad=0)
bar_axes = [bar_ax] + bar_axes
for ax in bar_axes:
    ax.spines[['right', 'top']].set_visible(True)
    hide_ticks(ax, 'bottom', ticks=False)
    hide_ticks(ax, 'left', ticks=False)
bar_axes[-1].tick_params(axis='y', which='both', right=True, labelright=True)
# Plot results per species:
for i, pure_path in enumerate(pure_paths):
    print(f'Processing {pure_path}')
    # Noise-song results live next to the corresponding pure-song file:
    noise_path = pure_path.replace('.npz', '_noise.npz')
    # Load invariance data:
    pure_data, config = load_data(pure_path, **load_kwargs)
    noise_data, _ = load_data(noise_path, **load_kwargs)
    scales = pure_data['scales']
    # Reduce to kernel subset and single threshold:
    thresh_ind = np.nonzero(pure_data['thresh_perc'] == thresh_percent)[0][0]
    kern_inds = find_kern_specs(config['k_specs'], kerns=kernels)
    config['k_specs'] = config['k_specs'][kern_inds]
    config['kernels'] = config['kernels'][:, kern_inds]
    pure_measure = pure_data['measure_feat'][:, kern_inds, thresh_ind]
    noise_measure = noise_data['measure_feat'][:, kern_inds, thresh_ind]
    # Plot invariance curves (row 0: pure song, row 1: song plus noise):
    pure_ax, noise_ax = spec_axes[:, i]
    pure_ax.plot(scales, pure_measure, c=base_color, lw=lw['spec'])
    noise_ax.plot(scales, noise_measure, c=base_color, lw=lw['spec'])
    if i == 0:
        # Indicate kernel waveforms once, with shared limits across insets:
        ylims = ylimits(config['kernels'], pad=0.05)
        xlims = (config['k_times'][0], config['k_times'][-1])
        for j, inset in enumerate(insets):
            inset.plot(config['k_times'], config['kernels'][:, j],
                       c='k', lw=lw['kern'])
            inset.set_xlim(xlims)
            inset.set_ylim(ylims)
        time_bar(insets[0], parent=spec_axes[0, 0], **time_bar_kwargs)
    # Prepare shaded colors:
    # factors = np.linspace(*shade_factors, scales.size)
    # shaded_colors = shade_colors(spec_colors[i], factors)
    # Plot pure feature space (first two subset kernels span the 2D space):
    handle = big_axes[0].scatter(pure_measure[:, 0], pure_measure[:, 1],
                                 c=scales, cmap=spec_cmaps[i], **space_kwargs)
    # Plot noise feature space:
    big_axes[1].scatter(noise_measure[:, 0], noise_measure[:, 1],
                        c=scales, cmap=spec_cmaps[i], **space_kwargs)
    # Indicate scale color code (one colorbar per species):
    big_subfig.colorbar(handle, cax=bar_axes[i])
if save_path is not None:
    fig.savefig(save_path)
plt.show()
print('Done.')
embed()

View File

@@ -0,0 +1,160 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from thunderhopper.filtertools import find_kern_specs
from color_functions import load_colors, shade_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, super_ylabel,\
plot_line, plot_barcode, strip_zeros, time_bar,\
letter_subplot, letter_subplots, hide_ticks,\
super_xlabel, super_ylabel, assign_colors
from IPython import embed
# GENERAL SETTINGS:
# Single species whose subset results are plotted:
target = 'Omocestus_rufipes'
# Pure-song subset files ('_noise' counterparts are derived later):
search_kwargs = dict(
    incl='subset',
    excl='noise',
    dir='../data/inv/thresh_lp/'
)
pure_paths = search_files(target, **search_kwargs)
# Variables to read from each result file:
load_kwargs = dict(
    keywords=['scales', 'measure', 'thresh']
)
# Output path (None disables saving):
save_path = None#'../figures/fig_invariance_thresh_lp_subset.pdf'
# GRAPH SETTINGS:
# Figure size in inches (values given in cm):
fig_kwargs = dict(
    figsize=(32/2.54, 16/2.54),
)
# Outermost grid (ncols is overwritten per file to kernel count + 1):
super_grid_kwargs = dict(
    nrows=1,
    ncols=1,
    wspace=0,
    hspace=0,
    left=0,
    right=1,
    bottom=0,
    top=1
)
# Per-subfigure grid (rows: pure/noise):
grid_kwargs = dict(
    nrows=2,
    ncols=1,
    wspace=0,
    hspace=0.1,
    left=0.15,
    right=0.95,
    bottom=0.1,
    top=0.85
)
# Bounds [x0, y0, w, h] of kernel-waveform insets above each column:
inset_bounds = [0.2, 1.01, 0.6, 0.4]
# PLOT SETTINGS:
# Shared stage color palette:
colors = load_colors('../data/stage_colors.npz')
# Shading range for kernel type-specific color variants:
color_factors = [-0.5, 0.5]
# Line widths for single-kernel curves, kernel waveforms, and
# population-wide curves:
lw = dict(
    one=3,
    kern=3,
    all=1,
)
ax_labels = dict(
    x='scale $\\alpha$',
    y='$\\mu_f$',
)
xlab_kwargs = dict(
    y=0.005,
    fontsize=16,
    ha='center',
    va='bottom',
)
ylab_kwargs = dict(
    x=0,
    fontsize=20,
    ha='left',
    va='center',
)
# Major y-tick spacing:
yloc = 0.2
# EXECUTION:
for pure_path in pure_paths:
    print(f'Processing {pure_path}')
    # Noise-song results live next to the corresponding pure-song file:
    noise_path = pure_path.replace('.npz', '_noise.npz')
    # Load kernel invariance data:
    pure_data, config = load_data(pure_path, **load_kwargs)
    noise_data, _ = load_data(noise_path, **load_kwargs)
    scales = pure_data['scales']
    # Adjust grid parameters (one column per kernel plus one population column):
    n_columns = config['k_specs'].shape[0] + 1
    super_grid_kwargs['ncols'] = n_columns
    # Prepare overall graph:
    fig = plt.figure(**fig_kwargs)
    super_grid = fig.add_gridspec(**super_grid_kwargs)
    # Prepare axes (rows: pure/noise, columns: kernels + population):
    all_axes = np.zeros((grid_kwargs['nrows'], n_columns), dtype=object)
    subfigs = []
    for i in range(n_columns):
        subfig = fig.add_subfigure(super_grid[0, i])
        grid = subfig.add_gridspec(**grid_kwargs)
        subfigs.append(subfig)
        for j in range(grid_kwargs['nrows']):
            ax = subfig.add_subplot(grid[j, 0])
            ax.set_xlim(scales[0], scales[-1])
            ax.set_ylim(0, 1)
            # scales[1] is presumably the smallest non-zero scale, used as
            # the symlog linear threshold — confirm scales are sorted:
            ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
            ax.yaxis.set_major_locator(plt.MultipleLocator(yloc))
            if i > 0:
                hide_ticks(ax, side='left')
            all_axes[j, i] = ax
        hide_ticks(all_axes[0, i], side='bottom')
    super_xlabel(ax_labels['x'], fig, all_axes[-1, 0], all_axes[-1, -1], **xlab_kwargs)
    super_ylabel(ax_labels['y'], fig, all_axes[0, 0], all_axes[1, 0], **ylab_kwargs)
    # Plot kernel-specific results (shared inset limits across kernels):
    in_min, in_high = ylimits(config['kernels'], pad=0.05)
    for i in range(config['k_specs'].shape[0]):
        pure_ax, noise_ax = all_axes[:, i]
        # Plot results of pure-song analysis:
        pure_ax.plot(scales, pure_data['measure_feat'][:, i, :],
                     c=colors['feat'], lw=lw['one'])
        # Plot results of noise-song analysis:
        noise_ax.plot(scales, noise_data['measure_feat'][:, i, :],
                      c=colors['feat'], lw=lw['one'])
        # Indicate kernel waveform:
        inset = pure_ax.inset_axes(inset_bounds)
        inset.plot(config['k_times'], config['kernels'][:, i], c='k', lw=lw['kern'])
        inset.set_xlim(config['k_times'][0], config['k_times'][-1])
        inset.set_ylim(in_min, in_high)
        inset.axis('off')
    # Load population invariance data (full files without '_subset' tag):
    pure_data, config = load_data(pure_path.replace('_subset', ''), **load_kwargs)
    noise_data, _ = load_data(noise_path.replace('_subset', ''), **load_kwargs)
    scales = pure_data['scales']
    # Get kernel type-specific colors (types ordered by first occurrence):
    types, ind = np.unique(config['k_specs'][:, 0], return_index=True)
    types = types[np.argsort(ind)].astype(int)
    factors = np.linspace(*color_factors, types.size)
    kern_colors = shade_colors(colors['feat'], factors)
    kern_colors = dict(zip(types.astype(str), kern_colors))
    # Plot population-wide results in the last column:
    pure_ax, noise_ax = all_axes[:, -1]
    handles = pure_ax.plot(scales, pure_data['measure_feat'], c='k', lw=lw['all'])
    assign_colors(handles, config['k_specs'][:, 0], kern_colors)
    handles = noise_ax.plot(scales, noise_data['measure_feat'], c='k', lw=lw['all'])
    assign_colors(handles, config['k_specs'][:, 0], kern_colors)
    if save_path is not None:
        fig.savefig(save_path)
    plt.show()
print('Done.')
embed()

View File

@@ -35,8 +35,7 @@ def letter_subplot(artist, label, x=None, y=None, xref=None, yref=None, ref=None
ha='left', va='bottom', fontsize=16, fontweight='bold', **kwargs): ha='left', va='bottom', fontsize=16, fontweight='bold', **kwargs):
trans_artist = BboxTransformTo(artist.bbox) trans_artist = BboxTransformTo(artist.bbox)
if x is None or y is None: if x is None or y is None:
trans_ref = BboxTransformTo(ref.bbox) transform = BboxTransformTo(ref.bbox) + trans_artist.inverted()
transform = trans_ref + trans_artist.inverted()
if x is None: if x is None:
x = transform.transform([xref, 0])[0] x = transform.transform([xref, 0])[0]
if y is None: if y is None:
@@ -102,15 +101,33 @@ def ylabel(ax, label, x=-0.2, y=None, fontsize=20, transform=None, **kwargs):
ax.yaxis.set_label_coords(x, y, transform=transform) ax.yaxis.set_label_coords(x, y, transform=transform)
return None return None
def super_xlabel(label, fig, high_ax, low_ax, y=0.005, **kwargs): def super_xlabel(label, fig, left_ax, right_ax, y=0.005,
x = (low_ax.get_position().x0 + high_ax.get_position().x1) / 2 left_fig=None, right_fig=None, **kwargs):
fig.supxlabel(label, x=x, y=y, **kwargs) left_x = left_ax.get_position().x0
return None right_x = right_ax.get_position().x1
if left_fig is not None or right_fig is not None:
trans_fig = BboxTransformTo(fig.bbox)
if left_fig is not None:
transform = BboxTransformTo(left_fig.bbox) + trans_fig.inverted()
left_x = transform.transform((left_x, 0))[0]
if right_fig is not None:
transform = BboxTransformTo(right_fig.bbox) + trans_fig.inverted()
right_x = transform.transform((right_x, 0))[0]
return fig.supxlabel(label, x=(left_x + right_x) / 2, y=y, **kwargs)
def super_ylabel(label, fig, high_ax, low_ax, x=0.005, **kwargs): def super_ylabel(label, fig, low_ax, high_ax, x=0.005,
y = (low_ax.get_position().y0 + high_ax.get_position().y1) / 2 high_fig=None, low_fig=None, **kwargs):
fig.supylabel(label, x=x, y=y, **kwargs) low_y = high_ax.get_position().y0
return None high_y = low_ax.get_position().y1
if low_fig is not None or high_fig is not None:
trans_fig = BboxTransformTo(fig.bbox)
if low_fig is not None:
transform = BboxTransformTo(low_fig.bbox) + trans_fig.inverted()
low_y = transform.transform((0, low_y))[1]
if high_fig is not None:
transform = BboxTransformTo(high_fig.bbox) + trans_fig.inverted()
high_y = transform.transform((0, high_y))[1]
return fig.supylabel(label, x=x, y=(low_y + high_y) / 2, **kwargs)
def plot_line(ax, time, signal, ymin=None, ymax=None, xmin=None, xmax=None, def plot_line(ax, time, signal, ymin=None, ymax=None, xmin=None, xmax=None,
xpad=None, ypad=0.05, yloc=None, xloc=None, **kwargs): xpad=None, ypad=0.05, yloc=None, xloc=None, **kwargs):
@@ -170,18 +187,20 @@ def strip_zeros(num, right_digits=5):
return left return left
def time_bar(ax, dur, y0=0.9, y1=0.95, xshift=0.5, parent=None, transform=None, **kwargs): def time_bar(ax, dur, y0=0.9, y1=0.95, xshift=0.5, parent=None, transform=None, **kwargs):
t0, t1 = ax.get_xlim() t_lims = ax.get_xlim()
offset = (t1 - t0 - dur) * xshift span = t_lims[1] - t_lims[0]
x0 = t0 + offset if parent is not None or transform is not None:
x1 = x0 + dur if transform is None:
if parent is None: transform = BboxTransformTo(parent.bbox)
kwargs['transform'] = transform
transform = ax.transData + transform.inverted()
x0 = transform.transform((t_lims[0], 0))[0]
x1 = transform.transform((t_lims[0] + dur, 0))[0]
dur = x1 - x0
span = 1
elif parent is None:
parent = ax parent = ax
if transform is None: x0 = (span - dur) * xshift
transform = BboxTransformTo(parent.bbox) x1 = x0 + dur
if transform is not ax.transData: parent.add_artist(plt.Rectangle((x0, y0), dur, y1 - y0, **kwargs))
trans = ax.transData + transform.inverted()
x0 = trans.transform((x0, 0))[0]
x1 = trans.transform((x1, 0))[0]
parent.add_artist(plt.Rectangle((x0, y0), x1 - x0, y1 - y0,
transform=transform, **kwargs))
return None return None

View File

@@ -14,28 +14,30 @@ save_path = '../data/inv/full/'
# ANALYSIS SETTINGS: # ANALYSIS SETTINGS:
example_scales = np.array([0, 0.5, 1, 5, 10]) example_scales = np.array([0, 0.5, 1, 5, 10])
scales = np.linspace(0, 10, 100) scales = np.geomspace(0.01, 10, 100)
scales = np.unique(np.concatenate((scales, example_scales))) scales = np.unique(np.concatenate((scales, example_scales)))
# EXECUTION: # EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)): for data_path, name in zip(data_paths, crop_paths(data_paths)):
print(f'Processing {name}') print(f'Processing {name}')
# Get normalized song recording: # Get song recording:
data, config = load_data(data_path, files='raw') data, config = load_data(data_path, files='raw')
song, rate = data['raw'], config['rate'] song, rate = data['raw'], config['rate']
song /= song.std(axis=0)
# Get normalized noise:
rng = np.random.default_rng()
noise = rng.normal(size=song.shape[0])
noise /= noise.std()
# Get song segment to be analyzed: # Get song segment to be analyzed:
time = np.arange(song.shape[0]) / rate time = np.arange(song.shape[0]) / rate
start, end = data['songs_0'].ravel() start, end = data['songs_0'].ravel()
segment = (time >= start) & (time <= end) segment = (time >= start) & (time <= end)
# Normalize song component:
song /= song[segment].std(axis=0)
# Get normalized noise:
rng = np.random.default_rng()
noise = rng.normal(size=song.shape[0])
noise /= noise[segment].std()
# Prepare snippet storage: # Prepare snippet storage:
shape_low = (song.shape[0], example_scales.size) shape_low = (song.shape[0], example_scales.size)
shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size) shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size)
@@ -82,25 +84,11 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
scale_ind = np.nonzero(example_scales == scale)[0][0] scale_ind = np.nonzero(example_scales == scale)[0][0]
snippets[stage][:, ..., scale_ind] = signals[stage] snippets[stage][:, ..., scale_ind] = signals[stage]
# Log "intensity measure" per stage: # Log intensity measure per stage (excluding binary):
if stage in ['raw', 'filt', 'env', 'log', 'inv', 'conv']: if stage in ['raw', 'filt', 'env', 'log', 'inv', 'conv']:
measures[key][i] = signals[stage][segment, ...].std(axis=0) measures[key][i] = signals[stage][segment, ...].std(axis=0)
elif stage == 'feat': elif stage == 'feat':
measures[key][i] = signals[stage][segment, :].mean(axis=0) / signals[stage][segment, :].std(axis=0) measures[key][i] = signals[stage][segment, :].mean(axis=0)
# Relate to smallest scale:
base_ind = np.argmin(scales)
for stage in stages:
if stage == 'bi':
continue
key = f'measure_{stage}'
measures[key] /= measures[key][base_ind, ...]
if stage in ['conv', 'feat']:
spread = np.zeros((2, scales.size))
spread[0] = np.percentile(measures[key], 25, axis=1)
spread[1] = np.percentile(measures[key], 75, axis=1)
measures[f'spread_{stage}'] = spread
measures[key] = np.median(measures[key], axis=1)
# Save analysis results: # Save analysis results:
if save_path is not None: if save_path is not None:

View File

@@ -1,6 +1,5 @@
import glob import glob
import numpy as np import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.modeltools import load_data, save_data from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import crop_paths from thunderhopper.filetools import crop_paths
from thunderhopper.filters import sosfilter from thunderhopper.filters import sosfilter
@@ -14,10 +13,9 @@ save_path = '../data/inv/thresh_lp/'
# ANALYSIS SETTINGS: # ANALYSIS SETTINGS:
add_noise = False add_noise = False
thresh_percent = 90 thresh_percent = 90
example_scales = np.array([0, 0.5, 1, 10, 50]) example_scales = np.array([0, 1, 10, 50])
scales = np.geomspace(0.01, 50, 100) scales = np.geomspace(0.01, 100, 100)
scales = np.unique(np.concatenate((scales, example_scales))) scales = np.unique(np.concatenate((scales, example_scales)))
plot_results = True
# EXECUTION: # EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)): for data_path, name in zip(data_paths, crop_paths(data_paths)):
@@ -48,20 +46,14 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
# Reuse threshold from previous noise run: # Reuse threshold from previous noise run:
threshold = np.load(save_name + '_noise.npz')['thresh'] threshold = np.load(save_name + '_noise.npz')['thresh']
# Prepare snippet storage:
shape = song.shape + (example_scales.size,)
conv = np.zeros(shape, dtype=float)
bi = np.zeros(shape, dtype=float)
feat = np.zeros(shape, dtype=float)
# Prepare measure storage: # Prepare measure storage:
shape = (scales.size, song.shape[1]) shape = (scales.size, song.shape[1])
measure_conv = np.zeros(shape, dtype=float) # measure_conv = np.zeros(shape, dtype=float)
measure_feat = np.zeros(shape, dtype=float) measure_feat = np.zeros(shape, dtype=float)
# Execute piecewise: # Execute piecewise:
for i, scale in enumerate(scales): for i, scale in enumerate(scales):
print('Simulating scale ', scale) print('Simulating scale', scale)
# Rescale song component: # Rescale song component:
scaled_conv = song * scale scaled_conv = song * scale
@@ -74,53 +66,16 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
scaled_feat = sosfilter(scaled_bi, rate, config['feat_fcut'], 'lp', scaled_feat = sosfilter(scaled_bi, rate, config['feat_fcut'], 'lp',
padtype='fixed', padlen=config['padlen']) padtype='fixed', padlen=config['padlen'])
# Log snippet data: # Get intensity measure per stage:
if scale in example_scales: # measure_conv[i] = scaled_conv[segment, :].std(axis=0)
scale_ind = np.nonzero(example_scales == scale)[0][0]
conv[:, :, scale_ind] = scaled_conv
bi[:, :, scale_ind] = scaled_bi
feat[:, :, scale_ind] = scaled_feat
# Get "intensity measure" per stage:
measure_conv[i] = scaled_conv[segment, :].std(axis=0)
measure_feat[i] = scaled_feat[segment, :].mean(axis=0) measure_feat[i] = scaled_feat[segment, :].mean(axis=0)
# # Relate to smallest scale:
# base_ind = np.argmin(scales)
# measure_conv /= measure_conv[base_ind, :]
if plot_results:
fig, (ax1, ax2) = plt.subplots(2, 1)
ax1.plot(scales, measure_conv)
ax1.plot(scales, measure_conv.mean(axis=1), c='k')
ax1.plot(scales, np.median(measure_conv, axis=1), c='k', ls='--')
ax2.plot(scales, measure_feat)
ax2.plot(scales, np.nanmean(measure_feat, axis=1), c='k')
ax2.plot(scales, np.nanmedian(measure_feat, axis=1), c='k', ls='--')
plt.show()
# Condense measures across kernels:
spread_conv = np.zeros((2, scales.size))
spread_conv[0] = np.nanpercentile(measure_conv, 25, axis=1)
spread_conv[1] = np.nanpercentile(measure_conv, 75, axis=1)
measure_conv = np.nanmedian(measure_conv, axis=1)
spread_feat = np.zeros((2, scales.size))
spread_feat[0] = np.nanpercentile(measure_feat, 25, axis=1)
spread_feat[1] = np.nanpercentile(measure_feat, 75, axis=1)
measure_feat = np.nanmedian(measure_feat, axis=1)
# Save analysis results: # Save analysis results:
if save_path is not None: if save_path is not None:
data = dict( data = dict(
scales=scales, scales=scales,
example_scales=example_scales, # measure_conv=measure_conv,
conv=conv,
bi=bi,
feat=feat,
measure_conv=measure_conv,
spread_conv=spread_conv,
measure_feat=measure_feat, measure_feat=measure_feat,
spread_feat=spread_feat,
thresh=threshold, thresh=threshold,
thresh_perc=thresh_percent, thresh_perc=thresh_percent,
) )

View File

@@ -1,27 +1,29 @@
import glob
import numpy as np import numpy as np
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from thunderhopper.modeltools import load_data, save_data from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import crop_paths from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filters import sosfilter from thunderhopper.filters import sosfilter
from thunderhopper.filtertools import find_kern_specs from thunderhopper.filtertools import find_kern_specs, pdf_proportion
from IPython import embed from IPython import embed
# GENERAL SETTINGS: # GENERAL SETTINGS:
target = 'Omocestus_rufipes' target = ['Omocestus_rufipes', '*'][0]
data_paths = glob.glob(f'../data/processed/{target}*.npz') data_paths = search_files(target, dir='../data/processed/')
save_path = '../data/inv/thresh_lp/' save_path = '../data/inv/thresh_lp/'
# ANALYSIS SETTINGS: # ANALYSIS SETTINGS:
add_noise = True add_noise = False
thresh_percent = np.array([50, 75, 100]) save_snippets = True
example_scales = np.array([0, 1, 10, 50]) example_scales = np.array([0, 1, 10, 50])
scales = np.geomspace(0.01, 100, 100) scales = np.geomspace(0.01, 1000, 100)
scales = np.unique(np.concatenate((scales, example_scales))) scales = np.unique(np.concatenate((scales, example_scales)))
thresh_percent = np.array([0.6, 0.75, 0.999])
thresholds = pdf_proportion(thresh_percent, sd=1, mu=0)
plot_results = False plot_results = False
kernels = np.array([ kernels = np.array([
[2, 0.008], [1, 0.008],
[4, 0.008], [2, 0.004],
[3, 0.002],
]) ])
# EXECUTION: # EXECUTION:
@@ -54,22 +56,20 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
noise = rng.normal(size=(conv.shape[0], 1)) noise = rng.normal(size=(conv.shape[0], 1))
noise /= noise[segment].std() noise /= noise[segment].std()
# Prepare snippet storage: if save_snippets:
shape = conv.shape + (example_scales.size, thresh_percent.size) # Prepare snippet storage:
snip_conv = np.zeros(shape, dtype=float) shape = conv.shape + (example_scales.size, thresh_percent.size)
snip_bi = np.zeros(shape, dtype=float) snip_conv = np.zeros(shape, dtype=float)
snip_feat = np.zeros(shape, dtype=float) snip_bi = np.zeros(shape, dtype=float)
snip_feat = np.zeros(shape, dtype=float)
# Prepare measure storage: # Prepare measure storage:
shape = (scales.size, conv.shape[1], thresh_percent.size) shape = (scales.size, conv.shape[1], thresh_percent.size)
measure_conv = np.zeros(shape, dtype=float) # measure_conv = np.zeros(shape, dtype=float)
measure_feat = np.zeros(shape, dtype=float) measure_feat = np.zeros(shape, dtype=float)
# Compute kernel-specific thresholds (thresholds, kernels):
thresholds = np.percentile(conv, thresh_percent, axis=0)
# Execute piecewise analysis: # Execute piecewise analysis:
for i, threshs in enumerate(thresholds): for i, thresh in enumerate(thresholds):
print('\nSimulating threshold ', thresh_percent[i]) print('\nSimulating threshold ', thresh_percent[i])
for j, scale in enumerate(scales): for j, scale in enumerate(scales):
@@ -82,20 +82,20 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
scaled_conv += noise scaled_conv += noise
# Process mixture: # Process mixture:
scaled_bi = (scaled_conv > threshs).astype(float) scaled_bi = (scaled_conv > thresh).astype(float)
scaled_feat = sosfilter(scaled_bi, rate, config['feat_fcut'], 'lp', scaled_feat = sosfilter(scaled_bi, rate, config['feat_fcut'], 'lp',
padtype='fixed', padlen=config['padlen']) padtype='fixed', padlen=config['padlen'])
# Log snippet data: # Log snippet data:
if scale in example_scales: if save_snippets and scale in example_scales:
scale_ind = np.nonzero(example_scales == scale)[0][0] scale_ind = np.nonzero(example_scales == scale)[0][0]
snip_conv[:, :, scale_ind, i] = scaled_conv snip_conv[:, :, scale_ind, i] = scaled_conv
snip_bi[:, :, scale_ind, i] = scaled_bi snip_bi[:, :, scale_ind, i] = scaled_bi
snip_feat[:, :, scale_ind, i] = scaled_feat snip_feat[:, :, scale_ind, i] = scaled_feat
# Get intensity measure per stage: # Get intensity measure per stage:
measure_conv[j, :, i] = scaled_conv[segment, :].std(axis=0)
measure_feat[j, :, i] = scaled_feat[segment, :].mean(axis=0) measure_feat[j, :, i] = scaled_feat[segment, :].mean(axis=0)
# measure_conv[j, :, i] = scaled_conv[segment, :].std(axis=0)
if plot_results: if plot_results:
fig, axes = plt.subplots(thresh_percent.size, kernels.shape[0], fig, axes = plt.subplots(thresh_percent.size, kernels.shape[0],
@@ -103,6 +103,7 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
sharex=True, sharey=True, squeeze=True) sharex=True, sharey=True, squeeze=True)
axes[0, 0].set_xscale('symlog', linthresh=scales[scales>0].min(), axes[0, 0].set_xscale('symlog', linthresh=scales[scales>0].min(),
linscale=0.25) linscale=0.25)
axes[0, 0].set_ylim(0, 1)
for i, thresh in enumerate(thresh_percent): for i, thresh in enumerate(thresh_percent):
for j, kernel in enumerate(kernels): for j, kernel in enumerate(kernels):
@@ -111,7 +112,7 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
if i == 0: if i == 0:
ax.set_title(f'Kernel {kernel}') ax.set_title(f'Kernel {kernel}')
if j == 0: if j == 0:
ax.set_ylabel(f'{thresh}%') ax.set_ylabel(f'{100 * thresh}%')
plt.show() plt.show()
# Save analysis results: # Save analysis results:
@@ -119,14 +120,17 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
data = dict( data = dict(
scales=scales, scales=scales,
example_scales=example_scales, example_scales=example_scales,
snip_conv=snip_conv, # measure_conv=measure_conv,
snip_bi=snip_bi,
snip_feat=snip_feat,
measure_conv=measure_conv,
measure_feat=measure_feat, measure_feat=measure_feat,
thresh_perc=thresh_percent, thresh_perc=thresh_percent,
threshs=thresholds, threshs=thresholds,
) )
if save_snippets:
data.update(dict(
snip_conv=snip_conv,
snip_bi=snip_bi,
snip_feat=snip_feat,
))
if add_noise: if add_noise:
save_name += '_noise' save_name += '_noise'
save_data(save_name, data, config, overwrite=True) save_data(save_name, data, config, overwrite=True)