Seriously, no idea. Wild amount of changes. Good luck.

This commit is contained in:
j-hartling
2026-04-17 17:19:30 +02:00
parent 36ac504efa
commit 3b4b7f2161
40 changed files with 2067 additions and 672 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -269,6 +269,8 @@
\newlabel{}{{8}{20}{}{}{}} \newlabel{}{{8}{20}{}{}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces \textbf {} }}{20}{}\protected@file@percent } \@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces \textbf {} }}{20}{}\protected@file@percent }
\newlabel{}{{9}{20}{}{}{}} \newlabel{}{{9}{20}{}{}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {10}{\ignorespaces \textbf {} }}{21}{}\protected@file@percent }
\newlabel{}{{10}{21}{}{}{}}
\gdef\svg@ink@ver@settings{{\m@ne }{inkscape}{\m@ne }} \gdef\svg@ink@ver@settings{{\m@ne }{inkscape}{\m@ne }}
\abx@aux@read@bbl@mdfivesum{1380DC8C93D2855FDB132CC5A40AD52F} \abx@aux@read@bbl@mdfivesum{1380DC8C93D2855FDB132CC5A40AD52F}
\gdef \@abspage@last{20} \gdef \@abspage@last{21}

138
main.blg
View File

@@ -1,71 +1,71 @@
[0] Config.pm:307> INFO - This is Biber 2.19 [0] Config.pm:307> INFO - This is Biber 2.19
[0] Config.pm:310> INFO - Logfile is 'main.blg' [0] Config.pm:310> INFO - Logfile is 'main.blg'
[36] biber:340> INFO - === Di Apr 14, 2026, 17:23:24 [39] biber:340> INFO - === Fr Apr 17, 2026, 11:47:16
[45] Biber.pm:419> INFO - Reading 'main.bcf' [47] Biber.pm:419> INFO - Reading 'main.bcf'
[74] Biber.pm:979> INFO - Found 55 citekeys in bib section 0 [76] Biber.pm:979> INFO - Found 55 citekeys in bib section 0
[79] Biber.pm:4419> INFO - Processing section 0 [82] Biber.pm:4419> INFO - Processing section 0
[84] Biber.pm:4610> INFO - Looking for bibtex file 'cite.bib' for section 0 [87] Biber.pm:4610> INFO - Looking for bibtex file 'cite.bib' for section 0
[86] bibtex.pm:1713> INFO - LaTeX decoding ... [88] bibtex.pm:1713> INFO - LaTeX decoding ...
[115] bibtex.pm:1519> INFO - Found BibTeX data source 'cite.bib' [119] bibtex.pm:1519> INFO - Found BibTeX data source 'cite.bib'
[289] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'normalization = NFD' with 'normalization = prenormalized' [296] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'normalization = NFD' with 'normalization = prenormalized'
[289] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'variable = shifted' with 'variable = non-ignorable' [296] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'variable = shifted' with 'variable = non-ignorable'
[289] Biber.pm:4239> INFO - Sorting list 'nyt/global//global/global' of type 'entry' with template 'nyt' and locale 'en-US' [296] Biber.pm:4239> INFO - Sorting list 'nyt/global//global/global' of type 'entry' with template 'nyt' and locale 'en-US'
[289] Biber.pm:4245> INFO - No sort tailoring available for locale 'en-US' [296] Biber.pm:4245> INFO - No sort tailoring available for locale 'en-US'
[312] bbl.pm:660> INFO - Writing 'main.bbl' with encoding 'UTF-8' [322] bbl.pm:660> INFO - Writing 'main.bbl' with encoding 'UTF-8'
[323] bbl.pm:763> INFO - Output to main.bbl [333] bbl.pm:763> INFO - Output to main.bbl
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 10, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 10, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 21, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 21, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 38, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 38, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 49, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 49, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 58, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 58, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 73, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 73, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 82, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 82, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 91, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 91, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 100, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 100, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 109, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 109, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 118, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 118, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 127, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 127, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 136, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 136, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 157, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 157, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 178, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 178, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 187, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 187, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 196, warning: 6 characters of junk seen at toplevel [333] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 196, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 207, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 207, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 218, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 218, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 229, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 229, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 240, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 240, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 249, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 249, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 258, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 258, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 269, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 269, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 278, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 278, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 289, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 289, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 300, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 300, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 309, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 309, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 328, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 328, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 337, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 337, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 400, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 400, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 419, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 419, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 428, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 428, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 437, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 437, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 456, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 456, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 491, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 491, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 526, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 526, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 535, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 535, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 556, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 556, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 565, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 565, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 576, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 576, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 587, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 587, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 619, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 619, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 648, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 648, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 658, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 658, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 667, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 667, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 688, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 688, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 709, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 709, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 720, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 720, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 729, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 729, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 749, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 749, warning: 6 characters of junk seen at toplevel
[324] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 766, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 766, warning: 6 characters of junk seen at toplevel
[325] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 775, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 775, warning: 6 characters of junk seen at toplevel
[325] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 800, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 800, warning: 6 characters of junk seen at toplevel
[325] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_Fmmc/347c261ec4135a5723bef5c751f5078f_87767.utf8, line 817, warning: 6 characters of junk seen at toplevel [334] Biber.pm:131> WARN - BibTeX subsystem: /tmp/biber_tmp_0bop/347c261ec4135a5723bef5c751f5078f_47066.utf8, line 817, warning: 6 characters of junk seen at toplevel
[325] Biber.pm:133> INFO - WARNINGS: 55 [334] Biber.pm:133> INFO - WARNINGS: 55

View File

@@ -1,14 +1,14 @@
# Fdb version 4 # Fdb version 4
["biber main"] 1776180204.48481 "main.bcf" "main.bbl" "main" 1776180220.70543 0 ["biber main"] 1776419236.06276 "main.bcf" "main.bbl" "main" 1776419708.51683 0
"cite.bib" 1770904753.08918 27483 4290db0c91f7b5055e25472ef913f6b4 "" "cite.bib" 1770904753.08918 27483 4290db0c91f7b5055e25472ef913f6b4 ""
"main.bcf" 1776180220.65176 112931 2a478116d80ebb1ada7083a24facd6e3 "pdflatex" "main.bcf" 1776419708.45482 112931 2a478116d80ebb1ada7083a24facd6e3 "pdflatex"
(generated) (generated)
"main.bbl" "main.bbl"
"main.blg" "main.blg"
(rewritten before read) (rewritten before read)
["pdflatex"] 1776180219.79356 "/home/hartling/phd/paper/paper_2025/main.tex" "main.pdf" "main" 1776180220.70564 0 ["pdflatex"] 1776419707.50594 "/home/hartling/phd/paper/paper_2025/main.tex" "main.pdf" "main" 1776419708.51707 0
"/etc/texmf/web2c/texmf.cnf" 1761560044.43676 475 c0e671620eb5563b2130f56340a5fde8 "" "/etc/texmf/web2c/texmf.cnf" 1761560044.43676 475 c0e671620eb5563b2130f56340a5fde8 ""
"/home/hartling/phd/paper/paper_2025/main.tex" 1776180218.75477 48226 a69e6ee3ae074d48c75cb0bb31adebb7 "" "/home/hartling/phd/paper/paper_2025/main.tex" 1776419707.40583 49006 9efa08ff012ade2af94d38bf09748eb2 ""
"/usr/share/texlive/texmf-dist/fonts/map/fontname/texfonts.map" 1577235249 3524 cb3e574dea2d1052e39280babc910dc8 "" "/usr/share/texlive/texmf-dist/fonts/map/fontname/texfonts.map" 1577235249 3524 cb3e574dea2d1052e39280babc910dc8 ""
"/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex7.tfm" 1246382020 1004 54797486969f23fa377b128694d548df "" "/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex7.tfm" 1246382020 1004 54797486969f23fa377b128694d548df ""
"/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex8.tfm" 1246382020 988 bdf658c3bfc2d96d3c8b02cfc1c94c20 "" "/usr/share/texlive/texmf-dist/fonts/tfm/public/amsfonts/cmextra/cmex8.tfm" 1246382020 988 bdf658c3bfc2d96d3c8b02cfc1c94c20 ""
@@ -154,16 +154,17 @@
"figures/fig_auditory_pathway.pdf" 1771593904.14638 1153923 3df8539421fd21dc866cc8d320bd9b1d "" "figures/fig_auditory_pathway.pdf" 1771593904.14638 1153923 3df8539421fd21dc866cc8d320bd9b1d ""
"figures/fig_feat_stages.pdf" 1774002994.98767 11091006 565fe951f1255c121429a060082398f5 "" "figures/fig_feat_stages.pdf" 1774002994.98767 11091006 565fe951f1255c121429a060082398f5 ""
"figures/fig_invariance_full.pdf" 1775044457.44326 51364 cdc2f94096a4ec98dd58b81315199213 "" "figures/fig_invariance_full.pdf" 1775044457.44326 51364 cdc2f94096a4ec98dd58b81315199213 ""
"figures/fig_invariance_log-hp_species.pdf" 1776180090.89783 565762 9243d82b7795c23ce67cc60cc79852fe "" "figures/fig_invariance_log-hp_appendix.pdf" 1776417315.40604 569449 c18c449fd64897296eb276a04e989264 ""
"figures/fig_invariance_log_hp.pdf" 1776176630.23299 837853 77e94473c532de76ca2a152c8c1509f4 "" "figures/fig_invariance_log_hp.pdf" 1776341186.13598 838755 47a174c0a49618da118b592fecb771d6 ""
"figures/fig_invariance_thresh-lp_appendix.pdf" 1776416900.35698 1579513 2f918382c2096f397b128a9d11011674 ""
"figures/fig_invariance_thresh_lp_single.pdf" 1774448531.93474 921028 cae18b62e262b42f630e219fcaa0ca09 "" "figures/fig_invariance_thresh_lp_single.pdf" 1774448531.93474 921028 cae18b62e262b42f630e219fcaa0ca09 ""
"figures/fig_invariance_thresh_lp_species.pdf" 1774942339.01068 397460 09bf9690db00ef77c4a2faf0d4e2c8d7 "" "figures/fig_invariance_thresh_lp_species.pdf" 1776410754.59568 1801023 deef4bae585ba144c48a08300664517f ""
"figures/fig_noise_env_sd_conversion.pdf" 1774256952.42051 45466 671a2b8fbf72b4eba6b970b4421f2521 "" "figures/fig_noise_env_sd_conversion_appendix.pdf" 1776328774.43347 45466 c2be20312c1572203bdbeb9c8e32525e ""
"figures/fig_pre_stages.pdf" 1774002992.74268 449426 5762be15627fe5d8b6d108b7ea18db44 "" "figures/fig_pre_stages.pdf" 1774002992.74268 449426 5762be15627fe5d8b6d108b7ea18db44 ""
"main.aux" 1776180220.64576 15421 c52b01adcd46aa2ef82220b23727550f "pdflatex" "main.aux" 1776419708.44782 15566 dacdf566c3a198221a23ecda641af360 "pdflatex"
"main.bbl" 1776180205.19289 91039 1380dc8c93d2855fdb132cc5a40ad52f "biber main" "main.bbl" 1776419236.72776 91039 1380dc8c93d2855fdb132cc5a40ad52f "biber main"
"main.run.xml" 1776180220.65276 2335 a049bc26a7f032e842ce55de5bc38328 "pdflatex" "main.run.xml" 1776419708.45582 2335 a049bc26a7f032e842ce55de5bc38328 "pdflatex"
"main.tex" 1776180218.75477 48226 a69e6ee3ae074d48c75cb0bb31adebb7 "" "main.tex" 1776419707.40583 49006 9efa08ff012ade2af94d38bf09748eb2 ""
(generated) (generated)
"main.aux" "main.aux"
"main.bcf" "main.bcf"

View File

@@ -308,16 +308,21 @@ INPUT ./figures/fig_invariance_full.pdf
INPUT ./figures/fig_invariance_full.pdf INPUT ./figures/fig_invariance_full.pdf
INPUT ./figures/fig_invariance_full.pdf INPUT ./figures/fig_invariance_full.pdf
INPUT ./figures/fig_invariance_full.pdf INPUT ./figures/fig_invariance_full.pdf
INPUT ./figures/fig_noise_env_sd_conversion.pdf INPUT ./figures/fig_noise_env_sd_conversion_appendix.pdf
INPUT ./figures/fig_noise_env_sd_conversion.pdf INPUT ./figures/fig_noise_env_sd_conversion_appendix.pdf
INPUT ./figures/fig_noise_env_sd_conversion.pdf INPUT ./figures/fig_noise_env_sd_conversion_appendix.pdf
INPUT ./figures/fig_noise_env_sd_conversion.pdf INPUT ./figures/fig_noise_env_sd_conversion_appendix.pdf
INPUT ./figures/fig_noise_env_sd_conversion.pdf INPUT ./figures/fig_noise_env_sd_conversion_appendix.pdf
INPUT ./figures/fig_invariance_log-hp_species.pdf INPUT ./figures/fig_invariance_log-hp_appendix.pdf
INPUT ./figures/fig_invariance_log-hp_species.pdf INPUT ./figures/fig_invariance_log-hp_appendix.pdf
INPUT ./figures/fig_invariance_log-hp_species.pdf INPUT ./figures/fig_invariance_log-hp_appendix.pdf
INPUT ./figures/fig_invariance_log-hp_species.pdf INPUT ./figures/fig_invariance_log-hp_appendix.pdf
INPUT ./figures/fig_invariance_log-hp_species.pdf INPUT ./figures/fig_invariance_log-hp_appendix.pdf
INPUT ./figures/fig_invariance_thresh-lp_appendix.pdf
INPUT ./figures/fig_invariance_thresh-lp_appendix.pdf
INPUT ./figures/fig_invariance_thresh-lp_appendix.pdf
INPUT ./figures/fig_invariance_thresh-lp_appendix.pdf
INPUT ./figures/fig_invariance_thresh-lp_appendix.pdf
INPUT main.aux INPUT main.aux
INPUT main.run.xml INPUT main.run.xml
OUTPUT main.run.xml OUTPUT main.run.xml

View File

@@ -1,4 +1,4 @@
This is pdfTeX, Version 3.141592653-2.6-1.40.25 (TeX Live 2023/Debian) (preloaded format=pdflatex 2025.10.28) 14 APR 2026 17:23 This is pdfTeX, Version 3.141592653-2.6-1.40.25 (TeX Live 2023/Debian) (preloaded format=pdflatex 2025.10.28) 17 APR 2026 11:55
entering extended mode entering extended mode
restricted \write18 enabled. restricted \write18 enabled.
file:line:error style messages enabled. file:line:error style messages enabled.
@@ -595,6 +595,9 @@ Package biblatex Warning: 'babel/polyglossia' detected but 'csquotes' missing.
\@quotereset=\count435 \@quotereset=\count435
(./main.aux (./main.aux
LaTeX Warning: Label `' multiply defined.
LaTeX Warning: Label `' multiply defined. LaTeX Warning: Label `' multiply defined.
) )
@@ -771,7 +774,7 @@ Package pdftex.def Info: figures/fig_feat_stages.pdf used on input line 542.
<figures/fig_invariance_log_hp.pdf, id=1141, 890.96089pt x 918.57785pt> <figures/fig_invariance_log_hp.pdf, id=1141, 890.96089pt x 918.57785pt>
File: figures/fig_invariance_log_hp.pdf Graphic file (type pdf) File: figures/fig_invariance_log_hp.pdf Graphic file (type pdf)
<use figures/fig_invariance_log_hp.pdf> <use figures/fig_invariance_log_hp.pdf>
Package pdftex.def Info: figures/fig_invariance_log_hp.pdf used on input line 632. Package pdftex.def Info: figures/fig_invariance_log_hp.pdf used on input line 645.
(pdftex.def) Requested size: 483.69687pt x 498.70178pt. (pdftex.def) Requested size: 483.69687pt x 498.70178pt.
[12] [12]
@@ -781,34 +784,39 @@ LaTeX Warning: Text page 13 contains only floats.
<figures/fig_invariance_thresh_lp_single.pdf, id=1232, 910.48819pt x 455.2441pt> <figures/fig_invariance_thresh_lp_single.pdf, id=1232, 910.48819pt x 455.2441pt>
File: figures/fig_invariance_thresh_lp_single.pdf Graphic file (type pdf) File: figures/fig_invariance_thresh_lp_single.pdf Graphic file (type pdf)
<use figures/fig_invariance_thresh_lp_single.pdf> <use figures/fig_invariance_thresh_lp_single.pdf>
Package pdftex.def Info: figures/fig_invariance_thresh_lp_single.pdf used on input line 663. Package pdftex.def Info: figures/fig_invariance_thresh_lp_single.pdf used on input line 676.
(pdftex.def) Requested size: 483.69687pt x 241.84782pt. (pdftex.def) Requested size: 483.69687pt x 241.84782pt.
<figures/fig_invariance_thresh_lp_species.pdf, id=1233, 910.48819pt x 910.48819pt> <figures/fig_invariance_thresh_lp_species.pdf, id=1233, 910.48819pt x 910.48819pt>
File: figures/fig_invariance_thresh_lp_species.pdf Graphic file (type pdf) File: figures/fig_invariance_thresh_lp_species.pdf Graphic file (type pdf)
<use figures/fig_invariance_thresh_lp_species.pdf> <use figures/fig_invariance_thresh_lp_species.pdf>
Package pdftex.def Info: figures/fig_invariance_thresh_lp_species.pdf used on input line 693. Package pdftex.def Info: figures/fig_invariance_thresh_lp_species.pdf used on input line 706.
(pdftex.def) Requested size: 483.69687pt x 483.69566pt. (pdftex.def) Requested size: 483.69687pt x 483.69566pt.
[14 <./figures/fig_invariance_thresh_lp_single.pdf>] [14 <./figures/fig_invariance_thresh_lp_single.pdf>]
<figures/fig_invariance_full.pdf, id=1305, 910.48819pt x 569.05513pt> <figures/fig_invariance_full.pdf, id=1305, 910.48819pt x 569.05513pt>
File: figures/fig_invariance_full.pdf Graphic file (type pdf) File: figures/fig_invariance_full.pdf Graphic file (type pdf)
<use figures/fig_invariance_full.pdf> <use figures/fig_invariance_full.pdf>
Package pdftex.def Info: figures/fig_invariance_full.pdf used on input line 703. Package pdftex.def Info: figures/fig_invariance_full.pdf used on input line 716.
(pdftex.def) Requested size: 483.69687pt x 302.3098pt. (pdftex.def) Requested size: 483.69687pt x 302.3098pt.
[15 <./figures/fig_invariance_thresh_lp_species.pdf>] [16 <./figures/fig_invariance_full.pdf>] [17 [15 <./figures/fig_invariance_thresh_lp_species.pdf>] [16 <./figures/fig_invariance_full.pdf>] [17
] [18] ] [18]
<figures/fig_noise_env_sd_conversion.pdf, id=1512, 910.48819pt x 455.2441pt> <figures/fig_noise_env_sd_conversion_appendix.pdf, id=1526, 910.48819pt x 455.2441pt>
File: figures/fig_noise_env_sd_conversion.pdf Graphic file (type pdf) File: figures/fig_noise_env_sd_conversion_appendix.pdf Graphic file (type pdf)
<use figures/fig_noise_env_sd_conversion.pdf> <use figures/fig_noise_env_sd_conversion_appendix.pdf>
Package pdftex.def Info: figures/fig_noise_env_sd_conversion.pdf used on input line 862. Package pdftex.def Info: figures/fig_noise_env_sd_conversion_appendix.pdf used on input line 875.
(pdftex.def) Requested size: 483.69687pt x 241.84782pt. (pdftex.def) Requested size: 483.69687pt x 241.84782pt.
[19] [19]
<figures/fig_invariance_log-hp_species.pdf, id=1517, 910.48819pt x 455.2441pt> <figures/fig_invariance_log-hp_appendix.pdf, id=1531, 910.48819pt x 455.2441pt>
File: figures/fig_invariance_log-hp_species.pdf Graphic file (type pdf) File: figures/fig_invariance_log-hp_appendix.pdf Graphic file (type pdf)
<use figures/fig_invariance_log-hp_species.pdf> <use figures/fig_invariance_log-hp_appendix.pdf>
Package pdftex.def Info: figures/fig_invariance_log-hp_species.pdf used on input line 871. Package pdftex.def Info: figures/fig_invariance_log-hp_appendix.pdf used on input line 884.
(pdftex.def) Requested size: 483.69687pt x 241.84782pt. (pdftex.def) Requested size: 483.69687pt x 241.84782pt.
[20 <./figures/fig_noise_env_sd_conversion.pdf> <./figures/fig_invariance_log-hp_species.pdf>] (./main.aux) <figures/fig_invariance_thresh-lp_appendix.pdf, id=1532, 910.48819pt x 455.2441pt>
File: figures/fig_invariance_thresh-lp_appendix.pdf Graphic file (type pdf)
<use figures/fig_invariance_thresh-lp_appendix.pdf>
Package pdftex.def Info: figures/fig_invariance_thresh-lp_appendix.pdf used on input line 893.
(pdftex.def) Requested size: 483.69687pt x 241.84782pt.
[20 <./figures/fig_noise_env_sd_conversion_appendix.pdf> <./figures/fig_invariance_log-hp_appendix.pdf>] [21 <./figures/fig_invariance_thresh-lp_appendix.pdf>] (./main.aux)
*********** ***********
LaTeX2e <2023-11-01> patch level 1 LaTeX2e <2023-11-01> patch level 1
L3 programming layer <2024-01-22> L3 programming layer <2024-01-22>
@@ -822,18 +830,18 @@ Package logreq Info: Writing requests to 'main.run.xml'.
) )
Here is how much of TeX's memory you used: Here is how much of TeX's memory you used:
20769 strings out of 474222 20776 strings out of 474222
448376 string characters out of 5748732 448833 string characters out of 5748732
1937975 words of memory out of 5000000 1937975 words of memory out of 5000000
42759 multiletter control sequences out of 15000+600000 42766 multiletter control sequences out of 15000+600000
569394 words of font info for 79 fonts, out of 8000000 for 9000 569394 words of font info for 79 fonts, out of 8000000 for 9000
1143 hyphenation exceptions out of 8191 1143 hyphenation exceptions out of 8191
94i,19n,93p,1496b,1740s stack positions out of 10000i,1000n,20000p,200000b,200000s 94i,18n,93p,1496b,1740s stack positions out of 10000i,1000n,20000p,200000b,200000s
</usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbxti10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmex10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr17.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmti12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/symbols/msbm10.pfb> 
</usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbx12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmbxti10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmex10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr17.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmr8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmsy8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmti12.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/symbols/msbm10.pfb>
Output written on main.pdf (20 pages, 15648193 bytes). Output written on main.pdf (21 pages, 18635824 bytes).
PDF statistics: PDF statistics:
1702 PDF objects out of 1728 (max. 8388607) 1786 PDF objects out of 2073 (max. 8388607)
884 compressed objects within 9 object streams 905 compressed objects within 10 object streams
0 named destinations out of 1000 (max. 500000) 0 named destinations out of 1000 (max. 500000)
58 words of extra memory for PDF output out of 10000 (max. 10000000) 63 words of extra memory for PDF output out of 10000 (max. 10000000)

BIN
main.pdf

Binary file not shown.

Binary file not shown.

View File

@@ -393,7 +393,7 @@ signal~(\bcite{machens2001discrimination}), which likely involves a rectifying
nonlinearity~(\bcite{machens2001representation}). This can be modelled as nonlinearity~(\bcite{machens2001representation}). This can be modelled as
full-wave rectification followed by lowpass filtering full-wave rectification followed by lowpass filtering
\begin{equation} \begin{equation}
\env(t)\,=\,|\filt(t)|\,*\,\lp, \qquad \fc\,=\,500\,\text{Hz} \env(t)\,=\,|\filt(t)|\,*\,\lp, \qquad \fc\,=\,250\,\text{Hz}
\label{eq:env} \label{eq:env}
\end{equation} \end{equation}
of the tympanal signal $\filt(t)$. Furthermore, the receptors exhibit a of the tympanal signal $\filt(t)$. Furthermore, the receptors exhibit a
@@ -401,7 +401,7 @@ sigmoidal response curve over logarithmically compressed intensity
levels~(\bcite{suga1960peripheral}; \bcite{gollisch2002energy}). In the model levels~(\bcite{suga1960peripheral}; \bcite{gollisch2002energy}). In the model
pathway, logarithmic compression is achieved by conversion to decibel scale pathway, logarithmic compression is achieved by conversion to decibel scale
\begin{equation} \begin{equation}
\db(t)\,=\,10\,\cdot\,\dec \frac{\env(t)}{\dbref}, \qquad \dbref\,=\,\max\big[\env(t)\big] \db(t)\,=\,20\,\cdot\,\dec \frac{\env(t)}{\dbref}, \qquad \dbref\,=\,1
\label{eq:log} \label{eq:log}
\end{equation} \end{equation}
relative to the maximum intensity $\dbref$ of the signal envelope $\env(t)$. relative to the maximum intensity $\dbref$ of the signal envelope $\env(t)$.
@@ -586,10 +586,17 @@ and a fixed-scale noise component $\noc(t)$. Both $\soc(t)$ and $\noc(t)$ are
assumed to have unit variance. By conversion of $\env(t)$ to decibel assumed to have unit variance. By conversion of $\env(t)$ to decibel
scale~(Eq.\,\ref{eq:log}), $\sca$ turns from a multiplicative scale in linear scale~(Eq.\,\ref{eq:log}), $\sca$ turns from a multiplicative scale in linear
space into an additive term, or offset, in logarithmic space space into an additive term, or offset, in logarithmic space
% \begin{equation}
% \begin{split}
% \db(t)\,&=\,\dec \frac{\alpha\,\cdot\,s(t)\,+\,\eta(t)}{\dbref}\\
% &=\,\dec \frac{\alpha}{\dbref}\,+\,\dec \left[s(t)\,+\,\frac{\eta(t)}{\alpha}\right], \qquad \sca\,>\,0
% \end{split}
% \label{eq:toy_log}
% \end{equation}
\begin{equation} \begin{equation}
\begin{split} \begin{split}
\db(t)\,&=\,\log \frac{\alpha\,\cdot\,s(t)\,+\,\eta(t)}{\dbref}\\ \db(t)\,&=\,20\,\cdot\,\dec \left[\,\sca\,\cdot\,s(t)\,+\,\eta(t)\,\right]\\
&=\,\log \frac{\alpha}{\dbref}\,+\,\log \left[s(t)\,+\,\frac{\eta(t)}{\alpha}\right] &=\,20\,\cdot\,\left(\dec \sca\,+\,\dec \left[s(t)\,+\,\frac{\eta(t)}{\sca}\right]\right), \qquad \sca\,>\,0
\end{split} \end{split}
\label{eq:toy_log} \label{eq:toy_log}
\end{equation} \end{equation}
@@ -598,9 +605,15 @@ $\noc(t)$ by the inverse of $\sca$. The subsequent
highpass-filtering~(Eq.\,\ref{eq:highpass}) of $\db(t)$ can then be highpass-filtering~(Eq.\,\ref{eq:highpass}) of $\db(t)$ can then be
approximated as a subtraction of the local offset within a suitable time approximated as a subtraction of the local offset within a suitable time
interval $0 \ll \thp < \frac{1}{\fc}$: interval $0 \ll \thp < \frac{1}{\fc}$:
% \begin{equation}
% \begin{split}
% \adapt(t)\,\approx\,\db(t)\,-\,\dec \frac{\sca}{\dbref}\,=\,\dec\left[s(t)\,+\,\frac{\eta(t)}{\sca}\right], \qquad \sca\,>\,0
% \end{split}
% \label{eq:toy_highpass}
% \end{equation}
\begin{equation} \begin{equation}
\begin{split} \begin{split}
\adapt(t)\,\approx\,\db(t)\,-\,\log \frac{\alpha}{\dbref}\,=\,\log\left[s(t)\,+\,\frac{\eta(t)}{\alpha}\right] \adapt(t)\,\approx\,\db(t)\,-\,20\,\cdot\,\dec \sca\,=\,20\,\cdot\,\dec\left[s(t)\,+\,\frac{\eta(t)}{\sca}\right], \qquad \sca\,>\,0
\end{split} \end{split}
\label{eq:toy_highpass} \label{eq:toy_highpass}
\end{equation} \end{equation}
@@ -859,7 +872,7 @@ initiation of one behavior over another is categorical (e.g. approach/stay)
\begin{figure}[!ht] \begin{figure}[!ht]
\centering \centering
\includegraphics[width=\textwidth]{figures/fig_noise_env_sd_conversion.pdf} \includegraphics[width=\textwidth]{figures/fig_noise_env_sd_conversion_appendix.pdf}
\caption{\textbf{} \caption{\textbf{}
} }
\label{} \label{}
@@ -868,7 +881,16 @@ initiation of one behavior over another is categorical (e.g. approach/stay)
\begin{figure}[!ht] \begin{figure}[!ht]
\centering \centering
\includegraphics[width=\textwidth]{figures/fig_invariance_log-hp_species.pdf} \includegraphics[width=\textwidth]{figures/fig_invariance_log-hp_appendix.pdf}
\caption{\textbf{}
}
\label{}
\end{figure}
\FloatBarrier
\begin{figure}[!ht]
\centering
\includegraphics[width=\textwidth]{figures/fig_invariance_thresh-lp_appendix.pdf}
\caption{\textbf{} \caption{\textbf{}
} }
\label{} \label{}

View File

@@ -0,0 +1,52 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from IPython import embed

# Collect per-song invariance measures of the full model pathway into a
# single archive per species (stage-wise arrays, files stacked on last axis).

# GENERAL SETTINGS:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
]
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
search_path = '../data/inv/full/'
save_path = '../data/inv/full/collected/'

# EXECUTION:
for species in target_species:
    print(f'Processing {species}')
    # Gather every invariance file of the current species:
    paths = search_files(species, ext='npz', dir=search_path)
    if not paths:
        continue
    collected = None
    for file_ind, file_path in enumerate(paths):
        # Load invariance data of a single song file:
        data, config = load_data(file_path, 'scales', 'measure')
        if collected is None:
            # Allocate stage-wise storage with one slot per file:
            collected = {'scales': data['scales']}
            for stage in stages:
                key = f'measure_{stage}'
                collected[key] = np.zeros(data[key].shape + (len(paths),),
                                          dtype=float)
        # Stack the current file into the stage-wise arrays:
        for stage in stages:
            key = f'measure_{stage}'
            collected[key][..., file_ind] = data[key]
    # Write one collected archive per species:
    save_data(save_path + species, collected, config, overwrite=True)
print('Done.')

View File

@@ -0,0 +1,46 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from IPython import embed

# Collect per-song invariance measures of the log-highpass model variant
# into a single archive per species (files stacked on last axis).
# NOTE(review): `sort_files_by_rec` is imported but unused here — kept,
# since this view may not show the whole file.

# GENERAL SETTINGS:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
]
search_path = '../data/inv/log_hp/'
save_path = '../data/inv/log_hp/collected/'

# EXECUTION:
for i, species in enumerate(target_species):
    print(f'Processing {species}')
    # Fetch all species-specific song files:
    all_paths = search_files(species, incl='noise', ext='npz', dir=search_path)
    # Fix: skip species without any matching files. Previously, an empty
    # result left `scales`/`spec_data` undefined and saving below raised a
    # NameError (the sibling collect scripts already guard this way).
    if not all_paths:
        continue
    # Run through files:
    for j, path in enumerate(all_paths):
        # Load invariance data:
        data, config = load_data(path, ['scales', 'measure_inv'])
        scales, measure = data['scales'], data['measure_inv']
        if j == 0:
            # Prepare species-specific storage (one slot per file):
            spec_data = np.zeros((measure.shape + (len(all_paths),)), dtype=float)
        # Log file data:
        spec_data[..., j] = measure
    # Save collected file data:
    save_name = save_path + species
    archive = dict(scales=scales, measure_inv=spec_data)
    save_data(save_name, archive, config, overwrite=True)
print('Done.')

View File

@@ -0,0 +1,52 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from IPython import embed

# Collect per-song invariance measures of the shortened model pathway into
# a single archive per species (stage-wise arrays, files on last axis).

# GENERAL SETTINGS:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
]
stages = ['filt', 'env', 'conv', 'feat']
search_path = '../data/inv/short/'
save_path = '../data/inv/short/collected/'

# EXECUTION:
for species in target_species:
    print(f'Processing {species}')
    # Gather every invariance file of the current species:
    paths = search_files(species, ext='npz', dir=search_path)
    if not paths:
        continue
    collected = None
    for file_ind, file_path in enumerate(paths):
        # Load invariance data of a single song file:
        data, config = load_data(file_path, 'scales', 'measure')
        if collected is None:
            # Allocate stage-wise storage with one slot per file:
            collected = {'scales': data['scales']}
            for stage in stages:
                key = f'measure_{stage}'
                collected[key] = np.zeros(data[key].shape + (len(paths),),
                                          dtype=float)
        # Stack the current file into the stage-wise arrays:
        for stage in stages:
            key = f'measure_{stage}'
            collected[key][..., file_ind] = data[key]
    # Write one collected archive per species:
    save_data(save_path + species, collected, config, overwrite=True)
print('Done.')

View File

@@ -0,0 +1,52 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from IPython import embed

# Collect per-song feature measures of the threshold-lowpass model variant
# into a single archive per species (files stacked on last axis).

# GENERAL SETTINGS:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
]
search_path = '../data/inv/thresh_lp/'
save_path = '../data/inv/thresh_lp/collected/'

# ANALYSIS SETTINGS:
with_noise = False

# EXECUTION:
for i, species in enumerate(target_species):
    print(f'Processing {species}')
    # Fetch all species-specific song files:
    incl = 'noise' if with_noise else 'pure'
    all_paths = search_files(species, incl=incl, ext='npz', dir=search_path)
    # Fix: skip species without any matching files. Previously, an empty
    # result left `data`/`spec_data` undefined and saving below raised a
    # NameError (the sibling collect scripts already guard this way).
    if not all_paths:
        continue
    # Run through files:
    for j, path in enumerate(all_paths):
        # Load invariance data:
        data, config = load_data(path, ['scales', 'measure_feat', 'thresh_rel'])
        measure = data['measure_feat']
        if j == 0:
            # Prepare species-specific storage (one slot per file):
            spec_data = np.zeros((measure.shape + (len(all_paths),)), dtype=float)
        # Log file data:
        spec_data[..., j] = measure
    # Save collected file data:
    save_name = save_path + species + f'_{incl}'
    # `scales`/`thresh_rel` are taken from the last loaded file — presumably
    # identical across files of one species; verify against the generator.
    archive = dict(
        scales=data['scales'],
        measure_feat=spec_data,
        thresh_rel=data['thresh_rel'])
    # NOTE(review): unlike the other collect scripts, this call does not pass
    # overwrite=True — possibly deliberate; confirm intended behavior.
    save_data(save_name, archive, config)
print('Done.')

View File

@@ -0,0 +1,87 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from IPython import embed

# Condense per-song invariance measures of the full model pathway into
# per-recording statistics (mean/SD over the song files of each recording),
# saved as one archive per species.

# GENERAL SETTINGS:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
]
# Recording-source tags used by sort_files_by_rec to group files:
sources = [
    'BM04',
    'BM93',
    'DJN',
    'GBC',
    'FTN'
]
# Model stages whose measures are condensed:
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
search_path = '../data/inv/full/'
save_path = '../data/inv/full/condensed/'

# ANALYSIS SETTINGS:
# If True, measures are normalized by their first entry before averaging
# (and the output file is tagged '_normed' instead of '_raw'):
compute_ratios = False

# EXECUTION:
for i, species in enumerate(target_species):
    print(f'Processing {species}')
    # Fetch all species-specific song files:
    all_paths = search_files(species, ext='npz', dir=search_path)
    if not all_paths:
        continue
    # Sort song files by recording (one or more per source):
    sorted_paths = sort_files_by_rec(all_paths, sources)
    # Condense across song files per recording:
    for j, rec_paths in enumerate(sorted_paths):
        for k, path in enumerate(rec_paths):
            # Load invariance data:
            data, config = load_data(path, 'scales', 'measure')
            if k == 0:
                # Prepare song file-specific storage:
                # (shapes taken from the first file — assumes all files of a
                # recording share the same measure shapes; TODO confirm)
                file_data = {}
                for stage in stages:
                    shape = data[f'measure_{stage}'].shape + (len(rec_paths),)
                    file_data[stage] = np.zeros(shape, dtype=float)
                if j == 0:
                    # Prepare recording-specific storage:
                    rec_mean, rec_sd = {}, {}
                    for stage in stages:
                        shape = data[f'measure_{stage}'].shape + (len(sorted_paths),)
                        rec_mean[f'mean_{stage}'] = np.zeros(shape, dtype=float)
                        rec_sd[f'sd_{stage}'] = np.zeros(shape, dtype=float)
            # Log song file data:
            for stage in stages:
                mkey = f'measure_{stage}'
                if compute_ratios:
                    # Normalize in place by the measure's first entry:
                    data[mkey] /= data[mkey][0]
                file_data[stage][..., k] = data[mkey]
        # Get recording statistics:
        # (nan-aware, so missing values do not poison the statistics)
        for stage in stages:
            rec_mean[f'mean_{stage}'][..., j] = np.nanmean(file_data[stage], axis=-1)
            rec_sd[f'sd_{stage}'][..., j] = np.nanstd(file_data[stage], axis=-1)
    # Save condensed recording data:
    save_name = save_path + species
    if compute_ratios:
        save_name += '_normed'
    else:
        save_name += '_raw'
    # Scales taken from the last loaded file — presumably identical across
    # all files of one species; verify against the generator script.
    archive = dict(scales=data['scales'])
    archive.update(rec_mean)
    archive.update(rec_sd)
    save_data(save_name, archive, config, overwrite=True)
print('Done.')

View File

@@ -1,8 +1,8 @@
import numpy as np import numpy as np
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files, crop_paths from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data from thunderhopper.modeltools import load_data, save_data
from misc_functions import shorten_species from misc_functions import shorten_species, sort_files_by_rec
from IPython import embed from IPython import embed
# GENERAL SETTINGS: # GENERAL SETTINGS:
@@ -23,7 +23,6 @@ sources = [
'FTN' 'FTN'
] ]
search_path = '../data/inv/log_hp/' search_path = '../data/inv/log_hp/'
ref_path = '../data/inv/log_hp/ref_measures.npz'
save_path = '../data/inv/log_hp/condensed/' save_path = '../data/inv/log_hp/condensed/'
# ANALYSIS SETTINGS: # ANALYSIS SETTINGS:
@@ -31,14 +30,12 @@ compute_ratios = True
plot_overview = True plot_overview = True
# PREPARATION: # PREPARATION:
if compute_ratios:
ref_measure = np.load(ref_path)['inv']
if plot_overview: if plot_overview:
fig, axes = plt.subplots(3, len(target_species), figsize=(16, 9), fig, axes = plt.subplots(3, len(target_species), figsize=(16, 9),
sharex=True, sharey=True, layout='constrained') sharex=True, sharey=True, layout='constrained')
axes[0, 0].set_ylabel('songs') axes[0, 0].set_ylabel('songs')
axes[1, 0].set_ylabel('recordings\n(mean ± SD)') axes[1, 0].set_ylabel('recordings\n(mean ± SD)')
axes[2, 0].set_ylabel('total\n(mean ± SEM)') axes[2, 0].set_ylabel('total\n(mean ± SD)')
# EXECUTION: # EXECUTION:
for i, species in enumerate(target_species): for i, species in enumerate(target_species):
@@ -48,49 +45,21 @@ for i, species in enumerate(target_species):
# Fetch all species-specific song files: # Fetch all species-specific song files:
all_paths = search_files(species, incl='noise', ext='npz', dir=search_path) all_paths = search_files(species, incl='noise', ext='npz', dir=search_path)
# Separate by source:
sorted_paths = {}
for source in sources:
# Check for any source-specific song files:
source_paths = [path for path in all_paths if source in path]
if not source_paths:
continue
# Separate by recording:
sorted_paths[source] = [[]]
for path, name in zip(source_paths, crop_paths(source_paths)):
# Find numerical ID behind source tag:
id_ind = name.find(source) + len(source) + 1
# Check if ID is followed by sub-ID:
sub_id = name[id_ind:].split('-')[1]
if 's' in sub_id:
# Single (time stamp in next spot):
sorted_paths[source][0].append(path)
continue
sub_id = int(sub_id)
# Multiple (sub-ID in next spot):
if sub_id > len(sorted_paths[source]):
# Open new recording-specific slot:
sorted_paths[source].append([])
sorted_paths[source][sub_id - 1].append(path)
# Re-sort song files only by recording (discarding source separation): # Sort song files by recording (one or more per source):
sorted_paths = [path for paths in sorted_paths.values() for path in paths] sorted_paths = sort_files_by_rec(all_paths, sources)
# Condense across song files per recording: # Condense across song files per recording:
for j, rec_paths in enumerate(sorted_paths): for j, rec_paths in enumerate(sorted_paths):
for k, path in enumerate(rec_paths): for k, path in enumerate(rec_paths):
# Load invariance data: # Load invariance data:
data, _ = load_data(path, ['scales', 'measure_inv']) data, config = load_data(path, ['scales', 'measure_inv'])
scales, measure = data['scales'], data['measure_inv'] scales, measure = data['scales'], data['measure_inv']
# Relate to noise: # Relate to noise:
if compute_ratios: if compute_ratios:
measure /= ref_measure measure /= measure[0]
if k == 0: if k == 0:
# Prepare song file-specific storage: # Prepare song file-specific storage:
@@ -116,7 +85,8 @@ for i, species in enumerate(target_species):
rec_mean[:, j] + rec_sd[:, j], color='k', alpha=0.2) rec_mean[:, j] + rec_sd[:, j], color='k', alpha=0.2)
# Save condensed recording data for current species: # Save condensed recording data for current species:
np.savez(save_path + species, scales=scales, mean=rec_mean, sd=rec_sd) archive = dict(scales=scales, mean_inv=rec_mean, sd_inv=rec_sd)
save_data(save_path + species, archive, config, overwrite=True)
if plot_overview: if plot_overview:
spec_mean = rec_mean.mean(axis=1) spec_mean = rec_mean.mean(axis=1)
@@ -128,9 +98,7 @@ for i, species in enumerate(target_species):
print('Done.') print('Done.')
if plot_overview: if plot_overview:
axes[0, 0].set_xlim(scales[0], scales[-1])
axes[0, 0].set_xscale('log') axes[0, 0].set_xscale('log')
axes[0, 0].set_yscale('log') axes[0, 0].set_yscale('log')
plt.show() axes[0, 0].set_xlim(scales[1], scales[-1])
plt.show()

View File

@@ -0,0 +1,87 @@
import numpy as np
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import sort_files_by_rec
from IPython import embed

# Condense per-song invariance measures of the shortened model pathway into
# per-recording statistics (mean/SD over the song files of each recording),
# saved as one archive per species.

# GENERAL SETTINGS:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
]
# Recording-source tags used by sort_files_by_rec to group files:
sources = [
    'BM04',
    'BM93',
    'DJN',
    'GBC',
    'FTN'
]
# Model stages whose measures are condensed:
stages = ['filt', 'env', 'conv', 'feat']
search_path = '../data/inv/short/'
save_path = '../data/inv/short/condensed/'

# ANALYSIS SETTINGS:
# If True, measures are normalized by their first entry before averaging
# (and the output file is tagged '_normed' instead of '_raw'):
compute_ratios = False

# EXECUTION:
for i, species in enumerate(target_species):
    print(f'Processing {species}')
    # Fetch all species-specific song files:
    all_paths = search_files(species, ext='npz', dir=search_path)
    if not all_paths:
        continue
    # Sort song files by recording (one or more per source):
    sorted_paths = sort_files_by_rec(all_paths, sources)
    # Condense across song files per recording:
    for j, rec_paths in enumerate(sorted_paths):
        for k, path in enumerate(rec_paths):
            # Load invariance data:
            data, config = load_data(path, 'scales', 'measure')
            if k == 0:
                # Prepare song file-specific storage:
                # (shapes taken from the first file — assumes all files of a
                # recording share the same measure shapes; TODO confirm)
                file_data = {}
                for stage in stages:
                    shape = data[f'measure_{stage}'].shape + (len(rec_paths),)
                    file_data[stage] = np.zeros(shape, dtype=float)
                if j == 0:
                    # Prepare recording-specific storage:
                    rec_mean, rec_sd = {}, {}
                    for stage in stages:
                        shape = data[f'measure_{stage}'].shape + (len(sorted_paths),)
                        rec_mean[f'mean_{stage}'] = np.zeros(shape, dtype=float)
                        rec_sd[f'sd_{stage}'] = np.zeros(shape, dtype=float)
            # Log song file data:
            for stage in stages:
                mkey = f'measure_{stage}'
                if compute_ratios:
                    # Normalize in place by the measure's first entry:
                    data[mkey] /= data[mkey][0]
                file_data[stage][..., k] = data[mkey]
        # Get recording statistics:
        # (nan-aware, so missing values do not poison the statistics)
        for stage in stages:
            rec_mean[f'mean_{stage}'][..., j] = np.nanmean(file_data[stage], axis=-1)
            rec_sd[f'sd_{stage}'][..., j] = np.nanstd(file_data[stage], axis=-1)
    # Save condensed recording data:
    save_name = save_path + species
    if compute_ratios:
        save_name += '_normed'
    else:
        save_name += '_raw'
    # Scales taken from the last loaded file — presumably identical across
    # all files of one species; verify against the generator script.
    # NOTE(review): unlike the 'full' condense script, this call does not
    # pass overwrite=True — possibly deliberate; confirm intended behavior.
    archive = dict(scales=data['scales'])
    archive.update(rec_mean)
    archive.update(rec_sd)
    save_data(save_name, archive, config)
print('Done.')

View File

@@ -0,0 +1,135 @@
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data, save_data
from misc_functions import shorten_species, sort_files_by_rec
from IPython import embed

# Condense per-song feature measures of the threshold-lowpass model variant
# into per-recording statistics (mean/SD across song files), saved as one
# archive per species, with an optional overview plot per threshold level
# (rows: single songs, recording stats, species stats; columns: species).

# GENERAL SETTINGS:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
]
# Recording-source tags used by sort_files_by_rec to group files:
sources = [
    'BM04',
    'BM93',
    'DJN',
    'GBC',
    'FTN'
]
search_path = '../data/inv/thresh_lp/'
save_path = '../data/inv/thresh_lp/condensed/'

# ANALYSIS SETTINGS:
with_noise = True
plot_overview = True
# Relative threshold levels — assumed to match the last axis of
# 'measure_feat' in the loaded files (TODO confirm against generator):
thresh_rel = np.array([0.5, 1, 3])

# PREPARATION:
if plot_overview:
    # One color per feature kernel (second axis of 'measure_feat'):
    kern_colors = ['r', 'g', 'b']
    # One overview figure per threshold level, keyed by threshold value:
    all_figs, all_axes = {}, {}
    for thresh in thresh_rel:
        fig, axes = plt.subplots(3, len(target_species), figsize=(16, 9),
                                 sharex=True, sharey=True, layout='constrained')
        fig.suptitle(f'rel. thresh: {thresh}')
        axes[0, 0].set_ylim(0, 1)
        axes[0, 0].set_ylabel('songs')
        axes[1, 0].set_ylabel('recordings\n(mean ± SD)')
        axes[2, 0].set_ylabel('total\n(mean ± SD)')
        all_figs[thresh] = fig
        all_axes[thresh] = axes

# EXECUTION:
for i, species in enumerate(target_species):
    print(f'Processing {species}')
    if plot_overview:
        # Column titles with abbreviated species names:
        for thresh in thresh_rel:
            all_axes[thresh][0, i].set_title(shorten_species(species))
    # Fetch all species-specific song files:
    incl = 'noise' if with_noise else 'pure'
    all_paths = search_files(species, incl=incl, ext='npz', dir=search_path)
    # Sort song files by recording (one or more per source):
    sorted_paths = sort_files_by_rec(all_paths, sources)
    # Condense across song files per recording:
    for j, rec_paths in enumerate(sorted_paths):
        for k, path in enumerate(rec_paths):
            # Load invariance data:
            data, config = load_data(path, ['scales', 'measure_feat'])
            scales, measure = data['scales'], data['measure_feat']
            if k == 0:
                # Prepare song file-specific storage:
                # (shape taken from the first file — assumes all files of a
                # recording share the same measure shape; TODO confirm)
                shape = measure.shape + (len(rec_paths),)
                file_data = np.zeros(shape, dtype=float)
                if j == 0:
                    # Prepare recording-specific storage:
                    shape = measure.shape + (len(sorted_paths),)
                    rec_mean = np.zeros(shape, dtype=float)
                    rec_sd = np.zeros(shape, dtype=float)
            # Log song file data:
            file_data[..., k] = measure
            if plot_overview:
                # Row 0: individual song curves (one panel per threshold):
                for l, thresh in enumerate(thresh_rel):
                    axes = all_axes[thresh]
                    for m, c in enumerate(kern_colors):
                        axes[0, i].plot(scales, measure[:, m, l], c=c, alpha=0.5)
        # Get recording statistics:
        rec_mean[..., j] = file_data.mean(axis=-1)
        rec_sd[..., j] = file_data.std(axis=-1)
        if plot_overview:
            # Row 1: per-recording mean with ±SD band:
            for l, thresh in enumerate(thresh_rel):
                axes = all_axes[thresh]
                for m, c in enumerate(kern_colors):
                    axes[1, i].plot(scales, rec_mean[:, m, l, j], c=c)
                    spread = (rec_mean[:, m, l, j] - rec_sd[:, m, l, j],
                              rec_mean[:, m, l, j] + rec_sd[:, m, l, j])
                    axes[1, i].fill_between(scales, *spread, color=c, alpha=0.2)
    # Save condensed recording data:
    save_name = save_path + species
    if with_noise:
        save_name += '_noise'
    else:
        save_name += '_pure'
    # Scales taken from the last loaded file — presumably identical across
    # all files of one species; verify against the generator script.
    archive = dict(
        scales=scales,
        mean_feat=rec_mean,
        sd_feat=rec_sd,
        thresh_rel=thresh_rel,)
    # NOTE(review): no overwrite=True here, unlike the collect scripts —
    # possibly deliberate; confirm intended behavior.
    save_data(save_name, archive, config)
    if plot_overview:
        # Row 2: species-level mean/SD computed across recording means:
        spec_mean = rec_mean.mean(axis=-1)
        spec_sd = rec_mean.std(axis=-1)
        for l, thresh in enumerate(thresh_rel):
            axes = all_axes[thresh]
            for m, c in enumerate(kern_colors):
                axes[2, i].plot(scales, spec_mean[:, m, l], c=c)
                spread = (spec_mean[:, m, l] - spec_sd[:, m, l],
                          spec_mean[:, m, l] + spec_sd[:, m, l])
                axes[2, i].fill_between(scales, *spread, color=c, alpha=0.2)
print('Done.')
if plot_overview:
    # Shared axes: log-scaled x starting at the second scale value
    # (presumably to skip a zero/noise-floor entry — TODO confirm):
    for thresh in thresh_rel:
        axes = all_axes[thresh]
        axes[0, 0].set_xscale('log')
        axes[0, 0].set_xlim(scales[1], scales[-1])
    plt.show()

View File

@@ -0,0 +1,4 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files

View File

@@ -5,7 +5,7 @@ from plot_functions import xlabel, ylabel, strip_zeros, letter_subplots
# GENERAL SETTINGS: # GENERAL SETTINGS:
data_path = '../data/inv/noise_env/sd_conversion.npz' data_path = '../data/inv/noise_env/sd_conversion.npz'
save_path = '../figures/fig_noise_env_sd_conversion.pdf' save_path = '../figures/fig_noise_env_sd_conversion_appendix.pdf'
# PLOT SETTINGS: # PLOT SETTINGS:
fig_kwargs = dict( fig_kwargs = dict(

View File

@@ -1,13 +1,13 @@
import plotstyle_plt import plotstyle_plt
import glob
import numpy as np import numpy as np
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from itertools import product from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data from thunderhopper.modeltools import load_data
from misc_functions import get_saturation from misc_functions import get_saturation
from color_functions import load_colors from color_functions import load_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, title_subplot,\ from plot_functions import hide_axis, ylimits, xlabel, ylabel, title_subplot,\
plot_line, plot_barcode, strip_zeros, time_bar,\ plot_line, strip_zeros, time_bar,\
letter_subplot, letter_subplots letter_subplot, letter_subplots
from IPython import embed from IPython import embed
@@ -17,11 +17,6 @@ def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
plot_line(ax, time, snippets[:, ..., i], ymin=ymin, ymax=ymax, **kwargs) plot_line(ax, time, snippets[:, ..., i], ymin=ymin, ymax=ymax, **kwargs)
return None return None
def plot_bi_snippets(axes, time, snippets, **kwargs):
for i, ax in enumerate(axes):
plot_barcode(ax, time, snippets[:, ..., i], **kwargs)
return None
def plot_curves(ax, scales, measures, fill_kwargs={}, **kwargs): def plot_curves(ax, scales, measures, fill_kwargs={}, **kwargs):
if measures.ndim == 1: if measures.ndim == 1:
ax.plot(scales, measures, **kwargs)[0] ax.plot(scales, measures, **kwargs)[0]
@@ -39,8 +34,28 @@ def show_saturation(ax, scales, measures, high=0.95, **kwargs):
marker='o', ms=10, zorder=6, clip_on=False, **kwargs) marker='o', ms=10, zorder=6, clip_on=False, **kwargs)
# GENERAL SETTINGS: # GENERAL SETTINGS:
target = 'Omocestus_rufipes' target_species = [
data_paths = glob.glob(f'../data/inv/full/{target}*.npz') 'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][5]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
raw_path = search_files(target_species, incl='raw', dir='../data/inv/full/condensed/')[0]
norm_path = search_files(target_species, incl='norm', dir='../data/inv/full/condensed/')[0]
snip_path = search_files(example_file, dir='../data/inv/full/')[0]
trace_path = search_files(target_species, dir='../data/inv/full/collected/')[0]
ref_path = '../data/inv/full/ref_measures.npz' ref_path = '../data/inv/full/ref_measures.npz'
save_path = '../figures/fig_invariance_full.pdf' save_path = '../figures/fig_invariance_full.pdf'
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat'] stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
@@ -105,9 +120,9 @@ lw = dict(
log=0.25, log=0.25,
inv=0.25, inv=0.25,
conv=0.25, conv=0.25,
bi=0,
feat=1, feat=1,
big=3 big=3,
plateau=1.5,
) )
xlabels = dict( xlabels = dict(
big='scale $\\alpha$', big='scale $\\alpha$',
@@ -118,7 +133,6 @@ ylabels = dict(
log='$x_{\\text{db}}$', log='$x_{\\text{db}}$',
inv='$x_{\\text{inv}}$', inv='$x_{\\text{inv}}$',
conv='$c_i$', conv='$c_i$',
bi='$b_i$',
feat='$f_i$', feat='$f_i$',
big=['intensity', 'rel. intensity', 'norm. intensity'] big=['intensity', 'rel. intensity', 'norm. intensity']
) )
@@ -187,121 +201,160 @@ bar_kwargs = dict(
va='center', va='center',
) )
) )
plateau_settings = dict(
# PREPARATION: low=0.05,
ref_data = dict(np.load(ref_path)) high=0.95,
first=True,
last=True,
condense=None,
)
plateau_line_kwargs = dict(
lw=lw['plateau'],
ls='--',
zorder=1,
)
plateau_dot_kwargs = dict(
marker='o',
markersize=8,
markeredgewidth=1,
clip_on=False,
)
# EXECUTION: # EXECUTION:
for data_path in data_paths:
print(f'Processing {data_path}')
# Load invariance data: # Load invariance data:
data, config = load_data(data_path, **load_kwargs) raw_data, config = load_data(raw_path, files='scales', keywords='mean')
t_full = np.arange(data['snip_filt'].shape[0]) / config['rate'] norm_data, _ = load_data(norm_path, files='scales', keywords='mean')
scales = raw_data['scales']
# Adjust grid parameters: # Load snippet data:
snip_grid_kwargs['ncols'] = data['example_scales'].size snip, _ = load_data(snip_path, files='example_scales', keywords='snip')
t_full = np.arange(snip['snip_filt'].shape[0]) / config['rate']
snip_scales = snip['example_scales']
# Prepare overall graph: # Adjust grid parameters:
fig = plt.figure(**fig_kwargs) snip_grid_kwargs['ncols'] = snip_scales.size
super_grid = fig.add_gridspec(**super_grid_kwargs)
# Prepare stage-specific snippet axes: # Prepare overall graph:
snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']]) fig = plt.figure(**fig_kwargs)
snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs) super_grid = fig.add_gridspec(**super_grid_kwargs)
snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
ax = snip_subfig.add_subplot(snip_grid[i, j])
ax.set_xlim(t_full[0], t_full[-1])
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
hide_axis(ax, 'bottom')
if i == 0:
title = title_subplot(ax, f'$\\alpha={strip_zeros(data["example_scales"][j])}$',
ref=snip_subfig, **title_kwargs)
if j == 0:
ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
else:
hide_axis(ax, 'left')
snip_axes[i, j] = ax
time_bar(snip_axes[-1, -1], **bar_kwargs)
letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
# Prepare analysis axes: # Prepare stage-specific snippet axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']]) snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
big_grid = big_subfig.add_gridspec(**big_grid_kwargs) snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object) snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
for i in range(big_grid.ncols): for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
ax = big_subfig.add_subplot(big_grid[0, i]) ax = snip_subfig.add_subplot(snip_grid[i, j])
ax.set_xlim(data['scales'][0], data['scales'][-1]) ax.set_xlim(t_full[0], t_full[-1])
ax.set_xscale('symlog', linthresh=data['scales'][1], linscale=0.5) ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1) hide_axis(ax, 'bottom')
xlabel(ax, xlabels['big'], transform=big_subfig, **xlab_big_kwargs) if i == 0:
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs) title = title_subplot(ax, f'$\\alpha={strip_zeros(snip_scales[j])}$',
big_axes[i] = ax ref=snip_subfig, **title_kwargs)
letter_subplots(big_axes, 'bc', **letter_big_kwargs) if j == 0:
ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
else:
hide_axis(ax, 'left')
snip_axes[i, j] = ax
time_bar(snip_axes[-1, -1], **bar_kwargs)
letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
# # Plot filtered snippets: # Prepare analysis axes:
# plot_snippets(snip_axes[0, :], t_full, data['snip_filt'], big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
# c=colors['filt'], lw=lw['filt']) big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i in range(big_grid.ncols):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(scales[0], scales[-1])
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
xlabel(ax, xlabels['big'], transform=big_subfig, **xlab_big_kwargs)
ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
big_axes[i] = ax
letter_subplots(big_axes, 'bc', **letter_big_kwargs)
# # Plot envelope snippets: if False:
# plot_snippets(snip_axes[1, :], t_full, data['snip_env'], # Plot filtered snippets:
# ymin=0, c=colors['env'], lw=lw['env']) plot_snippets(snip_axes[0, :], t_full, snip['snip_filt'],
c=colors['filt'], lw=lw['filt'])
# # Plot logarithmic snippets: # Plot envelope snippets:
# plot_snippets(snip_axes[2, :], t_full, data['snip_log'], plot_snippets(snip_axes[1, :], t_full, snip['snip_env'],
# c=colors['log'], lw=lw['log']) ymin=0, c=colors['env'], lw=lw['env'])
# # Plot invariant snippets: # Plot logarithmic snippets:
# plot_snippets(snip_axes[3, :], t_full, data['snip_inv'], plot_snippets(snip_axes[2, :], t_full, snip['snip_log'],
# c=colors['inv'], lw=lw['inv']) c=colors['log'], lw=lw['log'])
# # Plot kernel response snippets: # Plot invariant snippets:
# plot_snippets(snip_axes[4, :], t_full, data['snip_conv'], plot_snippets(snip_axes[3, :], t_full, snip['snip_inv'],
# c=colors['conv'], lw=lw['conv']) c=colors['inv'], lw=lw['inv'])
# # Plot feature snippets: # Plot kernel response snippets:
# plot_snippets(snip_axes[5, :], t_full, data['snip_feat'], plot_snippets(snip_axes[4, :], t_full, snip['snip_conv'],
# ymin=0, ymax=1, c=colors['feat'], lw=lw['feat']) c=colors['conv'], lw=lw['conv'])
# Analysis results: # Plot feature snippets:
scales_rel = data['scales'] - data['scales'][0] plot_snippets(snip_axes[5, :], t_full, snip['snip_feat'],
scales_rel /= scales_rel[-1] ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
for stage in stages:
measure = data[f'measure_{stage}']
# Plot unmodified intensity measures: # Plot analysis results:
curve = plot_curves(big_axes[0], data['scales'], measure, c=colors[stage], lw=lw['big'], for stage in stages:
fill_kwargs=dict(color=colors[stage], alpha=0.25)) # Get average unnormed measure across recordings:
if stage in ['log', 'inv', 'conv', 'feat']: raw_measure = raw_data[f'mean_{stage}'].mean(axis=-1)
show_saturation(big_axes[0], data['scales'], curve, c=colors[stage])
# # Relate to pure-noise reference: # Plot unmodified intensity measures:
# norm_measure = measure / ref_data[stage] curve = plot_curves(big_axes[0], scales, raw_measure, c=colors[stage], lw=lw['big'],
fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
ind = get_saturation(curve, **plateau_settings)[1]
scale = scales[ind]
big_axes[0].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[0].get_xaxis_transform())
big_axes[0].vlines(scale, big_axes[0].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
# # Plot noise-related intensity measures: # Get average noise-related measure across recordings:
# big_axes[1].plot(data['scales'], norm_measure, c=colors[stage], lw=lw['big']) norm_measure = norm_data[f'mean_{stage}'].mean(axis=-1)
# Normalize measure to [0, 1]: # Plot noise-related intensity measure:
min_measure = measure.min(axis=0) curve = plot_curves(big_axes[1], scales, norm_measure, c=colors[stage], lw=lw['big'],
max_measure = measure.max(axis=0) fill_kwargs=dict(color=colors[stage], alpha=0.25))
norm_measure = (measure - min_measure) / (max_measure - min_measure)
# Plot normalized intensity measures: # Indicate saturation point:
curve = plot_curves(big_axes[1], data['scales'], norm_measure, c=colors[stage], lw=lw['big'], if stage in ['log', 'inv', 'conv', 'feat']:
fill_kwargs=dict(color=colors[stage], alpha=0.25)) big_axes[1].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
if stage in ['log', 'inv', 'conv', 'feat']: transform=big_axes[1].get_xaxis_transform())
show_saturation(big_axes[1], data['scales'], curve, c=colors[stage]) big_axes[1].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[1].get_xaxis_transform())
big_axes[1].vlines(scale, big_axes[1].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
# # Plot over relative scales: # Normalize measure to [0, 1]:
# plot_curves(big_axes[2], scales_rel, norm_measure, c=colors[stage], lw=lw['big'], min_measure = raw_measure.min(axis=0)
# fill_kwargs=dict(color=colors[stage], alpha=0.25)) max_measure = raw_measure.max(axis=0)
# scales_rel = curve - curve.min() norm_measure = (raw_measure - min_measure) / (max_measure - min_measure)
# scales_rel /= scales_rel.max()
if save_path is not None: # Plot range-normalized intensity measure:
fig.savefig(save_path) curve = plot_curves(big_axes[2], scales, norm_measure, c=colors[stage], lw=lw['big'],
plt.show() fill_kwargs=dict(color=colors[stage], alpha=0.25))
# Indicate saturation point:
if stage in ['log', 'inv', 'conv', 'feat']:
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=colors[stage], mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], curve[ind],
color=colors[stage], **plateau_line_kwargs)
if save_path is not None:
fig.savefig(save_path)
plt.show()
print('Done.') print('Done.')
embed() embed()

View File

@@ -0,0 +1,326 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from itertools import product
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from misc_functions import get_saturation
from color_functions import load_colors
from plot_functions import hide_axis, ylimits, xlabel, ylabel, title_subplot,\
plot_line, plot_barcode, strip_zeros, time_bar,\
letter_subplot, letter_subplots
from IPython import embed
def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
    """Draw one snippet trace per axis, sharing y-limits across all axes.

    Common limits come from ylimits() over the whole snippet array
    (pad=0.05), seeded with the given ymin/ymax. The i-th axis shows the
    i-th column of `snippets` (last-axis index). Extra keyword arguments
    are forwarded to plot_line().
    """
    low, high = ylimits(snippets, minval=ymin, maxval=ymax, pad=0.05)
    for idx, axis in enumerate(axes):
        plot_line(axis, time, snippets[:, ..., idx], ymin=low, ymax=high, **kwargs)
    return None
def plot_bi_snippets(axes, time, snippets, **kwargs):
    """Draw one barcode plot per axis from the matching snippet column.

    The i-th axis receives the i-th column of `snippets` (last-axis
    index); keyword arguments are forwarded to plot_barcode().
    """
    for idx, axis in enumerate(axes):
        plot_barcode(axis, time, snippets[:, ..., idx], **kwargs)
    return None
def plot_curves(ax, scales, measures, fill_kwargs=None, **kwargs):
    """Plot a measure curve over scales, with an IQR band for 2D input.

    Parameters
    ----------
    ax : matplotlib-style axes (needs .plot and .fill_between).
    scales : array of x-values, one per row of `measures`.
    measures : 1D array (a single curve, drawn as-is) or 2D array of
        shape (scales, repetitions); for 2D input the per-scale median
        is drawn and the 25th-75th percentile band is shaded.
    fill_kwargs : dict or None
        Keyword arguments for ax.fill_between (None -> no extra kwargs).
    **kwargs : forwarded to ax.plot.

    Returns
    -------
    numpy.ndarray
        The curve that was drawn: `measures` itself for 1D input,
        otherwise the per-scale median.
    """
    # Avoid the original mutable default argument (fill_kwargs={}).
    if fill_kwargs is None:
        fill_kwargs = {}
    if measures.ndim == 1:
        # Original code indexed the discarded ax.plot(...) result with
        # [0]; that dead subscript is removed here.
        ax.plot(scales, measures, **kwargs)
        return measures
    median_measure = np.median(measures, axis=1)
    spread_measure = [np.percentile(measures, 25, axis=1),
                      np.percentile(measures, 75, axis=1)]
    ax.plot(scales, median_measure, **kwargs)
    ax.fill_between(scales, *spread_measure, **fill_kwargs)
    return median_measure
def show_saturation(ax, scales, measures, high=0.95, **kwargs):
    """Mark the saturation scale of a measure curve on the x-axis of *ax*.

    The upper-plateau index is taken from get_saturation() (second
    return value) and a dot is drawn at that scale on the x-axis
    (y given in axis coordinates via the x-axis transform). Extra
    keyword arguments are forwarded to ax.plot(); returns its handles.
    """
    plateau = get_saturation(measures, high=high)
    saturation_scale = scales[plateau[1]]
    return ax.plot(saturation_scale, 0, transform=ax.get_xaxis_transform(),
                   marker='o', ms=10, zorder=6, clip_on=False, **kwargs)
# GENERAL SETTINGS:
# Candidate species; the trailing index selects exactly one entry
# (here [5] -> 'Omocestus_rufipes').
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
][5]
# One example recording per species, keyed by the species chosen above.
example_file = {
    'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
    'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
    'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
    'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
    'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
    'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
    'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
# NOTE(review): search_files() runs at import time and hits the file
# system; presumably returns a list of matching file paths — confirm
# against thunderhopper.filetools.
data_paths = search_files(target_species, dir='../data/inv/full/condensed/')
snip_paths = search_files(example_file, dir='../data/inv/full/')
# Pure-noise reference measures and figure output destination:
ref_path = '../data/inv/full/ref_measures.npz'
save_path = '../figures/fig_invariance_full.pdf'
# Processing stages, in row order of the snippet panels (top to bottom);
# also used to build the 'measure_<stage>' keys in the loaded data.
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
# Arguments for load_data(): which files/keywords to pull from each npz.
load_kwargs = dict(
    files=stages,
    keywords=['scales', 'snip', 'measure']
)
# GRAPH SETTINGS:
# Figure size given in inches (32 x 20 cm converted via 2.54 cm/inch).
fig_kwargs = dict(
    figsize=(32/2.54, 20/2.54),
)
# Outer 2x1 grid: snippet panels on top (3/5 height), analysis row below.
super_grid_kwargs = dict(
    nrows=2,
    ncols=1,
    wspace=0,
    hspace=0,
    left=0,
    right=1,
    bottom=0,
    top=1,
    height_ratios=[3, 2]
)
# Which outer-grid cell each subfigure occupies.
subfig_specs = dict(
    snip=(0, 0),
    big=(1, 0),
)
# Snippet grid: one row per stage; ncols is filled in later from the
# number of example scales in the loaded data.
snip_grid_kwargs = dict(
    nrows=len(stages),
    ncols=None,
    wspace=0.1,
    hspace=0.4,
    left=0.08,
    right=0.95,
    bottom=0.08,
    top=0.95
)
# Analysis grid: one row of three panels, left-aligned with the snippets.
big_grid_kwargs = dict(
    nrows=1,
    ncols=3,
    wspace=0.2,
    hspace=0,
    left=snip_grid_kwargs['left'],
    right=0.96,
    bottom=0.2,
    top=0.95
)
# PLOT SETTINGS:
# Font sizes by text role (plain vs. TeX-rendered labels/titles).
fs = dict(
    lab_norm=16,
    lab_tex=20,
    letter=22,
    tit_norm=16,
    tit_tex=20,
    bar=16,
)
# Stage-keyed colors loaded from disk at import time.
colors = load_colors('../data/stage_colors.npz')
# Line widths per stage plus the thick analysis curves ('big').
lw = dict(
    filt=0.25,
    env=0.25,
    log=0.25,
    inv=0.25,
    conv=0.25,
    bi=0,
    feat=1,
    big=3
)
xlabels = dict(
    big='scale $\\alpha$',
)
# Per-stage y-labels for the snippet rows; 'big' holds one label per
# analysis panel.
ylabels = dict(
    filt='$x_{\\text{filt}}$',
    env='$x_{\\text{env}}$',
    log='$x_{\\text{db}}$',
    inv='$x_{\\text{inv}}$',
    conv='$c_i$',
    bi='$b_i$',
    feat='$f_i$',
    big=['intensity', 'rel. intensity', 'norm. intensity']
)
xlab_big_kwargs = dict(
    y=0,
    fontsize=fs['lab_norm'],
    ha='center',
    va='bottom',
)
ylab_snip_kwargs = dict(
    x=0,
    fontsize=fs['lab_tex'],
    rotation=0,
    ha='left',
    va='center'
)
ylab_big_kwargs = dict(
    x=-0.12,
    fontsize=fs['lab_norm'],
    ha='center',
    va='bottom',
)
# y-axis major-tick spacing per stage (MultipleLocator step).
yloc = dict(
    filt=3000,
    env=1000,
    log=50,
    inv=20,
    conv=2,
    feat=1,
)
title_kwargs = dict(
    x=0.5,
    yref=1,
    ha='center',
    va='top',
    fontsize=fs['tit_norm'],
)
letter_snip_kwargs = dict(
    x=0,
    yref=0.5,
    ha='left',
    va='center',
    fontsize=fs['letter'],
)
letter_big_kwargs = dict(
    x=0,
    y=1,
    ha='left',
    va='bottom',
    fontsize=fs['letter'],
)
# Time-scale bar: 5 s reference drawn below the last snippet panel.
bar_time = 5
bar_kwargs = dict(
    dur=bar_time,
    y0=-0.25,
    y1=-0.1,
    xshift=1,
    color='k',
    lw=0,
    clip_on=False,
    text_pos=(-0.1, 0.5),
    text_str=f'${bar_time}\\,\\text{{s}}$',
    text_kwargs=dict(
        fontsize=fs['bar'],
        ha='right',
        va='center',
    )
)
# PREPARATION:
# Pure-noise reference measures; only read by currently commented-out
# code further below.
ref_data = dict(np.load(ref_path))
# EXECUTION:
# One full figure (snippet grid + analysis panels) per condensed data
# file; saved to save_path and shown at the end of each iteration.
for data_path in data_paths:
    print(f'Processing {data_path}')
    # Load invariance data:
    data, config = load_data(data_path, **load_kwargs)
    # Time axis of the snippets, in seconds at the configured rate.
    t_full = np.arange(data['snip_filt'].shape[0]) / config['rate']
    # Adjust grid parameters:
    # One snippet column per example scale in this file.
    snip_grid_kwargs['ncols'] = data['example_scales'].size
    # Prepare overall graph:
    fig = plt.figure(**fig_kwargs)
    super_grid = fig.add_gridspec(**super_grid_kwargs)
    # Prepare stage-specific snippet axes:
    snip_subfig = fig.add_subfigure(super_grid[subfig_specs['snip']])
    snip_grid = snip_subfig.add_gridspec(**snip_grid_kwargs)
    snip_axes = np.zeros((snip_grid.nrows, snip_grid.ncols), dtype=object)
    for i, j in product(range(snip_grid.nrows), range(snip_grid.ncols)):
        ax = snip_subfig.add_subplot(snip_grid[i, j])
        ax.set_xlim(t_full[0], t_full[-1])
        ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stages[i]]))
        hide_axis(ax, 'bottom')
        # Scale titles only on the top row; after the loop `title` holds
        # the handle of the LAST column's title (used as letter anchor).
        if i == 0:
            title = title_subplot(ax, f'$\\alpha={strip_zeros(data["example_scales"][j])}$',
                                  ref=snip_subfig, **title_kwargs)
        # Stage y-label only on the first column; other columns hide it.
        if j == 0:
            ylabel(ax, ylabels[stages[i]], **ylab_snip_kwargs, transform=snip_subfig.transSubfigure)
        else:
            hide_axis(ax, 'left')
        snip_axes[i, j] = ax
    time_bar(snip_axes[-1, -1], **bar_kwargs)
    letter_subplot(snip_subfig, 'a', ref=title, **letter_snip_kwargs)
    # Prepare analysis axes:
    big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
    big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
    big_axes = np.zeros((big_grid.ncols,), dtype=object)
    for i in range(big_grid.ncols):
        ax = big_subfig.add_subplot(big_grid[0, i])
        ax.set_xlim(data['scales'][0], data['scales'][-1])
        # Symlog keeps a zero scale plottable; linear below scales[1].
        ax.set_xscale('symlog', linthresh=data['scales'][1], linscale=0.5)
        ax.set_yscale('symlog', linthresh=0.01, linscale=0.1)
        xlabel(ax, xlabels['big'], transform=big_subfig, **xlab_big_kwargs)
        ylabel(ax, ylabels['big'][i], **ylab_big_kwargs)
        big_axes[i] = ax
    letter_subplots(big_axes, 'bc', **letter_big_kwargs)
    # NOTE(review): this plt.show() fires before any curves are drawn
    # and (with blocking backends) halts each iteration on an empty
    # layout — looks like a debug leftover; confirm before removing.
    plt.show()
    # # Plot filtered snippets:
    # plot_snippets(snip_axes[0, :], t_full, data['snip_filt'],
    #               c=colors['filt'], lw=lw['filt'])
    # # Plot envelope snippets:
    # plot_snippets(snip_axes[1, :], t_full, data['snip_env'],
    #               ymin=0, c=colors['env'], lw=lw['env'])
    # # Plot logarithmic snippets:
    # plot_snippets(snip_axes[2, :], t_full, data['snip_log'],
    #               c=colors['log'], lw=lw['log'])
    # # Plot invariant snippets:
    # plot_snippets(snip_axes[3, :], t_full, data['snip_inv'],
    #               c=colors['inv'], lw=lw['inv'])
    # # Plot kernel response snippets:
    # plot_snippets(snip_axes[4, :], t_full, data['snip_conv'],
    #               c=colors['conv'], lw=lw['conv'])
    # # Plot feature snippets:
    # plot_snippets(snip_axes[5, :], t_full, data['snip_feat'],
    #               ymin=0, ymax=1, c=colors['feat'], lw=lw['feat'])
    # Analysis results:
    # NOTE(review): scales_rel is only referenced by the commented-out
    # relative-scale plotting below — currently dead computation.
    scales_rel = data['scales'] - data['scales'][0]
    scales_rel /= scales_rel[-1]
    for stage in stages:
        measure = data[f'measure_{stage}']
        # Plot unmodified intensity measures:
        curve = plot_curves(big_axes[0], data['scales'], measure, c=colors[stage], lw=lw['big'],
                            fill_kwargs=dict(color=colors[stage], alpha=0.25))
        if stage in ['log', 'inv', 'conv', 'feat']:
            show_saturation(big_axes[0], data['scales'], curve, c=colors[stage])
        # # Relate to pure-noise reference:
        # norm_measure = measure / ref_data[stage]
        # # Plot noise-related intensity measures:
        # big_axes[1].plot(data['scales'], norm_measure, c=colors[stage], lw=lw['big'])
        # Normalize measure to [0, 1]:
        min_measure = measure.min(axis=0)
        max_measure = measure.max(axis=0)
        norm_measure = (measure - min_measure) / (max_measure - min_measure)
        # Plot normalized intensity measures:
        curve = plot_curves(big_axes[1], data['scales'], norm_measure, c=colors[stage], lw=lw['big'],
                            fill_kwargs=dict(color=colors[stage], alpha=0.25))
        if stage in ['log', 'inv', 'conv', 'feat']:
            show_saturation(big_axes[1], data['scales'], curve, c=colors[stage])
        # # Plot over relative scales:
        # plot_curves(big_axes[2], scales_rel, norm_measure, c=colors[stage], lw=lw['big'],
        #             fill_kwargs=dict(color=colors[stage], alpha=0.25))
        # scales_rel = curve - curve.min()
        # scales_rel /= scales_rel.max()
    # Save figure if a destination is configured, then display it.
    if save_path is not None:
        fig.savefig(save_path)
    plt.show()
print('Done.')
# Drops into an interactive IPython shell for inspection.
embed()

View File

@@ -4,7 +4,7 @@ import matplotlib.pyplot as plt
from itertools import product from itertools import product
from thunderhopper.filetools import search_files from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data from thunderhopper.modeltools import load_data
from misc_functions import shorten_species, get_kde, get_saturation from misc_functions import shorten_species, get_saturation
from color_functions import load_colors from color_functions import load_colors
from plot_functions import hide_axis, ylimits, super_xlabel, ylabel, hide_ticks,\ from plot_functions import hide_axis, ylimits, super_xlabel, ylabel, hide_ticks,\
plot_line, strip_zeros, time_bar, zoom_inset,\ plot_line, strip_zeros, time_bar, zoom_inset,\
@@ -27,18 +27,9 @@ def plot_snippets(axes, time, snippets, ymin=None, ymax=None, **kwargs):
handles.extend(plot_line(ax, time, snippet, ymin=ymin, ymax=ymax, **kwargs)) handles.extend(plot_line(ax, time, snippet, ymin=ymin, ymax=ymax, **kwargs))
return handles return handles
# def zalpha(handles, background='w', down=1):
# twins = []
# for handle in handles:
# twin = handle.copy()
# twin.set(color=background, alpha=1)
# twin.set_zorder(handle.get_zorder() - down)
# twins.append(twin)
# return twins
# GENERAL SETTINGS: # GENERAL SETTINGS:
target = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms' target = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
data_paths = search_files(target, excl='noise', dir='../data/inv/log_hp/') data_path = search_files(target, excl='noise', dir='../data/inv/log_hp/')[0]
ref_path = '../data/inv/log_hp/ref_measures.npz' ref_path = '../data/inv/log_hp/ref_measures.npz'
save_path = '../figures/fig_invariance_log_hp.pdf' save_path = '../figures/fig_invariance_log_hp.pdf'
target_species = [ target_species = [
@@ -56,6 +47,7 @@ load_kwargs = dict(
keywords=['scales', 'snip', 'measure'] keywords=['scales', 'snip', 'measure']
) )
compute_ratios = True compute_ratios = True
exclude_zero = True
show_diag = True show_diag = True
show_plateaus = True show_plateaus = True
@@ -275,169 +267,180 @@ plateau_dot_kwargs = dict(
) )
# PREPARATION: # PREPARATION:
if compute_ratios:
ref_measures = dict(np.load(ref_path))
species_measures = {} species_measures = {}
thresh_inds = np.zeros((len(target_species),), dtype=int) thresh_inds = np.zeros((len(target_species),), dtype=int)
for i, species in enumerate(target_species): for i, species in enumerate(target_species):
spec_path = search_files(species, dir='../data/inv/log_hp/condensed/')[0] spec_path = search_files(species, dir='../data/inv/log_hp/condensed/')[0]
spec_data = dict(np.load(spec_path)) spec_data = dict(np.load(spec_path))
measure = spec_data['mean'].mean(axis=1) measure = spec_data['mean_inv'].mean(axis=-1)
if exclude_zero:
measure = measure[spec_data['scales'] > 0]
species_measures[species] = measure species_measures[species] = measure
thresh_inds[i] = get_saturation(measure, **plateau_settings)[1] thresh_inds[i] = get_saturation(measure, **plateau_settings)[1]
# EXECUTION: # EXECUTION:
for data_path in data_paths: print(f'Processing {data_path}')
print(f'Processing {data_path}')
# Load invariance data: # Load invariance data:
pure_data, config = load_data(data_path, **load_kwargs) pure_data, config = load_data(data_path, **load_kwargs)
noise_data, _ = load_data(data_path.replace('.npz', '_noise.npz'), **load_kwargs) noise_data, _ = load_data(data_path.replace('pure', 'noise'), **load_kwargs)
pure_scales, noise_scales = pure_data['scales'], noise_data['scales'] pure_scales, noise_scales = pure_data['scales'], noise_data['scales']
t_full = np.arange(pure_data['snip_env'].shape[0]) / config['env_rate'] t_full = np.arange(pure_data['snip_env'].shape[0]) / config['env_rate']
# Prepare overall graph: if compute_ratios:
fig = plt.figure(**fig_kwargs) # Relate pure-song measures to near-zero scale:
super_grid = fig.add_gridspec(**super_grid_kwargs) pure_data['measure_env'] /= pure_data['measure_env'][1]
fig.canvas.draw() pure_data['measure_log'] /= pure_data['measure_log'][1]
pure_data['measure_inv'] /= pure_data['measure_inv'][1]
# Relate noise-song measures to zero scale:
noise_data['measure_env'] /= noise_data['measure_env'][0]
noise_data['measure_log'] /= noise_data['measure_log'][0]
noise_data['measure_inv'] /= noise_data['measure_inv'][0]
# Prepare pure-song snippet axes: if exclude_zero:
pure_grid_kwargs['ncols'] = pure_data['example_scales'].size # Exclude zero scales:
pure_subfig = fig.add_subfigure(super_grid[subfig_specs['pure']]) inds = pure_scales > 0
pure_axes = add_snip_axes(pure_subfig, pure_grid_kwargs) pure_scales = pure_scales[inds]
for ax, stage in zip(pure_axes[:, 0], stages): pure_data['measure_env'] = pure_data['measure_env'][inds]
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage])) pure_data['measure_log'] = pure_data['measure_log'][inds]
ylabel(ax, ylabels[stage], **ylab_snip_kwargs, pure_data['measure_inv'] = pure_data['measure_inv'][inds]
transform=pure_subfig.transSubfigure) inds = noise_scales > 0
for ax, scale in zip(pure_axes[0, :], pure_data['example_scales']): noise_scales = noise_scales[inds]
pure_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs) noise_data['measure_env'] = noise_data['measure_env'][inds]
letter_subplot(pure_subfig, 'a', ref=pure_title, **letter_snip_kwargs) noise_data['measure_log'] = noise_data['measure_log'][inds]
pure_inset = pure_axes[0, 0].inset_axes(zoom_inset_bounds) noise_data['measure_inv'] = noise_data['measure_inv'][inds]
pure_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
pure_inset.tick_params(**inset_tick_kwargs)
hide_ticks(pure_inset, 'bottom', ticks=False)
# Prepare noise-song snippet axes: # Prepare overall graph:
noise_grid_kwargs['ncols'] = noise_data['example_scales'].size fig = plt.figure(**fig_kwargs)
noise_subfig = fig.add_subfigure(super_grid[subfig_specs['noise']]) super_grid = fig.add_gridspec(**super_grid_kwargs)
noise_axes = add_snip_axes(noise_subfig, noise_grid_kwargs) fig.canvas.draw()
for ax, stage in zip(noise_axes[:, 0], stages):
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage]))
ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
transform=noise_subfig.transSubfigure)
for ax, scale in zip(noise_axes[0, :], noise_data['example_scales']):
noise_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs)
letter_subplot(noise_subfig, 'b', ref=noise_title, **letter_snip_kwargs)
noise_inset = noise_axes[0, 0].inset_axes(zoom_inset_bounds)
noise_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
noise_inset.tick_params(**inset_tick_kwargs)
hide_ticks(noise_inset, 'bottom', ticks=False)
# Prepare analysis axes: # Prepare pure-song snippet axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']]) pure_grid_kwargs['ncols'] = pure_data['example_scales'].size
big_grid = big_subfig.add_gridspec(**big_grid_kwargs) pure_subfig = fig.add_subfigure(super_grid[subfig_specs['pure']])
big_axes = np.zeros((big_grid.ncols,), dtype=object) pure_axes = add_snip_axes(pure_subfig, pure_grid_kwargs)
for i, scales in enumerate([pure_scales, noise_scales, noise_scales]): for ax, stage in zip(pure_axes[:, 0], stages):
ax = big_subfig.add_subplot(big_grid[0, i]) ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage]))
ax.set_xlim(scales[0], scales[-1]) ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
ax.set_ylim(scales[0], scales[-1]) transform=pure_subfig.transSubfigure)
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5) for ax, scale in zip(pure_axes[0, :], pure_data['example_scales']):
ax.set_yscale('symlog', linthresh=scales[1], linscale=0.5) pure_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs)
ax.set_aspect(**anchor_kwargs) letter_subplot(pure_subfig, 'a', ref=pure_title, **letter_snip_kwargs)
if i > 0: pure_inset = pure_axes[0, 0].inset_axes(zoom_inset_bounds)
hide_ticks(ax, 'left') pure_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
big_axes[i] = ax pure_inset.tick_params(**inset_tick_kwargs)
ylabel(big_axes[0], ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs) hide_ticks(pure_inset, 'bottom', ticks=False)
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'cde', **letter_big_kwargs)
# Plot pure-song envelope snippets: # Prepare noise-song snippet axes:
handle = plot_snippets(pure_axes[0, :], t_full, pure_data['snip_env'], noise_grid_kwargs['ncols'] = noise_data['example_scales'].size
ymin=0, c=colors['env'], lw=lw['snip'])[0] noise_subfig = fig.add_subfigure(super_grid[subfig_specs['noise']])
zoom_inset(pure_axes[0, 0], pure_inset, handle, transform=pure_axes[0, 0].transAxes, **zoom_kwargs) noise_axes = add_snip_axes(noise_subfig, noise_grid_kwargs)
for ax, stage in zip(noise_axes[:, 0], stages):
ax.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage]))
ylabel(ax, ylabels[stage], **ylab_snip_kwargs,
transform=noise_subfig.transSubfigure)
for ax, scale in zip(noise_axes[0, :], noise_data['example_scales']):
noise_title = title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', **title_kwargs)
letter_subplot(noise_subfig, 'b', ref=noise_title, **letter_snip_kwargs)
noise_inset = noise_axes[0, 0].inset_axes(zoom_inset_bounds)
noise_inset.spines[:].set(visible=True, lw=zoom_kwargs['lw'])
noise_inset.tick_params(**inset_tick_kwargs)
hide_ticks(noise_inset, 'bottom', ticks=False)
# Plot pure-song logarithmic snippets: # Prepare analysis axes:
plot_snippets(pure_axes[1, :], t_full, pure_data['snip_log'], big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
c=colors['log'], lw=lw['snip']) big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
big_axes = np.zeros((big_grid.ncols,), dtype=object)
for i, scales in enumerate([pure_scales, noise_scales, noise_scales]):
ax = big_subfig.add_subplot(big_grid[0, i])
ax.set_xlim(scales[0], scales[-1])
ax.set_ylim(scales[0], scales[-1])
ax.set_xscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_yscale('symlog', linthresh=scales[1], linscale=0.5)
ax.set_aspect(**anchor_kwargs)
if i > 0:
hide_ticks(ax, 'left')
big_axes[i] = ax
ylabel(big_axes[0], ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
super_xlabel(xlabels['big'], big_subfig, big_axes[0], big_axes[-1], **xlab_big_kwargs)
letter_subplots(big_axes, 'cde', **letter_big_kwargs)
# Plot pure-song invariant snippets: # Plot pure-song envelope snippets:
plot_snippets(pure_axes[2, :], t_full, pure_data['snip_inv'], handle = plot_snippets(pure_axes[0, :], t_full, pure_data['snip_env'],
c=colors['inv'], lw=lw['snip']) ymin=0, c=colors['env'], lw=lw['snip'])[0]
zoom_inset(pure_axes[0, 0], pure_inset, handle, transform=pure_axes[0, 0].transAxes, **zoom_kwargs)
# Plot noise-song envelope snippets: # Plot pure-song logarithmic snippets:
ymin, ymax = pure_axes[0, 0].get_ylim() plot_snippets(pure_axes[1, :], t_full, pure_data['snip_log'],
handle = plot_snippets(noise_axes[0, :], t_full, noise_data['snip_env'], c=colors['log'], lw=lw['snip'])
ymin, ymax, c=colors['env'], lw=lw['snip'])[0]
zoom_inset(noise_axes[0, 0], noise_inset, handle, transform=noise_axes[0, 0].transAxes, **zoom_kwargs)
# Plot noise-song logarithmic snippets: # Plot pure-song invariant snippets:
ymin, ymax = pure_axes[1, 0].get_ylim() plot_snippets(pure_axes[2, :], t_full, pure_data['snip_inv'],
plot_snippets(noise_axes[1, :], t_full, noise_data['snip_log'], c=colors['inv'], lw=lw['snip'])
ymin, ymax, c=colors['log'], lw=lw['snip'])
# Plot noise-song invariant snippets: # Plot noise-song envelope snippets:
ymin, ymax = pure_axes[2, 0].get_ylim() ymin, ymax = pure_axes[0, 0].get_ylim()
plot_snippets(noise_axes[2, :], t_full, noise_data['snip_inv'], handle = plot_snippets(noise_axes[0, :], t_full, noise_data['snip_env'],
ymin, ymax, c=colors['inv'], lw=lw['snip']) ymin, ymax, c=colors['env'], lw=lw['snip'])[0]
zoom_inset(noise_axes[0, 0], noise_inset, handle, transform=noise_axes[0, 0].transAxes, **zoom_kwargs)
# Indicate time scale: # Plot noise-song logarithmic snippets:
time_bar(noise_axes[-1, -1], **bar_kwargs) ymin, ymax = pure_axes[1, 0].get_ylim()
plot_snippets(noise_axes[1, :], t_full, noise_data['snip_log'],
ymin, ymax, c=colors['log'], lw=lw['snip'])
if compute_ratios: # Plot noise-song invariant snippets:
# Relate pure-song measures to zero scale: ymin, ymax = pure_axes[2, 0].get_ylim()
pure_data['measure_env'] /= ref_measures['env'] plot_snippets(noise_axes[2, :], t_full, noise_data['snip_inv'],
pure_data['measure_log'] /= ref_measures['log'] ymin, ymax, c=colors['inv'], lw=lw['snip'])
pure_data['measure_inv'] /= ref_measures['inv']
# Relate noise-song measures to zero scale:
noise_data['measure_env'] /= ref_measures['env']
noise_data['measure_log'] /= ref_measures['log']
noise_data['measure_inv'] /= ref_measures['inv']
# Plot pure-song measures (ideal): # Indicate time scale:
big_axes[0].plot(pure_scales, pure_data['measure_env'], c=colors['env'], lw=lw['big']) time_bar(noise_axes[-1, -1], **bar_kwargs)
big_axes[0].plot(pure_scales, pure_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_data['measure_inv'], c=colors['inv'], lw=lw['big'])
# Plot noise-song measures (limited): # Plot pure-song measures (ideal):
big_axes[1].plot(noise_scales, noise_data['measure_env'], c=colors['env'], lw=lw['big']) big_axes[0].plot(pure_scales, pure_data['measure_env'], c=colors['env'], lw=lw['big'])
big_axes[1].plot(noise_scales, noise_data['measure_log'], c=colors['log'], lw=lw['big']) big_axes[0].plot(pure_scales, pure_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[1].plot(noise_scales, noise_data['measure_inv'], c=colors['inv'], lw=lw['big']) big_axes[0].plot(pure_scales, pure_data['measure_inv'], c=colors['inv'], lw=lw['big'])
if show_diag: # Plot noise-song measures (limited):
# Indicate diagonal: big_axes[1].plot(noise_scales, noise_data['measure_env'], c=colors['env'], lw=lw['big'])
big_axes[0].plot(pure_scales, pure_scales, **diag_kwargs) big_axes[1].plot(noise_scales, noise_data['measure_log'], c=colors['log'], lw=lw['big'])
big_axes[1].plot(noise_scales, noise_scales, **diag_kwargs) big_axes[1].plot(noise_scales, noise_data['measure_inv'], c=colors['inv'], lw=lw['big'])
if show_plateaus: if show_diag:
# Indicate low and high plateaus of noise invariance curve: # Indicate diagonal:
low_ind, high_ind = get_saturation(noise_data['measure_inv'], **plateau_settings) big_axes[0].plot(pure_scales, pure_scales, **diag_kwargs)
big_axes[1].axvspan(noise_scales[0], noise_scales[low_ind], big_axes[1].plot(noise_scales, noise_scales, **diag_kwargs)
fc=noise_colors[0], **plateau_rect_kwargs)
big_axes[1].axvspan(noise_scales[low_ind], noise_scales[high_ind],
fc=noise_colors[1], **plateau_rect_kwargs)
# Plot species-specific noise-song invariance curves: if show_plateaus:
for i, (species, measure) in enumerate(species_measures.items()): # Indicate low and high plateaus of noise invariance curve:
# Plot invariance curve: low_ind, high_ind = get_saturation(noise_data['measure_inv'], **plateau_settings)
color = species_colors[species] big_axes[1].axvspan(noise_scales[0], noise_scales[low_ind],
big_axes[2].plot(noise_scales, measure, label=shorten_species(species), fc=noise_colors[0], **plateau_rect_kwargs)
c=color, lw=lw['spec']) big_axes[1].axvspan(noise_scales[low_ind], noise_scales[high_ind],
# Indicate saturation: fc=noise_colors[1], **plateau_rect_kwargs)
ind = thresh_inds[i]
scale = noise_scales[ind]
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=color, mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], measure[ind],
color=color, **plateau_line_kwargs)
legend = big_axes[2].legend(**leg_kwargs)
[h.set_lw(lw['legend']) for h in legend.legend_handles]
if save_path is not None: # Plot species-specific noise-song invariance curves:
fig.savefig(save_path, bbox_inches='tight') for i, (species, measure) in enumerate(species_measures.items()):
plt.show() # Plot invariance curve:
color = species_colors[species]
big_axes[2].plot(noise_scales, measure, label=shorten_species(species),
c=color, lw=lw['spec'])
# Indicate saturation:
ind = thresh_inds[i]
scale = noise_scales[ind]
big_axes[2].plot(scale, 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].plot(scale, 0, mfc=color, mec='k', alpha=0.75, zorder=6, **plateau_dot_kwargs,
transform=big_axes[2].get_xaxis_transform())
big_axes[2].vlines(scale, big_axes[2].get_ylim()[0], measure[ind],
color=color, **plateau_line_kwargs)
legend = big_axes[2].legend(**leg_kwargs)
[h.set_lw(lw['legend']) for h in legend.legend_handles]
if save_path is not None:
fig.savefig(save_path, bbox_inches='tight')
plt.show()
print('Done.') print('Done.')
embed() embed()

View File

@@ -2,7 +2,7 @@ import plotstyle_plt
import numpy as np import numpy as np
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files from thunderhopper.filetools import search_files
from plot_functions import ylabel, super_xlabel, letter_subplots, title_subplot from plot_functions import ylabel, super_xlabel, title_subplot
from color_functions import load_colors from color_functions import load_colors
from misc_functions import shorten_species from misc_functions import shorten_species
@@ -17,7 +17,10 @@ target_species = [
'Pseudochorthippus_parallelus', 'Pseudochorthippus_parallelus',
] ]
data_path = '../data/inv/log_hp/condensed/' data_path = '../data/inv/log_hp/condensed/'
save_path = '../figures/fig_invariance_log-hp_species.pdf' save_path = '../figures/fig_invariance_log-hp_appendix.pdf'
# ANALYSIS SETTINGS:
exclude_zero = True
# GRAPH SETTINGS: # GRAPH SETTINGS:
fig_kwargs = dict( fig_kwargs = dict(
@@ -45,6 +48,22 @@ fill_kwargs = dict(
alpha=0.3, alpha=0.3,
zorder=1, zorder=1,
) )
mean_kwargs = dict(
# c=(0.5,) * 3,
lw=2,
alpha=1,
zorder=3,
ls='--'
)
mean_colors = {
'Chorthippus_biguttulus': (1,) * 3,
'Chorthippus_mollis': (0,) * 3,
'Chrysochraon_dispar': (0,) * 3,
'Euchorthippus_declivus': (0,) * 3,
'Gomphocerippus_rufus': (0,) * 3,
'Omocestus_rufipes': (0,) * 3,
'Pseudochorthippus_parallelus': (0,) * 3,
}
xlab = 'scale $\\alpha$' xlab = 'scale $\\alpha$'
ylab = '$\\sigma_{\\alpha}\\,/\\,\\sigma_{\\eta}$' ylab = '$\\sigma_{\\alpha}\\,/\\,\\sigma_{\\eta}$'
xlab_kwargs = dict( xlab_kwargs = dict(
@@ -82,7 +101,6 @@ axes[0].set_xscale('log')
axes[0].set_yscale('log') axes[0].set_yscale('log')
super_xlabel(xlab, fig, axes[0], axes[-1], **xlab_kwargs) super_xlabel(xlab, fig, axes[0], axes[-1], **xlab_kwargs)
ylabel(axes[0], ylab, **ylab_kwargs, transform=fig.transFigure) ylabel(axes[0], ylab, **ylab_kwargs, transform=fig.transFigure)
# letter_subplots(axes, **letter_kwargs)
# Run through species: # Run through species:
for species, ax in zip(target_species, axes): for species, ax in zip(target_species, axes):
@@ -93,14 +111,24 @@ for species, ax in zip(target_species, axes):
path = search_files(species, dir=data_path)[0] path = search_files(species, dir=data_path)[0]
data = dict(np.load(path)) data = dict(np.load(path))
scales = data['scales'] scales = data['scales']
means = data['mean'] means = data['mean_inv']
sds = data['sd'] sds = data['sd_inv']
if exclude_zero:
# Exclude zero scale:
inds = scales > 0
scales = scales[inds]
means = means[inds, :]
sds = sds[inds, :]
# Plot recording-specific traces: # Plot recording-specific traces:
for mean, sd in zip(means.T, sds.T): for mean, sd in zip(means.T, sds.T):
ax.plot(scales, mean, c=color, **line_kwargs) ax.plot(scales, mean, c=color, **line_kwargs)
ax.fill_between(scales, mean - sd, mean + sd, color=color, **fill_kwargs) ax.fill_between(scales, mean - sd, mean + sd, color=color, **fill_kwargs)
# Plot species mean trace:
ax.plot(scales, means.mean(axis=-1), c=mean_colors[species], **mean_kwargs)
# Save graph: # Save graph:
fig.savefig(save_path) fig.savefig(save_path)
plt.show() plt.show()

View File

@@ -0,0 +1,191 @@
import plotstyle_plt
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.filetools import search_files
from thunderhopper.modeltools import load_data
from plot_functions import ylabel, ylimits, super_xlabel, title_subplot, time_bar
from color_functions import load_colors, shade_colors
from misc_functions import shorten_species
from IPython import embed
# GENERAL SETTINGS:
target_species = [
'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
]
data_path = '../data/inv/thresh_lp/condensed/'
save_path = '../figures/fig_invariance_thresh-lp_appendix.pdf'
# ANALYSIS SETTINGS:
exclude_zero = True
# SUBSET SETTINGS:
thresh_rel = np.array([0.5, 1, 3])[0]
kern_specs = np.array([
[1, 0.008],
[2, 0.004],
[3, 0.002],
])[np.array([0, 1, 2])]
n_kernels = kern_specs.shape[0]
# GRAPH SETTINGS:
fig_kwargs = dict(
figsize=(32/2.54, 16/2.54),
nrows=n_kernels,
ncols=len(target_species),
sharex=True,
sharey=True,
gridspec_kw=dict(
wspace=0.4,
hspace=0.2,
left=0.07,
right=0.98,
bottom=0.1,
top=0.95,
)
)
# PLOT SETTINGS:
species_colors = load_colors('../data/species_colors.npz')
kern_shades = [0, 0.75]
kern_colors = shade_colors((0., 0., 0.), np.linspace(*kern_shades, n_kernels))
line_kwargs = dict(
lw=2,
alpha=0.5,
zorder=2,
)
fill_kwargs = dict(
alpha=0.3,
zorder=1,
)
mean_kwargs = dict(
# c=(0.5,) * 3,
lw=2,
alpha=1,
zorder=3,
ls='--'
)
mean_colors = {
'Chorthippus_biguttulus': (1,) * 3,
'Chorthippus_mollis': (0,) * 3,
'Chrysochraon_dispar': (0,) * 3,
'Euchorthippus_declivus': (0,) * 3,
'Gomphocerippus_rufus': (0,) * 3,
'Omocestus_rufipes': (0,) * 3,
'Pseudochorthippus_parallelus': (1,) * 3,
}
kern_kwargs = dict(
lw=2,
)
inset_bounds = [0.05, 0.6, 0.3, 0.25]
kern_bar_time = 0.05
kern_bar_kwargs = dict(
dur=kern_bar_time,
y0=0.1,
y1=0.2,
color='k',
lw=0,
clip_on=False,
text_pos=(0.5, -1),
text_str=f'${int(kern_bar_time * 1000)}\\,\\text{{ms}}$',
text_kwargs=dict(
fontsize=12,
ha='center',
va='top',
)
)
xlab = 'scale $\\alpha$'
ylabs = [f'$\\mu_{{f_{i}}}$' for i in range(1, n_kernels + 1)]
xlab_kwargs = dict(
y=0,
fontsize=16,
ha='center',
va='bottom',
)
ylab_kwargs = dict(
x=0,
fontsize=20,
ha='center',
va='top',
)
title_kwargs = dict(
x=0.5,
yref=0.99,
ha='center',
va='top',
fontsize=16,
fontstyle='italic',
)
letter_kwargs = dict(
x=0.005,
y=0.99,
fontsize=22,
ha='left',
va='top',
)
# Prepare graph:
fig, axes = plt.subplots(**fig_kwargs)
axes[0, 0].set_xscale('log')
axes[0, 0].set_ylim(0, 1)
axes[0, 0].yaxis.set_major_locator(plt.MultipleLocator(0.5))
super_xlabel(xlab, fig, axes[-1, 0], axes[-1, -1], **xlab_kwargs)
insets = []
for ax, ylab in zip(axes[:, 0], ylabs):
ylabel(ax, ylab, **ylab_kwargs, transform=fig.transFigure)
insets.append(ax.inset_axes(inset_bounds))
# Run through species:
for i, (species, spec_axes) in enumerate(zip(target_species, axes.T)):
title_subplot(spec_axes[0], shorten_species(species), ref=fig, **title_kwargs)
# Load species data:
path = search_files(species, dir=data_path)[0]
data, config = load_data(path, files=['scales', 'mean_feat', 'sd_feat', 'thresh_rel'])
scales = data['scales']
means = data['mean_feat']
sds = data['sd_feat']
# Reduce to single threshold:
ind = np.nonzero(data['thresh_rel'] == thresh_rel)[0][0]
means = means[:, :, ind, :]
sds = sds[:, :, ind, :]
if exclude_zero:
# Exclude zero scale:
inds = scales > 0
scales = scales[inds]
means = means[inds, :, :]
sds = sds[inds, :, :]
# Run through kernels:
for j, (ax, inset) in enumerate(zip(spec_axes, insets)):
if i == 0:
# Indicate kernel waveform:
inset.plot(config['k_times'], config['kernels'][:, j],
c=kern_colors[j], **kern_kwargs)
inset.set_xlim(config['k_times'][[0, -1]])
ylimits(config['kernels'], inset, pad=0.05)
inset.set_title(rf'$k_{{{j+1}}}$', fontsize=15)
if j == 0:
time_bar(inset, **kern_bar_kwargs)
inset.axis('off')
# Plot recording-specific traces:
for k in range(means.shape[-1]):
ax.plot(scales, means[:, j, k], c=species_colors[species], **line_kwargs)
spread = (means[:, j, k] - sds[:, j, k], means[:, j, k] + sds[:, j, k])
ax.fill_between(scales, *spread, color=species_colors[species], **fill_kwargs)
# Plot kernel-specific mean trace:
ax.plot(scales, means[:, j, :].mean(axis=-1), c=mean_colors[species], **mean_kwargs)
# Save graph:
fig.savefig(save_path)
plt.show()

View File

@@ -58,19 +58,19 @@ def side_distributions(axes, snippets, inset_bounds, thresh, nbins=1000,
# GENERAL SETTINGS: # GENERAL SETTINGS:
target = 'Omocestus_rufipes' example_file = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
data_paths = search_files(target, incl='noise', dir='../data/inv/thresh_lp/') data_path = search_files(example_file, incl='noise', dir='../data/inv/thresh_lp/')[0]
stages = ['conv', 'bi', 'feat'] stages = ['conv', 'bi', 'feat']
load_kwargs = dict( load_kwargs = dict(
files=stages, files=stages,
keywords=['scales', 'snip', 'measure', 'thresh'] keywords=['scales', 'snip', 'measure', 'thresh']
) )
save_path = None#'../figures/fig_invariance_thresh_lp_single.pdf' save_path = '../figures/fig_invariance_thresh_lp_single.pdf'
exclude_zero = True exclude_zero = True
# GRAPH SETTINGS: # GRAPH SETTINGS:
fig_kwargs = dict( fig_kwargs = dict(
figsize=(32/2.54, 16/2.54), figsize=(32/2.54, 32/2.54),
) )
super_grid_kwargs = dict( super_grid_kwargs = dict(
nrows=None, nrows=None,
@@ -140,6 +140,8 @@ lw = dict(
bi=0.1, bi=0.1,
feat=3, feat=3,
big=4, big=4,
kern=2.5,
plateau=1.5,
) )
xlabels = dict( xlabels = dict(
alpha='scale $\\alpha$', alpha='scale $\\alpha$',
@@ -216,6 +218,10 @@ letter_big_kwargs = dict(
va='top', va='top',
fontsize=fs['letter'], fontsize=fs['letter'],
) )
kern_kwargs = dict(
c='k',
lw=lw['kern'],
)
dist_kwargs = dict( dist_kwargs = dict(
c='k', c='k',
lw=1, lw=1,
@@ -257,171 +263,198 @@ plateau_settings = dict(
last=True, last=True,
condense=None, condense=None,
) )
plateau_line_kwargs = dict(
lw=lw['plateau'],
ls='--',
zorder=1,
)
plateau_dot_kwargs = dict(
marker='o',
markersize=8,
markeredgewidth=1,
clip_on=False,
)
zoom_rel = np.array([0.5, 0.515])
# SUBSET SETTINGS:
kern_specs = np.array([ kern_specs = np.array([
[1, 0.008], [1, 0.008],
[2, 0.004], [2, 0.004],
[3, 0.002], [3, 0.002],
])[np.array([1])] ])[np.array([1])]
zoom_rel = np.array([0.5, 0.515])
# EXECUTION: # EXECUTION:
for data_path in data_paths: print(f'Processing {data_path}')
print(f'Processing {data_path}')
# Load invariance data: # Load invariance data:
noise_data, config = load_data(data_path, **load_kwargs) noise_data, config = load_data(data_path, **load_kwargs)
pure_data, _ = load_data(data_path.replace('noise', 'pure'), **load_kwargs) pure_data, _ = load_data(data_path.replace('noise', 'pure'), **load_kwargs)
# Unpack shared variables: # Unpack shared variables:
scales = noise_data['scales'] scales = noise_data['scales']
plot_scales = noise_data['example_scales'] plot_scales = noise_data['example_scales']
thresh_rel = noise_data['thresh_rel'] thresh_rel = noise_data['thresh_rel']
thresh_abs = noise_data['thresh_abs'] thresh_abs = noise_data['thresh_abs']
# Reduce to kernel subset and crop to zoom frame: # Reduce to kernel subset and crop to zoom frame:
t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate'] t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate']
zoom_abs = zoom_rel * t_full[-1] zoom_abs = zoom_rel * t_full[-1]
zoom_inds = (t_full >= zoom_abs[0]) & (t_full <= zoom_abs[1]) zoom_inds = (t_full >= zoom_abs[0]) & (t_full <= zoom_abs[1])
kern_ind = find_kern_specs(config['k_specs'], kerns=kern_specs)[0] kern_ind = find_kern_specs(config['k_specs'], kerns=kern_specs)[0]
noise_data['snip_inv'] = noise_data['snip_inv'][zoom_inds, :] noise_data['snip_inv'] = noise_data['snip_inv'][zoom_inds, :]
noise_data['snip_conv'] = noise_data['snip_conv'][zoom_inds, kern_ind, :] noise_data['snip_conv'] = noise_data['snip_conv'][zoom_inds, kern_ind, :]
noise_data['snip_bi'] = noise_data['snip_bi'][zoom_inds, kern_ind, :, :] noise_data['snip_bi'] = noise_data['snip_bi'][zoom_inds, kern_ind, :, :]
noise_data['snip_feat'] = noise_data['snip_feat'][zoom_inds, kern_ind, :, :] noise_data['snip_feat'] = noise_data['snip_feat'][zoom_inds, kern_ind, :, :]
noise_data['measure_feat'] = noise_data['measure_feat'][:, kern_ind, :] noise_data['measure_feat'] = noise_data['measure_feat'][:, kern_ind, :]
pure_data['measure_feat'] = pure_data['measure_feat'][:, kern_ind, :] pure_data['measure_feat'] = pure_data['measure_feat'][:, kern_ind, :]
thresh_abs = thresh_abs[:, kern_ind] config['kernels'] = config['kernels'][:, kern_ind]
t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate'] thresh_abs = thresh_abs[:, kern_ind]
if exclude_zero: t_full = np.arange(noise_data['snip_conv'].shape[0]) / config['env_rate']
# Reduce to nonzero scales:
nonzero_inds = scales > 0
scales = scales[nonzero_inds]
noise_data['measure_inv'] = noise_data['measure_inv'][nonzero_inds]
noise_data['measure_feat'] = noise_data['measure_feat'][nonzero_inds, :]
pure_data['measure_feat'] = pure_data['measure_feat'][nonzero_inds, :]
# Get threshold-specific colors: if exclude_zero:
factors = np.linspace(*shade_factors, thresh_rel.size) # Exclude zero scale:
shaded = dict( inds = scales > 0
conv=shade_colors(colors['conv'], factors), scales = scales[inds]
bi=shade_colors(colors['bi'], factors), noise_data['measure_inv'] = noise_data['measure_inv'][inds]
feat=shade_colors(colors['feat'], factors), noise_data['measure_feat'] = noise_data['measure_feat'][inds, :]
) pure_data['measure_feat'] = pure_data['measure_feat'][inds, :]
# Adjust grid parameters to loaded data: # Get threshold-specific colors:
super_grid_kwargs['nrows'] = snip_rows * thresh_rel.size + input_rows factors = np.linspace(*shade_factors, thresh_rel.size)
input_grid_kwargs['ncols'] = plot_scales.size shaded = dict(
snip_grid_kwargs['ncols'] = plot_scales.size conv=shade_colors(colors['conv'], factors),
bi=shade_colors(colors['bi'], factors),
feat=shade_colors(colors['feat'], factors),
)
# Prepare overall graph: # Adjust grid parameters to loaded data:
fig = plt.figure(**fig_kwargs) super_grid_kwargs['nrows'] = snip_rows * thresh_rel.size + input_rows
super_grid = fig.add_gridspec(**super_grid_kwargs) input_grid_kwargs['ncols'] = plot_scales.size
snip_grid_kwargs['ncols'] = plot_scales.size
# Prepare input snippet axes: # Prepare overall graph:
input_subfig = fig.add_subfigure(super_grid[subfig_specs['input']]) fig = plt.figure(**fig_kwargs)
input_axes = add_snip_axes(input_subfig, input_grid_kwargs).ravel() super_grid = fig.add_gridspec(**super_grid_kwargs)
input_axes[0].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][0]))
input_axes[1].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][1]))
ylabel(input_axes[0], ylabels['inv'], transform=input_subfig.transSubfigure, **ylab_snip_kwargs)
for ax, scale in zip(input_axes, plot_scales):
title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', ref=input_subfig, **title_kwargs)
letter_subplot(input_subfig, 'a', **letter_snip_kwargs)
# Prepare snippet axes: # Prepare input snippet axes:
snip_subfigs, snip_axes = [], [] input_subfig = fig.add_subfigure(super_grid[subfig_specs['input']])
for i in range(thresh_rel.size): input_axes = add_snip_axes(input_subfig, input_grid_kwargs).ravel()
subfig_spec = subfig_specs['snip'].copy() input_axes[0].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][0]))
subfig_spec[0] = slice(*(subfig_spec[0] + i * snip_rows)) input_axes[1].yaxis.set_major_locator(plt.MultipleLocator(yloc['inv'][1]))
snip_subfig = fig.add_subfigure(super_grid[*subfig_spec]) ylabel(input_axes[0], ylabels['inv'], transform=input_subfig.transSubfigure, **ylab_snip_kwargs)
axes = add_snip_axes(snip_subfig, snip_grid_kwargs) for ax, scale in zip(input_axes, plot_scales):
[hide_axis(ax, 'left') for ax in axes[1:, 1]] title_subplot(ax, f'$\\alpha={strip_zeros(scale)}$', ref=input_subfig, **title_kwargs)
super_ylabel(f'$\\Theta={strip_zeros(thresh_rel[i])}\\cdot\\sigma_{{\\eta}}$', letter_subplot(input_subfig, 'a', **letter_snip_kwargs)
snip_subfig, axes[-1, 0], axes[0, 0], **ylab_super_kwargs)
for (ax1, ax2), stage in zip(axes[:, :2], stages):
ax1.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][0]))
ax2.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][1]))
ylabel(ax1, ylabels[stage], transform=snip_subfig.transSubfigure, **ylab_snip_kwargs)
if i == thresh_rel.size - 1:
axes[-1, -1].set_xlim(t_full[0], t_full[-1])
time_bar(axes[-1, -1], **bar_kwargs)
snip_subfigs.append(snip_subfig)
snip_axes.append(axes)
letter_subplots(snip_subfigs, 'bcd', **letter_snip_kwargs)
# Prepare analysis axes: # Prepare snippet axes:
big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']]) snip_subfigs, snip_axes = [], []
big_grid = big_subfig.add_gridspec(**big_grid_kwargs) for i in range(thresh_rel.size):
subfig_spec = subfig_specs['snip'].copy()
subfig_spec[0] = slice(*(subfig_spec[0] + i * snip_rows))
snip_subfig = fig.add_subfigure(super_grid[*subfig_spec])
axes = add_snip_axes(snip_subfig, snip_grid_kwargs)
[hide_axis(ax, 'left') for ax in axes[1:, 1]]
super_ylabel(f'$\\Theta={strip_zeros(thresh_rel[i])}\\cdot\\sigma_{{\\eta}}$',
snip_subfig, axes[-1, 0], axes[0, 0], **ylab_super_kwargs)
for (ax1, ax2), stage in zip(axes[:, :2], stages):
ax1.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][0]))
ax2.yaxis.set_major_locator(plt.MultipleLocator(yloc[stage][1]))
ylabel(ax1, ylabels[stage], transform=snip_subfig.transSubfigure, **ylab_snip_kwargs)
if i == thresh_rel.size - 1:
axes[-1, -1].set_xlim(t_full[0], t_full[-1])
time_bar(axes[-1, -1], **bar_kwargs)
snip_subfigs.append(snip_subfig)
snip_axes.append(axes)
letter_subplots(snip_subfigs, 'bcd', **letter_snip_kwargs)
alpha_ax = big_subfig.add_subplot(big_grid[0, 0]) # Prepare analysis axes:
alpha_ax.set_xlim(scales[0], scales[-1]) big_subfig = fig.add_subfigure(super_grid[subfig_specs['big']])
alpha_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5) big_grid = big_subfig.add_gridspec(**big_grid_kwargs)
ylimits(pure_data['measure_feat'], alpha_ax, minval=0, pad=ypad['big'])
alpha_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
xlabel(alpha_ax, xlabels['alpha'], **xlab_alpha_kwargs)
ylabel(alpha_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
sigma_ax = big_subfig.add_subplot(big_grid[1, 0]) alpha_ax = big_subfig.add_subplot(big_grid[0, 0])
sigma_ax.set_xlim(noise_data['measure_inv'].min(), noise_data['measure_inv'].max()) alpha_ax.set_xlim(scales[0], scales[-1])
# sigma_ax.set_xscale('log') alpha_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5)
sigma_ax.set_xlim(scales[0], scales[-1]) ylimits(pure_data['measure_feat'], alpha_ax, minval=0, pad=ypad['big'])
sigma_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5) alpha_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
ylimits(pure_data['measure_feat'], sigma_ax, minval=0, pad=ypad['big']) xlabel(alpha_ax, xlabels['alpha'], **xlab_alpha_kwargs)
sigma_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big'])) ylabel(alpha_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
xlabel(sigma_ax, xlabels['sigma'], **xlab_sigma_kwargs)
ylabel(sigma_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
# Plot intensity-adapted snippets: sigma_ax = big_subfig.add_subplot(big_grid[1, 0])
plot_snippets(input_axes, t_full, noise_data['snip_inv'], sigma_ax.set_xlim(noise_data['measure_inv'].min(), noise_data['measure_inv'].max())
ypad=ypad['inv'], c=colors['inv'], lw=lw['inv']) sigma_ax.set_xlim(scales[0], scales[-1])
ylimits(noise_data['snip_inv'][:, 0], input_axes[0], pad=ypad['inv']) sigma_ax.set_xscale('symlog', linthresh=scales[scales > 0][0], linscale=0.5)
ylimits(pure_data['measure_feat'], sigma_ax, minval=0, pad=ypad['big'])
sigma_ax.yaxis.set_major_locator(plt.MultipleLocator(yloc['big']))
xlabel(sigma_ax, xlabels['sigma'], **xlab_sigma_kwargs)
ylabel(sigma_ax, ylabels['big'], transform=big_subfig.transSubfigure, **ylab_big_kwargs)
# Plot representation snippets per threshold: # Plot intensity-adapted snippets:
for i, (subfig, axes) in enumerate(zip(snip_subfigs, snip_axes)): plot_snippets(input_axes, t_full, noise_data['snip_inv'],
dist_fill_kwargs['color'] = shaded['bi'][i] ypad=ypad['inv'], c=colors['inv'], lw=lw['inv'])
ylimits(noise_data['snip_inv'][:, 0], input_axes[0], pad=ypad['inv'])
# Plot kernel response snippets: # Indicate kernel waveform over 1st intensity-adapted snippet:
plot_snippets(axes[0, :], t_full, noise_data['snip_conv'], thresh=thresh_abs[i], input_axes[0].plot(config['k_times'] + 0.5 * t_full[-1], config['kernels'], **kern_kwargs)
ypad=ypad['conv'], fill_kwargs=dist_fill_kwargs, c=shaded['conv'][i], lw=lw['conv'])
ylimits(noise_data['snip_conv'][:, 0], axes[0, 0], pad=ypad['conv'])
# Plot kernel response distributions: # Plot representation snippets per threshold:
side_distributions(axes[0, :1], noise_data['snip_conv'][:, :1], dist_inset_bounds, for i, (subfig, axes) in enumerate(zip(snip_subfigs, snip_axes)):
thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs) dist_fill_kwargs['color'] = shaded['bi'][i]
side_distributions(axes[0, 1:], noise_data['snip_conv'][:, 1:], dist_inset_bounds,
thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs)
# Plot binary snippets: # Plot kernel response snippets:
plot_bi_snippets(axes[1, :], t_full, noise_data['snip_bi'][:, :, i], plot_snippets(axes[0, :], t_full, noise_data['snip_conv'], thresh=thresh_abs[i],
color=shaded['bi'][i], lw=lw['bi']) ypad=ypad['conv'], fill_kwargs=dist_fill_kwargs, c=shaded['conv'][i], lw=lw['conv'])
ylimits(noise_data['snip_conv'][:, 0], axes[0, 0], pad=ypad['conv'])
# Plot feature snippets: # Plot kernel response distributions:
handles = plot_snippets(axes[2, :], t_full, noise_data['snip_feat'][:, :, i], side_distributions(axes[0, :1], noise_data['snip_conv'][:, :1], dist_inset_bounds,
ymin=0, ymax=1, c=shaded['feat'][i], lw=lw['feat']) thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs)
[set_clip_box(h[0], ax, bounds=[[0, -0.05], [1, 1.05]]) for h, ax in zip(handles, axes[2, :])] side_distributions(axes[0, 1:], noise_data['snip_conv'][:, 1:], dist_inset_bounds,
thresh_abs[i], nbins=50, fill_kwargs=dist_fill_kwargs, **dist_kwargs)
# Get threshold-specific saturation: # Plot binary snippets:
for i in range(thresh_rel.size): plot_bi_snippets(axes[1, :], t_full, noise_data['snip_bi'][:, :, i],
ind = get_saturation(noise_data['measure_feat'][:, i], **plateau_settings)[1] color=shaded['bi'][i], lw=lw['bi'])
# Plot analysis results: # Plot feature snippets:
for ax, x in zip([alpha_ax, sigma_ax], [scales, noise_data['measure_inv']]): handles = plot_snippets(axes[2, :], t_full, noise_data['snip_feat'][:, :, i],
# Plot pure-song analysis results: ymin=0, ymax=1, c=shaded['feat'][i], lw=lw['feat'])
handles = ax.plot(x, pure_data['measure_feat'], lw=lw['big'], ls='dotted') [set_clip_box(h[0], ax, bounds=[[0, -0.05], [1, 1.05]]) for h, ax in zip(handles, axes[2, :])]
[h.set_color(c) for h, c in zip(handles, shaded['feat'])]
# Plot noise-song analysis results: # Get saturation:
handles = ax.plot(x, noise_data['measure_feat'], lw=lw['big']) saturation_inds = []
[h.set_color(c) for h, c in zip(handles, shaded['feat'])] for i in range(thresh_rel.size):
ind = get_saturation(noise_data['measure_feat'][:, i], **plateau_settings)[1]
saturation_inds.append(ind)
# Add proxy legend: # Plot analysis results:
if ax == alpha_ax: for ax, x in zip([alpha_ax, sigma_ax], [scales, noise_data['measure_inv']]):
h1 = ax.plot([], [], c='k', lw=lw['big'], label='$\\alpha\\cdot s(t) + \\eta(t)$')[0] # Plot pure-song analysis results:
h2 = ax.plot([], [], c='k', lw=lw['big'], ls='dotted', label='$\\alpha\\cdot s(t)$')[0] handles = ax.plot(x, pure_data['measure_feat'], lw=lw['big'], ls='dotted')
ax.legend(handles=[h1, h2], **leg_kwargs) [h.set_color(c) for h, c in zip(handles, shaded['feat'])]
if save_path is not None: # Plot noise-song analysis results:
fig.savefig(save_path) handles = ax.plot(x, noise_data['measure_feat'], lw=lw['big'])
plt.show() [h.set_color(c) for h, c in zip(handles, shaded['feat'])]
# Indicate threshold-specific saturation:
for i, ind in enumerate(saturation_inds):
color = shaded['feat'][i]
ax.plot(x[ind], 0, c='w', alpha=1, zorder=5.5, **plateau_dot_kwargs,
transform=ax.get_xaxis_transform())
ax.plot(x[ind], 0, mfc=color, mec='k', alpha=0.75, zorder=6,
**plateau_dot_kwargs, transform=ax.get_xaxis_transform())
ax.vlines(x[ind], ax.get_ylim()[0], noise_data['measure_feat'][ind, i],
color=color, **plateau_line_kwargs)
# Add proxy legend:
if ax == alpha_ax:
h1 = ax.plot([], [], c='k', lw=lw['big'], label='$\\alpha\\cdot s(t) + \\eta(t)$')[0]
h2 = ax.plot([], [], c='k', lw=lw['big'], ls='dotted', label='$\\alpha\\cdot s(t)$')[0]
ax.legend(handles=[h1, h2], **leg_kwargs)
if save_path is not None:
fig.savefig(save_path)
plt.show()
print('Done.') print('Done.')
embed() embed()

View File

@@ -162,20 +162,30 @@ def add_cross_axes(fig, n, long='col', fill='row', **grid_kwargs):
# GENERAL SETTINGS: # GENERAL SETTINGS:
target_species = [ target_species = [
'Omocestus_rufipes',
'Chorthippus_biguttulus', 'Chorthippus_biguttulus',
'Chorthippus_mollis', 'Chorthippus_mollis',
'Chrysochraon_dispar', 'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus', 'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus', 'Pseudochorthippus_parallelus',
] ]
example_files = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}
n_species = len(target_species) n_species = len(target_species)
load_kwargs = dict( load_kwargs = dict(
keywords=['scales', 'measure', 'thresh'] keywords=['scales', 'mean', 'thresh']
) )
save_path = '../figures/fig_invariance_thresh_lp_species.pdf' save_path = '../figures/fig_invariance_thresh_lp_species.pdf'
exclude_zero = True exclude_zero = True
show_floor = True show_floor = False
# SUBSET SETTINGS: # SUBSET SETTINGS:
thresh_rel = np.array([0.5, 1, 3])[0] thresh_rel = np.array([0.5, 1, 3])[0]
@@ -266,14 +276,6 @@ lw = dict(
kern=2.5, kern=2.5,
plateau=3, plateau=3,
) )
zorder = dict(
Omocestus_rufipes=2,
Chorthippus_biguttulus=2.5,
Chorthippus_mollis=2.4,
Chrysochraon_dispar=2,
Gomphocerippus_rufus=2,
Pseudochorthippus_parallelus=2,
)
space_kwargs = dict( space_kwargs = dict(
s=30, s=30,
) )
@@ -357,21 +359,27 @@ letter_space_kwargs = dict(
va='center', va='center',
fontsize=fs['letter'], fontsize=fs['letter'],
) )
song_bar_time = 1.0 spec_bar_times = dict(
Chorthippus_biguttulus=1,
Chorthippus_mollis=10,
Chrysochraon_dispar=1,
Euchorthippus_declivus=0.25,
Gomphocerippus_rufus=5,
Omocestus_rufipes=5,
Pseudochorthippus_parallelus=1,
)
song_bar_kwargs = dict( song_bar_kwargs = dict(
dur=song_bar_time,
y0=-0.1, y0=-0.1,
y1=0, y1=0,
xshift=0, xshift=0.5,
color='k', color='k',
lw=0, lw=0,
clip_on=False, clip_on=False,
text_pos=(1.25, 0.5), text_pos=(0.5, -0.1),
text_str=f'${int(song_bar_time)}\\,\\text{{s}}$',
text_kwargs=dict( text_kwargs=dict(
fontsize=fs['bar'], fontsize=fs['bar'],
ha='left', ha='center',
va='center', va='top',
) )
) )
kern_bar_time = 0.05 kern_bar_time = 0.05
@@ -382,7 +390,7 @@ kern_bar_kwargs = dict(
color='k', color='k',
lw=0, lw=0,
clip_on=False, clip_on=False,
text_pos=(0.6, -1), text_pos=(0.7, -1),
text_str=f'${int(kern_bar_time * 1000)}\\,\\text{{ms}}$', text_str=f'${int(kern_bar_time * 1000)}\\,\\text{{ms}}$',
text_kwargs=dict( text_kwargs=dict(
fontsize=fs['bar'], fontsize=fs['bar'],
@@ -502,7 +510,7 @@ for i, species in enumerate(target_species):
print(f'Processing {species}') print(f'Processing {species}')
# Fetch species-specific recording file: # Fetch species-specific recording file:
song_path = search_files(species, dir='../data/processed/')[0] song_path = search_files(example_files[species], dir='../data/processed/')[0]
# Load song data: # Load song data:
song_data, _ = load_data(song_path, files='filt') song_data, _ = load_data(song_path, files='filt')
@@ -513,16 +521,18 @@ for i, species in enumerate(target_species):
time = np.arange(song.shape[0]) / rate time = np.arange(song.shape[0]) / rate
plot_line(song_ax, time, song, ypad=0.05, c='k', lw=lw['song']) plot_line(song_ax, time, song, ypad=0.05, c='k', lw=lw['song'])
title_subplot(song_ax, shorten_species(species), ref=song_subfig, **title_kwargs) title_subplot(song_ax, shorten_species(species), ref=song_subfig, **title_kwargs)
time_bar(song_ax, **song_bar_kwargs) time_bar(song_ax, dur=spec_bar_times[species], **song_bar_kwargs,
song_bar_kwargs['text_pos'] = None text_str=f'${spec_bar_times[species]}\\,\\text{{s}}$')
# Fetch species-specific invariance files: # Fetch species-specific invariance files:
pure_path = search_files(species, incl='pure', dir='../data/inv/thresh_lp/')[0] pure_path = search_files(species, incl='pure', dir='../data/inv/thresh_lp/condensed/')[0]
noise_path = search_files(species, incl='noise', dir='../data/inv/thresh_lp/')[0] noise_path = search_files(species, incl='noise', dir='../data/inv/thresh_lp/condensed/')[0]
# Load invariance data: # Load invariance data:
pure_data, config = load_data(pure_path, **load_kwargs) pure_data, config = load_data(pure_path, **load_kwargs)
noise_data, _ = load_data(noise_path, **load_kwargs) noise_data, _ = load_data(noise_path, **load_kwargs)
pure_measure = pure_data['mean_feat'].mean(axis=-1)
noise_measure = noise_data['mean_feat'].mean(axis=-1)
scales = pure_data['scales'] scales = pure_data['scales']
# Reduce to kernel subset and a single threshold: # Reduce to kernel subset and a single threshold:
@@ -530,8 +540,8 @@ for i, species in enumerate(target_species):
kern_inds = find_kern_specs(config['k_specs'], kerns=kern_specs) kern_inds = find_kern_specs(config['k_specs'], kerns=kern_specs)
config['k_specs'] = config['k_specs'][kern_inds] config['k_specs'] = config['k_specs'][kern_inds]
config['kernels'] = config['kernels'][:, kern_inds] config['kernels'] = config['kernels'][:, kern_inds]
pure_measure = pure_data['measure_feat'][:, kern_inds, thresh_ind] pure_measure = pure_measure[:, kern_inds, thresh_ind]
noise_measure = noise_data['measure_feat'][:, kern_inds, thresh_ind] noise_measure = noise_measure[:, kern_inds, thresh_ind]
if exclude_zero: if exclude_zero:
# Reduce to nonzero scales: # Reduce to nonzero scales:
nonzero_inds = scales > 0 nonzero_inds = scales > 0
@@ -564,7 +574,6 @@ for i, species in enumerate(target_species):
inset.plot(config['k_times'], kern, c=c, lw=lw['kern']) inset.plot(config['k_times'], kern, c=c, lw=lw['kern'])
inset.set_xlim(xlims) inset.set_xlim(xlims)
inset.set_ylim(ylims) inset.set_ylim(ylims)
# time_bar(insets[0], parent=feat_axes[0, 0], **kern_bar_kwargs)
time_bar(insets[0], **kern_bar_kwargs) time_bar(insets[0], **kern_bar_kwargs)
# Plot invariance curves in feature space: # Plot invariance curves in feature space:
@@ -572,13 +581,11 @@ for i, species in enumerate(target_species):
for ind, (pure_ax, noise_ax) in enumerate(zip(pure_axes, noise_axes)): for ind, (pure_ax, noise_ax) in enumerate(zip(pure_axes, noise_axes)):
irow, icol = row_inds[ind], col_inds[ind] irow, icol = row_inds[ind], col_inds[ind]
pure_handle = pure_ax.scatter(pure_measure[:, icol], pure_measure[:, irow], pure_handle = pure_ax.scatter(pure_measure[:, icol], pure_measure[:, irow],
c=scales, cmap=scale_cmap, norm=norm, c=scales, cmap=scale_cmap, norm=norm, **space_kwargs)
zorder=zorder[species], **space_kwargs)
pure_space_handles[pure_ax].append(pure_handle) pure_space_handles[pure_ax].append(pure_handle)
noise_handle = noise_ax.scatter(noise_measure[:, icol], noise_measure[:, irow], noise_handle = noise_ax.scatter(noise_measure[:, icol], noise_measure[:, irow],
c=scales, cmap=scale_cmap, norm=norm, c=scales, cmap=scale_cmap, norm=norm, **space_kwargs)
zorder=zorder[species], **space_kwargs)
noise_space_handles[noise_ax].append(noise_handle) noise_space_handles[noise_ax].append(noise_handle)
# Indicate scale color code in pure subfigure: # Indicate scale color code in pure subfigure:

View File

@@ -1,5 +1,6 @@
import numpy as np import numpy as np
from scipy.stats import gaussian_kde from scipy.stats import gaussian_kde
from thunderhopper.filetools import crop_paths
def shorten_species(name): def shorten_species(name):
genus, species = name.split('_') genus, species = name.split('_')
@@ -9,6 +10,44 @@ def unsort_unique(array):
values, inds = np.unique(array, return_index=True) values, inds = np.unique(array, return_index=True)
return values[np.argsort(inds)] return values[np.argsort(inds)]
def draw_noise_segment(noise, n, rng=None):
    """Return a random contiguous segment of length n drawn from noise.

    Parameters
    ----------
    noise : ndarray
        Source array; segments are drawn along the first axis.
    n : int
        Segment length in samples. Must not exceed noise.shape[0].
    rng : numpy.random.Generator, optional
        Random generator to use. Pass a seeded generator for
        reproducible draws; if omitted, a fresh unseeded generator is
        created (the original behavior).

    Returns
    -------
    ndarray
        Copy of noise[start:start + n] for a uniformly drawn start
        index. Because np.take returns a copy, in-place edits of the
        segment never touch the source array.
    """
    if rng is None:
        rng = np.random.default_rng()
    # endpoint=True includes start == noise.shape[0] - n, so the segment
    # may end exactly on the last sample:
    start = rng.integers(0, noise.shape[0] - n, endpoint=True)
    return np.take(noise, np.arange(start, start + n), axis=0)
def sort_files_by_rec(paths, sources=('BM04', 'BM93', 'DJN', 'GBC', 'FTN')):
    """Group song file paths by their recording of origin.

    File names are expected to contain a source tag (e.g. 'DJN')
    followed by '_<id>-...'. If the dash-separated segment after the ID
    looks like a time stamp (contains 's'), all of that source's files
    belong to a single recording; otherwise the segment is an integer
    sub-ID selecting one of several recordings of that source.

    Parameters
    ----------
    paths : list of str
        Paths to song files.
    sources : iterable of str, optional
        Source tags to look for in the file names.

    Returns
    -------
    list of list of str
        One inner list of paths per recording. The source grouping is
        discarded; only the per-recording grouping is kept.
    """
    # Separate by source:
    grouped = {}
    for source in sources:
        # Check for any source-specific song files:
        source_paths = [path for path in paths if source in path]
        if not source_paths:
            continue
        # Separate by recording:
        grouped[source] = [[]]
        for path, name in zip(source_paths, crop_paths(source_paths)):
            # Find numerical ID behind source tag:
            id_ind = name.find(source) + len(source) + 1
            # Get segment where sub-ID would be.
            # NOTE(review): assumes at least one '-' follows the ID —
            # confirm the file-naming scheme guarantees this.
            sub_id = name[id_ind:].split('-')[1]
            if 's' in sub_id:
                # Found time stamp (single recording):
                grouped[source][0].append(path)
                continue
            sub_id = int(sub_id)
            # Found sub-ID (multiple recordings). Sub-IDs may appear out
            # of order, so open every missing recording slot up to
            # sub_id. (The previous 'if' appended only a single slot and
            # raised IndexError for non-consecutive sub-IDs.)
            while sub_id > len(grouped[source]):
                grouped[source].append([])
            grouped[source][sub_id - 1].append(path)
    # Re-sort song files by recording only (discarding source separation):
    return [recording for recordings in grouped.values() for recording in recordings]
def get_kde(data, sigma, axis=None, n=1000, pad=10): def get_kde(data, sigma, axis=None, n=1000, pad=10):
if axis is None: if axis is None:
axis = np.linspace(data.min() - pad * sigma, data.max() + pad * sigma, n) axis = np.linspace(data.min() - pad * sigma, data.max() + pad * sigma, n)

View File

@@ -1,24 +1,42 @@
import glob
import numpy as np import numpy as np
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from thunderhopper.modeltools import load_data, save_data from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import crop_paths from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filtertools import find_kern_specs from thunderhopper.filtertools import find_kern_specs
from thunderhopper.model import process_signal, convolve_kernels from thunderhopper.model import process_signal
from misc_functions import draw_noise_segment
from IPython import embed from IPython import embed
# GENERAL SETTINGS: # GENERAL SETTINGS:
target = 'Omocestus_rufipes' target_species = [
data_paths = glob.glob(f'../data/processed/{target}*.npz') 'Chorthippus_biguttulus',
'Chorthippus_mollis',
'Chrysochraon_dispar',
'Euchorthippus_declivus',
'Gomphocerippus_rufus',
'Omocestus_rufipes',
'Pseudochorthippus_parallelus',
][0]
example_file = {
'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
data_paths = search_files(target_species, dir='../data/processed/')
noise_path = '../data/processed/white_noise_sd-1.npz' noise_path = '../data/processed/white_noise_sd-1.npz'
ref_path = '../data/inv/full/ref_measures.npz'
stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat'] stages = ['filt', 'env', 'log', 'inv', 'conv', 'feat']
save_path = '../data/inv/full/' save_path = '../data/inv/full/'
# ANALYSIS SETTINGS: # ANALYSIS SETTINGS:
example_scales = np.array([0.1, 1, 10, 30, 100, 300]) example_scales = np.array([0.1, 1, 10, 30, 100, 300])
scales = np.geomspace(0.01, 10000, 100) scales = np.geomspace(0.01, 10000, 500)
scales = np.unique(np.concatenate((scales, example_scales))) scales = np.unique(np.concatenate(([0], scales, example_scales)))
thresh_rel = 3 thresh_rel = 0.5
# SUBSET SETTINGS: # SUBSET SETTINGS:
kernels = np.array([ kernels = np.array([
@@ -34,11 +52,14 @@ types = None#np.array([-1])
sigmas = None#np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032]) sigmas = None#np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# PREPARATION: # PREPARATION:
noise_data = np.load(noise_path) pure_noise = np.load(noise_path)['raw']
pure_noise = noise_data['raw'] if thresh_rel is not None:
# Get threshold values from pure-noise response SD:
thresh_abs = np.load(ref_path)['conv'] * thresh_rel
# EXECUTION: # EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)): for data_path, name in zip(data_paths, crop_paths(data_paths)):
save_detailed = example_file in name
print(f'Processing {name}') print(f'Processing {name}')
# Get song recording (prior to anything): # Get song recording (prior to anything):
@@ -46,8 +67,8 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
song, rate = data['raw'], config['rate'] song, rate = data['raw'], config['rate']
if thresh_rel is not None: if thresh_rel is not None:
# Get noise-bound kernel-specific thresholds: # Set kernel-specific thresholds:
config['feat_thresh'] = noise_data['conv'].std(axis=0) * thresh_rel config['feat_thresh'] = thresh_abs
# Reduce to kernel subset: # Reduce to kernel subset:
if any(var is not None for var in [kernels, types, sigmas]): if any(var is not None for var in [kernels, types, sigmas]):
@@ -66,22 +87,10 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
song /= song[segment].std(axis=0) song /= song[segment].std(axis=0)
# Get normalized noise component: # Get normalized noise component:
noise = pure_noise[:song.shape[0]] noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std() noise /= noise[segment].std()
# Prepare snippet storage: # Prepare storage:
shape_low = (song.shape[0], example_scales.size)
shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size)
snippets = dict(
snip_filt=np.zeros(shape_low, dtype=float),
snip_env=np.zeros(shape_low, dtype=float),
snip_log=np.zeros(shape_low, dtype=float),
snip_inv=np.zeros(shape_low, dtype=float),
snip_conv=np.zeros(shape_high, dtype=float),
snip_feat=np.zeros(shape_high, dtype=float)
)
# Prepare measure storage:
shape_low = (scales.size,) shape_low = (scales.size,)
shape_high = (scales.size, config['k_specs'].shape[0]) shape_high = (scales.size, config['k_specs'].shape[0])
measures = dict( measures = dict(
@@ -91,6 +100,18 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
measure_inv=np.zeros(shape_low, dtype=float), measure_inv=np.zeros(shape_low, dtype=float),
measure_conv=np.zeros(shape_high, dtype=float), measure_conv=np.zeros(shape_high, dtype=float),
measure_feat=np.zeros(shape_high, dtype=float) measure_feat=np.zeros(shape_high, dtype=float)
)
if save_detailed:
# Prepare optional storage:
shape_low = (song.shape[0], example_scales.size)
shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size)
snippets = dict(
snip_filt=np.zeros(shape_low, dtype=float),
snip_env=np.zeros(shape_low, dtype=float),
snip_log=np.zeros(shape_low, dtype=float),
snip_inv=np.zeros(shape_low, dtype=float),
snip_conv=np.zeros(shape_high, dtype=float),
snip_feat=np.zeros(shape_high, dtype=float)
) )
# Execute piecewise: # Execute piecewise:
@@ -105,18 +126,17 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
signal=scaled, rate=rate) signal=scaled, rate=rate)
# Store results: # Store results:
for stage in stages: for stage in stages:
mkey, skey = f'measure_{stage}', f'snip_{stage}' # Log intensity measures:
mkey = f'measure_{stage}'
# Log snippet data: if stage == 'feat':
if scale in example_scales:
scale_ind = np.nonzero(example_scales == scale)[0][0]
snippets[skey][:, ..., scale_ind] = signals[stage]
# Log intensity measure per stage (excluding binary):
if stage in ['raw', 'filt', 'env', 'log', 'inv', 'conv']:
measures[mkey][i] = signals[stage][segment, ...].std(axis=0)
elif stage == 'feat':
measures[mkey][i] = signals[stage][segment, :].mean(axis=0) measures[mkey][i] = signals[stage][segment, :].mean(axis=0)
else:
measures[mkey][i] = signals[stage][segment, ...].std(axis=0)
# Log optional snippet data:
if save_detailed and scale in example_scales:
scale_ind = np.nonzero(example_scales == scale)[0][0]
snippets[f'snip_{stage}'][:, ..., scale_ind] = signals[stage]
# Save analysis results: # Save analysis results:
if save_path is not None: if save_path is not None:
@@ -124,8 +144,9 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
scales=scales, scales=scales,
example_scales=example_scales, example_scales=example_scales,
) )
data.update(snippets)
data.update(measures) data.update(measures)
if save_detailed:
data.update(snippets)
save_data(save_path + name, data, config, overwrite=True) save_data(save_path + name, data, config, overwrite=True)
print('Done.') print('Done.')
embed() embed()

View File

@@ -2,6 +2,7 @@ import numpy as np
from thunderhopper.modeltools import load_data, save_data from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import search_files, crop_paths from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filters import decibel, sosfilter from thunderhopper.filters import decibel, sosfilter
from misc_functions import draw_noise_segment
from IPython import embed from IPython import embed
# GENERAL SETTINGS: # GENERAL SETTINGS:
@@ -12,17 +13,18 @@ noise_path = '../data/processed/white_noise_sd-1.npz'
save_path = '../data/inv/log_hp/' save_path = '../data/inv/log_hp/'
# ANALYSIS SETTINGS: # ANALYSIS SETTINGS:
add_noise = search_target == '*' or False add_noise = search_target == '*'
save_detailed = search_target == example_file
example_scales = np.array([0.1, 1, 10, 30, 100, 300]) example_scales = np.array([0.1, 1, 10, 30, 100, 300])
scales = np.geomspace(0.01, 10000, 1000) scales = np.geomspace(0.01, 10000, 1000)
scales = np.unique(np.concatenate((scales, example_scales))) scales = np.unique(np.concatenate(([0], scales, example_scales)))
# PREPARATION: # PREPARATION:
pure_noise = np.load(noise_path)['filt'] if add_noise:
pure_noise = np.load(noise_path)['filt']
# EXECUTION: # EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)): for data_path, name in zip(data_paths, crop_paths(data_paths)):
save_detailed = example_file in name
print(f'Processing {name}') print(f'Processing {name}')
# Get filtered song (prior to envelope extraction): # Get filtered song (prior to envelope extraction):
@@ -38,7 +40,7 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
song /= song[segment].std() song /= song[segment].std()
if add_noise: if add_noise:
# Get normalized noise component: # Get normalized noise component:
noise = pure_noise[:song.shape[0]] noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std() noise /= noise[segment].std()
# Prepare storage: # Prepare storage:
@@ -93,10 +95,12 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
snip_log=snip_log, snip_log=snip_log,
snip_inv=snip_inv, snip_inv=snip_inv,
) )
file_name = save_path + name save_name = save_path + name
if add_noise: if add_noise:
file_name += '_noise' save_name += '_noise'
save_data(file_name, archive, config, overwrite=True) else:
save_name += '_pure'
save_data(save_name, archive, config, overwrite=True)
print('Done.') print('Done.')
embed() embed()

View File

@@ -0,0 +1,157 @@
import numpy as np
import matplotlib.pyplot as plt
from thunderhopper.modeltools import load_data, save_data
from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filtertools import find_kern_specs
from thunderhopper.filters import sosfilter
from thunderhopper.model import convolve_kernels, process_signal
from misc_functions import draw_noise_segment
from IPython import embed
# GENERAL SETTINGS:
# Species under analysis; switch species by changing the index into this list:
target_species = [
    'Chorthippus_biguttulus',
    'Chorthippus_mollis',
    'Chrysochraon_dispar',
    'Euchorthippus_declivus',
    'Gomphocerippus_rufus',
    'Omocestus_rufipes',
    'Pseudochorthippus_parallelus',
][5]
# Per-species example recording for which detailed snippet data is saved
# (all other recordings only get the scalar intensity measures):
example_file = {
    'Chorthippus_biguttulus': 'Chorthippus_biguttulus_GBC_94-17s73.1ms-19s977ms',
    'Chorthippus_mollis': 'Chorthippus_mollis_DJN_41_T28C-46s4.58ms-1m15s697ms',
    'Chrysochraon_dispar': 'Chrysochraon_dispar_DJN_26_T28C_DT-32s134ms-34s432ms',
    'Euchorthippus_declivus': 'Euchorthippus_declivus_FTN_79-2s167ms-2s563ms',
    'Gomphocerippus_rufus': 'Gomphocerippus_rufus_FTN_91-3-884ms-10s427ms',
    'Omocestus_rufipes': 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms',
    'Pseudochorthippus_parallelus': 'Pseudochorthippus_parallelus_GBC_88-6s678ms-9s32.3ms'
}[target_species]
# Input recordings, reference noise, and pure-noise reference measures:
data_paths = search_files(target_species, dir='../data/processed/')
noise_path = '../data/processed/white_noise_sd-1.npz'
ref_path = '../data/inv/short/ref_measures.npz'
# Model stages computed by process_signal vs. appended manually below:
pre_stages = ['filt', 'env']
stages = pre_stages + ['conv', 'feat']
save_path = '../data/inv/short/'
# ANALYSIS SETTINGS:
# Scale factors at which full time-series snippets are kept:
example_scales = np.array([0.1, 1, 10, 30, 100, 300])
# Full scale sweep (log-spaced), extended by zero and the example scales:
scales = np.geomspace(0.01, 10000, 500)
scales = np.unique(np.concatenate(([0], scales, example_scales)))
# Feature threshold as a multiple of the pure-noise response SD
# (None disables threshold overriding):
thresh_rel = 0.5
# SUBSET SETTINGS:
# Optional kernel subset as (type, sigma) pairs ...
kernels = np.array([
    [1, 0.002],
    [-1, 0.002],
    [2, 0.004],
    [-2, 0.004],
    [3, 0.032],
    [-3, 0.032]
])
# ... currently disabled (full kernel set is used):
kernels = None
types = None#np.array([-1])
sigmas = None#np.array([0.001, 0.002, 0.004, 0.008, 0.016, 0.032])
# PREPARATION:
# Raw white-noise signal used as additive background at every scale:
pure_noise = np.load(noise_path)['raw']
if thresh_rel is not None:
    # Get threshold values from pure-noise response SD:
    thresh_abs = np.load(ref_path)['conv'] * thresh_rel
# EXECUTION:
# Sweep every recording of the target species through the shortened model
# (filt -> env -> conv -> feat, skipping log/inv) at all song scales:
for data_path, name in zip(data_paths, crop_paths(data_paths)):
    # Keep full time-series snippets only for the designated example file:
    save_detailed = example_file in name
    print(f'Processing {name}')
    # Get song recording (prior to anything):
    data, config = load_data(data_path, files='raw')
    song, rate = data['raw'], config['rate']
    if thresh_rel is not None:
        # Set kernel-specific thresholds (noise-referenced, loaded above):
        config['feat_thresh'] = thresh_abs
    # Reduce to kernel subset:
    if any(var is not None for var in [kernels, types, sigmas]):
        kern_inds = find_kern_specs(config['k_specs'], kernels, types, sigmas)
        config['kernels'] = config['kernels'][:, kern_inds]
        config['k_specs'] = config['k_specs'][kern_inds, :]
        config['k_props'] = [config['k_props'][i] for i in kern_inds]
        config['feat_thresh'] = config['feat_thresh'][kern_inds]
    # Get song segment to be analyzed (stored as start/end times in s):
    time = np.arange(song.shape[0]) / rate
    start, end = data['songs_0'].ravel()
    segment = (time >= start) & (time <= end)
    # Normalize song component to unit SD within the song segment:
    song /= song[segment].std(axis=0)
    # Get normalized noise component (random segment of matching length):
    noise = draw_noise_segment(pure_noise, song.shape[0])
    noise /= noise[segment].std()
    # Prepare storage (one intensity measure per scale and stage;
    # conv/feat additionally resolve per kernel):
    shape_low = (scales.size,)
    shape_high = (scales.size, config['k_specs'].shape[0])
    measures = dict(
        measure_filt=np.zeros(shape_low, dtype=float),
        measure_env=np.zeros(shape_low, dtype=float),
        measure_conv=np.zeros(shape_high, dtype=float),
        measure_feat=np.zeros(shape_high, dtype=float)
    )
    if save_detailed:
        # Prepare optional storage (full time series per example scale):
        shape_low = (song.shape[0], example_scales.size)
        shape_high = (song.shape[0], config['k_specs'].shape[0], example_scales.size)
        snippets = dict(
            snip_filt=np.zeros(shape_low, dtype=float),
            snip_env=np.zeros(shape_low, dtype=float),
            snip_conv=np.zeros(shape_high, dtype=float),
            snip_feat=np.zeros(shape_high, dtype=float)
        )
    # Execute piecewise:
    for i, scale in enumerate(scales):
        print('Simulating scale ', scale)
        # Rescale song and add noise (noise floor is scale-independent):
        scaled = song * scale + noise
        # Process mixture through the early model stages:
        signals, rates = process_signal(config, returns=pre_stages,
                                        signal=scaled, rate=rate)
        # Process mixture further: kernel convolution, then threshold and
        # low-pass the binary response into the feature signal.
        signals['conv'] = convolve_kernels(signals['env'], config['kernels'], config['k_specs'])
        # NOTE(review): the sibling 'short' pipeline filters the conv
        # stage at config['env_rate'] — confirm the raw rate is correct
        # here (i.e. that the envelope is not downsampled).
        signals['feat'] = sosfilter((signals['conv'] > config['feat_thresh']).astype(float),
                                    rate, config['feat_fcut'], 'lp',
                                    padtype='fixed', padlen=config['padlen'])
        # Store results:
        for stage in stages:
            # Log intensity measures (mean rate for the binary-derived
            # feature stage, SD for all analog stages):
            mkey = f'measure_{stage}'
            if stage == 'feat':
                measures[mkey][i] = signals[stage][segment, :].mean(axis=0)
            else:
                measures[mkey][i] = signals[stage][segment, ...].std(axis=0)
            # Log optional snippet data:
            if save_detailed and scale in example_scales:
                scale_ind = np.nonzero(example_scales == scale)[0][0]
                snippets[f'snip_{stage}'][:, ..., scale_ind] = signals[stage]
    # Save analysis results:
    if save_path is not None:
        # Note: rebinds 'data' (the loaded archive is no longer needed).
        data = dict(
            scales=scales,
            example_scales=example_scales,
        )
        data.update(measures)
        if save_detailed:
            data.update(snippets)
        save_data(save_path + name, data, config, overwrite=True)
print('Done.')
embed()

View File

@@ -5,21 +5,23 @@ from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filters import sosfilter from thunderhopper.filters import sosfilter
from thunderhopper.filtertools import find_kern_specs from thunderhopper.filtertools import find_kern_specs
from thunderhopper.model import convolve_kernels from thunderhopper.model import convolve_kernels
from misc_functions import draw_noise_segment
from IPython import embed from IPython import embed
# GENERAL SETTINGS: # GENERAL SETTINGS:
target = ['Omocestus_rufipes', '*'][0] example_file = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
data_paths = search_files(target, excl='noise', dir='../data/processed/') search_target = ['*', example_file][0]
data_paths = search_files(search_target, excl='noise', dir='../data/processed/')
noise_path = '../data/processed/white_noise_sd-1.npz' noise_path = '../data/processed/white_noise_sd-1.npz'
ref_path = '../data/inv/thresh_lp/ref_measures.npz'
save_path = '../data/inv/thresh_lp/' save_path = '../data/inv/thresh_lp/'
# ANALYSIS SETTINGS: # ANALYSIS SETTINGS:
add_noise = False add_noise = False
save_snippets = add_noise and (target == 'Omocestus_rufipes')
plot_results = False plot_results = False
example_scales = np.array([0, 1, 10, 30, 100]) example_scales = np.array([0, 1, 10, 30, 100])
scales = np.geomspace(0.01, 10000, 100) scales = np.geomspace(0.01, 10000, 1000)
scales = np.unique(np.concatenate((scales, example_scales))) scales = np.unique(np.concatenate(([0], scales, example_scales)))
thresh_rel = np.array([0.5, 1, 3]) thresh_rel = np.array([0.5, 1, 3])
kern_specs = np.array([ kern_specs = np.array([
[1, 0.008], [1, 0.008],
@@ -28,12 +30,15 @@ kern_specs = np.array([
]) ])
# PREPARATION: # PREPARATION:
pure_noise = np.load(noise_path)['inv'] if add_noise:
pure_noise = np.load(noise_path)['inv']
# Define kernel-specific threshold values based on pure-noise response SD:
thresh_abs = np.load(ref_path)['conv'][None, :] * thresh_rel[:, None]
# EXECUTION: # EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)): for data_path, name in zip(data_paths, crop_paths(data_paths)):
save_detailed = example_file in name
print(f'Processing {name}') print(f'Processing {name}')
save_name = save_path + name
# Get adapted envelope (prior to convolution): # Get adapted envelope (prior to convolution):
data, config = load_data(data_path, files='inv') data, config = load_data(data_path, files='inv')
@@ -44,28 +49,25 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
start, end = data['songs_0'].ravel() start, end = data['songs_0'].ravel()
segment = (time >= start) & (time <= end) segment = (time >= start) & (time <= end)
# Normalize song component:
song /= song[segment].std()
# Reduce to kernel subset: # Reduce to kernel subset:
kern_inds = find_kern_specs(config['k_specs'], kerns=kern_specs) kern_inds = find_kern_specs(config['k_specs'], kerns=kern_specs)
config['kernels'] = config['kernels'][:, kern_inds] config['kernels'] = config['kernels'][:, kern_inds]
config['k_specs'] = config['k_specs'][kern_inds, :] config['k_specs'] = config['k_specs'][kern_inds, :]
config['k_props'] = [config['k_props'][i] for i in kern_inds] config['k_props'] = [config['k_props'][i] for i in kern_inds]
# Get normalized noise component: if add_noise:
noise = pure_noise[:song.shape[0]] # Get normalized noise component:
noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std()
# Normalize both components: # Prepare storage:
song /= song[segment].std()
noise /= noise[segment].std()
# Define kernel-specific threshold values based on pure-noise response SD:
ref_conv = convolve_kernels(noise, config['kernels'], config['k_specs'])
thresh_abs = ref_conv[segment, :].std(axis=0, keepdims=True) * thresh_rel[:, None]
# Prepare measure storage:
measure_inv = np.zeros((scales.size,), dtype=float)
measure_feat = np.zeros((scales.size, kern_specs.shape[0], thresh_rel.size), dtype=float) measure_feat = np.zeros((scales.size, kern_specs.shape[0], thresh_rel.size), dtype=float)
if save_snippets: if save_detailed:
# Prepare snippet storage: # Prepare optional storage:
measure_inv = np.zeros((scales.size,), dtype=float)
snip_inv = np.zeros((song.size, example_scales.size), dtype=float) snip_inv = np.zeros((song.size, example_scales.size), dtype=float)
shape = (song.size, kern_specs.shape[0], example_scales.size, thresh_rel.size) shape = (song.size, kern_specs.shape[0], example_scales.size, thresh_rel.size)
snip_conv = np.zeros(shape[:-1], dtype=float) snip_conv = np.zeros(shape[:-1], dtype=float)
@@ -82,20 +84,21 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
# Add noise: # Add noise:
scaled_song += noise scaled_song += noise
# Log input intensity measure: if save_detailed:
measure_inv[i] = scaled_song[segment].std() # Log input intensity measure:
measure_inv[i] = scaled_song[segment].std()
# Process mixture: # Process mixture:
scaled_conv = convolve_kernels(scaled_song, config['kernels'], config['k_specs']) scaled_conv = convolve_kernels(scaled_song, config['kernels'], config['k_specs'])
# Log threshold-independent snippet data: # Log threshold-independent snippet data:
if save_snippets and scale in example_scales: if save_detailed and scale in example_scales:
save_ind = np.nonzero(example_scales == scale)[0][0] save_ind = np.nonzero(example_scales == scale)[0][0]
snip_inv[:, save_ind] = scaled_song snip_inv[:, save_ind] = scaled_song
snip_conv[:, :, save_ind] = scaled_conv snip_conv[:, :, save_ind] = scaled_conv
# Execute piecewise again: # Execute piecewise again:
for j, thresholds in enumerate(thresh_abs): for j, thresholds in enumerate(thresh_abs[:, kern_inds]):
# Process mixture further: # Process mixture further:
scaled_bi = (scaled_conv > thresholds).astype(float) scaled_bi = (scaled_conv > thresholds).astype(float)
@@ -103,11 +106,11 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
padtype='fixed', padlen=config['padlen']) padtype='fixed', padlen=config['padlen'])
# Log threshold-dependent snippet data: # Log threshold-dependent snippet data:
if save_snippets and scale in example_scales: if save_detailed and scale in example_scales:
snip_bi[:, :, save_ind, j] = scaled_bi snip_bi[:, :, save_ind, j] = scaled_bi
snip_feat[:, :, save_ind, j] = scaled_feat snip_feat[:, :, save_ind, j] = scaled_feat
# Log intensity measure: # Log output intensity measure:
measure_feat[i, :, j] = scaled_feat[segment, :].mean(axis=0) measure_feat[i, :, j] = scaled_feat[segment, :].mean(axis=0)
# Overview plot: # Overview plot:
@@ -133,18 +136,19 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
data = dict( data = dict(
scales=scales, scales=scales,
example_scales=example_scales, example_scales=example_scales,
measure_inv=measure_inv,
measure_feat=measure_feat, measure_feat=measure_feat,
thresh_rel=thresh_rel, thresh_rel=thresh_rel,
thresh_abs=thresh_abs, thresh_abs=thresh_abs,
) )
if save_snippets: if save_detailed:
data.update(dict( data.update(dict(
measure_inv=measure_inv,
snip_inv=snip_inv, snip_inv=snip_inv,
snip_conv=snip_conv, snip_conv=snip_conv,
snip_bi=snip_bi, snip_bi=snip_bi,
snip_feat=snip_feat, snip_feat=snip_feat,
)) ))
save_name = save_path + name
if add_noise: if add_noise:
save_name += '_noise' save_name += '_noise'
else: else:

View File

@@ -9,7 +9,7 @@ from IPython import embed
save_path = '../data/processed/white_noise' save_path = '../data/processed/white_noise'
stages = ['raw', 'filt', 'env', 'log', 'inv', 'conv', 'bi', 'feat'] stages = ['raw', 'filt', 'env', 'log', 'inv', 'conv', 'bi', 'feat']
sds = [1] sds = [1]
dur = 60 dur = 180
# Interactivity: # Interactivity:
reload_saved = False reload_saved = False
@@ -45,6 +45,7 @@ for sd in sds:
# Generate white noise signal: # Generate white noise signal:
noise = rng.normal(loc=0, scale=sd, size=n_samples) noise = rng.normal(loc=0, scale=sd, size=n_samples)
print('Got your no(i)se!')
# Fetch and store representations: # Fetch and store representations:
save = None if save_path is None else save_path + f'_sd-{sd}.npz' save = None if save_path is None else save_path + f'_sd-{sd}.npz'

View File

@@ -7,7 +7,7 @@ from IPython import embed
## SETTINGS: ## SETTINGS:
# General: # General:
mode = ['log_hp', 'thresh_lp', 'full'][2] mode = ['log_hp', 'thresh_lp', 'full', 'short'][3]
noise_path = '../data/processed/white_noise_sd-1.npz' noise_path = '../data/processed/white_noise_sd-1.npz'
save_path = '../data/inv/' save_path = '../data/inv/'
pad = np.array([0.1, 0.9]) pad = np.array([0.1, 0.9])
@@ -15,7 +15,8 @@ pad = np.array([0.1, 0.9])
stages = dict( stages = dict(
log_hp=['filt', 'env', 'log', 'inv'], log_hp=['filt', 'env', 'log', 'inv'],
thresh_lp=['inv', 'conv', 'feat'], thresh_lp=['inv', 'conv', 'feat'],
full=['raw', 'filt', 'env', 'log', 'inv', 'conv', 'feat'] full=['raw', 'filt', 'env', 'log', 'inv', 'conv', 'feat'],
short=['raw', 'filt', 'env', 'conv', 'feat']
)[mode] )[mode]
# PROCESSING: # PROCESSING:
@@ -49,6 +50,12 @@ elif mode == 'thresh_lp':
padtype='fixed', padlen=config['padlen']) padtype='fixed', padlen=config['padlen'])
elif mode == 'full': elif mode == 'full':
data = process_signal(config, stages, signal=starter, rate=config['rate'])[0] data = process_signal(config, stages, signal=starter, rate=config['rate'])[0]
elif mode == 'short':
data = process_signal(config, ['raw', 'filt', 'env'], signal=starter, rate=config['rate'])[0]
data['conv'] = convolve_kernels(data['env'], config['kernels'], config['k_specs'])
data['feat'] = sosfilter((data['conv'] > config['feat_thresh']).astype(float),
config['env_rate'], config['feat_fcut'], 'lp',
padtype='fixed', padlen=config['padlen'])
# Get measures: # Get measures:
measures = {} measures = {}