Refactor threshold analysis script: gate noise handling behind `add_noise` (drawing segments via `draw_noise_segment` and computing per-file noise-based thresholds), rename `save_snippets` to `save_detailed` keyed on an example file, increase `scales` resolution (1000 points, explicit 0 entry), and move `measure_inv` under the detailed-storage path.

This commit is contained in:
j-hartling
2026-04-17 17:19:30 +02:00
parent 36ac504efa
commit 3b4b7f2161
40 changed files with 2067 additions and 672 deletions

View File

@@ -5,21 +5,23 @@ from thunderhopper.filetools import search_files, crop_paths
from thunderhopper.filters import sosfilter
from thunderhopper.filtertools import find_kern_specs
from thunderhopper.model import convolve_kernels
from misc_functions import draw_noise_segment
from IPython import embed
# GENERAL SETTINGS:
target = ['Omocestus_rufipes', '*'][0]
data_paths = search_files(target, excl='noise', dir='../data/processed/')
example_file = 'Omocestus_rufipes_DJN_32-40s724ms-48s779ms'
search_target = ['*', example_file][0]
data_paths = search_files(search_target, excl='noise', dir='../data/processed/')
noise_path = '../data/processed/white_noise_sd-1.npz'
ref_path = '../data/inv/thresh_lp/ref_measures.npz'
save_path = '../data/inv/thresh_lp/'
# ANALYSIS SETTINGS:
add_noise = False
save_snippets = add_noise and (target == 'Omocestus_rufipes')
plot_results = False
example_scales = np.array([0, 1, 10, 30, 100])
scales = np.geomspace(0.01, 10000, 100)
scales = np.unique(np.concatenate((scales, example_scales)))
scales = np.geomspace(0.01, 10000, 1000)
scales = np.unique(np.concatenate(([0], scales, example_scales)))
thresh_rel = np.array([0.5, 1, 3])
kern_specs = np.array([
[1, 0.008],
@@ -28,12 +30,15 @@ kern_specs = np.array([
])
# PREPARATION:
pure_noise = np.load(noise_path)['inv']
if add_noise:
pure_noise = np.load(noise_path)['inv']
# Define kernel-specific threshold values based on pure-noise response SD:
thresh_abs = np.load(ref_path)['conv'][None, :] * thresh_rel[:, None]
# EXECUTION:
for data_path, name in zip(data_paths, crop_paths(data_paths)):
save_detailed = example_file in name
print(f'Processing {name}')
save_name = save_path + name
# Get adapted envelope (prior to convolution):
data, config = load_data(data_path, files='inv')
@@ -44,28 +49,25 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
start, end = data['songs_0'].ravel()
segment = (time >= start) & (time <= end)
# Normalize song component:
song /= song[segment].std()
# Reduce to kernel subset:
kern_inds = find_kern_specs(config['k_specs'], kerns=kern_specs)
config['kernels'] = config['kernels'][:, kern_inds]
config['k_specs'] = config['k_specs'][kern_inds, :]
config['k_props'] = [config['k_props'][i] for i in kern_inds]
# Get normalized noise component:
noise = pure_noise[:song.shape[0]]
if add_noise:
# Get normalized noise component:
noise = draw_noise_segment(pure_noise, song.shape[0])
noise /= noise[segment].std()
# Normalize both components:
song /= song[segment].std()
noise /= noise[segment].std()
# Define kernel-specific threshold values based on pure-noise response SD:
ref_conv = convolve_kernels(noise, config['kernels'], config['k_specs'])
thresh_abs = ref_conv[segment, :].std(axis=0, keepdims=True) * thresh_rel[:, None]
# Prepare measure storage:
measure_inv = np.zeros((scales.size,), dtype=float)
# Prepare storage:
measure_feat = np.zeros((scales.size, kern_specs.shape[0], thresh_rel.size), dtype=float)
if save_snippets:
# Prepare snippet storage:
if save_detailed:
# Prepare optional storage:
measure_inv = np.zeros((scales.size,), dtype=float)
snip_inv = np.zeros((song.size, example_scales.size), dtype=float)
shape = (song.size, kern_specs.shape[0], example_scales.size, thresh_rel.size)
snip_conv = np.zeros(shape[:-1], dtype=float)
@@ -82,20 +84,21 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
# Add noise:
scaled_song += noise
# Log input intensity measure:
measure_inv[i] = scaled_song[segment].std()
if save_detailed:
# Log input intensity measure:
measure_inv[i] = scaled_song[segment].std()
# Process mixture:
scaled_conv = convolve_kernels(scaled_song, config['kernels'], config['k_specs'])
# Log threshold-independent snippet data:
if save_snippets and scale in example_scales:
if save_detailed and scale in example_scales:
save_ind = np.nonzero(example_scales == scale)[0][0]
snip_inv[:, save_ind] = scaled_song
snip_conv[:, :, save_ind] = scaled_conv
# Execute piecewise again:
for j, thresholds in enumerate(thresh_abs):
for j, thresholds in enumerate(thresh_abs[:, kern_inds]):
# Process mixture further:
scaled_bi = (scaled_conv > thresholds).astype(float)
@@ -103,11 +106,11 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
padtype='fixed', padlen=config['padlen'])
# Log threshold-dependent snippet data:
if save_snippets and scale in example_scales:
if save_detailed and scale in example_scales:
snip_bi[:, :, save_ind, j] = scaled_bi
snip_feat[:, :, save_ind, j] = scaled_feat
# Log intensity measure:
# Log output intensity measure:
measure_feat[i, :, j] = scaled_feat[segment, :].mean(axis=0)
# Overview plot:
@@ -133,18 +136,19 @@ for data_path, name in zip(data_paths, crop_paths(data_paths)):
data = dict(
scales=scales,
example_scales=example_scales,
measure_inv=measure_inv,
measure_feat=measure_feat,
thresh_rel=thresh_rel,
thresh_abs=thresh_abs,
)
if save_snippets:
if save_detailed:
data.update(dict(
measure_inv=measure_inv,
snip_inv=snip_inv,
snip_conv=snip_conv,
snip_bi=snip_bi,
snip_feat=snip_feat,
))
save_name = save_path + name
if add_noise:
save_name += '_noise'
else: