Added multi-thresh simulation to "full" and "short" (currently running).
Added complete "rect-lp" analysis except figure. Added multiple appendix figs. Overhauled normalization options across all condense scripts. Co-authored-by: Copilot <copilot@github.com>
This commit is contained in:
72
python/save_thresholds.py
Normal file
72
python/save_thresholds.py
Normal file
@@ -0,0 +1,72 @@
|
||||
import numpy as np
from thunderhopper.filters import sosfilter
from thunderhopper.model import convolve_kernels, process_signal
from thunderhopper.modeltools import load_data
from IPython import embed


## SETTINGS:

# General:
# Select which pipeline variant to run (index into the list below).
mode = ['thresh_lp', 'full', 'short', 'field'][3]

# Field recordings come with multiple channels; lab noise is single-channel.
if mode == 'field':
    noise_path = '../data/field/processed/noise/merged_noise.npz'
    channels = np.array([0, 1, 2, 3, 4, 5, 6, 7])
else:
    noise_path = '../data/processed/white_noise_sd-1.npz'
save_path = '../data/inv/'

# Stage of the stored representation at which processing resumes per mode:
start_stage = {
    'thresh_lp': 'inv',
    'full': 'raw',
    'short': 'raw',
    'field': 'raw',
}[mode]


# Analysis:
# Threshold factors in units of baseline SD. Built from two arange() calls so
# that 0.0 is included exactly (a single arange from -4 would only hit it
# approximately due to float accumulation).
factors = np.concatenate([np.arange(-4, 0, 0.01), np.arange(0, 4.01, 0.01)])
# Relative bounds of the buffered measurement segment (skip edge artifacts):
pad = np.array([0.1, 0.9])


# PROCESSING:

print(f'Fetching threshold data in {mode} mode...')

# Load pure-noise starter representation at the requested stage:
noise_data, config = load_data(noise_path, start_stage)
starter = noise_data[start_stage]

# Convert relative bounds into sample indices of the central segment.
# NOTE(review): in 'field' mode this uses the pre-ravel shape[0], while the
# kernel responses below are computed on the channel-concatenated signal —
# confirm that restricting SD estimation to this window is intended.
pad = (pad * starter.shape[0]).astype(int)
segment = np.arange(pad[0], pad[1])

# Scale lab noise to unit SD within the measurement segment:
if mode != 'field':
    starter /= starter[segment].std()

# Run (partial) pipeline up to the kernel-convolution stage:
print('Running pipeline...')
if mode == 'thresh_lp':
    conv = convolve_kernels(starter, config['kernels'], config['k_specs'])
elif mode == 'full':
    conv = process_signal(config, 'conv', signal=starter, rate=config['rate'])[0]['conv']
elif mode == 'short':
    # Shortcut variant: envelope, then high-pass "inverse" filter, then kernels.
    envelope = process_signal(config, 'env', signal=starter, rate=config['rate'])[0]['env']
    inverted = sosfilter(envelope, config['env_rate'], config['inv_fcut'], 'hp',
                         padtype='constant', padlen=config['padlen'])
    conv = convolve_kernels(inverted, config['kernels'], config['k_specs'])
elif mode == 'field':
    # Stack the selected channels end-to-end (column-major ravel) and run the
    # full pipeline on the concatenated signal:
    starter = starter[:, channels].ravel(order='F')
    conv = process_signal(config, 'conv', signal=starter, rate=config['rate'])[0]['conv']

# Baseline SD of each kernel response within the measurement segment:
sds = conv[segment, :].std(axis=0)

# For each threshold factor, fraction of samples exceeding factor * SD
# (mean of the boolean mask equals count / total):
percs = np.zeros((len(factors), conv.shape[1]))
for i, factor in enumerate(factors):
    print(f'Processing factor {i + 1} / {factors.size}...')
    percs[i] = (conv > (factor * sds)).mean(axis=0)

# Save results:
np.savez(save_path + f'{mode}/thresholds.npz', factors=factors, sds=sds, percs=percs)

print('Done.')
embed()
|
||||
Reference in New Issue
Block a user