Added loads of units in nearly all graphs.
Overhauled fig_invariance_full.pdf. Added some legends, somewhere.
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
import numpy as np
|
||||
from scipy.stats import gaussian_kde
|
||||
from itertools import product
|
||||
from thunderhopper.filetools import crop_paths
|
||||
from IPython import embed
|
||||
|
||||
@@ -16,6 +17,34 @@ def draw_noise_segment(noise, n):
|
||||
start = rng.integers(0, noise.shape[0] - n, endpoint=True)
|
||||
return np.take(noise, np.arange(start, start + n), axis=0)
|
||||
|
||||
def divide_by_zero(num, denom, replace=np.nan):
    """Element-wise division that maps non-finite results to `replace`.

    Divides `num` by `denom` with numpy's divide-by-zero and invalid-value
    warnings suppressed, then substitutes `replace` (default NaN) wherever
    the quotient is inf, -inf, or NaN (i.e. x/0 and 0/0).

    Parameters
    ----------
    num, denom : array_like
        Numerator and denominator; broadcast against each other.
    replace : scalar, optional
        Value written into non-finite positions of the quotient.

    Returns
    -------
    np.ndarray
        Float array of the broadcast shape (0-d for scalar inputs).
    """
    with np.errstate(divide='ignore', invalid='ignore'):
        result = np.true_divide(num, denom)
    # np.true_divide returns a 0-d scalar for scalar inputs; the original
    # masked assignment below then raised TypeError. Promote to an array so
    # both scalar and array inputs are handled.
    result = np.asarray(result)
    result[~np.isfinite(result)] = replace
    return result
|
||||
|
||||
def exclude_zero_scale(data, keys=None, combis=None):
    """Drop entries whose 'scales' value is not strictly positive.

    Filters ``data['scales']`` down to its positive entries and applies the
    same row selection (axis 0) to every array named in `keys` and to every
    array named ``'{k1}_{k2}'`` for each pairing produced by
    ``product(*combis)``. Mutates and returns `data`.
    """
    keep = np.nonzero(data['scales'] > 0)[0]
    data['scales'] = data['scales'][keep]

    # Collect every additional key that must be filtered along axis 0.
    targets = []
    if keys is not None:
        targets.extend(keys)
    if combis is not None:
        targets.extend(f'{first}_{second}'
                       for first, second in product(*combis))

    for name in targets:
        data[name] = data[name][keep, ...]
    return data
|
||||
|
||||
def reduce_kernel_set(data, inds, keys=None, combis=None):
    """Restrict the kernel axis (axis 1) of selected arrays to `inds`.

    For every array named in `keys`, and for every array named
    ``'{k1}_{k2}'`` for each pairing produced by ``product(*combis)``,
    replaces the array with its ``[:, inds, ...]`` selection.
    Mutates and returns `data`.
    """
    # Gather all key names to be reduced, then apply one uniform slice.
    targets = list(keys) if keys is not None else []
    if combis is not None:
        targets += [f'{first}_{second}'
                    for first, second in product(*combis)]

    for name in targets:
        data[name] = data[name][:, inds, ...]
    return data
|
||||
|
||||
def sort_files_by_rec(paths, sources=['BM04', 'BM93', 'DJN', 'GBC', 'FTN']):
|
||||
# Separate by source:
|
||||
sorted_paths = {}
|
||||
|
||||
Reference in New Issue
Block a user