Commit da1bc387b4 ("08.09")
Parent: fb14104325
@@ -3,68 +3,74 @@ import numpy as np
 import os
 import nix_helpers as nh
 from IPython import embed
+from matplotlib.mlab import specgram
 #from tqdm import tqdm
 from jar_functions import parse_stimuli_dat
 from jar_functions import norm_function_eigen
 from jar_functions import mean_noise_cut_eigen
 from jar_functions import get_time_zeros
+from jar_functions import import_data_eigen
+from scipy.signal import savgol_filter
 
-base_path = 'D:\\jar_project\\JAR\\eigenmannia\\2015eigen16'
+base_path = 'D:\\jar_project\\JAR\\eigenmannia'
 
-datasets = ['2020-07-08-aa', '2020-07-08-as']
+identifier = ['2013eigen13', '2015eigen16', '2015eigen17', '2015eigen19', '2020eigen22', '2020eigen32']
 
 response = []
 deltaf = []
-for dataset in os.listdir(base_path):
-    datapath = os.path.join(base_path, dataset, '%s.nix' % dataset)
-    print(datapath)
-    stimuli_dat = os.path.join(base_path, dataset, 'manualjar-eod.dat')
-
-    df, duration = parse_stimuli_dat(stimuli_dat)
-    dur = int(duration[0][0:2])
-    print(df)
-
-    time, eod = nh.read_eod(datapath, duration = 2000)
-    zeropoints = get_time_zeros(time, eod, threshold = np.max(eod)*0.1)
-    frequencies = 1 / np.diff(zeropoints)
-
-    # norm, base, jar = norm_function_eigen(frequencies, zeropoints[:-1], onset_point=(dur - dur)+10, offset_point=dur+10) # dm-dm only works if onset = 0 sec
-    # cf, ct = mean_noise_cut_eigen(frequencies, zeropoints[:-1], n=200)
-
-    window = np.ones(101) / 101
-    freq = np.convolve(frequencies, window, mode='same')
-
-    ''' # plt.plot(ct, cf)
-    plt.plot(zeropoints[:-1], freq)
-    plt.ylabel('EOD_frequency [Hz]')
-    plt.xlabel('time [s]')
-    plt.xlim(1, 140)
-    plt.ylim(np.median(freq) - 10, np.median(freq) + 10)
-    plt.title('JAR_deltaf_%s' % deltaf)
-    plt.show()
-    '''
-
-    j = []
-    for idx, i in enumerate(zeropoints):
-        if i > 20 and i < 80:
-            j.append(freq[idx])
-
-    b = []
-    for idx, i in enumerate(zeropoints):
-        if i < 20:
-            b.append(freq[idx])
-
-    r = np.median(j) - np.median(b)
-    response.append(r)
-    deltaf.append(df[0])
-
-res_df1 = sorted(zip(deltaf[:32],response[:32]))
-res_df2 = sorted(zip(deltaf[33:],response[33:]))
-res_df = sorted(zip(deltaf,response))
-np.save('res_df1', res_df1)
-np.save('res_df2', res_df2)
-np.save('res_df', res_df)
+for ID in identifier:
+    for dataset in os.listdir(os.path.join(base_path, ID)):
+        datapath = os.path.join(base_path, ID, dataset, '%s.nix' % dataset)
+        print(datapath)
+        stimuli_dat = os.path.join(base_path, ID, dataset, 'manualjar-eod.dat')
+
+        df, duration = parse_stimuli_dat(stimuli_dat)
+        dur = int(duration[0][0:2])
+        print(df)
+
+        # time, eod = nh.read_eod(datapath, duration = 2000)  # instead of import_data with the ManualJAR tag - onset should then really be at 10 sec
+        data, pre_dat, dt = import_data_eigen(datapath)
+
+        nfft = 2**17
+        spec, freqs, times = specgram(data[0], Fs=1 / dt, detrend='mean', NFFT=nfft, noverlap=nfft * 0.95)
+        dbspec = 10.0 * np.log10(spec) # in dB
+        power = dbspec[:, 50]
+
+        fish_p = power[(freqs > 200) & (freqs < 1000)]
+        fish_f = freqs[(freqs > 200) & (freqs < 1000)]
+
+        index = np.argmax(fish_p)
+        eodf = fish_f[index]
+        eodf4 = eodf * 4
+
+        lim0 = eodf4-20
+        lim1 = eodf4+20
+
+        df = freqs[1] - freqs[0]
+        ix0 = int(np.floor(lim0/df)) # back to index
+        ix1 = int(np.ceil(lim1/df)) # back to index
+        spec4 = dbspec[ix0:ix1, :]
+        freq4 = freqs[ix0:ix1]
+        jar4 = freq4[np.argmax(spec4, axis=0)] # all freqs at max specs over axis 0
+        jar = jar4 / 4
+        jm = jar4 - np.mean(jar4) # data we take
+        cut_times = times[:len(jar4)]
+
+        #plt.imshow(spec4, cmap='jet', origin='lower', extent=(times[0], times[-1], lim0, lim1), aspect='auto', vmin=-80, vmax=-10)
+        plt.plot(cut_times, jm)
+        plt.plot(cut_times, savgol)
+        #plt.ylim(lim0, lim1)
+        plt.show()
+
+        # not necessarily filter, simply take the median/mean as below
+
+'''
+res_df = sorted(zip(deltaf,response))
+
+np.save('res_df_%s' %ID, res_df)
+'''
+
+# problem: the raw data (data, pre_data) cannot be savgol-filtered because of their 1D-array structure
+# I only get them via specgram, as frequency over time, which can no longer be savgol-filtered
+# however, I could still simply read the response out of jar4, although that would then be less filtered
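Note: the hunk above swaps the zero-crossing frequency estimate for a spectrogram-based one: compute a spectrogram of the recorded EOD, restrict it to a band around four times the fish's EODf, and take the argmax frequency in each time bin (the jar4 trace). Below is a minimal sketch of that ridge extraction on a synthetic signal; scipy.signal.spectrogram stands in for matplotlib.mlab.specgram, and the sampling rate, test frequency and band edges are made-up values.

import numpy as np
from scipy.signal import spectrogram  # stand-in for matplotlib.mlab.specgram used in the script

fs = 20000.0                                   # assumed sampling rate (Hz)
t = np.arange(0.0, 10.0, 1.0 / fs)
f0 = 600.0                                     # assumed carrier frequency, roughly in the 4*EODf band
finst = np.where(t < 5.0, f0, f0 + 5.0)        # toy 5 Hz frequency step as a stand-in "JAR"
x = np.sin(2.0 * np.pi * np.cumsum(finst) / fs)

nfft = 2 ** 14
freqs, times, spec = spectrogram(x, fs=fs, nperseg=nfft, noverlap=int(nfft * 0.95))
dbspec = 10.0 * np.log10(spec + 1e-20)         # power in dB, guarded against log(0)

band = (freqs > 500) & (freqs < 700)           # band-limit around the expected frequency
ridge = freqs[band][np.argmax(dbspec[band, :], axis=0)]  # strongest frequency per time bin
print(ridge[:3], ridge[-3:])                   # ~600 Hz before the step, ~605 Hz after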
gain_fit.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+from scipy import signal
+import matplotlib.pyplot as plt
+import numpy as np
+import pylab
+from IPython import embed
+from scipy.optimize import curve_fit
+from jar_functions import gain_curve_fit
+
+identifier = ['2018lepto1',
+              '2018lepto4',
+              '2018lepto5',
+              '2018lepto76',
+              '2018lepto98',
+              '2019lepto03',
+              '2019lepto24',
+              '2019lepto27',
+              '2019lepto30',
+              '2020lepto04',
+              '2020lepto06',
+              '2020lepto16',
+              '2020lepto19',
+              '2020lepto20'
+              ]
+
+for ID in identifier:
+    amf = np.load('amf_%s.npy' %ID)
+    gain = np.load('gain_%s.npy' %ID)
+    rms = np.load('rms_%s.npy' %ID)
+    thresh = np.load('thresh_%s.npy' % ID)
+
+    idx_arr = (rms < thresh) | (rms < np.mean(rms))
+
+    embed()
+    sinv, sinc = curve_fit(gain_curve_fit, amf[idx_arr], gain[idx_arr])
+    print(sinv[0])
+    f_cutoff = 1 / (2*np.pi*sinv[0])
+    print(f_cutoff)
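The new gain_fit.py fits the measured gain curve with a first-order low-pass and converts the fitted time constant into a cutoff frequency, f_cutoff = 1 / (2*pi*tau). Below is a self-contained sketch of such a fit on fabricated data; note that scipy.optimize.curve_fit calls the model as f(xdata, *params), so in this sketch the AM frequency is the model's first argument, and all numeric values are assumptions.

import numpy as np
from scipy.optimize import curve_fit

def lowpass_gain(amf, tau, alpha):
    # first-order low-pass magnitude: alpha / sqrt(1 + (2*pi*f*tau)^2)
    return alpha / np.sqrt(1.0 + (2.0 * np.pi * amf * tau) ** 2)

rng = np.random.default_rng(0)
amf = np.array([0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1.0])  # assumed AM frequencies (Hz)
gain = lowpass_gain(amf, 2.0, 0.8) * (1.0 + 0.05 * rng.standard_normal(len(amf)))  # fabricated gains

popt, pcov = curve_fit(lowpass_gain, amf, gain, p0=[1.0, 1.0])
tau_fit, alpha_fit = popt
f_cutoff = 1.0 / (2.0 * np.pi * tau_fit)       # cutoff frequency from the fitted time constant
print(tau_fit, alpha_fit, f_cutoff)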
@@ -13,6 +13,10 @@ def sin_response(t, f, p, A):
     r_sin = A*np.sin(2*np.pi*t*f + p)
     return r_sin
 
+def gain_curve_fit(tau, alpha, amf):
+    gain = alpha / np.sqrt(1 + (2*np.pi*amf*tau)**2)
+    return gain
+
 def parse_dataset(dataset_name):
     assert(os.path.exists(dataset_name)) #see if data exists
     f = open(dataset_name, 'r') #open data we gave in
@@ -265,6 +269,25 @@ def import_data(dataset):
     return dat, pre_dat, dt
     #nf.close()
 
+def import_data_eigen(dataset):
+    import nixio as nix
+    nf = nix.File.open(dataset, nix.FileMode.ReadOnly)
+    b = nf.blocks[0]
+    eod = b.data_arrays['EOD-1']
+    dt = eod.dimensions[0].sampling_interval
+    di = int(10.0/dt)
+    t = b.tags['ManualJAR_1']
+    #amfreq = t.metadata['RePro-Info']['settings']['amfreq']
+    dat = []
+    pre_dat = []
+    for mt in b.multi_tags:
+        data = mt.retrieve_data(0, 'EOD-1')[:]  # data[0]
+        dat.append(data)
+        i0 = int(mt.positions[0][0]/dt)
+        pre_data = eod[i0-di:i0]
+        pre_dat.append(pre_data)
+    return dat, pre_dat, dt
+
 def import_amfreq(dataset):
     import nixio as nix
     nf = nix.File.open(dataset, nix.FileMode.ReadOnly)
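The import_data_eigen helper added above cuts out the 10 s immediately before each ManualJAR multi_tag by converting times into sample indices via the sampling interval. Below is a toy illustration of that index arithmetic; dt, the onset time and the random stand-in array are assumptions.

import numpy as np

dt = 1.0 / 20000.0                       # assumed sampling interval of the 'EOD-1' trace (s)
eod = np.random.randn(int(60.0 / dt))    # stand-in for the recorded EOD data array
onset_time = 25.3                        # assumed multi_tag position (s)

di = int(10.0 / dt)                      # 10 s expressed in samples
i0 = int(onset_time / dt)                # stimulus onset expressed as a sample index
pre_data = eod[i0 - di:i0]               # the 10 s immediately before onset
print(len(pre_data) * dt)                # ~10.0 s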
notes
@@ -1,13 +1,15 @@
-- kick out data with too-high RMS: possibly kick out only a single trace if only its RMS is too high
-- look at 2019lepto27/30 again, a gain point is missing
-- fit for the gain curves: via tau = 1 / (cutoff_f * 2 * pi) --> how do I determine cutoff_f?
-- look at Natalie's Eigenmannia data with + / - delta f to see whether there are differences
-- run different nffts on another computer, possibly to see differences
++ fit for the gain curves: via tau = 1 / (cutoff_f * 2 * pi) --> how do I determine cutoff_f?
++ look at Natalie's Eigenmannia data with + / - delta f to see whether there are differences
 
 long term:
-- separate file with a script to display the finished data, use the fit code for data processing alone
+- separate file with a script to display the finished data, use the fit code only for data processing
 - presentation: specgram --> extracted JAR trace on top of it --> filtering --> fit and data shown together, all of that for different frequencies
-- add size / possibly weight, extract the EOD base frequency and assign it
+- add size / possibly weight, extract the EOD base frequency and assign it --> how can this be done quickly and not manually?
 - make a list of fish properties (dominance/size), measurement variables (temp/conductivity), EODf and possibly amplitude, so these can be plotted
+- run different nffts on another computer, possibly to see differences
 
-- phase in degree
+- phase in degree: phase % (2pi) - modulo 2pi
+
+
+- kick out data with too-high RMS: possibly kick out only a single trace if only its RMS is too high
+- 2019lepto27/30: 27 - 0.05Hz (7-27-af, first dat with len(dat)=1), 30 - 0.001Hz (7-30-ah with 0.005 instead of the intended 0.001Hz --> missing)
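One of the notes above asks for the phase in degrees via modulo 2*pi. A one-line sketch of that conversion (the example phase value is made up):

import numpy as np

phase = -7.5                                  # assumed fitted phase in radians
phase_deg = np.degrees(phase % (2 * np.pi))   # wrap into [0, 2*pi), then convert to degrees
print(phase_deg)                              # ~290.3 degrees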
@@ -4,47 +4,48 @@ import os
 import nix_helpers as nh
 from IPython import embed
 
-res_df1 = np.load('res_df1.npy')
-res_df2 = np.load('res_df2.npy')
-res_df = np.load('res_df.npy')
-
-mres = []
-mdf = []
-
-currf = None
-idxlist = []
-
-for i, d in enumerate(res_df):
-    if currf is None or currf == d[0]:
-        currf = d[0]
-        idxlist.append(i)
-
-    else: # currf != f
-        meanres = [] # lists to make mean of
-        meandf = []
-        for x in idxlist:
-            meanres.append(res_df[x][1])
-            meandf.append(res_df[x][0])
-        meanedres = np.mean(meanres)
-        meaneddf = np.mean(meandf)
-        mres.append(meanedres)
-        mdf.append(meaneddf)
-        currf = d[0] # set back for next loop
-        idxlist = [i]
-
-meanres = [] # lists to make mean of
-meandf = []
-for y in idxlist:
-    meanres.append(res_df[y][1])
-    meandf.append(res_df[y][0])
-meanedres = np.mean(meanres)
-meaneddf = np.mean(meandf)
-mres.append(meanedres)
-mdf.append(meaneddf)
-
-plt.plot(mdf, mres, 'o')
-plt.xlabel('deltaf [Hz]')
-plt.ylabel('JAR_respones [Hz]')
-plt.axhline(0, color='grey', lw =1)
-plt.axvline(0, color='grey', lw = 1)
-plt.title('JAR_response_to_deltaf_eigenmannia')
-plt.show()
+identifier = ['2013eigen13', '2015eigen16', '2015eigen17', '2015eigen19', '2020eigen22', '2020eigen32']
+
+for ID in identifier:
+    res_df = np.load('res_df_%s.npy' %ID)
+
+    mres = []
+    mdf = []
+
+    currf = None
+    idxlist = []
+
+    for i, d in enumerate(res_df):
+        if currf is None or currf == d[0]:
+            currf = d[0]
+            idxlist.append(i)
+
+        else: # currf != f
+            meanres = [] # lists to make mean of
+            meandf = []
+            for x in idxlist:
+                meanres.append(res_df[x][1])
+                meandf.append(res_df[x][0])
+            meanedres = np.mean(meanres)
+            meaneddf = np.mean(meandf)
+            mres.append(meanedres)
+            mdf.append(meaneddf)
+            currf = d[0] # set back for next loop
+            idxlist = [i]
+
+    meanres = [] # lists to make mean of
+    meandf = []
+    for y in idxlist:
+        meanres.append(res_df[y][1])
+        meandf.append(res_df[y][0])
+    meanedres = np.mean(meanres)
+    meaneddf = np.mean(meandf)
+    mres.append(meanedres)
+    mdf.append(meaneddf)
+
+    plt.plot(mdf, mres, 'o')
+    plt.xlabel('deltaf [Hz]')
+    plt.ylabel('JAR_respones [Hz]')
+    plt.axhline(0, color='grey', lw =1)
+    plt.axvline(0, color='grey', lw = 1)
+    plt.title('JAR_response_to_deltaf_%s' %ID)
+    plt.show()
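The loop above averages all responses that share the same delta f by tracking index lists by hand. An equivalent, more compact sketch using a dictionary keyed by delta f; the res_df pairs here are fabricated example data.

import numpy as np

# fabricated (deltaf, response) pairs, already sorted by deltaf as in the script
res_df = [(-10, -3.1), (-10, -2.8), (-5, -1.0), (-5, -1.4), (5, 1.2), (10, 2.9), (10, 3.3)]

groups = {}
for deltaf, response in res_df:
    groups.setdefault(deltaf, []).append(response)    # collect responses per deltaf

mdf = sorted(groups)                                   # unique deltaf values
mres = [np.mean(groups[d]) for d in mdf]               # one mean response per deltaf
print(mdf, mres)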
@@ -6,24 +6,25 @@ from IPython import embed
 from scipy.optimize import curve_fit
 from jar_functions import sin_response
 from jar_functions import mean_noise_cut
+from jar_functions import gain_curve_fit
 
 def take_second(elem): # function for taking the names out of files
     return elem[1]
 
-identifier = [#'2018lepto1',
-              #'2018lepto4',
-              #'2018lepto5',
-              #'2018lepto76',
-              #'2018lepto98',
-              #'2019lepto03',
-              #'2019lepto24',
-              #'2019lepto27',
+identifier = ['2018lepto1',
+              '2018lepto4',
+              '2018lepto5',
+              '2018lepto76',
+              '2018lepto98',
+              '2019lepto03',
+              '2019lepto24',
+              '2019lepto27',
               '2019lepto30',
-              #'2020lepto04',
-              #'2020lepto06',
-              #'2020lepto16',
-              #'2020lepto19',
-              #'2020lepto20'
+              '2020lepto04',
+              '2020lepto06',
+              '2020lepto16',
+              '2020lepto19',
+              '2020lepto20'
               ]
 for ident in identifier:
 
@@ -54,7 +55,7 @@ for ident in identifier:
     dt = time[1] - time[0]
 
     n = int(1/float(d[1])/dt)
-    cutf = mean_noise_cut(jm, time, n = n)
+    cutf = mean_noise_cut(jm, n = n)
     cutt = time
     #plt.plot(time, jm-cutf, label='cut amfreq')
     #plt.plot(time, jm, label='spec')
@@ -157,9 +158,10 @@ for ident in identifier:
     ax1.set_xlabel('envelope_frequency [Hz]')
     ax1.set_ylabel('RMS [Hz]')
     plt.legend()
-    pylab.show()
+    #pylab.show()
 
-    np.save('gain_%s' %ident, mgain_arr[idx_arr])
-    np.save('amf%s' %ident, amfreq_arr[idx_arr])
+    #np.save('gain_%s' %ident, mgain_arr[idx_arr])
+    #np.save('amf_%s' %ident, amfreq_arr[idx_arr])
+    np.save('rms_%s' %ident, rootmeansquare_arr)
+    np.save('thresh_%s' %ident, threshold_arr)
 embed()
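The hunk above drops the time argument from mean_noise_cut and sets the window length to one period of the AM frequency in samples, n = int(1/amfreq/dt). The repository's mean_noise_cut itself is not shown here; assuming it is essentially a running average, a sketch of the idea:

import numpy as np

dt = 0.01                      # assumed sample spacing of the extracted frequency trace (s)
amfreq = 0.1                   # assumed AM frequency (Hz)
t = np.arange(0.0, 100.0, dt)
jm = np.sin(2 * np.pi * amfreq * t) + 0.3 * np.random.randn(len(t))   # toy modulated trace

n = int(1 / amfreq / dt)       # window of one AM period, as in the script
kernel = np.ones(n) / n
cutf = np.convolve(jm, kernel, mode='same')    # running mean over one AM period
residual = jm - cutf                           # analogous to the 'cut amfreq' trace that is plotted
print(n, round(residual.std(), 2))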
@@ -8,7 +8,7 @@ import IPython
 import numpy as np
 #import DataLoader as dl
 from IPython import embed
-from tqdm import tqdm
+#from tqdm import tqdm
 from scipy.optimize import curve_fit
 from jar_functions import step_response
 from jar_functions import sin_response
@@ -22,7 +22,7 @@ from jar_functions import average
 from jar_functions import import_data
 from jar_functions import import_amfreq
 
-base_path = 'D:\\jar_project\\JAR\\sin\\2019lepto30'
+base_path = 'D:\\jar_project\\JAR\\sin\\2019lepto27'
 
 time_all = []
 freq_all = []
@@ -31,27 +31,23 @@ amfrequencies = []
 gains = []
 files = []
 
-for idx, dataset in tqdm(enumerate(os.listdir(base_path))):
+for idx, dataset in enumerate(os.listdir(base_path)):
     if dataset == 'prerecordings':
         continue
     datapath = os.path.join(base_path, dataset, '%s.nix' % dataset)
-    print(datapath)
+    #print(datapath)
 
     data, pre_data, dt = import_data(datapath)
+    embed()
     nfft = 2**17
 
     for d, dat in enumerate(data):
         if len(dat) == 1:
-            continue
-            '''if len(data) > 0:
             print(datapath)
 
-            amfreq = import_amfreq(datapath)
-            print(amfreq)
-            continue
-            else:
             embed()
-            '''
+        else:
+            continue
 
         file_name = []
         ID = []
@@ -104,10 +100,10 @@ for idx, dataset in tqdm(enumerate(os.listdir(base_path))):
         # plt.imshow(spec4, cmap='jet', origin='lower', extent=(times[0], times[-1], lim0, lim1), aspect='auto', vmin=-80, vmax=-10)
 
         # save data
-        np.save('%s time' % file_name, cut_times)
-        np.save('%s' % file_name, jar4)
+        #np.save('%s time' % file_name, cut_times)
+        #np.save('%s' % file_name, jar4)
 
         # save filenames for this fish
-        np.save('%s files' %ID[0], files)
+        #np.save('%s files' %ID[0], files)
 
 embed()