fix output

This commit is contained in:
Jan Grewe 2020-07-28 14:29:57 +02:00
parent b01270bb9b
commit 4326148b3f

View File

@ -3,9 +3,10 @@ import datajoint as dj
import nixio as nix import nixio as nix
import os import os
import glob import glob
import socket
from fishbook.backend.util import read_info_file, read_dataset_info, read_stimuli_file from fishbook.backend.util import read_info_file, read_dataset_info, read_stimuli_file
from fishbook.backend.util import find_key_recursive, deep_get, find_mtags_for_tag from fishbook.backend.util import find_key_recursive, deep_get, find_mtags_for_tag
from fishbook.backend.util import mtag_settings_to_yaml, nix_metadata_to_yaml from fishbook.backend.util import mtag_settings_to_yaml, nix_metadata_to_yaml, progress
import uuid import uuid
import yaml import yaml
@ -18,6 +19,7 @@ class Datasets(dj.Manual):
dataset_id : varchar(256) dataset_id : varchar(256)
---- ----
data_source : varchar(512) # path to the dataset data_source : varchar(512) # path to the dataset
data_host : varchar(512) # fully qualified domain name
experimenter : varchar(512) experimenter : varchar(512)
setup : varchar(128) setup : varchar(128)
recording_date : date recording_date : date
@ -32,8 +34,8 @@ class Datasets(dj.Manual):
if id is not None: if id is not None:
d = dict((Datasets() & {"dataset_id": id}).fetch1()) d = dict((Datasets() & {"dataset_id": id}).fetch1())
return d return d
return dict(dataset_id=None, data_source="", experimenter="", setup="", recording_date=None, return dict(dataset_id=None, data_source="", data_host="", experimenter="", setup="",
quality="", comment="", duration=0.0, has_nix=False) recording_date=None, quality="", comment="", duration=0.0, has_nix=False)
@staticmethod @staticmethod
def get_nix_file(key): def get_nix_file(key):
@ -230,6 +232,7 @@ def populate_datasets(data_path, update=False):
inserts = Datasets.get_template_tuple() inserts = Datasets.get_template_tuple()
inserts["dataset_id"] = dset_name inserts["dataset_id"] = dset_name
inserts["data_source"] = data_path inserts["data_source"] = data_path
inserts["data_host"] = socket.getfqdn()
inserts["experimenter"] = experimenter inserts["experimenter"] = experimenter
inserts["recording_date"] = rec_date inserts["recording_date"] = rec_date
inserts["quality"] = quality if not isinstance(quality, dict) else "" inserts["quality"] = quality if not isinstance(quality, dict) else ""
@ -356,7 +359,7 @@ def populate_cells(data_path):
Cells.insert1(cell_props, skip_duplicates=True) Cells.insert1(cell_props, skip_duplicates=True)
# multi mach entry # multi match entry
mm = dict(dataset_id=dataset["dataset_id"], cell_id=cell_props["cell_id"]) mm = dict(dataset_id=dataset["dataset_id"], cell_id=cell_props["cell_id"])
CellDatasetMap.insert1(mm, skip_duplicates=True) CellDatasetMap.insert1(mm, skip_duplicates=True)
@ -372,12 +375,13 @@ def scan_nix_file_for_repros(dataset):
f = nix.File.open(nf, nix.FileMode.ReadOnly) f = nix.File.open(nf, nix.FileMode.ReadOnly)
b = f.blocks[0] b = f.blocks[0]
repro_runs = [t for t in b.tags if "relacs.repro_run" in t.type] repro_runs = [t for t in b.tags if "relacs.repro_run" in t.type]
for t in repro_runs: total = len(repro_runs)
for i, t in enumerate(repro_runs):
progress(i+1, total, "Scanning repro run %s" % t.name)
rs = t.metadata.find_sections(lambda x: "Run" in x.props) rs = t.metadata.find_sections(lambda x: "Run" in x.props)
if len(rs) == 0: if len(rs) == 0:
continue continue
rs = rs[0] rs = rs[0]
print("\t\t%s" % rs["RePro"])
rp = Repros.get_template_tuple() rp = Repros.get_template_tuple()
rp["run"] = rs["Run"] rp["run"] = rs["Run"]
@ -420,6 +424,7 @@ def scan_nix_file_for_repros(dataset):
stim["stimulus_name"] = mt.name stim["stimulus_name"] = mt.name
stim.update(repro) stim.update(repro)
Stimuli.insert1(stim, skip_duplicates=True) Stimuli.insert1(stim, skip_duplicates=True)
print("\n")
f.close() f.close()
f = None f = None
@ -429,6 +434,7 @@ def scan_folder_for_repros(dataset):
repro_settings, stim_indices = read_stimuli_file(dataset["data_source"]) repro_settings, stim_indices = read_stimuli_file(dataset["data_source"])
repro_counts = {} repro_counts = {}
cell_id = (Cells * CellDatasetMap * (Datasets & "dataset_id = '%s'" % dataset["dataset_id"])).fetch("cell_id", limit=1)[0] cell_id = (Cells * CellDatasetMap * (Datasets & "dataset_id = '%s'" % dataset["dataset_id"])).fetch("cell_id", limit=1)[0]
total = len(repro_settings)
for i, (rs, si) in enumerate(zip(repro_settings, stim_indices)): for i, (rs, si) in enumerate(zip(repro_settings, stim_indices)):
rp = Repros.get_template_tuple() rp = Repros.get_template_tuple()
path = [] path = []
@ -442,8 +448,8 @@ def scan_folder_for_repros(dataset):
path = [] path = []
if not find_key_recursive(rs, "repro", path): if not find_key_recursive(rs, "repro", path):
find_key_recursive(rs, "RePro", path) find_key_recursive(rs, "RePro", path)
print("\t\t %s" % deep_get(rs, path, "None"))
rp["repro_name"] = deep_get(rs, path, "None") rp["repro_name"] = deep_get(rs, path, "None")
progress(i+1, total, "scanning repro %s" % rp["repro_name"])
path = [] path = []
if rp["repro_name"] in repro_counts.keys(): if rp["repro_name"] in repro_counts.keys():
@ -489,6 +495,7 @@ def scan_folder_for_repros(dataset):
stim["stimulus_name"] = "" stim["stimulus_name"] = ""
stim.update(repro) stim.update(repro)
Stimuli.insert1(stim, skip_duplicates=True) Stimuli.insert1(stim, skip_duplicates=True)
print("\n")
def populate_repros(data_path): def populate_repros(data_path):
@ -528,6 +535,6 @@ if __name__ == "__main__":
# data_dir = "../high_freq_chirps/data" # data_dir = "../high_freq_chirps/data"
# drop_tables() # drop_tables()
# datasets = glob.glob("/Users/jan/zwischenlager/2012-*")2010-06-21-ac/info.dat # datasets = glob.glob("/Users/jan/zwischenlager/2012-*")2010-06-21-ac/info.dat
datasets = glob.glob(os.path.join(data_dir, '/data/apteronotus/2019-10-2*')) datasets = glob.glob(os.path.join(data_dir, '/data/apteronotus/2010-06-18*'))
populate(datasets, update=False) populate(datasets, update=False)