forked from jgrewe/fishbook
fixes: add an update flag and duplicate check to populate_datasets, fall back to an "unspecified_<dataset>" subject id when the Identifier is missing or empty, store dict-valued quality/comment fields as empty strings, indent log output, and adjust the dataset path glob
commit a72087d4a0
parent 8dd0bf7839

database.py (50 changed lines)
@@ -214,7 +214,7 @@ class Stimulus(dj.Manual):
         return tup


-def populate_datasets(data_path):
+def populate_datasets(data_path, update=False):
     print("Importing dataset %s" % data_path)
     if not os.path.exists(data_path):
         return
@@ -228,9 +228,12 @@ def populate_datasets(data_path):
     inserts["data_source"] = data_path
     inserts["experimenter"] = experimenter
     inserts["recording_date"] = rec_date
-    inserts["quality"] = quality
-    inserts["comment"] = comment
+    inserts["quality"] = quality if not isinstance(quality, dict) else ""
+    inserts["comment"] = comment if not isinstance(comment, dict) else ""
     inserts["has_nix"] = has_nix
+    if len(Dataset & inserts) > 0 and not update:
+        print('\t\t %s is already in database!' % dset_name)
+        return False
     Dataset().insert1(inserts, skip_duplicates=True)
     return True

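The new update flag only matters for datasets that are already present: the check above returns False and skips re-importing them unless the caller opts in. A caller-side sketch, assuming the script's own glob/os imports and the data_dir defined in __main__; the loop, force_refresh and the printout are illustrative only, not part of this commit:

    # Sketch: re-running an import over the same data directory.
    force_refresh = False
    for path in glob.glob(os.path.join(data_dir, "201*")):
        if not populate_datasets(path, update=force_refresh):
            # falsy return: path missing, or dataset already present and update not set
            print("nothing imported for %s" % path)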
@@ -242,23 +245,39 @@ def populate_subjects(data_path):
     if not os.path.exists(info_file):
         return None, None, False
     info = ut.read_info_file(info_file)

     p = []
     ut.find_key_recursive(info, "Subject", p)
+    subj = {}
     if len(p) > 0:
         subj = ut.deep_get(info, p)

     inserts = Subject.get_template_tuple()
-    inserts["subject_id"] = subj["Identifier"]
+    subj_id = None
+    if "Identifier" in subj.keys():
+        if isinstance(subj["Identifier"], dict):
+            subj_id = "unspecified_" + dset_name
+        else:
+            subj_id = subj["Identifier"]
+    elif "Identifier" in info.keys():
+        if isinstance(info["Identifier"], dict):
+            subj_id = "unspecified_" + dset_name
+        else:
+            subj_id = info["Identifier"]
+    else:
+        subj_id = "unspecified_" + dset_name
+    inserts["subject_id"] = subj_id
     inserts["species"] = subj["Species"]
     Subject().insert1(inserts, skip_duplicates=True)

     # multi match entry
     dataset = dict((Dataset() & {"dataset_id": dset_name}).fetch1())
-    mm = dict(dataset_id=dataset["dataset_id"], subject_id=subj["Identifier"])
+    mm = dict(dataset_id=dataset["dataset_id"], subject_id=inserts["subject_id"])
     SubjectDatasetMap.insert1(mm, skip_duplicates=True)

     # subject properties
     props = SubjectProperties.get_template_tuple()
-    props["subject_id"] = subj["Identifier"]
+    props["subject_id"] = inserts["subject_id"]
     props["recording_date"] = dataset["recording_date"]
     if "Weight" in subj.keys():
         props["weight"] = np.round(float(subj["Weight"][:-1]), 1)
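The added block spells the same fallback out twice; as a reading aid, its logic is equivalent to the following standalone helper. The helper is hypothetical (the commit keeps the logic inline in populate_subjects) and the names mirror the diff above:

    def resolve_subject_id(subj, info, dset_name):
        """Take the Identifier from the Subject section, then from the top-level
        info dict; a missing entry or a dict value is treated as 'not specified'
        and replaced by an "unspecified_<dataset>" id."""
        fallback = "unspecified_" + dset_name
        if "Identifier" in subj:
            ident = subj["Identifier"]
        elif "Identifier" in info:
            ident = info["Identifier"]
        else:
            return fallback
        return fallback if isinstance(ident, dict) else ident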
@@ -293,8 +312,13 @@ def populate_cells(data_path):
     if isinstance(firing_rate, str):
         firing_rate = float(firing_rate[:-2])

+    if "Identifier" not in subject_info.keys() and ("Identifier" in info.keys() and
+                                                    isinstance(info["Identifier"], dict)):
+        subject_id = "unspecified_" + dset_name
+    else:
+        subject_id = subject_info["Identifier"]
     dataset = dict((Dataset & {"dataset_id": dset_name}).fetch1())
-    subject = dict((Subject & {"subject_id": subject_info["Identifier"]}).fetch1())
+    subject = dict((Subject & {"subject_id": subject_id}).fetch1())

     dataset_id = dataset["dataset_id"]
     cell_id = "-".join(dataset_id.split("-")[:4]) if len(dataset_id) > 4 else dataset_id
@@ -328,7 +352,7 @@ def scan_nix_file_for_repros(dataset):
     nix_files = glob.glob(os.path.join(dataset["data_source"], "*.nix"))
     for nf in nix_files:
         if not Dataset.check_file_integrity(nf):
-            print("file is not sane!!!")
+            print("\t\tfile is not sane!!!")
             continue
         f = nix.File.open(nf, nix.FileMode.ReadOnly)
         b = f.blocks[0]
@@ -338,6 +362,8 @@ def scan_nix_file_for_repros(dataset):
             if len(rs) == 0:
                 continue
             rs = rs[0]
+            print("\t\t%s" % rs["RePro"])
+
             rp = Repro.get_template_tuple()
             rp["run"] = rs["Run"]
             rp["repro_name"] = rs["RePro"]
@@ -384,7 +410,7 @@ def scan_nix_file_for_repros(dataset):


 def scan_folder_for_repros(dataset):
-    print("No nix-file, scanning directory!")
+    print("\t\tNo nix-file, scanning directory!")
     repro_settings, stim_indices = ut.read_stimuli_file(dataset["data_source"])
     repro_counts = {}
     for i, (rs, si) in enumerate(zip(repro_settings, stim_indices)):
@@ -400,7 +426,7 @@ def scan_folder_for_repros(dataset):
         path = []
         if not ut.find_key_recursive(rs, "repro", path):
             ut.find_key_recursive(rs, "RePro", path)
-        print(ut.deep_get(rs, path, "None"))
+        print("\t\t %s" % ut.deep_get(rs, path, "None"))
         rp["repro_name"] = ut.deep_get(rs, path, "None")

         path = []
@@ -474,7 +500,7 @@ def populate(datasets):
         try:
             populate_repros(d)
         except ():
-            print("something went wrong! %s" % d)
+            print("\t\tsomething went wrong! %s" % d)


 if __name__ == "__main__":
@@ -482,6 +508,6 @@ if __name__ == "__main__":
     # data_dir = "../high_freq_chirps/data"
     # drop_tables()

-    datasets = glob.glob(os.path.join(data_dir, '201*'))
+    datasets = glob.glob(os.path.join(data_dir, '/data/apteronotus/201*'))
     populate(datasets)
