fixes and move all convenience methods to frontend classes

This commit is contained in:
parent ace17d8f7c
commit f6c170525b
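In practice, the filtered queries move off the DataJoint table classes (Datasets, Subjects, Cells) and onto the frontend wrapper classes (Dataset, Subject, Cell) defined in database.py. A minimal usage sketch of the relocated helpers; the import line and all filter values (30, "Apteronotus", "p-unit") are illustrative assumptions, only the method names and signatures come from the diff below:

    from database import Dataset, Subject, Cell  # assumed import of the frontend wrappers

    # Each find_* helper filters the corresponding table and returns a list of wrapper objects.
    datasets = Dataset.find_datasets(min_duration=30, quality="good")   # illustrative filter values
    subjects = Subject.find_subjects(species="Apteronotus")             # substring match on species
    cells = Cell.find_cells(cell_type="p-unit", quality="good")         # "p-unit" is only an example

    print(len(datasets), len(subjects), len(cells))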
@@ -3,18 +3,18 @@ import nixio as nix
 import os
 import numpy as np
 from IPython import embed
-from database import Datasets, Repros
+from database import *

 schema = dj.schema("fish_book", locals())


 class BaselineData(object):

-    def __init__(self, dataset:Datasets):
+    def __init__(self, dataset:Dataset):
         self.__data = []
         self.__dataset = dataset
         self.__cell = dataset.cells[0]
         self._get_data()

     def _get_data(self):
         if not self.__dataset:
             self.__data = []
@@ -89,6 +89,9 @@ class BaselineData(object):

 if __name__ == "__main__":
     print("Test")
-    dataset = Datasets & "dataset_id like '2018-11-09-aa-%' "
-    baseline = BaselineData(dataset.fetch1())
-    embed()
-    exit()
+    dataset = Dataset(tuple=(Datasets & "dataset_id like '2018-11-09-aa-%'").fetch(limit=1, as_dict=True))
+    baseline = BaselineData(dataset)
+    embed()
database.py (126 changed lines)
@@ -6,7 +6,6 @@ import glob
 import util as ut
 import uuid
 import yaml

 from IPython import embed

 schema = dj.schema("fish_book_new", locals())
@@ -65,25 +64,13 @@ class Datasets(dj.Manual):
             sane = False
         return sane

-    @staticmethod
-    def datasets(min_duration=None, experimenter=None, quality=None):
-        dsets = Datasets
-        if min_duration:
-            dsets = dsets & "duration > %.2f" % min_duration
-        if experimenter:
-            dsets = dsets & dict(experimenter=experimenter)
-        if quality:
-            dsets = dsets & "quality like '{0:s}'".format(quality)
-        return [Dataset(tuple=d) for d in dsets]
-

 class Dataset:
-    def __init__(self, dataset_id=None, exact=False, tuple=None):
+    def __init__(self, dataset_id=None, tuple=None):
         if tuple:
             self.__tuple = tuple
         elif dataset_id:
-            wildcard = "%" if not exact else ""
-            pattern = "dataset_id like '{0:s}{0:s}{0:s}'".format(wildcard, dataset_id, wildcard)
+            pattern = "dataset_id like '{0:s}'".format(dataset_id)
             dsets = (Datasets & pattern)
             assert(len(dsets) == 1), "Dataset name is not unique!"
             self.__tuple = dsets.fetch(limit=1)[0]
@@ -136,6 +123,17 @@ class Dataset:
         subjs = (Subjects * (SubjectDatasetMap & self.__tuple))
         return [Subject(tuple=s) for s in subjs]

+    @staticmethod
+    def find_datasets(min_duration=None, experimenter=None, quality=None):
+        dsets = Datasets
+        if min_duration:
+            dsets = dsets & "duration > %.2f" % min_duration
+        if experimenter:
+            dsets = dsets & "experimenter like '%{0:s}%'".format(experimenter)
+        if quality:
+            dsets = dsets & "quality like '{0:s}'".format(quality)
+        return [Dataset(tuple=d) for d in dsets]
+

 @schema
 class Subjects(dj.Manual):
@@ -171,24 +169,6 @@ class Subjects(dj.Manual):
         self.insert1(inserts, skip_duplicates=True)
         nix_file.close()

-    @staticmethod
-    def subjects(species=None):
-        subjs = []
-        if species:
-            subjs = (Subjects & "species like '%{0:s}%'".format(species))
-        else:
-            subjs = (Subjects & True)
-        return [Subject(tuple=s) for s in subjs]
-
-    @staticmethod
-    def unique_species():
-        all_species = (Subjects & True).fetch("species")
-        return np.unique(all_species)
-
     @property
     def properties(self):
         return (SubjectProperties & self).fetch1()

-    #@property
-    #def datasets(self):
-    #    retrun
@@ -218,6 +198,22 @@ class Subject:
         cs = Cells & self.__tuple
         return [Cell(tuple=c) for c in cs]

+    @property
+    def properties(self):
+        return (SubjectProperties & self.__tuple).fetch(as_dict=True)
+
+    @staticmethod
+    def find_subjects(species=None):
+        subjs = Subjects & True
+        if species:
+            subjs = (Subjects & "species like '%{0:s}%'".format(species))
+        return [Subject(tuple=s) for s in subjs]
+
+    @staticmethod
+    def unique_species():
+        all_species = (Subjects & True).fetch("species")
+        return np.unique(all_species)
+

 @schema
 class SubjectDatasetMap(dj.Manual):
@@ -276,24 +272,6 @@ class Cells(dj.Manual):
-        return d
+        return tup

     @property
     def subject(self):
         return Subjects & self

-    @staticmethod
-    def celltypes():
-        return np.unique(Cells.fetch("cell_type"))
-
-    @staticmethod
-    def cells(celltype=None, species=None, quality="good"):
-        cs = Cells * CellDatasetMap * Datasets * Subjects
-        if celltype:
-            cs = cs & "cell_type like '{0:s}'".format(celltype)
-        if species:
-            cs = cs & "species like '%{0:s}%'".format(species)
-        if quality:
-            cs = cs & "quality like '{0:s}'".format(quality)
-        return cs
-

 class Cell:
     def __init__(self, cell_id=None, tuple=None):
@@ -305,12 +283,54 @@ class Cell:
             assert (len(cells) == 1), "Cell id is not unique!"
             self.__tuple = cells.fetch(as_dict=True)[0]
         else:
-            print("Empty Cell, not linke to any database entry!")
+            print("Empty Cell, not linked to any database entry!")

     @property
     def cell_id(self):
         return self.__tuple["cell_id"] if "cell_id" in self.__tuple.keys() else ""

+    @property
+    def cell_type(self):
+        return self.__tuple["cell_type"] if "cell_type" in self.__tuple.keys() else ""
+
+    @property
+    def firing_rate(self):
+        return self.__tuple["firing_rate"] if "firing_rate" in self.__tuple.keys() else 0.0
+
+    @property
+    def location(self):
+        keys = ["structure", "region", "subregion", "depth", "lateral_pos", "transversal_section"]
+        loc = {}
+        for k in keys:
+            if k in self.__tuple.keys():
+                loc[k] = self.__tuple[k]
+            else:
+                loc[k] = ""
+        return loc
+
+    @property
+    def subject(self):
+        return Subject(tuple=(Subjects & {"subject_id": self.__tuple["subject_id"]}).fetch(limit=1, as_dict=True)[0])
+
+    @staticmethod
+    def celltypes():
+        return np.unique(Cells.fetch("cell_type"))
+
+    @staticmethod
+    def find_cells(cell_type=None, species=None, quality="good"):
+        cs = Cells * CellDatasetMap * Datasets * Subjects
+        if cell_type:
+            cs = cs & "cell_type like '{0:s}'".format(cell_type)
+        if species:
+            cs = cs & "species like '%{0:s}%'".format(species)
+        if quality:
+            cs = cs & "quality like '{0:s}'".format(quality)
+        return [Cell(tuple=c) for c in cs]
+
     def __str__(self):
         str = ""
         str += "Cell: %s \t type: %s\n"%(self.cell_id, self.cell_type)
         return str


 @schema
 class CellDatasetMap(dj.Manual):
@@ -677,6 +697,6 @@ if __name__ == "__main__":
     # data_dir = "../high_freq_chirps/data"
     # drop_tables()
     # datasets = glob.glob("/Users/jan/zwischenlager/2012-*")2010-06-21-ac/info.dat
-    datasets = glob.glob(os.path.join(data_dir, '/data/apteronotus/2018-*'))
+    datasets = glob.glob(os.path.join(data_dir, '/data/apteronotus/2010-*'))
     populate(datasets, update=False)
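For reference, a short sketch of how the new Cell wrapper could be exercised once this commit is applied; it only uses the find_cells helper and the properties added above, and the printed fields are chosen for illustration:

    from database import Cell  # assumed import of the frontend wrapper

    for cell in Cell.find_cells(quality="good"):      # "good" is the method's default quality filter
        print(cell)                                   # Cell.__str__ prints the id and cell type
        loc = cell.location                           # dict with structure, region, subregion, depth, ...
        print(loc["structure"], cell.firing_rate)
        subject = cell.subject                        # Subject wrapper for the recorded animal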