[dataimport] minor optimizations

This commit is contained in:
Jan Grewe 2020-07-28 15:30:37 +02:00
parent 4326148b3f
commit f4d669f1d6

View File

@@ -1,25 +1,29 @@
import argparse
import glob
import os
# One-line program description shown by argparse in the --help output of main().
main_descr = "Import data into the fishbook database!"
def run(args):
    """Import datasets into the fishbook database, or drop all tables.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed CLI arguments as defined in main(): ``folder`` and
        ``pattern`` select the datasets, ``drop`` wipes the database
        instead of importing, ``update`` refreshes existing entries.
    """
    # Imported lazily so --help and argument errors work even when the
    # fishbook package (and its database connection) is unavailable.
    import fishbook as fb

    # NOTE(review): was `args.Drop` — argparse stores `--drop` as
    # `args.drop`, so the old spelling raised AttributeError.
    if args.drop:
        # Destructive: remove all tables, then stop without importing.
        fb.backend.database.drop_tables()
        exit()

    # Only glob for datasets once we know we are actually importing.
    datasets = sorted(glob.glob(os.path.join(args.folder, args.pattern)))
    if not datasets:
        print("no matching datasets found when using pattern %s" % os.path.join(args.folder, args.pattern))
        exit()
    fb.backend.database.populate(datasets, args.update)
def main():
parser = argparse.ArgumentParser(prog="fishbookImport", description=main_descr)
parser.add_argument("folder", default=".", help="the location in which to look for datasets. Default is the current folder")
parser.add_argument("-p", "--pattern", type=str, default="20*", help="the dataset name pattern to apply")
parser.add_argument("-p", "--pattern", type=str, default="20*", help="the dataset name pattern to apply, e.g. \"2020-01-01-*\"")
parser.add_argument("-u", "--update", default=False, action="store_true", help="update entries instead of skipping duplicates")
parser.add_argument("--drop", default=False, action="store_true", help="Drop all information from the database")
args = parser.parse_args()