Compare commits

4 Commits

5ceecc6bf4...master

| Author | SHA1 | Date |
|---|---|---|
|  | 882399f0b3 |  |
|  | 4c8de7dcf5 |  |
|  | d171652b2a |  |
|  | 4df0f6239b |  |

main.py (25 changed lines)
```diff
@@ -12,7 +12,7 @@ import argparse
 parser = argparse.ArgumentParser()
 parser.add_argument("--input", help="folder containing input json(s)", required=True, type=pathlib.Path)
 parser.add_argument("--output", help="folder to place csv", required=True, type=pathlib.Path)
-parser.add_argument("--delimiter", help="delimiter for CSV (default is '|'", default="|")
+parser.add_argument("--delimiter", help="delimiter for CSV (default is '|')", default="|")
 parser.add_argument("--single", action="store_true", help="merge all json files to single output csv")
 parser.add_argument("--verbose", '-v', action="count", help="set verbose level", default=0)
 parser.add_argument("--zip", action="store_true", help="make a zipfile of all outputs")
```
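For context, `--verbose` uses argparse's `count` action with a default of 0, which is what the `args.verbose` checks added later in this diff rely on. A minimal, self-contained sketch (the parsed argv below is made up):

```python
import argparse
import pathlib

# Standalone sketch of the flags touched in this diff; not the full CLI of main.py.
parser = argparse.ArgumentParser()
parser.add_argument("--input", help="folder containing input json(s)", required=True, type=pathlib.Path)
parser.add_argument("--output", help="folder to place csv", required=True, type=pathlib.Path)
parser.add_argument("--delimiter", help="delimiter for CSV (default is '|')", default="|")
parser.add_argument("--verbose", '-v', action="count", help="set verbose level", default=0)

args = parser.parse_args(["--input", "in", "--output", "out", "-vv"])
print(args.delimiter, args.verbose)  # prints: | 2
```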
```diff
@@ -32,7 +32,7 @@ def make_archive(source: pathlib.Path, destination: pathlib.Path) -> None:
     shutil.make_archive(str(base_name), fmt, root_dir, base_dir)
 
 
-class DBConn:
+class JsonToCsv:
     def __init__(self):
         self.cur = None
         self.con = None
@@ -43,8 +43,11 @@ class DBConn:
 
     def init_db(self):
         self.counter += 1
-        self.ts = time.strftime('%Y%m%d_%H%M%S', time.localtime())
-        self.con = sqlite3.connect(args.output / f"data-{args.name}-{self.ts}-{self.counter}.db")
+        self.ts = time.strftime('%Y%m%d-%H%M%S', time.localtime())
+        db_name = args.output / f"data-{args.name}-{self.ts}-{self.counter}.db"
+        if args.verbose > 0:
+            print(f"creating DB {db_name}")
+        self.con = sqlite3.connect(db_name)
         self.cur = self.con.cursor()
         self.make_tables()
 
```
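The new `init_db` body builds the database path first so it can be logged before connecting. A minimal sketch of the naming scheme, with local stand-ins for `args.output`, `args.name`, and `self.counter`:

```python
import pathlib
import sqlite3
import time

# Stand-ins for args.output, args.name and self.counter from the diff; the values are made up.
output = pathlib.Path(".")
name = "example"
counter = 1

ts = time.strftime('%Y%m%d-%H%M%S', time.localtime())
db_name = output / f"data-{name}-{ts}-{counter}.db"
print(f"creating DB {db_name}")   # e.g. data-example-20240101-120000-1.db

con = sqlite3.connect(db_name)    # sqlite3 creates the file on first connect
con.close()
```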
```diff
@@ -79,8 +82,7 @@ class DBConn:
 
         if len(not_found) > 0:
             if args.verbose >= 1:
-                print(
-                    f"added new cols {', '.join(not_found)} to {tbl}, already present {tbl_cols}, want {col_names}")
+                print(f"added new cols {', '.join(not_found)} to {tbl}")
 
             new_cols = list(tbl_cols)
             for new_col in not_found:
@@ -94,10 +96,10 @@ class DBConn:
         self.cur.execute(sql, values)
 
     def make_csv_from_tables(self, prefix=''):
-        dbConn.cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
+        json_csv.cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
         tbls = []
 
-        for (tbl,) in dbConn.cur.fetchall():
+        for (tbl,) in json_csv.cur.fetchall():
             if tbl.find(args.name) == 0:
                 tbls.append(tbl)
 
```
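`make_csv_from_tables` lists tables via `sqlite_master` and keeps only those whose names start with `args.name` (the `tbl.find(args.name) == 0` prefix check). A small sketch against an in-memory database, with made-up table names:

```python
import sqlite3

# Illustrative only: an in-memory DB with one matching and one non-matching table.
con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE demo_rows (x)")
cur.execute("CREATE TABLE other_rows (x)")

name = "demo"  # stands in for args.name
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
tbls = [tbl for (tbl,) in cur.fetchall() if tbl.find(name) == 0]
print(tbls)  # ['demo_rows']
```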
```diff
@@ -266,15 +268,14 @@ class DBConn:
         make_archive(args.output, pathlib.Path(f"{top_level}.zip"))
 
 
-dbConn = DBConn()
-
 if __name__ == '__main__':
     if args.verbose >= 1:
         print(f"args = {args}")
 
     if args.clean:
-        for d in args.output.glob("*.csv"):
+        for d in args.output.iterdir():
             print(f"will delete {d}")
             os.unlink(d)
 
-    dbConn.parse_json()
+    json_csv = JsonToCsv()
+    json_csv.parse_json()
```
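The `--clean` branch now removes everything in the output folder rather than only `*.csv` files. A small sketch of the difference, using a throwaway temp directory in place of `args.output`:

```python
import pathlib
import tempfile

output = pathlib.Path(tempfile.mkdtemp())   # stands in for args.output
(output / "a.csv").touch()
(output / "a.db").touch()

print(sorted(p.name for p in output.glob("*.csv")))  # old behaviour: ['a.csv']
print(sorted(p.name for p in output.iterdir()))      # new behaviour: ['a.csv', 'a.db']
```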