Merge branch 'pypi-release'

Closes #7
This commit is contained in:
2019-03-10 10:44:49 +01:00
2 changed files with 58 additions and 37 deletions

View File

@@ -350,7 +350,7 @@ CREATE VIEW IF NOT EXISTS Capacity_View
"""
def mdb_get_data_text(filename, table):
def mdb_get_data_text(s3db, filename, table):
print("Reading %s..." % table)
try:
mdb_sql = sp.Popen(['mdb-export', '-I', 'postgres', filename, table],
@@ -373,7 +373,7 @@ def mdb_get_data_text(filename, table):
mdb_sql.terminate()
def mdb_get_data_numeric(filename, table):
def mdb_get_data_numeric(s3db, filename, table):
print("Reading %s..." % table)
try:
mdb_sql = sp.Popen(['mdb-export', filename, table],
@@ -392,24 +392,21 @@ def mdb_get_data_numeric(filename, table):
mdb_sql.terminate()
def mdb_get_data(s3db, filename, table):
    """Read one table from an Arbin .res file into the sqlite database.

    Dispatches to the text or numeric mdb-export reader depending on which
    registry the table belongs to.

    Parameters:
        s3db -- open sqlite3.Connection to write into
        filename -- path to the Arbin .res (MS Access/MDB) file
        table -- name of the table to export

    Raises:
        ValueError -- if *table* is in neither registry, so a typo in a
        table list fails loudly instead of being silently skipped.
    """
    if table in mdb_tables_text:
        mdb_get_data_text(s3db, filename, table)
    elif table in mdb_tables_numeric:
        mdb_get_data_numeric(s3db, filename, table)
    else:
        raise ValueError("'%s' is in neither mdb_tables_text nor mdb_tables_numeric" % table)
## Main part of the script
def convert_arbin_to_sqlite(input_file, output_file):
"""Read data from an Arbin .res data file and write to a sqlite file.
parser = argparse.ArgumentParser(description="Convert Arbin .res files to sqlite3 databases using mdb-export")
parser.add_argument('input_file', type=str) # need file name to pass to sp.Popen
parser.add_argument('output_file', type=str) # need file name to pass to sqlite3.connect
args = parser.parse_args()
s3db = sqlite3.connect(args.output_file)
Any data currently in the sqlite file will be erased!
"""
s3db = sqlite3.connect(output_file)
for table in reversed(mdb_tables + mdb_5_23_tables):
@@ -417,7 +414,7 @@ for table in reversed(mdb_tables + mdb_5_23_tables):
for table in mdb_tables:
    # Create the table schema first, then stream its rows in via mdb-export.
    s3db.executescript(mdb_create_scripts[table])
    # The connection is passed explicitly now that mdb_get_data takes it
    # as its first argument.
    mdb_get_data(s3db, input_file, table)
    if table in mdb_create_indices:
        # Indices are built after the bulk insert, which is much faster
        # than maintaining them row-by-row during the load.
        print("Creating indices for %s..." % table)
        s3db.executescript(mdb_create_indices[table])
@@ -425,7 +422,7 @@ for table in mdb_tables:
if (s3db.execute("SELECT Version_Schema_Field FROM Version_Table;").fetchone()[0] == "Results File 5.23"):
    # Only "Results File 5.23" format files contain these extra tables.
    for table in mdb_5_23_tables:
        s3db.executescript(mdb_create_scripts[table])
        # BUG FIX: this call omitted the s3db connection even though
        # mdb_get_data now requires it as its first argument (compare the
        # mdb_tables loop above, which passes it) — without it the call
        # raises at runtime for every 5.23-format file.
        mdb_get_data(s3db, input_file, table)
        if table in mdb_create_indices:
            s3db.executescript(mdb_create_indices[table])
@@ -434,3 +431,16 @@ s3db.executescript(helper_table_script)
print("Vacuuming database...")
s3db.executescript("VACUUM; ANALYZE;")
def main(argv=None):
    """Command-line entry point.

    Parses *argv* (or sys.argv when None) for the input .res path and the
    output sqlite path, then runs the conversion.
    """
    arg_parser = argparse.ArgumentParser(
        description="Convert Arbin .res files to sqlite3 databases using mdb-export")
    # Both arguments stay plain file-name strings: the input is handed to
    # sp.Popen (mdb-export) and the output to sqlite3.connect.
    arg_parser.add_argument('input_file', type=str)
    arg_parser.add_argument('output_file', type=str)
    parsed = arg_parser.parse_args(argv)
    convert_arbin_to_sqlite(parsed.input_file, parsed.output_file)


if __name__ == '__main__':
    main()

View File

@@ -1,21 +1,32 @@
# -*- coding: utf-8 -*-
"""setuptools configuration for the galvani package (PyPI release 0.0.1)."""
import os.path

from setuptools import setup

# Reuse the README verbatim as the PyPI long description.
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as f:
    readme = f.read()

setup(
    name='galvani',
    version='0.0.1',
    description='Open and process battery charger log data files',
    long_description=readme,
    long_description_content_type="text/markdown",
    url='https://github.com/chatcannon/galvani',
    author='Chris Kerr',
    author_email='chris.kerr@mykolab.ch',
    license='GPLv3+',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
        'Natural Language :: English',
    ],
    packages=['galvani'],
    # The console script is generated from entry_points, replacing the old
    # scripts=['scripts/res2sqlite.py'] mechanism (its TODO is resolved).
    # NOTE(review): the stripped diff makes it ambiguous whether scripts=
    # was also kept in this commit — confirm against the repository.
    entry_points={'console_scripts': [
        'res2sqlite = galvani.res2sqlite:main',
    ]},
    install_requires=['numpy'],
)