diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..ac8f3fb
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,2 @@
+# This file will be ignored - see http://flake8.pycqa.org/en/2.6.0/config.html#per-project
+# Edit the [flake8] section in tox.ini instead
diff --git a/galvani/BioLogic.py b/galvani/BioLogic.py
index e644e68..4eeac7f 100644
--- a/galvani/BioLogic.py
+++ b/galvani/BioLogic.py
@@ -10,6 +10,7 @@
 from os import SEEK_SET
 import time
 from datetime import date, datetime, timedelta
 from collections import defaultdict, OrderedDict
+import functools
 
 import numpy as np
@@ -18,7 +19,7 @@
 if sys.version_info.major <= 2:
     str3 = str
     from string import maketrans
 else:
-    str3 = lambda b: str(b, encoding='ascii')
+    str3 = functools.partial(str, encoding='ascii')
     maketrans = bytes.maketrans
 
@@ -71,19 +72,20 @@
         raise ValueError("Bad first line for EC-Lab file: '%s'" % magic)
 
     # TODO use rb'string' here once Python 2 is no longer supported
-    nb_headers_match = re.match(b'Nb header lines : (\\d+)\\s*$', next(mpt_file))
+    nb_headers_match = re.match(b'Nb header lines : (\\d+)\\s*$',
+                                next(mpt_file))
    nb_headers = int(nb_headers_match.group(1))
     if nb_headers < 3:
         raise ValueError("Too few header lines: %d" % nb_headers)
 
-    ## The 'magic number' line, the 'Nb headers' line and the column headers
-    ## make three lines. Every additional line is a comment line.
+    # The 'magic number' line, the 'Nb headers' line and the column headers
+    # make three lines. Every additional line is a comment line.
     comments = [next(mpt_file) for i in range(nb_headers - 3)]
 
     fieldnames = str3(next(mpt_file)).strip().split('\t')
     record_type = np.dtype(list(map(fieldname_to_dtype, fieldnames)))
 
-    ## Must be able to parse files where commas are used for decimal points
+    # Must be able to parse files where commas are used for decimal points
     converter_dict = dict(((i, comma_converter)
                            for i in range(len(fieldnames))))
 
     mpt_array = np.loadtxt(mpt_file, dtype=record_type,
@@ -113,8 +115,8 @@
     if nb_headers < 3:
         raise ValueError("Too few header lines: %d" % nb_headers)
 
-    ## The 'magic number' line, the 'Nb headers' line and the column headers
-    ## make three lines. Every additional line is a comment line.
+    # The 'magic number' line, the 'Nb headers' line and the column headers
+    # make three lines. Every additional line is a comment line.
     comments = [next(mpt_file) for i in range(nb_headers - 3)]
 
     mpt_csv = csv.DictReader(mpt_file, dialect='excel-tab')
@@ -256,7 +258,8 @@ def read_VMP_modules(fileobj, read_module_data=True):
         if len(module_magic) == 0:  # end of file
             break
         elif module_magic != b'MODULE':
-            raise ValueError("Found %r, expecting start of new VMP MODULE" % module_magic)
+            raise ValueError("Found %r, expecting start of new VMP MODULE"
+                             % module_magic)
 
         hdr_bytes = fileobj.read(VMPmodule_hdr.itemsize)
         if len(hdr_bytes) < VMPmodule_hdr.itemsize:
@@ -280,6 +283,9 @@
             fileobj.seek(hdr_dict['offset'] + hdr_dict['length'], SEEK_SET)
 
 
+MPR_MAGIC = b'BIO-LOGIC MODULAR FILE\x1a'.ljust(48) + b'\x00\x00\x00\x00'
+
+
 class MPRfile:
     """Bio-Logic .mpr file
 
@@ -302,10 +308,8 @@ class MPRfile:
             mpr_file = open(file_or_path, 'rb')
         else:
             mpr_file = file_or_path
-
-        mpr_magic = b'BIO-LOGIC MODULAR FILE\x1a                         \x00\x00\x00\x00'
-        magic = mpr_file.read(len(mpr_magic))
-        if magic != mpr_magic:
+        magic = mpr_file.read(len(MPR_MAGIC))
+        if magic != MPR_MAGIC:
             raise ValueError('Invalid magic for .mpr file: %s' % magic)
 
         modules = list(read_VMP_modules(mpr_file))
@@ -326,7 +330,7 @@
         elif data_module['version'] == 2:
             column_types = np.frombuffer(data_module['data'][5:], dtype='<u2',
                                          count=n_columns)
@@ ... @@
             elif ole_timestamp4 > 40000 and ole_timestamp4 < 50000:
                 ole_timestamp = ole_timestamp4
-
+
             else:
                 raise ValueError("Could not find timestamp in the LOG module")
@@ -400,10 +404,10 @@ class MPRfile:
             ole_timedelta = timedelta(days=ole_timestamp[0])
             self.timestamp = ole_base + ole_timedelta
             if self.startdate != self.timestamp.date():
-                raise ValueError("""Date mismatch:
-                Start date: %s
-                End date: %s
-                Timestamp: %s""" % (self.startdate, self.enddate, self.timestamp))
+                raise ValueError("Date mismatch:\n"
+                                 + "  Start date: %s\n" % self.startdate
+                                 + "  End date: %s\n" % self.enddate
+                                 + "  Timestamp: %s\n" % self.timestamp)
 
     def get_flag(self, flagname):
         if flagname in self.flags_dict:
diff --git a/galvani/__init__.py b/galvani/__init__.py
index 1949033..ce98e1d 100644
--- a/galvani/__init__.py
+++ b/galvani/__init__.py
@@ -1 +1,3 @@
-from .BioLogic import MPTfile, MPRfile
+from .BioLogic import MPRfile, MPTfile
+
+__all__ = ['MPRfile', 'MPTfile']
diff --git a/galvani/res2sqlite.py b/galvani/res2sqlite.py
index 0234a7b..f31193f 100755
--- a/galvani/res2sqlite.py
+++ b/galvani/res2sqlite.py
@@ -7,8 +7,8 @@ import csv
 
 import argparse
 
-## The following scripts are adapted from the result of running
-## $ mdb-schema oracle
+# The following scripts are adapted from the result of running
+# $ mdb-schema oracle
 
 mdb_tables = ["Version_Table", "Global_Table", "Resume_Table",
               "Channel_Normal_Table", "Channel_Statistic_Table",
@@ -126,7 +126,8 @@ CREATE TABLE Channel_Statistic_Table
 -- Version 1.14 ends here, version 5.23 continues
     Charge_Time REAL DEFAULT NULL,
     Discharge_Time REAL DEFAULT NULL,
-    FOREIGN KEY (Test_ID, Data_Point) REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
+    FOREIGN KEY (Test_ID, Data_Point)
+        REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
 );
 """,
 "Auxiliary_Table": """
@@ -137,7 +138,8 @@ CREATE TABLE Auxiliary_Table
     Data_Type INTEGER,
     X REAL,
     "dX/dt" REAL,
-    FOREIGN KEY (Test_ID, Data_Point) REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
+    FOREIGN KEY (Test_ID, Data_Point)
+        REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
 );
 """,
 "Event_Table": """
 CREATE TABLE Event_Table
@@ -220,9 +222,10 @@ CREATE TABLE Smart_Battery_Data_Table
     ChargingCurrent REAL DEFAULT NULL,
     ChargingVoltage REAL DEFAULT NULL,
     ManufacturerData REAL DEFAULT NULL,
-    FOREIGN KEY (Test_ID, Data_Point) REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
+    FOREIGN KEY (Test_ID, Data_Point)
+        REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
 );
 """,
-    ## The following tables are not present in version 1.14
+    # The following tables are not present in version 1.14
 'MCell_Aci_Data_Table': """
 CREATE TABLE MCell_Aci_Data_Table
 (
@@ -233,7 +236,8 @@
     Phase_Shift REAL,
     Voltage REAL,
     Current REAL,
-    FOREIGN KEY (Test_ID, Data_Point) REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
+    FOREIGN KEY (Test_ID, Data_Point)
+        REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
 );""",
 'Aux_Global_Data_Table': """
 CREATE TABLE Aux_Global_Data_Table
@@ -288,7 +292,8 @@ CREATE TABLE Smart_Battery_Clock_Stretch_Table
     VCELL3 INTEGER,
     VCELL2 INTEGER,
     VCELL1 INTEGER,
-    FOREIGN KEY (Test_ID, Data_Point) REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
+    FOREIGN KEY (Test_ID, Data_Point)
+        REFERENCES Channel_Normal_Table (Test_ID, Data_Point)
 );"""}
 
 mdb_create_indices = {
@@ -306,18 +311,21 @@ CREATE TEMPORARY TABLE capacity_helper(
     Discharge_Capacity REAL NOT NULL,
     Charge_Energy REAL NOT NULL,
     Discharge_Energy REAL NOT NULL,
-    FOREIGN KEY (Test_ID, Cycle_Index) REFERENCES Channel_Normal_Table (Test_ID, Cycle_Index)
+    FOREIGN KEY (Test_ID, Cycle_Index)
+        REFERENCES Channel_Normal_Table (Test_ID, Cycle_Index)
 );
 
-INSERT INTO capacity_helper
-    SELECT Test_ID, Cycle_Index, max(Charge_Capacity), max(Discharge_Capacity), max(Charge_Energy), max(Discharge_Energy)
-    FROM Channel_Normal_Table
+INSERT INTO capacity_helper
+    SELECT Test_ID, Cycle_Index,
+           max(Charge_Capacity), max(Discharge_Capacity),
+           max(Charge_Energy), max(Discharge_Energy)
+    FROM Channel_Normal_Table
     GROUP BY Test_ID, Cycle_Index;
 
--- ## Alternative way of selecting ##
--- select *
--- from Channel_Normal_Table as a join Channel_Normal_Table as b
--- on (a.Test_ID = b.Test_ID and a.Data_Point = b.Data_Point + 1
+-- ## Alternative way of selecting ##
+-- select *
+-- from Channel_Normal_Table as a join Channel_Normal_Table as b
+-- on (a.Test_ID = b.Test_ID and a.Data_Point = b.Data_Point + 1
 -- and a.Charge_Capacity < b.Charge_Capacity);
 
 DROP TABLE IF EXISTS Capacity_Sum_Table;
@@ -328,12 +336,15 @@ CREATE TABLE Capacity_Sum_Table(
     Discharge_Capacity_Sum REAL NOT NULL,
     Charge_Energy_Sum REAL NOT NULL,
     Discharge_Energy_Sum REAL NOT NULL,
-    FOREIGN KEY (Test_ID, Cycle_Index) REFERENCES Channel_Normal_Table (Test_ID, Cycle_Index)
+    FOREIGN KEY (Test_ID, Cycle_Index)
+        REFERENCES Channel_Normal_Table (Test_ID, Cycle_Index)
 );
 
-INSERT INTO Capacity_Sum_Table
-    SELECT a.Test_ID, a.Cycle_Index, total(b.Charge_Capacity), total(b.Discharge_Capacity), total(b.Charge_Energy), total(b.Discharge_Energy)
-    FROM capacity_helper AS a LEFT JOIN capacity_helper AS b
+INSERT INTO Capacity_Sum_Table
+    SELECT a.Test_ID, a.Cycle_Index,
+           total(b.Charge_Capacity), total(b.Discharge_Capacity),
+           total(b.Charge_Energy), total(b.Discharge_Energy)
+    FROM capacity_helper AS a LEFT JOIN capacity_helper AS b
     ON (a.Test_ID = b.Test_ID AND a.Cycle_Index > b.Cycle_Index)
     GROUP BY a.Test_ID, a.Cycle_Index;
 
@@ -342,28 +353,35 @@
 DROP TABLE capacity_helper;
 
 CREATE VIEW IF NOT EXISTS Capacity_View AS
     SELECT Test_ID, Data_Point, Test_Time, Step_Time, DateTime, Step_Index,
            Cycle_Index, Current, Voltage, "dV/dt",
-           Discharge_Capacity + Discharge_Capacity_Sum - Charge_Capacity - Charge_Capacity_Sum AS Net_Capacity,
-           Discharge_Capacity + Discharge_Capacity_Sum + Charge_Capacity + Charge_Capacity_Sum AS Gross_Capacity,
-           Discharge_Energy + Discharge_Energy_Sum - Charge_Energy - Charge_Energy_Sum AS Net_Energy,
-           Discharge_Energy + Discharge_Energy_Sum + Charge_Energy + Charge_Energy_Sum AS Gross_Energy
+           ( (Discharge_Capacity + Discharge_Capacity_Sum)
+             - (Charge_Capacity + Charge_Capacity_Sum) ) AS Net_Capacity,
+           ( (Discharge_Capacity + Discharge_Capacity_Sum)
+             + (Charge_Capacity + Charge_Capacity_Sum) ) AS Gross_Capacity,
+           ( (Discharge_Energy + Discharge_Energy_Sum)
+             - (Charge_Energy + Charge_Energy_Sum) ) AS Net_Energy,
+           ( (Discharge_Energy + Discharge_Energy_Sum)
+             + (Charge_Energy + Charge_Energy_Sum) ) AS Gross_Energy
     FROM Channel_Normal_Table NATURAL JOIN Capacity_Sum_Table;
 """
 
 
 def mdb_get_data_text(s3db, filename, table):
     print("Reading %s..." % table)
+    insert_pattern = re.compile(
+        r'INSERT INTO "\w+" \([^)]+?\) VALUES \(("[^"]*"|[^")])+?\);\n',
+        re.IGNORECASE
+    )
     try:
         mdb_sql = sp.Popen(['mdb-export', '-I', 'postgres', filename, table],
                            bufsize=-1, stdin=None, stdout=sp.PIPE,
                            universal_newlines=True)
         mdb_output = mdb_sql.stdout.read()
         while len(mdb_output) > 0:
-            insert_match = re.match(r'INSERT INTO "\w+" \([^)]+?\) VALUES \(("[^"]*"|[^")])+?\);\n',
-                                    mdb_output, re.IGNORECASE)
+            insert_match = insert_pattern.match(mdb_output)
             s3db.execute(insert_match.group())
             mdb_output = mdb_output[insert_match.end():]
         s3db.commit()
-    except:
+    except BaseException:
         print("Error while importing %s" % table)
         print("Remaining mdb-export output:", mdb_output)
         if insert_match:
@@ -384,8 +402,11 @@
             quoted_headers = ['"%s"' % h for h in mdb_headers]
             joined_headers = ', '.join(quoted_headers)
             joined_placemarks = ', '.join(['?' for h in mdb_headers])
-            insert_stmt = 'INSERT INTO "{0}" ({1}) VALUES ({2});'.format(table,
-                joined_headers, joined_placemarks)
+            insert_stmt = 'INSERT INTO "{0}" ({1}) VALUES ({2});'.format(
+                table,
+                joined_headers,
+                joined_placemarks,
+            )
             s3db.executemany(insert_stmt, mdb_csv)
             s3db.commit()
         finally:
@@ -407,34 +428,37 @@ def convert_arbin_to_sqlite(input_file, output_file):
     Any data currently in the sqlite file will be erased!
     """
     s3db = sqlite3.connect(output_file)
-
-
+
     for table in reversed(mdb_tables + mdb_5_23_tables):
         s3db.execute('DROP TABLE IF EXISTS "%s";' % table)
-
+
     for table in mdb_tables:
         s3db.executescript(mdb_create_scripts[table])
         mdb_get_data(s3db, input_file, table)
         if table in mdb_create_indices:
             print("Creating indices for %s..." % table)
             s3db.executescript(mdb_create_indices[table])
-
-    if (s3db.execute("SELECT Version_Schema_Field FROM Version_Table;").fetchone()[0] == "Results File 5.23"):
+
+    csr = s3db.execute("SELECT Version_Schema_Field FROM Version_Table;")
+    version_text, = csr.fetchone()
+    if version_text == "Results File 5.23":
         for table in mdb_5_23_tables:
             s3db.executescript(mdb_create_scripts[table])
             mdb_get_data(s3db, input_file, table)
             if table in mdb_create_indices:
                 s3db.executescript(mdb_create_indices[table])
-
+
     print("Creating helper table for capacity and energy totals...")
     s3db.executescript(helper_table_script)
-
+
     print("Vacuuming database...")
     s3db.executescript("VACUUM; ANALYZE;")
 
 
 def main(argv=None):
-    parser = argparse.ArgumentParser(description="Convert Arbin .res files to sqlite3 databases using mdb-export")
+    parser = argparse.ArgumentParser(
+        description="Convert Arbin .res files to sqlite3 databases using mdb-export",
+    )
     parser.add_argument('input_file', type=str)  # need file name to pass to sp.Popen
     parser.add_argument('output_file', type=str)  # need file name to pass to sqlite3.connect
diff --git a/tests/test_BioLogic.py b/tests/test_BioLogic.py
index c8bf163..7fe2cb6 100644
--- a/tests/test_BioLogic.py
+++ b/tests/test_BioLogic.py
@@ -129,7 +129,7 @@ def assert_MPR_matches_MPT(mpr, mpt, comments):
     assert_array_equal(mpr.get_flag("control changes"), mpt["control changes"])
     if "Ns changes" in mpt.dtype.fields:
         assert_array_equal(mpr.get_flag("Ns changes"), mpt["Ns changes"])
-    ## Nothing uses the 0x40 bit of the flags
+    # Nothing uses the 0x40 bit of the flags
     assert_array_equal(mpr.get_flag("counter inc."), mpt["counter inc."])
 
     assert_array_almost_equal(mpr.data["time/s"],
@@ -146,10 +146,10 @@
     assert_field_matches("dQ/mA.h", decimal=17)  # 64 bit float precision
     assert_field_matches("P/W", decimal=10)  # 32 bit float precision for 1.xxE-5
     assert_field_matches("I/mA", decimal=6)  # 32 bit float precision
-
+
     assert_field_exact("cycle number")
     assert_field_matches("(Q-Qo)/C", decimal=6)  # 32 bit float precision
-
+
     try:
         assert timestamp_from_comments(comments) == mpr.timestamp
     except AttributeError:
diff --git a/tox.ini b/tox.ini
index ca5b805..8ccfc09 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,13 @@
 [tox]
 envlist = py27,py35,py37
 [testenv]
-deps=pytest
-commands=pytest
+deps =
+    flake8
+    pytest
+commands =
+    flake8
+    pytest
+
+[flake8]
+exclude = build,dist,*.egg-info,.cache,.git,.tox,__pycache__
+max-line-length = 100