Mirror of https://github.com/echemdata/galvani.git (synced 2025-12-14 09:15:34 +00:00)
Reformatted test scripts with black 23.12.1
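A reformat-only commit like this can be verified without rerunning the tests by using black's check mode. The snippet below is a minimal sketch, not part of the commit: the helper name and the "tests" target path are assumptions, and it only assumes black 23.12.1 is installed (e.g. pip install black==23.12.1). It uses the same subprocess style the test scripts themselves use.

# Minimal sketch: confirm the files already conform to black 23.12.1 without rewriting them.
# The function name and the "tests" path are illustrative assumptions.
import subprocess


def black_check(paths=("tests",)):
    """Return True if `black --check` reports nothing left to reformat."""
    result = subprocess.run(["black", "--check", "--diff", *paths])
    # black exits 0 when formatting is already clean and 1 when it would change files.
    return result.returncode == 0


if __name__ == "__main__":
    print("already formatted:", black_check())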
@@ -9,7 +9,7 @@ import os
 import pytest
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def testdata_dir():
     """Path to the testdata directory."""
-    return os.path.join(os.path.dirname(__file__), 'testdata')
+    return os.path.join(os.path.dirname(__file__), "testdata")
@@ -13,8 +13,7 @@ import pytest
 from galvani import res2sqlite
 
 
-have_mdbtools = (subprocess.call(['which', 'mdb-export'],
-                                 stdout=subprocess.DEVNULL) == 0)
+have_mdbtools = subprocess.call(["which", "mdb-export"], stdout=subprocess.DEVNULL) == 0
 
 
 def test_res2sqlite_help():
@@ -22,39 +21,47 @@ def test_res2sqlite_help():
 
     This should work even when mdbtools is not installed.
     """
-    help_output = subprocess.check_output(['res2sqlite', '--help'])
-    assert b'Convert Arbin .res files to sqlite3 databases' in help_output
+    help_output = subprocess.check_output(["res2sqlite", "--help"])
+    assert b"Convert Arbin .res files to sqlite3 databases" in help_output
 
 
-@pytest.mark.skipif(have_mdbtools, reason='This tests the failure when mdbtools is not installed')
+@pytest.mark.skipif(
+    have_mdbtools, reason="This tests the failure when mdbtools is not installed"
+)
 def test_convert_Arbin_no_mdbtools(testdata_dir, tmpdir):
     """Checks that the conversion fails with an appropriate error message."""
-    res_file = os.path.join(testdata_dir, 'arbin1.res')
-    sqlite_file = os.path.join(str(tmpdir), 'arbin1.s3db')
-    with pytest.raises(RuntimeError, match="Could not locate the `mdb-export` executable."):
+    res_file = os.path.join(testdata_dir, "arbin1.res")
+    sqlite_file = os.path.join(str(tmpdir), "arbin1.s3db")
+    with pytest.raises(
+        RuntimeError, match="Could not locate the `mdb-export` executable."
+    ):
         res2sqlite.convert_arbin_to_sqlite(res_file, sqlite_file)
 
 
-@pytest.mark.skipif(not have_mdbtools, reason='Reading the Arbin file requires MDBTools')
-@pytest.mark.parametrize('basename', ['arbin1', 'UM34_Test005E'])
+@pytest.mark.skipif(
+    not have_mdbtools, reason="Reading the Arbin file requires MDBTools"
+)
+@pytest.mark.parametrize("basename", ["arbin1", "UM34_Test005E"])
 def test_convert_Arbin_to_sqlite_function(testdata_dir, tmpdir, basename):
     """Convert an Arbin file to SQLite using the functional interface."""
-    res_file = os.path.join(testdata_dir, basename + '.res')
-    sqlite_file = os.path.join(str(tmpdir), basename + '.s3db')
+    res_file = os.path.join(testdata_dir, basename + ".res")
+    sqlite_file = os.path.join(str(tmpdir), basename + ".s3db")
     res2sqlite.convert_arbin_to_sqlite(res_file, sqlite_file)
     assert os.path.isfile(sqlite_file)
     with sqlite3.connect(sqlite_file) as conn:
-        csr = conn.execute('SELECT * FROM Channel_Normal_Table;')
+        csr = conn.execute("SELECT * FROM Channel_Normal_Table;")
         csr.fetchone()
 
 
-@pytest.mark.skipif(not have_mdbtools, reason='Reading the Arbin file requires MDBTools')
+@pytest.mark.skipif(
+    not have_mdbtools, reason="Reading the Arbin file requires MDBTools"
+)
 def test_convert_cmdline(testdata_dir, tmpdir):
     """Checks that the conversion fails with an appropriate error message."""
-    res_file = os.path.join(testdata_dir, 'arbin1.res')
-    sqlite_file = os.path.join(str(tmpdir), 'arbin1.s3db')
-    subprocess.check_call(['res2sqlite', res_file, sqlite_file])
+    res_file = os.path.join(testdata_dir, "arbin1.res")
+    sqlite_file = os.path.join(str(tmpdir), "arbin1.s3db")
+    subprocess.check_call(["res2sqlite", res_file, sqlite_file])
     assert os.path.isfile(sqlite_file)
     with sqlite3.connect(sqlite_file) as conn:
-        csr = conn.execute('SELECT * FROM Channel_Normal_Table;')
+        csr = conn.execute("SELECT * FROM Channel_Normal_Table;")
         csr.fetchone()
@@ -17,33 +17,55 @@ from galvani.BioLogic import MPTfileCSV  # not exported
 
 
 def test_open_MPT(testdata_dir):
-    mpt1, comments = MPTfile(os.path.join(testdata_dir, 'bio_logic1.mpt'))
+    mpt1, comments = MPTfile(os.path.join(testdata_dir, "bio_logic1.mpt"))
     assert comments == []
     assert mpt1.dtype.names == (
-        "mode", "ox/red", "error", "control changes", "Ns changes",
-        "counter inc.", "time/s", "control/V/mA", "Ewe/V", "dQ/mA.h", "P/W",
-        "I/mA", "(Q-Qo)/mA.h", "x",
+        "mode",
+        "ox/red",
+        "error",
+        "control changes",
+        "Ns changes",
+        "counter inc.",
+        "time/s",
+        "control/V/mA",
+        "Ewe/V",
+        "dQ/mA.h",
+        "P/W",
+        "I/mA",
+        "(Q-Qo)/mA.h",
+        "x",
     )
 
 
 def test_open_MPT_fails_for_bad_file(testdata_dir):
-    with pytest.raises(ValueError, match='Bad first line'):
-        MPTfile(os.path.join(testdata_dir, 'bio_logic1.mpr'))
+    with pytest.raises(ValueError, match="Bad first line"):
+        MPTfile(os.path.join(testdata_dir, "bio_logic1.mpr"))
 
 
 def test_open_MPT_csv(testdata_dir):
-    mpt1, comments = MPTfileCSV(os.path.join(testdata_dir, 'bio_logic1.mpt'))
+    mpt1, comments = MPTfileCSV(os.path.join(testdata_dir, "bio_logic1.mpt"))
     assert comments == []
     assert mpt1.fieldnames == [
-        "mode", "ox/red", "error", "control changes", "Ns changes",
-        "counter inc.", "time/s", "control/V/mA", "Ewe/V", "dq/mA.h", "P/W",
-        "<I>/mA", "(Q-Qo)/mA.h", "x",
+        "mode",
+        "ox/red",
+        "error",
+        "control changes",
+        "Ns changes",
+        "counter inc.",
+        "time/s",
+        "control/V/mA",
+        "Ewe/V",
+        "dq/mA.h",
+        "P/W",
+        "<I>/mA",
+        "(Q-Qo)/mA.h",
+        "x",
     ]
 
 
 def test_open_MPT_csv_fails_for_bad_file(testdata_dir):
     with pytest.raises((ValueError, UnicodeDecodeError)):
-        MPTfileCSV(os.path.join(testdata_dir, 'bio_logic1.mpr'))
+        MPTfileCSV(os.path.join(testdata_dir, "bio_logic1.mpr"))
 
 
 def test_colID_map_uniqueness():
@@ -59,13 +81,16 @@ def test_colID_map_uniqueness():
     assert not set(field_names).intersection(flag_names)
 
 
-@pytest.mark.parametrize('colIDs, expected', [
-    ([1, 2, 3], [('flags', 'u1')]),
-    ([4, 6], [('time/s', '<f8'), ('Ewe/V', '<f4')]),
-    ([1, 4, 21], [('flags', 'u1'), ('time/s', '<f8')]),
-    ([4, 6, 4], [('time/s', '<f8'), ('Ewe/V', '<f4'), ('time/s 2', '<f8')]),
-    ([4, 9999], NotImplementedError),
-])
+@pytest.mark.parametrize(
+    "colIDs, expected",
+    [
+        ([1, 2, 3], [("flags", "u1")]),
+        ([4, 6], [("time/s", "<f8"), ("Ewe/V", "<f4")]),
+        ([1, 4, 21], [("flags", "u1"), ("time/s", "<f8")]),
+        ([4, 6, 4], [("time/s", "<f8"), ("Ewe/V", "<f4"), ("time/s 2", "<f8")]),
+        ([4, 9999], NotImplementedError),
+    ],
+)
 def test_colID_to_dtype(colIDs, expected):
     """Test converting column ID to numpy dtype."""
     if isinstance(expected, type) and issubclass(expected, Exception):
@@ -77,14 +102,17 @@ def test_colID_to_dtype(colIDs, expected):
         assert dtype == expected_dtype
 
 
-@pytest.mark.parametrize('data, expected', [
-    ('02/23/17', date(2017, 2, 23)),
-    ('10-03-05', date(2005, 10, 3)),
-    ('11.12.20', date(2020, 11, 12)),
-    (b'01/02/03', date(2003, 1, 2)),
-    ('13.08.07', ValueError),
-    ('03-04/05', ValueError),
-])
+@pytest.mark.parametrize(
+    "data, expected",
+    [
+        ("02/23/17", date(2017, 2, 23)),
+        ("10-03-05", date(2005, 10, 3)),
+        ("11.12.20", date(2020, 11, 12)),
+        (b"01/02/03", date(2003, 1, 2)),
+        ("13.08.07", ValueError),
+        ("03-04/05", ValueError),
+    ],
+)
 def test_parse_BioLogic_date(data, expected):
     """Test the parse_BioLogic_date function."""
     if isinstance(expected, type) and issubclass(expected, Exception):
@@ -95,51 +123,54 @@ def test_parse_BioLogic_date(data, expected):
         assert result == expected
 
 
-@pytest.mark.parametrize('filename, startdate, enddate', [
-    ('bio_logic1.mpr', '2011-10-29', '2011-10-31'),
-    ('bio_logic2.mpr', '2012-09-27', '2012-09-27'),
-    ('bio_logic3.mpr', '2013-03-27', '2013-03-27'),
-    ('bio_logic4.mpr', '2011-11-01', '2011-11-02'),
-    ('bio_logic5.mpr', '2013-01-28', '2013-01-28'),
-    # bio_logic6.mpr has no end date because it does not have a VMP LOG module
-    ('bio_logic6.mpr', '2012-09-11', None),
-    # C019P-0ppb-A_C01.mpr stores the date in a different format
-    ('C019P-0ppb-A_C01.mpr', '2019-03-14', '2019-03-14'),
-    ('Rapp_Error.mpr', '2010-12-02', '2010-12-02'),
-    ('Ewe_Error.mpr', '2021-11-18', '2021-11-19'),
-])
+@pytest.mark.parametrize(
+    "filename, startdate, enddate",
+    [
+        ("bio_logic1.mpr", "2011-10-29", "2011-10-31"),
+        ("bio_logic2.mpr", "2012-09-27", "2012-09-27"),
+        ("bio_logic3.mpr", "2013-03-27", "2013-03-27"),
+        ("bio_logic4.mpr", "2011-11-01", "2011-11-02"),
+        ("bio_logic5.mpr", "2013-01-28", "2013-01-28"),
+        # bio_logic6.mpr has no end date because it does not have a VMP LOG module
+        ("bio_logic6.mpr", "2012-09-11", None),
+        # C019P-0ppb-A_C01.mpr stores the date in a different format
+        ("C019P-0ppb-A_C01.mpr", "2019-03-14", "2019-03-14"),
+        ("Rapp_Error.mpr", "2010-12-02", "2010-12-02"),
+        ("Ewe_Error.mpr", "2021-11-18", "2021-11-19"),
+    ],
+)
 def test_MPR_dates(testdata_dir, filename, startdate, enddate):
     """Check that the start and end dates in .mpr files are read correctly."""
     mpr = MPRfile(os.path.join(testdata_dir, filename))
-    assert mpr.startdate.strftime('%Y-%m-%d') == startdate
+    assert mpr.startdate.strftime("%Y-%m-%d") == startdate
     if enddate:
-        assert mpr.enddate.strftime('%Y-%m-%d') == enddate
+        assert mpr.enddate.strftime("%Y-%m-%d") == enddate
     else:
-        assert not hasattr(mpr, 'enddate')
+        assert not hasattr(mpr, "enddate")
 
 
 def test_open_MPR_fails_for_bad_file(testdata_dir):
-    with pytest.raises(ValueError, match='Invalid magic for .mpr file'):
-        MPRfile(os.path.join(testdata_dir, 'arbin1.res'))
+    with pytest.raises(ValueError, match="Invalid magic for .mpr file"):
+        MPRfile(os.path.join(testdata_dir, "arbin1.res"))
 
 
 def timestamp_from_comments(comments):
     for line in comments:
-        time_match = re.match(b'Acquisition started on : ([0-9/]+ [0-9:]+)', line)
+        time_match = re.match(b"Acquisition started on : ([0-9/]+ [0-9:]+)", line)
         if time_match:
-            timestamp = datetime.strptime(time_match.group(1).decode('ascii'),
-                                          '%m/%d/%Y %H:%M:%S')
+            timestamp = datetime.strptime(
+                time_match.group(1).decode("ascii"), "%m/%d/%Y %H:%M:%S"
+            )
             return timestamp
     raise AttributeError("No timestamp in comments")
 
 
 def assert_MPR_matches_MPT(mpr, mpt, comments):
 
     def assert_field_matches(fieldname, decimal):
         if fieldname in mpr.dtype.fields:
-            assert_array_almost_equal(mpr.data[fieldname],
-                                      mpt[fieldname],
-                                      decimal=decimal)
+            assert_array_almost_equal(
+                mpr.data[fieldname], mpt[fieldname], decimal=decimal
+            )
 
     def assert_field_exact(fieldname):
         if fieldname in mpr.dtype.fields:
@@ -154,16 +185,16 @@ def assert_MPR_matches_MPT(mpr, mpt, comments):
     # Nothing uses the 0x40 bit of the flags
     assert_array_equal(mpr.get_flag("counter inc."), mpt["counter inc."])
 
-    assert_array_almost_equal(mpr.data["time/s"],
-                              mpt["time/s"],
-                              decimal=2)  # 2 digits in CSV
+    assert_array_almost_equal(
+        mpr.data["time/s"], mpt["time/s"], decimal=2
+    )  # 2 digits in CSV
 
     assert_field_matches("control/V/mA", decimal=6)
     assert_field_matches("control/V", decimal=6)
 
-    assert_array_almost_equal(mpr.data["Ewe/V"],
-                              mpt["Ewe/V"],
-                              decimal=6)  # 32 bit float precision
+    assert_array_almost_equal(
+        mpr.data["Ewe/V"], mpt["Ewe/V"], decimal=6
+    )  # 32 bit float precision
 
     assert_field_matches("dQ/mA.h", decimal=16)  # 64 bit float precision
     assert_field_matches("P/W", decimal=10)  # 32 bit float precision for 1.xxE-5
@@ -178,39 +209,45 @@ def assert_MPR_matches_MPT(mpr, mpt, comments):
         pass
 
 
-@pytest.mark.parametrize('basename', [
-    'bio_logic1',
-    'bio_logic2',
-    # No bio_logic3.mpt file
-    'bio_logic4',
-    # bio_logic5 and bio_logic6 are special cases
-    'CV_C01',
-    '121_CA_455nm_6V_30min_C01',
-    '020-formation_CB5',
-])
+@pytest.mark.parametrize(
+    "basename",
+    [
+        "bio_logic1",
+        "bio_logic2",
+        # No bio_logic3.mpt file
+        "bio_logic4",
+        # bio_logic5 and bio_logic6 are special cases
+        "CV_C01",
+        "121_CA_455nm_6V_30min_C01",
+        "020-formation_CB5",
+    ],
+)
 def test_MPR_matches_MPT(testdata_dir, basename):
     """Check the MPR parser against the MPT parser.
 
     Load a binary .mpr file and a text .mpt file which should contain
     exactly the same data. Check that the loaded data actually match.
     """
-    binpath = os.path.join(testdata_dir, basename + '.mpr')
-    txtpath = os.path.join(testdata_dir, basename + '.mpt')
+    binpath = os.path.join(testdata_dir, basename + ".mpr")
+    txtpath = os.path.join(testdata_dir, basename + ".mpt")
     mpr = MPRfile(binpath)
-    mpt, comments = MPTfile(txtpath, encoding='latin1')
+    mpt, comments = MPTfile(txtpath, encoding="latin1")
    assert_MPR_matches_MPT(mpr, mpt, comments)
 
 
 def test_MPR5_matches_MPT5(testdata_dir):
-    mpr = MPRfile(os.path.join(testdata_dir, 'bio_logic5.mpr'))
-    mpt, comments = MPTfile((re.sub(b'\tXXX\t', b'\t0\t', line) for line in
-                             open(os.path.join(testdata_dir, 'bio_logic5.mpt'),
-                                  mode='rb')))
+    mpr = MPRfile(os.path.join(testdata_dir, "bio_logic5.mpr"))
+    mpt, comments = MPTfile(
+        (
+            re.sub(b"\tXXX\t", b"\t0\t", line)
+            for line in open(os.path.join(testdata_dir, "bio_logic5.mpt"), mode="rb")
+        )
+    )
     assert_MPR_matches_MPT(mpr, mpt, comments)
 
 
 def test_MPR6_matches_MPT6(testdata_dir):
-    mpr = MPRfile(os.path.join(testdata_dir, 'bio_logic6.mpr'))
-    mpt, comments = MPTfile(os.path.join(testdata_dir, 'bio_logic6.mpt'))
+    mpr = MPRfile(os.path.join(testdata_dir, "bio_logic6.mpr"))
+    mpt, comments = MPTfile(os.path.join(testdata_dir, "bio_logic6.mpt"))
     mpr.data = mpr.data[:958]  # .mpt file is incomplete
     assert_MPR_matches_MPT(mpr, mpt, comments)