[chronojump/importer-encoder] chronojump-importer: refactors a huge function into a new class (ImportSession) and methods.
- From: Carles Pina i Estany <carlespina@src.gnome.org>
- To: commits-list@gnome.org
- Cc:
- Subject: [chronojump/importer-encoder] chronojump-importer: refactors a huge function into a new class (ImportSession) and methods.
- Date: Thu, 6 Oct 2016 17:39:08 +0000 (UTC)
commit 9986197c65b97d895d1f0e206005a20e544b49b5
Author: Carles Pina i Estany <carles@pina.cat>
Date: Thu Oct 6 19:38:21 2016 +0200
chronojump-importer: refactors a huge function into a new class (ImportSession) and methods.
src/chronojump-importer/chronojump_importer.py | 477 +++++++++++---------
.../chronojump_importer_test.py | 3 +-
2 files changed, 257 insertions(+), 223 deletions(-)
---
diff --git a/src/chronojump-importer/chronojump_importer.py b/src/chronojump-importer/chronojump_importer.py
index 7d03574..d6f94ce 100755
--- a/src/chronojump-importer/chronojump_importer.py
+++ b/src/chronojump-importer/chronojump_importer.py
@@ -366,229 +366,261 @@ class Database:
self._cursor.execute(sql, where_values)
-def import_database(source_path, destination_path, source_session):
- """ Imports the session source_session from source_db into destination_db """
-
- logging.debug("source path:" + source_path)
- logging.debug("destination path:" + destination_path)
-
- source_db = Database(source_path, read_only=True)
- destination_db = Database(destination_path, read_only=False)
-
- # Imports the session
- session = source_db.read(table_name="Session",
- where_condition="Session.uniqueID={}".format(source_session))
-
- number_of_matching_sessions = len(session)
-
- if number_of_matching_sessions == 0:
- print("Trying to import {session} from {source_file} and it doesn't exist. Cancelling...".format(
- session=source_session,
- source_file=source_path))
- sys.exit(1)
- elif number_of_matching_sessions > 1:
- print("Found {number_of_sessions} in {source_file} which is not possible. Cancelling...".format(
- number_of_sessions=number_of_matching_sessions,
- source_file=source_path))
- sys.exit(1)
-
- destination_db.write(table=session, matches_columns=None,
- avoids_duplicate_column="name")
-
- new_session_id = session[0].get('new_uniqueID')
-
- # Imports JumpType table
- jump_types = source_db.read(table_name="JumpType",
- where_condition="Session.uniqueID={}".format(source_session),
- join_clause="LEFT JOIN Jump ON JumpType.name=Jump.type LEFT JOIN Session ON
Jump.sessionID=Session.uniqueID",
- group_by_clause="JumpType.uniqueID")
-
- destination_db.write(table=jump_types,
- matches_columns=destination_db.column_names("JumpType", ["uniqueID"]),
- avoids_duplicate_column="name")
-
- # Imports JumpRjType table
- jump_rj_types = source_db.read(table_name="JumpRjType",
- where_condition="Session.uniqueID={}".format(source_session),
- join_clause="LEFT JOIN JumpRj ON JumpRjType.name=JumpRj.type LEFT JOIN
Session on JumpRj.sessionID=Session.uniqueID",
- group_by_clause="JumpRjType.uniqueID")
-
- destination_db.write(table=jump_rj_types,
- matches_columns=destination_db.column_names("JumpRjType", ["uniqueID"]),
- avoids_duplicate_column="name")
-
- # Imports RunTypes table
- run_types = source_db.read(table_name="RunType",
- where_condition="Session.uniqueID={}".format(source_session),
- join_clause="LEFT JOIN Run ON RunType.name=Run.type LEFT JOIN Session ON
Run.sessionID=Session.uniqueID",
- group_by_clause="RunType.uniqueID")
-
- destination_db.write(table=run_types,
- matches_columns=destination_db.column_names("RunType", ["uniqueID"]),
- avoids_duplicate_column="name")
-
- # Imports RunIntervalTypes table
- run_interval_types = source_db.read(table_name="RunIntervalType",
- where_condition="Session.uniqueID={}".format(source_session),
- join_clause="LEFT JOIN RunInterval ON
RunIntervalType.name=RunInterval.type LEFT JOIN Session on RunInterval.sessionID=Session.uniqueID",
- group_by_clause="RunIntervalType.uniqueID")
-
- destination_db.write(table=run_interval_types,
- matches_columns=destination_db.column_names("RunIntervalType", ["uniqueID"]),
- avoids_duplicate_column="name")
-
- # Imports PulseTypes table
- pulse_types = source_db.read(table_name="PulseType",
- where_condition="Session.uniqueID={}".format(source_session),
- join_clause="LEFT JOIN Pulse ON PulseType.name=Pulse.type LEFT JOIN Session
on Pulse.sessionID=Session.uniqueID",
- group_by_clause="PulseType.uniqueID")
-
- destination_db.write(table=pulse_types,
- matches_columns=destination_db.column_names("PulseType", ["uniqueID"]),
- avoids_duplicate_column="name")
-
- # Imports Persons77 used by JumpRj table
- persons77_jump_rj = source_db.read(table_name="Person77",
- where_condition="JumpRj.sessionID={}".format(source_session),
- join_clause="LEFT JOIN JumpRj ON Person77.uniqueID=JumpRj.personID",
+class ImportSession:
+ def __init__(self, source_path, destination_path):
+ """ Creates the object to import the session source_session from source_db into destination_db. """
+
+ logging.debug("source path:" + source_path)
+ logging.debug("destination path:" + destination_path)
+
+ # Kept so that _import_session() can name the source file in error messages.
+ self.source_path = source_path
+
+ self.source_db = Database(source_path, read_only=True)
+ self.destination_db = Database(destination_path, read_only=False)
+
+ self.source_session = None
+ self.new_session_id = None
+
+ self.persons77 = None
+
+ def import_as_new_session(self, source_session):
+ self.source_session = source_session
+ self.new_session_id = self._import_session()
+
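+ # Persons77 must be imported before the jump/run/pulse/encoder tables:
+ # each of those importers remaps personID values through self.persons77.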
+ self.persons77 = self._import_persons77()
+
+ self._import_person_session77()
+
+ self._import_jumps()
+ self._import_runs()
+ self._import_pulse()
+ self._import_encoder()
+
+ def _import_session(self):
+ """
+ Imports the session identified by self.source_session (only the Session table).
+ Returns the new session ID.
+ """
+
+ session = self.source_db.read(table_name="Session",
+ where_condition="Session.uniqueID={}".format(self.source_session))
+
+ number_of_matching_sessions = len(session)
+
+ if number_of_matching_sessions == 0:
+ print("Trying to import {session} from {source_file} and it doesn't exist. Cancelling...".format(
+ session=self.source_session,
+ source_file=self.source_path))
+ sys.exit(1)
+ elif number_of_matching_sessions > 1:
+ print("Found {number_of_sessions} in {source_file} which is not possible. Cancelling...".format(
+ number_of_sessions=number_of_matching_sessions,
+ source_file=self.source_path))
+ sys.exit(1)
+
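+ # Database.write() annotates each imported row with its new primary key
+ # ('new_uniqueID'); it is read back below to return the new session ID.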
+ self.destination_db.write(table=session, matches_columns=None,
+ avoids_duplicate_column="name")
+
+ return session[0].get('new_uniqueID')
+
+ def _import_persons77(self):
+ # Imports Persons77 used by JumpRj table
+ persons77_jump_rj = self.source_db.read(table_name="Person77",
+ where_condition="JumpRj.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN JumpRj ON
Person77.uniqueID=JumpRj.personID",
+ group_by_clause="Person77.uniqueID")
+
+ # Imports Person77 used by Jump table
+ persons77_jump = self.source_db.read(table_name="Person77",
+ where_condition="Jump.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN Jump ON Person77.uniqueID=Jump.personID",
+ group_by_clause="Person77.uniqueID")
+
+ # Imports Person77 used by Run table
+ persons77_run = self.source_db.read(table_name="Person77",
+ where_condition="Run.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN Run ON Person77.uniqueID=Run.personID",
group_by_clause="Person77.uniqueID")
- # Imports Person77 used by Jump table
- persons77_jump = source_db.read(table_name="Person77",
- where_condition="Jump.sessionID={}".format(source_session),
- join_clause="LEFT JOIN Jump ON Person77.uniqueID=Jump.personID",
- group_by_clause="Person77.uniqueID")
-
- # Imports Person77 used by Run table
- persons77_run = source_db.read(table_name="Person77",
- where_condition="Run.sessionID={}".format(source_session),
- join_clause="LEFT JOIN Run ON Person77.uniqueID=Run.personID",
- group_by_clause="Person77.uniqueID")
-
- # Imports Person77 used by RunInterval table
- persons77_run_interval = source_db.read(table_name="Person77",
- where_condition="RunInterval.sessionID={}".format(source_session),
- join_clause="LEFT JOIN RunInterval ON
Person77.uniqueID=RunInterval.personID",
- group_by_clause="Person77.uniqueID")
-
- # Imports Person77 used by Pulse table
- persons77_pulse = source_db.read(table_name="Person77",
- where_condition="Pulse.sessionID={}".format(source_session),
- join_clause="LEFT JOIN Pulse ON Person77.uniqueID=Pulse.personID",
- group_by_clause="Pulse.uniqueID")
-
- # Imports Person77 used by Encoder
- persons77_encoder = source_db.read(table_name="Person77",
- where_condition="Encoder.sessionID={}".format(source_session),
- join_clause="LEFT JOIN Encoder ON Person77.uniqueID=Encoder.personID",
- group_by_clause="Encoder.uniqueID")
-
- # Imports Person77 used by Encoder1RM
- persons77_encoder_1rm = source_db.read(table_name="Person77",
- where_condition="Encoder1RM.sessionID={}".format(source_session),
- join_clause="LEFT JOIN Encoder1RM ON
Person77.uniqueID=Encoder1RM.personID",
- group_by_clause="Encoder1RM.uniqueID")
-
- persons77 = Table("person77")
- persons77.concatenate_table(persons77_jump)
- persons77.concatenate_table(persons77_jump_rj)
- persons77.concatenate_table(persons77_run)
- persons77.concatenate_table(persons77_run_interval)
- persons77.concatenate_table(persons77_pulse)
- persons77.concatenate_table(persons77_encoder)
- persons77.concatenate_table(persons77_encoder_1rm)
- persons77.remove_duplicates()
-
- destination_db.write(table=persons77,
- matches_columns=["name"])
-
- # Imports JumpRj table (with the new Person77's uniqueIDs)
- jump_rj = source_db.read(table_name="JumpRj",
- where_condition="JumpRj.sessionID={}".format(source_session))
-
- jump_rj.update_ids("personID", persons77, "uniqueID", "new_uniqueID")
- jump_rj.update_session_ids(new_session_id)
- jump_rj.update_ids("type", persons77, "old_name", "new_name")
-
- destination_db.write(table=jump_rj, matches_columns=None)
-
- # Imports Jump table (with the new Person77's uniqueIDs)
- jump = source_db.read(table_name="Jump",
- where_condition="Jump.sessionID={}".format(source_session))
-
- jump.update_ids("personID", persons77, "uniqueID", "new_uniqueID")
- jump.update_session_ids(new_session_id)
- jump.update_ids("type", jump_types, "old_name", "new_name")
-
- destination_db.write(table=jump, matches_columns=None)
-
- # Imports Run table (with the new Person77's uniqueIDs)
- run = source_db.read(table_name="Run",
- where_condition="Run.sessionID={}".format(source_session))
- run.update_ids("personID", persons77, "uniqueID", "new_uniqueID")
- run.update_session_ids(new_session_id)
- run.update_ids("type", run_types, "old_name", "new_name")
-
- # Imports RunInterval table (with the new Person77's uniqueIDs)
- run_interval = source_db.read(table_name="RunInterval",
- where_condition="RunInterval.sessionID={}".format(source_session))
- run_interval.update_ids("personID", persons77, "uniqueID", "new_uniqueID")
- run_interval.update_session_ids(new_session_id)
- run_interval.update_ids("type", run_interval_types, "old_name", "new_name")
-
- # Imports PersonSession77
- person_session_77 = source_db.read(table_name="PersonSession77",
- where_condition="PersonSession77.sessionID={}".format(source_session))
- person_session_77.update_ids("personID", persons77, "uniqueID", "new_uniqueID")
- person_session_77.update_session_ids(new_session_id)
- destination_db.write(table=person_session_77, matches_columns=None)
-
- # Imports EncoderExercise
- encoder_exercise = source_db.read(table_name="EncoderExercise",
- where_condition="Encoder.sessionID={}".format(source_session),
- join_clause="LEFT JOIN Encoder ON
Encoder.exerciseID=EncoderExercise.uniqueID",
- group_by_clause="EncoderExercise.uniqueID")
- destination_db.write(table=encoder_exercise,
- matches_columns=destination_db.column_names("EncoderExercise", ["uniqueID"]))
-
- # Imports Encoder1RM
- encoder_1rm = source_db.read(table_name="Encoder1RM",
- where_condition="Encoder1RM.sessionID={}".format(source_session))
- encoder_1rm.update_session_ids(new_session_id)
- encoder_1rm.update_ids("personID", persons77, "uniqueID", "new_uniqueID")
- destination_db.write(table=encoder_1rm,
- matches_columns=None)
-
- # Imports Encoder
- encoder = source_db.read(table_name="Encoder",
- where_condition="Encoder.sessionID={}".format(source_session))
- encoder.update_ids("personID", persons77, "uniqueID", "new_uniqueID")
- encoder.update_ids("exerciseID", encoder_1rm, "old_exerciseID", "new_exerciseID")
- encoder.update_session_ids(new_session_id)
- destination_db.write(table=encoder,
- matches_columns=None)
-
- # Imports EncoderSignalCurve
- encoder_signal_curve_signals = source_db.read(table_name="EncoderSignalCurve",
- where_condition="Encoder.signalOrCurve='signal' AND
Encoder.sessionID={}".format(
- source_session),
- join_clause="LEFT JOIN Encoder ON
Encoder.uniqueID=EncoderSignalCurve.SignalID")
-
- encoder_signal_curve_curves = source_db.read(table_name="EncoderSignalCurve",
- where_condition="Encoder.signalOrCurve='curve' AND
Encoder.sessionID={}".format(
- source_session),
- join_clause="LEFT JOIN Encoder ON
Encoder.uniqueID=EncoderSignalCurve.curveID")
-
- encoder_signal_curve = Table("encoderSignalCurve")
- encoder_signal_curve.concatenate_table(encoder_signal_curve_signals)
- encoder_signal_curve.concatenate_table(encoder_signal_curve_curves)
-
- encoder_signal_curve.update_ids("signalID", encoder, "old_signalID", "new_signalID")
- encoder_signal_curve.update_ids("curveID", encoder, "old_curveID", "new_curveID")
-
- destination_db.write(table=encoder_signal_curve,
- avoids_duplicate_column=None,
- matches_columns=None)
+ # Imports Person77 used by RunInterval table
+ persons77_run_interval = self.source_db.read(table_name="Person77",
+ where_condition="RunInterval.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN RunInterval ON
Person77.uniqueID=RunInterval.personID",
+ group_by_clause="Person77.uniqueID")
+
+ # Imports Person77 used by Pulse table
+ persons77_pulse = self.source_db.read(table_name="Person77",
+ where_condition="Pulse.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN Pulse ON Person77.uniqueID=Pulse.personID",
+ group_by_clause="Pulse.uniqueID")
+
+ # Imports Person77 used by Encoder
+ persons77_encoder = self.source_db.read(table_name="Person77",
+ where_condition="Encoder.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN Encoder ON
Person77.uniqueID=Encoder.personID",
+ group_by_clause="Encoder.uniqueID")
+
+ # Imports Person77 used by Encoder1RM
+ persons77_encoder_1rm = self.source_db.read(table_name="Person77",
+ where_condition="Encoder1RM.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN Encoder1RM ON
Person77.uniqueID=Encoder1RM.personID",
+ group_by_clause="Encoder1RM.uniqueID")
+
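+ # Merges the per-table person lists and removes duplicates; the write
+ # below matches rows against persons that already exist by name.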
+ persons77 = Table("person77")
+ persons77.concatenate_table(persons77_jump)
+ persons77.concatenate_table(persons77_jump_rj)
+ persons77.concatenate_table(persons77_run)
+ persons77.concatenate_table(persons77_run_interval)
+ persons77.concatenate_table(persons77_pulse)
+ persons77.concatenate_table(persons77_encoder)
+ persons77.concatenate_table(persons77_encoder_1rm)
+ persons77.remove_duplicates()
+
+ self.destination_db.write(table=persons77,
+ matches_columns=["name"])
+
+ return persons77
+
+ def _import_jumps(self):
+ # Imports JumpType table
+ jump_types = self.source_db.read(table_name="JumpType",
+ where_condition="Session.uniqueID={}".format(self.source_session),
+ join_clause="LEFT JOIN Jump ON JumpType.name=Jump.type LEFT JOIN Session
ON Jump.sessionID=Session.uniqueID",
+ group_by_clause="JumpType.uniqueID")
+
+ self.destination_db.write(table=jump_types,
+ matches_columns=self.destination_db.column_names("JumpType", ["uniqueID"]),
+ avoids_duplicate_column="name")
+
+ # Imports JumpRjType table
+ jump_rj_types = self.source_db.read(table_name="JumpRjType",
+ where_condition="Session.uniqueID={}".format(self.source_session),
+ join_clause="LEFT JOIN JumpRj ON JumpRjType.name=JumpRj.type LEFT
JOIN Session on JumpRj.sessionID=Session.uniqueID",
+ group_by_clause="JumpRjType.uniqueID")
+
+ self.destination_db.write(table=jump_rj_types,
+ matches_columns=self.destination_db.column_names("JumpRjType", ["uniqueID"]),
+ avoids_duplicate_column="name")
+
+ # Imports JumpRj table (with the new Person77's uniqueIDs)
+ jump_rj = self.source_db.read(table_name="JumpRj",
+ where_condition="JumpRj.sessionID={}".format(self.source_session))
+
+ jump_rj.update_ids("personID", self.persons77, "uniqueID", "new_uniqueID")
+ jump_rj.update_session_ids(self.new_session_id)
+ jump_rj.update_ids("type", self.persons77, "old_name", "new_name")
+
+ self.destination_db.write(table=jump_rj, matches_columns=None)
+
+ # Imports Jump table (with the new Person77's uniqueIDs)
+ jump = self.source_db.read(table_name="Jump",
+ where_condition="Jump.sessionID={}".format(self.source_session))
+
+ jump.update_ids("personID", self.persons77, "uniqueID", "new_uniqueID")
+ jump.update_session_ids(self.new_session_id)
+ jump.update_ids("type", jump_types, "old_name", "new_name")
+
+ self.destination_db.write(table=jump, matches_columns=None)
+
+ def _import_runs(self):
+ # Imports RunTypes table
+ run_types = self.source_db.read(table_name="RunType",
+ where_condition="Session.uniqueID={}".format(self.source_session),
+ join_clause="LEFT JOIN Run ON RunType.name=Run.type LEFT JOIN Session ON
Run.sessionID=Session.uniqueID",
+ group_by_clause="RunType.uniqueID")
+
+ self.destination_db.write(table=run_types,
+ matches_columns=self.destination_db.column_names("RunType", ["uniqueID"]),
+ avoids_duplicate_column="name")
+
+ # Imports RunIntervalTypes table
+ run_interval_types = self.source_db.read(table_name="RunIntervalType",
+ where_condition="Session.uniqueID={}".format(self.source_session),
+ join_clause="LEFT JOIN RunInterval ON
RunIntervalType.name=RunInterval.type LEFT JOIN Session on RunInterval.sessionID=Session.uniqueID",
+ group_by_clause="RunIntervalType.uniqueID")
+
+ self.destination_db.write(table=run_interval_types,
+ matches_columns=self.destination_db.column_names("RunIntervalType",
["uniqueID"]),
+ avoids_duplicate_column="name")
+
+ # Imports Run table (with the new Person77's uniqueIDs)
+ run = self.source_db.read(table_name="Run",
+ where_condition="Run.sessionID={}".format(self.source_session))
+ run.update_ids("personID", self.persons77, "uniqueID", "new_uniqueID")
+ run.update_session_ids(self.new_session_id)
+ run.update_ids("type", run_types, "old_name", "new_name")
+
+ # Imports RunInterval table (with the new Person77's uniqueIDs)
+ run_interval = self.source_db.read(table_name="RunInterval",
+ where_condition="RunInterval.sessionID={}".format(self.source_session))
+ run_interval.update_ids("personID", self.persons77, "uniqueID", "new_uniqueID")
+ run_interval.update_session_ids(self.new_session_id)
+ run_interval.update_ids("type", run_interval_types, "old_name", "new_name")
+
+ def _import_pulse(self):
+ # Imports PulseTypes table
+ pulse_types = self.source_db.read(table_name="PulseType",
+ where_condition="Session.uniqueID={}".format(self.source_session),
+ join_clause="LEFT JOIN Pulse ON PulseType.name=Pulse.type LEFT JOIN
Session on Pulse.sessionID=Session.uniqueID",
+ group_by_clause="PulseType.uniqueID")
+
+ self.destination_db.write(table=pulse_types,
+ matches_columns=self.destination_db.column_names("PulseType", ["uniqueID"]),
+ avoids_duplicate_column="name")
+
+ def _import_person_session77(self):
+ # Imports PersonSession77
+ person_session_77 = self.source_db.read(table_name="PersonSession77",
+ where_condition="PersonSession77.sessionID={}".format(self.source_session))
+ person_session_77.update_ids("personID", self.persons77, "uniqueID", "new_uniqueID")
+ person_session_77.update_session_ids(self.new_session_id)
+ self.destination_db.write(table=person_session_77, matches_columns=None)
+
+ def _import_encoder(self):
+ # Imports EncoderExercise
+ encoder_exercise = self.source_db.read(table_name="EncoderExercise",
+ where_condition="Encoder.sessionID={}".format(self.source_session),
+ join_clause="LEFT JOIN Encoder ON
Encoder.exerciseID=EncoderExercise.uniqueID",
+ group_by_clause="EncoderExercise.uniqueID")
+ self.destination_db.write(table=encoder_exercise,
+ matches_columns=self.destination_db.column_names("EncoderExercise",
["uniqueID"]))
+
+ # Imports Encoder1RM
+ encoder_1rm = self.source_db.read(table_name="Encoder1RM",
+ where_condition="Encoder1RM.sessionID={}".format(self.source_session))
+ encoder_1rm.update_session_ids(self.new_session_id)
+ encoder_1rm.update_ids("personID", self.persons77, "uniqueID", "new_uniqueID")
+ self.destination_db.write(table=encoder_1rm,
+ matches_columns=None)
+
+ # Imports Encoder
+ encoder = self.source_db.read(table_name="Encoder",
+ where_condition="Encoder.sessionID={}".format(self.source_session))
+ encoder.update_ids("personID", self.persons77, "uniqueID", "new_uniqueID")
+ encoder.update_ids("exerciseID", encoder_1rm, "old_exerciseID", "new_exerciseID")
+ encoder.update_session_ids(self.new_session_id)
+ self.destination_db.write(table=encoder,
+ matches_columns=None)
+
+ # Imports EncoderSignalCurve
+ encoder_signal_curve_signals = self.source_db.read(table_name="EncoderSignalCurve",
+ where_condition="Encoder.signalOrCurve='signal' AND
Encoder.sessionID={}".format(
+ self.source_session),
+ join_clause="LEFT JOIN Encoder ON
Encoder.uniqueID=EncoderSignalCurve.SignalID")
+
+ encoder_signal_curve_curves = self.source_db.read(table_name="EncoderSignalCurve",
+ where_condition="Encoder.signalOrCurve='curve' AND
Encoder.sessionID={}".format(
+ self.source_session),
+ join_clause="LEFT JOIN Encoder ON
Encoder.uniqueID=EncoderSignalCurve.curveID")
+
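+ # Signals and curves join Encoder through different columns (SignalID
+ # vs. curveID), so they are read separately and merged into one table.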
+ encoder_signal_curve = Table("encoderSignalCurve")
+ encoder_signal_curve.concatenate_table(encoder_signal_curve_signals)
+ encoder_signal_curve.concatenate_table(encoder_signal_curve_curves)
+
+ encoder_signal_curve.update_ids("signalID", encoder, "old_signalID", "new_signalID")
+ encoder_signal_curve.update_ids("curveID", encoder, "old_curveID", "new_curveID")
+
+ self.destination_db.write(table=encoder_signal_curve,
+ avoids_duplicate_column=None,
+ matches_columns=None)
def json_information(database_path):
@@ -637,7 +669,8 @@ def process_command_line():
show_json_information(args.source)
else:
if args.destination and args.source_session:
- import_database(args.source, args.destination, args.source_session)
+ importer = ImportSession(args.source, args.destination)
+ importer.import_as_new_session(args.source_session)
else:
print("if --information not used --source, --destination and --source_session parameters are
required")
diff --git a/src/chronojump-importer/chronojump_importer_test.py b/src/chronojump-importer/chronojump_importer_test.py
index 31fb59b..e3b9fc0 100755
--- a/src/chronojump-importer/chronojump_importer_test.py
+++ b/src/chronojump-importer/chronojump_importer_test.py
@@ -44,7 +44,8 @@ class TestImporter(unittest.TestCase):
shutil.copy("tests/{}".format(destination_file_name), destination_file_path)
shutil.copy("tests/{}".format(destination_file_name), original_destination_file_path)
- chronojump_importer.import_database(source_file_path, destination_file_path, 1)
+ importer = chronojump_importer.ImportSession(source_file_path, destination_file_path)
+ importer.import_as_new_session(1)
os.system("echo .dump | sqlite3 {} > {}/destination.sql".format(destination_file_path, temporary_directory_path))
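
For reference, a minimal sketch of the call pattern this commit introduces (the paths and the session ID are hypothetical; ImportSession and import_as_new_session are the entry points shown in the diff above):

    import chronojump_importer

    # Build the importer once for a source/destination database pair...
    importer = chronojump_importer.ImportSession("/tmp/source.db", "/tmp/destination.db")

    # ...then import a session by its uniqueID in the source database.
    importer.import_as_new_session(1)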