diff --git a/tools/otagui/ota_interface.py b/tools/otagui/ota_interface.py index a39705318..a05a0e2c9 100644 --- a/tools/otagui/ota_interface.py +++ b/tools/otagui/ota_interface.py @@ -50,6 +50,10 @@ class JobInfo: self.isIncremental = True if self.partial: self.isPartial = True + else: + self.partial = [] + if type(self.partial) == str: + self.partial = self.partial.split(',') def to_sql_form_dict(self): """ @@ -134,7 +138,27 @@ class ProcessesManagement: ) """) + def insert_database(self, job_info): + """ + Insert the job_info into the database + Args: + job_info: JobInfo + """ + with sqlite3.connect(self.path) as connect: + cursor = connect.cursor() + cursor.execute(""" + INSERT INTO Jobs (ID, TargetPath, IncrementalPath, Verbose, Partial, OutputPath, Status, Downgrade, OtherFlags, STDOUT, STDERR, StartTime, Finishtime) + VALUES (:id, :target, :incremental, :verbose, :partial, :output, :status, :downgrade, :extra, :stdout, :stderr, :start_time, :finish_time) + """, job_info.to_sql_form_dict()) + def get_status_by_ID(self, id): + """ + Return the status of job as a instance of JobInfo + Args: + id: string + Return: + JobInfo + """ with sqlite3.connect(self.path) as connect: cursor = connect.cursor() logging.info(id) @@ -147,6 +171,11 @@ class ProcessesManagement: return status def get_status(self): + """ + Return the status of all jobs as a list of JobInfo + Return: + List[JobInfo] + """ with sqlite3.connect(self.path) as connect: cursor = connect.cursor() cursor.execute(""" @@ -158,6 +187,13 @@ class ProcessesManagement: return statuses def update_status(self, id, status, finish_time): + """ + Change the status and finish time of job in the database + Args: + id: string + status: string + finish_time: int + """ with sqlite3.connect(self.path) as connect: cursor = connect.cursor() cursor.execute(""" @@ -167,9 +203,13 @@ class ProcessesManagement: (status, finish_time, id)) def ota_run(self, command, id): - # Start a subprocess and collect the output + """ + 
Initiate a subprocess to run the ota generation. Wait until it finishes and update + the record in the database. + """ stderr_pipes = pipes.Template() stdout_pipes = pipes.Template() + # TODO(lishutong): Enable user to use self-defined stderr/stdout path ferr = stderr_pipes.open(os.path.join( 'output', 'stderr.'+str(id)), 'w') fout = stdout_pipes.open(os.path.join( 'output', 'stdout.'+str(id)), 'w') @@ -188,6 +228,17 @@ class ProcessesManagement: self.update_status(id, 'Error', int(time.time())) def ota_generate(self, args, id=0): + """ + Read in the arguments from the frontend and start running the OTA + generation process, then update the records in the database. + Format of args: + output: string, extra_keys: List[string], extra: string, + isIncremental: bool, isPartial: bool, partial: List[string], + incremental: string, target: string, verbose: bool + args: + args: dict + id: string + """ command = ['ota_from_target_files'] # Check essential configuration is properly set if not os.path.isfile(args['target']): @@ -197,7 +248,8 @@ class ProcessesManagement: if args['verbose']: command.append('-v') if args['extra_keys']: - args['extra'] += '--' + ' --'.join(args['extra_keys']) + args['extra'] = \ + '--' + ' --'.join(args['extra_keys']) + ' ' + args['extra'] if args['extra']: command += args['extra'].split(' ') command.append('-k') @@ -225,12 +277,7 @@ class ProcessesManagement: ) try: thread = threading.Thread(target=self.ota_run, args=(command, id)) - with sqlite3.connect(self.path) as connect: - cursor = connect.cursor() - cursor.execute(""" - INSERT INTO Jobs (ID, TargetPath, IncrementalPath, Verbose, Partial, OutputPath, Status, Downgrade, OtherFlags, STDOUT, STDERR, StartTime) - VALUES (:id, :target, :incremental, :verbose, :partial, :output, :status, :downgrade, :extra, :stdout, :stderr, :start_time) - """, job_info.to_sql_form_dict()) + self.insert_database(job_info) thread.start() except AssertionError: raise SyntaxError diff --git a/tools/otagui/test_ota_interface.py
b/tools/otagui/test_ota_interface.py index effbb3a7c..5d405f3cf 100644 --- a/tools/otagui/test_ota_interface.py +++ b/tools/otagui/test_ota_interface.py @@ -1,6 +1,9 @@ import unittest from ota_interface import JobInfo, ProcessesManagement from unittest.mock import patch, mock_open, Mock, MagicMock +import os +import sqlite3 +import copy class TestJobInfo(unittest.TestCase): def setUp(self): @@ -176,7 +179,152 @@ class TestJobInfo(unittest.TestCase): ) class TestProcessesManagement(unittest.TestCase): - pass + def setUp(self): + if os.path.isfile('test_process.db'): + self.tearDown() + self.processes = ProcessesManagement(path='test_process.db') + testcase_job_info = TestJobInfo() + testcase_job_info.setUp() + self.test_job_info = testcase_job_info.setup_job(incremental='target/source.zip') + self.processes.insert_database(self.test_job_info) + + def tearDown(self): + os.remove('test_process.db') + try: + os.remove('output/stderr.'+self.test_job_info.id) + os.remove('output/stdout.'+self.test_job_info.id) + except FileNotFoundError: + pass + + def test_init(self): + # Test the database is created successfully + self.assertTrue(os.path.isfile('test_process.db')) + test_columns = [ + {'name': 'ID','type':'TEXT'}, + {'name': 'TargetPath','type':'TEXT'}, + {'name': 'IncrementalPath','type':'TEXT'}, + {'name': 'Verbose','type':'INTEGER'}, + {'name': 'Partial','type':'TEXT'}, + {'name': 'OutputPath','type':'TEXT'}, + {'name': 'Status','type':'TEXT'}, + {'name': 'Downgrade','type':'INTEGER'}, + {'name': 'OtherFlags','type':'TEXT'}, + {'name': 'STDOUT','type':'TEXT'}, + {'name': 'STDERR','type':'TEXT'}, + {'name': 'StartTime','type':'INTEGER'}, + {'name': 'FinishTime','type':'INTEGER'}, + ] + connect = sqlite3.connect('test_process.db') + cursor = connect.cursor() + cursor.execute("PRAGMA table_info(jobs)") + columns = cursor.fetchall() + for column in test_columns: + column_found = list(filter(lambda x: x[1]==column['name'], columns)) + self.assertEqual(len(column_found), 
1, + 'The column ' + column['name'] + ' is not found in database' + ) + self.assertEqual(column_found[0][2], column['type'], + 'The column' + column['name'] + ' has a wrong type' + ) + + def test_get_status_by_ID(self): + job_info = self.processes.get_status_by_ID(self.test_job_info.id) + self.assertEqual(job_info, self.test_job_info, + 'The data read from database is not the same one as inserted' + ) + + def test_get_status(self): + # Insert the same info again, but change the last digit of id to 0 + test_job_info2 = copy.copy(self.test_job_info) + test_job_info2.id = test_job_info2.id[:-1] + '0' + self.processes.insert_database(test_job_info2) + job_infos = self.processes.get_status() + self.assertEqual(len(job_infos), 2, + 'The number of data entries is not the same as created' + ) + self.assertEqual(job_infos[0], self.test_job_info, + 'The data list read from database is not the same one as inserted' + ) + self.assertEqual(job_infos[1], test_job_info2, + 'The data list read from database is not the same one as inserted' + ) + + def test_ota_run(self): + # Test when the job exits normally + mock_proc = Mock() + mock_proc.wait = Mock(return_value=0) + mock_Popen = Mock(return_value=mock_proc) + test_command = [ + "ota_from_target_files", "-v","build/target.zip", "output/ota.zip", + ] + mock_pipes_template = Mock() + mock_pipes_template.open = Mock() + mock_Template = Mock(return_value=mock_pipes_template) + # Mock the subprocess.Popen, subprocess.Popen().wait and pipes.Template + with patch("subprocess.Popen", mock_Popen), \ + patch("pipes.Template", mock_Template): + self.processes.ota_run(test_command, self.test_job_info.id) + mock_Popen.assert_called_once() + mock_proc.wait.assert_called_once() + job_info = self.processes.get_status_by_ID(self.test_job_info.id) + self.assertEqual(job_info.status, 'Finished') + mock_Popen.reset_mock() + mock_proc.wait.reset_mock() + # Test when the job exits with problems + mock_proc.wait = Mock(return_value=1) + with
patch("subprocess.Popen", mock_Popen), \ + patch("pipes.Template", mock_Template): + self.processes.ota_run(test_command, self.test_job_info.id) + mock_Popen.assert_called_once() + mock_proc.wait.assert_called_once() + job_info = self.processes.get_status_by_ID(self.test_job_info.id) + self.assertEqual(job_info.status, 'Error') + + def test_ota_generate(self): + test_args = dict({ + 'output': 'ota.zip', + 'extra_keys': ['downgrade', 'wipe_user_data'], + 'extra': '--disable_vabc', + 'isIncremental': True, + 'isPartial': True, + 'partial': ['system', 'vendor'], + 'incremental': 'target/source.zip', + 'target': 'target/build.zip', + 'verbose': True + }) + # Usually the order of commands make no difference, but the following + # order has been validated, so it is best to follow this manner: + # ota_from_target_files [flags like -v, --downgrade] + # [-i incremental_source] [-p partial_list] target output + test_command = [ + 'ota_from_target_files', '-v', '--downgrade', + '--wipe_user_data', '--disable_vabc', '-k', + 'build/make/target/product/security/testkey', + '-i', 'target/source.zip', + '--partial', 'system vendor', 'target/build.zip', 'ota.zip' + ] + mock_os_path_isfile = Mock(return_value=True) + mock_threading = Mock() + mock_thread = Mock(return_value=mock_threading) + with patch("os.path.isfile", mock_os_path_isfile), \ + patch("threading.Thread", mock_thread): + self.processes.ota_generate(test_args, id='test') + job_info = self.processes.get_status_by_ID('test') + self.assertEqual(job_info.status, 'Running', + 'The job cannot be stored into database properly' + ) + # Test if the job stored into database properly + for key, value in test_args.items(): + # extra_keys is merged to extra when stored into database + if key=='extra_keys': + continue + self.assertEqual(job_info.__dict__[key], value, + 'The column ' + key + ' is not stored into database properly' + ) + # Test if the command is in its order + self.assertEqual(mock_thread.call_args[1]['args'][0], 
test_command, + 'The subprocess command is not in its good shape' + ) if __name__ == '__main__': unittest.main() \ No newline at end of file