From 83758ef7cf9a2eb9a48815dbee1d120d03fc09c2 Mon Sep 17 00:00:00 2001 From: Major Hayden Date: Fri, 13 Jul 2018 14:20:15 -0500 Subject: [PATCH 1/3] db: Add pendingjobs table Allow sktm to store the name and build ID of the Jenkins job that is spawned to run tests. Signed-off-by: Major Hayden --- db_migrations/03-add-pendingjobs.sql | 5 +++++ sktm/db.py | 6 ++++++ 2 files changed, 11 insertions(+) create mode 100644 db_migrations/03-add-pendingjobs.sql diff --git a/db_migrations/03-add-pendingjobs.sql b/db_migrations/03-add-pendingjobs.sql new file mode 100644 index 0000000..fa4e06d --- /dev/null +++ b/db_migrations/03-add-pendingjobs.sql @@ -0,0 +1,5 @@ +CREATE TABLE pendingjobs( + id INTEGER PRIMARY KEY, + job_name TEXT, + build_id INTEGER +); diff --git a/sktm/db.py b/sktm/db.py index 51fcd4f..d57cb88 100644 --- a/sktm/db.py +++ b/sktm/db.py @@ -68,6 +68,12 @@ def __createdb(self, db): FOREIGN KEY(patchsource_id) REFERENCES patchsource(id) ); + CREATE TABLE pendingjobs( + id INTEGER PRIMARY KEY, + job_name TEXT, + build_id INTEGER + ); + CREATE TABLE testrun( id INTEGER PRIMARY KEY, result_id INTEGER, From 1fe843e426643ad30d34b809d85dd48857d29273 Mon Sep 17 00:00:00 2001 From: Major Hayden Date: Fri, 13 Jul 2018 14:34:28 -0500 Subject: [PATCH 2/3] Refactor pending patches logic Add patches to the `patch` table as soon as they are received. Alter the `pendingpatches` table to link test jobs (`pendingjobs`) with patches in the `patch` table. Store each test job in the `pendingjobs` table and refer to the table when checking on tested patches. 
Signed-off-by: Major Hayden --- db_migrations/04-alter-pendingpatches.sql | 8 ++ sktm/__init__.py | 104 ++++++++++--------- sktm/db.py | 119 ++++++++++++---------- tests/test_db.py | 25 ++--- 4 files changed, 142 insertions(+), 114 deletions(-) create mode 100644 db_migrations/04-alter-pendingpatches.sql diff --git a/db_migrations/04-alter-pendingpatches.sql b/db_migrations/04-alter-pendingpatches.sql new file mode 100644 index 0000000..4c8717f --- /dev/null +++ b/db_migrations/04-alter-pendingpatches.sql @@ -0,0 +1,8 @@ +-- NOTE: Apply this change only when no patches are pending. +DROP TABLE pendingpatches; +CREATE TABLE pendingpatches( + id INTEGER PRIMARY KEY, + pendingjob_id INTEGER, + timestamp INTEGER, + FOREIGN KEY(pendingjob_id) REFERENCES pendingjobs(id) +); diff --git a/sktm/__init__.py b/sktm/__init__.py index ac7f012..cff6d5b 100644 --- a/sktm/__init__.py +++ b/sktm/__init__.py @@ -284,21 +284,32 @@ def check_patchwork(self): new_series = cpw.get_new_patchsets() for series in new_series: logging.info("new series: %s", series.get_obj_url_list()) + + # Get a list of patch URLs from the series + patch_urls = series.get_patch_url_list() + + # Make a list of patch info tuples + patches = [self.get_patch_info_from_url(cpw, patch_url) + for patch_url in patch_urls] + + # Add the patches to the database + self.db.commit_series(patches) + + # Filter the list of series into the ones we will test (ready) + # and the ones that are filtered out (dropped) series_ready, series_dropped = self.filter_patchsets(new_series) + + # Log the series that are ready to test for series in series_ready: logging.info("ready series: %s", series.get_obj_url_list()) + + # Log the series that have been dropped (due to filtering) for series in series_dropped: logging.info("dropped series: %s", series.get_obj_url_list()) - # Retrieve all data and save dropped patches in the DB - patches = [] - for patch_url in series.get_patch_url_list(): - 
patches.append(self.get_patch_info_from_url(cpw, - patch_url)) - - self.db.commit_series(patches) - + # Add the list of ready series to our list to test series_list += series_ready + # Add series summaries for all patches staying pending for # longer than 12 hours series_list += cpw.get_patchsets( @@ -306,24 +317,35 @@ def check_patchwork(self): cpw.project_id, 43200) ) - # For each series summary + for series in series_list: - # (Re-)add the series' patches to the "pending" list - self.db.set_patchset_pending(cpw.baseurl, cpw.project_id, - series.get_patch_info_list()) # Submit and remember a Jenkins build for the series - self.pj.append((sktm.jtype.PATCHWORK, - self.jk.build( - self.jobname, - baserepo=self.baserepo, - ref=stablecommit, - baseconfig=self.cfgurl, - message_id=series.message_id, - subject=series.subject, - emails=series.email_addr_set, - patchwork=series.get_patch_url_list(), - makeopts=self.makeopts), - cpw)) + build_id = self.jk.build( + self.jobname, + baserepo=self.baserepo, + ref=stablecommit, + baseconfig=self.cfgurl, + message_id=series.message_id, + subject=series.subject, + emails=series.email_addr_set, + patchwork=series.get_patch_url_list(), + makeopts=self.makeopts + ) + self.pj.append((sktm.jtype.PATCHWORK, build_id, cpw)) + + # Store the pending job in the database + pendingjob_id = self.db.create_pending_job( + self.jobname, + build_id + ) + + # Add the series to the list of pending patches or update the + # series if the series is being re-added after expiring. 
+ self.db.set_patchset_pending( + pendingjob_id, + series.get_patch_info_list() + ) + logging.info("submitted message ID: %s", series.message_id) logging.info("submitted subject: %s", series.subject) logging.info("submitted emails: %s", series.email_addr_set) @@ -344,29 +366,17 @@ def check_pending(self): bid ) elif pjt == sktm.jtype.PATCHWORK: - patches = list() - bres = self.jk.get_result(self.jobname, bid) - rurl = self.jk.get_result_url(self.jobname, bid) - logging.info("result=%s", bres) - logging.info("url=%s", rurl) - basehash = self.jk.get_base_hash(self.jobname, bid) - logging.info("basehash=%s", basehash) - if bres == sktm.tresult.BASELINE_FAILURE: - self.db.update_baseline( - self.baserepo, - basehash, - self.jk.get_base_commitdate(self.jobname, bid), - sktm.tresult.TEST_FAILURE, - bid - ) - - patch_url_list = self.jk.get_patchwork(self.jobname, bid) - for patch_url in patch_url_list: - patches.append(self.get_patch_info_from_url(cpw, - patch_url)) - - if bres != sktm.tresult.BASELINE_FAILURE: - self.db.commit_tested(patches) + # Get the build result + result = self.jk.get_result(self.jobname, bid) + logging.info("result=%s", result) + + # Get the build result URL + result_url = self.jk.get_result_url(self.jobname, bid) + logging.info("url=%s", result_url) + + # Remove the patches from the pendingpatches table and + # remove the pendingjob entry. 
+ self.db.unset_patchset_pending(self.jobname, bid) else: raise Exception("Unknown job type: %d" % pjt) diff --git a/sktm/db.py b/sktm/db.py index d57cb88..af249b4 100644 --- a/sktm/db.py +++ b/sktm/db.py @@ -62,10 +62,9 @@ def __createdb(self, db): CREATE TABLE pendingpatches( id INTEGER PRIMARY KEY, - pdate TEXT, - patchsource_id INTEGER, + pendingjob_id INTEGER, timestamp INTEGER, - FOREIGN KEY(patchsource_id) REFERENCES patchsource(id) + FOREIGN KEY(pendingjob_id) REFERENCES pendingjobs(id) ); CREATE TABLE pendingjobs( @@ -160,6 +159,37 @@ def __get_sourceid(self, baseurl, project_id): return result[0] + def create_pending_job(self, job_name, build_id): + """Create a pending job entry. + + Args: + job_name: Job name in Jenkins. + build_id: Build ID for the Jenkins job. + + Returns: The pendingjob ID that was just added. + """ + self.cur.execute( + "INSERT INTO pendingjobs (job_name, build_id) VALUES (?, ?)", + (job_name, build_id) + ) + self.conn.commit() + + return self.cur.lastrowid + + def get_pending_job_id(self, job_name, build_id): + """Get a pending job ID. + + Args: + job_name: Job name in Jenkins. + build_id: Build ID for the Jenkins job. + """ + self.cur.execute( + 'SELECT id FROM pendingjobs WHERE job_name = ? AND build_id = ?', + (job_name, build_id) + ) + result = self.cur.fetchone() + return str(result[0]) + def get_last_checked_patch(self, baseurl, project_id): """Get the patch id of the last patch that was checked. @@ -265,9 +295,10 @@ def get_expired_pending_patches(self, baseurl, project_id, exptime=86400): sourceid = self.__get_sourceid(baseurl, project_id) tstamp = int(time.time()) - exptime - self.cur.execute('SELECT id FROM pendingpatches WHERE ' - 'patchsource_id = ? AND ' - 'timestamp < ?', + self.cur.execute('SELECT pendingpatches.id FROM pendingpatches ' + 'LEFT JOIN patch ON patch.id=pendingpatches.id' + ' WHERE patch.patchsource_id = ? 
AND ' + 'pendingpatches.timestamp < ?', (sourceid, tstamp)) for res in self.cur.fetchall(): patchlist.append(res[0]) @@ -379,57 +410,50 @@ def __get_latest(self, baserepo): return result[0] - def set_patchset_pending(self, baseurl, project_id, series_data): - """Add a patch to pendingpatches or update an existing entry. + def set_patchset_pending(self, pendingjob_id, series_data): + """Add a patch to `pendingpatches` or update an existing entry. - Add each specified patch to the list of "pending" patches, with - specifed patch date, for specified Patchwork base URL and project ID, - and marked with current timestamp. Replace any previously added - patches with the same ID (bug: should be "same ID, project ID and - base URL"). + Each entry in the `pendingpatches` table refers back to a patch in the + `patch` table. Args: - baseurl: Base URL of the Patchwork instance the project ID and - patch IDs belong to. - project_id: ID of the Patchwork project the patch IDs belong to. + pendingjob_id: ID from the pendingjobs table for the Jenkins job + that is testing the patches. series_data: List of info tuples for patches to add to the list, where each tuple contains the patch ID and a free-form patch date string. """ - sourceid = self.__get_sourceid(baseurl, project_id) tstamp = int(time.time()) logging.debug("setting patches as pending: %s", series_data) - self.cur.executemany('INSERT OR REPLACE INTO ' - 'pendingpatches(id, pdate, patchsource_id, ' - 'timestamp) ' - 'VALUES(?, ?, ?, ?)', - [(patch_id, patch_date, sourceid, tstamp) for - (patch_id, patch_date) in series_data]) + self.cur.executemany( + 'INSERT OR REPLACE INTO pendingpatches ' + '(id, pendingjob_id, timestamp) VALUES(?, ?, ?)', + [(patch_id, pendingjob_id, tstamp) + for (patch_id, patch_date) in series_data]) self.conn.commit() - def __unset_patchset_pending(self, baseurl, patch_id_list): - """Remove a patch from the list of pending patches. 
- - Remove each specified patch from the list of "pending" patches, for - the specified Patchwork base URL. + def unset_patchset_pending(self, job_name, build_id): + """Clean up pendingpatches and pendingjobs after a test is complete. Args: - baseurl: Base URL of the Patchwork instance the patch IDs - belong to. - patch_id_list: List of IDs of patches to be removed from the list. + job_name: Jenkins job that tested the patches. + build_id: Build ID from the Jenkins job. """ - logging.debug("removing patches from pending list: %s", patch_id_list) - - self.cur.executemany('DELETE FROM pendingpatches WHERE ' - 'patchsource_id IN ' - '(SELECT DISTINCT id FROM patchsource WHERE ' - 'baseurl = ?) ' - 'AND id = ? ', - [(baseurl, patch_id) for - patch_id in patch_id_list]) + # Get the pendingjob ID + pendingjob_id = self.get_pending_job_id(job_name, build_id) + + # Delete the matching pendingpatches and pendingjobs entries + self.cur.executemany( + 'DELETE FROM pendingpatches WHERE pendingjob_id = ?', + [(pendingjob_id,)] + ) + self.conn.commit() + self.cur.execute( + 'DELETE FROM pendingjobs WHERE id = ?', (pendingjob_id,) + ) self.conn.commit() def update_baseline(self, baserepo, commithash, commitdate, @@ -468,21 +492,6 @@ def update_baseline(self, baserepo, commithash, commitdate, (testrun_id, commithash, baserepo_id)) self.conn.commit() - def commit_tested(self, patches): - """Saved tested patches. - - Args: - patches: List of patches that were tested - """ - logging.debug("commit_tested: patches=%d", len(patches)) - self.commit_series(patches) - - for (patch_id, patch_name, patch_url, baseurl, project_id, - patch_date) in patches: - # TODO: Can accumulate per-project list instead of doing it one by - # one - self.__unset_patchset_pending(baseurl, [patch_id]) - def __commit_testrun(self, result, buildid): """Add a test run to the database. 
diff --git a/tests/test_db.py b/tests/test_db.py index 41f7069..a416d0e 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -399,7 +399,7 @@ def test_get_stable_empty(self, mock_sql, mock_get_repoid): def test_set_patchset_pending(self, mock_sql, mock_log): """Ensure patches are added to the pendingpatch table.""" testdb = SktDb(self.database_file) - testdb.set_patchset_pending('baseurl', '1', [('1', '2018-06-04')]) + testdb.set_patchset_pending('1', [('1', '2018-06-04')]) mock_sql.connect().cursor().executemany.assert_called_once() mock_sql.connect().commit.assert_called() @@ -413,26 +413,27 @@ def test_set_patchset_pending(self, mock_sql, mock_log): execute_call_args[0] ) - @mock.patch('logging.debug') - @mock.patch('sktm.db.SktDb._SktDb__get_sourceid') + @mock.patch('sktm.db.SktDb.get_pending_job_id') @mock.patch('sktm.db.sqlite3') - def test_unset_patchset_pending(self, mock_sql, mock_get_sourceid, - mock_log): + def test_unset_patchset_pending(self, mock_sql, mock_getpendingjob): """Ensure patches are removed from the pendingpatch table.""" # pylint: disable=W0212,E1101 testdb = SktDb(self.database_file) - mock_get_sourceid.return_value = 1 + mock_getpendingjob.return_value = '1' - testdb._SktDb__unset_patchset_pending('baseurl', ['1']) + testdb.unset_patchset_pending('skt-multiarch', '1') - # Ensure a debug log was written - mock_log.assert_called_once() - - # Check if we have a proper DELETE query executed + # Ensure we deleted entries from pendingpatches execute_call_args = mock_sql.connect().cursor().executemany.\ call_args[0] self.assertIn('DELETE FROM pendingpatches', execute_call_args[0]) - self.assertEqual([('baseurl', '1')], execute_call_args[1]) + self.assertEqual([('1',)], execute_call_args[1]) + + # Ensure we deleted entries from pendingjobs + execute_call_args = mock_sql.connect().cursor().execute.\ call_args[0] + self.assertIn('DELETE FROM pendingjobs', execute_call_args[0]) + self.assertEqual(('1',), execute_call_args[1]) # Ensure the data was 
committed to the database mock_sql.connect().commit.assert_called() From 70b353586996ce23e4c4c0b71b1e5e497b7da204 Mon Sep 17 00:00:00 2001 From: Major Hayden Date: Fri, 13 Jul 2018 14:40:43 -0500 Subject: [PATCH 3/3] db: Add tests for pendingjobs functions Signed-off-by: Major Hayden --- tests/test_db.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_db.py b/tests/test_db.py index a416d0e..aebbb91 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -143,6 +143,26 @@ def test_commit_testrun(self, mock_sql, mock_log): # Ensure the data was committed to the database mock_sql.connect().commit.assert_called() + @mock.patch('sktm.db.sqlite3') + def test_create_pendingjob(self, mock_sql): + """Ensure create_pending_job() makes a pendingjob record.""" + # pylint: disable=W0212,E1101 + testdb = SktDb(self.database_file) + mock_sql.connect().cursor().lastrowid = 1 + result = testdb.create_pending_job('skt', '2') + + self.assertEqual(result, 1) + + @mock.patch('sktm.db.sqlite3') + def test_get_pendingjob(self, mock_sql): + """Ensure get_pending_job_id() retrieves a pendingjob record.""" + # pylint: disable=W0212,E1101 + testdb = SktDb(self.database_file) + mock_sql.connect().cursor().fetchone.return_value = [1] + result = testdb.get_pending_job_id('skt', '2') + + self.assertEqual(result, '1') + @mock.patch('sktm.db.sqlite3') def test_create_repoid(self, mock_sql): """Ensure __create_repoid() inserts into DB and retrieves a repoid."""