Merge branch 'main' into avm99963-monorail
Merged commit 34d8229ae2b51fb1a15bd208e6fe6185c94f6266
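This pulls in the upstream schema/bookkeeping changes, apparently in
support of the tracker migration:

* Issue gains a migration_modified column (threaded through ISSUE_COLS,
  _UnpackIssue, InsertIssue, UpdateIssue, soft deletion, comment and
  attachment deletion, and ExpungeUsers) that is bumped on changes that
  matter to the migration, including ones that do not touch
  modified_timestamp.
* IssueUpdate gains added_component_id / removed_component_id so that
  component amendments round-trip through the DB.
* proto imports move to mrproto, and deprecated logging.warn calls
  become logging.warning.

The matching schema change is not part of this diff; a rough sketch of
what it would look like (column names come from the code below, the
types are assumptions):

    ALTER TABLE Issue ADD COLUMN migration_modified INT UNSIGNED NULL;
    ALTER TABLE IssueUpdate
        ADD COLUMN added_component_id INT NULL,
        ADD COLUMN removed_component_id INT NULL;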
GitOrigin-RevId: 7ee0917f93a577e475f8e09526dd144d245593f4
diff --git a/services/issue_svc.py b/services/issue_svc.py
index 8e5a45f..ad50f81 100644
--- a/services/issue_svc.py
+++ b/services/issue_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of functions that provide persistence for Monorail issue tracking.
@@ -37,8 +36,8 @@
from framework import permissions
from framework import sql
from infra_libs import ts_mon
-from proto import project_pb2
-from proto import tracker_pb2
+from mrproto import project_pb2
+from mrproto import tracker_pb2
from services import caches
from services import tracker_fulltext
from tracker import tracker_bizobj
@@ -78,10 +77,10 @@
ISSUE_COLS = [
'id', 'project_id', 'local_id', 'status_id', 'owner_id', 'reporter_id',
- 'opened', 'closed', 'modified',
- 'owner_modified', 'status_modified', 'component_modified',
- 'derived_owner_id', 'derived_status_id',
- 'deleted', 'star_count', 'attachment_count', 'is_spam']
+ 'opened', 'closed', 'modified', 'owner_modified', 'status_modified',
+ 'component_modified', 'migration_modified', 'derived_owner_id',
+ 'derived_status_id', 'deleted', 'star_count', 'attachment_count', 'is_spam'
+]
ISSUESUMMARY_COLS = ['issue_id', 'summary']
ISSUE2LABEL_COLS = ['issue_id', 'label_id', 'derived']
ISSUE2COMPONENT_COLS = ['issue_id', 'component_id', 'derived']
@@ -111,7 +110,9 @@
'ext_issue_identifier', 'kind']
ISSUEUPDATE_COLS = [
'id', 'issue_id', 'comment_id', 'field', 'old_value', 'new_value',
- 'added_user_id', 'removed_user_id', 'custom_field_name']
+ 'added_user_id', 'removed_user_id', 'custom_field_name',
+ 'added_component_id', 'removed_component_id'
+]
ISSUEFORMERLOCATIONS_COLS = ['issue_id', 'project_id', 'local_id']
REINDEXQUEUE_COLS = ['issue_id', 'created']
ISSUESNAPSHOT_COLS = ['id', 'issue_id', 'shard', 'project_id', 'local_id',
@@ -188,10 +189,12 @@
def _UnpackIssue(self, cnxn, issue_row):
"""Partially construct an issue object using info from a DB row."""
- (issue_id, project_id, local_id, status_id, owner_id, reporter_id,
- opened, closed, modified, owner_modified, status_modified,
- component_modified, derived_owner_id, derived_status_id,
- deleted, star_count, attachment_count, is_spam) = issue_row
+ (
+ issue_id, project_id, local_id, status_id, owner_id, reporter_id,
+ opened, closed, modified, owner_modified, status_modified,
+ component_modified, migration_modified, derived_owner_id,
+ derived_status_id, deleted, star_count, attachment_count,
+ is_spam) = issue_row
issue = tracker_pb2.Issue()
project = self.project_service.GetProject(cnxn, project_id)
@@ -222,6 +225,8 @@
issue.status_modified_timestamp = status_modified
if component_modified:
issue.component_modified_timestamp = component_modified
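+ # The new migration_modified column mirrors the other *_modified stamps.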
+ if migration_modified:
+ issue.migration_modified_timestamp = migration_modified
issue.star_count = star_count
issue.attachment_count = attachment_count
issue.is_spam = bool(is_spam)
@@ -361,7 +366,7 @@
elif kind == 'mergedinto':
src_issue.merged_into_external = ext_id
else:
- logging.warn('unhandled danging relation kind %r', kind)
+ logging.warning('unhandled dangling relation kind %r', kind)
continue
return results_dict
@@ -1035,21 +1040,15 @@
"""
status_id = self._config_service.LookupStatusID(
cnxn, issue.project_id, issue.status)
- row = (issue.project_id, issue.local_id, status_id,
- issue.owner_id or None,
- issue.reporter_id,
- issue.opened_timestamp,
- issue.closed_timestamp,
- issue.modified_timestamp,
- issue.owner_modified_timestamp,
- issue.status_modified_timestamp,
- issue.component_modified_timestamp,
- issue.derived_owner_id or None,
- self._config_service.LookupStatusID(
- cnxn, issue.project_id, issue.derived_status),
- bool(issue.deleted),
- issue.star_count, issue.attachment_count,
- issue.is_spam)
+ row = (
+ issue.project_id, issue.local_id, status_id, issue.owner_id or None,
+ issue.reporter_id, issue.opened_timestamp, issue.closed_timestamp,
+ issue.modified_timestamp, issue.owner_modified_timestamp,
+ issue.status_modified_timestamp, issue.component_modified_timestamp,
+ issue.migration_modified_timestamp, issue.derived_owner_id or None,
+ self._config_service.LookupStatusID(
+ cnxn, issue.project_id, issue.derived_status), bool(issue.deleted),
+ issue.star_count, issue.attachment_count, issue.is_spam)
# ISSUE_COLS[1:] to skip setting the ID.
# Insert into the Primary DB.
generated_ids = self.issue_tbl.InsertRows(
@@ -1095,25 +1094,43 @@
assert not issue.assume_stale, (
'issue2514: Storing issue that might be stale: %r' % issue)
delta = {
- 'project_id': issue.project_id,
- 'local_id': issue.local_id,
- 'owner_id': issue.owner_id or None,
- 'status_id': self._config_service.LookupStatusID(
- cnxn, issue.project_id, issue.status) or None,
- 'opened': issue.opened_timestamp,
- 'closed': issue.closed_timestamp,
- 'modified': issue.modified_timestamp,
- 'owner_modified': issue.owner_modified_timestamp,
- 'status_modified': issue.status_modified_timestamp,
- 'component_modified': issue.component_modified_timestamp,
- 'derived_owner_id': issue.derived_owner_id or None,
- 'derived_status_id': self._config_service.LookupStatusID(
- cnxn, issue.project_id, issue.derived_status) or None,
- 'deleted': bool(issue.deleted),
- 'star_count': issue.star_count,
- 'attachment_count': issue.attachment_count,
- 'is_spam': issue.is_spam,
- }
+ 'project_id':
+ issue.project_id,
+ 'local_id':
+ issue.local_id,
+ 'owner_id':
+ issue.owner_id or None,
+ 'status_id':
+ self._config_service.LookupStatusID(
+ cnxn, issue.project_id, issue.status) or None,
+ 'opened':
+ issue.opened_timestamp,
+ 'closed':
+ issue.closed_timestamp,
+ 'modified':
+ issue.modified_timestamp,
+ 'owner_modified':
+ issue.owner_modified_timestamp,
+ 'status_modified':
+ issue.status_modified_timestamp,
+ 'component_modified':
+ issue.component_modified_timestamp,
+ 'migration_modified':
+ issue.migration_modified_timestamp,
+ 'derived_owner_id':
+ issue.derived_owner_id or None,
+ 'derived_status_id':
+ self._config_service.LookupStatusID(
+ cnxn, issue.project_id, issue.derived_status) or None,
+ 'deleted':
+ bool(issue.deleted),
+ 'star_count':
+ issue.star_count,
+ 'attachment_count':
+ issue.attachment_count,
+ 'is_spam':
+ issue.is_spam,
+ }
if update_cols is not None:
delta = {key: val for key, val in delta.items()
if key in update_cols}
@@ -1514,6 +1531,7 @@
# update the modified_timestamp for any comment added, even if it was
# just a text comment with no issue fields changed.
issue.modified_timestamp = timestamp
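+ # Bump migration_modified in lockstep with modified_timestamp.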
+ issue.migration_modified_timestamp = timestamp
# Update the closed timestamp before filter rules so that rules
# can test for closed_timestamp, and also after filter rules
@@ -1791,7 +1809,8 @@
"""
issue = self.GetIssueByLocalID(cnxn, project_id, local_id, use_cache=False)
issue.deleted = deleted
- self.UpdateIssue(cnxn, issue, update_cols=['deleted'])
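+ # Soft (un)deletion leaves modified_timestamp alone, so surface the
+ # change through migration_modified instead.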
+ issue.migration_modified_timestamp = int(time.time())
+ self.UpdateIssue(cnxn, issue, update_cols=['deleted', 'migration_modified'])
tracker_fulltext.IndexIssues(
cnxn, [issue], user_service, self, self._config_service)
@@ -1910,9 +1929,10 @@
def _UnpackAmendment(self, amendment_row):
"""Construct an Amendment PB from a DB row."""
- (_id, _issue_id, comment_id, field_name,
- old_value, new_value, added_user_id, removed_user_id,
- custom_field_name) = amendment_row
+ (
+ _id, _issue_id, comment_id, field_name, old_value, new_value,
+ added_user_id, removed_user_id, custom_field_name, added_component_id,
+ removed_component_id) = amendment_row
amendment = tracker_pb2.Amendment()
field_enum = tracker_pb2.FieldID(field_name.upper())
amendment.field = field_enum
@@ -1928,6 +1948,12 @@
amendment.removed_user_ids.append(removed_user_id)
if custom_field_name:
amendment.custom_field_name = custom_field_name
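+ # The int() casts are defensive; the DB layer may return other
+ # numeric types.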
+ if added_component_id:
+ added_component_id = int(added_component_id)
+ amendment.added_component_ids.append(added_component_id)
+ if removed_component_id:
+ removed_component_id = int(removed_component_id)
+ amendment.removed_component_ids.append(removed_component_id)
return amendment, comment_id
def _ConsolidateAmendments(self, amendments):
@@ -1962,6 +1988,12 @@
new_amendment.removed_user_ids.extend(amendment.removed_user_ids)
if amendment.custom_field_name:
new_amendment.custom_field_name = amendment.custom_field_name
+ if amendment.added_component_ids:
+ new_amendment.added_component_ids.extend(
+ amendment.added_component_ids)
+ if amendment.removed_component_ids:
+ new_amendment.removed_component_ids.extend(
+ amendment.removed_component_ids)
result.append(new_amendment)
return result
@@ -2164,18 +2196,31 @@
field_enum = str(amendment.field).lower()
if (amendment.get_assigned_value('newvalue') is not None and
not amendment.added_user_ids and not amendment.removed_user_ids):
- amendment_rows.append((
- comment.issue_id, comment_id, field_enum,
- amendment.oldvalue, amendment.newvalue,
- None, None, amendment.custom_field_name))
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, amendment.oldvalue,
+ amendment.newvalue, None, None, amendment.custom_field_name,
+ None, None))
for added_user_id in amendment.added_user_ids:
- amendment_rows.append((
- comment.issue_id, comment_id, field_enum, None, None,
- added_user_id, None, amendment.custom_field_name))
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None,
+ added_user_id, None, amendment.custom_field_name, None, None))
for removed_user_id in amendment.removed_user_ids:
- amendment_rows.append((
- comment.issue_id, comment_id, field_enum, None, None,
- None, removed_user_id, amendment.custom_field_name))
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None, None,
+ removed_user_id, amendment.custom_field_name, None, None))
+ for added_component_id in amendment.added_component_ids:
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None, None,
+ None, amendment.custom_field_name, added_component_id, None))
+ for removed_component_id in amendment.removed_component_ids:
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None, None,
+ None, amendment.custom_field_name, None, removed_component_id))
# ISSUEUPDATE_COLS[1:] to skip id column.
self.issueupdate_tbl.InsertRows(
cnxn, ISSUEUPDATE_COLS[1:], amendment_rows, commit=False)
@@ -2369,16 +2414,19 @@
if not issue_comment.deleted_by:
issue_comment.deleted_by = deleted_by_user_id
issue.attachment_count = issue.attachment_count - attachments
+ issue.migration_modified_timestamp = int(time.time())
# Undelete only if it's in deleted state
elif issue_comment.deleted_by:
issue_comment.deleted_by = 0
issue.attachment_count = issue.attachment_count + attachments
+ issue.migration_modified_timestamp = int(time.time())
issue_comment.is_spam = is_spam
self._UpdateComment(
cnxn, issue_comment, update_cols=['deleted_by', 'is_spam'])
- self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])
+ self.UpdateIssue(
+ cnxn, issue, update_cols=['attachment_count', 'migration_modified'])
# Reindex the issue to take the comment deletion/undeletion into account.
if reindex:
@@ -2576,10 +2624,12 @@
if delete:
if not attachment.deleted:
issue.attachment_count = issue.attachment_count - 1
+ issue.migration_modified_timestamp = int(time.time())
# Increment attachment count only if it's in deleted state
elif attachment.deleted:
issue.attachment_count = issue.attachment_count + 1
+ issue.migration_modified_timestamp = int(time.time())
logging.info('attachment.deleted was %s', attachment.deleted)
@@ -2589,7 +2639,8 @@
self._UpdateAttachment(
cnxn, issue_comment, attachment, update_cols=['deleted'])
- self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])
+ self.UpdateIssue(
+ cnxn, issue, update_cols=['attachment_count', 'migration_modified'])
if index_now:
tracker_fulltext.IndexIssues(
@@ -2782,9 +2833,11 @@
user_ids = list(user_ids_by_email.values())
user_emails = list(user_ids_by_email.keys())
# Track issue_ids for issues that will have different search documents
- # as a result of removing users.
+ # and need updates to modification time as a result of removing users.
affected_issue_ids = []
+ timestamp = int(time.time())
+
# Reassign commenter_id and delete inbound_messages.
shard_id = sql.RandomShardID()
comment_content_id_rows = self.comment_tbl.Select(
@@ -2868,6 +2921,18 @@
# User rows can be deleted safely. No limit will be applied.
# Remove users in issue updates.
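+ # Select the affected issue ids before the Updates below overwrite
+ # added_user_id/removed_user_id.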
+ user_added_id_rows = self.issueupdate_tbl.Select(
+ cnxn,
+ cols=['IssueUpdate.issue_id'],
+ added_user_id=user_ids,
+ shard_id=shard_id,
+ limit=limit)
+ user_removed_id_rows = self.issueupdate_tbl.Select(
+ cnxn,
+ cols=['IssueUpdate.issue_id'],
+ removed_user_id=user_ids,
+ shard_id=shard_id,
+ limit=limit)
self.issueupdate_tbl.Update(
cnxn,
{'added_user_id': framework_constants.DELETED_USER_ID},
@@ -2878,6 +2943,8 @@
{'removed_user_id': framework_constants.DELETED_USER_ID},
removed_user_id=user_ids,
commit=commit)
+ affected_issue_ids.extend([row[0] for row in user_added_id_rows])
+ affected_issue_ids.extend([row[0] for row in user_removed_id_rows])
# Remove users in issue notify.
self.issue2notify_tbl.Delete(
@@ -2897,4 +2964,12 @@
self.issuesnapshot2cc_tbl.Delete(
cnxn, cc_id=user_ids, commit=commit, limit=limit)
- return list(set(affected_issue_ids))
+ # Update migration_modified timestamp for affected issues.
+ deduped_issue_ids = list(set(affected_issue_ids))
+ if deduped_issue_ids:
+ self.issue_tbl.Update(
+ cnxn, {'migration_modified': timestamp},
+ id=deduped_issue_ids,
+ commit=commit)
+
+ return deduped_issue_ids