Merge branch 'main' into avm99963-monorail
Merged commit 34d8229ae2b51fb1a15bd208e6fe6185c94f6266
GitOrigin-RevId: 7ee0917f93a577e475f8e09526dd144d245593f4
diff --git a/services/api_pb2_v1_helpers.py b/services/api_pb2_v1_helpers.py
index dcdea66..ea5e496 100644
--- a/services/api_pb2_v1_helpers.py
+++ b/services/api_pb2_v1_helpers.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Convert Monorail PB objects to API PB objects"""
@@ -22,9 +21,9 @@
from framework import framework_views
from framework import permissions
from framework import timestr
-from proto import api_pb2_v1
-from proto import project_pb2
-from proto import tracker_pb2
+from mrproto import api_pb2_v1
+from mrproto import project_pb2
+from mrproto import tracker_pb2
from services import project_svc
from tracker import field_helpers
from tracker import tracker_bizobj
@@ -219,7 +218,7 @@
return converted_phases
-def convert_issue(cls, issue, mar, services):
+def convert_issue(cls, issue, mar, services, migrated_id=None):
"""Convert Monorail Issue PB to API IssuesGetInsertResponse."""
config = services.config.GetProjectConfig(mar.cnxn, issue.project_id)
@@ -320,6 +319,8 @@
if issue.component_modified_timestamp:
resp.component_modified = datetime.datetime.fromtimestamp(
issue.component_modified_timestamp)
+ if migrated_id is not None:
+ resp.migrated_id = migrated_id
return resp
diff --git a/services/api_svc_v1.py b/services/api_svc_v1.py
index 8d8f238..883d69f 100644
--- a/services/api_svc_v1.py
+++ b/services/api_svc_v1.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""API service.
@@ -45,9 +44,9 @@
from framework import ratelimiter
from framework import sql
from project import project_helpers
-from proto import api_pb2_v1
-from proto import project_pb2
-from proto import tracker_pb2
+from mrproto import api_pb2_v1
+from mrproto import project_pb2
+from mrproto import tracker_pb2
from search import frontendsearchpipeline
from services import api_pb2_v1_helpers
from services import client_config_svc
@@ -59,6 +58,7 @@
from tracker import tracker_bizobj
from tracker import tracker_constants
from tracker import tracker_helpers
+from redirect import redirect_utils
from infra_libs import ts_mon
@@ -284,7 +284,9 @@
if not project:
raise exceptions.NoSuchProjectException(
'Project %s does not exist' % project_name)
- if project.state != project_pb2.ProjectState.LIVE:
+ # Allow to view non-live projects that were migrated.
+ if (project.state != project_pb2.ProjectState.LIVE and
+ project_name not in redirect_utils.PROJECT_REDIRECT_MAP):
raise permissions.PermissionException(
'API may not access project %s because it is not live'
% project_name)
@@ -314,7 +316,6 @@
@endpoints.api(name=ENDPOINTS_API_NAME, version='v1',
description='Monorail API to manage issues.',
- auth_level=endpoints.AUTH_LEVEL.NONE,
allowed_client_ids=endpoints.SKIP_CLIENT_ID_CHECK,
documentation=DOC_URL)
class MonorailApi(remote.Service):
@@ -414,7 +415,7 @@
http_method='POST',
name='issues.comments.insert')
def issues_comments_insert(self, mar, request):
- # type (...) -> proto.api_pb2_v1.IssuesCommentsInsertResponse
+ # type (...) -> mrproto.api_pb2_v1.IssuesCommentsInsertResponse
"""Add a comment."""
# Because we will modify issues, load from DB rather than cache.
issue = self._services.issue.GetIssueByLocalID(
@@ -429,7 +430,7 @@
# Temporary block on updating approval subfields.
if request.updates and request.updates.fieldValues:
- fds_by_name = {fd.field_name.lower():fd for fd in mar.config.field_defs}
+ fds_by_name = {fd.field_name.lower(): fd for fd in mar.config.field_defs}
for fv in request.updates.fieldValues:
# Checking for fv.approvalName is unreliable since it can be removed.
fd = fds_by_name.get(fv.fieldName.lower())
@@ -485,22 +486,46 @@
mar.cnxn, updates_dict['cc_remove']).values())
updates_dict['labels_add'], updates_dict['labels_remove'] = (
api_pb2_v1_helpers.split_remove_add(request.updates.labels))
+
+ field_helpers.ValidateLabels(
+ mar.cnxn,
+ self._services,
+ mar.project_id,
+ updates_dict.get('labels_add', []),
+ ezt_errors=mar.errors)
+ if mar.errors.AnyErrors():
+ raise endpoints.BadRequestException(
+ 'Invalid field values: %s' % mar.errors.labels)
+
blocked_on_add_strs, blocked_on_remove_strs = (
api_pb2_v1_helpers.split_remove_add(request.updates.blockedOn))
- updates_dict['blocked_on_add'] = api_pb2_v1_helpers.issue_global_ids(
- blocked_on_add_strs, issue.project_id, mar,
- self._services)
- updates_dict['blocked_on_remove'] = api_pb2_v1_helpers.issue_global_ids(
- blocked_on_remove_strs, issue.project_id, mar,
- self._services)
blocking_add_strs, blocking_remove_strs = (
api_pb2_v1_helpers.split_remove_add(request.updates.blocking))
- updates_dict['blocking_add'] = api_pb2_v1_helpers.issue_global_ids(
- blocking_add_strs, issue.project_id, mar,
- self._services)
- updates_dict['blocking_remove'] = api_pb2_v1_helpers.issue_global_ids(
- blocking_remove_strs, issue.project_id, mar,
- self._services)
+ blocked_on_add_iids = api_pb2_v1_helpers.issue_global_ids(
+ blocked_on_add_strs, issue.project_id, mar, self._services)
+ blocked_on_remove_iids = api_pb2_v1_helpers.issue_global_ids(
+ blocked_on_remove_strs, issue.project_id, mar, self._services)
+ blocking_add_iids = api_pb2_v1_helpers.issue_global_ids(
+ blocking_add_strs, issue.project_id, mar, self._services)
+ blocking_remove_iids = api_pb2_v1_helpers.issue_global_ids(
+ blocking_remove_strs, issue.project_id, mar, self._services)
+ all_block = (
+ blocked_on_add_iids + blocked_on_remove_iids + blocking_add_iids +
+ blocking_remove_iids)
+ for iid in all_block:
+ # Because we will modify issues, load from DB rather than cache.
+ issue = self._services.issue.GetIssue(mar.cnxn, iid, use_cache=False)
+ project = self._services.project.GetProjectByName(
+ mar.cnxn, issue.project_name)
+ if not tracker_helpers.CanEditProjectIssue(mar, project, issue,
+ mar.granted_perms):
+ raise permissions.PermissionException(
+ 'User is not allowed to block with issue (%s, %d)' %
+ (issue.project_name, issue.local_id))
+ updates_dict['blocked_on_add'] = blocked_on_add_iids
+ updates_dict['blocked_on_remove'] = blocked_on_remove_iids
+ updates_dict['blocking_add'] = blocking_add_iids
+ updates_dict['blocking_remove'] = blocking_remove_iids
components_add_strs, components_remove_strs = (
api_pb2_v1_helpers.split_remove_add(request.updates.components))
updates_dict['components_add'] = (
@@ -518,12 +543,11 @@
merge_into_issue = self._services.issue.GetIssueByLocalID(
mar.cnxn, merge_into_project.project_id, merge_local_id,
use_cache=False)
- merge_allowed = tracker_helpers.IsMergeAllowed(
- merge_into_issue, mar, self._services)
- if not merge_allowed:
+ if not tracker_helpers.CanEditProjectIssue(
+ mar, merge_into_project, merge_into_issue, mar.granted_perms):
raise permissions.PermissionException(
- 'User is not allowed to merge into issue %s:%s' %
- (merge_into_issue.project_name, merge_into_issue.local_id))
+ 'User is not allowed to merge into issue %s:%s' %
+ (merge_into_issue.project_name, merge_into_issue.local_id))
updates_dict['merged_into'] = merge_into_issue.issue_id
(updates_dict['field_vals_add'], updates_dict['field_vals_remove'],
updates_dict['fields_clear'], updates_dict['fields_labels_add'],
@@ -730,7 +754,7 @@
http_method='POST',
name='approvals.comments.insert')
def approvals_comments_insert(self, mar, request):
- # type (...) -> proto.api_pb2_v1.ApprovalsCommentsInsertResponse
+ # type (...) -> mrproto.api_pb2_v1.ApprovalsCommentsInsertResponse
"""Add an approval comment."""
approval_fd = tracker_bizobj.FindFieldDef(
request.approvalName, mar.config)
@@ -769,8 +793,10 @@
if request.approvalUpdates.fieldValues:
# Block updating field values that don't belong to the approval.
approvals_fds_by_name = {
- fd.field_name.lower():fd for fd in mar.config.field_defs
- if fd.approval_id == approval_fd.field_id}
+ fd.field_name.lower(): fd
+ for fd in mar.config.field_defs
+ if fd.approval_id == approval_fd.field_id
+ }
for fv in request.approvalUpdates.fieldValues:
if approvals_fds_by_name.get(fv.fieldName.lower()) is None:
raise endpoints.BadRequestException(
@@ -804,7 +830,6 @@
raise permissions.PermissionException(
'User is not allowed to make this status change')
updates_dict['status'] = status
- logging.info(time.time)
approval_delta = tracker_bizobj.MakeApprovalDelta(
updates_dict.get('status'), mar.auth.user_id,
updates_dict.get('approver_ids_add', []),
@@ -903,8 +928,13 @@
issue = self._services.issue.GetIssueByLocalID(
mar.cnxn, mar.project_id, request.issueId)
+ with work_env.WorkEnv(mar, self._services) as we:
+ migrated_id = we.GetIssueMigratedID(
+ request.projectId, request.issueId, issue.labels)
+
return api_pb2_v1_helpers.convert_issue(
- api_pb2_v1.IssuesGetInsertResponse, issue, mar, self._services)
+ api_pb2_v1.IssuesGetInsertResponse, issue, mar, self._services,
+ migrated_id)
@monorail_api_method(
api_pb2_v1.ISSUES_INSERT_REQUEST_RESOURCE_CONTAINER,
@@ -941,6 +971,17 @@
fields_add, _, _, fields_labels, _ = (
api_pb2_v1_helpers.convert_field_values(
request.fieldValues, mar, self._services))
+
+ field_helpers.ValidateLabels(
+ mar.cnxn,
+ self._services,
+ mar.project_id,
+ fields_labels,
+ ezt_errors=mar.errors)
+ if mar.errors.AnyErrors():
+ raise endpoints.BadRequestException(
+ 'Invalid field values: %s' % mar.errors.labels)
+
field_helpers.ValidateCustomFields(
mar.cnxn, self._services, fields_add, mar.config, mar.project,
ezt_errors=mar.errors)
@@ -1190,8 +1231,7 @@
name='components.create')
def components_create(self, mar, request):
"""Create a component."""
- if not mar.perms.CanUsePerm(
- permissions.EDIT_PROJECT, mar.auth.effective_ids, mar.project, []):
+ if not permissions.CanEditProjectConfig(mar, self._services):
raise permissions.PermissionException(
'User is not allowed to create components for this project')
@@ -1207,8 +1247,8 @@
if not parent_def:
raise exceptions.NoSuchComponentException(
'Parent component %s does not exist.' % parent_path)
- if not permissions.CanEditComponentDef(
- mar.auth.effective_ids, mar.perms, mar.project, parent_def, config):
+ if not permissions.CanEditComponentDef(mar, self._services, parent_def,
+ config):
raise permissions.PermissionException(
'User is not allowed to add a subcomponent to component %s' %
parent_path)
@@ -1266,8 +1306,8 @@
mar.auth.effective_ids, mar.perms, mar.project, component_def):
raise permissions.PermissionException(
'User is not allowed to view this component %s' % component_path)
- if not permissions.CanEditComponentDef(
- mar.auth.effective_ids, mar.perms, mar.project, component_def, config):
+ if not permissions.CanEditComponentDef(mar, self._services, component_def,
+ config):
raise permissions.PermissionException(
'User is not allowed to delete this component %s' % component_path)
@@ -1302,8 +1342,8 @@
mar.auth.effective_ids, mar.perms, mar.project, component_def):
raise permissions.PermissionException(
'User is not allowed to view this component %s' % component_path)
- if not permissions.CanEditComponentDef(
- mar.auth.effective_ids, mar.perms, mar.project, component_def, config):
+ if not permissions.CanEditComponentDef(mar, self._services, component_def,
+ config):
raise permissions.PermissionException(
'User is not allowed to edit this component %s' % component_path)
diff --git a/services/cachemanager_svc.py b/services/cachemanager_svc.py
index 753bffa..8801ef6 100644
--- a/services/cachemanager_svc.py
+++ b/services/cachemanager_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A simple in-RAM cache with distributed invalidation.
@@ -139,7 +138,7 @@
cnxn, kind=kind, where=[('timestep < %s', [last_timestep])])
-class RamCacheConsolidate(jsonfeed.FlaskInternalTask):
+class RamCacheConsolidate(jsonfeed.InternalTask):
"""Drop old Invalidate rows when there are too many of them."""
def HandleRequest(self, mr):
diff --git a/services/caches.py b/services/caches.py
index 8869d61..e72b9a2 100644
--- a/services/caches.py
+++ b/services/caches.py
@@ -1,4 +1,4 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
+# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Classes to manage cached values.
@@ -53,7 +53,7 @@
def CacheAll(self, new_item_dict):
"""Cache all items in the given dict, dropping old items if needed."""
if len(new_item_dict) >= self.max_size:
- logging.warn('Dumping the entire cache! %s', self.kind)
+ logging.warning('Dumping the entire cache! %s', self.kind)
self.cache = {}
else:
while len(self.cache) + len(new_item_dict) > self.max_size:
@@ -108,7 +108,6 @@
def LocalInvalidateAll(self):
"""Invalidate all keys locally: just start over with an empty dict."""
- logging.info('Locally invalidating all in kind=%r', self.kind)
self.cache = {}
def InvalidateAll(self, cnxn):
@@ -160,7 +159,7 @@
def InvalidateKeys(self, cnxn, keys):
"""Drop keys locally, and append their values to the Invalidate DB table."""
# Find values to invalidate.
- values = [self.cache[key] for key in keys if self.cache.has_key(key)]
+ values = [self.cache[key] for key in keys if key in self.cache]
if len(values) == len(keys):
for value in values:
self.LocalInvalidate(value)
@@ -346,7 +345,7 @@
'kind': self.cache.kind,
'prefix': self.prefix,
'count': len(keys),
- 'keys': str(keys)
+ 'keys': str(keys)[:100000]
})
memcache.delete_multi(
[self._KeyToStr(key) for key in keys],
diff --git a/services/chart_svc.py b/services/chart_svc.py
index 49ccb51..dcf9849 100644
--- a/services/chart_svc.py
+++ b/services/chart_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A service for querying data for charts.
@@ -252,7 +251,7 @@
shard_values_dict[name] += count
else:
if shard_values[0][0] >= settings.chart_query_max_rows:
- search_limit_reached = True
+ search_limit_reached = True
shard_values_dict.setdefault('total', 0)
shard_values_dict['total'] += shard_values[0][0]
diff --git a/services/client_config_svc.py b/services/client_config_svc.py
index d5d6a25..d1eb123 100644
--- a/services/client_config_svc.py
+++ b/services/client_config_svc.py
@@ -1,19 +1,17 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import base64
+import binascii
import json
import logging
import os
import time
-from six.moves import urllib
-import webapp2
import flask
from google.appengine.api import app_identity
@@ -25,15 +23,14 @@
import settings
from framework import framework_constants
-from proto import api_clients_config_pb2
+from mrproto import api_clients_config_pb2
CONFIG_FILE_PATH = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
'testing', 'api_clients.cfg')
LUCI_CONFIG_URL = (
- 'https://luci-config.appspot.com/_ah/api/config/v1/config_sets'
- '/services/monorail-prod/config/api_clients.cfg')
+ 'https://config.luci.app/prpc/config.service.v2.Configs/GetConfig')
client_config_svc = None
@@ -51,26 +48,34 @@
[ts_mon.BooleanField('success'),
ts_mon.StringField('type')])
-
def _process_response(response):
try:
- content = json.loads(response.content)
+ utf8_decoded_content = response.content.decode('utf-8')
+ except AttributeError:
+ logging.error('Response content was not binary: %r', response.content)
+ _CONFIG_LOADS.increment({'success': False, 'type': 'json-load-error'})
+ raise
+
+ try:
+ # Strip the XSSI prefix.
+ stripped_content = utf8_decoded_content[len(")]}'"):].strip()
+ json_config = json.loads(stripped_content)
except ValueError:
logging.error('Response was not JSON: %r', response.content)
_CONFIG_LOADS.increment({'success': False, 'type': 'json-load-error'})
raise
try:
- config_content = content['content']
+ config_raw_content = json_config['rawContent']
except KeyError:
- logging.error('JSON contained no content: %r', content)
+ logging.error('JSON missing rawContent: %r', json_config)
_CONFIG_LOADS.increment({'success': False, 'type': 'json-key-error'})
raise
try:
- content_text = base64.b64decode(config_content)
- except TypeError:
- logging.error('Content was not b64: %r', config_content)
+ content_text = base64.b64decode(config_raw_content)
+ except binascii.Error:
+ logging.error('Content was not b64: %r', config_raw_content)
_CONFIG_LOADS.increment({'success': False, 'type': 'b64-decode-error'})
raise
@@ -85,32 +90,44 @@
return content_text
-def GetLoadApiClientConfigs():
- global service_account_map
- global qpm_dict
+def _CallLuciConfig() -> urlfetch._URLFetchResult:
authorization_token, _ = app_identity.get_access_token(
framework_constants.OAUTH_SCOPE)
response = urlfetch.fetch(
LUCI_CONFIG_URL,
- method=urlfetch.GET,
+ method=urlfetch.POST,
follow_redirects=False,
headers={
- 'Content-Type': 'application/json; charset=UTF-8',
- 'Authorization': 'Bearer ' + authorization_token
- })
-
+ 'Content-Type': 'application/json; charset=utf-8',
+ 'Authorization': 'Bearer ' + authorization_token,
+ 'Accept': 'application/json'
+ },
+ payload=json.dumps(
+ {
+ 'configSet': 'services/monorail-prod',
+ 'path': 'api_clients.cfg'
+ }),
+ )
if response.status_code != 200:
logging.error('Invalid response from luci-config: %r', response)
_CONFIG_LOADS.increment({'success': False, 'type': 'luci-cfg-error'})
flask.abort(500, 'Invalid response from luci-config')
+ return response
+
+
+def GetLoadApiClientConfigs():
+ global service_account_map
+ global qpm_dict
+ response = _CallLuciConfig()
try:
- content_text = _process_response(response)
+ config_content_text = _process_response(response)
except Exception as e:
flask.abort(500, str(e))
- logging.info('luci-config content decoded: %r.', content_text)
- configs = ClientConfig(configs=content_text, key_name='api_client_configs')
+ logging.info('luci-config content decoded: %r.', config_content_text)
+ configs = ClientConfig(
+ configs=config_content_text, key_name='api_client_configs')
configs.put()
service_account_map = None
qpm_dict = None
diff --git a/services/config_svc.py b/services/config_svc.py
index 27c1d3a..8ad829d 100644
--- a/services/config_svc.py
+++ b/services/config_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Classes and functions for persistence of issue tracker configuration.
@@ -24,7 +23,7 @@
from framework import exceptions
from framework import framework_constants
from framework import sql
-from proto import tracker_pb2
+from mrproto import tracker_pb2
from services import caches
from services import project_svc
from tracker import tracker_bizobj
@@ -126,7 +125,7 @@
label_rows_dict.update(self._DeserializeLabelRows(label_def_rows))
for rows_in_shard in label_rows_dict.values():
- rows_in_shard.sort(key=lambda row: (row[2], row[3]), reverse=True)
+ rows_in_shard.sort(key=lambda row: (row[2] or 0, row[3]), reverse=True)
return label_rows_dict
@@ -505,7 +504,7 @@
result.extend(pids_to_label_rows_shard[key])
# Sort in python to reduce DB load and integrate results from shards.
# row[2] is rank, row[3] is label name.
- result.sort(key=lambda row: (row[2], row[3]), reverse=True)
+ result.sort(key=lambda row: (row[2] or 0, row[3]), reverse=True)
return result
def GetLabelDefRowsAnyProject(self, cnxn, where=None):
@@ -557,7 +556,8 @@
project_id)
return label_id_to_name.get(label_id)
- def LookupLabelID(self, cnxn, project_id, label, autocreate=True):
+ def LookupLabelID(
+ self, cnxn, project_id, label, autocreate=True, case_sensitive=False):
"""Look up a label ID, optionally interning it.
Args:
@@ -565,6 +565,7 @@
project_id: int ID of the project where the statuses are defined.
label: label string.
autocreate: if not already in the DB, store it and generate a new ID.
+      case_sensitive: whether the label lookup is case sensitive.
Returns:
The label ID for the given label string.
@@ -572,14 +573,19 @@
self._EnsureLabelCacheEntry(cnxn, project_id)
_label_id_to_name, label_name_to_id = self.label_cache.GetItem(
project_id)
- if label.lower() in label_name_to_id:
- return label_name_to_id[label.lower()]
+
+ label_lower = label.lower() if not case_sensitive else label
+ if label_lower in label_name_to_id:
+ return label_name_to_id[label_lower]
+
+ if not case_sensitive:
+ where = [('LOWER(label) = %s', [label_lower])]
+ else:
+ where = [('label = %s', [label])]
# Double check that the label does not already exist in the DB.
rows = self.labeldef_tbl.Select(
- cnxn, cols=['id'], project_id=project_id,
- where=[('LOWER(label) = %s', [label.lower()])],
- limit=1)
+ cnxn, cols=['id'], project_id=project_id, where=where, limit=1)
logging.info('Double checking for %r gave %r', label, rows)
if rows:
self.label_row_2lc.cache.LocalInvalidate(project_id)
diff --git a/services/features_svc.py b/services/features_svc.py
index 471a513..b2edce3 100644
--- a/services/features_svc.py
+++ b/services/features_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A class that provides persistence for Monorail's additional features.
@@ -25,7 +24,7 @@
from framework import framework_bizobj
from framework import framework_constants
from framework import sql
-from proto import features_pb2
+from mrproto import features_pb2
from services import caches
from services import config_svc
from tracker import tracker_bizobj
@@ -109,12 +108,12 @@
adder_id=adder_id , date_added=added,
note=note))
else:
- logging.warn('hotlist %d not found', hotlist_id)
+ logging.warning('hotlist %d not found', hotlist_id)
for (hotlist_id, user_id, role_name) in role_rows:
hotlist = hotlist_dict.get(hotlist_id)
if not hotlist:
- logging.warn('hotlist %d not found', hotlist_id)
+ logging.warning('hotlist %d not found', hotlist_id)
elif role_name == 'owner':
hotlist.owner_ids.append(user_id)
elif role_name == 'editor':
@@ -199,9 +198,9 @@
for (hotlist_id, user_id) in owner_rows:
found_owner_id = hotlist_to_owner_id.get(hotlist_id)
if found_owner_id:
- logging.warn(
- 'hotlist %d has more than one owner: %d, %d',
- hotlist_id, user_id, found_owner_id)
+ logging.warning(
+ 'hotlist %d has more than one owner: %d, %d', hotlist_id, user_id,
+ found_owner_id)
hotlist_to_owner_id[hotlist_id] = user_id
# Note: hotlist_rows hotlists found in the owner_rows that have names
@@ -230,13 +229,13 @@
role_rows = self.features_service.hotlist2user_tbl.Select(
cnxn, cols=['hotlist_id', 'user_id'],
- user_id=wanted_names_for_owner.keys(), role_name='owner')
+ user_id=sorted(wanted_names_for_owner.keys()), role_name='owner')
hotlist_ids = [row[0] for row in role_rows]
hotlist_rows = self.features_service.hotlist_tbl.Select(
cnxn, cols=['id', 'name'], id=hotlist_ids, is_deleted=False,
where=[('LOWER(name) IN (%s)' % sql.PlaceHolders(hotlist_names_set),
- [name.lower() for name in hotlist_names_set])])
+ [name.lower() for name in sorted(hotlist_names_set)])])
return self._DeserializeHotlistIDs(
hotlist_rows, role_rows, wanted_names_for_owner)
@@ -685,11 +684,10 @@
if any(email in predicate for email in emails):
deleted_rows.append(rule_row)
continue
- if any(
- (('add_notify:%s' % email) in consequence or
- ('add_cc_id:%s' % user_id) in consequence or
- ('default_owner_id:%s' % user_id) in consequence)
- for email, user_id in user_ids_by_email.iteritems()):
+ if any((('add_notify:%s' % email) in consequence or
+ ('add_cc_id:%s' % user_id) in consequence or
+ ('default_owner_id:%s' % user_id) in consequence)
+ for email, user_id in user_ids_by_email.items()):
deleted_rows.append(rule_row)
continue
@@ -799,8 +797,8 @@
self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id])
if not hotlist.owner_ids: # Should never happen.
- logging.warn('Modifying unowned Hotlist: id:%r, name:%r',
- hotlist_id, hotlist.name)
+ logging.warning(
+ 'Modifying unowned Hotlist: id:%r, name:%r', hotlist_id, hotlist.name)
elif hotlist.name:
self.hotlist_id_2lc.InvalidateKeys(
cnxn, [(hotlist.name.lower(), owner_id) for
@@ -900,8 +898,10 @@
affected_issue_ids.update(remove_issue_ids)
self.hotlist2issue_tbl.Delete(
cnxn, hotlist_id=hotlist_id, issue_id=remove_issue_ids, commit=False)
- all_hotlist_items = filter(
- lambda item: item.issue_id not in remove_issue_ids, all_hotlist_items)
+ all_hotlist_items = list(
+ filter(
+ lambda item: item.issue_id not in remove_issue_ids,
+ all_hotlist_items))
if updated_items:
updated_issue_ids = [item.issue_id for item in updated_items]
@@ -916,9 +916,10 @@
item.date_added, item.note))
self.hotlist2issue_tbl.InsertRows(
cnxn, cols=HOTLIST2ISSUE_COLS, row_values=insert_rows, commit=False)
- all_hotlist_items = filter(
- lambda item: item.issue_id not in updated_issue_ids,
- all_hotlist_items)
+ all_hotlist_items = list(
+ filter(
+ lambda item: item.issue_id not in updated_issue_ids,
+ all_hotlist_items))
all_hotlist_items.extend(updated_items)
if commit:
@@ -1270,8 +1271,9 @@
self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.owner_ids)
self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.editor_ids)
if not hotlist.owner_ids: # Should never happen.
- logging.warn('Soft-deleting unowned Hotlist: id:%r, name:%r',
- hotlist_id, hotlist.name)
+ logging.warning(
+ 'Soft-deleting unowned Hotlist: id:%r, name:%r', hotlist_id,
+ hotlist.name)
elif hotlist.name:
self.hotlist_id_2lc.InvalidateKeys(
cnxn, [(hotlist.name.lower(), owner_id) for
diff --git a/services/fulltext_helpers.py b/services/fulltext_helpers.py
index 80d4264..2da6d68 100644
--- a/services/fulltext_helpers.py
+++ b/services/fulltext_helpers.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of helpers functions for fulltext search."""
@@ -14,8 +13,8 @@
from google.appengine.api import search
import settings
-from proto import ast_pb2
-from proto import tracker_pb2
+from mrproto import ast_pb2
+from mrproto import tracker_pb2
from search import query2ast
# GAE search API can only respond with 500 results per call.
@@ -103,7 +102,7 @@
limit=_SEARCH_RESULT_CHUNK_SIZE, returned_fields=[], ids_only=True,
cursor=search.Cursor())))
except ValueError as e:
- raise query2ast.InvalidQueryError(e.message)
+ raise query2ast.InvalidQueryError(str(e))
logging.info('got %d initial results', len(response.results))
ids = [int(result.doc_id) for result in response]
diff --git a/services/issue_svc.py b/services/issue_svc.py
index 8e5a45f..ad50f81 100644
--- a/services/issue_svc.py
+++ b/services/issue_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of functions that provide persistence for Monorail issue tracking.
@@ -37,8 +36,8 @@
from framework import permissions
from framework import sql
from infra_libs import ts_mon
-from proto import project_pb2
-from proto import tracker_pb2
+from mrproto import project_pb2
+from mrproto import tracker_pb2
from services import caches
from services import tracker_fulltext
from tracker import tracker_bizobj
@@ -78,10 +77,10 @@
ISSUE_COLS = [
'id', 'project_id', 'local_id', 'status_id', 'owner_id', 'reporter_id',
- 'opened', 'closed', 'modified',
- 'owner_modified', 'status_modified', 'component_modified',
- 'derived_owner_id', 'derived_status_id',
- 'deleted', 'star_count', 'attachment_count', 'is_spam']
+ 'opened', 'closed', 'modified', 'owner_modified', 'status_modified',
+ 'component_modified', 'migration_modified', 'derived_owner_id',
+ 'derived_status_id', 'deleted', 'star_count', 'attachment_count', 'is_spam'
+]
ISSUESUMMARY_COLS = ['issue_id', 'summary']
ISSUE2LABEL_COLS = ['issue_id', 'label_id', 'derived']
ISSUE2COMPONENT_COLS = ['issue_id', 'component_id', 'derived']
@@ -111,7 +110,9 @@
'ext_issue_identifier', 'kind']
ISSUEUPDATE_COLS = [
'id', 'issue_id', 'comment_id', 'field', 'old_value', 'new_value',
- 'added_user_id', 'removed_user_id', 'custom_field_name']
+ 'added_user_id', 'removed_user_id', 'custom_field_name',
+ 'added_component_id', 'removed_component_id'
+]
ISSUEFORMERLOCATIONS_COLS = ['issue_id', 'project_id', 'local_id']
REINDEXQUEUE_COLS = ['issue_id', 'created']
ISSUESNAPSHOT_COLS = ['id', 'issue_id', 'shard', 'project_id', 'local_id',
@@ -188,10 +189,12 @@
def _UnpackIssue(self, cnxn, issue_row):
"""Partially construct an issue object using info from a DB row."""
- (issue_id, project_id, local_id, status_id, owner_id, reporter_id,
- opened, closed, modified, owner_modified, status_modified,
- component_modified, derived_owner_id, derived_status_id,
- deleted, star_count, attachment_count, is_spam) = issue_row
+ (
+ issue_id, project_id, local_id, status_id, owner_id, reporter_id,
+ opened, closed, modified, owner_modified, status_modified,
+ component_modified, migration_modified, derived_owner_id,
+ derived_status_id, deleted, star_count, attachment_count,
+ is_spam) = issue_row
issue = tracker_pb2.Issue()
project = self.project_service.GetProject(cnxn, project_id)
@@ -222,6 +225,8 @@
issue.status_modified_timestamp = status_modified
if component_modified:
issue.component_modified_timestamp = component_modified
+ if migration_modified:
+ issue.migration_modified_timestamp = migration_modified
issue.star_count = star_count
issue.attachment_count = attachment_count
issue.is_spam = bool(is_spam)
@@ -361,7 +366,7 @@
elif kind == 'mergedinto':
src_issue.merged_into_external = ext_id
else:
- logging.warn('unhandled danging relation kind %r', kind)
+      logging.warning('unhandled dangling relation kind %r', kind)
continue
return results_dict
@@ -1035,21 +1040,15 @@
"""
status_id = self._config_service.LookupStatusID(
cnxn, issue.project_id, issue.status)
- row = (issue.project_id, issue.local_id, status_id,
- issue.owner_id or None,
- issue.reporter_id,
- issue.opened_timestamp,
- issue.closed_timestamp,
- issue.modified_timestamp,
- issue.owner_modified_timestamp,
- issue.status_modified_timestamp,
- issue.component_modified_timestamp,
- issue.derived_owner_id or None,
- self._config_service.LookupStatusID(
- cnxn, issue.project_id, issue.derived_status),
- bool(issue.deleted),
- issue.star_count, issue.attachment_count,
- issue.is_spam)
+ row = (
+ issue.project_id, issue.local_id, status_id, issue.owner_id or
+ None, issue.reporter_id, issue.opened_timestamp, issue.closed_timestamp,
+ issue.modified_timestamp, issue.owner_modified_timestamp,
+ issue.status_modified_timestamp, issue.component_modified_timestamp,
+ issue.migration_modified_timestamp, issue.derived_owner_id or None,
+ self._config_service.LookupStatusID(
+ cnxn, issue.project_id, issue.derived_status), bool(issue.deleted),
+ issue.star_count, issue.attachment_count, issue.is_spam)
# ISSUE_COLs[1:] to skip setting the ID
# Insert into the Primary DB.
generated_ids = self.issue_tbl.InsertRows(
@@ -1095,25 +1094,43 @@
assert not issue.assume_stale, (
'issue2514: Storing issue that might be stale: %r' % issue)
delta = {
- 'project_id': issue.project_id,
- 'local_id': issue.local_id,
- 'owner_id': issue.owner_id or None,
- 'status_id': self._config_service.LookupStatusID(
- cnxn, issue.project_id, issue.status) or None,
- 'opened': issue.opened_timestamp,
- 'closed': issue.closed_timestamp,
- 'modified': issue.modified_timestamp,
- 'owner_modified': issue.owner_modified_timestamp,
- 'status_modified': issue.status_modified_timestamp,
- 'component_modified': issue.component_modified_timestamp,
- 'derived_owner_id': issue.derived_owner_id or None,
- 'derived_status_id': self._config_service.LookupStatusID(
- cnxn, issue.project_id, issue.derived_status) or None,
- 'deleted': bool(issue.deleted),
- 'star_count': issue.star_count,
- 'attachment_count': issue.attachment_count,
- 'is_spam': issue.is_spam,
- }
+ 'project_id':
+ issue.project_id,
+ 'local_id':
+ issue.local_id,
+ 'owner_id':
+ issue.owner_id or None,
+ 'status_id':
+ self._config_service.LookupStatusID(
+ cnxn, issue.project_id, issue.status) or None,
+ 'opened':
+ issue.opened_timestamp,
+ 'closed':
+ issue.closed_timestamp,
+ 'modified':
+ issue.modified_timestamp,
+ 'owner_modified':
+ issue.owner_modified_timestamp,
+ 'status_modified':
+ issue.status_modified_timestamp,
+ 'component_modified':
+ issue.component_modified_timestamp,
+ 'migration_modified':
+ issue.migration_modified_timestamp,
+ 'derived_owner_id':
+ issue.derived_owner_id or None,
+ 'derived_status_id':
+ self._config_service.LookupStatusID(
+ cnxn, issue.project_id, issue.derived_status) or None,
+ 'deleted':
+ bool(issue.deleted),
+ 'star_count':
+ issue.star_count,
+ 'attachment_count':
+ issue.attachment_count,
+ 'is_spam':
+ issue.is_spam,
+ }
if update_cols is not None:
delta = {key: val for key, val in delta.items()
if key in update_cols}
@@ -1514,6 +1531,7 @@
# update the modified_timestamp for any comment added, even if it was
# just a text comment with no issue fields changed.
issue.modified_timestamp = timestamp
+ issue.migration_modified_timestamp = timestamp
# Update the closed timestamp before filter rules so that rules
# can test for closed_timestamp, and also after filter rules
@@ -1791,7 +1809,8 @@
"""
issue = self.GetIssueByLocalID(cnxn, project_id, local_id, use_cache=False)
issue.deleted = deleted
- self.UpdateIssue(cnxn, issue, update_cols=['deleted'])
+ issue.migration_modified_timestamp = int(time.time())
+ self.UpdateIssue(cnxn, issue, update_cols=['deleted', 'migration_modified'])
tracker_fulltext.IndexIssues(
cnxn, [issue], user_service, self, self._config_service)
@@ -1910,9 +1929,10 @@
def _UnpackAmendment(self, amendment_row):
"""Construct an Amendment PB from a DB row."""
- (_id, _issue_id, comment_id, field_name,
- old_value, new_value, added_user_id, removed_user_id,
- custom_field_name) = amendment_row
+ (
+ _id, _issue_id, comment_id, field_name, old_value, new_value,
+ added_user_id, removed_user_id, custom_field_name, added_component_id,
+ removed_component_id) = amendment_row
amendment = tracker_pb2.Amendment()
field_enum = tracker_pb2.FieldID(field_name.upper())
amendment.field = field_enum
@@ -1928,6 +1948,12 @@
amendment.removed_user_ids.append(removed_user_id)
if custom_field_name:
amendment.custom_field_name = custom_field_name
+ if added_component_id:
+ added_component_id = int(added_component_id)
+ amendment.added_component_ids.append(added_component_id)
+ if removed_component_id:
+ removed_component_id = int(removed_component_id)
+ amendment.removed_component_ids.append(removed_component_id)
return amendment, comment_id
def _ConsolidateAmendments(self, amendments):
@@ -1962,6 +1988,12 @@
new_amendment.removed_user_ids.extend(amendment.removed_user_ids)
if amendment.custom_field_name:
new_amendment.custom_field_name = amendment.custom_field_name
+ if amendment.added_component_ids:
+ new_amendment.added_component_ids.extend(
+ amendment.added_component_ids)
+ if amendment.removed_component_ids:
+ new_amendment.removed_component_ids.extend(
+ amendment.removed_component_ids)
result.append(new_amendment)
return result
@@ -2164,18 +2196,31 @@
field_enum = str(amendment.field).lower()
if (amendment.get_assigned_value('newvalue') is not None and
not amendment.added_user_ids and not amendment.removed_user_ids):
- amendment_rows.append((
- comment.issue_id, comment_id, field_enum,
- amendment.oldvalue, amendment.newvalue,
- None, None, amendment.custom_field_name))
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, amendment.oldvalue,
+ amendment.newvalue, None, None, amendment.custom_field_name,
+ None, None))
for added_user_id in amendment.added_user_ids:
- amendment_rows.append((
- comment.issue_id, comment_id, field_enum, None, None,
- added_user_id, None, amendment.custom_field_name))
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None,
+ added_user_id, None, amendment.custom_field_name, None, None))
for removed_user_id in amendment.removed_user_ids:
- amendment_rows.append((
- comment.issue_id, comment_id, field_enum, None, None,
- None, removed_user_id, amendment.custom_field_name))
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None, None,
+ removed_user_id, amendment.custom_field_name, None, None))
+ for added_component_id in amendment.added_component_ids:
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None, None,
+ None, amendment.custom_field_name, added_component_id, None))
+ for removed_component_id in amendment.removed_component_ids:
+ amendment_rows.append(
+ (
+ comment.issue_id, comment_id, field_enum, None, None, None,
+ None, amendment.custom_field_name, None, removed_component_id))
# ISSUEUPDATE_COLS[1:] to skip id column.
self.issueupdate_tbl.InsertRows(
cnxn, ISSUEUPDATE_COLS[1:], amendment_rows, commit=False)
@@ -2369,16 +2414,19 @@
if not issue_comment.deleted_by:
issue_comment.deleted_by = deleted_by_user_id
issue.attachment_count = issue.attachment_count - attachments
+ issue.migration_modified_timestamp = int(time.time())
# Undelete only if it's in deleted state
elif issue_comment.deleted_by:
issue_comment.deleted_by = 0
issue.attachment_count = issue.attachment_count + attachments
+ issue.migration_modified_timestamp = int(time.time())
issue_comment.is_spam = is_spam
self._UpdateComment(
cnxn, issue_comment, update_cols=['deleted_by', 'is_spam'])
- self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])
+ self.UpdateIssue(
+ cnxn, issue, update_cols=['attachment_count', 'migration_modified'])
# Reindex the issue to take the comment deletion/undeletion into account.
if reindex:
@@ -2576,10 +2624,12 @@
if delete:
if not attachment.deleted:
issue.attachment_count = issue.attachment_count - 1
+ issue.migration_modified_timestamp = int(time.time())
# Increment attachment count only if it's in deleted state
elif attachment.deleted:
issue.attachment_count = issue.attachment_count + 1
+ issue.migration_modified_timestamp = int(time.time())
logging.info('attachment.deleted was %s', attachment.deleted)
@@ -2589,7 +2639,8 @@
self._UpdateAttachment(
cnxn, issue_comment, attachment, update_cols=['deleted'])
- self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])
+ self.UpdateIssue(
+ cnxn, issue, update_cols=['attachment_count', 'migration_modified'])
if index_now:
tracker_fulltext.IndexIssues(
@@ -2782,9 +2833,11 @@
user_ids = list(user_ids_by_email.values())
user_emails = list(user_ids_by_email.keys())
# Track issue_ids for issues that will have different search documents
- # as a result of removing users.
+ # and need updates to modification time as a result of removing users.
affected_issue_ids = []
+ timestamp = int(time.time())
+
# Reassign commenter_id and delete inbound_messages.
shard_id = sql.RandomShardID()
comment_content_id_rows = self.comment_tbl.Select(
@@ -2868,6 +2921,18 @@
# User rows can be deleted safely. No limit will be applied.
# Remove users in issue updates.
+ user_added_id_rows = self.issueupdate_tbl.Select(
+ cnxn,
+ cols=['IssueUpdate.issue_id'],
+ added_user_id=user_ids,
+ shard_id=shard_id,
+ limit=limit)
+ user_removed_id_rows = self.issueupdate_tbl.Select(
+ cnxn,
+ cols=['IssueUpdate.issue_id'],
+ removed_user_id=user_ids,
+ shard_id=shard_id,
+ limit=limit)
self.issueupdate_tbl.Update(
cnxn,
{'added_user_id': framework_constants.DELETED_USER_ID},
@@ -2878,6 +2943,8 @@
{'removed_user_id': framework_constants.DELETED_USER_ID},
removed_user_id=user_ids,
commit=commit)
+ affected_issue_ids.extend([row[0] for row in user_added_id_rows])
+ affected_issue_ids.extend([row[0] for row in user_removed_id_rows])
# Remove users in issue notify.
self.issue2notify_tbl.Delete(
@@ -2897,4 +2964,12 @@
self.issuesnapshot2cc_tbl.Delete(
cnxn, cc_id=user_ids, commit=commit, limit=limit)
- return list(set(affected_issue_ids))
+ # Update migration_modified timestamp for affected issues.
+ deduped_issue_ids = list(set(affected_issue_ids))
+ if deduped_issue_ids:
+ self.issue_tbl.Update(
+ cnxn, {'migration_modified': timestamp},
+ id=deduped_issue_ids,
+ commit=commit)
+
+ return deduped_issue_ids
diff --git a/services/ml_helpers.py b/services/ml_helpers.py
index d05a582..6db23d4 100644
--- a/services/ml_helpers.py
+++ b/services/ml_helpers.py
@@ -1,7 +1,6 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""
Helper functions for spam and component classification. These are mostly for
@@ -16,7 +15,6 @@
import csv
import hashlib
-import httplib2
import logging
import re
import sys
@@ -31,7 +29,7 @@
SPAM_COLUMNS = ['verdict', 'subject', 'content', 'email']
LEGACY_CSV_COLUMNS = ['verdict', 'subject', 'content']
-DELIMITERS = ['\s', '\,', '\.', '\?', '!', '\:', '\(', '\)']
+DELIMITERS = [r'\s', r'\,', r'\.', r'\?', '!', r'\:', r'\(', r'\)']
# Must be identical to settings.spam_feature_hashes.
SPAM_FEATURE_HASHES = 500
@@ -175,7 +173,7 @@
"""Sets up an instance of ml engine for ml classes."""
try:
credentials = GoogleCredentials.get_application_default()
- ml_engine = build('ml', 'v1', http=httplib2.Http(), credentials=credentials)
+ ml_engine = build('ml', 'v1', credentials=credentials)
return ml_engine
except (Oauth2ClientError, ApiClientError):
diff --git a/services/project_svc.py b/services/project_svc.py
index e92f6a9..00ad219 100644
--- a/services/project_svc.py
+++ b/services/project_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of functions that provide persistence for projects.
@@ -27,7 +26,7 @@
from framework import sql
from services import caches
from project import project_helpers
-from proto import project_pb2
+from mrproto import project_pb2
PROJECT_TABLE_NAME = 'Project'
@@ -321,9 +320,9 @@
return projects_dict
- def GetVisibleLiveProjects(
+ def GetVisibleProjects(
self, cnxn, logged_in_user, effective_ids, domain=None, use_cache=True):
- """Return all user visible live project ids.
+ """Return all user visible project ids.
Args:
cnxn: connection to SQL database.
@@ -334,7 +333,7 @@
buffers.
Returns:
- A list of project ids of user visible live projects sorted by the names
+ A list of project ids of user visible projects sorted by the names
of the projects. If host was provided, only projects with that host
as their branded domain will be returned.
"""
@@ -599,7 +598,7 @@
elif role_name == 'contributor':
contrib_project_ids.add(project_id)
else:
- logging.warn('Unexpected role name %r', role_name)
+ logging.warning('Unexpected role name %r', role_name)
return owned_project_ids, membered_project_ids, contrib_project_ids
diff --git a/services/secrets_svc.py b/services/secrets_svc.py
index 7b861ce..dec80da 100644
--- a/services/secrets_svc.py
+++ b/services/secrets_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of functions that provide persistence for secret keys.
@@ -27,12 +26,11 @@
from __future__ import division
from __future__ import absolute_import
-import logging
+import six
from google.appengine.api import memcache
from google.appengine.ext import ndb
-import settings
from framework import framework_helpers
@@ -73,15 +71,14 @@
def GetXSRFKey():
"""Return a secret key string used to generate XSRF tokens."""
- return GetSecrets().xsrf_key
+ return six.ensure_binary(GetSecrets().xsrf_key)
def GetEmailKey():
"""Return a secret key string used to generate email tokens."""
- return GetSecrets().email_key
+ return six.ensure_binary(GetSecrets().email_key)
def GetPaginationKey():
"""Return a secret key string used to generate pagination tokens."""
- return GetSecrets().pagination_key
-
+ return six.ensure_binary(GetSecrets().pagination_key)
diff --git a/services/service_manager.py b/services/service_manager.py
index 1cb886a..2458105 100644
--- a/services/service_manager.py
+++ b/services/service_manager.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Service manager to initialize all services."""
from __future__ import print_function
diff --git a/services/spam_svc.py b/services/spam_svc.py
index e916830..02ec7d8 100644
--- a/services/spam_svc.py
+++ b/services/spam_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
""" Set of functions for detaling with spam reports.
"""
@@ -12,6 +11,7 @@
import collections
import logging
import settings
+import time
from collections import defaultdict
from framework import sql
@@ -173,7 +173,10 @@
def FlagIssues(self, cnxn, issue_service, issues, reporting_user_id,
flagged_spam):
- """Creates or deletes a spam report on an issue."""
+ """Creates or deletes a spam report on an issue.
+
+ This function is run when a user flags an issue as spam but does not
+ have 'VerdictSpam' permission."""
verdict_updates = []
if flagged_spam:
rows = [(issue.issue_id, issue.reporter_id, reporting_user_id)
@@ -215,9 +218,11 @@
self.verdict_tbl.InsertRows(cnxn, THRESHVERDICT_ISSUE_COLS, rows,
ignore=True)
update_issues = []
+ current_time = int(time.time())
for issue in issues:
if issue.issue_id in verdict_updates:
issue.is_spam = flagged_spam
+ issue.migration_modified_timestamp = current_time
update_issues.append(issue)
if flagged_spam:
@@ -230,7 +235,8 @@
'issue': issue_ref
})
- issue_service.UpdateIssues(cnxn, update_issues, update_cols=['is_spam'])
+ issue_service.UpdateIssues(
+ cnxn, update_issues, update_cols=['is_spam', 'migration_modified'])
def FlagComment(
self, cnxn, issue, comment_id, reported_user_id, reporting_user_id,
@@ -262,10 +268,19 @@
def RecordClassifierIssueVerdict(self, cnxn, issue, is_spam, confidence,
fail_open):
+ """Records a judgment call on whether a new issue is spam.
+
+ Only run when an issue is newly filed. If the issue is determined to be
+ likely spam, the code increments a counter."""
reason = REASON_FAIL_OPEN if fail_open else REASON_CLASSIFIER
- self.verdict_tbl.InsertRow(cnxn, issue_id=issue.issue_id, is_spam=is_spam,
- reason=reason, classifier_confidence=confidence,
- project_id=issue.project_id)
+ self.verdict_tbl.InsertRow(
+ cnxn,
+ issue_id=issue.issue_id,
+ is_spam=is_spam,
+ reason=reason,
+ classifier_confidence=confidence,
+ project_id=issue.project_id,
+ overruled=False)
if is_spam:
issue_ref = '%s:%s' % (issue.project_name, issue.local_id)
self.issue_actions.increment(
@@ -278,6 +293,9 @@
def RecordManualIssueVerdicts(self, cnxn, issue_service, issues, user_id,
is_spam):
+ """Bypasses the classifier to manually classify an issue as spam.
+
+ This code can only be run by users with the 'VerdictSpam' permission."""
rows = [(user_id, issue.issue_id, is_spam, REASON_MANUAL, issue.project_id)
for issue in issues]
issue_ids = [issue.issue_id for issue in issues]
@@ -290,8 +308,10 @@
self.verdict_tbl.InsertRows(cnxn, MANUALVERDICT_ISSUE_COLS, rows,
ignore=True)
+ current_time = int(time.time())
for issue in issues:
issue.is_spam = is_spam
+ issue.migration_modified_timestamp = current_time
if is_spam:
for issue in issues:
@@ -306,10 +326,14 @@
issue_service.AllocateNewLocalIDs(cnxn, issues)
# This will commit the transaction.
- issue_service.UpdateIssues(cnxn, issues, update_cols=['is_spam'])
+ issue_service.UpdateIssues(
+ cnxn, issues, update_cols=['is_spam', 'migration_modified'])
def RecordManualCommentVerdict(self, cnxn, issue_service, user_service,
comment_id, user_id, is_spam):
+ """Bypasses the classifier to manually classify a comment as spam.
+
+ This code can only be run by users with the 'VerdictSpam' permission."""
# TODO(seanmccullough): Bulk comment verdicts? There's no UI for that.
self.verdict_tbl.InsertRow(cnxn, ignore=True,
user_id=user_id, comment_id=comment_id, is_spam=is_spam,
diff --git a/services/star_svc.py b/services/star_svc.py
index bb92e73..4ef045e 100644
--- a/services/star_svc.py
+++ b/services/star_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of functions that provide persistence for stars.
@@ -13,7 +12,7 @@
import logging
-import settings
+import time
from features import filterrules_helpers
from framework import sql
from services import caches
@@ -55,7 +54,14 @@
self.star_count_cache = caches.RamCache(cache_manager, cache_kind)
def ExpungeStars(self, cnxn, item_id, commit=True, limit=None):
- """Wipes an item's stars from the system."""
+ """Wipes an item's stars from the system.
+
+ Args:
+ cnxn: connection to SQL database.
+      item_id: ID of the item that's starred, e.g. an issue, project, etc.
+ commit: whether to commit the change.
+ limit: max stars to delete for performance reasons.
+ """
self.tbl.Delete(
cnxn, commit=commit, limit=limit, **{self.item_col: item_id})
@@ -159,7 +165,6 @@
self._SetStarsBatch(cnxn, item_id, [starrer_user_id], starred)
-
class UserStarService(AbstractStarService):
"""Star service for stars on users."""
@@ -195,6 +200,46 @@
super(IssueStarService, self).__init__(
cache_manager, tbl, 'issue_id', 'user_id', 'issue')
+ # HACK. Usually Monorail SQL table references should stay in their
+ # respective service layer class. But for performance reasons, it's better
+ # for us to directly query the Issue table here.
+ self.issue_tbl = sql.SQLTableManager('Issue')
+
+ def ExpungeStarsByUsers(self, cnxn, user_ids, limit=None):
+ """Wipes a user's stars from the system.
+
+ Ensure that issue metadata is updated on expunging.
+
+ Args:
+ cnxn: connection to SQL database.
+ user_ids: users to delete stars for.
+ limit: max stars to delete for performance reasons.
+ """
+ # TODO(zhangtiff): update star_count for updated issues. This is tricky
+ # because star_count needs to be recomputd for each issue, so this likely
+ # requires a task queue.
+
+ timestamp = int(time.time())
+
+ shard_id = sql.RandomShardID()
+ issue_id_rows = self.tbl.Select(
+ cnxn,
+ cols=['IssueStar.issue_id'],
+ user_id=user_ids,
+ shard_id=shard_id,
+ limit=limit)
+
+ super(IssueStarService, self).ExpungeStarsByUsers(
+ cnxn, user_ids, limit=limit)
+ issue_ids = [row[0] for row in issue_id_rows]
+ if issue_ids:
+ self.issue_tbl.Update(
+ cnxn, {'migration_modified': timestamp},
+ id=issue_ids,
+ commit=False,
+ limit=limit)
+
# pylint: disable=arguments-differ
def SetStar(
self, cnxn, services, config, issue_id, starrer_user_id, starred):
@@ -232,6 +277,7 @@
# Because we will modify issues, load from DB rather than cache.
issue = services.issue.GetIssue(cnxn, issue_id, use_cache=False)
issue.star_count = self.CountItemStars(cnxn, issue_id)
+ issue.migration_modified_timestamp = int(time.time())
filterrules_helpers.ApplyFilterRules(cnxn, services, issue, config)
# Note: only star_count could change due to the starring, but any
# field could have changed as a result of filter rules.
diff --git a/services/template_svc.py b/services/template_svc.py
index edfde05..66159db 100644
--- a/services/template_svc.py
+++ b/services/template_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""The TemplateService class providing methods for template persistence."""
from __future__ import print_function
@@ -15,7 +14,7 @@
from framework import exceptions
from framework import sql
-from proto import tracker_pb2
+from mrproto import tracker_pb2
from services import caches
from services import project_svc
from tracker import tracker_bizobj
diff --git a/services/test/api_pb2_v1_helpers_test.py b/services/test/api_pb2_v1_helpers_test.py
index 460f5c3..ac94d57 100644
--- a/services/test/api_pb2_v1_helpers_test.py
+++ b/services/test/api_pb2_v1_helpers_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the API v1 helpers."""
from __future__ import print_function
@@ -17,10 +16,10 @@
from framework import profiler
from services import api_pb2_v1_helpers
from services import service_manager
-from proto import api_pb2_v1
-from proto import project_pb2
-from proto import tracker_pb2
-from proto import usergroup_pb2
+from mrproto import api_pb2_v1
+from mrproto import project_pb2
+from mrproto import tracker_pb2
+from mrproto import usergroup_pb2
from testing import fake
from tracker import tracker_bizobj
@@ -279,7 +278,8 @@
# TODO(jrobbins): set up a lot more fields.
for cls in [api_pb2_v1.IssueWrapper, api_pb2_v1.IssuesGetInsertResponse]:
- result = api_pb2_v1_helpers.convert_issue(cls, issue, mar, self.services)
+ result = api_pb2_v1_helpers.convert_issue(
+ cls, issue, mar, self.services, migrated_id='12345')
self.assertEqual(1, result.id)
self.assertEqual('one', result.title)
self.assertEqual('one', result.summary)
@@ -323,6 +323,7 @@
[api_pb2_v1.Phase(phaseName="JustAPhase", rank=4),
api_pb2_v1.Phase(phaseName="NotAPhase", rank=9)
])
+ self.assertEqual('12345', result.migrated_id)
# TODO(jrobbins): check a lot more fields.
diff --git a/services/test/api_svc_v1_test.py b/services/test/api_svc_v1_test.py
index b7cd9b1..72f7aee 100644
--- a/services/test/api_svc_v1_test.py
+++ b/services/test/api_svc_v1_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the API v1."""
from __future__ import print_function
@@ -9,9 +8,11 @@
from __future__ import absolute_import
import datetime
+from unittest import mock
import endpoints
import logging
from mock import Mock, patch, ANY
+import six
import time
import unittest
import webtest
@@ -27,9 +28,9 @@
from framework import permissions
from framework import profiler
from framework import template_helpers
-from proto import api_pb2_v1
-from proto import project_pb2
-from proto import tracker_pb2
+from mrproto import api_pb2_v1
+from mrproto import project_pb2
+from mrproto import tracker_pb2
from search import frontendsearchpipeline
from services import api_svc_v1
from services import service_manager
@@ -40,6 +41,7 @@
from testing_utils import testing
from tracker import tracker_bizobj
from tracker import tracker_constants
+from redirect import redirect_utils
def MakeFakeServiceManager():
@@ -163,7 +165,7 @@
oauth.get_current_user.side_effect = oauth.Error()
with self.assertRaises(webtest.AppError) as cm:
self.call_api('users_get', self.request)
- self.assertTrue(cm.exception.message.startswith('Bad response: 401'))
+ self.assertTrue(str(cm.exception).startswith('Bad response: 401'))
class MonorailApiTest(testing.EndpointsTestCase):
@@ -198,6 +200,7 @@
lambda x, y, z, u, v, w: ('id', 'email'))
self.mock(tracker_fulltext, 'IndexIssues', lambda x, y, z, u, v: None)
+ self.mock(tracker_fulltext, 'UnindexIssues', lambda _: None)
def SetUpComponents(
self, project_id, component_id, component_name, component_doc='doc',
@@ -303,6 +306,85 @@
self.assertEqual('Field1', resp['fieldValues'][0]['fieldName'])
self.assertEqual('11', resp['fieldValues'][0]['fieldValue'])
+ @mock.patch('businesslogic.work_env.WorkEnv.GetIssueMigratedID')
+ def testIssuesGet_GetIssue_MigratedId(self, mockGetIssueMigratedId):
+ """Get the requested issue."""
+ mockGetIssueMigratedId.return_value = '23456'
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[222], project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+ self.SetUpFieldDefs(1, 12345, 'Field1', tracker_pb2.FieldTypes.INT_TYPE)
+
+ fv = tracker_pb2.FieldValue(field_id=1, int_value=11)
+ issue1 = fake.MakeTestIssue(
+ project_id=12345,
+ local_id=1,
+ owner_id=222,
+ reporter_id=111,
+ status='New',
+ summary='sum',
+ component_ids=[1],
+ field_values=[fv])
+ self.services.issue.TestAddIssue(issue1)
+
+ resp = self.call_api('issues_get', self.request).json_body
+ self.assertEqual(1, resp['id'])
+ self.assertEqual('New', resp['status'])
+ self.assertEqual('open', resp['state'])
+ self.assertFalse(resp['canEdit'])
+ self.assertTrue(resp['canComment'])
+ self.assertEqual('requester@example.com', resp['author']['name'])
+ self.assertEqual('user@example.com', resp['owner']['name'])
+ self.assertEqual('API', resp['components'][0])
+ self.assertEqual('Field1', resp['fieldValues'][0]['fieldName'])
+ self.assertEqual('11', resp['fieldValues'][0]['fieldValue'])
+ self.assertEqual('23456', resp['migrated_id'])
+
+ @patch('framework.cloud_tasks_helpers.create_task')
+ def testIssuesInsert_FreezeLabels(self, _create_task_mock):
+ """Attempts to add new labels are blocked"""
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[222], committer_ids=[111], project_id=999)
+ self.SetUpFieldDefs(1, 999, 'Field1', tracker_pb2.FieldTypes.INT_TYPE)
+
+ issue1 = fake.MakeTestIssue(
+ project_id=999,
+ local_id=1,
+ owner_id=222,
+ reporter_id=111,
+ status='New',
+ summary='Test issue')
+ self.services.issue.TestAddIssue(issue1)
+
+ issue_dict = {
+ 'blockedOn': [{
+ 'issueId': 1
+ }],
+ 'cc': [{
+ 'name': 'user@example.com'
+ }, {
+ 'name': ''
+ }, {
+ 'name': ' '
+ }],
+ 'description': 'description',
+ 'labels': ['freeze_new_label', 'label1'],
+ 'owner': {
+ 'name': 'requester@example.com'
+ },
+ 'status': 'New',
+ 'summary': 'Test issue',
+ 'fieldValues': [{
+ 'fieldName': 'Field1',
+ 'fieldValue': '11'
+ }]
+ }
+ self.request.update(issue_dict)
+
+ with self.call_should_fail(400):
+ self.call_api('issues_insert', self.request)
+
def testIssuesInsert_BadRequest(self):
"""The request does not specify summary or status."""
@@ -573,6 +655,36 @@
with self.call_should_fail(403):
self.call_api('issues_comments_insert', self.request)
+ def testIssuesCommentsInsert_ArchivedProject(self):
+ """No permission to comment in an archived project."""
+ self.services.project.TestAddProject(
+ 'test-project',
+ owner_ids=[111],
+ state=project_pb2.ProjectState.ARCHIVED,
+ project_id=12345)
+ issue1 = fake.MakeTestIssue(12345, 1, 'Issue 1', 'New', 2)
+ self.services.issue.TestAddIssue(issue1)
+
+ self.services.project.TestAddProject(
+ 'archived-project', owner_ids=[222], project_id=6789)
+ issue2 = fake.MakeTestIssue(
+ 6789, 2, 'Issue 2', 'New', 222, project_name='archived-project')
+ self.services.issue.TestAddIssue(issue2)
+
+ self.request['updates'] = {
+ 'blockedOn': ['archived-project:2'],
+ 'mergedInto': '',
+ }
+ with self.call_should_fail(403):
+ self.call_api('issues_comments_insert', self.request)
+
+ self.request['updates'] = {
+ 'blockedOn': [],
+ 'mergedInto': 'archived-project:2',
+ }
+ with self.call_should_fail(403):
+ self.call_api('issues_comments_insert', self.request)
+
def testIssuesCommentsInsert_CommentPermissionOnly(self):
"""User has permission to comment, even though they cannot edit."""
self.services.project.TestAddProject(
@@ -600,6 +712,28 @@
with self.call_should_fail(400):
self.call_api('issues_comments_insert', self.request)
+ def testIssuesCommentsInsert_FreezeLabels(self):
+ """Attempts to add new labels are blocked"""
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[111], project_id=999)
+
+ issue1 = fake.MakeTestIssue(
+ 999, 1, 'Issue 1', 'New', 222, project_name='test-project')
+ self.services.issue.TestAddIssue(issue1)
+
+ self.request['updates'] = {
+ 'summary': 'new summary',
+ 'status': 'Started',
+ 'owner': 'requester@example.com',
+ 'cc': ['user@example.com'],
+ 'labels': ['freeze_new_label', '-remove_label'],
+ 'blockedOn': ['2'],
+ 'blocking': ['3'],
+ }
+
+ with self.call_should_fail(400):
+ self.call_api('issues_comments_insert', self.request)
+
def testIssuesCommentsInsert_Amendments_Normal(self):
"""Insert comments with amendments."""
@@ -703,10 +837,10 @@
self.assertEqual(2, len(issue2_comments)) # description and merge
source_starrers = self.services.issue_star.LookupItemStarrers(
'cnxn', issue1.issue_id)
- self.assertItemsEqual([111, 222, 333], source_starrers)
+ six.assertCountEqual(self, [111, 222, 333], source_starrers)
target_starrers = self.services.issue_star.LookupItemStarrers(
'cnxn', issue2.issue_id)
- self.assertItemsEqual([111, 222, 333, 555], target_starrers)
+ six.assertCountEqual(self, [111, 222, 333, 555], target_starrers)
def testIssuesCommentsInsert_CustomFields(self):
"""Update custom field values."""
@@ -1470,9 +1604,13 @@
with self.call_should_fail(403):
self.call_api('groups_create', self.request)
- def SetUpGroupRequest(self, group_name, who_can_view_members='MEMBERS',
- ext_group_type=None, perms=None,
- requester='requester@example.com'):
+ def SetUpGroupRequest(
+ self,
+ group_name,
+ who_can_view_members='MEMBERS',
+ ext_group_type='CHROME_INFRA_AUTH',
+ perms=None,
+ requester='requester@example.com'):
request = {
'groupName': group_name,
'requester': requester,
@@ -1648,7 +1786,8 @@
cd_dict = {
'componentPath': 'API'}
self.request.update(cd_dict)
- _ = self.call_api('components_delete', self.request).json_body
+ with self.assertWarns(webtest.lint.WSGIWarning):
+ _ = self.call_api('components_delete', self.request)
self.assertEqual(0, len(self.config.component_defs))
def testComponentsUpdate_Invalid(self):
@@ -1704,12 +1843,13 @@
'requester@example.com', 'user@example.com', '', ' ']},
{'field': 'DEPRECATED', 'deprecated': True}]}
self.request.update(cd_dict)
- _ = self.call_api('components_update', self.request).json_body
+ with self.assertWarns(webtest.lint.WSGIWarning):
+ _ = self.call_api('components_update', self.request)
component_def = tracker_bizobj.FindComponentDef(
'API', self.config)
self.assertIsNotNone(component_def)
self.assertEqual('', component_def.docstring)
- self.assertItemsEqual([111, 222], component_def.cc_ids)
+ six.assertCountEqual(self, [111, 222], component_def.cc_ids)
self.assertTrue(component_def.deprecated)
cd_dict = {
@@ -1717,7 +1857,8 @@
'updates': [
{'field': 'LEAF_NAME', 'leafName': 'NewParent'}]}
self.request.update(cd_dict)
- _ = self.call_api('components_update', self.request).json_body
+ with self.assertWarns(webtest.lint.WSGIWarning):
+ _ = self.call_api('components_update', self.request)
cd_parent = tracker_bizobj.FindComponentDef(
'NewParent', self.config)
cd_child = tracker_bizobj.FindComponentDef(
@@ -1838,6 +1979,21 @@
api_svc_v1.api_base_checks(
request, requester, self.services, None, self.auth_client_ids, [])
+ def testNonLiveMigratedProject(self):
+ archived_project = 'archived-migrated-project'
+ redirect_utils.PROJECT_REDIRECT_MAP = {
+ 'archived-migrated-project': 'https://example.dev'
+ }
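+ # Note: this replaces the module-level map for the rest of the test run;
+ # mock.patch.dict(redirect_utils.PROJECT_REDIRECT_MAP, ...) would restore
+ # the original mapping automatically.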
+ self.services.project.TestAddProject(
+ archived_project,
+ owner_ids=[111],
+ state=project_pb2.ProjectState.ARCHIVED)
+ request = RequestMock()
+ request.projectId = archived_project
+ requester = RequesterMock(email='test@example.com')
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, self.auth_client_ids, [])
+
def testNoViewProjectPermission(self):
nonmember_email = 'nonmember@example.com'
self.services.user.TestAddUser(nonmember_email, 222)
diff --git a/services/test/cachemanager_svc_test.py b/services/test/cachemanager_svc_test.py
index b84d33e..bd66be4 100644
--- a/services/test/cachemanager_svc_test.py
+++ b/services/test/cachemanager_svc_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the cachemanager service."""
from __future__ import print_function
diff --git a/services/test/caches_test.py b/services/test/caches_test.py
index cd401be..23f793c 100644
--- a/services/test/caches_test.py
+++ b/services/test/caches_test.py
@@ -1,13 +1,13 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the cache classes."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
+import six
import unittest
from google.appengine.api import memcache
@@ -141,10 +141,10 @@
self.assertEqual(3, len(self.sharded_ram_cache.cache))
-class TestableTwoLevelCache(caches.AbstractTwoLevelCache):
+class _TestableTwoLevelCache(caches.AbstractTwoLevelCache):
def __init__(self, cache_manager, kind, max_size=None):
- super(TestableTwoLevelCache, self).__init__(
+ super(_TestableTwoLevelCache, self).__init__(
cache_manager, kind, 'testable:', None, max_size=max_size)
# pylint: disable=unused-argument
@@ -162,7 +162,7 @@
self.cnxn = 'fake connection'
self.cache_manager = fake.CacheManager()
- self.testable_2lc = TestableTwoLevelCache(self.cache_manager, 'issue')
+ self.testable_2lc = _TestableTwoLevelCache(self.cache_manager, 'issue')
def tearDown(self):
self.testbed.deactivate()
@@ -239,8 +239,9 @@
self.assertEqual({123: 12300, 124: 12400, 333: 333, 444: 444}, hits)
self.assertEqual([], misses)
# The RAM cache now has items found in memcache and DB.
- self.assertItemsEqual(
- [123, 124, 125, 333, 444], list(self.testable_2lc.cache.cache.keys()))
+ six.assertCountEqual(
+ self, [123, 124, 125, 333, 444],
+ list(self.testable_2lc.cache.cache.keys()))
def testGetAll_FetchGetsItFromDB(self):
self.testable_2lc.CacheItem(123, 12300)
diff --git a/services/test/chart_svc_test.py b/services/test/chart_svc_test.py
index 470bc80..8392481 100644
--- a/services/test/chart_svc_test.py
+++ b/services/test/chart_svc_test.py
@@ -1,8 +1,7 @@
# -*- coding: utf-8 -*-
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Unit tests for chart_svc module."""
from __future__ import print_function
@@ -25,8 +24,8 @@
from services import service_manager
from framework import permissions
from framework import sql
-from proto import ast_pb2
-from proto import tracker_pb2
+from mrproto import ast_pb2
+from mrproto import tracker_pb2
from search import ast2select
from search import search_helpers
from testing import fake
diff --git a/services/test/client_config_svc_test.py b/services/test/client_config_svc_test.py
index d8a305e..fbcd2f9 100644
--- a/services/test/client_config_svc_test.py
+++ b/services/test/client_config_svc_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the client config service."""
from __future__ import print_function
@@ -9,6 +8,8 @@
from __future__ import absolute_import
import base64
+import binascii
+import six
import unittest
from services import client_config_svc
@@ -20,33 +21,39 @@
def __init__(self, content):
self.content = content
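+ # The fixtures below assume _process_response now receives bytes that
+ # start with the anti-XSSI prefix b")]}'" and carry base64-encoded data
+ # under a 'rawContent' key.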
+ def testProcessResponse_InvalidContent(self):
+ r = self.FakeResponse('')
+ with self.assertRaises(AttributeError):
+ client_config_svc._process_response(r)
+
def testProcessResponse_InvalidJSON(self):
- r = self.FakeResponse('}{')
+ r = self.FakeResponse(b')]}\'}{')
with self.assertRaises(ValueError):
client_config_svc._process_response(r)
def testProcessResponse_NoContent(self):
- r = self.FakeResponse('{"wrong-key": "some-value"}')
+ r = self.FakeResponse(b')]}\'{"wrong-key": "some-value"}')
with self.assertRaises(KeyError):
client_config_svc._process_response(r)
def testProcessResponse_NotB64(self):
# 'asd' is not a valid base64-encoded string.
- r = self.FakeResponse('{"content": "asd"}')
- with self.assertRaises(TypeError):
+ r = self.FakeResponse(b')]}\'{"rawContent": "asd"}')
+ with self.assertRaises(binascii.Error):
client_config_svc._process_response(r)
def testProcessResponse_NotProto(self):
# 'asdf' is a valid base64-encoded string.
- r = self.FakeResponse('{"content": "asdf"}')
- with self.assertRaises(Exception):
+ r = self.FakeResponse(b')]}\'{"rawContent": "asdf"}')
+ with self.assertRaises(UnicodeDecodeError):
client_config_svc._process_response(r)
def testProcessResponse_Success(self):
- with open(client_config_svc.CONFIG_FILE_PATH) as f:
- r = self.FakeResponse('{"content": "%s"}' % base64.b64encode(f.read()))
+ with open(client_config_svc.CONFIG_FILE_PATH, 'rb') as f:
+ r = self.FakeResponse(
+ b')]}\'{"rawContent": "%s"}' % base64.b64encode(f.read()))
c = client_config_svc._process_response(r)
- assert '123456789.apps.googleusercontent.com' in c
+ assert b'123456789.apps.googleusercontent.com' in c
class ClientConfigServiceTest(unittest.TestCase):
diff --git a/services/test/config_svc_test.py b/services/test/config_svc_test.py
index dd2796c..4100d3e 100644
--- a/services/test/config_svc_test.py
+++ b/services/test/config_svc_test.py
@@ -1,17 +1,17 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Unit tests for config_svc module."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
-import re
-import unittest
import logging
import mock
+import re
+import six
+import unittest
try:
from mox3 import mox
@@ -24,7 +24,7 @@
from framework import exceptions
from framework import framework_constants
from framework import sql
-from proto import tracker_pb2
+from mrproto import tracker_pb2
from services import config_svc
from services import template_svc
from testing import fake
@@ -220,7 +220,7 @@
self.componentdef_rows, self.component2admin_rows,
self.component2cc_rows, self.component2label_rows,
self.approvaldef2approver_rows, self.approvaldef2survey_rows)
- self.assertItemsEqual([789], list(config_dict.keys()))
+ six.assertCountEqual(self, [789], list(config_dict.keys()))
config = config_dict[789]
self.assertEqual(789, config.project_id)
self.assertEqual(['Duplicate'], config.statuses_offer_merge)
@@ -280,7 +280,7 @@
self.mox.ReplayAll()
config_dict = self.config_2lc._FetchConfigs(self.cnxn, keys)
self.mox.VerifyAll()
- self.assertItemsEqual(keys, list(config_dict.keys()))
+ six.assertCountEqual(self, keys, list(config_dict.keys()))
def testFetchItems(self):
keys = [678, 789]
@@ -288,7 +288,7 @@
self.mox.ReplayAll()
config_dict = self.config_2lc.FetchItems(self.cnxn, keys)
self.mox.VerifyAll()
- self.assertItemsEqual(keys, list(config_dict.keys()))
+ six.assertCountEqual(self, keys, list(config_dict.keys()))
class ConfigServiceTest(unittest.TestCase):
@@ -441,6 +441,22 @@
self.cnxn, 789, 'NewLabel', autocreate=False))
self.mox.VerifyAll()
+ def testLookupLabelID_CaseSensitive(self):
+ label_dicts = {101: 'security', 201: 'ux'}, {'security': 101, 'ux': 201}
+ self.config_service.label_cache.CacheItem(789, label_dicts)
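+ # The cached dict only holds lowercase label names, so a case-sensitive
+ # lookup for 'Security' must skip the cache and hit the LabelDef query
+ # below, which finds no exact-case match.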
+
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn,
+ cols=['id'],
+ project_id=789,
+ where=[('label = %s', ['Security'])],
+ limit=1).AndReturn([])
+ self.mox.ReplayAll()
+ self.assertIsNone(
+ self.config_service.LookupLabelID(
+ self.cnxn, 789, 'Security', autocreate=False, case_sensitive=True))
+ self.mox.VerifyAll()
+
def testLookupLabelIDs_Hit(self):
label_dicts = {1: 'Security', 2: 'UX'}, {'security': 1, 'ux': 2}
self.config_service.label_cache.CacheItem(789, label_dicts)
@@ -456,16 +472,16 @@
self.config_service.label_cache.CacheItem(789, label_dicts)
# No mock calls set up because none are needed.
self.mox.ReplayAll()
- self.assertItemsEqual(
- [1],
+ six.assertCountEqual(
+ self, [1],
self.config_service.LookupIDsOfLabelsMatching(
self.cnxn, 789, re.compile('Sec.*')))
- self.assertItemsEqual(
- [1, 2],
+ six.assertCountEqual(
+ self, [1, 2],
self.config_service.LookupIDsOfLabelsMatching(
self.cnxn, 789, re.compile('.*')))
- self.assertItemsEqual(
- [],
+ six.assertCountEqual(
+ self, [],
self.config_service.LookupIDsOfLabelsMatching(
self.cnxn, 789, re.compile('Zzzzz.*')))
self.mox.VerifyAll()
@@ -789,9 +805,7 @@
with self.assertRaises(exceptions.InputException) as cm:
self.config_service._UpdateWellKnownLabels(self.cnxn, config)
self.mox.VerifyAll()
- self.assertEqual(
- 'Defined label "Type-Defect" twice',
- cm.exception.message)
+ self.assertEqual('Defined label "Type-Defect" twice', str(cm.exception))
def testUpdateWellKnownStatuses(self):
config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
@@ -1001,7 +1015,7 @@
comp_ids = self.config_service.FindMatchingComponentIDsAnyProject(
self.cnxn, ['WindowManager', 'NetworkLayer'])
self.mox.VerifyAll()
- self.assertItemsEqual([1, 2, 3], comp_ids)
+ six.assertCountEqual(self, [1, 2, 3], comp_ids)
def testFindMatchingComponentIDsAnyProject_NonRooted(self):
self.SetUpFindMatchingComponentIDsAnyProject(False, [(1,), (2,), (3,)])
@@ -1010,7 +1024,7 @@
comp_ids = self.config_service.FindMatchingComponentIDsAnyProject(
self.cnxn, ['WindowManager', 'NetworkLayer'], exact=False)
self.mox.VerifyAll()
- self.assertItemsEqual([1, 2, 3], comp_ids)
+ six.assertCountEqual(self, [1, 2, 3], comp_ids)
def SetUpCreateComponentDef(self, comp_id):
self.config_service.componentdef_tbl.InsertRow(
diff --git a/services/test/features_svc_test.py b/services/test/features_svc_test.py
index d285152..fcd0546 100644
--- a/services/test/features_svc_test.py
+++ b/services/test/features_svc_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Unit tests for features_svc module."""
from __future__ import print_function
@@ -13,6 +12,7 @@
from mox3 import mox
except ImportError:
import mox
+import six
import time
import unittest
import mock
@@ -27,8 +27,8 @@
from framework import exceptions
from framework import framework_constants
from framework import sql
-from proto import tracker_pb2
-from proto import features_pb2
+from mrproto import tracker_pb2
+from mrproto import features_pb2
from services import chart_svc
from services import features_svc
from services import star_svc
@@ -82,14 +82,14 @@
hotlist_dict = self.features_service.hotlist_2lc._DeserializeHotlists(
hotlist_rows, issue_rows, role_rows)
- self.assertItemsEqual([123, 234], list(hotlist_dict.keys()))
+ six.assertCountEqual(self, [123, 234], list(hotlist_dict.keys()))
self.assertEqual(123, hotlist_dict[123].hotlist_id)
self.assertEqual('hot1', hotlist_dict[123].name)
- self.assertItemsEqual([111, 444], hotlist_dict[123].owner_ids)
- self.assertItemsEqual([222], hotlist_dict[123].editor_ids)
- self.assertItemsEqual([333], hotlist_dict[123].follower_ids)
+ six.assertCountEqual(self, [111, 444], hotlist_dict[123].owner_ids)
+ six.assertCountEqual(self, [222], hotlist_dict[123].editor_ids)
+ six.assertCountEqual(self, [333], hotlist_dict[123].follower_ids)
self.assertEqual(234, hotlist_dict[234].hotlist_id)
- self.assertItemsEqual([111], hotlist_dict[234].owner_ids)
+ six.assertCountEqual(self, [111], hotlist_dict[234].owner_ids)
class HotlistIDTwoLevelCache(unittest.TestCase):
@@ -138,12 +138,12 @@
# Assertions
self.features_service.hotlist2user_tbl.Select.assert_called_once_with(
- self.cnxn, cols=['hotlist_id', 'user_id'], user_id=[555, 333, 222],
+ self.cnxn, cols=['hotlist_id', 'user_id'], user_id=[222, 333, 555],
role_name='owner')
hotlist_ids = [123, 124, 125, 126, 127]
self.features_service.hotlist_tbl.Select.assert_called_once_with(
self.cnxn, cols=['id', 'name'], id=hotlist_ids, is_deleted=False,
- where=[('LOWER(name) IN (%s,%s)', ['name3', 'name1'])])
+ where=[('LOWER(name) IN (%s,%s)', ['name1', 'name3'])])
self.assertEqual(hit,{
('name1', 111): 121,
@@ -635,7 +635,7 @@
17: [tracker_pb2.FilterRule(
predicate=rows[3][2], add_cc_ids=[111, 222])],
}
- self.assertItemsEqual(rules_dict, expected_dict)
+ six.assertCountEqual(self, rules_dict, expected_dict)
self.features_service.filterrule_tbl.Select.assert_called_once_with(
self.cnxn, features_svc.FILTERRULE_COLS)
@@ -667,7 +667,7 @@
emails = {'cow@fart.test': 222}
rules_dict = self.features_service.ExpungeFilterRulesByUser(
self.cnxn, emails)
- self.assertItemsEqual(rules_dict, {})
+ six.assertCountEqual(self, rules_dict, {})
self.features_service.filterrule_tbl.Select.assert_called_once_with(
self.cnxn, features_svc.FILTERRULE_COLS)
@@ -773,7 +773,7 @@
self.cnxn, ['q3-todo', 'Q4-TODO'], [222, 333, 444])
self.assertEqual(ret, {('q3-todo', 222) : 123, ('q4-todo', 333): 124})
self.features_service.hotlist2user_tbl.Select.assert_called_once_with(
- self.cnxn, cols=['hotlist_id', 'user_id'], user_id=[444, 333, 222],
+ self.cnxn, cols=['hotlist_id', 'user_id'], user_id=[222, 333, 444],
role_name='owner')
self.features_service.hotlist_tbl.Select.assert_called_once_with(
self.cnxn, cols=['id', 'name'], id=[123, 125], is_deleted=False,
@@ -965,7 +965,7 @@
hotlist_dict = self.features_service.GetHotlists(
self.cnxn, [123, 456])
self.mox.VerifyAll()
- self.assertItemsEqual([123, 456], list(hotlist_dict.keys()))
+ six.assertCountEqual(self, [123, 456], list(hotlist_dict.keys()))
self.assertEqual('hotlist1', hotlist_dict[123].name)
self.assertEqual('hotlist2', hotlist_dict[456].name)
@@ -1306,7 +1306,7 @@
self.features_service.GetProjectIDsFromHotlist = mock.Mock(
return_value=[hotlists_project_id])
- hotlist_ids = hotlists_by_id.keys()
+ hotlist_ids = list(hotlists_by_id.keys())
commit = True # commit in ExpungeHotlists should be True by default.
self.features_service.ExpungeHotlists(
self.cnxn, hotlist_ids, star_service, user_service, chart_service)
diff --git a/services/test/fulltext_helpers_test.py b/services/test/fulltext_helpers_test.py
index fbff1b8..42febf4 100644
--- a/services/test/fulltext_helpers_test.py
+++ b/services/test/fulltext_helpers_test.py
@@ -1,13 +1,13 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the fulltext_helpers module."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
+import six
import unittest
try:
@@ -17,8 +17,8 @@
from google.appengine.api import search
-from proto import ast_pb2
-from proto import tracker_pb2
+from mrproto import ast_pb2
+from mrproto import tracker_pb2
from search import query2ast
from services import fulltext_helpers
@@ -247,4 +247,4 @@
project_ids = fulltext_helpers.ComprehensiveSearch(
'browser', 'search index name')
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234, 345], project_ids)
+ six.assertCountEqual(self, [123, 234, 345], project_ids)
diff --git a/services/test/issue_svc_test.py b/services/test/issue_svc_test.py
index fe41aa4..f6b6c29 100644
--- a/services/test/issue_svc_test.py
+++ b/services/test/issue_svc_test.py
@@ -1,8 +1,7 @@
# -*- coding: utf-8 -*-
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Unit tests for issue_svc module."""
@@ -11,6 +10,7 @@
from __future__ import absolute_import
import logging
+import six
import time
import unittest
from mock import patch, Mock, ANY
@@ -27,7 +27,7 @@
from framework import exceptions
from framework import framework_constants
from framework import sql
-from proto import tracker_pb2
+from mrproto import tracker_pb2
from services import caches
from services import chart_svc
from services import issue_svc
@@ -63,12 +63,12 @@
return issue_service
-class TestableIssueTwoLevelCache(issue_svc.IssueTwoLevelCache):
+class _TestableIssueTwoLevelCache(issue_svc.IssueTwoLevelCache):
def __init__(self, issue_list):
cache_manager = fake.CacheManager()
- super(TestableIssueTwoLevelCache, self).__init__(
- cache_manager, None, None, None)
+ super(_TestableIssueTwoLevelCache,
+ self).__init__(cache_manager, None, None, None)
self.cache = caches.RamCache(cache_manager, 'issue')
self.memcache_prefix = 'issue:'
self.pb_class = tracker_pb2.Issue
@@ -134,8 +134,8 @@
issue_dict = self.issue_id_2lc.FetchItems(
self.cnxn, project_local_ids_list)
self.mox.VerifyAll()
- self.assertItemsEqual(project_local_ids_list, list(issue_dict.keys()))
- self.assertItemsEqual(issue_ids, list(issue_dict.values()))
+ six.assertCountEqual(self, project_local_ids_list, list(issue_dict.keys()))
+ six.assertCountEqual(self, issue_ids, list(issue_dict.values()))
def testKeyToStr(self):
self.assertEqual('789,1', self.issue_id_2lc._KeyToStr((789, 1)))
@@ -161,9 +161,10 @@
now = int(time.time())
self.project_service.TestAddProject('proj', project_id=789)
self.issue_rows = [
- (78901, 789, 1, 1, 111, 222,
- now, now, now, now, now, now,
- 0, 0, 0, 1, 0, False)]
+ (
+ 78901, 789, 1, 1, 111, 222, now, now, now, now, now, now, now, 0, 0,
+ 0, 1, 0, False)
+ ]
self.summary_rows = [(78901, 'sum')]
self.label_rows = [(78901, 1, 0)]
self.component_rows = []
@@ -224,14 +225,14 @@
self.component_rows, self.cc_rows, self.notify_rows,
self.fieldvalue_rows, self.relation_rows, self.dangling_relation_rows,
self.phase_rows, self.approvalvalue_rows, self.av_approver_rows)
- self.assertItemsEqual([78901], list(issue_dict.keys()))
+ six.assertCountEqual(self, [78901], list(issue_dict.keys()))
issue = issue_dict[78901]
self.assertEqual(len(issue.phases), 2)
self.assertIsNotNone(tracker_bizobj.FindPhaseByID(1, issue.phases))
av_21 = tracker_bizobj.FindApprovalValueByID(
21, issue.approval_values)
self.assertEqual(av_21.phase_id, 1)
- self.assertItemsEqual(av_21.approver_ids, [111, 222, 333])
+ six.assertCountEqual(self, av_21.approver_ids, [111, 222, 333])
self.assertIsNotNone(tracker_bizobj.FindPhaseByID(2, issue.phases))
self.assertEqual(issue.phases,
[tracker_pb2.Phase(rank=1, phase_id=1, name='Canary'),
@@ -356,7 +357,7 @@
self.mox.ReplayAll()
issue_dict = self.issue_2lc.FetchItems(self.cnxn, issue_ids)
self.mox.VerifyAll()
- self.assertItemsEqual(issue_ids, list(issue_dict.keys()))
+ six.assertCountEqual(self, issue_ids, list(issue_dict.keys()))
self.assertEqual(2, len(issue_dict[78901].phases))
def testFetchItemsNoApprovalValues(self):
@@ -365,7 +366,7 @@
self.mox.ReplayAll()
issue_dict = self.issue_2lc.FetchItems(self.cnxn, issue_ids)
self.mox.VerifyAll()
- self.assertItemsEqual(issue_ids, list(issue_dict.keys()))
+ six.assertCountEqual(self, issue_ids, list(issue_dict.keys()))
self.assertEqual([], issue_dict[78901].phases)
@@ -750,7 +751,7 @@
def testGetIssuesDict(self):
issue_ids = [78901, 78902, 78903]
issue_1, issue_2 = self.SetUpGetIssues()
- self.services.issue.issue_2lc = TestableIssueTwoLevelCache(
+ self.services.issue.issue_2lc = _TestableIssueTwoLevelCache(
[issue_1, issue_2])
issues_dict, missed_iids = self.services.issue.GetIssuesDict(
self.cnxn, issue_ids)
@@ -827,10 +828,9 @@
def SetUpInsertIssue(
self, label_rows=None, av_rows=None, approver_rows=None,
dangling_relation_rows=None):
- row = (789, 1, 1, 111, 111,
- self.now, 0, self.now, self.now, self.now, self.now,
- None, 0,
- False, 0, 0, False)
+ row = (
+ 789, 1, 1, 111, 111, self.now, 0, self.now, self.now, self.now,
+ self.now, self.now, None, 0, False, 0, 0, False)
self.services.issue.issue_tbl.InsertRows(
self.cnxn, issue_svc.ISSUE_COLS[1:], [row],
commit=False, return_generated_ids=True).AndReturn([78901])
@@ -852,9 +852,9 @@
commit=False)
def SetUpInsertSpamIssue(self):
- row = (789, 1, 1, 111, 111,
- self.now, 0, self.now, self.now, self.now, self.now,
- None, 0, False, 0, 0, True)
+ row = (
+ 789, 1, 1, 111, 111, self.now, 0, self.now, self.now, self.now,
+ self.now, self.now, None, 0, False, 0, 0, True)
self.services.issue.issue_tbl.InsertRows(
self.cnxn, issue_svc.ISSUE_COLS[1:], [row],
commit=False, return_generated_ids=True).AndReturn([78901])
@@ -972,13 +972,14 @@
'owner_modified': 123456789,
'status_modified': 123456789,
'component_modified': 123456789,
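+ # New column: when the issue last changed, for migration tooling.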
+ 'migration_modified': 123456789,
'derived_owner_id': None,
'derived_status_id': None,
'deleted': False,
'star_count': 12,
'attachment_count': 0,
'is_spam': False,
- }
+ }
self.services.issue.issue_tbl.Update(
self.cnxn, delta, id=78901, commit=False)
if not given_delta:
@@ -1006,9 +1007,15 @@
def testUpdateIssues_Normal(self):
issue = fake.MakeTestIssue(
- project_id=789, local_id=1, owner_id=111, summary='sum',
- status='Live', labels=['Type-Defect'], issue_id=78901,
- opened_timestamp=123456789, modified_timestamp=123456789,
+ project_id=789,
+ local_id=1,
+ owner_id=111,
+ summary='sum',
+ status='Live',
+ labels=['Type-Defect'],
+ issue_id=78901,
+ opened_timestamp=123456789,
+ modified_timestamp=123456789,
star_count=12)
issue.assume_stale = False
self.SetUpUpdateIssues()
@@ -1018,9 +1025,15 @@
def testUpdateIssue_Normal(self):
issue = fake.MakeTestIssue(
- project_id=789, local_id=1, owner_id=111, summary='sum',
- status='Live', labels=['Type-Defect'], issue_id=78901,
- opened_timestamp=123456789, modified_timestamp=123456789,
+ project_id=789,
+ local_id=1,
+ owner_id=111,
+ summary='sum',
+ status='Live',
+ labels=['Type-Defect'],
+ issue_id=78901,
+ opened_timestamp=123456789,
+ modified_timestamp=123456789,
star_count=12)
issue.assume_stale = False
self.SetUpUpdateIssues()
@@ -1030,9 +1043,15 @@
def testUpdateIssue_Stale(self):
issue = fake.MakeTestIssue(
- project_id=789, local_id=1, owner_id=111, summary='sum',
- status='Live', labels=['Type-Defect'], issue_id=78901,
- opened_timestamp=123456789, modified_timestamp=123456789,
+ project_id=789,
+ local_id=1,
+ owner_id=111,
+ summary='sum',
+ status='Live',
+ labels=['Type-Defect'],
+ issue_id=78901,
+ opened_timestamp=123456789,
+ modified_timestamp=123456789,
star_count=12)
# Do not set issue.assume_stale = False
# Do not call self.SetUpUpdateIssues() because nothing should be updated.
@@ -1270,7 +1289,8 @@
7890101, is_description=True, approval_id=7,
content=config.approval_defs[2].survey, commit=False)
amendment_row = (
- 78901, 7890101, 'custom', None, '-Llama Roo', None, None, 'Approvals')
+ 78901, 7890101, 'custom', None, '-Llama Roo', None, None, 'Approvals',
+ None, None)
self.SetUpInsertComment(
7890101, content=comment_content, amendment_rows=[amendment_row],
commit=False)
@@ -1473,8 +1493,10 @@
# Calls in ApplyIssueDelta
# Call to find added blocking issues.
- issue_refs = {blocking_issue: (
- blocking_issue.project_name, blocking_issue.local_id)}
+ issue_refs = {
+ blocking_issue.issue_id:
+ (blocking_issue.project_name, blocking_issue.local_id)
+ }
self.services.issue.LookupIssueRefs(
self.cnxn, [blocking_issue.issue_id]).AndReturn(issue_refs)
# Call to find removed blocking issues.
@@ -1636,10 +1658,10 @@
def testSoftDeleteIssue(self):
project = fake.Project(project_id=789)
issue_1, issue_2 = self.SetUpGetIssues()
- self.services.issue.issue_2lc = TestableIssueTwoLevelCache(
+ self.services.issue.issue_2lc = _TestableIssueTwoLevelCache(
[issue_1, issue_2])
self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
- delta = {'deleted': True}
+ delta = {'deleted': True, 'migration_modified': self.now}
self.services.issue.issue_tbl.Update(
self.cnxn, delta, id=78901, commit=False)
@@ -1842,7 +1864,10 @@
commentcontent_rows = [(7890101, 'content', 'msg'),
(7890102, 'content2', 'msg')]
amendment_rows = [
- (1, 78901, 7890101, 'cc', 'old', 'new val', 222, None, None)]
+ (
+ 1, 78901, 7890101, 'cc', 'old', 'new val', 222, None, None, None,
+ None)
+ ]
attachment_rows = []
approval_rows = [(23, 7890102)]
importer_rows = []
@@ -1869,6 +1894,24 @@
self.assertEqual(2, len(comments))
self.assertEqual(222, comments[0].importer_id)
+ def testUnpackAmendment(self):
+ amendment_row = (
+ 1, 78901, 7890101, 'cc', 'old', 'new val', 222, None, None, None, None)
+ amendment, comment_id = self.services.issue._UnpackAmendment(amendment_row)
+ self.assertEqual(comment_id, 7890101)
+ self.assertEqual(amendment.field, tracker_pb2.FieldID('CC'))
+ self.assertEqual(amendment.newvalue, 'new val')
+ self.assertEqual(amendment.oldvalue, 'old')
+ self.assertEqual(amendment.added_user_ids, [222])
+
+ def testUnpackAmendment_With_Unicode(self):
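+ # Component ids arrive from the DB row as strings; _UnpackAmendment is
+ # expected to coerce them to ints (u'123' -> [123]).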
+ amendment_row = (
+ 1, 78901, 7890102, 'custom', None, None, None, None, None, u'123', None)
+ amendment, comment_id = self.services.issue._UnpackAmendment(amendment_row)
+ self.assertEqual(comment_id, 7890102)
+ self.assertEqual(amendment.field, tracker_pb2.FieldID('CUSTOM'))
+ self.assertEqual(amendment.added_component_ids, [123])
+
def MockTheRestOfGetCommentsByID(self, comment_ids):
self.services.issue.commentcontent_tbl.Select = Mock(
return_value=[
@@ -2117,6 +2160,32 @@
self.mox.VerifyAll()
self.assertEqual(7890101, comment.id)
+ def testInsertComment_WithIssueUpdate(self):
+ amendment = tracker_bizobj.MakeAmendment(
+ tracker_pb2.FieldID.COMPONENTS, 'aaa', [], [], added_component_ids=[1])
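+ # A single amendment carrying both a string value and added component
+ # ids is expected to expand into two IssueUpdate rows, one per populated
+ # column, as asserted at the bottom of this test.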
+ amendment_rows = [
+ (
+ 78901, 7890101, 'components', None, 'aaa', None, None, None, None,
+ None),
+ (78901, 7890101, 'components', None, None, None, None, None, 1, None)
+ ]
+ comment = tracker_pb2.IssueComment(
+ issue_id=78901,
+ timestamp=self.now,
+ project_id=789,
+ user_id=111,
+ content='content',
+ amendments=[amendment])
+ self.services.issue.commentcontent_tbl.InsertRow = Mock(
+ return_value=78901010)
+ self.services.issue.comment_tbl.InsertRow = Mock(return_value=7890101)
+ self.services.issue.issueupdate_tbl.InsertRows = Mock()
+
+ self.services.issue.InsertComment(self.cnxn, comment, commit=True)
+
+ self.services.issue.issueupdate_tbl.InsertRows.assert_called_once_with(
+ self.cnxn, issue_svc.ISSUEUPDATE_COLS[1:], amendment_rows, commit=False)
+
def SetUpUpdateComment(self, comment_id, delta=None):
delta = delta or {
'commenter_id': 111,
@@ -2189,7 +2258,7 @@
def testSoftDeleteComment(self):
"""Deleting a comment with an attachment marks it and updates count."""
issue_1, issue_2 = self.SetUpGetIssues()
- self.services.issue.issue_2lc = TestableIssueTwoLevelCache(
+ self.services.issue.issue_2lc = _TestableIssueTwoLevelCache(
[issue_1, issue_2])
issue_1.attachment_count = 1
issue_1.assume_stale = False
@@ -2198,7 +2267,11 @@
self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
self.SetUpUpdateComment(
comment.id, delta={'deleted_by': 222, 'is_spam': False})
- self.SetUpUpdateIssues(given_delta={'attachment_count': 0})
+ self.SetUpUpdateIssues(
+ given_delta={
+ 'attachment_count': 0,
+ 'migration_modified': self.now
+ })
self.SetUpEnqueueIssuesForIndexing([78901])
self.mox.ReplayAll()
self.services.issue.SoftDeleteComment(
@@ -2418,7 +2491,11 @@
comment.attachments.append(attachment)
self.SetUpUpdateAttachment(179901, 1234, {'deleted': True})
- self.SetUpUpdateIssues(given_delta={'attachment_count': 0})
+ self.SetUpUpdateIssues(
+ given_delta={
+ 'attachment_count': 0,
+ 'migration_modified': self.now
+ })
self.SetUpEnqueueIssuesForIndexing([78901])
self.mox.ReplayAll()
@@ -2626,6 +2703,9 @@
self.services.issue.issueapproval2approver_tbl.Delete = Mock()
self.services.issue.issue2approvalvalue_tbl.Update = Mock()
+ issue_update_id_rows = [(78914,), (78915,)]
+ self.services.issue.issueupdate_tbl.Select = Mock(
+ return_value=issue_update_id_rows)
self.services.issue.issueupdate_tbl.Update = Mock()
self.services.issue.issue2notify_tbl.Delete = Mock()
@@ -2652,18 +2732,19 @@
commit = False
limit = 50
- affected_user_ids = self.services.issue.ExpungeUsersInIssues(
+ affected_issue_ids = self.services.issue.ExpungeUsersInIssues(
self.cnxn, user_ids_by_email, limit=limit)
- self.assertItemsEqual(
- affected_user_ids,
- [78901, 78902, 78903, 78904, 78905, 78906, 78907, 78908, 78909,
- 78910, 78911, 78912, 78913])
+ six.assertCountEqual(
+ self, affected_issue_ids, [
+ 78901, 78902, 78903, 78904, 78905, 78906, 78907, 78908, 78909,
+ 78910, 78911, 78912, 78913, 78914, 78915
+ ])
self.services.issue.comment_tbl.Select.assert_called_once()
_cnxn, kwargs = self.services.issue.comment_tbl.Select.call_args
self.assertEqual(
kwargs['cols'], ['Comment.id', 'Comment.issue_id', 'commentcontent_id'])
- self.assertItemsEqual(kwargs['commenter_id'], user_ids)
+ six.assertCountEqual(self, kwargs['commenter_id'], user_ids)
self.assertEqual(kwargs['limit'], limit)
# since user_ids are passed to ExpungeUsersInIssues via a dictionary,
@@ -2723,9 +2804,6 @@
self.cnxn, {'reporter_id': framework_constants.DELETED_USER_ID},
id=[row[0] for row in reporter_issue_id_rows], commit=commit)
- self.assertEqual(
- 3, len(self.services.issue.issue_tbl.Update.call_args_list))
-
# issue updates
self.services.issue.issueupdate_tbl.Update.assert_any_call(
self.cnxn, {'added_user_id': framework_constants.DELETED_USER_ID},
@@ -2736,11 +2814,19 @@
self.assertEqual(
2, len(self.services.issue.issueupdate_tbl.Update.call_args_list))
+ # check updates across all issues
+ self.services.issue.issue_tbl.Update.assert_any_call(
+ self.cnxn, {'migration_modified': self.now},
+ id=affected_issue_ids,
+ commit=commit)
+ self.assertEqual(
+ 4, len(self.services.issue.issue_tbl.Update.call_args_list))
+
# issue notify
call_args_list = self.services.issue.issue2notify_tbl.Delete.call_args_list
self.assertEqual(1, len(call_args_list))
_cnxn, kwargs = call_args_list[0]
- self.assertItemsEqual(kwargs['email'], emails)
+ six.assertCountEqual(self, kwargs['email'], emails)
self.assertEqual(kwargs['commit'], commit)
# issue snapshots
diff --git a/services/test/project_svc_test.py b/services/test/project_svc_test.py
index 48de180..3ada8ea 100644
--- a/services/test/project_svc_test.py
+++ b/services/test/project_svc_test.py
@@ -1,13 +1,13 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the project_svc module."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
+import six
import time
import unittest
@@ -21,8 +21,8 @@
from framework import framework_constants
from framework import sql
-from proto import project_pb2
-from proto import user_pb2
+from mrproto import project_pb2
+from mrproto import user_pb2
from services import config_svc
from services import project_svc
from testing import fake
@@ -79,15 +79,15 @@
project_dict = self.project_service.project_2lc._DeserializeProjects(
project_rows, role_rows, extraperm_rows)
- self.assertItemsEqual([123, 234], list(project_dict.keys()))
+ six.assertCountEqual(self, [123, 234], list(project_dict.keys()))
self.assertEqual(123, project_dict[123].project_id)
self.assertEqual('proj1', project_dict[123].project_name)
self.assertEqual(NOW, project_dict[123].recent_activity)
- self.assertItemsEqual([111, 444], project_dict[123].owner_ids)
- self.assertItemsEqual([222], project_dict[123].committer_ids)
- self.assertItemsEqual([333], project_dict[123].contributor_ids)
+ six.assertCountEqual(self, [111, 444], project_dict[123].owner_ids)
+ six.assertCountEqual(self, [222], project_dict[123].committer_ids)
+ six.assertCountEqual(self, [333], project_dict[123].contributor_ids)
self.assertEqual(234, project_dict[234].project_id)
- self.assertItemsEqual([111], project_dict[234].owner_ids)
+ six.assertCountEqual(self, [111], project_dict[234].owner_ids)
self.assertEqual(False, project_dict[123].issue_notify_always_detailed)
self.assertEqual(True, project_dict[234].issue_notify_always_detailed)
@@ -278,7 +278,7 @@
project_dict = self.project_service.GetProjects(
self.cnxn, [123, 234])
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234], list(project_dict.keys()))
+ six.assertCountEqual(self, [123, 234], list(project_dict.keys()))
self.assertEqual('proj1', project_dict[123].project_name)
self.assertEqual('proj2', project_dict[234].project_name)
@@ -288,7 +288,7 @@
self.mox.ReplayAll()
project_dict = self.project_service.GetProjects(self.cnxn, [234])
self.mox.VerifyAll()
- self.assertItemsEqual([234], list(project_dict.keys()))
+ six.assertCountEqual(self, [234], list(project_dict.keys()))
self.assertEqual(
[project_pb2.Project.ExtraPerms(
member_id=111, perms=['FooPerm']),
@@ -297,7 +297,7 @@
project_dict[234].extra_perms)
- def testGetVisibleLiveProjects_AnyoneAccessWithUser(self):
+ def testGetVisibleProjects_AnyoneAccessWithUser(self):
project_rows = [
(
234, 'proj2', 'test proj 2', 'test project', 'live', 'anyone', '',
@@ -311,13 +311,13 @@
self.SetUpGetProjects()
self.mox.ReplayAll()
user_a = user_pb2.User(email='a@example.com')
- project_ids = self.project_service.GetVisibleLiveProjects(
+ project_ids = self.project_service.GetVisibleProjects(
self.cnxn, user_a, set([111]))
self.mox.VerifyAll()
- self.assertItemsEqual([234], project_ids)
+ six.assertCountEqual(self, [234], project_ids)
- def testGetVisibleLiveProjects_AnyoneAccessWithAnon(self):
+ def testGetVisibleProjects_AnyoneAccessWithAnon(self):
project_rows = [
(
234, 'proj2', 'test proj 2', 'test project', 'live', 'anyone', '',
@@ -330,13 +330,12 @@
state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
self.SetUpGetProjects()
self.mox.ReplayAll()
- project_ids = self.project_service.GetVisibleLiveProjects(
- self.cnxn, None, None)
+ project_ids = self.project_service.GetVisibleProjects(self.cnxn, None, None)
self.mox.VerifyAll()
- self.assertItemsEqual([234], project_ids)
+ six.assertCountEqual(self, [234], project_ids)
- def testGetVisibleLiveProjects_RestrictedAccessWithMember(self):
+ def testGetVisibleProjects_RestrictedAccessWithMember(self):
project_rows = [
(
234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
@@ -352,13 +351,13 @@
state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
self.mox.ReplayAll()
user_a = user_pb2.User(email='a@example.com')
- project_ids = self.project_service.GetVisibleLiveProjects(
+ project_ids = self.project_service.GetVisibleProjects(
self.cnxn, user_a, set([111]))
self.mox.VerifyAll()
- self.assertItemsEqual([234], project_ids)
+ six.assertCountEqual(self, [234], project_ids)
- def testGetVisibleLiveProjects_RestrictedAccessWithNonMember(self):
+ def testGetVisibleProjects_RestrictedAccessWithNonMember(self):
project_rows = [
(
234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
@@ -373,13 +372,13 @@
state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
self.mox.ReplayAll()
user_a = user_pb2.User(email='a@example.com')
- project_ids = self.project_service.GetVisibleLiveProjects(
+ project_ids = self.project_service.GetVisibleProjects(
self.cnxn, user_a, set([111]))
self.mox.VerifyAll()
- self.assertItemsEqual([], project_ids)
+ six.assertCountEqual(self, [], project_ids)
- def testGetVisibleLiveProjects_RestrictedAccessWithAnon(self):
+ def testGetVisibleProjects_RestrictedAccessWithAnon(self):
project_rows = [
(
234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
@@ -393,13 +392,12 @@
self.cnxn, cols=['project_id'],
state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
self.mox.ReplayAll()
- project_ids = self.project_service.GetVisibleLiveProjects(
- self.cnxn, None, None)
+ project_ids = self.project_service.GetVisibleProjects(self.cnxn, None, None)
self.mox.VerifyAll()
- self.assertItemsEqual([], project_ids)
+ six.assertCountEqual(self, [], project_ids)
- def testGetVisibleLiveProjects_RestrictedAccessWithSiteAdmin(self):
+ def testGetVisibleProjects_RestrictedAccessWithSiteAdmin(self):
project_rows = [
(
234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
@@ -415,13 +413,13 @@
self.mox.ReplayAll()
user_a = user_pb2.User(email='a@example.com')
user_a.is_site_admin = True
- project_ids = self.project_service.GetVisibleLiveProjects(
+ project_ids = self.project_service.GetVisibleProjects(
self.cnxn, user_a, set([111]))
self.mox.VerifyAll()
- self.assertItemsEqual([234], project_ids)
+ six.assertCountEqual(self, [234], project_ids)
- def testGetVisibleLiveProjects_ArchivedProject(self):
+ def testGetVisibleProjects_ArchivedProject(self):
project_rows = [
(
234, 'proj2', 'test proj 2', 'test project', 'archived', 'anyone',
@@ -436,11 +434,11 @@
state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
self.mox.ReplayAll()
user_a = user_pb2.User(email='a@example.com')
- project_ids = self.project_service.GetVisibleLiveProjects(
+ project_ids = self.project_service.GetVisibleProjects(
self.cnxn, user_a, set([111]))
self.mox.VerifyAll()
- self.assertItemsEqual([], project_ids)
+ six.assertCountEqual(self, [234], project_ids)
def testGetProjectsByName(self):
self.project_service.project_names_to_ids.CacheItem('proj1', 123)
@@ -451,7 +449,7 @@
project_dict = self.project_service.GetProjectsByName(
self.cnxn, ['proj1', 'proj2'])
self.mox.VerifyAll()
- self.assertItemsEqual(['proj1', 'proj2'], list(project_dict.keys()))
+ six.assertCountEqual(self, ['proj1', 'proj2'], list(project_dict.keys()))
self.assertEqual(123, project_dict['proj1'].project_id)
self.assertEqual(234, project_dict['proj2'].project_id)
@@ -584,18 +582,18 @@
self.cnxn, {111, 888})
owned_project_ids, membered_project_ids, contrib_project_ids = actual
self.mox.VerifyAll()
- self.assertItemsEqual([234], owned_project_ids)
- self.assertItemsEqual([123], membered_project_ids)
- self.assertItemsEqual([], contrib_project_ids)
+ six.assertCountEqual(self, [234], owned_project_ids)
+ six.assertCountEqual(self, [123], membered_project_ids)
+ six.assertCountEqual(self, [], contrib_project_ids)
def testGetUserRolesInAllProjectsWithoutEffectiveIds(self):
self.mox.ReplayAll()
actual = self.project_service.GetUserRolesInAllProjects(self.cnxn, {})
owned_project_ids, membered_project_ids, contrib_project_ids = actual
self.mox.VerifyAll()
- self.assertItemsEqual([], owned_project_ids)
- self.assertItemsEqual([], membered_project_ids)
- self.assertItemsEqual([], contrib_project_ids)
+ six.assertCountEqual(self, [], owned_project_ids)
+ six.assertCountEqual(self, [], membered_project_ids)
+ six.assertCountEqual(self, [], contrib_project_ids)
def SetUpUpdateExtraPerms(self):
self.project_service.extraperm_tbl.Delete(
diff --git a/services/test/service_manager_test.py b/services/test/service_manager_test.py
index 33c8706..e138c28 100644
--- a/services/test/service_manager_test.py
+++ b/services/test/service_manager_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the service_manager module."""
from __future__ import print_function
diff --git a/services/test/spam_svc_test.py b/services/test/spam_svc_test.py
index 351ec62..156269c 100644
--- a/services/test/spam_svc_test.py
+++ b/services/test/spam_svc_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the spam service."""
from __future__ import print_function
@@ -9,6 +8,8 @@
from __future__ import absolute_import
import mock
+import six
+import time
import unittest
try:
@@ -21,8 +22,9 @@
import settings
from framework import sql
from framework import framework_constants
-from proto import user_pb2
-from proto import tracker_pb2
+from infra_libs import ts_mon
+from mrproto import user_pb2
+from mrproto import tracker_pb2
from services import spam_svc
from testing import fake
from mock import Mock
@@ -51,6 +53,9 @@
self.spam_service.report_tbl.Delete = Mock()
self.spam_service.verdict_tbl.Delete = Mock()
+ self.now = int(time.time())
+
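+ # Reset ts_mon state so the metric counters asserted in these tests
+ # start from zero.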
+ ts_mon.reset_for_unittest()
def tearDown(self):
self.testbed.deactivate()
@@ -84,7 +89,7 @@
issue_reporters, comment_reporters = (
self.spam_service.LookupIssueFlaggers(self.cnxn, 234))
self.mox.VerifyAll()
- self.assertItemsEqual([111], issue_reporters)
+ six.assertCountEqual(self, [111], issue_reporters)
self.assertEqual({1: [222]}, comment_reporters)
def testFlagIssues_overThresh(self):
@@ -96,7 +101,9 @@
summary='sum',
status='Live',
issue_id=78901,
- project_name='proj')
+ project_name='proj',
+ migration_modified_timestamp=1234567,
+ is_spam=False)
issue.assume_stale = False # We will store this issue.
self.mock_report_tbl.InsertRows(self.cnxn,
@@ -118,6 +125,8 @@
self.cnxn, self.issue_service, [issue], 111, True)
self.mox.VerifyAll()
self.assertIn(issue, self.issue_service.updated_issues)
+ self.assertEqual(issue.migration_modified_timestamp, self.now)
+ self.assertEqual(issue.is_spam, True)
self.assertEqual(
1,
@@ -137,7 +146,9 @@
summary='sum',
status='Live',
issue_id=78901,
- project_name='proj')
+ project_name='proj',
+ migration_modified_timestamp=1234567,
+ is_spam=False)
self.mock_report_tbl.InsertRows(self.cnxn,
['issue_id', 'reported_user_id', 'user_id'],
@@ -157,6 +168,8 @@
self.mox.VerifyAll()
self.assertNotIn(issue, self.issue_service.updated_issues)
+ self.assertEqual(issue.migration_modified_timestamp, 1234567)
+ self.assertEqual(issue.is_spam, False)
self.assertIsNone(
self.spam_service.issue_actions.get(
fields={
@@ -167,8 +180,15 @@
def testUnflagIssue_overThresh(self):
issue = fake.MakeTestIssue(
- project_id=789, local_id=1, reporter_id=111, owner_id=456,
- summary='sum', status='Live', issue_id=78901, is_spam=True)
+ project_id=789,
+ local_id=1,
+ reporter_id=111,
+ owner_id=456,
+ summary='sum',
+ status='Live',
+ issue_id=78901,
+ migration_modified_timestamp=1234567,
+ is_spam=True)
self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
comment_id=None, user_id=111)
self.mock_report_tbl.Select(self.cnxn,
@@ -185,15 +205,23 @@
self.mox.VerifyAll()
self.assertNotIn(issue, self.issue_service.updated_issues)
- self.assertEqual(True, issue.is_spam)
+ self.assertEqual(issue.migration_modified_timestamp, 1234567)
+ self.assertEqual(issue.is_spam, True)
def testUnflagIssue_underThresh(self):
"""A non-member un-flagging an issue as spam should not be able
to overturn the verdict to ham. This is different from previous
behavior. See https://crbug.com/monorail/2232 for details."""
issue = fake.MakeTestIssue(
- project_id=789, local_id=1, reporter_id=111, owner_id=456,
- summary='sum', status='Live', issue_id=78901, is_spam=True)
+ project_id=789,
+ local_id=1,
+ reporter_id=111,
+ owner_id=456,
+ summary='sum',
+ status='Live',
+ issue_id=78901,
+ migration_modified_timestamp=1234567,
+ is_spam=True)
issue.assume_stale = False # We will store this issue.
self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
comment_id=None, user_id=111)
@@ -211,12 +239,20 @@
self.mox.VerifyAll()
self.assertNotIn(issue, self.issue_service.updated_issues)
- self.assertEqual(True, issue.is_spam)
+ self.assertEqual(issue.migration_modified_timestamp, 1234567)
+ self.assertEqual(issue.is_spam, True)
def testUnflagIssue_underThreshNoManualOverride(self):
issue = fake.MakeTestIssue(
- project_id=789, local_id=1, reporter_id=111, owner_id=456,
- summary='sum', status='Live', issue_id=78901, is_spam=True)
+ project_id=789,
+ local_id=1,
+ reporter_id=111,
+ owner_id=456,
+ summary='sum',
+ status='Live',
+ issue_id=78901,
+ migration_modified_timestamp=1234567,
+ is_spam=True)
self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
comment_id=None, user_id=111)
self.mock_report_tbl.Select(self.cnxn,
@@ -234,7 +270,8 @@
self.mox.VerifyAll()
self.assertNotIn(issue, self.issue_service.updated_issues)
- self.assertEqual(True, issue.is_spam)
+ self.assertEqual(issue.migration_modified_timestamp, 1234567)
+ self.assertEqual(issue.is_spam, True)
def testIsExempt_RegularUser(self):
author = user_pb2.MakeUser(111, email='test@example.com')
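
Note on the assertions added above: a verdict that actually flips is_spam
also stamps migration_modified_timestamp with the current time, while an
under-threshold or no-op verdict leaves both fields at their prior values
(here 1234567). A minimal sketch of that invariant, using a hypothetical
helper name (apply_spam_verdict is not part of spam_svc):

import time


def apply_spam_verdict(issue, verdict_is_spam, now=None):
  # Illustrative only; the field names (is_spam,
  # migration_modified_timestamp) match the Issue PB the tests construct.
  if issue.is_spam == verdict_is_spam:
    return False  # Verdict unchanged: no store, no timestamp bump.
  if now is None:
    now = int(time.time())
  issue.is_spam = verdict_is_spam
  issue.migration_modified_timestamp = now
  return True  # Caller stores the issue, as the tests assert.
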
diff --git a/services/test/star_svc_test.py b/services/test/star_svc_test.py
index 3a5ce74..d3b4cea 100644
--- a/services/test/star_svc_test.py
+++ b/services/test/star_svc_test.py
@@ -1,13 +1,13 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the star service."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
+import six
import unittest
try:
@@ -15,13 +15,13 @@
except ImportError:
import mox
import mock
+import time
from google.appengine.ext import testbed
-import settings
from mock import Mock
from framework import sql
-from proto import user_pb2
+from services import service_manager
from services import star_svc
from testing import fake
@@ -78,13 +78,13 @@
starrer_list_dict = self.star_service.LookupItemsStarrers(
self.cnxn, [123, 234])
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234], list(starrer_list_dict.keys()))
- self.assertItemsEqual([111, 333], starrer_list_dict[123])
- self.assertItemsEqual([111, 222], starrer_list_dict[234])
- self.assertItemsEqual([111, 333],
- self.star_service.starrer_cache.GetItem(123))
- self.assertItemsEqual([111, 222],
- self.star_service.starrer_cache.GetItem(234))
+ six.assertCountEqual(self, [123, 234], list(starrer_list_dict.keys()))
+ six.assertCountEqual(self, [111, 333], starrer_list_dict[123])
+ six.assertCountEqual(self, [111, 222], starrer_list_dict[234])
+ six.assertCountEqual(
+ self, [111, 333], self.star_service.starrer_cache.GetItem(123))
+ six.assertCountEqual(
+ self, [111, 222], self.star_service.starrer_cache.GetItem(234))
def SetUpLookupStarredItemIDs(self):
self.mock_tbl.Select(
@@ -96,9 +96,9 @@
self.mox.ReplayAll()
item_ids = self.star_service.LookupStarredItemIDs(self.cnxn, 111)
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234], item_ids)
- self.assertItemsEqual([123, 234],
- self.star_service.star_cache.GetItem(111))
+ six.assertCountEqual(self, [123, 234], item_ids)
+ six.assertCountEqual(
+ self, [123, 234], self.star_service.star_cache.GetItem(111))
def testIsItemStarredBy(self):
self.SetUpLookupStarredItemIDs()
@@ -129,7 +129,7 @@
count_dict = self.star_service.CountItemsStars(
self.cnxn, [123, 234])
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234], list(count_dict.keys()))
+ six.assertCountEqual(self, [123, 234], list(count_dict.keys()))
self.assertEqual(3, count_dict[123])
self.assertEqual(2, count_dict[234])
@@ -189,17 +189,86 @@
class IssueStarServiceTest(unittest.TestCase):
def setUp(self):
- self.mock_tbl = mock.Mock()
+ self.mox = mox.Mox()
+ self.mock_tbl = self.mox.CreateMock(sql.SQLTableManager)
self.mock_tbl.Delete = mock.Mock()
self.mock_tbl.InsertRows = mock.Mock()
+ self.mock_issue_tbl = self.mox.CreateMock(sql.SQLTableManager)
+
+ self.services = service_manager.Services()
+ self.services.issue = fake.IssueService()
+ self.services.config = fake.ConfigService()
+ self.services.features = fake.FeaturesService()
+
self.cache_manager = fake.CacheManager()
with mock.patch(
'framework.sql.SQLTableManager', return_value=self.mock_tbl):
self.issue_star = star_svc.IssueStarService(
self.cache_manager)
+ self.issue_star.issue_tbl = self.mock_issue_tbl
self.cnxn = 'fake connection'
+ self.now = int(time.time())
+
+ def testExpungeStarsByUsers(self):
+ self.mock_tbl.Select = mock.Mock(return_value=[(78901,), (78902,)])
+ self.mock_issue_tbl.Update = mock.Mock()
+
+ user_ids = [2, 3, 4]
+
+ self.mox.ReplayAll()
+ self.issue_star.ExpungeStarsByUsers(self.cnxn, user_ids, limit=40)
+ self.mox.VerifyAll()
+
+ self.mock_tbl.Select.assert_called_once_with(
+ self.cnxn,
+ cols=['IssueStar.issue_id'],
+ user_id=user_ids,
+ shard_id=mox.IgnoreArg(),
+ limit=40)
+ self.mock_tbl.Delete.assert_called_once_with(
+ self.cnxn, user_id=user_ids, commit=False, limit=40)
+ self.mock_issue_tbl.Update.assert_called_once_with(
+ self.cnxn, {'migration_modified': self.now},
+ id=[78901, 78902],
+ commit=False,
+ limit=40)
+
+ def testSetStarsBatch_Add(self):
+ issue = fake.MakeTestIssue(
+ project_id=789,
+ local_id=1,
+ reporter_id=111,
+ owner_id=456,
+ summary='sum',
+ status='Live',
+ issue_id=78901,
+ project_name='proj',
+ migration_modified_timestamp=1234567)
+ self.services.issue.TestAddIssue(issue)
+ config = self.services.config.GetProjectConfig(self.cnxn, 789)
+
+ # Set up mock for getting counts.
+ self.mock_tbl.Select(
+ self.cnxn,
+ cols=['issue_id', 'COUNT(user_id)'],
+ group_by=['issue_id'],
+ issue_id=[78901]).AndReturn([(78901, 2)])
+ self.mox.ReplayAll()
+
+ self.issue_star.SetStarsBatch(
+ self.cnxn, self.services, config, 78901, [111, 222], True)
+
+ self.mox.VerifyAll()
+ self.mock_tbl.InsertRows.assert_called_once_with(
+ self.cnxn, ['issue_id', 'user_id'], [(78901, 111), (78901, 222)],
+ ignore=True,
+ commit=True)
+
+ self.assertIn(issue, self.services.issue.updated_issues)
+ self.assertEqual(issue.migration_modified_timestamp, self.now)
+ self.assertEqual(issue.star_count, 2)
def testSetStarsBatch_SkipIssueUpdate_Remove(self):
self.issue_star.SetStarsBatch_SkipIssueUpdate(
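
The new testExpungeStarsByUsers pins down a three-step batch: select the
issue IDs whose stars are going away, delete the star rows, then stamp
migration_modified on the affected issues, all with commit=False so the
caller owns the transaction. A rough sketch under the call signatures the
mocks assume (the real Select also receives a shard_id, omitted here; the
function name is illustrative):

import time


def expunge_stars_by_users(cnxn, star_tbl, issue_tbl, user_ids, limit):
  # Sketch only; mirrors the mocked sql.SQLTableManager calls above.
  rows = star_tbl.Select(
      cnxn, cols=['IssueStar.issue_id'], user_id=user_ids, limit=limit)
  issue_ids = [row[0] for row in rows]
  star_tbl.Delete(cnxn, user_id=user_ids, commit=False, limit=limit)
  # Record the change for migration tooling; both statements are
  # committed together when the caller commits cnxn.
  issue_tbl.Update(
      cnxn, {'migration_modified': int(time.time())},
      id=issue_ids, commit=False, limit=limit)
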
diff --git a/services/test/template_svc_test.py b/services/test/template_svc_test.py
index 964722d..5e9f488 100644
--- a/services/test/template_svc_test.py
+++ b/services/test/template_svc_test.py
@@ -1,7 +1,6 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Unit tests for services.template_svc module."""
from __future__ import print_function
@@ -13,7 +12,7 @@
from mock import Mock, patch
-from proto import tracker_pb2
+from mrproto import tracker_pb2
from services import template_svc
from testing import fake
from testing import testing_helpers
diff --git a/services/test/tracker_fulltext_test.py b/services/test/tracker_fulltext_test.py
index a4c935e..d977dea 100644
--- a/services/test/tracker_fulltext_test.py
+++ b/services/test/tracker_fulltext_test.py
@@ -1,13 +1,13 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Unit tests for tracker_fulltext module."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
+import six
import unittest
try:
@@ -19,8 +19,8 @@
import settings
from framework import framework_views
-from proto import ast_pb2
-from proto import tracker_pb2
+from mrproto import ast_pb2
+from mrproto import tracker_pb2
from services import fulltext_helpers
from services import tracker_fulltext
from testing import fake
@@ -243,7 +243,7 @@
issue_ids, capped = tracker_fulltext.SearchIssueFullText(
[789], query_ast_conj, 1)
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234], issue_ids)
+ six.assertCountEqual(self, [123, 234], issue_ids)
self.assertFalse(capped)
def testSearchIssueFullText_CrossProject(self):
@@ -262,7 +262,7 @@
issue_ids, capped = tracker_fulltext.SearchIssueFullText(
[789, 678], query_ast_conj, 1)
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234], issue_ids)
+ six.assertCountEqual(self, [123, 234], issue_ids)
self.assertFalse(capped)
def testSearchIssueFullText_Capped(self):
@@ -280,7 +280,7 @@
issue_ids, capped = tracker_fulltext.SearchIssueFullText(
[789], query_ast_conj, 1)
self.mox.VerifyAll()
- self.assertItemsEqual([123, 234], issue_ids)
+ six.assertCountEqual(self, [123, 234], issue_ids)
self.assertTrue(capped)
finally:
settings.fulltext_limit_per_shard = orig
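
Almost every hunk in this file is the same mechanical Python 3 change:
assertItemsEqual exists only on Python 2's unittest (Python 3 renamed it
assertCountEqual), so the tests call through six, which dispatches to
whichever name the running interpreter provides. For example:

import unittest

import six


class ExampleTest(unittest.TestCase):

  def testOrderInsensitive(self):
    # Order-insensitive, duplicate-aware comparison on Python 2 and 3.
    six.assertCountEqual(self, [123, 234], [234, 123])
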
diff --git a/services/test/user_svc_test.py b/services/test/user_svc_test.py
index 323d3eb..c709d75 100644
--- a/services/test/user_svc_test.py
+++ b/services/test/user_svc_test.py
@@ -1,13 +1,13 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the user service."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
+import six
import unittest
import mock
@@ -22,7 +22,7 @@
from framework import exceptions
from framework import framework_constants
from framework import sql
-from proto import user_pb2
+from mrproto import user_pb2
from services import user_svc
from testing import fake
@@ -126,16 +126,17 @@
self.mox.UnsetStubs()
self.mox.ResetAll()
- def SetUpCreateUsers(self):
+ def testCreateUsers(self):
+ self.user_service.user_tbl.Select(
+ self.cnxn,
+ cols=('user_id',),
+ user_id=[3035911623, 2996997680],
+ ).AndReturn([(2996997680,)])
self.user_service.user_tbl.InsertRows(
self.cnxn,
['user_id', 'email', 'obscure_email'],
- [(3035911623, 'a@example.com', True),
- (2996997680, 'b@example.com', True)]
+ [(3035911623, 'a@example.com', True)],
).AndReturn(None)
-
- def testCreateUsers(self):
- self.SetUpCreateUsers()
self.mox.ReplayAll()
self.user_service._CreateUsers(
self.cnxn, ['a@example.com', 'b@example.com'])
@@ -461,7 +462,7 @@
self.user_service.linkedaccount_tbl.Select.return_value = []
with self.assertRaises(exceptions.InputException) as cm:
self.user_service.AcceptLinkedChild(self.cnxn, 111, 333)
- self.assertEqual('No such invite', cm.exception.message)
+ self.assertEqual('No such invite', str(cm.exception))
def testAcceptLinkedChild_Normal(self):
"""Create linkage between accounts and remove invite."""
@@ -587,8 +588,8 @@
self.cnxn, cols=['email'], limit=1000, offset=0,
where=[('user_id != %s', [framework_constants.DELETED_USER_ID])],
order_by=[('user_id ASC', [])])
- self.assertItemsEqual(
- emails, ['cow@test.com', 'pig@test.com', 'fox@test.com'])
+ six.assertCountEqual(
+ self, emails, ['cow@test.com', 'pig@test.com', 'fox@test.com'])
def testGetAllUserEmailsBatch_CustomLimit(self):
rows = [('cow@test.com',), ('pig@test.com',), ('fox@test.com',)]
@@ -599,5 +600,5 @@
self.cnxn, cols=['email'], limit=30, offset=60,
where=[('user_id != %s', [framework_constants.DELETED_USER_ID])],
order_by=[('user_id ASC', [])])
- self.assertItemsEqual(
- emails, ['cow@test.com', 'pig@test.com', 'fox@test.com'])
+ six.assertCountEqual(
+ self, emails, ['cow@test.com', 'pig@test.com', 'fox@test.com'])
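
The str(cm.exception) fix above is another Python 2/3 portability change:
BaseException.message was removed in Python 3, while str() of an exception
yields its message on both versions. Roughly:

try:
  raise ValueError('No such invite')
except ValueError as e:
  # e.message exists only on Python 2; str(e) works everywhere.
  assert str(e) == 'No such invite'
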
diff --git a/services/test/usergroup_svc_test.py b/services/test/usergroup_svc_test.py
index 10b2c8a..79b94d5 100644
--- a/services/test/usergroup_svc_test.py
+++ b/services/test/usergroup_svc_test.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Tests for the usergroup service."""
from __future__ import print_function
@@ -10,6 +9,7 @@
import collections
import mock
+import six
import unittest
try:
@@ -22,7 +22,7 @@
from framework import exceptions
from framework import permissions
from framework import sql
-from proto import usergroup_pb2
+from mrproto import usergroup_pb2
from services import service_manager
from services import usergroup_svc
from testing import fake
@@ -49,9 +49,9 @@
memberships_rows = [(111, 777), (111, 888), (222, 888)]
actual = self.usergroup_service.memberships_2lc._DeserializeMemberships(
memberships_rows)
- self.assertItemsEqual([111, 222], list(actual.keys()))
- self.assertItemsEqual([777, 888], actual[111])
- self.assertItemsEqual([888], actual[222])
+ six.assertCountEqual(self, [111, 222], list(actual.keys()))
+ six.assertCountEqual(self, [777, 888], actual[111])
+ six.assertCountEqual(self, [888], actual[222])
class UserGroupServiceTest(unittest.TestCase):
@@ -236,8 +236,8 @@
members_dict, owners_dict = self.usergroup_service.LookupAllMembers(
self.cnxn, [777])
self.mox.VerifyAll()
- self.assertItemsEqual([111, 222, 888, 999], members_dict[777])
- self.assertItemsEqual([], owners_dict[777])
+ six.assertCountEqual(self, [111, 222, 888, 999], members_dict[777])
+ six.assertCountEqual(self, [], owners_dict[777])
def testExpandAnyGroupEmailRecipients(self):
self.usergroup_service.group_dag.initialized = True
@@ -257,8 +257,8 @@
direct, indirect = self.usergroup_service.ExpandAnyGroupEmailRecipients(
self.cnxn, [111, 777, 888, 999])
self.mox.VerifyAll()
- self.assertItemsEqual([111, 888, 999], direct)
- self.assertItemsEqual([222, 444], indirect)
+ six.assertCountEqual(self, [111, 888, 999], direct)
+ six.assertCountEqual(self, [222, 444], indirect)
def SetUpLookupMembers(self, group_member_dict):
mock_membership_rows = []
@@ -275,7 +275,7 @@
self.mox.ReplayAll()
member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [])
self.mox.VerifyAll()
- self.assertItemsEqual({}, member_ids)
+ six.assertCountEqual(self, {}, member_ids)
def testLookupMembers_Nonexistent(self):
"""If some requested groups don't exist, they are ignored."""
@@ -283,7 +283,7 @@
self.mox.ReplayAll()
member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [777])
self.mox.VerifyAll()
- self.assertItemsEqual([], member_ids[777])
+ six.assertCountEqual(self, [], member_ids[777])
def testLookupMembers_AllEmpty(self):
"""Requesting all empty groups results in no members."""
@@ -291,14 +291,14 @@
self.mox.ReplayAll()
member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [888, 999])
self.mox.VerifyAll()
- self.assertItemsEqual([], member_ids[888])
+ six.assertCountEqual(self, [], member_ids[888])
def testLookupMembers_OneGroup(self):
self.SetUpLookupMembers({888: [111, 222]})
self.mox.ReplayAll()
member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [888])
self.mox.VerifyAll()
- self.assertItemsEqual([111, 222], member_ids[888])
+ six.assertCountEqual(self, [111, 222], member_ids[888])
def testLookupMembers_GroupsAndNonGroups(self):
"""We ignore any non-groups passed in."""
@@ -307,7 +307,7 @@
member_ids, _ = self.usergroup_service.LookupMembers(
self.cnxn, [111, 333, 888])
self.mox.VerifyAll()
- self.assertItemsEqual([111, 222], member_ids[888])
+ six.assertCountEqual(self, [111, 222], member_ids[888])
def testLookupMembers_OverlappingGroups(self):
"""We get the union of IDs. Imagine 888 = {111} and 999 = {111, 222}."""
@@ -315,8 +315,8 @@
self.mox.ReplayAll()
member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [888, 999])
self.mox.VerifyAll()
- self.assertItemsEqual([111, 222], member_ids[999])
- self.assertItemsEqual([111], member_ids[888])
+ six.assertCountEqual(self, [111, 222], member_ids[999])
+ six.assertCountEqual(self, [111], member_ids[888])
def testLookupVisibleMembers_LimitedVisiblity(self):
"""We get only the member IDs in groups that the user is allowed to see."""
@@ -332,7 +332,7 @@
self.cnxn, [888, 999], permissions.USER_PERMISSIONSET, set(),
self.services)
self.mox.VerifyAll()
- self.assertItemsEqual([111], member_ids[888])
+ six.assertCountEqual(self, [111], member_ids[888])
self.assertNotIn(999, member_ids)
def SetUpGetAllUserGroupsInfo(self, mock_settings_rows, mock_count_rows,
diff --git a/services/tracker_fulltext.py b/services/tracker_fulltext.py
index ecbfc44..a5709ea 100644
--- a/services/tracker_fulltext.py
+++ b/services/tracker_fulltext.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of functions that provide fulltext search for issues."""
from __future__ import print_function
diff --git a/services/user_svc.py b/services/user_svc.py
index 28ad465..3307cfd 100644
--- a/services/user_svc.py
+++ b/services/user_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""A set of functions that provide persistence for users.
@@ -21,7 +20,7 @@
from framework import framework_helpers
from framework import sql
from framework import validate
-from proto import user_pb2
+from mrproto import user_pb2
from services import caches
@@ -204,9 +203,23 @@
"""Create many users in the database."""
emails = [email.lower() for email in emails]
ids = [framework_helpers.MurmurHash3_x86_32(email) for email in emails]
+
+ rows = self.user_tbl.Select(cnxn, cols=('user_id',), user_id=ids)
+ existing_ids = set(row[0] for row in rows)
+ if existing_ids:
+ existing_users = sorted(
+ (user_id, email)
+ for (user_id, email) in zip(ids, emails)
+ if user_id in existing_ids)
+ logging.error(
+ 'Unable to create users because IDs are already taken: %.100000s',
+ existing_users)
+
row_values = [
- (user_id, email, not framework_bizobj.IsPriviledgedDomainUser(email))
- for (user_id, email) in zip(ids, emails)]
+ (user_id, email, not framework_bizobj.IsPriviledgedDomainUser(email))
+ for (user_id, email) in zip(ids, emails)
+ if user_id not in existing_ids
+ ]
self.user_tbl.InsertRows(
cnxn, ['user_id', 'email', 'obscure_email'], row_values)
self.user_2lc.InvalidateKeys(cnxn, ids)
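
Context for the _CreateUsers change: a Monorail user ID is the
MurmurHash3_x86_32 of the lowercased email, so a batch containing an
already-registered email (or, in principle, a colliding hash) would
previously attempt to re-insert a taken user_id. The new code selects the
taken IDs first, logs them, and inserts only the remainder, which is what
the updated testCreateUsers exercises. The filtering step in isolation, as
a sketch with illustrative names (the real rows also carry obscure_email):

import logging


def filter_new_user_rows(ids, emails, existing_ids):
  # Mirrors the logic above: log the collisions, keep the rest.
  taken = sorted(
      (uid, email)
      for uid, email in zip(ids, emails)
      if uid in existing_ids)
  if taken:
    logging.error(
        'Unable to create users because IDs are already taken: %.100000s',
        taken)
  return [
      (uid, email) for uid, email in zip(ids, emails)
      if uid not in existing_ids
  ]
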
diff --git a/services/usergroup_svc.py b/services/usergroup_svc.py
index 72797fc..5959626 100644
--- a/services/usergroup_svc.py
+++ b/services/usergroup_svc.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
"""Persistence class for user groups.
@@ -24,7 +23,7 @@
from framework import exceptions
from framework import permissions
from framework import sql
-from proto import usergroup_pb2
+from mrproto import usergroup_pb2
from services import caches