# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

"""Fake object classes that are useful for unit tests."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

import collections
import itertools
import logging
import re
import sys
import time

from six import string_types

import settings
from features import filterrules_helpers
from framework import exceptions
from framework import framework_bizobj
from framework import framework_constants
from framework import framework_helpers
from framework import monorailrequest
from framework import permissions
from framework import profiler
from framework import validate
from proto import features_pb2
from proto import project_pb2
from proto import tracker_pb2
from proto import user_pb2
from proto import usergroup_pb2
from services import caches
from services import config_svc
from services import features_svc
from services import project_svc
from tracker import tracker_bizobj
from tracker import tracker_constants

# Many fakes return partial or constant values, regardless of their arguments.
# pylint: disable=unused-argument

BOUNDARY = '-----thisisaboundary'
OWNER_ROLE = 'OWNER_ROLE'
COMMITTER_ROLE = 'COMMITTER_ROLE'
CONTRIBUTOR_ROLE = 'CONTRIBUTOR_ROLE'
EDITOR_ROLE = 'EDITOR_ROLE'
FOLLOWER_ROLE = 'FOLLOWER_ROLE'

def Hotlist(
    hotlist_name, hotlist_id, hotlist_item_fields=None,
    is_private=False, owner_ids=None, editor_ids=None, follower_ids=None,
    default_col_spec=None, summary=None, description=None):
  hotlist_id = hotlist_id or hash(hotlist_name)
  return features_pb2.MakeHotlist(
      hotlist_name, hotlist_item_fields=hotlist_item_fields,
      hotlist_id=hotlist_id, is_private=is_private, owner_ids=owner_ids or [],
      editor_ids=editor_ids or [], follower_ids=follower_ids or [],
      default_col_spec=default_col_spec, summary=summary,
      description=description)

def HotlistItem(issue_id, rank=None, adder_id=None, date_added=None, note=None):
  return features_pb2.MakeHotlistItem(
      issue_id=issue_id, rank=rank, adder_id=adder_id, date_added=date_added,
      note=note)
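
# Example usage sketch (illustrative only; the hotlist name, IDs, and
# timestamp below are made-up values):
#
#   hotlist = Hotlist('my-hotlist', 123, owner_ids=[111], editor_ids=[222])
#   item = HotlistItem(78901, rank=1, adder_id=111, date_added=1234567890)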

def Project(
    project_name='proj', project_id=None, state=project_pb2.ProjectState.LIVE,
    access=project_pb2.ProjectAccess.ANYONE, moved_to=None,
    cached_content_timestamp=None,
    owner_ids=None, committer_ids=None, contributor_ids=None):
  """Returns a project protocol buffer with the given attributes."""
  project_id = project_id or hash(project_name)
  return project_pb2.MakeProject(
      project_name, project_id=project_id, state=state, access=access,
      moved_to=moved_to, cached_content_timestamp=cached_content_timestamp,
      owner_ids=owner_ids, committer_ids=committer_ids,
      contributor_ids=contributor_ids)


def MakeTestFieldDef(
    field_id, project_id, field_type, field_name='', applic_type=None,
    applic_pred=None, is_required=False, is_niche=False, is_multivalued=False,
    min_value=None, max_value=None, regex=None, needs_member=False,
    needs_perm=None, grants_perm=None, notify_on=None, date_action_str=None,
    docstring=None, admin_ids=None, editor_ids=None, approval_id=None,
    is_phase_field=False, is_restricted_field=False):
  return tracker_bizobj.MakeFieldDef(
      field_id, project_id, field_name, field_type, applic_type, applic_pred,
      is_required, is_niche, is_multivalued, min_value, max_value, regex,
      needs_member, needs_perm, grants_perm, notify_on, date_action_str,
      docstring, False,
      approval_id=approval_id, is_phase_field=is_phase_field,
      is_restricted_field=is_restricted_field, admin_ids=admin_ids,
      editor_ids=editor_ids)

def MakeTestApprovalDef(approval_id, approver_ids=None, survey=None):
  return tracker_pb2.ApprovalDef(
      approval_id=approval_id,
      approver_ids=approver_ids,
      survey=survey)

def MakePhase(phase_id, name='', rank=0):
  return tracker_pb2.Phase(phase_id=phase_id, name=name, rank=rank)


def MakeApprovalValue(
    approval_id,
    status=tracker_pb2.ApprovalStatus.NOT_SET,
    setter_id=None,
    set_on=None,
    approver_ids=None,
    phase_id=None):
  if approver_ids is None:
    approver_ids = []
  return tracker_pb2.ApprovalValue(
      approval_id=approval_id,
      status=status,
      setter_id=setter_id,
      set_on=set_on,
      approver_ids=approver_ids,
      phase_id=phase_id)


def MakeFieldValue(
    field_id,
    int_value=None,
    str_value=None,
    user_id=None,
    date_value=None,
    url_value=None,
    derived=None,
    phase_id=None):
  return tracker_pb2.FieldValue(
      field_id=field_id,
      int_value=int_value,
      str_value=str_value,
      user_id=user_id,
      date_value=date_value,
      url_value=url_value,
      derived=derived,
      phase_id=phase_id)
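
# Example usage sketch (illustrative only; the field ID, project ID, and
# field name below are made-up values): the field helpers are typically used
# together, e.g. an integer field plus a value for it.
#
#   fd = MakeTestFieldDef(1, 789, tracker_pb2.FieldTypes.INT_TYPE,
#                         field_name='EstDays')
#   fv = MakeFieldValue(1, int_value=3, derived=False)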


def MakeTestIssue(
    project_id, local_id, summary, status, owner_id, labels=None,
    derived_labels=None, derived_status=None, merged_into=0, star_count=0,
    derived_owner_id=0, issue_id=None, reporter_id=None, opened_timestamp=None,
    closed_timestamp=None, modified_timestamp=None, is_spam=False,
    component_ids=None, project_name=None, field_values=None, cc_ids=None,
    derived_cc_ids=None, assume_stale=True, phases=None, approval_values=None,
    merged_into_external=None, attachment_count=0, derived_component_ids=None):
  """Easily make an Issue for testing."""
  issue = tracker_pb2.Issue()
  issue.project_id = project_id
  issue.project_name = project_name
  issue.local_id = local_id
  issue.issue_id = issue_id if issue_id else 100000 + local_id
  issue.reporter_id = reporter_id if reporter_id else owner_id
  issue.summary = summary
  issue.status = status
  issue.owner_id = owner_id
  issue.derived_owner_id = derived_owner_id
  issue.star_count = star_count
  issue.merged_into = merged_into
  issue.merged_into_external = merged_into_external
  issue.is_spam = is_spam
  issue.attachment_count = attachment_count
  if cc_ids:
    issue.cc_ids = cc_ids
  if derived_cc_ids:
    issue.derived_cc_ids = derived_cc_ids
  issue.assume_stale = assume_stale
  if opened_timestamp:
    issue.opened_timestamp = opened_timestamp
    issue.owner_modified_timestamp = opened_timestamp
    issue.status_modified_timestamp = opened_timestamp
    issue.component_modified_timestamp = opened_timestamp
  if modified_timestamp:
    issue.modified_timestamp = modified_timestamp
  if closed_timestamp:
    issue.closed_timestamp = closed_timestamp
  if labels is not None:
    if isinstance(labels, string_types):
      labels = labels.split()
    issue.labels.extend(labels)
  if derived_labels is not None:
    if isinstance(derived_labels, string_types):
      derived_labels = derived_labels.split()
    issue.derived_labels.extend(derived_labels)
  if derived_status is not None:
    issue.derived_status = derived_status
  if component_ids is not None:
    issue.component_ids = component_ids
  if derived_component_ids is not None:
    issue.derived_component_ids = derived_component_ids
  if field_values is not None:
    issue.field_values = field_values
  if phases is not None:
    issue.phases = phases
  if approval_values is not None:
    issue.approval_values = approval_values
  return issue
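
# Example usage sketch (illustrative only; all IDs, the summary, and labels
# are made-up values). Labels may be passed as a space-separated string.
#
#   issue = MakeTestIssue(
#       789, 1, 'The summary', 'New', 111, labels='Pri-1 Type-Defect',
#       issue_id=78901, project_name='proj')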


def MakeTestComponentDef(project_id, comp_id, path='', cc_ids=None):
  if cc_ids is None:
    cc_ids = []
  return tracker_bizobj.MakeComponentDef(
      comp_id, project_id, path, '', False, [], cc_ids, None, None)


def MakeTestConfig(project_id, labels, statuses):
  """Convenient function to make a ProjectIssueConfig object."""
  config = tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)
  if isinstance(labels, string_types):
    labels = labels.split()
  if isinstance(statuses, string_types):
    statuses = statuses.split()
  config.well_known_labels = [
      tracker_pb2.LabelDef(label=lab) for lab in labels]
  config.well_known_statuses = [
      tracker_pb2.StatusDef(status=stat) for stat in statuses]
  return config
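
# Example usage sketch (illustrative only; the project ID, labels, and
# statuses are made-up values). Labels and statuses may be space-separated
# strings or lists.
#
#   config = MakeTestConfig(789, 'Type-Defect Pri-1', 'New Accepted Fixed')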


class MonorailConnection(object):
  """Fake connection to databases for use in tests."""

  def Commit(self):
    pass

  def Close(self):
    pass


class MonorailRequest(monorailrequest.MonorailRequest):
  """Subclass of MonorailRequest suitable for testing."""

  def __init__(self, services, user_info=None, project=None, perms=None,
               hotlist=None, **kwargs):
    """Construct a test MonorailRequest.

    Typically, this is constructed via testing.helpers.GetRequestObjects,
    which also causes url parsing and optionally initializes the user,
    project, and permissions info.

    Args:
      services: connections to backends.
      user_info: a dict of user attributes to set on a MonorailRequest object.
          For example, "user_id: 5" causes self.auth.user_id=5.
      project: the Project pb for this request.
      perms: a PermissionSet for this request.
    """
    super(MonorailRequest, self).__init__(services, **kwargs)

    if user_info is not None:
      for key in user_info:
        setattr(self.auth, key, user_info[key])
      if 'user_id' in user_info:
        self.auth.effective_ids = {user_info['user_id']}

    self.perms = perms or permissions.ADMIN_PERMISSIONSET
    self.profiler = profiler.Profiler()
    self.project = project
    self.hotlist = hotlist
    if hotlist is not None:
      self.hotlist_id = hotlist.hotlist_id
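
# Example usage sketch (illustrative only): tests usually construct this via
# testing.helpers.GetRequestObjects, but it can also be built directly.
# `services` is assumed here to be a fakes-backed Services object, and the
# user/project IDs are made-up values.
#
#   mr = MonorailRequest(services, user_info={'user_id': 111},
#                        project=Project('proj', 789, owner_ids=[111]))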

class UserGroupService(object):
  """Fake UserGroupService class for testing other code."""

  def __init__(self):
    # Test-only sequence of expunged users.
    self.expunged_users_in_groups = []

    self.group_settings = {}
    self.group_members = {}
    self.group_addrs = {}
    self.role_dict = {}

  def TestAddGroupSettings(
      self,
      group_id,
      email,
      who_can_view=None,
      anyone_can_join=False,
      who_can_add=None,
      external_group_type=None,
      last_sync_time=0,
      friend_projects=None,
      notify_members=True,
      notify_group=False):
    """Set up a fake group for testing.

    Args:
      group_id: int user ID of the new user group.
      email: string email address to identify the user group.
      who_can_view: string enum 'owners', 'members', or 'anyone'.
      anyone_can_join: optional boolean to allow any users to join the group.
      who_can_add: optional list of int user IDs of users who can add
          more members to the group.
      notify_members: optional boolean for whether emails to this group should
          be sent directly to members.
      notify_group: optional boolean for whether emails to this group should
          be sent directly to the group email.
    """
    friend_projects = friend_projects or []
    group_settings = usergroup_pb2.MakeSettings(
        who_can_view or 'members', external_group_type, last_sync_time,
        friend_projects, notify_members, notify_group)
    self.group_settings[group_id] = group_settings
    self.group_addrs[group_id] = email
    # TODO(jrobbins): store the other settings.

  def TestAddMembers(self, group_id, user_ids, role='member'):
    self.group_members.setdefault(group_id, []).extend(user_ids)
    for user_id in user_ids:
      self.role_dict.setdefault(group_id, {})[user_id] = role
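
  # Example usage sketch (illustrative only; the group ID, email, and member
  # IDs below are made-up values):
  #
  #   usergroup_svc = UserGroupService()
  #   usergroup_svc.TestAddGroupSettings(888, 'group@example.com',
  #                                      who_can_view='members')
  #   usergroup_svc.TestAddMembers(888, [111, 222], role='member')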

  def LookupAllMemberships(self, _cnxn, user_ids, use_cache=True):
    return {
        user_id: self.LookupMemberships(_cnxn, user_id)
        for user_id in user_ids
    }

  def LookupMemberships(self, _cnxn, user_id):
    memberships = {
        group_id for group_id, member_ids in self.group_members.items()
        if user_id in member_ids}
    return memberships

  def DetermineWhichUserIDsAreGroups(self, _cnxn, user_ids):
    return [uid for uid in user_ids
            if uid in self.group_settings]

  def GetAllUserGroupsInfo(self, cnxn):
    infos = []
    for group_id in self.group_settings:
      infos.append(
          (self.group_addrs[group_id],
           len(self.group_members.get(group_id, [])),
           self.group_settings[group_id], group_id))

    return infos

  def GetAllGroupSettings(self, _cnxn, group_ids):
    return {gid: self.group_settings[gid]
            for gid in group_ids
            if gid in self.group_settings}

  def GetGroupSettings(self, cnxn, group_id):
    return self.GetAllGroupSettings(cnxn, [group_id]).get(group_id)

  def CreateGroup(self, cnxn, services, email, who_can_view_members,
                  ext_group_type=None, friend_projects=None):
    friend_projects = friend_projects or []
    group_id = services.user.LookupUserID(
        cnxn, email, autocreate=True, allowgroups=True)
    self.group_addrs[group_id] = email
    group_settings = usergroup_pb2.MakeSettings(
        who_can_view_members, ext_group_type, 0, friend_projects)
    self.UpdateSettings(cnxn, group_id, group_settings)
    return group_id

  def DeleteGroups(self, cnxn, group_ids):
    member_ids_dict, owner_ids_dict = self.LookupMembers(cnxn, group_ids)
    citizens_id_dict = collections.defaultdict(list)
    for g_id, user_ids in member_ids_dict.items():
      citizens_id_dict[g_id].extend(user_ids)
    for g_id, user_ids in owner_ids_dict.items():
      citizens_id_dict[g_id].extend(user_ids)
    for g_id, citizen_ids in citizens_id_dict.items():
      # Remove group members, friend projects and settings
      self.RemoveMembers(cnxn, g_id, citizen_ids)
      self.group_settings.pop(g_id, None)

  def LookupComputedMemberships(self, cnxn, domain, use_cache=True):
    group_email = 'everyone@%s' % domain
    group_id = self.LookupUserGroupID(cnxn, group_email, use_cache=use_cache)
    if group_id:
      return [group_id]

    return []

  def LookupUserGroupID(self, cnxn, group_email, use_cache=True):
    for group_id in self.group_settings:
      if group_email == self.group_addrs.get(group_id):
        return group_id
    return None

  def LookupMembers(self, _cnxn, group_id_list):
    members_dict = {}
    owners_dict = {}
    for gid in group_id_list:
      members_dict[gid] = []
      owners_dict[gid] = []
      for mid in self.group_members.get(gid, []):
        if self.role_dict.get(gid, {}).get(mid) == 'owner':
          owners_dict[gid].append(mid)
        elif self.role_dict.get(gid, {}).get(mid) == 'member':
          members_dict[gid].append(mid)
    return members_dict, owners_dict

  def LookupAllMembers(self, _cnxn, group_id_list):
    direct_members, direct_owners = self.LookupMembers(
        _cnxn, group_id_list)
    members_dict = {}
    owners_dict = {}
    for gid in group_id_list:
      members = direct_members[gid]
      owners = direct_owners[gid]
      owners_dict[gid] = owners
      members_dict[gid] = members
      group_ids = set([uid for uid in members + owners
                       if uid in self.group_settings])
      while group_ids:
        indirect_members, indirect_owners = self.LookupMembers(
            _cnxn, group_ids)
        child_members = set()
        child_owners = set()
        for _, children in indirect_members.items():
          child_members.update(children)
        for _, children in indirect_owners.items():
          child_owners.update(children)
        members_dict[gid].extend(list(child_members))
        owners_dict[gid].extend(list(child_owners))
        group_ids = set(self.DetermineWhichUserIDsAreGroups(
            _cnxn, list(child_members) + list(child_owners)))
      members_dict[gid] = list(set(members_dict[gid]))
    return members_dict, owners_dict

  def RemoveMembers(self, _cnxn, group_id, old_member_ids):
    current_member_ids = self.group_members.get(group_id, [])
    revised_member_ids = [mid for mid in current_member_ids
                          if mid not in old_member_ids]
    self.group_members[group_id] = revised_member_ids

  def UpdateMembers(self, _cnxn, group_id, member_ids, new_role):
    self.RemoveMembers(_cnxn, group_id, member_ids)
    self.TestAddMembers(group_id, member_ids, new_role)

  def UpdateSettings(self, _cnxn, group_id, group_settings):
    self.group_settings[group_id] = group_settings

  def ExpandAnyGroupEmailRecipients(self, cnxn, user_ids):
    group_ids = set(self.DetermineWhichUserIDsAreGroups(cnxn, user_ids))
    group_settings_dict = self.GetAllGroupSettings(cnxn, group_ids)
    member_ids_dict, owner_ids_dict = self.LookupAllMembers(cnxn, group_ids)
    indirect_ids = set()
    direct_ids = {uid for uid in user_ids if uid not in group_ids}
    for gid, group_settings in group_settings_dict.items():
      if group_settings.notify_members:
        indirect_ids.update(member_ids_dict.get(gid, set()))
        indirect_ids.update(owner_ids_dict.get(gid, set()))
      if group_settings.notify_group:
        direct_ids.add(gid)

    return list(direct_ids), list(indirect_ids)

  def LookupVisibleMembers(
      self, cnxn, group_id_list, perms, effective_ids, services):
    settings_dict = self.GetAllGroupSettings(cnxn, group_id_list)
    group_ids = list(settings_dict.keys())

    direct_member_ids_dict, direct_owner_ids_dict = self.LookupMembers(
        cnxn, group_ids)
    all_member_ids_dict, all_owner_ids_dict = self.LookupAllMembers(
        cnxn, group_ids)
    visible_member_ids_dict = {}
    visible_owner_ids_dict = {}
    for gid in group_ids:
      member_ids = all_member_ids_dict[gid]
      owner_ids = all_owner_ids_dict[gid]
      if permissions.CanViewGroupMembers(
          perms, effective_ids, settings_dict[gid], member_ids, owner_ids, []):
        visible_member_ids_dict[gid] = direct_member_ids_dict[gid]
        visible_owner_ids_dict[gid] = direct_owner_ids_dict[gid]

    return visible_member_ids_dict, visible_owner_ids_dict

  def ValidateFriendProjects(self, cnxn, services, friend_projects):
    project_names = list(filter(None, re.split('; |, | |;|,', friend_projects)))
    id_dict = services.project.LookupProjectIDs(cnxn, project_names)
    missed_projects = []
    result = []
    for p_name in project_names:
      if p_name in id_dict:
        result.append(id_dict[p_name])
      else:
        missed_projects.append(p_name)
    error_msg = ''
    if missed_projects:
      error_msg = 'Project(s) %s do not exist' % ', '.join(missed_projects)
      return None, error_msg
    else:
      return result, None

  def ExpungeUsersInGroups(self, cnxn, ids):
    self.expunged_users_in_groups.extend(ids)


class CacheManager(object):

  def __init__(self, invalidate_tbl=None):
    self.last_call = None
    self.cache_registry = collections.defaultdict(list)
    self.processed_invalidations_up_to = 0

  def RegisterCache(self, cache, kind):
    """Register a cache to be notified of future invalidations."""
    self.cache_registry[kind].append(cache)

  def DoDistributedInvalidation(self, cnxn):
    """Drop any cache entries that were invalidated by other jobs."""
    self.last_call = 'DoDistributedInvalidation', cnxn

  def StoreInvalidateRows(self, cnxn, kind, keys):
    """Store database rows to let all frontends know to invalidate."""
    self.last_call = 'StoreInvalidateRows', cnxn, kind, keys

  def StoreInvalidateAll(self, cnxn, kind):
    """Store a database row to let all frontends know to invalidate."""
    self.last_call = 'StoreInvalidateAll', cnxn, kind


class UserService(object):

  def __init__(self):
    """Creates a test-appropriate UserService object."""
    self.users_by_email = {}  # {email: user_id, ...}
    self.users_by_id = {}  # {user_id: email, ...}
    self.test_users = {}  # {user_id: user_pb, ...}
    self.visited_hotlists = {}  # user_id:[(hotlist_id, viewed), ...]
    self.invite_rows = []  # (parent_id, child_id)
    self.linked_account_rows = []  # (parent_id, child_id)
    self.prefs_dict = {}  # {user_id: UserPrefs}

  def TestAddUser(
      self, email, user_id, add_user=True, banned=False, obscure_email=True):
    """Add a user to the fake UserService instance.

    Args:
      email: Email of the user.
      user_id: int user ID.
      add_user: Flag whether user pb should be created, i.e. whether a
          Monorail account should be created
      banned: Boolean to set the user as banned
      obscure_email: Boolean to determine whether to obscure the user's email.

    Returns:
      The User PB that was added, or None.
    """
    self.users_by_email[email] = user_id
    self.users_by_id[user_id] = email

    user = None
    if add_user:
      user = user_pb2.MakeUser(user_id)
      user.is_site_admin = False
      user.email = email
      user.obscure_email = obscure_email
      if banned:
        user.banned = 'is banned'
      self.test_users[user_id] = user

    return user
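
  # Example usage sketch (illustrative only; the email and user ID below are
  # made-up values, and the fake ignores the connection argument):
  #
  #   user_svc = UserService()
  #   user_svc.TestAddUser('alice@example.com', 111)
  #   user_svc.GetUser(None, 111).email  # 'alice@example.com'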

  def GetUser(self, cnxn, user_id):
    return self.GetUsersByIDs(cnxn, [user_id])[user_id]

  def _CreateUser(self, _cnxn, email):
    if email in self.users_by_email:
      return
    user_id = framework_helpers.MurmurHash3_x86_32(email)
    self.TestAddUser(email, user_id)

  def _CreateUsers(self, cnxn, emails):
    for email in emails:
      self._CreateUser(cnxn, email)

  def LookupUserID(self, cnxn, email, autocreate=False, allowgroups=False):
    email_dict = self.LookupUserIDs(
        cnxn, [email], autocreate=autocreate, allowgroups=allowgroups)
    if email in email_dict:
      return email_dict[email]
    raise exceptions.NoSuchUserException('%r not found' % email)

  def GetUsersByIDs(self, cnxn, user_ids, use_cache=True, skip_missed=False):
    user_dict = {}
    for user_id in user_ids:
      if user_id and self.test_users.get(user_id):
        user_dict[user_id] = self.test_users[user_id]
      elif not skip_missed:
        user_dict[user_id] = user_pb2.MakeUser(user_id)
    return user_dict

  def LookupExistingUserIDs(self, cnxn, emails):
    email_dict = {
        email: self.users_by_email[email]
        for email in emails
        if email in self.users_by_email}
    return email_dict

  def LookupUserIDs(self, cnxn, emails, autocreate=False,
                    allowgroups=False):
    email_dict = {}
    needed_emails = [email.lower() for email in emails
                     if email
                     and not framework_constants.NO_VALUE_RE.match(email)]
    for email in needed_emails:
      user_id = self.users_by_email.get(email)
      if not user_id:
        if autocreate and validate.IsValidEmail(email):
          self._CreateUser(cnxn, email)
          user_id = self.users_by_email.get(email)
        elif not autocreate:
          raise exceptions.NoSuchUserException('%r' % email)
      if user_id:
        email_dict[email] = user_id
    return email_dict

  def LookupUserEmail(self, _cnxn, user_id):
    email = self.users_by_id.get(user_id)
    if not email:
      raise exceptions.NoSuchUserException('No user has ID %r' % user_id)
    return email

  def LookupUserEmails(self, cnxn, user_ids, ignore_missed=False):
    if ignore_missed:
      user_dict = {}
      for user_id in user_ids:
        try:
          user_dict[user_id] = self.LookupUserEmail(cnxn, user_id)
        except exceptions.NoSuchUserException:
          continue
      return user_dict
    user_dict = {
        user_id: self.LookupUserEmail(cnxn, user_id)
        for user_id in user_ids}
    return user_dict

  def UpdateUser(self, _cnxn, user_id, user):
    """Updates the user pb."""
    self.test_users[user_id] = user

  def UpdateUserBan(self, _cnxn, user_id, user, is_banned=None,
                    banned_reason=None):
    """Updates the user pb."""
    self.test_users[user_id] = user
    user.banned = banned_reason if is_banned else ''

  def GetPendingLinkedInvites(self, cnxn, user_id):
    invite_as_parent = [row[1] for row in self.invite_rows
                        if row[0] == user_id]
    invite_as_child = [row[0] for row in self.invite_rows
                       if row[1] == user_id]
    return invite_as_parent, invite_as_child

  def InviteLinkedParent(self, cnxn, parent_id, child_id):
    self.invite_rows.append((parent_id, child_id))

  def AcceptLinkedChild(self, cnxn, parent_id, child_id):
    if (parent_id, child_id) not in self.invite_rows:
      raise exceptions.InputException('No such invite')
    self.linked_account_rows.append((parent_id, child_id))
    self.invite_rows = [
        (p_id, c_id) for (p_id, c_id) in self.invite_rows
        if p_id != parent_id and c_id != child_id]
    self.GetUser(cnxn, parent_id).linked_child_ids.append(child_id)
    self.GetUser(cnxn, child_id).linked_parent_id = parent_id

  def UnlinkAccounts(self, _cnxn, parent_id, child_id):
    """Delete a linked-account relationship."""
    if not parent_id:
      raise exceptions.InputException('Parent account is missing')
    if not child_id:
      raise exceptions.InputException('Child account is missing')
    self.linked_account_rows = [(p, c) for (p, c) in self.linked_account_rows
                                if (p, c) != (parent_id, child_id)]

  def UpdateUserSettings(
      self, cnxn, user_id, user, notify=None, notify_starred=None,
      email_compact_subject=None, email_view_widget=None,
      notify_starred_ping=None, obscure_email=None, after_issue_update=None,
      is_site_admin=None, is_banned=None, banned_reason=None,
      keep_people_perms_open=None, preview_on_hover=None,
      vacation_message=None):
    # notifications
    if notify is not None:
      user.notify_issue_change = notify
    if notify_starred is not None:
      user.notify_starred_issue_change = notify_starred
    if notify_starred_ping is not None:
      user.notify_starred_ping = notify_starred_ping
    if email_compact_subject is not None:
      user.email_compact_subject = email_compact_subject
    if email_view_widget is not None:
      user.email_view_widget = email_view_widget

    # display options
    if after_issue_update is not None:
      user.after_issue_update = user_pb2.IssueUpdateNav(after_issue_update)
    if preview_on_hover is not None:
      user.preview_on_hover = preview_on_hover
    if keep_people_perms_open is not None:
      user.keep_people_perms_open = keep_people_perms_open

    # misc
    if obscure_email is not None:
      user.obscure_email = obscure_email

    # admin
    if is_site_admin is not None:
      user.is_site_admin = is_site_admin
    if is_banned is not None:
      if is_banned:
        user.banned = banned_reason or 'No reason given'
      else:
        user.reset('banned')

    # user availability
    if vacation_message is not None:
      user.vacation_message = vacation_message

    return self.UpdateUser(cnxn, user_id, user)

  def GetUsersPrefs(self, cnxn, user_ids, use_cache=True):
    for user_id in user_ids:
      if user_id not in self.prefs_dict:
        self.prefs_dict[user_id] = user_pb2.UserPrefs(user_id=user_id)
    return self.prefs_dict

  def GetUserPrefs(self, cnxn, user_id, use_cache=True):
    """Return a UserPrefs PB for the requested user ID."""
    prefs_dict = self.GetUsersPrefs(cnxn, [user_id], use_cache=use_cache)
    return prefs_dict[user_id]

  def GetUserPrefsByEmail(self, cnxn, email, use_cache=True):
    """Return a UserPrefs PB for the requested email, or an empty UserPrefs."""
    try:
      user_id = self.LookupUserID(cnxn, email)
      user_prefs = self.GetUserPrefs(cnxn, user_id, use_cache=use_cache)
    except exceptions.NoSuchUserException:
      user_prefs = user_pb2.UserPrefs()
    return user_prefs

  def SetUserPrefs(self, cnxn, user_id, pref_values):
    userprefs = self.GetUserPrefs(cnxn, user_id)
    names_to_overwrite = {upv.name for upv in pref_values}
    userprefs.prefs = [upv for upv in userprefs.prefs
                       if upv.name not in names_to_overwrite]
    userprefs.prefs.extend(pref_values)

  def ExpungeUsers(self, cnxn, user_ids):
    for user_id in user_ids:
      self.test_users.pop(user_id, None)
      self.prefs_dict.pop(user_id, None)
      email = self.users_by_id.pop(user_id, None)
      if email:
        self.users_by_email.pop(email, None)

    self.invite_rows = [row for row in self.invite_rows
                        if row[0] not in user_ids and row[1] not in user_ids]
    self.linked_account_rows = [
        row for row in self.linked_account_rows
        if row[0] not in user_ids and row[1] not in user_ids]

  def TotalUsersCount(self, cnxn):
    return len(self.users_by_id) - 1 if (
        framework_constants.DELETED_USER_ID in self.users_by_id
    ) else len(self.users_by_id)

  def GetAllUserEmailsBatch(self, cnxn, limit=1000, offset=0):
    sorted_user_ids = sorted(self.users_by_id.keys())
    sorted_user_ids = [
        user_id for user_id in sorted_user_ids
        if user_id != framework_constants.DELETED_USER_ID]
    emails = []
    for i in range(offset, offset + limit):
      try:
        user_id = sorted_user_ids[i]
        if user_id != framework_constants.DELETED_USER_ID:
          emails.append(self.users_by_id[user_id])
      except IndexError:
        break
    return emails

  def GetRecentlyVisitedHotlists(self, _cnxn, user_id):
    try:
      return self.visited_hotlists[user_id]
    except KeyError:
      return []

  def AddVisitedHotlist(self, _cnxn, user_id, hotlist_id, commit=True):
    try:
      user_visited_tuples = self.visited_hotlists[user_id]
      self.visited_hotlists[user_id] = [
          hid for hid in user_visited_tuples if hid != hotlist_id]
    except KeyError:
      self.visited_hotlists[user_id] = []
    self.visited_hotlists[user_id].append(hotlist_id)

  def ExpungeUsersHotlistsHistory(self, cnxn, user_ids, commit=True):
    for user_id in user_ids:
      self.visited_hotlists.pop(user_id, None)


class AbstractStarService(object):
  """Fake StarService."""

  def __init__(self):
    self.stars_by_item_id = {}
    self.stars_by_starrer_id = {}
    self.expunged_item_ids = []

  def ExpungeStars(self, _cnxn, item_id, commit=True, limit=None):
    self.expunged_item_ids.append(item_id)
    old_starrers = self.stars_by_item_id.get(item_id, [])
    self.stars_by_item_id[item_id] = []
    for old_starrer in old_starrers:
      if self.stars_by_starrer_id.get(old_starrer):
        self.stars_by_starrer_id[old_starrer] = [
            it for it in self.stars_by_starrer_id[old_starrer]
            if it != item_id]

  def ExpungeStarsByUsers(self, _cnxn, user_ids, limit=None):
    for user_id in user_ids:
      item_ids = self.stars_by_starrer_id.pop(user_id, [])
      for item_id in item_ids:
        starrers = self.stars_by_item_id.get(item_id, None)
        if starrers:
          self.stars_by_item_id[item_id] = [
              starrer for starrer in starrers if starrer != user_id]

  def LookupItemStarrers(self, _cnxn, item_id):
    return self.stars_by_item_id.get(item_id, [])

  def LookupItemsStarrers(self, cnxn, item_ids):
    return {
        item_id: self.LookupItemStarrers(cnxn, item_id) for item_id in item_ids}

  def LookupStarredItemIDs(self, _cnxn, starrer_user_id):
    return self.stars_by_starrer_id.get(starrer_user_id, [])

  def IsItemStarredBy(self, cnxn, item_id, starrer_user_id):
    return item_id in self.LookupStarredItemIDs(cnxn, starrer_user_id)

  def CountItemStars(self, cnxn, item_id):
    return len(self.LookupItemStarrers(cnxn, item_id))

  def CountItemsStars(self, cnxn, item_ids):
    return {item_id: self.CountItemStars(cnxn, item_id)
            for item_id in item_ids}

  def _SetStar(self, cnxn, item_id, starrer_user_id, starred):
    if starred and not self.IsItemStarredBy(cnxn, item_id, starrer_user_id):
      self.stars_by_item_id.setdefault(item_id, []).append(starrer_user_id)
      self.stars_by_starrer_id.setdefault(starrer_user_id, []).append(item_id)

    elif not starred and self.IsItemStarredBy(cnxn, item_id, starrer_user_id):
      self.stars_by_item_id[item_id].remove(starrer_user_id)
      self.stars_by_starrer_id[starrer_user_id].remove(item_id)

  def SetStar(self, cnxn, item_id, starrer_user_id, starred):
    self._SetStar(cnxn, item_id, starrer_user_id, starred)

  def SetStarsBatch(
      self, cnxn, item_id, starrer_user_ids, starred, commit=True):
    for starrer_user_id in starrer_user_ids:
      self._SetStar(cnxn, item_id, starrer_user_id, starred)

class UserStarService(AbstractStarService):
  pass


class ProjectStarService(AbstractStarService):
  pass


class HotlistStarService(AbstractStarService):
  pass


class IssueStarService(AbstractStarService):

  # pylint: disable=arguments-differ
  def SetStar(
      self, cnxn, services, _config, issue_id, starrer_user_id,
      starred):
    super(IssueStarService, self).SetStar(
        cnxn, issue_id, starrer_user_id, starred)
    try:
      issue = services.issue.GetIssue(cnxn, issue_id)
      issue.star_count += (1 if starred else -1)
    except exceptions.NoSuchIssueException:
      pass

  # pylint: disable=arguments-differ
  def SetStarsBatch(
      self, cnxn, _service, _config, issue_id, starrer_user_ids,
      starred):
    super(IssueStarService, self).SetStarsBatch(
        cnxn, issue_id, starrer_user_ids, starred)

  def SetStarsBatch_SkipIssueUpdate(
      self, cnxn, issue_id, starrer_user_ids, starred, commit=True):
    super(IssueStarService, self).SetStarsBatch(
        cnxn, issue_id, starrer_user_ids, starred)


class ProjectService(object):
  """Fake ProjectService object.

  Provides methods for creating users and projects, which are accessible
  through parts of the real ProjectService interface.
  """

  def __init__(self):
    self.test_projects = {}  # project_name -> project_pb
    self.projects_by_id = {}  # project_id -> project_pb
    self.test_star_manager = None
    self.indexed_projects = {}
    self.unindexed_projects = set()
    self.index_counter = 0
    self.project_commitments = {}
    self.ac_exclusion_ids = {}
    self.no_expand_ids = {}

  def TestAddProject(
      self, name, summary='', state=project_pb2.ProjectState.LIVE,
      owner_ids=None, committer_ids=None, contrib_ids=None,
      issue_notify_address=None, state_reason='', description=None,
      project_id=None, process_inbound_email=None, access=None,
      extra_perms=None):
    """Add a project to the fake ProjectService object.

    Args:
      name: The name of the project. Will replace any existing project under
          the same name.
      summary: The summary string of the project.
      state: Initial state for the project from project_pb2.ProjectState.
      owner_ids: List of user ids for project owners
      committer_ids: List of user ids for project committers
      contrib_ids: List of user ids for project contributors
      issue_notify_address: email address to send issue change notifications
      state_reason: string describing the reason the project is in its current
          state.
      description: The description string for this project
      project_id: A unique integer identifier for the created project.
      process_inbound_email: True to make this project accept inbound email.
      access: One of the values of enum project_pb2.ProjectAccess.
      extra_perms: List of ExtraPerms PBs for project members.

    Returns:
      A populated project PB.
    """
    proj_pb = project_pb2.Project()
    proj_pb.project_id = project_id or hash(name) % 100000
    proj_pb.project_name = name
    proj_pb.summary = summary
    proj_pb.state = state
    proj_pb.state_reason = state_reason
    proj_pb.extra_perms = extra_perms or []
    if description is not None:
      proj_pb.description = description

    self.TestAddProjectMembers(owner_ids, proj_pb, OWNER_ROLE)
    self.TestAddProjectMembers(committer_ids, proj_pb, COMMITTER_ROLE)
    self.TestAddProjectMembers(contrib_ids, proj_pb, CONTRIBUTOR_ROLE)

    if issue_notify_address is not None:
      proj_pb.issue_notify_address = issue_notify_address
    if process_inbound_email is not None:
      proj_pb.process_inbound_email = process_inbound_email
    if access is not None:
      proj_pb.access = access

    self.test_projects[name] = proj_pb
    self.projects_by_id[proj_pb.project_id] = proj_pb
    return proj_pb
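
  # Example usage sketch (illustrative only; the project name, project ID,
  # and member IDs below are made-up values):
  #
  #   project_svc = ProjectService()
  #   project = project_svc.TestAddProject(
  #       'proj', project_id=789, owner_ids=[111], committer_ids=[222])
  #   project_svc.GetProjectByName(None, 'proj')  # returns the same project PB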

  def TestAddProjectMembers(self, user_id_list, proj_pb, role):
    if user_id_list is not None:
      for user_id in user_id_list:
        if role == OWNER_ROLE:
          proj_pb.owner_ids.append(user_id)
        elif role == COMMITTER_ROLE:
          proj_pb.committer_ids.append(user_id)
        elif role == CONTRIBUTOR_ROLE:
          proj_pb.contributor_ids.append(user_id)

  def LookupProjectIDs(self, cnxn, project_names):
    return {
        project_name: self.test_projects[project_name].project_id
        for project_name in project_names
        if project_name in self.test_projects}

  def LookupProjectNames(self, cnxn, project_ids):
    projects_dict = self.GetProjects(cnxn, project_ids)
    return {p.project_id: p.project_name
            for p in projects_dict.values()}

  def CreateProject(
      self, _cnxn, project_name, owner_ids, committer_ids,
      contributor_ids, summary, description,
      state=project_pb2.ProjectState.LIVE, access=None,
      read_only_reason=None,
      home_page=None, docs_url=None, source_url=None,
      logo_gcs_id=None, logo_file_name=None):
    """Create and store a Project with the given attributes."""
    if project_name in self.test_projects:
      raise exceptions.ProjectAlreadyExists()
    project = self.TestAddProject(
        project_name, summary=summary, state=state,
        owner_ids=owner_ids, committer_ids=committer_ids,
        contrib_ids=contributor_ids, description=description,
        access=access)
    return project.project_id

  def ExpungeProject(self, _cnxn, project_id):
    project = self.projects_by_id.get(project_id)
    if project:
      self.test_projects.pop(project.project_name, None)

  def GetProjectsByName(self, _cnxn, project_name_list, use_cache=True):
    return {
        pn: self.test_projects[pn] for pn in project_name_list
        if pn in self.test_projects}

  def GetProjectByName(self, _cnxn, name, use_cache=True):
    return self.test_projects.get(name)

  def GetProjectList(self, cnxn, project_id_list, use_cache=True):
    project_dict = self.GetProjects(cnxn, project_id_list, use_cache=use_cache)
    return [project_dict[pid] for pid in project_id_list
            if pid in project_dict]

  def GetVisibleLiveProjects(
      self, _cnxn, logged_in_user, effective_ids, domain=None, use_cache=True):
    project_ids = list(self.projects_by_id.keys())
    visible_project_ids = []
    for pid in project_ids:
      can_view = permissions.UserCanViewProject(
          logged_in_user, effective_ids, self.projects_by_id[pid])
      different_domain = framework_helpers.GetNeededDomain(
          self.projects_by_id[pid].project_name, domain)
      if can_view and not different_domain:
        visible_project_ids.append(pid)

    return visible_project_ids

  def GetProjects(self, _cnxn, project_ids, use_cache=True):
    result = {}
    for project_id in project_ids:
      project = self.projects_by_id.get(project_id)
      if project:
        result[project_id] = project
      else:
        raise exceptions.NoSuchProjectException(project_id)
    return result

  def GetAllProjects(self, _cnxn, use_cache=True):
    result = {}
    for project_id in self.projects_by_id:
      project = self.projects_by_id.get(project_id)
      result[project_id] = project
    return result

  def GetProject(self, cnxn, project_id, use_cache=True):
    """Load the specified project from the database."""
    project_id_dict = self.GetProjects(cnxn, [project_id], use_cache=use_cache)
    if project_id not in project_id_dict:
      raise exceptions.NoSuchProjectException()
    return project_id_dict[project_id]

  def GetProjectCommitments(self, _cnxn, project_id):
    if project_id in self.project_commitments:
      return self.project_commitments[project_id]

    project_commitments = project_pb2.ProjectCommitments()
    project_commitments.project_id = project_id
    return project_commitments

  def TestStoreProjectCommitments(self, project_commitments):
    key = project_commitments.project_id
    self.project_commitments[key] = project_commitments

  def GetProjectAutocompleteExclusion(self, cnxn, project_id):
    return (self.ac_exclusion_ids.get(project_id, []),
            self.no_expand_ids.get(project_id, []))

  def UpdateProject(
      self,
      _cnxn,
      project_id,
      summary=None,
      description=None,
      state=None,
      state_reason=None,
      access=None,
      issue_notify_address=None,
      attachment_bytes_used=None,
      attachment_quota=None,
      moved_to=None,
      process_inbound_email=None,
      only_owners_remove_restrictions=None,
      read_only_reason=None,
      cached_content_timestamp=None,
      only_owners_see_contributors=None,
      delete_time=None,
      recent_activity=None,
      revision_url_format=None,
      home_page=None,
      docs_url=None,
      source_url=None,
      logo_gcs_id=None,
      logo_file_name=None,
      issue_notify_always_detailed=None,
      commit=True):
    project = self.projects_by_id.get(project_id)
    if not project:
      raise exceptions.NoSuchProjectException(
          'Project "%s" not found!' % project_id)

    # TODO(jrobbins): implement all passed arguments - probably as a utility
    # method shared with the real persistence implementation.
    if read_only_reason is not None:
      project.read_only_reason = read_only_reason
    if attachment_bytes_used is not None:
      project.attachment_bytes_used = attachment_bytes_used

  def UpdateProjectRoles(
      self, _cnxn, project_id, owner_ids, committer_ids,
      contributor_ids, now=None):
    project = self.projects_by_id.get(project_id)
    if not project:
      raise exceptions.NoSuchProjectException(
          'Project "%s" not found!' % project_id)

    project.owner_ids = owner_ids
    project.committer_ids = committer_ids
    project.contributor_ids = contributor_ids

  def MarkProjectDeletable(
      self, _cnxn, project_id, _config_service):
    project = self.projects_by_id[project_id]
    project.project_name = 'DELETABLE_%d' % project_id
    project.state = project_pb2.ProjectState.DELETABLE

  def UpdateRecentActivity(self, _cnxn, _project_id, now=None):
    pass

  def GetUserRolesInAllProjects(self, _cnxn, effective_ids):
    owned_project_ids = set()
    membered_project_ids = set()
    contrib_project_ids = set()

    for project in self.projects_by_id.values():
      if not effective_ids.isdisjoint(project.owner_ids):
        owned_project_ids.add(project.project_id)
      elif not effective_ids.isdisjoint(project.committer_ids):
        membered_project_ids.add(project.project_id)
      elif not effective_ids.isdisjoint(project.contributor_ids):
        contrib_project_ids.add(project.project_id)

    return owned_project_ids, membered_project_ids, contrib_project_ids

  def GetProjectMemberships(self, _cnxn, effective_ids, use_cache=True):
    # type: MonorailConnection, Collection[int], bool ->
    #     Mapping[int, Collection[int]]
    projects_by_user_id = collections.defaultdict(set)

    for project in self.projects_by_id.values():
      member_ids = set(
          itertools.chain(
              project.owner_ids, project.committer_ids,
              project.contributor_ids))
      for user_id in effective_ids:
        if user_id in member_ids:
          projects_by_user_id[user_id].add(project.project_id)
    return projects_by_user_id

  def ExpungeUsersInProjects(self, cnxn, user_ids, limit=None):
    for project in self.projects_by_id.values():
      project.owner_ids = [owner_id for owner_id in project.owner_ids
                           if owner_id not in user_ids]
      project.committer_ids = [com_id for com_id in project.committer_ids
                               if com_id not in user_ids]
      project.contributor_ids = [con_id for con_id in project.contributor_ids
                                 if con_id not in user_ids]


class ConfigService(object):
  """Fake version of ConfigService that just works in-RAM."""

  def __init__(self, user_id=None):
    self.project_configs = {}
    self.next_field_id = 123
    self.next_component_id = 345
    self.next_template_id = 23
    self.expunged_configs = []
    self.expunged_users_in_configs = []
    self.component_ids_to_templates = {}
    self.label_to_id = {}
    self.id_to_label = {}
    self.strict = False  # Set true to raise more exceptions like real class.

  def TestAddLabelsDict(self, label_to_id):
    self.label_to_id = label_to_id
    self.id_to_label = {
        label_id: label
        for label, label_id in list(self.label_to_id.items())}

  def TestAddFieldDef(self, fd):
    self.project_configs[fd.project_id].field_defs.append(fd)

  def TestAddApprovalDef(self, ad, project_id):
    self.project_configs[project_id].approval_defs.append(ad)

  def ExpungeConfig(self, _cnxn, project_id):
    self.expunged_configs.append(project_id)

  def ExpungeUsersInConfigs(self, _cnxn, user_ids, limit=None):
    self.expunged_users_in_configs.extend(user_ids)

  def GetLabelDefRows(self, cnxn, project_id, use_cache=True):
    """This always returns empty results. Mock it to test other cases."""
    return []

  def GetLabelDefRowsAnyProject(self, cnxn, where=None):
    """This always returns empty results. Mock it to test other cases."""
    return []

  def LookupLabel(self, cnxn, project_id, label_id):
    if label_id in self.id_to_label:
      return self.id_to_label[label_id]
    if label_id == 999:
      return None
    return 'label_%d_%d' % (project_id, label_id)

  def LookupLabelID(self, cnxn, project_id, label, autocreate=True):
    if label in self.label_to_id:
      return self.label_to_id[label]
    return 1

  def LookupLabelIDs(self, cnxn, project_id, labels, autocreate=False):
    ids = []
    next_label_id = 0
    if self.id_to_label.keys():
      # dict.keys() has no .sort() on Python 3, so use sorted().
      existing_ids = sorted(self.id_to_label.keys())
      next_label_id = existing_ids[-1] + 1
    for label in labels:
      if self.label_to_id.get(label) is not None:
        ids.append(self.label_to_id[label])
      elif autocreate:
        self.label_to_id[label] = next_label_id
        self.id_to_label[next_label_id] = label
        ids.append(next_label_id)
        next_label_id += 1
    return ids

  def LookupIDsOfLabelsMatching(self, cnxn, project_id, regex):
    return [1, 2, 3]

  def LookupStatus(self, cnxn, project_id, status_id):
    return 'status_%d_%d' % (project_id, status_id)

  def LookupStatusID(self, cnxn, project_id, status, autocreate=True):
    if status:
      return 1
    else:
      return 0

  def LookupStatusIDs(self, cnxn, project_id, statuses):
    return [idx for idx, _status in enumerate(statuses)]

  def LookupClosedStatusIDs(self, cnxn, project_id):
    return [7, 8, 9]

  def StoreConfig(self, _cnxn, config):
    self.project_configs[config.project_id] = config

  def GetProjectConfig(self, _cnxn, project_id, use_cache=True):
    if project_id in self.project_configs:
      return self.project_configs[project_id]
    elif self.strict:
      raise exceptions.NoSuchProjectException()
    else:
      return tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)

  def GetProjectConfigs(self, _cnxn, project_ids, use_cache=True):
    config_dict = {}
    for project_id in project_ids:
      if project_id in self.project_configs:
        config_dict[project_id] = self.project_configs[project_id]
      elif not self.strict:
        config_dict[project_id] = tracker_bizobj.MakeDefaultProjectIssueConfig(
            project_id)
    return config_dict

  def UpdateConfig(
      self, cnxn, project, well_known_statuses=None,
      statuses_offer_merge=None, well_known_labels=None,
      excl_label_prefixes=None, default_template_for_developers=None,
      default_template_for_users=None, list_prefs=None, restrict_to_known=None,
      approval_defs=None):
    project_id = project.project_id
    project_config = self.GetProjectConfig(cnxn, project_id, use_cache=False)

    if well_known_statuses is not None:
      tracker_bizobj.SetConfigStatuses(project_config, well_known_statuses)

    if statuses_offer_merge is not None:
      project_config.statuses_offer_merge = statuses_offer_merge

    if well_known_labels is not None:
      tracker_bizobj.SetConfigLabels(project_config, well_known_labels)

    if excl_label_prefixes is not None:
      project_config.exclusive_label_prefixes = excl_label_prefixes

    if approval_defs is not None:
      tracker_bizobj.SetConfigApprovals(project_config, approval_defs)

    if default_template_for_developers is not None:
      project_config.default_template_for_developers = (
          default_template_for_developers)
    if default_template_for_users is not None:
      project_config.default_template_for_users = default_template_for_users

    if list_prefs:
      default_col_spec, default_sort_spec, x_attr, y_attr, m_d_q = list_prefs
      project_config.default_col_spec = default_col_spec
      project_config.default_sort_spec = default_sort_spec
      project_config.default_x_attr = x_attr
      project_config.default_y_attr = y_attr
      project_config.member_default_query = m_d_q

    if restrict_to_known is not None:
      project_config.restrict_to_known = restrict_to_known

    self.StoreConfig(cnxn, project_config)
    return project_config

  def CreateFieldDef(
      self,
      cnxn,
      project_id,
      field_name,
      field_type_str,
      applic_type,
      applic_pred,
      is_required,
      is_niche,
      is_multivalued,
      min_value,
      max_value,
      regex,
      needs_member,
      needs_perm,
      grants_perm,
      notify_on,
      date_action_str,
      docstring,
      admin_ids,
      editor_ids,
      approval_id=None,
      is_phase_field=False,
      is_restricted_field=False):
    config = self.GetProjectConfig(cnxn, project_id)
    field_type = tracker_pb2.FieldTypes(field_type_str)
    field_id = self.next_field_id
    self.next_field_id += 1
    fd = tracker_bizobj.MakeFieldDef(
        field_id, project_id, field_name, field_type, applic_type, applic_pred,
        is_required, is_niche, is_multivalued, min_value, max_value, regex,
        needs_member, needs_perm, grants_perm, notify_on, date_action_str,
        docstring, False, approval_id, is_phase_field, is_restricted_field,
        admin_ids=admin_ids, editor_ids=editor_ids)
    config.field_defs.append(fd)
    self.StoreConfig(cnxn, config)
    return field_id

  def LookupFieldID(self, cnxn, project_id, field):
    config = self.GetProjectConfig(cnxn, project_id)
    for fd in config.field_defs:
      if fd.field_name == field:
        return fd.field_id

    return None

  def SoftDeleteFieldDefs(self, cnxn, project_id, field_ids):
    config = self.GetProjectConfig(cnxn, project_id)
    for fd in config.field_defs:
      if fd.field_id in field_ids:
        fd.is_deleted = True
    self.StoreConfig(cnxn, config)

  def UpdateFieldDef(
      self,
      cnxn,
      project_id,
      field_id,
      field_name=None,
      applicable_type=None,
      applicable_predicate=None,
      is_required=None,
      is_niche=None,
      is_multivalued=None,
      min_value=None,
      max_value=None,
      regex=None,
      needs_member=None,
      needs_perm=None,
      grants_perm=None,
      notify_on=None,
      date_action=None,
      docstring=None,
      admin_ids=None,
      editor_ids=None,
      is_restricted_field=None):
    config = self.GetProjectConfig(cnxn, project_id)
    fd = tracker_bizobj.FindFieldDefByID(field_id, config)
    # pylint: disable=multiple-statements
    if field_name is not None: fd.field_name = field_name
    if applicable_type is not None: fd.applicable_type = applicable_type
    if applicable_predicate is not None:
      fd.applicable_predicate = applicable_predicate
    if is_required is not None: fd.is_required = is_required
    if is_niche is not None: fd.is_niche = is_niche
| 1439 | if is_multivalued is not None: fd.is_multivalued = is_multivalued |
| 1440 | if min_value is not None: fd.min_value = min_value |
| 1441 | if max_value is not None: fd.max_value = max_value |
| 1442 | if regex is not None: fd.regex = regex |
| 1443 | if date_action is not None: |
| 1444 | fd.date_action = config_svc.DATE_ACTION_ENUM.index(date_action) |
| 1445 | if docstring is not None: fd.docstring = docstring |
| 1446 | if admin_ids is not None: fd.admin_ids = admin_ids |
| 1447 | if editor_ids is not None: |
| 1448 | fd.editor_ids = editor_ids |
| 1449 | if is_restricted_field is not None: |
| 1450 | fd.is_restricted_field = is_restricted_field |
| 1451 | self.StoreConfig(cnxn, config) |
| 1452 | |
| 1453 | def CreateComponentDef( |
| 1454 | self, cnxn, project_id, path, docstring, deprecated, admin_ids, cc_ids, |
| 1455 | created, creator_id, label_ids): |
| 1456 | config = self.GetProjectConfig(cnxn, project_id) |
| 1457 | cd = tracker_bizobj.MakeComponentDef( |
| 1458 | self.next_component_id, project_id, path, docstring, deprecated, |
| 1459 | admin_ids, cc_ids, created, creator_id, label_ids=label_ids) |
| 1460 | config.component_defs.append(cd) |
| 1461 | self.next_component_id += 1 |
| 1462 | self.StoreConfig(cnxn, config) |
| 1463 | return self.next_component_id - 1 |
| 1464 | |
| 1465 | def UpdateComponentDef( |
| 1466 | self, cnxn, project_id, component_id, path=None, docstring=None, |
| 1467 | deprecated=None, admin_ids=None, cc_ids=None, created=None, |
| 1468 | creator_id=None, modified=None, modifier_id=None, label_ids=None): |
| 1469 | config = self.GetProjectConfig(cnxn, project_id) |
| 1470 | cd = tracker_bizobj.FindComponentDefByID(component_id, config) |
| 1471 | if path is not None: |
| 1472 | assert path |
| 1473 | cd.path = path |
| 1474 | # pylint: disable=multiple-statements |
| 1475 | if docstring is not None: cd.docstring = docstring |
| 1476 | if deprecated is not None: cd.deprecated = deprecated |
| 1477 | if admin_ids is not None: cd.admin_ids = admin_ids |
| 1478 | if cc_ids is not None: cd.cc_ids = cc_ids |
| 1479 | if created is not None: cd.created = created |
| 1480 | if creator_id is not None: cd.creator_id = creator_id |
| 1481 | if modified is not None: cd.modified = modified |
| 1482 | if modifier_id is not None: cd.modifier_id = modifier_id |
| 1483 | if label_ids is not None: cd.label_ids = label_ids |
| 1484 | self.StoreConfig(cnxn, config) |
| 1485 | |
| 1486 | def DeleteComponentDef(self, cnxn, project_id, component_id): |
| 1487 | """Delete the specified component definition.""" |
| 1488 | config = self.GetProjectConfig(cnxn, project_id) |
| 1489 | config.component_defs = [ |
| 1490 | cd for cd in config.component_defs |
| 1491 | if cd.component_id != component_id] |
| 1492 | self.StoreConfig(cnxn, config) |
| 1493 | |
| 1494 | def InvalidateMemcache(self, issues, key_prefix=''): |
| 1495 | pass |
| 1496 | |
| 1497 | def InvalidateMemcacheForEntireProject(self, project_id): |
| 1498 | pass |
| 1499 | |
| 1500 | |
| 1501 | class IssueService(object): |
| 1502 | """Fake version of IssueService that just works in-RAM.""" |
| 1503 | # pylint: disable=unused-argument |
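| | # Minimal usage sketch (assumes a tracker_pb2.Issue built by the test, |
| | # e.g. with this module's MakeTestIssue helper if available): |
| | # |
| | #   issue_svc = IssueService() |
| | #   issue_svc.TestAddIssue(issue) |
| | #   same_issue = issue_svc.GetIssue('fake cnxn', issue.issue_id) |
| | #   comments = issue_svc.GetCommentsForIssue('fake cnxn', issue.issue_id) |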
| 1504 | |
| 1505 | def __init__(self, user_id=None): |
| 1506 | self.user_id = user_id |
| 1507 | # Dictionary {project_id: issue_pb_dict} |
| 1508 | # where issue_pb_dict is a dictionary of the form |
| 1509 | # {local_id: issue_pb} |
| 1510 | self.issues_by_project = {} |
| 1511 | self.issues_by_iid = {} |
| 1512 | # Dictionary {project_id: comment_pb_dict} |
| 1513 | # where comment_pb_dict is a dictionary of the form |
| 1514 | # {local_id: comment_pb_list} |
| 1515 | self.comments_by_project = {} |
| 1516 | self.comments_by_iid = {} |
| 1517 | self.comments_by_cid = {} |
| 1518 | self.attachments_by_id = {} |
| 1519 | |
| 1520 | # Set of issue IDs for issues that have been indexed by calling |
| 1521 | # IndexIssues(). |
| 1522 | self.indexed_issue_iids = set() |
| 1523 | |
| 1524 | # Set of issue IDs for issues that have been moved by calling MoveIssue(). |
| 1525 | self.moved_back_iids = set() |
| 1526 | |
| 1527 | # Dict of issue IDs mapped to other issue IDs to represent moved issues. |
| 1528 | self.moved_issues = {} |
| 1529 | |
| 1530 | # Test-only indication that the indexer would have been called |
| 1531 | # by the real DITPersist. |
| 1532 | self.indexer_called = False |
| 1533 | |
| 1534 | # Test-only sequences of updated and enqueued issues. |
| 1535 | self.updated_issues = [] |
| 1536 | self.enqueued_issues = [] # issue_ids |
| 1537 | |
| 1538 | # Test-only sequences of expunged issues, projects, and users. |
| 1539 | self.expunged_issues = [] |
| 1540 | self.expunged_former_locations = [] |
| 1541 | self.expunged_local_ids = [] |
| 1542 | self.expunged_users_in_issues = [] |
| 1543 | |
| 1544 | # Test-only indicators that methods were called. |
| 1545 | self.get_all_issues_in_project_called = False |
| 1546 | self.update_issues_called = False |
| 1547 | self.enqueue_issues_called = False |
| 1548 | self.get_issue_activity_called = False |
| 1549 | |
| 1550 | # The next id to return if it is > 0. |
| 1551 | self.next_id = -1 |
| 1552 | |
| 1553 | def UpdateIssues( |
| 1554 | self, cnxn, issues, update_cols=None, just_derived=False, |
| 1555 | commit=True, invalidate=True): |
| 1556 | self.update_issues_called = True |
| 1557 | assert all(not issue.assume_stale for issue in issues) |
| 1558 | self.updated_issues.extend(issues) |
| 1559 | |
| 1560 | def GetIssueActivity( |
| 1561 | self, cnxn, num=50, before=None, after=None, |
| 1562 | project_ids=None, user_ids=None, ascending=False): |
| 1563 | self.get_issue_activity_called = True |
| 1564 | comments_dict = self.comments_by_cid |
| 1565 | comments = [] |
| 1566 | for value in comments_dict.values(): |
| 1567 | if project_ids is not None: |
| 1568 | if value.issue_id > 0 and value.issue_id in self.issues_by_iid: |
| 1569 | issue = self.issues_by_iid[value.issue_id] |
| 1570 | if issue.project_id in project_ids: |
| 1571 | comments.append(value) |
| 1572 | elif user_ids is not None: |
| 1573 | if value.user_id in user_ids: |
| 1574 | comments.append(value) |
| 1575 | else: |
| 1576 | comments.append(value) |
| 1577 | return comments |
| 1578 | |
| 1579 | def EnqueueIssuesForIndexing(self, _cnxn, issue_ids, commit=True): |
| 1580 | self.enqueue_issues_called = True |
| 1581 | for i in issue_ids: |
| 1582 | if i not in self.enqueued_issues: |
| 1583 | self.enqueued_issues.append(i) |
| 1584 | |
| 1585 | def ExpungeIssues(self, _cnxn, issue_ids): |
| 1586 | self.expunged_issues.extend(issue_ids) |
| 1587 | |
| 1588 | def ExpungeFormerLocations(self, _cnxn, project_id): |
| 1589 | self.expunged_former_locations.append(project_id) |
| 1590 | |
| 1591 | def ExpungeLocalIDCounters(self, _cnxn, project_id): |
| 1592 | self.expunged_local_ids.append(project_id) |
| 1593 | |
| 1594 | def TestAddIssue(self, issue, importer_id=None): |
| 1595 | project_id = issue.project_id |
| 1596 | self.issues_by_project.setdefault(project_id, {}) |
| 1597 | self.issues_by_project[project_id][issue.local_id] = issue |
| 1598 | self.issues_by_iid[issue.issue_id] = issue |
| 1599 | if issue.issue_id not in self.enqueued_issues: |
| 1600 | self.enqueued_issues.append(issue.issue_id) |
| 1601 | self.enqueue_issues_called = True |
| 1602 | |
| 1603 | # Adding a new issue should add the first comment to the issue |
| 1604 | comment = tracker_pb2.IssueComment() |
| 1605 | comment.project_id = issue.project_id |
| 1606 | comment.issue_id = issue.issue_id |
| 1607 | comment.content = issue.summary |
| 1608 | comment.timestamp = issue.opened_timestamp |
| 1609 | comment.is_description = True |
| 1610 | if issue.reporter_id: |
| 1611 | comment.user_id = issue.reporter_id |
| 1612 | if importer_id: |
| 1613 | comment.importer_id = importer_id |
| 1614 | comment.sequence = 0 |
| 1615 | self.TestAddComment(comment, issue.local_id) |
| 1616 | |
| 1617 | def TestAddMovedIssueRef(self, source_project_id, source_local_id, |
| 1618 | target_project_id, target_local_id): |
| 1619 | self.moved_issues[(source_project_id, source_local_id)] = ( |
| 1620 | target_project_id, target_local_id) |
| 1621 | |
| 1622 | def TestAddComment(self, comment, local_id): |
| 1623 | pid = comment.project_id |
| 1624 | if not comment.id: |
| 1625 | comment.id = len(self.comments_by_cid) |
| 1626 | |
| 1627 | self.comments_by_project.setdefault(pid, {}) |
| 1628 | self.comments_by_project[pid].setdefault(local_id, []).append(comment) |
| 1629 | self.comments_by_iid.setdefault(comment.issue_id, []).append(comment) |
| 1630 | self.comments_by_cid[comment.id] = comment |
| 1631 | |
| 1632 | def TestAddAttachment(self, attachment, comment_id, issue_id): |
| 1633 | if not attachment.attachment_id: |
| 1634 | attachment.attachment_id = len(self.attachments_by_id) |
| 1635 | |
| 1636 | aid = attachment.attachment_id |
| 1637 | self.attachments_by_id[aid] = attachment, comment_id, issue_id |
| 1638 | comment = self.comments_by_cid[comment_id] |
| 1639 | if attachment not in comment.attachments: |
| 1640 | comment.attachments.extend([attachment]) |
| 1641 | |
| 1642 | def SoftDeleteAttachment( |
| 1643 | self, _cnxn, _issue, comment, attach_id, _user_service, delete=True, |
| 1644 | index_now=False): |
| 1645 | attachment = None |
| 1646 | for attach in comment.attachments: |
| 1647 | if attach.attachment_id == attach_id: |
| 1648 | attachment = attach |
| 1649 | if not attachment: |
| 1650 | return |
| 1651 | attachment.deleted = delete |
| 1652 | |
| 1653 | def GetAttachmentAndContext(self, _cnxn, attachment_id): |
| 1654 | if attachment_id in self.attachments_by_id: |
| 1655 | attach, comment_id, issue_id = self.attachments_by_id[attachment_id] |
| 1656 | if not attach.deleted: |
| 1657 | return attach, comment_id, issue_id |
| 1658 | |
| 1659 | raise exceptions.NoSuchAttachmentException() |
| 1660 | |
| 1661 | def GetComments( |
| 1662 | self, _cnxn, where=None, order_by=None, content_only=False, **kwargs): |
| 1663 | # This is a very limited subset of what the real GetComments() can do. |
| 1664 | cid = kwargs.get('id') |
| 1665 | |
| 1666 | comment = self.comments_by_cid.get(cid) |
| 1667 | if comment: |
| 1668 | return [comment] |
| 1669 | else: |
| 1670 | return [] |
| 1671 | |
| 1672 | def GetComment(self, cnxn, comment_id): |
| 1673 | """Get the requested comment, or raise an exception.""" |
| 1674 | comments = self.GetComments(cnxn, id=comment_id) |
| 1675 | if len(comments) == 1: |
| 1676 | return comments[0] |
| 1677 | |
| 1678 | raise exceptions.NoSuchCommentException() |
| 1679 | |
| 1680 | def ResolveIssueRefs(self, cnxn, ref_projects, default_project_name, refs): |
| 1681 | result = [] |
| 1682 | misses = [] |
| 1683 | for project_name, local_id in refs: |
| 1684 | project = ref_projects.get(project_name or default_project_name) |
| 1685 | if not project or project.state == project_pb2.ProjectState.DELETABLE: |
| 1686 | continue # ignore any refs to issues in deleted projects |
| 1687 | try: |
| 1688 | issue = self.GetIssueByLocalID(cnxn, project.project_id, local_id) |
| 1689 | result.append(issue.issue_id) |
| 1690 | except exceptions.NoSuchIssueException: |
| 1691 | misses.append((project.project_id, local_id)) |
| 1692 | |
| 1693 | return result, misses |
| 1694 | |
| 1695 | def LookupIssueRefs(self, cnxn, issue_ids): |
| 1696 | issue_dict, _misses = self.GetIssuesDict(cnxn, issue_ids) |
| 1697 | return { |
| 1698 | issue_id: (issue.project_name, issue.local_id) |
| 1699 | for issue_id, issue in issue_dict.items()} |
| 1700 | |
| 1701 | def GetAllIssuesInProject( |
| 1702 | self, _cnxn, project_id, min_local_id=None, use_cache=True): |
| 1703 | self.get_all_issues_in_project_called = True |
| 1704 | if project_id in self.issues_by_project: |
| 1705 | return list(self.issues_by_project[project_id].values()) |
| 1706 | else: |
| 1707 | return [] |
| 1708 | |
| 1709 | def GetIssuesByLocalIDs( |
| 1710 | self, _cnxn, project_id, local_id_list, use_cache=True, shard_id=None): |
| 1711 | results = [] |
| 1712 | for local_id in local_id_list: |
| 1713 | if (project_id in self.issues_by_project |
| 1714 | and local_id in self.issues_by_project[project_id]): |
| 1715 | results.append(self.issues_by_project[project_id][local_id]) |
| 1716 | |
| 1717 | return results |
| 1718 | |
| 1719 | def GetIssueByLocalID(self, _cnxn, project_id, local_id, use_cache=True): |
| 1720 | try: |
| 1721 | return self.issues_by_project[project_id][local_id] |
| 1722 | except KeyError: |
| 1723 | raise exceptions.NoSuchIssueException() |
| 1724 | |
| 1725 | def GetAnyOnHandIssue(self, issue_ids, start=None, end=None): |
| 1726 | return None # Treat them all like misses. |
| 1727 | |
| 1728 | def GetIssue(self, cnxn, issue_id, use_cache=True): |
| 1729 | issues = self.GetIssues(cnxn, [issue_id], use_cache=use_cache) |
| 1730 | try: |
| 1731 | return issues[0] |
| 1732 | except IndexError: |
| 1733 | raise exceptions.NoSuchIssueException() |
| 1734 | |
| 1735 | def GetCurrentLocationOfMovedIssue(self, cnxn, project_id, local_id): |
| 1736 | key = (project_id, local_id) |
| 1737 | if key in self.moved_issues: |
| 1738 | ref = self.moved_issues[key] |
| 1739 | return ref[0], ref[1] |
| 1740 | return None, None |
| 1741 | |
| 1742 | def GetPreviousLocations(self, cnxn, issue): |
| 1743 | return [] |
| 1744 | |
| 1745 | def GetCommentsByUser(self, cnxn, user_id): |
| 1746 | """Get all comments created by a user, excluding issue descriptions.""" |
| 1747 | comments = [] |
| 1748 | for cid in self.comments_by_cid: |
| 1749 | comment = self.comments_by_cid[cid] |
| 1750 | if comment.user_id == user_id and not comment.is_description: |
| 1751 | comments.append(comment) |
| 1752 | return comments |
| 1753 | |
| 1754 | def GetCommentsByID(self, cnxn, comment_ids, _sequences, use_cache=True, |
| 1755 | shard_id=None): |
| 1756 | """Return all IssueComment PBs by comment ids.""" |
| 1757 | comments = [self.comments_by_cid[cid] for cid in comment_ids] |
| 1758 | return comments |
| 1759 | |
| 1760 | def GetIssueIDsReportedByUser(self, cnxn, user_id): |
| 1761 | """Get the IDs of all issues reported by a user.""" |
| 1762 | ids = [] |
| 1763 | for iid in self.issues_by_iid: |
| 1764 | issue = self.issues_by_iid[iid] |
| 1765 | if issue.reporter_id == user_id: |
| 1766 | ids.append(iid) |
| 1767 | return ids |
| 1768 | |
| 1769 | def LookupIssueIDs(self, _cnxn, project_local_id_pairs): |
| 1770 | hits = [] |
| 1771 | misses = [] |
| 1772 | for (project_id, local_id) in project_local_id_pairs: |
| 1773 | try: |
| 1774 | issue = self.issues_by_project[project_id][local_id] |
| 1775 | hits.append(issue.issue_id) |
| 1776 | except KeyError: |
| 1777 | misses.append((project_id, local_id)) |
| 1778 | |
| 1779 | return hits, misses |
| 1780 | |
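| | # Sketch of the moved-issue bookkeeping used by the method below (project |
| | # and local IDs are illustrative only): |
| | # |
| | #   issue_svc.TestAddMovedIssueRef(789, 1, 790, 5) |
| | #   hits, misses = issue_svc.LookupIssueIDsFollowMoves('cnxn', [(789, 1)]) |
| | #   # The ref is followed to project 790 local_id 5; it is a hit only if |
| | #   # an issue actually exists there, otherwise (790, 5) lands in misses. |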
| 1781 | def LookupIssueIDsFollowMoves(self, _cnxn, project_local_id_pairs): |
| 1782 | hits = [] |
| 1783 | misses = [] |
| 1784 | for pair in project_local_id_pairs: |
| 1785 | project_id, local_id = self.moved_issues.get(pair, pair) |
| 1786 | try: |
| 1787 | issue = self.issues_by_project[project_id][local_id] |
| 1788 | hits.append(issue.issue_id) |
| 1789 | except KeyError: |
| 1790 | misses.append((project_id, local_id)) |
| 1791 | |
| 1792 | return hits, misses |
| 1793 | |
| 1794 | def LookupIssueID(self, _cnxn, project_id, local_id): |
| 1795 | try: |
| 1796 | issue = self.issues_by_project[project_id][local_id] |
| 1797 | except KeyError: |
| 1798 | raise exceptions.NoSuchIssueException() |
| 1799 | return issue.issue_id |
| 1800 | |
| 1801 | def GetCommentsForIssue(self, _cnxn, issue_id): |
| 1802 | comments = self.comments_by_iid.get(issue_id, []) |
| 1803 | for idx, c in enumerate(comments): |
| 1804 | c.sequence = idx |
| 1805 | |
| 1806 | return comments |
| 1807 | |
| 1808 | def InsertIssue(self, cnxn, issue): |
| 1809 | issue.issue_id = issue.project_id * 1000000 + issue.local_id |
| 1810 | self.issues_by_project.setdefault(issue.project_id, {}) |
| 1811 | self.issues_by_project[issue.project_id][issue.local_id] = issue |
| 1812 | self.issues_by_iid[issue.issue_id] = issue |
| 1813 | return issue.issue_id |
| 1814 | |
| 1815 | def CreateIssue( |
| 1816 | self, |
| 1817 | cnxn, |
| 1818 | services, |
| 1819 | issue, |
| 1820 | marked_description, |
| 1821 | attachments=None, |
| 1822 | index_now=False, |
| 1823 | importer_id=None): |
| 1824 | project_id = issue.project_id |
| 1825 | |
| 1826 | issue.local_id = self.AllocateNextLocalID(cnxn, project_id) |
| 1827 | issue.issue_id = project_id * 1000000 + issue.local_id |
| 1828 | |
| 1829 | self.TestAddIssue(issue, importer_id=importer_id) |
| 1830 | comment = self.comments_by_iid[issue.issue_id][0] |
| 1831 | comment.content = marked_description |
| 1832 | return issue, comment |
| 1833 | |
| 1834 | def GetIssueApproval(self, cnxn, issue_id, approval_id, use_cache=True): |
| 1835 | issue = self.GetIssue(cnxn, issue_id, use_cache=use_cache) |
| 1836 | approval = tracker_bizobj.FindApprovalValueByID( |
| 1837 | approval_id, issue.approval_values) |
| 1838 | if approval: |
| 1839 | return issue, approval |
| 1840 | raise exceptions.NoSuchIssueApprovalException() |
| 1841 | |
| 1842 | def UpdateIssueApprovalStatus( |
| 1843 | self, cnxn, issue_id, approval_id, status, setter_id, set_on, |
| 1844 | commit=True): |
| 1845 | issue = self.GetIssue(cnxn, issue_id) |
| 1846 | for av in issue.approval_values: |
| 1847 | if av.approval_id == approval_id: |
| 1848 | av.status = status |
| 1849 | av.setter_id = setter_id |
| 1850 | av.set_on = set_on |
| 1851 | return |
| 1852 | return |
| 1853 | |
| 1854 | def UpdateIssueApprovalApprovers( |
| 1855 | self, cnxn, issue_id, approval_id, approver_ids, commit=True): |
| 1856 | issue = self.GetIssue(cnxn, issue_id) |
| 1857 | for av in issue.approval_values: |
| 1858 | if av.approval_id == approval_id: |
| 1859 | av.approver_ids = approver_ids |
| 1860 | return |
| 1861 | return |
| 1862 | |
| 1863 | def UpdateIssueStructure( |
| 1864 | self, cnxn, config, issue, template, reporter_id, comment_content, |
| 1865 | commit=True, invalidate=True): |
| 1866 | approval_defs_by_id = {ad.approval_id: ad for ad in config.approval_defs} |
| 1867 | issue_avs_by_id = {av.approval_id: av for av in issue.approval_values} |
| 1868 | |
| 1869 | new_issue_approvals = [] |
| 1870 | |
| 1871 | for template_av in template.approval_values: |
| 1872 | existing_issue_av = issue_avs_by_id.get(template_av.approval_id) |
| 1873 | # Keep approval values as-is if they exist in both issue and template. |
| 1874 | if existing_issue_av: |
| 1875 | existing_issue_av.phase_id = template_av.phase_id |
| 1876 | new_issue_approvals.append(existing_issue_av) |
| 1877 | else: |
| 1878 | new_issue_approvals.append(template_av) |
| 1879 | |
| 1880 | # Update all approval surveys so latest ApprovalDef survey changes |
| 1881 | # appear in the converted issue's approval values. |
| 1882 | ad = approval_defs_by_id.get(template_av.approval_id) |
| 1883 | if ad: |
| 1884 | self.CreateIssueComment( |
| 1885 | cnxn, issue, reporter_id, ad.survey, |
| 1886 | is_description=True, approval_id=ad.approval_id, commit=False) |
| 1887 | else: |
| 1888 | logging.info('ApprovalDef not found for approval %r', template_av) |
| 1889 | |
| 1890 | template_phase_by_name = { |
| 1891 | phase.name.lower(): phase for phase in template.phases} |
| 1892 | issue_phase_by_id = {phase.phase_id: phase for phase in issue.phases} |
| 1893 | updated_fvs = [] |
| 1894 | # Trim issue FieldValues or update FieldValue phase_ids |
| 1895 | for fv in issue.field_values: |
| 1896 | # If a fv's phase has the same name as a template's phase, update |
| 1897 | # the fv's phase_id to that of the template phase's. Otherwise, |
| 1898 | # remove the fv. |
| 1899 | if fv.phase_id: |
| 1900 | issue_phase = issue_phase_by_id.get(fv.phase_id) |
| 1901 | if issue_phase and issue_phase.name: |
| 1902 | template_phase = template_phase_by_name.get(issue_phase.name.lower()) |
| 1903 | if template_phase: |
| 1904 | fv.phase_id = template_phase.phase_id |
| 1905 | updated_fvs.append(fv) |
| 1906 | # keep all fvs that do not belong to phases. |
| 1907 | else: |
| 1908 | updated_fvs.append(fv) |
| 1909 | |
| 1910 | fd_names_by_id = {fd.field_id: fd.field_name for fd in config.field_defs} |
| 1911 | amendment = tracker_bizobj.MakeApprovalStructureAmendment( |
| 1912 | [fd_names_by_id.get(av.approval_id) for av in new_issue_approvals], |
| 1913 | [fd_names_by_id.get(av.approval_id) for av in issue.approval_values]) |
| 1914 | |
| 1915 | issue.approval_values = new_issue_approvals |
| 1916 | issue.phases = template.phases |
| 1917 | issue.field_values = updated_fvs |
| 1918 | |
| 1919 | return self.CreateIssueComment( |
| 1920 | cnxn, issue, reporter_id, comment_content, |
| 1921 | amendments=[amendment], commit=False) |
| 1922 | |
| 1923 | def SetUsedLocalID(self, cnxn, project_id): |
| 1924 | self.next_id = self.GetHighestLocalID(cnxn, project_id) + 1 |
| 1925 | |
| 1926 | def AllocateNextLocalID(self, cnxn, project_id): |
| 1927 | return self.GetHighestLocalID(cnxn, project_id) + 1 |
| 1928 | |
| 1929 | def GetHighestLocalID(self, _cnxn, project_id): |
| 1930 | if self.next_id > 0: |
| 1931 | return self.next_id - 1 |
| 1932 | else: |
| 1933 | issue_dict = self.issues_by_project.get(project_id, {}) |
| 1934 | highest = max([0] + [issue.local_id for issue in issue_dict.values()]) |
| 1935 | return highest |
| 1936 | |
| 1937 | def _MakeIssueComment( |
| 1938 | self, project_id, user_id, content, inbound_message=None, |
| 1939 | amendments=None, attachments=None, kept_attachments=None, timestamp=None, |
| 1940 | is_spam=False, is_description=False, approval_id=None, importer_id=None): |
| 1941 | comment = tracker_pb2.IssueComment() |
| 1942 | comment.project_id = project_id |
| 1943 | comment.user_id = user_id |
| 1944 | comment.content = content or '' |
| 1945 | comment.is_spam = is_spam |
| 1946 | comment.is_description = is_description |
| 1947 | if not timestamp: |
| 1948 | timestamp = int(time.time()) |
| 1949 | comment.timestamp = int(timestamp) |
| 1950 | if inbound_message: |
| 1951 | comment.inbound_message = inbound_message |
| 1952 | if amendments: |
| 1953 | comment.amendments.extend(amendments) |
| 1954 | if approval_id: |
| 1955 | comment.approval_id = approval_id |
| 1956 | if importer_id: |
| 1957 | comment.importer_id = importer_id |
| 1958 | return comment |
| 1959 | |
| 1960 | def CopyIssues(self, cnxn, dest_project, issues, user_service, copier_id): |
| 1961 | created_issues = [] |
| 1962 | for target_issue in issues: |
| 1963 | new_issue = tracker_pb2.Issue() |
| 1964 | new_issue.project_id = dest_project.project_id |
| 1965 | new_issue.project_name = dest_project.project_name |
| 1966 | new_issue.summary = target_issue.summary |
| 1967 | new_issue.labels.extend(target_issue.labels) |
| 1968 | new_issue.field_values.extend(target_issue.field_values) |
| 1969 | new_issue.reporter_id = copier_id |
| 1970 | |
| 1971 | timestamp = int(time.time()) |
| 1972 | new_issue.opened_timestamp = timestamp |
| 1973 | new_issue.modified_timestamp = timestamp |
| 1974 | |
| 1975 | target_comments = self.GetCommentsForIssue(cnxn, target_issue.issue_id) |
| 1976 | initial_summary_comment = target_comments[0] |
| 1977 | |
| 1978 | # Note that blocking and merge_into are not copied. |
| 1979 | new_issue.blocked_on_iids = target_issue.blocked_on_iids |
| 1980 | new_issue.blocked_on_ranks = target_issue.blocked_on_ranks |
| 1981 | |
| 1982 | # Create the same summary comment as the target issue. |
| 1983 | comment = self._MakeIssueComment( |
| 1984 | dest_project.project_id, copier_id, initial_summary_comment.content, |
| 1985 | is_description=True) |
| 1986 | |
| 1987 | new_issue.local_id = self.AllocateNextLocalID( |
| 1988 | cnxn, dest_project.project_id) |
| 1989 | issue_id = self.InsertIssue(cnxn, new_issue) |
| 1990 | comment.issue_id = issue_id |
| 1991 | self.InsertComment(cnxn, comment) |
| 1992 | created_issues.append(new_issue) |
| 1993 | |
| 1994 | return created_issues |
| 1995 | |
| 1996 | def MoveIssues(self, cnxn, dest_project, issues, user_service): |
| 1997 | move_to = dest_project.project_id |
| 1998 | self.issues_by_project.setdefault(move_to, {}) |
| 1999 | moved_back_iids = set() |
| 2000 | for issue in issues: |
| 2001 | if issue.issue_id in self.moved_back_iids: |
| 2002 | moved_back_iids.add(issue.issue_id) |
| 2003 | self.moved_back_iids.add(issue.issue_id) |
| 2004 | project_id = issue.project_id |
| 2005 | self.issues_by_project[project_id].pop(issue.local_id) |
| 2006 | issue.local_id = self.AllocateNextLocalID(cnxn, move_to) |
| 2007 | self.issues_by_project[move_to][issue.local_id] = issue |
| 2008 | issue.project_id = move_to |
| 2009 | issue.project_name = dest_project.project_name |
| 2010 | return moved_back_iids |
| 2011 | |
| 2012 | def GetCommentsForIssues(self, _cnxn, issue_ids, content_only=False): |
| 2013 | comments_dict = {} |
| 2014 | for issue_id in issue_ids: |
| 2015 | comments_dict[issue_id] = self.comments_by_iid[issue_id] |
| 2016 | |
| 2017 | return comments_dict |
| 2018 | |
| 2019 | def InsertComment(self, cnxn, comment, commit=True): |
| 2020 | issue = self.GetIssue(cnxn, comment.issue_id) |
| 2021 | self.TestAddComment(comment, issue.local_id) |
| 2022 | |
| 2023 | # pylint: disable=unused-argument |
| 2024 | def DeltaUpdateIssue( |
| 2025 | self, cnxn, services, reporter_id, project_id, |
| 2026 | config, issue, delta, index_now=False, comment=None, attachments=None, |
| 2027 | iids_to_invalidate=None, rules=None, predicate_asts=None, |
| 2028 | is_description=False, timestamp=None, kept_attachments=None, |
| 2029 | importer_id=None, inbound_message=None): |
| 2030 | # Return a bogus amendments list if any of the fields changed |
| 2031 | amendments, _ = tracker_bizobj.ApplyIssueDelta( |
| 2032 | cnxn, self, issue, delta, config) |
| 2033 | |
| 2034 | if not amendments and (not comment or not comment.strip()): |
| 2035 | return [], None |
| 2036 | |
| 2037 | comment_pb = self.CreateIssueComment( |
| 2038 | cnxn, issue, reporter_id, comment, attachments=attachments, |
| 2039 | amendments=amendments, is_description=is_description, |
| 2040 | kept_attachments=kept_attachments, importer_id=importer_id, |
| 2041 | inbound_message=inbound_message) |
| 2042 | |
| 2043 | self.indexer_called = index_now |
| 2044 | return amendments, comment_pb |
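| | # Example of exercising the DeltaUpdateIssue fake above (a sketch; the |
| | # IssueDelta field shown is an assumption about tracker_pb2): |
| | # |
| | #   delta = tracker_pb2.IssueDelta(status='Fixed') |
| | #   amendments, comment_pb = issue_svc.DeltaUpdateIssue( |
| | #       'cnxn', services, 111, 789, config, issue, delta, comment='Done.') |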
| 2045 | |
| 2046 | def InvalidateIIDs(self, cnxn, iids_to_invalidate): |
| 2047 | pass |
| 2048 | |
| 2049 | # pylint: disable=unused-argument |
| 2050 | def CreateIssueComment( |
| 2051 | self, _cnxn, issue, user_id, content, |
| 2052 | inbound_message=None, amendments=None, attachments=None, |
| 2053 | kept_attachments=None, timestamp=None, is_spam=False, |
| 2054 | is_description=False, approval_id=None, commit=True, |
| 2055 | importer_id=None): |
| 2056 | # Add a comment to an issue |
| 2057 | comment = tracker_pb2.IssueComment() |
| 2058 | comment.id = len(self.comments_by_cid) |
| 2059 | comment.project_id = issue.project_id |
| 2060 | comment.issue_id = issue.issue_id |
| 2061 | comment.content = content |
| 2062 | comment.user_id = user_id |
| 2063 | if timestamp is not None: |
| 2064 | comment.timestamp = timestamp |
| 2065 | else: |
| 2066 | comment.timestamp = 1234567890 |
| 2067 | if amendments: |
| 2068 | comment.amendments.extend(amendments) |
| 2069 | if inbound_message: |
| 2070 | comment.inbound_message = inbound_message |
| 2071 | comment.is_spam = is_spam |
| 2072 | comment.is_description = is_description |
| 2073 | if approval_id: |
| 2074 | comment.approval_id = approval_id |
| 2075 | |
| 2076 | pid = issue.project_id |
| 2077 | self.comments_by_project.setdefault(pid, {}) |
| 2078 | self.comments_by_project[pid].setdefault(issue.local_id, []).append(comment) |
| 2079 | self.comments_by_iid.setdefault(issue.issue_id, []).append(comment) |
| 2080 | self.comments_by_cid[comment.id] = comment |
| 2081 | |
| 2082 | if attachments: |
| 2083 | for filename, filecontent, mimetype in attachments: |
| 2084 | aid = len(self.attachments_by_id) |
| 2085 | attach = tracker_pb2.Attachment( |
| 2086 | attachment_id=aid, |
| 2087 | filename=filename, |
| 2088 | filesize=len(filecontent), |
| 2089 | mimetype=mimetype, |
| 2090 | gcs_object_id='gcs_object_id(%s)' % filename) |
| 2091 | comment.attachments.append(attach) |
| 2092 | self.attachments_by_id[aid] = attach, comment.id, issue.issue_id |
| 2093 | |
| 2094 | if kept_attachments: |
| 2095 | comment.attachments.extend([ |
| 2096 | self.attachments_by_id[aid][0] |
| 2097 | for aid in kept_attachments]) |
| 2098 | |
| 2099 | return comment |
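| | # Example of the CreateIssueComment fake above; attachments are passed as |
| | # (filename, filecontent, mimetype) tuples, matching the loop above: |
| | # |
| | #   comment = issue_svc.CreateIssueComment( |
| | #       'cnxn', issue, 111, 'See the attached log.', |
| | #       attachments=[('log.txt', 'some text', 'text/plain')]) |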
| 2100 | |
| 2101 | def GetOpenAndClosedIssues(self, _cnxn, issue_ids): |
| 2102 | open_issues = [] |
| 2103 | closed_issues = [] |
| 2104 | for issue_id in issue_ids: |
| 2105 | try: |
| 2106 | issue = self.issues_by_iid[issue_id] |
| 2107 | if issue.status == 'Fixed': |
| 2108 | closed_issues.append(issue) |
| 2109 | else: |
| 2110 | open_issues.append(issue) |
| 2111 | except KeyError: |
| 2112 | continue |
| 2113 | |
| 2114 | return open_issues, closed_issues |
| 2115 | |
| 2116 | def GetIssuesDict( |
| 2117 | self, _cnxn, issue_ids, use_cache=True, shard_id=None): |
| 2118 | missing_ids = [iid for iid in issue_ids if iid not in self.issues_by_iid] |
| 2119 | issues_by_id = {} |
| 2120 | for iid in issue_ids: |
| 2121 | if iid in self.issues_by_iid: |
| 2122 | issue = self.issues_by_iid[iid] |
| 2123 | if not use_cache: |
| 2124 | issue.assume_stale = False |
| 2125 | issues_by_id[iid] = issue |
| 2126 | |
| 2127 | return issues_by_id, missing_ids |
| 2128 | |
| 2129 | def GetIssues(self, cnxn, issue_ids, use_cache=True, shard_id=None): |
| 2130 | issues_by_iid, _misses = self.GetIssuesDict( |
| 2131 | cnxn, issue_ids, use_cache=use_cache, shard_id=shard_id) |
| 2132 | results = [ |
| 2133 | issues_by_iid[issue_id] |
| 2134 | for issue_id in issue_ids |
| 2135 | if issue_id in issues_by_iid |
| 2136 | ] |
| 2137 | |
| 2138 | return results |
| 2139 | |
| 2140 | def SoftDeleteIssue( |
| 2141 | self, _cnxn, project_id, local_id, deleted, user_service): |
| 2142 | issue = self.issues_by_project[project_id][local_id] |
| 2143 | issue.deleted = deleted |
| 2144 | |
| 2145 | def SoftDeleteComment( |
| 2146 | self, cnxn, issue, comment, deleted_by_user_id, user_service, |
| 2147 | delete=True, reindex=False, is_spam=False): |
| 2148 | pid = comment.project_id |
| 2149 | # Find the original comment by the sequence number. |
| 2150 | c = None |
| 2151 | by_iid_idx = -1 |
| 2152 | for by_iid_idx, c in enumerate(self.comments_by_iid[issue.issue_id]): |
| 2153 | if c.sequence == comment.sequence: |
| 2154 | break |
| 2155 | comment = c |
| 2156 | by_project_idx = ( |
| 2157 | self.comments_by_project[pid][issue.local_id].index(comment)) |
| 2158 | comment.is_spam = is_spam |
| 2159 | if delete: |
| 2160 | comment.deleted_by = deleted_by_user_id |
| 2161 | else: |
| 2162 | comment.reset('deleted_by') |
| 2163 | self.comments_by_project[pid][issue.local_id][by_project_idx] = comment |
| 2164 | self.comments_by_iid[issue.issue_id][by_iid_idx] = comment |
| 2165 | self.comments_by_cid[comment.id] = comment |
| 2166 | |
| 2167 | def DeleteComponentReferences(self, _cnxn, component_id): |
| 2168 | for _, issue in self.issues_by_iid.items(): |
| 2169 | issue.component_ids = [ |
| 2170 | cid for cid in issue.component_ids if cid != component_id] |
| 2171 | |
| 2172 | def RunIssueQuery( |
| 2173 | self, cnxn, left_joins, where, order_by, shard_id=None, limit=None): |
| 2174 | """This always returns empty results. Mock it to test other cases.""" |
| 2175 | return [], False |
| 2176 | |
| 2177 | def GetIIDsByLabelIDs(self, cnxn, label_ids, project_id, shard_id): |
| 2178 | """This always returns empty results. Mock it to test other cases.""" |
| 2179 | return [] |
| 2180 | |
| 2181 | def GetIIDsByParticipant(self, cnxn, user_ids, project_ids, shard_id): |
| 2182 | """This always returns empty results. Mock it to test other cases.""" |
| 2183 | return [] |
| 2184 | |
| 2185 | def SortBlockedOn(self, cnxn, issue, blocked_on_iids): |
| 2186 | return blocked_on_iids, [0] * len(blocked_on_iids) |
| 2187 | |
| 2188 | def ApplyIssueRerank( |
| 2189 | self, cnxn, parent_id, relations_to_change, commit=True, invalidate=True): |
| 2190 | issue = self.GetIssue(cnxn, parent_id) |
| 2191 | relations_dict = dict( |
| 2192 | list(zip(issue.blocked_on_iids, issue.blocked_on_ranks))) |
| 2193 | relations_dict.update(relations_to_change) |
| 2194 | issue.blocked_on_ranks = sorted(issue.blocked_on_ranks, reverse=True) |
| 2195 | issue.blocked_on_iids = sorted( |
| 2196 | issue.blocked_on_iids, key=relations_dict.get, reverse=True) |
| 2197 | |
| 2198 | def SplitRanks(self, cnxn, parent_id, target_id, open_ids, split_above=False): |
| 2199 | pass |
| 2200 | |
| 2201 | def ExpungeUsersInIssues(self, cnxn, user_ids_by_email, limit=None): |
| 2202 | user_ids = list(user_ids_by_email.values()) |
| 2203 | self.expunged_users_in_issues.extend(user_ids) |
| 2204 | return [] |
| 2205 | |
| 2206 | |
| 2207 | class TemplateService(object): |
| 2208 | """Fake version of TemplateService that just works in-RAM.""" |
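| | # Typical test usage (illustrative sketch only; IDs are made up): |
| | # |
| | #   tmpl_svc = TemplateService() |
| | #   tmpl_svc.TestAddIssueTemplateDef( |
| | #       1, 789, 'Defect report', status='New', owner_id=111, |
| | #       labels=['Type-Defect']) |
| | #   template = tmpl_svc.GetTemplateByName('cnxn', 'Defect report', 789) |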
| 2209 | |
| 2210 | def __init__(self): |
| 2211 | self.templates_by_id = {} # template_id: template_pb |
| 2212 | self.templates_by_project_id = {} # project_id: [template_id] |
| 2213 | |
| 2214 | def TestAddIssueTemplateDef( |
| 2215 | self, template_id, project_id, name, content="", summary="", |
| 2216 | summary_must_be_edited=False, status='New', members_only=False, |
| 2217 | owner_defaults_to_member=False, component_required=False, owner_id=None, |
| 2218 | labels=None, component_ids=None, admin_ids=None, field_values=None, |
| 2219 | phases=None, approval_values=None): |
| 2220 | template = tracker_bizobj.MakeIssueTemplate( |
| 2221 | name, |
| 2222 | summary, |
| 2223 | status, |
| 2224 | owner_id, |
| 2225 | content, |
| 2226 | labels, |
| 2227 | field_values or [], |
| 2228 | admin_ids or [], |
| 2229 | component_ids, |
| 2230 | summary_must_be_edited=summary_must_be_edited, |
| 2231 | owner_defaults_to_member=owner_defaults_to_member, |
| 2232 | component_required=component_required, |
| 2233 | members_only=members_only, |
| 2234 | phases=phases, |
| 2235 | approval_values=approval_values) |
| 2236 | template.template_id = template_id |
| 2237 | self.templates_by_id[template_id] = template |
| 2238 | if project_id not in self.templates_by_project_id: |
| 2239 | self.templates_by_project_id[project_id] = [] |
| 2240 | self.templates_by_project_id[project_id].append(template_id) |
| 2241 | return template |
| 2242 | |
| 2243 | def GetTemplateByName(self, cnxn, template_name, project_id): |
| 2244 | if project_id not in self.templates_by_project_id: |
| 2245 | return None |
| 2246 | else: |
| 2247 | project_templates = self.templates_by_project_id[project_id] |
| 2248 | for template_id in project_templates: |
| 2249 | template = self.GetTemplateById(cnxn, template_id) |
| 2250 | if template.name == template_name: |
| 2251 | return template |
| 2252 | return None |
| 2253 | |
| 2254 | def GetTemplateById(self, cnxn, template_id): |
| 2255 | return self.templates_by_id.get(template_id) |
| 2256 | |
| 2257 | def GetTemplatesById(self, cnxn, template_ids): |
| 2258 | return filter( |
| 2259 | lambda template: template.template_id in template_ids, |
| 2260 | self.templates_by_id.values()) |
| 2261 | |
| 2262 | def GetProjectTemplates(self, cnxn, project_id): |
| 2263 | template_ids = self.templates_by_project_id[project_id] |
| 2264 | return self.GetTemplatesById(cnxn, template_ids) |
| 2265 | |
| 2266 | def ExpungeUsersInTemplates(self, cnxn, user_ids, limit=None): |
| 2267 | for _, template in self.templates_by_id.items(): |
| 2268 | template.admin_ids = [user_id for user_id in template.admin_ids |
| 2269 | if user_id not in user_ids] |
| 2270 | if template.owner_id in user_ids: |
| 2271 | template.owner_id = None |
| 2272 | template.field_values = [fv for fv in template.field_values |
| 2273 | if fv.user_id not in user_ids] |
| 2274 | |
| 2275 | class SpamService(object): |
| 2276 | """Fake version of SpamService that just works in-RAM.""" |
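| | # Typical test usage (illustrative sketch; 111 is a made-up user ID): |
| | # |
| | #   spam_svc = SpamService() |
| | #   spam_svc.FlagIssues('cnxn', issue_svc, [issue], 111, True) |
| | #   reporters, comment_flags = spam_svc.LookupIssueFlaggers( |
| | #       'cnxn', issue.issue_id) |
| | #   # reporters is now [111]; comment_flags maps comment_id -> user IDs. |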
| 2277 | |
| 2278 | def __init__(self, user_id=None): |
| 2279 | self.user_id = user_id |
| 2280 | self.reports_by_issue_id = collections.defaultdict(list) |
| 2281 | self.comment_reports_by_issue_id = collections.defaultdict(dict) |
| 2282 | self.manual_verdicts_by_issue_id = collections.defaultdict(dict) |
| 2283 | self.manual_verdicts_by_comment_id = collections.defaultdict(dict) |
| 2284 | self.expunged_users_in_spam = [] |
| 2285 | |
| 2286 | def LookupIssuesFlaggers(self, cnxn, issue_ids): |
| 2287 | return { |
| 2288 | issue_id: (self.reports_by_issue_id.get(issue_id, []), |
| 2289 | self.comment_reports_by_issue_id.get(issue_id, {})) |
| 2290 | for issue_id in issue_ids} |
| 2291 | |
| 2292 | def LookupIssueFlaggers(self, cnxn, issue_id): |
| 2293 | return self.LookupIssuesFlaggers(cnxn, [issue_id])[issue_id] |
| 2294 | |
| 2295 | def FlagIssues(self, cnxn, issue_service, issues, user_id, flagged_spam): |
| 2296 | for issue in issues: |
| 2297 | if flagged_spam: |
| 2298 | self.reports_by_issue_id[issue.issue_id].append(user_id) |
| 2299 | else: |
| 2300 | self.reports_by_issue_id[issue.issue_id].remove(user_id) |
| 2301 | |
| 2302 | def FlagComment( |
| 2303 | self, cnxn, issue, comment_id, reported_user_id, user_id, flagged_spam): |
| 2304 | if comment_id not in self.comment_reports_by_issue_id[issue.issue_id]: |
| 2305 | self.comment_reports_by_issue_id[issue.issue_id][comment_id] = [] |
| 2306 | if flagged_spam: |
| 2307 | self.comment_reports_by_issue_id[issue.issue_id][comment_id].append( |
| 2308 | user_id) |
| 2309 | else: |
| 2310 | self.comment_reports_by_issue_id[issue.issue_id][comment_id].remove( |
| 2311 | user_id) |
| 2312 | |
| 2313 | def RecordManualIssueVerdicts( |
| 2314 | self, cnxn, issue_service, issues, user_id, is_spam): |
| 2315 | for issue in issues: |
| 2316 | self.manual_verdicts_by_issue_id[issue.issue_id][user_id] = is_spam |
| 2317 | |
| 2318 | def RecordManualCommentVerdict( |
| 2319 | self, cnxn, issue_service, user_service, comment_id, |
| 2320 | user_id, is_spam): |
| 2321 | self.manual_verdicts_by_comment_id[comment_id][user_id] = is_spam |
| 2322 | comment = issue_service.GetComment(cnxn, comment_id) |
| 2323 | comment.is_spam = is_spam |
| 2324 | issue = issue_service.GetIssue(cnxn, comment.issue_id, use_cache=False) |
| 2325 | issue_service.SoftDeleteComment( |
| 2326 | cnxn, issue, comment, user_id, user_service, is_spam, True, is_spam) |
| 2327 | |
| 2328 | def RecordClassifierIssueVerdict(self, cnxn, issue, is_spam, confidence, |
| 2329 | failed_open): |
| 2330 | return |
| 2331 | |
| 2332 | def RecordClassifierCommentVerdict(self, cnxn, issue, is_spam, confidence, |
| 2333 | failed_open): |
| 2334 | return |
| 2335 | |
| 2336 | def ClassifyComment(self, comment, commenter): |
| 2337 | return {'outputLabel': 'ham', |
| 2338 | 'outputMulti': [{'label': 'ham', 'score': '1.0'}], |
| 2339 | 'failed_open': False} |
| 2340 | |
| 2341 | def ClassifyIssue(self, issue, firstComment, reporter): |
| 2342 | return {'outputLabel': 'ham', |
| 2343 | 'outputMulti': [{'label': 'ham', 'score': '1.0'}], |
| 2344 | 'failed_open': False} |
| 2345 | |
| 2346 | def ExpungeUsersInSpam(self, cnxn, user_ids): |
| 2347 | self.expunged_users_in_spam.extend(user_ids) |
| 2348 | |
| 2349 | |
| 2350 | class FeaturesService(object): |
| 2351 | """A fake implementation of FeaturesService.""" |
| 2352 | def __init__(self): |
| 2353 | # Test-only sequence of expunged projects and users. |
| 2354 | self.expunged_saved_queries = [] |
| 2355 | self.expunged_users_in_saved_queries = [] |
| 2356 | self.expunged_filter_rules = [] |
| 2357 | self.expunged_users_in_filter_rules = [] |
| 2358 | self.expunged_quick_edit = [] |
| 2359 | self.expunged_users_in_quick_edits = [] |
| 2360 | self.expunged_hotlist_ids = [] |
| 2361 | self.expunged_users_in_hotlists = [] |
| 2362 | |
| 2363 | # Filter rules: project_id => [filterrule_pb, ...] |
| 2364 | self.test_rules = collections.defaultdict(list) |
| 2365 | |
| 2366 | # TODO(crbug/monorail/7104): Confirm that these are never reassigned |
| 2367 | # to empty {} and then change these to collections.defaultdicts instead. |
| 2368 | # hotlists |
| 2369 | self.test_hotlists = {} # (hotlist_name, owner_id) => hotlist_pb |
| 2370 | self.hotlists_by_id = {} |
| 2371 | self.hotlists_id_by_user = {} # user_id => [hotlist_id, hotlist_id, ...] |
| 2372 | self.hotlists_id_by_issue = {} # issue_id => [hotlist_id, hotlist_id, ...] |
| 2373 | |
| 2374 | # saved queries |
| 2375 | self.saved_queries = [] # [(pid, uid, sq), ...] |
| 2376 | |
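| | # Sketch of adding a test rule with the helper below (the predicate syntax |
| | # is an assumption about Monorail filter-rule predicates): |
| | # |
| | #   features.TestAddFilterRule( |
| | #       789, 'label:Security', add_labels=['Restrict-View-SecurityTeam']) |
| | #   rules = features.test_rules[789] |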
| 2377 | def TestAddFilterRule( |
| 2378 | self, project_id, predicate, default_status=None, default_owner_id=None, |
| 2379 | add_cc_ids=None, add_labels=None, add_notify=None, warning=None, |
| 2380 | error=None): |
| 2381 | rule = filterrules_helpers.MakeRule( |
| 2382 | predicate, default_status=default_status, |
| 2383 | default_owner_id=default_owner_id, add_cc_ids=add_cc_ids, |
| 2384 | add_labels=add_labels, add_notify=add_notify, warning=warning, |
| 2385 | error=error) |
| 2386 | self.test_rules[project_id].append(rule) |
| 2387 | return rule |
| 2388 | |
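| | # Sketch of the TestAddHotlist helper below (all IDs are made up): |
| | # |
| | #   hotlist = features.TestAddHotlist( |
| | #       'MyHotlist', owner_ids=[111], editor_ids=[222], hotlist_id=1001, |
| | #       hotlist_item_fields=[(78901, 1, 111, 1234567890, 'note')]) |
| | #   assert features.LookupIssueHotlists('cnxn', [78901]) == {78901: [1001]} |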
| 2389 | def TestAddHotlist(self, name, summary='', owner_ids=None, editor_ids=None, |
| 2390 | follower_ids=None, description=None, hotlist_id=None, |
| 2391 | is_private=False, hotlist_item_fields=None, |
| 2392 | default_col_spec=None): |
| 2393 | """Add a hotlist to the fake FeaturesService object. |
| 2394 | |
| 2395 | Args: |
| 2396 | name: the name of the hotlist. Will replace any existing hotlist under |
| 2397 | the same name. |
| 2398 | summary: The summary string of the hotlist. |
| 2399 | owner_ids: List of user IDs for the hotlist owners. |
| 2400 | editor_ids: List of user IDs for the hotlist editors. |
| 2401 | follower_ids: List of user IDs for the hotlist followers. |
| 2402 | description: The description string for this hotlist. |
| 2403 | hotlist_id: A unique integer identifier for the created hotlist. |
| 2404 | is_private: A boolean; True makes the hotlist private. |
| 2405 | hotlist_item_fields: A list of tuples of the form |
| 2406 | [(issue_id, rank, adder_id, date_added, note), ...]. |
| 2407 | default_col_spec: String of default columns for the hotlist. |
| 2408 | |
| 2409 | Returns: |
| 2410 | A populated hotlist PB. |
| 2411 | """ |
| 2412 | hotlist_pb = features_pb2.Hotlist() |
| 2413 | hotlist_pb.hotlist_id = hotlist_id or hash(name) % 100000 |
| 2414 | hotlist_pb.name = name |
| 2415 | hotlist_pb.summary = summary |
| 2416 | hotlist_pb.is_private = is_private |
| 2417 | hotlist_pb.default_col_spec = default_col_spec |
| 2418 | if description is not None: |
| 2419 | hotlist_pb.description = description |
| 2420 | |
| 2421 | self.TestAddHotlistMembers(owner_ids, hotlist_pb, OWNER_ROLE) |
| 2422 | self.TestAddHotlistMembers(follower_ids, hotlist_pb, FOLLOWER_ROLE) |
| 2423 | self.TestAddHotlistMembers(editor_ids, hotlist_pb, EDITOR_ROLE) |
| 2424 | |
| 2425 | if hotlist_item_fields is not None: |
| 2426 | for (issue_id, rank, adder_id, date, note) in hotlist_item_fields: |
| 2427 | hotlist_pb.items.append( |
| 2428 | features_pb2.Hotlist.HotlistItem( |
| 2429 | issue_id=issue_id, rank=rank, adder_id=adder_id, |
| 2430 | date_added=date, note=note)) |
| 2431 | try: |
| 2432 | self.hotlists_id_by_issue[issue_id].append(hotlist_pb.hotlist_id) |
| 2433 | except KeyError: |
| 2434 | self.hotlists_id_by_issue[issue_id] = [hotlist_pb.hotlist_id] |
| 2435 | |
| 2436 | owner_id = None |
| 2437 | if hotlist_pb.owner_ids: |
| 2438 | owner_id = hotlist_pb.owner_ids[0] |
| 2439 | self.test_hotlists[(name, owner_id)] = hotlist_pb |
| 2440 | self.hotlists_by_id[hotlist_pb.hotlist_id] = hotlist_pb |
| 2441 | return hotlist_pb |
| 2442 | |
| 2443 | def TestAddHotlistMembers(self, user_id_list, hotlist_pb, role): |
| 2444 | if user_id_list is not None: |
| 2445 | for user_id in user_id_list: |
| 2446 | if role == OWNER_ROLE: |
| 2447 | hotlist_pb.owner_ids.append(user_id) |
| 2448 | elif role == EDITOR_ROLE: |
| 2449 | hotlist_pb.editor_ids.append(user_id) |
| 2450 | elif role == FOLLOWER_ROLE: |
| 2451 | hotlist_pb.follower_ids.append(user_id) |
| 2452 | try: |
| 2453 | self.hotlists_id_by_user[user_id].append(hotlist_pb.hotlist_id) |
| 2454 | except KeyError: |
| 2455 | self.hotlists_id_by_user[user_id] = [hotlist_pb.hotlist_id] |
| 2456 | |
| 2457 | def CheckHotlistName(self, cnxn, name, owner_ids): |
| 2458 | if not framework_bizobj.IsValidHotlistName(name): |
| 2459 | raise exceptions.InputException( |
| 2460 | '%s is not a valid name for a Hotlist' % name) |
| 2461 | if self.LookupHotlistIDs(cnxn, [name], owner_ids): |
| 2462 | raise features_svc.HotlistAlreadyExists() |
| 2463 | |
| 2464 | def CreateHotlist( |
| 2465 | self, _cnxn, hotlist_name, summary, description, owner_ids, editor_ids, |
| 2466 | issue_ids=None, is_private=None, default_col_spec=None, ts=None): |
| 2467 | """Create and store a Hotlist with the given attributes.""" |
| 2468 | if not framework_bizobj.IsValidHotlistName(hotlist_name): |
| 2469 | raise exceptions.InputException() |
| 2470 | if not owner_ids: # Should never happen. |
| 2471 | raise features_svc.UnownedHotlistException() |
| 2472 | if (hotlist_name, owner_ids[0]) in self.test_hotlists: |
| 2473 | raise features_svc.HotlistAlreadyExists() |
| 2474 | hotlist_item_fields = [ |
| 2475 | (issue_id, rank*100, owner_ids[0] or None, ts, '') for |
| 2476 | rank, issue_id in enumerate(issue_ids or [])] |
| 2477 | return self.TestAddHotlist(hotlist_name, summary=summary, |
| 2478 | owner_ids=owner_ids, editor_ids=editor_ids, |
| 2479 | description=description, is_private=is_private, |
| 2480 | hotlist_item_fields=hotlist_item_fields, |
| 2481 | default_col_spec=default_col_spec) |
| 2482 | |
| 2483 | def UpdateHotlist( |
| 2484 | self, cnxn, hotlist_id, name=None, summary=None, description=None, |
| 2485 | is_private=None, default_col_spec=None, owner_id=None, |
| 2486 | add_editor_ids=None): |
| 2487 | hotlist = self.hotlists_by_id.get(hotlist_id) |
| 2488 | if not hotlist: |
| 2489 | raise features_svc.NoSuchHotlistException( |
| 2490 | 'Hotlist "%s" not found!' % hotlist_id) |
| 2491 | |
| 2492 | if owner_id: |
| 2493 | old_owner_id = hotlist.owner_ids[0] |
| 2494 | self.test_hotlists.pop((hotlist.name, old_owner_id), None) |
| 2495 | self.test_hotlists[(hotlist.name, owner_id)] = hotlist |
| 2496 | |
| 2497 | if add_editor_ids: |
| 2498 | for editor_id in add_editor_ids: |
| 2499 | self.hotlists_id_by_user.setdefault(editor_id, []).append(hotlist_id) |
| 2500 | |
| 2501 | if name is not None: |
| 2502 | hotlist.name = name |
| 2503 | if summary is not None: |
| 2504 | hotlist.summary = summary |
| 2505 | if description is not None: |
| 2506 | hotlist.description = description |
| 2507 | if is_private is not None: |
| 2508 | hotlist.is_private = is_private |
| 2509 | if default_col_spec is not None: |
| 2510 | hotlist.default_col_spec = default_col_spec |
| 2511 | if owner_id is not None: |
| 2512 | hotlist.owner_ids = [owner_id] |
| 2513 | if add_editor_ids: |
| 2514 | hotlist.editor_ids.extend(add_editor_ids) |
| 2515 | |
| 2516 | def RemoveHotlistEditors(self, cnxn, hotlist_id, remove_editor_ids): |
| 2517 | hotlist = self.hotlists_by_id.get(hotlist_id) |
| 2518 | if not hotlist: |
| 2519 | raise features_svc.NoSuchHotlistException( |
| 2520 | 'Hotlist "%s" not found!' % hotlist_id) |
| 2521 | for editor_id in remove_editor_ids: |
| 2522 | hotlist.editor_ids.remove(editor_id) |
| 2523 | self.hotlists_id_by_user[editor_id].remove(hotlist_id) |
| 2524 | |
| 2525 | def AddIssuesToHotlists(self, cnxn, hotlist_ids, added_tuples, issue_svc, |
| 2526 | chart_svc, commit=True): |
| 2527 | for hotlist_id in hotlist_ids: |
| 2528 | self.UpdateHotlistItems(cnxn, hotlist_id, [], added_tuples, commit=commit) |
| 2529 | |
| 2530 | def RemoveIssuesFromHotlists(self, cnxn, hotlist_ids, issue_ids, issue_svc, |
| 2531 | chart_svc, commit=True): |
| 2532 | for hotlist_id in hotlist_ids: |
| 2533 | self.UpdateHotlistItems(cnxn, hotlist_id, issue_ids, [], commit=commit) |
| 2534 | |
| 2535 | def UpdateHotlistIssues( |
| 2536 | self, |
| 2537 | cnxn, |
| 2538 | hotlist_id, |
| 2539 | updated_items, |
| 2540 | remove_issue_ids, |
| 2541 | issue_svc, |
| 2542 | chart_svc, |
| 2543 | commit=True): |
| 2544 | if not updated_items and not remove_issue_ids: |
| 2545 | raise exceptions.InputException('No changes to make') |
| 2546 | |
| 2547 | hotlist = self.hotlists_by_id.get(hotlist_id) |
| 2548 | if not hotlist: |
| 2549 | raise features_svc.NoSuchHotlistException( |
| | 'Hotlist "%s" not found!' % hotlist_id) |
| 2550 | |
| 2551 | updated_ids = [item.issue_id for item in updated_items] |
| 2552 | items = [ |
| 2553 | item for item in hotlist.items |
| 2554 | if item.issue_id not in updated_ids + remove_issue_ids |
| 2555 | ] |
| 2556 | hotlist.items = sorted(updated_items + items, key=lambda item: item.rank) |
| 2557 | |
| 2558 | # Remove the hotlist from the mapping of all removed and updated issues. |
| 2559 | for issue_id in remove_issue_ids + updated_ids: |
| 2560 | try: |
| 2561 | self.hotlists_id_by_issue[issue_id].remove(hotlist_id) |
| 2562 | except (ValueError, KeyError): |
| 2563 | pass |
| 2564 | # Add all new or updated issues. |
| 2565 | for item in updated_items: |
| 2566 | self.hotlists_id_by_issue.setdefault(item.issue_id, []).append(hotlist_id) |
| 2567 | |
| 2568 | def UpdateHotlistItems( |
| 2569 | self, cnxn, hotlist_id, remove, added_issue_tuples, commit=True): |
| 2570 | hotlist = self.hotlists_by_id.get(hotlist_id) |
| 2571 | if not hotlist: |
| 2572 | raise features_svc.NoSuchHotlistException( |
| 2573 | 'Hotlist "%s" not found!' % hotlist_id) |
| 2574 | current_issues_ids = { |
| 2575 | item.issue_id for item in hotlist.items} |
| 2576 | items = [ |
| 2577 | item for item in hotlist.items if |
| 2578 | item.issue_id not in remove] |
| 2579 | |
| 2580 | if hotlist.items: |
| 2581 | items_sorted = sorted(hotlist.items, key=lambda item: item.rank) |
| 2582 | rank_base = items_sorted[-1].rank + 10 |
| 2583 | else: |
| 2584 | rank_base = 1 |
| 2585 | |
| 2586 | new_hotlist_items = [ |
| 2587 | features_pb2.MakeHotlistItem( |
| 2588 | issue_id, rank+rank_base*10, adder_id, date, note) |
| 2589 | for rank, (issue_id, adder_id, date, note) in |
| 2590 | enumerate(added_issue_tuples) |
| 2591 | if issue_id not in current_issues_ids] |
| 2592 | items.extend(new_hotlist_items) |
| 2593 | hotlist.items = items |
| 2594 | |
| 2595 | for issue_id in remove: |
| 2596 | try: |
| 2597 | self.hotlists_id_by_issue[issue_id].remove(hotlist_id) |
| 2598 | except ValueError: |
| 2599 | pass |
| 2600 | for item in new_hotlist_items: |
| 2601 | try: |
| 2602 | self.hotlists_id_by_issue[item.issue_id].append(hotlist_id) |
| 2603 | except KeyError: |
| 2604 | self.hotlists_id_by_issue[item.issue_id] = [hotlist_id] |
| 2605 | |
| 2606 | def UpdateHotlistItemsFields( |
| 2607 | self, cnxn, hotlist_id, new_ranks=None, new_notes=None, commit=True): |
| 2608 | hotlist = self.hotlists_by_id.get(hotlist_id) |
| 2609 | if not hotlist: |
| 2610 | raise features_svc.NoSuchHotlistException( |
| 2611 | 'Hotlist "%s" not found!' % hotlist_id) |
| 2612 | if new_ranks is None: |
| 2613 | new_ranks = {} |
| 2614 | if new_notes is None: |
| 2615 | new_notes = {} |
| 2616 | for hotlist_item in hotlist.items: |
| 2617 | if hotlist_item.issue_id in new_ranks: |
| 2618 | hotlist_item.rank = new_ranks[hotlist_item.issue_id] |
| 2619 | if hotlist_item.issue_id in new_notes: |
| 2620 | hotlist_item.note = new_notes[hotlist_item.issue_id] |
| 2621 | |
| 2622 | hotlist.items.sort(key=lambda item: item.rank) |
| 2623 | |
| 2624 | def TransferHotlistOwnership( |
| 2625 | self, cnxn, hotlist, new_owner_id, remain_editor, commit=True): |
| 2626 | """Transfers ownership of a hotlist to a new owner.""" |
| 2627 | new_editor_ids = hotlist.editor_ids |
| 2628 | if remain_editor: |
| 2629 | new_editor_ids.extend(hotlist.owner_ids) |
| 2630 | if new_owner_id in new_editor_ids: |
| 2631 | new_editor_ids.remove(new_owner_id) |
| 2632 | new_follower_ids = hotlist.follower_ids |
| 2633 | if new_owner_id in new_follower_ids: |
| 2634 | new_follower_ids.remove(new_owner_id) |
| 2635 | self.UpdateHotlistRoles( |
| 2636 | cnxn, hotlist.hotlist_id, [new_owner_id], new_editor_ids, |
| 2637 | new_follower_ids, commit=commit) |
| 2638 | |
| 2639 | def LookupUserHotlists(self, cnxn, user_ids): |
| 2640 | """Return dict of {user_id: [hotlist_id, hotlist_id...]}.""" |
| 2641 | users_hotlists_dict = { |
| 2642 | user_id: self.hotlists_id_by_user.get(user_id, []) |
| 2643 | for user_id in user_ids |
| 2644 | } |
| 2645 | return users_hotlists_dict |
| 2646 | |
| 2647 | def LookupIssueHotlists(self, cnxn, issue_ids): |
| 2648 | """Return dict of {issue_id: [hotlist_id, hotlist_id...]}.""" |
| 2649 | issues_hotlists_dict = { |
| 2650 | issue_id: self.hotlists_id_by_issue[issue_id] |
| 2651 | for issue_id in issue_ids |
| 2652 | if issue_id in self.hotlists_id_by_issue} |
| 2653 | return issues_hotlists_dict |
| 2654 | |
| 2655 | def LookupHotlistIDs(self, cnxn, hotlist_names, owner_ids): |
    id_dict = {}
    for name in hotlist_names:
      for owner_id in owner_ids:
        hotlist = self.test_hotlists.get((name, owner_id))
        if hotlist:
          if not hotlist.owner_ids:  # Should never happen.
            logging.warning('Unowned Hotlist: id:%r, name:%r',
                            hotlist.hotlist_id, hotlist.name)
            continue
          id_dict[(name.lower(), owner_id)] = hotlist.hotlist_id
    return id_dict

  def GetHotlists(self, cnxn, hotlist_ids, use_cache=True):
    """Returns dict of {hotlist_id: hotlist PB}."""
    result = {}
    for hotlist_id in hotlist_ids:
      hotlist = self.hotlists_by_id.get(hotlist_id)
      if hotlist:
        result[hotlist_id] = hotlist
      else:
        raise features_svc.NoSuchHotlistException()
    return result

  def GetHotlistsByUserID(self, cnxn, user_id, use_cache=True):
    """Get a list of hotlist PBs for a given user."""
    hotlist_id_dict = self.LookupUserHotlists(cnxn, [user_id])
    hotlists = self.GetHotlists(cnxn, hotlist_id_dict.get(
        user_id, []), use_cache=use_cache)
    return list(hotlists.values())

  def GetHotlistsByIssueID(self, cnxn, issue_id, use_cache=True):
    """Get a list of hotlist PBs for a given issue."""
    hotlist_id_dict = self.LookupIssueHotlists(cnxn, [issue_id])
    hotlists = self.GetHotlists(cnxn, hotlist_id_dict.get(
        issue_id, []), use_cache=use_cache)
    return list(hotlists.values())

  def GetHotlist(self, cnxn, hotlist_id, use_cache=True):
    """Return hotlist PB."""
    hotlist_id_dict = self.GetHotlists(cnxn, [hotlist_id], use_cache=use_cache)
    return hotlist_id_dict.get(hotlist_id)

  def GetHotlistsByID(self, cnxn, hotlist_ids, use_cache=True):
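    """Return ({hotlist_id: hotlist PB}, [missing hotlist_id])."""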
    hotlists_dict = {}
    missed_ids = []
    for hotlist_id in hotlist_ids:
      hotlist = self.hotlists_by_id.get(hotlist_id)
      if hotlist:
        hotlists_dict[hotlist_id] = hotlist
      else:
        missed_ids.append(hotlist_id)
    return hotlists_dict, missed_ids

  def GetHotlistByID(self, cnxn, hotlist_id, use_cache=True):
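    """Return the hotlist PB for the given hotlist_id."""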
    hotlists_dict, _ = self.GetHotlistsByID(
        cnxn, [hotlist_id], use_cache=use_cache)
    return hotlists_dict[hotlist_id]

  def UpdateHotlistRoles(
      self, cnxn, hotlist_id, owner_ids, editor_ids, follower_ids, commit=True):
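    """Store the given owner, editor, and follower IDs on the hotlist."""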
    hotlist = self.hotlists_by_id.get(hotlist_id)
    if not hotlist:
      raise features_svc.NoSuchHotlistException(
          'Hotlist "%s" not found!' % hotlist_id)

    # Remove hotlist_ids to clear old roles
    for user_id in (hotlist.owner_ids + hotlist.editor_ids +
                    hotlist.follower_ids):
      if hotlist_id in self.hotlists_id_by_user.get(user_id, []):
        self.hotlists_id_by_user[user_id].remove(hotlist_id)
    old_owner_id = None
    if hotlist.owner_ids:
      old_owner_id = hotlist.owner_ids[0]
    self.test_hotlists.pop((hotlist.name, old_owner_id), None)

    hotlist.owner_ids = owner_ids
    hotlist.editor_ids = editor_ids
    hotlist.follower_ids = follower_ids

    # Add new hotlist roles
    for user_id in owner_ids + editor_ids + follower_ids:
      try:
        if hotlist_id not in self.hotlists_id_by_user[user_id]:
          self.hotlists_id_by_user[user_id].append(hotlist_id)
      except KeyError:
        self.hotlists_id_by_user[user_id] = [hotlist_id]
    new_owner_id = None
    if owner_ids:
      new_owner_id = owner_ids[0]
    self.test_hotlists[(hotlist.name, new_owner_id)] = hotlist

  def DeleteHotlist(self, cnxn, hotlist_id, commit=True):
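    """Remove the hotlist and its entries from the fake lookup tables."""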
    hotlist = self.hotlists_by_id.pop(hotlist_id, None)
    if hotlist is not None:
      user_ids = hotlist.owner_ids + hotlist.editor_ids + hotlist.follower_ids
      for user_id in user_ids:
        try:
          self.hotlists_id_by_user[user_id].remove(hotlist_id)
        except (ValueError, KeyError):
          pass
      for item in hotlist.items:
        try:
          self.hotlists_id_by_issue[item.issue_id].remove(hotlist_id)
        except (ValueError, KeyError):
          pass
      for owner_id in hotlist.owner_ids:
        self.test_hotlists.pop((hotlist.name, owner_id), None)

  def ExpungeHotlists(
      self, cnxn, hotlist_ids, star_svc, user_svc, chart_svc, commit=True):
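    """Record the expunged hotlist IDs and delete each hotlist."""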
    self.expunged_hotlist_ids.extend(hotlist_ids)
    for hotlist_id in hotlist_ids:
      self.DeleteHotlist(cnxn, hotlist_id)

  def ExpungeUsersInHotlists(
      self, cnxn, user_ids, star_svc, user_svc, chart_svc):
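    """Record the user IDs whose hotlist data would be expunged."""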
    self.expunged_users_in_hotlists.extend(user_ids)

  # end of Hotlist functions

  def GetRecentCommands(self, cnxn, user_id, project_id):
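    """Return two empty lists; this fake stores no recent commands."""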
    return [], []

  def ExpungeSavedQueriesExecuteInProject(self, _cnxn, project_id):
    self.expunged_saved_queries.append(project_id)

  def ExpungeSavedQueriesByUsers(self, cnxn, user_ids, limit=None):
    self.expunged_users_in_saved_queries.extend(user_ids)

  def ExpungeFilterRules(self, _cnxn, project_id):
    self.expunged_filter_rules.append(project_id)

  def ExpungeFilterRulesByUser(self, cnxn, user_ids_by_email):
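    """Remove and return, as {project_id: [filter_rule]}, any rules that
    mention the given users by ID or email."""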
    emails = list(user_ids_by_email.keys())
    user_ids = list(user_ids_by_email.values())
    project_rules_dict = collections.defaultdict(list)
    for project_id, rules in self.test_rules.items():
      for rule in rules:
        if rule.default_owner_id in user_ids:
          project_rules_dict[project_id].append(rule)
          continue
        if any(cc_id in user_ids for cc_id in rule.add_cc_ids):
          project_rules_dict[project_id].append(rule)
          continue
        if any(addr in emails for addr in rule.add_notify_addrs):
          project_rules_dict[project_id].append(rule)
          continue
        if any((email in rule.predicate) for email in emails):
          project_rules_dict[project_id].append(rule)
          continue
      self.test_rules[project_id] = [
          rule for rule in rules
          if rule not in project_rules_dict[project_id]]
    return project_rules_dict

  def ExpungeQuickEditHistory(self, _cnxn, project_id):
    self.expunged_quick_edit.append(project_id)

  def ExpungeQuickEditsByUsers(self, cnxn, user_ids, limit=None):
    self.expunged_users_in_quick_edits.extend(user_ids)

  def GetFilterRules(self, cnxn, project_id):
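    """Return the list of filter rules stored for the given project."""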
    return self.test_rules[project_id]

  def GetCannedQueriesByProjectID(self, cnxn, project_id):
    return [sq for (pid, _, sq) in self.saved_queries if pid == project_id]

  def GetSavedQueriesByUserID(self, cnxn, user_id):
    return [sq for (_, uid, sq) in self.saved_queries if uid == user_id]

  def UpdateCannedQueries(self, cnxn, project_id, canned_queries):
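    """Append the given canned queries for the project."""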
    self.saved_queries.extend(
        [(project_id, None, cq) for cq in canned_queries])

  def UpdateUserSavedQueries(self, cnxn, user_id, saved_queries):
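    """Replace the user's saved queries with the given ones."""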
    self.saved_queries = [
        (pid, uid, sq) for (pid, uid, sq) in self.saved_queries
        if uid != user_id]
    for sq in saved_queries:
      if sq.executes_in_project_ids:
        self.saved_queries.extend(
            [(eipid, user_id, sq) for eipid in sq.executes_in_project_ids])
      else:
        self.saved_queries.append((None, user_id, sq))

  def GetSubscriptionsInProjects(self, cnxn, project_ids):
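    """Return {user_id: [saved_query]} for subscriptions in the projects."""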
    sq_by_uid = {}
    for pid, uid, sq in self.saved_queries:
      if pid in project_ids:
        if uid in sq_by_uid:
          sq_by_uid[uid].append(sq)
        else:
          sq_by_uid[uid] = [sq]

    return sq_by_uid

  def GetSavedQuery(self, cnxn, query_id):
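    """Return an empty SavedQuery PB, regardless of query_id."""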
    return tracker_pb2.SavedQuery()


class PostData(object):
  """A dictionary-like object that also implements getall()."""

  def __init__(self, *args, **kwargs):
    self.dictionary = dict(*args, **kwargs)

  def getall(self, key):
    """Return all values, assume that the value at key is already a list."""
    return self.dictionary.get(key, [])

  def get(self, key, default=None):
    """Return first value, assume that the value at key is already a list."""
    return self.dictionary.get(key, [default])[0]

  def __getitem__(self, key):
    """Return first value, assume that the value at key is already a list."""
    return self.dictionary[key][0]

  def __contains__(self, key):
    return key in self.dictionary

  def keys(self):
    """Return the keys in the POST data."""
    return list(self.dictionary.keys())


class FakeFile(object):
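  """A minimal file-like object that can be used as a context manager."""
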
  def __init__(self, data=None):
    self.data = data

  def read(self):
    return self.data

  def write(self, content):
    return

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    return None


def gcs_open(filename, mode):
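  """Return a FakeFile that reads back the filename (fake GCS open)."""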
  return FakeFile(filename)