Adrià Vilanova Martínez | f19ea43 | 2024-01-23 20:20:52 +0100 | [diff] [blame^] | 1 | # Copyright 2016 The Chromium Authors |
| 2 | # Use of this source code is governed by a BSD-style license that can be |
| 3 | # found in the LICENSE file. |
Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 4 | |
| 5 | """Fake object classes that are useful for unit tests.""" |
| 6 | from __future__ import print_function |
| 7 | from __future__ import division |
| 8 | from __future__ import absolute_import |
| 9 | |
| 10 | import collections |
| 11 | import itertools |
| 12 | import logging |
| 13 | import re |
| 14 | import sys |
| 15 | import time |
| 16 | |
| 17 | from six import string_types |
| 18 | |
| 19 | import settings |
| 20 | from features import filterrules_helpers |
| 21 | from framework import exceptions |
| 22 | from framework import framework_bizobj |
| 23 | from framework import framework_constants |
| 24 | from framework import framework_helpers |
| 25 | from framework import monorailrequest |
| 26 | from framework import permissions |
| 27 | from framework import profiler |
| 28 | from framework import validate |
Adrià Vilanova Martínez | f19ea43 | 2024-01-23 20:20:52 +0100 | [diff] [blame^] | 29 | from mrproto import features_pb2 |
| 30 | from mrproto import project_pb2 |
| 31 | from mrproto import tracker_pb2 |
| 32 | from mrproto import user_pb2 |
| 33 | from mrproto import usergroup_pb2 |
Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 34 | from services import caches |
| 35 | from services import config_svc |
| 36 | from services import features_svc |
| 37 | from services import project_svc |
| 38 | from tracker import tracker_bizobj |
| 39 | from tracker import tracker_constants |
| 40 | |
| 41 | # Many fakes return partial or constant values, regardless of their arguments. |
| 42 | # pylint: disable=unused-argument |
| 43 | |
# Boundary marker for fakes that build multipart payloads.
# NOTE(review): presumably consumed by tests that parse email/request
# bodies elsewhere in this package — confirm usage.
BOUNDARY = '-----thisisaboundary'
# Role-name marker strings; presumably returned by role/permission fakes
# defined elsewhere in this module (not visible in this chunk) — verify.
OWNER_ROLE = 'OWNER_ROLE'
COMMITTER_ROLE = 'COMMITTER_ROLE'
CONTRIBUTOR_ROLE = 'CONTRIBUTOR_ROLE'
EDITOR_ROLE = 'EDITOR_ROLE'
FOLLOWER_ROLE = 'FOLLOWER_ROLE'
| 50 | |
def Hotlist(
    hotlist_name, hotlist_id, hotlist_item_fields=None,
    is_private=False, owner_ids=None, editor_ids=None, follower_ids=None,
    default_col_spec=None, summary=None, description=None):
  """Return a fake Hotlist PB for tests.

  A falsy hotlist_id is replaced with a hash of the hotlist name, so every
  named hotlist gets a deterministic ID without extra bookkeeping.
  """
  effective_id = hotlist_id if hotlist_id else hash(hotlist_name)
  return features_pb2.MakeHotlist(
      hotlist_name,
      hotlist_item_fields=hotlist_item_fields,
      hotlist_id=effective_id,
      is_private=is_private,
      owner_ids=owner_ids or [],
      editor_ids=editor_ids or [],
      follower_ids=follower_ids or [],
      default_col_spec=default_col_spec,
      summary=summary,
      description=description)
| 62 | |
def HotlistItem(issue_id, rank=None, adder_id=None, date_added=None, note=None):
  """Return a fake HotlistItem PB for tests.

  Args:
    issue_id: int issue ID of the hotlist entry.
    rank: optional int ranking of the item within the hotlist.
    adder_id: optional int user ID of whoever added the item.
    date_added: optional timestamp of when the item was added.
    note: optional free-form note attached to the item.
  """
  # Bug fix: previously `note=None` was hard-coded in the call, so the
  # caller's note argument was silently dropped.
  return features_pb2.MakeHotlistItem(
      issue_id=issue_id, rank=rank, adder_id=adder_id,
      date_added=date_added, note=note)
| 67 | |
def Project(
    project_name='proj', project_id=None, state=project_pb2.ProjectState.LIVE,
    access=project_pb2.ProjectAccess.ANYONE, moved_to=None,
    cached_content_timestamp=None,
    owner_ids=None, committer_ids=None, contributor_ids=None):
  """Returns a project protocol buffer with the given attributes.

  A falsy project_id is replaced with a hash of the project name.
  """
  if not project_id:
    project_id = hash(project_name)
  return project_pb2.MakeProject(
      project_name,
      project_id=project_id,
      state=state,
      access=access,
      moved_to=moved_to,
      cached_content_timestamp=cached_content_timestamp,
      owner_ids=owner_ids,
      committer_ids=committer_ids,
      contributor_ids=contributor_ids)
| 80 | |
| 81 | |
def MakeTestFieldDef(
    field_id, project_id, field_type, field_name='', applic_type=None,
    applic_pred=None, is_required=False, is_niche=False, is_multivalued=False,
    min_value=None, max_value=None, regex=None, needs_member=False,
    needs_perm=None, grants_perm=None, notify_on=None, date_action_str=None,
    docstring=None, admin_ids=None, editor_ids=None, approval_id=None,
    is_phase_field=False, is_restricted_field=False):
  """Return a FieldDef PB for tests by forwarding to tracker_bizobj.

  Note: the argument order here differs from tracker_bizobj.MakeFieldDef
  (field_type comes before field_name in this signature), and most values
  are forwarded positionally, so the two signatures must stay in sync.
  """
  return tracker_bizobj.MakeFieldDef(
      field_id, project_id, field_name, field_type, applic_type, applic_pred,
      is_required, is_niche, is_multivalued, min_value, max_value, regex,
      needs_member, needs_perm, grants_perm, notify_on, date_action_str,
      # NOTE(review): the literal False below fills the positional slot after
      # docstring — presumably an is_deleted-style flag; confirm against
      # tracker_bizobj.MakeFieldDef's signature.
      docstring, False,
      approval_id=approval_id, is_phase_field=is_phase_field,
      is_restricted_field=is_restricted_field, admin_ids=admin_ids,
      editor_ids=editor_ids)
| 97 | |
def MakeTestApprovalDef(approval_id, approver_ids=None, survey=None):
  """Return an ApprovalDef PB with the given approvers and survey text."""
  return tracker_pb2.ApprovalDef(
      approval_id=approval_id, approver_ids=approver_ids, survey=survey)
| 103 | |
def MakePhase(phase_id, name='', rank=0):
  """Return a Phase PB for tests."""
  return tracker_pb2.Phase(
      phase_id=phase_id,
      name=name,
      rank=rank)
| 106 | |
| 107 | |
def MakeApprovalValue(
    approval_id,
    status=tracker_pb2.ApprovalStatus.NOT_SET,
    setter_id=None,
    set_on=None,
    approver_ids=None,
    phase_id=None):
  """Return an ApprovalValue PB, defaulting approver_ids to a fresh list."""
  effective_approvers = [] if approver_ids is None else approver_ids
  return tracker_pb2.ApprovalValue(
      approval_id=approval_id,
      status=status,
      setter_id=setter_id,
      set_on=set_on,
      approver_ids=effective_approvers,
      phase_id=phase_id)
| 124 | |
| 125 | |
def MakeFieldValue(
    field_id,
    int_value=None,
    str_value=None,
    user_id=None,
    date_value=None,
    url_value=None,
    derived=None,
    phase_id=None):
  """Return a FieldValue PB carrying whichever value kind the caller set."""
  return tracker_pb2.FieldValue(
      field_id=field_id, int_value=int_value, str_value=str_value,
      user_id=user_id, date_value=date_value, url_value=url_value,
      derived=derived, phase_id=phase_id)
| 144 | |
| 145 | |
def MakeTestIssue(
    project_id,
    local_id,
    summary,
    status,
    owner_id,
    labels=None,
    derived_labels=None,
    derived_status=None,
    merged_into=0,
    star_count=0,
    derived_owner_id=0,
    issue_id=None,
    reporter_id=None,
    opened_timestamp=None,
    closed_timestamp=None,
    modified_timestamp=None,
    migration_modified_timestamp=None,
    is_spam=False,
    component_ids=None,
    project_name=None,
    field_values=None,
    cc_ids=None,
    derived_cc_ids=None,
    assume_stale=True,
    phases=None,
    approval_values=None,
    merged_into_external=None,
    attachment_count=0,
    derived_component_ids=None):
  """Easily make an Issue for testing.

  labels and derived_labels may be passed either as lists or as single
  whitespace-separated strings. A falsy issue_id is derived from local_id,
  and a falsy reporter_id defaults to owner_id.
  """
  issue = tracker_pb2.Issue()
  issue.project_id = project_id
  issue.project_name = project_name
  issue.local_id = local_id
  # Default the global issue_id to a value well above plausible local IDs.
  issue.issue_id = issue_id if issue_id else 100000 + local_id
  issue.reporter_id = reporter_id if reporter_id else owner_id
  issue.summary = summary
  issue.status = status
  issue.owner_id = owner_id
  issue.derived_owner_id = derived_owner_id
  issue.star_count = star_count
  issue.merged_into = merged_into
  issue.merged_into_external = merged_into_external
  issue.is_spam = is_spam
  issue.attachment_count = attachment_count
  if cc_ids:
    issue.cc_ids = cc_ids
  if derived_cc_ids:
    issue.derived_cc_ids = derived_cc_ids
  issue.assume_stale = assume_stale
  if opened_timestamp:
    # Treat owner/status/component as last modified at opening time.
    issue.opened_timestamp = opened_timestamp
    issue.owner_modified_timestamp = opened_timestamp
    issue.status_modified_timestamp = opened_timestamp
    issue.component_modified_timestamp = opened_timestamp
  if modified_timestamp:
    issue.modified_timestamp = modified_timestamp
    # By default, make migration_modified_timestamp the same as
    # modified_timestamp
    issue.migration_modified_timestamp = modified_timestamp
  # An explicit migration_modified_timestamp overrides the default above.
  if migration_modified_timestamp:
    issue.migration_modified_timestamp = migration_modified_timestamp
  if closed_timestamp:
    issue.closed_timestamp = closed_timestamp
  if labels is not None:
    if isinstance(labels, string_types):
      labels = labels.split()
    issue.labels.extend(labels)
  if derived_labels is not None:
    if isinstance(derived_labels, string_types):
      derived_labels = derived_labels.split()
    issue.derived_labels.extend(derived_labels)
  if derived_status is not None:
    issue.derived_status = derived_status
  if component_ids is not None:
    issue.component_ids = component_ids
  if derived_component_ids is not None:
    issue.derived_component_ids = derived_component_ids
  if field_values is not None:
    issue.field_values = field_values
  if phases is not None:
    issue.phases = phases
  if approval_values is not None:
    issue.approval_values = approval_values
  return issue
| 232 | |
| 233 | |
def MakeTestComponentDef(project_id, comp_id, path='', cc_ids=None):
  """Return a ComponentDef PB for tests with mostly-empty fields."""
  effective_cc_ids = [] if cc_ids is None else cc_ids
  return tracker_bizobj.MakeComponentDef(
      comp_id, project_id, path, '', False, [], effective_cc_ids, None, None)
| 239 | |
| 240 | |
def MakeTestConfig(project_id, labels, statuses):
  """Convenient function to make a ProjectIssueConfig object.

  labels and statuses may each be given either as a list or as a single
  whitespace-separated string.
  """
  config = tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)
  label_list = labels.split() if isinstance(labels, string_types) else labels
  status_list = (
      statuses.split() if isinstance(statuses, string_types) else statuses)
  config.well_known_labels = [
      tracker_pb2.LabelDef(label=item) for item in label_list]
  config.well_known_statuses = [
      tracker_pb2.StatusDef(status=item) for item in status_list]
  return config
| 253 | |
| 254 | |
class MonorailConnection(object):
  """Fake database connection that treats commit and close as no-ops."""

  def Commit(self):
    """Pretend to commit the current transaction; nothing to do."""
    return None

  def Close(self):
    """Pretend to close the connection; nothing to do."""
    return None
| 263 | |
| 264 | |
class MonorailRequest(monorailrequest.MonorailRequest):
  """Subclass of MonorailRequest suitable for testing."""

  def __init__(self, services, user_info=None, project=None, perms=None,
               hotlist=None, **kwargs):
    """Construct a test MonorailRequest.

    Typically, this is constructed via testing.helpers.GetRequestObjects,
    which also causes url parsing and optionally initializes the user,
    project, and permissions info.

    Args:
      services: connections to backends.
      user_info: a dict of user attributes to set on a MonorailRequest object.
        For example, "user_id: 5" causes self.auth.user_id=5.
      project: the Project pb for this request.
      perms: a PermissionSet for this request.
    """
    super(MonorailRequest, self).__init__(services, **kwargs)

    # Copy each requested attribute onto the auth object.
    for key, value in (user_info or {}).items():
      setattr(self.auth, key, value)
    if user_info and 'user_id' in user_info:
      self.auth.effective_ids = {user_info['user_id']}

    # Tests default to full admin permissions unless told otherwise.
    self.perms = perms or permissions.ADMIN_PERMISSIONSET
    self.profiler = profiler.Profiler()
    self.project = project
    self.hotlist = hotlist
    if hotlist is not None:
      self.hotlist_id = hotlist.hotlist_id
| 297 | |
class UserGroupService(object):
  """Fake UserGroupService class for testing other code."""

  def __init__(self):
    # Test-only sequence of expunged users.
    self.expunged_users_in_groups = []

    # {group_id: settings PB from usergroup_pb2.MakeSettings}
    self.group_settings = {}
    # {group_id: [user_id, ...]} — direct members and owners together.
    self.group_members = {}
    # {group_id: group email address}
    self.group_addrs = {}
    # {group_id: {user_id: 'member' or 'owner'}}
    self.role_dict = {}

  def TestAddGroupSettings(
      self,
      group_id,
      email,
      who_can_view=None,
      anyone_can_join=False,
      who_can_add=None,
      external_group_type=None,
      last_sync_time=0,
      friend_projects=None,
      notify_members=True,
      notify_group=False):
    """Set up a fake group for testing.

    Args:
      group_id: int user ID of the new user group.
      email: string email address to identify the user group.
      who_can_view: string enum 'owners', 'members', or 'anyone'.
      anyone_can_join: optional boolean to allow any users to join the group.
      who_can_add: optional list of int user IDs of users who can add
          more members to the group.
      notify_members: optional boolean for if emails to this group should be
          sent directly to members.
      notify_group: optional boolean for if emails to this group should be
          sent directly to the group email.
    """
    friend_projects = friend_projects or []
    group_settings = usergroup_pb2.MakeSettings(
        who_can_view or 'members', external_group_type, last_sync_time,
        friend_projects, notify_members, notify_group)
    self.group_settings[group_id] = group_settings
    self.group_addrs[group_id] = email
    # TODO(jrobbins): store the other settings.

  def TestAddMembers(self, group_id, user_ids, role='member'):
    """Test-only helper: add user_ids to the group with the given role."""
    self.group_members.setdefault(group_id, []).extend(user_ids)
    for user_id in user_ids:
      self.role_dict.setdefault(group_id, {})[user_id] = role

  def LookupAllMemberships(self, _cnxn, user_ids, use_cache=True):
    """Return {user_id: set of group_ids that directly contain the user}."""
    return {
        user_id: self.LookupMemberships(_cnxn, user_id)
        for user_id in user_ids
    }

  def LookupMemberships(self, _cnxn, user_id):
    """Return the set of group_ids that directly contain user_id."""
    memberships = {
        group_id for group_id, member_ids in self.group_members.items()
        if user_id in member_ids}
    return memberships

  def DetermineWhichUserIDsAreGroups(self, _cnxn, user_ids):
    """Return the subset of user_ids that are group IDs."""
    return [uid for uid in user_ids
            if uid in self.group_settings]

  def GetAllUserGroupsInfo(self, cnxn):
    """Return [(email, member_count, settings, group_id)] for every group."""
    infos = []
    for group_id in self.group_settings:
      infos.append(
          (self.group_addrs[group_id],
           len(self.group_members.get(group_id, [])),
           self.group_settings[group_id], group_id))

    return infos

  def GetAllGroupSettings(self, _cnxn, group_ids):
    """Return {group_id: settings} for only the IDs that are real groups."""
    return {gid: self.group_settings[gid]
            for gid in group_ids
            if gid in self.group_settings}

  def GetGroupSettings(self, cnxn, group_id):
    """Return the settings PB for group_id, or None if it is not a group."""
    return self.GetAllGroupSettings(cnxn, [group_id]).get(group_id)

  def CreateGroup(self, cnxn, services, email, who_can_view_members,
                  ext_group_type=None, friend_projects=None):
    """Create a fake user group and return its new group_id."""
    friend_projects = friend_projects or []
    # A group is backed by a user row; autocreate it to obtain an ID.
    group_id = services.user.LookupUserID(
        cnxn, email, autocreate=True, allowgroups=True)
    self.group_addrs[group_id] = email
    group_settings = usergroup_pb2.MakeSettings(
        who_can_view_members, ext_group_type, 0, friend_projects)
    self.UpdateSettings(cnxn, group_id, group_settings)
    return group_id

  def DeleteGroups(self, cnxn, group_ids):
    """Remove the given groups' direct members/owners and settings.

    NOTE(review): group_addrs and role_dict entries are left behind —
    confirm whether deleted groups should also drop those.
    """
    member_ids_dict, owner_ids_dict = self.LookupMembers(cnxn, group_ids)
    citizens_id_dict = collections.defaultdict(list)
    for g_id, user_ids in member_ids_dict.items():
      citizens_id_dict[g_id].extend(user_ids)
    for g_id, user_ids in owner_ids_dict.items():
      citizens_id_dict[g_id].extend(user_ids)
    for g_id, citizen_ids in citizens_id_dict.items():
      # Remove group members, friend projects and settings
      self.RemoveMembers(cnxn, g_id, citizen_ids)
      self.group_settings.pop(g_id, None)

  def LookupComputedMemberships(self, cnxn, domain, use_cache=True):
    """Return [group_id] for the computed everyone@domain group, if any."""
    group_email = 'everyone@%s' % domain
    group_id = self.LookupUserGroupID(cnxn, group_email, use_cache=use_cache)
    if group_id:
      return [group_id]

    return []

  def LookupUserGroupID(self, cnxn, group_email, use_cache=True):
    """Return the group_id registered under group_email, or None."""
    for group_id in self.group_settings:
      if group_email == self.group_addrs.get(group_id):
        return group_id
    return None

  def LookupMembers(self, _cnxn, group_id_list):
    """Return ({group_id: [member_ids]}, {group_id: [owner_ids]}).

    Only direct membership is considered; users whose recorded role is
    neither 'member' nor 'owner' are omitted from both dicts.
    """
    members_dict = {}
    owners_dict = {}
    for gid in group_id_list:
      members_dict[gid] = []
      owners_dict[gid] = []
      for mid in self.group_members.get(gid, []):
        if self.role_dict.get(gid, {}).get(mid) == 'owner':
          owners_dict[gid].append(mid)
        elif self.role_dict.get(gid, {}).get(mid) == 'member':
          members_dict[gid].append(mid)
    return members_dict, owners_dict

  def LookupAllMembers(self, _cnxn, group_id_list):
    """Like LookupMembers, but also expands nested groups transitively.

    NOTE(review): assumes group nesting is acyclic — a membership cycle
    would make the inner while loop spin forever.
    """
    direct_members, direct_owners = self.LookupMembers(
        _cnxn, group_id_list)
    members_dict = {}
    owners_dict = {}
    for gid in group_id_list:
      members = direct_members[gid]
      owners = direct_owners[gid]
      owners_dict[gid] = owners
      members_dict[gid] = members
      # Any direct member/owner that is itself a group must be expanded.
      group_ids = set([uid for uid in members + owners
                       if uid in self.group_settings])
      while group_ids:
        indirect_members, indirect_owners = self.LookupMembers(
            _cnxn, group_ids)
        child_members = set()
        child_owners = set()
        for _, children in indirect_members.items():
          child_members.update(children)
        for _, children in indirect_owners.items():
          child_owners.update(children)
        members_dict[gid].extend(list(child_members))
        owners_dict[gid].extend(list(child_owners))
        # Recurse one level deeper: children that are themselves groups.
        group_ids = set(self.DetermineWhichUserIDsAreGroups(
            _cnxn, list(child_members) + list(child_owners)))
      # De-dup members (owners are left as accumulated).
      members_dict[gid] = list(set(members_dict[gid]))
    return members_dict, owners_dict


  def RemoveMembers(self, _cnxn, group_id, old_member_ids):
    """Drop old_member_ids from the group's direct membership list."""
    current_member_ids = self.group_members.get(group_id, [])
    revised_member_ids = [mid for mid in current_member_ids
                          if mid not in old_member_ids]
    self.group_members[group_id] = revised_member_ids

  def UpdateMembers(self, _cnxn, group_id, member_ids, new_role):
    """Re-add member_ids with new_role, replacing their old entries."""
    self.RemoveMembers(_cnxn, group_id, member_ids)
    self.TestAddMembers(group_id, member_ids, new_role)

  def UpdateSettings(self, _cnxn, group_id, group_settings):
    """Store the settings PB for group_id."""
    self.group_settings[group_id] = group_settings

  def ExpandAnyGroupEmailRecipients(self, cnxn, user_ids):
    """Split user_ids into direct recipients and expanded group members.

    Returns:
      (direct_ids, indirect_ids): direct_ids holds non-group users plus
      groups whose settings say to email the group address; indirect_ids
      holds members/owners of groups whose settings say to email members.
    """
    group_ids = set(self.DetermineWhichUserIDsAreGroups(cnxn, user_ids))
    group_settings_dict = self.GetAllGroupSettings(cnxn, group_ids)
    member_ids_dict, owner_ids_dict = self.LookupAllMembers(cnxn, group_ids)
    indirect_ids = set()
    direct_ids = {uid for uid in user_ids if uid not in group_ids}
    for gid, group_settings in group_settings_dict.items():
      if group_settings.notify_members:
        indirect_ids.update(member_ids_dict.get(gid, set()))
        indirect_ids.update(owner_ids_dict.get(gid, set()))
      if group_settings.notify_group:
        direct_ids.add(gid)

    return list(direct_ids), list(indirect_ids)

  def LookupVisibleMembers(
      self, cnxn, group_id_list, perms, effective_ids, services):
    """Return direct members/owners of only the groups the requester may view."""
    settings_dict = self.GetAllGroupSettings(cnxn, group_id_list)
    group_ids = list(settings_dict.keys())

    direct_member_ids_dict, direct_owner_ids_dict = self.LookupMembers(
        cnxn, group_ids)
    all_member_ids_dict, all_owner_ids_dict = self.LookupAllMembers(
        cnxn, group_ids)
    visible_member_ids_dict = {}
    visible_owner_ids_dict = {}
    for gid in group_ids:
      # Visibility is decided against the full transitive membership, but
      # only direct members/owners are returned for visible groups.
      member_ids = all_member_ids_dict[gid]
      owner_ids = all_owner_ids_dict[gid]
      if permissions.CanViewGroupMembers(
          perms, effective_ids, settings_dict[gid], member_ids, owner_ids, []):
        visible_member_ids_dict[gid] = direct_member_ids_dict[gid]
        visible_owner_ids_dict[gid] = direct_owner_ids_dict[gid]

    return visible_member_ids_dict, visible_owner_ids_dict

  def ValidateFriendProjects(self, cnxn, services, friend_projects):
    """Resolve a delimited string of project names to project IDs.

    Returns:
      (project_ids, None) when every named project exists, otherwise
      (None, error_message).
    """
    # Accept ';', ',', or whitespace (with optional trailing space) as
    # separators between project names.
    project_names = list(filter(None, re.split('; |, | |;|,', friend_projects)))
    id_dict = services.project.LookupProjectIDs(cnxn, project_names)
    missed_projects = []
    result = []
    for p_name in project_names:
      if p_name in id_dict:
        result.append(id_dict[p_name])
      else:
        missed_projects.append(p_name)
    error_msg = ''
    if missed_projects:
      error_msg = 'Project(s) %s do not exist' % ', '.join(missed_projects)
      return None, error_msg
    else:
      return result, None

  def ExpungeUsersInGroups(self, cnxn, ids):
    """Test-only: record that these users were expunged from all groups."""
    self.expunged_users_in_groups.extend(ids)
| 530 | |
| 531 | |
class CacheManager(object):
  """Fake cache manager that records invalidation calls instead of acting."""

  def __init__(self, invalidate_tbl=None):
    # Tuple describing the most recent invalidation call, for test asserts.
    self.last_call = None
    # {kind: [registered cache, ...]}
    self.cache_registry = collections.defaultdict(list)
    self.processed_invalidations_up_to = 0

  def RegisterCache(self, cache, kind):
    """Register a cache to be notified of future invalidations."""
    self.cache_registry[kind] += [cache]

  def DoDistributedInvalidation(self, cnxn):
    """Drop any cache entries that were invalidated by other jobs."""
    self.last_call = ('DoDistributedInvalidation', cnxn)

  def StoreInvalidateRows(self, cnxn, kind, keys):
    """Store database rows to let all frontends know to invalidate."""
    self.last_call = ('StoreInvalidateRows', cnxn, kind, keys)

  def StoreInvalidateAll(self, cnxn, kind):
    """Store a database row to let all frontends know to invalidate."""
    self.last_call = ('StoreInvalidateAll', cnxn, kind)
| 554 | |
| 555 | |
| 556 | |
| 557 | class UserService(object): |
| 558 | |
| 559 | def __init__(self): |
| 560 | """Creates a test-appropriate UserService object.""" |
| 561 | self.users_by_email = {} # {email: user_id, ...} |
| 562 | self.users_by_id = {} # {user_id: email, ...} |
| 563 | self.test_users = {} # {user_id: user_pb, ...} |
| 564 | self.visited_hotlists = {} # user_id:[(hotlist_id, viewed), ...] |
| 565 | self.invite_rows = [] # (parent_id, child_id) |
| 566 | self.linked_account_rows = [] # (parent_id, child_id) |
| 567 | self.prefs_dict = {} # {user_id: UserPrefs} |
| 568 | |
| 569 | def TestAddUser( |
| 570 | self, email, user_id, add_user=True, banned=False, obscure_email=True): |
| 571 | """Add a user to the fake UserService instance. |
| 572 | |
| 573 | Args: |
| 574 | email: Email of the user. |
| 575 | user_id: int user ID. |
| 576 | add_user: Flag whether user pb should be created, i.e. whether a |
| 577 | Monorail account should be created |
| 578 | banned: Boolean to set the user as banned |
| 579 | obscure_email: Boolean to determine whether to obscure the user's email. |
| 580 | |
| 581 | Returns: |
| 582 | The User PB that was added, or None. |
| 583 | """ |
| 584 | self.users_by_email[email] = user_id |
| 585 | self.users_by_id[user_id] = email |
| 586 | |
| 587 | user = None |
| 588 | if add_user: |
| 589 | user = user_pb2.MakeUser(user_id) |
| 590 | user.is_site_admin = False |
| 591 | user.email = email |
| 592 | user.obscure_email = obscure_email |
| 593 | if banned: |
| 594 | user.banned = 'is banned' |
| 595 | self.test_users[user_id] = user |
| 596 | |
| 597 | return user |
| 598 | |
| 599 | def GetUser(self, cnxn, user_id): |
| 600 | return self.GetUsersByIDs(cnxn, [user_id])[user_id] |
| 601 | |
| 602 | def _CreateUser(self, _cnxn, email): |
| 603 | if email in self.users_by_email: |
| 604 | return |
| 605 | user_id = framework_helpers.MurmurHash3_x86_32(email) |
| 606 | self.TestAddUser(email, user_id) |
| 607 | |
| 608 | def _CreateUsers(self, cnxn, emails): |
| 609 | for email in emails: |
| 610 | self._CreateUser(cnxn, email) |
| 611 | |
| 612 | def LookupUserID(self, cnxn, email, autocreate=False, allowgroups=False): |
| 613 | email_dict = self.LookupUserIDs( |
| 614 | cnxn, [email], autocreate=autocreate, allowgroups=allowgroups) |
| 615 | if email in email_dict: |
| 616 | return email_dict[email] |
| 617 | raise exceptions.NoSuchUserException('%r not found' % email) |
| 618 | |
| 619 | def GetUsersByIDs(self, cnxn, user_ids, use_cache=True, skip_missed=False): |
| 620 | user_dict = {} |
| 621 | for user_id in user_ids: |
| 622 | if user_id and self.test_users.get(user_id): |
| 623 | user_dict[user_id] = self.test_users[user_id] |
| 624 | elif not skip_missed: |
| 625 | user_dict[user_id] = user_pb2.MakeUser(user_id) |
| 626 | return user_dict |
| 627 | |
| 628 | def LookupExistingUserIDs(self, cnxn, emails): |
| 629 | email_dict = { |
| 630 | email: self.users_by_email[email] |
| 631 | for email in emails |
| 632 | if email in self.users_by_email} |
| 633 | return email_dict |
| 634 | |
  def LookupUserIDs(self, cnxn, emails, autocreate=False,
                    allowgroups=False):
    """Return {email: user_id} for the given email addresses.

    Emails are lower-cased first; falsy emails and "no value" placeholders
    (framework_constants.NO_VALUE_RE) are skipped. An unknown email is
    autocreated when autocreate is set and the address is valid; otherwise
    NoSuchUserException is raised. Note that with autocreate set, an
    invalid address is silently omitted from the result.
    """
    email_dict = {}
    needed_emails = [email.lower() for email in emails
                     if email
                     and not framework_constants.NO_VALUE_RE.match(email)]
    for email in needed_emails:
      user_id = self.users_by_email.get(email)
      if not user_id:
        if autocreate and validate.IsValidEmail(email):
          self._CreateUser(cnxn, email)
          user_id = self.users_by_email.get(email)
        elif not autocreate:
          raise exceptions.NoSuchUserException('%r' % email)
      if user_id:
        email_dict[email] = user_id
    return email_dict
| 652 | |
| 653 | def LookupUserEmail(self, _cnxn, user_id): |
| 654 | email = self.users_by_id.get(user_id) |
| 655 | if not email: |
| 656 | raise exceptions.NoSuchUserException('No user has ID %r' % user_id) |
| 657 | return email |
| 658 | |
| 659 | def LookupUserEmails(self, cnxn, user_ids, ignore_missed=False): |
| 660 | if ignore_missed: |
| 661 | user_dict = {} |
| 662 | for user_id in user_ids: |
| 663 | try: |
| 664 | user_dict[user_id] = self.LookupUserEmail(cnxn, user_id) |
| 665 | except exceptions.NoSuchUserException: |
| 666 | continue |
| 667 | return user_dict |
| 668 | user_dict = { |
| 669 | user_id: self.LookupUserEmail(cnxn, user_id) |
| 670 | for user_id in user_ids} |
| 671 | return user_dict |
| 672 | |
| 673 | def UpdateUser(self, _cnxn, user_id, user): |
| 674 | """Updates the user pb.""" |
| 675 | self.test_users[user_id] = user |
| 676 | |
| 677 | def UpdateUserBan(self, _cnxn, user_id, user, is_banned=None, |
| 678 | banned_reason=None): |
| 679 | """Updates the user pb.""" |
| 680 | self.test_users[user_id] = user |
| 681 | user.banned = banned_reason if is_banned else '' |
| 682 | |
| 683 | def GetPendingLinkedInvites(self, cnxn, user_id): |
| 684 | invite_as_parent = [row[1] for row in self.invite_rows |
| 685 | if row[0] == user_id] |
| 686 | invite_as_child = [row[0] for row in self.invite_rows |
| 687 | if row[1] == user_id] |
| 688 | return invite_as_parent, invite_as_child |
| 689 | |
| 690 | def InviteLinkedParent(self, cnxn, parent_id, child_id): |
| 691 | self.invite_rows.append((parent_id, child_id)) |
| 692 | |
| 693 | def AcceptLinkedChild(self, cnxn, parent_id, child_id): |
| 694 | if (parent_id, child_id) not in self.invite_rows: |
| 695 | raise exceptions.InputException('No such invite') |
| 696 | self.linked_account_rows.append((parent_id, child_id)) |
| 697 | self.invite_rows = [ |
| 698 | (p_id, c_id) for (p_id, c_id) in self.invite_rows |
| 699 | if p_id != parent_id and c_id != child_id] |
| 700 | self.GetUser(cnxn, parent_id).linked_child_ids.append(child_id) |
| 701 | self.GetUser(cnxn, child_id).linked_parent_id = parent_id |
| 702 | |
| 703 | def UnlinkAccounts(self, _cnxn, parent_id, child_id): |
| 704 | """Delete a linked-account relationship.""" |
| 705 | if not parent_id: |
| 706 | raise exceptions.InputException('Parent account is missing') |
| 707 | if not child_id: |
| 708 | raise exceptions.InputException('Child account is missing') |
| 709 | self.linked_account_rows = [(p, c) for (p, c) in self.linked_account_rows |
| 710 | if (p, c) != (parent_id, child_id)] |
| 711 | |
  def UpdateUserSettings(
      self, cnxn, user_id, user, notify=None, notify_starred=None,
      email_compact_subject=None, email_view_widget=None,
      notify_starred_ping=None, obscure_email=None, after_issue_update=None,
      is_site_admin=None, is_banned=None, banned_reason=None,
      keep_people_perms_open=None, preview_on_hover=None,
      vacation_message=None):
    """Update the given fields of the user PB, then store it.

    Every keyword argument that is not None overwrites the corresponding
    field on user; None arguments leave the field untouched. Returns the
    result of UpdateUser (which stores the PB).
    """
    # notifications
    if notify is not None:
      user.notify_issue_change = notify
    if notify_starred is not None:
      user.notify_starred_issue_change = notify_starred
    if notify_starred_ping is not None:
      user.notify_starred_ping = notify_starred_ping
    if email_compact_subject is not None:
      user.email_compact_subject = email_compact_subject
    if email_view_widget is not None:
      user.email_view_widget = email_view_widget

    # display options
    if after_issue_update is not None:
      user.after_issue_update = user_pb2.IssueUpdateNav(after_issue_update)
    if preview_on_hover is not None:
      user.preview_on_hover = preview_on_hover
    if keep_people_perms_open is not None:
      user.keep_people_perms_open = keep_people_perms_open

    # misc
    if obscure_email is not None:
      user.obscure_email = obscure_email

    # admin
    if is_site_admin is not None:
      user.is_site_admin = is_site_admin
    if is_banned is not None:
      if is_banned:
        user.banned = banned_reason or 'No reason given'
      else:
        # Clear the banned field back to its default value.
        user.reset('banned')

    # user availability
    if vacation_message is not None:
      user.vacation_message = vacation_message

    return self.UpdateUser(cnxn, user_id, user)
| 757 | |
| 758 | def GetUsersPrefs(self, cnxn, user_ids, use_cache=True): |
| 759 | for user_id in user_ids: |
| 760 | if user_id not in self.prefs_dict: |
| 761 | self.prefs_dict[user_id] = user_pb2.UserPrefs(user_id=user_id) |
| 762 | return self.prefs_dict |
| 763 | |
| 764 | def GetUserPrefs(self, cnxn, user_id, use_cache=True): |
| 765 | """Return a UserPrefs PB for the requested user ID.""" |
| 766 | prefs_dict = self.GetUsersPrefs(cnxn, [user_id], use_cache=use_cache) |
| 767 | return prefs_dict[user_id] |
| 768 | |
| 769 | def GetUserPrefsByEmail(self, cnxn, email, use_cache=True): |
| 770 | """Return a UserPrefs PB for the requested email, or an empty UserPrefs.""" |
| 771 | try: |
| 772 | user_id = self.LookupUserID(cnxn, email) |
| 773 | user_prefs = self.GetUserPrefs(cnxn, user_id, use_cache=use_cache) |
| 774 | except exceptions.NoSuchUserException: |
| 775 | user_prefs = user_pb2.UserPrefs() |
| 776 | return user_prefs |
| 777 | |
| 778 | def SetUserPrefs(self, cnxn, user_id, pref_values): |
| 779 | userprefs = self.GetUserPrefs(cnxn, user_id) |
| 780 | names_to_overwrite = {upv.name for upv in pref_values} |
| 781 | userprefs.prefs = [upv for upv in userprefs.prefs |
| 782 | if upv.name not in names_to_overwrite] |
| 783 | userprefs.prefs.extend(pref_values) |
| 784 | |
| 785 | def ExpungeUsers(self, cnxn, user_ids): |
| 786 | for user_id in user_ids: |
| 787 | self.test_users.pop(user_id, None) |
| 788 | self.prefs_dict.pop(user_id, None) |
| 789 | email = self.users_by_id.pop(user_id, None) |
| 790 | if email: |
| 791 | self.users_by_email.pop(email, None) |
| 792 | |
| 793 | self.invite_rows = [row for row in self.invite_rows |
| 794 | if row[0] not in user_ids and row[1] not in user_ids] |
| 795 | self.linked_account_rows = [ |
| 796 | row for row in self.linked_account_rows |
| 797 | if row[0] not in user_ids and row[1] not in user_ids] |
| 798 | |
| 799 | def TotalUsersCount(self, cnxn): |
| 800 | return len(self.users_by_id) - 1 if ( |
| 801 | framework_constants.DELETED_USER_ID in self.users_by_id |
| 802 | ) else len(self.users_by_id) |
| 803 | |
| 804 | def GetAllUserEmailsBatch(self, cnxn, limit=1000, offset=0): |
| 805 | sorted_user_ids = sorted(self.users_by_id.keys()) |
| 806 | sorted_user_ids = [ |
| 807 | user_id for user_id in sorted_user_ids |
| 808 | if user_id != framework_constants.DELETED_USER_ID] |
| 809 | emails = [] |
| 810 | for i in range(offset, offset + limit): |
| 811 | try: |
| 812 | user_id = sorted_user_ids[i] |
| 813 | if user_id != framework_constants.DELETED_USER_ID: |
| 814 | emails.append(self.users_by_id[user_id]) |
| 815 | except IndexError: |
| 816 | break |
| 817 | return emails |
| 818 | |
| 819 | def GetRecentlyVisitedHotlists(self, _cnxn, user_id): |
| 820 | try: |
| 821 | return self.visited_hotlists[user_id] |
| 822 | except KeyError: |
| 823 | return [] |
| 824 | |
| 825 | def AddVisitedHotlist(self, _cnxn, user_id, hotlist_id, commit=True): |
| 826 | try: |
| 827 | user_visited_tuples = self.visited_hotlists[user_id] |
| 828 | self.visited_hotlists[user_id] = [ |
| 829 | hid for hid in user_visited_tuples if hid != hotlist_id] |
| 830 | except KeyError: |
| 831 | self.visited_hotlists[user_id] = [] |
| 832 | self.visited_hotlists[user_id].append(hotlist_id) |
| 833 | |
| 834 | def ExpungeUsersHotlistsHistory(self, cnxn, user_ids, commit=True): |
| 835 | for user_id in user_ids: |
| 836 | self.visited_hotlists.pop(user_id, None) |
| 837 | |
| 838 | |
class AbstractStarService(object):
  """Fake StarService keeping stars in two parallel in-RAM indexes."""

  def __init__(self):
    self.stars_by_item_id = {}     # item_id -> [user_id, ...]
    self.stars_by_starrer_id = {}  # user_id -> [item_id, ...]
    self.expunged_item_ids = []    # item IDs passed to ExpungeStars.

  def ExpungeStars(self, _cnxn, item_id, commit=True, limit=None):
    """Remove all stars on item_id from both indexes."""
    self.expunged_item_ids.append(item_id)
    former_starrers = self.stars_by_item_id.get(item_id, [])
    self.stars_by_item_id[item_id] = []
    for starrer in former_starrers:
      starred_items = self.stars_by_starrer_id.get(starrer)
      if starred_items:
        self.stars_by_starrer_id[starrer] = [
            starred for starred in starred_items if starred != item_id]

  def ExpungeStarsByUsers(self, _cnxn, user_ids, limit=None):
    """Remove every star set by any of the given users."""
    for doomed_user_id in user_ids:
      for item_id in self.stars_by_starrer_id.pop(doomed_user_id, []):
        current_starrers = self.stars_by_item_id.get(item_id, None)
        if current_starrers:
          self.stars_by_item_id[item_id] = [
              uid for uid in current_starrers if uid != doomed_user_id]

  def LookupItemStarrers(self, _cnxn, item_id):
    """Return the user IDs that have starred the given item."""
    return self.stars_by_item_id.get(item_id, [])

  def LookupItemsStarrers(self, cnxn, item_ids):
    """Return {item_id: [starrer_id, ...]} for each requested item."""
    result = {}
    for item_id in item_ids:
      result[item_id] = self.LookupItemStarrers(cnxn, item_id)
    return result

  def LookupStarredItemIDs(self, _cnxn, starrer_user_id):
    """Return the item IDs that the given user has starred."""
    return self.stars_by_starrer_id.get(starrer_user_id, [])

  def IsItemStarredBy(self, cnxn, item_id, starrer_user_id):
    """Return True if the given user has starred the given item."""
    return item_id in self.LookupStarredItemIDs(cnxn, starrer_user_id)

  def CountItemStars(self, cnxn, item_id):
    """Return the number of users who have starred the given item."""
    return len(self.LookupItemStarrers(cnxn, item_id))

  def CountItemsStars(self, cnxn, item_ids):
    """Return {item_id: star_count} for each requested item."""
    return {
        item_id: self.CountItemStars(cnxn, item_id) for item_id in item_ids}

  def _SetStar(self, cnxn, item_id, starrer_user_id, starred):
    """Add or remove one star, keeping both indexes in sync."""
    already_starred = self.IsItemStarredBy(cnxn, item_id, starrer_user_id)
    if starred and not already_starred:
      self.stars_by_item_id.setdefault(item_id, []).append(starrer_user_id)
      self.stars_by_starrer_id.setdefault(starrer_user_id, []).append(item_id)
    elif already_starred and not starred:
      self.stars_by_item_id[item_id].remove(starrer_user_id)
      self.stars_by_starrer_id[starrer_user_id].remove(item_id)

  def SetStar(self, cnxn, item_id, starrer_user_id, starred):
    """Set or unset a star for the given item and user."""
    self._SetStar(cnxn, item_id, starrer_user_id, starred)

  def SetStarsBatch(
      self, cnxn, item_id, starrer_user_ids, starred, commit=True):
    """Set or unset stars on one item for each of several users."""
    for one_starrer_id in starrer_user_ids:
      self._SetStar(cnxn, item_id, one_starrer_id, starred)
| 902 | |
| 903 | |
class UserStarService(AbstractStarService):
  """Fake UserStarService: stars on user accounts, no extra behavior."""
  pass
| 906 | |
| 907 | |
class ProjectStarService(AbstractStarService):
  """Fake ProjectStarService: stars on projects, no extra behavior."""
  pass
| 910 | |
| 911 | |
class HotlistStarService(AbstractStarService):
  """Fake HotlistStarService: stars on hotlists, no extra behavior."""
  pass
| 914 | |
| 915 | |
class IssueStarService(AbstractStarService):
  """Fake IssueStarService that also maintains each issue's star_count."""

  # pylint: disable=arguments-differ
  def SetStar(
      self, cnxn, services, _config, issue_id, starrer_user_id,
      starred):
    """Star/unstar an issue and adjust its cached star_count to match."""
    super(IssueStarService, self).SetStar(
        cnxn, issue_id, starrer_user_id, starred)
    count_delta = 1 if starred else -1
    try:
      services.issue.GetIssue(cnxn, issue_id).star_count += count_delta
    except exceptions.NoSuchIssueException:
      # A star on a nonexistent issue has no count to update.
      pass

  # pylint: disable=arguments-differ
  def SetStarsBatch(
      self, cnxn, _service, _config, issue_id, starrer_user_ids,
      starred):
    """Star/unstar one issue on behalf of several users."""
    super(IssueStarService, self).SetStarsBatch(
        cnxn, issue_id, starrer_user_ids, starred)

  def SetStarsBatch_SkipIssueUpdate(
      self, cnxn, issue_id, starrer_user_ids, starred, commit=True):
    """Set stars without touching any issue's star_count field."""
    super(IssueStarService, self).SetStarsBatch(
        cnxn, issue_id, starrer_user_ids, starred)
| 941 | |
| 942 | |
class ProjectService(object):
  """Fake ProjectService object.

  Provides methods for creating users and projects, which are accessible
  through parts of the real ProjectService interface.
  """

  def __init__(self):
    self.test_projects = {}  # project_name -> project_pb
    self.projects_by_id = {}  # project_id -> project_pb
    self.test_star_manager = None
    self.indexed_projects = {}
    self.unindexed_projects = set()
    self.index_counter = 0
    self.project_commitments = {}  # project_id -> ProjectCommitments PB
    self.ac_exclusion_ids = {}  # project_id -> list of user IDs
    self.no_expand_ids = {}  # project_id -> list of user IDs

  def TestAddProject(
      self, name, summary='', state=project_pb2.ProjectState.LIVE,
      owner_ids=None, committer_ids=None, contrib_ids=None,
      issue_notify_address=None, state_reason='', description=None,
      project_id=None, process_inbound_email=None, access=None,
      extra_perms=None):
    """Add a project to the fake ProjectService object.

    Args:
      name: The name of the project. Will replace any existing project under
        the same name.
      summary: The summary string of the project.
      state: Initial state for the project from project_pb2.ProjectState.
      owner_ids: List of user ids for project owners
      committer_ids: List of user ids for project committers
      contrib_ids: List of user ids for project contributors
      issue_notify_address: email address to send issue change notifications
      state_reason: string describing the reason the project is in its current
        state.
      description: The description string for this project
      project_id: A unique integer identifier for the created project.
      process_inbound_email: True to make this project accept inbound email.
      access: One of the values of enum project_pb2.ProjectAccess.
      extra_perms: List of ExtraPerms PBs for project members.

    Returns:
      A populated project PB.
    """
    proj_pb = project_pb2.Project()
    # Default the ID to a value derived from the name so repeated calls
    # with the same name reuse the same ID within one process.
    proj_pb.project_id = project_id or hash(name) % 100000
    proj_pb.project_name = name
    proj_pb.summary = summary
    proj_pb.state = state
    proj_pb.state_reason = state_reason
    proj_pb.extra_perms = extra_perms or []
    if description is not None:
      proj_pb.description = description

    self.TestAddProjectMembers(owner_ids, proj_pb, OWNER_ROLE)
    self.TestAddProjectMembers(committer_ids, proj_pb, COMMITTER_ROLE)
    self.TestAddProjectMembers(contrib_ids, proj_pb, CONTRIBUTOR_ROLE)

    if issue_notify_address is not None:
      proj_pb.issue_notify_address = issue_notify_address
    if process_inbound_email is not None:
      proj_pb.process_inbound_email = process_inbound_email
    if access is not None:
      proj_pb.access = access

    self.test_projects[name] = proj_pb
    self.projects_by_id[proj_pb.project_id] = proj_pb
    return proj_pb

  def TestAddProjectMembers(self, user_id_list, proj_pb, role):
    """Append the given user IDs to the project role field chosen by role."""
    if user_id_list is not None:
      for user_id in user_id_list:
        if role == OWNER_ROLE:
          proj_pb.owner_ids.append(user_id)
        elif role == COMMITTER_ROLE:
          proj_pb.committer_ids.append(user_id)
        elif role == CONTRIBUTOR_ROLE:
          proj_pb.contributor_ids.append(user_id)

  def LookupProjectIDs(self, cnxn, project_names):
    """Return {project_name: project_id} for names that are known."""
    return {
        project_name: self.test_projects[project_name].project_id
        for project_name in project_names
        if project_name in self.test_projects}

  def LookupProjectNames(self, cnxn, project_ids):
    """Return {project_id: project_name} for the given project IDs."""
    projects_dict = self.GetProjects(cnxn, project_ids)
    return {p.project_id: p.project_name
            for p in projects_dict.values()}

  def CreateProject(
      self, _cnxn, project_name, owner_ids, committer_ids,
      contributor_ids, summary, description,
      state=project_pb2.ProjectState.LIVE, access=None,
      read_only_reason=None,
      home_page=None, docs_url=None, source_url=None,
      logo_gcs_id=None, logo_file_name=None):
    """Create and store a Project with the given attributes."""
    if project_name in self.test_projects:
      raise exceptions.ProjectAlreadyExists()
    # NOTE: read_only_reason, home_page, docs_url, source_url, and the
    # logo arguments are accepted but not stored by this fake.
    project = self.TestAddProject(
        project_name, summary=summary, state=state,
        owner_ids=owner_ids, committer_ids=committer_ids,
        contrib_ids=contributor_ids, description=description,
        access=access)
    return project.project_id

  def ExpungeProject(self, _cnxn, project_id):
    """Remove the project from the by-name index.

    NOTE(review): the project entry is left in projects_by_id, so lookups
    by ID still succeed after expunging — confirm this is intended.
    """
    project = self.projects_by_id.get(project_id)
    if project:
      self.test_projects.pop(project.project_name, None)

  def GetProjectsByName(self, _cnxn, project_name_list, use_cache=True):
    """Return {project_name: project_pb} for names that are known."""
    return {
        pn: self.test_projects[pn] for pn in project_name_list
        if pn in self.test_projects}

  def GetProjectByName(self, _cnxn, name, use_cache=True):
    """Return the project PB for the given name, or None if unknown."""
    return self.test_projects.get(name)

  def GetProjectList(self, cnxn, project_id_list, use_cache=True):
    """Return a list of project PBs in the order of the requested IDs."""
    project_dict = self.GetProjects(cnxn, project_id_list, use_cache=use_cache)
    return [project_dict[pid] for pid in project_id_list
            if pid in project_dict]

  def GetVisibleProjects(
      self, _cnxn, logged_in_user, effective_ids, domain=None, use_cache=True):
    """Return IDs of projects the user may view, in ascending ID order."""
    project_ids = sorted(self.projects_by_id.keys())
    visible_project_ids = []
    for pid in project_ids:
      can_view = permissions.UserCanViewProject(
          logged_in_user, effective_ids, self.projects_by_id[pid])
      # Skip projects that would need to be served from a different domain.
      different_domain = framework_helpers.GetNeededDomain(
          self.projects_by_id[pid].project_name, domain)
      if can_view and not different_domain:
        visible_project_ids.append(pid)

    return visible_project_ids

  def GetProjects(self, _cnxn, project_ids, use_cache=True):
    """Return {project_id: project_pb}.

    Raises:
      exceptions.NoSuchProjectException: any requested ID is unknown.
    """
    result = {}
    for project_id in project_ids:
      project = self.projects_by_id.get(project_id)
      if project:
        result[project_id] = project
      else:
        raise exceptions.NoSuchProjectException(project_id)
    return result

  def GetAllProjects(self, _cnxn, use_cache=True):
    """Return {project_id: project_pb} for every stored project."""
    result = {}
    for project_id in self.projects_by_id:
      project = self.projects_by_id.get(project_id)
      result[project_id] = project
    return result


  def GetProject(self, cnxn, project_id, use_cache=True):
    """Load the specified project from the database."""
    project_id_dict = self.GetProjects(cnxn, [project_id], use_cache=use_cache)
    if project_id not in project_id_dict:
      raise exceptions.NoSuchProjectException()
    return project_id_dict[project_id]

  def GetProjectCommitments(self, _cnxn, project_id):
    """Return stored ProjectCommitments, or an empty PB for the project."""
    if project_id in self.project_commitments:
      return self.project_commitments[project_id]

    project_commitments = project_pb2.ProjectCommitments()
    project_commitments.project_id = project_id
    return project_commitments

  def TestStoreProjectCommitments(self, project_commitments):
    """Store commitments to be returned by GetProjectCommitments."""
    key = project_commitments.project_id
    self.project_commitments[key] = project_commitments

  def GetProjectAutocompleteExclusion(self, cnxn, project_id):
    """Return (excluded user IDs, no-expand user IDs) for the project."""
    return (self.ac_exclusion_ids.get(project_id, []),
            self.no_expand_ids.get(project_id, []))

  def UpdateProject(
      self,
      _cnxn,
      project_id,
      summary=None,
      description=None,
      state=None,
      state_reason=None,
      access=None,
      issue_notify_address=None,
      attachment_bytes_used=None,
      attachment_quota=None,
      moved_to=None,
      process_inbound_email=None,
      only_owners_remove_restrictions=None,
      read_only_reason=None,
      cached_content_timestamp=None,
      only_owners_see_contributors=None,
      delete_time=None,
      recent_activity=None,
      revision_url_format=None,
      home_page=None,
      docs_url=None,
      source_url=None,
      logo_gcs_id=None,
      logo_file_name=None,
      issue_notify_always_detailed=None,
      commit=True):
    """Update fields of an existing project.

    Only read_only_reason and attachment_bytes_used are applied by this
    fake; all other arguments are accepted but ignored (see TODO below).

    Raises:
      exceptions.NoSuchProjectException: the project does not exist.
    """
    project = self.projects_by_id.get(project_id)
    if not project:
      raise exceptions.NoSuchProjectException(
          'Project "%s" not found!' % project_id)

    # TODO(jrobbins): implement all passed arguments - probably as a utility
    # method shared with the real persistence implementation.
    if read_only_reason is not None:
      project.read_only_reason = read_only_reason
    if attachment_bytes_used is not None:
      project.attachment_bytes_used = attachment_bytes_used

  def UpdateProjectRoles(
      self, _cnxn, project_id, owner_ids, committer_ids,
      contributor_ids, now=None):
    """Replace the project's owner, committer, and contributor ID lists.

    Raises:
      exceptions.NoSuchProjectException: the project does not exist.
    """
    project = self.projects_by_id.get(project_id)
    if not project:
      raise exceptions.NoSuchProjectException(
          'Project "%s" not found!' % project_id)

    project.owner_ids = owner_ids
    project.committer_ids = committer_ids
    project.contributor_ids = contributor_ids

  def MarkProjectDeletable(
      self, _cnxn, project_id, _config_service):
    """Rename the project and set its state to DELETABLE."""
    project = self.projects_by_id[project_id]
    project.project_name = 'DELETABLE_%d' % project_id
    project.state = project_pb2.ProjectState.DELETABLE

  def UpdateRecentActivity(self, _cnxn, _project_id, now=None):
    """No-op in this fake."""
    pass

  def GetUserRolesInAllProjects(self, _cnxn, effective_ids):
    """Return (owned, member, contrib) sets of project IDs for the user.

    Each project is counted at most once, under the highest role held by
    any of the effective IDs (owner > committer > contributor).
    """
    owned_project_ids = set()
    membered_project_ids = set()
    contrib_project_ids = set()

    for project in self.projects_by_id.values():
      if not effective_ids.isdisjoint(project.owner_ids):
        owned_project_ids.add(project.project_id)
      elif not effective_ids.isdisjoint(project.committer_ids):
        membered_project_ids.add(project.project_id)
      elif not effective_ids.isdisjoint(project.contributor_ids):
        contrib_project_ids.add(project.project_id)

    return owned_project_ids, membered_project_ids, contrib_project_ids

  def GetProjectMemberships(self, _cnxn, effective_ids, use_cache=True):
    # type: MonorailConnection, Collection[int], bool ->
    #     Mapping[int, Collection[int]]
    """Return {user_id: set of project IDs that user is a member of}."""
    projects_by_user_id = collections.defaultdict(set)

    for project in self.projects_by_id.values():
      member_ids = set(
          itertools.chain(
              project.owner_ids, project.committer_ids,
              project.contributor_ids))
      for user_id in effective_ids:
        if user_id in member_ids:
          projects_by_user_id[user_id].add(project.project_id)
    return projects_by_user_id

  def ExpungeUsersInProjects(self, cnxn, user_ids, limit=None):
    """Remove the given users from every project's role lists."""
    for project in self.projects_by_id.values():
      project.owner_ids = [owner_id for owner_id in project.owner_ids
                           if owner_id not in user_ids]
      project.committer_ids = [com_id for com_id in project.committer_ids
                               if com_id not in user_ids]
      project.contributor_ids = [con_id for con_id in project.contributor_ids
                                 if con_id not in user_ids]
| 1224 | |
| 1225 | |
| 1226 | class ConfigService(object): |
| 1227 | """Fake version of ConfigService that just works in-RAM.""" |
| 1228 | |
| 1229 | def __init__(self, user_id=None): |
| 1230 | self.project_configs = {} |
| 1231 | self.next_field_id = 123 |
| 1232 | self.next_component_id = 345 |
| 1233 | self.next_template_id = 23 |
| 1234 | self.expunged_configs = [] |
| 1235 | self.expunged_users_in_configs = [] |
| 1236 | self.component_ids_to_templates = {} |
| 1237 | self.label_to_id = {} |
| 1238 | self.id_to_label = {} |
| 1239 | self.strict = False # Set true to raise more exceptions like real class. |
| 1240 | |
| 1241 | def TestAddLabelsDict(self, label_to_id): |
| 1242 | self.label_to_id = label_to_id |
| 1243 | self.id_to_label = { |
| 1244 | label_id: label |
| 1245 | for label, label_id in list(self.label_to_id.items())} |
| 1246 | |
| 1247 | def TestAddFieldDef(self, fd): |
| 1248 | self.project_configs[fd.project_id].field_defs.append(fd) |
| 1249 | |
| 1250 | def TestAddApprovalDef(self, ad, project_id): |
| 1251 | self.project_configs[project_id].approval_defs.append(ad) |
| 1252 | |
| 1253 | def ExpungeConfig(self, _cnxn, project_id): |
| 1254 | self.expunged_configs.append(project_id) |
| 1255 | |
| 1256 | def ExpungeUsersInConfigs(self, _cnxn, user_ids, limit=None): |
| 1257 | self.expunged_users_in_configs.extend(user_ids) |
| 1258 | |
| 1259 | def GetLabelDefRows(self, cnxn, project_id, use_cache=True): |
| 1260 | """This always returns empty results. Mock it to test other cases.""" |
| 1261 | return [] |
| 1262 | |
| 1263 | def GetLabelDefRowsAnyProject(self, cnxn, where=None): |
| 1264 | """This always returns empty results. Mock it to test other cases.""" |
| 1265 | return [] |
| 1266 | |
| 1267 | def LookupLabel(self, cnxn, project_id, label_id): |
| 1268 | if label_id in self.id_to_label: |
| 1269 | return self.id_to_label[label_id] |
| 1270 | if label_id == 999: |
| 1271 | return None |
| 1272 | return 'label_%d_%d' % (project_id, label_id) |
| 1273 | |
Adrià Vilanova Martínez | f19ea43 | 2024-01-23 20:20:52 +0100 | [diff] [blame^] | 1274 | def LookupLabelID( |
| 1275 | self, cnxn, project_id, label, autocreate=True, case_sensitive=False): |
Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 1276 | if label in self.label_to_id: |
| 1277 | return self.label_to_id[label] |
Adrià Vilanova Martínez | f19ea43 | 2024-01-23 20:20:52 +0100 | [diff] [blame^] | 1278 | # TODO: The condition here is specifically added to return 'None' and |
| 1279 | # allow testing for label freezing. This can be removed after refactoring |
| 1280 | # other dependent tests to not fail for returning 'None' instead of '1' |
| 1281 | # when label is not found in 'label_to_id' dict. |
| 1282 | if label == 'freeze_new_label': |
| 1283 | return None |
Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 1284 | return 1 |
| 1285 | |
| 1286 | def LookupLabelIDs(self, cnxn, project_id, labels, autocreate=False): |
| 1287 | ids = [] |
| 1288 | next_label_id = 0 |
| 1289 | if self.id_to_label.keys(): |
Adrià Vilanova Martínez | f19ea43 | 2024-01-23 20:20:52 +0100 | [diff] [blame^] | 1290 | existing_ids = sorted(self.id_to_label.keys()) |
Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 1291 | next_label_id = existing_ids[-1] + 1 |
| 1292 | for label in labels: |
| 1293 | if self.label_to_id.get(label) is not None: |
| 1294 | ids.append(self.label_to_id[label]) |
| 1295 | elif autocreate: |
| 1296 | self.label_to_id[label] = next_label_id |
| 1297 | self.id_to_label[next_label_id] = label |
| 1298 | ids.append(next_label_id) |
| 1299 | next_label_id += 1 |
| 1300 | return ids |
| 1301 | |
| 1302 | def LookupIDsOfLabelsMatching(self, cnxn, project_id, regex): |
| 1303 | return [1, 2, 3] |
| 1304 | |
| 1305 | def LookupStatus(self, cnxn, project_id, status_id): |
| 1306 | return 'status_%d_%d' % (project_id, status_id) |
| 1307 | |
| 1308 | def LookupStatusID(self, cnxn, project_id, status, autocreate=True): |
| 1309 | if status: |
| 1310 | return 1 |
| 1311 | else: |
| 1312 | return 0 |
| 1313 | |
| 1314 | def LookupStatusIDs(self, cnxn, project_id, statuses): |
| 1315 | return [idx for idx, _status in enumerate(statuses)] |
| 1316 | |
| 1317 | def LookupClosedStatusIDs(self, cnxn, project_id): |
| 1318 | return [7, 8, 9] |
| 1319 | |
| 1320 | def StoreConfig(self, _cnxn, config): |
| 1321 | self.project_configs[config.project_id] = config |
| 1322 | |
| 1323 | def GetProjectConfig(self, _cnxn, project_id, use_cache=True): |
| 1324 | if project_id in self.project_configs: |
| 1325 | return self.project_configs[project_id] |
| 1326 | elif self.strict: |
| 1327 | raise exceptions.NoSuchProjectException() |
| 1328 | else: |
| 1329 | return tracker_bizobj.MakeDefaultProjectIssueConfig(project_id) |
| 1330 | |
| 1331 | def GetProjectConfigs(self, _cnxn, project_ids, use_cache=True): |
| 1332 | config_dict = {} |
| 1333 | for project_id in project_ids: |
| 1334 | if project_id in self.project_configs: |
| 1335 | config_dict[project_id] = self.project_configs[project_id] |
| 1336 | elif not self.strict: |
| 1337 | config_dict[project_id] = tracker_bizobj.MakeDefaultProjectIssueConfig( |
| 1338 | project_id) |
| 1339 | return config_dict |
| 1340 | |
| 1341 | def UpdateConfig( |
| 1342 | self, cnxn, project, well_known_statuses=None, |
| 1343 | statuses_offer_merge=None, well_known_labels=None, |
| 1344 | excl_label_prefixes=None, default_template_for_developers=None, |
| 1345 | default_template_for_users=None, list_prefs=None, restrict_to_known=None, |
| 1346 | approval_defs=None): |
| 1347 | project_id = project.project_id |
| 1348 | project_config = self.GetProjectConfig(cnxn, project_id, use_cache=False) |
| 1349 | |
| 1350 | if well_known_statuses is not None: |
| 1351 | tracker_bizobj.SetConfigStatuses(project_config, well_known_statuses) |
| 1352 | |
| 1353 | if statuses_offer_merge is not None: |
| 1354 | project_config.statuses_offer_merge = statuses_offer_merge |
| 1355 | |
| 1356 | if well_known_labels is not None: |
| 1357 | tracker_bizobj.SetConfigLabels(project_config, well_known_labels) |
| 1358 | |
| 1359 | if excl_label_prefixes is not None: |
| 1360 | project_config.exclusive_label_prefixes = excl_label_prefixes |
| 1361 | |
| 1362 | if approval_defs is not None: |
| 1363 | tracker_bizobj.SetConfigApprovals(project_config, approval_defs) |
| 1364 | |
| 1365 | if default_template_for_developers is not None: |
| 1366 | project_config.default_template_for_developers = ( |
| 1367 | default_template_for_developers) |
| 1368 | if default_template_for_users is not None: |
| 1369 | project_config.default_template_for_users = default_template_for_users |
| 1370 | |
| 1371 | if list_prefs: |
| 1372 | default_col_spec, default_sort_spec, x_attr, y_attr, m_d_q = list_prefs |
| 1373 | project_config.default_col_spec = default_col_spec |
| 1374 | project_config.default_sort_spec = default_sort_spec |
| 1375 | project_config.default_x_attr = x_attr |
| 1376 | project_config.default_y_attr = y_attr |
| 1377 | project_config.member_default_query = m_d_q |
| 1378 | |
| 1379 | if restrict_to_known is not None: |
| 1380 | project_config.restrict_to_known = restrict_to_known |
| 1381 | |
| 1382 | self.StoreConfig(cnxn, project_config) |
| 1383 | return project_config |
| 1384 | |
| 1385 | def CreateFieldDef( |
| 1386 | self, |
| 1387 | cnxn, |
| 1388 | project_id, |
| 1389 | field_name, |
| 1390 | field_type_str, |
| 1391 | applic_type, |
| 1392 | applic_pred, |
| 1393 | is_required, |
| 1394 | is_niche, |
| 1395 | is_multivalued, |
| 1396 | min_value, |
| 1397 | max_value, |
| 1398 | regex, |
| 1399 | needs_member, |
| 1400 | needs_perm, |
| 1401 | grants_perm, |
| 1402 | notify_on, |
| 1403 | date_action_str, |
| 1404 | docstring, |
| 1405 | admin_ids, |
| 1406 | editor_ids, |
| 1407 | approval_id=None, |
| 1408 | is_phase_field=False, |
| 1409 | is_restricted_field=False): |
| 1410 | config = self.GetProjectConfig(cnxn, project_id) |
| 1411 | field_type = tracker_pb2.FieldTypes(field_type_str) |
| 1412 | field_id = self.next_field_id |
| 1413 | self.next_field_id += 1 |
| 1414 | fd = tracker_bizobj.MakeFieldDef( |
| 1415 | field_id, project_id, field_name, field_type, applic_type, applic_pred, |
| 1416 | is_required, is_niche, is_multivalued, min_value, max_value, regex, |
| 1417 | needs_member, needs_perm, grants_perm, notify_on, date_action_str, |
| 1418 | docstring, False, approval_id, is_phase_field, is_restricted_field, |
| 1419 | admin_ids=admin_ids, editor_ids=editor_ids) |
| 1420 | config.field_defs.append(fd) |
| 1421 | self.StoreConfig(cnxn, config) |
| 1422 | return field_id |
| 1423 | |
| 1424 | def LookupFieldID(self, cnxn, project_id, field): |
| 1425 | config = self.GetProjectConfig(cnxn, project_id) |
| 1426 | for fd in config.field_defs: |
| 1427 | if fd.field_name == field: |
| 1428 | return fd.field_id |
| 1429 | |
| 1430 | return None |
| 1431 | |
| 1432 | def SoftDeleteFieldDefs(self, cnxn, project_id, field_ids): |
| 1433 | config = self.GetProjectConfig(cnxn, project_id) |
| 1434 | for fd in config.field_defs: |
| 1435 | if fd.field_id in field_ids: |
| 1436 | fd.is_deleted = True |
| 1437 | self.StoreConfig(cnxn, config) |
| 1438 | |
| 1439 | def UpdateFieldDef( |
| 1440 | self, |
| 1441 | cnxn, |
| 1442 | project_id, |
| 1443 | field_id, |
| 1444 | field_name=None, |
| 1445 | applicable_type=None, |
| 1446 | applicable_predicate=None, |
| 1447 | is_required=None, |
| 1448 | is_niche=None, |
| 1449 | is_multivalued=None, |
| 1450 | min_value=None, |
| 1451 | max_value=None, |
| 1452 | regex=None, |
| 1453 | needs_member=None, |
| 1454 | needs_perm=None, |
| 1455 | grants_perm=None, |
| 1456 | notify_on=None, |
| 1457 | date_action=None, |
| 1458 | docstring=None, |
| 1459 | admin_ids=None, |
| 1460 | editor_ids=None, |
| 1461 | is_restricted_field=None): |
| 1462 | config = self.GetProjectConfig(cnxn, project_id) |
| 1463 | fd = tracker_bizobj.FindFieldDefByID(field_id, config) |
| 1464 | # pylint: disable=multiple-statements |
| 1465 | if field_name is not None: fd.field_name = field_name |
| 1466 | if applicable_type is not None: fd.applicable_type = applicable_type |
| 1467 | if applicable_predicate is not None: |
| 1468 | fd.applicable_predicate = applicable_predicate |
| 1469 | if is_required is not None: fd.is_required = is_required |
| 1470 | if is_niche is not None: fd.is_niche = is_niche |
| 1471 | if is_multivalued is not None: fd.is_multivalued = is_multivalued |
| 1472 | if min_value is not None: fd.min_value = min_value |
| 1473 | if max_value is not None: fd.max_value = max_value |
| 1474 | if regex is not None: fd.regex = regex |
| 1475 | if date_action is not None: |
| 1476 | fd.date_action = config_svc.DATE_ACTION_ENUM.index(date_action) |
| 1477 | if docstring is not None: fd.docstring = docstring |
| 1478 | if admin_ids is not None: fd.admin_ids = admin_ids |
| 1479 | if editor_ids is not None: |
| 1480 | fd.editor_ids = editor_ids |
| 1481 | if is_restricted_field is not None: |
| 1482 | fd.is_restricted_field = is_restricted_field |
| 1483 | self.StoreConfig(cnxn, config) |
| 1484 | |
| 1485 | def CreateComponentDef( |
| 1486 | self, cnxn, project_id, path, docstring, deprecated, admin_ids, cc_ids, |
| 1487 | created, creator_id, label_ids): |
| 1488 | config = self.GetProjectConfig(cnxn, project_id) |
| 1489 | cd = tracker_bizobj.MakeComponentDef( |
| 1490 | self.next_component_id, project_id, path, docstring, deprecated, |
| 1491 | admin_ids, cc_ids, created, creator_id, label_ids=label_ids) |
| 1492 | config.component_defs.append(cd) |
| 1493 | self.next_component_id += 1 |
| 1494 | self.StoreConfig(cnxn, config) |
| 1495 | return self.next_component_id - 1 |
| 1496 | |
| 1497 | def UpdateComponentDef( |
| 1498 | self, cnxn, project_id, component_id, path=None, docstring=None, |
| 1499 | deprecated=None, admin_ids=None, cc_ids=None, created=None, |
| 1500 | creator_id=None, modified=None, modifier_id=None, label_ids=None): |
| 1501 | config = self.GetProjectConfig(cnxn, project_id) |
| 1502 | cd = tracker_bizobj.FindComponentDefByID(component_id, config) |
| 1503 | if path is not None: |
| 1504 | assert path |
| 1505 | cd.path = path |
| 1506 | # pylint: disable=multiple-statements |
| 1507 | if docstring is not None: cd.docstring = docstring |
| 1508 | if deprecated is not None: cd.deprecated = deprecated |
| 1509 | if admin_ids is not None: cd.admin_ids = admin_ids |
| 1510 | if cc_ids is not None: cd.cc_ids = cc_ids |
| 1511 | if created is not None: cd.created = created |
| 1512 | if creator_id is not None: cd.creator_id = creator_id |
| 1513 | if modified is not None: cd.modified = modified |
| 1514 | if modifier_id is not None: cd.modifier_id = modifier_id |
| 1515 | if label_ids is not None: cd.label_ids = label_ids |
| 1516 | self.StoreConfig(cnxn, config) |
| 1517 | |
| 1518 | def DeleteComponentDef(self, cnxn, project_id, component_id): |
| 1519 | """Delete the specified component definition.""" |
| 1520 | config = self.GetProjectConfig(cnxn, project_id) |
| 1521 | config.component_defs = [ |
| 1522 | cd for cd in config.component_defs |
| 1523 | if cd.component_id != component_id] |
| 1524 | self.StoreConfig(cnxn, config) |
| 1525 | |
  def InvalidateMemcache(self, issues, key_prefix=''):
    """No-op: this fake keeps everything in RAM, so there is no memcache."""
    pass
| 1528 | |
  def InvalidateMemcacheForEntireProject(self, project_id):
    """No-op: this fake keeps everything in RAM, so there is no memcache."""
    pass
| 1531 | |
| 1532 | |
class IssueService(object):
  """Fake version of IssueService that just works in-RAM."""
  # pylint: disable=unused-argument

  def __init__(self, user_id=None):
    # Default user id attributed to actions when no caller is given.
    self.user_id = user_id
    # Dictionary {project_id: issue_pb_dict}
    # where issue_pb_dict is a dictionary of the form
    # {local_id: issue_pb}
    self.issues_by_project = {}
    # Dictionary {issue_id: issue_pb} for direct global-id lookup.
    self.issues_by_iid = {}
    # Dictionary {project_id: comment_pb_dict}
    # where comment_pb_dict is a dictionary of the form
    # {local_id: comment_pb_list}
    self.comments_by_project = {}
    self.comments_by_iid = {}
    self.comments_by_cid = {}
    # Dictionary {attachment_id: (attachment_pb, comment_id, issue_id)}.
    self.attachments_by_id = {}

    # Set of issue IDs for issues that have been indexed by calling
    # IndexIssues().
    self.indexed_issue_iids = set()

    # Set of issue IDs for issues that have been moved by calling MoveIssue().
    self.moved_back_iids = set()

    # Dict of issue IDs mapped to other issue IDs to represent moved issues.
    self.moved_issues = {}

    # Test-only indication that the indexer would have been called
    # by the real DITPersist.
    self.indexer_called = False

    # Test-only sequence of updated and enqueued.
    self.updated_issues = []
    self.enqueued_issues = []  # issue_ids

    # Test-only sequence of expunged issues and projects.
    self.expunged_issues = []
    self.expunged_former_locations = []
    self.expunged_local_ids = []
    self.expunged_users_in_issues = []

    # Test-only indicators that methods were called.
    self.get_all_issues_in_project_called = False
    self.update_issues_called = False
    self.enqueue_issues_called = False
    # NOTE(review): 'acitivity' is a long-standing misspelling; existing
    # tests may read this attribute by name, so it is preserved as-is.
    self.get_issue_acitivity_called = False

    # The next id to return if it is > 0.
    self.next_id = -1
| 1584 | |
Adrià Vilanova Martínez | f19ea43 | 2024-01-23 20:20:52 +0100 | [diff] [blame^] | 1585 | def UpdateIssue( |
| 1586 | self, |
| 1587 | cnxn, |
| 1588 | issue, |
| 1589 | update_cols=None, |
| 1590 | just_derived=False, |
| 1591 | commit=True, |
| 1592 | invalidate=True): |
| 1593 | self.UpdateIssues( |
| 1594 | cnxn, [issue], update_cols, just_derived, commit, invalidate) |
| 1595 | |
Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 1596 | def UpdateIssues( |
| 1597 | self, cnxn, issues, update_cols=None, just_derived=False, |
| 1598 | commit=True, invalidate=True): |
| 1599 | self.update_issues_called = True |
| 1600 | assert all(issue.assume_stale == False for issue in issues) |
| 1601 | self.updated_issues.extend(issues) |
| 1602 | |
| 1603 | def GetIssueActivity( |
| 1604 | self, cnxn, num=50, before=None, after=None, |
| 1605 | project_ids=None, user_ids=None, ascending=False): |
| 1606 | self.get_issue_acitivity_called = True |
| 1607 | comments_dict = self.comments_by_cid |
| 1608 | comments = [] |
| 1609 | for value in comments_dict.values(): |
| 1610 | if project_ids is not None: |
| 1611 | if value.issue_id > 0 and value.issue_id in self.issues_by_iid: |
| 1612 | issue = self.issues_by_iid[value.issue_id] |
| 1613 | if issue.project_id in project_ids: |
| 1614 | comments.append(value) |
| 1615 | elif user_ids is not None: |
| 1616 | if value.user_id in user_ids: |
| 1617 | comments.append(value) |
| 1618 | else: |
| 1619 | comments.append(value) |
| 1620 | return comments |
| 1621 | |
| 1622 | def EnqueueIssuesForIndexing(self, _cnxn, issue_ids, commit=True): |
| 1623 | self.enqueue_issues_called = True |
| 1624 | for i in issue_ids: |
| 1625 | if i not in self.enqueued_issues: |
| 1626 | self.enqueued_issues.extend(issues) |
| 1627 | |
| 1628 | def ExpungeIssues(self, _cnxn, issue_ids): |
| 1629 | self.expunged_issues.extend(issue_ids) |
| 1630 | |
| 1631 | def ExpungeFormerLocations(self, _cnxn, project_id): |
| 1632 | self.expunged_former_locations.append(project_id) |
| 1633 | |
| 1634 | def ExpungeLocalIDCounters(self, _cnxn, project_id): |
| 1635 | self.expunged_local_ids.append(project_id) |
| 1636 | |
  def TestAddIssue(self, issue, importer_id=None):
    """Test helper: store an issue and synthesize its description comment."""
    project_id = issue.project_id
    self.issues_by_project.setdefault(project_id, {})
    self.issues_by_project[project_id][issue.local_id] = issue
    self.issues_by_iid[issue.issue_id] = issue
    # Mirror the real service: a newly stored issue is enqueued for indexing.
    if issue.issue_id not in self.enqueued_issues:
      self.enqueued_issues.append(issue.issue_id)
      self.enqueue_issues_called = True

    # Adding a new issue should add the first comment to the issue
    comment = tracker_pb2.IssueComment()
    comment.project_id = issue.project_id
    comment.issue_id = issue.issue_id
    comment.content = issue.summary
    comment.timestamp = issue.opened_timestamp
    comment.is_description = True
    if issue.reporter_id:
      comment.user_id = issue.reporter_id
    if importer_id:
      comment.importer_id = importer_id
    comment.sequence = 0
    self.TestAddComment(comment, issue.local_id)
| 1659 | |
| 1660 | def TestAddMovedIssueRef(self, source_project_id, source_local_id, |
| 1661 | target_project_id, target_local_id): |
| 1662 | self.moved_issues[(source_project_id, source_local_id)] = ( |
| 1663 | target_project_id, target_local_id) |
| 1664 | |
| 1665 | def TestAddComment(self, comment, local_id): |
| 1666 | pid = comment.project_id |
| 1667 | if not comment.id: |
| 1668 | comment.id = len(self.comments_by_cid) |
| 1669 | |
| 1670 | self.comments_by_project.setdefault(pid, {}) |
| 1671 | self.comments_by_project[pid].setdefault(local_id, []).append(comment) |
| 1672 | self.comments_by_iid.setdefault(comment.issue_id, []).append(comment) |
| 1673 | self.comments_by_cid[comment.id] = comment |
| 1674 | |
| 1675 | def TestAddAttachment(self, attachment, comment_id, issue_id): |
| 1676 | if not attachment.attachment_id: |
| 1677 | attachment.attachment_id = len(self.attachments_by_id) |
| 1678 | |
| 1679 | aid = attachment.attachment_id |
| 1680 | self.attachments_by_id[aid] = attachment, comment_id, issue_id |
| 1681 | comment = self.comments_by_cid[comment_id] |
| 1682 | if attachment not in comment.attachments: |
| 1683 | comment.attachments.extend([attachment]) |
| 1684 | |
| 1685 | def SoftDeleteAttachment( |
| 1686 | self, _cnxn, _issue, comment, attach_id, _user_service, delete=True, |
| 1687 | index_now=False): |
| 1688 | attachment = None |
| 1689 | for attach in comment.attachments: |
| 1690 | if attach.attachment_id == attach_id: |
| 1691 | attachment = attach |
| 1692 | if not attachment: |
| 1693 | return |
| 1694 | attachment.deleted = delete |
| 1695 | |
| 1696 | def GetAttachmentAndContext(self, _cnxn, attachment_id): |
| 1697 | if attachment_id in self.attachments_by_id: |
| 1698 | attach, comment_id, issue_id = self.attachments_by_id[attachment_id] |
| 1699 | if not attach.deleted: |
| 1700 | return attach, comment_id, issue_id |
| 1701 | |
| 1702 | raise exceptions.NoSuchAttachmentException() |
| 1703 | |
| 1704 | def GetComments( |
| 1705 | self, _cnxn, where=None, order_by=None, content_only=False, **kwargs): |
| 1706 | # This is a very limited subset of what the real GetComments() can do. |
| 1707 | cid = kwargs.get('id') |
| 1708 | |
| 1709 | comment = self.comments_by_cid.get(cid) |
| 1710 | if comment: |
| 1711 | return [comment] |
| 1712 | else: |
| 1713 | return [] |
| 1714 | |
| 1715 | def GetComment(self, cnxn, comment_id): |
| 1716 | """Get the requested comment, or raise an exception.""" |
| 1717 | comments = self.GetComments(cnxn, id=comment_id) |
| 1718 | if len(comments) == 1: |
| 1719 | return comments[0] |
| 1720 | |
| 1721 | raise exceptions.NoSuchCommentException() |
| 1722 | |
  def ResolveIssueRefs(self, cnxn, ref_projects, default_project_name, refs):
    """Resolve (project_name, local_id) refs to global issue ids.

    Returns a pair (issue_ids, misses): refs in deleted or unknown projects
    are silently skipped; refs to unknown issues land in misses as
    (project_id, local_id) pairs.
    """
    result = []
    misses = []
    for project_name, local_id in refs:
      project = ref_projects.get(project_name or default_project_name)
      if not project or project.state == project_pb2.ProjectState.DELETABLE:
        continue  # ignore any refs to issues in deleted projects
      try:
        issue = self.GetIssueByLocalID(cnxn, project.project_id, local_id)
        result.append(issue.issue_id)
      except exceptions.NoSuchIssueException:
        misses.append((project.project_id, local_id))

    return result, misses
| 1737 | |
| 1738 | def LookupIssueRefs(self, cnxn, issue_ids): |
| 1739 | issue_dict, _misses = self.GetIssuesDict(cnxn, issue_ids) |
| 1740 | return { |
| 1741 | issue_id: (issue.project_name, issue.local_id) |
| 1742 | for issue_id, issue in issue_dict.items()} |
| 1743 | |
| 1744 | def GetAllIssuesInProject( |
| 1745 | self, _cnxn, project_id, min_local_id=None, use_cache=True): |
| 1746 | self.get_all_issues_in_project_called = True |
| 1747 | if project_id in self.issues_by_project: |
| 1748 | return list(self.issues_by_project[project_id].values()) |
| 1749 | else: |
| 1750 | return [] |
| 1751 | |
| 1752 | def GetIssuesByLocalIDs( |
| 1753 | self, _cnxn, project_id, local_id_list, use_cache=True, shard_id=None): |
| 1754 | results = [] |
| 1755 | for local_id in local_id_list: |
| 1756 | if (project_id in self.issues_by_project |
| 1757 | and local_id in self.issues_by_project[project_id]): |
| 1758 | results.append(self.issues_by_project[project_id][local_id]) |
| 1759 | |
| 1760 | return results |
| 1761 | |
| 1762 | def GetIssueByLocalID(self, _cnxn, project_id, local_id, use_cache=True): |
| 1763 | try: |
| 1764 | return self.issues_by_project[project_id][local_id] |
| 1765 | except KeyError: |
| 1766 | raise exceptions.NoSuchIssueException() |
| 1767 | |
  def GetAnyOnHandIssue(self, issue_ids, start=None, end=None):
    """Fake RAM-cache probe: always a miss, so callers do a full fetch."""
    return None  # Treat them all like misses.
| 1770 | |
| 1771 | def GetIssue(self, cnxn, issue_id, use_cache=True): |
| 1772 | issues = self.GetIssues(cnxn, [issue_id], use_cache=use_cache) |
| 1773 | try: |
| 1774 | return issues[0] |
| 1775 | except IndexError: |
| 1776 | raise exceptions.NoSuchIssueException() |
| 1777 | |
| 1778 | def GetCurrentLocationOfMovedIssue(self, cnxn, project_id, local_id): |
| 1779 | key = (project_id, local_id) |
| 1780 | if key in self.moved_issues: |
| 1781 | ref = self.moved_issues[key] |
| 1782 | return ref[0], ref[1] |
| 1783 | return None, None |
| 1784 | |
  def GetPreviousLocations(self, cnxn, issue):
    """Fake: report that the issue never lived at any other location."""
    return []
| 1787 | |
| 1788 | def GetCommentsByUser(self, cnxn, user_id): |
| 1789 | """Get all comments created by a user""" |
| 1790 | comments = [] |
| 1791 | for cid in self.comments_by_cid: |
| 1792 | comment = self.comments_by_cid[cid] |
| 1793 | if comment.user_id == user_id and not comment.is_description: |
| 1794 | comments.append(comment) |
| 1795 | return comments |
| 1796 | |
| 1797 | def GetCommentsByID(self, cnxn, comment_ids, _sequences, use_cache=True, |
| 1798 | shard_id=None): |
| 1799 | """Return all IssueComment PBs by comment ids.""" |
| 1800 | comments = [self.comments_by_cid[cid] for cid in comment_ids] |
| 1801 | return comments |
| 1802 | |
| 1803 | def GetIssueIDsReportedByUser(self, cnxn, user_id): |
| 1804 | """Get all issues created by a user""" |
| 1805 | ids = [] |
| 1806 | for iid in self.issues_by_iid: |
| 1807 | issue = self.issues_by_iid[iid] |
| 1808 | if issue.reporter_id == user_id: |
| 1809 | ids.append(iid) |
| 1810 | return ids |
| 1811 | |
| 1812 | def LookupIssueIDs(self, _cnxn, project_local_id_pairs): |
| 1813 | hits = [] |
| 1814 | misses = [] |
| 1815 | for (project_id, local_id) in project_local_id_pairs: |
| 1816 | try: |
| 1817 | issue = self.issues_by_project[project_id][local_id] |
| 1818 | hits.append(issue.issue_id) |
| 1819 | except KeyError: |
| 1820 | misses.append((project_id, local_id)) |
| 1821 | |
| 1822 | return hits, misses |
| 1823 | |
| 1824 | def LookupIssueIDsFollowMoves(self, _cnxn, project_local_id_pairs): |
| 1825 | hits = [] |
| 1826 | misses = [] |
| 1827 | for pair in project_local_id_pairs: |
| 1828 | project_id, local_id = self.moved_issues.get(pair, pair) |
| 1829 | try: |
| 1830 | issue = self.issues_by_project[project_id][local_id] |
| 1831 | hits.append(issue.issue_id) |
| 1832 | except KeyError: |
| 1833 | misses.append((project_id, local_id)) |
| 1834 | |
| 1835 | return hits, misses |
| 1836 | |
| 1837 | def LookupIssueID(self, _cnxn, project_id, local_id): |
| 1838 | try: |
| 1839 | issue = self.issues_by_project[project_id][local_id] |
| 1840 | except KeyError: |
| 1841 | raise exceptions.NoSuchIssueException() |
| 1842 | return issue.issue_id |
| 1843 | |
| 1844 | def GetCommentsForIssue(self, _cnxn, issue_id): |
| 1845 | comments = self.comments_by_iid.get(issue_id, []) |
| 1846 | for idx, c in enumerate(comments): |
| 1847 | c.sequence = idx |
| 1848 | |
| 1849 | return comments |
| 1850 | |
| 1851 | def InsertIssue(self, cnxn, issue): |
| 1852 | issue.issue_id = issue.project_id * 1000000 + issue.local_id |
| 1853 | self.issues_by_project.setdefault(issue.project_id, {}) |
| 1854 | self.issues_by_project[issue.project_id][issue.local_id] = issue |
| 1855 | self.issues_by_iid[issue.issue_id] = issue |
| 1856 | return issue.issue_id |
| 1857 | |
| 1858 | def CreateIssue( |
| 1859 | self, |
| 1860 | cnxn, |
| 1861 | services, |
| 1862 | issue, |
| 1863 | marked_description, |
| 1864 | attachments=None, |
| 1865 | index_now=False, |
| 1866 | importer_id=None): |
| 1867 | project_id = issue.project_id |
| 1868 | |
| 1869 | issue.local_id = self.AllocateNextLocalID(cnxn, project_id) |
| 1870 | issue.issue_id = project_id * 1000000 + issue.local_id |
| 1871 | |
| 1872 | self.TestAddIssue(issue, importer_id=importer_id) |
| 1873 | comment = self.comments_by_iid[issue.issue_id][0] |
| 1874 | comment.content = marked_description |
| 1875 | return issue, comment |
| 1876 | |
  def GetIssueApproval(self, cnxn, issue_id, approval_id, use_cache=True):
    """Return (issue, approval_value) or raise NoSuchIssueApprovalException."""
    issue = self.GetIssue(cnxn, issue_id, use_cache=use_cache)
    approval = tracker_bizobj.FindApprovalValueByID(
        approval_id, issue.approval_values)
    if approval:
      return issue, approval
    raise exceptions.NoSuchIssueApprovalException()
| 1884 | |
| 1885 | def UpdateIssueApprovalStatus( |
| 1886 | self, cnxn, issue_id, approval_id, status, setter_id, set_on, |
| 1887 | commit=True): |
| 1888 | issue = self.GetIssue(cnxn, issue_id) |
| 1889 | for av in issue.approval_values: |
| 1890 | if av.approval_id == approval_id: |
| 1891 | av.status = status |
| 1892 | av.setter_id = setter_id |
| 1893 | av.set_on = set_on |
| 1894 | return |
| 1895 | return |
| 1896 | |
| 1897 | def UpdateIssueApprovalApprovers( |
| 1898 | self, cnxn, issue_id, approval_id, approver_ids, commit=True): |
| 1899 | issue = self.GetIssue(cnxn, issue_id) |
| 1900 | for av in issue.approval_values: |
| 1901 | if av.approval_id == approval_id: |
| 1902 | av.approver_ids = approver_ids |
| 1903 | return |
| 1904 | return |
| 1905 | |
  def UpdateIssueStructure(
      self, cnxn, config, issue, template, reporter_id, comment_content,
      commit=True, invalidate=True):
    """Restructure the issue's approvals/phases/fields to match the template.

    Returns the comment PB created to describe the structure change.
    """
    approval_defs_by_id = {ad.approval_id: ad for ad in config.approval_defs}
    issue_avs_by_id = {av.approval_id: av for av in issue.approval_values}

    new_issue_approvals = []

    for template_av in template.approval_values:
      existing_issue_av = issue_avs_by_id.get(template_av.approval_id)
      # Keep an approval value as-is if it exists in both issue and template,
      # re-homing it under the template's phase.
      if existing_issue_av:
        existing_issue_av.phase_id = template_av.phase_id
        new_issue_approvals.append(existing_issue_av)
      else:
        new_issue_approvals.append(template_av)

      # Update all approval surveys so latest ApprovalDef survey changes
      # appear in the converted issue's approval values.
      ad = approval_defs_by_id.get(template_av.approval_id)
      if ad:
        self.CreateIssueComment(
            cnxn, issue, reporter_id, ad.survey,
            is_description=True, approval_id=ad.approval_id, commit=False)
      else:
        logging.info('ApprovalDef not found for approval %r', template_av)

    template_phase_by_name = {
        phase.name.lower(): phase for phase in template.phases}
    issue_phase_by_id = {phase.phase_id: phase for phase in issue.phases}
    updated_fvs = []
    # Trim issue FieldValues or update FieldValue phase_ids
    for fv in issue.field_values:
      # If a fv's phase has the same name as a template's phase, update
      # the fv's phase_id to that of the template phase's. Otherwise,
      # remove the fv.
      if fv.phase_id:
        issue_phase = issue_phase_by_id.get(fv.phase_id)
        if issue_phase and issue_phase.name:
          template_phase = template_phase_by_name.get(issue_phase.name.lower())
          if template_phase:
            fv.phase_id = template_phase.phase_id
            updated_fvs.append(fv)
      # keep all fvs that do not belong to phases.
      else:
        updated_fvs.append(fv)

    fd_names_by_id = {fd.field_id: fd.field_name for fd in config.field_defs}
    amendment = tracker_bizobj.MakeApprovalStructureAmendment(
        [fd_names_by_id.get(av.approval_id) for av in new_issue_approvals],
        [fd_names_by_id.get(av.approval_id) for av in issue.approval_values])

    issue.approval_values = new_issue_approvals
    issue.phases = template.phases
    issue.field_values = updated_fvs

    return self.CreateIssueComment(
        cnxn, issue, reporter_id, comment_content,
        amendments=[amendment], commit=False)
| 1965 | |
| 1966 | def SetUsedLocalID(self, cnxn, project_id): |
| 1967 | self.next_id = self.GetHighestLocalID(cnxn, project_id) + 1 |
| 1968 | |
| 1969 | def AllocateNextLocalID(self, cnxn, project_id): |
| 1970 | return self.GetHighestLocalID(cnxn, project_id) + 1 |
| 1971 | |
| 1972 | def GetHighestLocalID(self, _cnxn, project_id): |
| 1973 | if self.next_id > 0: |
| 1974 | return self.next_id - 1 |
| 1975 | else: |
| 1976 | issue_dict = self.issues_by_project.get(project_id, {}) |
| 1977 | highest = max([0] + [issue.local_id for issue in issue_dict.values()]) |
| 1978 | return highest |
| 1979 | |
  def _MakeIssueComment(
      self, project_id, user_id, content, inbound_message=None,
      amendments=None, attachments=None, kept_attachments=None, timestamp=None,
      is_spam=False, is_description=False, approval_id=None, importer_id=None):
    """Build an IssueComment PB without storing it in any lookup table.

    Note: attachments and kept_attachments are accepted but not used here.
    """
    comment = tracker_pb2.IssueComment()
    comment.project_id = project_id
    comment.user_id = user_id
    comment.content = content or ''
    comment.is_spam = is_spam
    comment.is_description = is_description
    # Default to "now" when the caller does not supply a timestamp.
    if not timestamp:
      timestamp = int(time.time())
    comment.timestamp = int(timestamp)
    if inbound_message:
      comment.inbound_message = inbound_message
    if amendments:
      comment.amendments.extend(amendments)
    if approval_id:
      comment.approval_id = approval_id
    if importer_id:
      comment.importer_id = importer_id
    return comment
| 2002 | |
  def CopyIssues(self, cnxn, dest_project, issues, user_service, copier_id):
    """Copy issues into dest_project, with copier_id as the new reporter.

    Returns the list of newly created issue PBs.
    """
    created_issues = []
    for target_issue in issues:
      new_issue = tracker_pb2.Issue()
      new_issue.project_id = dest_project.project_id
      new_issue.project_name = dest_project.project_name
      new_issue.summary = target_issue.summary
      new_issue.labels.extend(target_issue.labels)
      new_issue.field_values.extend(target_issue.field_values)
      new_issue.reporter_id = copier_id

      # The copy counts as newly opened and modified right now.
      timestamp = int(time.time())
      new_issue.opened_timestamp = timestamp
      new_issue.modified_timestamp = timestamp
      new_issue.migration_modified_timestamp = timestamp

      target_comments = self.GetCommentsForIssue(cnxn, target_issue.issue_id)
      initial_summary_comment = target_comments[0]

      # Note that blocking and merge_into are not copied.
      new_issue.blocked_on_iids = target_issue.blocked_on_iids
      new_issue.blocked_on_ranks = target_issue.blocked_on_ranks

      # Create the same summary comment as the target issue.
      comment = self._MakeIssueComment(
          dest_project.project_id, copier_id, initial_summary_comment.content,
          is_description=True)

      new_issue.local_id = self.AllocateNextLocalID(
          cnxn, dest_project.project_id)
      issue_id = self.InsertIssue(cnxn, new_issue)
      comment.issue_id = issue_id
      self.InsertComment(cnxn, comment)
      created_issues.append(new_issue)

    return created_issues
| 2039 | |
| 2040 | def MoveIssues(self, cnxn, dest_project, issues, user_service): |
| 2041 | move_to = dest_project.project_id |
| 2042 | self.issues_by_project.setdefault(move_to, {}) |
| 2043 | moved_back_iids = set() |
| 2044 | for issue in issues: |
| 2045 | if issue.issue_id in self.moved_back_iids: |
| 2046 | moved_back_iids.add(issue.issue_id) |
| 2047 | self.moved_back_iids.add(issue.issue_id) |
| 2048 | project_id = issue.project_id |
| 2049 | self.issues_by_project[project_id].pop(issue.local_id) |
| 2050 | issue.local_id = self.AllocateNextLocalID(cnxn, move_to) |
| 2051 | self.issues_by_project[move_to][issue.local_id] = issue |
| 2052 | issue.project_id = move_to |
| 2053 | issue.project_name = dest_project.project_name |
| 2054 | return moved_back_iids |
| 2055 | |
| 2056 | def GetCommentsForIssues(self, _cnxn, issue_ids, content_only=False): |
| 2057 | comments_dict = {} |
| 2058 | for issue_id in issue_ids: |
| 2059 | comments_dict[issue_id] = self.comments_by_iid[issue_id] |
| 2060 | |
| 2061 | return comments_dict |
| 2062 | |
| 2063 | def InsertComment(self, cnxn, comment, commit=True): |
| 2064 | issue = self.GetIssue(cnxn, comment.issue_id) |
| 2065 | self.TestAddComment(comment, issue.local_id) |
| 2066 | |
  # pylint: disable=unused-argument
  def DeltaUpdateIssue(
      self, cnxn, services, reporter_id, project_id,
      config, issue, delta, index_now=False, comment=None, attachments=None,
      iids_to_invalidate=None, rules=None, predicate_asts=None,
      is_description=False, timestamp=None, kept_attachments=None,
      importer_id=None, inbound_message=None):
    """Apply the delta to the issue; return (amendments, comment_pb)."""
    # Return a bogus amendments list if any of the fields changed
    amendments, _ = tracker_bizobj.ApplyIssueDelta(
        cnxn, self, issue, delta, config)

    # A no-op edit (no field changes, no comment text) creates no comment.
    if not amendments and (not comment or not comment.strip()):
      return [], None

    comment_pb = self.CreateIssueComment(
        cnxn, issue, reporter_id, comment, attachments=attachments,
        amendments=amendments, is_description=is_description,
        kept_attachments=kept_attachments, importer_id=importer_id,
        inbound_message=inbound_message)

    self.indexer_called = index_now
    return amendments, comment_pb
| 2089 | |
  def InvalidateIIDs(self, cnxn, iids_to_invalidate):
    """No-op: the fake has no cache layer to invalidate."""
    pass
| 2092 | |
  # pylint: disable=unused-argument
  def CreateIssueComment(
      self, _cnxn, issue, user_id, content,
      inbound_message=None, amendments=None, attachments=None,
      kept_attachments=None, timestamp=None, is_spam=False,
      is_description=False, approval_id=None, commit=True,
      importer_id=None):
    """Store a new comment (and any attachments) on the given issue.

    Note: importer_id is accepted but not recorded by this fake.
    Returns the new IssueComment PB.
    """
    # Add a comment to an issue
    comment = tracker_pb2.IssueComment()
    comment.id = len(self.comments_by_cid)
    comment.project_id = issue.project_id
    comment.issue_id = issue.issue_id
    comment.content = content
    comment.user_id = user_id
    if timestamp is not None:
      comment.timestamp = timestamp
    else:
      # Arbitrary fixed time keeps tests deterministic.
      comment.timestamp = 1234567890
    if amendments:
      comment.amendments.extend(amendments)
    if inbound_message:
      comment.inbound_message = inbound_message
    comment.is_spam = is_spam
    comment.is_description = is_description
    if approval_id:
      comment.approval_id = approval_id

    pid = issue.project_id
    self.comments_by_project.setdefault(pid, {})
    self.comments_by_project[pid].setdefault(issue.local_id, []).append(comment)
    self.comments_by_iid.setdefault(issue.issue_id, []).append(comment)
    self.comments_by_cid[comment.id] = comment

    if attachments:
      for filename, filecontent, mimetype in attachments:
        aid = len(self.attachments_by_id)
        attach = tracker_pb2.Attachment(
            attachment_id=aid,
            filename=filename,
            filesize=len(filecontent),
            mimetype=mimetype,
            gcs_object_id='gcs_object_id(%s)' % filename)
        comment.attachments.append(attach)
        self.attachments_by_id[aid] = attach, pid, comment.id

    if kept_attachments:
      # Carry forward previously uploaded attachments by id.
      comment.attachments.extend([
          self.attachments_by_id[aid][0]
          for aid in kept_attachments])

    return comment
| 2144 | |
| 2145 | def GetOpenAndClosedIssues(self, _cnxn, issue_ids): |
| 2146 | open_issues = [] |
| 2147 | closed_issues = [] |
| 2148 | for issue_id in issue_ids: |
| 2149 | try: |
| 2150 | issue = self.issues_by_iid[issue_id] |
| 2151 | if issue.status == 'Fixed': |
| 2152 | closed_issues.append(issue) |
| 2153 | else: |
| 2154 | open_issues.append(issue) |
| 2155 | except KeyError: |
| 2156 | continue |
| 2157 | |
| 2158 | return open_issues, closed_issues |
| 2159 | |
| 2160 | def GetIssuesDict( |
| 2161 | self, _cnxn, issue_ids, use_cache=True, shard_id=None): |
| 2162 | missing_ids = [iid for iid in issue_ids if iid not in self.issues_by_iid] |
| 2163 | issues_by_id = {} |
| 2164 | for iid in issue_ids: |
| 2165 | if iid in self.issues_by_iid: |
| 2166 | issue = self.issues_by_iid[iid] |
| 2167 | if not use_cache: |
| 2168 | issue.assume_stale = False |
| 2169 | issues_by_id[iid] = issue |
| 2170 | |
| 2171 | return issues_by_id, missing_ids |
| 2172 | |
| 2173 | def GetIssues(self, cnxn, issue_ids, use_cache=True, shard_id=None): |
| 2174 | issues_by_iid, _misses = self.GetIssuesDict( |
| 2175 | cnxn, issue_ids, use_cache=use_cache, shard_id=shard_id) |
| 2176 | results = [ |
| 2177 | issues_by_iid[issue_id] |
| 2178 | for issue_id in issue_ids |
| 2179 | if issue_id in issues_by_iid |
| 2180 | ] |
| 2181 | |
| 2182 | return results |
| 2183 | |
| 2184 | def SoftDeleteIssue( |
| 2185 | self, _cnxn, project_id, local_id, deleted, user_service): |
| 2186 | issue = self.issues_by_project[project_id][local_id] |
| 2187 | issue.deleted = deleted |
| 2188 | |
  def SoftDeleteComment(
      self, cnxn, issue, comment, deleted_by_user_id, user_service,
      delete=True, reindex=False, is_spam=False):
    """Mark (or unmark) an issue comment as deleted and/or spam."""
    pid = comment.project_id
    # Find the original comment by the sequence number.
    c = None
    by_iid_idx = -1
    for by_iid_idx, c in enumerate(self.comments_by_iid[issue.issue_id]):
      if c.sequence == comment.sequence:
        break
    comment = c
    by_project_idx = (
        self.comments_by_project[pid][issue.local_id].index(comment))
    comment.is_spam = is_spam
    if delete:
      comment.deleted_by = deleted_by_user_id
    else:
      # Undelete: clear the proto field entirely.
      comment.reset('deleted_by')
    # Write the mutated comment back into every lookup table.
    self.comments_by_project[pid][issue.local_id][by_project_idx] = comment
    self.comments_by_iid[issue.issue_id][by_iid_idx] = comment
    self.comments_by_cid[comment.id] = comment
| 2210 | |
| 2211 | def DeleteComponentReferences(self, _cnxn, component_id): |
| 2212 | for _, issue in self.issues_by_iid.items(): |
| 2213 | issue.component_ids = [ |
| 2214 | cid for cid in issue.component_ids if cid != component_id] |
| 2215 | |
  def RunIssueQuery(
      self, cnxn, left_joins, where, order_by, shard_id=None, limit=None):
    """This always returns empty results. Mock it to test other cases.

    Returns:
      ([], False): no matching issue ids, and the result set was not capped.
    """
    return [], False
| 2220 | |
  def GetIIDsByLabelIDs(self, cnxn, label_ids, project_id, shard_id):
    """This always returns empty results. Mock it to test other cases."""
    return []
| 2224 | |
  def GetIIDsByParticipant(self, cnxn, user_ids, project_ids, shard_id):
    """This always returns empty results. Mock it to test other cases."""
    return []
| 2228 | |
| 2229 | def SortBlockedOn(self, cnxn, issue, blocked_on_iids): |
| 2230 | return blocked_on_iids, [0] * len(blocked_on_iids) |
| 2231 | |
  def ApplyIssueRerank(
      self, cnxn, parent_id, relations_to_change, commit=True, invalidate=True):
    """Reorder the parent issue's blocked-on list by the new rank values."""
    issue = self.GetIssue(cnxn, parent_id)
    # Current {blocked_on_iid: rank}, overlaid with the requested changes.
    relations_dict = dict(
        list(zip(issue.blocked_on_iids, issue.blocked_on_ranks)))
    relations_dict.update(relations_to_change)
    # Ranks are stored highest-first; iids are sorted by their (new) rank.
    issue.blocked_on_ranks = sorted(issue.blocked_on_ranks, reverse=True)
    issue.blocked_on_iids = sorted(
        issue.blocked_on_iids, key=relations_dict.get, reverse=True)
| 2241 | |
  def SplitRanks(self, cnxn, parent_id, target_id, open_ids, split_above=False):
    """No-op in the fake: rank splitting has no observable effect here."""
    pass
| 2244 | |
| 2245 | def ExpungeUsersInIssues(self, cnxn, user_ids_by_email, limit=None): |
| 2246 | user_ids = list(user_ids_by_email.values()) |
| 2247 | self.expunged_users_in_issues.extend(user_ids) |
| 2248 | return [] |
| 2249 | |
| 2250 | |
class TemplateService(object):
  """Fake version of TemplateService that just works in-RAM."""

  def __init__(self):
    self.templates_by_id = {}  # template_id: template_pb
    self.templates_by_project_id = {}  # project_id: [template_id, ...]

  def TestAddIssueTemplateDef(
      self, template_id, project_id, name, content="", summary="",
      summary_must_be_edited=False, status='New', members_only=False,
      owner_defaults_to_member=False, component_required=False, owner_id=None,
      labels=None, component_ids=None, admin_ids=None, field_values=None,
      phases=None, approval_values=None):
    """Create a template PB and register it under the given project.

    Returns:
      The created template PB, with template_id set.
    """
    template = tracker_bizobj.MakeIssueTemplate(
        name,
        summary,
        status,
        owner_id,
        content,
        labels,
        field_values or [],
        admin_ids or [],
        component_ids,
        summary_must_be_edited=summary_must_be_edited,
        owner_defaults_to_member=owner_defaults_to_member,
        component_required=component_required,
        members_only=members_only,
        phases=phases,
        approval_values=approval_values)
    template.template_id = template_id
    self.templates_by_id[template_id] = template
    self.templates_by_project_id.setdefault(project_id, []).append(template_id)
    return template

  def GetTemplateByName(self, cnxn, template_name, project_id):
    """Return the project's template with the given name, or None."""
    for template_id in self.templates_by_project_id.get(project_id, []):
      template = self.GetTemplateById(cnxn, template_id)
      if template.name == template_name:
        return template
    return None

  def GetTemplateById(self, cnxn, template_id):
    """Return the template with the given id, or None."""
    return self.templates_by_id.get(template_id)

  def GetTemplatesById(self, cnxn, template_ids):
    """Return a list of the templates matching the given ids."""
    # Return a real list rather than a lazy filter object so callers can
    # iterate the result more than once (py2 filter() returned a list).
    return [
        template for template in self.templates_by_id.values()
        if template.template_id in template_ids]

  def GetProjectTemplates(self, cnxn, project_id):
    """Return all templates in the given project (empty if none)."""
    template_ids = self.templates_by_project_id.get(project_id, [])
    return self.GetTemplatesById(cnxn, template_ids)

  def ExpungeUsersInTemplates(self, cnxn, user_ids, limit=None):
    """Remove all references to the given users from stored templates."""
    for template in self.templates_by_id.values():
      template.admin_ids = [user_id for user_id in template.admin_ids
                            if user_id not in user_ids]
      if template.owner_id in user_ids:
        template.owner_id = None
      # Bug fix: keep field values belonging to *other* users.  The original
      # used "in" and thus kept only the expunged users' field values while
      # deleting everyone else's.
      template.field_values = [fv for fv in template.field_values
                               if fv.user_id not in user_ids]
| 2318 | |
class SpamService(object):
  """Fake version of SpamService that just works in-RAM."""

  def __init__(self, user_id=None):
    self.user_id = user_id
    # issue_id => [reporting user_ids]
    self.reports_by_issue_id = collections.defaultdict(list)
    # issue_id => {comment_id: [reporting user_ids]}
    self.comment_reports_by_issue_id = collections.defaultdict(dict)
    # issue_id => {user_id: is_spam}
    self.manual_verdicts_by_issue_id = collections.defaultdict(dict)
    # comment_id => {user_id: is_spam}
    self.manual_verdicts_by_comment_id = collections.defaultdict(dict)
    self.expunged_users_in_spam = []

  def LookupIssuesFlaggers(self, cnxn, issue_ids):
    """Return {issue_id: (issue flagger ids, {comment_id: flagger ids})}."""
    flaggers = {}
    for iid in issue_ids:
      issue_flaggers = self.reports_by_issue_id.get(iid, [])
      comment_flaggers = self.comment_reports_by_issue_id.get(iid, {})
      flaggers[iid] = (issue_flaggers, comment_flaggers)
    return flaggers

  def LookupIssueFlaggers(self, cnxn, issue_id):
    """Return (issue flagger ids, {comment_id: flagger ids}) for one issue."""
    return self.LookupIssuesFlaggers(cnxn, [issue_id])[issue_id]

  def FlagIssues(self, cnxn, issue_service, issues, user_id, flagged_spam):
    """Add or remove the user's spam flag on each of the given issues."""
    for issue in issues:
      reporters = self.reports_by_issue_id[issue.issue_id]
      if flagged_spam:
        reporters.append(user_id)
      else:
        reporters.remove(user_id)

  def FlagComment(
      self, cnxn, issue, comment_id, reported_user_id, user_id, flagged_spam):
    """Add or remove the user's spam flag on a single comment."""
    issue_reports = self.comment_reports_by_issue_id[issue.issue_id]
    reporters = issue_reports.setdefault(comment_id, [])
    if flagged_spam:
      reporters.append(user_id)
    else:
      reporters.remove(user_id)

  def RecordManualIssueVerdicts(
      self, cnxn, issue_service, issues, user_id, is_spam):
    """Store the user's manual spam verdict for each of the given issues."""
    for issue in issues:
      self.manual_verdicts_by_issue_id[issue.issue_id][user_id] = is_spam

  def RecordManualCommentVerdict(
      self, cnxn, issue_service, user_service, comment_id,
      user_id, is_spam):
    """Store a manual comment verdict and soft-delete the comment."""
    self.manual_verdicts_by_comment_id[comment_id][user_id] = is_spam
    comment = issue_service.GetComment(cnxn, comment_id)
    comment.is_spam = is_spam
    issue = issue_service.GetIssue(cnxn, comment.issue_id, use_cache=False)
    issue_service.SoftDeleteComment(
        cnxn, issue, comment, user_id, user_service, is_spam, True, is_spam)

  def RecordClassifierIssueVerdict(self, cnxn, issue, is_spam, confidence,
                                   failed_open):
    """No-op in this fake."""
    return

  def RecordClassifierCommentVerdict(self, cnxn, issue, is_spam, confidence,
                                     failed_open):
    """No-op in this fake."""
    return

  def ClassifyComment(self, comment, commenter):
    """Always classify the comment as ham."""
    verdict = {
        'outputLabel': 'ham',
        'outputMulti': [{'label': 'ham', 'score': '1.0'}],
        'failed_open': False,
    }
    return verdict

  def ClassifyIssue(self, issue, firstComment, reporter):
    """Always classify the issue as ham."""
    verdict = {
        'outputLabel': 'ham',
        'outputMulti': [{'label': 'ham', 'score': '1.0'}],
        'failed_open': False,
    }
    return verdict

  def ExpungeUsersInSpam(self, cnxn, user_ids):
    """Record the given users as expunged from spam data."""
    self.expunged_users_in_spam.extend(user_ids)
| 2392 | |
| 2393 | |
class FeaturesService(object):
  """A fake implementation of FeaturesService."""

  def __init__(self):
    # Test-only sequence of expunged projects and users.
    self.expunged_saved_queries = []
    self.expunged_users_in_saved_queries = []
    self.expunged_filter_rules = []
    self.expunged_users_in_filter_rules = []
    self.expunged_quick_edit = []
    self.expunged_users_in_quick_edits = []
    self.expunged_hotlist_ids = []
    self.expunged_users_in_hotlists = []

    # filter rules, project_id => filterrule_pb
    self.test_rules = collections.defaultdict(list)

    # TODO(crbug/monorail/7104): Confirm that these are never reassigned
    # to empty {} and then change these to collections.defaultdicts instead.
    # hotlists
    self.test_hotlists = {}  # (hotlist_name, owner_id) => hotlist_pb
    self.hotlists_by_id = {}
    self.hotlists_id_by_user = {}  # user_id => [hotlist_id, hotlist_id, ...]
    self.hotlists_id_by_issue = {}  # issue_id => [hotlist_id, hotlist_id, ...]

    # saved queries
    self.saved_queries = []  # [(pid, uid, sq), ...]

  def TestAddFilterRule(
      self, project_id, predicate, default_status=None, default_owner_id=None,
      add_cc_ids=None, add_labels=None, add_notify=None, warning=None,
      error=None):
    """Add a filter rule to the given project and return it."""
    rule = filterrules_helpers.MakeRule(
        predicate, default_status=default_status,
        default_owner_id=default_owner_id, add_cc_ids=add_cc_ids,
        add_labels=add_labels, add_notify=add_notify, warning=warning,
        error=error)
    self.test_rules[project_id].append(rule)
    return rule

  def TestAddHotlist(self, name, summary='', owner_ids=None, editor_ids=None,
                     follower_ids=None, description=None, hotlist_id=None,
                     is_private=False, hotlist_item_fields=None,
                     default_col_spec=None):
    """Add a hotlist to the fake FeaturesService object.

    Args:
      name: the name of the hotlist. Will replace any existing hotlist under
        the same name.
      summary: the summary string of the hotlist
      owner_ids: List of user ids for the hotlist owners
      editor_ids: List of user ids for the hotlist editors
      follower_ids: List of user ids for the hotlist followers
      description: The description string for this hotlist
      hotlist_id: A unique integer identifier for the created hotlist
      is_private: A boolean indicating whether the hotlist is private/public
      hotlist_item_fields: a list of tuples ->
        [(issue_id, rank, adder_id, date_added, note),...]
      default_col_spec: string of default columns for the hotlist.

    Returns:
      A populated hotlist PB.
    """
    hotlist_pb = features_pb2.Hotlist()
    hotlist_pb.hotlist_id = hotlist_id or hash(name) % 100000
    hotlist_pb.name = name
    hotlist_pb.summary = summary
    hotlist_pb.is_private = is_private
    hotlist_pb.default_col_spec = default_col_spec
    if description is not None:
      hotlist_pb.description = description

    self.TestAddHotlistMembers(owner_ids, hotlist_pb, OWNER_ROLE)
    self.TestAddHotlistMembers(follower_ids, hotlist_pb, FOLLOWER_ROLE)
    self.TestAddHotlistMembers(editor_ids, hotlist_pb, EDITOR_ROLE)

    if hotlist_item_fields is not None:
      for (issue_id, rank, adder_id, date, note) in hotlist_item_fields:
        hotlist_pb.items.append(
            features_pb2.Hotlist.HotlistItem(
                issue_id=issue_id, rank=rank, adder_id=adder_id,
                date_added=date, note=note))
        self.hotlists_id_by_issue.setdefault(issue_id, []).append(
            hotlist_pb.hotlist_id)

    owner_id = None
    if hotlist_pb.owner_ids:
      owner_id = hotlist_pb.owner_ids[0]
    self.test_hotlists[(name, owner_id)] = hotlist_pb
    self.hotlists_by_id[hotlist_pb.hotlist_id] = hotlist_pb
    return hotlist_pb

  def TestAddHotlistMembers(self, user_id_list, hotlist_pb, role):
    """Add the given users to the hotlist PB in the given role."""
    if user_id_list is not None:
      for user_id in user_id_list:
        if role == OWNER_ROLE:
          hotlist_pb.owner_ids.append(user_id)
        elif role == EDITOR_ROLE:
          hotlist_pb.editor_ids.append(user_id)
        elif role == FOLLOWER_ROLE:
          hotlist_pb.follower_ids.append(user_id)
        # Track membership in the user index regardless of role.
        self.hotlists_id_by_user.setdefault(user_id, []).append(
            hotlist_pb.hotlist_id)

  def CheckHotlistName(self, cnxn, name, owner_ids):
    """Raise if the hotlist name is invalid or already in use."""
    if not framework_bizobj.IsValidHotlistName(name):
      raise exceptions.InputException(
          '%s is not a valid name for a Hotlist' % name)
    if self.LookupHotlistIDs(cnxn, [name], owner_ids):
      raise features_svc.HotlistAlreadyExists()

  def CreateHotlist(
      self, _cnxn, hotlist_name, summary, description, owner_ids, editor_ids,
      issue_ids=None, is_private=None, default_col_spec=None, ts=None):
    """Create and store a Hotlist with the given attributes."""
    if not framework_bizobj.IsValidHotlistName(hotlist_name):
      raise exceptions.InputException()
    if not owner_ids:  # Should never happen.
      raise features_svc.UnownedHotlistException()
    if (hotlist_name, owner_ids[0]) in self.test_hotlists:
      raise features_svc.HotlistAlreadyExists()
    hotlist_item_fields = [
        (issue_id, rank*100, owner_ids[0] or None, ts, '') for
        rank, issue_id in enumerate(issue_ids or [])]
    return self.TestAddHotlist(hotlist_name, summary=summary,
                               owner_ids=owner_ids, editor_ids=editor_ids,
                               description=description, is_private=is_private,
                               hotlist_item_fields=hotlist_item_fields,
                               default_col_spec=default_col_spec)

  def UpdateHotlist(
      self, cnxn, hotlist_id, name=None, summary=None, description=None,
      is_private=None, default_col_spec=None, owner_id=None,
      add_editor_ids=None):
    """Update fields, owner, and/or editors of the given hotlist."""
    hotlist = self.hotlists_by_id.get(hotlist_id)
    if not hotlist:
      raise features_svc.NoSuchHotlistException(
          'Hotlist "%s" not found!' % hotlist_id)

    if owner_id:
      old_owner_id = hotlist.owner_ids[0]
      self.test_hotlists.pop((hotlist.name, old_owner_id), None)
      self.test_hotlists[(hotlist.name, owner_id)] = hotlist

    if add_editor_ids:
      for editor_id in add_editor_ids:
        # Bug fix: use setdefault. The previous .get(editor_id, []) appended
        # to a throwaway list, so brand-new editors were never recorded in
        # hotlists_id_by_user.
        self.hotlists_id_by_user.setdefault(editor_id, []).append(hotlist_id)

    if name is not None:
      hotlist.name = name
    if summary is not None:
      hotlist.summary = summary
    if description is not None:
      hotlist.description = description
    if is_private is not None:
      hotlist.is_private = is_private
    if default_col_spec is not None:
      hotlist.default_col_spec = default_col_spec
    if owner_id is not None:
      hotlist.owner_ids = [owner_id]
    if add_editor_ids:
      hotlist.editor_ids.extend(add_editor_ids)

  def RemoveHotlistEditors(self, cnxn, hotlist_id, remove_editor_ids):
    """Remove the given editors from the hotlist and the user index."""
    hotlist = self.hotlists_by_id.get(hotlist_id)
    if not hotlist:
      raise features_svc.NoSuchHotlistException(
          'Hotlist "%s" not found!' % hotlist_id)
    for editor_id in remove_editor_ids:
      hotlist.editor_ids.remove(editor_id)
      self.hotlists_id_by_user[editor_id].remove(hotlist_id)

  def AddIssuesToHotlists(self, cnxn, hotlist_ids, added_tuples, issue_svc,
                          chart_svc, commit=True):
    """Add the given issue tuples to each of the given hotlists."""
    for hotlist_id in hotlist_ids:
      self.UpdateHotlistItems(cnxn, hotlist_id, [], added_tuples, commit=commit)

  def RemoveIssuesFromHotlists(self, cnxn, hotlist_ids, issue_ids, issue_svc,
                               chart_svc, commit=True):
    """Remove the given issues from each of the given hotlists."""
    for hotlist_id in hotlist_ids:
      self.UpdateHotlistItems(cnxn, hotlist_id, issue_ids, [], commit=commit)

  def UpdateHotlistIssues(
      self,
      cnxn,
      hotlist_id,
      updated_items,
      remove_issue_ids,
      issue_svc,
      chart_svc,
      commit=True):
    """Update the hotlist's items: replace some and remove others."""
    if not updated_items and not remove_issue_ids:
      raise exceptions.InputException('No changes to make')

    hotlist = self.hotlists_by_id.get(hotlist_id)
    if not hotlist:
      # Bug fix: the original raised bare NoSuchHotlistException, an
      # undefined name in this module that would surface as NameError.
      raise features_svc.NoSuchHotlistException(
          'Hotlist "%s" not found!' % hotlist_id)

    updated_ids = [item.issue_id for item in updated_items]
    items = [
        item for item in hotlist.items
        if item.issue_id not in updated_ids + remove_issue_ids
    ]
    hotlist.items = sorted(updated_items + items, key=lambda item: item.rank)

    # Remove all removed and updated issues.
    for issue_id in remove_issue_ids + updated_ids:
      try:
        self.hotlists_id_by_issue[issue_id].remove(hotlist_id)
      except (ValueError, KeyError):
        pass
    # Add all new or updated issues.
    for item in updated_items:
      self.hotlists_id_by_issue.setdefault(item.issue_id, []).append(hotlist_id)

  def UpdateHotlistItems(
      self, cnxn, hotlist_id, remove, added_issue_tuples, commit=True):
    """Remove the given issue ids and append the given issue tuples."""
    hotlist = self.hotlists_by_id.get(hotlist_id)
    if not hotlist:
      raise features_svc.NoSuchHotlistException(
          'Hotlist "%s" not found!' % hotlist_id)
    current_issues_ids = {
        item.issue_id for item in hotlist.items}
    items = [
        item for item in hotlist.items if
        item.issue_id not in remove]

    if hotlist.items:
      items_sorted = sorted(hotlist.items, key=lambda item: item.rank)
      rank_base = items_sorted[-1].rank + 10
    else:
      rank_base = 1

    new_hotlist_items = [
        features_pb2.MakeHotlistItem(
            issue_id, rank+rank_base*10, adder_id, date, note)
        for rank, (issue_id, adder_id, date, note) in
        enumerate(added_issue_tuples)
        if issue_id not in current_issues_ids]
    items.extend(new_hotlist_items)
    hotlist.items = items

    for issue_id in remove:
      try:
        self.hotlists_id_by_issue[issue_id].remove(hotlist_id)
      except (ValueError, KeyError):
        # Bug fix: also catch KeyError (issue never indexed), consistent
        # with UpdateHotlistIssues above.
        pass
    for item in new_hotlist_items:
      self.hotlists_id_by_issue.setdefault(item.issue_id, []).append(hotlist_id)

  def UpdateHotlistItemsFields(
      self, cnxn, hotlist_id, new_ranks=None, new_notes=None, commit=True):
    """Update the ranks and/or notes of the hotlist's items, then re-sort."""
    hotlist = self.hotlists_by_id.get(hotlist_id)
    if not hotlist:
      raise features_svc.NoSuchHotlistException(
          'Hotlist "%s" not found!' % hotlist_id)
    if new_ranks is None:
      new_ranks = {}
    if new_notes is None:
      new_notes = {}
    for hotlist_item in hotlist.items:
      if hotlist_item.issue_id in new_ranks:
        hotlist_item.rank = new_ranks[hotlist_item.issue_id]
      if hotlist_item.issue_id in new_notes:
        hotlist_item.note = new_notes[hotlist_item.issue_id]

    hotlist.items.sort(key=lambda item: item.rank)

  def TransferHotlistOwnership(
      self, cnxn, hotlist, new_owner_id, remain_editor, commit=True):
    """Transfers ownership of a hotlist to a new owner."""
    new_editor_ids = hotlist.editor_ids
    if remain_editor:
      new_editor_ids.extend(hotlist.owner_ids)
    if new_owner_id in new_editor_ids:
      new_editor_ids.remove(new_owner_id)
    new_follower_ids = hotlist.follower_ids
    if new_owner_id in new_follower_ids:
      new_follower_ids.remove(new_owner_id)
    self.UpdateHotlistRoles(
        cnxn, hotlist.hotlist_id, [new_owner_id], new_editor_ids,
        new_follower_ids, commit=commit)

  def LookupUserHotlists(self, cnxn, user_ids):
    """Return dict of {user_id: [hotlist_id, hotlist_id...]}."""
    users_hotlists_dict = {
        user_id: self.hotlists_id_by_user.get(user_id, [])
        for user_id in user_ids
    }
    return users_hotlists_dict

  def LookupIssueHotlists(self, cnxn, issue_ids):
    """Return dict of {issue_id: [hotlist_id, hotlist_id...]}."""
    issues_hotlists_dict = {
        issue_id: self.hotlists_id_by_issue[issue_id]
        for issue_id in issue_ids
        if issue_id in self.hotlists_id_by_issue}
    return issues_hotlists_dict

  def LookupHotlistIDs(self, cnxn, hotlist_names, owner_ids):
    """Return {(name.lower(), owner_id): hotlist_id} for found hotlists."""
    id_dict = {}
    for name in hotlist_names:
      for owner_id in owner_ids:
        hotlist = self.test_hotlists.get((name, owner_id))
        if hotlist:
          if not hotlist.owner_ids:  # Should never happen.
            logging.warning(
                'Unowned Hotlist: id:%r, name:%r', hotlist.hotlist_id,
                hotlist.name)
            continue
          id_dict[(name.lower(), owner_id)] = hotlist.hotlist_id
    return id_dict

  def GetHotlists(self, cnxn, hotlist_ids, use_cache=True):
    """Returns dict of {hotlist_id: hotlist PB}."""
    result = {}
    for hotlist_id in hotlist_ids:
      hotlist = self.hotlists_by_id.get(hotlist_id)
      if hotlist:
        result[hotlist_id] = hotlist
      else:
        raise features_svc.NoSuchHotlistException()
    return result

  def GetHotlistsByUserID(self, cnxn, user_id, use_cache=True):
    """Get a list of hotlist PBs for a given user."""
    hotlist_id_dict = self.LookupUserHotlists(cnxn, [user_id])
    hotlists = self.GetHotlists(cnxn, hotlist_id_dict.get(
        user_id, []), use_cache=use_cache)
    return list(hotlists.values())

  def GetHotlistsByIssueID(self, cnxn, issue_id, use_cache=True):
    """Get a list of hotlist PBs for a given issue."""
    hotlist_id_dict = self.LookupIssueHotlists(cnxn, [issue_id])
    hotlists = self.GetHotlists(cnxn, hotlist_id_dict.get(
        issue_id, []), use_cache=use_cache)
    return list(hotlists.values())

  def GetHotlist(self, cnxn, hotlist_id, use_cache=True):
    """Return hotlist PB."""
    hotlist_id_dict = self.GetHotlists(cnxn, [hotlist_id], use_cache=use_cache)
    return hotlist_id_dict.get(hotlist_id)

  def GetHotlistsByID(self, cnxn, hotlist_ids, use_cache=True):
    """Return ({hotlist_id: hotlist PB}, [missing hotlist_ids])."""
    hotlists_dict = {}
    missed_ids = []
    for hotlist_id in hotlist_ids:
      hotlist = self.hotlists_by_id.get(hotlist_id)
      if hotlist:
        hotlists_dict[hotlist_id] = hotlist
      else:
        missed_ids.append(hotlist_id)
    return hotlists_dict, missed_ids

  def GetHotlistByID(self, cnxn, hotlist_id, use_cache=True):
    """Return the hotlist PB; KeyError if it does not exist."""
    hotlists_dict, _ = self.GetHotlistsByID(
        cnxn, [hotlist_id], use_cache=use_cache)
    return hotlists_dict[hotlist_id]

  def UpdateHotlistRoles(
      self, cnxn, hotlist_id, owner_ids, editor_ids, follower_ids, commit=True):
    """Replace the hotlist's owners, editors, and followers wholesale."""
    hotlist = self.hotlists_by_id.get(hotlist_id)
    if not hotlist:
      raise features_svc.NoSuchHotlistException(
          'Hotlist "%s" not found!' % hotlist_id)

    # Remove hotlist_ids to clear old roles
    for user_id in (hotlist.owner_ids + hotlist.editor_ids +
                    hotlist.follower_ids):
      if hotlist_id in self.hotlists_id_by_user[user_id]:
        self.hotlists_id_by_user[user_id].remove(hotlist_id)
    old_owner_id = None
    if hotlist.owner_ids:
      old_owner_id = hotlist.owner_ids[0]
    self.test_hotlists.pop((hotlist.name, old_owner_id), None)

    hotlist.owner_ids = owner_ids
    hotlist.editor_ids = editor_ids
    hotlist.follower_ids = follower_ids

    # Add new hotlist roles
    for user_id in owner_ids+editor_ids+follower_ids:
      user_hotlists = self.hotlists_id_by_user.setdefault(user_id, [])
      if hotlist_id not in user_hotlists:
        user_hotlists.append(hotlist_id)
    new_owner_id = None
    if owner_ids:
      new_owner_id = owner_ids[0]
    self.test_hotlists[(hotlist.name, new_owner_id)] = hotlist

  def DeleteHotlist(self, cnxn, hotlist_id, commit=True):
    """Delete the hotlist and remove it from every index."""
    hotlist = self.hotlists_by_id.pop(hotlist_id, None)
    if hotlist is not None:
      user_ids = hotlist.owner_ids+hotlist.editor_ids+hotlist.follower_ids
      for user_id in user_ids:
        try:
          self.hotlists_id_by_user[user_id].remove(hotlist_id)
        except (ValueError, KeyError):
          pass
      for item in hotlist.items:
        try:
          self.hotlists_id_by_issue[item.issue_id].remove(hotlist_id)
        except (ValueError, KeyError):
          pass
      # Popping per owner covers the (name, owner_ids[0]) entry too, and
      # avoids the IndexError the original's unconditional owner_ids[0]
      # access caused for unowned hotlists.
      for owner_id in hotlist.owner_ids:
        self.test_hotlists.pop((hotlist.name, owner_id), None)

  def ExpungeHotlists(
      self, cnxn, hotlist_ids, star_svc, user_svc, chart_svc, commit=True):
    """Record the hotlists as expunged and delete them."""
    self.expunged_hotlist_ids.extend(hotlist_ids)
    for hotlist_id in hotlist_ids:
      self.DeleteHotlist(cnxn, hotlist_id)

  def ExpungeUsersInHotlists(
      self, cnxn, user_ids, star_svc, user_svc, chart_svc):
    """Record the users as expunged from hotlist data."""
    self.expunged_users_in_hotlists.extend(user_ids)

  # end of Hotlist functions

  def GetRecentCommands(self, cnxn, user_id, project_id):
    """This always returns empty results. Mock it to test other cases."""
    return [], []

  def ExpungeSavedQueriesExecuteInProject(self, _cnxn, project_id):
    """Record the project's saved queries as expunged."""
    self.expunged_saved_queries.append(project_id)

  def ExpungeSavedQueriesByUsers(self, cnxn, user_ids, limit=None):
    """Record the users' saved queries as expunged."""
    self.expunged_users_in_saved_queries.extend(user_ids)

  def ExpungeFilterRules(self, _cnxn, project_id):
    """Record the project's filter rules as expunged."""
    self.expunged_filter_rules.append(project_id)

  def ExpungeFilterRulesByUser(self, cnxn, user_ids_by_email):
    """Remove and return filter rules that reference the given users."""
    emails = user_ids_by_email.keys()
    user_ids = user_ids_by_email.values()
    project_rules_dict = collections.defaultdict(list)
    for project_id, rules in self.test_rules.items():
      for rule in rules:
        if rule.default_owner_id in user_ids:
          project_rules_dict[project_id].append(rule)
          continue
        if any(cc_id in user_ids for cc_id in rule.add_cc_ids):
          project_rules_dict[project_id].append(rule)
          continue
        if any(addr in emails for addr in rule.add_notify_addrs):
          project_rules_dict[project_id].append(rule)
          continue
        if any((email in rule.predicate) for email in emails):
          project_rules_dict[project_id].append(rule)
          continue
      self.test_rules[project_id] = [
          rule for rule in rules
          if rule not in project_rules_dict[project_id]]
    return project_rules_dict

  def ExpungeQuickEditHistory(self, _cnxn, project_id):
    """Record the project's quick-edit history as expunged."""
    self.expunged_quick_edit.append(project_id)

  def ExpungeQuickEditsByUsers(self, cnxn, user_ids, limit=None):
    """Record the users' quick edits as expunged."""
    self.expunged_users_in_quick_edits.extend(user_ids)

  def GetFilterRules(self, cnxn, project_id):
    """Return the project's filter rules (empty list if none)."""
    return self.test_rules[project_id]

  def GetCannedQueriesByProjectID(self, cnxn, project_id):
    """Return saved queries stored for the given project."""
    return [sq for (pid, _, sq) in self.saved_queries if pid == project_id]

  def GetSavedQueriesByUserID(self, cnxn, user_id):
    """Return saved queries stored for the given user."""
    return [sq for (_, uid, sq) in self.saved_queries if uid == user_id]

  def UpdateCannedQueries(self, cnxn, project_id, canned_queries):
    """Append the given canned queries under the given project."""
    self.saved_queries.extend(
        [(project_id, None, cq) for cq in canned_queries])

  def UpdateUserSavedQueries(self, cnxn, user_id, saved_queries):
    """Replace the user's saved queries with the given ones."""
    self.saved_queries = [
        (pid, uid, sq) for (pid, uid, sq) in self.saved_queries
        if uid != user_id]
    for sq in saved_queries:
      if sq.executes_in_project_ids:
        self.saved_queries.extend(
            [(eipid, user_id, sq) for eipid in sq.executes_in_project_ids])
      else:
        self.saved_queries.append((None, user_id, sq))

  def GetSubscriptionsInProjects(self, cnxn, project_ids):
    """Return {user_id: [saved_query, ...]} for the given projects."""
    sq_by_uid = {}
    for pid, uid, sq in self.saved_queries:
      if pid in project_ids:
        if uid in sq_by_uid:
          sq_by_uid[uid].append(sq)
        else:
          sq_by_uid[uid] = [sq]

    return sq_by_uid

  def GetSavedQuery(self, cnxn, query_id):
    """Return an empty SavedQuery PB regardless of query_id."""
    return tracker_pb2.SavedQuery()
| 2900 | |
| 2901 | |
class PostData(object):
  """A dictionary-like object that also implements getall()."""

  def __init__(self, *args, **kwargs):
    self.dictionary = dict(*args, **kwargs)

  def getall(self, key):
    """Return all values, assume that the value at key is already a list."""
    return self.dictionary.get(key, [])

  def getlist(self, key):
    """Return all values, assume that the value at key is already a list."""
    return self.getall(key)

  def get(self, key, default=None):
    """Return first value, assume that the value at key is already a list."""
    values = self.dictionary.get(key, [default])
    return values[0]

  def __getitem__(self, key):
    """Return first value, assume that the value at key is already a list."""
    values = self.dictionary[key]
    return values[0]

  def __contains__(self, key):
    return key in self.dictionary

  def keys(self):
    """Return the keys in the POST data."""
    return [key for key in self.dictionary]
| 2930 | |
| 2931 | |
class FakeFile:
  """Minimal in-RAM stand-in for a file object."""

  def __init__(self, data=None):
    self.data = data

  def read(self):
    """Return the entire canned payload."""
    return self.data

  def write(self, content):
    """Silently discard the written content."""
    return None

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    return None
| 2947 | |
| 2948 | |
def gcs_open(filename, mode):
  """Fake GCS open() that yields a FakeFile echoing the filename as data."""
  fake_file = FakeFile(filename)
  return fake_file