1# Copyright 2022 The Chromium Authors
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5"""Helper functions and classes used by the Monorail Issue Tracker pages.
6
7This module has functions that are reused in multiple servlets or
8other modules.
9"""
10from __future__ import print_function
11from __future__ import division
12from __future__ import absolute_import
13
14import collections
15import itertools
16import logging
17import re
18import time
19from six.moves import urllib
20
21from google.appengine.api import app_identity
22
23from six import string_types
24
25import settings
26
27from features import federated
28from framework import authdata
29from framework import exceptions
30from framework import filecontent
31from framework import framework_bizobj
32from framework import framework_constants
33from framework import framework_helpers
34from framework import framework_views
35from framework import permissions
36from framework import sorting
37from framework import template_helpers
38from framework import urls
39from project import project_helpers
40from mrproto import tracker_pb2
41from services import client_config_svc
42from tracker import field_helpers
43from tracker import tracker_bizobj
44from tracker import tracker_constants
45
46
47# HTML input field names for blocked on and blocking issue refs.
48BLOCKED_ON = 'blocked_on'
49BLOCKING = 'blocking'
50
51# This string is used in HTML form element names to identify custom fields.
52# E.g., a value for a custom field with field_id 12 would be specified in
53# an HTML form element with name="custom_12".
54_CUSTOM_FIELD_NAME_PREFIX = 'custom_'
55
56# When the attachment quota gets within 1MB of the limit, stop offering
57# users the option to attach files.
58_SOFT_QUOTA_LEEWAY = 1024 * 1024
59
60# Accessors for sorting built-in fields.
61SORTABLE_FIELDS = {
62 'project': lambda issue: issue.project_name,
63 'id': lambda issue: issue.local_id,
64 'owner': tracker_bizobj.GetOwnerId, # And postprocessor
65 'reporter': lambda issue: issue.reporter_id, # And postprocessor
66 'component': lambda issue: issue.component_ids,
67 'cc': tracker_bizobj.GetCcIds, # And postprocessor
68 'summary': lambda issue: issue.summary.lower(),
69 'stars': lambda issue: issue.star_count,
70 'attachments': lambda issue: issue.attachment_count,
71 'opened': lambda issue: issue.opened_timestamp,
72 'closed': lambda issue: issue.closed_timestamp,
73 'modified': lambda issue: issue.modified_timestamp,
74 'status': tracker_bizobj.GetStatus,
75 'blocked': lambda issue: bool(issue.blocked_on_iids),
76 'blockedon': lambda issue: issue.blocked_on_iids or sorting.MAX_STRING,
77 'blocking': lambda issue: issue.blocking_iids or sorting.MAX_STRING,
78 'mergedinto': lambda issue: issue.merged_into or sorting.MAX_STRING,
79 'ownermodified': lambda issue: issue.owner_modified_timestamp,
80 'statusmodified': lambda issue: issue.status_modified_timestamp,
81 'componentmodified': lambda issue: issue.component_modified_timestamp,
82 'ownerlastvisit': tracker_bizobj.GetOwnerId, # And postprocessor
83 }
84
85# Some fields take a user ID from the issue and then use that to index
86# into a dictionary of user views, and then get a field of the user view
87# as the sort key value.
88SORTABLE_FIELDS_POSTPROCESSORS = {
89 'owner': lambda user_view: user_view.email,
90 'reporter': lambda user_view: user_view.email,
91 'cc': lambda user_view: user_view.email,
92 'ownerlastvisit': lambda user_view: -user_view.user.last_visit_timestamp,
93 }
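# For example, sorting by 'owner' first maps an issue to its owner's user ID
# via SORTABLE_FIELDS['owner'], then the postprocessor maps the corresponding
# user view to user_view.email, and that email string is the actual sort key.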
94
95# Here are some restriction labels to help people do the most common things
96# that they might want to do with restrictions.
97_FREQUENT_ISSUE_RESTRICTIONS = [
98 (permissions.VIEW, permissions.EDIT_ISSUE,
99 'Only users who can edit the issue may access it'),
100 (permissions.ADD_ISSUE_COMMENT, permissions.EDIT_ISSUE,
101 'Only users who can edit the issue may add comments'),
102 ]
103
104# These issue restrictions should be offered as examples whenever the project
105# does not have any custom permissions in use already.
106_EXAMPLE_ISSUE_RESTRICTIONS = [
107 (permissions.VIEW, 'CoreTeam',
108 'Custom permission CoreTeam is needed to access'),
109 ]
110
111# Namedtuples that hold data parsed from post_data.
112ParsedComponents = collections.namedtuple(
113 'ParsedComponents', 'entered_str, paths, paths_remove')
114ParsedFields = collections.namedtuple(
115 'ParsedFields',
116 'vals, vals_remove, fields_clear, '
117 'phase_vals, phase_vals_remove')
118ParsedUsers = collections.namedtuple(
119 'ParsedUsers', 'owner_username, owner_id, cc_usernames, '
120 'cc_usernames_remove, cc_ids, cc_ids_remove')
121ParsedBlockers = collections.namedtuple(
122 'ParsedBlockers', 'entered_str, iids, dangling_refs, '
123 'federated_ref_strings')
124ParsedHotlistRef = collections.namedtuple(
125 'ParsedHotlistRef', 'user_email, hotlist_name')
126ParsedHotlists = collections.namedtuple(
127 'ParsedHotlists', 'entered_str, hotlist_refs')
128ParsedIssue = collections.namedtuple(
129 'ParsedIssue', 'summary, comment, is_description, status, users, labels, '
130 'labels_remove, components, fields, template_name, attachments, '
131 'kept_attachments, blocked_on, blocking, hotlists')
132
133
134def ParseIssueRequest(cnxn, post_data, services, errors, default_project_name):
135 """Parse all the possible arguments out of the request.
136
137 Args:
138 cnxn: connection to SQL database.
139 post_data: HTML form information.
140 services: Connections to persistence layer.
141 errors: object to accumulate validation error info.
142 default_project_name: name of the project that contains the issue.
143
144 Returns:
145 A namedtuple with all parsed information. User IDs are looked up, but
146 also the strings are returned to allow bouncing the user back to correct
147 any errors.
148 """
149 summary = post_data.get('summary', '')
150 comment = post_data.get('comment', '')
151 is_description = bool(post_data.get('description', ''))
152 status = post_data.get('status', '')
153 template_name = urllib.parse.unquote_plus(post_data.get('template_name', ''))
154 component_str = post_data.get('components', '')
155 label_strs = post_data.getlist('label')
156
157 if is_description:
158 tmpl_txt = post_data.get('tmpl_txt', '')
159 comment = MarkupDescriptionOnInput(comment, tmpl_txt)
160
161 comp_paths, comp_paths_remove = _ClassifyPlusMinusItems(
162 re.split(r'[,;\s]+', component_str))
163 parsed_components = ParsedComponents(
164 component_str, comp_paths, comp_paths_remove)
165 labels, labels_remove = _ClassifyPlusMinusItems(label_strs)
166 parsed_fields = _ParseIssueRequestFields(post_data)
167 # TODO(jrobbins): change from numbered fields to a multi-valued field.
168 attachments = _ParseIssueRequestAttachments(post_data)
169 kept_attachments = _ParseIssueRequestKeptAttachments(post_data)
170 parsed_users = _ParseIssueRequestUsers(cnxn, post_data, services)
171 parsed_blocked_on = _ParseBlockers(
172 cnxn, post_data, services, errors, default_project_name, BLOCKED_ON)
173 parsed_blocking = _ParseBlockers(
174 cnxn, post_data, services, errors, default_project_name, BLOCKING)
175 parsed_hotlists = _ParseHotlists(post_data)
176
177 parsed_issue = ParsedIssue(
178 summary, comment, is_description, status, parsed_users, labels,
179 labels_remove, parsed_components, parsed_fields, template_name,
180 attachments, kept_attachments, parsed_blocked_on, parsed_blocking,
181 parsed_hotlists)
182 return parsed_issue
183
184
185def MarkupDescriptionOnInput(content, tmpl_text):
186 """Return HTML for the content of an issue description or comment.
187
188 Args:
189 content: the text submitted by the user; any user-entered markup
190 has already been escaped.
191 tmpl_text: the initial text that was put into the textarea.
192
193 Returns:
194 The description content text with template lines highlighted.
195 """
196 tmpl_lines = tmpl_text.split('\n')
197 tmpl_lines = [pl.strip() for pl in tmpl_lines if pl.strip()]
198
199 entered_lines = content.split('\n')
200 marked_lines = [_MarkupDescriptionLineOnInput(line, tmpl_lines)
201 for line in entered_lines]
202 return '\n'.join(marked_lines)
203
204
205def _MarkupDescriptionLineOnInput(line, tmpl_lines):
206 """Markup one line of an issue description that was just entered.
207
208 Args:
209 line: string containing one line of the user-entered comment.
210 tmpl_lines: list of strings for the text of the template lines.
211
212 Returns:
213 The same user-entered line, or that line highlighted to
214 indicate that it came from the issue template.
215 """
216 for tmpl_line in tmpl_lines:
217 if line.startswith(tmpl_line):
218 return '<b>' + tmpl_line + '</b>' + line[len(tmpl_line):]
219
220 return line
221
222
223def _ClassifyPlusMinusItems(add_remove_list):
224 """Classify the given plus-or-minus items into add and remove lists."""
225 add_remove_set = {s.strip() for s in add_remove_list}
226 add_strs = [s for s in add_remove_set if s and not s.startswith('-')]
227 remove_strs = [s[1:] for s in add_remove_set if s[1:] and s.startswith('-')]
228 return add_strs, remove_strs
229
230
231def _ParseHotlists(post_data):
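  # Hotlist references may be a full 'owner@example.com:hotlist-name' or a
  # bare 'hotlist-name'. For example (hypothetical input),
  # 'a@example.com:perf, untriaged' parses to
  # [ParsedHotlistRef('a@example.com', 'perf'), ParsedHotlistRef(None, 'untriaged')].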
232 entered_str = post_data.get('hotlists', '').strip()
233 hotlist_refs = []
234 for ref_str in re.split(r'[,;\s]+', entered_str):
235 if not ref_str:
236 continue
237 if ':' in ref_str:
238 if ref_str.split(':')[0]:
239 # E-mail isn't empty; full reference.
240 hotlist_refs.append(ParsedHotlistRef(*ref_str.split(':', 1)))
241 else:
242 # Short reference.
243 hotlist_refs.append(ParsedHotlistRef(None, ref_str.split(':', 1)[1]))
244 else:
245 # Short reference
246 hotlist_refs.append(ParsedHotlistRef(None, ref_str))
247 parsed_hotlists = ParsedHotlists(entered_str, hotlist_refs)
248 return parsed_hotlists
249
250
251def _ParseIssueRequestFields(post_data):
252 """Iterate over post_data and return custom field values found in it."""
253 field_val_strs = {}
254 field_val_strs_remove = {}
255 phase_field_val_strs = collections.defaultdict(dict)
256 phase_field_val_strs_remove = collections.defaultdict(dict)
257 for key in post_data.keys():
258 if key.startswith(_CUSTOM_FIELD_NAME_PREFIX):
259 val_strs = [v for v in post_data.getlist(key) if v]
260 if val_strs:
261 try:
262 field_id = int(key[len(_CUSTOM_FIELD_NAME_PREFIX):])
263 phase_name = None
264 except ValueError: # key must be in format <field_id>_<phase_name>
265 field_id, phase_name = key[len(_CUSTOM_FIELD_NAME_PREFIX):].split(
266 '_', 1)
267 field_id = int(field_id)
268 if post_data.get('op_' + key) == 'remove':
269 if phase_name:
270 phase_field_val_strs_remove[field_id][phase_name] = val_strs
271 else:
272 field_val_strs_remove[field_id] = val_strs
273 else:
274 if phase_name:
275 phase_field_val_strs[field_id][phase_name] = val_strs
276 else:
277 field_val_strs[field_id] = val_strs
278
279 # TODO(jojwang): monorail:5154, no support for clearing phase field values.
280 fields_clear = []
281 op_prefix = 'op_' + _CUSTOM_FIELD_NAME_PREFIX
282 for op_key in post_data.keys():
283 if op_key.startswith(op_prefix):
284 if post_data.get(op_key) == 'clear':
285 field_id = int(op_key[len(op_prefix):])
286 fields_clear.append(field_id)
287
288 return ParsedFields(
289 field_val_strs, field_val_strs_remove, fields_clear,
290 phase_field_val_strs, phase_field_val_strs_remove)
291
292
293def _ParseIssueRequestAttachments(post_data):
294 """Extract and clean-up any attached files from the post data.
295
296 Args:
297 post_data: dict w/ values from the user's HTTP POST form data.
298
299 Returns:
300 [(filename, filecontents, mimetype), ...] with items for each attachment.
301 """
302 # TODO(jrobbins): change from numbered fields to a multi-valued field.
303 attachments = []
304 for i in range(1, 16):
305 if 'file%s' % i in post_data:
306 item = post_data['file%s' % i]
307 if isinstance(item, string_types):
308 continue
309 if '\\' in item.filename: # IE insists on giving us the whole path.
310 item.filename = item.filename[item.filename.rindex('\\') + 1:]
311 if not item.filename:
312 continue # Skip any FILE fields that were not filled in.
313 attachments.append(
314 (
315 item.filename, item.read(),
316 filecontent.GuessContentTypeFromFilename(item.filename)))
317
318 return attachments
319
320
321def _ParseIssueRequestKeptAttachments(post_data):
322 """Extract attachment ids for attachments kept when updating description
323
324 Args:
325 post_data: dict w/ values from the user's HTTP POST form data.
326
327 Returns:
328 a list of attachment ids for kept attachments
329 """
330 kept_attachments = post_data.getlist('keep-attachment')
331 return [int(aid) for aid in kept_attachments]
332
333
334def _ParseIssueRequestUsers(cnxn, post_data, services):
335 """Extract usernames from the POST data, categorize them, and look up IDs.
336
337 Args:
338 cnxn: connection to SQL database.
339 post_data: dict w/ data from the HTTP POST.
340 services: Services.
341
342 Returns:
343 A namedtuple (owner_username, owner_id, cc_usernames, cc_usernames_remove,
344 cc_ids, cc_ids_remove), containing:
345 - issue owner's name and user ID, if any
346 - the list of all cc'd usernames
347 - the user IDs to add or remove from the issue CC list.
348 Any of these user IDs may be None if the corresponding username
349 or email address is invalid.
350 """
351 # Get the user-entered values from post_data.
352 cc_username_str = post_data.get('cc', '').lower()
353 owner_email = post_data.get('owner', '').strip().lower()
354
355 cc_usernames, cc_usernames_remove = _ClassifyPlusMinusItems(
356 re.split(r'[,;\s]+', cc_username_str))
357
358 # Figure out the email addresses to lookup and do the lookup.
359 emails_to_lookup = cc_usernames + cc_usernames_remove
360 if owner_email:
361 emails_to_lookup.append(owner_email)
362 all_user_ids = services.user.LookupUserIDs(
363 cnxn, emails_to_lookup, autocreate=True)
364 if owner_email:
365 owner_id = all_user_ids.get(owner_email)
366 else:
367 owner_id = framework_constants.NO_USER_SPECIFIED
368
369 # Lookup the user IDs of the Cc addresses to add or remove.
370 cc_ids = [all_user_ids.get(cc) for cc in cc_usernames if cc]
371 cc_ids_remove = [all_user_ids.get(cc) for cc in cc_usernames_remove if cc]
372
373 return ParsedUsers(owner_email, owner_id, cc_usernames, cc_usernames_remove,
374 cc_ids, cc_ids_remove)
375
376
377def _ParseBlockers(cnxn, post_data, services, errors, default_project_name,
378 field_name):
379 """Parse input for issues that the current issue is blocking/blocked on.
380
381 Args:
382 cnxn: connection to SQL database.
383 post_data: dict w/ values from the user's HTTP POST.
384 services: connections to backend services.
385 errors: object to accumulate validation error info.
386 default_project_name: name of the project that contains the issue.
387 field_name: string HTML input field name, e.g., BLOCKED_ON or BLOCKING.
388
389 Returns:
390 A namedtuple with the user input string, and a list of issue IDs.
391 """
392 entered_str = post_data.get(field_name, '').strip()
393 blocker_iids = []
394 dangling_ref_tuples = []
395 federated_ref_strings = []
396
397 issue_ref = None
398 for ref_str in re.split(r'[,;\s]+', entered_str):
399 # Handle federated references.
400 if federated.IsShortlinkValid(ref_str):
401 federated_ref_strings.append(ref_str)
402 continue
403
404 try:
405 issue_ref = tracker_bizobj.ParseIssueRef(ref_str)
406 except ValueError:
407 setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
408 break
409
410 if not issue_ref:
411 continue
412
413 blocker_project_name, blocker_issue_id = issue_ref
414 if not blocker_project_name:
415 blocker_project_name = default_project_name
416
417 # Detect and report if the same issue was specified.
418 current_issue_id = int(post_data.get('id')) if post_data.get('id') else -1
419 if (blocker_issue_id == current_issue_id and
420 blocker_project_name == default_project_name):
421 setattr(errors, field_name, 'Cannot be %s the same issue' % field_name)
422 break
423
424 ref_projects = services.project.GetProjectsByName(
425 cnxn, set([blocker_project_name]))
426 blocker_iid, _misses = services.issue.ResolveIssueRefs(
427 cnxn, ref_projects, default_project_name, [issue_ref])
428 if not blocker_iid:
429 if blocker_project_name in settings.recognized_codesite_projects:
430 # We didn't find the issue, but it had an explicitly-specified project
431 # which we know is on Codesite. Allow it as a dangling reference.
432 dangling_ref_tuples.append(issue_ref)
433 continue
434 else:
435 # Otherwise, it doesn't exist, so report it.
436 setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
437 break
438 if blocker_iid[0] not in blocker_iids:
439 blocker_iids.extend(blocker_iid)
440
441 blocker_iids.sort()
442 dangling_ref_tuples.sort()
443 return ParsedBlockers(entered_str, blocker_iids, dangling_ref_tuples,
444 federated_ref_strings)
445
446
447def PairDerivedValuesWithRuleExplanations(
448 proposed_issue, traces, derived_users_by_id):
449 """Pair up values and explanations into JSON objects."""
450 derived_labels_and_why = [
451 {'value': lab,
452 'why': traces.get((tracker_pb2.FieldID.LABELS, lab))}
453 for lab in proposed_issue.derived_labels]
454
455 derived_users_by_id = {
456 user_id: user_view.display_name
457 for user_id, user_view in derived_users_by_id.items()
458 if user_view.display_name}
459
460 derived_owner_and_why = []
461 if proposed_issue.derived_owner_id:
462 derived_owner_and_why = [{
463 'value': derived_users_by_id[proposed_issue.derived_owner_id],
464 'why': traces.get(
465 (tracker_pb2.FieldID.OWNER, proposed_issue.derived_owner_id))}]
466 derived_cc_and_why = [
467 {'value': derived_users_by_id[cc_id],
468 'why': traces.get((tracker_pb2.FieldID.CC, cc_id))}
469 for cc_id in proposed_issue.derived_cc_ids
470 if cc_id in derived_users_by_id]
471
472 warnings_and_why = [
473 {'value': warning,
474 'why': traces.get((tracker_pb2.FieldID.WARNING, warning))}
475 for warning in proposed_issue.derived_warnings]
476
477 errors_and_why = [
478 {'value': error,
479 'why': traces.get((tracker_pb2.FieldID.ERROR, error))}
480 for error in proposed_issue.derived_errors]
481
482 return (derived_labels_and_why, derived_owner_and_why, derived_cc_and_why,
483 warnings_and_why, errors_and_why)
484
485
486def IsValidIssueOwner(cnxn, project, owner_id, services):
487 """Return True if the given user ID can be an issue owner.
488
489 Args:
490 cnxn: connection to SQL database.
491 project: the current Project PB.
492 owner_id: the user ID of the proposed issue owner.
493 services: connections to backends.
494
495 It is OK to have 0 for the owner_id; that simply means that the issue is
496 unassigned.
497
498 Returns:
499 A pair (valid, err_msg). valid is True if the given user ID can be an
500 issue owner. err_msg is an error message string to display to the user
501 if valid == False, and is None if valid == True.
502 """
503 # An issue is always allowed to have no owner specified.
504 if owner_id == framework_constants.NO_USER_SPECIFIED:
505 return True, None
506
507 try:
508 auth = authdata.AuthData.FromUserID(cnxn, owner_id, services)
509 if not framework_bizobj.UserIsInProject(project, auth.effective_ids):
510 return False, 'Issue owner must be a project member.'
511 except exceptions.NoSuchUserException:
512 return False, 'Issue owner user ID not found.'
513
514 group_ids = services.usergroup.DetermineWhichUserIDsAreGroups(
515 cnxn, [owner_id])
516 if owner_id in group_ids:
517 return False, 'Issue owner cannot be a user group.'
518
519 return True, None
520
521
522def GetAllowedOpenedAndClosedIssues(mr, issue_ids, services):
523 """Get filtered lists of open and closed issues identified by issue_ids.
524
525 The function then filters the results to only the issues that the user
526 is allowed to view. E.g., we only auto-link to issues that the user
527 would be able to view if they clicked the link.
528
529 Args:
530 mr: commonly used info parsed from the request.
531 issue_ids: list of int issue IDs for the target issues.
532 services: connection to issue, config, and project persistence layers.
533
534 Returns:
535 Two lists of issues that the user is allowed to view: one for open
536 issues and one for closed issues.
537 """
538 open_issues, closed_issues = services.issue.GetOpenAndClosedIssues(
539 mr.cnxn, issue_ids)
540 return GetAllowedIssues(mr, [open_issues, closed_issues], services)
541
542
543def GetAllowedIssues(mr, issue_groups, services):
544 """Filter lists of issues identified by issue_groups.
545
546 Args:
547 mr: commonly used info parsed from the request.
548 issue_groups: list of list of issues to filter.
549 services: connection to issue, config, and project persistence layers.
550
551 Returns:
552 List of filtered list of issues.
553 """
554
555 project_dict = GetAllIssueProjects(
556 mr.cnxn, itertools.chain.from_iterable(issue_groups), services.project)
557 config_dict = services.config.GetProjectConfigs(mr.cnxn,
558 list(project_dict.keys()))
559 return [FilterOutNonViewableIssues(
560 mr.auth.effective_ids, mr.auth.user_pb, project_dict, config_dict,
561 issues)
562 for issues in issue_groups]
563
564
565def MakeViewsForUsersInIssues(cnxn, issue_list, user_service, omit_ids=None):
566 """Lookup all the users involved in any of the given issues.
567
568 Args:
569 cnxn: connection to SQL database.
570 issue_list: list of Issue PBs from a result query.
571 user_service: Connection to User backend storage.
572 omit_ids: a list of user_ids to omit, e.g., because we already have them.
573
574 Returns:
575 A dictionary {user_id: user_view,...} for all the users involved
576 in the given issues.
577 """
578 issue_participant_id_set = tracker_bizobj.UsersInvolvedInIssues(issue_list)
579 if omit_ids:
580 issue_participant_id_set.difference_update(omit_ids)
581
582 # TODO(jrobbins): consider caching View objects as well.
583 users_by_id = framework_views.MakeAllUserViews(
584 cnxn, user_service, issue_participant_id_set)
585
586 return users_by_id
587
588
589def FormatIssueListURL(
590 mr, config, absolute=True, project_names=None, **kwargs):
591 """Format a link back to list view as configured by user."""
592 if project_names is None:
593 project_names = [mr.project_name]
594 if tracker_constants.JUMP_RE.match(mr.query):
595 kwargs['q'] = 'id=%s' % mr.query
596 kwargs['can'] = 1 # The specified issue might be closed.
597 else:
598 kwargs['q'] = mr.query
599 if mr.can and mr.can != 2:
600 kwargs['can'] = mr.can
601 def_col_spec = config.default_col_spec
602 if mr.col_spec and mr.col_spec != def_col_spec:
603 kwargs['colspec'] = mr.col_spec
604 if mr.sort_spec:
605 kwargs['sort'] = mr.sort_spec
606 if mr.group_by_spec:
607 kwargs['groupby'] = mr.group_by_spec
608 if mr.start:
609 kwargs['start'] = mr.start
610 if mr.num != tracker_constants.DEFAULT_RESULTS_PER_PAGE:
611 kwargs['num'] = mr.num
612
613 if len(project_names) == 1:
614 url = '/p/%s%s' % (project_names[0], urls.ISSUE_LIST)
615 else:
616 url = urls.ISSUE_LIST
617 kwargs['projects'] = ','.join(sorted(project_names))
618
619 param_strings = [
620 '%s=%s' % (k, urllib.parse.quote((u'%s' % v).encode('utf-8')))
621 for k, v in kwargs.items()
622 ]
623 if param_strings:
624 url += '?' + '&'.join(sorted(param_strings))
625 if absolute:
626 url = '%s://%s%s' % (mr.request.scheme, mr.request.host, url)
627
628 return url
629
630
631def FormatRelativeIssueURL(project_name, path, **kwargs):
632 """Format a URL to get to an issue in the named project.
633
634 Args:
635 project_name: string name of the project containing the issue.
636 path: string servlet path, e.g., from framework/urls.py.
637 **kwargs: additional query-string parameters to include in the URL.
638
639 Returns:
640 A URL string.
641 """
642 return framework_helpers.FormatURL(
643 None, '/p/%s%s' % (project_name, path), **kwargs)
644
645
646def FormatCrBugURL(project_name, local_id):
647 """Format a short URL to get to an issue in the named project.
648
649 Args:
650 project_name: string name of the project containing the issue.
651 local_id: int local ID of the issue.
652
653 Returns:
654 A URL string.
655 """
656 if app_identity.get_application_id() != 'monorail-prod':
657 return FormatRelativeIssueURL(
658 project_name, urls.ISSUE_DETAIL, id=local_id)
659
660 if project_name == 'chromium':
661 return 'https://crbug.com/%d' % local_id
662
663 return 'https://crbug.com/%s/%d' % (project_name, local_id)
664
665
666def ComputeNewQuotaBytesUsed(project, attachments):
667 """Add the given attachments to the project's attachment quota usage.
668
669 Args:
670 project: Project PB for the project being updated.
671 attachments: a list of attachments being added to an issue.
672
673 Returns:
674 The new number of bytes used.
675
676 Raises:
677 OverAttachmentQuota: If project would go over quota.
678 """
679 total_attach_size = 0
680 for _filename, content, _mimetype in attachments:
681 total_attach_size += len(content)
682
683 new_bytes_used = project.attachment_bytes_used + total_attach_size
684 quota = (project.attachment_quota or
685 tracker_constants.ISSUE_ATTACHMENTS_QUOTA_HARD)
686 if new_bytes_used > quota:
687 raise exceptions.OverAttachmentQuota(new_bytes_used - quota)
688 return new_bytes_used
689
690
691def IsUnderSoftAttachmentQuota(project):
692 """Check the project's attachment quota against the soft quota limit.
693
694 If there is a custom quota on the project, this will check against
695 that instead of the system-wide default quota.
696
697 Args:
698 project: Project PB for the project to examine
699
700 Returns:
701 True if the project is under quota, false otherwise.
702 """
703 quota = tracker_constants.ISSUE_ATTACHMENTS_QUOTA_SOFT
704 if project.attachment_quota:
705 quota = project.attachment_quota - _SOFT_QUOTA_LEEWAY
706
707 return project.attachment_bytes_used < quota
708
709
710def GetAllIssueProjects(cnxn, issues, project_service):
711 """Get all the projects that the given issues belong to.
712
713 Args:
714 cnxn: connection to SQL database.
715 issues: list of issues, which may come from different projects.
716 project_service: connection to project persistence layer.
717
718 Returns:
719 A dictionary {project_id: project} of all the projects that
720 any of the given issues belongs to.
721 """
722 needed_project_ids = {issue.project_id for issue in issues}
723 project_dict = project_service.GetProjects(cnxn, needed_project_ids)
724 return project_dict
725
726
727def GetPermissionsInAllProjects(user, effective_ids, projects):
728 """Look up the permissions for the given user in each project."""
729 return {
730 project.project_id:
731 permissions.GetPermissions(user, effective_ids, project)
732 for project in projects}
733
734
735def FilterOutNonViewableIssues(
736 effective_ids, user, project_dict, config_dict, issues):
737 """Return a filtered list of issues that the user can view."""
738 perms_dict = GetPermissionsInAllProjects(
739 user, effective_ids, list(project_dict.values()))
740
741 denied_project_ids = {
742 pid for pid, p in project_dict.items()
743 if not permissions.CanView(effective_ids, perms_dict[pid], p, [])}
744
745 results = []
746 for issue in issues:
747 if issue.deleted or issue.project_id in denied_project_ids:
748 continue
749
750 if not permissions.HasRestrictions(issue):
751 may_view = True
752 else:
753 perms = perms_dict[issue.project_id]
754 project = project_dict[issue.project_id]
755 config = config_dict.get(issue.project_id, config_dict.get('harmonized'))
756 granted_perms = tracker_bizobj.GetGrantedPerms(
757 issue, effective_ids, config)
758 may_view = permissions.CanViewIssue(
759 effective_ids, perms, project, issue, granted_perms=granted_perms)
760
761 if may_view:
762 results.append(issue)
763
764 return results
765
766
767def MeansOpenInProject(status, config):
768 """Return true if this status means that the issue is still open.
769
770 This defaults to true if we could not find a matching status.
771
772 Args:
773 status: issue status string. E.g., 'New'.
774 config: the config of the current project.
775
776 Returns:
777 Boolean True if the status means that the issue is open.
778 """
779 status_lower = status.lower()
780
781 # iterate over the list of known statuses for this project
782 # return true if we find a match that declares itself to be open
783 for wks in config.well_known_statuses:
784 if wks.status.lower() == status_lower:
785 return wks.means_open
786
787 return True
788
789
790def IsNoisy(num_comments, num_starrers):
791 """Return True if this is a "noisy" issue that would send a ton of emails.
792
793 The rule is that a very active issue with a large number of comments
794 and starrers will only send notification when a comment (or change)
795 is made by a project member.
796
797 Args:
798 num_comments: int number of comments on issue so far.
799 num_starrers: int number of users who starred the issue.
800
801 Returns:
802 True if we will not bother starrers with an email notification for
803 changes made by non-members.
804 """
805 return (num_comments >= tracker_constants.NOISY_ISSUE_COMMENT_COUNT and
806 num_starrers >= tracker_constants.NOISY_ISSUE_STARRER_COUNT)
807
808
809def MergeCCsAndAddComment(services, mr, issue, merge_into_issue):
810 """Modify the CC field of the target issue and add a comment to it."""
811 return MergeCCsAndAddCommentMultipleIssues(
812 services, mr, [issue], merge_into_issue)
813
814
815def MergeCCsAndAddCommentMultipleIssues(
816 services, mr, issues, merge_into_issue):
817 """Modify the CC field of the target issue and add a comment to it."""
818 merge_comment = ''
819 for issue in issues:
820 if issue.project_name == merge_into_issue.project_name:
821 issue_ref_str = '%d' % issue.local_id
822 else:
823 issue_ref_str = '%s:%d' % (issue.project_name, issue.local_id)
824 if merge_comment:
825 merge_comment += '\n'
826 merge_comment += 'Issue %s has been merged into this issue.' % issue_ref_str
827
828 add_cc = _ComputeNewCcsFromIssueMerge(merge_into_issue, issues)
829
830 config = services.config.GetProjectConfig(
831 mr.cnxn, merge_into_issue.project_id)
832 delta = tracker_pb2.IssueDelta(cc_ids_add=add_cc)
833 _, merge_comment_pb = services.issue.DeltaUpdateIssue(
834 mr.cnxn, services, mr.auth.user_id, merge_into_issue.project_id,
835 config, merge_into_issue, delta, index_now=False, comment=merge_comment)
836
837 return merge_comment_pb
838
839
840def GetAttachmentIfAllowed(mr, services):
841 """Retrieve the requested attachment, or raise an appropriate exception.
842
843 Args:
844 mr: commonly used info parsed from the request.
845 services: connections to backend services.
846
847 Returns:
848 The requested Attachment PB, and the Issue that it belongs to.
849
850 Raises:
851 NoSuchAttachmentException: attachment was not found or was marked deleted.
852 NoSuchIssueException: issue that contains attachment was not found.
853 PermissionException: the user is not allowed to view the attachment.
854 """
855 attachment = None
856
857 attachment, cid, issue_id = services.issue.GetAttachmentAndContext(
858 mr.cnxn, mr.aid)
859
860 issue = services.issue.GetIssue(mr.cnxn, issue_id)
861 config = services.config.GetProjectConfig(mr.cnxn, issue.project_id)
862 granted_perms = tracker_bizobj.GetGrantedPerms(
863 issue, mr.auth.effective_ids, config)
864 permit_view = permissions.CanViewIssue(
865 mr.auth.effective_ids, mr.perms, mr.project, issue,
866 granted_perms=granted_perms)
867 if not permit_view:
868 raise permissions.PermissionException('Cannot view attachment\'s issue')
869
870 comment = services.issue.GetComment(mr.cnxn, cid)
871 commenter = services.user.GetUser(mr.cnxn, comment.user_id)
872 issue_perms = permissions.UpdateIssuePermissions(
873 mr.perms, mr.project, issue, mr.auth.effective_ids,
874 granted_perms=granted_perms)
875 can_view_comment = permissions.CanViewComment(
876 comment, commenter, mr.auth.user_id, issue_perms)
877 if not can_view_comment:
878 raise permissions.PermissionException('Cannot view attachment\'s comment')
879
880 return attachment, issue
881
882
883def LabelsMaskedByFields(config, field_names, trim_prefix=False):
884 """Return a list of EZTItems for labels that would be masked by fields."""
885 return _LabelsMaskedOrNot(config, field_names, trim_prefix=trim_prefix)
886
887
888def LabelsNotMaskedByFields(config, field_names, trim_prefix=False):
889 """Return a list of EZTItems for labels that would not be masked."""
890 return _LabelsMaskedOrNot(
891 config, field_names, invert=True, trim_prefix=trim_prefix)
892
893
894def _LabelsMaskedOrNot(config, field_names, invert=False, trim_prefix=False):
895 """Return EZTItems for labels that'd be masked. Or not, when invert=True."""
896 field_names = [fn.lower() for fn in field_names]
897 result = []
898 for wkl in config.well_known_labels:
899 masked_by = tracker_bizobj.LabelIsMaskedByField(wkl.label, field_names)
900 if (masked_by and not invert) or (not masked_by and invert):
901 display_name = wkl.label
902 if trim_prefix:
903 display_name = display_name[len(masked_by) + 1:]
904 result.append(template_helpers.EZTItem(
905 name=display_name,
906 name_padded=display_name.ljust(20),
907 commented='#' if wkl.deprecated else '',
908 docstring=wkl.label_docstring,
909 docstring_short=template_helpers.FitUnsafeText(
910 wkl.label_docstring, 40),
911 idx=len(result)))
912
913 return result
914
915
916def LookupComponentIDs(component_paths, config, errors=None):
917 """Look up the IDs of the specified components in the given config."""
918 component_ids = []
919 for path in component_paths:
920 if not path:
921 continue
922 cd = tracker_bizobj.FindComponentDef(path, config)
923 if cd:
924 component_ids.append(cd.component_id)
925 else:
926 error_text = 'Unknown component %s' % path
927 if errors:
928 errors.components = error_text
929 else:
930 logging.info(error_text)
931
932 return component_ids
933
934
935def ParsePostDataUsers(cnxn, pd_users_str, user_service):
936 """Parse all the usernames from a users string found in a post data."""
937 emails, _remove = _ClassifyPlusMinusItems(re.split(r'[,;\s]+', pd_users_str))
938 users_ids_by_email = user_service.LookupUserIDs(cnxn, emails, autocreate=True)
939 user_ids = [users_ids_by_email[username] for username in emails if username]
940 return user_ids, pd_users_str
941
942
943def FilterIssueTypes(config):
944 """Return a list of well-known issue types."""
945 well_known_issue_types = []
946 for wk_label in config.well_known_labels:
947 if wk_label.label.lower().startswith('type-'):
948 _, type_name = wk_label.label.split('-', 1)
949 well_known_issue_types.append(type_name)
950
951 return well_known_issue_types
952
953
954def ParseMergeFields(
955 cnxn, services, project_name, post_data, status, config, issue, errors):
956 """Parse info that identifies the issue to merge into, if any."""
957 merge_into_text = ''
958 merge_into_ref = None
959 merge_into_issue = None
960
961 if status not in config.statuses_offer_merge:
962 return '', None
963
964 merge_into_text = post_data.get('merge_into', '')
965 if merge_into_text:
966 try:
967 merge_into_ref = tracker_bizobj.ParseIssueRef(merge_into_text)
968 except ValueError:
969 logging.info('merge_into not an int: %r', merge_into_text)
970 errors.merge_into_id = 'Please enter a valid issue ID'
971
972 if not merge_into_ref:
973 errors.merge_into_id = 'Please enter an issue ID'
974 return merge_into_text, None
975
976 merge_into_project_name, merge_into_id = merge_into_ref
977 if (merge_into_id == issue.local_id and
978 (merge_into_project_name == project_name or
979 not merge_into_project_name)):
980 logging.info('user tried to merge issue into itself: %r', merge_into_ref)
981 errors.merge_into_id = 'Cannot merge issue into itself'
982 return merge_into_text, None
983
984 project = services.project.GetProjectByName(
985 cnxn, merge_into_project_name or project_name)
986 try:
987 # Because we will modify this issue, load from DB rather than cache.
988 merge_into_issue = services.issue.GetIssueByLocalID(
989 cnxn, project.project_id, merge_into_id, use_cache=False)
990 except Exception:
991 logging.info('merge_into issue not found: %r', merge_into_ref)
992 errors.merge_into_id = 'No such issue'
993 return merge_into_text, None
994
995 return merge_into_text, merge_into_issue
996
997
998def GetNewIssueStarrers(cnxn, services, issue_ids, merge_into_iid):
999 # type: (MonorailConnection, Services, Sequence[int], int) ->
1000 # Collection[int]
1001 """Get starrers of current issue who have not starred the target issue."""
1002 source_starrers_dict = services.issue_star.LookupItemsStarrers(
1003 cnxn, issue_ids)
1004 source_starrers = list(
1005 itertools.chain.from_iterable(source_starrers_dict.values()))
1006 target_starrers = services.issue_star.LookupItemStarrers(
1007 cnxn, merge_into_iid)
1008 return set(source_starrers) - set(target_starrers)
1009
1010
1011def AddIssueStarrers(
1012 cnxn, services, mr, merge_into_iid, merge_into_project, new_starrers):
1013 """Merge all the starrers for the current issue into the target issue."""
1014 project = merge_into_project or mr.project
1015 config = services.config.GetProjectConfig(mr.cnxn, project.project_id)
1016 services.issue_star.SetStarsBatch(
1017 cnxn, services, config, merge_into_iid, new_starrers, True)
1018
1019
1020def CanEditProjectIssue(mr, project, issue, granted_perms):
1021 """Check if user permissions in another project allow editing.
1022
1023 Wraps CanEditIssue with a call to get user permissions in the given project.
1024
1025 We deviate from using CanUsePerm because that method does not calculate
1026 Project state as part of the permissions; that behavior appears to have diverged in
1027 2018. CanEditIssue uses Project state to authorize user actions.
1028 """
1029 project_perms = permissions.GetPermissions(
1030 mr.auth.user_pb, mr.auth.effective_ids, project)
1031 return permissions.CanEditIssue(
1032 mr.auth.effective_ids, project_perms, project, issue, granted_perms)
1033
1034
1035def GetVisibleMembers(mr, project, services):
1036 all_member_ids = project_helpers.AllProjectMembers(project)
1037
1038 all_group_ids = services.usergroup.DetermineWhichUserIDsAreGroups(
1039 mr.cnxn, all_member_ids)
1040
1041 (ac_exclusion_ids, no_expand_ids
1042 ) = services.project.GetProjectAutocompleteExclusion(
1043 mr.cnxn, project.project_id)
1044
1045 group_ids_to_expand = [
1046 gid for gid in all_group_ids if gid not in no_expand_ids]
1047
1048 # TODO(jrobbins): Normally, users will be allowed to view the members
1049 # of any user group if the project From: email address is listed
1050 # as a group member, as well as any group that they are personally
1051 # members of.
1052 member_ids, owner_ids = services.usergroup.LookupVisibleMembers(
1053 mr.cnxn, group_ids_to_expand, mr.perms, mr.auth.effective_ids, services)
1054 indirect_user_ids = set()
1055 for gids in member_ids.values():
1056 indirect_user_ids.update(gids)
1057 for gids in owner_ids.values():
1058 indirect_user_ids.update(gids)
1059
1060 visible_member_ids = _FilterMemberData(
1061 mr, project.owner_ids, project.committer_ids, project.contributor_ids,
1062 indirect_user_ids, project)
1063
1064 visible_member_ids = _MergeLinkedMembers(
1065 mr.cnxn, services.user, visible_member_ids)
1066
1067 visible_member_views = framework_views.MakeAllUserViews(
1068 mr.cnxn, services.user, visible_member_ids, group_ids=all_group_ids)
1069 framework_views.RevealAllEmailsToMembers(
1070 mr.cnxn, services, mr.auth, visible_member_views, project)
1071
1072 # Filter out service accounts
1073 service_acct_emails = set(
1074 client_config_svc.GetClientConfigSvc().GetClientIDEmails()[1])
1075 visible_member_views = {
1076 m.user_id: m
1077 for m in visible_member_views.values()
1078 # Hide service accounts from autocomplete.
1079 if not framework_helpers.IsServiceAccount(
1080 m.email, client_emails=service_acct_emails)
1081 # Hide users who opted out of autocomplete.
1082 and not m.user_id in ac_exclusion_ids
1083 # Hide users who have obscured email addresses.
1084 and not m.obscure_email
1085 }
1086
1087 return visible_member_views
1088
1089
1090def _MergeLinkedMembers(cnxn, user_service, user_ids):
1091 """Remove any linked child accounts if the parent would also be shown."""
1092 all_ids = set(user_ids)
1093 users_by_id = user_service.GetUsersByIDs(cnxn, user_ids)
1094 result = [uid for uid in user_ids
1095 if users_by_id[uid].linked_parent_id not in all_ids]
1096 return result
1097
1098
1099def _FilterMemberData(
1100 mr, owner_ids, committer_ids, contributor_ids, indirect_member_ids,
1101 project):
1102 """Return a filtered list of members that the user can view.
1103
1104 In most projects, everyone can view the entire member list. But,
1105 some projects are configured to only allow project owners to see
1106 all members. In those projects, committers and contributors do not
1107 see any contributors. Regardless of how the project is configured
1108 or the role that the user plays in the current project, we include
1109 any indirect members through user groups that the user has access
1110 to view.
1111
1112 Args:
1113 mr: Commonly used info parsed from the HTTP request.
1114 owner_ids: list of user IDs for project owners.
1115 committer_ids: list of user IDs for project committers.
1116 contributor_ids: list of user IDs for project contributors.
1117 indirect_member_ids: list of user IDs for users who have
1118 an indirect role in the project via a user group, and that the
1119 logged in user is allowed to see.
1120 project: the Project we're interested in.
1121
1122 Returns:
1123 A list of owners, committers and visible indirect members if the user is
1124 not signed in. If the project is set to display contributors to non-owners,
1125 or the signed-in user has the necessary permissions, the list additionally
1126 includes contributors.
1127 """
1128 visible_members_ids = set()
1129
1130 # Everyone can view owners and committers
1131 visible_members_ids.update(owner_ids)
1132 visible_members_ids.update(committer_ids)
1133
1134 # The list of indirect members is already limited to ones that the user
1135 # is allowed to see according to user group settings.
1136 visible_members_ids.update(indirect_member_ids)
1137
1138 # If the user is allowed to view the list of contributors, add those too.
1139 if permissions.CanViewContributorList(mr, project):
1140 visible_members_ids.update(contributor_ids)
1141
1142 return sorted(visible_members_ids)
1143
1144
1145def GetLabelOptions(config, custom_permissions):
1146 """Prepares label options for autocomplete."""
1147 labels = []
1148 field_names = [
1149 fd.field_name
1150 for fd in config.field_defs
1151 if not fd.is_deleted
1152 and fd.field_type is tracker_pb2.FieldTypes.ENUM_TYPE
1153 ]
1154 non_masked_labels = LabelsNotMaskedByFields(config, field_names)
1155 for wkl in non_masked_labels:
1156 if not wkl.commented:
1157 item = {'name': wkl.name, 'doc': wkl.docstring}
1158 labels.append(item)
1159
1160 frequent_restrictions = _FREQUENT_ISSUE_RESTRICTIONS[:]
1161 if not custom_permissions:
1162 frequent_restrictions.extend(_EXAMPLE_ISSUE_RESTRICTIONS)
1163
1164 labels.extend(_BuildRestrictionChoices(
1165 frequent_restrictions, permissions.STANDARD_ISSUE_PERMISSIONS,
1166 custom_permissions))
1167
1168 return labels
1169
1170
1171def _BuildRestrictionChoices(freq_restrictions, actions, custom_permissions):
1172 """Return a list of autocompletion choices for restriction labels.
1173
1174 Args:
1175 freq_restrictions: list of (action, perm, doc) tuples for restrictions
1176 that are frequently used.
1177 actions: list of strings for actions that are relevant to the current
1178 artifact.
1179 custom_permissions: list of strings with custom permissions for the project.
1180
1181 Returns:
1182 A list of dictionaries [{'name': 'perm name', 'doc': 'docstring'}, ...]
1183 suitable for use in a JSON feed to our JS autocompletion functions.
1184 """
1185 choices = []
1186
1187 for action, perm, doc in freq_restrictions:
1188 choices.append({
1189 'name': 'Restrict-%s-%s' % (action, perm),
1190 'doc': doc,
1191 })
1192
1193 for action in actions:
1194 for perm in custom_permissions:
1195 choices.append({
1196 'name': 'Restrict-%s-%s' % (action, perm),
1197 'doc': 'Permission %s needed to use %s' % (perm, action),
1198 })
1199
1200 return choices
1201
1202
1203def FilterKeptAttachments(
1204 is_description, kept_attachments, comments, approval_id):
1205 """Filter kept attachments to be a subset of last description's attachments.
1206
1207 Args:
1208 is_description: bool, if the comment is a change to the issue description.
1209 kept_attachments: list of ints with the attachment ids for attachments
1210 kept from previous descriptions, if the comment is a change to the
1211 issue description.
1212 comments: list of IssueComment PBs for the issue we want to edit.
1213 approval_id: int id of the APPROVAL_TYPE fielddef, if we're editing an
1214 approval description, or None otherwise.
1215
1216 Returns:
1217 A list of kept_attachment ids that are a subset of the last description.
1218 """
1219 if not is_description:
1220 return None
1221
1222 attachment_ids = set()
1223 for comment in reversed(comments):
1224 if comment.is_description and comment.approval_id == approval_id:
1225 attachment_ids = set([a.attachment_id for a in comment.attachments])
1226 break
1227
1228 kept_attachments = [
1229 aid for aid in kept_attachments if aid in attachment_ids]
1230 return kept_attachments
1231
1232
1233def _GetEnumFieldValuesAndDocstrings(field_def, config):
1234 # type: (mrproto.tracker_pb2.LabelDef,
1235 # mrproto.tracker_pb2.ProjectIssueConfig) -> Sequence[tuple(string, string)]
1236 """Get sequence of value, docstring tuples for an enum field"""
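  # Example (hypothetical config): for an enum field named 'Priority' with
  # well-known labels 'Priority-High' and 'Priority-Low', this returns
  # [('High', <docstring>), ('Low', <docstring>)]; deprecated labels are
  # skipped.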
1237 label_defs = config.well_known_labels
1238 lower_field_name = field_def.field_name.lower()
1239 tuples = []
1240 for ld in label_defs:
1241 if (ld.label.lower().startswith(lower_field_name + '-') and
1242 not ld.deprecated):
1243 label_value = ld.label[len(lower_field_name) + 1:]
1244 tuples.append((label_value, ld.label_docstring))
1245 else:
1246 continue
1247 return tuples
1248
1249
1250# _IssueChangesTuple is returned by ApplyAllIssueChanges() and is used to bundle
1251# the updated issues, resulting amendments, and other information needed by the
1252# caller to process the changes in the DB and send notifications.
1253_IssueChangesTuple = collections.namedtuple(
1254 '_IssueChangesTuple', [
1255 'issues_to_update_dict', 'merged_from_add_by_iid', 'amendments_by_iid',
1256 'imp_amendments_by_iid', 'old_owners_by_iid', 'old_statuses_by_iid',
1257 'old_components_by_iid', 'new_starrers_by_iid'
1258 ])
1259# type: (Mapping[int, Issue], DefaultDict[int, Sequence[int]],
1260# Mapping[int, Amendment], Mapping[int, Amendment], Mapping[int, int],
1261# Mapping[int, str], Mapping[int, Sequence[int]],
1262# Mapping[int, Sequence[int]])-> None
1263
1264
1265def ApplyAllIssueChanges(cnxn, issue_delta_pairs, services):
1266 # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services) ->
1267 # IssueChangesTuple
1268 """Modify the given issues with the given deltas and impacted issues in RAM.
1269
1270 Filter rules are not applied in this method.
1271 This method implements phases 3 and 4 of the process for modifying issues.
1272 See WorkEnv.ModifyIssues() for other phases and overall process.
1273
1274 Args:
1275 cnxn: MonorailConnection object.
1276 issue_delta_pairs: List of tuples that couple Issues with the IssueDeltas
1277 that represent the updates we want to make to each Issue.
1278 services: Services object for connection to backend services.
1279
1280 Returns:
1281 An _IssueChangesTuple named tuple.
1282 """
1283 impacted_tracker = _IssueChangeImpactedIssues()
1284 project_ids = {issue.project_id for issue, _delta in issue_delta_pairs}
1285 configs_by_pid = services.config.GetProjectConfigs(cnxn, list(project_ids))
1286
1287 # Track issues which have been modified in RAM and will need to
1288 # be updated in the DB.
1289 issues_to_update_dict = {}
1290
1291 amendments_by_iid = {}
1292 old_owners_by_iid = {}
1293 old_statuses_by_iid = {}
1294 old_components_by_iid = {}
1295 # PHASE 3: Update the main issues in RAM (not the indirectly impacted issues).
1296 for issue, delta in issue_delta_pairs:
1297 # Cache old data that will be used by future computations.
1298 old_owner = tracker_bizobj.GetOwnerId(issue)
1299 old_status = tracker_bizobj.GetStatus(issue)
1300 if delta.owner_id is not None and delta.owner_id != old_owner:
1301 old_owners_by_iid[issue.issue_id] = old_owner
1302 if delta.status is not None and delta.status != old_status:
1303 old_statuses_by_iid[issue.issue_id] = old_status
1304 new_components = set(issue.component_ids)
1305 new_components.update(delta.comp_ids_add or [])
1306 new_components.difference_update(delta.comp_ids_remove or [])
1307 if set(issue.component_ids) != new_components:
1308 old_components_by_iid[issue.issue_id] = issue.component_ids
1309
1310 impacted_tracker.TrackImpactedIssues(issue, delta)
1311 config = configs_by_pid.get(issue.project_id)
1312 amendments, _impacted_iids = tracker_bizobj.ApplyIssueDelta(
1313 cnxn, services.issue, issue, delta, config)
1314 if amendments:
1315 issues_to_update_dict[issue.issue_id] = issue
1316 amendments_by_iid[issue.issue_id] = amendments
1317
1318 # PHASE 4: Update impacted issues in RAM.
1319 logging.info('Applying impacted issue changes: %r', impacted_tracker.__dict__)
1320 imp_amendments_by_iid = {}
1321 impacted_iids = impacted_tracker.ComputeAllImpactedIIDs()
1322 new_starrers_by_iid = {}
1323 for issue_id in impacted_iids:
1324 # Changes made to an impacted issue should be on top of changes
1325 # made to it in PHASE 3 where it might have been a 'main' issue.
1326 issue = issues_to_update_dict.get(
1327 issue_id, services.issue.GetIssue(cnxn, issue_id, use_cache=False))
1328
1329 # Apply impacted changes.
1330 amendments, new_starrers = impacted_tracker.ApplyImpactedIssueChanges(
1331 cnxn, issue, services)
1332 if amendments:
1333 imp_amendments_by_iid[issue.issue_id] = amendments
1334 issues_to_update_dict[issue.issue_id] = issue
1335 if new_starrers:
1336 new_starrers_by_iid[issue.issue_id] = new_starrers
1337
1338 return _IssueChangesTuple(
1339 issues_to_update_dict, impacted_tracker.merged_from_add,
1340 amendments_by_iid, imp_amendments_by_iid, old_owners_by_iid,
1341 old_statuses_by_iid, old_components_by_iid, new_starrers_by_iid)
1342
1343
1344def UpdateClosedTimestamp(config, issue, old_effective_status):
1345 # type: (mrproto.tracker_pb2.ProjectIssueConfig,
1346 # mrproto.tracker_pb2.Issue, str) -> None
1347 """Sets or unsets the closed_timestamp based on status changes.
1348
1349 If the status is changing from open to closed, the closed_timestamp is set to
1350 the current time.
1351
1352 If the status is changing from closed to open, the closed_timestamp is unset.
1353
1354 If the status is changing from one closed to another closed, or from one
1355 open to another open, no operations are performed.
1356
1357 Args:
1358 config: the project configuration
1359 issue: the issue being updated (a protocol buffer)
1360 old_effective_status: the old issue status string. E.g., 'New'
1361
1362 SIDE EFFECTS:
1363 Updated issue in place with new closed timestamp.
1364 """
1365 old_effective_status = old_effective_status or ''
1366 # open -> closed
1367 if (MeansOpenInProject(old_effective_status, config) and
1368 not MeansOpenInProject(tracker_bizobj.GetStatus(issue), config)):
1369
1370 issue.closed_timestamp = int(time.time())
1371 return
1372
1373 # closed -> open
1374 if (not MeansOpenInProject(old_effective_status, config) and
1375 MeansOpenInProject(tracker_bizobj.GetStatus(issue), config)):
1376
1377 issue.reset('closed_timestamp')
1378 return
1379
1380
1381def GroupUniqueDeltaIssues(issue_delta_pairs):
1382 # type: (Tuple[Issue, IssueDelta]) -> (
1383 # Sequence[IssueDelta], Sequence[Sequence[Issue]])
1384 """Identifies unique IssueDeltas and groups Issues with identical IssueDeltas.
1385
1386 Args:
1387 issue_delta_pairs: List of tuples that couple Issues with the IssueDeltas
1388 that represent the updates we want to make to each Issue.
1389
1390 Returns:
1391 (unique_deltas, issues_for_unique_deltas):
1392 unique_deltas: List of unique IssueDeltas found in issue_delta_pairs.
1393 issues_for_unique_deltas: List of Issue lists. Each Issue list
1394 contains all the Issues that had identical IssueDeltas.
1395 Each issues_for_unique_deltas[i] is the list of Issues
1396 that had unique_deltas[i] as their IssueDeltas.
1397 """
1398 unique_deltas = []
1399 issues_for_unique_deltas = []
1400 for issue, delta in issue_delta_pairs:
1401 try:
1402 delta_index = unique_deltas.index(delta)
1403 issues_for_unique_deltas[delta_index].append(issue)
1404 except ValueError:
1405 # delta is not in unique_deltas yet.
1406 # Add delta to unique_deltas and add a new list of issues
1407 # to issues_for_unique_deltas at the same index.
1408 unique_deltas.append(delta)
1409 issues_for_unique_deltas.append([issue])
1410
1411 return unique_deltas, issues_for_unique_deltas
1412
1413
1414def _AssertNoConflictingDeltas(issue_delta_pairs, refs_dict, err_agg):
1415 # type: (Sequence[Tuple[Issue, IssueDelta]], Mapping[int, str],
1416 # exceptions.ErrorAggregator) -> None
1417 """Checks if any issue deltas conflict with each other or themselves.
1418
1419 Note: refs_dict should contain issue ref strings for all issues found
1420 in issue_delta_pairs, including all issues found in
1421 {blocked_on|blocking}_{add|remove}.
1422 """
1423 err_message = 'Changes for {} conflict for {}'
1424
1425 # Track all delta blocked_on_add and blocking_add in terms of
1426 # 'blocking_add' so we can track when a {blocked_on|blocking}_remove
1427 # is in conflict with some {blocked_on|blocking}_add.
1428 blocking_add = collections.defaultdict(list)
1429 for issue, delta in issue_delta_pairs:
1430 blocking_add[issue.issue_id].extend(delta.blocking_add)
1431
1432 for imp_iid in delta.blocked_on_add:
1433 blocking_add[imp_iid].append(issue.issue_id)
1434
1435 # Check *_remove for conflicts with tracking blocking_add.
1436 for issue, delta in issue_delta_pairs:
1437 added_iids = blocking_add[issue.issue_id]
1438 # Get intersection of iids that are in `blocking_remove` and
1439 # the tracked `blocking_add`.
1440 conflict_iids = set(delta.blocking_remove) & set(added_iids)
1441
1442 # Get iids of `blocked_on_remove` that conflict with the
1443 # tracked `blocking_add`.
1444 for possible_conflict_iid in delta.blocked_on_remove:
1445 if issue.issue_id in blocking_add[possible_conflict_iid]:
1446 conflict_iids.add(possible_conflict_iid)
1447
1448 if conflict_iids:
1449 refs_str = ', '.join([refs_dict[iid] for iid in conflict_iids])
1450 err_agg.AddErrorMessage(err_message, refs_dict[issue.issue_id], refs_str)
1451
1452
1453def PrepareIssueChanges(
1454 cnxn,
1455 issue_delta_pairs,
1456 services,
1457 attachment_uploads=None,
1458 comment_content=None):
1459 # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services,
1460 # Optional[Sequence[framework_helpers.AttachmentUpload]], Optional[str])
1461 # -> Mapping[int, int]
1462 """Clean the deltas and assert they are valid for each paired issue."""
1463 _EnforceNonMergeStatusDeltas(cnxn, issue_delta_pairs, services)
1464 _AssertIssueChangesValid(
1465 cnxn, issue_delta_pairs, services, comment_content=comment_content)
1466
1467 if attachment_uploads:
1468 return _EnforceAttachmentQuotaLimits(
1469 cnxn, issue_delta_pairs, services, attachment_uploads)
1470 return {}
1471
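# Illustrative call sketch, not part of the original module; the `uploads`
# variable and comment text are hypothetical. The returned mapping is
# non-empty only when attachments are uploaded, and records each affected
# project's new total attachment byte usage:
#
#   new_bytes_by_pid = PrepareIssueChanges(
#       cnxn, [(issue, delta)], services,
#       attachment_uploads=uploads, comment_content='Closing as fixed.')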
1472
1473def _EnforceAttachmentQuotaLimits(
1474 cnxn, issue_delta_pairs, services, attachment_uploads):
1475   # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services,
1476   #   Optional[Sequence[framework_helpers.AttachmentUpload]])
1477   # -> Mapping[int, int]
1478 """Assert that the attachments don't exceed project quotas."""
1479 issue_count_by_pid = collections.defaultdict(int)
1480 for issue, _delta in issue_delta_pairs:
1481 issue_count_by_pid[issue.project_id] += 1
1482
1483 projects_by_id = services.project.GetProjects(cnxn, issue_count_by_pid.keys())
1484
1485 new_bytes_by_pid = {}
1486 with exceptions.ErrorAggregator(exceptions.OverAttachmentQuota) as err_agg:
1487     for pid, count in sorted(issue_count_by_pid.items()):
1488       project = projects_by_id[pid]
1489 try:
1490 new_bytes_used = ComputeNewQuotaBytesUsed(
1491 project, attachment_uploads * count)
1492 new_bytes_by_pid[pid] = new_bytes_used
1493 except exceptions.OverAttachmentQuota:
1494 err_agg.AddErrorMessage(
1495 'Attachment quota exceeded for project {}', project.project_name)
1496 return new_bytes_by_pid
1497
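# Quota math sketch, not part of the original module; the sizes below are
# hypothetical. Every issue edited in a project receives its own copy of
# `attachment_uploads`, so the upload list is repeated `count` times before
# being sized:
#
#   # Editing 2 issues in one project with a single 10 KB upload means
#   # attachment_uploads * 2, i.e. 20 KB of new content, which
#   # ComputeNewQuotaBytesUsed checks against the project's remaining quota.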
1498
1499def _AssertIssueChangesValid(
1500 cnxn, issue_delta_pairs, services, comment_content=None):
1501 # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services,
1502 # Optional[str]) -> None
1503 """Assert that the delta changes are valid for each paired issue.
1504
1505 Note: this method does not check if the changes trigger any FilterRule
1506 `warnings` or `errors`.
1507 """
1508 project_ids = list(
1509 {issue.project_id for (issue, _delta) in issue_delta_pairs})
1510 projects_by_id = services.project.GetProjects(cnxn, project_ids)
1511 configs_by_id = services.config.GetProjectConfigs(cnxn, project_ids)
1512 refs_dict = {
1513 iss.issue_id: '%s:%d' % (iss.project_name, iss.local_id)
1514 for iss, _delta in issue_delta_pairs
1515 }
1516 # Add refs of deltas' blocking/blocked_on issues needed by
1517 # _AssertNoConflictingDeltas.
1518 relation_iids = set()
1519 for _iss, delta in issue_delta_pairs:
1520 relation_iids.update(
1521 delta.blocked_on_remove + delta.blocking_remove + delta.blocked_on_add +
1522 delta.blocking_add)
1523 relation_issues_dict, misses = services.issue.GetIssuesDict(
1524 cnxn, relation_iids)
1525 if misses:
1526 raise exceptions.NoSuchIssueException(
1527 'Could not find issues with ids: %r' % misses)
1528 for iid, iss in relation_issues_dict.items():
1529 if iid not in refs_dict:
1530 refs_dict[iid] = '%s:%d' % (iss.project_name, iss.local_id)
1531
1532 with exceptions.ErrorAggregator(exceptions.InputException) as err_agg:
1533 if (comment_content and
1534 len(comment_content.strip()) > tracker_constants.MAX_COMMENT_CHARS):
1535 err_agg.AddErrorMessage('Comment is too long.')
1536
1537 _AssertNoConflictingDeltas(issue_delta_pairs, refs_dict, err_agg)
1538
1539 for issue, delta in issue_delta_pairs:
1540 project = projects_by_id.get(issue.project_id)
1541 config = configs_by_id.get(issue.project_id)
1542 issue_ref = refs_dict[issue.issue_id]
1543
1544 if (delta.merged_into is not None or
1545 delta.merged_into_external is not None or delta.status is not None):
1546 end_status = delta.status or issue.status
1547 merged_options = [
1548 delta.merged_into, delta.merged_into_external, issue.merged_into,
1549 issue.merged_into_external
1550 ]
1551 end_merged_into = next(
1552 (merge for merge in merged_options if merge is not None), None)
1553
1554 is_merge_status = end_status.lower() in [
1555 status.lower() for status in config.statuses_offer_merge
1556 ]
1557
1558 if ((is_merge_status and not end_merged_into) or
1559 (not is_merge_status and end_merged_into)):
1560 err_agg.AddErrorMessage(
1561 '{}: MERGED type statuses must accompany mergedInto values.',
1562 issue_ref)
1563
1564 if delta.merged_into and issue.issue_id == delta.merged_into:
1565 err_agg.AddErrorMessage(
1566 '{}: Cannot merge an issue into itself.', issue_ref)
1567 if (issue.issue_id in set(
1568 delta.blocked_on_add)) or (issue.issue_id in set(delta.blocking_add)):
1569 err_agg.AddErrorMessage(
1570 '{}: Cannot block an issue on itself.', issue_ref)
1571 if (delta.owner_id is not None) and (delta.owner_id != issue.owner_id):
1572 parsed_owner_valid, msg = IsValidIssueOwner(
1573 cnxn, project, delta.owner_id, services)
1574 if not parsed_owner_valid:
1575 err_agg.AddErrorMessage('{}: {}', issue_ref, msg)
1576       # Owner already checked by IsValidIssueOwner.
1577 all_users = [uid for uid in delta.cc_ids_add]
1578 field_users = [fv.user_id for fv in delta.field_vals_add if fv.user_id]
1579 all_users.extend(field_users)
1580 AssertUsersExist(cnxn, services, all_users, err_agg)
1581 if (delta.summary and
1582 len(delta.summary.strip()) > tracker_constants.MAX_SUMMARY_CHARS):
1583 err_agg.AddErrorMessage('{}: Summary is too long.', issue_ref)
1584 if delta.summary == '':
1585 err_agg.AddErrorMessage('{}: Summary required.', issue_ref)
1586 if delta.status == '':
1587 err_agg.AddErrorMessage('{}: Status is required.', issue_ref)
1588       labels_err_msgs = field_helpers.ValidateLabels(
1589 cnxn, services, issue.project_id, delta.labels_add)
1590 if labels_err_msgs:
1591 err_agg.AddErrorMessage('{}: {}', issue_ref, labels_err_msgs)
1592       # Do not pass in issue for validation, as issue is pre-update, and would
1593 # result in being unable to edit issues in invalid states.
1594 fvs_err_msgs = field_helpers.ValidateCustomFields(
1595 cnxn, services, delta.field_vals_add, config, project)
1596 if fvs_err_msgs:
1597 err_agg.AddErrorMessage('{}: {}', issue_ref, '\n'.join(fvs_err_msgs))
1598 # TODO(crbug.com/monorail/9156): Validate that we do not remove fields
1599 # such that a required field becomes unset.
1600
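# Illustrative sketch of the MERGED status consistency rule enforced above,
# not part of the original module; the status name is hypothetical:
#
#   # With config.statuses_offer_merge == ['Duplicate'], a delta that sets
#   # status='Duplicate' with no merged_into value (on the delta or on the
#   # issue) fails with:
#   #   '<project>:<local_id>: MERGED type statuses must accompany
#   #   mergedInto values.'
#   # The reverse case (a non-MERGED status with a lingering merged_into) is
#   # normally avoided because _EnforceNonMergeStatusDeltas clears merged_into
#   # before this validation runs.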
1601
1602def AssertUsersExist(cnxn, services, user_ids, err_agg):
1603 # type: (MonorailConnection, Services, Sequence[int], ErrorAggregator) -> None
1604 """Assert that all users exist.
1605
1606 Has the side-effect of adding error messages to the input ErrorAggregator.
1607 """
1608 users_dict = services.user.GetUsersByIDs(cnxn, user_ids, skip_missed=True)
1609 found_ids = set(users_dict.keys())
1610 missing = [user_id for user_id in user_ids if user_id not in found_ids]
1611 for missing_user_id in missing:
1612 err_agg.AddErrorMessage(
1613 'users/{}: User does not exist.'.format(missing_user_id))
1614
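# Illustrative sketch, not part of the original module; the user ids are
# hypothetical:
#
#   with exceptions.ErrorAggregator(exceptions.InputException) as err_agg:
#     AssertUsersExist(cnxn, services, [111, 999], err_agg)
#   # If 999 is unknown, exiting the `with` block raises InputException with
#   # the message 'users/999: User does not exist.'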
1615
1616def AssertValidIssueForCreate(cnxn, services, issue, description):
1617 # type: (MonorailConnection, Services, Issue, str) -> None
1618 """Assert that issue proto is valid for issue creation.
1619
1620 Args:
1621 cnxn: A connection object to use services with.
1622 services: An object containing services to use to look up relevant data.
1623     issue: A PB containing the issue to validate.
1624 description: The description for the issue.
1625
1626 Raises:
1627 InputException if the issue is not valid.
1628 """
1629 project = services.project.GetProject(cnxn, issue.project_id)
1630 config = services.config.GetProjectConfig(cnxn, issue.project_id)
1631
1632 with exceptions.ErrorAggregator(exceptions.InputException) as err_agg:
1633 owner_is_valid, owner_err_msg = IsValidIssueOwner(
1634 cnxn, project, issue.owner_id, services)
1635 if not owner_is_valid:
1636 err_agg.AddErrorMessage(owner_err_msg)
1637 if not issue.summary.strip():
1638 err_agg.AddErrorMessage('Summary is required')
1639 if not description.strip():
1640 err_agg.AddErrorMessage('Description is required')
1641 if len(issue.summary) > tracker_constants.MAX_SUMMARY_CHARS:
1642 err_agg.AddErrorMessage('Summary is too long')
1643 if len(description) > tracker_constants.MAX_COMMENT_CHARS:
1644 err_agg.AddErrorMessage('Description is too long')
1645
1646     # Check all users exist. Owner already checked by IsValidIssueOwner.
1647 all_users = [uid for uid in issue.cc_ids]
1648 for av in issue.approval_values:
1649 all_users.extend(av.approver_ids)
1650 field_users = [fv.user_id for fv in issue.field_values if fv.user_id]
1651 all_users.extend(field_users)
1652 AssertUsersExist(cnxn, services, all_users, err_agg)
1653
1654     label_validity_error = field_helpers.ValidateLabels(
1655 cnxn, services, issue.project_id, issue.labels)
1656 if label_validity_error:
1657 err_agg.AddErrorMessage(label_validity_error)
1658
1659     field_validity_errors = field_helpers.ValidateCustomFields(
1660 cnxn, services, issue.field_values, config, project, issue=issue)
1661 if field_validity_errors:
1662 err_agg.AddErrorMessage("\n".join(field_validity_errors))
1663 if not services.config.LookupStatusID(cnxn, issue.project_id, issue.status,
1664 autocreate=False):
1665 err_agg.AddErrorMessage('Undefined status: %s' % issue.status)
1666 all_comp_ids = {
1667 cd.component_id for cd in config.component_defs if not cd.deprecated
1668 }
1669 for comp_id in issue.component_ids:
1670 if comp_id not in all_comp_ids:
1671 err_agg.AddErrorMessage(
1672 'Undefined or deprecated component with id: %d' % comp_id)
1673
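# Illustrative call sketch, not part of the original module; all field values
# below are hypothetical:
#
#   issue = tracker_pb2.Issue(
#       project_id=789, summary='Crash on startup', status='New',
#       owner_id=111, cc_ids=[222])
#   AssertValidIssueForCreate(cnxn, services, issue, 'Steps to reproduce...')
#   # Raises InputException listing every violated rule (missing summary or
#   # description, unknown users, undefined status, deprecated components,
#   # label or custom field problems); returns None when the issue is valid.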
1674
1675def _ComputeNewCcsFromIssueMerge(merge_into_issue, source_issues):
1676 # type: (Issue, Collection[Issue]) -> Collection[int]
1677 """Compute ccs that should be added from source_issues to merge_into_issue."""
1678
1679 merge_into_restrictions = permissions.GetRestrictions(merge_into_issue)
1680 new_cc_ids = set()
1681 for issue in source_issues:
1682 # We don't want to leak metadata like ccs of restricted issues.
1683 # So we don't merge ccs from restricted source issues, unless their
1684 # restrictions match the restrictions of the target.
1685 if permissions.HasRestrictions(issue, perm='View'):
1686 source_restrictions = permissions.GetRestrictions(issue)
1687 if (issue.project_id != merge_into_issue.project_id or
1688 set(source_restrictions) != set(merge_into_restrictions)):
1689 continue
1690
1691 new_cc_ids.update(issue.cc_ids)
1692 if issue.owner_id:
1693 new_cc_ids.add(issue.owner_id)
1694
1695   return [
1696 cc_id for cc_id in sorted(new_cc_ids)
1697 if cc_id not in merge_into_issue.cc_ids
1698 ]
1699
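# Illustrative sketch of the cc merge rules above, not part of the original
# module; the issues and user ids are hypothetical:
#
#   # source_a: same project as target, unrestricted, owner 111, cc_ids [222].
#   # source_b: view-restricted with labels that differ from the target's.
#   _ComputeNewCcsFromIssueMerge(target, [source_a, source_b])
#   # -> [111, 222], minus anyone already cc'd on target; source_b contributes
#   # nothing because its restrictions do not match the target's.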
1700
1701def _EnforceNonMergeStatusDeltas(cnxn, issue_delta_pairs, services):
1702 # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services)
1703   """Update deltas in RAM to unset merged_into if a MERGED status is removed."""
1704 project_ids = list(
1705 {issue.project_id for (issue, _delta) in issue_delta_pairs})
1706 configs_by_id = services.config.GetProjectConfigs(cnxn, project_ids)
1707 statuses_offer_merge_by_pid = {
1708 pid:
1709 [status.lower() for status in configs_by_id[pid].statuses_offer_merge]
1710 for pid in project_ids
1711 }
1712
1713 for issue, delta in issue_delta_pairs:
1714 statuses_offer_merge = statuses_offer_merge_by_pid[issue.project_id]
1715     # Remove merged_into and merged_into_external when a status is moved
1716     # to a non-MERGED status ONLY if the delta does not change merged_into
1717     # values. If the delta does change merged_into values, the request will
1718     # fail in _AssertIssueChangesValid().
1719 if (delta.status and delta.status.lower() not in statuses_offer_merge and
1720 delta.merged_into is None and delta.merged_into_external is None):
1721 if issue.merged_into:
1722 delta.merged_into = 0
1723 elif issue.merged_into_external:
1724 delta.merged_into_external = ''
1725
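# Illustrative sketch, not part of the original module; the status name is
# hypothetical:
#
#   # If issue.merged_into is set and the delta only changes status to
#   # 'Available' (not in statuses_offer_merge), this helper also sets
#   # delta.merged_into = 0 (or delta.merged_into_external = '' for external
#   # merges) so the issue is unmerged along with the status change.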
1726
1727class _IssueChangeImpactedIssues():
1728 """Class to track changes of issues impacted by updates to other issues."""
1729
1730 def __init__(self):
1731
1732 # Each of the dicts below should be used to track
1733 # {impacted_issue_id: [issues being modified that impact the keyed issue]}.
1734
1735 # e.g. `blocking_remove` with {iid_1: [iid_2, iid_3]} means that
1736 # `TrackImpactedIssues` has been called with a delta of
1737 # IssueDelta(blocked_on_remove=[iid_1]) for both issue 2 and issue 3.
1738 self.blocking_add = collections.defaultdict(list)
1739 self.blocking_remove = collections.defaultdict(list)
1740 self.blocked_on_add = collections.defaultdict(list)
1741 self.blocked_on_remove = collections.defaultdict(list)
1742 self.merged_from_add = collections.defaultdict(list)
1743 self.merged_from_remove = collections.defaultdict(list)
1744
1745 def ComputeAllImpactedIIDs(self):
1746 # type: () -> Collection[int]
1747 """Computes the unique set of all impacted issue ids."""
1748     return (
1749 set(self.blocking_add.keys()) | set(self.blocking_remove.keys())
1750 | set(self.blocked_on_add.keys()) | set(self.blocked_on_remove.keys())
1751 | set(self.merged_from_add.keys())
1752 | set(self.merged_from_remove.keys()))
1753
1754 def TrackImpactedIssues(self, issue, delta):
1755 # type: (Issue, IssueDelta) -> None
1756     """Track issues impacted when `delta` is applied to `issue`.
1757
1758 Args:
1759 issue: Issue that the delta will be applied to, but has not yet.
1760 delta: IssueDelta representing the changes that will be made to
1761 the issue.
1762 """
1763 for impacted_iid in delta.blocked_on_add:
1764 self.blocking_add[impacted_iid].append(issue.issue_id)
1765 for impacted_iid in delta.blocked_on_remove:
1766 self.blocking_remove[impacted_iid].append(issue.issue_id)
1767
1768 for impacted_iid in delta.blocking_add:
1769 self.blocked_on_add[impacted_iid].append(issue.issue_id)
1770 for impacted_iid in delta.blocking_remove:
1771 self.blocked_on_remove[impacted_iid].append(issue.issue_id)
1772
1773 if (delta.merged_into == framework_constants.NO_ISSUE_SPECIFIED and
1774 issue.merged_into):
1775 self.merged_from_remove[issue.merged_into].append(issue.issue_id)
1776 elif delta.merged_into and issue.merged_into != delta.merged_into:
1777 self.merged_from_add[delta.merged_into].append(issue.issue_id)
1778 if issue.merged_into:
1779 self.merged_from_remove[issue.merged_into].append(issue.issue_id)
1780
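  # Illustrative sketch, not part of the original module; the issue ids are
  # hypothetical:
  #
  #   # If issue 1 is currently merged into issue 5 and its delta sets
  #   # merged_into to issue 7, TrackImpactedIssues records
  #   #   merged_from_add[7] == [1] and merged_from_remove[5] == [1],
  #   # so both issue 5 and issue 7 appear in ComputeAllImpactedIIDs().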
1781 def ApplyImpactedIssueChanges(self, cnxn, impacted_issue, services):
1782 # type: (MonorailConnection, Issue, Services) ->
1783 # Tuple[Collection[Amendment], Sequence[int]]
1784 """Apply the tracked changes in RAM for the given impacted issue.
1785
1786 Args:
1787 cnxn: connection to SQL database.
1788 impacted_issue: Issue PB that we are applying the changes to.
1789 services: Services used to fetch info from DB or cache.
1790
1791 Returns:
1792 All the amendments that represent the changes applied to the issue
1793 and a list of the new issue starrers.
1794
1795 Side-effect:
1796 The given impacted_issue will be updated in RAM.
1797 """
1798 issue_id = impacted_issue.issue_id
1799
1800 # Process changes for blocking/blocked_on issue changes.
1801 amendments, _impacted_iids = tracker_bizobj.ApplyIssueBlockRelationChanges(
1802 cnxn, impacted_issue, self.blocked_on_add[issue_id],
1803 self.blocked_on_remove[issue_id], self.blocking_add[issue_id],
1804 self.blocking_remove[issue_id], services.issue)
1805
1806 # Process changes in merged issues.
1807 merged_from_add = self.merged_from_add.get(issue_id, [])
1808 merged_from_remove = self.merged_from_remove.get(issue_id, [])
1809
1810 # Merge ccs into impacted_issue from all merged issues,
1811 # compute new starrers, and set star_count.
1812 new_starrers = []
1813 if merged_from_add:
1814 issues_dict, _misses = services.issue.GetIssuesDict(cnxn, merged_from_add)
1815 merged_from_add_issues = issues_dict.values()
1816 new_cc_ids = _ComputeNewCcsFromIssueMerge(
1817 impacted_issue, merged_from_add_issues)
1818 if new_cc_ids:
1819 impacted_issue.cc_ids.extend(new_cc_ids)
1820 amendments.append(
1821 tracker_bizobj.MakeCcAmendment(new_cc_ids, []))
1822 new_starrers = list(
1823 GetNewIssueStarrers(cnxn, services, merged_from_add, issue_id))
1824 if new_starrers:
1825 impacted_issue.star_count += len(new_starrers)
1826
1827 if merged_from_add or merged_from_remove:
1828 merged_from_add_refs = services.issue.LookupIssueRefs(
1829 cnxn, merged_from_add).values()
1830 merged_from_remove_refs = services.issue.LookupIssueRefs(
1831 cnxn, merged_from_remove).values()
1832 amendments.append(
1833 tracker_bizobj.MakeMergedIntoAmendment(
1834 merged_from_add_refs, merged_from_remove_refs,
1835 default_project_name=impacted_issue.project_name))
1836 return amendments, new_starrers