blob: c9f9e5a0ba41e37990587f3bb52e41c39fe490e6 [file] [log] [blame]
Copybara854996b2021-09-07 19:36:02 +00001# Copyright 2016 The Chromium Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style
3# license that can be found in the LICENSE file or at
4# https://developers.google.com/open-source/licenses/bsd
5
6"""Helper functions and classes used by the Monorail Issue Tracker pages.
7
8This module has functions that are reused in multiple servlets or
9other modules.
10"""
11from __future__ import print_function
12from __future__ import division
13from __future__ import absolute_import
14
15import collections
16import itertools
17import logging
18import re
19import time
20import urllib
21
22from google.appengine.api import app_identity
23
24from six import string_types
25
26import settings
27
28from features import federated
29from framework import authdata
30from framework import exceptions
31from framework import filecontent
32from framework import framework_bizobj
33from framework import framework_constants
34from framework import framework_helpers
35from framework import framework_views
36from framework import permissions
37from framework import sorting
38from framework import template_helpers
39from framework import urls
40from project import project_helpers
41from proto import tracker_pb2
42from services import client_config_svc
43from tracker import field_helpers
44from tracker import tracker_bizobj
45from tracker import tracker_constants
46
47
# HTML input field names for blocked on and blocking issue refs.
BLOCKED_ON = 'blocked_on'
BLOCKING = 'blocking'

# This string is used in HTML form element names to identify custom fields.
# E.g., a value for a custom field with field_id 12 would be specified in
# an HTML form element with name="custom_12".
_CUSTOM_FIELD_NAME_PREFIX = 'custom_'

# When the attachment quota gets within 1MB of the limit, stop offering
# users the option to attach files.
_SOFT_QUOTA_LEEWAY = 1024 * 1024  # 1 MB, in bytes.

# Accessors for sorting built-in fields.  Each maps a sort-spec name to a
# function that extracts a sort key from an Issue PB.
SORTABLE_FIELDS = {
    'project': lambda issue: issue.project_name,
    'id': lambda issue: issue.local_id,
    'owner': tracker_bizobj.GetOwnerId,  # And postprocessor
    'reporter': lambda issue: issue.reporter_id,  # And postprocessor
    'component': lambda issue: issue.component_ids,
    'cc': tracker_bizobj.GetCcIds,  # And postprocessor
    'summary': lambda issue: issue.summary.lower(),
    'stars': lambda issue: issue.star_count,
    'attachments': lambda issue: issue.attachment_count,
    'opened': lambda issue: issue.opened_timestamp,
    'closed': lambda issue: issue.closed_timestamp,
    'modified': lambda issue: issue.modified_timestamp,
    'status': tracker_bizobj.GetStatus,
    'blocked': lambda issue: bool(issue.blocked_on_iids),
    # Issues with no blocking relationship sort last (MAX_STRING sentinel).
    'blockedon': lambda issue: issue.blocked_on_iids or sorting.MAX_STRING,
    'blocking': lambda issue: issue.blocking_iids or sorting.MAX_STRING,
    'mergedinto': lambda issue: issue.merged_into or sorting.MAX_STRING,
    'ownermodified': lambda issue: issue.owner_modified_timestamp,
    'statusmodified': lambda issue: issue.status_modified_timestamp,
    'componentmodified': lambda issue: issue.component_modified_timestamp,
    'ownerlastvisit': tracker_bizobj.GetOwnerId,  # And postprocessor
    }

# Some fields take a user ID from the issue and then use that to index
# into a dictionary of user views, and then get a field of the user view
# as the value to sort key.
SORTABLE_FIELDS_POSTPROCESSORS = {
    'owner': lambda user_view: user_view.email,
    'reporter': lambda user_view: user_view.email,
    'cc': lambda user_view: user_view.email,
    # Negated so larger (more recent) visit timestamps sort first.
    'ownerlastvisit': lambda user_view: -user_view.user.last_visit_timestamp,
    }

# Here are some restriction labels to help people do the most common things
# that they might want to do with restrictions.
_FREQUENT_ISSUE_RESTRICTIONS = [
    (permissions.VIEW, permissions.EDIT_ISSUE,
     'Only users who can edit the issue may access it'),
    (permissions.ADD_ISSUE_COMMENT, permissions.EDIT_ISSUE,
     'Only users who can edit the issue may add comments'),
    ]

# These issue restrictions should be offered as examples whenever the project
# does not have any custom permissions in use already.
_EXAMPLE_ISSUE_RESTRICTIONS = [
    (permissions.VIEW, 'CoreTeam',
     'Custom permission CoreTeam is needed to access'),
    ]

# Namedtuples that hold data parsed from post_data.
ParsedComponents = collections.namedtuple(
    'ParsedComponents', 'entered_str, paths, paths_remove')
ParsedFields = collections.namedtuple(
    'ParsedFields',
    'vals, vals_remove, fields_clear, '
    'phase_vals, phase_vals_remove')
ParsedUsers = collections.namedtuple(
    'ParsedUsers', 'owner_username, owner_id, cc_usernames, '
    'cc_usernames_remove, cc_ids, cc_ids_remove')
ParsedBlockers = collections.namedtuple(
    'ParsedBlockers', 'entered_str, iids, dangling_refs, '
    'federated_ref_strings')
ParsedHotlistRef = collections.namedtuple(
    'ParsedHotlistRef', 'user_email, hotlist_name')
ParsedHotlists = collections.namedtuple(
    'ParsedHotlists', 'entered_str, hotlist_refs')
ParsedIssue = collections.namedtuple(
    'ParsedIssue', 'summary, comment, is_description, status, users, labels, '
    'labels_remove, components, fields, template_name, attachments, '
    'kept_attachments, blocked_on, blocking, hotlists')
133
134
def ParseIssueRequest(cnxn, post_data, services, errors, default_project_name):
  """Parse all the possible arguments out of the request.

  Args:
    cnxn: connection to SQL database.
    post_data: HTML form information.
    services: Connections to persistence layer.
    errors: object to accumulate validation error info.
    default_project_name: name of the project that contains the issue.

  Returns:
    A namedtuple with all parsed information.  User IDs are looked up, but
    also the strings are returned to allow bouncing the user back to correct
    any errors.
  """
  summary = post_data.get('summary', '')
  comment = post_data.get('comment', '')
  is_description = bool(post_data.get('description', ''))
  status = post_data.get('status', '')
  template_name = urllib.unquote_plus(post_data.get('template_name', ''))
  component_str = post_data.get('components', '')
  label_strs = post_data.getall('label')

  if is_description:
    tmpl_txt = post_data.get('tmpl_txt', '')
    comment = MarkupDescriptionOnInput(comment, tmpl_txt)

  # Raw string so that \s is a regex escape, not an invalid string escape.
  comp_paths, comp_paths_remove = _ClassifyPlusMinusItems(
      re.split(r'[,;\s]+', component_str))
  parsed_components = ParsedComponents(
      component_str, comp_paths, comp_paths_remove)
  labels, labels_remove = _ClassifyPlusMinusItems(label_strs)
  parsed_fields = _ParseIssueRequestFields(post_data)
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = _ParseIssueRequestAttachments(post_data)
  kept_attachments = _ParseIssueRequestKeptAttachments(post_data)
  parsed_users = _ParseIssueRequestUsers(cnxn, post_data, services)
  parsed_blocked_on = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKED_ON)
  parsed_blocking = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKING)
  parsed_hotlists = _ParseHotlists(post_data)

  parsed_issue = ParsedIssue(
      summary, comment, is_description, status, parsed_users, labels,
      labels_remove, parsed_components, parsed_fields, template_name,
      attachments, kept_attachments, parsed_blocked_on, parsed_blocking,
      parsed_hotlists)
  return parsed_issue
184
185
def MarkupDescriptionOnInput(content, tmpl_text):
  """Return HTML for the content of an issue description or comment.

  Args:
    content: the text submitted by the user, any user-entered markup
        has already been escaped.
    tmpl_text: the initial text that was put into the textarea.

  Returns:
    The description content text with template lines highlighted.
  """
  # Reduce the template to its non-blank lines, stripped of whitespace.
  tmpl_lines = []
  for raw_tmpl_line in tmpl_text.split('\n'):
    stripped = raw_tmpl_line.strip()
    if stripped:
      tmpl_lines.append(stripped)

  marked_lines = []
  for entered_line in content.split('\n'):
    marked_lines.append(
        _MarkupDescriptionLineOnInput(entered_line, tmpl_lines))
  return '\n'.join(marked_lines)
204
205
206def _MarkupDescriptionLineOnInput(line, tmpl_lines):
207 """Markup one line of an issue description that was just entered.
208
209 Args:
210 line: string containing one line of the user-entered comment.
211 tmpl_lines: list of strings for the text of the template lines.
212
213 Returns:
214 The same user-entered line, or that line highlighted to
215 indicate that it came from the issue template.
216 """
217 for tmpl_line in tmpl_lines:
218 if line.startswith(tmpl_line):
219 return '<b>' + tmpl_line + '</b>' + line[len(tmpl_line):]
220
221 return line
222
223
224def _ClassifyPlusMinusItems(add_remove_list):
225 """Classify the given plus-or-minus items into add and remove lists."""
226 add_remove_set = {s.strip() for s in add_remove_list}
227 add_strs = [s for s in add_remove_set if s and not s.startswith('-')]
228 remove_strs = [s[1:] for s in add_remove_set if s[1:] and s.startswith('-')]
229 return add_strs, remove_strs
230
231
def _ParseHotlists(post_data):
  """Parse the hotlists input from the issue form.

  Args:
    post_data: HTML form information.

  Returns:
    A ParsedHotlists namedtuple with the raw entered string and a list of
    ParsedHotlistRef namedtuples; a ref's user_email is None for a short
    (name-only) reference.
  """
  entered_str = post_data.get('hotlists', '').strip()
  hotlist_refs = []
  # Raw string so that \s is a regex escape, not an invalid string escape.
  for ref_str in re.split(r'[,;\s]+', entered_str):
    if not ref_str:
      continue
    if ':' in ref_str:
      if ref_str.split(':')[0]:
        # E-mail isn't empty; full reference.
        hotlist_refs.append(ParsedHotlistRef(*ref_str.split(':', 1)))
      else:
        # Short reference.
        hotlist_refs.append(ParsedHotlistRef(None, ref_str.split(':', 1)[1]))
    else:
      # Short reference
      hotlist_refs.append(ParsedHotlistRef(None, ref_str))
  parsed_hotlists = ParsedHotlists(entered_str, hotlist_refs)
  return parsed_hotlists
250
251
def _ParseIssueRequestFields(post_data):
  """Iterate over post_data and return custom field values found in it.

  Args:
    post_data: dict w/ values from the user's HTTP POST form data.

  Returns:
    A ParsedFields namedtuple: dicts of {field_id: [value_str, ...]} for
    values to set and to remove, a list of field IDs to clear, and
    phase-aware dicts {field_id: {phase_name: [value_str, ...]}} for
    phase-specific values to set and to remove.
  """
  field_val_strs = {}
  field_val_strs_remove = {}
  # Phase-specific values are keyed by field_id first, then phase name.
  phase_field_val_strs = collections.defaultdict(dict)
  phase_field_val_strs_remove = collections.defaultdict(dict)
  for key in post_data.keys():
    if key.startswith(_CUSTOM_FIELD_NAME_PREFIX):
      val_strs = [v for v in post_data.getall(key) if v]
      if val_strs:
        try:
          field_id = int(key[len(_CUSTOM_FIELD_NAME_PREFIX):])
          phase_name = None
        except ValueError:  # key must be in format <field_id>_<phase_name>
          field_id, phase_name = key[len(_CUSTOM_FIELD_NAME_PREFIX):].split(
              '_', 1)
          field_id = int(field_id)
        # A companion 'op_custom_<id>' input selects add vs. remove.
        if post_data.get('op_' + key) == 'remove':
          if phase_name:
            phase_field_val_strs_remove[field_id][phase_name] = val_strs
          else:
            field_val_strs_remove[field_id] = val_strs
        else:
          if phase_name:
            phase_field_val_strs[field_id][phase_name] = val_strs
          else:
            field_val_strs[field_id] = val_strs

  # TODO(jojwang): monorail:5154, no support for clearing phase field values.
  fields_clear = []
  op_prefix = 'op_' + _CUSTOM_FIELD_NAME_PREFIX
  for op_key in post_data.keys():
    if op_key.startswith(op_prefix):
      if post_data.get(op_key) == 'clear':
        field_id = int(op_key[len(op_prefix):])
        fields_clear.append(field_id)

  return ParsedFields(
      field_val_strs, field_val_strs_remove, fields_clear,
      phase_field_val_strs, phase_field_val_strs_remove)
292
293
def _ParseIssueRequestAttachments(post_data):
  """Extract and clean-up any attached files from the post data.

  Args:
    post_data: dict w/ values from the user's HTTP POST form data.

  Returns:
    [(filename, filecontents, mimetype), ...] with items for each attachment.
  """
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = []
  # The form uses numbered inputs named file1..file15.
  for i in range(1, 16):
    if 'file%s' % i in post_data:
      item = post_data['file%s' % i]
      # Skip plain string values; real uploads are objects that carry
      # .filename and .value attributes.
      if isinstance(item, string_types):
        continue
      if '\\' in item.filename:  # IE insists on giving us the whole path.
        item.filename = item.filename[item.filename.rindex('\\') + 1:]
      if not item.filename:
        continue  # Skip any FILE fields that were not filled in.
      attachments.append((
          item.filename, item.value,
          filecontent.GuessContentTypeFromFilename(item.filename)))

  return attachments
319
320
321def _ParseIssueRequestKeptAttachments(post_data):
322 """Extract attachment ids for attachments kept when updating description
323
324 Args:
325 post_data: dict w/ values from the user's HTTP POST form data.
326
327 Returns:
328 a list of attachment ids for kept attachments
329 """
330 kept_attachments = post_data.getall('keep-attachment')
331 return [int(aid) for aid in kept_attachments]
332
333
def _ParseIssueRequestUsers(cnxn, post_data, services):
  """Extract usernames from the POST data, categorize them, and look up IDs.

  Args:
    cnxn: connection to SQL database.
    post_data: dict w/ data from the HTTP POST.
    services: Services.

  Returns:
    A namedtuple (owner_username, owner_id, cc_usernames, cc_usernames_remove,
    cc_ids, cc_ids_remove), containing:
      - issue owner's name and user ID, if any
      - the list of all cc'd usernames
      - the user IDs to add or remove from the issue CC list.
    Any of these user IDs may be None if the corresponding username
    or email address is invalid.
  """
  # Get the user-entered values from post_data.
  cc_username_str = post_data.get('cc', '').lower()
  owner_email = post_data.get('owner', '').strip().lower()

  # Raw string so that \s is a regex escape, not an invalid string escape.
  cc_usernames, cc_usernames_remove = _ClassifyPlusMinusItems(
      re.split(r'[,;\s]+', cc_username_str))

  # Figure out the email addresses to lookup and do the lookup.
  emails_to_lookup = cc_usernames + cc_usernames_remove
  if owner_email:
    emails_to_lookup.append(owner_email)
  all_user_ids = services.user.LookupUserIDs(
      cnxn, emails_to_lookup, autocreate=True)
  if owner_email:
    owner_id = all_user_ids.get(owner_email)
  else:
    owner_id = framework_constants.NO_USER_SPECIFIED

  # Lookup the user IDs of the Cc addresses to add or remove.
  cc_ids = [all_user_ids.get(cc) for cc in cc_usernames if cc]
  cc_ids_remove = [all_user_ids.get(cc) for cc in cc_usernames_remove if cc]

  return ParsedUsers(owner_email, owner_id, cc_usernames, cc_usernames_remove,
                     cc_ids, cc_ids_remove)
375
376
def _ParseBlockers(cnxn, post_data, services, errors, default_project_name,
                   field_name):
  """Parse input for issues that the current issue is blocking/blocked on.

  Args:
    cnxn: connection to SQL database.
    post_data: dict w/ values from the user's HTTP POST.
    services: connections to backend services.
    errors: object to accumulate validation error info.
    default_project_name: name of the project that contains the issue.
    field_name: string HTML input field name, e.g., BLOCKED_ON or BLOCKING.

  Returns:
    A namedtuple with the user input string, a sorted list of issue IDs,
    a sorted list of dangling issue refs, and a list of federated
    reference shortlink strings.
  """
  entered_str = post_data.get(field_name, '').strip()
  blocker_iids = []
  dangling_ref_tuples = []
  federated_ref_strings = []

  # The issue being edited does not change per ref: compute its ID once
  # instead of re-parsing post_data on every loop iteration.
  current_issue_id = int(post_data.get('id')) if post_data.get('id') else -1

  issue_ref = None
  # Raw string so that \s is a regex escape, not an invalid string escape.
  for ref_str in re.split(r'[,;\s]+', entered_str):
    # Handle federated references.
    if federated.IsShortlinkValid(ref_str):
      federated_ref_strings.append(ref_str)
      continue

    try:
      issue_ref = tracker_bizobj.ParseIssueRef(ref_str)
    except ValueError:
      setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
      break

    if not issue_ref:
      continue

    blocker_project_name, blocker_issue_id = issue_ref
    if not blocker_project_name:
      blocker_project_name = default_project_name

    # Detect and report if the same issue was specified.
    if (blocker_issue_id == current_issue_id and
        blocker_project_name == default_project_name):
      setattr(errors, field_name, 'Cannot be %s the same issue' % field_name)
      break

    ref_projects = services.project.GetProjectsByName(
        cnxn, set([blocker_project_name]))
    blocker_iid, _misses = services.issue.ResolveIssueRefs(
        cnxn, ref_projects, default_project_name, [issue_ref])
    if not blocker_iid:
      if blocker_project_name in settings.recognized_codesite_projects:
        # We didn't find the issue, but it had an explicitly-specified project
        # which we know is on Codesite.  Allow it as a dangling reference.
        dangling_ref_tuples.append(issue_ref)
        continue
      else:
        # Otherwise, it doesn't exist, so report it.
        setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
        break
    if blocker_iid[0] not in blocker_iids:
      blocker_iids.extend(blocker_iid)

  blocker_iids.sort()
  dangling_ref_tuples.sort()
  return ParsedBlockers(entered_str, blocker_iids, dangling_ref_tuples,
                        federated_ref_strings)
445
446
def PairDerivedValuesWithRuleExplanations(
    proposed_issue, traces, derived_users_by_id):
  """Pair up values and explanations into JSON objects.

  Args:
    proposed_issue: Issue PB with derived_* fields populated.
    traces: dict keyed by (field_id, value) giving the explanation for
        each derived value; missing keys produce a None 'why'.
    derived_users_by_id: dict {user_id: user_view} of derived users.

  Returns:
    A 5-tuple of lists of {'value': ..., 'why': ...} dicts, one list each
    for derived labels, owner, ccs, warnings, and errors.
  """
  derived_labels_and_why = [
      {'value': lab,
       'why': traces.get((tracker_pb2.FieldID.LABELS, lab))}
      for lab in proposed_issue.derived_labels]

  # Rebind to plain display names, dropping users with no display name.
  derived_users_by_id = {
      user_id: user_view.display_name
      for user_id, user_view in derived_users_by_id.items()
      if user_view.display_name}

  derived_owner_and_why = []
  if proposed_issue.derived_owner_id:
    derived_owner_and_why = [{
        'value': derived_users_by_id[proposed_issue.derived_owner_id],
        'why': traces.get(
            (tracker_pb2.FieldID.OWNER, proposed_issue.derived_owner_id))}]
  derived_cc_and_why = [
      {'value': derived_users_by_id[cc_id],
       'why': traces.get((tracker_pb2.FieldID.CC, cc_id))}
      for cc_id in proposed_issue.derived_cc_ids
      if cc_id in derived_users_by_id]

  warnings_and_why = [
      {'value': warning,
       'why': traces.get((tracker_pb2.FieldID.WARNING, warning))}
      for warning in proposed_issue.derived_warnings]

  errors_and_why = [
      {'value': error,
       'why': traces.get((tracker_pb2.FieldID.ERROR, error))}
      for error in proposed_issue.derived_errors]

  return (derived_labels_and_why, derived_owner_and_why, derived_cc_and_why,
          warnings_and_why, errors_and_why)
484
485
def IsValidIssueOwner(cnxn, project, owner_id, services):
  """Return True if the given user ID can be an issue owner.

  Args:
    cnxn: connection to SQL database.
    project: the current Project PB.
    owner_id: the user ID of the proposed issue owner.
    services: connections to backends.

  It is OK to have 0 for the owner_id, that simply means that the issue is
  unassigned.

  Returns:
    A pair (valid, err_msg).  valid is True if the given user ID can be an
    issue owner.  err_msg is an error message string to display to the user
    if valid == False, and is None if valid == True.
  """
  # An issue is always allowed to have no owner specified.
  if owner_id == framework_constants.NO_USER_SPECIFIED:
    return True, None

  try:
    # The proposed owner must exist and be a member of the project.
    auth = authdata.AuthData.FromUserID(cnxn, owner_id, services)
    if not framework_bizobj.UserIsInProject(project, auth.effective_ids):
      return False, 'Issue owner must be a project member.'
  except exceptions.NoSuchUserException:
    return False, 'Issue owner user ID not found.'

  # A user group account cannot own an issue.
  group_ids = services.usergroup.DetermineWhichUserIDsAreGroups(
      cnxn, [owner_id])
  if owner_id in group_ids:
    return False, 'Issue owner cannot be a user group.'

  return True, None
520
521
def GetAllowedOpenedAndClosedIssues(mr, issue_ids, services):
  """Get filtered lists of open and closed issues identified by issue_ids.

  The function then filters the results to only the issues that the user
  is allowed to view.  E.g., we only auto-link to issues that the user
  would be able to view if they clicked the link.

  Args:
    mr: commonly used info parsed from the request.
    issue_ids: list of int issue IDs for the target issues.
    services: connection to issue, config, and project persistence layers.

  Returns:
    Two lists of issues that the user is allowed to view: one for open
    issues and one for closed issues.
  """
  open_issues, closed_issues = services.issue.GetOpenAndClosedIssues(
      mr.cnxn, issue_ids)
  # Permission-filter both groups in one call; grouping is preserved.
  return GetAllowedIssues(mr, [open_issues, closed_issues], services)
541
542
def GetAllowedIssues(mr, issue_groups, services):
  """Filter lists of issues identified by issue_groups.

  Args:
    mr: commonly used info parsed from the request.
    issue_groups: list of list of issues to filter.
    services: connection to issue, config, and project persistence layers.

  Returns:
    List of filtered list of issues.
  """

  # Load every project (and its config) referenced by any issue in any
  # group so the per-group filtering below can look them up by project_id.
  project_dict = GetAllIssueProjects(
      mr.cnxn, itertools.chain.from_iterable(issue_groups), services.project)
  config_dict = services.config.GetProjectConfigs(mr.cnxn,
      list(project_dict.keys()))
  return [FilterOutNonViewableIssues(
      mr.auth.effective_ids, mr.auth.user_pb, project_dict, config_dict,
      issues)
      for issues in issue_groups]
563
564
def MakeViewsForUsersInIssues(cnxn, issue_list, user_service, omit_ids=None):
  """Lookup all the users involved in any of the given issues.

  Args:
    cnxn: connection to SQL database.
    issue_list: list of Issue PBs from a result query.
    user_service: Connection to User backend storage.
    omit_ids: a list of user_ids to omit, e.g., because we already have them.

  Returns:
    A dictionary {user_id: user_view,...} for all the users involved
    in the given issues.
  """
  issue_participant_id_set = tracker_bizobj.UsersInvolvedInIssues(issue_list)
  if omit_ids:
    # Skip users the caller already has views for.
    issue_participant_id_set.difference_update(omit_ids)

  # TODO(jrobbins): consider caching View objects as well.
  users_by_id = framework_views.MakeAllUserViews(
      cnxn, user_service, issue_participant_id_set)

  return users_by_id
587
588
def FormatIssueListURL(
    mr, config, absolute=True, project_names=None, **kwargs):
  """Format a link back to list view as configured by user.

  Args:
    mr: commonly used info parsed from the request.
    config: project issue config; only default_col_spec is read here.
    absolute: if True, prepend the request's scheme and host.
    project_names: list of project names for a cross-project list;
        defaults to the current project.
    **kwargs: additional query string parameters to include.

  Returns:
    A URL string for the issue list view.
  """
  if project_names is None:
    project_names = [mr.project_name]
  # NOTE(review): JUMP_RE presumably matches a bare issue-ID query —
  # confirm in tracker_constants.
  if tracker_constants.JUMP_RE.match(mr.query):
    kwargs['q'] = 'id=%s' % mr.query
    kwargs['can'] = 1  # The specified issue might be closed.
  else:
    kwargs['q'] = mr.query
    if mr.can and mr.can != 2:
      kwargs['can'] = mr.can
  # Only include view settings that differ from the defaults.
  def_col_spec = config.default_col_spec
  if mr.col_spec and mr.col_spec != def_col_spec:
    kwargs['colspec'] = mr.col_spec
  if mr.sort_spec:
    kwargs['sort'] = mr.sort_spec
  if mr.group_by_spec:
    kwargs['groupby'] = mr.group_by_spec
  if mr.start:
    kwargs['start'] = mr.start
  if mr.num != tracker_constants.DEFAULT_RESULTS_PER_PAGE:
    kwargs['num'] = mr.num

  if len(project_names) == 1:
    url = '/p/%s%s' % (project_names[0], urls.ISSUE_LIST)
  else:
    # Cross-project list: name the projects in a query parameter instead.
    url = urls.ISSUE_LIST
    kwargs['projects'] = ','.join(sorted(project_names))

  # Encode each value as UTF-8 before URL-escaping (Python 2 urllib.quote).
  param_strings = ['%s=%s' % (k, urllib.quote((u'%s' % v).encode('utf-8')))
                   for k, v in kwargs.items()]
  if param_strings:
    # Sort for a stable, canonical query string.
    url += '?' + '&'.join(sorted(param_strings))
  if absolute:
    url = '%s://%s%s' % (mr.request.scheme, mr.request.host, url)

  return url
627
628
def FormatRelativeIssueURL(project_name, path, **kwargs):
  """Format a URL to get to an issue in the named project.

  Args:
    project_name: string name of the project containing the issue.
    path: string servlet path, e.g., from framework/urls.py.
    **kwargs: additional query-string parameters to include in the URL.

  Returns:
    A URL string.
  """
  servlet_path = '/p/%s%s' % (project_name, path)
  return framework_helpers.FormatURL(None, servlet_path, **kwargs)
642
643
def FormatCrBugURL(project_name, local_id):
  """Format a short URL to get to an issue in the named project.

  Args:
    project_name: string name of the project containing the issue.
    local_id: int local ID of the issue.

  Returns:
    A URL string.
  """
  # Short crbug.com links only apply to the production app id; on any
  # other instance, link within the app itself.
  if app_identity.get_application_id() != 'monorail-prod':
    return FormatRelativeIssueURL(
        project_name, urls.ISSUE_DETAIL, id=local_id)
  if project_name == 'chromium':
    # chromium is the crbug.com default project and needs no prefix.
    return 'https://crbug.com/%d' % local_id
  return 'https://crbug.com/%s/%d' % (project_name, local_id)
662
663
def ComputeNewQuotaBytesUsed(project, attachments):
  """Add the given attachments to the project's attachment quota usage.

  Args:
    project: Project PB for the project being updated.
    attachments: a list of (filename, content, mimetype) attachments
        being added to an issue.

  Returns:
    The new number of bytes used.

  Raises:
    OverAttachmentQuota: If project would go over quota.
  """
  added_bytes = sum(
      len(content) for _filename, content, _mimetype in attachments)
  new_bytes_used = project.attachment_bytes_used + added_bytes

  # A project without a custom quota gets the system-wide hard limit.
  quota = (project.attachment_quota or
           tracker_constants.ISSUE_ATTACHMENTS_QUOTA_HARD)
  if new_bytes_used > quota:
    raise exceptions.OverAttachmentQuota(new_bytes_used - quota)
  return new_bytes_used
687
688
def IsUnderSoftAttachmentQuota(project):
  """Check the project's attachment quota against the soft quota limit.

  If there is a custom quota on the project, this will check against
  that instead of the system-wide default quota.

  Args:
    project: Project PB for the project to examine.

  Returns:
    True if the project is under quota, false otherwise.
  """
  if project.attachment_quota:
    # Custom quota: keep some leeway below the configured limit.
    effective_limit = project.attachment_quota - _SOFT_QUOTA_LEEWAY
  else:
    effective_limit = tracker_constants.ISSUE_ATTACHMENTS_QUOTA_SOFT
  return project.attachment_bytes_used < effective_limit
706
707
def GetAllIssueProjects(cnxn, issues, project_service):
  """Get all the projects that the given issues belong to.

  Args:
    cnxn: connection to SQL database.
    issues: list of issues, which may come from different projects.
    project_service: connection to project persistence layer.

  Returns:
    A dictionary {project_id: project} of all the projects that
    any of the given issues belongs to.
  """
  needed_project_ids = set()
  for issue in issues:
    needed_project_ids.add(issue.project_id)
  return project_service.GetProjects(cnxn, needed_project_ids)
723
724
def GetPermissionsInAllProjects(user, effective_ids, projects):
  """Look up the permissions for the given user in each project.

  Returns a dict mapping each project's project_id to the result of
  permissions.GetPermissions for that project.
  """
  perms_by_project_id = {}
  for project in projects:
    perms_by_project_id[project.project_id] = permissions.GetPermissions(
        user, effective_ids, project)
  return perms_by_project_id
731
732
def FilterOutNonViewableIssues(
    effective_ids, user, project_dict, config_dict, issues):
  """Return a filtered list of issues that the user can view.

  Args:
    effective_ids: set of user IDs used for permission checks.
    user: User PB for the requesting user.
    project_dict: dict {project_id: project} covering the issues' projects.
    config_dict: dict of project configs; may include a 'harmonized'
        fallback entry used when a project_id key is missing.
    issues: list of Issue PBs to filter.

  Returns:
    The subset of the given issues that the user may view, in order.
  """
  perms_dict = GetPermissionsInAllProjects(
      user, effective_ids, list(project_dict.values()))

  # Projects in which the user cannot view anything at all.
  denied_project_ids = {
      pid for pid, p in project_dict.items()
      if not permissions.CanView(effective_ids, perms_dict[pid], p, [])}

  results = []
  for issue in issues:
    # Deleted issues and issues in denied projects are always hidden.
    if issue.deleted or issue.project_id in denied_project_ids:
      continue

    if not permissions.HasRestrictions(issue):
      may_view = True
    else:
      # Restricted issues need a per-issue check including granted perms.
      perms = perms_dict[issue.project_id]
      project = project_dict[issue.project_id]
      config = config_dict.get(issue.project_id, config_dict.get('harmonized'))
      granted_perms = tracker_bizobj.GetGrantedPerms(
          issue, effective_ids, config)
      may_view = permissions.CanViewIssue(
          effective_ids, perms, project, issue, granted_perms=granted_perms)

    if may_view:
      results.append(issue)

  return results
763
764
def MeansOpenInProject(status, config):
  """Return true if this status means that the issue is still open.

  This defaults to true if we could not find a matching status.

  Args:
    status: issue status string.  E.g., 'New'.
    config: the config of the current project.

  Returns:
    Boolean True if the status means that the issue is open.
  """
  target_status = status.lower()

  # Scan the project's well-known statuses for a case-insensitive match
  # and report whether that status is declared to be open.
  for well_known in config.well_known_statuses:
    if well_known.status.lower() == target_status:
      return well_known.means_open

  # Unknown statuses are treated as open.
  return True
786
787
def IsNoisy(num_comments, num_starrers):
  """Return True if this is a "noisy" issue that would send a ton of emails.

  The rule is that a very active issue with a large number of comments
  and starrers will only send notification when a comment (or change)
  is made by a project member.

  Args:
    num_comments: int number of comments on issue so far.
    num_starrers: int number of users who starred the issue.

  Returns:
    True if we will not bother starrers with an email notification for
    changes made by non-members.
  """
  has_many_comments = (
      num_comments >= tracker_constants.NOISY_ISSUE_COMMENT_COUNT)
  has_many_starrers = (
      num_starrers >= tracker_constants.NOISY_ISSUE_STARRER_COUNT)
  return has_many_comments and has_many_starrers
805
806
def MergeCCsAndAddComment(services, mr, issue, merge_into_issue):
  """Modify the CC field of the target issue and add a comment to it.

  Convenience wrapper around MergeCCsAndAddCommentMultipleIssues for the
  single-issue case.
  """
  merged_issues = [issue]
  return MergeCCsAndAddCommentMultipleIssues(
      services, mr, merged_issues, merge_into_issue)
811
812
def MergeCCsAndAddCommentMultipleIssues(
    services, mr, issues, merge_into_issue):
  """Modify the CC field of the target issue and add a comment to it."""
  # Build one comment line per merged issue; same-project issues are
  # referenced by bare local ID, others as project:id.
  comment_lines = []
  for issue in issues:
    if issue.project_name == merge_into_issue.project_name:
      issue_ref_str = '%d' % issue.local_id
    else:
      issue_ref_str = '%s:%d' % (issue.project_name, issue.local_id)
    comment_lines.append(
        'Issue %s has been merged into this issue.' % issue_ref_str)
  merge_comment = '\n'.join(comment_lines)

  add_cc = _ComputeNewCcsFromIssueMerge(merge_into_issue, issues)

  config = services.config.GetProjectConfig(
      mr.cnxn, merge_into_issue.project_id)
  delta = tracker_pb2.IssueDelta(cc_ids_add=add_cc)
  _, merge_comment_pb = services.issue.DeltaUpdateIssue(
      mr.cnxn, services, mr.auth.user_id, merge_into_issue.project_id,
      config, merge_into_issue, delta, index_now=False, comment=merge_comment)

  return merge_comment_pb
836
837
def GetAttachmentIfAllowed(mr, services):
  """Retrieve the requested attachment, or raise an appropriate exception.

  Args:
    mr: commonly used info parsed from the request.
    services: connections to backend services.

  Returns:
    The requested Attachment PB, and the Issue that it belongs to.

  Raises:
    NoSuchAttachmentException: attachment was not found or was marked deleted.
    NoSuchIssueException: issue that contains attachment was not found.
    PermissionException: the user is not allowed to view the attachment.
  """
  # NOTE(review): this initial assignment is immediately overwritten by
  # the lookup below; it appears redundant.
  attachment = None

  attachment, cid, issue_id = services.issue.GetAttachmentAndContext(
      mr.cnxn, mr.aid)

  # The user must be allowed to view the issue that holds the attachment.
  issue = services.issue.GetIssue(mr.cnxn, issue_id)
  config = services.config.GetProjectConfig(mr.cnxn, issue.project_id)
  granted_perms = tracker_bizobj.GetGrantedPerms(
      issue, mr.auth.effective_ids, config)
  permit_view = permissions.CanViewIssue(
      mr.auth.effective_ids, mr.perms, mr.project, issue,
      granted_perms=granted_perms)
  if not permit_view:
    raise permissions.PermissionException('Cannot view attachment\'s issue')

  # The user must also be allowed to view the specific comment that the
  # attachment was posted on.
  comment = services.issue.GetComment(mr.cnxn, cid)
  commenter = services.user.GetUser(mr.cnxn, comment.user_id)
  issue_perms = permissions.UpdateIssuePermissions(
      mr.perms, mr.project, issue, mr.auth.effective_ids,
      granted_perms=granted_perms)
  can_view_comment = permissions.CanViewComment(
      comment, commenter, mr.auth.user_id, issue_perms)
  if not can_view_comment:
    raise permissions.PermissionException('Cannot view attachment\'s comment')

  return attachment, issue
879
880
def LabelsMaskedByFields(config, field_names, trim_prefix=False):
  """Return EZTItems for well-known labels that the given fields mask."""
  return _LabelsMaskedOrNot(
      config, field_names, invert=False, trim_prefix=trim_prefix)
884
885
def LabelsNotMaskedByFields(config, field_names, trim_prefix=False):
  """Return EZTItems for well-known labels that no given field masks."""
  return _LabelsMaskedOrNot(
      config, field_names, invert=True, trim_prefix=trim_prefix)
890
891
def _LabelsMaskedOrNot(config, field_names, invert=False, trim_prefix=False):
  """Return EZTItems for labels that'd be masked (or not, when invert=True)."""
  lowered_field_names = [fn.lower() for fn in field_names]
  items = []
  for wkl in config.well_known_labels:
    masked_by = tracker_bizobj.LabelIsMaskedByField(
        wkl.label, lowered_field_names)
    # Keep masked labels normally; keep only unmasked ones when inverted.
    if bool(masked_by) == bool(invert):
      continue
    display_name = wkl.label
    if trim_prefix:
      # Drop the leading '<field name>-' prefix from the label.
      display_name = display_name[len(masked_by) + 1:]
    items.append(template_helpers.EZTItem(
        name=display_name,
        name_padded=display_name.ljust(20),
        commented='#' if wkl.deprecated else '',
        docstring=wkl.label_docstring,
        docstring_short=template_helpers.FitUnsafeText(
            wkl.label_docstring, 40),
        idx=len(items)))

  return items
912
913
def LookupComponentIDs(component_paths, config, errors=None):
  """Look up the IDs of the specified components in the given config."""
  found_ids = []
  for path in component_paths:
    if not path:
      continue
    component_def = tracker_bizobj.FindComponentDef(path, config)
    if not component_def:
      # Record the problem on the errors object when one was provided;
      # otherwise just log it.
      message = 'Unknown component %s' % path
      if errors:
        errors.components = message
      else:
        logging.info(message)
    else:
      found_ids.append(component_def.component_id)

  return found_ids
931
932
def ParsePostDataUsers(cnxn, pd_users_str, user_service):
  """Parse all the usernames from a users string found in a post data.

  Args:
    cnxn: connection to the database.
    pd_users_str: string of emails separated by commas, semicolons, or
      whitespace, as posted in the HTML form.
    user_service: UserService used to look up (and auto-create) user IDs.

  Returns:
    A pair (user_ids, pd_users_str) of the looked-up user IDs and the
    original unparsed string.
  """
  # The separator pattern must be a raw string: '\s' in a plain string
  # literal is an invalid escape sequence in Python 3.
  emails, _remove = _ClassifyPlusMinusItems(re.split(r'[,;\s]+', pd_users_str))
  users_ids_by_email = user_service.LookupUserIDs(cnxn, emails, autocreate=True)
  user_ids = [users_ids_by_email[username] for username in emails if username]
  return user_ids, pd_users_str
939
940
def FilterIssueTypes(config):
  """Return a list of well-known issue types."""
  # An issue type is the part of a well-known 'Type-...' label after the dash.
  return [
      wkl.label.split('-', 1)[1]
      for wkl in config.well_known_labels
      if wkl.label.lower().startswith('type-')
  ]
950
951
def ParseMergeFields(
    cnxn, services, project_name, post_data, status, config, issue, errors):
  """Parse info that identifies the issue to merge into, if any.

  Args:
    cnxn: connection to the database.
    services: connections to backend services.
    project_name: name of the project that the edited issue is in.
    post_data: HTML form data posted by the user.
    status: the issue status string selected by the user.
    config: ProjectIssueConfig for the current project.
    issue: the Issue being edited.
    errors: object whose merge_into_id attribute is set to an error
      message string when parsing fails.

  Returns:
    A pair (merge_into_text, merge_into_issue): the raw user-entered text and
    the Issue PB to merge into, or None when there is no valid merge target.
  """
  merge_into_text = ''
  merge_into_ref = None
  merge_into_issue = None

  # Only statuses configured to offer merging can carry a merge-into issue.
  if status not in config.statuses_offer_merge:
    return '', None

  merge_into_text = post_data.get('merge_into', '')
  if merge_into_text:
    try:
      merge_into_ref = tracker_bizobj.ParseIssueRef(merge_into_text)
    except ValueError:
      logging.info('merge_into not an int: %r', merge_into_text)
      errors.merge_into_id = 'Please enter a valid issue ID'

  if not merge_into_ref:
    errors.merge_into_id = 'Please enter an issue ID'
    return merge_into_text, None

  merge_into_project_name, merge_into_id = merge_into_ref
  # A ref with no project name refers to the current project.
  if (merge_into_id == issue.local_id and
      (merge_into_project_name == project_name or
       not merge_into_project_name)):
    logging.info('user tried to merge issue into itself: %r', merge_into_ref)
    errors.merge_into_id = 'Cannot merge issue into itself'
    return merge_into_text, None

  project = services.project.GetProjectByName(
      cnxn, merge_into_project_name or project_name)
  try:
    # Because we will modify this issue, load from DB rather than cache.
    merge_into_issue = services.issue.GetIssueByLocalID(
        cnxn, project.project_id, merge_into_id, use_cache=False)
  except Exception:
    logging.info('merge_into issue not found: %r', merge_into_ref)
    errors.merge_into_id = 'No such issue'
    return merge_into_text, None

  return merge_into_text, merge_into_issue
994
995
def GetNewIssueStarrers(cnxn, services, issue_ids, merge_into_iid):
  # type: (MonorailConnection, Services, Sequence[int], int) ->
  #     Collection[int]
  """Get starrers of current issue who have not starred the target issue."""
  starrers_by_iid = services.issue_star.LookupItemsStarrers(cnxn, issue_ids)
  source_starrers = set()
  for starrer_ids in starrers_by_iid.values():
    source_starrers.update(starrer_ids)
  target_starrers = set(
      services.issue_star.LookupItemStarrers(cnxn, merge_into_iid))
  return source_starrers - target_starrers
1007
1008
def AddIssueStarrers(
    cnxn, services, mr, merge_into_iid, merge_into_project, new_starrers):
  """Merge all the starrers for the current issue into the target issue."""
  target_project = merge_into_project if merge_into_project else mr.project
  target_config = services.config.GetProjectConfig(
      mr.cnxn, target_project.project_id)
  services.issue_star.SetStarsBatch(
      cnxn, services, target_config, merge_into_iid, new_starrers, True)
1016
1017
def IsMergeAllowed(merge_into_issue, mr, services):
  """Check to see if user has permission to merge with specified issue."""
  target_project = services.project.GetProjectByName(
      mr.cnxn, merge_into_issue.project_name)
  target_config = services.config.GetProjectConfig(
      mr.cnxn, target_project.project_id)
  granted_perms = tracker_bizobj.GetGrantedPerms(
      merge_into_issue, mr.auth.effective_ids, target_config)
  restrictions = permissions.GetRestrictions(merge_into_issue)

  # Merging requires both viewing and editing the target issue.
  can_view = mr.perms.CanUsePerm(
      permissions.VIEW, mr.auth.effective_ids, target_project,
      restrictions, granted_perms=granted_perms)
  can_edit = mr.perms.CanUsePerm(
      permissions.EDIT_ISSUE, mr.auth.effective_ids, target_project,
      restrictions, granted_perms=granted_perms)
  return can_view and can_edit
1037
1038
def GetVisibleMembers(mr, project, services):
  """Return the project members that the requesting user may see.

  Args:
    mr: commonly used info parsed from the request.
    project: the Project whose members we are listing.
    services: connections to backend services.

  Returns:
    A dict {user_id: UserView} of visible members, excluding service
    accounts, users who opted out of autocomplete, and users with
    obscured email addresses.
  """
  all_member_ids = project_helpers.AllProjectMembers(project)

  # Determine which of the member IDs are actually user groups.
  all_group_ids = services.usergroup.DetermineWhichUserIDsAreGroups(
      mr.cnxn, all_member_ids)

  # ac_exclusion_ids are users who opted out of autocomplete; no_expand_ids
  # are groups whose memberships should not be expanded below.
  (ac_exclusion_ids, no_expand_ids
  ) = services.project.GetProjectAutocompleteExclusion(
      mr.cnxn, project.project_id)

  group_ids_to_expand = [
      gid for gid in all_group_ids if gid not in no_expand_ids]

  # TODO(jrobbins): Normally, users will be allowed view the members
  # of any user group if the project From: email address is listed
  # as a group member, as well as any group that they are personally
  # members of.
  member_ids, owner_ids = services.usergroup.LookupVisibleMembers(
      mr.cnxn, group_ids_to_expand, mr.perms, mr.auth.effective_ids, services)
  # Flatten the per-group membership dicts into one set of indirect users.
  indirect_user_ids = set()
  for gids in member_ids.values():
    indirect_user_ids.update(gids)
  for gids in owner_ids.values():
    indirect_user_ids.update(gids)

  visible_member_ids = _FilterMemberData(
      mr, project.owner_ids, project.committer_ids, project.contributor_ids,
      indirect_user_ids, project)

  # Drop linked child accounts whose parent account is also listed.
  visible_member_ids = _MergeLinkedMembers(
      mr.cnxn, services.user, visible_member_ids)

  visible_member_views = framework_views.MakeAllUserViews(
      mr.cnxn, services.user, visible_member_ids, group_ids=all_group_ids)
  framework_views.RevealAllEmailsToMembers(
      mr.cnxn, services, mr.auth, visible_member_views, project)

  # Filter out service accounts
  service_acct_emails = set(
      client_config_svc.GetClientConfigSvc().GetClientIDEmails()[1])
  visible_member_views = {
      m.user_id: m
      for m in visible_member_views.values()
      # Hide service accounts from autocomplete.
      if not framework_helpers.IsServiceAccount(
          m.email, client_emails=service_acct_emails)
      # Hide users who opted out of autocomplete.
      and not m.user_id in ac_exclusion_ids
      # Hide users who have obscured email addresses.
      and not m.obscure_email
  }

  return visible_member_views
1092
1093
1094def _MergeLinkedMembers(cnxn, user_service, user_ids):
1095 """Remove any linked child accounts if the parent would also be shown."""
1096 all_ids = set(user_ids)
1097 users_by_id = user_service.GetUsersByIDs(cnxn, user_ids)
1098 result = [uid for uid in user_ids
1099 if users_by_id[uid].linked_parent_id not in all_ids]
1100 return result
1101
1102
def _FilterMemberData(
    mr, owner_ids, committer_ids, contributor_ids, indirect_member_ids,
    project):
  """Return a filtered list of members that the user can view.

  In most projects, everyone can view the entire member list. But,
  some projects are configured to only allow project owners to see
  all members. In those projects, committers and contributors do not
  see any contributors. Regardless of how the project is configured
  or the role that the user plays in the current project, we include
  any indirect members through user groups that the user has access
  to view.

  Args:
    mr: Commonly used info parsed from the HTTP request.
    owner_ids: list of user IDs for project owners.
    committer_ids: list of user IDs for project committers.
    contributor_ids: list of user IDs for project contributors.
    indirect_member_ids: list of user IDs for users who have
      an indirect role in the project via a user group, and that the
      logged in user is allowed to see.
    project: the Project we're interested in.

  Returns:
    A sorted list of owners, committers and visible indirect members if the
    user is not signed in. If the project is set to display contributors to
    non-owners or the signed in user has necessary permissions then
    additionally a list of contributors.
  """
  visible_members_ids = set()

  # Everyone can view owners and committers
  visible_members_ids.update(owner_ids)
  visible_members_ids.update(committer_ids)

  # The list of indirect members is already limited to ones that the user
  # is allowed to see according to user group settings.
  visible_members_ids.update(indirect_member_ids)

  # If the user is allowed to view the list of contributors, add those too.
  if permissions.CanViewContributorList(mr, project):
    visible_members_ids.update(contributor_ids)

  return sorted(visible_members_ids)
1147
1148
def GetLabelOptions(config, custom_permissions):
  """Prepares label options for autocomplete."""
  # Labels masked by active enum fields are not offered as plain labels.
  enum_field_names = [
      fd.field_name
      for fd in config.field_defs
      if not fd.is_deleted
      and fd.field_type is tracker_pb2.FieldTypes.ENUM_TYPE
  ]
  options = [
      {'name': wkl.name, 'doc': wkl.docstring}
      for wkl in LabelsNotMaskedByFields(config, enum_field_names)
      if not wkl.commented
  ]

  restrictions = list(_FREQUENT_ISSUE_RESTRICTIONS)
  if not custom_permissions:
    restrictions += _EXAMPLE_ISSUE_RESTRICTIONS

  options.extend(_BuildRestrictionChoices(
      restrictions, permissions.STANDARD_ISSUE_PERMISSIONS,
      custom_permissions))

  return options
1173
1174
1175def _BuildRestrictionChoices(freq_restrictions, actions, custom_permissions):
1176 """Return a list of autocompletion choices for restriction labels.
1177
1178 Args:
1179 freq_restrictions: list of (action, perm, doc) tuples for restrictions
1180 that are frequently used.
1181 actions: list of strings for actions that are relevant to the current
1182 artifact.
1183 custom_permissions: list of strings with custom permissions for the project.
1184
1185 Returns:
1186 A list of dictionaries [{'name': 'perm name', 'doc': 'docstring'}, ...]
1187 suitable for use in a JSON feed to our JS autocompletion functions.
1188 """
1189 choices = []
1190
1191 for action, perm, doc in freq_restrictions:
1192 choices.append({
1193 'name': 'Restrict-%s-%s' % (action, perm),
1194 'doc': doc,
1195 })
1196
1197 for action in actions:
1198 for perm in custom_permissions:
1199 choices.append({
1200 'name': 'Restrict-%s-%s' % (action, perm),
1201 'doc': 'Permission %s needed to use %s' % (perm, action),
1202 })
1203
1204 return choices
1205
1206
def FilterKeptAttachments(
    is_description, kept_attachments, comments, approval_id):
  """Filter kept attachments to be a subset of last description's attachments.

  Args:
    is_description: bool, if the comment is a change to the issue description.
    kept_attachments: list of ints with the attachment ids for attachments
      kept from previous descriptions, if the comment is a change to the
      issue description.
    comments: list of IssueComment PBs for the issue we want to edit.
    approval_id: int id of the APPROVAL_TYPE fielddef, if we're editing an
      approval description, or None otherwise.

  Returns:
    A list of kept_attachment ids that are a subset of the last description.
  """
  if not is_description:
    return None

  # Find the most recent description for this approval (or the issue
  # itself when approval_id is None) and collect its attachment ids.
  allowed_ids = set()
  for comment in reversed(comments):
    if comment.is_description and comment.approval_id == approval_id:
      allowed_ids = {att.attachment_id for att in comment.attachments}
      break

  return [aid for aid in kept_attachments if aid in allowed_ids]
1235
1236
1237def _GetEnumFieldValuesAndDocstrings(field_def, config):
1238 # type: (proto.tracker_pb2.LabelDef, proto.tracker_pb2.ProjectIssueConfig) ->
1239 # Sequence[tuple(string, string)]
1240 """Get sequence of value, docstring tuples for an enum field"""
1241 label_defs = config.well_known_labels
1242 lower_field_name = field_def.field_name.lower()
1243 tuples = []
1244 for ld in label_defs:
1245 if (ld.label.lower().startswith(lower_field_name + '-') and
1246 not ld.deprecated):
1247 label_value = ld.label[len(lower_field_name) + 1:]
1248 tuples.append((label_value, ld.label_docstring))
1249 else:
1250 continue
1251 return tuples
1252
1253
# _IssueChangesTuple is returned by ApplyAllIssueChanges() and is used to bundle
# the updated issues, resulting amendments, and other information needed by the
# caller to process the changes in the DB and send notifications.
_IssueChangesTuple = collections.namedtuple(
    '_IssueChangesTuple', [
        'issues_to_update_dict', 'merged_from_add_by_iid', 'amendments_by_iid',
        'imp_amendments_by_iid', 'old_owners_by_iid', 'old_statuses_by_iid',
        'old_components_by_iid', 'new_starrers_by_iid'
    ])
# Field types, in declaration order:
# type: (Mapping[int, Issue], DefaultDict[int, Sequence[int]],
#   Mapping[int, Amendment], Mapping[int, Amendment], Mapping[int, int],
#   Mapping[int, str], Mapping[int, Sequence[int]],
#   Mapping[int, Sequence[int]])-> None
1267
1268
def ApplyAllIssueChanges(cnxn, issue_delta_pairs, services):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services) ->
  #   IssueChangesTuple
  """Modify the given issues with the given deltas and impacted issues in RAM.

  Filter rules are not applied in this method.
  This method implements phases 3 and 4 of the process for modifying issues.
  See WorkEnv.ModifyIssues() for other phases and overall process.

  Args:
    cnxn: MonorailConnection object.
    issue_delta_pairs: List of tuples that couple Issues with the IssueDeltas
      that represent the updates we want to make to each Issue.
    services: Services object for connection to backend services.

  Returns:
    An _IssueChangesTuple named tuple.
  """
  # Collects issues indirectly affected by these deltas so they can be
  # updated in PHASE 4.
  impacted_tracker = _IssueChangeImpactedIssues()
  project_ids = {issue.project_id for issue, _delta in issue_delta_pairs}
  configs_by_pid = services.config.GetProjectConfigs(cnxn, list(project_ids))

  # Track issues which have been modified in RAM and will need to
  # be updated in the DB.
  issues_to_update_dict = {}

  amendments_by_iid = {}
  old_owners_by_iid = {}
  old_statuses_by_iid = {}
  old_components_by_iid = {}
  # PHASE 3: Update the main issues in RAM (not indirectly, impacted issues).
  for issue, delta in issue_delta_pairs:
    # Cache old data that will be used by future computations.
    # Only record old values for fields the delta actually changes.
    old_owner = tracker_bizobj.GetOwnerId(issue)
    old_status = tracker_bizobj.GetStatus(issue)
    if delta.owner_id is not None and delta.owner_id != old_owner:
      old_owners_by_iid[issue.issue_id] = old_owner
    if delta.status is not None and delta.status != old_status:
      old_statuses_by_iid[issue.issue_id] = old_status
    new_components = set(issue.component_ids)
    new_components.update(delta.comp_ids_add or [])
    new_components.difference_update(delta.comp_ids_remove or [])
    if set(issue.component_ids) != new_components:
      old_components_by_iid[issue.issue_id] = issue.component_ids

    impacted_tracker.TrackImpactedIssues(issue, delta)
    config = configs_by_pid.get(issue.project_id)
    amendments, _impacted_iids = tracker_bizobj.ApplyIssueDelta(
        cnxn, services.issue, issue, delta, config)
    if amendments:
      issues_to_update_dict[issue.issue_id] = issue
      amendments_by_iid[issue.issue_id] = amendments

  # PHASE 4: Update impacted issues in RAM.
  logging.info('Applying impacted issue changes: %r', impacted_tracker.__dict__)
  imp_amendments_by_iid = {}
  impacted_iids = impacted_tracker.ComputeAllImpactedIIDs()
  new_starrers_by_iid = {}
  for issue_id in impacted_iids:
    # Changes made to an impacted issue should be on top of changes
    # made to it in PHASE 3 where it might have been a 'main' issue.
    issue = issues_to_update_dict.get(
        issue_id, services.issue.GetIssue(cnxn, issue_id, use_cache=False))

    # Apply impacted changes.
    amendments, new_starrers = impacted_tracker.ApplyImpactedIssueChanges(
        cnxn, issue, services)
    if amendments:
      imp_amendments_by_iid[issue.issue_id] = amendments
      issues_to_update_dict[issue.issue_id] = issue
    if new_starrers:
      new_starrers_by_iid[issue.issue_id] = new_starrers

  return _IssueChangesTuple(
      issues_to_update_dict, impacted_tracker.merged_from_add,
      amendments_by_iid, imp_amendments_by_iid, old_owners_by_iid,
      old_statuses_by_iid, old_components_by_iid, new_starrers_by_iid)
1346
1347
def UpdateClosedTimestamp(config, issue, old_effective_status):
  # type: (proto.tracker_pb2.ProjectIssueConfig, proto.tracker_pb2.Issue, str)
  #     -> None
  """Set or clear issue.closed_timestamp based on a status transition.

  An open -> closed transition stamps the current time; a closed -> open
  transition clears the timestamp.  Transitions that stay open or stay
  closed leave the issue untouched.

  Args:
    config: the project configuration.
    issue: the issue being updated (a protocol buffer).
    old_effective_status: the old issue status string. E.g., 'New'

  SIDE EFFECTS:
    Updated issue in place with new closed timestamp.
  """
  was_open = MeansOpenInProject(old_effective_status or '', config)
  is_open = MeansOpenInProject(tracker_bizobj.GetStatus(issue), config)

  if was_open and not is_open:
    # open -> closed: record when the issue was closed.
    issue.closed_timestamp = int(time.time())
  elif is_open and not was_open:
    # closed -> open: the close time no longer applies.
    issue.reset('closed_timestamp')
1383
1384
def GroupUniqueDeltaIssues(issue_delta_pairs):
  # type: (Tuple[Issue, IssueDelta]) -> (
  #     Sequence[IssueDelta], Sequence[Sequence[Issue]])
  """Identifies unique IssueDeltas and groups Issues with identical IssueDeltas.

  Args:
    issue_delta_pairs: List of tuples that couple Issues with the IssueDeltas
      that represent the updates we want to make to each Issue.

  Returns:
    (unique_deltas, issues_for_unique_deltas):
      unique_deltas: List of unique IssueDeltas found in issue_delta_pairs.
      issues_for_unique_deltas: List of Issue lists. Each Issue list
        contains all the Issues that had identical IssueDeltas.
        Each issues_for_unique_deltas[i] is the list of Issues
        that had unique_deltas[i] as their IssueDeltas.
  """
  unique_deltas = []
  issues_for_unique_deltas = []
  for issue, delta in issue_delta_pairs:
    # Linear scan by equality: deltas may not be hashable, so a dict
    # cannot be used here.
    for index, known_delta in enumerate(unique_deltas):
      if known_delta == delta:
        issues_for_unique_deltas[index].append(issue)
        break
    else:
      # First time we see this delta: start a new group for it.
      unique_deltas.append(delta)
      issues_for_unique_deltas.append([issue])

  return unique_deltas, issues_for_unique_deltas
1416
1417
1418def _AssertNoConflictingDeltas(issue_delta_pairs, refs_dict, err_agg):
1419 # type: (Sequence[Tuple[Issue, IssueDelta]], Mapping[int, str],
1420 # exceptions.ErrorAggregator) -> None
1421 """Checks if any issue deltas conflict with each other or themselves.
1422
1423 Note: refs_dict should contain issue ref strings for all issues found
1424 in issue_delta_pairs, including all issues found in
1425 {blocked_on|blocking}_{add|remove}.
1426 """
1427 err_message = 'Changes for {} conflict for {}'
1428
1429 # Track all delta blocked_on_add and blocking_add in terms of
1430 # 'blocking_add' so we can track when a {blocked_on|blocking}_remove
1431 # is in conflict with some {blocked_on|blocking}_add.
1432 blocking_add = collections.defaultdict(list)
1433 for issue, delta in issue_delta_pairs:
1434 blocking_add[issue.issue_id].extend(delta.blocking_add)
1435
1436 for imp_iid in delta.blocked_on_add:
1437 blocking_add[imp_iid].append(issue.issue_id)
1438
1439 # Check *_remove for conflicts with tracking blocking_add.
1440 for issue, delta in issue_delta_pairs:
1441 added_iids = blocking_add[issue.issue_id]
1442 # Get intersection of iids that are in `blocking_remove` and
1443 # the tracked `blocking_add`.
1444 conflict_iids = set(delta.blocking_remove) & set(added_iids)
1445
1446 # Get iids of `blocked_on_remove` that conflict with the
1447 # tracked `blocking_add`.
1448 for possible_conflict_iid in delta.blocked_on_remove:
1449 if issue.issue_id in blocking_add[possible_conflict_iid]:
1450 conflict_iids.add(possible_conflict_iid)
1451
1452 if conflict_iids:
1453 refs_str = ', '.join([refs_dict[iid] for iid in conflict_iids])
1454 err_agg.AddErrorMessage(err_message, refs_dict[issue.issue_id], refs_str)
1455
1456
def PrepareIssueChanges(
    cnxn,
    issue_delta_pairs,
    services,
    attachment_uploads=None,
    comment_content=None):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services,
  #   Optional[Sequence[framework_helpers.AttachmentUpload]], Optional[str])
  #   -> Mapping[int, int]
  """Clean the deltas and assert they are valid for each paired issue."""
  _EnforceNonMergeStatusDeltas(cnxn, issue_delta_pairs, services)
  _AssertIssueChangesValid(
      cnxn, issue_delta_pairs, services, comment_content=comment_content)

  if not attachment_uploads:
    return {}
  # Quota is only checked when there is something new to attach.
  return _EnforceAttachmentQuotaLimits(
      cnxn, issue_delta_pairs, services, attachment_uploads)
1475
1476
def _EnforceAttachmentQuotaLimits(
    cnxn, issue_delta_pairs, services, attachment_uploads):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services
  #   Optional[Sequence[framework_helpers.AttachmentUpload]]
  #   -> Mapping[int, int]
  """Assert that the attachments don't exceed project quotas."""
  # Every issue in a project receives a copy of the uploads, so quota is
  # charged per issue.
  counts_by_pid = collections.defaultdict(int)
  for issue, _delta in issue_delta_pairs:
    counts_by_pid[issue.project_id] += 1

  projects_by_id = services.project.GetProjects(cnxn, counts_by_pid.keys())

  new_bytes_by_pid = {}
  with exceptions.ErrorAggregator(exceptions.OverAttachmentQuota) as err_agg:
    for pid, issue_count in counts_by_pid.items():
      project = projects_by_id[pid]
      try:
        new_bytes_by_pid[pid] = ComputeNewQuotaBytesUsed(
            project, attachment_uploads * issue_count)
      except exceptions.OverAttachmentQuota:
        err_agg.AddErrorMessage(
            'Attachment quota exceeded for project {}', project.project_name)
  return new_bytes_by_pid
1501
1502
def _AssertIssueChangesValid(
    cnxn, issue_delta_pairs, services, comment_content=None):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services,
  #   Optional[str]) -> None
  """Assert that the delta changes are valid for each paired issue.

  Note: this method does not check if the changes trigger any FilterRule
    `warnings` or `errors`.

  Raises:
    exceptions.NoSuchIssueException: if any referenced blocking/blocked-on
      issue cannot be found.
    exceptions.InputException: aggregated, if any delta is invalid.
  """
  project_ids = list(
      {issue.project_id for (issue, _delta) in issue_delta_pairs})
  projects_by_id = services.project.GetProjects(cnxn, project_ids)
  configs_by_id = services.config.GetProjectConfigs(cnxn, project_ids)
  refs_dict = {
      iss.issue_id: '%s:%d' % (iss.project_name, iss.local_id)
      for iss, _delta in issue_delta_pairs
  }
  # Add refs of deltas' blocking/blocked_on issues needed by
  # _AssertNoConflictingDeltas.
  relation_iids = set()
  for _iss, delta in issue_delta_pairs:
    relation_iids.update(
        delta.blocked_on_remove + delta.blocking_remove + delta.blocked_on_add +
        delta.blocking_add)
  relation_issues_dict, misses = services.issue.GetIssuesDict(
      cnxn, relation_iids)
  if misses:
    raise exceptions.NoSuchIssueException(
        'Could not find issues with ids: %r' % misses)
  for iid, iss in relation_issues_dict.items():
    if iid not in refs_dict:
      refs_dict[iid] = '%s:%d' % (iss.project_name, iss.local_id)

  with exceptions.ErrorAggregator(exceptions.InputException) as err_agg:
    if (comment_content and
        len(comment_content.strip()) > tracker_constants.MAX_COMMENT_CHARS):
      err_agg.AddErrorMessage('Comment is too long.')

    _AssertNoConflictingDeltas(issue_delta_pairs, refs_dict, err_agg)

    for issue, delta in issue_delta_pairs:
      project = projects_by_id.get(issue.project_id)
      config = configs_by_id.get(issue.project_id)
      issue_ref = refs_dict[issue.issue_id]

      # A MERGED-type status must be accompanied by a merged_into value,
      # and vice versa, considering both the delta and the issue's
      # existing values.
      if (delta.merged_into is not None or
          delta.merged_into_external is not None or delta.status is not None):
        end_status = delta.status or issue.status
        merged_options = [
            delta.merged_into, delta.merged_into_external, issue.merged_into,
            issue.merged_into_external
        ]
        end_merged_into = next(
            (merge for merge in merged_options if merge is not None), None)

        is_merge_status = end_status.lower() in [
            status.lower() for status in config.statuses_offer_merge
        ]

        if ((is_merge_status and not end_merged_into) or
            (not is_merge_status and end_merged_into)):
          err_agg.AddErrorMessage(
              '{}: MERGED type statuses must accompany mergedInto values.',
              issue_ref)

      if delta.merged_into and issue.issue_id == delta.merged_into:
        err_agg.AddErrorMessage(
            '{}: Cannot merge an issue into itself.', issue_ref)
      if (issue.issue_id in set(
          delta.blocked_on_add)) or (issue.issue_id in set(delta.blocking_add)):
        err_agg.AddErrorMessage(
            '{}: Cannot block an issue on itself.', issue_ref)
      if (delta.owner_id is not None) and (delta.owner_id != issue.owner_id):
        parsed_owner_valid, msg = IsValidIssueOwner(
            cnxn, project, delta.owner_id, services)
        if not parsed_owner_valid:
          err_agg.AddErrorMessage('{}: {}', issue_ref, msg)
      # The owner was already checked by IsValidIssueOwner above; check
      # that all other referenced users exist.
      all_users = [uid for uid in delta.cc_ids_add]
      field_users = [fv.user_id for fv in delta.field_vals_add if fv.user_id]
      all_users.extend(field_users)
      AssertUsersExist(cnxn, services, all_users, err_agg)
      if (delta.summary and
          len(delta.summary.strip()) > tracker_constants.MAX_SUMMARY_CHARS):
        err_agg.AddErrorMessage('{}: Summary is too long.', issue_ref)
      if delta.summary == '':
        err_agg.AddErrorMessage('{}: Summary required.', issue_ref)
      if delta.status == '':
        err_agg.AddErrorMessage('{}: Status is required.', issue_ref)
      # Do not pass in issue for validation, as issue is pre-update, and would
      # result in being unable to edit issues in invalid states.
      fvs_err_msgs = field_helpers.ValidateCustomFields(
          cnxn, services, delta.field_vals_add, config, project)
      if fvs_err_msgs:
        err_agg.AddErrorMessage('{}: {}', issue_ref, '\n'.join(fvs_err_msgs))
      # TODO(crbug.com/monorail/9156): Validate that we do not remove fields
      # such that a required field becomes unset.
1600
1601
def AssertUsersExist(cnxn, services, user_ids, err_agg):
  # type: (MonorailConnection, Services, Sequence[int], ErrorAggregator) -> None
  """Assert that all users exist.

  Has the side-effect of adding error messages to the input ErrorAggregator.
  """
  existing = services.user.GetUsersByIDs(cnxn, user_ids, skip_missed=True)
  for user_id in user_ids:
    if user_id not in existing:
      err_agg.AddErrorMessage(
          'users/{}: User does not exist.'.format(user_id))
1614
1615
def AssertValidIssueForCreate(cnxn, services, issue, description):
  # type: (MonorailConnection, Services, Issue, str) -> None
  """Assert that issue proto is valid for issue creation.

  Args:
    cnxn: A connection object to use services with.
    services: An object containing services to use to look up relevant data.
    issue: A PB containing the issue to validate.
    description: The description for the issue.

  Raises:
    InputException if the issue is not valid.
  """
  project = services.project.GetProject(cnxn, issue.project_id)
  config = services.config.GetProjectConfig(cnxn, issue.project_id)

  with exceptions.ErrorAggregator(exceptions.InputException) as err_agg:
    owner_is_valid, owner_err_msg = IsValidIssueOwner(
        cnxn, project, issue.owner_id, services)
    if not owner_is_valid:
      err_agg.AddErrorMessage(owner_err_msg)
    if not issue.summary.strip():
      err_agg.AddErrorMessage('Summary is required')
    if not description.strip():
      err_agg.AddErrorMessage('Description is required')
    if len(issue.summary) > tracker_constants.MAX_SUMMARY_CHARS:
      err_agg.AddErrorMessage('Summary is too long')
    if len(description) > tracker_constants.MAX_COMMENT_CHARS:
      err_agg.AddErrorMessage('Description is too long')

    # Check all users exist. Owner was already checked by IsValidIssueOwner.
    all_users = [uid for uid in issue.cc_ids]
    for av in issue.approval_values:
      all_users.extend(av.approver_ids)
    field_users = [fv.user_id for fv in issue.field_values if fv.user_id]
    all_users.extend(field_users)
    AssertUsersExist(cnxn, services, all_users, err_agg)

    field_validity_errors = field_helpers.ValidateCustomFields(
        cnxn, services, issue.field_values, config, project, issue=issue)
    if field_validity_errors:
      err_agg.AddErrorMessage("\n".join(field_validity_errors))
    # The status must already be defined in the project config.
    if not services.config.LookupStatusID(cnxn, issue.project_id, issue.status,
                                          autocreate=False):
      err_agg.AddErrorMessage('Undefined status: %s' % issue.status)
    # Components must exist in the config and not be deprecated.
    all_comp_ids = {
        cd.component_id for cd in config.component_defs if not cd.deprecated
    }
    for comp_id in issue.component_ids:
      if comp_id not in all_comp_ids:
        err_agg.AddErrorMessage(
            'Undefined or deprecated component with id: %d' % comp_id)
1668
1669
def _ComputeNewCcsFromIssueMerge(merge_into_issue, source_issues):
  # type: (Issue, Collection[Issue]) -> Collection[int]
  """Compute ccs that should be added from source_issues to merge_into_issue."""

  target_restrictions = set(permissions.GetRestrictions(merge_into_issue))
  candidate_cc_ids = set()
  for source_issue in source_issues:
    # Avoid leaking metadata (like ccs) of restricted issues: a restricted
    # source issue only contributes its ccs when it lives in the same
    # project as the target and carries exactly the same restrictions.
    if permissions.HasRestrictions(source_issue, perm='View'):
      source_restrictions = set(permissions.GetRestrictions(source_issue))
      same_project = source_issue.project_id == merge_into_issue.project_id
      if not same_project or source_restrictions != target_restrictions:
        continue

    candidate_cc_ids.update(source_issue.cc_ids)
    if source_issue.owner_id:
      candidate_cc_ids.add(source_issue.owner_id)

  # Only report ccs that the target issue does not already have.
  existing_cc_ids = merge_into_issue.cc_ids
  return [cc_id for cc_id in candidate_cc_ids if cc_id not in existing_cc_ids]
1691
1692
1693def _EnforceNonMergeStatusDeltas(cnxn, issue_delta_pairs, services):
1694 # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services)
1695 """Update deltas in RAM to remove merged if a MERGED status is removed."""
1696 project_ids = list(
1697 {issue.project_id for (issue, _delta) in issue_delta_pairs})
1698 configs_by_id = services.config.GetProjectConfigs(cnxn, project_ids)
1699 statuses_offer_merge_by_pid = {
1700 pid:
1701 [status.lower() for status in configs_by_id[pid].statuses_offer_merge]
1702 for pid in project_ids
1703 }
1704
1705 for issue, delta in issue_delta_pairs:
1706 statuses_offer_merge = statuses_offer_merge_by_pid[issue.project_id]
1707 # Remove merged_into and merged_into_external when a status is moved
1708 # to a non-MERGED status ONLY if the delta does not have merged_into values
1709 # If delta does change merged_into values, the request will fail from
1710 # AssertIssueChangesValue().
1711 if (delta.status and delta.status.lower() not in statuses_offer_merge and
1712 delta.merged_into is None and delta.merged_into_external is None):
1713 if issue.merged_into:
1714 delta.merged_into = 0
1715 elif issue.merged_into_external:
1716 delta.merged_into_external = ''
1717
1718
1719class _IssueChangeImpactedIssues():
1720 """Class to track changes of issues impacted by updates to other issues."""
1721
1722 def __init__(self):
1723
1724 # Each of the dicts below should be used to track
1725 # {impacted_issue_id: [issues being modified that impact the keyed issue]}.
1726
1727 # e.g. `blocking_remove` with {iid_1: [iid_2, iid_3]} means that
1728 # `TrackImpactedIssues` has been called with a delta of
1729 # IssueDelta(blocked_on_remove=[iid_1]) for both issue 2 and issue 3.
1730 self.blocking_add = collections.defaultdict(list)
1731 self.blocking_remove = collections.defaultdict(list)
1732 self.blocked_on_add = collections.defaultdict(list)
1733 self.blocked_on_remove = collections.defaultdict(list)
1734 self.merged_from_add = collections.defaultdict(list)
1735 self.merged_from_remove = collections.defaultdict(list)
1736
1737 def ComputeAllImpactedIIDs(self):
1738 # type: () -> Collection[int]
1739 """Computes the unique set of all impacted issue ids."""
1740 return set(self.blocking_add.keys() + self.blocking_remove.keys() +
1741 self.blocked_on_add.keys() + self.blocked_on_remove.keys() +
1742 self.merged_from_add.keys() + self.merged_from_remove.keys())
1743
1744 def TrackImpactedIssues(self, issue, delta):
1745 # type: (Issue, IssueDelta) -> None
1746 """Track impacted issues from when `delta` is applied to `issue`.
1747
1748 Args:
1749 issue: Issue that the delta will be applied to, but has not yet.
1750 delta: IssueDelta representing the changes that will be made to
1751 the issue.
1752 """
1753 for impacted_iid in delta.blocked_on_add:
1754 self.blocking_add[impacted_iid].append(issue.issue_id)
1755 for impacted_iid in delta.blocked_on_remove:
1756 self.blocking_remove[impacted_iid].append(issue.issue_id)
1757
1758 for impacted_iid in delta.blocking_add:
1759 self.blocked_on_add[impacted_iid].append(issue.issue_id)
1760 for impacted_iid in delta.blocking_remove:
1761 self.blocked_on_remove[impacted_iid].append(issue.issue_id)
1762
1763 if (delta.merged_into == framework_constants.NO_ISSUE_SPECIFIED and
1764 issue.merged_into):
1765 self.merged_from_remove[issue.merged_into].append(issue.issue_id)
1766 elif delta.merged_into and issue.merged_into != delta.merged_into:
1767 self.merged_from_add[delta.merged_into].append(issue.issue_id)
1768 if issue.merged_into:
1769 self.merged_from_remove[issue.merged_into].append(issue.issue_id)
1770
  def ApplyImpactedIssueChanges(self, cnxn, impacted_issue, services):
    # type: (MonorailConnection, Issue, Services) ->
    # Tuple[Collection[Amendment], Sequence[int]]
    """Apply the tracked changes in RAM for the given impacted issue.

    Args:
      cnxn: connection to SQL database.
      impacted_issue: Issue PB that we are applying the changes to.
      services: Services used to fetch info from DB or cache.

    Returns:
      All the amendments that represent the changes applied to the issue
      and a list of the new issue starrers.

    Side-effect:
      The given impacted_issue will be updated in RAM.
    """
    issue_id = impacted_issue.issue_id

    # Process changes for blocking/blocked_on issue changes.
    # NOTE(review): indexing the defaultdicts here (unlike the .get() calls
    # below) inserts empty-list entries for issue_id as a side effect; that
    # appears harmless for the computation but does grow the tracking dicts.
    amendments, _impacted_iids = tracker_bizobj.ApplyIssueBlockRelationChanges(
        cnxn, impacted_issue, self.blocked_on_add[issue_id],
        self.blocked_on_remove[issue_id], self.blocking_add[issue_id],
        self.blocking_remove[issue_id], services.issue)

    # Process changes in merged issues. .get() avoids inserting new keys.
    merged_from_add = self.merged_from_add.get(issue_id, [])
    merged_from_remove = self.merged_from_remove.get(issue_id, [])

    # Merge ccs into impacted_issue from all merged issues,
    # compute new starrers, and set star_count.
    new_starrers = []
    if merged_from_add:
      issues_dict, _misses = services.issue.GetIssuesDict(cnxn, merged_from_add)
      merged_from_add_issues = issues_dict.values()
      new_cc_ids = _ComputeNewCcsFromIssueMerge(
          impacted_issue, merged_from_add_issues)
      if new_cc_ids:
        impacted_issue.cc_ids.extend(new_cc_ids)
        amendments.append(
            tracker_bizobj.MakeCcAmendment(new_cc_ids, []))
      # Starrers gained from the merged-in issues bump this issue's count.
      new_starrers = list(
          GetNewIssueStarrers(cnxn, services, merged_from_add, issue_id))
      if new_starrers:
        impacted_issue.star_count += len(new_starrers)

    # A single MergedInto amendment covers both added and removed merge
    # sources, with refs resolved relative to this issue's project.
    if merged_from_add or merged_from_remove:
      merged_from_add_refs = services.issue.LookupIssueRefs(
          cnxn, merged_from_add).values()
      merged_from_remove_refs = services.issue.LookupIssueRefs(
          cnxn, merged_from_remove).values()
      amendments.append(
          tracker_bizobj.MakeMergedIntoAmendment(
              merged_from_add_refs, merged_from_remove_refs,
              default_project_name=impacted_issue.project_name))
    return amendments, new_starrers