Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 1 | # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 | # Use of this source code is governed by a BSD-style |
| 3 | # license that can be found in the LICENSE file or at |
| 4 | # https://developers.google.com/open-source/licenses/bsd |
| 5 | |
| 6 | """Helper functions and classes used by the Monorail Issue Tracker pages. |
| 7 | |
| 8 | This module has functions that are reused in multiple servlets or |
| 9 | other modules. |
| 10 | """ |
| 11 | from __future__ import print_function |
| 12 | from __future__ import division |
| 13 | from __future__ import absolute_import |
| 14 | |
| 15 | import collections |
| 16 | import itertools |
| 17 | import logging |
| 18 | import re |
| 19 | import time |
Adrià Vilanova Martínez | de94280 | 2022-07-15 14:06:55 +0200 | [diff] [blame] | 20 | from six.moves import urllib |
Copybara | 854996b | 2021-09-07 19:36:02 +0000 | [diff] [blame] | 21 | |
| 22 | from google.appengine.api import app_identity |
| 23 | |
| 24 | from six import string_types |
| 25 | |
| 26 | import settings |
| 27 | |
| 28 | from features import federated |
| 29 | from framework import authdata |
| 30 | from framework import exceptions |
| 31 | from framework import filecontent |
| 32 | from framework import framework_bizobj |
| 33 | from framework import framework_constants |
| 34 | from framework import framework_helpers |
| 35 | from framework import framework_views |
| 36 | from framework import permissions |
| 37 | from framework import sorting |
| 38 | from framework import template_helpers |
| 39 | from framework import urls |
| 40 | from project import project_helpers |
| 41 | from proto import tracker_pb2 |
| 42 | from services import client_config_svc |
| 43 | from tracker import field_helpers |
| 44 | from tracker import tracker_bizobj |
| 45 | from tracker import tracker_constants |
| 46 | |
| 47 | |
# HTML input field names for blocked on and blocking issue refs.
BLOCKED_ON = 'blocked_on'
BLOCKING = 'blocking'

# This string is used in HTML form element names to identify custom fields.
# E.g., a value for a custom field with field_id 12 would be specified in
# an HTML form element with name="custom_12".
_CUSTOM_FIELD_NAME_PREFIX = 'custom_'

# When the attachment quota gets within 1MB of the limit, stop offering
# users the option to attach files.
_SOFT_QUOTA_LEEWAY = 1024 * 1024

# Accessors for sorting built-in fields.  Each maps a sortable column name
# to a function that extracts a sort key from an Issue PB.
SORTABLE_FIELDS = {
    'project': lambda issue: issue.project_name,
    'id': lambda issue: issue.local_id,
    'owner': tracker_bizobj.GetOwnerId,  # And postprocessor
    'reporter': lambda issue: issue.reporter_id,  # And postprocessor
    'component': lambda issue: issue.component_ids,
    'cc': tracker_bizobj.GetCcIds,  # And postprocessor
    'summary': lambda issue: issue.summary.lower(),
    'stars': lambda issue: issue.star_count,
    'attachments': lambda issue: issue.attachment_count,
    'opened': lambda issue: issue.opened_timestamp,
    'closed': lambda issue: issue.closed_timestamp,
    'modified': lambda issue: issue.modified_timestamp,
    'status': tracker_bizobj.GetStatus,
    'blocked': lambda issue: bool(issue.blocked_on_iids),
    'blockedon': lambda issue: issue.blocked_on_iids or sorting.MAX_STRING,
    'blocking': lambda issue: issue.blocking_iids or sorting.MAX_STRING,
    'mergedinto': lambda issue: issue.merged_into or sorting.MAX_STRING,
    'ownermodified': lambda issue: issue.owner_modified_timestamp,
    'statusmodified': lambda issue: issue.status_modified_timestamp,
    'componentmodified': lambda issue: issue.component_modified_timestamp,
    'ownerlastvisit': tracker_bizobj.GetOwnerId,  # And postprocessor
    }

# Some fields take a user ID from the issue and then use that to index
# into a dictionary of user views, and then get a field of the user view
# as the value to sort key.
SORTABLE_FIELDS_POSTPROCESSORS = {
    'owner': lambda user_view: user_view.email,
    'reporter': lambda user_view: user_view.email,
    'cc': lambda user_view: user_view.email,
    'ownerlastvisit': lambda user_view: -user_view.user.last_visit_timestamp,
    }

# Here are some restriction labels to help people do the most common things
# that they might want to do with restrictions.
_FREQUENT_ISSUE_RESTRICTIONS = [
    (permissions.VIEW, permissions.EDIT_ISSUE,
     'Only users who can edit the issue may access it'),
    (permissions.ADD_ISSUE_COMMENT, permissions.EDIT_ISSUE,
     'Only users who can edit the issue may add comments'),
    ]

# These issue restrictions should be offered as examples whenever the project
# does not have any custom permissions in use already.
_EXAMPLE_ISSUE_RESTRICTIONS = [
    (permissions.VIEW, 'CoreTeam',
     'Custom permission CoreTeam is needed to access'),
    ]

# Namedtuples that hold data parsed from post_data.
# Components entered in the form: the raw string plus the classified
# component paths to add and to remove.
ParsedComponents = collections.namedtuple(
    'ParsedComponents', 'entered_str, paths, paths_remove')
# Custom field values keyed by field_id, including phase-specific values.
ParsedFields = collections.namedtuple(
    'ParsedFields',
    'vals, vals_remove, fields_clear, '
    'phase_vals, phase_vals_remove')
# Owner and CC users, both as entered strings and as looked-up user IDs.
ParsedUsers = collections.namedtuple(
    'ParsedUsers', 'owner_username, owner_id, cc_usernames, '
    'cc_usernames_remove, cc_ids, cc_ids_remove')
# Blocked-on/blocking refs: resolved issue IDs, dangling (Codesite) refs,
# and federated-tracker shortlinks.
ParsedBlockers = collections.namedtuple(
    'ParsedBlockers', 'entered_str, iids, dangling_refs, '
    'federated_ref_strings')
# A single hotlist reference; user_email is None for a short reference.
ParsedHotlistRef = collections.namedtuple(
    'ParsedHotlistRef', 'user_email, hotlist_name')
ParsedHotlists = collections.namedtuple(
    'ParsedHotlists', 'entered_str, hotlist_refs')
# Everything parsed from an issue entry/update form, aggregated.
ParsedIssue = collections.namedtuple(
    'ParsedIssue', 'summary, comment, is_description, status, users, labels, '
    'labels_remove, components, fields, template_name, attachments, '
    'kept_attachments, blocked_on, blocking, hotlists')
| 134 | |
def ParseIssueRequest(cnxn, post_data, services, errors, default_project_name):
  """Parse all the possible arguments out of the request.

  Args:
    cnxn: connection to SQL database.
    post_data: HTML form information.
    services: Connections to persistence layer.
    errors: object to accumulate validation error info.
    default_project_name: name of the project that contains the issue.

  Returns:
    A namedtuple with all parsed information. User IDs are looked up, but
    also the strings are returned to allow bouncing the user back to correct
    any errors.
  """
  summary = post_data.get('summary', '')
  comment = post_data.get('comment', '')
  # A non-empty 'description' field means this comment replaces the issue
  # description rather than being an ordinary comment.
  is_description = bool(post_data.get('description', ''))
  status = post_data.get('status', '')
  template_name = urllib.parse.unquote_plus(post_data.get('template_name', ''))
  component_str = post_data.get('components', '')
  # TODO: switch when convert /p to flask
  # label_strs = post_data.getlist('label')
  label_strs = post_data.getall('label')

  if is_description:
    # Highlight any lines that the user kept from the issue template.
    tmpl_txt = post_data.get('tmpl_txt', '')
    comment = MarkupDescriptionOnInput(comment, tmpl_txt)

  # Items prefixed with '-' are requests to remove that item.
  comp_paths, comp_paths_remove = _ClassifyPlusMinusItems(
      re.split('[,;\s]+', component_str))
  parsed_components = ParsedComponents(
      component_str, comp_paths, comp_paths_remove)
  labels, labels_remove = _ClassifyPlusMinusItems(label_strs)
  parsed_fields = _ParseIssueRequestFields(post_data)
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = _ParseIssueRequestAttachments(post_data)
  kept_attachments = _ParseIssueRequestKeptAttachments(post_data)
  parsed_users = _ParseIssueRequestUsers(cnxn, post_data, services)
  # Blocked-on and blocking refs share one parser; only field_name differs.
  parsed_blocked_on = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKED_ON)
  parsed_blocking = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKING)
  parsed_hotlists = _ParseHotlists(post_data)

  parsed_issue = ParsedIssue(
      summary, comment, is_description, status, parsed_users, labels,
      labels_remove, parsed_components, parsed_fields, template_name,
      attachments, kept_attachments, parsed_blocked_on, parsed_blocking,
      parsed_hotlists)
  return parsed_issue
| 186 | |
| 187 | |
def MarkupDescriptionOnInput(content, tmpl_text):
  """Return HTML for the content of an issue description or comment.

  Lines that the user kept verbatim from the issue template are
  highlighted so that they stand out from user-entered text.

  Args:
    content: the text submitted by the user; any user-entered markup
        has already been escaped.
    tmpl_text: the initial text that was put into the textarea.

  Returns:
    The description content text with template lines highlighted.
  """
  # Keep only the non-blank template lines, stripped of whitespace.
  tmpl_lines = [
      raw_line.strip() for raw_line in tmpl_text.split('\n')
      if raw_line.strip()]

  marked_lines = []
  for entered_line in content.split('\n'):
    marked_lines.append(_MarkupDescriptionLineOnInput(entered_line, tmpl_lines))
  return '\n'.join(marked_lines)
| 206 | |
| 207 | |
def _MarkupDescriptionLineOnInput(line, tmpl_lines):
  """Markup one line of an issue description that was just entered.

  Args:
    line: string containing one line of the user-entered comment.
    tmpl_lines: list of strings for the text of the template lines.

  Returns:
    The same user-entered line, or that line highlighted to
    indicate that it came from the issue template.
  """
  # Find the first template line that this entered line starts with, if any.
  matched = next(
      (tmpl_line for tmpl_line in tmpl_lines if line.startswith(tmpl_line)),
      None)
  if matched is None:
    return line
  # Bold only the template prefix; keep any user-added suffix plain.
  return '<b>' + matched + '</b>' + line[len(matched):]
| 224 | |
| 225 | |
def _ClassifyPlusMinusItems(add_remove_list):
  """Classify the given plus-or-minus items into add and remove lists."""
  # Strip and deduplicate first, then classify each unique item once.
  unique_items = {raw.strip() for raw in add_remove_list}
  add_strs = []
  remove_strs = []
  for item in unique_items:
    if item.startswith('-'):
      # A bare '-' is ignored; only keep removals with a non-empty payload.
      if item[1:]:
        remove_strs.append(item[1:])
    elif item:
      add_strs.append(item)
  return add_strs, remove_strs
| 232 | |
| 233 | |
def _ParseHotlists(post_data):
  """Parse the hotlists form field into a list of hotlist references."""
  entered_str = post_data.get('hotlists', '').strip()
  hotlist_refs = []
  for token in re.split('[,;\s]+', entered_str):
    if not token:
      continue
    owner_part, colon, name_part = token.partition(':')
    if colon and owner_part:
      # E-mail isn't empty; full reference.
      hotlist_refs.append(ParsedHotlistRef(owner_part, name_part))
    elif colon:
      # Short reference with a leading ':'.
      hotlist_refs.append(ParsedHotlistRef(None, name_part))
    else:
      # Short reference.
      hotlist_refs.append(ParsedHotlistRef(None, token))
  return ParsedHotlists(entered_str, hotlist_refs)
| 252 | |
| 253 | |
def _ParseIssueRequestFields(post_data):
  """Iterate over post_data and return custom field values found in it."""
  field_val_strs = {}
  field_val_strs_remove = {}
  phase_field_val_strs = collections.defaultdict(dict)
  phase_field_val_strs_remove = collections.defaultdict(dict)
  for key in post_data.keys():
    if not key.startswith(_CUSTOM_FIELD_NAME_PREFIX):
      continue
    # TODO: switch when convert /p to flask
    # val_strs = [v for v in post_data.getlist(key) if v]
    val_strs = [v for v in post_data.getall(key) if v]
    if not val_strs:
      continue
    suffix = key[len(_CUSTOM_FIELD_NAME_PREFIX):]
    try:
      field_id = int(suffix)
      phase_name = None
    except ValueError:  # key must be in format <field_id>_<phase_name>
      id_str, phase_name = suffix.split('_', 1)
      field_id = int(id_str)
    removing = post_data.get('op_' + key) == 'remove'
    if phase_name:
      target = (
          phase_field_val_strs_remove if removing else phase_field_val_strs)
      target[field_id][phase_name] = val_strs
    else:
      target = field_val_strs_remove if removing else field_val_strs
      target[field_id] = val_strs

  # TODO(jojwang): monorail:5154, no support for clearing phase field values.
  fields_clear = []
  op_prefix = 'op_' + _CUSTOM_FIELD_NAME_PREFIX
  for op_key in post_data.keys():
    if op_key.startswith(op_prefix) and post_data.get(op_key) == 'clear':
      fields_clear.append(int(op_key[len(op_prefix):]))

  return ParsedFields(
      field_val_strs, field_val_strs_remove, fields_clear,
      phase_field_val_strs, phase_field_val_strs_remove)
| 296 | |
| 297 | |
def _ParseIssueRequestAttachments(post_data):
  """Extract and clean-up any attached files from the post data.

  Args:
    post_data: dict w/ values from the user's HTTP POST form data.

  Returns:
    [(filename, filecontents, mimetype), ...] with items for each attachment.
  """
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = []
  for slot in range(1, 16):
    field_key = 'file%s' % slot
    if field_key not in post_data:
      continue
    item = post_data[field_key]
    if isinstance(item, string_types):
      continue
    if '\\' in item.filename:  # IE insists on giving us the whole path.
      item.filename = item.filename[item.filename.rindex('\\') + 1:]
    if not item.filename:
      continue  # Skip any FILE fields that were not filled in.
    attachments.append(
        (item.filename, item.value,
         filecontent.GuessContentTypeFromFilename(item.filename)))

  return attachments
| 323 | |
| 324 | |
def _ParseIssueRequestKeptAttachments(post_data):
  """Extract attachment ids for attachments kept when updating description.

  Args:
    post_data: dict w/ values from the user's HTTP POST form data.

  Returns:
    A list of int attachment ids for kept attachments.
  """
  # TODO: switch when convert /p to flask
  # kept_attachments = post_data.getlist('keep-attachment')
  return [int(aid) for aid in post_data.getall('keep-attachment')]
| 338 | |
| 339 | |
def _ParseIssueRequestUsers(cnxn, post_data, services):
  """Extract usernames from the POST data, categorize them, and look up IDs.

  Args:
    cnxn: connection to SQL database.
    post_data: dict w/ data from the HTTP POST.
    services: Services.

  Returns:
    A namedtuple (owner_username, owner_id, cc_usernames, cc_usernames_remove,
    cc_ids, cc_ids_remove), containing:
      - issue owner's name and user ID, if any
      - the list of all cc'd usernames
      - the user IDs to add or remove from the issue CC list.
    Any of these user IDs may be None if the corresponding username
    or email address is invalid.
  """
  # Get the user-entered values from post_data.
  cc_username_str = post_data.get('cc', '').lower()
  owner_email = post_data.get('owner', '').strip().lower()

  cc_usernames, cc_usernames_remove = _ClassifyPlusMinusItems(
      re.split('[,;\s]+', cc_username_str))

  # Figure out the email addresses to lookup and do the lookup.
  emails_to_lookup = cc_usernames + cc_usernames_remove
  if owner_email:
    emails_to_lookup.append(owner_email)
  all_user_ids = services.user.LookupUserIDs(
      cnxn, emails_to_lookup, autocreate=True)
  owner_id = (
      all_user_ids.get(owner_email) if owner_email
      else framework_constants.NO_USER_SPECIFIED)

  # Lookup the user IDs of the Cc addresses to add or remove.
  cc_ids = [all_user_ids.get(name) for name in cc_usernames if name]
  cc_ids_remove = [
      all_user_ids.get(name) for name in cc_usernames_remove if name]

  return ParsedUsers(
      owner_email, owner_id, cc_usernames, cc_usernames_remove, cc_ids,
      cc_ids_remove)
| 381 | |
| 382 | |
def _ParseBlockers(cnxn, post_data, services, errors, default_project_name,
                   field_name):
  """Parse input for issues that the current issue is blocking/blocked on.

  Args:
    cnxn: connection to SQL database.
    post_data: dict w/ values from the user's HTTP POST.
    services: connections to backend services.
    errors: object to accumulate validation error info.
    default_project_name: name of the project that contains the issue.
    field_name: string HTML input field name, e.g., BLOCKED_ON or BLOCKING.

  Returns:
    A namedtuple with the user input string, a sorted list of resolved issue
    IDs, dangling (recognized Codesite) refs, and federated ref strings.
  """
  entered_str = post_data.get(field_name, '').strip()
  blocker_iids = []
  dangling_ref_tuples = []
  federated_ref_strings = []

  # The current issue's ID is the same for every ref; compute it once
  # instead of re-parsing it on each loop iteration.
  current_issue_id = int(post_data.get('id')) if post_data.get('id') else -1

  issue_ref = None
  for ref_str in re.split(r'[,;\s]+', entered_str):
    # Handle federated references (e.g., shortlinks to other trackers).
    if federated.IsShortlinkValid(ref_str):
      federated_ref_strings.append(ref_str)
      continue

    try:
      issue_ref = tracker_bizobj.ParseIssueRef(ref_str)
    except ValueError:
      setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
      break

    # ParseIssueRef returns a falsy value for blank tokens; skip them.
    if not issue_ref:
      continue

    blocker_project_name, blocker_issue_id = issue_ref
    if not blocker_project_name:
      blocker_project_name = default_project_name

    # Detect and report if the same issue was specified.
    if (blocker_issue_id == current_issue_id and
        blocker_project_name == default_project_name):
      setattr(errors, field_name, 'Cannot be %s the same issue' % field_name)
      break

    ref_projects = services.project.GetProjectsByName(
        cnxn, set([blocker_project_name]))
    blocker_iid, _misses = services.issue.ResolveIssueRefs(
        cnxn, ref_projects, default_project_name, [issue_ref])
    if not blocker_iid:
      if blocker_project_name in settings.recognized_codesite_projects:
        # We didn't find the issue, but it had a explicitly-specified project
        # which we know is on Codesite. Allow it as a dangling reference.
        dangling_ref_tuples.append(issue_ref)
        continue
      else:
        # Otherwise, it doesn't exist, so report it.
        setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
        break
    # Avoid duplicate entries while preserving all resolved IDs.
    if blocker_iid[0] not in blocker_iids:
      blocker_iids.extend(blocker_iid)

  blocker_iids.sort()
  dangling_ref_tuples.sort()
  return ParsedBlockers(entered_str, blocker_iids, dangling_ref_tuples,
                        federated_ref_strings)
| 451 | |
| 452 | |
def PairDerivedValuesWithRuleExplanations(
    proposed_issue, traces, derived_users_by_id):
  """Pair up values and explanations into JSON objects."""

  def _pair(field_id, trace_key, value):
    # Each JSON object carries the derived value plus the rule trace
    # (if any) that explains why it was derived.
    return {'value': value, 'why': traces.get((field_id, trace_key))}

  derived_labels_and_why = [
      _pair(tracker_pb2.FieldID.LABELS, lab, lab)
      for lab in proposed_issue.derived_labels]

  # Map user IDs to display names, skipping users with no display name.
  display_names = {}
  for user_id, user_view in derived_users_by_id.items():
    if user_view.display_name:
      display_names[user_id] = user_view.display_name

  derived_owner_and_why = []
  owner_id = proposed_issue.derived_owner_id
  if owner_id:
    derived_owner_and_why = [
        _pair(tracker_pb2.FieldID.OWNER, owner_id, display_names[owner_id])]

  derived_cc_and_why = [
      _pair(tracker_pb2.FieldID.CC, cc_id, display_names[cc_id])
      for cc_id in proposed_issue.derived_cc_ids
      if cc_id in display_names]

  warnings_and_why = [
      _pair(tracker_pb2.FieldID.WARNING, warning, warning)
      for warning in proposed_issue.derived_warnings]

  errors_and_why = [
      _pair(tracker_pb2.FieldID.ERROR, error, error)
      for error in proposed_issue.derived_errors]

  return (derived_labels_and_why, derived_owner_and_why, derived_cc_and_why,
          warnings_and_why, errors_and_why)
| 490 | |
| 491 | |
def IsValidIssueOwner(cnxn, project, owner_id, services):
  """Return True if the given user ID can be an issue owner.

  It is OK to have 0 for the owner_id; that simply means that the issue is
  unassigned.

  Args:
    cnxn: connection to SQL database.
    project: the current Project PB.
    owner_id: the user ID of the proposed issue owner.
    services: connections to backends.

  Returns:
    A pair (valid, err_msg). valid is True if the given user ID can be an
    issue owner. err_msg is an error message string to display to the user
    if valid == False, and is None if valid == True.
  """
  # An issue is always allowed to have no owner specified.
  if owner_id == framework_constants.NO_USER_SPECIFIED:
    return True, None

  try:
    auth = authdata.AuthData.FromUserID(cnxn, owner_id, services)
    is_member = framework_bizobj.UserIsInProject(project, auth.effective_ids)
  except exceptions.NoSuchUserException:
    return False, 'Issue owner user ID not found.'
  if not is_member:
    return False, 'Issue owner must be a project member.'

  # User groups may not own issues, only individual accounts.
  group_ids = services.usergroup.DetermineWhichUserIDsAreGroups(
      cnxn, [owner_id])
  if owner_id in group_ids:
    return False, 'Issue owner cannot be a user group.'

  return True, None
| 526 | |
| 527 | |
def GetAllowedOpenedAndClosedIssues(mr, issue_ids, services):
  """Get filtered lists of open and closed issues identified by issue_ids.

  Results are filtered to only the issues that the user is allowed to
  view, e.g., we only auto-link to issues that the user would be able to
  see if they clicked the link.

  Args:
    mr: commonly used info parsed from the request.
    issue_ids: list of int issue IDs for the target issues.
    services: connection to issue, config, and project persistence layers.

  Returns:
    Two lists of issues that the user is allowed to view: one for open
    issues and one for closed issues.
  """
  opened, closed = services.issue.GetOpenAndClosedIssues(mr.cnxn, issue_ids)
  return GetAllowedIssues(mr, [opened, closed], services)
| 547 | |
| 548 | |
def GetAllowedIssues(mr, issue_groups, services):
  """Filter lists of issues identified by issue_groups.

  Args:
    mr: commonly used info parsed from the request.
    issue_groups: list of lists of issues to filter.
    services: connection to issue, config, and project persistence layers.

  Returns:
    A list containing one filtered list of issues per input group.
  """
  # Gather the projects and configs for every issue in every group once.
  every_issue = itertools.chain.from_iterable(issue_groups)
  project_dict = GetAllIssueProjects(mr.cnxn, every_issue, services.project)
  config_dict = services.config.GetProjectConfigs(
      mr.cnxn, list(project_dict.keys()))

  filtered_groups = []
  for group in issue_groups:
    filtered_groups.append(
        FilterOutNonViewableIssues(
            mr.auth.effective_ids, mr.auth.user_pb, project_dict, config_dict,
            group))
  return filtered_groups
| 569 | |
| 570 | |
def MakeViewsForUsersInIssues(cnxn, issue_list, user_service, omit_ids=None):
  """Lookup all the users involved in any of the given issues.

  Args:
    cnxn: connection to SQL database.
    issue_list: list of Issue PBs from a result query.
    user_service: Connection to User backend storage.
    omit_ids: a list of user_ids to omit, e.g., because we already have them.

  Returns:
    A dictionary {user_id: user_view, ...} for all the users involved
    in the given issues.
  """
  participant_ids = tracker_bizobj.UsersInvolvedInIssues(issue_list)
  if omit_ids:
    participant_ids.difference_update(omit_ids)

  # TODO(jrobbins): consider caching View objects as well.
  return framework_views.MakeAllUserViews(cnxn, user_service, participant_ids)
| 593 | |
| 594 | |
def FormatIssueListURL(
    mr, config, absolute=True, project_names=None, **kwargs):
  """Format a link back to list view as configured by user."""
  if project_names is None:
    project_names = [mr.project_name]

  # A "jump" query (just an issue number) links to that one issue.
  if tracker_constants.JUMP_RE.match(mr.query):
    kwargs['q'] = 'id=%s' % mr.query
    kwargs['can'] = 1  # The specified issue might be closed.
  else:
    kwargs['q'] = mr.query
    if mr.can and mr.can != 2:
      kwargs['can'] = mr.can

  # Only carry over non-default display options.
  def_col_spec = config.default_col_spec
  if mr.col_spec and mr.col_spec != def_col_spec:
    kwargs['colspec'] = mr.col_spec
  if mr.sort_spec:
    kwargs['sort'] = mr.sort_spec
  if mr.group_by_spec:
    kwargs['groupby'] = mr.group_by_spec
  if mr.start:
    kwargs['start'] = mr.start
  if mr.num != tracker_constants.DEFAULT_RESULTS_PER_PAGE:
    kwargs['num'] = mr.num

  if len(project_names) == 1:
    url = '/p/%s%s' % (project_names[0], urls.ISSUE_LIST)
  else:
    url = urls.ISSUE_LIST
    kwargs['projects'] = ','.join(sorted(project_names))

  param_strings = []
  for param, value in kwargs.items():
    encoded_value = urllib.parse.quote((u'%s' % value).encode('utf-8'))
    param_strings.append('%s=%s' % (param, encoded_value))
  if param_strings:
    url += '?' + '&'.join(sorted(param_strings))
  if absolute:
    url = '%s://%s%s' % (mr.request.scheme, mr.request.host, url)

  return url
| 635 | |
| 636 | |
def FormatRelativeIssueURL(project_name, path, **kwargs):
  """Format a URL to get to an issue in the named project.

  Args:
    project_name: string name of the project containing the issue.
    path: string servlet path, e.g., from framework/urls.py.
    **kwargs: additional query-string parameters to include in the URL.

  Returns:
    A URL string.
  """
  servlet_path = '/p/%s%s' % (project_name, path)
  return framework_helpers.FormatURL(None, servlet_path, **kwargs)
| 650 | |
| 651 | |
def FormatCrBugURL(project_name, local_id):
  """Format a short URL to get to an issue in the named project.

  Args:
    project_name: string name of the project containing the issue.
    local_id: int local ID of the issue.

  Returns:
    A URL string.
  """
  # crbug.com short links only make sense for the production app; other
  # deployments fall back to a regular issue detail URL.
  if app_identity.get_application_id() != 'monorail-prod':
    return FormatRelativeIssueURL(project_name, urls.ISSUE_DETAIL, id=local_id)

  if project_name == 'chromium':
    return 'https://crbug.com/%d' % local_id
  return 'https://crbug.com/%s/%d' % (project_name, local_id)
| 670 | |
| 671 | |
def ComputeNewQuotaBytesUsed(project, attachments):
  """Add the given attachments to the project's attachment quota usage.

  Args:
    project: Project PB for the project being updated.
    attachments: a list of (filename, content, mimetype) tuples being
        added to an issue.

  Returns:
    The new number of bytes used.

  Raises:
    OverAttachmentQuota: If project would go over quota.
  """
  total_attach_size = sum(
      len(content) for _filename, content, _mimetype in attachments)

  new_bytes_used = project.attachment_bytes_used + total_attach_size
  # Fall back to the system-wide hard limit when no custom quota is set.
  quota = (project.attachment_quota or
           tracker_constants.ISSUE_ATTACHMENTS_QUOTA_HARD)
  if new_bytes_used > quota:
    raise exceptions.OverAttachmentQuota(new_bytes_used - quota)
  return new_bytes_used
| 695 | |
| 696 | |
def IsUnderSoftAttachmentQuota(project):
  """Check the project's attachment quota against the soft quota limit.

  If there is a custom quota on the project, this will check against
  that instead of the system-wide default quota.

  Args:
    project: Project PB for the project to examine.

  Returns:
    True if the project is under quota, false otherwise.
  """
  if project.attachment_quota:
    # Leave some leeway below the custom hard quota.
    quota = project.attachment_quota - _SOFT_QUOTA_LEEWAY
  else:
    quota = tracker_constants.ISSUE_ATTACHMENTS_QUOTA_SOFT

  return project.attachment_bytes_used < quota
| 714 | |
| 715 | |
def GetAllIssueProjects(cnxn, issues, project_service):
  """Get all the projects that the given issues belong to.

  Args:
    cnxn: connection to SQL database.
    issues: list of issues, which may come from different projects.
    project_service: connection to project persistence layer.

  Returns:
    A dictionary {project_id: project} of all the projects that
    any of the given issues belongs to.
  """
  needed_project_ids = set()
  for issue in issues:
    needed_project_ids.add(issue.project_id)
  return project_service.GetProjects(cnxn, needed_project_ids)
| 731 | |
| 732 | |
def GetPermissionsInAllProjects(user, effective_ids, projects):
  """Look up the permissions for the given user in each project."""
  perms_by_pid = {}
  for project in projects:
    perms_by_pid[project.project_id] = permissions.GetPermissions(
        user, effective_ids, project)
  return perms_by_pid
| 739 | |
| 740 | |
def FilterOutNonViewableIssues(
    effective_ids, user, project_dict, config_dict, issues):
  """Return a filtered list of issues that the user can view."""
  perms_dict = GetPermissionsInAllProjects(
      user, effective_ids, list(project_dict.values()))

  # Projects in which the user cannot view anything at all.
  denied_project_ids = set()
  for pid, proj in project_dict.items():
    if not permissions.CanView(effective_ids, perms_dict[pid], proj, []):
      denied_project_ids.add(pid)

  viewable = []
  for issue in issues:
    if issue.deleted:
      continue
    if issue.project_id in denied_project_ids:
      continue

    # Unrestricted issues in viewable projects are always visible; only
    # restricted issues need the per-issue permission check.
    if permissions.HasRestrictions(issue):
      config = config_dict.get(issue.project_id, config_dict.get('harmonized'))
      granted_perms = tracker_bizobj.GetGrantedPerms(
          issue, effective_ids, config)
      if not permissions.CanViewIssue(
          effective_ids, perms_dict[issue.project_id],
          project_dict[issue.project_id], issue, granted_perms=granted_perms):
        continue

    viewable.append(issue)

  return viewable
| 771 | |
| 772 | |
def MeansOpenInProject(status, config):
  """Return true if this status means that the issue is still open.

  This defaults to true if we could not find a matching status.

  Args:
    status: issue status string. E.g., 'New'.
    config: the config of the current project.

  Returns:
    Boolean True if the status means that the issue is open.
  """
  target = status.lower()
  # Scan the project's known statuses; an unknown status counts as open.
  matches = (
      wks.means_open for wks in config.well_known_statuses
      if wks.status.lower() == target)
  return next(matches, True)
| 794 | |
| 795 | |
def IsNoisy(num_comments, num_starrers):
  """Return True if this is a "noisy" issue that would send a ton of emails.

  The rule is that a very active issue with a large number of comments
  and starrers will only send notification when a comment (or change)
  is made by a project member.

  Args:
    num_comments: int number of comments on issue so far.
    num_starrers: int number of users who starred the issue.

  Returns:
    True if we will not bother starrers with an email notification for
    changes made by non-members.
  """
  has_many_comments = (
      num_comments >= tracker_constants.NOISY_ISSUE_COMMENT_COUNT)
  has_many_starrers = (
      num_starrers >= tracker_constants.NOISY_ISSUE_STARRER_COUNT)
  return has_many_comments and has_many_starrers
| 813 | |
| 814 | |
def MergeCCsAndAddComment(services, mr, issue, merge_into_issue):
  """Modify the CC field of the target issue and add a comment to it.

  Convenience wrapper around the multi-issue variant for a single issue.
  """
  single_issue_list = [issue]
  return MergeCCsAndAddCommentMultipleIssues(
      services, mr, single_issue_list, merge_into_issue)
| 819 | |
| 820 | |
def MergeCCsAndAddCommentMultipleIssues(
    services, mr, issues, merge_into_issue):
  """Modify the CC field of the target issue and add a comment to it.

  Args:
    services: connections to backend services.
    mr: commonly used info parsed from the request.
    issues: the source issues being merged away.
    merge_into_issue: the Issue PB that receives the CCs and the comment.

  Returns:
    The IssueComment PB that was added to the target issue.
  """
  comment_lines = []
  for issue in issues:
    # Same-project refs are just the local id; cross-project refs are
    # qualified with the project name.
    if issue.project_name == merge_into_issue.project_name:
      issue_ref_str = '%d' % issue.local_id
    else:
      issue_ref_str = '%s:%d' % (issue.project_name, issue.local_id)
    comment_lines.append(
        'Issue %s has been merged into this issue.' % issue_ref_str)
  merge_comment = '\n'.join(comment_lines)

  add_cc = _ComputeNewCcsFromIssueMerge(merge_into_issue, issues)

  config = services.config.GetProjectConfig(
      mr.cnxn, merge_into_issue.project_id)
  delta = tracker_pb2.IssueDelta(cc_ids_add=add_cc)
  _, merge_comment_pb = services.issue.DeltaUpdateIssue(
      mr.cnxn, services, mr.auth.user_id, merge_into_issue.project_id,
      config, merge_into_issue, delta, index_now=False, comment=merge_comment)

  return merge_comment_pb
| 844 | |
| 845 | |
def GetAttachmentIfAllowed(mr, services):
  """Retrieve the requested attachment, or raise an appropriate exception.

  Args:
    mr: commonly used info parsed from the request.
    services: connections to backend services.

  Returns:
    The requested Attachment PB, and the Issue that it belongs to.

  Raises:
    NoSuchAttachmentException: attachment was not found or was marked deleted.
    NoSuchIssueException: issue that contains attachment was not found.
    PermissionException: the user is not allowed to view the attachment.
  """
  # Removed a dead `attachment = None` assignment that was immediately
  # overwritten by the call below.
  attachment, cid, issue_id = services.issue.GetAttachmentAndContext(
      mr.cnxn, mr.aid)

  # The user must be able to view the issue that owns the attachment.
  issue = services.issue.GetIssue(mr.cnxn, issue_id)
  config = services.config.GetProjectConfig(mr.cnxn, issue.project_id)
  granted_perms = tracker_bizobj.GetGrantedPerms(
      issue, mr.auth.effective_ids, config)
  permit_view = permissions.CanViewIssue(
      mr.auth.effective_ids, mr.perms, mr.project, issue,
      granted_perms=granted_perms)
  if not permit_view:
    raise permissions.PermissionException('Cannot view attachment\'s issue')

  # The user must also be able to view the specific comment that carries
  # the attachment.
  comment = services.issue.GetComment(mr.cnxn, cid)
  commenter = services.user.GetUser(mr.cnxn, comment.user_id)
  issue_perms = permissions.UpdateIssuePermissions(
      mr.perms, mr.project, issue, mr.auth.effective_ids,
      granted_perms=granted_perms)
  can_view_comment = permissions.CanViewComment(
      comment, commenter, mr.auth.user_id, issue_perms)
  if not can_view_comment:
    raise permissions.PermissionException('Cannot view attachment\'s comment')

  return attachment, issue
| 887 | |
| 888 | |
def LabelsMaskedByFields(config, field_names, trim_prefix=False):
  """Return a list of EZTItems for labels that would be masked by fields."""
  return _LabelsMaskedOrNot(
      config, field_names, invert=False, trim_prefix=trim_prefix)
| 892 | |
| 893 | |
def LabelsNotMaskedByFields(config, field_names, trim_prefix=False):
  """Return a list of EZTItems for labels that would not be masked."""
  return _LabelsMaskedOrNot(
      config, field_names, trim_prefix=trim_prefix, invert=True)
| 898 | |
| 899 | |
def _LabelsMaskedOrNot(config, field_names, invert=False, trim_prefix=False):
  """Return EZTItems for labels that'd be masked. Or not, when invert=True.

  Args:
    config: project issue config containing the well-known labels.
    field_names: list of custom field names (lowercased internally) that
      may mask labels.
    invert: when True, select the labels that are NOT masked instead.
    trim_prefix: when True, strip the masking prefix and its '-' separator
      from each returned label name.

  Returns:
    A list of EZTItems, one per selected well-known label, in config order.
  """
  field_names = [fn.lower() for fn in field_names]
  result = []
  for wkl in config.well_known_labels:
    masked_by = tracker_bizobj.LabelIsMaskedByField(wkl.label, field_names)
    # Select labels that are masked (default) or unmasked (invert=True).
    if (masked_by and not invert) or (not masked_by and invert):
      display_name = wkl.label
      if trim_prefix:
        # NOTE(review): masked_by is presumably the matching field-name
        # prefix. In the invert=True case masked_by is falsy, so trim_prefix
        # would fail here — callers appear to never combine them; confirm.
        display_name = display_name[len(masked_by) + 1:]
      result.append(template_helpers.EZTItem(
          name=display_name,
          name_padded=display_name.ljust(20),
          # Deprecated labels are rendered commented-out with a '#'.
          commented='#' if wkl.deprecated else '',
          docstring=wkl.label_docstring,
          docstring_short=template_helpers.FitUnsafeText(
              wkl.label_docstring, 40),
          idx=len(result)))

  return result
| 920 | |
| 921 | |
def LookupComponentIDs(component_paths, config, errors=None):
  """Look up the IDs of the specified components in the given config.

  Unknown component paths are reported via errors.components when an
  errors object is supplied, otherwise they are just logged.
  """
  component_ids = []
  for path in component_paths:
    if not path:
      continue
    cd = tracker_bizobj.FindComponentDef(path, config)
    if not cd:
      error_text = 'Unknown component %s' % path
      if errors:
        errors.components = error_text
      else:
        logging.info(error_text)
    else:
      component_ids.append(cd.component_id)

  return component_ids
| 939 | |
| 940 | |
def ParsePostDataUsers(cnxn, pd_users_str, user_service):
  """Parse all the usernames from a users string found in a post data.

  Args:
    cnxn: connection to SQL database.
    pd_users_str: delimited string of user emails from the posted form.
    user_service: connection to user persistence layer.

  Returns:
    A pair (user_ids, pd_users_str): the user IDs for the listed emails
    (auto-creating accounts as needed) and the original unparsed string.
  """
  # Use a raw string so '\s' is a regex escape, not an invalid string
  # escape (a DeprecationWarning, and eventually an error, in Python 3).
  emails, _remove = _ClassifyPlusMinusItems(re.split(r'[,;\s]+', pd_users_str))
  users_ids_by_email = user_service.LookupUserIDs(cnxn, emails, autocreate=True)
  user_ids = [users_ids_by_email[username] for username in emails if username]
  return user_ids, pd_users_str
| 947 | |
| 948 | |
def FilterIssueTypes(config):
  """Return a list of well-known issue types.

  An issue type is the suffix of any well-known label of the form
  'Type-<name>' (matched case-insensitively).
  """
  return [
      wk_label.label.split('-', 1)[1]
      for wk_label in config.well_known_labels
      if wk_label.label.lower().startswith('type-')]
| 958 | |
| 959 | |
def ParseMergeFields(
    cnxn, services, project_name, post_data, status, config, issue, errors):
  """Parse info that identifies the issue to merge into, if any.

  Args:
    cnxn: connection to SQL database.
    services: connections to backend services.
    project_name: name of the project the edited issue is in.
    post_data: HTTP POST form data containing the 'merge_into' field.
    status: issue status string selected by the user.
    config: ProjectIssueConfig for the current project.
    issue: the Issue PB being edited.
    errors: object whose merge_into_id attribute is set on user input errors.

  Returns:
    A pair (merge_into_text, merge_into_issue): the user-supplied reference
    string and the Issue PB to merge into, or None when the status does not
    offer merging or the reference was missing or invalid.
  """
  merge_into_text = ''
  merge_into_ref = None
  merge_into_issue = None

  # Merging is only offered for statuses configured in statuses_offer_merge.
  if status not in config.statuses_offer_merge:
    return '', None

  merge_into_text = post_data.get('merge_into', '')
  if merge_into_text:
    try:
      merge_into_ref = tracker_bizobj.ParseIssueRef(merge_into_text)
    except ValueError:
      logging.info('merge_into not an int: %r', merge_into_text)
      errors.merge_into_id = 'Please enter a valid issue ID'

  if not merge_into_ref:
    errors.merge_into_id = 'Please enter an issue ID'
    return merge_into_text, None

  # A ref parses to (project_name, local_id); an empty project name means
  # the current project.
  merge_into_project_name, merge_into_id = merge_into_ref
  if (merge_into_id == issue.local_id and
      (merge_into_project_name == project_name or
       not merge_into_project_name)):
    logging.info('user tried to merge issue into itself: %r', merge_into_ref)
    errors.merge_into_id = 'Cannot merge issue into itself'
    return merge_into_text, None

  project = services.project.GetProjectByName(
      cnxn, merge_into_project_name or project_name)
  try:
    # Because we will modify this issue, load from DB rather than cache.
    merge_into_issue = services.issue.GetIssueByLocalID(
        cnxn, project.project_id, merge_into_id, use_cache=False)
  except Exception:
    # Any lookup failure is reported to the user as a missing issue.
    logging.info('merge_into issue not found: %r', merge_into_ref)
    errors.merge_into_id = 'No such issue'
    return merge_into_text, None

  return merge_into_text, merge_into_issue
| 1002 | |
| 1003 | |
def GetNewIssueStarrers(cnxn, services, issue_ids, merge_into_iid):
  # type: (MonorailConnection, Services, Sequence[int], int) ->
  #     Collection[int]
  """Get starrers of current issue who have not starred the target issue."""
  starrers_by_source_iid = services.issue_star.LookupItemsStarrers(
      cnxn, issue_ids)
  source_starrers = set()
  for starrer_ids in starrers_by_source_iid.values():
    source_starrers.update(starrer_ids)
  target_starrers = set(
      services.issue_star.LookupItemStarrers(cnxn, merge_into_iid))
  return source_starrers - target_starrers
| 1015 | |
| 1016 | |
def AddIssueStarrers(
    cnxn, services, mr, merge_into_iid, merge_into_project, new_starrers):
  """Merge all the starrers for the current issue into the target issue."""
  # Fall back to the request's project when no target project was given.
  target_project = merge_into_project if merge_into_project else mr.project
  config = services.config.GetProjectConfig(mr.cnxn, target_project.project_id)
  services.issue_star.SetStarsBatch(
      cnxn, services, config, merge_into_iid, new_starrers, True)
| 1024 | |
| 1025 | |
def IsMergeAllowed(merge_into_issue, mr, services):
  """Check to see if user has permission to merge with specified issue."""
  merge_into_project = services.project.GetProjectByName(
      mr.cnxn, merge_into_issue.project_name)
  merge_into_config = services.config.GetProjectConfig(
      mr.cnxn, merge_into_project.project_id)
  merge_granted_perms = tracker_bizobj.GetGrantedPerms(
      merge_into_issue, mr.auth.effective_ids, merge_into_config)

  def _CanUse(perm):
    # Check a single permission against the target issue's project and
    # restriction labels.
    return mr.perms.CanUsePerm(
        perm, mr.auth.effective_ids, merge_into_project,
        permissions.GetRestrictions(merge_into_issue),
        granted_perms=merge_granted_perms)

  # The user needs both view and edit rights on the target issue.
  merge_view_allowed = _CanUse(permissions.VIEW)
  merge_edit_allowed = _CanUse(permissions.EDIT_ISSUE)
  return merge_view_allowed and merge_edit_allowed
| 1045 | |
| 1046 | |
def GetVisibleMembers(mr, project, services):
  """Return a dict of project member views visible to the requesting user.

  Args:
    mr: commonly used info parsed from the request.
    project: Project PB for the project whose members are being listed.
    services: connections to backend services.

  Returns:
    A dict {user_id: UserView} of members the user may see, excluding
    service accounts, users who opted out of autocomplete, and users
    with obscured email addresses.
  """
  all_member_ids = project_helpers.AllProjectMembers(project)

  # Some member IDs are actually user groups; identify them.
  all_group_ids = services.usergroup.DetermineWhichUserIDsAreGroups(
      mr.cnxn, all_member_ids)

  (ac_exclusion_ids, no_expand_ids
  ) = services.project.GetProjectAutocompleteExclusion(
      mr.cnxn, project.project_id)

  # Only expand groups that are not configured as no-expand.
  group_ids_to_expand = [
      gid for gid in all_group_ids if gid not in no_expand_ids]

  # TODO(jrobbins): Normally, users will be allowed view the members
  # of any user group if the project From: email address is listed
  # as a group member, as well as any group that they are personally
  # members of.
  member_ids, owner_ids = services.usergroup.LookupVisibleMembers(
      mr.cnxn, group_ids_to_expand, mr.perms, mr.auth.effective_ids, services)
  # Flatten group memberships and ownerships into one set of indirect users.
  indirect_user_ids = set()
  for gids in member_ids.values():
    indirect_user_ids.update(gids)
  for gids in owner_ids.values():
    indirect_user_ids.update(gids)

  visible_member_ids = _FilterMemberData(
      mr, project.owner_ids, project.committer_ids, project.contributor_ids,
      indirect_user_ids, project)

  # Collapse linked child accounts when their parent is already listed.
  visible_member_ids = _MergeLinkedMembers(
      mr.cnxn, services.user, visible_member_ids)

  visible_member_views = framework_views.MakeAllUserViews(
      mr.cnxn, services.user, visible_member_ids, group_ids=all_group_ids)
  framework_views.RevealAllEmailsToMembers(
      mr.cnxn, services, mr.auth, visible_member_views, project)

  # Filter out service accounts
  service_acct_emails = set(
      client_config_svc.GetClientConfigSvc().GetClientIDEmails()[1])
  visible_member_views = {
      m.user_id: m
      for m in visible_member_views.values()
      # Hide service accounts from autocomplete.
      if not framework_helpers.IsServiceAccount(
          m.email, client_emails=service_acct_emails)
      # Hide users who opted out of autocomplete.
      and not m.user_id in ac_exclusion_ids
      # Hide users who have obscured email addresses.
      and not m.obscure_email
  }

  return visible_member_views
| 1100 | |
| 1101 | |
| 1102 | def _MergeLinkedMembers(cnxn, user_service, user_ids): |
| 1103 | """Remove any linked child accounts if the parent would also be shown.""" |
| 1104 | all_ids = set(user_ids) |
| 1105 | users_by_id = user_service.GetUsersByIDs(cnxn, user_ids) |
| 1106 | result = [uid for uid in user_ids |
| 1107 | if users_by_id[uid].linked_parent_id not in all_ids] |
| 1108 | return result |
| 1109 | |
| 1110 | |
def _FilterMemberData(
    mr, owner_ids, committer_ids, contributor_ids, indirect_member_ids,
    project):
  """Return a filtered list of members that the user can view.

  In most projects, everyone can view the entire member list. But,
  some projects are configured to only allow project owners to see
  all members. In those projects, committers and contributors do not
  see any contributors. Regardless of how the project is configured
  or the role that the user plays in the current project, we include
  any indirect members through user groups that the user has access
  to view.

  Args:
    mr: Commonly used info parsed from the HTTP request.
    owner_ids: list of user IDs for project owners.
    committer_ids: list of user IDs for project committers.
    contributor_ids: list of user IDs for project contributors.
    indirect_member_ids: list of user IDs for users who have
      an indirect role in the project via a user group, and that the
      logged in user is allowed to see.
    project: the Project we're interested in.

  Returns:
    A sorted list of visible member user IDs: owners, committers, and
    permitted indirect members are always included; contributors are
    included only when the user may view the contributor list.
  """
  visible_members_ids = set()

  # Everyone can view owners and committers
  visible_members_ids.update(owner_ids)
  visible_members_ids.update(committer_ids)

  # The list of indirect members is already limited to ones that the user
  # is allowed to see according to user group settings.
  visible_members_ids.update(indirect_member_ids)

  # If the user is allowed to view the list of contributors, add those too.
  if permissions.CanViewContributorList(mr, project):
    visible_members_ids.update(contributor_ids)

  return sorted(visible_members_ids)
| 1155 | |
| 1156 | |
def GetLabelOptions(config, custom_permissions):
  """Prepares label options for autocomplete.

  Combines the project's non-masked, non-deprecated well-known labels with
  restriction-label choices built from standard and custom permissions.
  """
  # Labels masked by active enum fields are offered as field values instead.
  enum_field_names = []
  for fd in config.field_defs:
    if fd.is_deleted:
      continue
    if fd.field_type is tracker_pb2.FieldTypes.ENUM_TYPE:
      enum_field_names.append(fd.field_name)

  labels = [
      {'name': wkl.name, 'doc': wkl.docstring}
      for wkl in LabelsNotMaskedByFields(config, enum_field_names)
      if not wkl.commented]

  frequent_restrictions = list(_FREQUENT_ISSUE_RESTRICTIONS)
  if not custom_permissions:
    frequent_restrictions += _EXAMPLE_ISSUE_RESTRICTIONS

  restriction_choices = _BuildRestrictionChoices(
      frequent_restrictions, permissions.STANDARD_ISSUE_PERMISSIONS,
      custom_permissions)
  labels.extend(restriction_choices)

  return labels
| 1181 | |
| 1182 | |
| 1183 | def _BuildRestrictionChoices(freq_restrictions, actions, custom_permissions): |
| 1184 | """Return a list of autocompletion choices for restriction labels. |
| 1185 | |
| 1186 | Args: |
| 1187 | freq_restrictions: list of (action, perm, doc) tuples for restrictions |
| 1188 | that are frequently used. |
| 1189 | actions: list of strings for actions that are relevant to the current |
| 1190 | artifact. |
| 1191 | custom_permissions: list of strings with custom permissions for the project. |
| 1192 | |
| 1193 | Returns: |
| 1194 | A list of dictionaries [{'name': 'perm name', 'doc': 'docstring'}, ...] |
| 1195 | suitable for use in a JSON feed to our JS autocompletion functions. |
| 1196 | """ |
| 1197 | choices = [] |
| 1198 | |
| 1199 | for action, perm, doc in freq_restrictions: |
| 1200 | choices.append({ |
| 1201 | 'name': 'Restrict-%s-%s' % (action, perm), |
| 1202 | 'doc': doc, |
| 1203 | }) |
| 1204 | |
| 1205 | for action in actions: |
| 1206 | for perm in custom_permissions: |
| 1207 | choices.append({ |
| 1208 | 'name': 'Restrict-%s-%s' % (action, perm), |
| 1209 | 'doc': 'Permission %s needed to use %s' % (perm, action), |
| 1210 | }) |
| 1211 | |
| 1212 | return choices |
| 1213 | |
| 1214 | |
def FilterKeptAttachments(
    is_description, kept_attachments, comments, approval_id):
  """Filter kept attachments to be a subset of last description's attachments.

  Args:
    is_description: bool, if the comment is a change to the issue description.
    kept_attachments: list of ints with the attachment ids for attachments
      kept from previous descriptions, if the comment is a change to the
      issue description.
    comments: list of IssueComment PBs for the issue we want to edit.
    approval_id: int id of the APPROVAL_TYPE fielddef, if we're editing an
      approval description, or None otherwise.

  Returns:
    A list of kept_attachment ids that are a subset of the last description.
  """
  if not is_description:
    return None

  # Walk comments newest-first to find the most recent matching description
  # and collect the ids of its attachments.
  allowed_ids = set()
  for comment in reversed(comments):
    if comment.is_description and comment.approval_id == approval_id:
      allowed_ids = {a.attachment_id for a in comment.attachments}
      break

  return [aid for aid in kept_attachments if aid in allowed_ids]
| 1243 | |
| 1244 | |
| 1245 | def _GetEnumFieldValuesAndDocstrings(field_def, config): |
| 1246 | # type: (proto.tracker_pb2.LabelDef, proto.tracker_pb2.ProjectIssueConfig) -> |
| 1247 | # Sequence[tuple(string, string)] |
| 1248 | """Get sequence of value, docstring tuples for an enum field""" |
| 1249 | label_defs = config.well_known_labels |
| 1250 | lower_field_name = field_def.field_name.lower() |
| 1251 | tuples = [] |
| 1252 | for ld in label_defs: |
| 1253 | if (ld.label.lower().startswith(lower_field_name + '-') and |
| 1254 | not ld.deprecated): |
| 1255 | label_value = ld.label[len(lower_field_name) + 1:] |
| 1256 | tuples.append((label_value, ld.label_docstring)) |
| 1257 | else: |
| 1258 | continue |
| 1259 | return tuples |
| 1260 | |
| 1261 | |
# _IssueChangesTuple is returned by ApplyAllIssueChanges() and is used to bundle
# the updated issues, resulting amendments, and other information needed by the
# caller to process the changes in the DB and send notifications.
_IssueChangesTuple = collections.namedtuple(
    '_IssueChangesTuple', [
        'issues_to_update_dict', 'merged_from_add_by_iid', 'amendments_by_iid',
        'imp_amendments_by_iid', 'old_owners_by_iid', 'old_statuses_by_iid',
        'old_components_by_iid', 'new_starrers_by_iid'
    ])
# Field types, in declaration order:
# type: (Mapping[int, Issue], DefaultDict[int, Sequence[int]],
#     Mapping[int, Amendment], Mapping[int, Amendment], Mapping[int, int],
#     Mapping[int, str], Mapping[int, Sequence[int]],
#     Mapping[int, Sequence[int]])
| 1275 | |
| 1276 | |
def ApplyAllIssueChanges(cnxn, issue_delta_pairs, services):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services) ->
  #     IssueChangesTuple
  """Modify the given issues with the given deltas and impacted issues in RAM.

  Filter rules are not applied in this method.
  This method implements phases 3 and 4 of the process for modifying issues.
  See WorkEnv.ModifyIssues() for other phases and overall process.

  Args:
    cnxn: MonorailConnection object.
    issue_delta_pairs: List of tuples that couple Issues with the IssueDeltas
      that represent the updates we want to make to each Issue.
    services: Services object for connection to backend services.

  Returns:
    An _IssueChangesTuple named tuple.
  """
  impacted_tracker = _IssueChangeImpactedIssues()
  project_ids = {issue.project_id for issue, _delta in issue_delta_pairs}
  # Batch-fetch all configs up front; issues may span multiple projects.
  configs_by_pid = services.config.GetProjectConfigs(cnxn, list(project_ids))

  # Track issues which have been modified in RAM and will need to
  # be updated in the DB.
  issues_to_update_dict = {}

  amendments_by_iid = {}
  old_owners_by_iid = {}
  old_statuses_by_iid = {}
  old_components_by_iid = {}
  # PHASE 3: Update the main issues in RAM (not indirectly, impacted issues).
  for issue, delta in issue_delta_pairs:
    # Cache old data that will be used by future computations.
    old_owner = tracker_bizobj.GetOwnerId(issue)
    old_status = tracker_bizobj.GetStatus(issue)
    # Only record old values for fields the delta actually changes.
    if delta.owner_id is not None and delta.owner_id != old_owner:
      old_owners_by_iid[issue.issue_id] = old_owner
    if delta.status is not None and delta.status != old_status:
      old_statuses_by_iid[issue.issue_id] = old_status
    new_components = set(issue.component_ids)
    new_components.update(delta.comp_ids_add or [])
    new_components.difference_update(delta.comp_ids_remove or [])
    if set(issue.component_ids) != new_components:
      old_components_by_iid[issue.issue_id] = issue.component_ids

    # Record which other issues this delta touches (blocking, merging, etc.)
    # before applying it, so PHASE 4 can update them too.
    impacted_tracker.TrackImpactedIssues(issue, delta)
    config = configs_by_pid.get(issue.project_id)
    amendments, _impacted_iids = tracker_bizobj.ApplyIssueDelta(
        cnxn, services.issue, issue, delta, config)
    if amendments:
      issues_to_update_dict[issue.issue_id] = issue
      amendments_by_iid[issue.issue_id] = amendments

  # PHASE 4: Update impacted issues in RAM.
  logging.info('Applying impacted issue changes: %r', impacted_tracker.__dict__)
  imp_amendments_by_iid = {}
  impacted_iids = impacted_tracker.ComputeAllImpactedIIDs()
  new_starrers_by_iid = {}
  for issue_id in impacted_iids:
    # Changes made to an impacted issue should be on top of changes
    # made to it in PHASE 3 where it might have been a 'main' issue.
    issue = issues_to_update_dict.get(
        issue_id, services.issue.GetIssue(cnxn, issue_id, use_cache=False))

    # Apply impacted changes.
    amendments, new_starrers = impacted_tracker.ApplyImpactedIssueChanges(
        cnxn, issue, services)
    if amendments:
      imp_amendments_by_iid[issue.issue_id] = amendments
      issues_to_update_dict[issue.issue_id] = issue
    if new_starrers:
      new_starrers_by_iid[issue.issue_id] = new_starrers

  return _IssueChangesTuple(
      issues_to_update_dict, impacted_tracker.merged_from_add,
      amendments_by_iid, imp_amendments_by_iid, old_owners_by_iid,
      old_statuses_by_iid, old_components_by_iid, new_starrers_by_iid)
| 1354 | |
| 1355 | |
def UpdateClosedTimestamp(config, issue, old_effective_status):
  # type: (proto.tracker_pb2.ProjectIssueConfig, proto.tracker_pb2.Issue, str)
  #     -> None
  """Sets or unsets the closed_timestamp based on status changes.

  If the status is changing from open to closed, the closed_timestamp is set
  to the current time.

  If the status is changing from closed to open, the closed_timestamp is
  unset.

  If the status is changing from one closed to another closed, or from one
  open to another open, no operations are performed.

  Args:
    config: the project configuration
    issue: the issue being updated (a protocol buffer)
    old_effective_status: the old issue status string. E.g., 'New'

  SIDE EFFECTS:
    Updated issue in place with new closed timestamp.
  """
  # Treat a missing old status as '' (unknown statuses count as open).
  old_effective_status = old_effective_status or ''
  # open -> closed
  if (MeansOpenInProject(old_effective_status, config) and
      not MeansOpenInProject(tracker_bizobj.GetStatus(issue), config)):

    issue.closed_timestamp = int(time.time())
    return

  # closed -> open
  if (not MeansOpenInProject(old_effective_status, config) and
      MeansOpenInProject(tracker_bizobj.GetStatus(issue), config)):

    # Clear the proto field entirely rather than writing a sentinel value.
    issue.reset('closed_timestamp')
    return
| 1391 | |
| 1392 | |
def GroupUniqueDeltaIssues(issue_delta_pairs):
  # type: (Tuple[Issue, IssueDelta]) -> (
  #     Sequence[IssueDelta], Sequence[Sequence[Issue]])
  """Identifies unique IssueDeltas and groups Issues with identical IssueDeltas.

  Args:
    issue_delta_pairs: List of tuples that couple Issues with the IssueDeltas
      that represent the updates we want to make to each Issue.

  Returns:
    (unique_deltas, issues_for_unique_deltas):
      unique_deltas: List of unique IssueDeltas found in issue_delta_pairs.
      issues_for_unique_deltas: List of Issue lists. Each Issue list
        contains all the Issues that had identical IssueDeltas.
        Each issues_for_unique_deltas[i] is the list of Issues
        that had unique_deltas[i] as their IssueDeltas.
  """
  unique_deltas = []
  issues_for_unique_deltas = []
  for issue, delta in issue_delta_pairs:
    if delta in unique_deltas:
      # Seen before: append this issue to the group at the matching index.
      issues_for_unique_deltas[unique_deltas.index(delta)].append(issue)
    else:
      # First occurrence: start a new group in a parallel list.
      unique_deltas.append(delta)
      issues_for_unique_deltas.append([issue])

  return unique_deltas, issues_for_unique_deltas
| 1424 | |
| 1425 | |
def _AssertNoConflictingDeltas(issue_delta_pairs, refs_dict, err_agg):
  # type: (Sequence[Tuple[Issue, IssueDelta]], Mapping[int, str],
  #     exceptions.ErrorAggregator) -> None
  """Checks if any issue deltas conflict with each other or themselves.

  A conflict is a blocking/blocked-on relationship that one delta adds
  while another delta (or the same delta) removes it.

  Args:
    issue_delta_pairs: list of (Issue, IssueDelta) tuples being applied.
    refs_dict: mapping of issue_id to human-readable issue ref string,
      used to compose the error messages.
    err_agg: ErrorAggregator that collects one message per conflicted issue.

  Note: refs_dict should contain issue ref strings for all issues found
  in issue_delta_pairs, including all issues found in
  {blocked_on|blocking}_{add|remove}.
  """
  err_message = 'Changes for {} conflict for {}'

  # Track all delta blocked_on_add and blocking_add in terms of
  # 'blocking_add' so we can track when a {blocked_on|blocking}_remove
  # is in conflict with some {blocked_on|blocking}_add.
  blocking_add = collections.defaultdict(list)
  for issue, delta in issue_delta_pairs:
    blocking_add[issue.issue_id].extend(delta.blocking_add)

    # "A blocked on B" implies "B blocking A", so record the inverse edge.
    for imp_iid in delta.blocked_on_add:
      blocking_add[imp_iid].append(issue.issue_id)

  # Check *_remove for conflicts with tracking blocking_add.
  for issue, delta in issue_delta_pairs:
    added_iids = blocking_add[issue.issue_id]
    # Get intersection of iids that are in `blocking_remove` and
    # the tracked `blocking_add`.
    conflict_iids = set(delta.blocking_remove) & set(added_iids)

    # Get iids of `blocked_on_remove` that conflict with the
    # tracked `blocking_add`.
    for possible_conflict_iid in delta.blocked_on_remove:
      if issue.issue_id in blocking_add[possible_conflict_iid]:
        conflict_iids.add(possible_conflict_iid)

    if conflict_iids:
      refs_str = ', '.join([refs_dict[iid] for iid in conflict_iids])
      err_agg.AddErrorMessage(err_message, refs_dict[issue.issue_id], refs_str)
| 1463 | |
| 1464 | |
def PrepareIssueChanges(
    cnxn,
    issue_delta_pairs,
    services,
    attachment_uploads=None,
    comment_content=None):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services,
  #     Optional[Sequence[framework_helpers.AttachmentUpload]], Optional[str])
  #     -> Mapping[int, int]
  """Clean the deltas and assert they are valid for each paired issue.

  Returns:
    A mapping of {project_id: new_bytes_used} for projects that would get
    new attachments, or an empty dict if there are no attachment uploads.
  """
  _EnforceNonMergeStatusDeltas(cnxn, issue_delta_pairs, services)
  _AssertIssueChangesValid(
      cnxn, issue_delta_pairs, services, comment_content=comment_content)

  if not attachment_uploads:
    return {}
  return _EnforceAttachmentQuotaLimits(
      cnxn, issue_delta_pairs, services, attachment_uploads)
| 1483 | |
| 1484 | |
def _EnforceAttachmentQuotaLimits(
    cnxn, issue_delta_pairs, services, attachment_uploads):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services
  #     Optional[Sequence[framework_helpers.AttachmentUpload]]
  #     -> Mapping[int, int]
  """Assert that the attachments don't exceed project quotas.

  Raises:
    exceptions.OverAttachmentQuota: Aggregated error if any project's
      attachment quota would be exceeded.
  """
  # Every issue in a project receives a copy of the uploads, so quota is
  # charged per issue, not per request.
  issues_per_project = collections.Counter(
      issue.project_id for issue, _delta in issue_delta_pairs)

  projects_by_id = services.project.GetProjects(
      cnxn, list(issues_per_project))

  new_bytes_by_pid = {}
  with exceptions.ErrorAggregator(exceptions.OverAttachmentQuota) as err_agg:
    for pid, issue_count in issues_per_project.items():
      project = projects_by_id[pid]
      try:
        new_bytes_by_pid[pid] = ComputeNewQuotaBytesUsed(
            project, attachment_uploads * issue_count)
      except exceptions.OverAttachmentQuota:
        err_agg.AddErrorMessage(
            'Attachment quota exceeded for project {}', project.project_name)
  return new_bytes_by_pid
| 1509 | |
| 1510 | |
def _AssertIssueChangesValid(
    cnxn, issue_delta_pairs, services, comment_content=None):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services,
  #     Optional[str]) -> None
  """Assert that the delta changes are valid for each paired issue.

  Note: this method does not check if the changes trigger any FilterRule
  `warnings` or `errors`.

  Args:
    cnxn: MonorailConnection to the database.
    issue_delta_pairs: List of (Issue, IssueDelta) tuples pairing each issue
      with the delta that would be applied to it.
    services: Services object to look up projects, configs, issues and users.
    comment_content: Optional comment text accompanying the changes.

  Raises:
    exceptions.NoSuchIssueException: If an issue referenced by a delta's
      blocking/blocked_on changes does not exist.
    exceptions.InputException: If any change is invalid; all validation
      errors are aggregated into a single exception.
  """
  project_ids = list(
      {issue.project_id for (issue, _delta) in issue_delta_pairs})
  projects_by_id = services.project.GetProjects(cnxn, project_ids)
  configs_by_id = services.config.GetProjectConfigs(cnxn, project_ids)
  refs_dict = {
      iss.issue_id: '%s:%d' % (iss.project_name, iss.local_id)
      for iss, _delta in issue_delta_pairs
  }
  # Add refs of deltas' blocking/blocked_on issues needed by
  # _AssertNoConflictingDeltas.
  relation_iids = set()
  for _iss, delta in issue_delta_pairs:
    relation_iids.update(
        delta.blocked_on_remove + delta.blocking_remove + delta.blocked_on_add +
        delta.blocking_add)
  relation_issues_dict, misses = services.issue.GetIssuesDict(
      cnxn, relation_iids)
  if misses:
    raise exceptions.NoSuchIssueException(
        'Could not find issues with ids: %r' % misses)
  for iid, iss in relation_issues_dict.items():
    if iid not in refs_dict:
      refs_dict[iid] = '%s:%d' % (iss.project_name, iss.local_id)

  with exceptions.ErrorAggregator(exceptions.InputException) as err_agg:
    if (comment_content and
        len(comment_content.strip()) > tracker_constants.MAX_COMMENT_CHARS):
      err_agg.AddErrorMessage('Comment is too long.')

    _AssertNoConflictingDeltas(issue_delta_pairs, refs_dict, err_agg)

    for issue, delta in issue_delta_pairs:
      project = projects_by_id.get(issue.project_id)
      config = configs_by_id.get(issue.project_id)
      issue_ref = refs_dict[issue.issue_id]

      # Enforce the merge invariant: a MERGED-type status must be paired
      # with a merged_into value and vice versa, considering the state the
      # issue would be in after the delta is applied.
      if (delta.merged_into is not None or
          delta.merged_into_external is not None or delta.status is not None):
        end_status = delta.status or issue.status
        # Delta values take precedence over the issue's current values;
        # the first non-None entry is the post-update merged_into value.
        merged_options = [
            delta.merged_into, delta.merged_into_external, issue.merged_into,
            issue.merged_into_external
        ]
        end_merged_into = next(
            (merge for merge in merged_options if merge is not None), None)

        # Status comparison is case-insensitive.
        is_merge_status = end_status.lower() in [
            status.lower() for status in config.statuses_offer_merge
        ]

        if ((is_merge_status and not end_merged_into) or
            (not is_merge_status and end_merged_into)):
          err_agg.AddErrorMessage(
              '{}: MERGED type statuses must accompany mergedInto values.',
              issue_ref)

      if delta.merged_into and issue.issue_id == delta.merged_into:
        err_agg.AddErrorMessage(
            '{}: Cannot merge an issue into itself.', issue_ref)
      if (issue.issue_id in set(
          delta.blocked_on_add)) or (issue.issue_id in set(delta.blocking_add)):
        err_agg.AddErrorMessage(
            '{}: Cannot block an issue on itself.', issue_ref)
      if (delta.owner_id is not None) and (delta.owner_id != issue.owner_id):
        parsed_owner_valid, msg = IsValidIssueOwner(
            cnxn, project, delta.owner_id, services)
        if not parsed_owner_valid:
          err_agg.AddErrorMessage('{}: {}', issue_ref, msg)
      # The owner is already checked by IsValidIssueOwner above, so only
      # new ccs and user-type field values need an existence check here.
      all_users = [uid for uid in delta.cc_ids_add]
      field_users = [fv.user_id for fv in delta.field_vals_add if fv.user_id]
      all_users.extend(field_users)
      AssertUsersExist(cnxn, services, all_users, err_agg)
      if (delta.summary and
          len(delta.summary.strip()) > tracker_constants.MAX_SUMMARY_CHARS):
        err_agg.AddErrorMessage('{}: Summary is too long.', issue_ref)
      if delta.summary == '':
        err_agg.AddErrorMessage('{}: Summary required.', issue_ref)
      if delta.status == '':
        err_agg.AddErrorMessage('{}: Status is required.', issue_ref)
      # Do not pass in issue for validation, as issue is pre-update, and would
      # result in being unable to edit issues in invalid states.
      fvs_err_msgs = field_helpers.ValidateCustomFields(
          cnxn, services, delta.field_vals_add, config, project)
      if fvs_err_msgs:
        err_agg.AddErrorMessage('{}: {}', issue_ref, '\n'.join(fvs_err_msgs))
      # TODO(crbug.com/monorail/9156): Validate that we do not remove fields
      # such that a required field becomes unset.
| 1608 | |
| 1609 | |
def AssertUsersExist(cnxn, services, user_ids, err_agg):
  # type: (MonorailConnection, Services, Sequence[int], ErrorAggregator) -> None
  """Assert that all users exist.

  Has the side-effect of adding error messages to the input ErrorAggregator,
  one per missing user id, in the order the ids appear in user_ids.
  """
  existing = services.user.GetUsersByIDs(cnxn, user_ids, skip_missed=True)
  for user_id in user_ids:
    if user_id not in existing:
      err_agg.AddErrorMessage(
          'users/{}: User does not exist.'.format(user_id))
| 1622 | |
| 1623 | |
def AssertValidIssueForCreate(cnxn, services, issue, description):
  # type: (MonorailConnection, Services, Issue, str) -> None
  """Assert that issue proto is valid for issue creation.

  Args:
    cnxn: A connection object to use services with.
    services: An object containing services to use to look up relevant data.
    issue: A PB containing the issue to validate.
    description: The description for the issue.

  Raises:
    InputException if the issue is not valid; all validation errors are
      aggregated into a single exception.
  """
  project = services.project.GetProject(cnxn, issue.project_id)
  config = services.config.GetProjectConfig(cnxn, issue.project_id)

  with exceptions.ErrorAggregator(exceptions.InputException) as err_agg:
    owner_is_valid, owner_err_msg = IsValidIssueOwner(
        cnxn, project, issue.owner_id, services)
    if not owner_is_valid:
      err_agg.AddErrorMessage(owner_err_msg)
    # Summary and description must be non-blank and within size limits.
    if not issue.summary.strip():
      err_agg.AddErrorMessage('Summary is required')
    if not description.strip():
      err_agg.AddErrorMessage('Description is required')
    if len(issue.summary) > tracker_constants.MAX_SUMMARY_CHARS:
      err_agg.AddErrorMessage('Summary is too long')
    if len(description) > tracker_constants.MAX_COMMENT_CHARS:
      err_agg.AddErrorMessage('Description is too long')

    # Check all users exist. The owner is already checked by
    # IsValidIssueOwner above.
    all_users = [uid for uid in issue.cc_ids]
    for av in issue.approval_values:
      all_users.extend(av.approver_ids)
    field_users = [fv.user_id for fv in issue.field_values if fv.user_id]
    all_users.extend(field_users)
    AssertUsersExist(cnxn, services, all_users, err_agg)

    field_validity_errors = field_helpers.ValidateCustomFields(
        cnxn, services, issue.field_values, config, project, issue=issue)
    if field_validity_errors:
      err_agg.AddErrorMessage("\n".join(field_validity_errors))
    # The status must already be defined in the project config; it is not
    # auto-created here.
    if not services.config.LookupStatusID(cnxn, issue.project_id, issue.status,
                                          autocreate=False):
      err_agg.AddErrorMessage('Undefined status: %s' % issue.status)
    # Only defined, non-deprecated components may be set on a new issue.
    all_comp_ids = {
        cd.component_id for cd in config.component_defs if not cd.deprecated
    }
    for comp_id in issue.component_ids:
      if comp_id not in all_comp_ids:
        err_agg.AddErrorMessage(
            'Undefined or deprecated component with id: %d' % comp_id)
| 1676 | |
| 1677 | |
def _ComputeNewCcsFromIssueMerge(merge_into_issue, source_issues):
  # type: (Issue, Collection[Issue]) -> Collection[int]
  """Compute ccs that should be added from source_issues to merge_into_issue.

  Ccs (and owners) of restricted source issues are only propagated when the
  source lives in the same project and carries exactly the same restrictions
  as the target, so merging cannot leak metadata of restricted issues.
  """
  target_restrictions = set(permissions.GetRestrictions(merge_into_issue))

  candidate_cc_ids = set()
  for source in source_issues:
    # We don't want to leak metadata like ccs of restricted issues, so
    # skip restricted sources whose restrictions don't match the target's.
    if permissions.HasRestrictions(source, perm='View'):
      same_project = source.project_id == merge_into_issue.project_id
      same_restrictions = (
          set(permissions.GetRestrictions(source)) == target_restrictions)
      if not (same_project and same_restrictions):
        continue

    candidate_cc_ids.update(source.cc_ids)
    if source.owner_id:
      candidate_cc_ids.add(source.owner_id)

  # Only report ccs that the target issue does not already have.
  return [
      cc_id for cc_id in candidate_cc_ids
      if cc_id not in merge_into_issue.cc_ids
  ]
| 1699 | |
| 1700 | |
def _EnforceNonMergeStatusDeltas(cnxn, issue_delta_pairs, services):
  # type: (MonorailConnection, Sequence[Tuple[Issue, IssueDelta]], Services)
  """Update deltas in RAM to remove merged if a MERGED status is removed."""
  project_ids = list(
      {issue.project_id for issue, _delta in issue_delta_pairs})
  configs_by_id = services.config.GetProjectConfigs(cnxn, project_ids)
  # Pre-lowercase each project's MERGED-type statuses for case-insensitive
  # membership checks below.
  merge_statuses_by_pid = {
      pid: {s.lower() for s in configs_by_id[pid].statuses_offer_merge}
      for pid in project_ids
  }

  for issue, delta in issue_delta_pairs:
    if not delta.status:
      continue
    # Deltas that explicitly set merged_into values are left untouched;
    # if those values conflict with the new status, the request will fail
    # later in AssertIssueChangesValue().
    if delta.merged_into is not None or delta.merged_into_external is not None:
      continue
    if delta.status.lower() in merge_statuses_by_pid[issue.project_id]:
      continue
    # Status is moving to a non-MERGED value: clear whichever merged_into
    # value the issue currently has.
    if issue.merged_into:
      delta.merged_into = 0
    elif issue.merged_into_external:
      delta.merged_into_external = ''
| 1725 | |
| 1726 | |
| 1727 | class _IssueChangeImpactedIssues(): |
| 1728 | """Class to track changes of issues impacted by updates to other issues.""" |
| 1729 | |
| 1730 | def __init__(self): |
| 1731 | |
| 1732 | # Each of the dicts below should be used to track |
| 1733 | # {impacted_issue_id: [issues being modified that impact the keyed issue]}. |
| 1734 | |
| 1735 | # e.g. `blocking_remove` with {iid_1: [iid_2, iid_3]} means that |
| 1736 | # `TrackImpactedIssues` has been called with a delta of |
| 1737 | # IssueDelta(blocked_on_remove=[iid_1]) for both issue 2 and issue 3. |
| 1738 | self.blocking_add = collections.defaultdict(list) |
| 1739 | self.blocking_remove = collections.defaultdict(list) |
| 1740 | self.blocked_on_add = collections.defaultdict(list) |
| 1741 | self.blocked_on_remove = collections.defaultdict(list) |
| 1742 | self.merged_from_add = collections.defaultdict(list) |
| 1743 | self.merged_from_remove = collections.defaultdict(list) |
| 1744 | |
| 1745 | def ComputeAllImpactedIIDs(self): |
| 1746 | # type: () -> Collection[int] |
| 1747 | """Computes the unique set of all impacted issue ids.""" |
| 1748 | return set(self.blocking_add.keys() + self.blocking_remove.keys() + |
| 1749 | self.blocked_on_add.keys() + self.blocked_on_remove.keys() + |
| 1750 | self.merged_from_add.keys() + self.merged_from_remove.keys()) |
| 1751 | |
| 1752 | def TrackImpactedIssues(self, issue, delta): |
| 1753 | # type: (Issue, IssueDelta) -> None |
| 1754 | """Track impacted issues from when `delta` is applied to `issue`. |
| 1755 | |
| 1756 | Args: |
| 1757 | issue: Issue that the delta will be applied to, but has not yet. |
| 1758 | delta: IssueDelta representing the changes that will be made to |
| 1759 | the issue. |
| 1760 | """ |
| 1761 | for impacted_iid in delta.blocked_on_add: |
| 1762 | self.blocking_add[impacted_iid].append(issue.issue_id) |
| 1763 | for impacted_iid in delta.blocked_on_remove: |
| 1764 | self.blocking_remove[impacted_iid].append(issue.issue_id) |
| 1765 | |
| 1766 | for impacted_iid in delta.blocking_add: |
| 1767 | self.blocked_on_add[impacted_iid].append(issue.issue_id) |
| 1768 | for impacted_iid in delta.blocking_remove: |
| 1769 | self.blocked_on_remove[impacted_iid].append(issue.issue_id) |
| 1770 | |
| 1771 | if (delta.merged_into == framework_constants.NO_ISSUE_SPECIFIED and |
| 1772 | issue.merged_into): |
| 1773 | self.merged_from_remove[issue.merged_into].append(issue.issue_id) |
| 1774 | elif delta.merged_into and issue.merged_into != delta.merged_into: |
| 1775 | self.merged_from_add[delta.merged_into].append(issue.issue_id) |
| 1776 | if issue.merged_into: |
| 1777 | self.merged_from_remove[issue.merged_into].append(issue.issue_id) |
| 1778 | |
  def ApplyImpactedIssueChanges(self, cnxn, impacted_issue, services):
    # type: (MonorailConnection, Issue, Services) ->
    #     Tuple[Collection[Amendment], Sequence[int]]
    """Apply the tracked changes in RAM for the given impacted issue.

    Args:
      cnxn: connection to SQL database.
      impacted_issue: Issue PB that we are applying the changes to.
      services: Services used to fetch info from DB or cache.

    Returns:
      All the amendments that represent the changes applied to the issue
      and a list of the new issue starrers.

    Side-effect:
      The given impacted_issue will be updated in RAM.
    """
    issue_id = impacted_issue.issue_id

    # Process changes for blocking/blocked_on issue changes.
    amendments, _impacted_iids = tracker_bizobj.ApplyIssueBlockRelationChanges(
        cnxn, impacted_issue, self.blocked_on_add[issue_id],
        self.blocked_on_remove[issue_id], self.blocking_add[issue_id],
        self.blocking_remove[issue_id], services.issue)

    # Process changes in merged issues.
    merged_from_add = self.merged_from_add.get(issue_id, [])
    merged_from_remove = self.merged_from_remove.get(issue_id, [])

    # Merge ccs into impacted_issue from all merged issues,
    # compute new starrers, and set star_count.
    new_starrers = []
    if merged_from_add:
      issues_dict, _misses = services.issue.GetIssuesDict(cnxn, merged_from_add)
      merged_from_add_issues = issues_dict.values()
      # Copy ccs over from the newly merged-in issues, subject to the
      # restriction checks in _ComputeNewCcsFromIssueMerge.
      new_cc_ids = _ComputeNewCcsFromIssueMerge(
          impacted_issue, merged_from_add_issues)
      if new_cc_ids:
        impacted_issue.cc_ids.extend(new_cc_ids)
        amendments.append(
            tracker_bizobj.MakeCcAmendment(new_cc_ids, []))
      # Users who starred the merged-in issues become starrers of this one.
      new_starrers = list(
          GetNewIssueStarrers(cnxn, services, merged_from_add, issue_id))
      if new_starrers:
        impacted_issue.star_count += len(new_starrers)

    # Record a single mergedInto amendment covering both additions and
    # removals of merged issues.
    if merged_from_add or merged_from_remove:
      merged_from_add_refs = services.issue.LookupIssueRefs(
          cnxn, merged_from_add).values()
      merged_from_remove_refs = services.issue.LookupIssueRefs(
          cnxn, merged_from_remove).values()
      amendments.append(
          tracker_bizobj.MakeMergedIntoAmendment(
              merged_from_add_refs, merged_from_remove_refs,
              default_project_name=impacted_issue.project_name))
    return amendments, new_starrers