| 1 | # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 | # Use of this source code is governed by a BSD-style |
| 3 | # license that can be found in the LICENSE file or at |
| 4 | # https://developers.google.com/open-source/licenses/bsd |
| 5 | |
| 6 | """A class that provides persistence for Monorail's additional features. |
| 7 | |
| 8 | Business objects are described in tracker_pb2.py, features_pb2.py, and |
| 9 | tracker_bizobj.py. |
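| | |
| | A FeaturesService instance is constructed with a cache manager and a |
| | ConfigService instance (see FeaturesService.__init__ below) and is then |
| | used by servlets and other services, e.g. |
| | features_service.GetHotlist(cnxn, hotlist_id). |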
| 10 | """ |
| 11 | from __future__ import print_function |
| 12 | from __future__ import division |
| 13 | from __future__ import absolute_import |
| 14 | |
| 15 | import collections |
| 16 | import logging |
| 17 | import re |
| 18 | import time |
| 19 | |
| 20 | import settings |
| 21 | |
| 22 | from features import features_constants |
| 23 | from features import filterrules_helpers |
| 24 | from framework import exceptions |
| 25 | from framework import framework_bizobj |
| 26 | from framework import framework_constants |
| 27 | from framework import sql |
| 28 | from proto import features_pb2 |
| 29 | from services import caches |
| 30 | from services import config_svc |
| 31 | from tracker import tracker_bizobj |
| 32 | from tracker import tracker_constants |
| 33 | |
| 34 | QUICKEDITHISTORY_TABLE_NAME = 'QuickEditHistory' |
| 35 | QUICKEDITMOSTRECENT_TABLE_NAME = 'QuickEditMostRecent' |
| 36 | SAVEDQUERY_TABLE_NAME = 'SavedQuery' |
| 37 | PROJECT2SAVEDQUERY_TABLE_NAME = 'Project2SavedQuery' |
| 38 | SAVEDQUERYEXECUTESINPROJECT_TABLE_NAME = 'SavedQueryExecutesInProject' |
| 39 | USER2SAVEDQUERY_TABLE_NAME = 'User2SavedQuery' |
| 40 | FILTERRULE_TABLE_NAME = 'FilterRule' |
| 41 | HOTLIST_TABLE_NAME = 'Hotlist' |
| 42 | HOTLIST2ISSUE_TABLE_NAME = 'Hotlist2Issue' |
| 43 | HOTLIST2USER_TABLE_NAME = 'Hotlist2User' |
| 44 | |
| 45 | |
| 46 | QUICKEDITHISTORY_COLS = [ |
| 47 | 'user_id', 'project_id', 'slot_num', 'command', 'comment'] |
| 48 | QUICKEDITMOSTRECENT_COLS = ['user_id', 'project_id', 'slot_num'] |
| 49 | SAVEDQUERY_COLS = ['id', 'name', 'base_query_id', 'query'] |
| 50 | PROJECT2SAVEDQUERY_COLS = ['project_id', 'rank', 'query_id'] |
| 51 | SAVEDQUERYEXECUTESINPROJECT_COLS = ['query_id', 'project_id'] |
| 52 | USER2SAVEDQUERY_COLS = ['user_id', 'rank', 'query_id', 'subscription_mode'] |
| 53 | FILTERRULE_COLS = ['project_id', 'rank', 'predicate', 'consequence'] |
| 54 | HOTLIST_COLS = [ |
| 55 | 'id', 'name', 'summary', 'description', 'is_private', 'default_col_spec'] |
| 56 | HOTLIST_ABBR_COLS = ['id', 'name', 'summary', 'is_private'] |
| 57 | HOTLIST2ISSUE_COLS = [ |
| 58 | 'hotlist_id', 'issue_id', 'rank', 'adder_id', 'added', 'note'] |
| 59 | HOTLIST2USER_COLS = ['hotlist_id', 'user_id', 'role_name'] |
| 60 | |
| 61 | |
| 62 | # Regex for parsing one action in the filter rule consequence storage syntax. |
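| | # For example (illustrative values only), a stored consequence such as |
| | # 'add_label:Hotlist-Candidate default_owner_id:111 add_cc_id:222' |
| | # encodes three actions; _DeserializeRuleConsequence() below applies this |
| | # regex repeatedly to recover each action. |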
| 63 | CONSEQUENCE_RE = re.compile( |
| 64 | r'(default_status:(?P<default_status>[-.\w]+))|' |
| 65 | r'(default_owner_id:(?P<default_owner_id>\d+))|' |
| 66 | r'(add_cc_id:(?P<add_cc_id>\d+))|' |
| 67 | r'(add_label:(?P<add_label>[-.\w]+))|' |
| 68 | r'(add_notify:(?P<add_notify>[-.@\w]+))|' |
| 69 | r'(warning:(?P<warning>.+))|' # Warnings consume the rest of the string. |
| 70 | r'(error:(?P<error>.+))' # Errors consume the rest of the string. |
| 71 | ) |
| 72 | |
| 73 | class HotlistTwoLevelCache(caches.AbstractTwoLevelCache): |
| 74 | """Class to manage both RAM and memcache for Hotlist PBs.""" |
| 75 | |
| 76 | def __init__(self, cachemanager, features_service): |
| 77 | super(HotlistTwoLevelCache, self).__init__( |
| 78 | cachemanager, 'hotlist', 'hotlist:', features_pb2.Hotlist) |
| 79 | self.features_service = features_service |
| 80 | |
| 81 | def _DeserializeHotlists( |
| 82 | self, hotlist_rows, issue_rows, role_rows): |
| 83 | """Convert database rows into a dictionary of Hotlist PB keyed by ID. |
| 84 | |
| 85 | Args: |
| 86 | hotlist_rows: a list of hotlist rows from HOTLIST_TABLE_NAME. |
| 87 | issue_rows: a list of issue rows from HOTLIST2ISSUE_TABLE_NAME, |
| 88 | ordered by rank DESC, issue_id. |
| 89 | role_rows: a list of role rows from HOTLIST2USER_TABLE_NAME. |
| 90 | |
| 91 | Returns: |
| 92 | a dict mapping hotlist_id to hotlist PB""" |
| 93 | hotlist_dict = {} |
| 94 | |
| 95 | for hotlist_row in hotlist_rows: |
| 96 | (hotlist_id, hotlist_name, summary, description, is_private, |
| 97 | default_col_spec) = hotlist_row |
| 98 | hotlist = features_pb2.MakeHotlist( |
| 99 | hotlist_name, hotlist_id=hotlist_id, summary=summary, |
| 100 | description=description, is_private=bool(is_private), |
| 101 | default_col_spec=default_col_spec) |
| 102 | hotlist_dict[hotlist_id] = hotlist |
| 103 | |
| 104 | for (hotlist_id, issue_id, rank, adder_id, added, note) in issue_rows: |
| 105 | hotlist = hotlist_dict.get(hotlist_id) |
| 106 | if hotlist: |
| 107 | hotlist.items.append( |
| 108 | features_pb2.MakeHotlistItem(issue_id=issue_id, rank=rank, |
| 109 | adder_id=adder_id, date_added=added, |
| 110 | note=note)) |
| 111 | else: |
| 112 | logging.warn('hotlist %d not found', hotlist_id) |
| 113 | |
| 114 | for (hotlist_id, user_id, role_name) in role_rows: |
| 115 | hotlist = hotlist_dict.get(hotlist_id) |
| 116 | if not hotlist: |
| 117 | logging.warn('hotlist %d not found', hotlist_id) |
| 118 | elif role_name == 'owner': |
| 119 | hotlist.owner_ids.append(user_id) |
| 120 | elif role_name == 'editor': |
| 121 | hotlist.editor_ids.append(user_id) |
| 122 | elif role_name == 'follower': |
| 123 | hotlist.follower_ids.append(user_id) |
| 124 | else: |
| 125 | logging.info('unknown role name %s', role_name) |
| 126 | |
| 127 | return hotlist_dict |
| 128 | |
| 129 | def FetchItems(self, cnxn, keys): |
| 130 | """On RAM and memcache miss, hit the database to get missing hotlists.""" |
| 131 | hotlist_rows = self.features_service.hotlist_tbl.Select( |
| 132 | cnxn, cols=HOTLIST_COLS, is_deleted=False, id=keys) |
| 133 | issue_rows = self.features_service.hotlist2issue_tbl.Select( |
| 134 | cnxn, cols=HOTLIST2ISSUE_COLS, hotlist_id=keys, |
| 135 | order_by=[('rank DESC', []), ('issue_id', [])]) |
| 136 | role_rows = self.features_service.hotlist2user_tbl.Select( |
| 137 | cnxn, cols=HOTLIST2USER_COLS, hotlist_id=keys) |
| 138 | retrieved_dict = self._DeserializeHotlists( |
| 139 | hotlist_rows, issue_rows, role_rows) |
| 140 | return retrieved_dict |
| 141 | |
| 142 | |
| 143 | class HotlistIDTwoLevelCache(caches.AbstractTwoLevelCache): |
| 144 | """Class to manage both RAM and memcache for hotlist_ids. |
| 145 | |
| 146 | Keys for this cache are tuples (hotlist_name.lower(), owner_id). |
| 147 | This cache should be used to fetch hotlist_ids owned by users or |
| 148 | to check if a user owns a hotlist with a certain name, so the |
| 149 | hotlist_names in keys will always be in lowercase. |
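| | |
| | For example (illustrative key), ('release-blockers', 111) is converted |
| | to and from the string 'release-blockers,111' by _KeyToStr and _StrToKey |
| | below. |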
| 150 | """ |
| 151 | |
| 152 | def __init__(self, cachemanager, features_service): |
| 153 | super(HotlistIDTwoLevelCache, self).__init__( |
| 154 | cachemanager, 'hotlist_id', 'hotlist_id:', int, |
| 155 | max_size=settings.issue_cache_max_size) |
| 156 | self.features_service = features_service |
| 157 | |
| 158 | def _MakeCache(self, cache_manager, kind, max_size=None): |
| 159 | """Override normal RamCache creation with ValueCentricRamCache.""" |
| 160 | return caches.ValueCentricRamCache(cache_manager, kind, max_size=max_size) |
| 161 | |
| 162 | def _KeyToStr(self, key): |
| 163 | """This cache uses pairs of (str, int) as keys. Convert them to strings.""" |
| 164 | return '%s,%d' % key |
| 165 | |
| 166 | def _StrToKey(self, key_str): |
| 167 | """This cache uses pairs of (str, int) as keys. |
| 168 | Convert them from strings. |
| 169 | """ |
| 170 | hotlist_name_str, owner_id_str = key_str.split(',') |
| 171 | return (hotlist_name_str, int(owner_id_str)) |
| 172 | |
| 173 | def _DeserializeHotlistIDs( |
| 174 | self, hotlist_rows, owner_rows, wanted_names_for_owners): |
| 175 | """Convert database rows into a dictionary of hotlist_ids keyed by ( |
| 176 | hotlist_name, owner_id). |
| 177 | |
| 178 | Args: |
| 179 | hotlist_rows: a list of hotlist rows [id, name] from HOTLIST, for hotlists |
| 180 | with names we are interested in. |
| 181 | owner_rows: a list of role rows [hotlist_id, user_id] from HOTLIST2USER |
| 182 | for owners that we are interested in that own hotlists with names that |
| 183 | we are interested in. |
| 184 | wanted_names_for_owners: a dict of |
| 185 | {owner_id: [hotlist_name.lower(), ...], ...} |
| 186 | so we know which (hotlist_name, owner_id) keys to return. |
| 187 | |
| 188 | Returns: |
| 189 | A dict mapping (hotlist_name.lower(), owner_id) keys to hotlist_id values. |
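| | For example (illustrative IDs), hotlist_rows [(12, 'Build-Cop')] and |
| | owner_rows [(12, 111)], with wanted_names_for_owners {111: ['build-cop']}, |
| | produce {('build-cop', 111): 12}. |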
| 190 | """ |
| 191 | hotlist_ids_dict = {} |
| 192 | if not hotlist_rows or not owner_rows: |
| 193 | return hotlist_ids_dict |
| 194 | |
| 195 | hotlist_to_owner_id = {} |
| 196 | |
| 197 | # Note: owner_rows contains hotlist owners that we are interested in, but |
| 198 | # may not own hotlists with names we are interested in. |
| 199 | for (hotlist_id, user_id) in owner_rows: |
| 200 | found_owner_id = hotlist_to_owner_id.get(hotlist_id) |
| 201 | if found_owner_id: |
| 202 | logging.warn( |
| 203 | 'hotlist %d has more than one owner: %d, %d', |
| 204 | hotlist_id, user_id, found_owner_id) |
| 205 | hotlist_to_owner_id[hotlist_id] = user_id |
| 206 | |
| 207 | # Note: hotlist_rows contains hotlists found in the owner_rows that have |
| 208 | # names we're interested in. |
| 209 | # We use wanted_names_for_owners to filter out hotlists in hotlist_rows |
| 210 | # that have a (hotlist_name, owner_id) pair we are not interested in. |
| 211 | for (hotlist_id, hotlist_name) in hotlist_rows: |
| 212 | owner_id = hotlist_to_owner_id.get(hotlist_id) |
| 213 | if owner_id: |
| 214 | if hotlist_name.lower() in wanted_names_for_owners.get(owner_id, []): |
| 215 | hotlist_ids_dict[(hotlist_name.lower(), owner_id)] = hotlist_id |
| 216 | |
| 217 | return hotlist_ids_dict |
| 218 | |
| 219 | def FetchItems(self, cnxn, keys): |
| 220 | """On RAM and memcache miss, hit the database.""" |
| 221 | hotlist_names, _owner_ids = zip(*keys) |
| 222 | # Keys may contain [(name1, user1), (name1, user2)] so we cast this to |
| 223 | # a set to make sure 'name1' is not repeated. |
| 224 | hotlist_names_set = set(hotlist_names) |
| 225 | # Pass this dict to _DeserializeHotlistIDs so it knows what hotlist names |
| 226 | # we're interested in for each owner. |
| 227 | wanted_names_for_owner = collections.defaultdict(list) |
| 228 | for hotlist_name, owner_id in keys: |
| 229 | wanted_names_for_owner[owner_id].append(hotlist_name.lower()) |
| 230 | |
| 231 | role_rows = self.features_service.hotlist2user_tbl.Select( |
| 232 | cnxn, cols=['hotlist_id', 'user_id'], |
| 233 | user_id=wanted_names_for_owner.keys(), role_name='owner') |
| 234 | |
| 235 | hotlist_ids = [row[0] for row in role_rows] |
| 236 | hotlist_rows = self.features_service.hotlist_tbl.Select( |
| 237 | cnxn, cols=['id', 'name'], id=hotlist_ids, is_deleted=False, |
| 238 | where=[('LOWER(name) IN (%s)' % sql.PlaceHolders(hotlist_names_set), |
| 239 | [name.lower() for name in hotlist_names_set])]) |
| 240 | |
| 241 | return self._DeserializeHotlistIDs( |
| 242 | hotlist_rows, role_rows, wanted_names_for_owner) |
| 243 | |
| 244 | |
| 245 | class FeaturesService(object): |
| 246 | """The persistence layer for servlets in the features directory.""" |
| 247 | |
| 248 | def __init__(self, cache_manager, config_service): |
| 249 | """Initialize this object so that it is ready to use. |
| 250 | |
| 251 | Args: |
| 252 | cache_manager: local cache with distributed invalidation. |
| 253 | config_service: an instance of ConfigService. |
| 254 | """ |
| 255 | self.quickedithistory_tbl = sql.SQLTableManager(QUICKEDITHISTORY_TABLE_NAME) |
| 256 | self.quickeditmostrecent_tbl = sql.SQLTableManager( |
| 257 | QUICKEDITMOSTRECENT_TABLE_NAME) |
| 258 | |
| 259 | self.savedquery_tbl = sql.SQLTableManager(SAVEDQUERY_TABLE_NAME) |
| 260 | self.project2savedquery_tbl = sql.SQLTableManager( |
| 261 | PROJECT2SAVEDQUERY_TABLE_NAME) |
| 262 | self.savedqueryexecutesinproject_tbl = sql.SQLTableManager( |
| 263 | SAVEDQUERYEXECUTESINPROJECT_TABLE_NAME) |
| 264 | self.user2savedquery_tbl = sql.SQLTableManager(USER2SAVEDQUERY_TABLE_NAME) |
| 265 | |
| 266 | self.filterrule_tbl = sql.SQLTableManager(FILTERRULE_TABLE_NAME) |
| 267 | |
| 268 | self.hotlist_tbl = sql.SQLTableManager(HOTLIST_TABLE_NAME) |
| 269 | self.hotlist2issue_tbl = sql.SQLTableManager(HOTLIST2ISSUE_TABLE_NAME) |
| 270 | self.hotlist2user_tbl = sql.SQLTableManager(HOTLIST2USER_TABLE_NAME) |
| 271 | |
| 272 | self.saved_query_cache = caches.RamCache( |
| 273 | cache_manager, 'user', max_size=1000) |
| 274 | self.canned_query_cache = caches.RamCache( |
| 275 | cache_manager, 'project', max_size=1000) |
| 276 | |
| 277 | self.hotlist_2lc = HotlistTwoLevelCache(cache_manager, self) |
| 278 | self.hotlist_id_2lc = HotlistIDTwoLevelCache(cache_manager, self) |
| 279 | self.hotlist_user_to_ids = caches.RamCache(cache_manager, 'hotlist') |
| 280 | |
| 281 | self.config_service = config_service |
| 282 | |
| 283 | ### QuickEdit command history |
| 284 | |
| 285 | def GetRecentCommands(self, cnxn, user_id, project_id): |
| 286 | """Return recent command items for the "Redo" menu. |
| 287 | |
| 288 | Args: |
| 289 | cnxn: Connection to SQL database. |
| 290 | user_id: int ID of the current user. |
| 291 | project_id: int ID of the current project. |
| 292 | |
| 293 | Returns: |
| 294 | A pair (cmd_slots, recent_slot_num). cmd_slots is a list of |
| 295 | 3-tuples that can be used to populate the "Redo" menu of the |
| 296 | quick-edit dialog. recent_slot_num indicates which of those |
| 297 | slots should initially populate the command and comment fields. |
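| | |
| | For example (illustrative values only), cmd_slots might look like |
| | [(1, 'status=Fixed', 'Fixed and verified.'), ...], where each tuple is |
| | (slot_num, command, comment), and recent_slot_num would be 1 if slot 1 |
| | was used most recently. |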
| 298 | """ |
| 299 | # Always start with the standard 5 commands. |
| 300 | history = tracker_constants.DEFAULT_RECENT_COMMANDS[:] |
| 301 | # If the user has modified any, then overwrite some standard ones. |
| 302 | history_rows = self.quickedithistory_tbl.Select( |
| 303 | cnxn, cols=['slot_num', 'command', 'comment'], |
| 304 | user_id=user_id, project_id=project_id) |
| 305 | for slot_num, command, comment in history_rows: |
| 306 | if slot_num < len(history): |
| 307 | history[slot_num - 1] = (command, comment) |
| 308 | |
| 309 | slots = [] |
| 310 | for idx, (command, comment) in enumerate(history): |
| 311 | slots.append((idx + 1, command, comment)) |
| 312 | |
| 313 | recent_slot_num = self.quickeditmostrecent_tbl.SelectValue( |
| 314 | cnxn, 'slot_num', default=1, user_id=user_id, project_id=project_id) |
| 315 | |
| 316 | return slots, recent_slot_num |
| 317 | |
| 318 | def StoreRecentCommand( |
| 319 | self, cnxn, user_id, project_id, slot_num, command, comment): |
| 320 | """Store the given command and comment in the user's command history.""" |
| 321 | self.quickedithistory_tbl.InsertRow( |
| 322 | cnxn, replace=True, user_id=user_id, project_id=project_id, |
| 323 | slot_num=slot_num, command=command, comment=comment) |
| 324 | self.quickeditmostrecent_tbl.InsertRow( |
| 325 | cnxn, replace=True, user_id=user_id, project_id=project_id, |
| 326 | slot_num=slot_num) |
| 327 | |
| 328 | def ExpungeQuickEditHistory(self, cnxn, project_id): |
| 329 | """Completely delete every users' quick edit history for this project.""" |
| 330 | self.quickeditmostrecent_tbl.Delete(cnxn, project_id=project_id) |
| 331 | self.quickedithistory_tbl.Delete(cnxn, project_id=project_id) |
| 332 | |
| 333 | def ExpungeQuickEditsByUsers(self, cnxn, user_ids, limit=None): |
| 334 | """Completely delete every given users' quick edits. |
| 335 | |
| 336 | This method will not commit the operations. This method will |
| 337 | not make changes to in-memory data. |
| 338 | """ |
| 339 | commit = False |
| 340 | self.quickeditmostrecent_tbl.Delete( |
| 341 | cnxn, user_id=user_ids, commit=commit, limit=limit) |
| 342 | self.quickedithistory_tbl.Delete( |
| 343 | cnxn, user_id=user_ids, commit=commit, limit=limit) |
| 344 | |
| 345 | ### Saved User and Project Queries |
| 346 | |
| 347 | def GetSavedQueries(self, cnxn, query_ids): |
| 348 | """Retrieve the specified SaveQuery PBs.""" |
| 349 | # TODO(jrobbins): RAM cache |
| 350 | if not query_ids: |
| 351 | return {} |
| 352 | saved_queries = {} |
| 353 | savedquery_rows = self.savedquery_tbl.Select( |
| 354 | cnxn, cols=SAVEDQUERY_COLS, id=query_ids) |
| 355 | for saved_query_tuple in savedquery_rows: |
| 356 | qid, name, base_id, query = saved_query_tuple |
| 357 | saved_queries[qid] = tracker_bizobj.MakeSavedQuery( |
| 358 | qid, name, base_id, query) |
| 359 | |
| 360 | sqeip_rows = self.savedqueryexecutesinproject_tbl.Select( |
| 361 | cnxn, cols=SAVEDQUERYEXECUTESINPROJECT_COLS, query_id=query_ids) |
| 362 | for query_id, project_id in sqeip_rows: |
| 363 | saved_queries[query_id].executes_in_project_ids.append(project_id) |
| 364 | |
| 365 | return saved_queries |
| 366 | |
| 367 | def GetSavedQuery(self, cnxn, query_id): |
| 368 | """Retrieve the specified SaveQuery PB.""" |
| 369 | saved_queries = self.GetSavedQueries(cnxn, [query_id]) |
| 370 | return saved_queries.get(query_id) |
| 371 | |
| 372 | def _GetUsersSavedQueriesDict(self, cnxn, user_ids): |
| 373 | """Return a dict of all SavedQuery PBs for the specified users.""" |
| 374 | results_dict, missed_uids = self.saved_query_cache.GetAll(user_ids) |
| 375 | |
| 376 | if missed_uids: |
| 377 | savedquery_rows = self.user2savedquery_tbl.Select( |
| 378 | cnxn, cols=SAVEDQUERY_COLS + ['user_id', 'subscription_mode'], |
| 379 | left_joins=[('SavedQuery ON query_id = id', [])], |
| 380 | order_by=[('rank', [])], user_id=missed_uids) |
| 381 | sqeip_dict = {} |
| 382 | if savedquery_rows: |
| 383 | query_ids = {row[0] for row in savedquery_rows} |
| 384 | sqeip_rows = self.savedqueryexecutesinproject_tbl.Select( |
| 385 | cnxn, cols=SAVEDQUERYEXECUTESINPROJECT_COLS, query_id=query_ids) |
| 386 | for qid, pid in sqeip_rows: |
| 387 | sqeip_dict.setdefault(qid, []).append(pid) |
| 388 | |
| 389 | for saved_query_tuple in savedquery_rows: |
| 390 | query_id, name, base_id, query, uid, sub_mode = saved_query_tuple |
| 391 | sq = tracker_bizobj.MakeSavedQuery( |
| 392 | query_id, name, base_id, query, subscription_mode=sub_mode, |
| 393 | executes_in_project_ids=sqeip_dict.get(query_id, [])) |
| 394 | results_dict.setdefault(uid, []).append(sq) |
| 395 | |
| 396 | self.saved_query_cache.CacheAll(results_dict) |
| 397 | return results_dict |
| 398 | |
| 399 | # TODO(jrobbins): change this terminology to "canned query" rather than |
| 400 | # "saved" throughout the application. |
| 401 | def GetSavedQueriesByUserID(self, cnxn, user_id): |
| 402 | """Return a list of SavedQuery PBs for the specified user.""" |
| 403 | saved_queries_dict = self._GetUsersSavedQueriesDict(cnxn, [user_id]) |
| 404 | saved_queries = saved_queries_dict.get(user_id, []) |
| 405 | return saved_queries[:] |
| 406 | |
| 407 | def GetCannedQueriesForProjects(self, cnxn, project_ids): |
| 408 | """Return a dict {project_id: [saved_query]} for the specified projects.""" |
| 409 | results_dict, missed_pids = self.canned_query_cache.GetAll(project_ids) |
| 410 | |
| 411 | if missed_pids: |
| 412 | cannedquery_rows = self.project2savedquery_tbl.Select( |
| 413 | cnxn, cols=['project_id'] + SAVEDQUERY_COLS, |
| 414 | left_joins=[('SavedQuery ON query_id = id', [])], |
| 415 | order_by=[('rank', [])], project_id=project_ids) |
| 416 | |
| 417 | for cq_row in cannedquery_rows: |
| 418 | project_id = cq_row[0] |
| 419 | canned_query_tuple = cq_row[1:] |
| 420 | results_dict.setdefault(project_id, []).append( |
| 421 | tracker_bizobj.MakeSavedQuery(*canned_query_tuple)) |
| 422 | |
| 423 | self.canned_query_cache.CacheAll(results_dict) |
| 424 | return results_dict |
| 425 | |
| 426 | def GetCannedQueriesByProjectID(self, cnxn, project_id): |
| 427 | """Return the list of SavedQueries for the specified project.""" |
| 428 | project_ids_to_canned_queries = self.GetCannedQueriesForProjects( |
| 429 | cnxn, [project_id]) |
| 430 | return project_ids_to_canned_queries.get(project_id, []) |
| 431 | |
| 432 | def _UpdateSavedQueries(self, cnxn, saved_queries, commit=True): |
| 433 | """Store the given SavedQueries to the DB.""" |
| 434 | savedquery_rows = [ |
| 435 | (sq.query_id or None, sq.name, sq.base_query_id, sq.query) |
| 436 | for sq in saved_queries] |
| 437 | existing_query_ids = [sq.query_id for sq in saved_queries if sq.query_id] |
| 438 | if existing_query_ids: |
| 439 | self.savedquery_tbl.Delete(cnxn, id=existing_query_ids, commit=commit) |
| 440 | |
| 441 | generated_ids = self.savedquery_tbl.InsertRows( |
| 442 | cnxn, SAVEDQUERY_COLS, savedquery_rows, commit=commit, |
| 443 | return_generated_ids=True) |
| 444 | if generated_ids: |
| 445 | logging.info('generated_ids are %r', generated_ids) |
| 446 | for sq in saved_queries: |
| 447 | generated_id = generated_ids.pop(0) |
| 448 | if not sq.query_id: |
| 449 | sq.query_id = generated_id |
| 450 | |
| 451 | def UpdateCannedQueries(self, cnxn, project_id, canned_queries): |
| 452 | """Update the canned queries for a project. |
| 453 | |
| 454 | Args: |
| 455 | cnxn: connection to SQL database. |
| 456 | project_id: int project ID of the project that contains these queries. |
| 457 | canned_queries: list of SavedQuery PBs to update. |
| 458 | """ |
| 459 | self.project2savedquery_tbl.Delete( |
| 460 | cnxn, project_id=project_id, commit=False) |
| 461 | self._UpdateSavedQueries(cnxn, canned_queries, commit=False) |
| 462 | project2savedquery_rows = [ |
| 463 | (project_id, rank, sq.query_id) |
| 464 | for rank, sq in enumerate(canned_queries)] |
| 465 | self.project2savedquery_tbl.InsertRows( |
| 466 | cnxn, PROJECT2SAVEDQUERY_COLS, project2savedquery_rows, |
| 467 | commit=False) |
| 468 | cnxn.Commit() |
| 469 | |
| 470 | self.canned_query_cache.Invalidate(cnxn, project_id) |
| 471 | |
| 472 | def UpdateUserSavedQueries(self, cnxn, user_id, saved_queries): |
| 473 | """Store the given saved_queries for the given user.""" |
| 474 | saved_query_ids = [sq.query_id for sq in saved_queries if sq.query_id] |
| 475 | self.savedqueryexecutesinproject_tbl.Delete( |
| 476 | cnxn, query_id=saved_query_ids, commit=False) |
| 477 | self.user2savedquery_tbl.Delete(cnxn, user_id=user_id, commit=False) |
| 478 | |
| 479 | self._UpdateSavedQueries(cnxn, saved_queries, commit=False) |
| 480 | user2savedquery_rows = [] |
| 481 | for rank, sq in enumerate(saved_queries): |
| 482 | user2savedquery_rows.append( |
| 483 | (user_id, rank, sq.query_id, sq.subscription_mode or 'noemail')) |
| 484 | |
| 485 | self.user2savedquery_tbl.InsertRows( |
| 486 | cnxn, USER2SAVEDQUERY_COLS, user2savedquery_rows, commit=False) |
| 487 | |
| 488 | sqeip_rows = [] |
| 489 | for sq in saved_queries: |
| 490 | for pid in sq.executes_in_project_ids: |
| 491 | sqeip_rows.append((sq.query_id, pid)) |
| 492 | |
| 493 | self.savedqueryexecutesinproject_tbl.InsertRows( |
| 494 | cnxn, SAVEDQUERYEXECUTESINPROJECT_COLS, sqeip_rows, commit=False) |
| 495 | cnxn.Commit() |
| 496 | |
| 497 | self.saved_query_cache.Invalidate(cnxn, user_id) |
| 498 | |
| 499 | ### Subscriptions |
| 500 | |
| 501 | def GetSubscriptionsInProjects(self, cnxn, project_ids): |
| 502 | """Return all saved queries for users that have any subscription there. |
| 503 | |
| 504 | Args: |
| 505 | cnxn: Connection to SQL database. |
| 506 | project_ids: list of int project IDs that contain the modified issues. |
| 507 | |
| 508 | Returns: |
| 509 | A dict {user_id: all_saved_queries, ...} for all users that have any |
| 510 | subscription in any of the specified projects. |
| 511 | """ |
| 512 | sqeip_join_str = ( |
| 513 | 'SavedQueryExecutesInProject ON ' |
| 514 | 'SavedQueryExecutesInProject.query_id = User2SavedQuery.query_id') |
| 515 | user_join_str = ( |
| 516 | 'User ON ' |
| 517 | 'User.user_id = User2SavedQuery.user_id') |
| 518 | now = int(time.time()) |
| 519 | absence_threshold = now - settings.subscription_timeout_secs |
| 520 | where = [ |
| 521 | ('(User.banned IS NULL OR User.banned = %s)', ['']), |
| 522 | ('User.last_visit_timestamp >= %s', [absence_threshold]), |
| 523 | ('(User.email_bounce_timestamp IS NULL OR ' |
| 524 | 'User.email_bounce_timestamp = %s)', [0]), |
| 525 | ] |
| 526 | # TODO(jrobbins): cache this since it rarely changes. |
| 527 | subscriber_rows = self.user2savedquery_tbl.Select( |
| 528 | cnxn, cols=['User2SavedQuery.user_id'], distinct=True, |
| 529 | joins=[(sqeip_join_str, []), (user_join_str, [])], |
| 530 | subscription_mode='immediate', project_id=project_ids, |
| 531 | where=where) |
| 532 | subscriber_ids = [row[0] for row in subscriber_rows] |
| 533 | logging.info('subscribers relevant to projects %r are %r', |
| 534 | project_ids, subscriber_ids) |
| 535 | user_ids_to_saved_queries = self._GetUsersSavedQueriesDict( |
| 536 | cnxn, subscriber_ids) |
| 537 | return user_ids_to_saved_queries |
| 538 | |
| 539 | def ExpungeSavedQueriesExecuteInProject(self, cnxn, project_id): |
| 540 | """Remove any references from saved queries to projects in the database.""" |
| 541 | self.savedqueryexecutesinproject_tbl.Delete(cnxn, project_id=project_id) |
| 542 | |
| 543 | savedquery_rows = self.project2savedquery_tbl.Select( |
| 544 | cnxn, cols=['query_id'], project_id=project_id) |
| 545 | savedquery_ids = [row[0] for row in savedquery_rows] |
| 546 | self.project2savedquery_tbl.Delete(cnxn, project_id=project_id) |
| 547 | self.savedquery_tbl.Delete(cnxn, id=savedquery_ids) |
| 548 | |
| 549 | def ExpungeSavedQueriesByUsers(self, cnxn, user_ids, limit=None): |
| 550 | """Completely delete every given users' saved queries. |
| 551 | |
| 552 | This method will not commit the operations. This method will |
| 553 | not make changes to in-memory data. |
| 554 | """ |
| 555 | commit = False |
| 556 | savedquery_rows = self.user2savedquery_tbl.Select( |
| 557 | cnxn, cols=['query_id'], user_id=user_ids, limit=limit) |
| 558 | savedquery_ids = [row[0] for row in savedquery_rows] |
| 559 | self.user2savedquery_tbl.Delete( |
| 560 | cnxn, query_id=savedquery_ids, commit=commit) |
| 561 | self.savedqueryexecutesinproject_tbl.Delete( |
| 562 | cnxn, query_id=savedquery_ids, commit=commit) |
| 563 | self.savedquery_tbl.Delete(cnxn, id=savedquery_ids, commit=commit) |
| 564 | |
| 565 | |
| 566 | ### Filter rules |
| 567 | |
| 568 | def _DeserializeFilterRules(self, filterrule_rows): |
| 569 | """Convert the given DB row tuples into PBs.""" |
| 570 | result_dict = collections.defaultdict(list) |
| 571 | |
| 572 | for filterrule_row in sorted(filterrule_rows): |
| 573 | project_id, _rank, predicate, consequence = filterrule_row |
| 574 | (default_status, default_owner_id, add_cc_ids, add_labels, |
| 575 | add_notify, warning, error) = self._DeserializeRuleConsequence( |
| 576 | consequence) |
| 577 | rule = filterrules_helpers.MakeRule( |
| 578 | predicate, default_status=default_status, |
| 579 | default_owner_id=default_owner_id, add_cc_ids=add_cc_ids, |
| 580 | add_labels=add_labels, add_notify=add_notify, warning=warning, |
| 581 | error=error) |
| 582 | result_dict[project_id].append(rule) |
| 583 | |
| 584 | return result_dict |
| 585 | |
| 586 | def _DeserializeRuleConsequence(self, consequence): |
| 587 | """Decode the THEN-part of a filter rule.""" |
| 588 | (default_status, default_owner_id, add_cc_ids, add_labels, |
| 589 | add_notify, warning, error) = None, None, [], [], [], None, None |
| 590 | for match in CONSEQUENCE_RE.finditer(consequence): |
| 591 | if match.group('default_status'): |
| 592 | default_status = match.group('default_status') |
| 593 | elif match.group('default_owner_id'): |
| 594 | default_owner_id = int(match.group('default_owner_id')) |
| 595 | elif match.group('add_cc_id'): |
| 596 | add_cc_ids.append(int(match.group('add_cc_id'))) |
| 597 | elif match.group('add_label'): |
| 598 | add_labels.append(match.group('add_label')) |
| 599 | elif match.group('add_notify'): |
| 600 | add_notify.append(match.group('add_notify')) |
| 601 | elif match.group('warning'): |
| 602 | warning = match.group('warning') |
| 603 | elif match.group('error'): |
| 604 | error = match.group('error') |
| 605 | |
| 606 | return (default_status, default_owner_id, add_cc_ids, add_labels, |
| 607 | add_notify, warning, error) |
| 608 | |
| 609 | def _GetFilterRulesByProjectIDs(self, cnxn, project_ids): |
| 610 | """Return {project_id: [FilterRule, ...]} for the specified projects.""" |
| 611 | # TODO(jrobbins): caching |
| 612 | filterrule_rows = self.filterrule_tbl.Select( |
| 613 | cnxn, cols=FILTERRULE_COLS, project_id=project_ids) |
| 614 | return self._DeserializeFilterRules(filterrule_rows) |
| 615 | |
| 616 | def GetFilterRules(self, cnxn, project_id): |
| 617 | """Return a list of FilterRule PBs for the specified project.""" |
| 618 | rules_by_project_id = self._GetFilterRulesByProjectIDs(cnxn, [project_id]) |
| 619 | return rules_by_project_id[project_id] |
| 620 | |
| 621 | def _SerializeRuleConsequence(self, rule): |
| 622 | """Put all actions of a filter rule into one string.""" |
| 623 | assignments = [] |
| 624 | for add_lab in rule.add_labels: |
| 625 | assignments.append('add_label:%s' % add_lab) |
| 626 | if rule.default_status: |
| 627 | assignments.append('default_status:%s' % rule.default_status) |
| 628 | if rule.default_owner_id: |
| 629 | assignments.append('default_owner_id:%d' % rule.default_owner_id) |
| 630 | for add_cc_id in rule.add_cc_ids: |
| 631 | assignments.append('add_cc_id:%d' % add_cc_id) |
| 632 | for add_notify in rule.add_notify_addrs: |
| 633 | assignments.append('add_notify:%s' % add_notify) |
| 634 | if rule.warning: |
| 635 | assignments.append('warning:%s' % rule.warning) |
| 636 | if rule.error: |
| 637 | assignments.append('error:%s' % rule.error) |
| 638 | |
| 639 | return ' '.join(assignments) |
| 640 | |
| 641 | def UpdateFilterRules(self, cnxn, project_id, rules): |
| 642 | """Update the filter rules part of a project's issue configuration. |
| 643 | |
| 644 | Args: |
| 645 | cnxn: connection to SQL database. |
| 646 | project_id: int ID of the current project. |
| 647 | rules: a list of FilterRule PBs. |
| 648 | """ |
| 649 | rows = [] |
| 650 | for rank, rule in enumerate(rules): |
| 651 | predicate = rule.predicate |
| 652 | consequence = self._SerializeRuleConsequence(rule) |
| 653 | if predicate and consequence: |
| 654 | rows.append((project_id, rank, predicate, consequence)) |
| 655 | |
| 656 | self.filterrule_tbl.Delete(cnxn, project_id=project_id) |
| 657 | self.filterrule_tbl.InsertRows(cnxn, FILTERRULE_COLS, rows) |
| 658 | |
| 659 | def ExpungeFilterRules(self, cnxn, project_id): |
| 660 | """Completely destroy filter rule info for the specified project.""" |
| 661 | self.filterrule_tbl.Delete(cnxn, project_id=project_id) |
| 662 | |
| 663 | def ExpungeFilterRulesByUser(self, cnxn, user_ids_by_email): |
| 664 | """Wipes any Filter Rules containing the given users. |
| 665 | |
| 666 | This method will not commit the operation. This method will not make |
| 667 | changes to in-memory data. |
| 668 | Args: |
| 669 | cnxn: connection to SQL database. |
| 670 | user_ids_by_email: dict of {email: user_id, ...} for all users we want |
| 671 | to expunge. |
| 672 | |
| 673 | Returns: |
| 674 | Dictionary of {project_id: [(predicate, consequence), ..]} for Filter |
| 675 | Rules that will be deleted for containing the given emails. |
| 676 | """ |
| 677 | deleted_project_rules_dict = collections.defaultdict(list) |
| 678 | if user_ids_by_email: |
| 679 | deleted_rows = [] |
| 680 | emails = user_ids_by_email.keys() |
| 681 | all_rules_rows = self.filterrule_tbl.Select(cnxn, FILTERRULE_COLS) |
| 682 | logging.info('Fetched all filter rules: %s', all_rules_rows) |
| 683 | for rule_row in all_rules_rows: |
| 684 | project_id, _rank, predicate, consequence = rule_row |
| 685 | if any(email in predicate for email in emails): |
| 686 | deleted_rows.append(rule_row) |
| 687 | continue |
| 688 | if any( |
| 689 | (('add_notify:%s' % email) in consequence or |
| 690 | ('add_cc_id:%s' % user_id) in consequence or |
| 691 | ('default_owner_id:%s' % user_id) in consequence) |
| 692 | for email, user_id in user_ids_by_email.items()): |
| 693 | deleted_rows.append(rule_row) |
| 694 | continue |
| 695 | |
| 696 | for deleted_row in deleted_rows: |
| 697 | project_id, rank, predicate, consequence = deleted_row |
| 698 | self.filterrule_tbl.Delete( |
| 699 | cnxn, project_id=project_id, rank=rank, predicate=predicate, |
| 700 | consequence=consequence, commit=False) |
| 701 | deleted_project_rules_dict = self._DeserializeFilterRules(deleted_rows) |
| 702 | |
| 703 | return deleted_project_rules_dict |
| 704 | |
| 705 | ### Creating hotlists |
| 706 | |
| 707 | def CreateHotlist( |
| 708 | self, cnxn, name, summary, description, owner_ids, editor_ids, |
| 709 | issue_ids=None, is_private=None, default_col_spec=None, ts=None): |
| 710 | # type: (MonorailConnection, string, string, string, Collection[int], |
| 711 | # Collection[int], Optional[Collection[int]], Optional[Boolean], |
| 712 | # Optional[string], Optional[int]) -> features_pb2.Hotlist |
| 713 | """Create and store a Hotlist with the given attributes. |
| 714 | |
| 715 | Args: |
| 716 | cnxn: connection to SQL database. |
| 717 | name: a valid hotlist name. |
| 718 | summary: one-line explanation of the hotlist. |
| 719 | description: one-page explanation of the hotlist. |
| 720 | owner_ids: a list of user IDs for the hotlist owners. |
| 721 | editor_ids: a list of user IDs for the hotlist editors. |
| 722 | issue_ids: a list of issue IDs for the hotlist issues. |
| 723 | is_private: True if the hotlist can only be viewed by owners and editors. |
| 724 | default_col_spec: the default columns that show in list view. |
| 725 | ts: a timestamp for when this hotlist was created. |
| 726 | |
| 727 | Returns: |
| 728 | The newly created Hotlist PB, with hotlist_id set. |
| 729 | |
| 730 | Raises: |
| 731 | InputException: if the hotlist name is invalid. |
| 732 | HotlistAlreadyExists: if any of the owners already own a hotlist with |
| 733 | the same name. |
| 734 | UnownedHotlistException: if owner_ids is empty. |
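| | |
| | Example (illustrative names and IDs only): |
| | hotlist = features_service.CreateHotlist( |
| | cnxn, 'release-blockers', 'Blocking issues', |
| | 'Issues blocking the current release', owner_ids=[111], |
| | editor_ids=[222], issue_ids=[78901]) |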
| 735 | """ |
| 736 | # TODO(crbug.com/monorail/7677): These checks should be done in the |
| 737 | # business layer. |
| 738 | # Remove when calls from non-business layer code are removed. |
| 739 | if not owner_ids: # Should never happen. |
| 740 | logging.error('Attempt to create unowned Hotlist: name:%r', name) |
| 741 | raise UnownedHotlistException() |
| 742 | if not framework_bizobj.IsValidHotlistName(name): |
| 743 | raise exceptions.InputException( |
| 744 | '%s is not a valid name for a Hotlist' % name) |
| 745 | if self.LookupHotlistIDs(cnxn, [name], owner_ids): |
| 746 | raise HotlistAlreadyExists() |
| 747 | # TODO(crbug.com/monorail/7677): We are not setting a |
| 748 | # default default_col_spec in v3. |
| 749 | if default_col_spec is None: |
| 750 | default_col_spec = features_constants.DEFAULT_COL_SPEC |
| 751 | |
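| | # Hotlist items are initially ranked 0, 100, 200, ... so that issues can |
| | # later be inserted between existing items without renumbering them all. |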
| 752 | hotlist_item_fields = [ |
| 753 | (issue_id, rank*100, owner_ids[0], ts, '') for |
| 754 | rank, issue_id in enumerate(issue_ids or [])] |
| 755 | hotlist = features_pb2.MakeHotlist( |
| 756 | name, hotlist_item_fields=hotlist_item_fields, summary=summary, |
| 757 | description=description, is_private=is_private, owner_ids=owner_ids, |
| 758 | editor_ids=editor_ids, default_col_spec=default_col_spec) |
| 759 | hotlist.hotlist_id = self._InsertHotlist(cnxn, hotlist) |
| 760 | return hotlist |
| 761 | |
| 762 | def UpdateHotlist( |
| 763 | self, cnxn, hotlist_id, name=None, summary=None, description=None, |
| 764 | is_private=None, default_col_spec=None, owner_id=None, |
| 765 | add_editor_ids=None): |
| 766 | """Update the DB with the given hotlist information.""" |
| 767 | # Note: If something is None, it does not get changed to None, |
| 768 | # it just does not get updated. |
| 769 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 770 | if not hotlist: |
| 771 | raise NoSuchHotlistException() |
| 772 | |
| 773 | delta = {} |
| 774 | if name is not None: |
| 775 | delta['name'] = name |
| 776 | if summary is not None: |
| 777 | delta['summary'] = summary |
| 778 | if description is not None: |
| 779 | delta['description'] = description |
| 780 | if is_private is not None: |
| 781 | delta['is_private'] = is_private |
| 782 | if default_col_spec is not None: |
| 783 | delta['default_col_spec'] = default_col_spec |
| 784 | |
| 785 | self.hotlist_tbl.Update(cnxn, delta, id=hotlist_id, commit=False) |
| 786 | insert_rows = [] |
| 787 | if owner_id is not None: |
| 788 | insert_rows.append((hotlist_id, owner_id, 'owner')) |
| 789 | self.hotlist2user_tbl.Delete( |
| 790 | cnxn, hotlist_id=hotlist_id, role='owner', commit=False) |
| 791 | if add_editor_ids: |
| 792 | insert_rows.extend( |
| 793 | [(hotlist_id, user_id, 'editor') for user_id in add_editor_ids]) |
| 794 | if insert_rows: |
| 795 | self.hotlist2user_tbl.InsertRows( |
| 796 | cnxn, HOTLIST2USER_COLS, insert_rows, commit=False) |
| 797 | |
| 798 | cnxn.Commit() |
| 799 | |
| 800 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 801 | if not hotlist.owner_ids: # Should never happen. |
| 802 | logging.warn('Modifying unowned Hotlist: id:%r, name:%r', |
| 803 | hotlist_id, hotlist.name) |
| 804 | elif hotlist.name: |
| 805 | self.hotlist_id_2lc.InvalidateKeys( |
| 806 | cnxn, [(hotlist.name.lower(), owner_id) for |
| 807 | owner_id in hotlist.owner_ids]) |
| 808 | |
| 809 | # Update the hotlist PB in RAM |
| 810 | if name is not None: |
| 811 | hotlist.name = name |
| 812 | if summary is not None: |
| 813 | hotlist.summary = summary |
| 814 | if description is not None: |
| 815 | hotlist.description = description |
| 816 | if is_private is not None: |
| 817 | hotlist.is_private = is_private |
| 818 | if default_col_spec is not None: |
| 819 | hotlist.default_col_spec = default_col_spec |
| 820 | if owner_id is not None: |
| 821 | hotlist.owner_ids = [owner_id] |
| 822 | if add_editor_ids: |
| 823 | hotlist.editor_ids.extend(add_editor_ids) |
| 824 | |
| 825 | def RemoveHotlistEditors(self, cnxn, hotlist_id, remove_editor_ids): |
| 826 | # type: (MonorailConnection, int, Collection[int]) -> None |
| 827 | """Remove given editors from the specified hotlist. |
| 828 | |
| 829 | Args: |
| 830 | cnxn: MonorailConnection object. |
| 831 | hotlist_id: int ID of the Hotlist we want to update. |
| 832 | remove_editor_ids: collection of existing hotlist editor User IDs |
| 833 | that we want to remove from the hotlist. |
| 834 | |
| 835 | Raises: |
| 836 | NoSuchHotlistException: if the hotlist is not found. |
| 837 | InputException: if there are no editors to remove. |
| 838 | """ |
| 839 | if not remove_editor_ids: |
| 840 | raise exceptions.InputException |
| 841 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 842 | if not hotlist: |
| 843 | raise NoSuchHotlistException() |
| 844 | |
| 845 | self.hotlist2user_tbl.Delete( |
| 846 | cnxn, hotlist_id=hotlist_id, user_id=remove_editor_ids) |
| 847 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 848 | |
| 849 | # Update in-memory data |
| 850 | for remove_id in remove_editor_ids: |
| 851 | hotlist.editor_ids.remove(remove_id) |
| 852 | |
| 853 | def UpdateHotlistIssues( |
| 854 | self, |
| 855 | cnxn, # type: sql.MonorailConnection |
| 856 | hotlist_id, # type: int |
| 857 | updated_items, # type: Collection[features_pb2.HotlistItem] |
| 858 | remove_issue_ids, # type: Collection[int] |
| 859 | issue_svc, # type: issue_svc.IssueService |
| 860 | chart_svc, # type: chart_svc.ChartService |
| 861 | commit=True # type: Optional[bool] |
| 862 | ): |
| 863 | # type: (...) -> None |
| 864 | """Update the Issues in a Hotlist. |
| 865 | This method removes the given remove_issue_ids from a Hotlist then |
| 866 | updates or adds the HotlistItems found in updated_items. HotlistItems |
| 867 | in updated_items may exist in the hotlist and just need to be updated |
| 868 | or they may be new items that should be added to the Hotlist. |
| 869 | |
| 870 | Args: |
| 871 | cnxn: MonorailConnection object. |
| 872 | hotlist_id: int ID of the Hotlist to update. |
| 873 | updated_items: Collection of HotlistItems that either already exist in |
| 874 | the hotlist and need to be updated or need to be added to the hotlist. |
| 875 | remove_issue_ids: Collection of Issue IDs that should be removed from the |
| 876 | hotlist. |
| 877 | issue_svc: IssueService object. |
| 878 | chart_svc: ChartService object. |
| 879 | |
| 880 | Raises: |
| 881 | NoSuchHotlistException if a hotlist with the given ID is not found. |
| 882 | InputException if no changes were given. |
| 883 | """ |
| 884 | if not updated_items and not remove_issue_ids: |
| 885 | raise exceptions.InputException('No changes to make') |
| 886 | |
| 887 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 888 | if not hotlist: |
| 889 | raise NoSuchHotlistException() |
| 890 | |
| 891 | # Used to hold the updated Hotlist.items to use when updating |
| 892 | # the in-memory hotlist. |
| 893 | all_hotlist_items = list(hotlist.items) |
| 894 | |
| 895 | # Used to hold ids of issues affected by this change for storing |
| 896 | # Issue Snapshots. |
| 897 | affected_issue_ids = set() |
| 898 | |
| 899 | if remove_issue_ids: |
| 900 | affected_issue_ids.update(remove_issue_ids) |
| 901 | self.hotlist2issue_tbl.Delete( |
| 902 | cnxn, hotlist_id=hotlist_id, issue_id=remove_issue_ids, commit=False) |
| 903 | all_hotlist_items = [item for item in all_hotlist_items |
| 904 | if item.issue_id not in remove_issue_ids] |
| 905 | |
| 906 | if updated_items: |
| 907 | updated_issue_ids = [item.issue_id for item in updated_items] |
| 908 | affected_issue_ids.update(updated_issue_ids) |
| 909 | self.hotlist2issue_tbl.Delete( |
| 910 | cnxn, hotlist_id=hotlist_id, issue_id=updated_issue_ids, commit=False) |
| 911 | insert_rows = [] |
| 912 | for item in updated_items: |
| 913 | insert_rows.append( |
| 914 | ( |
| 915 | hotlist_id, item.issue_id, item.rank, item.adder_id, |
| 916 | item.date_added, item.note)) |
| 917 | self.hotlist2issue_tbl.InsertRows( |
| 918 | cnxn, cols=HOTLIST2ISSUE_COLS, row_values=insert_rows, commit=False) |
| 919 | all_hotlist_items = [ |
| 920 | item for item in all_hotlist_items |
| 921 | if item.issue_id not in updated_issue_ids] |
| 922 | all_hotlist_items.extend(updated_items) |
| 923 | |
| 924 | if commit: |
| 925 | cnxn.Commit() |
| 926 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 927 | |
| 928 | # Update in-memory hotlist items. |
| 929 | hotlist.items = sorted(all_hotlist_items, key=lambda item: item.rank) |
| 930 | |
| 931 | issues = issue_svc.GetIssues(cnxn, list(affected_issue_ids)) |
| 932 | chart_svc.StoreIssueSnapshots(cnxn, issues, commit=commit) |
| 933 | |
| 934 | # TODO(crbug/monorail/7104): AddIssuesToHotlists and RemoveIssuesFromHotlists |
| 935 | # both call UpdateHotlistItems to add/remove issues from a hotlist. |
| 936 | # UpdateHotlistItemsFields is called by methods for reranking existing issues |
| 937 | # and updating HotlistItem notes. |
| 938 | # (1) We are removing notes from HotlistItems. crbug/monorail/#### |
| 939 | # (2) our v3 AddHotlistItems will allow for inserting new issues to |
| 940 | # non-last ranks of a hotlist. So there could be some shared code |
| 941 | # for the reranking path and the adding issues path. |
| 942 | # UpdateHotlistIssues will be handling adding, removing, and reranking issues. |
| 943 | # {Add|Remove}IssueToHotlists, UpdateHotlistItems, and UpdateHotlistItemsFields |
| 944 | # should be removed once all methods are updated to call UpdateHotlistIssues. |
| 945 | |
| 946 | def AddIssueToHotlists(self, cnxn, hotlist_ids, issue_tuple, issue_svc, |
| 947 | chart_svc, commit=True): |
| 948 | """Add a single issue, specified in the issue_tuple, to the given hotlists. |
| 949 | |
| 950 | Args: |
| 951 | cnxn: connection to SQL database. |
| 952 | hotlist_ids: a list of hotlist_ids to add the issues to. |
| 953 | issue_tuple: (issue_id, user_id, ts, note) of the issue to be added. |
| 954 | issue_svc: an instance of IssueService. |
| 955 | chart_svc: an instance of ChartService. |
| 956 | """ |
| 957 | self.AddIssuesToHotlists(cnxn, hotlist_ids, [issue_tuple], issue_svc, |
| 958 | chart_svc, commit=commit) |
| 959 | |
| 960 | def AddIssuesToHotlists(self, cnxn, hotlist_ids, added_tuples, issue_svc, |
| 961 | chart_svc, commit=True): |
| 962 | """Add the issues given in the added_tuples list to the given hotlists. |
| 963 | |
| 964 | Args: |
| 965 | cnxn: connection to SQL database. |
| 966 | hotlist_ids: a list of hotlist_ids to add the issues to. |
| 967 | added_tuples: a list of (issue_id, user_id, ts, note) |
| 968 | for issues to be added. |
| 969 | issue_svc: an instance of IssueService. |
| 970 | chart_svc: an instance of ChartService. |
| 971 | """ |
| 972 | for hotlist_id in hotlist_ids: |
| 973 | self.UpdateHotlistItems(cnxn, hotlist_id, [], added_tuples, commit=commit) |
| 974 | |
| 975 | issues = issue_svc.GetIssues(cnxn, |
| 976 | [added_tuple[0] for added_tuple in added_tuples]) |
| 977 | chart_svc.StoreIssueSnapshots(cnxn, issues, commit=commit) |
| 978 | |
| 979 | def RemoveIssuesFromHotlists(self, cnxn, hotlist_ids, issue_ids, issue_svc, |
| 980 | chart_svc, commit=True): |
| 981 | """Remove the issues given in issue_ids from the given hotlists. |
| 982 | |
| 983 | Args: |
| 984 | cnxn: connection to SQL database. |
| 985 | hotlist_ids: a list of hotlist ids to remove the issues from. |
| 986 | issue_ids: a list of issue_ids to be removed. |
| 987 | issue_svc: an instance of IssueService. |
| 988 | chart_svc: an instance of ChartService. |
| 989 | """ |
| 990 | for hotlist_id in hotlist_ids: |
| 991 | self.UpdateHotlistItems(cnxn, hotlist_id, issue_ids, [], commit=commit) |
| 992 | |
| 993 | issues = issue_svc.GetIssues(cnxn, issue_ids) |
| 994 | chart_svc.StoreIssueSnapshots(cnxn, issues, commit=commit) |
| 995 | |
| 996 | def UpdateHotlistItems( |
| 997 | self, cnxn, hotlist_id, remove, added_tuples, commit=True): |
| 998 | """Updates a hotlist's list of hotlistissues. |
| 999 | |
| 1000 | Args: |
| 1001 | cnxn: connection to SQL database. |
| 1002 | hotlist_id: the ID of the hotlist to update. |
| 1003 | remove: a list of issue_ids to be removed. |
| 1004 | added_tuples: a list of (issue_id, user_id, ts, note) |
| 1005 | for issues to be added. |
| 1006 | """ |
| 1007 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 1008 | if not hotlist: |
| 1009 | raise NoSuchHotlistException() |
| 1010 | |
| 1011 | # Add new HotlistItems, ignoring tuples whose issue_id is already in the |
| 1012 | # hotlist's items. |
| 1013 | current_issues_ids = { |
| 1014 | item.issue_id for item in hotlist.items} |
| 1015 | |
| 1016 | self.hotlist2issue_tbl.Delete( |
| 1017 | cnxn, hotlist_id=hotlist_id, |
| 1018 | issue_id=[remove_id for remove_id in remove |
| 1019 | if remove_id in current_issues_ids], |
| 1020 | commit=False) |
| 1021 | if hotlist.items: |
| 1022 | items_sorted = sorted(hotlist.items, key=lambda item: item.rank) |
| 1023 | rank_base = items_sorted[-1].rank + 10 |
| 1024 | else: |
| 1025 | rank_base = 1 |
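| | # For example (illustrative ranks), if the highest existing rank is 30, |
| | # rank_base is 40 and the newly added issues get ranks 40, 50, 60, ... |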
| 1026 | insert_rows = [ |
| 1027 | (hotlist_id, issue_id, rank*10 + rank_base, user_id, ts, note) |
| 1028 | for (rank, (issue_id, user_id, ts, note)) in enumerate(added_tuples) |
| 1029 | if issue_id not in current_issues_ids] |
| 1030 | self.hotlist2issue_tbl.InsertRows( |
| 1031 | cnxn, cols=HOTLIST2ISSUE_COLS, row_values=insert_rows, commit=commit) |
| 1032 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 1033 | |
| 1034 | # removing an issue that was never in the hotlist would not cause any |
| 1035 | # problems. |
| 1036 | items = [ |
| 1037 | item for item in hotlist.items if |
| 1038 | item.issue_id not in remove] |
| 1039 | |
| 1040 | new_hotlist_items = [ |
| 1041 | features_pb2.MakeHotlistItem(issue_id, rank, user_id, ts, note) |
| 1042 | for (_hid, issue_id, rank, user_id, ts, note) in insert_rows] |
| 1043 | items.extend(new_hotlist_items) |
| 1044 | hotlist.items = items |
| 1045 | |
| 1046 | def UpdateHotlistItemsFields( |
| 1047 | self, cnxn, hotlist_id, new_ranks=None, new_notes=None, commit=True): |
| 1048 | """Updates rankings or notes of hotlistissues. |
| 1049 | |
| 1050 | Args: |
| 1051 | cnxn: connection to SQL database. |
| 1052 | hotlist_id: the ID of the hotlist to update. |
| 1053 | new_ranks: This should be a dictionary of {issue_id: rank}. |
| 1054 | new_notes: This should be a dictionary of {issue_id: note}. |
| 1055 | commit: set to False to skip the DB commit and do it in the caller. |
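| | |
| | For example (illustrative IDs), new_ranks={78901: 10, 78902: 20} gives |
| | issue 78901 a lower rank than issue 78902, so it is ranked ahead of it |
| | in the hotlist. |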
| 1056 | """ |
| 1057 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 1058 | if not hotlist: |
| 1059 | raise NoSuchHotlistException() |
| 1060 | if new_ranks is None: |
| 1061 | new_ranks = {} |
| 1062 | if new_notes is None: |
| 1063 | new_notes = {} |
| 1064 | issue_ids = [] |
| 1065 | insert_rows = [] |
| 1066 | |
| 1067 | # Update the hotlist PB in RAM |
| 1068 | for hotlist_item in hotlist.items: |
| 1069 | item_updated = False |
| 1070 | if hotlist_item.issue_id in new_ranks: |
| 1071 | # Update rank before adding it to insert_rows |
| 1072 | hotlist_item.rank = new_ranks[hotlist_item.issue_id] |
| 1073 | item_updated = True |
| 1074 | if hotlist_item.issue_id in new_notes: |
| 1075 | # Update note before adding it to insert_rows |
| 1076 | hotlist_item.note = new_notes[hotlist_item.issue_id] |
| 1077 | item_updated = True |
| 1078 | if item_updated: |
| 1079 | issue_ids.append(hotlist_item.issue_id) |
| 1080 | insert_rows.append(( |
| 1081 | hotlist_id, hotlist_item.issue_id, hotlist_item.rank, |
| 1082 | hotlist_item.adder_id, hotlist_item.date_added, hotlist_item.note)) |
| 1083 | hotlist.items = sorted(hotlist.items, key=lambda item: item.rank) |
| 1084 | self.hotlist2issue_tbl.Delete( |
| 1085 | cnxn, hotlist_id=hotlist_id, issue_id=issue_ids, commit=False) |
| 1086 | |
| 1087 | self.hotlist2issue_tbl.InsertRows( |
| 1088 | cnxn, cols=HOTLIST2ISSUE_COLS, row_values=insert_rows, commit=commit) |
| 1089 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 1090 | |
| 1091 | def _InsertHotlist(self, cnxn, hotlist): |
| 1092 | """Insert the given hotlist into the database.""" |
| 1093 | hotlist_id = self.hotlist_tbl.InsertRow( |
| 1094 | cnxn, name=hotlist.name, summary=hotlist.summary, |
| 1095 | description=hotlist.description, is_private=hotlist.is_private, |
| 1096 | default_col_spec=hotlist.default_col_spec) |
| 1097 | logging.info('stored hotlist was given id %d', hotlist_id) |
| 1098 | |
| 1099 | self.hotlist2issue_tbl.InsertRows( |
| 1100 | cnxn, HOTLIST2ISSUE_COLS, |
| 1101 | [(hotlist_id, issue.issue_id, issue.rank, |
| 1102 | issue.adder_id, issue.date_added, issue.note) |
| 1103 | for issue in hotlist.items], |
| 1104 | commit=False) |
| 1105 | self.hotlist2user_tbl.InsertRows( |
| 1106 | cnxn, HOTLIST2USER_COLS, |
| 1107 | [(hotlist_id, user_id, 'owner') |
| 1108 | for user_id in hotlist.owner_ids] + |
| 1109 | [(hotlist_id, user_id, 'editor') |
| 1110 | for user_id in hotlist.editor_ids] + |
| 1111 | [(hotlist_id, user_id, 'follower') |
| 1112 | for user_id in hotlist.follower_ids]) |
| 1113 | |
| 1114 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.owner_ids) |
| 1115 | |
| 1116 | return hotlist_id |
| 1117 | |
| 1118 | def TransferHotlistOwnership( |
| 1119 | self, cnxn, hotlist, new_owner_id, remain_editor, commit=True): |
| 1120 | """Transfers ownership of a hotlist to a new owner.""" |
| 1121 | new_editor_ids = hotlist.editor_ids |
| 1122 | if remain_editor: |
| 1123 | new_editor_ids.extend(hotlist.owner_ids) |
| 1124 | if new_owner_id in new_editor_ids: |
| 1125 | new_editor_ids.remove(new_owner_id) |
| 1126 | new_follower_ids = hotlist.follower_ids |
| 1127 | if new_owner_id in new_follower_ids: |
| 1128 | new_follower_ids.remove(new_owner_id) |
| 1129 | self.UpdateHotlistRoles( |
| 1130 | cnxn, hotlist.hotlist_id, [new_owner_id], new_editor_ids, |
| 1131 | new_follower_ids, commit=commit) |
| 1132 | |
| 1133 | ### Lookup hotlist IDs |
| 1134 | |
| 1135 | def LookupHotlistIDs(self, cnxn, hotlist_names, owner_ids): |
| 1136 | """Return a dict of (name, owner_id) mapped to hotlist_id for all hotlists |
| 1137 | with one of the given names and any of the given owners. Hotlists that |
| 1138 | match multiple owners will be in the dict multiple times.""" |
| 1139 | id_dict, _missed_keys = self.hotlist_id_2lc.GetAll( |
| 1140 | cnxn, [(name.lower(), owner_id) |
| 1141 | for name in hotlist_names for owner_id in owner_ids]) |
| 1142 | return id_dict |
| 1143 | |
| 1144 | def LookupUserHotlists(self, cnxn, user_ids): |
| 1145 | """Return a dict of {user_id: [hotlist_id,...]} for all user_ids.""" |
| 1146 | id_dict, missed_ids = self.hotlist_user_to_ids.GetAll(user_ids) |
| 1147 | if missed_ids: |
| 1148 | retrieved_dict = {user_id: [] for user_id in missed_ids} |
| 1149 | id_rows = self.hotlist2user_tbl.Select( |
| 1150 | cnxn, cols=['user_id', 'hotlist_id'], user_id=user_ids, |
| 1151 | left_joins=[('Hotlist ON hotlist_id = id', [])], |
| 1152 | where=[('Hotlist.is_deleted = %s', [False])]) |
| 1153 | for (user_id, hotlist_id) in id_rows: |
| 1154 | retrieved_dict[user_id].append(hotlist_id) |
| 1155 | self.hotlist_user_to_ids.CacheAll(retrieved_dict) |
| 1156 | id_dict.update(retrieved_dict) |
| 1157 | |
| 1158 | return id_dict |
| 1159 | |
| 1160 | def LookupIssueHotlists(self, cnxn, issue_ids): |
| 1161 | """Return a dict of {issue_id: [hotlist_id,...]} for all issue_ids.""" |
| 1162 | # TODO(jojwang): create hotlist_issue_to_ids cache |
| 1163 | retrieved_dict = {issue_id: [] for issue_id in issue_ids} |
| 1164 | id_rows = self.hotlist2issue_tbl.Select( |
| 1165 | cnxn, cols=['hotlist_id', 'issue_id'], issue_id=issue_ids, |
| 1166 | left_joins=[('Hotlist ON hotlist_id = id', [])], |
| 1167 | where=[('Hotlist.is_deleted = %s', [False])]) |
| 1168 | for hotlist_id, issue_id in id_rows: |
| 1169 | retrieved_dict[issue_id].append(hotlist_id) |
| 1170 | return retrieved_dict |
| 1171 | |
| 1172 | def GetProjectIDsFromHotlist(self, cnxn, hotlist_id): |
| 1173 | project_id_rows = self.hotlist2issue_tbl.Select(cnxn, |
| 1174 | cols=['Issue.project_id'], hotlist_id=hotlist_id, distinct=True, |
| 1175 | left_joins=[('Issue ON issue_id = id', [])]) |
| 1176 | return [row[0] for row in project_id_rows] |
| 1177 | |
| 1178 | ### Get hotlists |
| 1179 | def GetHotlists(self, cnxn, hotlist_ids, use_cache=True): |
| 1180 | """Returns dict of {hotlist_id: hotlist PB}.""" |
| 1181 | hotlists_dict, missed_ids = self.hotlist_2lc.GetAll( |
| 1182 | cnxn, hotlist_ids, use_cache=use_cache) |
| 1183 | |
| 1184 | if missed_ids: |
| 1185 | raise NoSuchHotlistException() |
| 1186 | |
| 1187 | return hotlists_dict |
| 1188 | |
| 1189 | def GetHotlistsByUserID(self, cnxn, user_id, use_cache=True): |
| 1190 | """Get a list of hotlist PBs for a given user.""" |
| 1191 | hotlist_id_dict = self.LookupUserHotlists(cnxn, [user_id]) |
| 1192 | hotlists = self.GetHotlists( |
| 1193 | cnxn, hotlist_id_dict.get(user_id, []), use_cache=use_cache) |
| 1194 | return list(hotlists.values()) |
| 1195 | |
| 1196 | def GetHotlistsByIssueID(self, cnxn, issue_id, use_cache=True): |
| 1197 | """Get a list of hotlist PBs for a given issue.""" |
| 1198 | hotlist_id_dict = self.LookupIssueHotlists(cnxn, [issue_id]) |
| 1199 | hotlists = self.GetHotlists( |
| 1200 | cnxn, hotlist_id_dict.get(issue_id, []), use_cache=use_cache) |
| 1201 | return list(hotlists.values()) |
| 1202 | |
| 1203 | def GetHotlist(self, cnxn, hotlist_id, use_cache=True): |
| 1204 | """Returns hotlist PB.""" |
| 1205 | hotlist_dict = self.GetHotlists(cnxn, [hotlist_id], use_cache=use_cache) |
| 1206 | return hotlist_dict[hotlist_id] |
| 1207 | |
| 1208 | def GetHotlistsByID(self, cnxn, hotlist_ids, use_cache=True): |
| 1209 | """Load all the Hotlist PBs for the given hotlists. |
| 1210 | |
| 1211 | Args: |
| 1212 | cnxn: connection to SQL database. |
| 1213 | hotlist_ids: list of hotlist ids. |
| 1214 | use_cache: specify False to force database query. |
| 1215 | |
| 1216 | Returns: |
| 1217 | A dict mapping ids to the corresponding Hotlist protocol buffers and |
| 1218 | a list of any hotlist_ids that were not found. |
| 1219 | """ |
| 1220 | hotlists_dict, missed_ids = self.hotlist_2lc.GetAll( |
| 1221 | cnxn, hotlist_ids, use_cache=use_cache) |
| 1222 | return hotlists_dict, missed_ids |
| 1223 | |
| 1224 | def GetHotlistByID(self, cnxn, hotlist_id, use_cache=True): |
| 1225 | """Load the specified hotlist from the database, None if does not exist.""" |
| 1226 | hotlist_dict, _ = self.GetHotlistsByID( |
| 1227 | cnxn, [hotlist_id], use_cache=use_cache) |
| 1228 | return hotlist_dict.get(hotlist_id) |
| 1229 | |
| 1230 | def UpdateHotlistRoles( |
| 1231 | self, cnxn, hotlist_id, owner_ids, editor_ids, follower_ids, commit=True): |
| 1232 | """"Store the hotlist's roles in the DB.""" |
| 1233 | # This will be a newly contructed object, not from the cache and not |
| 1234 | # shared with any other thread. |
| 1235 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 1236 | if not hotlist: |
| 1237 | raise NoSuchHotlistException() |
| 1238 | |
| 1239 | self.hotlist2user_tbl.Delete( |
| 1240 | cnxn, hotlist_id=hotlist_id, commit=False) |
| 1241 | |
| 1242 | insert_rows = [(hotlist_id, user_id, 'owner') for user_id in owner_ids] |
| 1243 | insert_rows.extend( |
| 1244 | [(hotlist_id, user_id, 'editor') for user_id in editor_ids]) |
| 1245 | insert_rows.extend( |
| 1246 | [(hotlist_id, user_id, 'follower') for user_id in follower_ids]) |
| 1247 | self.hotlist2user_tbl.InsertRows( |
| 1248 | cnxn, HOTLIST2USER_COLS, insert_rows, commit=False) |
| 1249 | |
| 1250 | if commit: |
| 1251 | cnxn.Commit() |
| 1252 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 1253 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.owner_ids) |
| 1254 | hotlist.owner_ids = owner_ids |
| 1255 | hotlist.editor_ids = editor_ids |
| 1256 | hotlist.follower_ids = follower_ids |
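| | # Illustrative usage sketch (an assumption, not original code); with the |
| | # default commit=True the write is committed and the relevant caches are |
| | # invalidated here: |
| | # |
| | #   features_service.UpdateHotlistRoles( |
| | #       cnxn, 111, owner_ids=[1], editor_ids=[2, 3], follower_ids=[]) |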
| 1257 | |
| 1258 | def DeleteHotlist(self, cnxn, hotlist_id, commit=True): |
| | """Soft-delete the given hotlist and invalidate the affected caches.""" |
| 1259 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 1260 | if not hotlist: |
| 1261 | raise NoSuchHotlistException() |
| 1262 | |
| 1263 | # Fetch all associated project IDs in order to invalidate their cache. |
| 1264 | project_ids = self.GetProjectIDsFromHotlist(cnxn, hotlist_id) |
| 1265 | |
| 1266 | delta = {'is_deleted': True} |
| 1267 | self.hotlist_tbl.Update(cnxn, delta, id=hotlist_id, commit=commit) |
| 1268 | |
| 1269 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 1270 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.owner_ids) |
| 1271 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.editor_ids) |
| 1272 | if not hotlist.owner_ids: # Should never happen. |
| 1273 | logging.warning('Soft-deleting unowned Hotlist: id:%r, name:%r', |
| 1274 | hotlist_id, hotlist.name) |
| 1275 | elif hotlist.name: |
| 1276 | self.hotlist_id_2lc.InvalidateKeys( |
| 1277 | cnxn, [(hotlist.name.lower(), owner_id) for |
| 1278 | owner_id in hotlist.owner_ids]) |
| 1279 | |
| 1280 | for project_id in project_ids: |
| 1281 | self.config_service.InvalidateMemcacheForEntireProject(project_id) |
| 1282 | |
| 1283 | def ExpungeHotlists( |
| 1284 | self, cnxn, hotlist_ids, star_svc, user_svc, chart_svc, commit=True): |
| 1285 | """Wipes the given hotlists from the DB tables. |
| 1286 | |
| 1287 | This method will only do cache invalidation if commit is set to True. |
| 1288 | |
| 1289 | Args: |
| 1290 | cnxn: connection to SQL database. |
| 1291 | hotlist_ids: the IDs of the hotlists to expunge. |
| 1292 | star_svc: an instance of a HotlistStarService. |
| 1293 | user_svc: an instance of a UserService. |
| 1294 | chart_svc: an instance of a ChartService. |
| 1295 | commit: set to False to skip the DB commit and do it in the caller. |
| 1296 | """ |
| 1297 | |
| 1298 | hotlists_by_id = self.GetHotlists(cnxn, hotlist_ids) |
| 1299 | |
| 1300 | for hotlist_id in hotlist_ids: |
| 1301 | star_svc.ExpungeStars(cnxn, hotlist_id, commit=commit) |
| 1302 | chart_svc.ExpungeHotlistsFromIssueSnapshots( |
| 1303 | cnxn, hotlist_ids, commit=commit) |
| 1304 | user_svc.ExpungeHotlistsFromHistory(cnxn, hotlist_ids, commit=commit) |
| 1305 | self.hotlist2user_tbl.Delete(cnxn, hotlist_id=hotlist_ids, commit=commit) |
| 1306 | self.hotlist2issue_tbl.Delete(cnxn, hotlist_id=hotlist_ids, commit=commit) |
| 1307 | self.hotlist_tbl.Delete(cnxn, id=hotlist_ids, commit=commit) |
| 1308 | |
| 1309 | # Invalidate cache for deleted hotlists. |
| 1310 | self.hotlist_2lc.InvalidateKeys(cnxn, hotlist_ids) |
| 1311 | users_to_invalidate = set() |
| 1312 | for hotlist in hotlists_by_id.values(): |
| 1313 | users_to_invalidate.update( |
| 1314 | hotlist.owner_ids + hotlist.editor_ids + hotlist.follower_ids) |
| | # hotlist_id_2lc keys use lowercased hotlist names (see DeleteHotlist). |
| 1315 | self.hotlist_id_2lc.InvalidateKeys( |
| 1316 | cnxn, [(hotlist.name.lower(), owner_id) for owner_id in hotlist.owner_ids]) |
| 1317 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, list(users_to_invalidate)) |
| 1318 | hotlist_project_ids = set() |
| 1319 | for hotlist_id in hotlist_ids: |
| 1320 | hotlist_project_ids.update(self.GetProjectIDsFromHotlist( |
| 1321 | cnxn, hotlist_id)) |
| 1322 | for project_id in hotlist_project_ids: |
| 1323 | self.config_service.InvalidateMemcacheForEntireProject(project_id) |
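| | # Illustrative usage sketch (an assumption, not original code); the star, |
| | # user, and chart service instances are assumed to be supplied by the |
| | # caller, and commit=True performs the commit and cache invalidation here: |
| | # |
| | #   features_service.ExpungeHotlists( |
| | #       cnxn, [111, 222], hotlist_star_svc, user_svc, chart_svc, commit=True) |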
| 1324 | |
| 1325 | def ExpungeUsersInHotlists( |
| 1326 | self, cnxn, user_ids, star_svc, user_svc, chart_svc): |
| 1327 | """Wipes the given users and any hotlists they owned from the |
| 1328 | hotlists system. |
| 1329 | |
| 1330 | This method will not commit the operation. This method will not make |
| 1331 | changes to in-memory data. |
| 1332 | """ |
| 1333 | # Transfer hotlist ownership to editors, if possible. |
| 1334 | hotlist_ids_by_user_id = self.LookupUserHotlists(cnxn, user_ids) |
| 1335 | hotlist_ids = [hotlist_id for hotlist_ids in hotlist_ids_by_user_id.values() |
| 1336 | for hotlist_id in hotlist_ids] |
| 1337 | hotlists_by_id, missed = self.GetHotlistsByID( |
| 1338 | cnxn, list(set(hotlist_ids)), use_cache=False) |
| 1339 | logging.info('Missed hotlists: %s', missed) |
| 1340 | |
| 1341 | hotlists_to_delete = [] |
| 1342 | for hotlist_id, hotlist in hotlists_by_id.items(): |
| 1343 | # One of the users to be deleted is an owner of this hotlist. |
| 1344 | if not set(hotlist.owner_ids).isdisjoint(user_ids): |
| 1345 | hotlists_to_delete.append(hotlist_id) |
| 1346 | candidate_new_owners = [user_id for user_id in hotlist.editor_ids |
| 1347 | if user_id not in user_ids] |
| 1348 | for candidate_id in candidate_new_owners: |
| 1349 | if not self.LookupHotlistIDs(cnxn, [hotlist.name], [candidate_id]): |
| 1350 | self.TransferHotlistOwnership( |
| 1351 | cnxn, hotlist, candidate_id, False, commit=False) |
| 1352 | # Hotlist transferred successfully. No need to delete it. |
| 1353 | hotlists_to_delete.remove(hotlist_id) |
| 1354 | break |
| 1355 | |
| 1356 | # Remove the users from all hotlist roles and anonymize their additions. |
| 1357 | self.hotlist2user_tbl.Delete(cnxn, user_id=user_ids, commit=False) |
| 1358 | self.hotlist2issue_tbl.Update( |
| 1359 | cnxn, {'adder_id': framework_constants.DELETED_USER_ID}, |
| 1360 | adder_id=user_ids, commit=False) |
| 1361 | user_svc.ExpungeUsersHotlistsHistory(cnxn, user_ids, commit=False) |
| 1362 | # Expunge the hotlists whose ownership could not be transferred. |
| 1363 | if hotlists_to_delete: |
| 1364 | self.ExpungeHotlists( |
| 1365 | cnxn, hotlists_to_delete, star_svc, user_svc, chart_svc, commit=False) |
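| | # Illustrative call sequence (an assumption, not original code); the caller |
| | # is expected to commit and to invalidate in-memory data afterwards: |
| | # |
| | #   features_service.ExpungeUsersInHotlists( |
| | #       cnxn, user_ids, hotlist_star_svc, user_svc, chart_svc) |
| | #   cnxn.Commit() |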
| 1366 | |
| 1367 | |
| 1368 | class HotlistAlreadyExists(Exception): |
| 1369 | """Tried to create a hotlist with the same name as another hotlist |
| 1370 | with the same owner.""" |
| 1371 | pass |
| 1372 | |
| 1373 | |
| 1374 | class NoSuchHotlistException(Exception): |
| 1375 | """The requested hotlist was not found.""" |
| 1376 | pass |
| 1377 | |
| 1378 | |
| 1379 | class UnownedHotlistException(Exception): |
| 1380 | """Tried to create a hotlist with no owner.""" |
| 1381 | pass |
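| | |
| | |
| | # The function below is an illustrative, hypothetical sketch (not part of the |
| | # original module) showing how the read and expunge APIs above compose. It is |
| | # never called anywhere; the connection and service instances are assumed to |
| | # be constructed and wired up by the caller. |
| | def _example_expunge_hotlist_flow( |
| |     cnxn, features_service, star_svc, user_svc, chart_svc, hotlist_id): |
| |   """Sketch: expunge one hotlist if it still exists, logging what happened.""" |
| |   try: |
| |     # use_cache=False so a stale cache entry cannot hide a prior deletion. |
| |     hotlist = features_service.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| |   except NoSuchHotlistException: |
| |     hotlist = None |
| |   if not hotlist: |
| |     logging.info('Hotlist %d does not exist; nothing to expunge.', hotlist_id) |
| |     return |
| |   logging.info( |
| |       'Expunging hotlist %r owned by %r', hotlist.name, hotlist.owner_ids) |
| |   features_service.ExpungeHotlists( |
| |       cnxn, [hotlist_id], star_svc, user_svc, chart_svc, commit=True) |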