# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A class that provides persistence for Monorail's additional features.

Business objects are described in tracker_pb2.py, features_pb2.py, and
tracker_bizobj.py.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

import collections
import logging
import re
import time

import settings

from features import features_constants
from features import filterrules_helpers
from framework import exceptions
from framework import framework_bizobj
from framework import framework_constants
from framework import sql
from mrproto import features_pb2
from services import caches
from services import config_svc
from tracker import tracker_bizobj
from tracker import tracker_constants

QUICKEDITHISTORY_TABLE_NAME = 'QuickEditHistory'
QUICKEDITMOSTRECENT_TABLE_NAME = 'QuickEditMostRecent'
SAVEDQUERY_TABLE_NAME = 'SavedQuery'
PROJECT2SAVEDQUERY_TABLE_NAME = 'Project2SavedQuery'
SAVEDQUERYEXECUTESINPROJECT_TABLE_NAME = 'SavedQueryExecutesInProject'
USER2SAVEDQUERY_TABLE_NAME = 'User2SavedQuery'
FILTERRULE_TABLE_NAME = 'FilterRule'
HOTLIST_TABLE_NAME = 'Hotlist'
HOTLIST2ISSUE_TABLE_NAME = 'Hotlist2Issue'
HOTLIST2USER_TABLE_NAME = 'Hotlist2User'


QUICKEDITHISTORY_COLS = [
    'user_id', 'project_id', 'slot_num', 'command', 'comment']
QUICKEDITMOSTRECENT_COLS = ['user_id', 'project_id', 'slot_num']
SAVEDQUERY_COLS = ['id', 'name', 'base_query_id', 'query']
PROJECT2SAVEDQUERY_COLS = ['project_id', 'rank', 'query_id']
SAVEDQUERYEXECUTESINPROJECT_COLS = ['query_id', 'project_id']
USER2SAVEDQUERY_COLS = ['user_id', 'rank', 'query_id', 'subscription_mode']
FILTERRULE_COLS = ['project_id', 'rank', 'predicate', 'consequence']
HOTLIST_COLS = [
    'id', 'name', 'summary', 'description', 'is_private', 'default_col_spec']
HOTLIST_ABBR_COLS = ['id', 'name', 'summary', 'is_private']
HOTLIST2ISSUE_COLS = [
    'hotlist_id', 'issue_id', 'rank', 'adder_id', 'added', 'note']
HOTLIST2USER_COLS = ['hotlist_id', 'user_id', 'role_name']


# Regex for parsing one action in the filter rule consequence storage syntax.
CONSEQUENCE_RE = re.compile(
    r'(default_status:(?P<default_status>[-.\w]+))|'
    r'(default_owner_id:(?P<default_owner_id>\d+))|'
    r'(add_cc_id:(?P<add_cc_id>\d+))|'
    r'(add_label:(?P<add_label>[-.\w]+))|'
    r'(add_notify:(?P<add_notify>[-.@\w]+))|'
    r'(warning:(?P<warning>.+))|'  # Warnings consume the rest of the string.
    r'(error:(?P<error>.+))'  # Errors consume the rest of the string.
    )
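
# Example (illustrative only; the rule values below are made up, not from any
# real project): a stored consequence string like
#     'default_status:Accepted add_cc_id:111 add_label:Hotlist-Triaged'
# yields one CONSEQUENCE_RE match per action, each with a single named group:
#
#   for m in CONSEQUENCE_RE.finditer(
#       'default_status:Accepted add_cc_id:111 add_label:Hotlist-Triaged'):
#     print(m.lastgroup, m.group(m.lastgroup))
#   # default_status Accepted
#   # add_cc_id 111
#   # add_label Hotlist-Triaged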


class HotlistTwoLevelCache(caches.AbstractTwoLevelCache):
  """Class to manage both RAM and memcache for Hotlist PBs."""

  def __init__(self, cachemanager, features_service):
    super(HotlistTwoLevelCache, self).__init__(
        cachemanager, 'hotlist', 'hotlist:', features_pb2.Hotlist)
    self.features_service = features_service

  def _DeserializeHotlists(
      self, hotlist_rows, issue_rows, role_rows):
    """Convert database rows into a dictionary of Hotlist PBs keyed by ID.

    Args:
      hotlist_rows: a list of hotlist rows from HOTLIST_TABLE_NAME.
      issue_rows: a list of issue rows from HOTLIST2ISSUE_TABLE_NAME,
        ordered by rank DESC, issue_id.
      role_rows: a list of role rows from HOTLIST2USER_TABLE_NAME.

    Returns:
      a dict mapping hotlist_id to hotlist PB"""
    hotlist_dict = {}

    for hotlist_row in hotlist_rows:
      (hotlist_id, hotlist_name, summary, description, is_private,
       default_col_spec) = hotlist_row
      hotlist = features_pb2.MakeHotlist(
          hotlist_name, hotlist_id=hotlist_id, summary=summary,
          description=description, is_private=bool(is_private),
          default_col_spec=default_col_spec)
      hotlist_dict[hotlist_id] = hotlist

    for (hotlist_id, issue_id, rank, adder_id, added, note) in issue_rows:
      hotlist = hotlist_dict.get(hotlist_id)
      if hotlist:
        hotlist.items.append(
            features_pb2.MakeHotlistItem(issue_id=issue_id, rank=rank,
                adder_id=adder_id, date_added=added,
                note=note))
      else:
        logging.warning('hotlist %d not found', hotlist_id)

    for (hotlist_id, user_id, role_name) in role_rows:
      hotlist = hotlist_dict.get(hotlist_id)
      if not hotlist:
        logging.warning('hotlist %d not found', hotlist_id)
      elif role_name == 'owner':
        hotlist.owner_ids.append(user_id)
      elif role_name == 'editor':
        hotlist.editor_ids.append(user_id)
      elif role_name == 'follower':
        hotlist.follower_ids.append(user_id)
      else:
        logging.info('unknown role name %s', role_name)

    return hotlist_dict

  def FetchItems(self, cnxn, keys):
    """On RAM and memcache miss, hit the database to get missing hotlists."""
    hotlist_rows = self.features_service.hotlist_tbl.Select(
        cnxn, cols=HOTLIST_COLS, is_deleted=False, id=keys)
    issue_rows = self.features_service.hotlist2issue_tbl.Select(
        cnxn, cols=HOTLIST2ISSUE_COLS, hotlist_id=keys,
        order_by=[('rank DESC', []), ('issue_id', [])])
    role_rows = self.features_service.hotlist2user_tbl.Select(
        cnxn, cols=HOTLIST2USER_COLS, hotlist_id=keys)
    retrieved_dict = self._DeserializeHotlists(
        hotlist_rows, issue_rows, role_rows)
    return retrieved_dict


class HotlistIDTwoLevelCache(caches.AbstractTwoLevelCache):
  """Class to manage both RAM and memcache for hotlist_ids.

  Keys for this cache are tuples (hotlist_name.lower(), owner_id).
  This cache should be used to fetch hotlist_ids owned by users or
  to check if a user owns a hotlist with a certain name, so the
  hotlist_names in keys will always be in lowercase.
  """

  def __init__(self, cachemanager, features_service):
    super(HotlistIDTwoLevelCache, self).__init__(
        cachemanager, 'hotlist_id', 'hotlist_id:', int,
        max_size=settings.issue_cache_max_size)
    self.features_service = features_service

  def _MakeCache(self, cache_manager, kind, max_size=None):
    """Override normal RamCache creation with ValueCentricRamCache."""
    return caches.ValueCentricRamCache(cache_manager, kind, max_size=max_size)

  def _KeyToStr(self, key):
    """This cache uses pairs of (str, int) as keys. Convert them to strings."""
    return '%s,%d' % key

  def _StrToKey(self, key_str):
    """This cache uses pairs of (str, int) as keys.
    Convert them from strings.
    """
    hotlist_name_str, owner_id_str = key_str.split(',')
    return (hotlist_name_str, int(owner_id_str))
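
  # Example (illustrative only; the hotlist name and user ID are made up):
  #
  #   self._KeyToStr(('release-blockers', 111))  # -> 'release-blockers,111'
  #   self._StrToKey('release-blockers,111')  # -> ('release-blockers', 111)
  #
  # Callers lowercase hotlist names before building keys, so no case
  # conversion happens here.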

  def _DeserializeHotlistIDs(
      self, hotlist_rows, owner_rows, wanted_names_for_owners):
    """Convert database rows into a dict of hotlist_ids keyed by
    (hotlist_name, owner_id).

    Args:
      hotlist_rows: a list of hotlist rows [id, name] from HOTLIST for
        hotlists with names we are interested in.
      owner_rows: a list of role rows [hotlist_id, user_id] from HOTLIST2USER
        for owners that we are interested in that own hotlists with names that
        we are interested in.
      wanted_names_for_owners: a dict of
        {owner_id: [hotlist_name.lower(), ...], ...}
        so we know which (hotlist_name, owner_id) keys to return.

    Returns:
      A dict mapping (hotlist_name.lower(), owner_id) keys to hotlist_id values.
    """
    hotlist_ids_dict = {}
    if not hotlist_rows or not owner_rows:
      return hotlist_ids_dict

    hotlist_to_owner_id = {}

    # Note: owner_rows contains hotlist owners that we are interested in, but
    # they may not own hotlists with names we are interested in.
    for (hotlist_id, user_id) in owner_rows:
      found_owner_id = hotlist_to_owner_id.get(hotlist_id)
      if found_owner_id:
        logging.warning(
            'hotlist %d has more than one owner: %d, %d', hotlist_id, user_id,
            found_owner_id)
      hotlist_to_owner_id[hotlist_id] = user_id

    # Note: hotlist_rows contains hotlists found in owner_rows that have names
    # we're interested in.
    # We use wanted_names_for_owners to filter out hotlists in hotlist_rows
    # that have a (hotlist_name, owner_id) pair we are not interested in.
    for (hotlist_id, hotlist_name) in hotlist_rows:
      owner_id = hotlist_to_owner_id.get(hotlist_id)
      if owner_id:
        if hotlist_name.lower() in wanted_names_for_owners.get(owner_id, []):
          hotlist_ids_dict[(hotlist_name.lower(), owner_id)] = hotlist_id

    return hotlist_ids_dict

  def FetchItems(self, cnxn, keys):
    """On RAM and memcache miss, hit the database."""
    hotlist_names, _owner_ids = zip(*keys)
    # Keys may contain [(name1, user1), (name1, user2)] so we cast this to
    # a set to make sure 'name1' is not repeated.
    hotlist_names_set = set(hotlist_names)
    # Pass this dict to _DeserializeHotlistIDs so it knows what hotlist names
    # we're interested in for each owner.
    wanted_names_for_owner = collections.defaultdict(list)
    for hotlist_name, owner_id in keys:
      wanted_names_for_owner[owner_id].append(hotlist_name.lower())

    role_rows = self.features_service.hotlist2user_tbl.Select(
        cnxn, cols=['hotlist_id', 'user_id'],
        user_id=sorted(wanted_names_for_owner.keys()), role_name='owner')

    hotlist_ids = [row[0] for row in role_rows]
    hotlist_rows = self.features_service.hotlist_tbl.Select(
        cnxn, cols=['id', 'name'], id=hotlist_ids, is_deleted=False,
        where=[('LOWER(name) IN (%s)' % sql.PlaceHolders(hotlist_names_set),
                [name.lower() for name in sorted(hotlist_names_set)])])

    return self._DeserializeHotlistIDs(
        hotlist_rows, role_rows, wanted_names_for_owner)


class FeaturesService(object):
  """The persistence layer for servlets in the features directory."""

  def __init__(self, cache_manager, config_service):
    """Initialize this object so that it is ready to use.

    Args:
      cache_manager: local cache with distributed invalidation.
      config_service: an instance of ConfigService.
    """
    self.quickedithistory_tbl = sql.SQLTableManager(QUICKEDITHISTORY_TABLE_NAME)
    self.quickeditmostrecent_tbl = sql.SQLTableManager(
        QUICKEDITMOSTRECENT_TABLE_NAME)

    self.savedquery_tbl = sql.SQLTableManager(SAVEDQUERY_TABLE_NAME)
    self.project2savedquery_tbl = sql.SQLTableManager(
        PROJECT2SAVEDQUERY_TABLE_NAME)
    self.savedqueryexecutesinproject_tbl = sql.SQLTableManager(
        SAVEDQUERYEXECUTESINPROJECT_TABLE_NAME)
    self.user2savedquery_tbl = sql.SQLTableManager(USER2SAVEDQUERY_TABLE_NAME)

    self.filterrule_tbl = sql.SQLTableManager(FILTERRULE_TABLE_NAME)

    self.hotlist_tbl = sql.SQLTableManager(HOTLIST_TABLE_NAME)
    self.hotlist2issue_tbl = sql.SQLTableManager(HOTLIST2ISSUE_TABLE_NAME)
    self.hotlist2user_tbl = sql.SQLTableManager(HOTLIST2USER_TABLE_NAME)

    self.saved_query_cache = caches.RamCache(
        cache_manager, 'user', max_size=1000)
    self.canned_query_cache = caches.RamCache(
        cache_manager, 'project', max_size=1000)

    self.hotlist_2lc = HotlistTwoLevelCache(cache_manager, self)
    self.hotlist_id_2lc = HotlistIDTwoLevelCache(cache_manager, self)
    self.hotlist_user_to_ids = caches.RamCache(cache_manager, 'hotlist')

    self.config_service = config_service

  ### QuickEdit command history

  def GetRecentCommands(self, cnxn, user_id, project_id):
    """Return recent command items for the "Redo" menu.

    Args:
      cnxn: Connection to SQL database.
      user_id: int ID of the current user.
      project_id: int ID of the current project.

    Returns:
      A pair (cmd_slots, recent_slot_num). cmd_slots is a list of
      3-tuples that can be used to populate the "Redo" menu of the
      quick-edit dialog. recent_slot_num indicates which of those
      slots should initially populate the command and comment fields.
    """
    # Always start with the standard 5 commands.
    history = tracker_constants.DEFAULT_RECENT_COMMANDS[:]
    # If the user has modified any, then overwrite some standard ones.
    history_rows = self.quickedithistory_tbl.Select(
        cnxn, cols=['slot_num', 'command', 'comment'],
        user_id=user_id, project_id=project_id)
    for slot_num, command, comment in history_rows:
      if slot_num < len(history):
        history[slot_num - 1] = (command, comment)

    slots = []
    for idx, (command, comment) in enumerate(history):
      slots.append((idx + 1, command, comment))

    recent_slot_num = self.quickeditmostrecent_tbl.SelectValue(
        cnxn, 'slot_num', default=1, user_id=user_id, project_id=project_id)

    return slots, recent_slot_num
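
  # Example (illustrative sketch; `features_svc`, `cnxn`, and the IDs are
  # hypothetical stand-ins):
  #
  #   slots, recent_slot_num = features_svc.GetRecentCommands(
  #       cnxn, user_id=111, project_id=789)
  #   # slots is a list like [(1, <command>, <comment>), ...], and
  #   # recent_slot_num says which slot should pre-populate the dialog.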

  def StoreRecentCommand(
      self, cnxn, user_id, project_id, slot_num, command, comment):
    """Store the given command and comment in the user's command history."""
    self.quickedithistory_tbl.InsertRow(
        cnxn, replace=True, user_id=user_id, project_id=project_id,
        slot_num=slot_num, command=command, comment=comment)
    self.quickeditmostrecent_tbl.InsertRow(
        cnxn, replace=True, user_id=user_id, project_id=project_id,
        slot_num=slot_num)

  def ExpungeQuickEditHistory(self, cnxn, project_id):
    """Completely delete every user's quick edit history for this project."""
    self.quickeditmostrecent_tbl.Delete(cnxn, project_id=project_id)
    self.quickedithistory_tbl.Delete(cnxn, project_id=project_id)

  def ExpungeQuickEditsByUsers(self, cnxn, user_ids, limit=None):
    """Completely delete the given users' quick edits.

    This method will not commit the operations. This method will
    not make changes to in-memory data.
    """
    commit = False
    self.quickeditmostrecent_tbl.Delete(
        cnxn, user_id=user_ids, commit=commit, limit=limit)
    self.quickedithistory_tbl.Delete(
        cnxn, user_id=user_ids, commit=commit, limit=limit)

  ### Saved User and Project Queries

  def GetSavedQueries(self, cnxn, query_ids):
    """Retrieve the specified SavedQuery PBs."""
    # TODO(jrobbins): RAM cache
    if not query_ids:
      return {}
    saved_queries = {}
    savedquery_rows = self.savedquery_tbl.Select(
        cnxn, cols=SAVEDQUERY_COLS, id=query_ids)
    for saved_query_tuple in savedquery_rows:
      qid, name, base_id, query = saved_query_tuple
      saved_queries[qid] = tracker_bizobj.MakeSavedQuery(
          qid, name, base_id, query)

    sqeip_rows = self.savedqueryexecutesinproject_tbl.Select(
        cnxn, cols=SAVEDQUERYEXECUTESINPROJECT_COLS, query_id=query_ids)
    for query_id, project_id in sqeip_rows:
      saved_queries[query_id].executes_in_project_ids.append(project_id)

    return saved_queries
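
  # Example (illustrative sketch; the query IDs are hypothetical):
  #
  #   queries = features_svc.GetSavedQueries(cnxn, [201, 202])
  #   # queries maps each found query ID to a SavedQuery PB whose
  #   # executes_in_project_ids list has been filled in.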

  def GetSavedQuery(self, cnxn, query_id):
    """Retrieve the specified SavedQuery PB."""
    saved_queries = self.GetSavedQueries(cnxn, [query_id])
    return saved_queries.get(query_id)

  def _GetUsersSavedQueriesDict(self, cnxn, user_ids):
    """Return a dict of all SavedQuery PBs for the specified users."""
    results_dict, missed_uids = self.saved_query_cache.GetAll(user_ids)

    if missed_uids:
      savedquery_rows = self.user2savedquery_tbl.Select(
          cnxn, cols=SAVEDQUERY_COLS + ['user_id', 'subscription_mode'],
          left_joins=[('SavedQuery ON query_id = id', [])],
          order_by=[('rank', [])], user_id=missed_uids)
      sqeip_dict = {}
      if savedquery_rows:
        query_ids = {row[0] for row in savedquery_rows}
        sqeip_rows = self.savedqueryexecutesinproject_tbl.Select(
            cnxn, cols=SAVEDQUERYEXECUTESINPROJECT_COLS, query_id=query_ids)
        for qid, pid in sqeip_rows:
          sqeip_dict.setdefault(qid, []).append(pid)

      for saved_query_tuple in savedquery_rows:
        query_id, name, base_id, query, uid, sub_mode = saved_query_tuple
        sq = tracker_bizobj.MakeSavedQuery(
            query_id, name, base_id, query, subscription_mode=sub_mode,
            executes_in_project_ids=sqeip_dict.get(query_id, []))
        results_dict.setdefault(uid, []).append(sq)

    self.saved_query_cache.CacheAll(results_dict)
    return results_dict

  # TODO(jrobbins): change this terminology to "canned query" rather than
  # "saved" throughout the application.
  def GetSavedQueriesByUserID(self, cnxn, user_id):
    """Return a list of SavedQuery PBs for the specified user."""
    saved_queries_dict = self._GetUsersSavedQueriesDict(cnxn, [user_id])
    saved_queries = saved_queries_dict.get(user_id, [])
    return saved_queries[:]

  def GetCannedQueriesForProjects(self, cnxn, project_ids):
    """Return a dict {project_id: [saved_query]} for the specified projects."""
    results_dict, missed_pids = self.canned_query_cache.GetAll(project_ids)

    if missed_pids:
      cannedquery_rows = self.project2savedquery_tbl.Select(
          cnxn, cols=['project_id'] + SAVEDQUERY_COLS,
          left_joins=[('SavedQuery ON query_id = id', [])],
          order_by=[('rank', [])], project_id=project_ids)

      for cq_row in cannedquery_rows:
        project_id = cq_row[0]
        canned_query_tuple = cq_row[1:]
        results_dict.setdefault(project_id, []).append(
            tracker_bizobj.MakeSavedQuery(*canned_query_tuple))

    self.canned_query_cache.CacheAll(results_dict)
    return results_dict

  def GetCannedQueriesByProjectID(self, cnxn, project_id):
    """Return the list of SavedQueries for the specified project."""
    project_ids_to_canned_queries = self.GetCannedQueriesForProjects(
        cnxn, [project_id])
    return project_ids_to_canned_queries.get(project_id, [])

  def _UpdateSavedQueries(self, cnxn, saved_queries, commit=True):
    """Store the given SavedQueries to the DB."""
    savedquery_rows = [
        (sq.query_id or None, sq.name, sq.base_query_id, sq.query)
        for sq in saved_queries]
    existing_query_ids = [sq.query_id for sq in saved_queries if sq.query_id]
    if existing_query_ids:
      self.savedquery_tbl.Delete(cnxn, id=existing_query_ids, commit=commit)

    generated_ids = self.savedquery_tbl.InsertRows(
        cnxn, SAVEDQUERY_COLS, savedquery_rows, commit=commit,
        return_generated_ids=True)
    if generated_ids:
      logging.info('generated_ids are %r', generated_ids)
      for sq in saved_queries:
        generated_id = generated_ids.pop(0)
        if not sq.query_id:
          sq.query_id = generated_id

  def UpdateCannedQueries(self, cnxn, project_id, canned_queries):
    """Update the canned queries for a project.

    Args:
      cnxn: connection to SQL database.
      project_id: int project ID of the project that contains these queries.
      canned_queries: list of SavedQuery PBs to update.
    """
    self.project2savedquery_tbl.Delete(
        cnxn, project_id=project_id, commit=False)
    self._UpdateSavedQueries(cnxn, canned_queries, commit=False)
    project2savedquery_rows = [
        (project_id, rank, sq.query_id)
        for rank, sq in enumerate(canned_queries)]
    self.project2savedquery_tbl.InsertRows(
        cnxn, PROJECT2SAVEDQUERY_COLS, project2savedquery_rows,
        commit=False)
    cnxn.Commit()

    self.canned_query_cache.Invalidate(cnxn, project_id)

  def UpdateUserSavedQueries(self, cnxn, user_id, saved_queries):
    """Store the given saved_queries for the given user."""
    saved_query_ids = [sq.query_id for sq in saved_queries if sq.query_id]
    self.savedqueryexecutesinproject_tbl.Delete(
        cnxn, query_id=saved_query_ids, commit=False)
    self.user2savedquery_tbl.Delete(cnxn, user_id=user_id, commit=False)

    self._UpdateSavedQueries(cnxn, saved_queries, commit=False)
    user2savedquery_rows = []
    for rank, sq in enumerate(saved_queries):
      user2savedquery_rows.append(
          (user_id, rank, sq.query_id, sq.subscription_mode or 'noemail'))

    self.user2savedquery_tbl.InsertRows(
        cnxn, USER2SAVEDQUERY_COLS, user2savedquery_rows, commit=False)

    sqeip_rows = []
    for sq in saved_queries:
      for pid in sq.executes_in_project_ids:
        sqeip_rows.append((sq.query_id, pid))

    self.savedqueryexecutesinproject_tbl.InsertRows(
        cnxn, SAVEDQUERYEXECUTESINPROJECT_COLS, sqeip_rows, commit=False)
    cnxn.Commit()

    self.saved_query_cache.Invalidate(cnxn, user_id)

  ### Subscriptions

  def GetSubscriptionsInProjects(self, cnxn, project_ids):
    """Return all saved queries for users with subscriptions in those projects.

    Args:
      cnxn: Connection to SQL database.
      project_ids: list of int project IDs that contain the modified issues.

    Returns:
      A dict {user_id: all_saved_queries, ...} for all users that have any
      subscription in any of the specified projects.
    """
    sqeip_join_str = (
        'SavedQueryExecutesInProject ON '
        'SavedQueryExecutesInProject.query_id = User2SavedQuery.query_id')
    user_join_str = (
        'User ON '
        'User.user_id = User2SavedQuery.user_id')
    now = int(time.time())
    absence_threshold = now - settings.subscription_timeout_secs
    where = [
        ('(User.banned IS NULL OR User.banned = %s)', ['']),
        ('User.last_visit_timestamp >= %s', [absence_threshold]),
        ('(User.email_bounce_timestamp IS NULL OR '
         'User.email_bounce_timestamp = %s)', [0]),
        ]
    # TODO(jrobbins): cache this since it rarely changes.
    subscriber_rows = self.user2savedquery_tbl.Select(
        cnxn, cols=['User2SavedQuery.user_id'], distinct=True,
        joins=[(sqeip_join_str, []), (user_join_str, [])],
        subscription_mode='immediate', project_id=project_ids,
        where=where)
    subscriber_ids = [row[0] for row in subscriber_rows]
    logging.info('subscribers relevant to projects %r are %r',
                 project_ids, subscriber_ids)
    user_ids_to_saved_queries = self._GetUsersSavedQueriesDict(
        cnxn, subscriber_ids)
    return user_ids_to_saved_queries

  def ExpungeSavedQueriesExecuteInProject(self, cnxn, project_id):
    """Remove any references from saved queries to projects in the database."""
    self.savedqueryexecutesinproject_tbl.Delete(cnxn, project_id=project_id)

    savedquery_rows = self.project2savedquery_tbl.Select(
        cnxn, cols=['query_id'], project_id=project_id)
    savedquery_ids = [row[0] for row in savedquery_rows]
    self.project2savedquery_tbl.Delete(cnxn, project_id=project_id)
    self.savedquery_tbl.Delete(cnxn, id=savedquery_ids)

  def ExpungeSavedQueriesByUsers(self, cnxn, user_ids, limit=None):
    """Completely delete the given users' saved queries.

    This method will not commit the operations. This method will
    not make changes to in-memory data.
    """
    commit = False
    savedquery_rows = self.user2savedquery_tbl.Select(
        cnxn, cols=['query_id'], user_id=user_ids, limit=limit)
    savedquery_ids = [row[0] for row in savedquery_rows]
    self.user2savedquery_tbl.Delete(
        cnxn, query_id=savedquery_ids, commit=commit)
    self.savedqueryexecutesinproject_tbl.Delete(
        cnxn, query_id=savedquery_ids, commit=commit)
    self.savedquery_tbl.Delete(cnxn, id=savedquery_ids, commit=commit)


  ### Filter rules

  def _DeserializeFilterRules(self, filterrule_rows):
    """Convert the given DB row tuples into PBs."""
    result_dict = collections.defaultdict(list)

    for filterrule_row in sorted(filterrule_rows):
      project_id, _rank, predicate, consequence = filterrule_row
      (default_status, default_owner_id, add_cc_ids, add_labels,
       add_notify, warning, error) = self._DeserializeRuleConsequence(
          consequence)
      rule = filterrules_helpers.MakeRule(
          predicate, default_status=default_status,
          default_owner_id=default_owner_id, add_cc_ids=add_cc_ids,
          add_labels=add_labels, add_notify=add_notify, warning=warning,
          error=error)
      result_dict[project_id].append(rule)

    return result_dict

  def _DeserializeRuleConsequence(self, consequence):
    """Decode the THEN-part of a filter rule."""
    (default_status, default_owner_id, add_cc_ids, add_labels,
     add_notify, warning, error) = None, None, [], [], [], None, None
    for match in CONSEQUENCE_RE.finditer(consequence):
      if match.group('default_status'):
        default_status = match.group('default_status')
      elif match.group('default_owner_id'):
        default_owner_id = int(match.group('default_owner_id'))
      elif match.group('add_cc_id'):
        add_cc_ids.append(int(match.group('add_cc_id')))
      elif match.group('add_label'):
        add_labels.append(match.group('add_label'))
      elif match.group('add_notify'):
        add_notify.append(match.group('add_notify'))
      elif match.group('warning'):
        warning = match.group('warning')
      elif match.group('error'):
        error = match.group('error')

    return (default_status, default_owner_id, add_cc_ids, add_labels,
            add_notify, warning, error)

  def _GetFilterRulesByProjectIDs(self, cnxn, project_ids):
    """Return {project_id: [FilterRule, ...]} for the specified projects."""
    # TODO(jrobbins): caching
    filterrule_rows = self.filterrule_tbl.Select(
        cnxn, cols=FILTERRULE_COLS, project_id=project_ids)
    return self._DeserializeFilterRules(filterrule_rows)

  def GetFilterRules(self, cnxn, project_id):
    """Return a list of FilterRule PBs for the specified project."""
    rules_by_project_id = self._GetFilterRulesByProjectIDs(cnxn, [project_id])
    return rules_by_project_id[project_id]

  def _SerializeRuleConsequence(self, rule):
    """Put all actions of a filter rule into one string."""
    assignments = []
    for add_lab in rule.add_labels:
      assignments.append('add_label:%s' % add_lab)
    if rule.default_status:
      assignments.append('default_status:%s' % rule.default_status)
    if rule.default_owner_id:
      assignments.append('default_owner_id:%d' % rule.default_owner_id)
    for add_cc_id in rule.add_cc_ids:
      assignments.append('add_cc_id:%d' % add_cc_id)
    for add_notify in rule.add_notify_addrs:
      assignments.append('add_notify:%s' % add_notify)
    if rule.warning:
      assignments.append('warning:%s' % rule.warning)
    if rule.error:
      assignments.append('error:%s' % rule.error)

    return ' '.join(assignments)
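
  # Round-trip example (illustrative only; the values are made up): a rule
  # with add_labels=['Hotlist-Triaged'] and default_owner_id=111 serializes
  # to 'add_label:Hotlist-Triaged default_owner_id:111', and
  # _DeserializeRuleConsequence() turns that string back into the
  # (default_status, default_owner_id, add_cc_ids, add_labels, add_notify,
  # warning, error) tuple. Warnings and errors consume the rest of the
  # string when parsed, which is why serialization appends them last.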

  def UpdateFilterRules(self, cnxn, project_id, rules):
    """Update the filter rules part of a project's issue configuration.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the current project.
      rules: a list of FilterRule PBs.
    """
    rows = []
    for rank, rule in enumerate(rules):
      predicate = rule.predicate
      consequence = self._SerializeRuleConsequence(rule)
      if predicate and consequence:
        rows.append((project_id, rank, predicate, consequence))

    self.filterrule_tbl.Delete(cnxn, project_id=project_id)
    self.filterrule_tbl.InsertRows(cnxn, FILTERRULE_COLS, rows)

  def ExpungeFilterRules(self, cnxn, project_id):
    """Completely destroy filter rule info for the specified project."""
    self.filterrule_tbl.Delete(cnxn, project_id=project_id)

  def ExpungeFilterRulesByUser(self, cnxn, user_ids_by_email):
    """Wipes any Filter Rules containing the given users.

    This method will not commit the operation. This method will not make
    changes to in-memory data.

    Args:
      cnxn: connection to SQL database.
      user_ids_by_email: dict of {email: user_id, ...} of all users we want to
        expunge.

    Returns:
      Dictionary of {project_id: [(predicate, consequence), ...]} for Filter
      Rules that will be deleted for containing the given emails.
    """
    deleted_project_rules_dict = collections.defaultdict(list)
    if user_ids_by_email:
      deleted_rows = []
      emails = user_ids_by_email.keys()
      all_rules_rows = self.filterrule_tbl.Select(cnxn, FILTERRULE_COLS)
      logging.info('Fetched all filter rules: %s', all_rules_rows)
      for rule_row in all_rules_rows:
        project_id, _rank, predicate, consequence = rule_row
        if any(email in predicate for email in emails):
          deleted_rows.append(rule_row)
          continue
        if any((('add_notify:%s' % email) in consequence or
                ('add_cc_id:%s' % user_id) in consequence or
                ('default_owner_id:%s' % user_id) in consequence)
               for email, user_id in user_ids_by_email.items()):
          deleted_rows.append(rule_row)
          continue

      for deleted_row in deleted_rows:
        project_id, rank, predicate, consequence = deleted_row
        self.filterrule_tbl.Delete(
            cnxn, project_id=project_id, rank=rank, predicate=predicate,
            consequence=consequence, commit=False)
      deleted_project_rules_dict = self._DeserializeFilterRules(deleted_rows)

    return deleted_project_rules_dict

  ### Creating hotlists

  def CreateHotlist(
      self, cnxn, name, summary, description, owner_ids, editor_ids,
      issue_ids=None, is_private=None, default_col_spec=None, ts=None):
    # type: (MonorailConnection, str, str, str, Collection[int],
    #     Collection[int], Optional[Collection[int]], Optional[bool],
    #     Optional[str], Optional[int]) -> features_pb2.Hotlist
    """Create and store a Hotlist with the given attributes.

    Args:
      cnxn: connection to SQL database.
      name: a valid hotlist name.
      summary: one-line explanation of the hotlist.
      description: one-page explanation of the hotlist.
      owner_ids: a list of user IDs for the hotlist owners.
      editor_ids: a list of user IDs for the hotlist editors.
      issue_ids: a list of issue IDs for the hotlist issues.
      is_private: True if the hotlist can only be viewed by owners and editors.
      default_col_spec: the default columns that show in list view.
      ts: a timestamp for when this hotlist was created.

    Returns:
      The newly created Hotlist PB, with its hotlist_id set.

    Raises:
      InputException: if the hotlist name is invalid.
      HotlistAlreadyExists: if any of the owners already own a hotlist with
        the same name.
      UnownedHotlistException: if owner_ids is empty.
    """
    # TODO(crbug.com/monorail/7677): These checks should be done in the
    # business layer.
    # Remove when calls from non-business layer code are removed.
    if not owner_ids:  # Should never happen.
      logging.error('Attempt to create unowned Hotlist: name:%r', name)
      raise UnownedHotlistException()
    if not framework_bizobj.IsValidHotlistName(name):
      raise exceptions.InputException(
          '%s is not a valid name for a Hotlist' % name)
    if self.LookupHotlistIDs(cnxn, [name], owner_ids):
      raise HotlistAlreadyExists()
    # TODO(crbug.com/monorail/7677): We are not setting a
    # default default_col_spec in v3.
    if default_col_spec is None:
      default_col_spec = features_constants.DEFAULT_COL_SPEC

    hotlist_item_fields = [
        (issue_id, rank * 100, owner_ids[0], ts, '') for
        rank, issue_id in enumerate(issue_ids or [])]
    hotlist = features_pb2.MakeHotlist(
        name, hotlist_item_fields=hotlist_item_fields, summary=summary,
        description=description, is_private=is_private, owner_ids=owner_ids,
        editor_ids=editor_ids, default_col_spec=default_col_spec)
    hotlist.hotlist_id = self._InsertHotlist(cnxn, hotlist)
    return hotlist
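
  # Example (illustrative sketch; the names and IDs are hypothetical):
  #
  #   hotlist = features_svc.CreateHotlist(
  #       cnxn, 'release-blockers', 'Blocking issues', 'Longer description',
  #       owner_ids=[111], editor_ids=[222], issue_ids=[78901, 78902],
  #       is_private=True, ts=int(time.time()))
  #   # hotlist.hotlist_id now holds the newly assigned ID.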

  def UpdateHotlist(
      self, cnxn, hotlist_id, name=None, summary=None, description=None,
      is_private=None, default_col_spec=None, owner_id=None,
      add_editor_ids=None):
    """Update the DB with the given hotlist information."""
    # Note: If something is None, it does not get changed to None,
    # it just does not get updated.
    hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False)
    if not hotlist:
      raise NoSuchHotlistException()

    delta = {}
    if name is not None:
      delta['name'] = name
    if summary is not None:
      delta['summary'] = summary
    if description is not None:
      delta['description'] = description
    if is_private is not None:
      delta['is_private'] = is_private
    if default_col_spec is not None:
      delta['default_col_spec'] = default_col_spec

    self.hotlist_tbl.Update(cnxn, delta, id=hotlist_id, commit=False)
    insert_rows = []
    if owner_id is not None:
      insert_rows.append((hotlist_id, owner_id, 'owner'))
      self.hotlist2user_tbl.Delete(
          cnxn, hotlist_id=hotlist_id, role_name='owner', commit=False)
    if add_editor_ids:
      insert_rows.extend(
          [(hotlist_id, user_id, 'editor') for user_id in add_editor_ids])
    if insert_rows:
      self.hotlist2user_tbl.InsertRows(
          cnxn, HOTLIST2USER_COLS, insert_rows, commit=False)

    cnxn.Commit()

    self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id])
    if not hotlist.owner_ids:  # Should never happen.
      logging.warning(
          'Modifying unowned Hotlist: id:%r, name:%r', hotlist_id, hotlist.name)
    elif hotlist.name:
      self.hotlist_id_2lc.InvalidateKeys(
          cnxn, [(hotlist.name.lower(), owner_id) for
                 owner_id in hotlist.owner_ids])

    # Update the hotlist PB in RAM
    if name is not None:
      hotlist.name = name
    if summary is not None:
      hotlist.summary = summary
    if description is not None:
      hotlist.description = description
    if is_private is not None:
      hotlist.is_private = is_private
    if default_col_spec is not None:
      hotlist.default_col_spec = default_col_spec
    if owner_id is not None:
      hotlist.owner_ids = [owner_id]
    if add_editor_ids:
      hotlist.editor_ids.extend(add_editor_ids)

  def RemoveHotlistEditors(self, cnxn, hotlist_id, remove_editor_ids):
    # type: (MonorailConnection, int, Collection[int]) -> None
    """Remove the given editors from the specified hotlist.

    Args:
      cnxn: MonorailConnection object.
      hotlist_id: int ID of the Hotlist we want to update.
      remove_editor_ids: collection of existing hotlist editor User IDs
        that we want to remove from the hotlist.

    Raises:
      NoSuchHotlistException: if the hotlist is not found.
      InputException: if there are no editors to remove.
    """
    if not remove_editor_ids:
      raise exceptions.InputException
    hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False)
    if not hotlist:
      raise NoSuchHotlistException()

    self.hotlist2user_tbl.Delete(
        cnxn, hotlist_id=hotlist_id, user_id=remove_editor_ids)
    self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id])

    # Update in-memory data
    for remove_id in remove_editor_ids:
      hotlist.editor_ids.remove(remove_id)

  def UpdateHotlistIssues(
      self,
      cnxn,  # type: sql.MonorailConnection
      hotlist_id,  # type: int
      updated_items,  # type: Collection[features_pb2.HotlistItem]
      remove_issue_ids,  # type: Collection[int]
      issue_svc,  # type: issue_svc.IssueService
      chart_svc,  # type: chart_svc.ChartService
      commit=True  # type: Optional[bool]
  ):
    # type: (...) -> None
    """Update the Issues in a Hotlist.

    This method removes the given remove_issue_ids from a Hotlist, then
    updates or adds the HotlistItems found in updated_items. HotlistItems
    in updated_items may exist in the hotlist and just need to be updated,
    or they may be new items that should be added to the Hotlist.

    Args:
      cnxn: MonorailConnection object.
      hotlist_id: int ID of the Hotlist to update.
      updated_items: Collection of HotlistItems that either already exist in
        the hotlist and need to be updated or need to be added to the hotlist.
      remove_issue_ids: Collection of Issue IDs that should be removed from the
        hotlist.
      issue_svc: IssueService object.
      chart_svc: ChartService object.
      commit: set to False to skip the DB commit and do it in the caller.

    Raises:
      NoSuchHotlistException: if a hotlist with the given ID is not found.
      InputException: if no changes were given.
    """
    if not updated_items and not remove_issue_ids:
      raise exceptions.InputException('No changes to make')

    hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False)
    if not hotlist:
      raise NoSuchHotlistException()

    # Used to hold the updated Hotlist.items to use when updating
    # the in-memory hotlist.
    all_hotlist_items = list(hotlist.items)

    # Used to hold ids of issues affected by this change for storing
    # Issue Snapshots.
    affected_issue_ids = set()

    if remove_issue_ids:
      affected_issue_ids.update(remove_issue_ids)
      self.hotlist2issue_tbl.Delete(
          cnxn, hotlist_id=hotlist_id, issue_id=remove_issue_ids, commit=False)
      all_hotlist_items = list(
          filter(
              lambda item: item.issue_id not in remove_issue_ids,
              all_hotlist_items))

    if updated_items:
      updated_issue_ids = [item.issue_id for item in updated_items]
      affected_issue_ids.update(updated_issue_ids)
      self.hotlist2issue_tbl.Delete(
          cnxn, hotlist_id=hotlist_id, issue_id=updated_issue_ids, commit=False)
      insert_rows = []
      for item in updated_items:
        insert_rows.append(
            (
                hotlist_id, item.issue_id, item.rank, item.adder_id,
                item.date_added, item.note))
      self.hotlist2issue_tbl.InsertRows(
          cnxn, cols=HOTLIST2ISSUE_COLS, row_values=insert_rows, commit=False)
      all_hotlist_items = list(
          filter(
              lambda item: item.issue_id not in updated_issue_ids,
              all_hotlist_items))
      all_hotlist_items.extend(updated_items)

    if commit:
      cnxn.Commit()
    self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id])

    # Update in-memory hotlist items.
    hotlist.items = sorted(all_hotlist_items, key=lambda item: item.rank)

    issues = issue_svc.GetIssues(cnxn, list(affected_issue_ids))
    chart_svc.StoreIssueSnapshots(cnxn, issues, commit=commit)

  # TODO(crbug/monorail/7104): {Add|Remove}IssuesToHotlists both call
  # UpdateHotlistItems to add/remove issues from a hotlist.
  # UpdateHotlistItemsFields is called by methods for reranking existing issues
  # and updating HotlistItem notes.
  # (1) We are removing notes from HotlistItems. crbug/monorail/####
  # (2) our v3 AddHotlistItems will allow for inserting new issues to
  # non-last ranks of a hotlist. So there could be some shared code
  # for the reranking path and the adding issues path.
  # UpdateHotlistIssues will be handling adding, removing, and reranking issues.
  # {Add|Remove}IssueToHotlists, UpdateHotlistItems, UpdateHotlistItemFields
  # should be removed, once all methods are updated to call UpdateHotlistIssues.

  def AddIssueToHotlists(self, cnxn, hotlist_ids, issue_tuple, issue_svc,
                         chart_svc, commit=True):
    """Add a single issue, specified in the issue_tuple, to the given hotlists.

    Args:
      cnxn: connection to SQL database.
      hotlist_ids: a list of hotlist_ids to add the issues to.
      issue_tuple: (issue_id, user_id, ts, note) of the issue to be added.
      issue_svc: an instance of IssueService.
      chart_svc: an instance of ChartService.
    """
    self.AddIssuesToHotlists(cnxn, hotlist_ids, [issue_tuple], issue_svc,
                             chart_svc, commit=commit)

  def AddIssuesToHotlists(self, cnxn, hotlist_ids, added_tuples, issue_svc,
                          chart_svc, commit=True):
    """Add the issues given in the added_tuples list to the given hotlists.

    Args:
      cnxn: connection to SQL database.
      hotlist_ids: a list of hotlist_ids to add the issues to.
      added_tuples: a list of (issue_id, user_id, ts, note)
        for issues to be added.
      issue_svc: an instance of IssueService.
      chart_svc: an instance of ChartService.
    """
    for hotlist_id in hotlist_ids:
      self.UpdateHotlistItems(cnxn, hotlist_id, [], added_tuples, commit=commit)

    issues = issue_svc.GetIssues(cnxn,
        [added_tuple[0] for added_tuple in added_tuples])
    chart_svc.StoreIssueSnapshots(cnxn, issues, commit=commit)

  def RemoveIssuesFromHotlists(self, cnxn, hotlist_ids, issue_ids, issue_svc,
                               chart_svc, commit=True):
    """Remove the issues given in issue_ids from the given hotlists.

    Args:
      cnxn: connection to SQL database.
      hotlist_ids: a list of hotlist ids to remove the issues from.
      issue_ids: a list of issue_ids to be removed.
      issue_svc: an instance of IssueService.
      chart_svc: an instance of ChartService.
    """
    for hotlist_id in hotlist_ids:
      self.UpdateHotlistItems(cnxn, hotlist_id, issue_ids, [], commit=commit)

    issues = issue_svc.GetIssues(cnxn, issue_ids)
    chart_svc.StoreIssueSnapshots(cnxn, issues, commit=commit)

  def UpdateHotlistItems(
      self, cnxn, hotlist_id, remove, added_tuples, commit=True):
    """Update a hotlist's list of hotlist items.

    Args:
      cnxn: connection to SQL database.
      hotlist_id: the ID of the hotlist to update.
      remove: a list of issue_ids to be removed.
      added_tuples: a list of (issue_id, user_id, ts, note)
        for issues to be added.
      commit: set to False to skip the DB commit and do it in the caller.
    """
    hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False)
    if not hotlist:
      raise NoSuchHotlistException()

    # Add new HotlistItems, ignoring added tuples whose issue_id is already
    # in the hotlist's items.
    current_issues_ids = {
        item.issue_id for item in hotlist.items}

    self.hotlist2issue_tbl.Delete(
        cnxn, hotlist_id=hotlist_id,
        issue_id=[remove_id for remove_id in remove
                  if remove_id in current_issues_ids],
        commit=False)
    if hotlist.items:
      items_sorted = sorted(hotlist.items, key=lambda item: item.rank)
      rank_base = items_sorted[-1].rank + 10
    else:
      rank_base = 1
    insert_rows = [
        (hotlist_id, issue_id, rank * 10 + rank_base, user_id, ts, note)
        for (rank, (issue_id, user_id, ts, note)) in enumerate(added_tuples)
        if issue_id not in current_issues_ids]
    self.hotlist2issue_tbl.InsertRows(
        cnxn, cols=HOTLIST2ISSUE_COLS, row_values=insert_rows, commit=commit)
    self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id])

    # Removing an issue that was never in the hotlist does not cause any
    # problems.
    items = [
        item for item in hotlist.items if
        item.issue_id not in remove]

    new_hotlist_items = [
        features_pb2.MakeHotlistItem(issue_id, rank, user_id, ts, note)
        for (_hid, issue_id, rank, user_id, ts, note) in insert_rows]
    items.extend(new_hotlist_items)
    hotlist.items = items
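
  # Rank spacing example (illustrative only): if the highest existing rank in
  # the hotlist is 40, rank_base becomes 50 and three newly added issues get
  # ranks 50, 60, and 70. The gaps of 10 leave room for later reordering
  # without renumbering every existing row.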

  def UpdateHotlistItemsFields(
      self, cnxn, hotlist_id, new_ranks=None, new_notes=None, commit=True):
    """Update rankings or notes of hotlist items.

    Args:
      cnxn: connection to SQL database.
      hotlist_id: the ID of the hotlist to update.
      new_ranks: a dictionary of {issue_id: rank}.
      new_notes: a dictionary of {issue_id: note}.
      commit: set to False to skip the DB commit and do it in the caller.
    """
    hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False)
    if not hotlist:
      raise NoSuchHotlistException()
    if new_ranks is None:
      new_ranks = {}
    if new_notes is None:
      new_notes = {}
    issue_ids = []
    insert_rows = []

    # Update the hotlist PB in RAM
    for hotlist_item in hotlist.items:
      item_updated = False
      if hotlist_item.issue_id in new_ranks:
        # Update rank before adding it to insert_rows
        hotlist_item.rank = new_ranks[hotlist_item.issue_id]
        item_updated = True
      if hotlist_item.issue_id in new_notes:
        # Update note before adding it to insert_rows
        hotlist_item.note = new_notes[hotlist_item.issue_id]
        item_updated = True
      if item_updated:
        issue_ids.append(hotlist_item.issue_id)
        insert_rows.append((
            hotlist_id, hotlist_item.issue_id, hotlist_item.rank,
            hotlist_item.adder_id, hotlist_item.date_added, hotlist_item.note))
    hotlist.items = sorted(hotlist.items, key=lambda item: item.rank)
    self.hotlist2issue_tbl.Delete(
        cnxn, hotlist_id=hotlist_id, issue_id=issue_ids, commit=False)

    self.hotlist2issue_tbl.InsertRows(
        cnxn, cols=HOTLIST2ISSUE_COLS, row_values=insert_rows, commit=commit)
    self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id])

  def _InsertHotlist(self, cnxn, hotlist):
    """Insert the given hotlist into the database."""
    hotlist_id = self.hotlist_tbl.InsertRow(
        cnxn, name=hotlist.name, summary=hotlist.summary,
        description=hotlist.description, is_private=hotlist.is_private,
        default_col_spec=hotlist.default_col_spec)
    logging.info('stored hotlist was given id %d', hotlist_id)

    self.hotlist2issue_tbl.InsertRows(
        cnxn, HOTLIST2ISSUE_COLS,
        [(hotlist_id, issue.issue_id, issue.rank,
          issue.adder_id, issue.date_added, issue.note)
         for issue in hotlist.items],
        commit=False)
    self.hotlist2user_tbl.InsertRows(
        cnxn, HOTLIST2USER_COLS,
        [(hotlist_id, user_id, 'owner')
         for user_id in hotlist.owner_ids] +
        [(hotlist_id, user_id, 'editor')
         for user_id in hotlist.editor_ids] +
        [(hotlist_id, user_id, 'follower')
         for user_id in hotlist.follower_ids])

    self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.owner_ids)

    return hotlist_id

  def TransferHotlistOwnership(
      self, cnxn, hotlist, new_owner_id, remain_editor, commit=True):
    """Transfers ownership of a hotlist to a new owner."""
    new_editor_ids = hotlist.editor_ids
    if remain_editor:
      new_editor_ids.extend(hotlist.owner_ids)
    if new_owner_id in new_editor_ids:
      new_editor_ids.remove(new_owner_id)
    new_follower_ids = hotlist.follower_ids
    if new_owner_id in new_follower_ids:
      new_follower_ids.remove(new_owner_id)
    self.UpdateHotlistRoles(
        cnxn, hotlist.hotlist_id, [new_owner_id], new_editor_ids,
        new_follower_ids, commit=commit)

  ### Lookup hotlist IDs

  def LookupHotlistIDs(self, cnxn, hotlist_names, owner_ids):
    """Return a dict of (name.lower(), owner_id) mapped to hotlist_id for all
    hotlists with one of the given names and any of the given owners. Hotlists
    that match multiple owners will be in the dict multiple times."""
    id_dict, _missed_keys = self.hotlist_id_2lc.GetAll(
        cnxn, [(name.lower(), owner_id)
               for name in hotlist_names for owner_id in owner_ids])
    return id_dict
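
  # Example (illustrative only; the name, user IDs, and hotlist ID are made
  # up):
  #
  #   ids = features_svc.LookupHotlistIDs(
  #       cnxn, ['Release-Blockers'], [111, 222])
  #   # ids -> {('release-blockers', 111): 4001}; keys use the lowercased
  #   # hotlist name, and only (name, owner) pairs that exist are returned.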
| 1144 | |
| 1145 | def LookupUserHotlists(self, cnxn, user_ids): |
| 1146 | """Return a dict of {user_id: [hotlist_id,...]} for all user_ids.""" |
| 1147 | id_dict, missed_ids = self.hotlist_user_to_ids.GetAll(user_ids) |
| 1148 | if missed_ids: |
| 1149 | retrieved_dict = {user_id: [] for user_id in missed_ids} |
| 1150 | id_rows = self.hotlist2user_tbl.Select( |
| 1151 | cnxn, cols=['user_id', 'hotlist_id'], user_id=user_ids, |
| 1152 | left_joins=[('Hotlist ON hotlist_id = id', [])], |
| 1153 | where=[('Hotlist.is_deleted = %s', [False])]) |
| 1154 | for (user_id, hotlist_id) in id_rows: |
| 1155 | retrieved_dict[user_id].append(hotlist_id) |
| 1156 | self.hotlist_user_to_ids.CacheAll(retrieved_dict) |
| 1157 | id_dict.update(retrieved_dict) |
| 1158 | |
| 1159 | return id_dict |
| 1160 | |
| 1161 | def LookupIssueHotlists(self, cnxn, issue_ids): |
| 1162 | """Return a dict of {issue_id: [hotlist_id,...]} for all issue_ids.""" |
| 1163 | # TODO(jojwang): create hotlist_issue_to_ids cache |
| 1164 | retrieved_dict = {issue_id: [] for issue_id in issue_ids} |
| 1165 | id_rows = self.hotlist2issue_tbl.Select( |
| 1166 | cnxn, cols=['hotlist_id', 'issue_id'], issue_id=issue_ids, |
| 1167 | left_joins=[('Hotlist ON hotlist_id = id', [])], |
| 1168 | where=[('Hotlist.is_deleted = %s', [False])]) |
| 1169 | for hotlist_id, issue_id in id_rows: |
| 1170 | retrieved_dict[issue_id].append(hotlist_id) |
| 1171 | return retrieved_dict |
| 1172 | |
| 1173 | def GetProjectIDsFromHotlist(self, cnxn, hotlist_id): |
| 1174 | project_id_rows = self.hotlist2issue_tbl.Select(cnxn, |
| 1175 | cols=['Issue.project_id'], hotlist_id=hotlist_id, distinct=True, |
| 1176 | left_joins=[('Issue ON issue_id = id', [])]) |
| 1177 | return [row[0] for row in project_id_rows] |
| 1178 | |
| 1179 | ### Get hotlists |
| 1180 | def GetHotlists(self, cnxn, hotlist_ids, use_cache=True): |
| 1181 | """Returns dict of {hotlist_id: hotlist PB}.""" |
| 1182 | hotlists_dict, missed_ids = self.hotlist_2lc.GetAll( |
| 1183 | cnxn, hotlist_ids, use_cache=use_cache) |
| 1184 | |
| 1185 | if missed_ids: |
| 1186 | raise NoSuchHotlistException() |
| 1187 | |
| 1188 | return hotlists_dict |
| 1189 | |
| 1190 | def GetHotlistsByUserID(self, cnxn, user_id, use_cache=True): |
| 1191 | """Get a list of hotlist PBs for a given user.""" |
| 1192 | hotlist_id_dict = self.LookupUserHotlists(cnxn, [user_id]) |
| 1193 | hotlists = self.GetHotlists( |
| 1194 | cnxn, hotlist_id_dict.get(user_id, []), use_cache=use_cache) |
| 1195 | return list(hotlists.values()) |
| 1196 | |
| 1197 | def GetHotlistsByIssueID(self, cnxn, issue_id, use_cache=True): |
| 1198 | """Get a list of hotlist PBs for a given issue.""" |
| 1199 | hotlist_id_dict = self.LookupIssueHotlists(cnxn, [issue_id]) |
| 1200 | hotlists = self.GetHotlists( |
| 1201 | cnxn, hotlist_id_dict.get(issue_id, []), use_cache=use_cache) |
| 1202 | return list(hotlists.values()) |
| 1203 | |
| 1204 | def GetHotlist(self, cnxn, hotlist_id, use_cache=True): |
| 1205 | """Returns hotlist PB.""" |
| 1206 | hotlist_dict = self.GetHotlists(cnxn, [hotlist_id], use_cache=use_cache) |
| 1207 | return hotlist_dict[hotlist_id] |
| 1208 | |
| 1209 | def GetHotlistsByID(self, cnxn, hotlist_ids, use_cache=True): |
| 1210 | """Load all the Hotlist PBs for the given hotlists. |
| 1211 | |
| 1212 | Args: |
| 1213 | cnxn: connection to SQL database. |
| 1214 | hotlist_ids: list of hotlist ids. |
| 1215 | use_cache: specify False to force a database query.
| 1216 | |
| 1217 | Returns: |
| 1218 | A dict mapping ids to the corresponding Hotlist protocol buffers and |
| 1219 | a list of any hotlist_ids that were not found. |
| 1220 | """ |
| 1221 | hotlists_dict, missed_ids = self.hotlist_2lc.GetAll( |
| 1222 | cnxn, hotlist_ids, use_cache=use_cache) |
| 1223 | return hotlists_dict, missed_ids |
| 1224 | |
| 1225 | def GetHotlistByID(self, cnxn, hotlist_id, use_cache=True): |
| 1226 | """Load the specified hotlist from the database, None if does not exist.""" |
| 1227 | hotlist_dict, _ = self.GetHotlistsByID( |
| 1228 | cnxn, [hotlist_id], use_cache=use_cache) |
| 1229 | return hotlist_dict.get(hotlist_id) |
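# Illustrative usage (sketch): unlike GetHotlist(), this returns None for an
# unknown ID instead of raising NoSuchHotlistException:
#   hotlist = features_service.GetHotlistByID(cnxn, 7001)
#   if hotlist is None:
#     ...  # handle the missing hotlist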
| 1230 | |
| 1231 | def UpdateHotlistRoles( |
| 1232 | self, cnxn, hotlist_id, owner_ids, editor_ids, follower_ids, commit=True): |
| 1233 | """"Store the hotlist's roles in the DB.""" |
| 1234 | # This will be a newly contructed object, not from the cache and not |
| 1235 | # shared with any other thread. |
| 1236 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 1237 | if not hotlist: |
| 1238 | raise NoSuchHotlistException() |
| 1239 | |
| 1240 | self.hotlist2user_tbl.Delete( |
| 1241 | cnxn, hotlist_id=hotlist_id, commit=False) |
| 1242 | |
| 1243 | insert_rows = [(hotlist_id, user_id, 'owner') for user_id in owner_ids] |
| 1244 | insert_rows.extend( |
| 1245 | [(hotlist_id, user_id, 'editor') for user_id in editor_ids]) |
| 1246 | insert_rows.extend( |
| 1247 | [(hotlist_id, user_id, 'follower') for user_id in follower_ids]) |
| 1248 | self.hotlist2user_tbl.InsertRows( |
| 1249 | cnxn, HOTLIST2USER_COLS, insert_rows, commit=False) |
| 1250 | |
| 1251 | if commit: |
| 1252 | cnxn.Commit() |
| 1253 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 1254 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.owner_ids) |
| 1255 | hotlist.owner_ids = owner_ids |
| 1256 | hotlist.editor_ids = editor_ids |
| 1257 | hotlist.follower_ids = follower_ids |
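# Illustrative usage (sketch; IDs are assumptions). Passing commit=False lets
# a caller batch this write with related changes and commit once, as
# TransferHotlistOwnership() does:
#   features_service.UpdateHotlistRoles(
#       cnxn, 7001, [111], [222, 333], [], commit=False)
#   cnxn.Commit()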
| 1258 | |
| 1259 | def DeleteHotlist(self, cnxn, hotlist_id, commit=True): |
| 1260 | hotlist = self.GetHotlist(cnxn, hotlist_id, use_cache=False) |
| 1261 | if not hotlist: |
| 1262 | raise NoSuchHotlistException() |
| 1263 | |
| 1264 | # Fetch all associated project IDs in order to invalidate their cache. |
| 1265 | project_ids = self.GetProjectIDsFromHotlist(cnxn, hotlist_id) |
| 1266 | |
| 1267 | delta = {'is_deleted': True} |
| 1268 | self.hotlist_tbl.Update(cnxn, delta, id=hotlist_id, commit=commit) |
| 1269 | |
| 1270 | self.hotlist_2lc.InvalidateKeys(cnxn, [hotlist_id]) |
| 1271 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.owner_ids) |
| 1272 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, hotlist.editor_ids) |
| 1273 | if not hotlist.owner_ids: # Should never happen. |
| 1274 | logging.warning(
| 1275 | 'Soft-deleting unowned Hotlist: id:%r, name:%r', hotlist_id, |
| 1276 | hotlist.name) |
| 1277 | elif hotlist.name:
| 1278 | self.hotlist_id_2lc.InvalidateKeys( |
| 1279 | cnxn, [(hotlist.name.lower(), owner_id) for |
| 1280 | owner_id in hotlist.owner_ids]) |
| 1281 | |
| 1282 | for project_id in project_ids: |
| 1283 | self.config_service.InvalidateMemcacheForEntireProject(project_id) |
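# Illustrative usage (sketch): DeleteHotlist() is a soft delete that only sets
# is_deleted; ExpungeHotlists() below removes the rows from the DB entirely:
#   features_service.DeleteHotlist(cnxn, 7001)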
| 1284 | |
| 1285 | def ExpungeHotlists( |
| 1286 | self, cnxn, hotlist_ids, star_svc, user_svc, chart_svc, commit=True): |
| 1287 | """Wipes the given hotlists from the DB tables. |
| 1288 | |
| 1289 | This method will only do cache invalidation if commit is set to True. |
| 1290 | |
| 1291 | Args: |
| 1292 | cnxn: connection to SQL database. |
| 1293 | hotlist_ids: the IDs of the hotlists to expunge.
| 1294 | star_svc: an instance of a HotlistStarService. |
| 1295 | user_svc: an instance of a UserService. |
| 1296 | chart_svc: an instance of a ChartService. |
| 1297 | commit: set to False to skip the DB commit and do it in the caller. |
| 1298 | """ |
| 1299 | |
| 1300 | hotlists_by_id = self.GetHotlists(cnxn, hotlist_ids) |
| 1301 | |
| 1302 | for hotlist_id in hotlist_ids: |
| 1303 | star_svc.ExpungeStars(cnxn, hotlist_id, commit=commit) |
| 1304 | chart_svc.ExpungeHotlistsFromIssueSnapshots( |
| 1305 | cnxn, hotlist_ids, commit=commit) |
| 1306 | user_svc.ExpungeHotlistsFromHistory(cnxn, hotlist_ids, commit=commit) |
| 1307 | self.hotlist2user_tbl.Delete(cnxn, hotlist_id=hotlist_ids, commit=commit) |
| 1308 | self.hotlist2issue_tbl.Delete(cnxn, hotlist_id=hotlist_ids, commit=commit) |
| 1309 | self.hotlist_tbl.Delete(cnxn, id=hotlist_ids, commit=commit) |
| 1310 | |
| 1311 | # Invalidate cache for deleted hotlists. |
| 1312 | self.hotlist_2lc.InvalidateKeys(cnxn, hotlist_ids) |
| 1313 | users_to_invalidate = set() |
| 1314 | for hotlist in hotlists_by_id.values(): |
| 1315 | users_to_invalidate.update( |
| 1316 | hotlist.owner_ids + hotlist.editor_ids + hotlist.follower_ids) |
| 1317 | self.hotlist_id_2lc.InvalidateKeys( |
| 1318 | cnxn, [(hotlist.name.lower(), owner_id) for owner_id in hotlist.owner_ids])
| 1319 | self.hotlist_user_to_ids.InvalidateKeys(cnxn, list(users_to_invalidate)) |
| 1320 | hotlist_project_ids = set() |
| 1321 | for hotlist_id in hotlist_ids: |
| 1322 | hotlist_project_ids.update(self.GetProjectIDsFromHotlist( |
| 1323 | cnxn, hotlist_id)) |
| 1324 | for project_id in hotlist_project_ids: |
| 1325 | self.config_service.InvalidateMemcacheForEntireProject(project_id) |
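# Illustrative usage (sketch; the *_svc arguments are caller-supplied service
# instances, as described in the docstring above):
#   features_service.ExpungeHotlists(
#       cnxn, [7001, 7002], star_svc, user_svc, chart_svc)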
| 1326 | |
| 1327 | def ExpungeUsersInHotlists( |
| 1328 | self, cnxn, user_ids, star_svc, user_svc, chart_svc): |
| 1329 | """Wipes the given users and any hotlists they owned from the |
| 1330 | hotlists system. |
| 1331 | |
| 1332 | This method does not commit the operation, and it does not modify
| 1333 | in-memory data.
| 1334 | """ |
| 1335 | # Transfer hotlist ownership to editors, if possible. |
| 1336 | hotlist_ids_by_user_id = self.LookupUserHotlists(cnxn, user_ids) |
| 1337 | hotlist_ids = [hotlist_id for hotlist_ids in hotlist_ids_by_user_id.values() |
| 1338 | for hotlist_id in hotlist_ids] |
| 1339 | hotlists_by_id, missed = self.GetHotlistsByID( |
| 1340 | cnxn, list(set(hotlist_ids)), use_cache=False) |
| 1341 | logging.info('Missed hotlists: %s', missed) |
| 1342 | |
| 1343 | hotlists_to_delete = [] |
| 1344 | for hotlist_id, hotlist in hotlists_by_id.items(): |
| 1345 | # One of the users to be deleted is an owner of the hotlist.
| 1346 | if not set(hotlist.owner_ids).isdisjoint(user_ids): |
| 1347 | hotlists_to_delete.append(hotlist_id) |
| 1348 | candidate_new_owners = [user_id for user_id in hotlist.editor_ids |
| 1349 | if user_id not in user_ids] |
| 1350 | for candidate_id in candidate_new_owners: |
| 1351 | if not self.LookupHotlistIDs(cnxn, [hotlist.name], [candidate_id]): |
| 1352 | self.TransferHotlistOwnership( |
| 1353 | cnxn, hotlist, candidate_id, False, commit=False) |
| 1354 | # Hotlist transferred successfully. No need to delete it. |
| 1355 | hotlists_to_delete.remove(hotlist_id) |
| 1356 | break |
| 1357 | |
| 1358 | # Delete users |
| 1359 | self.hotlist2user_tbl.Delete(cnxn, user_id=user_ids, commit=False) |
| 1360 | self.hotlist2issue_tbl.Update( |
| 1361 | cnxn, {'adder_id': framework_constants.DELETED_USER_ID}, |
| 1362 | adder_id=user_ids, commit=False) |
| 1363 | user_svc.ExpungeUsersHotlistsHistory(cnxn, user_ids, commit=False) |
| 1364 | # Delete hotlists |
| 1365 | if hotlists_to_delete: |
| 1366 | self.ExpungeHotlists( |
| 1367 | cnxn, hotlists_to_delete, star_svc, user_svc, chart_svc, commit=False) |
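# Illustrative usage (sketch): this is the user-expunge path. Ownership of
# each affected hotlist is handed to an eligible editor where possible;
# otherwise the hotlist is expunged. The caller is expected to commit:
#   features_service.ExpungeUsersInHotlists(
#       cnxn, [111], star_svc, user_svc, chart_svc)
#   cnxn.Commit()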
| 1368 | |
| 1369 | |
| 1370 | class HotlistAlreadyExists(Exception): |
| 1371 | """Tried to create a hotlist with the same name as another hotlist |
| 1372 | with the same owner.""" |
| 1373 | pass |
| 1374 | |
| 1375 | |
| 1376 | class NoSuchHotlistException(Exception): |
| 1377 | """The requested hotlist was not found.""" |
| 1378 | pass |
| 1379 | |
| 1380 | |
| 1381 | class UnownedHotlistException(Exception): |
| 1382 | """Tried to create a hotlist with no owner.""" |
| 1383 | pass |