# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

"""Tests for the frontendsearchpipeline module."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

try:
  from mox3 import mox
except ImportError:
  import mox
import unittest

from google.appengine.api import memcache
from google.appengine.api import modules
from google.appengine.ext import testbed
from google.appengine.api import urlfetch

import settings
from framework import framework_helpers
from framework import sorting
from framework import urls
from proto import ast_pb2
from proto import project_pb2
from proto import tracker_pb2
from search import frontendsearchpipeline
from search import searchpipeline
from search import query2ast
from services import service_manager
from testing import fake
from testing import testing_helpers
from tracker import tracker_bizobj


# Just an example timestamp. The value does not matter.
NOW = 2444950132


class FrontendSearchPipelineTest(unittest.TestCase):

  def setUp(self):
    self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
    self.services = service_manager.Services(
        user=fake.UserService(),
        project=fake.ProjectService(),
        issue=fake.IssueService(),
        config=fake.ConfigService(),
        cache_manager=fake.CacheManager())
    self.services.user.TestAddUser('a@example.com', 111)
    self.project = self.services.project.TestAddProject('proj', project_id=789)
    self.mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/list', project=self.project)
    self.mr.me_user_id = 111

    self.issue_1 = fake.MakeTestIssue(
        789, 1, 'one', 'New', 111, labels=['Priority-High'])
    self.services.issue.TestAddIssue(self.issue_1)
    self.issue_2 = fake.MakeTestIssue(
        789, 2, 'two', 'New', 111, labels=['Priority-Low'])
    self.services.issue.TestAddIssue(self.issue_2)
    self.issue_3 = fake.MakeTestIssue(
        789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
    self.services.issue.TestAddIssue(self.issue_3)
    self.mr.sort_spec = 'Priority'
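    # With sort_spec 'Priority', the expected total ordering in these tests
    # is issue_1 (High), then issue_3 (Medium), then issue_2 (Low).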

    self.cnxn = self.mr.cnxn
    self.project = self.mr.project
    self.auth = self.mr.auth
    self.me_user_id = self.mr.me_user_id
    self.query = self.mr.query
    self.query_project_names = self.mr.query_project_names
    self.items_per_page = self.mr.num  # defaults to 100
    self.paginate_start = self.mr.start
    self.paginate_end = self.paginate_start + self.items_per_page
    self.can = self.mr.can
    self.group_by_spec = self.mr.group_by_spec
    self.sort_spec = self.mr.sort_spec
    self.warnings = self.mr.warnings
    self.errors = self.mr.errors
    self.use_cached_searches = self.mr.use_cached_searches
    self.profiler = self.mr.profiler

    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()
    sorting.InitializeArtValues(self.services)

  def tearDown(self):
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testSearchForIIDs_AllResultsCached_AllAtRiskCached(self):
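    # Keys in unfiltered_iids are (shard_id, subquery) pairs; 'p:v' here
    # stands in for the subquery text, and the values are lists of issue IDs.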
    unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
    nonviewable_iids = {1: set()}
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
        unfiltered_iids, {}, nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(2, pipeline.total_count)
    self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])

  def testSearchForIIDs_CrossProject_AllViewable(self):
    self.services.project.TestAddProject('other', project_id=790)
    unfiltered_iids = {(1, 'p:v'): [1001, 1011, 2001]}
    nonviewable_iids = {1: set()}
    self.query_project_names = ['other']
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['other', 'proj'], [789, 790],
        mox.IsA(tracker_pb2.ProjectIssueConfig), unfiltered_iids, {},
        nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(3, pipeline.total_count)
    self.assertEqual([1001, 1011, 2001], pipeline.filtered_iids[(1, 'p:v')])

  def testSearchForIIDs_CrossProject_MembersOnlyOmitted(self):
    self.services.project.TestAddProject(
        'other', project_id=790, access=project_pb2.ProjectAccess.MEMBERS_ONLY)
    unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
    nonviewable_iids = {1: set()}
    # project 'other' gets filtered out before the backend call.
    self.mr.query_project_names = ['other']
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
        unfiltered_iids, {}, nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(2, pipeline.total_count)
    self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])

  def testMergeAndSortIssues_EmptyResult(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.filtered_iids = {0: [], 1: [], 2: []}

    pipeline.MergeAndSortIssues()
    self.assertEqual([], pipeline.allowed_iids)
    self.assertEqual([], pipeline.allowed_results)
    self.assertEqual({}, pipeline.users_by_id)

  def testMergeAndSortIssues_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # In this unit test we do not call SearchForIIDs(); instead we set
    # pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: [self.issue_3.issue_id]
    }

    pipeline.MergeAndSortIssues()
    self.assertEqual(
        [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
        pipeline.allowed_iids)
    self.assertEqual(
        [self.issue_1, self.issue_3, self.issue_2],  # high, medium, low.
        pipeline.allowed_results)
    self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))

  def testDetermineIssuePosition_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # In this unit test we do not call SearchForIIDs(); instead we set
    # pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: [self.issue_3.issue_id]
    }

    prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    self.assertEqual(self.issue_1.issue_id, prev_iid)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2.issue_id, next_iid)

  def testDetermineIssuePosition_NotInResults(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # In this unit test we do not call SearchForIIDs(); instead we set
    # pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: []
    }

    prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    self.assertEqual(None, prev_iid)
    self.assertEqual(None, index)
    self.assertEqual(None, next_iid)

  def testDetermineIssuePositionInShard_IssueIsInShard(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # Let's assume issues 1, 2, and 3 are all in the same shard.
    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_2.issue_id,
            self.issue_3.issue_id],
    }

    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_1, {})
    self.assertEqual(None, prev_cand)
    self.assertEqual(0, index)
    self.assertEqual(self.issue_3, next_cand)

    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_3, {})
    self.assertEqual(self.issue_1, prev_cand)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2, next_cand)

    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_2, {})
    self.assertEqual(self.issue_3, prev_cand)
    self.assertEqual(2, index)
    self.assertEqual(None, next_cand)

  def testDetermineIssuePositionInShard_IssueIsNotInShard(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    pipeline.filtered_iids = {
        0: [self.issue_2.issue_id, self.issue_3.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_1, {})
    self.assertEqual(None, prev_cand)
    self.assertEqual(0, index)
    self.assertEqual(self.issue_3, next_cand)

    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_2.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_3, {})
    self.assertEqual(self.issue_1, prev_cand)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2, next_cand)

    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_3.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_2, {})
    self.assertEqual(self.issue_3, prev_cand)
    self.assertEqual(2, index)
    self.assertEqual(None, next_cand)

  def testFetchAllSamples_Empty(self):
    filtered_iids = {}
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual({}, samples_by_shard)
    self.assertEqual({}, sample_iids_to_shard)

  def testFetchAllSamples_SmallResultsPerShard(self):
    filtered_iids = {
        0: [100, 110, 120],
        1: [101, 111, 121],
    }
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual(2, len(samples_by_shard))
    self.assertEqual(0, len(sample_iids_to_shard))

  def testFetchAllSamples_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issues = self.MakeIssues(23)
    filtered_iids = {
        0: [issue.issue_id for issue in issues],
    }

    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual(1, len(samples_by_shard))
    self.assertEqual(2, len(samples_by_shard[0]))
    self.assertEqual(2, len(sample_iids_to_shard))
    for sample_iid in sample_iids_to_shard:
      shard_key = sample_iids_to_shard[sample_iid]
      self.assertIn(sample_iid, filtered_iids[shard_key])

  def testChooseSampleIssues_Empty(self):
    """When the search gave no results, there cannot be any samples."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issue_ids = []
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual([], needed_iids)

  def testChooseSampleIssues_Small(self):
    """When the search gave few results, don't bother with samples."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issue_ids = [78901, 78902]
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual([], needed_iids)

  def MakeIssues(self, num_issues):
    issues = []
    for i in range(num_issues):
      issue = fake.MakeTestIssue(789, 100 + i, 'samp test', 'New', 111)
      issues.append(issue)
      self.services.issue.TestAddIssue(issue)
    return issues

  def testChooseSampleIssues_Normal(self):
    """We will choose at least one sample for every 10 results in a shard."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issues = self.MakeIssues(23)
    issue_ids = [issue.issue_id for issue in issues]
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual(2, len(needed_iids))
    for sample_iid in needed_iids:
      self.assertIn(sample_iid, issue_ids)

  def testLookupNeededUsers(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    pipeline._LookupNeededUsers([])
    self.assertEqual([], list(pipeline.users_by_id.keys()))

    pipeline._LookupNeededUsers([self.issue_1, self.issue_2, self.issue_3])
    self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))

  def testPaginate_List(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.allowed_iids = [
        self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
    pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
    pipeline.total_count = len(pipeline.allowed_results)
    pipeline.Paginate()
    self.assertEqual(
        [self.issue_1, self.issue_2, self.issue_3],
        pipeline.visible_results)
    self.assertFalse(pipeline.pagination.limit_reached)


class FrontendSearchPipelineMethodsTest(unittest.TestCase):

  def setUp(self):
    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()

    self.project_id = 789
    self.default_config = tracker_bizobj.MakeDefaultProjectIssueConfig(
        self.project_id)
    self.services = service_manager.Services(
        project=fake.ProjectService())
    self.project = self.services.project.TestAddProject(
        'proj', project_id=self.project_id)

  def tearDown(self):
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testMakeBackendCallback(self):
    called_with = []

    def func(a, b):
      called_with.append((a, b))

    callback = frontendsearchpipeline._MakeBackendCallback(func, 10, 20)
    callback()
    self.assertEqual([(10, 20)], called_with)

  def testParseUserQuery_CheckQuery(self):
    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'ok query', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertIsNone(msg)
    self.assertEqual([], warnings)

    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'modified:0-0-0', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertEqual(
        'Could not parse date: 0-0-0',
        msg)

    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'blocking:3.14', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertEqual(
        'Could not parse issue reference: 3.14',
        msg)
    self.assertEqual([], warnings)

  def testStartBackendSearch(self):
    # TODO(jrobbins): write this test.
    pass

  def testFinishBackendSearch(self):
    # TODO(jrobbins): write this test.
    pass

  def testGetProjectTimestamps_NoneSet(self):
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [])
    self.assertEqual({}, project_shard_timestamps)

    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual({}, project_shard_timestamps)

    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual({}, project_shard_timestamps)

  def testGetProjectTimestamps_SpecificProjects(self):
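    # Timestamp memcache keys are '<project_id>;<shard_id>', or
    # 'all;<shard_id>' in the site-wide case tested below.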
    memcache.set('789;0', NOW)
    memcache.set('789;1', NOW - 1000)
    memcache.set('789;2', NOW - 3000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        { (789, 0): NOW,
          (789, 1): NOW - 1000,
          (789, 2): NOW - 3000,
        },
        project_shard_timestamps)

    memcache.set('790;0', NOW)
    memcache.set('790;1', NOW - 10000)
    memcache.set('790;2', NOW - 30000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789, 790], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        { (789, 0): NOW,
          (789, 1): NOW - 1000,
          (789, 2): NOW - 3000,
          (790, 0): NOW,
          (790, 1): NOW - 10000,
          (790, 2): NOW - 30000,
        },
        project_shard_timestamps)

  def testGetProjectTimestamps_SiteWide(self):
    memcache.set('all;0', NOW)
    memcache.set('all;1', NOW - 10000)
    memcache.set('all;2', NOW - 30000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        { ('all', 0): NOW,
          ('all', 1): NOW - 10000,
          ('all', 2): NOW - 30000,
        },
        project_shard_timestamps)

  def testGetNonviewableIIDs_SearchMissSoNoOp(self):
    """If search cache missed, don't bother looking up nonviewable IIDs."""
    unfiltered_iids_dict = {}  # No cached search results found.
    rpc_tuples = []  # Nothing should accumulate here in this case.
    nonviewable_iids = {}  # Nothing should accumulate here in this case.
    processed_invalidations_up_to = 12345
    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, {}, processed_invalidations_up_to, True)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({}, nonviewable_iids)

  def testGetNonviewableIIDs_SearchHitThenNonviewableHit(self):
    """If search cache hit, get nonviewable info from cache."""
    unfiltered_iids_dict = {
        1: [10001, 10021],
        2: ['the search result issue_ids do not matter'],
    }
    rpc_tuples = []  # Nothing should accumulate here in this case.
    nonviewable_iids = {}  # Our mock results should end up here.
    processed_invalidations_up_to = 12345
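    # Nonviewable memcache keys have the form
    # 'nonviewable:<project_id>;<user_id>;<shard_id>'.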
    memcache.set('nonviewable:789;111;1',
                 ([10001, 10031], processed_invalidations_up_to - 10))
    memcache.set('nonviewable:789;111;2',
                 ([10002, 10042], processed_invalidations_up_to - 30))

    project_shard_timestamps = {
        (789, 1): 0,  # not stale
        (789, 2): 0,  # not stale
    }
    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, project_shard_timestamps,
        processed_invalidations_up_to, True)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001, 10031}, 2: {10002, 10042}}, nonviewable_iids)

  def testGetNonviewableIIDs_SearchHitNonviewableMissSoStartRPC(self):
    """If search hit and n-v miss, create RPCs to get nonviewable info."""
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    unfiltered_iids_dict = {
        2: ['the search result issue_ids do not matter'],
    }
    rpc_tuples = []  # One RPC object should accumulate here.
    nonviewable_iids = {}  # This will stay empty until RPCs complete.
    processed_invalidations_up_to = 12345
    # Nothing is set in memcache for this case.
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, {}, processed_invalidations_up_to, True)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual({}, nonviewable_iids)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)

  def testAccumulateNonviewableIIDs_MemcacheHitForProject(self):
    processed_invalidations_up_to = 12345
    cached_dict = {
        '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # This will gain the shard 2 values.
    project_shard_timestamps = {
        (789, 1): 0,  # not stale
        (789, 2): 0,  # not stale
    }
    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheStaleForProject(self):
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    processed_invalidations_up_to = 12345
    cached_dict = {
        '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # Nothing added here until RPC completes.
    project_shard_timestamps = {
        (789, 1): 0,  # not stale
        (789, 2): processed_invalidations_up_to,  # stale!
    }
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)
    self.assertEqual({1: {10001}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheHitForWholeSite(self):
    processed_invalidations_up_to = 12345
    cached_dict = {
        'all;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        'all;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # This will gain the shard 2 values.
    project_shard_timestamps = {
        (None, 1): 0,  # not stale
        (None, 2): 0,  # not stale
    }
    frontendsearchpipeline._AccumulateNonviewableIIDs(
        None, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheMissSoStartRPC(self):
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    cached_dict = {}  # Nothing here, so it is an at-risk cache miss.
    rpc_tuples = []  # One RPC should accumulate here.
    nonviewable_iids = {1: {10001}}  # Nothing added here until RPC completes.
    processed_invalidations_up_to = 12345
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, {}, rpc_tuples,
        processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)
    self.assertEqual({1: {10001}}, nonviewable_iids)

  def testGetCachedSearchResults(self):
    # TODO(jrobbins): Write this test.
    pass

  def testMakeBackendRequestHeaders(self):
    headers = frontendsearchpipeline._MakeBackendRequestHeaders(False)
    self.assertNotIn('X-AppEngine-FailFast', headers)
    headers = frontendsearchpipeline._MakeBackendRequestHeaders(True)
    self.assertEqual('Yes', headers['X-AppEngine-FailFast'])

  def testStartBackendSearchCall(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc, mox.StrContains(
            urls.BACKEND_SEARCH + '?groupby=cc&invalidation_timestep=12345&'
            + 'logged_in_user_id=777&me_user_ids=555&'
            + 'num=201&projects=proj&q=priority%3Dhigh&shard_id=2&start=0'),
        follow_redirects=False,
        headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    me_user_ids = [555]
    logged_in_user_id = 777
    new_url_num = 201
    frontendsearchpipeline._StartBackendSearchCall(
        ['proj'], (2, 'priority=high'),
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        group_by_spec='cc')
    self.mox.VerifyAll()

  def testStartBackendSearchCall_SortAndGroup(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc,
        mox.StrContains(
            urls.BACKEND_SEARCH + '?groupby=bar&' +
            'invalidation_timestep=12345&' +
            'logged_in_user_id=777&me_user_ids=555&num=201&projects=proj&' +
            'q=priority%3Dhigh&shard_id=2&sort=foo&start=0'),
        follow_redirects=False,
        headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    me_user_ids = [555]
    logged_in_user_id = 777
    new_url_num = 201
    sort_spec = 'foo'
    group_by_spec = 'bar'
    frontendsearchpipeline._StartBackendSearchCall(
        ['proj'], (2, 'priority=high'),
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        sort_spec=sort_spec,
        group_by_spec=group_by_spec)
    self.mox.VerifyAll()

  def testStartBackendNonviewableCall(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc, mox.StrContains(urls.BACKEND_NONVIEWABLE),
        follow_redirects=False, headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to)
    self.mox.VerifyAll()

  def testHandleBackendSearchResponse_500(self):
    response_str = 'There was a problem processing the query.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=500))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 0, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertIn(2, error_responses)

  def testHandleBackendSearchResponse_Error(self):
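    # The leading "})]'" below matches the anti-XSSI prefix that the response
    # parser presumably strips before decoding the JSON payload.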
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [],'
        ' "search_limit_reached": false,'
        ' "error": "Invalid query"'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: []}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)
    self.assertEqual({2}, error_responses)

  def testHandleBackendSearchResponse_Normal(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [10002, 10042],'
        ' "search_limit_reached": false'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: [10002, 10042]}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)

  def testHandleBackendSearchResponse_TriggersRetry(self):
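    # A content payload of None simulates an RPC that returned nothing usable,
    # which should make the handler kick off a retry RPC for this shard.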
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # A new RPC should be appended here.
    filtered_iids = {}  # No change here until the retry completes.
    search_limit_reached = {}  # No change here until the retry completes.
    processed_invalidations_up_to = 12345
    error_responses = set()

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    rpc = frontendsearchpipeline._StartBackendSearchCall(
        ['proj'],
        2,
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        can=1,
        group_by_spec=None,
        sort_spec=None,
        failfast=False).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual({}, filtered_iids)
    self.assertEqual({}, search_limit_reached)

  def testHandleBackendNonviewableResponse_Error(self):
    response_str = 'There was an error.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=500
        ))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk issue IDs should accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 0, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertNotEqual({2: {10002, 10042}}, nonviewable_iids)

  def testHandleBackendNonviewableResponse_Normal(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "nonviewable": [10002, 10042]'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=200
        ))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk issue IDs should accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: {10002, 10042}}, nonviewable_iids)

  def testHandleBackendAtRiskResponse_TriggersRetry(self):
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # A new RPC should be appended here.
    nonviewable_iids = {}  # No change here until the retry completes.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    rpc = frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to, failfast=False
        ).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertEqual({}, nonviewable_iids)

  def testSortIssues(self):
    services = service_manager.Services(
        cache_manager=fake.CacheManager())
    sorting.InitializeArtValues(services)

    issue_1 = fake.MakeTestIssue(
        789, 1, 'one', 'New', 111, labels=['Priority-High'])
    issue_2 = fake.MakeTestIssue(
        789, 2, 'two', 'New', 111, labels=['Priority-Low'])
    issue_3 = fake.MakeTestIssue(
        789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
    issues = [issue_1, issue_2, issue_3]
    config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)

    sorted_issues = frontendsearchpipeline._SortIssues(
        issues, config, {}, '', 'priority')

    self.assertEqual(
        [issue_1, issue_3, issue_2],  # Order is high, medium, low.
        sorted_issues)


class FrontendSearchPipelineShardMethodsTest(unittest.TestCase):

  def setUp(self):
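    # Shard lengths below are 5, 4, 5, and 0, for a total of 14 issue IDs;
    # testTotalLength_Normal depends on that total.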
    self.sharded_iids = {
        (0, 'p:v'): [10, 20, 30, 40, 50],
        (1, 'p:v'): [21, 41, 61, 81],
        (2, 'p:v'): [42, 52, 62, 72, 102],
        (3, 'p:v'): [],
    }

  def testTotalLength_Empty(self):
    """If there were no results, the length of the sharded list is zero."""
    self.assertEqual(0, frontendsearchpipeline._TotalLength({}))

  def testTotalLength_Normal(self):
    """The length of the sharded list is the sum of the shard lengths."""
    self.assertEqual(
        14, frontendsearchpipeline._TotalLength(self.sharded_iids))

  def testReverseShards_Empty(self):
    """Reversing an empty sharded list is still empty."""
    empty_sharded_iids = {}
    frontendsearchpipeline._ReverseShards(empty_sharded_iids)
    self.assertEqual({}, empty_sharded_iids)

  def testReverseShards_Normal(self):
    """Reversing a sharded list reverses each shard."""
    frontendsearchpipeline._ReverseShards(self.sharded_iids)
    self.assertEqual(
        {(0, 'p:v'): [50, 40, 30, 20, 10],
         (1, 'p:v'): [81, 61, 41, 21],
         (2, 'p:v'): [102, 72, 62, 52, 42],
         (3, 'p:v'): [],
        },
        self.sharded_iids)

  def testTrimShardedIIDs_Empty(self):
    """If the sharded list is empty, trimming it makes no change."""
    empty_sharded_iids = {}
    frontendsearchpipeline._TrimEndShardedIIDs(empty_sharded_iids, [], 12)
    self.assertEqual({}, empty_sharded_iids)

    frontendsearchpipeline._TrimEndShardedIIDs(
        empty_sharded_iids,
        [(100, (0, 'p:v')), (88, (8, 'p:v')), (99, (9, 'p:v'))],
        12)
    self.assertEqual({}, empty_sharded_iids)

  def testTrimShardedIIDs_NoSamples(self):
    """If there are no samples, we don't trim off any IIDs."""
    orig_sharded_iids = {
        shard_id: iids[:] for shard_id, iids in self.sharded_iids.items()}
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 12)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 1)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

  def testTrimShardedIIDs_Normal(self):
    """The first 3 samples contribute all needed IIDs, so trim off the rest."""
    samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
               (40, (0, 'p:v')), (81, (1, 'p:v'))]
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, samples, 5)
    self.assertEqual(2 + 1 + 0 + 0, num_trimmed)
    self.assertEqual(
        {  # shard_id: iids before lower-bound + iids before 1st excess sample.
         (0, 'p:v'): [10, 20] + [30],
         (1, 'p:v'): [21] + [41, 61],
         (2, 'p:v'): [42, 52] + [62, 72, 102],
         (3, 'p:v'): [] + []},
        self.sharded_iids)

  def testCalcSamplePositions_Empty(self):
    sharded_iids = {0: []}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: [10, 20, 30, 40]}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: []}
    # E.g., the IIDs 2 and 4 might have been trimmed out in the forward phase.
    # But we still have them in the list for the backwards phase, and they
    # should just not contribute anything to the result.
    samples = [(2, (2, 'p:v')), (4, (4, 'p:v'))]
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

  def testCalcSamplePositions_Normal(self):
1333 def testCalcSamplePositions_Normal(self):
1334 samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
1335 (40, (0, 'p:v')), (81, (1, 'p:v'))]
1336 self.assertEqual(
1337 [(30, (0, 'p:v'), 2),
1338 (41, (1, 'p:v'), 1),
1339 (62, (2, 'p:v'), 2),
1340 (40, (0, 'p:v'), 3),
1341 (81, (1, 'p:v'), 3)],
1342 frontendsearchpipeline._CalcSamplePositions(self.sharded_iids, samples))