# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Tests for the frontendsearchpipeline module."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

try:
  from mox3 import mox
except ImportError:
  import mox
import unittest

from google.appengine.api import memcache
from google.appengine.api import modules
from google.appengine.ext import testbed
from google.appengine.api import urlfetch

import settings
from framework import framework_helpers
from framework import sorting
from framework import urls
from mrproto import ast_pb2
from mrproto import project_pb2
from mrproto import tracker_pb2
from search import frontendsearchpipeline
from search import searchpipeline
from search import query2ast
from services import service_manager
from testing import fake
from testing import testing_helpers
from tracker import tracker_bizobj


# Just an example timestamp. The value does not matter.
NOW = 2444950132


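# Shard keys in these tests are (shard_id, subquery) tuples such as (1, 'p:v');
# some tests use bare shard IDs where the subquery does not matter. Issue IDs
# like 1001 are arbitrary test values.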
class FrontendSearchPipelineTest(unittest.TestCase):

  def setUp(self):
    self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
    self.services = service_manager.Services(
        user=fake.UserService(),
        project=fake.ProjectService(),
        issue=fake.IssueService(),
        config=fake.ConfigService(),
        cache_manager=fake.CacheManager())
    self.services.user.TestAddUser('a@example.com', 111)
    self.project = self.services.project.TestAddProject('proj', project_id=789)
    self.mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/list', project=self.project)
    self.mr.me_user_id = 111

    self.issue_1 = fake.MakeTestIssue(
        789, 1, 'one', 'New', 111, labels=['Priority-High'])
    self.services.issue.TestAddIssue(self.issue_1)
    self.issue_2 = fake.MakeTestIssue(
        789, 2, 'two', 'New', 111, labels=['Priority-Low'])
    self.services.issue.TestAddIssue(self.issue_2)
    self.issue_3 = fake.MakeTestIssue(
        789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
    self.services.issue.TestAddIssue(self.issue_3)
    self.mr.sort_spec = 'Priority'

    self.cnxn = self.mr.cnxn
    self.project = self.mr.project
    self.auth = self.mr.auth
    self.me_user_id = self.mr.me_user_id
    self.query = self.mr.query
    self.query_project_names = self.mr.query_project_names
    self.items_per_page = self.mr.num  # defaults to 100
    self.paginate_start = self.mr.start
    self.paginate_end = self.paginate_start + self.items_per_page
    self.can = self.mr.can
    self.group_by_spec = self.mr.group_by_spec
    self.sort_spec = self.mr.sort_spec
    self.warnings = self.mr.warnings
    self.errors = self.mr.errors
    self.use_cached_searches = self.mr.use_cached_searches
    self.profiler = self.mr.profiler

    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()
    sorting.InitializeArtValues(self.services)

  def tearDown(self):
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testSearchForIIDs_AllResultsCached_AllAtRiskCached(self):
    unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
    nonviewable_iids = {1: set()}
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
        unfiltered_iids, {}, nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(2, pipeline.total_count)
    self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])

  def testSearchForIIDs_CrossProject_AllViewable(self):
    self.services.project.TestAddProject('other', project_id=790)
    unfiltered_iids = {(1, 'p:v'): [1001, 1011, 2001]}
    nonviewable_iids = {1: set()}
    self.query_project_names = ['other']
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['other', 'proj'], [789, 790],
        mox.IsA(tracker_pb2.ProjectIssueConfig), unfiltered_iids, {},
        nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(3, pipeline.total_count)
    self.assertEqual([1001, 1011, 2001], pipeline.filtered_iids[(1, 'p:v')])

  def testSearchForIIDs_CrossProject_MembersOnlyOmitted(self):
    self.services.project.TestAddProject(
        'other', project_id=790, access=project_pb2.ProjectAccess.MEMBERS_ONLY)
    unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
    nonviewable_iids = {1: set()}
    # Project 'other' gets filtered out before the backend call.
    self.mr.query_project_names = ['other']
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
        unfiltered_iids, {}, nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(2, pipeline.total_count)
    self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])

  def testMergeAndSortIssues_EmptyResult(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.filtered_iids = {0: [], 1: [], 2: []}

    pipeline.MergeAndSortIssues()
    self.assertEqual([], pipeline.allowed_iids)
    self.assertEqual([], pipeline.allowed_results)
    self.assertEqual({}, pipeline.users_by_id)

  def testMergeAndSortIssues_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # In this unit test case we are not calling SearchForIIDs(); instead we
    # just set pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: [self.issue_3.issue_id]
    }

    pipeline.MergeAndSortIssues()
    self.assertEqual(
        [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
        pipeline.allowed_iids)
    self.assertEqual(
        [self.issue_1, self.issue_3, self.issue_2],  # high, medium, low.
        pipeline.allowed_results)
    self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))

  def testDetermineIssuePosition_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # In this unit test case we are not calling SearchForIIDs(); instead we
    # just set pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: [self.issue_3.issue_id]
    }

    prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    self.assertEqual(self.issue_1.issue_id, prev_iid)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2.issue_id, next_iid)

  def testDetermineIssuePosition_NotInResults(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # In this unit test case we are not calling SearchForIIDs(); instead we
    # just set pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: []
    }

    prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    self.assertEqual(None, prev_iid)
    self.assertEqual(None, index)
    self.assertEqual(None, next_iid)

  def testDetermineIssuePositionInShard_IssueIsInShard(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # Let's assume issues 1, 2, and 3 are all in the same shard.
    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
    }

    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_1, {})
    self.assertEqual(None, prev_cand)
    self.assertEqual(0, index)
    self.assertEqual(self.issue_3, next_cand)

    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_3, {})
    self.assertEqual(self.issue_1, prev_cand)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2, next_cand)

    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_2, {})
    self.assertEqual(self.issue_3, prev_cand)
    self.assertEqual(2, index)
    self.assertEqual(None, next_cand)

  def testDetermineIssuePositionInShard_IssueIsNotInShard(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    pipeline.filtered_iids = {
        0: [self.issue_2.issue_id, self.issue_3.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_1, {})
    self.assertEqual(None, prev_cand)
    self.assertEqual(0, index)
    self.assertEqual(self.issue_3, next_cand)

    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_2.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_3, {})
    self.assertEqual(self.issue_1, prev_cand)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2, next_cand)

    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_3.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_2, {})
    self.assertEqual(self.issue_3, prev_cand)
    self.assertEqual(2, index)
    self.assertEqual(None, next_cand)

  def testFetchAllSamples_Empty(self):
    filtered_iids = {}
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual({}, samples_by_shard)
    self.assertEqual({}, sample_iids_to_shard)

  def testFetchAllSamples_SmallResultsPerShard(self):
    filtered_iids = {
        0: [100, 110, 120],
        1: [101, 111, 121],
    }
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual(2, len(samples_by_shard))
    self.assertEqual(0, len(sample_iids_to_shard))

  def testFetchAllSamples_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issues = self.MakeIssues(23)
    filtered_iids = {
        0: [issue.issue_id for issue in issues],
    }

    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual(1, len(samples_by_shard))
    self.assertEqual(2, len(samples_by_shard[0]))
    self.assertEqual(2, len(sample_iids_to_shard))
    for sample_iid in sample_iids_to_shard:
      shard_key = sample_iids_to_shard[sample_iid]
      self.assertIn(sample_iid, filtered_iids[shard_key])

  def testChooseSampleIssues_Empty(self):
    """When the search gave no results, there cannot be any samples."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issue_ids = []
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual([], needed_iids)

  def testChooseSampleIssues_Small(self):
    """When the search gave few results, don't bother with samples."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issue_ids = [78901, 78902]
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual([], needed_iids)

  def MakeIssues(self, num_issues):
    issues = []
    for i in range(num_issues):
      issue = fake.MakeTestIssue(789, 100 + i, 'samp test', 'New', 111)
      issues.append(issue)
      self.services.issue.TestAddIssue(issue)
    return issues

  def testChooseSampleIssues_Normal(self):
    """We will choose at least one sample for every 10 results in a shard."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issues = self.MakeIssues(23)
    issue_ids = [issue.issue_id for issue in issues]
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual(2, len(needed_iids))
    for sample_iid in needed_iids:
      self.assertIn(sample_iid, issue_ids)

  def testLookupNeededUsers(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    pipeline._LookupNeededUsers([])
    self.assertEqual([], list(pipeline.users_by_id.keys()))

    pipeline._LookupNeededUsers([self.issue_1, self.issue_2, self.issue_3])
    self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))

  def testPaginate_List(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.allowed_iids = [
        self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
    pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
    pipeline.total_count = len(pipeline.allowed_results)
    pipeline.Paginate()
    self.assertEqual(
        [self.issue_1, self.issue_2, self.issue_3],
        pipeline.visible_results)
    self.assertFalse(pipeline.pagination.limit_reached)


class FrontendSearchPipelineMethodsTest(unittest.TestCase):

  def setUp(self):
    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()

    self.project_id = 789
    self.default_config = tracker_bizobj.MakeDefaultProjectIssueConfig(
        self.project_id)
    self.services = service_manager.Services(
        project=fake.ProjectService())
    self.project = self.services.project.TestAddProject(
        'proj', project_id=self.project_id)

  def tearDown(self):
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testMakeBackendCallback(self):
    called_with = []

    def func(a, b):
      called_with.append((a, b))

    callback = frontendsearchpipeline._MakeBackendCallback(func, 10, 20)
    callback()
    self.assertEqual([(10, 20)], called_with)
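    # _MakeBackendCallback evidently binds its extra args in the style of
    # functools.partial: the zero-arg callback ends up calling func(10, 20).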

  def testParseUserQuery_CheckQuery(self):
    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'ok query', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertIsNone(msg)
    self.assertEqual([], warnings)

    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'modified:0-0-0', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertEqual(
        'Could not parse date: 0-0-0',
        msg)

    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'blocking:3.14', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertEqual(
        'Could not parse issue reference: 3.14',
        msg)
    self.assertEqual([], warnings)

  def testStartBackendSearch(self):
    # TODO(jrobbins): write this test.
    pass

  def testFinishBackendSearch(self):
    # TODO(jrobbins): write this test.
    pass

  def testGetProjectTimestamps_NoneSet(self):
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [])
    self.assertEqual({}, project_shard_timestamps)

    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual({}, project_shard_timestamps)

    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual({}, project_shard_timestamps)

  def testGetProjectTimestamps_SpecificProjects(self):
    memcache.set('789;0', NOW)
    memcache.set('789;1', NOW - 1000)
    memcache.set('789;2', NOW - 3000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        {(789, 0): NOW,
         (789, 1): NOW - 1000,
         (789, 2): NOW - 3000,
         },
        project_shard_timestamps)

    memcache.set('790;0', NOW)
    memcache.set('790;1', NOW - 10000)
    memcache.set('790;2', NOW - 30000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789, 790], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        {(789, 0): NOW,
         (789, 1): NOW - 1000,
         (789, 2): NOW - 3000,
         (790, 0): NOW,
         (790, 1): NOW - 10000,
         (790, 2): NOW - 30000,
         },
        project_shard_timestamps)

  def testGetProjectTimestamps_SiteWide(self):
    memcache.set('all;0', NOW)
    memcache.set('all;1', NOW - 10000)
    memcache.set('all;2', NOW - 30000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        {('all', 0): NOW,
         ('all', 1): NOW - 10000,
         ('all', 2): NOW - 30000,
         },
        project_shard_timestamps)

  def testGetNonviewableIIDs_SearchMissSoNoOp(self):
    """If search cache missed, don't bother looking up nonviewable IIDs."""
    unfiltered_iids_dict = {}  # No cached search results found.
    rpc_tuples = []  # Nothing should accumulate here in this case.
    nonviewable_iids = {}  # Nothing should accumulate here in this case.
    processed_invalidations_up_to = 12345
    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, {}, processed_invalidations_up_to, True)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({}, nonviewable_iids)

  def testGetNonviewableIIDs_SearchHitThenNonviewableHit(self):
    """If search cache hit, get nonviewable info from cache."""
    unfiltered_iids_dict = {
        1: [10001, 10021],
        2: ['the search result issue_ids do not matter'],
    }
    rpc_tuples = []  # Nothing should accumulate here in this case.
    nonviewable_iids = {}  # Our mock results should end up here.
    processed_invalidations_up_to = 12345
    memcache.set('nonviewable:789;111;1',
                 ([10001, 10031], processed_invalidations_up_to - 10))
    memcache.set('nonviewable:789;111;2',
                 ([10002, 10042], processed_invalidations_up_to - 30))

    project_shard_timestamps = {
        (789, 1): 0,  # not stale
        (789, 2): 0,  # not stale
    }
    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, project_shard_timestamps,
        processed_invalidations_up_to, True)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001, 10031}, 2: {10002, 10042}}, nonviewable_iids)

  def testGetNonviewableIIDs_SearchHitNonviewableMissSoStartRPC(self):
    """If search hit and n-v miss, create RPCs to get nonviewable info."""
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    unfiltered_iids_dict = {
        2: ['the search result issue_ids do not matter'],
    }
    rpc_tuples = []  # One RPC object should accumulate here.
    nonviewable_iids = {}  # This will stay empty until RPCs complete.
    processed_invalidations_up_to = 12345
    # Nothing is set in memcache for this case.
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, {}, processed_invalidations_up_to, True)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual({}, nonviewable_iids)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)

  def testAccumulateNonviewableIIDs_MemcacheHitForProject(self):
    processed_invalidations_up_to = 12345
    cached_dict = {
        '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # This will gain the shard 2 values.
    project_shard_timestamps = {
        (789, 1): 0,  # not stale
        (789, 2): 0,  # not stale
    }
    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheStaleForProject(self):
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    processed_invalidations_up_to = 12345
    cached_dict = {
        '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # Nothing added here until RPC completes.
    project_shard_timestamps = {
        (789, 1): 0,  # not stale
        (789, 2): processed_invalidations_up_to,  # stale!
    }
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)
    self.assertEqual({1: {10001}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheHitForWholeSite(self):
    processed_invalidations_up_to = 12345
    cached_dict = {
        'all;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        'all;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # This will gain the shard 2 values.
    project_shard_timestamps = {
        (None, 1): 0,  # not stale
        (None, 2): 0,  # not stale
    }
    frontendsearchpipeline._AccumulateNonviewableIIDs(
        None, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheMissSoStartRPC(self):
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    cached_dict = {}  # Nothing here, so it is an at-risk cache miss.
    rpc_tuples = []  # One RPC should accumulate here.
    nonviewable_iids = {1: {10001}}  # Nothing added here until RPC completes.
    processed_invalidations_up_to = 12345
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, {}, rpc_tuples,
        processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)
    self.assertEqual({1: {10001}}, nonviewable_iids)

  def testGetCachedSearchResults(self):
    # TODO(jrobbins): Write this test.
    pass

  def testMakeBackendRequestHeaders(self):
    headers = frontendsearchpipeline._MakeBackendRequestHeaders(False)
    self.assertNotIn('X-AppEngine-FailFast', headers)
    headers = frontendsearchpipeline._MakeBackendRequestHeaders(True)
    self.assertEqual('Yes', headers['X-AppEngine-FailFast'])

  def testStartBackendSearchCall(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc, mox.StrContains(
            urls.BACKEND_SEARCH + '?groupby=cc&invalidation_timestep=12345&'
            + 'logged_in_user_id=777&me_user_ids=555&'
            + 'num=201&projects=proj&q=priority%3Dhigh&shard_id=2&start=0'),
        follow_redirects=False,
        headers=mox.IsA(dict))
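    # Note: the expected URL shows the backend GET parameters in sorted order,
    # with URL-encoded values (e.g. q=priority%3Dhigh for "priority=high").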
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    me_user_ids = [555]
    logged_in_user_id = 777
    new_url_num = 201
    frontendsearchpipeline._StartBackendSearchCall(
        ['proj'], (2, 'priority=high'),
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        group_by_spec='cc')
    self.mox.VerifyAll()

  def testStartBackendSearchCall_SortAndGroup(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc,
        mox.StrContains(
            urls.BACKEND_SEARCH + '?groupby=bar&' +
            'invalidation_timestep=12345&' +
            'logged_in_user_id=777&me_user_ids=555&num=201&projects=proj&' +
            'q=priority%3Dhigh&shard_id=2&sort=foo&start=0'),
        follow_redirects=False,
        headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    me_user_ids = [555]
    logged_in_user_id = 777
    new_url_num = 201
    sort_spec = 'foo'
    group_by_spec = 'bar'
    frontendsearchpipeline._StartBackendSearchCall(
        ['proj'], (2, 'priority=high'),
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        sort_spec=sort_spec,
        group_by_spec=group_by_spec)
    self.mox.VerifyAll()

  def testStartBackendNonviewableCall(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc, mox.StrContains(urls.BACKEND_NONVIEWABLE),
        follow_redirects=False, headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to)
    self.mox.VerifyAll()

  def testHandleBackendSearchResponse_500(self):
    response_str = 'There was a problem processing the query.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=500))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 0, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertIn(2, error_responses)

  def testHandleBackendSearchResponse_Error(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [],'
        ' "search_limit_reached": false,'
        ' "error": "Invalid query"'
        '}'
    )
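    # The leading "})]'\n" appears to be an XSSI-protection prefix that the
    # response handler strips before parsing the JSON payload (an assumption
    # based on this test data; the handler itself is not shown here).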
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: []}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)
    self.assertEqual({2}, error_responses)

  def testHandleBackendSearchResponse_Normal(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [10002, 10042],'
        ' "search_limit_reached": false'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: [10002, 10042]}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)

  def testHandleBackendSearchResponse_TriggersRetry(self):
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # A new RPC should be appended here.
    filtered_iids = {}  # No change here until retry completes.
    search_limit_reached = {}  # No change here until retry completes.
    processed_invalidations_up_to = 12345
    error_responses = set()

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    rpc = frontendsearchpipeline._StartBackendSearchCall(
        ['proj'],
        2,
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        can=1,
        group_by_spec=None,
        sort_spec=None,
        failfast=False).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual({}, filtered_iids)
    self.assertEqual({}, search_limit_reached)

  def testHandleBackendNonviewableResponse_Error(self):
    response_str = 'There was an error.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=500
        ))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk issue IDs should accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 0, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertNotEqual({2: {10002, 10042}}, nonviewable_iids)

  def testHandleBackendNonviewableResponse_Normal(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "nonviewable": [10002, 10042]'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=200
        ))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk issue IDs should accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: {10002, 10042}}, nonviewable_iids)

  def testHandleBackendAtRiskResponse_TriggersRetry(self):
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # A new RPC should be appended here.
    nonviewable_iids = {}  # No change here until retry completes.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    rpc = frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to, failfast=False
        ).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertEqual({}, nonviewable_iids)

  def testSortIssues(self):
    services = service_manager.Services(
        cache_manager=fake.CacheManager())
    sorting.InitializeArtValues(services)

    issue_1 = fake.MakeTestIssue(
        789, 1, 'one', 'New', 111, labels=['Priority-High'])
    issue_2 = fake.MakeTestIssue(
        789, 2, 'two', 'New', 111, labels=['Priority-Low'])
    issue_3 = fake.MakeTestIssue(
        789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
    issues = [issue_1, issue_2, issue_3]
    config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)

    sorted_issues = frontendsearchpipeline._SortIssues(
        issues, config, {}, '', 'priority')

    self.assertEqual(
        [issue_1, issue_3, issue_2],  # Order is high, medium, low.
        sorted_issues)


class FrontendSearchPipelineShardMethodsTest(unittest.TestCase):

  def setUp(self):
    self.sharded_iids = {
        (0, 'p:v'): [10, 20, 30, 40, 50],
        (1, 'p:v'): [21, 41, 61, 81],
        (2, 'p:v'): [42, 52, 62, 72, 102],
        (3, 'p:v'): [],
    }
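    # Total across all shards: 5 + 4 + 5 + 0 = 14 issue IDs.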

  def testTotalLength_Empty(self):
    """If there were no results, the length of the sharded list is zero."""
    self.assertEqual(0, frontendsearchpipeline._TotalLength({}))

  def testTotalLength_Normal(self):
    """The length of the sharded list is the sum of the shard lengths."""
    self.assertEqual(
        14, frontendsearchpipeline._TotalLength(self.sharded_iids))

  def testReverseShards_Empty(self):
    """Reversing an empty sharded list is still empty."""
    empty_sharded_iids = {}
    frontendsearchpipeline._ReverseShards(empty_sharded_iids)
    self.assertEqual({}, empty_sharded_iids)

  def testReverseShards_Normal(self):
    """Reversing a sharded list reverses each shard."""
    frontendsearchpipeline._ReverseShards(self.sharded_iids)
    self.assertEqual(
        {(0, 'p:v'): [50, 40, 30, 20, 10],
         (1, 'p:v'): [81, 61, 41, 21],
         (2, 'p:v'): [102, 72, 62, 52, 42],
         (3, 'p:v'): [],
         },
        self.sharded_iids)

  def testTrimShardedIIDs_Empty(self):
    """If the sharded list is empty, trimming it makes no change."""
    empty_sharded_iids = {}
    frontendsearchpipeline._TrimEndShardedIIDs(empty_sharded_iids, [], 12)
    self.assertEqual({}, empty_sharded_iids)

    frontendsearchpipeline._TrimEndShardedIIDs(
        empty_sharded_iids,
        [(100, (0, 'p:v')), (88, (8, 'p:v')), (99, (9, 'p:v'))],
        12)
    self.assertEqual({}, empty_sharded_iids)

  def testTrimShardedIIDs_NoSamples(self):
    """If there are no samples, we don't trim off any IIDs."""
    orig_sharded_iids = {
        shard_id: iids[:] for shard_id, iids in self.sharded_iids.items()}
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 12)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 1)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

  def testTrimShardedIIDs_Normal(self):
    """The first 3 samples contribute all needed IIDs, so trim off the rest."""
    samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
               (40, (0, 'p:v')), (81, (1, 'p:v'))]
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, samples, 5)
    self.assertEqual(2 + 1 + 0 + 0, num_trimmed)
    self.assertEqual(
        {  # shard_id: iids before lower-bound + iids before 1st excess sample.
            (0, 'p:v'): [10, 20] + [30],
            (1, 'p:v'): [21] + [41, 61],
            (2, 'p:v'): [42, 52] + [62, 72, 102],
            (3, 'p:v'): [] + []},
        self.sharded_iids)

  def testCalcSamplePositions_Empty(self):
    sharded_iids = {0: []}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: [10, 20, 30, 40]}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: []}
    # E.g., the IIDs 2 and 4 might have been trimmed out in the forward phase.
    # But we still have them in the list for the backwards phase, and they
    # should just not contribute anything to the result.
    samples = [(2, (2, 'p:v')), (4, (4, 'p:v'))]
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

  def testCalcSamplePositions_Normal(self):
    samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
               (40, (0, 'p:v')), (81, (1, 'p:v'))]
    self.assertEqual(
        [(30, (0, 'p:v'), 2),
         (41, (1, 'p:v'), 1),
         (62, (2, 'p:v'), 2),
         (40, (0, 'p:v'), 3),
         (81, (1, 'p:v'), 3)],
        frontendsearchpipeline._CalcSamplePositions(self.sharded_iids, samples))