# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

6"""Tests for the frontendsearchpipeline module."""
7from __future__ import print_function
8from __future__ import division
9from __future__ import absolute_import
10
11import mox
12import unittest
13
14from google.appengine.api import memcache
15from google.appengine.api import modules
16from google.appengine.ext import testbed
17from google.appengine.api import urlfetch

import settings
from framework import framework_helpers
from framework import sorting
from framework import urls
from proto import ast_pb2
from proto import project_pb2
from proto import tracker_pb2
from search import frontendsearchpipeline
from search import query2ast
from search import searchpipeline
from services import service_manager
from testing import fake
from testing import testing_helpers
from tracker import tracker_bizobj


# Just an example timestamp. The value does not matter.
NOW = 2444950132


class FrontendSearchPipelineTest(unittest.TestCase):

  def setUp(self):
    self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
    self.services = service_manager.Services(
        user=fake.UserService(),
        project=fake.ProjectService(),
        issue=fake.IssueService(),
        config=fake.ConfigService(),
        cache_manager=fake.CacheManager())
    self.services.user.TestAddUser('a@example.com', 111)
    self.project = self.services.project.TestAddProject('proj', project_id=789)
    self.mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/list', project=self.project)
    self.mr.me_user_id = 111

    self.issue_1 = fake.MakeTestIssue(
        789, 1, 'one', 'New', 111, labels=['Priority-High'])
    self.services.issue.TestAddIssue(self.issue_1)
    self.issue_2 = fake.MakeTestIssue(
        789, 2, 'two', 'New', 111, labels=['Priority-Low'])
    self.services.issue.TestAddIssue(self.issue_2)
    self.issue_3 = fake.MakeTestIssue(
        789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
    self.services.issue.TestAddIssue(self.issue_3)
    self.mr.sort_spec = 'Priority'

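    # Mirror the MonorailRequest fields that FrontendSearchPipeline takes as
    # individual constructor arguments, so each test can pass them explicitly.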
    self.cnxn = self.mr.cnxn
    self.project = self.mr.project
    self.auth = self.mr.auth
    self.me_user_id = self.mr.me_user_id
    self.query = self.mr.query
    self.query_project_names = self.mr.query_project_names
    self.items_per_page = self.mr.num  # Defaults to 100.
    self.paginate_start = self.mr.start
    self.paginate_end = self.paginate_start + self.items_per_page
    self.can = self.mr.can
    self.group_by_spec = self.mr.group_by_spec
    self.sort_spec = self.mr.sort_spec
    self.warnings = self.mr.warnings
    self.errors = self.mr.errors
    self.use_cached_searches = self.mr.use_cached_searches
    self.profiler = self.mr.profiler

    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()
    sorting.InitializeArtValues(self.services)

  def tearDown(self):
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()

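  # In the tests below, unfiltered_iids and filtered_iids are keyed by
  # (shard_id, subquery) tuples such as (1, 'p:v'), while nonviewable_iids
  # is keyed by shard_id alone.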
  def testSearchForIIDs_AllResultsCached_AllAtRiskCached(self):
    unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
    nonviewable_iids = {1: set()}
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
        unfiltered_iids, {}, nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(2, pipeline.total_count)
    self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])

  def testSearchForIIDs_CrossProject_AllViewable(self):
    self.services.project.TestAddProject('other', project_id=790)
    unfiltered_iids = {(1, 'p:v'): [1001, 1011, 2001]}
    nonviewable_iids = {1: set()}
    self.query_project_names = ['other']
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['other', 'proj'], [789, 790],
        mox.IsA(tracker_pb2.ProjectIssueConfig), unfiltered_iids, {},
        nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(3, pipeline.total_count)
    self.assertEqual([1001, 1011, 2001], pipeline.filtered_iids[(1, 'p:v')])

  def testSearchForIIDs_CrossProject_MembersOnlyOmitted(self):
    self.services.project.TestAddProject(
        'other', project_id=790, access=project_pb2.ProjectAccess.MEMBERS_ONLY)
    unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
    nonviewable_iids = {1: set()}
    # Project 'other' gets filtered out before the backend call.
    self.query_project_names = ['other']
    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
    frontendsearchpipeline._StartBackendSearch(
        self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
        unfiltered_iids, {}, nonviewable_iids, set(), self.services,
        self.me_user_id, self.auth.user_id or 0, self.paginate_end,
        self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
        self.warnings, self.use_cached_searches).AndReturn([])
    self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
    frontendsearchpipeline._FinishBackendSearch([])
    self.mox.ReplayAll()

    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.unfiltered_iids = unfiltered_iids
    pipeline.nonviewable_iids = nonviewable_iids
    pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual(2, pipeline.total_count)
    self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])

  def testMergeAndSortIssues_EmptyResult(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.filtered_iids = {0: [], 1: [], 2: []}

    pipeline.MergeAndSortIssues()
    self.assertEqual([], pipeline.allowed_iids)
    self.assertEqual([], pipeline.allowed_results)
    self.assertEqual({}, pipeline.users_by_id)

  def testMergeAndSortIssues_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # This test does not call SearchForIIDs(); instead, it sets
    # pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: [self.issue_3.issue_id],
    }

    pipeline.MergeAndSortIssues()
    self.assertEqual(
        [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
        pipeline.allowed_iids)
    self.assertEqual(
        [self.issue_1, self.issue_3, self.issue_2],  # High, medium, low.
        pipeline.allowed_results)
    self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))

  def testDetermineIssuePosition_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # This test does not call SearchForIIDs(); instead, it sets
    # pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: [self.issue_3.issue_id],
    }

    prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    self.assertEqual(self.issue_1.issue_id, prev_iid)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2.issue_id, next_iid)

  def testDetermineIssuePosition_NotInResults(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # This test does not call SearchForIIDs(); instead, it sets
    # pipeline.filtered_iids directly.
    pipeline.filtered_iids = {
        0: [],
        1: [self.issue_1.issue_id],
        2: [self.issue_2.issue_id],
        3: [],
    }

    prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
    # issue_3 was not in the search results, so it has no position.
    self.assertIsNone(prev_iid)
    self.assertIsNone(index)
    self.assertIsNone(next_iid)

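  # _DetermineIssuePositionInShard() returns a (prev_candidate, index,
  # next_candidate) triple for a single shard; DetermineIssuePosition()
  # combines the per-shard candidates into the overall answer above.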
  def testDetermineIssuePositionInShard_IssueIsInShard(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    # Let's assume issues 1, 2, and 3 are all in the same shard.
    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_2.issue_id,
            self.issue_3.issue_id],
    }

    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_1, {})
    self.assertIsNone(prev_cand)
    self.assertEqual(0, index)
    self.assertEqual(self.issue_3, next_cand)

    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_3, {})
    self.assertEqual(self.issue_1, prev_cand)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2, next_cand)

    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_2, {})
    self.assertEqual(self.issue_3, prev_cand)
    self.assertEqual(2, index)
    self.assertIsNone(next_cand)

  def testDetermineIssuePositionInShard_IssueIsNotInShard(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
    pipeline.filtered_iids = {
        0: [self.issue_2.issue_id, self.issue_3.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_1, {})
    self.assertIsNone(prev_cand)
    self.assertEqual(0, index)
    self.assertEqual(self.issue_3, next_cand)

    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_2.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_3, {})
    self.assertEqual(self.issue_1, prev_cand)
    self.assertEqual(1, index)
    self.assertEqual(self.issue_2, next_cand)

    pipeline.filtered_iids = {
        0: [self.issue_1.issue_id, self.issue_3.issue_id],
    }
    prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
        0, self.issue_2, {})
    self.assertEqual(self.issue_3, prev_cand)
    self.assertEqual(2, index)
    self.assertIsNone(next_cand)

  def testFetchAllSamples_Empty(self):
    filtered_iids = {}
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual({}, samples_by_shard)
    self.assertEqual({}, sample_iids_to_shard)

  def testFetchAllSamples_SmallResultsPerShard(self):
    filtered_iids = {
        0: [100, 110, 120],
        1: [101, 111, 121],
    }
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual(2, len(samples_by_shard))
    self.assertEqual(0, len(sample_iids_to_shard))

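  # With 23 issues in a single shard, the sampling logic (at least one sample
  # per 10 results in a shard; see testChooseSampleIssues_Normal below)
  # chooses 23 // 10 = 2 samples.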
  def testFetchAllSamples_Normal(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issues = self.MakeIssues(23)
    filtered_iids = {
        0: [issue.issue_id for issue in issues],
    }

    samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
        filtered_iids)
    self.assertEqual(1, len(samples_by_shard))
    self.assertEqual(2, len(samples_by_shard[0]))
    self.assertEqual(2, len(sample_iids_to_shard))
    for sample_iid in sample_iids_to_shard:
      shard_key = sample_iids_to_shard[sample_iid]
      self.assertIn(sample_iid, filtered_iids[shard_key])

  def testChooseSampleIssues_Empty(self):
    """When the search gave no results, there cannot be any samples."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issue_ids = []
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual([], needed_iids)

  def testChooseSampleIssues_Small(self):
    """When the search gave few results, don't bother with samples."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issue_ids = [78901, 78902]
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual([], needed_iids)

  def MakeIssues(self, num_issues):
    """Create, store, and return num_issues test issues."""
    issues = []
    for i in range(num_issues):
      issue = fake.MakeTestIssue(789, 100 + i, 'samp test', 'New', 111)
      issues.append(issue)
      self.services.issue.TestAddIssue(issue)
    return issues

  def testChooseSampleIssues_Normal(self):
    """We will choose at least one sample for every 10 results in a shard."""
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    issues = self.MakeIssues(23)
    issue_ids = [issue.issue_id for issue in issues]
    on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
    self.assertEqual({}, on_hand_issues)
    self.assertEqual(2, len(needed_iids))
    for sample_iid in needed_iids:
      self.assertIn(sample_iid, issue_ids)

  def testLookupNeededUsers(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)

    pipeline._LookupNeededUsers([])
    self.assertEqual([], list(pipeline.users_by_id.keys()))

    pipeline._LookupNeededUsers([self.issue_1, self.issue_2, self.issue_3])
    self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))

  def testPaginate_List(self):
    pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        self.cnxn,
        self.services,
        self.auth,
        self.me_user_id,
        self.query,
        self.query_project_names,
        self.items_per_page,
        self.paginate_start,
        self.can,
        self.group_by_spec,
        self.sort_spec,
        self.warnings,
        self.errors,
        self.use_cached_searches,
        self.profiler,
        project=self.project)
    pipeline.allowed_iids = [
        self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
    pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
    pipeline.total_count = len(pipeline.allowed_results)
    pipeline.Paginate()
    self.assertEqual(
        [self.issue_1, self.issue_2, self.issue_3],
        pipeline.visible_results)
    self.assertFalse(pipeline.pagination.limit_reached)


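# Tests for the module-level helper functions in frontendsearchpipeline.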
class FrontendSearchPipelineMethodsTest(unittest.TestCase):

  def setUp(self):
    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()

    self.project_id = 789
    self.default_config = tracker_bizobj.MakeDefaultProjectIssueConfig(
        self.project_id)
    self.services = service_manager.Services(
        project=fake.ProjectService())
    self.project = self.services.project.TestAddProject(
        'proj', project_id=self.project_id)

  def tearDown(self):
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testMakeBackendCallback(self):
    called_with = []

    def func(a, b):
      called_with.append((a, b))

    callback = frontendsearchpipeline._MakeBackendCallback(func, 10, 20)
    callback()
    self.assertEqual([(10, 20)], called_with)

  def testParseUserQuery_CheckQuery(self):
    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'ok query', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertIsNone(msg)
    self.assertEqual([], warnings)

    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'modified:0-0-0', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertEqual('Could not parse date: 0-0-0', msg)

    warnings = []
    msg = frontendsearchpipeline._CheckQuery(
        'cnxn', self.services, 'blocking:3.14', self.default_config,
        [self.project_id], True, warnings=warnings)
    self.assertEqual('Could not parse issue reference: 3.14', msg)
    self.assertEqual([], warnings)

  def testStartBackendSearch(self):
    # TODO(jrobbins): write this test.
    pass

  def testFinishBackendSearch(self):
    # TODO(jrobbins): write this test.
    pass

  def testGetProjectTimestamps_NoneSet(self):
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [])
    self.assertEqual({}, project_shard_timestamps)

    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual({}, project_shard_timestamps)

    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual({}, project_shard_timestamps)

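  # _GetProjectTimestamps() reads per-shard invalidation timestamps from
  # memcache keys of the form '<project_id>;<shard_id>', or 'all;<shard_id>'
  # for site-wide searches.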
  def testGetProjectTimestamps_SpecificProjects(self):
    memcache.set('789;0', NOW)
    memcache.set('789;1', NOW - 1000)
    memcache.set('789;2', NOW - 3000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        {
            (789, 0): NOW,
            (789, 1): NOW - 1000,
            (789, 2): NOW - 3000,
        },
        project_shard_timestamps)

    memcache.set('790;0', NOW)
    memcache.set('790;1', NOW - 10000)
    memcache.set('790;2', NOW - 30000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [789, 790], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        {
            (789, 0): NOW,
            (789, 1): NOW - 1000,
            (789, 2): NOW - 3000,
            (790, 0): NOW,
            (790, 1): NOW - 10000,
            (790, 2): NOW - 30000,
        },
        project_shard_timestamps)

  def testGetProjectTimestamps_SiteWide(self):
    memcache.set('all;0', NOW)
    memcache.set('all;1', NOW - 10000)
    memcache.set('all;2', NOW - 30000)
    project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
        [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
    self.assertEqual(
        {
            ('all', 0): NOW,
            ('all', 1): NOW - 10000,
            ('all', 2): NOW - 30000,
        },
        project_shard_timestamps)

  def testGetNonviewableIIDs_SearchMissSoNoOp(self):
    """If the search cache missed, don't bother looking up nonviewable IIDs."""
    unfiltered_iids_dict = {}  # No cached search results found.
    rpc_tuples = []  # Nothing should accumulate here in this case.
    nonviewable_iids = {}  # Nothing should accumulate here in this case.
    processed_invalidations_up_to = 12345
    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, {}, processed_invalidations_up_to, True)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({}, nonviewable_iids)

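  # Cached nonviewable-IID entries live under memcache keys of the form
  # 'nonviewable:<project_id>;<user_id>;<shard_id>' and hold an
  # (iid_list, invalidation_timestep) pair.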
  def testGetNonviewableIIDs_SearchHitThenNonviewableHit(self):
    """If the search cache hit, get nonviewable info from the cache."""
    unfiltered_iids_dict = {
        1: [10001, 10021],
        2: ['the search result issue_ids do not matter'],
    }
    rpc_tuples = []  # Nothing should accumulate here in this case.
    nonviewable_iids = {}  # Our mock results should end up here.
    processed_invalidations_up_to = 12345
    memcache.set('nonviewable:789;111;1',
                 ([10001, 10031], processed_invalidations_up_to - 10))
    memcache.set('nonviewable:789;111;2',
                 ([10002, 10042], processed_invalidations_up_to - 30))

    project_shard_timestamps = {
        (789, 1): 0,  # Not stale.
        (789, 2): 0,  # Not stale.
    }
    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, project_shard_timestamps,
        processed_invalidations_up_to, True)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001, 10031}, 2: {10002, 10042}}, nonviewable_iids)

  def testGetNonviewableIIDs_SearchHitNonviewableMissSoStartRPC(self):
    """If the search hit but nonviewable missed, start RPCs for the info."""
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    unfiltered_iids_dict = {
        2: ['the search result issue_ids do not matter'],
    }
    rpc_tuples = []  # One RPC object should accumulate here.
    nonviewable_iids = {}  # This will stay empty until RPCs complete.
    processed_invalidations_up_to = 12345
    # Nothing is set in memcache for this case.
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._GetNonviewableIIDs(
        [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
        nonviewable_iids, {}, processed_invalidations_up_to, True)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual({}, nonviewable_iids)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)

  def testAccumulateNonviewableIIDs_MemcacheHitForProject(self):
    processed_invalidations_up_to = 12345
    cached_dict = {
        '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # This will gain the shard 2 values.
    project_shard_timestamps = {
        (789, 1): 0,  # Not stale.
        (789, 2): 0,  # Not stale.
    }
    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheStaleForProject(self):
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    processed_invalidations_up_to = 12345
    cached_dict = {
        '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # Unchanged until the RPC completes.
    project_shard_timestamps = {
        (789, 1): 0,  # Not stale.
        (789, 2): processed_invalidations_up_to,  # Stale!
    }
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)
    self.assertEqual({1: {10001}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheHitForWholeSite(self):
    processed_invalidations_up_to = 12345
    cached_dict = {
        'all;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
        'all;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
    }
    rpc_tuples = []  # Nothing should accumulate here.
    nonviewable_iids = {1: {10001}}  # This will gain the shard 2 values.
    project_shard_timestamps = {
        (None, 1): 0,  # Not stale.
        (None, 2): 0,  # Not stale.
    }
    frontendsearchpipeline._AccumulateNonviewableIIDs(
        None, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
        rpc_tuples, processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)

  def testAccumulateNonviewableIIDs_MemcacheMissSoStartRPC(self):
    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    cached_dict = {}  # Nothing here, so it is an at-risk cache miss.
    rpc_tuples = []  # One RPC should accumulate here.
    nonviewable_iids = {1: {10001}}  # Unchanged until the RPC completes.
    processed_invalidations_up_to = 12345
    a_fake_rpc = testing_helpers.Blank(callback=None)
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._AccumulateNonviewableIIDs(
        789, 111, 2, cached_dict, nonviewable_iids, {}, rpc_tuples,
        processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, sid_0, rpc_0 = rpc_tuples[0]
    self.assertEqual(2, sid_0)
    self.assertEqual(a_fake_rpc, rpc_0)
    self.assertIsNotNone(a_fake_rpc.callback)
    self.assertEqual({1: {10001}}, nonviewable_iids)

  def testGetCachedSearchResults(self):
    # TODO(jrobbins): Write this test.
    pass

  def testMakeBackendRequestHeaders(self):
    headers = frontendsearchpipeline._MakeBackendRequestHeaders(False)
    self.assertNotIn('X-AppEngine-FailFast', headers)
    headers = frontendsearchpipeline._MakeBackendRequestHeaders(True)
    self.assertEqual('Yes', headers['X-AppEngine-FailFast'])

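  # _StartBackendSearchCall() issues an asynchronous urlfetch RPC to the
  # urls.BACKEND_SEARCH servlet; the query string encodes the shard id and
  # subquery, user IDs, pagination bound, and any sort/group specs.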
  def testStartBackendSearchCall(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc, mox.StrContains(
            urls.BACKEND_SEARCH + '?groupby=cc&invalidation_timestep=12345&'
            'logged_in_user_id=777&me_user_ids=555&'
            'num=201&projects=proj&q=priority%3Dhigh&shard_id=2&start=0'),
        follow_redirects=False,
        headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    me_user_ids = [555]
    logged_in_user_id = 777
    new_url_num = 201
    frontendsearchpipeline._StartBackendSearchCall(
        ['proj'], (2, 'priority=high'),
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        group_by_spec='cc')
    self.mox.VerifyAll()

  def testStartBackendSearchCall_SortAndGroup(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc,
        mox.StrContains(
            urls.BACKEND_SEARCH + '?groupby=bar&'
            'invalidation_timestep=12345&'
            'logged_in_user_id=777&me_user_ids=555&num=201&projects=proj&'
            'q=priority%3Dhigh&shard_id=2&sort=foo&start=0'),
        follow_redirects=False,
        headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    me_user_ids = [555]
    logged_in_user_id = 777
    new_url_num = 201
    sort_spec = 'foo'
    group_by_spec = 'bar'
    frontendsearchpipeline._StartBackendSearchCall(
        ['proj'], (2, 'priority=high'),
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        sort_spec=sort_spec,
        group_by_spec=group_by_spec)
    self.mox.VerifyAll()

  def testStartBackendNonviewableCall(self):
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='default')
    urlfetch.make_fetch_call(
        a_fake_rpc, mox.StrContains(urls.BACKEND_NONVIEWABLE),
        follow_redirects=False, headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to)
    self.mox.VerifyAll()

  def testHandleBackendSearchResponse_500(self):
    response_str = 'There was a problem processing the query.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=500))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per shard.
    search_limit_reached = {}  # Booleans accumulate here, per shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 0, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertIn(2, error_responses)

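  # The well-formed responses below start with the "})]'\n" prefix
  # (presumably anti-XSSI armor) that the response handler strips before
  # parsing the JSON payload.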
  def testHandleBackendSearchResponse_Error(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [],'
        ' "search_limit_reached": false,'
        ' "error": "Invalid query"'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per shard.
    search_limit_reached = {}  # Booleans accumulate here, per shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: []}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)
    self.assertEqual({2}, error_responses)

  def testHandleBackendSearchResponse_Normal(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [10002, 10042],'
        ' "search_limit_reached": false'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per shard.
    search_limit_reached = {}  # Booleans accumulate here, per shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: [10002, 10042]}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)

  def testHandleBackendSearchResponse_TriggersRetry(self):
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # The new retry RPC should be appended here.
    filtered_iids = {}  # No change here until the retry completes.
    search_limit_reached = {}  # No change here until the retry completes.
    processed_invalidations_up_to = 12345
    error_responses = set()

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    rpc = frontendsearchpipeline._StartBackendSearchCall(
        ['proj'],
        2,
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        can=1,
        group_by_spec=None,
        sort_spec=None,
        failfast=False).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual({}, filtered_iids)
    self.assertEqual({}, search_limit_reached)

  def testHandleBackendNonviewableResponse_Error(self):
    response_str = 'There was an error.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=500))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk IIDs would accumulate here, per shard.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 0, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertNotEqual({2: {10002, 10042}}, nonviewable_iids)

  def testHandleBackendNonviewableResponse_Normal(self):
    response_str = (
        '})]\'\n'
        '{'
        ' "nonviewable": [10002, 10042]'
        '}'
    )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk IIDs should accumulate here, per shard.
    processed_invalidations_up_to = 12345

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: {10002, 10042}}, nonviewable_iids)

  def testHandleBackendAtRiskResponse_TriggersRetry(self):
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # The new retry RPC should be appended here.
    nonviewable_iids = {}  # No change here until the retry completes.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    rpc = frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to,
        failfast=False).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertEqual({}, nonviewable_iids)

  def testSortIssues(self):
    services = service_manager.Services(
        cache_manager=fake.CacheManager())
    sorting.InitializeArtValues(services)

    issue_1 = fake.MakeTestIssue(
        789, 1, 'one', 'New', 111, labels=['Priority-High'])
    issue_2 = fake.MakeTestIssue(
        789, 2, 'two', 'New', 111, labels=['Priority-Low'])
    issue_3 = fake.MakeTestIssue(
        789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
    issues = [issue_1, issue_2, issue_3]
    config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)

    sorted_issues = frontendsearchpipeline._SortIssues(
        issues, config, {}, '', 'priority')

    self.assertEqual(
        [issue_1, issue_3, issue_2],  # Order is high, medium, low.
        sorted_issues)


class FrontendSearchPipelineShardMethodsTest(unittest.TestCase):

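  # Fixture: four shards keyed by (shard_id, subquery) tuples, holding
  # 5 + 4 + 5 + 0 = 14 issue IDs in total.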
  def setUp(self):
    self.sharded_iids = {
        (0, 'p:v'): [10, 20, 30, 40, 50],
        (1, 'p:v'): [21, 41, 61, 81],
        (2, 'p:v'): [42, 52, 62, 72, 102],
        (3, 'p:v'): [],
    }

  def testTotalLength_Empty(self):
    """If there were no results, the length of the sharded list is zero."""
    self.assertEqual(0, frontendsearchpipeline._TotalLength({}))

  def testTotalLength_Normal(self):
    """The length of the sharded list is the sum of the shard lengths."""
    self.assertEqual(
        14, frontendsearchpipeline._TotalLength(self.sharded_iids))

  def testReverseShards_Empty(self):
    """Reversing an empty sharded list is still empty."""
    empty_sharded_iids = {}
    frontendsearchpipeline._ReverseShards(empty_sharded_iids)
    self.assertEqual({}, empty_sharded_iids)

  def testReverseShards_Normal(self):
    """Reversing a sharded list reverses each shard."""
    frontendsearchpipeline._ReverseShards(self.sharded_iids)
    self.assertEqual(
        {(0, 'p:v'): [50, 40, 30, 20, 10],
         (1, 'p:v'): [81, 61, 41, 21],
         (2, 'p:v'): [102, 72, 62, 52, 42],
         (3, 'p:v'): [],
        },
        self.sharded_iids)

  def testTrimShardedIIDs_Empty(self):
    """If the sharded list is empty, trimming it makes no change."""
    empty_sharded_iids = {}
    frontendsearchpipeline._TrimEndShardedIIDs(empty_sharded_iids, [], 12)
    self.assertEqual({}, empty_sharded_iids)

    frontendsearchpipeline._TrimEndShardedIIDs(
        empty_sharded_iids,
        [(100, (0, 'p:v')), (88, (8, 'p:v')), (99, (9, 'p:v'))],
        12)
    self.assertEqual({}, empty_sharded_iids)

  def testTrimShardedIIDs_NoSamples(self):
    """If there are no samples, we don't trim off any IIDs."""
    orig_sharded_iids = {
        shard_id: iids[:] for shard_id, iids in self.sharded_iids.items()}
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 12)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 1)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

  def testTrimShardedIIDs_Normal(self):
    """The first 3 samples contribute all needed IIDs, so trim off the rest."""
    samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
               (40, (0, 'p:v')), (81, (1, 'p:v'))]
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, samples, 5)
    self.assertEqual(2 + 1 + 0 + 0, num_trimmed)
    self.assertEqual(
        # Per shard: IIDs before the lower bound + IIDs before the first
        # excess sample.
        {
            (0, 'p:v'): [10, 20] + [30],
            (1, 'p:v'): [21] + [41, 61],
            (2, 'p:v'): [42, 52] + [62, 72, 102],
            (3, 'p:v'): [] + [],
        },
        self.sharded_iids)

  def testCalcSamplePositions_Empty(self):
    sharded_iids = {0: []}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: [10, 20, 30, 40]}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: []}
    # E.g., IIDs 2 and 4 might have been trimmed out in the forward phase,
    # but we still have them in the sample list for the backwards phase; they
    # should simply not contribute anything to the result.
    samples = [(2, (2, 'p:v')), (4, (4, 'p:v'))]
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

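  # Each expected triple below is (iid, shard_key, index), where index is the
  # position of the sample IID within its shard, e.g. IID 30 sits at index 2
  # of shard (0, 'p:v') == [10, 20, 30, 40, 50].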
  def testCalcSamplePositions_Normal(self):
    samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
               (40, (0, 'p:v')), (81, (1, 'p:v'))]
    self.assertEqual(
        [(30, (0, 'p:v'), 2),
         (41, (1, 'p:v'), 1),
         (62, (2, 'p:v'), 2),
         (40, (0, 'p:v'), 3),
         (81, (1, 'p:v'), 3)],
        frontendsearchpipeline._CalcSamplePositions(self.sharded_iids, samples))