Project import generated by Copybara.
GitOrigin-RevId: d9e9e3fb4e31372ec1fb43b178994ca78fa8fe70
diff --git a/search/test/__init__.py b/search/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/search/test/__init__.py
diff --git a/search/test/ast2ast_test.py b/search/test/ast2ast_test.py
new file mode 100644
index 0000000..9edeaf1
--- /dev/null
+++ b/search/test/ast2ast_test.py
@@ -0,0 +1,785 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the ast2ast module."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import unittest
+
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import ast2ast
+from search import query2ast
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+
+
+BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
+ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']
+OWNER_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['owner']
+OWNER_ID_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['owner_id']
+
+
+class AST2ASTTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 101, 789, 'UI', 'doc', False, [], [], 0, 0))
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 102, 789, 'UI>Search', 'doc', False, [], [], 0, 0))
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 201, 789, 'DB', 'doc', False, [], [], 0, 0))
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 301, 789, 'Search', 'doc', False, [], [], 0, 0))
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService(),
+ features=fake.FeaturesService())
+ self.services.user.TestAddUser('a@example.com', 111)
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=100)
+
+ def testPreprocessAST_EmptyAST(self):
+ ast = ast_pb2.QueryAST() # No conjunctions in it.
+ new_ast = ast2ast.PreprocessAST(
+ self.cnxn, ast, [789], self.services, self.config)
+ self.assertEqual(ast, new_ast)
+
+ def testPreprocessAST_Normal(self):
+ open_field = BUILTIN_ISSUE_FIELDS['open']
+ label_field = BUILTIN_ISSUE_FIELDS['label']
+ label_id_field = BUILTIN_ISSUE_FIELDS['label_id']
+ status_id_field = BUILTIN_ISSUE_FIELDS['status_id']
+ conds = [
+ ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [open_field], [], []),
+ ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [label_field], ['Hot'], [])]
+ self.services.config.TestAddLabelsDict({'Hot': 0})
+
+ ast = ast_pb2.QueryAST()
+ ast.conjunctions.append(ast_pb2.Conjunction(conds=conds))
+ new_ast = ast2ast.PreprocessAST(
+ self.cnxn, ast, [789], self.services, self.config)
+ self.assertEqual(2, len(new_ast.conjunctions[0].conds))
+ new_cond_1, new_cond_2 = new_ast.conjunctions[0].conds
+ self.assertEqual(ast_pb2.QueryOp.NE, new_cond_1.op)
+ self.assertEqual([status_id_field], new_cond_1.field_defs)
+ self.assertEqual([7, 8, 9], new_cond_1.int_values)
+ self.assertEqual([], new_cond_1.str_values)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond_2.op)
+ self.assertEqual([label_id_field], new_cond_2.field_defs)
+ self.assertEqual([0], new_cond_2.int_values)
+ self.assertEqual([], new_cond_2.str_values)
+
+ def testPreprocessIsOpenCond(self):
+ open_field = BUILTIN_ISSUE_FIELDS['open']
+ status_id_field = BUILTIN_ISSUE_FIELDS['status_id']
+
+ # is:open -> status_id!=closed_status_ids
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [open_field], [], [])
+ new_cond = ast2ast._PreprocessIsOpenCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.NE, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([7, 8, 9], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ # -is:open -> status_id=closed_status_ids
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.NE, [open_field], [], [])
+ new_cond = ast2ast._PreprocessIsOpenCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([7, 8, 9], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockedOnCond_WithSingleProjectID(self):
+ blockedon_field = BUILTIN_ISSUE_FIELDS['blockedon']
+ blockedon_id_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['1'], [101]), # One existing issue.
+ (['Project1:1'], [101]), # One existing issue with project prefix.
+ (['1', '2'], [101, 102]), # Two existing issues.
+        (['3'], [])): # Non-existent issue.
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blockedon_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blockedon_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockedOnCond_WithMultipleProjectIDs(self):
+ blockedon_field = BUILTIN_ISSUE_FIELDS['blockedon']
+ blockedon_id_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['Project1:1'], [101]),
+ (['Project1:1', 'Project2:2'], [101, 102])):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blockedon_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [1, 2], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blockedon_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockedOnCond_WithMultipleProjectIDs_NoPrefix(self):
+ blockedon_field = BUILTIN_ISSUE_FIELDS['blockedon']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids in (['1'], ['1', '2'], ['3']):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blockedon_field], local_ids, [])
+ with self.assertRaises(ValueError) as cm:
+ ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [1, 2], self.services, None, True)
+ self.assertEqual(
+ 'Searching for issues accross multiple/all projects without '
+ 'project prefixes is ambiguous and is currently not supported.',
+ cm.exception.message)
+
+ def testPreprocessBlockedOnCond_WithExternalIssues(self):
+ blockedon_field = BUILTIN_ISSUE_FIELDS['blockedon']
+ blockedon_id_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected_issues, expected_ext_issues in (
+ (['b/1234'], [], ['b/1234']),
+ (['Project1:1', 'b/1234'], [101], ['b/1234']),
+ (['1', 'b/1234', 'b/1551', 'Project1:2'],
+ [101, 102], ['b/1234', 'b/1551'])):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blockedon_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blockedon_id_field], new_cond.field_defs)
+ self.assertEqual(expected_issues, new_cond.int_values)
+ self.assertEqual(expected_ext_issues, new_cond.str_values)
+
+ def testPreprocessIsBlockedCond(self):
+ blocked_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ for input_op, expected_op in (
+ (ast_pb2.QueryOp.EQ, ast_pb2.QueryOp.IS_DEFINED),
+ (ast_pb2.QueryOp.NE, ast_pb2.QueryOp.IS_NOT_DEFINED)):
+ cond = ast_pb2.MakeCond(
+ input_op, [blocked_field], [], [])
+ new_cond = ast2ast._PreprocessIsBlockedCond(
+ self.cnxn, cond, [100], self.services, None, True)
+ self.assertEqual(expected_op, new_cond.op)
+ self.assertEqual([blocked_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessHasBlockedOnCond(self):
+ blocked_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ for op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
+ cond = ast_pb2.MakeCond(op, [blocked_field], [], [])
+ new_cond = ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [100], self.services, None, True)
+ self.assertEqual(op, op)
+ self.assertEqual([blocked_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessHasBlockingCond(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking_id']
+ for op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
+ cond = ast_pb2.MakeCond(op, [blocking_field], [], [])
+ new_cond = ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [100], self.services, None, True)
+ self.assertEqual(op, op)
+ self.assertEqual([blocking_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockingCond_WithSingleProjectID(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking']
+ blocking_id_field = BUILTIN_ISSUE_FIELDS['blocking_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['1'], [101]), # One existing issue.
+ (['Project1:1'], [101]), # One existing issue with project prefix.
+ (['1', '2'], [101, 102]), # Two existing issues.
+        (['3'], [])): # Non-existent issue.
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blocking_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockingCond_WithMultipleProjectIDs(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking']
+ blocking_id_field = BUILTIN_ISSUE_FIELDS['blocking_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['Project1:1'], [101]),
+ (['Project1:1', 'Project2:2'], [101, 102])):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [1, 2], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blocking_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockingCond_WithMultipleProjectIDs_NoPrefix(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids in (['1'], ['1', '2'], ['3']):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ with self.assertRaises(ValueError) as cm:
+ ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [1, 2], self.services, None, True)
+ self.assertEqual(
+ 'Searching for issues accross multiple/all projects without '
+ 'project prefixes is ambiguous and is currently not supported.',
+ cm.exception.message)
+
+ def testPreprocessBlockingCond_WithExternalIssues(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking']
+ blocking_id_field = BUILTIN_ISSUE_FIELDS['blocking_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected_issues, expected_ext_issues in (
+ (['b/1234'], [], ['b/1234']),
+ (['Project1:1', 'b/1234'], [101], ['b/1234']),
+ (['1', 'b/1234', 'b/1551', 'Project1:2'],
+ [101, 102], ['b/1234', 'b/1551'])):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blocking_id_field], new_cond.field_defs)
+ self.assertEqual(expected_issues, new_cond.int_values)
+ self.assertEqual(expected_ext_issues, new_cond.str_values)
+
+ def testPreprocessMergedIntoCond_WithSingleProjectID(self):
+ field = BUILTIN_ISSUE_FIELDS['mergedinto']
+ id_field = BUILTIN_ISSUE_FIELDS['mergedinto_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['1'], [101]), # One existing issue.
+ (['Project1:1'], [101]), # One existing issue with project prefix.
+ (['1', '2'], [101, 102]), # Two existing issues.
+        (['3'], [])): # Non-existent issue.
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [field], local_ids, [])
+ new_cond = ast2ast._PreprocessMergedIntoCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessMergedIntoCond_WithExternalIssues(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['mergedinto']
+ blocking_id_field = BUILTIN_ISSUE_FIELDS['mergedinto_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected_issues, expected_ext_issues in (
+ (['b/1234'], [], ['b/1234']),
+ (['Project1:1', 'b/1234'], [101], ['b/1234']),
+ (['1', 'b/1234', 'b/1551', 'Project1:2'],
+ [101, 102], ['b/1234', 'b/1551'])):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ new_cond = ast2ast._PreprocessMergedIntoCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blocking_id_field], new_cond.field_defs)
+ self.assertEqual(expected_issues, new_cond.int_values)
+ self.assertEqual(expected_ext_issues, new_cond.str_values)
+
+ def testPreprocessIsSpamCond(self):
+ spam_field = BUILTIN_ISSUE_FIELDS['spam']
+ is_spam_field = BUILTIN_ISSUE_FIELDS['is_spam']
+ for input_op, int_values in (
+ (ast_pb2.QueryOp.EQ, [1]), (ast_pb2.QueryOp.NE, [0])):
+ cond = ast_pb2.MakeCond(
+ input_op, [spam_field], [], [])
+ new_cond = ast2ast._PreprocessIsSpamCond(
+ self.cnxn, cond, [789], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([is_spam_field], new_cond.field_defs)
+ self.assertEqual(int_values, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessStatusCond(self):
+ status_field = BUILTIN_ISSUE_FIELDS['status']
+ status_id_field = BUILTIN_ISSUE_FIELDS['status_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [status_field], [], [])
+ new_cond = ast2ast._PreprocessStatusCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [status_field], ['New', 'Assigned'], [])
+ new_cond = ast2ast._PreprocessStatusCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([0, 1], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [status_field], [], [])
+ new_cond = ast2ast._PreprocessStatusCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual([], new_cond.int_values)
+
+ def testPrefixRegex(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Priority', 'Severity'], [])
+ regex = ast2ast._MakePrefixRegex(cond)
+ self.assertRegexpMatches('Priority-1', regex)
+ self.assertRegexpMatches('Severity-3', regex)
+ self.assertNotRegexpMatches('My-Priority', regex)
+
+ def testKeyValueRegex(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Type-Feature', 'Type-Security'], [])
+ regex = ast2ast._MakeKeyValueRegex(cond)
+ self.assertRegexpMatches('Type-Feature', regex)
+ self.assertRegexpMatches('Type-Bug-Security', regex)
+ self.assertNotRegexpMatches('Type-Bug', regex)
+ self.assertNotRegexpMatches('Security-Feature', regex)
+
+ def testKeyValueRegex_multipleKeys(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Type-Bug', 'Security-Bug'], [])
+ with self.assertRaises(ValueError):
+ ast2ast._MakeKeyValueRegex(cond)
+
+ def testWordBoundryRegex(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Type-Bug'], [])
+ regex = ast2ast._MakeKeyValueRegex(cond)
+ self.assertRegexpMatches('Type-Bug-Security', regex)
+ self.assertNotRegexpMatches('Type-BugSecurity', regex)
+
+ def testPreprocessLabelCond(self):
+ label_field = BUILTIN_ISSUE_FIELDS['label']
+ label_id_field = BUILTIN_ISSUE_FIELDS['label_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [label_field], ['Priority'], [])
+ new_cond = ast2ast._PreprocessLabelCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([label_id_field], new_cond.field_defs)
+ self.assertEqual([1, 2, 3], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ self.services.config.TestAddLabelsDict(
+ {
+ 'Priority-Low': 0,
+ 'Priority-High': 1
+ })
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [label_field],
+ ['Priority-Low', 'Priority-High'], [])
+ self.services.config.TestAddLabelsDict(
+ {
+ 'Priority-Low': 0,
+ 'Priority-High': 1
+ })
+ new_cond = ast2ast._PreprocessLabelCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([label_id_field], new_cond.field_defs)
+ self.assertEqual([0, 1], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.KEY_HAS, [label_field],
+ ['Priority-Low', 'Priority-High'], [])
+ new_cond = ast2ast._PreprocessLabelCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([label_id_field], new_cond.field_defs)
+ self.assertEqual([1, 2, 3], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessComponentCond_QuickOR(self):
+ component_field = BUILTIN_ISSUE_FIELDS['component']
+ component_id_field = BUILTIN_ISSUE_FIELDS['component_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [component_field], ['UI', 'DB'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101, 102, 201], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], ['UI', 'DB'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101, 102, 201], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], [], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual([], new_cond.int_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], ['unknown@example.com'],
+ [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual([], new_cond.int_values)
+
+ def testPreprocessComponentCond_RootedAndNonRooted(self):
+ component_field = BUILTIN_ISSUE_FIELDS['component']
+ component_id_field = BUILTIN_ISSUE_FIELDS['component_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], ['UI'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101, 102], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [component_field], ['UI'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessExactUsers_IsDefined(self):
+ """Anyone can search for [has:owner]."""
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [OWNER_FIELD], ['a@example.com'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], True)
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([OWNER_ID_FIELD], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ # Non-members do not raise an exception.
+ ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], False)
+
+
+ def testPreprocessExactUsers_UserFound(self):
+    """Anyone can search for a known user, [owner:user@example.com]."""
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [OWNER_FIELD], ['a@example.com'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([OWNER_ID_FIELD], new_cond.field_defs)
+ self.assertEqual([111], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ # Non-members do not raise an exception.
+ ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], False)
+
+ def testPreprocessExactUsers_UserSpecifiedByID(self):
+ """Anyone may search for users by ID, [owner:1234]."""
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [OWNER_FIELD], ['123'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([OWNER_ID_FIELD], new_cond.field_defs)
+ self.assertEqual([123], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ # Non-members do not raise an exception.
+ ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], False)
+
+ def testPreprocessExactUsers_NonEquality(self):
+ """Project members may search for [owner_id>111]."""
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.GE, [OWNER_ID_FIELD], ['111'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], True)
+ self.assertEqual(cond, new_cond)
+
+ with self.assertRaises(ast2ast.MalformedQuery):
+ ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], False)
+
+ def testPreprocessExactUsers_UserNotFound(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [OWNER_FIELD], ['unknown@example.com'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], True)
+ self.assertEqual(cond, new_cond)
+
+ with self.assertRaises(ast2ast.MalformedQuery):
+ ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], False)
+
+ def testPreprocessExactUsers_KeywordMe(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [OWNER_FIELD], ['me'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], True)
+ self.assertEqual(cond, new_cond)
+
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD], False)
+ self.assertEqual(cond, new_cond)
+
+ def testPreprocessHotlistCond(self):
+ hotlist_field = BUILTIN_ISSUE_FIELDS['hotlist']
+ hotlist_id_field = BUILTIN_ISSUE_FIELDS['hotlist_id']
+
+ self.services.user.TestAddUser('gatsby@example.org', 111)
+ self.services.user.TestAddUser('daisy@example.com', 222)
+ self.services.user.TestAddUser('nick@example.org', 333)
+
+ # Setup hotlists
+ self.services.features.TestAddHotlist(
+ 'Hotlist1', owner_ids=[111], hotlist_id=10)
+ self.services.features.TestAddHotlist(
+ 'Hotlist2', owner_ids=[111], hotlist_id=20)
+ self.services.features.TestAddHotlist(
+ 'Hotlist3', owner_ids=[222], hotlist_id=30)
+ self.services.features.TestAddHotlist(
+ 'Hotlist4', owner_ids=[222], hotlist_id=40)
+ self.services.features.TestAddHotlist(
+ 'Hotlist5', owner_ids=[333], hotlist_id=50)
+ self.services.features.TestAddHotlist(
+ 'Hotlist6', owner_ids=[333], hotlist_id=60)
+
+ hotlist_query_vals = [
+ 'gatsby@example.org:Hotlist1',
+ 'nick@example.org:',
+ 'daisy@example.com:Hotlist3', 'Hotlist4']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [hotlist_field], hotlist_query_vals, [])
+ actual = ast2ast._PreprocessHotlistCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, actual.op)
+ self.assertEqual([hotlist_id_field], actual.field_defs)
+ self.assertItemsEqual([10, 30, 40, 50, 60], actual.int_values)
+
+ def testPreprocessHotlistCond_UserNotFound(self):
+ hotlist_field = BUILTIN_ISSUE_FIELDS['hotlist']
+ hotlist_query_vals = ['gatsby@chromium.org:Hotlist1', 'Hotlist3']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [hotlist_field], hotlist_query_vals, [])
+ actual = ast2ast._PreprocessHotlistCond(
+ self.cnxn, cond, [1], self.services, None, True)
+ self.assertEqual(cond, actual)
+
+ def testPreprocessCustomCond_User(self):
+ fd = tracker_pb2.FieldDef(
+ field_id=1, field_name='TPM',
+ field_type=tracker_pb2.FieldTypes.USER_TYPE)
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['a@example.com'], [])
+ new_cond = ast2ast._PreprocessCustomCond(
+ self.cnxn, cond, self.services, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual(cond.field_defs, new_cond.field_defs)
+ self.assertEqual([111], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['111'], [])
+ new_cond = ast2ast._PreprocessCustomCond(
+ self.cnxn, cond, self.services, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual(cond.field_defs, new_cond.field_defs)
+ self.assertEqual([111], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['unknown@example.com'], [])
+ new_cond = ast2ast._PreprocessCustomCond(
+ self.cnxn, cond, self.services, True)
+ self.assertEqual(cond, new_cond)
+
+ def testPreprocessCustomCond_NonUser(self):
+ fd = tracker_pb2.FieldDef(
+ field_id=1, field_name='TPM',
+ field_type=tracker_pb2.FieldTypes.INT_TYPE)
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['foo'], [123])
+ new_cond = ast2ast._PreprocessCustomCond(
+ self.cnxn, cond, self.services, True)
+ self.assertEqual(cond, new_cond)
+
+ fd.field_type = tracker_pb2.FieldTypes.STR_TYPE
+ new_cond = ast2ast._PreprocessCustomCond(
+ self.cnxn, cond, self.services, True)
+ self.assertEqual(cond, new_cond)
+
+ def testPreprocessCustomCond_ApprovalUser(self):
+ fd = tracker_pb2.FieldDef(
+ field_id=1, field_name='UXReview',
+ field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['a@example.com'], [],
+ key_suffix=query2ast.APPROVER_SUFFIX)
+ new_cond = ast2ast._PreprocessCustomCond(
+ self.cnxn, cond, self.services, True)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual(cond.field_defs, new_cond.field_defs)
+ self.assertEqual([111], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+ self.assertEqual(query2ast.APPROVER_SUFFIX, new_cond.key_suffix)
+
+ def testPreprocessCond_NoChange(self):
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.TEXT_HAS, [ANY_FIELD], ['foo'], [])
+ self.assertEqual(
+ cond, ast2ast._PreprocessCond(self.cnxn, cond, [], None, None, True))
+
+ def testTextOpToIntOp(self):
+ self.assertEqual(ast_pb2.QueryOp.EQ,
+ ast2ast._TextOpToIntOp(ast_pb2.QueryOp.TEXT_HAS))
+ self.assertEqual(ast_pb2.QueryOp.EQ,
+ ast2ast._TextOpToIntOp(ast_pb2.QueryOp.KEY_HAS))
+ self.assertEqual(ast_pb2.QueryOp.NE,
+ ast2ast._TextOpToIntOp(ast_pb2.QueryOp.NOT_TEXT_HAS))
+
+ for enum_name, _enum_id in ast_pb2.QueryOp.to_dict().items():
+ no_change_op = ast_pb2.QueryOp(enum_name)
+ if no_change_op not in (
+ ast_pb2.QueryOp.TEXT_HAS,
+ ast_pb2.QueryOp.NOT_TEXT_HAS,
+ ast_pb2.QueryOp.KEY_HAS):
+ self.assertEqual(no_change_op,
+ ast2ast._TextOpToIntOp(no_change_op))
diff --git a/search/test/ast2select_test.py b/search/test/ast2select_test.py
new file mode 100644
index 0000000..f20d524
--- /dev/null
+++ b/search/test/ast2select_test.py
@@ -0,0 +1,1731 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the ast2select module."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import datetime
+import time
+import unittest
+
+from framework import sql
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import ast2select
+from search import query2ast
+from tracker import tracker_bizobj
+
+
+BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
+ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']
+
+
+class AST2SelectTest(unittest.TestCase):
+
+ def setUp(self):
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ def testBuildSQLQuery_EmptyAST(self):
+ ast = ast_pb2.QueryAST(conjunctions=[ast_pb2.Conjunction()]) # No conds
+ left_joins, where, unsupported = ast2select.BuildSQLQuery(ast)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([], unsupported)
+
+ def testBuildSQLQuery_Normal(self):
+ owner_field = BUILTIN_ISSUE_FIELDS['owner']
+ reporter_id_field = BUILTIN_ISSUE_FIELDS['reporter_id']
+ conds = [
+ ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [owner_field], ['example.com'], []),
+ ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [reporter_id_field], [], [111])]
+ ast = ast_pb2.QueryAST(conjunctions=[ast_pb2.Conjunction(conds=conds)])
+ left_joins, where, unsupported = ast2select.BuildSQLQuery(ast)
+ self.assertEqual(
+ [('User AS Cond0 ON (Issue.owner_id = Cond0.user_id '
+ 'OR Issue.derived_owner_id = Cond0.user_id)', [])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('(LOWER(Cond0.email) LIKE %s)', ['%example.com%']),
+ ('Issue.reporter_id = %s', [111])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockingIDCond_SingleValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s AND Cond1.issue_id = %s',
+ ['blockedon', 1])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.dst_issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockingIDCond_NegatedSingleValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s AND Cond1.issue_id = %s',
+ ['blockedon', 1])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.dst_issue_id IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockingIDCond_MultiValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1, 2, 3])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s AND Cond1.issue_id IN (%s,%s,%s)',
+ ['blockedon', 1, 2, 3])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.dst_issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockingIDCond_NegatedMultiValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1, 2, 3])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s AND Cond1.issue_id IN (%s,%s,%s)',
+ ['blockedon', 1, 2, 3])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.dst_issue_id IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockingIDCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ txt_cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [fd], ['b/1'], [])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ txt_cond, 'Cond1', 'Issue1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([txt_cond], unsupported)
+
+ def testBlockingIDCond_ExtIssues(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ ne_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], ['b/1', 'b/2'], [])
+ eq_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['b/1', 'b/2'], [])
+
+ for cond, where_str in [(eq_cond, 'DIR.issue_id IS NOT NULL'),
+ (ne_cond, 'DIR.issue_id IS NULL')]:
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'DIR', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s AND DIR.ext_issue_identifier IN (%s,%s)',
+ ['blocking', 'b/1', 'b/2'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [(where_str, [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockingIDCond_CombinedIssues(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['b/1', 'b/2'], [1, 2])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ ('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s AND Cond1.issue_id IN (%s,%s)',
+ ['blockedon', 1, 2]), left_joins[0])
+ self.assertEqual(
+ ('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s AND DIR.ext_issue_identifier IN (%s,%s)',
+ ['blocking', 'b/1', 'b/2']), left_joins[1])
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertTrue(sql._IsValidJoin(left_joins[1][0]))
+ self.assertEqual(
+ [('Cond1.dst_issue_id IS NOT NULL', []),
+ ('DIR.issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertTrue(sql._IsValidWhereCond(where[1][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockedOnIDCond_SingleValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s AND Cond1.dst_issue_id = %s',
+ ['blockedon', 1])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockedOnIDCond_NegatedSingleValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s AND Cond1.dst_issue_id = %s',
+ ['blockedon', 1])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.issue_id IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockedOnIDCond_MultiValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1, 2, 3])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s AND Cond1.dst_issue_id IN (%s,%s,%s)',
+ ['blockedon', 1, 2, 3])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockedOnIDCond_NegatedMultiValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1, 2, 3])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s AND Cond1.dst_issue_id IN (%s,%s,%s)',
+ ['blockedon', 1, 2, 3])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.issue_id IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockedOnIDCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ txt_cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [fd], ['b/1'], [])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ txt_cond, 'Cond1', 'Issue1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([txt_cond], unsupported)
+
+ def testBlockedOnIDCond_ExtIssues(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ eq_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['b/1', 'b/2'], [])
+ ne_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], ['b/1', 'b/2'], [])
+
+ for cond, where_str in [(eq_cond, 'DIR.issue_id IS NOT NULL'),
+ (ne_cond, 'DIR.issue_id IS NULL')]:
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ cond, 'DIR', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s AND DIR.ext_issue_identifier IN (%s,%s)',
+ ['blockedon', 'b/1', 'b/2'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [(where_str, [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testBlockedOnIDCond_CombinedIssues(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['b/1', 'b/2'], [1, 2])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ ('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s AND Cond1.dst_issue_id IN (%s,%s)',
+ ['blockedon', 1, 2]), left_joins[0])
+ self.assertEqual(
+ ('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s AND DIR.ext_issue_identifier IN (%s,%s)',
+ ['blockedon', 'b/1', 'b/2']), left_joins[1])
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertTrue(sql._IsValidJoin(left_joins[1][0]))
+ self.assertEqual(
+ [('Cond1.issue_id IS NOT NULL', []),
+ ('DIR.issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertTrue(sql._IsValidWhereCond(where[1][0]))
+ self.assertEqual([], unsupported)
+
+ def testMergedIntoIDCond_MultiValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['mergedinto_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1, 2, 3])
+
+ left_joins, where, unsupported = ast2select._ProcessMergedIntoIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s AND Cond1.dst_issue_id IN (%s,%s,%s)',
+ ['mergedinto', 1, 2, 3])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testMergedIntoIDCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['mergedinto_id']
+ txt_cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [fd], ['b/1', 'b/2', 'b/3'], [])
+
+ left_joins, where, unsupported = ast2select._ProcessMergedIntoIDCond(
+ txt_cond, 'Cond1', 'Issue1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([txt_cond], unsupported)
+
+ def testMergedIntoIDCond_ExtIssues(self):
+ fd = BUILTIN_ISSUE_FIELDS['mergedinto_id']
+ eq_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['b/1', 'b/2'], [])
+ ne_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], ['b/1', 'b/2'], [])
+
+ for cond, expected in [(eq_cond, ['b/1', 'b/2']),
+ (ne_cond, ['b/1', 'b/2'])]:
+ left_joins, where, unsupported = ast2select._ProcessMergedIntoIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s AND DIR.ext_issue_identifier IN (%s,%s)',
+ ['mergedinto'] + expected)],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testMergedIntoIDCond_CombinedIssues(self):
+ fd = BUILTIN_ISSUE_FIELDS['mergedinto_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['b/1', 'b/2'], [1, 2])
+
+ left_joins, where, unsupported = ast2select._ProcessMergedIntoIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s AND Cond1.dst_issue_id IN (%s,%s)',
+ ['mergedinto', 1, 2]),
+ ('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s AND DIR.ext_issue_identifier IN (%s,%s)',
+ ['mergedinto', 'b/1', 'b/2'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.issue_id IS NOT NULL', []),
+ ('DIR.issue_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testHasBlockedCond(self):
+ for op, expected in ((ast_pb2.QueryOp.IS_DEFINED, 'IS NOT NULL'),
+ (ast_pb2.QueryOp.IS_NOT_DEFINED, 'IS NULL')):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(op, [fd], [], [])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', None, snapshot_mode=False)
+ self.assertEqual(
+ ('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s', ['blockedon']),
+ left_joins[0])
+ self.assertEqual(
+ ('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s', ['blockedon']),
+ left_joins[1])
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertTrue(sql._IsValidJoin(left_joins[1][0]))
+ self.assertEqual([('(Cond1.issue_id %s OR DIR.issue_id %s)'
+ % (expected, expected), [])], where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testHasBlockedCond_SnapshotMode(self):
+ op = ast_pb2.QueryOp.IS_DEFINED
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(op, [fd], [], [])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([cond], unsupported)
+
+ def testHasBlockingCond(self):
+ for op, expected in ((ast_pb2.QueryOp.IS_DEFINED, 'IS NOT NULL'),
+ (ast_pb2.QueryOp.IS_NOT_DEFINED, 'IS NULL')):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(op, [fd], [], [])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(cond,
+ 'Cond1', None, snapshot_mode=False)
+ self.assertEqual(
+ ('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s', ['blockedon']),
+ left_joins[0])
+ self.assertEqual(
+ ('DanglingIssueRelation AS DIR ON Issue.id = DIR.issue_id AND '
+ 'DIR.kind = %s', ['blocking']),
+ left_joins[1])
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertTrue(sql._IsValidJoin(left_joins[1][0]))
+ self.assertEqual([('(Cond1.dst_issue_id %s OR DIR.issue_id %s)'
+ % (expected, expected), [])], where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testHasBlockingCond_SnapshotMode(self):
+ op = ast_pb2.QueryOp.IS_DEFINED
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(op, [fd], [], [])
+
+ left_joins, where, unsupported = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([cond], unsupported)
+
+ def testProcessOwnerCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['owner']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessOwnerCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('User AS Cond1 ON (Issue.owner_id = Cond1.user_id '
+ 'OR Issue.derived_owner_id = Cond1.user_id)', [])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('(LOWER(Cond1.email) LIKE %s)', ['%example.com%'])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessOwnerCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['owner']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessOwnerCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=True)
+ self.assertEqual(
+ [('User AS Cond1 ON IssueSnapshot.owner_id = Cond1.user_id', [])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('(LOWER(Cond1.email) LIKE %s)', ['%example.com%'])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessOwnerIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['owner_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+ left_joins, where, unsupported = ast2select._ProcessOwnerIDCond(cond,
+ 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('(Issue.owner_id = %s OR Issue.derived_owner_id = %s)',
+ [111, 111])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessOwnerIDCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['owner_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+ left_joins, where, unsupported = ast2select._ProcessOwnerIDCond(cond,
+ 'Cond1', 'Spare1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('IssueSnapshot.owner_id = %s', [111])], where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessOwnerLastVisitCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['ownerlastvisit']
+ NOW = 1234567890
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.LT, [fd], [], [NOW])
+ left_joins, where, unsupported = ast2select._ProcessOwnerLastVisitCond(
+ cond, 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('User AS Cond1 ON (Issue.owner_id = Cond1.user_id OR '
+ 'Issue.derived_owner_id = Cond1.user_id)',
+ [])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.last_visit_timestamp < %s',
+ [NOW])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessOwnerLastVisitCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['ownerlastvisit']
+ NOW = 1234567890
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.LT, [fd], [], [NOW])
+ left_joins, where, unsupported = ast2select._ProcessOwnerLastVisitCond(
+ cond, 'Cond1', 'Spare1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([cond], unsupported)
+
+ def testProcessIsOwnerBouncing(self):
+ fd = BUILTIN_ISSUE_FIELDS['ownerbouncing']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [])
+ left_joins, where, unsupported = ast2select._ProcessIsOwnerBouncing(
+ cond, 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('User AS Cond1 ON (Issue.owner_id = Cond1.user_id OR '
+ 'Issue.derived_owner_id = Cond1.user_id)',
+ [])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('(Cond1.email_bounce_timestamp IS NOT NULL AND'
+ ' Cond1.email_bounce_timestamp != %s)',
+ [0])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessIsOwnerBouncing_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['ownerbouncing']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [])
+ left_joins, where, unsupported = ast2select._ProcessIsOwnerBouncing(
+ cond, 'Cond1', 'Spare1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+ self.assertEqual([cond], unsupported)
+
+ def testProcessReporterCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['reporter']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessReporterCond(cond,
+ 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('User AS Cond1 ON Issue.reporter_id = Cond1.user_id', [])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('(LOWER(Cond1.email) LIKE %s)', ['%example.com%'])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessReporterCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['reporter']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessReporterCond(cond,
+ 'Cond1', 'Spare1', snapshot_mode=True)
+ self.assertEqual(
+ [('User AS Cond1 ON IssueSnapshot.reporter_id = Cond1.user_id', [])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('(LOWER(Cond1.email) LIKE %s)', ['%example.com%'])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessReporterIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['reporter_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+ left_joins, where, unsupported = ast2select._ProcessReporterIDCond(
+ cond, 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('Issue.reporter_id = %s', [111])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessReporterIDCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['reporter_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+ left_joins, where, unsupported = ast2select._ProcessReporterIDCond(
+ cond, 'Cond1', 'Spare1', snapshot_mode=True)
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('IssueSnapshot.reporter_id = %s', [111])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_SinglePositive(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('(Issue2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND (LOWER(Spare1.email) LIKE %s)) '
+ 'ON Issue.id = Cond1.issue_id AND Issue.shard = Cond1.issue_shard',
+ ['%example.com%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_SinglePositive_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=True)
+ self.assertEqual(
+ [('(IssueSnapshot2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND (LOWER(Spare1.email) LIKE %s)) '
+ 'ON IssueSnapshot.id = Cond1.issuesnapshot_id',
+ ['%example.com%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_MultiplePositive(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['.com', '.org'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('(Issue2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND '
+ '(LOWER(Spare1.email) LIKE %s OR LOWER(Spare1.email) LIKE %s)) '
+ 'ON Issue.id = Cond1.issue_id AND Issue.shard = Cond1.issue_shard',
+ ['%.com%', '%.org%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_MultiplePositive_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['.com', '.org'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=True)
+ self.assertEqual(
+ [('(IssueSnapshot2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND '
+ '(LOWER(Spare1.email) LIKE %s OR LOWER(Spare1.email) LIKE %s)) '
+ 'ON IssueSnapshot.id = Cond1.issuesnapshot_id',
+ ['%.com%', '%.org%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_SingleNegative(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.NOT_TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('(Issue2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND (LOWER(Spare1.email) LIKE %s)) '
+ 'ON Issue.id = Cond1.issue_id AND Issue.shard = Cond1.issue_shard',
+ ['%example.com%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_SingleNegative_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.NOT_TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=True)
+ self.assertEqual(
+ [('(IssueSnapshot2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND (LOWER(Spare1.email) LIKE %s)) '
+ 'ON IssueSnapshot.id = Cond1.issuesnapshot_id',
+ ['%example.com%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_Multiplenegative(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.NOT_TEXT_HAS, [fd], ['.com', '.org'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('(Issue2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND '
+ '(LOWER(Spare1.email) LIKE %s OR LOWER(Spare1.email) LIKE %s)) '
+ 'ON Issue.id = Cond1.issue_id AND Issue.shard = Cond1.issue_shard',
+ ['%.com%', '%.org%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcCond_Multiplenegative_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.NOT_TEXT_HAS, [fd], ['.com', '.org'], [])
+ left_joins, where, unsupported = ast2select._ProcessCcCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=True)
+ self.assertEqual(
+ [('(IssueSnapshot2Cc AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.cc_id = Spare1.user_id AND '
+ '(LOWER(Spare1.email) LIKE %s OR LOWER(Spare1.email) LIKE %s)) '
+ 'ON IssueSnapshot.id = Cond1.issuesnapshot_id',
+ ['%.com%', '%.org%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+ left_joins, where, unsupported = ast2select._ProcessCcIDCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('Issue2Cc AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Issue.shard = Cond1.issue_shard AND '
+ 'Cond1.cc_id = %s',
+ [111])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.cc_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessCcIDCond_SnapshotMode(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+ left_joins, where, unsupported = ast2select._ProcessCcIDCond(cond, 'Cond1',
+ 'Spare1', snapshot_mode=True)
+ self.assertEqual(
+ [('IssueSnapshot2Cc AS Cond1 '
+ 'ON IssueSnapshot.id = Cond1.issuesnapshot_id '
+ 'AND Cond1.cc_id = %s',
+ [111])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.cc_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessStarredByCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['starredby']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where, unsupported = ast2select._ProcessStarredByCond(
+ cond, 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual(
+ [('(IssueStar AS Cond1 JOIN User AS Spare1 '
+ 'ON Cond1.user_id = Spare1.user_id AND '
+ '(LOWER(Spare1.email) LIKE %s)) '
+ 'ON Issue.id = Cond1.issue_id', ['%example.com%'])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Spare1.email IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+  def testProcessStarredByCond_SnapshotMode(self):
+    """Snapshots do not record stars, so starredby is unsupported there."""
+    fd = BUILTIN_ISSUE_FIELDS['starredby']
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+    left_joins, where, unsupported = ast2select._ProcessStarredByCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([cond], unsupported)
+
+  def testProcessStarredByIDCond(self):
+    """starredby_id EQ joins IssueStar directly on the given user ID."""
+    fd = BUILTIN_ISSUE_FIELDS['starredby_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+    left_joins, where, unsupported = ast2select._ProcessStarredByIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('IssueStar AS Cond1 ON Issue.id = Cond1.issue_id '
+          'AND Cond1.user_id = %s', [111])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.user_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessStarredByIDCond_SnapshotMode(self):
+    """Snapshots do not record stars, so starredby_id is unsupported there."""
+    fd = BUILTIN_ISSUE_FIELDS['starredby_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+    left_joins, where, unsupported = ast2select._ProcessStarredByIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([cond], unsupported)
+
+  def testProcessCommentByCond(self):
+    """commentby by email joins Comment with User, excluding deleted comments."""
+    fd = BUILTIN_ISSUE_FIELDS['commentby']
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+    left_joins, where, unsupported = ast2select._ProcessCommentByCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('(Comment AS Cond1 JOIN User AS Spare1 '
+          'ON Cond1.commenter_id = Spare1.user_id '
+          'AND (LOWER(Spare1.email) LIKE %s)) '
+          'ON Issue.id = Cond1.issue_id AND Cond1.deleted_by IS NULL',
+          ['%example.com%'])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Spare1.email IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessCommentByCond_SnapshotMode(self):
+    """Snapshots do not record comments, so commentby is unsupported there."""
+    fd = BUILTIN_ISSUE_FIELDS['commentby']
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+    left_joins, where, unsupported = ast2select._ProcessCommentByCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([cond], unsupported)
+
+  def testProcessCommentByIDCond_EqualsUserID(self):
+    """commentby_id EQ joins Comment on the commenter ID, skipping deleted."""
+    fd = BUILTIN_ISSUE_FIELDS['commentby_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+    left_joins, where, unsupported = ast2select._ProcessCommentByIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Comment AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.commenter_id = %s AND Cond1.deleted_by IS NULL',
+          [111])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.commenter_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessCommentByIDCond_EqualsUserID_SnapshotMode(self):
+    """Snapshots do not record comments, so commentby_id is unsupported."""
+    fd = BUILTIN_ISSUE_FIELDS['commentby_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111])
+    left_joins, where, unsupported = ast2select._ProcessCommentByIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([cond], unsupported)
+
+  def testProcessCommentByIDCond_QuickOr(self):
+    """Multiple commentby_id values become a single SQL IN (...) clause."""
+    fd = BUILTIN_ISSUE_FIELDS['commentby_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111, 222])
+    left_joins, where, unsupported = ast2select._ProcessCommentByIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Comment AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.commenter_id IN (%s,%s) '
+          'AND Cond1.deleted_by IS NULL',
+          [111, 222])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.commenter_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessCommentByIDCond_NotEqualsUserID(self):
+    """NE uses the same join as EQ but negates via an IS NULL WHERE clause."""
+    fd = BUILTIN_ISSUE_FIELDS['commentby_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [111])
+    left_joins, where, unsupported = ast2select._ProcessCommentByIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Comment AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.commenter_id = %s AND Cond1.deleted_by IS NULL',
+          [111])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.commenter_id IS NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessStatusIDCond(self):
+    """status_id needs no join: it checks explicit OR derived status columns."""
+    fd = BUILTIN_ISSUE_FIELDS['status_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [2])
+    left_joins, where, unsupported = ast2select._ProcessStatusIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual([], left_joins)
+    self.assertEqual(
+        [('(Issue.status_id = %s OR Issue.derived_status_id = %s)', [2, 2])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessStatusIDCond_SnapshotMode(self):
+    """Snapshots store a single status column, so only one comparison."""
+    fd = BUILTIN_ISSUE_FIELDS['status_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [2])
+    left_joins, where, unsupported = ast2select._ProcessStatusIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual([], left_joins)
+    self.assertEqual([('IssueSnapshot.status_id = %s', [2])], where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessSummaryCond(self):
+    """summary EQ joins the IssueSummary table on an exact summary match."""
+    fd = BUILTIN_ISSUE_FIELDS['summary']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['sum'], [])
+    left_joins, where, unsupported = ast2select._ProcessSummaryCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    # NOTE(review): unlike sibling tests, this one does not assert
+    # sql._IsValidJoin(left_joins[0][0]) — confirm whether IssueSummary is in
+    # the join whitelist and add the check if so.
+    self.assertEqual(
+        [('IssueSummary AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.summary = %s', ['sum'])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.issue_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessSummaryCond_SnapshotMode(self):
+    """Issue summary is not currently included in issue snapshot, so ignore."""
+    fd = BUILTIN_ISSUE_FIELDS['summary']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['sum'], [])
+    left_joins, where, unsupported = ast2select._ProcessSummaryCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    # The condition is reported back as unsupported rather than dropped.
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([cond], unsupported)
+
+  def testProcessLabelIDCond_NoValue(self):
+    """An EQ label_id cond with no values can match nothing, so it raises."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [])
+    with self.assertRaises(ast2select.NoPossibleResults):
+      ast2select._ProcessLabelIDCond(cond, 'Cond1', 'Spare1',
+          snapshot_mode=False)
+
+  def testProcessLabelIDCond_SingleValue(self):
+    """A single label_id EQ joins Issue2Label with a sharded equality match."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1])
+    left_joins, where, unsupported = ast2select._ProcessLabelIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2Label AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.label_id = %s', [1])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.label_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessLabelIDCond_SingleValue_SnapshotMode(self):
+    """In snapshot mode the label join targets IssueSnapshot2Label."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1])
+    left_joins, where, unsupported = ast2select._ProcessLabelIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual(
+        [('IssueSnapshot2Label AS Cond1 '
+          'ON IssueSnapshot.id = Cond1.issuesnapshot_id AND '
+          'Cond1.label_id = %s', [1])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.label_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessLabelIDCond_MultipleValue(self):
+    """Multiple label_id values become a single SQL IN (...) clause."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1, 2])
+    left_joins, where, unsupported = ast2select._ProcessLabelIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2Label AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.label_id IN (%s,%s)', [1, 2])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.label_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessLabelIDCond_NegatedNoValue(self):
+    """A NE label_id cond with no values matches everything: no clauses."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [])
+    left_joins, where, unsupported = ast2select._ProcessLabelIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([], unsupported)
+
+  def testProcessLabelIDCond_NegatedSingleValue(self):
+    """NE uses the same join as EQ but requires the joined row be absent."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1])
+    left_joins, where, unsupported = ast2select._ProcessLabelIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2Label AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.label_id = %s', [1])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.label_id IS NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessLabelIDCond_NegatedSingleValue_SnapshotMode(self):
+    """Negated single label_id in snapshot mode: snapshot join + IS NULL."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1])
+    left_joins, where, unsupported = ast2select._ProcessLabelIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual(
+        [('IssueSnapshot2Label AS Cond1 '
+          'ON IssueSnapshot.id = Cond1.issuesnapshot_id AND '
+          'Cond1.label_id = %s', [1])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.label_id IS NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessLabelIDCond_NegatedMultipleValue(self):
+    """Negated multi-value label_id: IN (...) join with IS NULL negation."""
+    fd = BUILTIN_ISSUE_FIELDS['label_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1, 2])
+    left_joins, where, unsupported = ast2select._ProcessLabelIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2Label AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.label_id IN (%s,%s)', [1, 2])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.label_id IS NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessComponentIDCond(self):
+    """component_id EQ joins Issue2Component with a sharded equality match."""
+    fd = BUILTIN_ISSUE_FIELDS['component_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [101])
+    left_joins, where, unsupported = ast2select._ProcessComponentIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2Component AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.component_id = %s', [101])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.component_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessComponentIDCond_SnapshotMode(self):
+    """In snapshot mode the component join targets IssueSnapshot2Component."""
+    fd = BUILTIN_ISSUE_FIELDS['component_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [101])
+    left_joins, where, unsupported = ast2select._ProcessComponentIDCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual(
+        [('IssueSnapshot2Component AS Cond1 '
+          'ON IssueSnapshot.id = Cond1.issuesnapshot_id AND '
+          'Cond1.component_id = %s', [101])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.component_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessApprovalFieldCond_Status(self):
+    """Approval -status suffix matches Issue2ApprovalValue.status, lowercased."""
+    approval_fd = tracker_pb2.FieldDef(
+        field_id=1, field_name='UXReview',
+        field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.EQ, [approval_fd], ['Approved'], [],
+        key_suffix=query2ast.STATUS_SUFFIX)
+    left_joins, where, _unsupported = ast2select._ProcessApprovalFieldCond(
+        cond, 'Cond1', 'Spare1', False)
+    self.assertEqual(
+        [('Issue2ApprovalValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.approval_id = %s AND LOWER(Cond1.status) = %s',
+          [1, 'approved'])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.approval_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+
+  def testProcessApprovalFieldCond_SetOn(self):
+    """Negated -set-on suffix compares set_on and requires an absent row."""
+    approval_fd = tracker_pb2.FieldDef(
+        field_id=1, field_name='UXReview',
+        field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+    # Timestamp for 2016-10-05 local midnight; only equality matters here.
+    int_time = int(time.mktime(datetime.datetime(2016, 10, 5).timetuple()))
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.NOT_TEXT_HAS, [approval_fd], [], [int_time],
+        key_suffix=query2ast.SET_ON_SUFFIX)
+    left_joins, where, _unsupported = ast2select._ProcessApprovalFieldCond(
+        cond, 'Cond1', 'Spare1', False)
+    self.assertEqual(
+        [('Issue2ApprovalValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.approval_id = %s AND Cond1.set_on = %s',
+          [1, int_time])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.approval_id IS NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+
+  def testProcessApprovalFieldCond_SetBy(self):
+    """-set-by by email adds a User join resolving the email to setter_id."""
+    approval_fd = tracker_pb2.FieldDef(
+        field_id=1, field_name='UXReview',
+        field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.EQ, [approval_fd], ['user2@email.com'], [],
+        key_suffix=query2ast.SET_BY_SUFFIX)
+    left_joins, where, _unsupported = ast2select._ProcessApprovalFieldCond(
+        cond, 'Cond1', 'Spare1', False)
+    self.assertEqual(
+        [('User AS Spare1 ON LOWER(Spare1.email) = %s', ['user2@email.com']),
+         ('Issue2ApprovalValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.approval_id = %s AND Cond1.setter_id = Spare1.user_id',
+          [1])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.approval_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+
+  def testProcessApprovalFieldCond_ApproverID(self):
+    """-approver suffix by ID joins IssueApproval2Approver directly."""
+    approval_fd = tracker_pb2.FieldDef(
+        field_id=1, field_name='UXReview',
+        field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.EQ, [approval_fd], [], [111],
+        key_suffix=query2ast.APPROVER_SUFFIX)
+    left_joins, where, _unsupported = ast2select._ProcessApprovalFieldCond(
+        cond, 'Cond1', 'Spare1', False)
+    self.assertEqual(
+        [('IssueApproval2Approver AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.approval_id = %s AND Cond1.approver_id = %s',
+          [1, 111])], left_joins)
+    self.assertEqual(
+        [('Cond1.approval_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+
+
+  def testProcessApprovalFieldCond_IsDefined(self):
+    """IS_DEFINED on an approval only checks that an approval row exists."""
+    approval_fd = tracker_pb2.FieldDef(
+        field_id=1, field_name='UXReview',
+        field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.IS_DEFINED, [approval_fd], [], [])
+    left_joins, where, _unsupported = ast2select._ProcessApprovalFieldCond(
+        cond, 'Cond1', 'Spare1', False)
+    self.assertEqual(
+        [('Issue2ApprovalValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.approval_id = %s',
+          [1])], left_joins)
+    self.assertEqual(
+        [('Cond1.approval_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+
+  def testProcessCustomFieldCond_IntType(self):
+    """An INT custom field compares against Issue2FieldValue.int_value."""
+    fd = tracker_pb2.FieldDef(
+        field_id=1, project_id=789, field_name='EstDays',
+        field_type=tracker_pb2.FieldTypes.INT_TYPE)
+    val = 42
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [val])
+    left_joins, where, unsupported = ast2select._ProcessCustomFieldCond(
+        cond, 'Cond1', 'Spare1', 'Phase', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2FieldValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.field_id = %s AND '
+          'Cond1.int_value = %s', [1, val])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.field_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+ def testProcessCustomFieldCond_StrType(self):
+ fd = tracker_pb2.FieldDef(
+ field_id=1, project_id=789, field_name='Nickname',
+ field_type=tracker_pb2.FieldTypes.STR_TYPE)
+ val = 'Fuzzy'
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [val], [])
+ left_joins, where, unsupported = ast2select._ProcessCustomFieldCond(
+ cond, 'Cond1', 'Spare1','Phase1', snapshot_mode=False)
+ self.assertEqual(
+ [('Issue2FieldValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Issue.shard = Cond1.issue_shard AND '
+ 'Cond1.field_id = %s AND '
+ 'Cond1.str_value = %s', [1, val])],
+ left_joins)
+ self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+ self.assertEqual(
+ [('Cond1.field_id IS NOT NULL', [])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+  def testProcessCustomFieldCond_StrType_SnapshotMode(self):
+    """Custom string fields are not in snapshots, so cond is unsupported."""
+    fd = tracker_pb2.FieldDef(
+        field_id=1, project_id=789, field_name='Nickname',
+        field_type=tracker_pb2.FieldTypes.STR_TYPE)
+    val = 'Fuzzy'
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [val], [])
+    left_joins, where, unsupported = ast2select._ProcessCustomFieldCond(
+        cond, 'Cond1', 'Spare1', 'Phase1', snapshot_mode=True)
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([cond], unsupported)
+
+  def testProcessCustomFieldCond_UserType_ByID(self):
+    """A USER custom field queried by ID matches Issue2FieldValue.user_id."""
+    fd = tracker_pb2.FieldDef(
+        field_id=1, project_id=789, field_name='ExecutiveProducer',
+        field_type=tracker_pb2.FieldTypes.USER_TYPE)
+    val = 111
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [val])
+    left_joins, where, unsupported = ast2select._ProcessCustomFieldCond(
+        cond, 'Cond1', 'Spare1', 'Phase1', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2FieldValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.field_id = %s AND '
+          'Cond1.user_id = %s', [1, val])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.field_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessCustomFieldCond_UserType_ByEmail(self):
+    """A USER custom field queried by email first joins User to resolve it."""
+    fd = tracker_pb2.FieldDef(
+        field_id=1, project_id=789, field_name='ExecutiveProducer',
+        field_type=tracker_pb2.FieldTypes.USER_TYPE)
+    val = 'exec@example.com'
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [val], [])
+    left_joins, where, unsupported = ast2select._ProcessCustomFieldCond(
+        cond, 'Cond1', 'Spare1', 'Phase1', snapshot_mode=False)
+    self.assertEqual(
+        [('User AS Spare1 ON '
+          'LOWER(Spare1.email) = %s', [val]),
+         ('Issue2FieldValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.field_id = %s AND '
+          'Cond1.user_id = Spare1.user_id', [1])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertTrue(sql._IsValidJoin(left_joins[1][0]))
+    self.assertEqual(
+        [('Cond1.field_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessCustomFieldCond_DateType(self):
+    """A DATE custom field compares against Issue2FieldValue.date_value."""
+    fd = tracker_pb2.FieldDef(
+        field_id=1, project_id=789, field_name='Deadline',
+        field_type=tracker_pb2.FieldTypes.DATE_TYPE)
+    # Timestamp for 2016-10-05 local midnight; only equality matters here.
+    val = int(time.mktime(datetime.datetime(2016, 10, 5).timetuple()))
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [val])
+    left_joins, where, unsupported = ast2select._ProcessCustomFieldCond(
+        cond, 'Cond1', 'Spare1', 'Phase1', snapshot_mode=False)
+    self.assertEqual(
+        [('Issue2FieldValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Issue.shard = Cond1.issue_shard AND '
+          'Cond1.field_id = %s AND '
+          'Cond1.date_value = %s', [1, val])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('Cond1.field_id IS NOT NULL', [])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+ def testProcessCustomFieldCond_PhaseName(self):
+ fd = tracker_pb2.FieldDef(
+ field_id=1, project_id=789, field_name='Milestone',
+ field_type=tracker_pb2.FieldTypes.INT_TYPE)
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [72],
+ phase_name='Canary')
+ left_joins, where, unsupported = ast2select._ProcessCustomFieldCond(
+ cond, 'Cond1', 'User1', 'Phase1', snapshot_mode=False)
+ self.assertEqual(
+ [('IssuePhaseDef AS Phase1 ON LOWER(Phase1.name) = %s', ['Canary']),
+ ('Issue2FieldValue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Issue.shard = Cond1.issue_shard AND '
+ 'Cond1.field_id = %s AND Cond1.int_value = %s AND '
+ 'Cond1.phase_id = Phase1.id', [1, 72])],
+ left_joins)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+ def testProcessAttachmentCond_HasAttachment(self):
+ fd = BUILTIN_ISSUE_FIELDS['attachment']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.IS_DEFINED, [fd], [], [])
+ left_joins, where, unsupported = ast2select._ProcessAttachmentCond(
+ cond, 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('(Issue.attachment_count IS NOT NULL AND '
+ 'Issue.attachment_count != %s)',
+ [0])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.IS_NOT_DEFINED, [fd], [], [])
+ left_joins, where, unsupported = ast2select._ProcessAttachmentCond(
+ cond, 'Cond1', 'Spare1', snapshot_mode=False)
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('(Issue.attachment_count IS NULL OR '
+ 'Issue.attachment_count = %s)',
+ [0])],
+ where)
+ self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+ self.assertEqual([], unsupported)
+
+  def testProcessAttachmentCond_HasAttachment_SnapshotMode(self):
+    """Snapshots do not record attachments, so the cond is unsupported."""
+    fd = BUILTIN_ISSUE_FIELDS['attachment']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.IS_DEFINED, [fd], [], [])
+    left_joins, where, unsupported = ast2select._ProcessAttachmentCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual([], left_joins)
+    self.assertEqual([], where)
+    self.assertEqual([cond], unsupported)
+
+  def testProcessAttachmentCond_TextHas(self):
+    """TEXT_HAS on attachment joins Attachment and LIKE-matches filename."""
+    fd = BUILTIN_ISSUE_FIELDS['attachment']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.TEXT_HAS, [fd], ['jpg'], [])
+    left_joins, where, unsupported = ast2select._ProcessAttachmentCond(
+        cond, 'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Attachment AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.deleted = %s',
+          [False])],
+        left_joins)
+    self.assertTrue(sql._IsValidJoin(left_joins[0][0]))
+    self.assertEqual(
+        [('(Cond1.filename LIKE %s)', ['%jpg%'])],
+        where)
+    self.assertTrue(sql._IsValidWhereCond(where[0][0]))
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistIDCond_MultiValue(self):
+    """Multiple hotlist_id values join Hotlist2Issue with an IN (...) clause."""
+    # NOTE(review): hotlist tests skip the sql._IsValidJoin /
+    # _IsValidWhereCond checks siblings make — confirm whitelist and add.
+    fd = BUILTIN_ISSUE_FIELDS['hotlist_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1, 2])
+    left_joins, where, unsupported = ast2select._ProcessHotlistIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Hotlist2Issue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.hotlist_id IN (%s,%s)', [1, 2])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.hotlist_id IS NOT NULL', [])],
+        where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistIDCond_MultiValue_SnapshotMode(self):
+    """In snapshot mode the hotlist join targets IssueSnapshot2Hotlist."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1, 2])
+    left_joins, where, unsupported = ast2select._ProcessHotlistIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual(
+        [('IssueSnapshot2Hotlist AS Cond1 '
+          'ON IssueSnapshot.id = Cond1.issuesnapshot_id AND '
+          'Cond1.hotlist_id IN (%s,%s)', [1, 2])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.hotlist_id IS NOT NULL', [])],
+        where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistIDCond_SingleValue(self):
+    """A single hotlist_id EQ joins Hotlist2Issue with an equality match."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1])
+    left_joins, where, unsupported = ast2select._ProcessHotlistIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Hotlist2Issue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.hotlist_id = %s', [1])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.hotlist_id IS NOT NULL', [])],
+        where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistIDCond_NegatedMultiValue(self):
+    """Negated multi-value hotlist_id: IN (...) join with IS NULL negation."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1, 2])
+    left_joins, where, unsupported = ast2select._ProcessHotlistIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Hotlist2Issue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.hotlist_id IN (%s,%s)', [1, 2])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.hotlist_id IS NULL', [])],
+        where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistIDCond_NegatedMultiValue_SnapshotMode(self):
+    """Negated multi-value hotlist_id in snapshot mode uses the snapshot join."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1, 2])
+    left_joins, where, unsupported = ast2select._ProcessHotlistIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual(
+        [('IssueSnapshot2Hotlist AS Cond1 '
+          'ON IssueSnapshot.id = Cond1.issuesnapshot_id AND '
+          'Cond1.hotlist_id IN (%s,%s)', [1, 2])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.hotlist_id IS NULL', [])],
+        where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistIDCond_NegatedSingleValue(self):
+    """Negated single hotlist_id: same join as EQ, absence enforced in WHERE."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1])
+    left_joins, where, unsupported = ast2select._ProcessHotlistIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('Hotlist2Issue AS Cond1 ON Issue.id = Cond1.issue_id AND '
+          'Cond1.hotlist_id = %s', [1])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.hotlist_id IS NULL', [])],
+        where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistIDCond_NegatedSingleValue_SnapshotMode(self):
+    """Negated single hotlist_id in snapshot mode uses the snapshot join."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist_id']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], [], [1])
+    left_joins, where, unsupported = ast2select._ProcessHotlistIDCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual(
+        [('IssueSnapshot2Hotlist AS Cond1 '
+          'ON IssueSnapshot.id = Cond1.issuesnapshot_id AND '
+          'Cond1.hotlist_id = %s', [1])],
+        left_joins)
+    self.assertEqual(
+        [('Cond1.hotlist_id IS NULL', [])],
+        where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistCond_SingleValue(self):
+    """hotlist by name: the 'invalid:' owner prefix is dropped, name LIKEd."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['invalid:spa'], [])
+    left_joins, where, unsupported = ast2select._ProcessHotlistCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('(Hotlist2Issue JOIN Hotlist AS Cond1 ON '
+          'Hotlist2Issue.hotlist_id = Cond1.id AND (LOWER(Cond1.name) LIKE %s))'
+          ' ON Issue.id = Hotlist2Issue.issue_id', ['%spa%'])],
+        left_joins)
+    self.assertEqual([('Cond1.name IS NOT NULL', [])], where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistCond_SingleValue_SnapshotMode(self):
+    """hotlist by name in snapshot mode joins through IssueSnapshot2Hotlist."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], ['invalid:spa'], [])
+    left_joins, where, unsupported = ast2select._ProcessHotlistCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=True)
+    self.assertEqual(
+        [('(IssueSnapshot2Hotlist JOIN Hotlist AS Cond1 ON '
+          'IssueSnapshot2Hotlist.hotlist_id = Cond1.id '
+          'AND (LOWER(Cond1.name) LIKE %s)) '
+          'ON IssueSnapshot.id = IssueSnapshot2Hotlist.issuesnapshot_id',
+          ['%spa%'])],
+        left_joins)
+    self.assertEqual([('Cond1.name IS NOT NULL', [])], where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistCond_SingleValue2(self):
+    """Multiple hotlist names OR together as LIKE clauses (name says
+    'SingleValue' but this covers the multi-value case)."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd],
+                            ['invalid:spa', 'port', 'invalid2:barc'], [])
+    left_joins, where, unsupported = ast2select._ProcessHotlistCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('(Hotlist2Issue JOIN Hotlist AS Cond1 ON '
+          'Hotlist2Issue.hotlist_id = Cond1.id AND (LOWER(Cond1.name) LIKE %s OR '
+          'LOWER(Cond1.name) LIKE %s OR LOWER(Cond1.name) LIKE %s)) ON '
+          'Issue.id = Hotlist2Issue.issue_id', ['%spa%', '%port%', '%barc%'])],
+        left_joins)
+    self.assertEqual([('Cond1.name IS NOT NULL', [])], where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistCond_SingleValue3(self):
+    """Negated hotlist name: same join, negated with Cond1.name IS NULL."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NE, [fd], ['invalid:spa'], [])
+    left_joins, where, unsupported = ast2select._ProcessHotlistCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('(Hotlist2Issue JOIN Hotlist AS Cond1 ON '
+          'Hotlist2Issue.hotlist_id = Cond1.id AND (LOWER(Cond1.name) LIKE %s))'
+          ' ON Issue.id = Hotlist2Issue.issue_id', ['%spa%'])],
+        left_joins)
+    self.assertEqual([('Cond1.name IS NULL', [])], where)
+    self.assertEqual([], unsupported)
+
+  def testProcessHotlistCond_SingleValue4(self):
+    """NOT_TEXT_HAS over several hotlist names negates the ORed LIKE join."""
+    fd = BUILTIN_ISSUE_FIELDS['hotlist']
+    cond = ast_pb2.MakeCond(ast_pb2.QueryOp.NOT_TEXT_HAS, [fd],
+                            ['invalid:spa', 'port', 'invalid2:barc'], [])
+    left_joins, where, unsupported = ast2select._ProcessHotlistCond(cond,
+        'Cond1', 'Spare1', snapshot_mode=False)
+    self.assertEqual(
+        [('(Hotlist2Issue JOIN Hotlist AS Cond1 ON '
+          'Hotlist2Issue.hotlist_id = Cond1.id AND (LOWER(Cond1.name) LIKE %s OR '
+          'LOWER(Cond1.name) LIKE %s OR LOWER(Cond1.name) LIKE %s)) ON '
+          'Issue.id = Hotlist2Issue.issue_id', ['%spa%', '%port%', '%barc%'])],
+        left_joins)
+    self.assertEqual([('Cond1.name IS NULL', [])], where)
+    self.assertEqual([], unsupported)
+
+  def testProcessPhaseCond_HasGateEQ(self):
+    """gate EQ joins approval values to IssuePhaseDef with IN on names."""
+    fd = BUILTIN_ISSUE_FIELDS['gate']
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.EQ, [fd], ['canary', 'stable'], [])
+    left_joins, where, unsupported = ast2select._ProcessPhaseCond(
+        cond, 'Cond1', 'Phase1', False)
+    self.assertEqual(
+        [('(Issue2ApprovalValue AS Cond1 JOIN IssuePhaseDef AS Phase1 '
+          'ON Cond1.phase_id = Phase1.id AND '
+          'LOWER(Phase1.name) IN (%s,%s)) '
+          'ON Issue.id = Cond1.issue_id', ['canary', 'stable'])],
+        left_joins)
+    self.assertEqual([('Phase1.name IS NOT NULL', [])], where)
+    self.assertEqual([], unsupported)
+
+  def testProcessPhaseCond_NoGateTEXT(self):
+    """Negated gate text match ORs LIKE clauses then requires name IS NULL."""
+    fd = BUILTIN_ISSUE_FIELDS['gate']
+    cond = ast_pb2.MakeCond(
+        ast_pb2.QueryOp.NOT_TEXT_HAS, [fd], ['canary', 'stable'], [])
+    left_joins, where, unsupported = ast2select._ProcessPhaseCond(
+        cond, 'Cond1', 'Phase1', False)
+    self.assertEqual(
+        [('(Issue2ApprovalValue AS Cond1 JOIN IssuePhaseDef AS Phase1 '
+          'ON Cond1.phase_id = Phase1.id AND '
+          '(LOWER(Phase1.name) LIKE %s '
+          'OR LOWER(Phase1.name) LIKE %s)) '
+          'ON Issue.id = Cond1.issue_id', ['%canary%', '%stable%'])],
+        left_joins)
+    self.assertEqual([('Phase1.name IS NULL', [])], where)
+    self.assertEqual([], unsupported)
+
+  def testCompare_IntTypes(self):
+    """_Compare on INT columns: IS_DEFINED, EQ (single and IN), NE forms."""
+    val_type = tracker_pb2.FieldTypes.INT_TYPE
+    # IS_DEFINED: defined means non-NULL and not the 0 sentinel.
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.IS_DEFINED, val_type, 'col', [1, 2])
+    self.assertEqual('(Alias.col IS NOT NULL AND Alias.col != %s)', cond_str)
+    self.assertEqual([0], cond_args)
+
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', [1])
+    self.assertEqual('Alias.col = %s', cond_str)
+    self.assertEqual([1], cond_args)
+
+    # Multiple EQ values collapse into an IN (...) clause.
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', [1, 2])
+    self.assertEqual('Alias.col IN (%s,%s)', cond_str)
+    self.assertEqual([1, 2], cond_args)
+
+    # NE with no values excludes nothing: the clause is simply TRUE.
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [])
+    self.assertEqual('TRUE', cond_str)
+    self.assertEqual([], cond_args)
+
+    # NE also matches NULL columns, hence the IS NULL alternative.
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [1])
+    self.assertEqual('(Alias.col IS NULL OR Alias.col != %s)', cond_str)
+    self.assertEqual([1], cond_args)
+
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [1, 2])
+    self.assertEqual('(Alias.col IS NULL OR Alias.col NOT IN (%s,%s))',
+                     cond_str)
+    self.assertEqual([1, 2], cond_args)
+
+  def testCompare_STRTypes(self):
+    """_Compare on STR columns, including the LIKE-based TEXT_HAS forms."""
+    val_type = tracker_pb2.FieldTypes.STR_TYPE
+    # IS_DEFINED: defined means non-NULL and not the empty-string sentinel.
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.IS_DEFINED, val_type, 'col', ['a', 'b'])
+    self.assertEqual('(Alias.col IS NOT NULL AND Alias.col != %s)', cond_str)
+    self.assertEqual([''], cond_args)
+
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', ['a'])
+    self.assertEqual('Alias.col = %s', cond_str)
+    self.assertEqual(['a'], cond_args)
+
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', ['a', 'b'])
+    self.assertEqual('Alias.col IN (%s,%s)', cond_str)
+    self.assertEqual(['a', 'b'], cond_args)
+
+    # NE with no values excludes nothing: the clause is simply TRUE.
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [])
+    self.assertEqual('TRUE', cond_str)
+    self.assertEqual([], cond_args)
+
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.NE, val_type, 'col', ['a'])
+    self.assertEqual('(Alias.col IS NULL OR Alias.col != %s)', cond_str)
+    self.assertEqual(['a'], cond_args)
+
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.NE, val_type, 'col', ['a', 'b'])
+    self.assertEqual('(Alias.col IS NULL OR Alias.col NOT IN (%s,%s))',
+                     cond_str)
+    self.assertEqual(['a', 'b'], cond_args)
+
+    # TEXT_HAS wraps the value in SQL LIKE wildcards.
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.TEXT_HAS, val_type, 'col', ['a'])
+    self.assertEqual('(Alias.col LIKE %s)', cond_str)
+    self.assertEqual(['%a%'], cond_args)
+
+    cond_str, cond_args = ast2select._Compare(
+        'Alias', ast_pb2.QueryOp.NOT_TEXT_HAS, val_type, 'col', ['a'])
+    self.assertEqual('(Alias.col IS NULL OR Alias.col NOT LIKE %s)', cond_str)
+    self.assertEqual(['%a%'], cond_args)
+
+  def testCompareAlreadyJoined(self):
+    """When the join filters, EQ/NE reduce to row presence/absence checks."""
+    cond_str, cond_args = ast2select._CompareAlreadyJoined(
+        'Alias', ast_pb2.QueryOp.EQ, 'col')
+    self.assertEqual('Alias.col IS NOT NULL', cond_str)
+    self.assertEqual([], cond_args)
+
+    cond_str, cond_args = ast2select._CompareAlreadyJoined(
+        'Alias', ast_pb2.QueryOp.NE, 'col')
+    self.assertEqual('Alias.col IS NULL', cond_str)
+    self.assertEqual([], cond_args)
diff --git a/search/test/ast2sort_test.py b/search/test/ast2sort_test.py
new file mode 100644
index 0000000..9d365e8
--- /dev/null
+++ b/search/test/ast2sort_test.py
@@ -0,0 +1,373 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the ast2sort module."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import unittest
+
+from proto import tracker_pb2
+from search import ast2sort
+from search import query2ast
+
+
+BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
+ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']
+
+
+class AST2SortTest(unittest.TestCase):
+
  def setUp(self):
    """Set up shared fixtures for the sort-clause tests.

    Each harmonized label/status tuple is (id, rank, name); a rank of None
    marks an "oddball" value with no well-known ordering.
    """
    self.harmonized_labels = [
        (101, 0, 'Hot'), (102, 1, 'Cold'), (103, None, 'Odd')]
    self.harmonized_statuses = [
        (201, 0, 'New'), (202, 1, 'Assigned'), (203, None, 'OnHold')]
    self.harmonized_fields = []
    # Identity formatter: lets tests assert on the raw {alias}/{sort_dir}
    # templates instead of fully substituted SQL.
    self.fmt = lambda string, **kwords: string
+
+ def testBuildSortClauses_EmptySortDirectives(self):
+ left_joins, order_by = ast2sort.BuildSortClauses(
+ [], self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], order_by)
+
  def testBuildSortClauses_Normal(self):
    """A mix of directives produces per-directive joins and ORDER BY terms."""
    left_joins, order_by = ast2sort.BuildSortClauses(
        ['stars', 'status', 'pri', 'reporter', 'id'], self.harmonized_labels,
        self.harmonized_statuses, self.harmonized_fields)
    # Only 'reporter' needs a join; its alias index (Sort3) reflects its
    # position in the directive list.
    expected_left_joins = [
        ('User AS Sort3 ON Issue.reporter_id = Sort3.user_id', [])]
    expected_order_by = [
        ('Issue.star_count ASC', []),
        # Well-known statuses sort by rank; the None-ranked one comes after.
        ('FIELD(IF(ISNULL(Issue.status_id), Issue.derived_status_id, '
         'Issue.status_id), %s,%s) DESC', [201, 202]),
        ('FIELD(IF(ISNULL(Issue.status_id), Issue.derived_status_id, '
         'Issue.status_id), %s) DESC', [203]),
        ('ISNULL(Sort3.email) ASC', []),
        ('Sort3.email ASC', []),
        ('Issue.local_id ASC', [])]
    self.assertEqual(expected_left_joins, left_joins)
    self.assertEqual(expected_order_by, order_by)
+
+ def testProcessProjectSD(self):
+ left_joins, order_by = ast2sort._ProcessProjectSD(self.fmt)
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('Issue.project_id {sort_dir}', [])],
+ order_by)
+
+ def testProcessReporterSD(self):
+ left_joins, order_by = ast2sort._ProcessReporterSD(self.fmt)
+ self.assertEqual(
+ [('User AS {alias} ON Issue.reporter_id = {alias}.user_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.email) {sort_dir}', []),
+ ('{alias}.email {sort_dir}', [])],
+ order_by)
+
+ def testProcessOwnerSD(self):
+ left_joins, order_by = ast2sort._ProcessOwnerSD(self.fmt)
+ self.assertEqual(
+ [('User AS {alias}_exp ON Issue.owner_id = {alias}_exp.user_id', []),
+ ('User AS {alias}_der ON '
+ 'Issue.derived_owner_id = {alias}_der.user_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('(ISNULL({alias}_exp.email) AND ISNULL({alias}_der.email)) '
+ '{sort_dir}', []),
+ ('CONCAT({alias}_exp.email, {alias}_der.email) {sort_dir}', [])],
+ order_by)
+
+ def testProcessCcSD(self):
+ left_joins, order_by = ast2sort._ProcessCcSD(self.fmt)
+ self.assertEqual(
+ [('Issue2Cc AS {alias} ON Issue.id = {alias}.issue_id '
+ 'LEFT JOIN User AS {alias}_user '
+ 'ON {alias}.cc_id = {alias}_user.user_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}_user.email) {sort_dir}', []),
+ ('{alias}_user.email {sort_dir}', [])],
+ order_by)
+
+ def testProcessComponentSD(self):
+ left_joins, order_by = ast2sort._ProcessComponentSD(self.fmt)
+ self.assertEqual(
+ [('Issue2Component AS {alias} ON Issue.id = {alias}.issue_id '
+ 'LEFT JOIN ComponentDef AS {alias}_component '
+ 'ON {alias}.component_id = {alias}_component.id', [])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}_component.path) {sort_dir}', []),
+ ('{alias}_component.path {sort_dir}', [])],
+ order_by)
+
+ def testProcessSummarySD(self):
+ left_joins, order_by = ast2sort._ProcessSummarySD(self.fmt)
+ self.assertEqual(
+ [('IssueSummary AS {alias} ON Issue.id = {alias}.issue_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('{alias}.summary {sort_dir}', [])],
+ order_by)
+
  def testProcessStatusSD(self):
    """Placeholder: _ProcessStatusSD is not yet covered."""
    pass  # TODO(jrobbins): fill in this test case
+
+ def testProcessBlockedSD(self):
+ left_joins, order_by = ast2sort._ProcessBlockedSD(self.fmt)
+ self.assertEqual(
+ [('IssueRelation AS {alias} ON Issue.id = {alias}.issue_id '
+ 'AND {alias}.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.dst_issue_id) {sort_dir}', [])],
+ order_by)
+
+ def testProcessBlockedOnSD(self):
+ left_joins, order_by = ast2sort._ProcessBlockedOnSD(self.fmt)
+ self.assertEqual(
+ [('IssueRelation AS {alias} ON Issue.id = {alias}.issue_id '
+ 'AND {alias}.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.dst_issue_id) {sort_dir}', []),
+ ('{alias}.dst_issue_id {sort_dir}', [])],
+ order_by)
+
+ def testProcessBlockingSD(self):
+ left_joins, order_by = ast2sort._ProcessBlockingSD(self.fmt)
+ self.assertEqual(
+ [('IssueRelation AS {alias} ON Issue.id = {alias}.dst_issue_id '
+ 'AND {alias}.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.issue_id) {sort_dir}', []),
+ ('{alias}.issue_id {sort_dir}', [])],
+ order_by)
+
+ def testProcessMergedIntoSD(self):
+ left_joins, order_by = ast2sort._ProcessMergedIntoSD(self.fmt)
+ self.assertEqual(
+ [('IssueRelation AS {alias} ON Issue.id = {alias}.issue_id '
+ 'AND {alias}.kind = %s', ['mergedinto'])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.dst_issue_id) {sort_dir}', []),
+ ('{alias}.dst_issue_id {sort_dir}', [])],
+ order_by)
+
  def testProcessCustomAndLabelSD(self):
    """Placeholder: the non-phase path of _ProcessCustomAndLabelSD is not
    yet covered."""
    pass  # TODO(jrobbins): fill in this test case
+
+ def testProcessCustomAndLabelSD_PhaseField(self):
+ harmonized_labels = []
+ bear_fd = tracker_pb2.FieldDef(
+ field_id=1, field_name='DropBear', project_id=789,
+ field_type=tracker_pb2.FieldTypes.INT_TYPE)
+ bear2_fd = tracker_pb2.FieldDef(
+ field_id=2, field_name='DropBear', project_id=788,
+ field_type=tracker_pb2.FieldTypes.STR_TYPE)
+ koala_fd = tracker_pb2.FieldDef(
+ field_id=3, field_name='koala', project_id=789,
+ field_type=tracker_pb2.FieldTypes.INT_TYPE)
+ bear_app_fd = tracker_pb2.FieldDef(
+ field_id=4, field_name='dropbear', project_id=789,
+ field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+ harmonized_fields = [bear_fd, bear2_fd, koala_fd, bear_app_fd]
+ phase_name = 'stable'
+ alias = 'Sort0'
+ sort_dir = 'DESC'
+ sd = 'stable.dropbear'
+ left_joins, order_by = ast2sort._ProcessCustomAndLabelSD(
+ sd, harmonized_labels, harmonized_fields, alias, sort_dir,
+ self.fmt)
+
+ expected_joins = []
+ expected_order = []
+ int_left_joins, int_order_by = ast2sort._CustomFieldSortClauses(
+ [bear_fd, bear2_fd], tracker_pb2.FieldTypes.INT_TYPE, 'int_value',
+ alias, sort_dir, phase_name=phase_name)
+ str_left_joins, str_order_by = ast2sort._CustomFieldSortClauses(
+ [bear_fd, bear2_fd], tracker_pb2.FieldTypes.STR_TYPE, 'str_value',
+ alias, sort_dir, phase_name=phase_name)
+ user_left_joins, user_order_by = ast2sort._CustomFieldSortClauses(
+ [bear_fd, bear2_fd], tracker_pb2.FieldTypes.USER_TYPE, 'user_id',
+ alias, sort_dir, phase_name=phase_name)
+ label_left_joinss, label_order_by = ast2sort._LabelSortClauses(
+ sd, harmonized_labels, self.fmt)
+ expected_joins.extend(
+ int_left_joins + str_left_joins + user_left_joins + label_left_joinss)
+ expected_order.extend(
+ int_order_by + str_order_by + user_order_by + label_order_by)
+ self.assertEqual(left_joins, expected_joins)
+ self.assertEqual(order_by, expected_order)
+
+ def testApprovalFieldSortClauses_Status(self):
+ approval_fd_list = [
+ tracker_pb2.FieldDef(field_id=2, project_id=789,
+ field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE),
+ tracker_pb2.FieldDef(field_id=4, project_id=788,
+ field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+ ]
+ left_joins, order_by = ast2sort._ApprovalFieldSortClauses(
+ approval_fd_list, '-status', self.fmt)
+
+ self.assertEqual(
+ [('{tbl_name} AS {alias}_approval '
+ 'ON Issue.id = {alias}_approval.issue_id '
+ 'AND {alias}_approval.approval_id IN ({approval_ids_ph})', [2, 4])],
+ left_joins)
+
+ self.assertEqual(
+ [('FIELD({alias}_approval.status, {approval_status_ph}) {rev_sort_dir}',
+ ast2sort.APPROVAL_STATUS_SORT_ORDER)],
+ order_by)
+
+ def testApprovalFieldSortClauses_Approver(self):
+ approval_fd_list = [
+ tracker_pb2.FieldDef(field_id=2, project_id=789,
+ field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE),
+ tracker_pb2.FieldDef(field_id=4, project_id=788,
+ field_type=tracker_pb2.FieldTypes.APPROVAL_TYPE)
+ ]
+ left_joins, order_by = ast2sort._ApprovalFieldSortClauses(
+ approval_fd_list, '-approver', self.fmt)
+
+ self.assertEqual(
+ [('{tbl_name} AS {alias}_approval '
+ 'ON Issue.id = {alias}_approval.issue_id '
+ 'AND {alias}_approval.approval_id IN ({approval_ids_ph})', [2, 4]),
+ ('User AS {alias}_approval_user '
+ 'ON {alias}_approval.approver_id = {alias}_approval_user.user_id',
+ [])],
+ left_joins)
+
+ self.assertEqual(
+ [('ISNULL({alias}_approval_user.email) {sort_dir}', []),
+ ('{alias}_approval_user.email {sort_dir}', [])],
+ order_by)
+
+ def testLabelSortClauses_NoSuchLabels(self):
+ sd = 'somethingelse'
+ harmonized_labels = [
+ (101, 0, 'Type-Defect'),
+ (102, 1, 'Type-Enhancement'),
+ (103, 2, 'Type-Task'),
+ (104, 0, 'Priority-High'),
+ (199, None, 'Type-Laundry'),
+ ]
+ left_joins, order_by = ast2sort._LabelSortClauses(
+ sd, harmonized_labels, self.fmt)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], order_by)
+
  def testLabelSortClauses_Normal(self):
    """Matching labels join once and order well-known then oddball labels."""
    sd = 'type'
    harmonized_labels = [
        (101, 0, 'Type-Defect'),
        (102, 1, 'Type-Enhancement'),
        (103, 2, 'Type-Task'),
        (104, 0, 'Priority-High'),
        (199, None, 'Type-Laundry'),  # rank None => "oddball" label
    ]
    left_joins, order_by = ast2sort._LabelSortClauses(
        sd, harmonized_labels, self.fmt)
    self.assertEqual(1, len(left_joins))
    # All Type-* label IDs are joined; Priority-High (104) is excluded.
    self.assertEqual(
        ('Issue2Label AS {alias} ON Issue.id = {alias}.issue_id AND '
         '{alias}.label_id IN ({all_label_ph})',
         [101, 102, 103, 199]),
        left_joins[0])
    self.assertEqual(2, len(order_by))
    # Well-known (ranked) labels come first, oddballs in a second term.
    self.assertEqual(
        ('FIELD({alias}.label_id, {wk_label_ph}) {rev_sort_dir}',
         [101, 102, 103]),
        order_by[0])
    self.assertEqual(
        ('FIELD({alias}.label_id, {odd_label_ph}) {rev_sort_dir}',
         [199]),
        order_by[1])
+
  def testCustomFieldSortClauses_Normal(self):
    """Only fields of the requested type contribute join/order clauses."""
    fd_list = [
        tracker_pb2.FieldDef(field_id=1, project_id=789,
                             field_type=tracker_pb2.FieldTypes.INT_TYPE),
        tracker_pb2.FieldDef(field_id=2, project_id=788,
                             field_type=tracker_pb2.FieldTypes.STR_TYPE),
    ]
    left_joins, order_by = ast2sort._CustomFieldSortClauses(
        fd_list, tracker_pb2.FieldTypes.INT_TYPE, 'int_value', 'Sort0', 'DESC')

    # Only the INT_TYPE field (id 1) is selected for the int_value column.
    self.assertEqual(
        left_joins, [
            ('Issue2FieldValue AS Sort0_int_value '
             'ON Issue.id = Sort0_int_value.issue_id '
             'AND Sort0_int_value.field_id IN (%s)', [1]),
        ])
    self.assertEqual(
        order_by, [
            ('ISNULL(Sort0_int_value.int_value) DESC', []),
            ('Sort0_int_value.int_value DESC', []),
        ])
+
  def testCustomFieldSortClauses_PhaseUser(self):
    """A phase-scoped user field joins phase and User tables as well."""
    fd_list = [
        tracker_pb2.FieldDef(field_id=1, project_id=789,
                             field_type=tracker_pb2.FieldTypes.INT_TYPE),
        tracker_pb2.FieldDef(field_id=2, project_id=788,
                             field_type=tracker_pb2.FieldTypes.STR_TYPE),
        tracker_pb2.FieldDef(field_id=3, project_id=788,
                             field_type=tracker_pb2.FieldTypes.USER_TYPE),
    ]
    left_joins, order_by = ast2sort._CustomFieldSortClauses(
        fd_list, tracker_pb2.FieldTypes.USER_TYPE, 'user_id', 'Sort0', 'DESC',
        phase_name='Stable')

    self.assertEqual(
        left_joins, [
            ('Issue2FieldValue AS Sort0_user_id '
             'ON Issue.id = Sort0_user_id.issue_id '
             'AND Sort0_user_id.field_id IN (%s)', [3]),
            # Phase name matching is case-insensitive on both sides.
            ('IssuePhaseDef AS Sort0_user_id_phase '
             'ON Sort0_user_id.phase_id = Sort0_user_id_phase.id '
             'AND LOWER(Sort0_user_id_phase.name) = LOWER(%s)', ['Stable']),
            ('User AS Sort0_user_id_user '
             'ON Sort0_user_id.user_id = Sort0_user_id_user.user_id', []),
        ])
    self.assertEqual(
        order_by, [
            ('ISNULL(Sort0_user_id_user.email) DESC', []),
            ('Sort0_user_id_user.email DESC', []),
        ])
+
+ def testOneSortDirective_NativeSortable(self):
+ left_joins, order_by = ast2sort._OneSortDirective(
+ 1, 'opened', self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('Issue.opened ASC', [])], order_by)
+
+ left_joins, order_by = ast2sort._OneSortDirective(
+ 1, 'stars', self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('Issue.star_count ASC', [])], order_by)
+
+ left_joins, order_by = ast2sort._OneSortDirective(
+ 1, '-stars', self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('Issue.star_count DESC', [])], order_by)
+
+ left_joins, order_by = ast2sort._OneSortDirective(
+ 1, 'componentmodified', self.harmonized_labels,
+ self.harmonized_statuses, self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('Issue.component_modified ASC', [])], order_by)
diff --git a/search/test/backendnonviewable_test.py b/search/test/backendnonviewable_test.py
new file mode 100644
index 0000000..6c50fb7
--- /dev/null
+++ b/search/test/backendnonviewable_test.py
@@ -0,0 +1,165 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.search.backendnonviewable."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import unittest
+import mox
+
+from google.appengine.api import memcache
+from google.appengine.ext import testbed
+
+from framework import permissions
+from search import backendnonviewable
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class BackendNonviewableTest(unittest.TestCase):
+
  def setUp(self):
    """Create fake services, a request for project 789 / shard 2, and mox."""
    self.services = service_manager.Services(
        project=fake.ProjectService(),
        config=fake.ConfigService(),
        issue=fake.IssueService(),
    )
    self.project = self.services.project.TestAddProject(
        'proj', project_id=789)
    self.mr = testing_helpers.MakeMonorailRequest()
    self.mr.specified_project_id = 789
    self.mr.shard_id = 2
    self.mr.invalidation_timestep = 12345

    # The servlet's request/response are not exercised; strings suffice.
    self.servlet = backendnonviewable.BackendNonviewable(
        'req', 'res', services=self.services)

    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_memcache_stub()
+
  def tearDown(self):
    """Deactivate the testbed and undo any mox stubs/expectations."""
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()
+
  def testHandleRequest(self):
    """Placeholder: HandleRequest is not yet covered."""
    pass  # TODO(jrobbins): fill in this test.
+
+ def testGetNonviewableIIDs_OwnerOrAdmin(self):
+ """Check the special case for users who are never restricted."""
+ perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ nonviewable_iids = self.servlet.GetNonviewableIIDs(
+ self.mr.cnxn, self.mr.auth.user_pb, {111}, self.project, perms, 2)
+ self.assertEqual([], nonviewable_iids)
+
  def testGetNonviewableIIDs_RegularUser(self):
    """Placeholder: restriction filtering for regular users is not covered."""
    pass  # TODO(jrobbins)
+
  def testGetNonviewableIIDs_Anon(self):
    """Placeholder: restriction filtering for anonymous users is not
    covered."""
    pass  # TODO(jrobbins)
+
  def testGetAtRiskIIDs_NothingEverAtRisk(self):
    """Handle the case where the site has no restriction labels."""
    fake_restriction_label_rows = []
    fake_restriction_label_ids = []
    fake_at_risk_iids = []
    # Record phase: expect a site-wide query for Restrict-View-* labels,
    # then a shard-scoped lookup of issues carrying those labels.
    self.mox.StubOutWithMock(self.services.config, 'GetLabelDefRowsAnyProject')
    self.services.config.GetLabelDefRowsAnyProject(
        self.mr.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
        ).AndReturn(fake_restriction_label_rows)
    self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByLabelIDs')
    self.services.issue.GetIIDsByLabelIDs(
        self.mr.cnxn, fake_restriction_label_ids, 789, 2
        ).AndReturn(fake_at_risk_iids)
    self.mox.ReplayAll()

    at_risk_iids = self.servlet.GetAtRiskIIDs(
        self.mr.cnxn, self.mr.auth.user_pb, self.mr.auth.effective_ids,
        self.project, self.mr.perms, self.mr.shard_id)
    self.mox.VerifyAll()
    self.assertEqual([], at_risk_iids)
+
  def testGetAtRiskIIDs_NoIssuesAtRiskRightNow(self):
    """Handle the case where the project has no restricted issues."""
    # Label rows are (label_id, project_id, rank, label, docstring, deleted).
    fake_restriction_label_rows = [
        (123, 789, 1, 'Restrict-View-A', 'doc', False),
        (234, 789, 2, 'Restrict-View-B', 'doc', False),
        ]
    fake_restriction_label_ids = [123, 234]
    fake_at_risk_iids = []
    self.mox.StubOutWithMock(self.services.config, 'GetLabelDefRowsAnyProject')
    self.services.config.GetLabelDefRowsAnyProject(
        self.mr.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
        ).AndReturn(fake_restriction_label_rows)
    self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByLabelIDs')
    self.services.issue.GetIIDsByLabelIDs(
        self.mr.cnxn, fake_restriction_label_ids, 789, 2
        ).AndReturn(fake_at_risk_iids)
    self.mox.ReplayAll()

    at_risk_iids = self.servlet.GetAtRiskIIDs(
        self.mr.cnxn, self.mr.auth.user_pb, self.mr.auth.effective_ids,
        self.project, self.mr.perms, self.mr.shard_id)
    self.mox.VerifyAll()
    self.assertEqual([], at_risk_iids)
+
  def testGetAtRiskIIDs_SomeAtRisk(self):
    """Handle the case where the project has some restricted issues."""
    fake_restriction_label_rows = [
        (123, 789, 1, 'Restrict-View-A', 'doc', False),
        (234, 789, 2, 'Restrict-View-B', 'doc', False),
        ]
    fake_restriction_label_ids = [123, 234]
    fake_at_risk_iids = [432, 543]
    self.mox.StubOutWithMock(self.services.config, 'GetLabelDefRowsAnyProject')
    self.services.config.GetLabelDefRowsAnyProject(
        self.mr.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
        ).AndReturn(fake_restriction_label_rows)
    self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByLabelIDs')
    self.services.issue.GetIIDsByLabelIDs(
        self.mr.cnxn, fake_restriction_label_ids, 789, 2
        ).AndReturn(fake_at_risk_iids)
    self.mox.ReplayAll()

    at_risk_iids = self.servlet.GetAtRiskIIDs(
        self.mr.cnxn, self.mr.auth.user_pb, self.mr.auth.effective_ids,
        self.project, self.mr.perms, self.mr.shard_id)
    self.mox.VerifyAll()
    # Restricted issue IDs are passed through unchanged.
    self.assertEqual([432, 543], at_risk_iids)
+
+ def testGetViewableIIDs_Anon(self):
+ """Anon users are never participants in any issues."""
+ ok_iids = self.servlet.GetViewableIIDs(
+ self.mr.cnxn, set(), 789, 2)
+ self.assertEqual([], ok_iids)
+
  def testGetViewableIIDs_NoIssues(self):
    """This visitor does not participate in any issues."""
    self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByParticipant')
    self.services.issue.GetIIDsByParticipant(
        self.mr.cnxn, {111}, [789], 2).AndReturn([])
    self.mox.ReplayAll()

    ok_iids = self.servlet.GetViewableIIDs(
        self.mr.cnxn, {111}, 789, 2)
    self.mox.VerifyAll()
    self.assertEqual([], ok_iids)
+
  def testGetViewableIIDs_SomeIssues(self):
    """This visitor participates in some issues."""
    self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByParticipant')
    self.services.issue.GetIIDsByParticipant(
        self.mr.cnxn, {111}, [789], 2).AndReturn([543, 654])
    self.mox.ReplayAll()

    ok_iids = self.servlet.GetViewableIIDs(
        self.mr.cnxn, {111}, 789, 2)
    self.mox.VerifyAll()
    # Issues the user participates in are viewable regardless of labels.
    self.assertEqual([543, 654], ok_iids)
diff --git a/search/test/backendsearch_test.py b/search/test/backendsearch_test.py
new file mode 100644
index 0000000..dd5ed18
--- /dev/null
+++ b/search/test/backendsearch_test.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.search.backendsearch."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import unittest
+import mox
+
+import settings
+from search import backendsearch
+from search import backendsearchpipeline
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class BackendSearchTest(unittest.TestCase):
+
  def setUp(self):
    """Build a fake backend search request for shard 2 of project 'proj'."""
    self.services = service_manager.Services(
        issue=fake.IssueService(),
    )
    self.mr = testing_helpers.MakeMonorailRequest(
        path='/_backend/besearch?q=Priority:High&shard=2')
    self.mr.query_project_names = ['proj']
    self.mr.specified_logged_in_user_id = 111
    self.mr.specified_me_user_ids = [222]
    self.mr.shard_id = 2
    self.servlet = backendsearch.BackendSearch(
        'req', 'res', services=self.services)
    self.mox = mox.Mox()
+
  def tearDown(self):
    """Undo any mox stubs and recorded expectations."""
    self.mox.UnsetStubs()
    self.mox.ResetAll()
+
  def testHandleRequest_NoResults(self):
    """Handle the case where the search has no results."""
    # Blank stands in for a BackendSearchPipeline; only the attributes
    # that HandleRequest reads are provided.
    pipeline = testing_helpers.Blank(
        SearchForIIDs=lambda: None,
        result_iids=[],
        search_limit_reached=False,
        error=None)
    self.mox.StubOutWithMock(backendsearchpipeline, 'BackendSearchPipeline')
    backendsearchpipeline.BackendSearchPipeline(
        self.mr, self.services, 100, ['proj'], 111, [222]
      ).AndReturn(pipeline)
    self.mox.ReplayAll()

    json_data = self.servlet.HandleRequest(self.mr)
    self.mox.VerifyAll()
    self.assertEqual([], json_data['unfiltered_iids'])
    self.assertFalse(json_data['search_limit_reached'])
    self.assertEqual(None, json_data['error'])
+
  # NOTE(review): "Pagaination" in this and the next method name is a typo
  # for "Pagination"; kept as-is since test names are discovered by pattern.
  def testHandleRequest_ResultsInOnePagainationPage(self):
    """Prefetch all result issues and return them."""
    allowed_iids = [1, 2, 3, 4, 5, 6, 7, 8]
    pipeline = testing_helpers.Blank(
        SearchForIIDs=lambda: None,
        result_iids=allowed_iids,
        search_limit_reached=False,
        error=None)
    self.mox.StubOutWithMock(backendsearchpipeline, 'BackendSearchPipeline')
    backendsearchpipeline.BackendSearchPipeline(
        self.mr, self.services, 100, ['proj'], 111, [222]
      ).AndReturn(pipeline)
    self.mox.StubOutWithMock(self.services.issue, 'GetIssues')
    # All issues are prefetched because they fit on the first pagination page.
    self.services.issue.GetIssues(self.mr.cnxn, allowed_iids, shard_id=2)
    self.mox.ReplayAll()

    json_data = self.servlet.HandleRequest(self.mr)
    self.mox.VerifyAll()
    self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8], json_data['unfiltered_iids'])
    self.assertFalse(json_data['search_limit_reached'])
    self.assertEqual(None, json_data['error'])
+
  def testHandleRequest_ResultsExceedPagainationPage(self):
    """Return all result issue IDs, but only prefetch the first page."""
    self.mr.num = 5  # page size
    pipeline = testing_helpers.Blank(
        SearchForIIDs=lambda: None,
        result_iids=[1, 2, 3, 4, 5, 6, 7, 8],
        search_limit_reached=False,
        error=None)
    self.mox.StubOutWithMock(backendsearchpipeline, 'BackendSearchPipeline')
    backendsearchpipeline.BackendSearchPipeline(
        self.mr, self.services, 100, ['proj'], 111, [222]
      ).AndReturn(pipeline)
    self.mox.StubOutWithMock(self.services.issue, 'GetIssues')
    # First 5 issues are prefetched because num=5
    self.services.issue.GetIssues(self.mr.cnxn, [1, 2, 3, 4, 5], shard_id=2)
    self.mox.ReplayAll()

    json_data = self.servlet.HandleRequest(self.mr)
    self.mox.VerifyAll()
    # All issue IDs are returned to the frontend.
    self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8], json_data['unfiltered_iids'])
    self.assertFalse(json_data['search_limit_reached'])
    self.assertEqual(None, json_data['error'])
+
+ def testHandleRequest_QueryError(self):
+ """Handle the case where the search has no results."""
+ error = ValueError('Malformed query')
+ pipeline = testing_helpers.Blank(
+ SearchForIIDs=lambda: None,
+ result_iids=[],
+ search_limit_reached=False,
+ error=error)
+ self.mox.StubOutWithMock(backendsearchpipeline, 'BackendSearchPipeline')
+ backendsearchpipeline.BackendSearchPipeline(
+ self.mr, self.services, 100, ['proj'], 111, [222]
+ ).AndReturn(pipeline)
+ self.mox.ReplayAll()
+
+ json_data = self.servlet.HandleRequest(self.mr)
+ self.mox.VerifyAll()
+ self.assertEqual([], json_data['unfiltered_iids'])
+ self.assertFalse(json_data['search_limit_reached'])
+ self.assertEqual(error.message, json_data['error'])
diff --git a/search/test/backendsearchpipeline_test.py b/search/test/backendsearchpipeline_test.py
new file mode 100644
index 0000000..212f5a6
--- /dev/null
+++ b/search/test/backendsearchpipeline_test.py
@@ -0,0 +1,250 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the backendsearchpipeline module."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import mox
+import unittest
+
+from google.appengine.api import memcache
+from google.appengine.ext import testbed
+
+import settings
+from framework import framework_helpers
+from framework import sorting
+from framework import sql
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import backendsearchpipeline
+from search import ast2ast
+from search import query2ast
+from services import service_manager
+from services import tracker_fulltext
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+class BackendSearchPipelineTest(unittest.TestCase):
+
  def setUp(self):
    """Set up fake services, a backend request, mox, and the GAE testbed."""
    self.cnxn = 'fake cnxn'
    self.services = service_manager.Services(
        user=fake.UserService(),
        usergroup=fake.UserGroupService(),
        project=fake.ProjectService(),
        issue=fake.IssueService(),
        config=fake.ConfigService(),
        cache_manager=fake.CacheManager())
    self.services.user.TestAddUser('a@example.com', 111)
    self.project = self.services.project.TestAddProject('proj', project_id=789)
    self.mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/list?q=Priority:High',
        project=self.project)
    self.mr.me_user_id = 999  # This value is not used by backend search
    self.mr.shard_id = 2
    self.mr.invalidation_timestep = 12345

    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()
    sorting.InitializeArtValues(self.services)
+
  def tearDown(self):
    """Deactivate the testbed and undo any mox stubs/expectations."""
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()
+
  def SetUpPromises(self, exp_query):
    """Record the expectation that a Promise is created for exp_query."""
    self.mox.StubOutWithMock(framework_helpers, 'Promise')
    framework_helpers.Promise(
        backendsearchpipeline._GetQueryResultIIDs, self.mr.cnxn,
        self.services, 'is:open', exp_query, [789],
        mox.IsA(tracker_pb2.ProjectIssueConfig), ['project', 'id'],
        ('Issue.shard = %s', [2]), 2, self.mr.invalidation_timestep
      ).AndReturn('fake promise 1')
+
  def testMakePromises_Anon(self):
    """A backend pipeline does not personalize the query of anon users."""
    # No logged-in user => 'me' substitution never happens.
    self.SetUpPromises('Priority:High')
    self.mox.ReplayAll()
    backendsearchpipeline.BackendSearchPipeline(
        self.mr, self.services, 100, ['proj'], None, [])
    self.mox.VerifyAll()
+
  def testMakePromises_SignedIn(self):
    """A backend pipeline immediately personalizes and runs the query."""
    self.mr.query = 'owner:me'
    # 'me' is replaced with the logged-in user's ID (111).
    self.SetUpPromises('owner:111')
    self.mox.ReplayAll()
    backendsearchpipeline.BackendSearchPipeline(
        self.mr, self.services, 100, ['proj'], 111, [111])
    self.mox.VerifyAll()
+
  def testSearchForIIDs(self):
    """SearchForIIDs unpacks (iids, capped, error) from the result promise."""
    self.SetUpPromises('Priority:High')
    self.mox.ReplayAll()
    be_pipeline = backendsearchpipeline.BackendSearchPipeline(
        self.mr, self.services, 100, ['proj'], 111, [111])
    # Replace the promise with a stub that returns a canned result tuple.
    be_pipeline.result_iids_promise = testing_helpers.Blank(
        WaitAndGetValue=lambda: ([10002, 10052], False, None))
    be_pipeline.SearchForIIDs()
    self.mox.VerifyAll()
    self.assertEqual([10002, 10052], be_pipeline.result_iids)
    self.assertEqual(False, be_pipeline.search_limit_reached)
+
+
+class BackendSearchPipelineMethodsTest(unittest.TestCase):
+
  def setUp(self):
    """Set up fake services, a request, mox, and the GAE testbed."""
    self.cnxn = 'fake cnxn'
    self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
    self.services = service_manager.Services(
        user=fake.UserService(),
        usergroup=fake.UserGroupService(),
        project=fake.ProjectService(),
        issue=fake.IssueService(),
        config=fake.ConfigService(),
        cache_manager=fake.CacheManager())
    self.services.user.TestAddUser('a@example.com', 111)
    self.project = self.services.project.TestAddProject('proj', project_id=789)
    self.mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/list?q=Priority:High',
        project=self.project)

    self.mox = mox.Mox()
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_user_stub()
    self.testbed.init_memcache_stub()
+
  def tearDown(self):
    """Deactivate the testbed and undo any mox stubs/expectations."""
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()
+
  def testSearchProjectCan_Normal(self):
    """Full-text and SQL stages both uncapped => uncapped result, no error."""
    query_ast = query2ast.ParseUserQuery(
        'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
        self.config)
    simplified_query_ast = ast2ast.PreprocessAST(
        self.cnxn, query_ast, [789], self.services, self.config)
    conj = simplified_query_ast.conjunctions[0]
    # Full-text search returns no candidate set (None) and is not capped.
    self.mox.StubOutWithMock(tracker_fulltext, 'SearchIssueFullText')
    tracker_fulltext.SearchIssueFullText(
        [789], conj, 2).AndReturn((None, False))
    self.mox.StubOutWithMock(self.services.issue, 'RunIssueQuery')
    self.services.issue.RunIssueQuery(
        self.cnxn, mox.IsA(list), mox.IsA(list), mox.IsA(list),
        shard_id=2).AndReturn(([10002, 10052], False))
    self.mox.ReplayAll()
    result, capped, err = backendsearchpipeline.SearchProjectCan(
        self.cnxn, self.services, [789], query_ast, 2, self.config)
    self.mox.VerifyAll()
    self.assertEqual([10002, 10052], result)
    self.assertFalse(capped)
    self.assertEqual(None, err)
+
  def testSearchProjectCan_DBCapped(self):
    """If the SQL query hits its result limit, the result is marked capped."""
    query_ast = query2ast.ParseUserQuery(
        'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
        self.config)
    simplified_query_ast = ast2ast.PreprocessAST(
        self.cnxn, query_ast, [789], self.services, self.config)
    conj = simplified_query_ast.conjunctions[0]
    self.mox.StubOutWithMock(tracker_fulltext, 'SearchIssueFullText')
    tracker_fulltext.SearchIssueFullText(
        [789], conj, 2).AndReturn((None, False))
    self.mox.StubOutWithMock(self.services.issue, 'RunIssueQuery')
    # Second element True => the DB query was capped.
    self.services.issue.RunIssueQuery(
        self.cnxn, mox.IsA(list), mox.IsA(list), mox.IsA(list),
        shard_id=2).AndReturn(([10002, 10052], True))
    self.mox.ReplayAll()
    result, capped, err = backendsearchpipeline.SearchProjectCan(
        self.cnxn, self.services, [789], query_ast, 2, self.config)
    self.mox.VerifyAll()
    self.assertEqual([10002, 10052], result)
    self.assertTrue(capped)
    self.assertEqual(None, err)
+
  def testSearchProjectCan_FTSCapped(self):
    """If full-text search was capped, the overall result is capped too."""
    query_ast = query2ast.ParseUserQuery(
        'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
        self.config)
    simplified_query_ast = ast2ast.PreprocessAST(
        self.cnxn, query_ast, [789], self.services, self.config)
    conj = simplified_query_ast.conjunctions[0]
    # Full-text stage returns candidates and reports it was capped.
    self.mox.StubOutWithMock(tracker_fulltext, 'SearchIssueFullText')
    tracker_fulltext.SearchIssueFullText(
        [789], conj, 2).AndReturn(([10002, 10052], True))
    self.mox.StubOutWithMock(self.services.issue, 'RunIssueQuery')
    self.services.issue.RunIssueQuery(
        self.cnxn, mox.IsA(list), mox.IsA(list), mox.IsA(list),
        shard_id=2).AndReturn(([10002, 10052], False))
    self.mox.ReplayAll()
    result, capped, err = backendsearchpipeline.SearchProjectCan(
        self.cnxn, self.services, [789], query_ast, 2, self.config)
    self.mox.VerifyAll()
    self.assertEqual([10002, 10052], result)
    self.assertTrue(capped)
    self.assertEqual(None, err)
+
  def testGetQueryResultIIDs(self):
    """_GetQueryResultIIDs runs the query and memcaches the sharded result."""
    sd = ['project', 'id']
    slice_term = ('Issue.shard = %s', [2])
    query_ast = query2ast.ParseUserQuery(
        'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
        self.config)
    query_ast = backendsearchpipeline._FilterSpam(query_ast)

    self.mox.StubOutWithMock(backendsearchpipeline, 'SearchProjectCan')
    backendsearchpipeline.SearchProjectCan(
        self.cnxn, self.services, [789], query_ast, 2, self.config,
        sort_directives=sd, where=[slice_term],
        query_desc='getting query issue IDs'
      ).AndReturn(([10002, 10052], False, None))
    self.mox.ReplayAll()
    result, capped, err = backendsearchpipeline._GetQueryResultIIDs(
        self.cnxn, self.services, 'is:open', 'Priority:High',
        [789], self.config, sd, slice_term, 2, 12345)
    self.mox.VerifyAll()
    self.assertEqual([10002, 10052], result)
    self.assertFalse(capped)
    self.assertEqual(None, err)
    # Memcache key is 'project_ids;can;query;sort directives;shard_id' and
    # the value pairs the IIDs with the invalidation timestep.
    self.assertEqual(
        ([10002, 10052], 12345),
        memcache.get('789;is:open;Priority:High;project id;2'))
+
  def testGetSpamQueryResultIIDs(self):
    """An explicit is:spam query passes through _FilterSpam and is cached."""
    sd = ['project', 'id']
    slice_term = ('Issue.shard = %s', [2])
    query_ast = query2ast.ParseUserQuery(
        'Priority:High is:spam', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
        self.config)

    query_ast = backendsearchpipeline._FilterSpam(query_ast)

    self.mox.StubOutWithMock(backendsearchpipeline, 'SearchProjectCan')
    backendsearchpipeline.SearchProjectCan(
        self.cnxn, self.services, [789], query_ast, 2, self.config,
        sort_directives=sd, where=[slice_term],
        query_desc='getting query issue IDs'
      ).AndReturn(([10002, 10052], False, None))
    self.mox.ReplayAll()
    result, capped, err = backendsearchpipeline._GetQueryResultIIDs(
        self.cnxn, self.services, 'is:open', 'Priority:High is:spam',
        [789], self.config, sd, slice_term, 2, 12345)
    self.mox.VerifyAll()
    self.assertEqual([10002, 10052], result)
    self.assertFalse(capped)
    self.assertEqual(None, err)
    # The raw user query (including is:spam) appears in the memcache key.
    self.assertEqual(
        ([10002, 10052], 12345),
        memcache.get('789;is:open;Priority:High is:spam;project id;2'))
diff --git a/search/test/frontendsearchpipeline_test.py b/search/test/frontendsearchpipeline_test.py
new file mode 100644
index 0000000..b2e7fb3
--- /dev/null
+++ b/search/test/frontendsearchpipeline_test.py
@@ -0,0 +1,1339 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the frontendsearchpipeline module."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import mox
+import unittest
+
+from google.appengine.api import memcache
+from google.appengine.api import modules
+from google.appengine.ext import testbed
+from google.appengine.api import urlfetch
+
+import settings
+from framework import framework_helpers
+from framework import sorting
+from framework import urls
+from proto import ast_pb2
+from proto import project_pb2
+from proto import tracker_pb2
+from search import frontendsearchpipeline
+from search import searchpipeline
+from search import query2ast
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+# Just an example timestamp. The value does not matter.
+NOW = 2444950132
+
+
+class FrontendSearchPipelineTest(unittest.TestCase):
+
+ def setUp(self):
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService(),
+ cache_manager=fake.CacheManager())
+ self.services.user.TestAddUser('a@example.com', 111)
+ self.project = self.services.project.TestAddProject('proj', project_id=789)
+ self.mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/list', project=self.project)
+ self.mr.me_user_id = 111
+
+ self.issue_1 = fake.MakeTestIssue(
+ 789, 1, 'one', 'New', 111, labels=['Priority-High'])
+ self.services.issue.TestAddIssue(self.issue_1)
+ self.issue_2 = fake.MakeTestIssue(
+ 789, 2, 'two', 'New', 111, labels=['Priority-Low'])
+ self.services.issue.TestAddIssue(self.issue_2)
+ self.issue_3 = fake.MakeTestIssue(
+ 789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
+ self.services.issue.TestAddIssue(self.issue_3)
+ self.mr.sort_spec = 'Priority'
+
+ self.cnxn = self.mr.cnxn
+ self.project = self.mr.project
+ self.auth = self.mr.auth
+ self.me_user_id = self.mr.me_user_id
+ self.query = self.mr.query
+ self.query_project_names = self.mr.query_project_names
+ self.items_per_page = self.mr.num # defaults to 100
+ self.paginate_start = self.mr.start
+ self.paginate_end = self.paginate_start + self.items_per_page
+ self.can = self.mr.can
+ self.group_by_spec = self.mr.group_by_spec
+ self.sort_spec = self.mr.sort_spec
+ self.warnings = self.mr.warnings
+ self.errors = self.mr.errors
+ self.use_cached_searches = self.mr.use_cached_searches
+ self.profiler = self.mr.profiler
+
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+ sorting.InitializeArtValues(self.services)
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testSearchForIIDs_AllResultsCached_AllAtRiskCached(self):
+ unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
+ nonviewable_iids = {1: set()}
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
+ frontendsearchpipeline._StartBackendSearch(
+ self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
+ unfiltered_iids, {}, nonviewable_iids, set(), self.services,
+ self.me_user_id, self.auth.user_id or 0, self.paginate_end,
+ self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
+ self.warnings, self.use_cached_searches).AndReturn([])
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
+ frontendsearchpipeline._FinishBackendSearch([])
+ self.mox.ReplayAll()
+
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ pipeline.unfiltered_iids = unfiltered_iids
+ pipeline.nonviewable_iids = nonviewable_iids
+ pipeline.SearchForIIDs()
+ self.mox.VerifyAll()
+ self.assertEqual(2, pipeline.total_count)
+ self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])
+
+ def testSearchForIIDs_CrossProject_AllViewable(self):
+ self.services.project.TestAddProject('other', project_id=790)
+ unfiltered_iids = {(1, 'p:v'): [1001, 1011, 2001]}
+ nonviewable_iids = {1: set()}
+ self.query_project_names = ['other']
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
+ frontendsearchpipeline._StartBackendSearch(
+ self.cnxn, ['other', 'proj'], [789, 790],
+ mox.IsA(tracker_pb2.ProjectIssueConfig), unfiltered_iids, {},
+ nonviewable_iids, set(), self.services,
+ self.me_user_id, self.auth.user_id or 0, self.paginate_end,
+ self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
+ self.warnings, self.use_cached_searches).AndReturn([])
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
+ frontendsearchpipeline._FinishBackendSearch([])
+ self.mox.ReplayAll()
+
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+
+ pipeline.unfiltered_iids = unfiltered_iids
+ pipeline.nonviewable_iids = nonviewable_iids
+ pipeline.SearchForIIDs()
+ self.mox.VerifyAll()
+ self.assertEqual(3, pipeline.total_count)
+ self.assertEqual([1001, 1011, 2001], pipeline.filtered_iids[(1, 'p:v')])
+
+ def testSearchForIIDs_CrossProject_MembersOnlyOmitted(self):
+ self.services.project.TestAddProject(
+ 'other', project_id=790, access=project_pb2.ProjectAccess.MEMBERS_ONLY)
+ unfiltered_iids = {(1, 'p:v'): [1001, 1011]}
+ nonviewable_iids = {1: set()}
+ # project 'other' gets filtered out before the backend call.
+ self.mr.query_project_names = ['other']
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
+ frontendsearchpipeline._StartBackendSearch(
+ self.cnxn, ['proj'], [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
+ unfiltered_iids, {}, nonviewable_iids, set(), self.services,
+ self.me_user_id, self.auth.user_id or 0, self.paginate_end,
+ self.query.split(' OR '), self.can, self.group_by_spec, self.sort_spec,
+ self.warnings, self.use_cached_searches).AndReturn([])
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
+ frontendsearchpipeline._FinishBackendSearch([])
+ self.mox.ReplayAll()
+
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ pipeline.unfiltered_iids = unfiltered_iids
+ pipeline.nonviewable_iids = nonviewable_iids
+ pipeline.SearchForIIDs()
+ self.mox.VerifyAll()
+ self.assertEqual(2, pipeline.total_count)
+ self.assertEqual([1001, 1011], pipeline.filtered_iids[(1, 'p:v')])
+
+ def testMergeAndSortIssues_EmptyResult(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ pipeline.filtered_iids = {0: [], 1: [], 2: []}
+
+ pipeline.MergeAndSortIssues()
+ self.assertEqual([], pipeline.allowed_iids)
+ self.assertEqual([], pipeline.allowed_results)
+ self.assertEqual({}, pipeline.users_by_id)
+
+ def testMergeAndSortIssues_Normal(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ # In this unit test case we are not calling SearchForIIDs(), instead just
+ # set pipeline.filtered_iids directly.
+ pipeline.filtered_iids = {
+ 0: [],
+ 1: [self.issue_1.issue_id],
+ 2: [self.issue_2.issue_id],
+ 3: [self.issue_3.issue_id]
+ }
+
+ pipeline.MergeAndSortIssues()
+ self.assertEqual(
+ [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
+ pipeline.allowed_iids)
+ self.assertEqual(
+ [self.issue_1, self.issue_3, self.issue_2], # high, medium, low.
+ pipeline.allowed_results)
+ self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))
+
+ def testDetermineIssuePosition_Normal(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ # In this unit test case we are not calling SearchForIIDs(), instead just
+ # set pipeline.filtered_iids directly.
+ pipeline.filtered_iids = {
+ 0: [],
+ 1: [self.issue_1.issue_id],
+ 2: [self.issue_2.issue_id],
+ 3: [self.issue_3.issue_id]
+ }
+
+ prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ self.assertEqual(self.issue_1.issue_id, prev_iid)
+ self.assertEqual(1, index)
+ self.assertEqual(self.issue_2.issue_id, next_iid)
+
+ def testDetermineIssuePosition_NotInResults(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ # In this unit test case we are not calling SearchForIIDs(), instead just
+ # set pipeline.filtered_iids directly.
+ pipeline.filtered_iids = {
+ 0: [],
+ 1: [self.issue_1.issue_id],
+ 2: [self.issue_2.issue_id],
+ 3: []
+ }
+
+ prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ self.assertEqual(None, prev_iid)
+ self.assertEqual(None, index)
+ self.assertEqual(None, next_iid)
+
+ def testDetermineIssuePositionInShard_IssueIsInShard(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ # Let's assume issues 1, 2, and 3 are all in the same shard.
+ pipeline.filtered_iids = {
+ 0: [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
+ }
+
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_1, {})
+ self.assertEqual(None, prev_cand)
+ self.assertEqual(0, index)
+ self.assertEqual(self.issue_3, next_cand)
+
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_3, {})
+ self.assertEqual(self.issue_1, prev_cand)
+ self.assertEqual(1, index)
+ self.assertEqual(self.issue_2, next_cand)
+
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_2, {})
+ self.assertEqual(self.issue_3, prev_cand)
+ self.assertEqual(2, index)
+ self.assertEqual(None, next_cand)
+
+ def testDetermineIssuePositionInShard_IssueIsNotInShard(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ pipeline.filtered_iids = {
+ 0: [self.issue_2.issue_id, self.issue_3.issue_id],
+ }
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_1, {})
+ self.assertEqual(None, prev_cand)
+ self.assertEqual(0, index)
+ self.assertEqual(self.issue_3, next_cand)
+
+ pipeline.filtered_iids = {
+ 0: [self.issue_1.issue_id, self.issue_2.issue_id],
+ }
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_3, {})
+ self.assertEqual(self.issue_1, prev_cand)
+ self.assertEqual(1, index)
+ self.assertEqual(self.issue_2, next_cand)
+
+ pipeline.filtered_iids = {
+ 0: [self.issue_1.issue_id, self.issue_3.issue_id],
+ }
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_2, {})
+ self.assertEqual(self.issue_3, prev_cand)
+ self.assertEqual(2, index)
+ self.assertEqual(None, next_cand)
+
+ def testFetchAllSamples_Empty(self):
+ filtered_iids = {}
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
+ filtered_iids)
+ self.assertEqual({}, samples_by_shard)
+ self.assertEqual({}, sample_iids_to_shard)
+
+ def testFetchAllSamples_SmallResultsPerShard(self):
+ filtered_iids = {
+ 0: [100, 110, 120],
+ 1: [101, 111, 121],
+ }
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+
+ samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
+ filtered_iids)
+ self.assertEqual(2, len(samples_by_shard))
+ self.assertEqual(0, len(sample_iids_to_shard))
+
+ def testFetchAllSamples_Normal(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ issues = self.MakeIssues(23)
+ filtered_iids = {
+ 0: [issue.issue_id for issue in issues],
+ }
+
+ samples_by_shard, sample_iids_to_shard = pipeline._FetchAllSamples(
+ filtered_iids)
+ self.assertEqual(1, len(samples_by_shard))
+ self.assertEqual(2, len(samples_by_shard[0]))
+ self.assertEqual(2, len(sample_iids_to_shard))
+ for sample_iid in sample_iids_to_shard:
+ shard_key = sample_iids_to_shard[sample_iid]
+ self.assertIn(sample_iid, filtered_iids[shard_key])
+
+ def testChooseSampleIssues_Empty(self):
+ """When the search gave no results, there cannot be any samples."""
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ issue_ids = []
+ on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
+ self.assertEqual({}, on_hand_issues)
+ self.assertEqual([], needed_iids)
+
+ def testChooseSampleIssues_Small(self):
+ """When the search gave few results, don't bother with samples."""
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ issue_ids = [78901, 78902]
+ on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
+ self.assertEqual({}, on_hand_issues)
+ self.assertEqual([], needed_iids)
+
+ def MakeIssues(self, num_issues):
+ issues = []
+ for i in range(num_issues):
+ issue = fake.MakeTestIssue(789, 100 + i, 'samp test', 'New', 111)
+ issues.append(issue)
+ self.services.issue.TestAddIssue(issue)
+ return issues
+
+ def testChooseSampleIssues_Normal(self):
+ """We will choose at least one sample for every 10 results in a shard."""
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ issues = self.MakeIssues(23)
+ issue_ids = [issue.issue_id for issue in issues]
+ on_hand_issues, needed_iids = pipeline._ChooseSampleIssues(issue_ids)
+ self.assertEqual({}, on_hand_issues)
+ self.assertEqual(2, len(needed_iids))
+ for sample_iid in needed_iids:
+ self.assertIn(sample_iid, issue_ids)
+
+ def testLookupNeededUsers(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+
+ pipeline._LookupNeededUsers([])
+ self.assertEqual([], list(pipeline.users_by_id.keys()))
+
+ pipeline._LookupNeededUsers([self.issue_1, self.issue_2, self.issue_3])
+ self.assertEqual([0, 111], list(pipeline.users_by_id.keys()))
+
+ def testPaginate_List(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.cnxn,
+ self.services,
+ self.auth,
+ self.me_user_id,
+ self.query,
+ self.query_project_names,
+ self.items_per_page,
+ self.paginate_start,
+ self.can,
+ self.group_by_spec,
+ self.sort_spec,
+ self.warnings,
+ self.errors,
+ self.use_cached_searches,
+ self.profiler,
+ project=self.project)
+ pipeline.allowed_iids = [
+ self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
+ pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
+ pipeline.total_count = len(pipeline.allowed_results)
+ pipeline.Paginate()
+ self.assertEqual(
+ [self.issue_1, self.issue_2, self.issue_3],
+ pipeline.visible_results)
+ self.assertFalse(pipeline.pagination.limit_reached)
+
+
+class FrontendSearchPipelineMethodsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+
+ self.project_id = 789
+ self.default_config = tracker_bizobj.MakeDefaultProjectIssueConfig(
+ self.project_id)
+ self.services = service_manager.Services(
+ project=fake.ProjectService())
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=self.project_id)
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testMakeBackendCallback(self):
+ called_with = []
+
+ def func(a, b):
+ called_with.append((a, b))
+
+ callback = frontendsearchpipeline._MakeBackendCallback(func, 10, 20)
+ callback()
+ self.assertEqual([(10, 20)], called_with)
+
+ def testParseUserQuery_CheckQuery(self):
+ warnings = []
+ msg = frontendsearchpipeline._CheckQuery(
+ 'cnxn', self.services, 'ok query', self.default_config,
+ [self.project_id], True, warnings=warnings)
+ self.assertIsNone(msg)
+ self.assertEqual([], warnings)
+
+ warnings = []
+ msg = frontendsearchpipeline._CheckQuery(
+ 'cnxn', self.services, 'modified:0-0-0', self.default_config,
+ [self.project_id], True, warnings=warnings)
+ self.assertEqual(
+ 'Could not parse date: 0-0-0',
+ msg)
+
+ warnings = []
+ msg = frontendsearchpipeline._CheckQuery(
+ 'cnxn', self.services, 'blocking:3.14', self.default_config,
+ [self.project_id], True, warnings=warnings)
+ self.assertEqual(
+ 'Could not parse issue reference: 3.14',
+ msg)
+ self.assertEqual([], warnings)
+
+ def testStartBackendSearch(self):
+ # TODO(jrobbins): write this test.
+ pass
+
+ def testFinishBackendSearch(self):
+ # TODO(jrobbins): write this test.
+ pass
+
+ def testGetProjectTimestamps_NoneSet(self):
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [], [])
+ self.assertEqual({}, project_shard_timestamps)
+
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
+ self.assertEqual({}, project_shard_timestamps)
+
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
+ self.assertEqual({}, project_shard_timestamps)
+
+ def testGetProjectTimestamps_SpecificProjects(self):
+ memcache.set('789;0', NOW)
+ memcache.set('789;1', NOW - 1000)
+ memcache.set('789;2', NOW - 3000)
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [789], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
+ self.assertEqual(
+ { (789, 0): NOW,
+ (789, 1): NOW - 1000,
+ (789, 2): NOW - 3000,
+ },
+ project_shard_timestamps)
+
+ memcache.set('790;0', NOW)
+ memcache.set('790;1', NOW - 10000)
+ memcache.set('790;2', NOW - 30000)
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [789, 790], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
+ self.assertEqual(
+ { (789, 0): NOW,
+ (789, 1): NOW - 1000,
+ (789, 2): NOW - 3000,
+ (790, 0): NOW,
+ (790, 1): NOW - 10000,
+ (790, 2): NOW - 30000,
+ },
+ project_shard_timestamps)
+
+ def testGetProjectTimestamps_SiteWide(self):
+ memcache.set('all;0', NOW)
+ memcache.set('all;1', NOW - 10000)
+ memcache.set('all;2', NOW - 30000)
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [], [(0, (0, 'p:v')), (1, (1, 'p:v')), (2, (2, 'p:v'))])
+ self.assertEqual(
+ { ('all', 0): NOW,
+ ('all', 1): NOW - 10000,
+ ('all', 2): NOW - 30000,
+ },
+ project_shard_timestamps)
+
+ def testGetNonviewableIIDs_SearchMissSoNoOp(self):
+ """If search cache missed, don't bother looking up nonviewable IIDs."""
+ unfiltered_iids_dict = {} # No cached search results found.
+ rpc_tuples = [] # Nothing should accumulate here in this case.
+ nonviewable_iids = {} # Nothing should accumulate here in this case.
+ processed_invalidations_up_to = 12345
+ frontendsearchpipeline._GetNonviewableIIDs(
+ [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
+ nonviewable_iids, {}, processed_invalidations_up_to, True)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({}, nonviewable_iids)
+
+ def testGetNonviewableIIDs_SearchHitThenNonviewableHit(self):
+ """If search cache hit, get nonviewable info from cache."""
+ unfiltered_iids_dict = {
+ 1: [10001, 10021],
+ 2: ['the search result issue_ids do not matter'],
+ }
+ rpc_tuples = [] # Nothing should accumulate here in this case.
+ nonviewable_iids = {} # Our mock results should end up here.
+ processed_invalidations_up_to = 12345
+ memcache.set('nonviewable:789;111;1',
+ ([10001, 10031], processed_invalidations_up_to - 10))
+ memcache.set('nonviewable:789;111;2',
+ ([10002, 10042], processed_invalidations_up_to - 30))
+
+ project_shard_timestamps = {
+ (789, 1): 0, # not stale
+ (789, 2): 0, # not stale
+ }
+ frontendsearchpipeline._GetNonviewableIIDs(
+ [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
+ nonviewable_iids, project_shard_timestamps,
+ processed_invalidations_up_to, True)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({1: {10001, 10031}, 2: {10002, 10042}}, nonviewable_iids)
+
+ def testGetNonviewableIIDs_SearchHitNonviewableMissSoStartRPC(self):
+ """If search hit and n-v miss, create RPCs to get nonviewable info."""
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ unfiltered_iids_dict = {
+ 2: ['the search result issue_ids do not matter'],
+ }
+ rpc_tuples = [] # One RPC object should accumulate here.
+ nonviewable_iids = {} # This will stay empty until RPCs complete.
+ processed_invalidations_up_to = 12345
+ # Nothing is set in memcache for this case.
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._GetNonviewableIIDs(
+ [789], 111, list(unfiltered_iids_dict.keys()), rpc_tuples,
+ nonviewable_iids, {}, processed_invalidations_up_to, True)
+ self.mox.VerifyAll()
+ _, sid_0, rpc_0 = rpc_tuples[0]
+ self.assertEqual(2, sid_0)
+ self.assertEqual({}, nonviewable_iids)
+ self.assertEqual(a_fake_rpc, rpc_0)
+ self.assertIsNotNone(a_fake_rpc.callback)
+
+ def testAccumulateNonviewableIIDs_MemcacheHitForProject(self):
+ processed_invalidations_up_to = 12345
+ cached_dict = {
+ '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
+ '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
+ }
+ rpc_tuples = [] # Nothing should accumulate here.
+ nonviewable_iids = {1: {10001}} # This will gain the shard 2 values.
+ project_shard_timestamps = {
+ (789, 1): 0, # not stale
+ (789, 2): 0, # not stale
+ }
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ 789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
+ rpc_tuples, processed_invalidations_up_to)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)
+
+ def testAccumulateNonviewableIIDs_MemcacheStaleForProject(self):
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ processed_invalidations_up_to = 12345
+ cached_dict = {
+ '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
+ '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
+ }
+ rpc_tuples = [] # Nothing should accumulate here.
+ nonviewable_iids = {1: {10001}} # Nothing added here until RPC completes.
+ project_shard_timestamps = {
+ (789, 1): 0, # not stale
+ (789, 2): processed_invalidations_up_to, # stale!
+ }
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ 789, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
+ rpc_tuples, processed_invalidations_up_to)
+ self.mox.VerifyAll()
+ _, sid_0, rpc_0 = rpc_tuples[0]
+ self.assertEqual(2, sid_0)
+ self.assertEqual(a_fake_rpc, rpc_0)
+ self.assertIsNotNone(a_fake_rpc.callback)
+ self.assertEqual({1: {10001}}, nonviewable_iids)
+
+ def testAccumulateNonviewableIIDs_MemcacheHitForWholeSite(self):
+ processed_invalidations_up_to = 12345
+ cached_dict = {
+ 'all;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
+ 'all;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
+ }
+ rpc_tuples = [] # Nothing should accumulate here.
+ nonviewable_iids = {1: {10001}} # This will gain the shard 2 values.
+ project_shard_timestamps = {
+ (None, 1): 0, # not stale
+ (None, 2): 0, # not stale
+ }
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ None, 111, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
+ rpc_tuples, processed_invalidations_up_to)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)
+
+ def testAccumulateNonviewableIIDs_MemcacheMissSoStartRPC(self):
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ cached_dict = {} # Nothing here, so it is an at-risk cache miss.
+ rpc_tuples = [] # One RPC should accumulate here.
+ nonviewable_iids = {1: {10001}} # Nothing added here until RPC completes.
+ processed_invalidations_up_to = 12345
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ 789, 111, 2, cached_dict, nonviewable_iids, {}, rpc_tuples,
+ processed_invalidations_up_to)
+ self.mox.VerifyAll()
+ _, sid_0, rpc_0 = rpc_tuples[0]
+ self.assertEqual(2, sid_0)
+ self.assertEqual(a_fake_rpc, rpc_0)
+ self.assertIsNotNone(a_fake_rpc.callback)
+ self.assertEqual({1: {10001}}, nonviewable_iids)
+
+ def testGetCachedSearchResults(self):
+ # TODO(jrobbins): Write this test.
+ pass
+
+ def testMakeBackendRequestHeaders(self):
+ headers = frontendsearchpipeline._MakeBackendRequestHeaders(False)
+ self.assertNotIn('X-AppEngine-FailFast', headers)
+ headers = frontendsearchpipeline._MakeBackendRequestHeaders(True)
+ self.assertEqual('Yes', headers['X-AppEngine-FailFast'])
+
+ def testStartBackendSearchCall(self):
+ self.mox.StubOutWithMock(urlfetch, 'create_rpc')
+ self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
+ self.mox.StubOutWithMock(modules, 'get_hostname')
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
+ a_fake_rpc)
+ modules.get_hostname(module='besearch')
+ urlfetch.make_fetch_call(
+ a_fake_rpc, mox.StrContains(
+ urls.BACKEND_SEARCH + '?groupby=cc&invalidation_timestep=12345&'
+ +'logged_in_user_id=777&me_user_ids=555&'
+ +'num=201&projects=proj&q=priority%3Dhigh&shard_id=2&start=0'),
+ follow_redirects=False,
+ headers=mox.IsA(dict))
+ self.mox.ReplayAll()
+
+ processed_invalidations_up_to = 12345
+ me_user_ids = [555]
+ logged_in_user_id = 777
+ new_url_num = 201
+ frontendsearchpipeline._StartBackendSearchCall(
+ ['proj'], (2, 'priority=high'),
+ processed_invalidations_up_to,
+ me_user_ids,
+ logged_in_user_id,
+ new_url_num,
+ group_by_spec='cc')
+ self.mox.VerifyAll()
+
  def testStartBackendSearchCall_SortAndGroup(self):
    """Both sort= and groupby= specs are encoded into the backend URL."""
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='besearch')
    # Expect the fetch URL to carry every query parameter, including
    # groupby=bar and sort=foo.
    urlfetch.make_fetch_call(
        a_fake_rpc,
        mox.StrContains(
            urls.BACKEND_SEARCH + '?groupby=bar&' +
            'invalidation_timestep=12345&' +
            'logged_in_user_id=777&me_user_ids=555&num=201&projects=proj&' +
            'q=priority%3Dhigh&shard_id=2&sort=foo&start=0'),
        follow_redirects=False,
        headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    me_user_ids = [555]
    logged_in_user_id = 777
    new_url_num = 201
    sort_spec = 'foo'
    group_by_spec = 'bar'
    frontendsearchpipeline._StartBackendSearchCall(
        ['proj'], (2, 'priority=high'),
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        sort_spec=sort_spec,
        group_by_spec=group_by_spec)
    self.mox.VerifyAll()
+
  def testStartBackendNonviewableCall(self):
    """A nonviewable-IIDs RPC is issued against the besearch backend."""
    self.mox.StubOutWithMock(urlfetch, 'create_rpc')
    self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
    self.mox.StubOutWithMock(modules, 'get_hostname')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
        a_fake_rpc)
    modules.get_hostname(module='besearch')
    urlfetch.make_fetch_call(
        a_fake_rpc, mox.StrContains(urls.BACKEND_NONVIEWABLE),
        follow_redirects=False, headers=mox.IsA(dict))
    self.mox.ReplayAll()

    processed_invalidations_up_to = 12345
    frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to)
    self.mox.VerifyAll()
+
  def testHandleBackendSearchResponse_500(self):
    """An HTTP 500 from the backend records an error response and no retry."""
    response_str = 'There was a problem processing the query.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=500))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    # remaining_retries=0 here, so the handler must not schedule a retry.
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 0, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertIn(2, error_responses)
+
  def testHandleBackendSearchResponse_Error(self):
    """A 200 response whose JSON reports an error yields empty results."""
    # The first line imitates the backend's JSON guard prefix, which the
    # response handler presumably strips before parsing — TODO confirm.
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [],'
        ' "search_limit_reached": false,'
        ' "error": "Invalid query"'
        '}'
        )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: []}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)
    self.assertEqual({2}, error_responses)
+
  def testHandleBackendSearchResponse_Normal(self):
    """A well-formed 200 response stores the shard's IIDs and limit flag."""
    response_str = (
        '})]\'\n'
        '{'
        ' "unfiltered_iids": [10002, 10042],'
        ' "search_limit_reached": false'
        '}'
        )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str, status_code=200))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    filtered_iids = {}  # Search results should accumulate here, per-shard.
    search_limit_reached = {}  # Booleans accumulate here, per-shard.
    processed_invalidations_up_to = 12345

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100
    error_responses = set()
    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: [10002, 10042]}, filtered_iids)
    self.assertEqual({2: False}, search_limit_reached)
+
  def testHandleBackendSearchResponse_TriggersRetry(self):
    """A response with no content causes one retry RPC to be scheduled."""
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # New RPC should be appended here
    filtered_iids = {}  # No change here until retry completes.
    search_limit_reached = {}  # No change here until retry completes.
    processed_invalidations_up_to = 12345
    error_responses = set()

    me_user_ids = [111]
    logged_in_user_id = 0
    new_url_num = 100

    self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    # Record the expected retry call; failfast=False distinguishes a retry
    # from the initial attempt.
    rpc = frontendsearchpipeline._StartBackendSearchCall(
        ['proj'],
        2,
        processed_invalidations_up_to,
        me_user_ids,
        logged_in_user_id,
        new_url_num,
        can=1,
        group_by_spec=None,
        sort_spec=None,
        failfast=False).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendSearchResponse(
        ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids, search_limit_reached,
        processed_invalidations_up_to, error_responses, me_user_ids,
        logged_in_user_id, new_url_num, 1, None, None)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual({}, filtered_iids)
    self.assertEqual({}, search_limit_reached)
+
  def testHandleBackendNonviewableResponse_Error(self):
    """An HTTP 500 response does not record any nonviewable IIDs."""
    response_str = 'There was an error.'
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=500
        ))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk issue IDs should accumulate here,
                           # per-shard.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    # remaining_retries=0, so no retry should be started.
    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 0, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    # NOTE(review): assertNotEqual is a weak check; asserting the exact
    # expected contents of nonviewable_iids would be stronger.
    self.assertNotEqual({2: {10002, 10042}}, nonviewable_iids)
+
  def testHandleBackendNonviewableResponse_Normal(self):
    """A well-formed 200 response stores the shard's nonviewable IID set."""
    response_str = (
        '})]\'\n'
        '{'
        ' "nonviewable": [10002, 10042]'
        '}'
        )
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(
            content=response_str,
            status_code=200
        ))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # Nothing should be added for this case.
    nonviewable_iids = {}  # At-risk issue IDs should accumulate here,
                           # per-shard.
    processed_invalidations_up_to = 12345

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.assertEqual([], rpc_tuples)
    self.assertEqual({2: {10002, 10042}}, nonviewable_iids)
+
  def testHandleBackendAtRiskResponse_TriggersRetry(self):
    """An empty nonviewable response causes one retry RPC to be scheduled.

    NOTE(review): the method name says "AtRisk" but this exercises
    _HandleBackendNonviewableResponse; consider renaming for clarity.
    """
    response_str = None
    rpc = testing_helpers.Blank(
        get_result=lambda: testing_helpers.Blank(content=response_str))
    rpc_tuple = (NOW, 2, rpc)
    rpc_tuples = []  # New RPC should be appended here
    nonviewable_iids = {}  # No change here until retry completes.
    processed_invalidations_up_to = 12345

    self.mox.StubOutWithMock(
        frontendsearchpipeline, '_StartBackendNonviewableCall')
    a_fake_rpc = testing_helpers.Blank(callback=None)
    rpc = frontendsearchpipeline._StartBackendNonviewableCall(
        789, 111, 2, processed_invalidations_up_to, failfast=False
        ).AndReturn(a_fake_rpc)
    self.mox.ReplayAll()

    frontendsearchpipeline._HandleBackendNonviewableResponse(
        789, 111, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
        processed_invalidations_up_to)
    self.mox.VerifyAll()
    _, retry_shard_id, retry_rpc = rpc_tuples[0]
    self.assertEqual(2, retry_shard_id)
    self.assertIsNotNone(retry_rpc.callback)
    self.assertEqual(a_fake_rpc, retry_rpc)
    self.assertEqual({}, nonviewable_iids)
+
  def testSortIssues(self):
    """_SortIssues orders issues according to the given sort spec."""
    services = service_manager.Services(
        cache_manager=fake.CacheManager())
    sorting.InitializeArtValues(services)

    issue_1 = fake.MakeTestIssue(
        789, 1, 'one', 'New', 111, labels=['Priority-High'])
    issue_2 = fake.MakeTestIssue(
        789, 2, 'two', 'New', 111, labels=['Priority-Low'])
    issue_3 = fake.MakeTestIssue(
        789, 3, 'three', 'New', 111, labels=['Priority-Medium'])
    issues = [issue_1, issue_2, issue_3]
    config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)

    sorted_issues = frontendsearchpipeline._SortIssues(
        issues, config, {}, '', 'priority')

    self.assertEqual(
        [issue_1, issue_3, issue_2],  # Order is high, medium, low.
        sorted_issues)
+
+
class FrontendSearchPipelineShardMethodsTest(unittest.TestCase):
  """Tests for the sharded-IID helper functions in frontendsearchpipeline."""

  def setUp(self):
    # Map of (shard_id, subquery) -> list of issue IDs found in that shard.
    self.sharded_iids = {
        (0, 'p:v'): [10, 20, 30, 40, 50],
        (1, 'p:v'): [21, 41, 61, 81],
        (2, 'p:v'): [42, 52, 62, 72, 102],
        (3, 'p:v'): [],
        }

  def testTotalLength_Empty(self):
    """If there were no results, the length of the sharded list is zero."""
    self.assertEqual(0, frontendsearchpipeline._TotalLength({}))

  def testTotalLength_Normal(self):
    """The length of the sharded list is the sum of the shard lengths."""
    self.assertEqual(
        14, frontendsearchpipeline._TotalLength(self.sharded_iids))

  def testReverseShards_Empty(self):
    """Reversing an empty sharded list is still empty."""
    empty_sharded_iids = {}
    frontendsearchpipeline._ReverseShards(empty_sharded_iids)
    self.assertEqual({}, empty_sharded_iids)

  def testReverseShards_Normal(self):
    """Reversing a sharded list reverses each shard."""
    frontendsearchpipeline._ReverseShards(self.sharded_iids)
    self.assertEqual(
        {(0, 'p:v'): [50, 40, 30, 20, 10],
         (1, 'p:v'): [81, 61, 41, 21],
         (2, 'p:v'): [102, 72, 62, 52, 42],
         (3, 'p:v'): [],
         },
        self.sharded_iids)

  def testTrimShardedIIDs_Empty(self):
    """If the sharded list is empty, trimming it makes no change."""
    empty_sharded_iids = {}
    frontendsearchpipeline._TrimEndShardedIIDs(empty_sharded_iids, [], 12)
    self.assertEqual({}, empty_sharded_iids)

    # Samples that refer to shards absent from the dict are ignored.
    frontendsearchpipeline._TrimEndShardedIIDs(
        empty_sharded_iids,
        [(100, (0, 'p:v')), (88, (8, 'p:v')), (99, (9, 'p:v'))],
        12)
    self.assertEqual({}, empty_sharded_iids)

  def testTrimShardedIIDs_NoSamples(self):
    """If there are no samples, we don't trim off any IIDs."""
    orig_sharded_iids = {
        shard_id: iids[:] for shard_id, iids in self.sharded_iids.items()}
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 12)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, [], 1)
    self.assertEqual(0, num_trimmed)
    self.assertEqual(orig_sharded_iids, self.sharded_iids)

  def testTrimShardedIIDs_Normal(self):
    """The first 3 samples contribute all needed IIDs, so trim off the rest."""
    samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
               (40, (0, 'p:v')), (81, (1, 'p:v'))]
    num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
        self.sharded_iids, samples, 5)
    self.assertEqual(2 + 1 + 0 + 0, num_trimmed)
    self.assertEqual(
        {  # shard_id: iids before lower-bound + iids before 1st excess sample.
         (0, 'p:v'): [10, 20] + [30],
         (1, 'p:v'): [21] + [41, 61],
         (2, 'p:v'): [42, 52] + [62, 72, 102],
         (3, 'p:v'): [] + []},
        self.sharded_iids)

  def testCalcSamplePositions_Empty(self):
    """No samples, or samples for absent shards, yield no positions."""
    sharded_iids = {0: []}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: [10, 20, 30, 40]}
    samples = []
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

    sharded_iids = {0: []}
    # E.g., the IIDs 2 and 4 might have been trimmed out in the forward phase.
    # But we still have them in the list for the backwards phase, and they
    # should just not contribute anything to the result.
    samples = [(2, (2, 'p:v')), (4, (4, 'p:v'))]
    self.assertEqual(
        [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))

  def testCalcSamplePositions_Normal(self):
    """Each sample is annotated with its index within its own shard."""
    samples = [(30, (0, 'p:v')), (41, (1, 'p:v')), (62, (2, 'p:v')),
               (40, (0, 'p:v')), (81, (1, 'p:v'))]
    self.assertEqual(
        [(30, (0, 'p:v'), 2),
         (41, (1, 'p:v'), 1),
         (62, (2, 'p:v'), 2),
         (40, (0, 'p:v'), 3),
         (81, (1, 'p:v'), 3)],
        frontendsearchpipeline._CalcSamplePositions(self.sharded_iids, samples))
diff --git a/search/test/query2ast_test.py b/search/test/query2ast_test.py
new file mode 100644
index 0000000..fc92e72
--- /dev/null
+++ b/search/test/query2ast_test.py
@@ -0,0 +1,1041 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the query2ast module."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import datetime
+import time
+import unittest
+import mock
+
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import query2ast
+from tracker import tracker_bizobj
+
# Convenience aliases for frequently used query2ast / ast_pb2 symbols.
BOOL = query2ast.BOOL
DATE = query2ast.DATE
NUM = query2ast.NUM
TXT = query2ast.TXT

SUBQUERY = query2ast.SUBQUERY
LEFT_PAREN = query2ast.LEFT_PAREN
RIGHT_PAREN = query2ast.RIGHT_PAREN
OR = query2ast.OR

BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']

EQ = query2ast.EQ
NE = query2ast.NE
LT = query2ast.LT
GT = query2ast.GT
LE = query2ast.LE
GE = query2ast.GE
TEXT_HAS = query2ast.TEXT_HAS
NOT_TEXT_HAS = query2ast.NOT_TEXT_HAS
IS_DEFINED = query2ast.IS_DEFINED
IS_NOT_DEFINED = query2ast.IS_NOT_DEFINED
KEY_HAS = query2ast.KEY_HAS

MakeCond = ast_pb2.MakeCond
# Fixed timestamp so that date-relative parsing is deterministic in tests.
NOW = 1277762224
+
+
+class QueryParsingUnitTest(unittest.TestCase):
+
  def setUp(self):
    # A default config with no custom fields; tests extend it as needed.
    self.project_id = 789
    self.default_config = tracker_bizobj.MakeDefaultProjectIssueConfig(
        self.project_id)
+
  def testParseUserQuery_OrClause(self):
    """An OR query parses into one conjunction per alternative."""
    # an "OR" query, which should look like two separate simple queries
    # joined together by a pipe.
    ast = query2ast.ParseUserQuery(
        'ham OR fancy', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    conj1 = ast.conjunctions[0]
    conj2 = ast.conjunctions[1]
    self.assertEqual([MakeCond(TEXT_HAS, [ANY_FIELD], ['ham'], [])],
                     conj1.conds)
    self.assertEqual([MakeCond(TEXT_HAS, [ANY_FIELD], ['fancy'], [])],
                     conj2.conds)
+
  def testParseUserQuery_Words(self):
    """Plain words: negation, AND handling, and lower-casing of terms."""
    # an "ORTerm" is actually anything appearing on either side of an
    # "OR" operator. So this could be thought of as "simple" query parsing.

    # a simple query with no spaces
    ast = query2ast.ParseUserQuery(
        'hamfancy', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    fulltext_cond = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['hamfancy'], []), fulltext_cond)

    # negative word
    ast = query2ast.ParseUserQuery(
        '-hamfancy', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    fulltext_cond = ast.conjunctions[0].conds[0]
    self.assertEqual(
        # note: NOT_TEXT_HAS, not TEXT_HAS.
        MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['hamfancy'], []),
        fulltext_cond)

    # invalid fulltext term
    ast = query2ast.ParseUserQuery(
        'ham=fancy\\', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    self.assertEqual([], ast.conjunctions[0].conds)

    # an explicit "AND" query in the "featured" context
    warnings = []
    query2ast.ParseUserQuery(
        'ham AND fancy', 'label:featured', BUILTIN_ISSUE_FIELDS,
        self.default_config, warnings=warnings)
    self.assertEqual(
        ['The only supported boolean operator is OR (all capitals).'],
        warnings)

    # an implicit "AND" query
    ast = query2ast.ParseUserQuery(
        'ham fancy', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    scope_cond1, ft_cond1, ft_cond2 = ast.conjunctions[0].conds
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['deprecated'], []),
        scope_cond1)
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['ham'], []), ft_cond1)
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['fancy'], []), ft_cond2)

    # Use word with non-operator prefix.
    word_with_non_op_prefix = '%stest' % query2ast.NON_OP_PREFIXES[0]
    ast = query2ast.ParseUserQuery(
        word_with_non_op_prefix, '', BUILTIN_ISSUE_FIELDS, self.default_config)
    fulltext_cond = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['"%s"' % word_with_non_op_prefix], []),
        fulltext_cond)

    # mix positive and negative words
    ast = query2ast.ParseUserQuery(
        'ham -fancy', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    scope_cond1, ft_cond1, ft_cond2 = ast.conjunctions[0].conds
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['deprecated'], []),
        scope_cond1)
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['ham'], []), ft_cond1)
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['fancy'], []), ft_cond2)

    # converts terms to lower case
    ast = query2ast.ParseUserQuery(
        'AmDude', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    scope_cond1, fulltext_cond = ast.conjunctions[0].conds
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['deprecated'], []),
        scope_cond1)
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['amdude'], []), fulltext_cond)
+
  def testParseUserQuery_Phrases(self):
    """Quoted phrases parse as single full-text terms, with negation."""
    # positive phrases
    ast = query2ast.ParseUserQuery(
        '"one two"', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    scope_cond1, fulltext_cond = ast.conjunctions[0].conds
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['deprecated'], []),
        scope_cond1)
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['"one two"'], []), fulltext_cond)

    # negative phrases
    ast = query2ast.ParseUserQuery(
        '-"one two"', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    scope_cond1, fulltext_cond = ast.conjunctions[0].conds
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['deprecated'], []),
        scope_cond1)
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['"one two"'], []), fulltext_cond)

    # multiple phrases
    ast = query2ast.ParseUserQuery(
        '-"a b" "x y"', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    scope_cond1, ft_cond1, ft_cond2 = ast.conjunctions[0].conds
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['deprecated'], []),
        scope_cond1)
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['"a b"'], []), ft_cond1)
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD], ['"x y"'], []), ft_cond2)
+
  def testParseUserQuery_CodeSyntaxThatWeNeedToCopeWith(self):
    """Code-like tokens are quoted and lower-cased; bare punctuation is
    ignored."""
    # code-like terms are searched as quoted phrases
    ast = query2ast.ParseUserQuery(
        'Base::Tuple', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [ANY_FIELD],
                 ['"base::tuple"'], []),
        cond)

    # stuff we just ignore
    ast = query2ast.ParseUserQuery(
        ':: - -- .', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    self.assertEqual([], ast.conjunctions[0].conds)
+
  def testParseUserQuery_IsOperator(self):
    """Test is:open, is:spam, and is:blocked."""
    for keyword in ['open', 'spam', 'blocked']:
      # is:keyword produces an EQ condition with no values.
      ast = query2ast.ParseUserQuery(
          'is:' + keyword, '', BUILTIN_ISSUE_FIELDS, self.default_config)
      cond1 = ast.conjunctions[0].conds[0]
      self.assertEqual(
          MakeCond(EQ, [BUILTIN_ISSUE_FIELDS[keyword]], [], []),
          cond1)
      # -is:keyword produces the negated NE condition.
      ast = query2ast.ParseUserQuery(
          '-is:' + keyword, '', BUILTIN_ISSUE_FIELDS, self.default_config)
      cond1 = ast.conjunctions[0].conds[0]
      self.assertEqual(
          MakeCond(NE, [BUILTIN_ISSUE_FIELDS[keyword]], [], []),
          cond1)
+
  def testParseUserQuery_HasOperator(self):
    """has:/-has: map to IS_DEFINED / IS_NOT_DEFINED conditions."""
    # Search for issues with at least one attachment
    ast = query2ast.ParseUserQuery(
        'has:attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(IS_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-has:attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(IS_NOT_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
        cond1)

    # '=' syntax behaves the same as ':' for the has operator.
    ast = query2ast.ParseUserQuery(
        'has=attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(IS_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-has=attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(IS_NOT_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
        cond1)

    # Search for numeric fields for searches with 'has' prefix
    ast = query2ast.ParseUserQuery(
        'has:attachments', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(IS_DEFINED, [BUILTIN_ISSUE_FIELDS['attachments']], [], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-has:attachments', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(IS_NOT_DEFINED, [BUILTIN_ISSUE_FIELDS['attachments']],
                 [], []),
        cond1)

    # If it is not a field, look for any key-value label.
    ast = query2ast.ParseUserQuery(
        'has:Size', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(IS_DEFINED, [BUILTIN_ISSUE_FIELDS['label']], ['size'], []),
        cond1)
+
  def testParseUserQuery_Phase(self):
    """gate: terms parse into multi-valued, lower-cased gate conditions."""
    ast = query2ast.ParseUserQuery(
        'gate:Canary,Stable', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['gate']],
                 ['canary', 'stable'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-gate:Canary,Stable', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['gate']],
                 ['canary', 'stable'], []),
        cond1)
+
  def testParseUserQuery_Components(self):
    """Parse user queries for components."""
    ast = query2ast.ParseUserQuery(
        'component:UI', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['component']],
                 ['ui'], []),
        cond1)

    # Component paths ('>' separated) are kept intact but lower-cased.
    ast = query2ast.ParseUserQuery(
        'Component:UI>AboutBox', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['component']],
                 ['ui>aboutbox'], []),
        cond1)
+
  def testParseUserQuery_OwnersReportersAndCc(self):
    """Parse user queries for owner:, reporter: and cc:."""
    # ':' syntax yields TEXT_HAS (substring match).
    ast = query2ast.ParseUserQuery(
        'owner:user', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['owner']],
                 ['user'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        'owner:user@example.com', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['owner']],
                 ['user@example.com'], []),
        cond1)

    # '=' syntax yields EQ (exact match).
    ast = query2ast.ParseUserQuery(
        'owner=user@example.com', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['owner']],
                 ['user@example.com'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-reporter=user@example.com', '', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(NE, [BUILTIN_ISSUE_FIELDS['reporter']],
                 ['user@example.com'], []),
        cond1)

    # Comma-separated values become a multi-valued condition.
    ast = query2ast.ParseUserQuery(
        'cc=user@example.com,user2@example.com', '', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['cc']],
                 ['user@example.com', 'user2@example.com'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        'cc:user,user2', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['cc']],
                 ['user', 'user2'], []),
        cond1)
+
  def testParseUserQuery_SearchWithinFields(self):
    """field:value and field=value terms map to per-field conditions."""
    # Search for issues with certain filenames
    ast = query2ast.ParseUserQuery(
        'attachment:filename', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['attachment']],
                 ['filename'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-attachment:filename', '', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['attachment']],
                 ['filename'], []),
        cond1)

    # Search for issues with a certain number of attachments.  Note the
    # parsed int value in the fourth MakeCond argument.
    ast = query2ast.ParseUserQuery(
        'attachments:2', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['attachments']],
                 ['2'], [2]),
        cond1)

    # Searches with '=' syntax
    ast = query2ast.ParseUserQuery(
        'attachment=filename', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['attachment']],
                 ['filename'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-attachment=filename', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(NE, [BUILTIN_ISSUE_FIELDS['attachment']],
                 ['filename'], []),
        cond1)

    # 'milestone' terms become key-value label conditions.
    ast = query2ast.ParseUserQuery(
        'milestone=2009', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']], ['milestone-2009'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-milestone=2009', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(NE, [BUILTIN_ISSUE_FIELDS['label']], ['milestone-2009'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        'milestone=2009-Q1', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']],
                 ['milestone-2009-q1'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        '-milestone=2009-Q1', '', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(NE, [BUILTIN_ISSUE_FIELDS['label']],
                 ['milestone-2009-q1'], []),
        cond1)

    # Searches with ':' syntax
    ast = query2ast.ParseUserQuery(
        'summary:foo', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS,
                 [BUILTIN_ISSUE_FIELDS['summary']], ['foo'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        'summary:"greetings programs"', '', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS,
                 [BUILTIN_ISSUE_FIELDS['summary']], ['greetings programs'], []),
        cond1)

    # Non-ASCII query terms are preserved.
    ast = query2ast.ParseUserQuery(
        'summary:"Ӓ"', '', BUILTIN_ISSUE_FIELDS,
        self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS,
                 [BUILTIN_ISSUE_FIELDS['summary']], ['Ӓ'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        'priority:high', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(KEY_HAS,
                 [BUILTIN_ISSUE_FIELDS['label']], ['priority-high'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        'type:security', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(KEY_HAS,
                 [BUILTIN_ISSUE_FIELDS['label']], ['type-security'], []),
        cond1)

    ast = query2ast.ParseUserQuery(
        'label:priority-high', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS,
                 [BUILTIN_ISSUE_FIELDS['label']], ['priority-high'], []),
        cond1)

    # Cross-project issue references keep their 'project:id' form.
    ast = query2ast.ParseUserQuery(
        'blockedon:other:123', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['blockedon']],
                 ['other:123'], []),
        cond1)

    # A negative value like -2 becomes part of the label name.
    ast = query2ast.ParseUserQuery(
        'cost=-2', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']],
                 ['cost--2'], []),
        cond1)

    # Searches with ':' and an email domain only.
    ast = query2ast.ParseUserQuery(
        'reporter:@google.com', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(TEXT_HAS,
                 [BUILTIN_ISSUE_FIELDS['reporter']], ['@google.com'], []),
        cond1)

    # Search for issues in certain user hotlists.
    ast = query2ast.ParseUserQuery(
        'hotlist=gatsby@chromium.org:Hotlist1', '',
        BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(
            EQ, [BUILTIN_ISSUE_FIELDS['hotlist']],
            ['gatsby@chromium.org:hotlist1'], []),
        cond1)

    # Search for 'Hotlist' labels.
    ast = query2ast.ParseUserQuery(
        'hotlist:sublabel', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    self.assertEqual(
        MakeCond(KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['hotlist-sublabel'], []),
        cond1)
+
  def testParseUserQuery_SearchWithinCustomFields(self):
    """Enums are treated as labels, other fields are kept as fields."""
    fd1 = tracker_bizobj.MakeFieldDef(
        1, self.project_id, 'Size', tracker_pb2.FieldTypes.ENUM_TYPE,
        'applic', 'applic', False, False, False, None, None, None, False, None,
        None, None, 'no_action', 'doc', False)
    fd2 = tracker_bizobj.MakeFieldDef(
        1, self.project_id, 'EstDays', tracker_pb2.FieldTypes.INT_TYPE,
        'applic', 'applic', False, False, False, None, None, None, False, None,
        None, None, 'no_action', 'doc', False)
    self.default_config.field_defs.extend([fd1, fd2])
    ast = query2ast.ParseUserQuery(
        'Size:Small EstDays>3', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    cond2 = ast.conjunctions[0].conds[1]
    # The enum field becomes a key-value label condition.
    self.assertEqual(
        MakeCond(KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
                 ['size-small'], []),
        cond1)
    # The int field keeps its field definition and parsed int value.
    self.assertEqual(
        MakeCond(GT, [fd2], ['3'], [3]),
        cond2)
+
  @mock.patch('time.time', return_value=NOW)
  def testParseUserQuery_Approvals(self, _mock_time):
    """Test approval queries are parsed correctly."""
    fd1 = tracker_bizobj.MakeFieldDef(
        1, self.project_id, 'UIReview', tracker_pb2.FieldTypes.APPROVAL_TYPE,
        'applic', 'applic', False, False, False, None, None, None, False, None,
        None, None, 'no_action', 'doc', False)
    fd2 = tracker_bizobj.MakeFieldDef(
        2, self.project_id, 'EstDays', tracker_pb2.FieldTypes.INT_TYPE,
        'applic', 'applic', False, False, False, None, None, None, False, None,
        None, None, 'no_action', 'doc', False)
    fd3 = tracker_bizobj.MakeFieldDef(
        3, self.project_id, 'UXReview', tracker_pb2.FieldTypes.APPROVAL_TYPE,
        'applic', 'applic', False, False, False, None, None, None, False, None,
        None, None, 'no_action', 'doc', False)
    self.default_config.field_defs.extend([fd1, fd2, fd3])
    # The -approver and -on suffixes select sub-aspects of an approval;
    # time.time is mocked so 'today-7' resolves deterministically.
    ast = query2ast.ParseUserQuery(
        'UXReview-approver:user1@mail.com,user2@mail.com UIReview:Approved '
        'UIReview-on>today-7', '', BUILTIN_ISSUE_FIELDS, self.default_config)
    cond1 = ast.conjunctions[0].conds[0]
    cond2 = ast.conjunctions[0].conds[1]
    cond3 = ast.conjunctions[0].conds[2]
    self.assertEqual(MakeCond(TEXT_HAS, [fd3],
                              ['user1@mail.com', 'user2@mail.com'], [],
                              key_suffix='-approver'), cond1)
    self.assertEqual(MakeCond(TEXT_HAS, [fd1], ['approved'], []), cond2)
    self.assertEqual(
        cond3,
        MakeCond(
            GT, [fd1], [], [query2ast._CalculatePastDate(7, NOW)],
            key_suffix='-on'))
+
+ def testParseUserQuery_PhaseFields(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 1, self.project_id, 'EstDays', tracker_pb2.FieldTypes.INT_TYPE,
+ 'applic', 'applic', False, False, False, None, None, None, False, None,
+ None, None, 'no_action', 'doc', False, is_phase_field=True)
+ self.default_config.field_defs.append(fd)
+ ast = query2ast.ParseUserQuery(
+ 'UXReview.EstDays>3', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(GT, [fd], ['3'], [3], phase_name='uxreview'),
+ cond1)
+
+  def testParseUserQuery_QuickOr(self):
+    """Comma-separated values expand into one multi-value condition."""
+    # quick-or searches
+    ast = query2ast.ParseUserQuery(
+        'milestone:2008,2009,2010', '', BUILTIN_ISSUE_FIELDS,
+        self.default_config)
+    cond1 = ast.conjunctions[0].conds[0]
+    self.assertEqual(
+        MakeCond(KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+                 ['milestone-2008', 'milestone-2009', 'milestone-2010'], []),
+        cond1)
+
+    # An explicit label: search uses TEXT_HAS and keeps the values as-is.
+    ast = query2ast.ParseUserQuery(
+        'label:milestone-2008,milestone-2009,milestone-2010', '',
+        BUILTIN_ISSUE_FIELDS, self.default_config)
+    cond1 = ast.conjunctions[0].conds[0]
+    self.assertEqual(
+        MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+                 ['milestone-2008', 'milestone-2009', 'milestone-2010'], []),
+        cond1)
+
+    # "=" produces an EQ condition rather than KEY_HAS.
+    ast = query2ast.ParseUserQuery(
+        'milestone=2008,2009,2010', '', BUILTIN_ISSUE_FIELDS,
+        self.default_config)
+    cond1 = ast.conjunctions[0].conds[0]
+    self.assertEqual(
+        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']],
+                 ['milestone-2008', 'milestone-2009', 'milestone-2010'], []),
+        cond1)
+
+    # Duplicated and trailing commas are ignored.
+    ast = query2ast.ParseUserQuery(
+        'milestone=2008,,2009,2010,', '', BUILTIN_ISSUE_FIELDS,
+        self.default_config)
+    cond1 = ast.conjunctions[0].conds[0]
+    self.assertEqual(
+        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']],
+                 ['milestone-2008', 'milestone-2009', 'milestone-2010'], []),
+        cond1)
+
+  def testParseUserQuery_Dates(self):
+    """Date terms parse into timestamp comparisons on date fields."""
+    # query with a daterange
+    ast = query2ast.ParseUserQuery(
+        'modified>=2009-5-12', '', BUILTIN_ISSUE_FIELDS,
+        self.default_config)
+    cond1 = ast.conjunctions[0].conds[0]
+    ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+    self.assertEqual(
+        MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+
+    # query with quick-or: both dates land in one EQ condition.
+    ast = query2ast.ParseUserQuery(
+        'modified=2009-5-12,2009-5-13', '', BUILTIN_ISSUE_FIELDS,
+        self.default_config)
+    cond1 = ast.conjunctions[0].conds[0]
+    ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+    ts2 = int(time.mktime(datetime.datetime(2009, 5, 13).timetuple()))
+    self.assertEqual(
+        MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1, ts2]), cond1)
+
+    # query with multiple dateranges; "/" and "-" date separators both work.
+    ast = query2ast.ParseUserQuery(
+        'modified>=2009-5-12 opened<2008/1/1', '',
+        BUILTIN_ISSUE_FIELDS, self.default_config)
+    cond1, cond2 = ast.conjunctions[0].conds
+    ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+    self.assertEqual(
+        MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+    ts2 = int(time.mktime(datetime.datetime(2008, 1, 1).timetuple()))
+    self.assertEqual(
+        MakeCond(LT, [BUILTIN_ISSUE_FIELDS['opened']], [], [ts2]), cond2)
+
+    # query with multiple dateranges plus a search term; full-text conds
+    # come first, in query order.
+    ast = query2ast.ParseUserQuery(
+        'one two modified>=2009-5-12 opened<2008/1/1', '',
+        BUILTIN_ISSUE_FIELDS, self.default_config)
+    ft_cond1, ft_cond2, cond1, cond2 = ast.conjunctions[0].conds
+    ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+    self.assertEqual(
+        MakeCond(TEXT_HAS, [ANY_FIELD], ['one'], []), ft_cond1)
+    self.assertEqual(
+        MakeCond(TEXT_HAS, [ANY_FIELD], ['two'], []), ft_cond2)
+    self.assertEqual(
+        MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+    ts2 = int(time.mktime(datetime.datetime(2008, 1, 1).timetuple()))
+    self.assertEqual(
+        MakeCond(LT, [BUILTIN_ISSUE_FIELDS['opened']], [], [ts2]), cond2)
+
+    # query with a date field compared to "today"
+    ast = query2ast.ParseUserQuery(
+        'modified<today', '', BUILTIN_ISSUE_FIELDS,
+        self.default_config, now=NOW)
+    cond1 = ast.conjunctions[0].conds[0]
+    ts1 = query2ast._CalculatePastDate(0, now=NOW)
+    self.assertEqual(MakeCond(LT, [BUILTIN_ISSUE_FIELDS['modified']],
+                              [], [ts1]),
+                     cond1)
+
+    # query with a daterange using today-N alias
+    ast = query2ast.ParseUserQuery(
+        'modified>=today-13', '', BUILTIN_ISSUE_FIELDS,
+        self.default_config, now=NOW)
+    cond1 = ast.conjunctions[0].conds[0]
+    ts1 = query2ast._CalculatePastDate(13, now=NOW)
+    self.assertEqual(MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']],
+                              [], [ts1]),
+                     cond1)
+
+    ast = query2ast.ParseUserQuery(
+        'modified>today-13', '', BUILTIN_ISSUE_FIELDS, self.default_config,
+        now=NOW)
+    cond1 = ast.conjunctions[0].conds[0]
+    ts1 = query2ast._CalculatePastDate(13, now=NOW)
+    self.assertEqual(MakeCond(GT, [BUILTIN_ISSUE_FIELDS['modified']],
+                              [], [ts1]),
+                     cond1)
+
+    # query with multiple old date query terms; the legacy -after/-before
+    # key suffixes map to GT/LT conditions respectively.
+    ast = query2ast.ParseUserQuery(
+        'modified-after:2009-5-12 opened-before:2008/1/1 '
+        'closed-after:2007-2-1', '',
+        BUILTIN_ISSUE_FIELDS, self.default_config)
+    cond1, cond2, cond3 = ast.conjunctions[0].conds
+    ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+    self.assertEqual(
+        MakeCond(GT, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+    ts2 = int(time.mktime(datetime.datetime(2008, 1, 1).timetuple()))
+    self.assertEqual(
+        MakeCond(LT, [BUILTIN_ISSUE_FIELDS['opened']], [], [ts2]), cond2)
+    ts3 = int(time.mktime(datetime.datetime(2007, 2, 1).timetuple()))
+    self.assertEqual(
+        MakeCond(GT, [BUILTIN_ISSUE_FIELDS['closed']], [], [ts3]), cond3)
+
+ def testCalculatePastDate(self):
+ ts1 = query2ast._CalculatePastDate(0, now=NOW)
+ self.assertEqual(NOW, ts1)
+
+ ts2 = query2ast._CalculatePastDate(13, now=NOW)
+ self.assertEqual(ts2, NOW - 13 * 24 * 60 * 60)
+
+ # Try it once with time.time() instead of a known timestamp.
+ ts_system_clock = query2ast._CalculatePastDate(13)
+ self.assertTrue(ts_system_clock < int(time.time()))
+
+ def testParseUserQuery_BadDates(self):
+ bad_dates = ['today-13h', 'yesterday', '2/2', 'm/y/d',
+ '99/99/1999', '0-0-0']
+ for val in bad_dates:
+ with self.assertRaises(query2ast.InvalidQueryError) as cm:
+ query2ast.ParseUserQuery(
+ 'modified>=' + val, '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ self.assertEqual('Could not parse date: ' + val, cm.exception.message)
+
+ def testQueryToSubqueries_BasicQuery(self):
+ self.assertEqual(['owner:me'], query2ast.QueryToSubqueries('owner:me'))
+
+ def testQueryToSubqueries_EmptyQuery(self):
+ self.assertEqual([''], query2ast.QueryToSubqueries(''))
+
+ def testQueryToSubqueries_UnmatchedParenthesesThrowsError(self):
+ with self.assertRaises(query2ast.InvalidQueryError):
+ self.assertEqual(['Pri=1'], query2ast.QueryToSubqueries('Pri=1))'))
+ with self.assertRaises(query2ast.InvalidQueryError):
+ self.assertEqual(
+ ['label:Hello'], query2ast.QueryToSubqueries('((label:Hello'))
+
+ with self.assertRaises(query2ast.InvalidQueryError):
+ self.assertEqual(
+ ['owner:me'], query2ast.QueryToSubqueries('((((owner:me)))'))
+
+ with self.assertRaises(query2ast.InvalidQueryError):
+ self.assertEqual(
+ ['test=What'], query2ast.QueryToSubqueries('(((test=What))))'))
+
+ def testQueryToSubqueries_IgnoresEmptyGroups(self):
+ self.assertEqual([''], query2ast.QueryToSubqueries('()(()(()))()()'))
+
+ self.assertEqual(
+ ['owner:me'], query2ast.QueryToSubqueries('()(()owner:me)()()'))
+
+ def testQueryToSubqueries_BasicOr(self):
+ self.assertEqual(
+ ['owner:me', 'status:New', 'Pri=1'],
+ query2ast.QueryToSubqueries('owner:me OR status:New OR Pri=1'))
+
+ def testQueryToSubqueries_OrAtStartOrEnd(self):
+ self.assertEqual(
+ ['owner:me OR'], query2ast.QueryToSubqueries('owner:me OR'))
+
+ self.assertEqual(
+ ['OR owner:me'], query2ast.QueryToSubqueries('OR owner:me'))
+
+ def testQueryToSubqueries_BasicParentheses(self):
+ self.assertEqual(
+ ['owner:me status:New'],
+ query2ast.QueryToSubqueries('owner:me (status:New)'))
+
+ self.assertEqual(
+ ['owner:me status:New'],
+ query2ast.QueryToSubqueries('(owner:me) status:New'))
+
+ self.assertEqual(
+ ['owner:me status:New'],
+ query2ast.QueryToSubqueries('((owner:me) (status:New))'))
+
+ def testQueryToSubqueries_ParenthesesWithOr(self):
+ self.assertEqual(
+ ['Pri=1 owner:me', 'Pri=1 status:New'],
+ query2ast.QueryToSubqueries('Pri=1 (owner:me OR status:New)'))
+
+ self.assertEqual(
+ ['owner:me component:OhNo', 'status:New component:OhNo'],
+ query2ast.QueryToSubqueries('(owner:me OR status:New) component:OhNo'))
+
+ def testQueryToSubqueries_ParenthesesWithOr_Multiple(self):
+ self.assertEqual(
+ [
+ 'Pri=1 test owner:me', 'Pri=1 test status:New',
+ 'Pri=2 test owner:me', 'Pri=2 test status:New'
+ ],
+ query2ast.QueryToSubqueries(
+ '(Pri=1 OR Pri=2)(test (owner:me OR status:New))'))
+
+ def testQueryToSubqueries_OrNextToParentheses(self):
+ self.assertEqual(['A', 'B'], query2ast.QueryToSubqueries('(A) OR (B)'))
+
+ self.assertEqual(
+ ['A B', 'A C E', 'A D E'],
+ query2ast.QueryToSubqueries('A (B OR (C OR D) E)'))
+
+ self.assertEqual(
+ ['A B C', 'A B D', 'A E'],
+ query2ast.QueryToSubqueries('A (B (C OR D) OR E)'))
+
+ def testQueryToSubqueries_ExtraSpaces(self):
+ self.assertEqual(
+ ['A', 'B'], query2ast.QueryToSubqueries(' ( A ) OR ( B ) '))
+
+ self.assertEqual(
+ ['A B', 'A C E', 'A D E'],
+ query2ast.QueryToSubqueries(' A ( B OR ( C OR D ) E )'))
+
+ def testQueryToSubqueries_OrAtEndOfParentheses(self):
+ self.assertEqual(['A B'], query2ast.QueryToSubqueries('(A OR )(B)'))
+ self.assertEqual(
+ ['A B', 'A C'], query2ast.QueryToSubqueries('( OR A)(B OR C)'))
+ self.assertEqual(
+ ['A B', 'A C'], query2ast.QueryToSubqueries(' OR A (B OR C)'))
+
+ def testQueryToSubqueries_EmptyOrGroup(self):
+ self.assertEqual(
+ ['A C', 'C', 'B C'], query2ast.QueryToSubqueries('(A OR OR B)(C)'))
+
+ def testParseQuery_Basic(self):
+ self.assertEqual(
+ [
+ 'owner:me',
+ ],
+ query2ast._ParseQuery(
+ query2ast.PeekIterator(
+ [ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me')])))
+
+ def testParseQuery_Complex(self):
+ self.assertEqual(
+ [
+ 'owner:me',
+ 'Pri=1',
+ 'label=test',
+ ],
+ query2ast._ParseQuery(
+ query2ast.PeekIterator(
+ [
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me'),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=LEFT_PAREN),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='Pri=1'),
+ ast_pb2.QueryToken(token_type=RIGHT_PAREN),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='label=test'),
+ ])))
+
+ def testParseOrGroup_Basic(self):
+ self.assertEqual(
+ [
+ 'owner:me',
+ ],
+ query2ast._ParseOrGroup(
+ query2ast.PeekIterator(
+ [ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me')])))
+
+ def testParseOrGroup_TwoAdjacentAndGroups(self):
+ self.assertEqual(
+ [
+ 'owner:me Pri=1',
+ 'owner:me label=test',
+ ],
+ query2ast._ParseOrGroup(
+ query2ast.PeekIterator(
+ [
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me'),
+ ast_pb2.QueryToken(token_type=LEFT_PAREN),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='Pri=1'),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='label=test'),
+ ast_pb2.QueryToken(token_type=RIGHT_PAREN),
+ ])))
+
+ def testParseAndGroup_Subquery(self):
+ self.assertEqual(
+ [
+ 'owner:me',
+ ],
+ query2ast._ParseAndGroup(
+ query2ast.PeekIterator(
+ [ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me')])))
+
+ def testParseAndGroup_ParenthesesGroup(self):
+ self.assertEqual(
+ [
+ 'owner:me',
+ 'Pri=1',
+ ],
+ query2ast._ParseAndGroup(
+ query2ast.PeekIterator(
+ [
+ ast_pb2.QueryToken(token_type=LEFT_PAREN),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me'),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='Pri=1'),
+ ast_pb2.QueryToken(token_type=RIGHT_PAREN),
+ ])))
+
+ def testParseAndGroup_Empty(self):
+ self.assertEqual([], query2ast._ParseAndGroup(query2ast.PeekIterator([])))
+
+ def testParseAndGroup_InvalidTokens(self):
+ with self.assertRaises(query2ast.InvalidQueryError):
+ query2ast._ParseAndGroup(
+ query2ast.PeekIterator(
+ [
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='Pri=1'),
+ ast_pb2.QueryToken(token_type=RIGHT_PAREN),
+ ]))
+
+ with self.assertRaises(query2ast.InvalidQueryError):
+ query2ast._ParseAndGroup(
+ query2ast.PeekIterator(
+ [
+ ast_pb2.QueryToken(token_type=RIGHT_PAREN),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='Pri=1'),
+ ]))
+
+ def testValidateAndTokenizeQuery_Basic(self):
+ self.assertEqual(
+ [
+ ast_pb2.QueryToken(token_type=LEFT_PAREN),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me'),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='Pri=1'),
+ ast_pb2.QueryToken(token_type=RIGHT_PAREN),
+ ], query2ast._ValidateAndTokenizeQuery('(owner:me OR Pri=1)'))
+
+ def testValidateAndTokenizeQuery_UnmatchedParentheses(self):
+ with self.assertRaises(query2ast.InvalidQueryError):
+ query2ast._ValidateAndTokenizeQuery('(owner:me')
+
+ with self.assertRaises(query2ast.InvalidQueryError):
+ query2ast._ValidateAndTokenizeQuery('owner:me)')
+
+ with self.assertRaises(query2ast.InvalidQueryError):
+ query2ast._ValidateAndTokenizeQuery('(()owner:me))')
+
+ with self.assertRaises(query2ast.InvalidQueryError):
+ query2ast._ValidateAndTokenizeQuery('(()owner:me)())')
+
+ def testTokenizeSubqueryOnOr_NoOrOperator(self):
+ self.assertEqual(
+ [ast_pb2.QueryToken(token_type=SUBQUERY, value='owner:me')],
+ query2ast._TokenizeSubqueryOnOr('owner:me'))
+
+ def testTokenizeSubqueryOnOr_BasicOrOperator(self):
+ self.assertEqual(
+ [
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='A'),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='B'),
+ ast_pb2.QueryToken(token_type=OR),
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='C'),
+ ], query2ast._TokenizeSubqueryOnOr('A OR B OR C'))
+
+ def testTokenizeSubqueryOnOr_EmptyOrOperator(self):
+ self.assertEqual(
+ [ast_pb2.QueryToken(token_type=OR)],
+ query2ast._TokenizeSubqueryOnOr(' OR '))
+
+ self.assertEqual(
+ [
+ ast_pb2.QueryToken(token_type=SUBQUERY, value='A'),
+ ast_pb2.QueryToken(token_type=OR),
+ ], query2ast._TokenizeSubqueryOnOr('A OR '))
+
+ def testMultiplySubqueries_Basic(self):
+ self.assertEqual(
+ ['owner:me Pri=1', 'owner:me Pri=2', 'test Pri=1', 'test Pri=2'],
+ query2ast._MultiplySubqueries(['owner:me', 'test'], ['Pri=1', 'Pri=2']))
+
+ def testMultiplySubqueries_OneEmpty(self):
+ self.assertEqual(
+ ['Pri=1', 'Pri=2'],
+ query2ast._MultiplySubqueries([], ['Pri=1', 'Pri=2']))
+ self.assertEqual(
+ ['Pri=1', 'Pri=2'],
+ query2ast._MultiplySubqueries([''], ['Pri=1', 'Pri=2']))
+
+ self.assertEqual(
+ ['Pri=1', 'Pri=2'],
+ query2ast._MultiplySubqueries(['Pri=1', 'Pri=2'], []))
+ self.assertEqual(
+ ['Pri=1', 'Pri=2'],
+ query2ast._MultiplySubqueries(['Pri=1', 'Pri=2'], ['']))
+
+ def testPeekIterator_Basic(self):
+ iterator = query2ast.PeekIterator([1, 2, 3])
+
+ self.assertEqual(1, iterator.peek())
+ self.assertEqual(1, iterator.next())
+
+ self.assertEqual(2, iterator.next())
+
+ self.assertEqual(3, iterator.peek())
+ self.assertEqual(3, iterator.next())
+
+ with self.assertRaises(StopIteration):
+ iterator.peek()
+
+ with self.assertRaises(StopIteration):
+ iterator.next()
diff --git a/search/test/search_helpers_test.py b/search/test/search_helpers_test.py
new file mode 100644
index 0000000..5905234
--- /dev/null
+++ b/search/test/search_helpers_test.py
@@ -0,0 +1,130 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for monorail.search.search_helpers."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import mox
+import unittest
+
+from search import search_helpers
+
+from google.appengine.ext import testbed
+from framework import permissions
+from framework import sql
+from proto import user_pb2
+from services import chart_svc
+from services import service_manager
+from testing import fake
+
+
+def MakeChartService(my_mox, config):
+ chart_service = chart_svc.ChartService(config)
+ for table_var in ['issuesnapshot_tbl', 'labeldef_tbl']:
+ setattr(chart_service, table_var, my_mox.CreateMock(sql.SQLTableManager))
+ return chart_service
+
+
+class SearchHelpersTest(unittest.TestCase):
+ """Tests for functions in search_helpers.
+
+ Also covered by search.backendnonviewable.GetAtRiskIIDs cases.
+ """
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ self.mox = mox.Mox()
+ self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+ self.services = service_manager.Services()
+ self.services.chart = MakeChartService(self.mox, self.services.config)
+ self.config_service = fake.ConfigService()
+ self.user = user_pb2.User()
+
+ def testGetPersonalAtRiskLabelIDs_ReadOnly(self):
+ """Test returns risky IDs a read-only user cannot access."""
+ self.mox.StubOutWithMock(self.config_service, 'GetLabelDefRowsAnyProject')
+ self.config_service.GetLabelDefRowsAnyProject(
+ self.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
+ ).AndReturn([
+ (123, 789, 0, 'Restrict-View-Google', 'docstring', 0),
+ (124, 789, 0, 'Restrict-View-SecurityTeam', 'docstring', 0),
+ ])
+
+ self.mox.ReplayAll()
+ ids = search_helpers.GetPersonalAtRiskLabelIDs(
+ self.cnxn,
+ self.user,
+ self.config_service,
+ effective_ids=[10, 20],
+ project=fake.Project(project_id=789),
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ self.mox.VerifyAll()
+
+ self.assertEqual(ids, [123, 124])
+
+ def testGetPersonalAtRiskLabelIDs_LoggedInUser(self):
+ """Test returns restricted label IDs a logged in user cannot access."""
+ self.mox.StubOutWithMock(self.config_service, 'GetLabelDefRowsAnyProject')
+ self.config_service.GetLabelDefRowsAnyProject(
+ self.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
+ ).AndReturn([
+ (123, 789, 0, 'Restrict-View-Google', 'docstring', 0),
+ (124, 789, 0, 'Restrict-View-SecurityTeam', 'docstring', 0),
+ ])
+
+ self.mox.ReplayAll()
+ ids = search_helpers.GetPersonalAtRiskLabelIDs(
+ self.cnxn,
+ self.user,
+ self.config_service,
+ effective_ids=[10, 20],
+ project=fake.Project(project_id=789),
+ perms=permissions.USER_PERMISSIONSET)
+ self.mox.VerifyAll()
+
+ self.assertEqual(ids, [123, 124])
+
+ def testGetPersonalAtRiskLabelIDs_UserWithRVG(self):
+ """Test returns restricted label IDs a logged in user cannot access."""
+ self.mox.StubOutWithMock(self.config_service, 'GetLabelDefRowsAnyProject')
+ self.config_service.GetLabelDefRowsAnyProject(
+ self.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
+ ).AndReturn([
+ (123, 789, 0, 'Restrict-View-Google', 'docstring', 0),
+ (124, 789, 0, 'Restrict-View-SecurityTeam', 'docstring', 0),
+ ])
+
+ self.mox.ReplayAll()
+ perms = permissions.PermissionSet(['Google'])
+ ids = search_helpers.GetPersonalAtRiskLabelIDs(
+ self.cnxn,
+ self.user,
+ self.config_service,
+ effective_ids=[10, 20],
+ project=fake.Project(project_id=789),
+ perms=perms)
+ self.mox.VerifyAll()
+
+ self.assertEqual(ids, [124])
+
+ def testGetPersonalAtRiskLabelIDs_Admin(self):
+ """Test returns nothing for an admin (who can view everything)."""
+ self.user.is_site_admin = True
+ self.mox.ReplayAll()
+ ids = search_helpers.GetPersonalAtRiskLabelIDs(
+ self.cnxn,
+ self.user,
+ self.config_service,
+ effective_ids=[10, 20],
+ project=fake.Project(project_id=789),
+ perms=permissions.ADMIN_PERMISSIONSET)
+ self.mox.VerifyAll()
+
+ self.assertEqual(ids, [])
diff --git a/search/test/searchpipeline_test.py b/search/test/searchpipeline_test.py
new file mode 100644
index 0000000..5d23316
--- /dev/null
+++ b/search/test/searchpipeline_test.py
@@ -0,0 +1,121 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the searchpipeline module."""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+
+import unittest
+
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import searchpipeline
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+
+
+class SearchPipelineTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService())
+ self.services.user.TestAddUser('a@example.com', 111)
+
+ def testIsStarredRE(self):
+ """IS_STARRED_RE matches only the is:starred term."""
+ input_output = {
+ 'something:else': 'something:else',
+ 'genesis:starred': 'genesis:starred',
+ 'is:starred-in-bookmarks': 'is:starred-in-bookmarks',
+ 'is:starred': 'foo',
+ 'Is:starred': 'foo',
+ 'is:STARRED': 'foo',
+ 'is:starred is:open': 'foo is:open',
+ 'is:open is:starred': 'is:open foo',
+ }
+ for i, o in input_output.items():
+ self.assertEqual(o, searchpipeline.IS_STARRED_RE.sub('foo', i))
+
+ def testMeRE(self):
+ """ME_RE matches only the 'me' value keyword."""
+ input_output = {
+ 'something:else': 'something:else',
+ 'else:some': 'else:some',
+ 'me': 'me', # It needs to have a ":" in front.
+ 'cc:me-team': 'cc:me-team',
+ 'cc:me=domain@otherdomain': 'cc:me=domain@otherdomain',
+ 'cc:me@example.com': 'cc:me@example.com',
+ 'me:the-boss': 'me:the-boss',
+ 'cc:me': 'cc:foo',
+ 'cc=me': 'cc=foo',
+ 'owner:Me': 'owner:foo',
+ 'reporter:ME': 'reporter:foo',
+ 'cc:me is:open': 'cc:foo is:open',
+ 'is:open cc:me': 'is:open cc:foo',
+ }
+ for i, o in input_output.items():
+ self.assertEqual(o, searchpipeline.ME_RE.sub('foo', i))
+
+  def testAccumulateIssueProjectsAndConfigs(self):
+    """Placeholder: _AccumulateIssueProjectsAndConfigs is untested."""
+    pass  # TODO(jrobbins): write tests
+
+ def testReplaceKeywordsWithUserIDs_IsStarred(self):
+ """The term is:starred is replaced with starredby:USERID."""
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [111], 'is:starred')
+ self.assertEqual('starredby:111', actual)
+ self.assertEqual([], warnings)
+
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [111], 'Pri=1 is:starred M=61')
+ self.assertEqual('Pri=1 starredby:111 M=61', actual)
+ self.assertEqual([], warnings)
+
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [], 'Pri=1 is:starred M=61')
+ self.assertEqual('Pri=1 M=61', actual)
+ self.assertEqual(
+ ['"is:starred" ignored because you are not signed in.'],
+ warnings)
+
+ def testReplaceKeywordsWithUserIDs_IsStarred_linked(self):
+ """is:starred is replaced by starredby:uid1,uid2 for linked accounts."""
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [111, 222], 'is:starred')
+ self.assertEqual('starredby:111,222', actual)
+ self.assertEqual([], warnings)
+
+ def testReplaceKeywordsWithUserIDs_Me(self):
+ """Terms like owner:me are replaced with owner:USERID."""
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [111], 'owner:me')
+ self.assertEqual('owner:111', actual)
+ self.assertEqual([], warnings)
+
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [111], 'Pri=1 cc:me M=61')
+ self.assertEqual('Pri=1 cc:111 M=61', actual)
+ self.assertEqual([], warnings)
+
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [], 'Pri=1 reporter:me M=61')
+ self.assertEqual('Pri=1 M=61', actual)
+ self.assertEqual(
+ ['"me" keyword ignored because you are not signed in.'],
+ warnings)
+
+ def testReplaceKeywordsWithUserIDs_Me_LinkedAccounts(self):
+ """owner:me is replaced with owner:uid,uid for each linked account."""
+ actual, warnings = searchpipeline.ReplaceKeywordsWithUserIDs(
+ [111, 222], 'owner:me')
+ self.assertEqual('owner:111,222', actual)
+ self.assertEqual([], warnings)