Merge branch 'main' into avm99963-monorail

Merged commit 34d8229ae2b51fb1a15bd208e6fe6185c94f6266

GitOrigin-RevId: 7ee0917f93a577e475f8e09526dd144d245593f4
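
The merged changes rename the proto package to mrproto throughout
search/query2ast.py and add a Python 3 __next__ method to the query
tokenizer's PeekIterator. As a rough sketch of what import sites look like
after the rename (module names are taken from the hunks below; nothing else
about the package layout is assumed):

    from mrproto import ast_pb2      # previously: from proto import ast_pb2
    from mrproto import tracker_pb2  # previously: from proto import tracker_pb2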
diff --git a/search/query2ast.py b/search/query2ast.py
index 235f9b3..40c5cdc 100644
--- a/search/query2ast.py
+++ b/search/query2ast.py
@@ -1,7 +1,6 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style
-# license that can be found in the LICENSE file or at
-# https://developers.google.com/open-source/licenses/bsd
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
 
 """A set of functions that integrate the GAE search index with Monorail."""
 from __future__ import print_function
@@ -16,8 +15,8 @@
 
 from google.appengine.api import search
 
-from proto import ast_pb2
-from proto import tracker_pb2
+from mrproto import ast_pb2
+from mrproto import tracker_pb2
 
 
 # TODO(jrobbins): Consider re-implementing this whole file by using a
@@ -191,9 +190,9 @@
 def ParseUserQuery(
     query, scope, builtin_fields, harmonized_config, warnings=None,
     now=None):
-  # type: (str, str, Mapping[str, proto.tracker_pb2.FieldDef],
-  #   proto.tracker_pb2.ProjectIssueConfig, Sequence[str], int) ->
-  #     proto.ast_pb2.QueryAST
+  # type: (str, str, Mapping[str, mrproto.tracker_pb2.FieldDef],
+  #   mrproto.tracker_pb2.ProjectIssueConfig, Sequence[str], int) ->
+  #     mrproto.ast_pb2.QueryAST
   """Parse a user query and return a set of structure terms.
 
   Args:
@@ -251,8 +250,8 @@
 
 
 def _ParseConjunction(subquery, scope, fields, warnings, now=None):
-  # type: (str, str, Mapping[str, proto.tracker_pb2.FieldDef], Sequence[str],
-  #     int) -> proto.ast_pb2.Condition
+  # type: (str, str, Mapping[str, mrproto.tracker_pb2.FieldDef], Sequence[str],
+  #     int) -> mrproto.ast_pb2.Condition
   """Parse part of a user query into a Conjunction PB."""
   scoped_query = ('%s %s' % (scope, subquery)).lower()
   cond_strs = _ExtractConds(scoped_query, warnings)
@@ -263,8 +262,8 @@
 
 
 def _ParseCond(cond_str, fields, warnings, now=None):
-  # type: (str, Mapping[str, proto.tracker_pb2.FieldDef], Sequence[str],
-  #     int) -> proto.ast_pb2.Condition
+  # type: (str, Mapping[str, mrproto.tracker_pb2.FieldDef], Sequence[str],
+  #     int) -> mrproto.ast_pb2.Condition
   """Parse one user query condition string into a Condition PB."""
   op_match = OP_RE.match(cond_str)
   # Do not treat as key:value search terms if any of the special prefixes match.
@@ -308,8 +307,8 @@
 
 
 def _ParseStructuredTerm(prefix, op_str, value, fields, now=None):
-  # type: (str, str, str, Mapping[str, proto.tracker_pb2.FieldDef]) ->
-  #     proto.ast_pb2.Condition
+  # type: (str, str, str, Mapping[str, mrproto.tracker_pb2.FieldDef]) ->
+  #     mrproto.ast_pb2.Condition
   """Parse one user structured query term into an internal representation.
 
   Args:
@@ -571,7 +570,7 @@
 
 
 def _ParseQuery(token_iterator):
-  # type (Sequence[proto.ast_pb2.QueryToken]) -> Sequence[str]
+  # type: (Sequence[mrproto.ast_pb2.QueryToken]) -> Sequence[str]
   """Recursive helper to convert query tokens into a list of subqueries.
 
   Parses a Query based on the following grammar (EBNF):
@@ -633,7 +632,7 @@
 
 
 def _ParseOrGroup(token_iterator):
-  # type (Sequence[proto.ast_pb2.QueryToken]) -> Sequence[str]
+  # type: (Sequence[mrproto.ast_pb2.QueryToken]) -> Sequence[str]
   """Recursive helper to convert a single "OrGroup" into subqueries.
 
   An OrGroup here is based on the following grammar:
@@ -675,7 +674,7 @@
 
 
 def _ParseAndGroup(token_iterator):
-  # type (Sequence[proto.ast_pb2.QueryToken]) -> Sequence[str]
+  # type: (Sequence[mrproto.ast_pb2.QueryToken]) -> Sequence[str]
   """Recursive helper to convert a single "AndGroup" into subqueries.
 
   An OrGroup here is based on the following grammar:
@@ -724,7 +723,7 @@
 
 
 def _ValidateAndTokenizeQuery(query):
-  # type: (str) -> Sequence[proto.ast_pb2.QueryToken]
+  # type: (str) -> Sequence[mrproto.ast_pb2.QueryToken]
   """Converts the input query into a set of tokens for easier parsing.
 
   Tokenizing the query string before parsing allows us to not have to as many
@@ -777,7 +776,7 @@
 
 
 def _TokenizeSubqueryOnOr(subquery):
-  # type: (str) -> Sequence[proto.ast_pb2.QueryToken]
+  # type: (str) -> Sequence[mrproto.ast_pb2.QueryToken]
   """Helper to split a subquery by OR and convert the result into tokens.
 
   Args:
@@ -872,10 +871,24 @@
       pass
     return 'End of PeekIterator'
 
+  def __next__(self):
+    # type: () -> Any
+    """Gets the next value in the iterator and increments pointer.
+
+    Returns:
+      Next value in iterator.
+
+    Raises:
+      StopIteration if you're at the end of the iterator.
+    """
+    return self.next()
+
   def next(self):
     # type: () -> Any
     """Gets the next value in the iterator and increments pointer.
 
+    For backwards compatibility with Python 2.
+
     Returns:
       Next value in iterator.
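
Note on the PeekIterator change above: having __next__ delegate to next() is a
common way to keep a hand-written iterator working under both Python 2 (whose
for-loops call next()) and Python 3 (whose for-loops call __next__()). Below is
a minimal, self-contained sketch of the same pattern; the class and attribute
names are illustrative only, not Monorail's actual implementation.

    class PeekableIterator(object):
      """Iterates over a sequence while allowing callers to peek ahead."""

      def __init__(self, source):
        self._items = list(source)
        self._pos = 0

      def __iter__(self):
        return self

      def peek(self):
        # Return the upcoming value without advancing the pointer.
        if self._pos >= len(self._items):
          raise StopIteration()
        return self._items[self._pos]

      def __next__(self):
        # Python 3 iterator protocol entry point: delegate to next() below.
        return self.next()

      def next(self):
        # Python 2 iterator protocol entry point; the single place the
        # pointer actually advances.
        if self._pos >= len(self._items):
          raise StopIteration()
        value = self._items[self._pos]
        self._pos += 1
        return value

    # Works the same under Python 2 and Python 3 for-loops.
    it = PeekableIterator(['a', 'b', 'c'])
    for item in it:
      print(item)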