Merge branch 'main' into avm99963-monorail

Merged commit cd4b3b336f1f14afa02990fdc2eec5d9467a827e

GitOrigin-RevId: e67bbf185d5538e1472bb42e0abb2a141f88bac1
diff --git a/framework/banned.py b/framework/banned.py
index cb0e220..231f76f 100644
--- a/framework/banned.py
+++ b/framework/banned.py
@@ -18,7 +18,7 @@
 
 import ezt
 
-from framework import permissions
+from framework import flaskservlet, permissions
 from framework import servlet
 
 
@@ -52,3 +52,6 @@
         # user back to this page after they sign out.
         'currentPageURLEncoded': None,
         }
+
+  # def GetNoAccessPage(self, **kwargs):
+  #   return self.handler(**kwargs)
diff --git a/framework/clientmon.py b/framework/clientmon.py
index cc4917c..f512f4d 100644
--- a/framework/clientmon.py
+++ b/framework/clientmon.py
@@ -18,6 +18,8 @@
 
 from infra_libs import ts_mon
 
+
+# TODO: convert to FlaskJsonFeed when converting to Flask
 class ClientMonitor(jsonfeed.JsonFeed):
   """JSON feed to track client side js errors in ts_mon."""
 
@@ -35,6 +37,8 @@
       Dict of values used by EZT for rendering the page.
     """
 
+    # TODO: uncomment when converting to Flask
+    # post_data = mr.request.values
     post_data = mr.request.POST
     errors = post_data.get('errors')
     try:
@@ -50,3 +54,9 @@
       logging.error('Problem processing client monitor report: %r', e)
 
     return {}
+
+  # def GetClientMonitor(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+  # def PostClientMonitor(self, **kwargs):
+  #   return self.handler(**kwargs)
diff --git a/framework/cloud_tasks_helpers.py b/framework/cloud_tasks_helpers.py
index a00fa0d..bd9b7f9 100644
--- a/framework/cloud_tasks_helpers.py
+++ b/framework/cloud_tasks_helpers.py
@@ -12,7 +12,7 @@
 from __future__ import print_function
 
 import logging
-import urllib
+from six.moves import urllib
 
 from google.api_core import exceptions
 from google.api_core import retry
@@ -91,7 +91,7 @@
       'app_engine_http_request':
           {
               'relative_uri': url,
-              'body': urllib.urlencode(params),
+              'body': urllib.parse.urlencode(params),
               'headers': {
                   'Content-type': 'application/x-www-form-urlencoded'
               }
diff --git a/framework/deleteusers.py b/framework/deleteusers.py
index 0c23ac5..739782e 100644
--- a/framework/deleteusers.py
+++ b/framework/deleteusers.py
@@ -32,6 +32,7 @@
   return credentials.authorize(httplib2.Http(timeout=60))
 
 
+# TODO: change to FlaskInternalTask when converting to Flask
 class WipeoutSyncCron(jsonfeed.InternalTask):
   """Enqueue tasks for sending user lists to wipeout-lite and deleting deleted
      users fetched from wipeout-lite."""
@@ -61,7 +62,14 @@
     cloud_tasks_helpers.create_task(
         task, queue=framework_constants.QUEUE_FETCH_WIPEOUT_DELETED_USERS)
 
+  # def GetWipeoutSyncCron(self, **kwargs):
+  #   return self.handler(**kwargs)
 
+  # def PostWipeoutSyncCron(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+
+# TODO: set to FlaskInternalTask when converting to Flask
 class SendWipeoutUserListsTask(jsonfeed.InternalTask):
   """Sends a batch of monorail users to wipeout-lite."""
 
@@ -87,7 +95,14 @@
     logging.info(
         'Received response, %s with contents, %s', resp, data)
 
+  # def GetSendWipeoutUserListsTask(self, **kwargs):
+  #   return self.handler(**kwargs)
 
+  # def PostSendWipeoutUserListsTask(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+
+# TODO: set to FlaskInternalTask when converting to Flask
 class DeleteWipeoutUsersTask(jsonfeed.InternalTask):
   """Fetches deleted users from wipeout-lite and enqueues tasks to delete
      those users from Monorail's DB."""
@@ -122,7 +137,14 @@
         'Received response, %s with contents, %s', resp, data)
     return json.loads(data)
 
+  # def GetDeleteWipeoutUsersTask(self, **kwargs):
+  #   return self.handler(**kwargs)
 
+  # def PostDeleteWipeoutUsersTask(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+
+# TODO: set to FlaskInternalTask when converting to Flask
 class DeleteUsersTask(jsonfeed.InternalTask):
   """Deletes users from Monorail's DB."""
 
@@ -137,3 +159,9 @@
       return
     with work_env.WorkEnv(mr, self.services) as we:
       we.ExpungeUsers(emails, check_perms=False)
+
+  # def GetDeleteUsersTask(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+  # def PostDeleteUsersTask(self, **kwargs):
+  #   return self.handler(**kwargs)
diff --git a/framework/excessiveactivity.py b/framework/excessiveactivity.py
index c9933db..0e54ebd 100644
--- a/framework/excessiveactivity.py
+++ b/framework/excessiveactivity.py
@@ -12,14 +12,19 @@
 from __future__ import division
 from __future__ import absolute_import
 
-from framework import flaskservlet
+from framework import servlet
 
 
-class ExcessiveActivity(flaskservlet.FlaskServlet):
+class ExcessiveActivity(servlet.Servlet):
   """ExcessiveActivity page shows an error message."""
 
   _PAGE_TEMPLATE = 'framework/excessive-activity-page.ezt'
 
+  # pylint: disable=unused-argument
+  def GetExcessiveActivity(self, **kwargs):
+    return
+    # return self.handler(**kwargs)
+
   def GatherPageData(self, _mr):
     """Build up a dictionary of data values to use when rendering the page."""
     return {}
diff --git a/framework/flaskservlet.py b/framework/flaskservlet.py
index 9599884..fce3eab 100644
--- a/framework/flaskservlet.py
+++ b/framework/flaskservlet.py
@@ -25,6 +25,7 @@
 import ezt
 from features import features_bizobj, hotlist_views
 import flask
+import httpagentparser
 from project import project_constants
 from proto import project_pb2
 from search import query2ast
@@ -38,7 +39,6 @@
 from framework import ratelimiter
 from framework import template_helpers
 from framework import xsrf
-from third_party import httpagentparser
 
 from google.appengine.api import app_identity
 from google.appengine.api import modules
@@ -61,6 +61,7 @@
   _MAIN_TAB_MODE = None  # Normally overridden in subclasses to be one of these:
 
   MAIN_TAB_ISSUES = 't2'
+  MAIN_TAB_PEOPLE = 't3'
   IN_TAB_PEOPLE = 't3'
   MAIN_TAB_PROCESS = 't4'
   MAIN_TAB_UPDATES = 't5'
@@ -111,10 +112,13 @@
     self.content_type = content_type
     self.mr = None
     self.request = flask.request
+    self.request_path = None
     self.response = None
     self.ratelimiter = ratelimiter.RateLimiter()
+    self.redirect_url = None
 
-  def handler(self):
+  # pylint: disable=unused-argument
+  def handler(self, **kwargs):
     """Do common stuff then dispatch the request to get() or put() methods."""
     self.response = flask.make_response()
     handler_start_time = time.time()
@@ -128,6 +132,7 @@
     # GC_COUNT.add(count2, {'generation': 2})
 
     self.mr = monorailrequest.MonorailRequest(self.services)
+    self.request_path = self.request.base_url[len(self.request.host_url) - 1:]
     self.response.headers.add(
         'Strict-Transport-Security', 'max-age=31536000; includeSubDomains')
 
@@ -163,6 +168,8 @@
 
       if self.request.method == 'POST':
         self.post()
+        if self.redirect_url:
+          return self.redirect(self.redirect_url)
       elif self.request.method == 'GET':
         self.get()
     except exceptions.NoSuchUserException as e:
@@ -194,7 +201,7 @@
     except ratelimiter.RateLimitExceeded as e:
       logging.info('RateLimitExceeded Exception %s', e)
       self.response.status_code = httplib.BAD_REQUEST
-      self.response.response = 'Slow your roll.'
+      self.response.set_data('Slow your roll.')
 
     finally:
       self.mr.CleanUp()
@@ -380,7 +387,7 @@
     if self.CHECK_SECURITY_TOKEN:
       try:
         xsrf.ValidateToken(
-            request.values.get('token'), mr.auth.user_id, request.path)
+            request.values.get('token'), mr.auth.user_id, self.request_path)
       except xsrf.TokenIncorrect as err:
         if self.ALLOW_XHR:
           xsrf.ValidateToken(
@@ -388,14 +395,14 @@
         else:
           raise err
 
-    redirect_url = self.ProcessFormData(mr, request.values)
+    self.redirect_url = self.ProcessFormData(mr, request.values)
 
     # Most forms redirect the user to a new URL on success.  If no
     # redirect_url was returned, the form handler must have already
     # sent a response.  E.g., bounced the user back to the form with
     # invalid form fields highlighted.
-    if redirect_url:
-      self.redirect(redirect_url, abort=True)
+    if self.redirect_url:
+      return self.redirect(self.redirect_url, abort=True)
     else:
       assert self.response.response
 
@@ -676,8 +683,7 @@
         mr.auth.user_id, xsrf.XHR_SERVLET_PATH)
     # Always add other anti-xsrf tokens when the user is logged in.
     if mr.auth.user_id:
-      form_token_path = self._FormHandlerURL(mr.request.path)
-      form_token_path = '/'
+      form_token_path = self._FormHandlerURL(mr.request_path)
       base_data['form_token'] = xsrf.GenerateToken(
           mr.auth.user_id, form_token_path)
       base_data['form_token_path'] = form_token_path
@@ -753,7 +759,7 @@
     if not mr.project.moved_to:
       return  # This project has not moved.
     admin_url = '/p/%s%s' % (mr.project_name, urls.ADMIN_META)
-    if request.path.startswith(admin_url):
+    if self.request_path.startswith(admin_url):
       return  # It moved, but we are near the page that can un-move it.
 
     logging.info(
@@ -852,10 +858,9 @@
 
   def redirect(self, url, abort=False):
     if abort:
-      flask.redirect(url, code=302)
-      flask.abort(302)
+      return flask.redirect(url, code=302)
     else:
-      flask.redirect(url)
+      return flask.redirect(url)
 
   def PleaseCorrect(self, mr, **echo_data):
     """Show the same form again so that the user can correct their input."""
@@ -871,3 +876,6 @@
           now - framework_constants.VISIT_RESOLUTION):
         user_pb.last_visit_timestamp = now
         self.services.user.UpdateUser(mr.cnxn, user_pb.user_id, user_pb)
+
+  def abort(self, code, context):
+    return flask.abort(code, context)
diff --git a/framework/framework_helpers.py b/framework/framework_helpers.py
index b7199b1..47c34ef 100644
--- a/framework/framework_helpers.py
+++ b/framework/framework_helpers.py
@@ -17,8 +17,7 @@
 import threading
 import time
 import traceback
-import urllib
-import urlparse
+from six.moves.urllib.parse import urlparse, quote, urlunparse
 
 from google.appengine.api import app_identity
 
@@ -237,12 +236,12 @@
     The url transposed into the given destination project.
   """
   project_name = moved_to
-  _, _, path, parameters, query, fragment_identifier = urlparse.urlparse(
+  _, _, path, parameters, query, fragment_identifier = urlparse(
       mr.current_page_url)
   # Strip off leading "/p/<moved from project>"
   path = '/' + path.split('/', 3)[3]
-  rest_of_url = urlparse.urlunparse(
-    ('', '', path, parameters, query, fragment_identifier))
+  rest_of_url = urlunparse(
+      ('', '', path, parameters, query, fragment_identifier))
   return '/p/%s%s' % (project_name, rest_of_url)
 
 
@@ -302,8 +301,9 @@
     A URL with the specified query parameters.
   """
   param_string = '&'.join(
-      '%s=%s' % (name, urllib.quote(six.text_type(value).encode('utf-8')))
-      for name, value in params if value is not None)
+      '%s=%s' % (name, quote(six.text_type(value).encode('utf-8')))
+      for name, value in params
+      if value is not None)
   if not param_string:
     qs_start_char = ''
   elif '?' in url:
diff --git a/framework/gcs_helpers.py b/framework/gcs_helpers.py
index a01b565..9da5b11 100644
--- a/framework/gcs_helpers.py
+++ b/framework/gcs_helpers.py
@@ -8,11 +8,9 @@
 from __future__ import division
 from __future__ import absolute_import
 
-import base64
 import logging
 import os
-import time
-import urllib
+from six.moves import urllib
 import uuid
 
 from datetime import datetime, timedelta
@@ -21,8 +19,7 @@
 from google.appengine.api import images
 from google.appengine.api import memcache
 from google.appengine.api import urlfetch
-from third_party import cloudstorage
-from third_party.cloudstorage import errors
+from google.cloud import storage
 
 from framework import filecontent
 from framework import framework_constants
@@ -56,27 +53,39 @@
   pass
 
 
-def DeleteObjectFromGCS(object_id):
-  object_path = ('/' + app_identity.get_default_gcs_bucket_name() + object_id)
-  cloudstorage.delete(object_path)
+def _RemoveLeadingSlash(text):
+  if text.startswith('/'):
+    return text[1:]
+  return text
+
+
+def DeleteObjectFromGCS(blob_name):
+  storage_client = storage.Client()
+  bucket_name = app_identity.get_default_gcs_bucket_name()
+  bucket = storage_client.bucket(bucket_name)
+  validated_blob_name = _RemoveLeadingSlash(blob_name)
+  blob = bucket.get_blob(validated_blob_name)
+  blob.delete()
 
 
 def StoreObjectInGCS(
     content, mime_type, project_id, thumb_width=DEFAULT_THUMB_WIDTH,
     thumb_height=DEFAULT_THUMB_HEIGHT, filename=None):
+  storage_client = storage.Client()
   bucket_name = app_identity.get_default_gcs_bucket_name()
+  bucket = storage_client.bucket(bucket_name)
   guid = uuid.uuid4()
-  object_id = '/%s/attachments/%s' % (project_id, guid)
-  object_path = '/' + bucket_name + object_id
-  options = {}
+  blob_name = '%s/attachments/%s' % (project_id, guid)
+
+  blob = bucket.blob(blob_name)
   if filename:
     if not framework_constants.FILENAME_RE.match(filename):
       logging.info('bad file name: %s' % filename)
       filename = 'attachment.dat'
-    options['Content-Disposition'] = 'inline; filename="%s"' % filename
-  logging.info('Writing with options %r', options)
-  with cloudstorage.open(object_path, 'w', mime_type, options=options) as f:
-    f.write(content)
+    content_disposition = 'inline; filename="%s"' % filename
+    blob.content_disposition = content_disposition
+    logging.info('Writing with content_disposition %r', content_disposition)
+  blob.upload_from_string(content, content_type=mime_type)
 
   if mime_type in RESIZABLE_MIME_TYPES:
     # Create and save a thumbnail too.
@@ -93,11 +102,12 @@
       # detail.
       logging.exception(e)
     if thumb_content:
-      thumb_path = '%s-thumbnail' % object_path
-      with cloudstorage.open(thumb_path, 'w', 'image/png') as f:
-        f.write(thumb_content)
+      thumb_blob_name = '%s-thumbnail' % blob_name
+      thumb_blob = bucket.blob(thumb_blob_name)
+      thumb_blob.upload_from_string(thumb_content, content_type='image/png')
 
-  return object_id
+  # Our database, sadly, stores these with the leading slash.
+  return '/%s' % blob_name
 
 
 def CheckMimeTypeResizable(mime_type):
@@ -151,7 +161,7 @@
         object_id = object_id[1:]
       url = result.format(
           bucket=bucket,
-          object_id=urllib.quote_plus(object_id),
+          object_id=urllib.parse.quote_plus(object_id),
           token=app_identity.get_access_token(scopes)[0])
       attachment_url = _FetchSignedURL(url)
 
@@ -166,42 +176,39 @@
     return '/missing-gcs-url'
 
 
-def MaybeCreateDownload(bucket_name, object_id, filename):
+def MaybeCreateDownload(bucket_name, blob_name, filename):
   """If the obj is not huge, and no download version exists, create it."""
-  src = '/%s%s' % (bucket_name, object_id)
-  dst = '/%s%s-download' % (bucket_name, object_id)
-  cloudstorage.validate_file_path(src)
-  cloudstorage.validate_file_path(dst)
-  logging.info('Maybe create %r from %r', dst, src)
+  validated_blob_name = _RemoveLeadingSlash(blob_name)
+  dst_blob_name = '%s-download' % validated_blob_name
+  logging.info('Maybe create %r from %r', dst_blob_name, validated_blob_name)
 
   if IS_DEV_APPSERVER:
     logging.info('dev environment never makes download copies.')
     return False
 
-  # If "Download" object already exists, we are done.
-  try:
-    cloudstorage.stat(dst)
+  storage_client = storage.Client()
+  bucket = storage_client.bucket(bucket_name)
+
+  # Validate "View" object.
+  src_blob = bucket.get_blob(validated_blob_name)
+  if not src_blob:
+    return False
+  # If the "Download" object already exists, we are done.
+  # `Bucket.blob` doesn't make an HTTP request.
+  dst_blob = bucket.blob(dst_blob_name)
+  if dst_blob.exists():
     logging.info('Download version of attachment already exists')
     return True
-  except errors.NotFoundError:
-    pass
-
-  # If "View" object is huge, give up.
-  src_stat = cloudstorage.stat(src)
-  if src_stat.st_size > MAX_ATTACH_SIZE_TO_COPY:
+  # If "View" object is huge, don't create a download.
+  if src_blob.size > MAX_ATTACH_SIZE_TO_COPY:
     logging.info('Download version of attachment would be too big')
     return False
 
-  with cloudstorage.open(src, 'r') as infile:
-    content = infile.read()
-  logging.info('opened GCS object and read %r bytes', len(content))
-  content_type = src_stat.content_type
-  options = {
-    'Content-Disposition': 'attachment; filename="%s"' % filename,
-    }
-  logging.info('Writing with options %r', options)
-  with cloudstorage.open(dst, 'w', content_type, options=options) as outfile:
-    outfile.write(content)
+  copied_dst_blob = bucket.copy_blob(src_blob, bucket, dst_blob_name)
+  content_disposition = 'attachment; filename="%s"' % filename
+  logging.info('Copying with content_disposition %r', content_disposition)
+  copied_dst_blob.content_disposition = content_disposition
+  copied_dst_blob.patch()
   logging.info('done writing')
 
   return True
diff --git a/framework/jsonfeed.py b/framework/jsonfeed.py
index 44e9cea..b7d85ac 100644
--- a/framework/jsonfeed.py
+++ b/framework/jsonfeed.py
@@ -12,7 +12,7 @@
 from __future__ import division
 from __future__ import absolute_import
 
-import httplib
+from six.moves import http_client
 import json
 import logging
 
@@ -21,6 +21,8 @@
 import settings
 
 from framework import framework_constants
+from framework import flaskservlet
+from framework import servlet_helpers
 from framework import permissions
 from framework import servlet
 from framework import xsrf
@@ -49,7 +51,7 @@
     Returns:
       A dictionary of json data.
     """
-    raise servlet.MethodNotSupportedError()
+    raise servlet_helpers.MethodNotSupportedError()
 
   def _DoRequestHandling(self, request, mr):
     """Do permission checking, page processing, and response formatting."""
@@ -72,7 +74,7 @@
       if self.CHECK_SAME_APP and not settings.local_mode:
         calling_app_id = request.headers.get('X-Appengine-Inbound-Appid')
         if calling_app_id != app_identity.get_application_id():
-          self.response.status = httplib.FORBIDDEN
+          self.response.status = http_client.FORBIDDEN
           return
 
       self._CheckForMovedProject(mr, request)
@@ -89,7 +91,7 @@
       self.abort(400, msg)
     except permissions.PermissionException as e:
       logging.info('Trapped PermissionException %s', e)
-      self.response.status = httplib.FORBIDDEN
+      self.response.status = http_client.FORBIDDEN
 
   # pylint: disable=unused-argument
   # pylint: disable=arguments-differ
@@ -132,3 +134,99 @@
   """Internal tasks are JSON feeds that can only be reached by our own code."""
 
   CHECK_SECURITY_TOKEN = False
+
+
+class FlaskJsonFeed(flaskservlet.FlaskServlet):
+  """A convenient base class for JSON feeds."""
+
+  # By default, JSON output is compact.  Subclasses can set this to
+  # an integer, like 4, for pretty-printed output.
+  JSON_INDENT = None
+
+  # Some JSON handlers can only be accessed from our own app.
+  CHECK_SAME_APP = False
+
+  def HandleRequest(self, _mr):
+    """Override this method to implement handling of the request.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      A dictionary of json data.
+    """
+    raise servlet_helpers.MethodNotSupportedError()
+
+  def _DoRequestHandling(self, request, mr):
+    """Do permission checking, page processing, and response formatting."""
+    try:
+      if self.CHECK_SECURITY_TOKEN and mr.auth.user_id:
+        try:
+          logging.info('request in jsonfeed is %r', request)
+          xsrf.ValidateToken(mr.token, mr.auth.user_id, request.path)
+        except xsrf.TokenIncorrect:
+          logging.info('using token path "xhr"')
+          xsrf.ValidateToken(mr.token, mr.auth.user_id, xsrf.XHR_SERVLET_PATH)
+
+      if self.CHECK_SAME_APP and not settings.local_mode:
+        calling_app_id = request.headers.get('X-Appengine-Inbound-Appid')
+        if calling_app_id != app_identity.get_application_id():
+          self.response.status = http_client.FORBIDDEN
+          return
+
+      self._CheckForMovedProject(mr, request)
+      self.AssertBasePermission(mr)
+
+      json_data = self.HandleRequest(mr)
+
+      self._RenderJsonResponse(json_data)
+
+    except query2ast.InvalidQueryError as e:
+      logging.warning('Trapped InvalidQueryError: %s', e)
+      logging.exception(e)
+      msg = e.message if e.message else 'invalid query'
+      self.abort(400, msg)
+    except permissions.PermissionException as e:
+      logging.info('Trapped PermissionException %s', e)
+      self.response.status = http_client.FORBIDDEN
+
+  # pylint: disable=unused-argument
+  # pylint: disable=arguments-differ
+  # Note: unused arguments necessary because they are specified in
+  # registerpages.py as an extra URL validation step even though we
+  # do our own URL parsing in monorailrequest.py
+  def get(self, **kwargs):
+    """Collect page-specific and generic info, then render the page.
+
+    Args:
+      project_name: string project name parsed from the URL by the routing
+        layer, but we also parse it out in our code.
+      viewed_username: string user email parsed from the URL by the routing
+        layer, but we also parse it out in our code.
+      hotlist_id: string hotlist id parsed from the URL by the routing
+        layer, but we also parse it out in our code.
+    """
+    self._DoRequestHandling(self.mr.request, self.mr)
+
+  # pylint: disable=unused-argument
+  # pylint: disable=arguments-differ
+  def post(self, **kwargs):
+    """Parse the request, check base perms, and call form-specific code."""
+    self._DoRequestHandling(self.mr.request, self.mr)
+
+  def _RenderJsonResponse(self, json_data):
+    """Serialize the data as JSON so that it can be sent to the browser."""
+    json_str = json.dumps(json_data, indent=self.JSON_INDENT)
+    logging.debug(
+        'Sending JSON response: %r length: %r',
+        json_str[:framework_constants.LOGGING_MAX_LENGTH], len(json_str))
+    self.response.content_type = framework_constants.CONTENT_TYPE_JSON
+    self.response.headers['X-Content-Type-Options'] = (
+        framework_constants.CONTENT_TYPE_JSON_OPTIONS)
+    self.response.set_data(XSSI_PREFIX + json_str)
+
+
+class FlaskInternalTask(FlaskJsonFeed):
+  """Internal tasks are JSON feeds that can only be reached by our own code."""
+
+  CHECK_SECURITY_TOKEN = False
diff --git a/framework/monorailrequest.py b/framework/monorailrequest.py
index 94dd9d6..7fe2918 100644
--- a/framework/monorailrequest.py
+++ b/framework/monorailrequest.py
@@ -16,7 +16,7 @@
 import endpoints
 import logging
 import re
-import urllib
+from six.moves import urllib
 
 import ezt
 import six
@@ -240,8 +240,10 @@
     """
     with self.profiler.Phase('basic parsing'):
       self.request = request
+      self.request_path = request.path
       self.current_page_url = request.url
-      self.current_page_url_encoded = urllib.quote_plus(self.current_page_url)
+      self.current_page_url_encoded = urllib.parse.quote_plus(
+          self.current_page_url)
 
       # Only accept a hostport from the request that looks valid.
       if not _HOSTPORT_RE.match(request.host):
@@ -251,9 +253,8 @@
       logging.info('Request: %s', self.current_page_url)
 
     with self.profiler.Phase('path parsing'):
-      (viewed_user_val, self.project_name,
-       self.hotlist_id, self.hotlist_name) = _ParsePathIdentifiers(
-           self.request.path)
+      (viewed_user_val, self.project_name, self.hotlist_id,
+       self.hotlist_name) = _ParsePathIdentifiers(self.request_path)
       self.viewed_username = _GetViewedEmail(
           viewed_user_val, self.cnxn, services)
     with self.profiler.Phase('qs parsing'):
@@ -296,8 +297,10 @@
     """
     with self.profiler.Phase('basic parsing'):
       self.request = request
+      self.request_path = request.base_url[len(request.host_url) - 1:]
       self.current_page_url = request.url
-      self.current_page_url_encoded = urllib.quote_plus(self.current_page_url)
+      self.current_page_url_encoded = urllib.parse.quote_plus(
+          self.current_page_url)
 
       # Only accept a hostport from the request that looks valid.
       if not _HOSTPORT_RE.match(request.host):
@@ -308,7 +311,7 @@
 
     with self.profiler.Phase('path parsing'):
       (viewed_user_val, self.project_name, self.hotlist_id,
-       self.hotlist_name) = _ParsePathIdentifiers(self.request.url)
+       self.hotlist_name) = _ParsePathIdentifiers(self.request_path)
       self.viewed_username = _GetViewedEmail(
           viewed_user_val, self.cnxn, services)
     with self.profiler.Phase('qs parsing'):
@@ -335,10 +338,10 @@
     prod_debug_allowed = self.perms.HasPerm(
         permissions.VIEW_DEBUG, self.auth.user_id, None)
     self.debug_enabled = (
-        request.args.get('debug') and
+        request.values.get('debug') and
         (settings.local_mode or prod_debug_allowed))
     # temporary option for perf testing on staging instance.
-    if request.args.get('disable_cache'):
+    if request.values.get('disable_cache'):
       if settings.local_mode or 'staging' in request.host:
         self.use_cached_searches = False
 
@@ -597,7 +600,7 @@
     if hasattr(self.request, 'params'):
       value = self.request.params.get(query_param_name)
     else:
-      value = self.request.args.get(query_param_name)
+      value = self.request.values.get(query_param_name)
     assert value is None or isinstance(value, six.text_type)
     using_default = value is None
     if using_default:
@@ -620,7 +623,7 @@
     if hasattr(self.request, 'params'):
       value = self.request.params.get(query_param_name)
     else:
-      value = self.request.args.get(query_param_name)
+      value = self.request.values.get(query_param_name)
     if value is None or value == '':
       return default_value
 
@@ -641,7 +644,7 @@
     if hasattr(self.request, 'params'):
       params = self.request.params.get(query_param_name)
     else:
-      params = self.request.args.get(query_param_name)
+      params = self.request.values.get(query_param_name)
     if params is None:
       return default_value
     if not params:
@@ -665,7 +668,7 @@
     if hasattr(self.request, 'params'):
       value = self.request.params.get(query_param_name)
     else:
-      value = self.request.args.get(query_param_name)
+      value = self.request.values.get(query_param_name)
 
     if value is None:
       return default_value
@@ -698,15 +701,14 @@
     if split_path[0] == 'p':
       project_name = split_path[1]
     if split_path[0] == 'u' or split_path[0] == 'users':
-      viewed_user_val = urllib.unquote(split_path[1])
+      viewed_user_val = urllib.parse.unquote(split_path[1])
       if len(split_path) >= 4 and split_path[2] == 'hotlists':
         try:
-          hotlist_id = int(
-              urllib.unquote(split_path[3].split('.')[0]))
+          hotlist_id = int(urllib.parse.unquote(split_path[3].split('.')[0]))
         except ValueError:
           raw_last_path = (split_path[3][:-3] if
                         split_path[3].endswith('.do') else split_path[3])
-          last_path = urllib.unquote(raw_last_path)
+          last_path = urllib.parse.unquote(raw_last_path)
           match = framework_bizobj.RE_HOTLIST_NAME.match(
               last_path)
           if not match:
@@ -716,7 +718,7 @@
             hotlist_name = last_path.lower()
 
     if split_path[0] == 'g':
-      viewed_user_val = urllib.unquote(split_path[1])
+      viewed_user_val = urllib.parse.unquote(split_path[1])
 
   return viewed_user_val, project_name, hotlist_id, hotlist_name
 
diff --git a/framework/permissions.py b/framework/permissions.py
index eb40dc7..ac46af6 100644
--- a/framework/permissions.py
+++ b/framework/permissions.py
@@ -104,7 +104,6 @@
 # Permission to flag any artifact as spam.
 FLAG_SPAM = 'FlagSpam'
 VERDICT_SPAM = 'VerdictSpam'
-MODERATE_SPAM = 'ModerateSpam'
 
 # Permissions for custom fields.
 EDIT_FIELD_DEF = 'EditFieldDef'
@@ -123,10 +122,10 @@
     tracker_pb2.ApprovalStatus.NOT_APPROVED]
 
 STANDARD_ADMIN_PERMISSIONS = [
-    EDIT_PROJECT, CREATE_PROJECT, PUBLISH_PROJECT, VIEW_DEBUG,
-    EDIT_OTHER_USERS, CUSTOMIZE_PROCESS,
-    VIEW_QUOTA, EDIT_QUOTA, ADMINISTER_SITE,
-    EDIT_ANY_MEMBER_NOTES, VERDICT_SPAM, MODERATE_SPAM]
+    EDIT_PROJECT, CREATE_PROJECT, PUBLISH_PROJECT, VIEW_DEBUG, EDIT_OTHER_USERS,
+    CUSTOMIZE_PROCESS, VIEW_QUOTA, EDIT_QUOTA, ADMINISTER_SITE,
+    EDIT_ANY_MEMBER_NOTES, VERDICT_SPAM
+]
 
 STANDARD_ISSUE_PERMISSIONS = [
     VIEW, EDIT_ISSUE, ADD_ISSUE_COMMENT, DELETE_ISSUE, FLAG_SPAM]
@@ -315,18 +314,16 @@
     consider_restrictions=False)
 
 ADMIN_PERMISSIONSET = PermissionSet(
-    [VIEW, VIEW_CONTRIBUTOR_LIST,
-     CREATE_PROJECT, EDIT_PROJECT, PUBLISH_PROJECT, VIEW_DEBUG,
-     COMMIT, CUSTOMIZE_PROCESS, FLAG_SPAM, VERDICT_SPAM, SET_STAR,
-     ADMINISTER_SITE, VIEW_EXPIRED_PROJECT, EDIT_OTHER_USERS,
-     VIEW_QUOTA, EDIT_QUOTA,
-     CREATE_ISSUE, ADD_ISSUE_COMMENT, EDIT_ISSUE, DELETE_ISSUE,
-     EDIT_ISSUE_APPROVAL,
-     VIEW_INBOUND_MESSAGES,
-     DELETE_ANY, EDIT_ANY_MEMBER_NOTES,
-     CREATE_GROUP, EDIT_GROUP, DELETE_GROUP, VIEW_GROUP,
-     MODERATE_SPAM, CREATE_HOTLIST],
-     consider_restrictions=False)
+    [
+        VIEW, VIEW_CONTRIBUTOR_LIST, CREATE_PROJECT, EDIT_PROJECT,
+        PUBLISH_PROJECT, VIEW_DEBUG, COMMIT, CUSTOMIZE_PROCESS, FLAG_SPAM,
+        VERDICT_SPAM, SET_STAR, ADMINISTER_SITE, VIEW_EXPIRED_PROJECT,
+        EDIT_OTHER_USERS, VIEW_QUOTA, EDIT_QUOTA, CREATE_ISSUE,
+        ADD_ISSUE_COMMENT, EDIT_ISSUE, DELETE_ISSUE, EDIT_ISSUE_APPROVAL,
+        VIEW_INBOUND_MESSAGES, DELETE_ANY, EDIT_ANY_MEMBER_NOTES, CREATE_GROUP,
+        EDIT_GROUP, DELETE_GROUP, VIEW_GROUP, CREATE_HOTLIST
+    ],
+    consider_restrictions=False)
 
 GROUP_IMPORT_BORG_PERMISSIONSET = PermissionSet(
     [CREATE_GROUP, VIEW_GROUP, EDIT_GROUP])
diff --git a/framework/reap.py b/framework/reap.py
index 6bc5cf0..d0b721f 100644
--- a/framework/reap.py
+++ b/framework/reap.py
@@ -16,6 +16,7 @@
 RUN_DURATION_LIMIT = 50 * 60  # 50 minutes
 
 
+# TODO: change to FlaskInternalTask when converting to Flask
 class Reap(jsonfeed.InternalTask):
   """Look for doomed and deletable projects and delete them."""
 
@@ -123,3 +124,9 @@
     for f in project_purge_functions:
       f(cnxn, project_id)
       yield project_id
+
+  # def GetReap(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+  # def PostReap(self, **kwargs):
+  #   return self.handler(**kwargs)
diff --git a/framework/redis_utils.py b/framework/redis_utils.py
deleted file mode 100644
index 440603b..0000000
--- a/framework/redis_utils.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""A utility module for interfacing with Redis conveniently. """
-import json
-import logging
-import threading
-
-import redis
-
-import settings
-from protorpc import protobuf
-
-connection_pool = None
-
-def CreateRedisClient():
-  # type: () -> redis.Redis
-  """Creates a Redis object which implements Redis protocol and connection.
-
-  Returns:
-    redis.Redis object initialized with a connection pool.
-    None on failure.
-  """
-  global connection_pool
-  if not connection_pool:
-    connection_pool = redis.BlockingConnectionPool(
-        host=settings.redis_host,
-        port=settings.redis_port,
-        max_connections=1,
-        # When Redis is not available, calls hang indefinitely without these.
-        socket_connect_timeout=2,
-        socket_timeout=2,
-    )
-  return redis.Redis(connection_pool=connection_pool)
-
-
-def AsyncVerifyRedisConnection():
-  # type: () -> None
-  """Verifies the redis connection in a separate thread.
-
-  Note that although an exception in the thread won't kill the main thread,
-  it is not risk free.
-
-  AppEngine joins with any running threads before finishing the request.
-  If this thread were to hang indefinitely, then it would cause the request
-  to hit DeadlineExceeded, thus still causing a user facing failure.
-
-  We mitigate this risk by setting socket timeouts on our connection pool.
-
-  # TODO(crbug/monorail/8221): Remove this code during this milestone.
-  """
-
-  def _AsyncVerifyRedisConnection():
-    logging.info('AsyncVerifyRedisConnection thread started.')
-    redis_client = CreateRedisClient()
-    VerifyRedisConnection(redis_client)
-
-  logging.info('Starting thread for AsyncVerifyRedisConnection.')
-  threading.Thread(target=_AsyncVerifyRedisConnection).start()
-
-
-def FormatRedisKey(key, prefix=None):
-  # type: (int, str) -> str
-  """Converts key to string and prepends the prefix.
-
-  Args:
-    key: Integer key.
-    prefix: String to prepend to the key.
-
-  Returns:
-    Formatted key with the format: "namespace:prefix:key".
-  """
-  formatted_key = ''
-  if prefix:
-    if prefix[-1] != ':':
-      prefix += ':'
-    formatted_key += prefix
-  return formatted_key + str(key)
-
-def VerifyRedisConnection(redis_client, msg=None):
-  # type: (redis.Redis, Optional[str]) -> bool
-  """Checks the connection to Redis to ensure a connection can be established.
-
-  Args:
-    redis_client: client to connect and ping redis server. This can be a redis
-      or fakeRedis object.
-    msg: string for used logging information.
-
-  Returns:
-    True when connection to server is valid.
-    False when an error occurs or redis_client is None.
-  """
-  if not redis_client:
-    logging.info('Redis client is set to None on connect in %s', msg)
-    return False
-  try:
-    redis_client.ping()
-    logging.info('Redis client successfully connected to Redis in %s', msg)
-    return True
-  except redis.RedisError as identifier:
-    # TODO(crbug/monorail/8224): We can downgrade this to warning once we are
-    # done with the switchover from memcache. Before that, log it to ensure we
-    # see it.
-    logging.exception(
-        'Redis error occurred while connecting to server in %s: %s', msg,
-        identifier)
-    return False
-
-
-def SerializeValue(value, pb_class=None):
-  # type: (Any, Optional[type|classobj]) -> str
-  """Serialize object as for storage in Redis. """
-  if pb_class and pb_class is not int:
-    return protobuf.encode_message(value)
-  else:
-    return json.dumps(value)
-
-
-def DeserializeValue(value, pb_class=None):
-  # type: (str, Optional[type|classobj]) -> Any
-  """Deserialize a string to create a python object. """
-  if pb_class and pb_class is not int:
-    return protobuf.decode_message(pb_class, value)
-  else:
-    return json.loads(value)
diff --git a/framework/registerpages_helpers.py b/framework/registerpages_helpers.py
index 9982639..0073e57 100644
--- a/framework/registerpages_helpers.py
+++ b/framework/registerpages_helpers.py
@@ -9,7 +9,7 @@
 from __future__ import absolute_import
 
 
-import httplib
+from six.moves import http_client
 import logging
 
 import webapp2
@@ -27,7 +27,7 @@
       self.response.headers.add('Strict-Transport-Security',
           'max-age=31536000; includeSubDomains')
       self.response.status = (
-          httplib.MOVED_PERMANENTLY if permanent else httplib.FOUND)
+          http_client.MOVED_PERMANENTLY if permanent else http_client.FOUND)
 
   return Redirect
 
@@ -74,8 +74,8 @@
       self.response.headers.add('Strict-Transport-Security',
           'max-age=31536000; includeSubDomains')
       if permanent and not keep_qs:
-        self.response.status = httplib.MOVED_PERMANENTLY
+        self.response.status = http_client.MOVED_PERMANENTLY
       else:
-        self.response.status = httplib.FOUND
+        self.response.status = http_client.FOUND
 
   return RedirectInScope
diff --git a/framework/servlet.py b/framework/servlet.py
index e1c0cf1..462939a 100644
--- a/framework/servlet.py
+++ b/framework/servlet.py
@@ -21,16 +21,16 @@
 from __future__ import absolute_import
 
 import gc
-import httplib
+from six.moves import http_client
 import json
 import logging
 import os
 import random
 import time
-import urllib
+from six.moves import urllib
 
 import ezt
-from third_party import httpagentparser
+import httpagentparser
 
 from google.appengine.api import app_identity
 from google.appengine.api import modules
@@ -199,8 +199,8 @@
       except MySQLdb.OperationalError as e:
         logging.exception(e)
         page_data = {
-          'http_response_code': httplib.SERVICE_UNAVAILABLE,
-          'requested_url': self.request.url,
+            'http_response_code': http_client.SERVICE_UNAVAILABLE,
+            'requested_url': self.request.url,
         }
         self.template = template_helpers.GetTemplate(
             'templates/framework/database-maintenance.ezt',
@@ -228,15 +228,15 @@
 
     except exceptions.InputException as e:
       logging.info('Rejecting invalid input: %r', e)
-      self.response.status = httplib.BAD_REQUEST
+      self.response.status = http_client.BAD_REQUEST
 
     except exceptions.NoSuchProjectException as e:
       logging.info('Rejecting invalid request: %r', e)
-      self.response.status = httplib.NOT_FOUND
+      self.response.status = http_client.NOT_FOUND
 
     except xsrf.TokenIncorrect as e:
       logging.info('Bad XSRF token: %r', e.message)
-      self.response.status = httplib.BAD_REQUEST
+      self.response.status = http_client.BAD_REQUEST
 
     except permissions.BannedUserException as e:
       logging.warning('The user has been banned')
@@ -246,7 +246,7 @@
 
     except ratelimiter.RateLimitExceeded as e:
       logging.info('RateLimitExceeded Exception %s', e)
-      self.response.status = httplib.BAD_REQUEST
+      self.response.status = http_client.BAD_REQUEST
       self.response.body = 'Slow your roll.'
 
     finally:
@@ -369,8 +369,8 @@
         # Display the missing permissions template.
         page_data = {
             'reason': e.message,
-            'http_response_code': httplib.FORBIDDEN,
-            }
+            'http_response_code': http_client.FORBIDDEN,
+        }
         with self.mr.profiler.Phase('gather base data'):
           page_data.update(self.GatherBaseData(self.mr, nonce))
         self._AddHelpDebugPageData(page_data)
@@ -431,7 +431,7 @@
     except permissions.PermissionException as e:
       logging.warning('Trapped permission-related exception "%s".', e)
       # TODO(jrobbins): can we do better than an error page? not much.
-      self.response.status = httplib.BAD_REQUEST
+      self.response.status = http_client.BAD_REQUEST
 
   def _DoCommonRequestProcessing(self, request, mr):
     """Do common processing dependent on having the user and project pbs."""
diff --git a/framework/servlet_helpers.py b/framework/servlet_helpers.py
index 89fe587..fddec26 100644
--- a/framework/servlet_helpers.py
+++ b/framework/servlet_helpers.py
@@ -12,7 +12,7 @@
 import calendar
 import datetime
 import logging
-import urllib
+from six.moves import urllib
 import time
 
 from framework import framework_constants
@@ -175,7 +175,7 @@
       perm, mr.auth.effective_ids, project, permissions.GetRestrictions(art))
 
 
-def ComputeIssueEntryURL(mr, config):
+def ComputeIssueEntryURL(mr):
   """Compute the URL to use for the "New issue" subtab.
 
   Args:
@@ -187,13 +187,12 @@
     case. Otherewise it will be a fully qualified URL that includes some
     query string parameters.
   """
-  # TODO: remove the custom_issue_entry_url since its no longer
-  if not config.custom_issue_entry_url:
+  isMember = framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids)
+  if mr.project_name == 'chromium' and not isMember:
+    return '/p/chromium/issues/wizard'
+  else:
     return '/p/%s/issues/entry' % (mr.project_name)
 
-  return '/p/chromium/issues/wizard'
-
-
 def IssueListURL(mr, config, query_string=None):
   """Make an issue list URL for non-members or members."""
   url = '/p/%s%s' % (mr.project_name, urls.ISSUE_LIST)
@@ -201,7 +200,7 @@
     url += '?' + query_string
   elif framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids):
     if config and config.member_default_query:
-      url += '?q=' + urllib.quote_plus(config.member_default_query)
+      url += '?q=' + urllib.parse.quote_plus(config.member_default_query)
   return url
 
 
@@ -212,22 +211,25 @@
 
 def SafeCreateLoginURL(mr, continue_url=None):
   """Make a login URL w/ a detailed continue URL, otherwise use a short one."""
-  continue_url = continue_url or mr.current_page_url
+  current_page_url = mr.current_page_url_encoded
+  if settings.local_mode:
+    current_page_url = mr.current_page_url
+  continue_url = continue_url or current_page_url
   try:
-    url = users.create_login_url(continue_url)
+    # Check the URL length
+    generated_login_url = users.create_login_url(continue_url)
   except users.RedirectTooLongError:
     if mr.project_name:
-      url = users.create_login_url('/p/%s' % mr.project_name)
+      continue_url = '/p/%s' % mr.project_name
     else:
-      url = users.create_login_url('/')
-
-  # Give the user a choice of existing accounts in their session
-  # or the option to add an account, even if they are currently
-  # signed in to exactly one account.
-  if mr.auth.user_id:
-    # Notice: this makes assuptions about the output of users.create_login_url,
-    # which can change at any time. See https://crbug.com/monorail/3352.
-    url = url.replace('/ServiceLogin', '/AccountChooser', 1)
+      continue_url = '/'
+  if settings.local_mode:
+    return generated_login_url
+  # URL to allow user to choose an account when >1 account is logged in.
+  redirect_url = (
+      'https://accounts.google.com/AccountChooser?continue='
+      'https://uc.appengine.google.com/_ah/conflogin%3Fcontinue%3D{}')
+  url = redirect_url.format(continue_url)
   return url
 
 
diff --git a/framework/template_helpers.py b/framework/template_helpers.py
index 52a25d7..7ebb7e2 100644
--- a/framework/template_helpers.py
+++ b/framework/template_helpers.py
@@ -11,7 +11,7 @@
 
 import cgi
 import cStringIO
-import httplib
+from six.moves import http_client
 import logging
 import time
 import types
@@ -157,7 +157,7 @@
     if content_type:
       response.content_type = content_type
 
-    response.status = data.get('http_response_code', httplib.OK)
+    response.status = data.get('http_response_code', http_client.OK)
     whole_page = self.GetResponse(data)
     if data.get('prevent_sniffing'):
       for sniff_pattern, sniff_replacement in SNIFFABLE_PATTERNS.items():
@@ -171,13 +171,13 @@
     if content_type:
       response.content_type = content_type
 
-    response.status_code = data.get('http_response_code', httplib.OK)
+    response.status_code = data.get('http_response_code', http_client.OK)
     whole_page = self.GetResponse(data)
     if data.get('prevent_sniffing'):
       for sniff_pattern, sniff_replacement in SNIFFABLE_PATTERNS.items():
         whole_page = whole_page.replace(sniff_pattern, sniff_replacement)
     start = time.time()
-    response.response = whole_page
+    response.set_data(whole_page)
     logging.info('wrote response in %dms', int((time.time() - start) * 1000))
 
   def GetResponse(self, data):
diff --git a/framework/test/deleteusers_test.py b/framework/test/deleteusers_test.py
index 4cadbbd..2609867 100644
--- a/framework/test/deleteusers_test.py
+++ b/framework/test/deleteusers_test.py
@@ -11,7 +11,7 @@
 import logging
 import mock
 import unittest
-import urllib
+from six.moves import urllib
 
 from framework import cloud_tasks_helpers
 from framework import deleteusers
@@ -175,7 +175,7 @@
 
     self.assertEqual(get_client_mock().create_task.call_count, 2)
 
-    query = urllib.urlencode(
+    query = urllib.parse.urlencode(
         {'emails': 'user1@gmail.com,user2@gmail.com,user3@gmail.com'})
     expected_task = self.generate_simple_task(
         urls.DELETE_USERS_TASK + '.do', query)
@@ -185,7 +185,7 @@
         expected_task,
         retry=cloud_tasks_helpers._DEFAULT_RETRY)
 
-    query = urllib.urlencode({'emails': 'user4@gmail.com'})
+    query = urllib.parse.urlencode({'emails': 'user4@gmail.com'})
     expected_task = self.generate_simple_task(
         urls.DELETE_USERS_TASK + '.do', query)
 
@@ -204,7 +204,7 @@
     self.assertEqual(get_client_mock().create_task.call_count, 1)
 
     emails = 'user1@gmail.com,user2@gmail.com,user3@gmail.com,user4@gmail.com'
-    query = urllib.urlencode({'emails': emails})
+    query = urllib.parse.urlencode({'emails': emails})
     expected_task = self.generate_simple_task(
         urls.DELETE_USERS_TASK + '.do', query)
 
diff --git a/framework/test/flask_servlet_test.py b/framework/test/flask_servlet_test.py
index 443f919..4c47209 100644
--- a/framework/test/flask_servlet_test.py
+++ b/framework/test/flask_servlet_test.py
@@ -71,13 +71,15 @@
     self.page_class._CheckForMovedProject(mr, request)
     mock_abort.assert_not_called()
 
-  @mock.patch('flask.abort')
-  def testCheckForMovedProject_Redirect(self, mock_abort):
+  @mock.patch('flask.redirect')
+  def testCheckForMovedProject_Redirect(self, mock_redirect):
     project = fake.Project(project_name='proj', moved_to='http://example.com')
     request, mr = testing_helpers.GetRequestObjects(
         path='/p/proj', project=project)
+    self.page_class.request_path = '/p/test'
     self.page_class._CheckForMovedProject(mr, request)
-    mock_abort.assert_called_once_with(302)
+    mock_redirect.assert_called_once_with(
+        'http://127.0.0.1/hosting/moved?project=proj', code=302)
 
   def testGatherBaseData(self):
     project = self.page_class.services.project.TestAddProject(
diff --git a/framework/test/gcs_helpers_test.py b/framework/test/gcs_helpers_test.py
index 3500e40..a7c01d0 100644
--- a/framework/test/gcs_helpers_test.py
+++ b/framework/test/gcs_helpers_test.py
@@ -10,143 +10,123 @@
 
 import mock
 import unittest
-import uuid
 
-import mox
-
-from google.appengine.api import app_identity
-from google.appengine.api import images
 from google.appengine.api import urlfetch
 from google.appengine.ext import testbed
-from third_party import cloudstorage
+from google.cloud import storage
 
-from framework import filecontent
 from framework import gcs_helpers
-from testing import fake
 from testing import testing_helpers
 
 
 class GcsHelpersTest(unittest.TestCase):
 
   def setUp(self):
-    self.mox = mox.Mox()
     self.testbed = testbed.Testbed()
     self.testbed.activate()
     self.testbed.init_memcache_stub()
+    self.testbed.init_app_identity_stub()
+
+    self.test_storage_client = mock.create_autospec(
+        storage.Client, instance=True)
+    mock.patch.object(
+        storage, 'Client', return_value=self.test_storage_client).start()
 
   def tearDown(self):
-    self.mox.UnsetStubs()
-    self.mox.ResetAll()
     self.testbed.deactivate()
+    self.test_storage_client = None
+    mock.patch.stopall()
 
   def testDeleteObjectFromGCS(self):
     object_id = 'aaaaa'
-    bucket_name = 'test_bucket'
-    object_path = '/' + bucket_name + object_id
-
-    self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
-    app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)
-
-    self.mox.StubOutWithMock(cloudstorage, 'delete')
-    cloudstorage.delete(object_path)
-
-    self.mox.ReplayAll()
-
     gcs_helpers.DeleteObjectFromGCS(object_id)
-    self.mox.VerifyAll()
+    # Verify order of client calls.
+    self.test_storage_client.assert_has_calls(
+        [
+            mock.call.bucket().get_blob(object_id),
+            mock.call.bucket().get_blob().delete()
+        ])
 
-  def testStoreObjectInGCS_ResizableMimeType(self):
+  def testDeleteLegacyObjectFromGCS(self):
+    # A previous Python module expected object ids with a leading '/'.
+    object_id = '/aaaaa'
+    object_id_without_leading_slash = 'aaaaa'
+    gcs_helpers.DeleteObjectFromGCS(object_id)
+    # Verify order of client calls.
+    self.test_storage_client.assert_has_calls(
+        [
+            mock.call.bucket().get_blob(object_id_without_leading_slash),
+            mock.call.bucket().get_blob().delete()
+        ])
+
+  @mock.patch(
+      'google.appengine.api.images.resize', return_value=mock.MagicMock())
+  @mock.patch('uuid.uuid4')
+  def testStoreObjectInGCS_ResizableMimeType(self, mock_uuid4, mock_resize):
     guid = 'aaaaa'
+    mock_uuid4.return_value = guid
     project_id = 100
-    object_id = '/%s/attachments/%s' % (project_id, guid)
-    bucket_name = 'test_bucket'
-    object_path = '/' + bucket_name + object_id
+    blob_name = '%s/attachments/%s' % (project_id, guid)
+    thumb_blob_name = '%s/attachments/%s-thumbnail' % (project_id, guid)
     mime_type = 'image/png'
     content = 'content'
-    thumb_content = 'thumb_content'
-
-    self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
-    app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)
-
-    self.mox.StubOutWithMock(uuid, 'uuid4')
-    uuid.uuid4().AndReturn(guid)
-
-    self.mox.StubOutWithMock(cloudstorage, 'open')
-    cloudstorage.open(
-        object_path, 'w', mime_type, options={}
-        ).AndReturn(fake.FakeFile())
-    cloudstorage.open(object_path + '-thumbnail', 'w', mime_type).AndReturn(
-        fake.FakeFile())
-
-    self.mox.StubOutWithMock(images, 'resize')
-    images.resize(content, gcs_helpers.DEFAULT_THUMB_WIDTH,
-                  gcs_helpers.DEFAULT_THUMB_HEIGHT).AndReturn(thumb_content)
-
-    self.mox.ReplayAll()
 
     ret_id = gcs_helpers.StoreObjectInGCS(
         content, mime_type, project_id, gcs_helpers.DEFAULT_THUMB_WIDTH,
         gcs_helpers.DEFAULT_THUMB_HEIGHT)
-    self.mox.VerifyAll()
-    self.assertEqual(object_id, ret_id)
+    self.assertEqual('/%s' % blob_name, ret_id)
+    self.test_storage_client.assert_has_calls(
+        [
+            mock.call.bucket().blob(blob_name),
+            mock.call.bucket().blob().upload_from_string(
+                content, content_type=mime_type),
+            mock.call.bucket().blob(thumb_blob_name),
+        ])
+    mock_resize.assert_called()
 
-  def testStoreObjectInGCS_NotResizableMimeType(self):
+  @mock.patch(
+      'google.appengine.api.images.resize', return_value=mock.MagicMock())
+  @mock.patch('uuid.uuid4')
+  def testStoreObjectInGCS_NotResizableMimeType(self, mock_uuid4, mock_resize):
     guid = 'aaaaa'
+    mock_uuid4.return_value = guid
     project_id = 100
-    object_id = '/%s/attachments/%s' % (project_id, guid)
-    bucket_name = 'test_bucket'
-    object_path = '/' + bucket_name + object_id
+    blob_name = '%s/attachments/%s' % (project_id, guid)
     mime_type = 'not_resizable_mime_type'
     content = 'content'
 
-    self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
-    app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)
-
-    self.mox.StubOutWithMock(uuid, 'uuid4')
-    uuid.uuid4().AndReturn(guid)
-
-    self.mox.StubOutWithMock(cloudstorage, 'open')
-    options = {'Content-Disposition': 'inline; filename="file.ext"'}
-    cloudstorage.open(
-        object_path, 'w', mime_type, options=options
-        ).AndReturn(fake.FakeFile())
-
-    self.mox.ReplayAll()
-
     ret_id = gcs_helpers.StoreObjectInGCS(
         content, mime_type, project_id, gcs_helpers.DEFAULT_THUMB_WIDTH,
-        gcs_helpers.DEFAULT_THUMB_HEIGHT, filename='file.ext')
-    self.mox.VerifyAll()
-    self.assertEqual(object_id, ret_id)
+        gcs_helpers.DEFAULT_THUMB_HEIGHT)
+    self.assertEqual('/%s' % blob_name, ret_id)
+    self.test_storage_client.assert_has_calls(
+        [
+            mock.call.bucket().blob(blob_name),
+            mock.call.bucket().blob().upload_from_string(
+                content, content_type=mime_type),
+        ])
+    mock_resize.assert_not_called()
 
-  def testCheckMemeTypeResizable(self):
+  def testCheckMimeTypeResizable(self):
     for resizable_mime_type in gcs_helpers.RESIZABLE_MIME_TYPES:
       gcs_helpers.CheckMimeTypeResizable(resizable_mime_type)
 
     with self.assertRaises(gcs_helpers.UnsupportedMimeType):
       gcs_helpers.CheckMimeTypeResizable('not_resizable_mime_type')
 
-  def testStoreLogoInGCS(self):
-    file_name = 'test_file.png'
+  @mock.patch('framework.filecontent.GuessContentTypeFromFilename')
+  @mock.patch('framework.gcs_helpers.StoreObjectInGCS')
+  def testStoreLogoInGCS(self, mock_store_object, mock_guess_content):
+    blob_name = 123
+    mock_store_object.return_value = blob_name
     mime_type = 'image/png'
+    mock_guess_content.return_value = mime_type
+    file_name = 'test_file.png'
     content = 'test content'
     project_id = 100
-    object_id = 123
-
-    self.mox.StubOutWithMock(filecontent, 'GuessContentTypeFromFilename')
-    filecontent.GuessContentTypeFromFilename(file_name).AndReturn(mime_type)
-
-    self.mox.StubOutWithMock(gcs_helpers, 'StoreObjectInGCS')
-    gcs_helpers.StoreObjectInGCS(
-        content, mime_type, project_id,
-        thumb_width=gcs_helpers.LOGO_THUMB_WIDTH,
-        thumb_height=gcs_helpers.LOGO_THUMB_HEIGHT).AndReturn(object_id)
-
-    self.mox.ReplayAll()
 
     ret_id = gcs_helpers.StoreLogoInGCS(file_name, content, project_id)
-    self.mox.VerifyAll()
-    self.assertEqual(object_id, ret_id)
+    self.assertEqual(blob_name, ret_id)
 
   @mock.patch('google.appengine.api.urlfetch.fetch')
   def testFetchSignedURL_Success(self, mock_fetch):
diff --git a/framework/test/jsonfeed_test.py b/framework/test/jsonfeed_test.py
index 0a569e2..4ca83fa 100644
--- a/framework/test/jsonfeed_test.py
+++ b/framework/test/jsonfeed_test.py
@@ -8,7 +8,7 @@
 from __future__ import division
 from __future__ import absolute_import
 
-import httplib
+from six.moves import http_client
 import logging
 import unittest
 
@@ -102,10 +102,10 @@
     # Note that request has no X-Appengine-Inbound-Appid set.
     self.assertIsNone(feed.get())
     self.assertFalse(feed.handle_request_called)
-    self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+    self.assertEqual(http_client.FORBIDDEN, feed.response.status)
     self.assertIsNone(feed.post())
     self.assertFalse(feed.handle_request_called)
-    self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+    self.assertEqual(http_client.FORBIDDEN, feed.response.status)
 
   def testSameAppOnly_InternalOnlyCalledFromWrongApp(self):
     feed = TestableJsonFeed()
@@ -114,10 +114,10 @@
     feed.mr.request.headers['X-Appengine-Inbound-Appid'] = 'wrong'
     self.assertIsNone(feed.get())
     self.assertFalse(feed.handle_request_called)
-    self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+    self.assertEqual(http_client.FORBIDDEN, feed.response.status)
     self.assertIsNone(feed.post())
     self.assertFalse(feed.handle_request_called)
-    self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+    self.assertEqual(http_client.FORBIDDEN, feed.response.status)
 
 
 class TestableJsonFeed(jsonfeed.JsonFeed):
diff --git a/framework/test/redis_utils_test.py b/framework/test/redis_utils_test.py
deleted file mode 100644
index a4128ce..0000000
--- a/framework/test/redis_utils_test.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Tests for the Redis utility module."""
-from __future__ import print_function
-from __future__ import division
-from __future__ import absolute_import
-
-import fakeredis
-import unittest
-
-from framework import redis_utils
-from proto import features_pb2
-
-
-class RedisHelperTest(unittest.TestCase):
-
-  def testFormatRedisKey(self):
-    redis_key = redis_utils.FormatRedisKey(111)
-    self.assertEqual('111', redis_key)
-    redis_key = redis_utils.FormatRedisKey(222, prefix='foo:')
-    self.assertEqual('foo:222', redis_key)
-    redis_key = redis_utils.FormatRedisKey(333, prefix='bar')
-    self.assertEqual('bar:333', redis_key)
-
-  def testCreateRedisClient(self):
-    self.assertIsNone(redis_utils.connection_pool)
-    redis_client_1 = redis_utils.CreateRedisClient()
-    self.assertIsNotNone(redis_client_1)
-    self.assertIsNotNone(redis_utils.connection_pool)
-    redis_client_2 = redis_utils.CreateRedisClient()
-    self.assertIsNotNone(redis_client_2)
-    self.assertIsNot(redis_client_1, redis_client_2)
-
-  def testConnectionVerification(self):
-    server = fakeredis.FakeServer()
-    client = None
-    self.assertFalse(redis_utils.VerifyRedisConnection(client))
-    server.connected = True
-    client = fakeredis.FakeRedis(server=server)
-    self.assertTrue(redis_utils.VerifyRedisConnection(client))
-    server.connected = False
-    self.assertFalse(redis_utils.VerifyRedisConnection(client))
-
-  def testSerializeDeserializeInt(self):
-    serialized_int = redis_utils.SerializeValue(123)
-    self.assertEqual('123', serialized_int)
-    self.assertEquals(123, redis_utils.DeserializeValue(serialized_int))
-
-  def testSerializeDeserializeStr(self):
-    serialized = redis_utils.SerializeValue('123')
-    self.assertEqual('"123"', serialized)
-    self.assertEquals('123', redis_utils.DeserializeValue(serialized))
-
-  def testSerializeDeserializePB(self):
-    features = features_pb2.Hotlist.HotlistItem(
-        issue_id=7949, rank=0, adder_id=333, date_added=1525)
-    serialized = redis_utils.SerializeValue(
-        features, pb_class=features_pb2.Hotlist.HotlistItem)
-    self.assertIsInstance(serialized, str)
-    deserialized = redis_utils.DeserializeValue(
-        serialized, pb_class=features_pb2.Hotlist.HotlistItem)
-    self.assertIsInstance(deserialized, features_pb2.Hotlist.HotlistItem)
-    self.assertEquals(deserialized, features)
diff --git a/framework/test/servlet_helpers_test.py b/framework/test/servlet_helpers_test.py
index 19f4ea4..870de40 100644
--- a/framework/test/servlet_helpers_test.py
+++ b/framework/test/servlet_helpers_test.py
@@ -110,17 +110,14 @@
         path='/p/proj/issues/detail?id=123&q=term',
         project=self.project)
 
-    url = servlet_helpers.ComputeIssueEntryURL(mr, self.config)
+    url = servlet_helpers.ComputeIssueEntryURL(mr)
     self.assertEqual('/p/proj/issues/entry', url)
 
-  def testComputeIssueEntryURL_Customized(self):
+  def testComputeIssueEntryURL_Chromium(self):
     _request, mr = testing_helpers.GetRequestObjects(
-        path='/p/proj/issues/detail?id=123&q=term',
-        project=self.project)
-    mr.auth.user_id = 111
-    self.config.custom_issue_entry_url = FORM_URL
+        path='/p/chromium/issues/detail?id=123&q=term', project=self.project)
 
-    url = servlet_helpers.ComputeIssueEntryURL(mr, self.config)
+    url = servlet_helpers.ComputeIssueEntryURL(mr)
     self.assertIn('/issues/wizard', url)
 
 class IssueListURLTest(unittest.TestCase):
@@ -223,8 +220,27 @@
   def testCreateLoginUrl(self):
     _, mr = testing_helpers.GetRequestObjects(
         path='/p/proj/issues/detail?id=123&q=term', project=self.project)
-    url = servlet_helpers.SafeCreateLoginURL(mr, '/continue')
-    self.assertIn('/continue', url)
+    url = servlet_helpers.SafeCreateLoginURL(mr, 'current.url.to.return.to')
+    # Ensure that users can pick their account to use with Monorail.
+    self.assertIn('/AccountChooser', url)
+    self.assertIn('current.url.to.return.to', url)
+
+  def testCreateLoginUrl_AccountChooser(self):
+    _, mr = testing_helpers.GetRequestObjects(
+        path='/p/proj/issues/detail?id=123&q=term', project=self.project)
+    url = servlet_helpers.SafeCreateLoginURL(mr, 'current.url.to.return.to')
+    # Ensure that users can pick their account to use with Monorail.
+    self.assertIn('/AccountChooser', url)
+    self.assertIn('current.url.to.return.to', url)
+
+  def testCreateEscapedLoginUrlFromMR(self):
+    _, mr = testing_helpers.GetRequestObjects(
+        path='/p/proj/issues/detail?id=123&q=term', project=self.project)
+    mr.current_page_url_encoded = (
+        'https%3A%2F%2Fbugs.chromium.org'
+        '%2Fp%2Fchromium%2Fissues%2Fentry')
+    url = servlet_helpers.SafeCreateLoginURL(mr)
+    self.assertIn('https%3A%2F%2Fbugs.chromium.org%2Fp', url)
 
   def testCreateLogoutUrl(self):
     _, mr = testing_helpers.GetRequestObjects(
diff --git a/framework/test/ts_mon_js_test.py b/framework/test/ts_mon_js_test.py
index bcd4060..4231b76 100644
--- a/framework/test/ts_mon_js_test.py
+++ b/framework/test/ts_mon_js_test.py
@@ -12,9 +12,11 @@
 import unittest
 from mock import patch
 
+import flask
 import webapp2
 from google.appengine.ext import testbed
 
+from framework.ts_mon_js import FlaskMonorailTSMonJSHandler
 from framework.ts_mon_js import MonorailTSMonJSHandler
 from services import service_manager
 
@@ -71,3 +73,69 @@
 
     self.assertEqual(res.status_int, 201)
     self.assertEqual(res.body, 'Ok.')
+
+
+class FlaskMonorailTSMonJSHandlerTest(unittest.TestCase):
+
+  def setUp(self):
+    self.testbed = testbed.Testbed()
+    self.testbed.activate()
+    self.testbed.init_user_stub()
+    self.services = service_manager.Services()
+    self.app = flask.Flask('test_app')
+    self.app.config['TESTING'] = True
+    self.app.add_url_rule(
+        '/_/ts_mon_js.do',
+        view_func=FlaskMonorailTSMonJSHandler(
+            services=self.services).PostMonorailTSMonJSHandler,
+        methods=['POST'])
+
+  def tearDown(self):
+    self.testbed.deactivate()
+
+  @patch('framework.xsrf.ValidateToken')
+  @patch('time.time')
+  def testFlaskSubmitMetrics(self, _mockTime, _mockValidateToken):
+    """Test normal case POSTing metrics."""
+    _mockTime.return_value = 1537821859
+    res = self.app.test_client().post(
+        '/_/ts_mon_js.do',
+        data = json.dumps(
+            {
+                'metrics':
+                    [
+                        {
+                            'MetricInfo':
+                                {
+                                    'Name':
+                                      'monorail/frontend/issue_update_latency',
+                                    'ValueType':
+                                      2,
+                                },
+                            'Cells':
+                                [
+                                    {
+                                        'value':
+                                            {
+                                                'sum': 1234,
+                                                'count': 4321,
+                                                'buckets':
+                                                    {
+                                                        0: 123,
+                                                        1: 321,
+                                                        2: 213,
+                                                    },
+                                            },
+                                        'fields':
+                                            {
+                                                'client_id': '789',
+                                                'host_name': 'rutabaga',
+                                                'document_visible': True,
+                                            },
+                                        'start_time': 1537821799,
+                                    }
+                                ],
+                        }
+                    ],
+            }))
+    self.assertEqual(res.status_code, 201)
diff --git a/framework/test/warmup_test.py b/framework/test/warmup_test.py
index d8ddb65..8140fc7 100644
--- a/framework/test/warmup_test.py
+++ b/framework/test/warmup_test.py
@@ -24,8 +24,7 @@
     #self.services = service_manager.Services(
     #    cache_manager=self.cache_manager)
     self.services = service_manager.Services()
-    self.servlet = warmup.Warmup(
-        'req', 'res', services=self.services)
+    self.servlet = warmup.Warmup('req', 'res', services=self.services)
 
 
   def testHandleRequest_NothingToDo(self):
diff --git a/framework/trimvisitedpages.py b/framework/trimvisitedpages.py
index 8d6ec23..f036c10 100644
--- a/framework/trimvisitedpages.py
+++ b/framework/trimvisitedpages.py
@@ -10,6 +10,8 @@
 
 from framework import jsonfeed
 
+
+# TODO: change to FlaskInternalTask when converting to Flask
 class TrimVisitedPages(jsonfeed.InternalTask):
 
   """Look for users with more than 10 visited hotlists and deletes extras."""
@@ -17,3 +19,9 @@
   def HandleRequest(self, mr):
     """Delete old RecentHotlist2User rows when there are too many"""
     self.services.user.TrimUserVisitedHotlists(mr.cnxn)
+
+  # def GetTrimVisitedPages(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+  # def PostTrimVisitedPages(self, **kwargs):
+  #   return self.handler(**kwargs)
diff --git a/framework/ts_mon_js.py b/framework/ts_mon_js.py
index 61be1a8..0dbb121 100644
--- a/framework/ts_mon_js.py
+++ b/framework/ts_mon_js.py
@@ -13,11 +13,13 @@
 from framework import xsrf
 
 from gae_ts_mon.handlers import TSMonJSHandler
+from gae_ts_mon.flask_handlers import TSMonJSFlaskHandler
 
 from google.appengine.api import users
 
 from infra_libs import ts_mon
 
+import flask
 
 STANDARD_FIELDS = [
   ts_mon.StringField('client_id'),
@@ -108,3 +110,37 @@
       return True
     except xsrf.TokenIncorrect:
       return False
+
+
+class FlaskMonorailTSMonJSHandler(TSMonJSFlaskHandler):
+
+  def __init__(self, services=None):
+    super(FlaskMonorailTSMonJSHandler, self).__init__(
+        flask=flask, services=services)
+    self.register_metrics(
+        [
+            ISSUE_CREATE_LATENCY_METRIC, ISSUE_UPDATE_LATENCY_METRIC,
+            AUTOCOMPLETE_POPULATE_LATENCY_METRIC,
+            CHARTS_SWITCH_DATE_RANGE_METRIC, ISSUE_COMMENTS_LOAD_LATENCY_METRIC,
+            DOM_CONTENT_LOADED_METRIC, ISSUE_LIST_LOAD_LATENCY_METRIC
+        ])
+
+  def xsrf_is_valid(self, body):
+    """Validate the XSRF token supplied in the request body.
+    Expects `body` to include a `token` field; the acting user is taken
+    from the App Engine Users API, not from the body.
+    cnxn = sql.MonorailConnection()
+    token = body.get('token')
+    user = users.get_current_user()
+    email = user.email() if user else None
+
+    services = self.service
+    auth = authdata.AuthData.FromEmail(cnxn, email, services, autocreate=False)
+    try:
+      xsrf.ValidateToken(token, auth.user_id, xsrf.XHR_SERVLET_PATH)
+      return True
+    except xsrf.TokenIncorrect:
+      return False
+
+  def PostMonorailTSMonJSHandler(self):
+    return self.post()
diff --git a/framework/urls.py b/framework/urls.py
index d7e5e3a..f8a4a4d 100644
--- a/framework/urls.py
+++ b/framework/urls.py
@@ -54,10 +54,8 @@
 NOTIFY_APPROVAL_CHANGE_TASK = '/_task/notifyApprovalChange'
 NOTIFY_RULES_DELETED_TASK = '/_task/notifyRulesDeleted'
 OUTBOUND_EMAIL_TASK = '/_task/outboundEmail'
-SPAM_DATA_EXPORT_TASK = '/_task/spamDataExport'
 BAN_SPAMMER_TASK = '/_task/banSpammer'
 ISSUE_DATE_ACTION_TASK = '/_task/issueDateAction'
-COMPONENT_DATA_EXPORT_TASK = '/_task/componentDataExportTask'
 SEND_WIPEOUT_USER_LISTS_TASK = '/_task/sendWipeoutUserListsTask'
 DELETE_WIPEOUT_USERS_TASK = '/_task/deleteWipeoutUsersTask'
 DELETE_USERS_TASK = '/_task/deleteUsersTask'
@@ -72,12 +70,9 @@
 REINDEX_QUEUE_CRON = '/_cron/reindexQueue'
 RAMCACHE_CONSOLIDATE_CRON = '/_cron/ramCacheConsolidate'
 REAP_CRON = '/_cron/reap'
-SPAM_DATA_EXPORT_CRON = '/_cron/spamDataExport'
 LOAD_API_CLIENT_CONFIGS_CRON = '/_cron/loadApiClientConfigs'
 TRIM_VISITED_PAGES_CRON = '/_cron/trimVisitedPages'
 DATE_ACTION_CRON = '/_cron/dateAction'
-SPAM_TRAINING_CRON = '/_cron/spamTraining'
-COMPONENT_DATA_EXPORT_CRON = '/_cron/componentDataExport'
 WIPEOUT_SYNC_CRON = '/_cron/wipeoutSync'
 
 # URLs of handlers needed for GAE instance management.
@@ -153,5 +148,3 @@
 TS_MON_JS = '/_/jstsmon'
 
 CSP_REPORT = '/csp'
-
-SPAM_MODERATION_QUEUE = '/spamqueue'
diff --git a/framework/warmup.py b/framework/warmup.py
index ef8a53d..ace76ce 100644
--- a/framework/warmup.py
+++ b/framework/warmup.py
@@ -13,6 +13,7 @@
 from framework import jsonfeed
 
 
+# TODO(https://crbug.com/monorail/6511): Convert to FlaskInternalTask
 class Warmup(jsonfeed.InternalTask):
   """Placeholder for warmup work.  Used only to enable min_idle_instances."""
 
@@ -25,6 +26,11 @@
       'success': 1,
       }
 
+  # def GetWarmup(self, **kwargs):
+  #   return self.handler(**kwargs)
+
+
+# TODO(https://crbug.com/monorail/6511): Convert to FlaskInternalTask
 class Start(jsonfeed.InternalTask):
   """Placeholder for start work.  Used only to enable manual_scaling."""
 
@@ -37,7 +43,11 @@
       'success': 1,
       }
 
+  # def GetStart(self, **kwargs):
+  #   return self.handler(**kwargs)
 
+
+# TODO(https://crbug.com/monorail/6511): Convert to FlaskInternalTask
 class Stop(jsonfeed.InternalTask):
   """Placeholder for stop work.  Used only to enable manual_scaling."""
 
@@ -49,3 +59,6 @@
     return {
       'success': 1,
       }
+
+  # def GetStop(self, **kwargs):
+  #   return self.handler(**kwargs)