First customization to avm99963-bugs

- Only a single service (the default one) is used, in order to keep usage
  within Google Cloud's free tier.
- Settings have been adapted (branding, email addresses, DB region, etc.).
- Only a single DB instance is used (primary).
- Some cron jobs are executed less often.
- |threadsafe| has been set to "yes" so that the single instance can serve
  concurrent requests (otherwise the search/list issues functionality
  breaks); see the config sketch below.
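
For reference, the effective prod configuration in app.yaml.m4 after this
change is roughly the following (m4 conditionals omitted; values taken from
the diff below):

    threadsafe: yes

    instance_class: F1
    automatic_scaling:
      min_pending_latency: 15000ms
      max_pending_latency: 15000ms
      min_instances: 1
      max_instances: 1
      max_concurrent_requests: 80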

GitOrigin-RevId: 52130750a30a5aa2234a6dd3f5ae23c3fb1b8343
diff --git a/Makefile b/Makefile
index f94295e..41246b7 100644
--- a/Makefile
+++ b/Makefile
@@ -8,7 +8,7 @@
 
 DEVID = monorail-dev
 STAGEID= monorail-staging
-PRODID= monorail-prod
+PRODID= avm99963-bugs
 
 GAE_PY?= python gae.py
 DEV_APPSERVER_FLAGS?= --watcher_ignore_re="(.*/lib|.*/node_modules|.*/third_party|.*/venv)"
@@ -136,51 +136,30 @@
 # Service yaml files used by gae.py are expected to be named module-<service-name>.yaml
 config_prod:
 	m4 -DPROD < app.yaml.m4 > app.yaml
-	m4 -DPROD < module-besearch.yaml.m4 > module-besearch.yaml
-	m4 -DPROD < module-latency-insensitive.yaml.m4 > module-latency-insensitive.yaml
-	m4 -DPROD < module-api.yaml.m4 > module-api.yaml
 
 # Generate yaml files used by spinnaker.
 config_prod_cloud:
 	m4 -DPROD < app.yaml.m4 > app.prod.yaml
-	m4 -DPROD < module-besearch.yaml.m4 > besearch.prod.yaml
-	m4 -DPROD < module-latency-insensitive.yaml.m4 > latency-insensitive.prod.yaml
-	m4 -DPROD < module-api.yaml.m4 > api.prod.yaml
 
 config_staging:
 	m4 -DSTAGING < app.yaml.m4 > app.yaml
-	m4 -DSTAGING < module-besearch.yaml.m4 > module-besearch.yaml
-	m4 -DSTAGING < module-latency-insensitive.yaml.m4 > module-latency-insensitive.yaml
-	m4 -DSTAGING < module-api.yaml.m4 > module-api.yaml
 
 config_staging_cloud:
 	m4 -DSTAGING < app.yaml.m4 > app.staging.yaml
-	m4 -DSTAGING < module-besearch.yaml.m4 > besearch.staging.yaml
-	m4 -DSTAGING < module-latency-insensitive.yaml.m4 > latency-insensitive.staging.yaml
-	m4 -DSTAGING < module-api.yaml.m4 > api.staging.yaml
 
 config_dev:
 	m4 -DDEV < app.yaml.m4 > app.yaml
-	m4 -DDEV < module-besearch.yaml.m4 > module-besearch.yaml
-	m4 -DDEV < module-latency-insensitive.yaml.m4 > module-latency-insensitive.yaml
-	m4 -DDEV < module-api.yaml.m4 > module-api.yaml
 
 config_dev_cloud:
 	m4 -DDEV < app.yaml.m4 > app.yaml
-	m4 -DDEV < module-besearch.yaml.m4 > besearch.yaml
-	m4 -DDEV < module-latency-insensitive.yaml.m4 > latency-insensitive.yaml
-	m4 -DDEV < module-api.yaml.m4 > api.yaml
 
 config_local:
 	m4 app.yaml.m4 > app.yaml
-	m4 module-besearch.yaml.m4 > module-besearch.yaml
-	m4 module-latency-insensitive.yaml.m4 > module-latency-insensitive.yaml
-	m4 module-api.yaml.m4 > module-api.yaml
 
 deploy_dev: clean_deps deps build_js config_dev
 	$(eval BRANCH_NAME := $(shell git rev-parse --abbrev-ref HEAD))
 	@echo "---[Dev $(DEVID)]---"
-	$(GAE_PY) upload --tag $(BRANCH_NAME) -A $(DEVID) $(FRONTEND_MODULES) $(BACKEND_MODULES)
+	$(GAE_PY) upload --tag $(BRANCH_NAME) -A $(DEVID) $(FRONTEND_MODULES)
 
 deploy_cloud_dev: clean_deps deps build_js config
 	$(eval GCB_DIR:= $(shell mktemp -d -p /tmp monorail_XXXXX))
@@ -213,12 +192,12 @@
 # throwaway databases.
 deploy_staging: clean_deps deps build_js config_staging
 	@echo "---[Staging $(STAGEID)]---"
-	$(GAE_PY) upload -A $(STAGEID) $(FRONTEND_MODULES) $(BACKEND_MODULES)
+	$(GAE_PY) upload -A $(STAGEID) $(FRONTEND_MODULES)
 
 # This is our production server that users actually use.
 deploy_prod: clean_deps deps build_js config_prod
 	@echo "---[Deploying prod instance $(PRODID)]---"
-	$(GAE_PY) upload -A $(PRODID) $(FRONTEND_MODULES) $(BACKEND_MODULES)
+	$(GAE_PY) upload -A $(PRODID) $(FRONTEND_MODULES)
 
 # Note that we do not provide a command-line way to make the newly-uploaded
 # version the default version. This is for two reasons: a) You should be using
diff --git a/app.yaml.m4 b/app.yaml.m4
index 222d27e..54e9813 100644
--- a/app.yaml.m4
+++ b/app.yaml.m4
@@ -5,30 +5,40 @@
 
 runtime: python27
 api_version: 1
-threadsafe: no
+threadsafe: yes
 
 default_expiration: "10d"
 
 define(`_VERSION', `syscmd(`echo $_VERSION')')
 
 ifdef(`PROD', `
-instance_class: F4
+instance_class: F1
 automatic_scaling:
-  min_idle_instances: 25
-  max_pending_latency: 0.2s
+  min_pending_latency: 15000ms
+  max_pending_latency: 15000ms
+  min_instances: 1
+  max_instances: 1
+  max_concurrent_requests: 80
 ')
 
 ifdef(`STAGING', `
-instance_class: F4
+instance_class: F1
 automatic_scaling:
-  min_idle_instances: 1
-  max_pending_latency: 0.2s
+  min_instances: 1
+  max_instances: 1
+  max_concurrent_requests: 50
+  min_pending_latency: automatic
+  max_pending_latency: 15000ms
 ')
 
 ifdef(`DEV', `
-instance_class: F4
+instance_class: F1
 automatic_scaling:
-  min_idle_instances: 1
+  min_instances: 1
+  max_instances: 1
+  max_concurrent_requests: 50
+  min_pending_latency: automatic
+  max_pending_latency: 15000ms
 ')
 
 handlers:
@@ -73,6 +83,24 @@
   script: monorailapp.app
   secure: always
 
+# From api service:
+- url: /prpc/.*
+  script: monorailapp.app
+  secure: always
+
+# From besearch service
+- url: /_backend/.*
+  script: monorailapp.app
+
+# From latency-insensitive service
+- url: /_task/.*
+  script: monorailapp.app
+  login: admin
+
+- url: /_cron/.*
+  script: monorailapp.app
+  login: admin
+
 inbound_services:
 - mail
 - mail_bounce
@@ -100,17 +128,6 @@
   VERSION_ID: '_VERSION'
   GAE_USE_SOCKETS_HTTPLIB : ''
 
-vpc_access_connector:
-ifdef(`DEV',`
-  name: "projects/monorail-dev/locations/us-central1/connectors/redis-connector"
-')
-ifdef(`STAGING',`
-  name: "projects/monorail-staging/locations/us-central1/connectors/redis-connector"
-')
-ifdef(`PROD', `
-  name: "projects/monorail-prod/locations/us-central1/connectors/redis-connector"
-')
-
 skip_files:
 - ^(.*/)?#.*#$
 - ^(.*/)?.*~$
diff --git a/cron.yaml b/cron.yaml
index 5a4715f..81d565b 100644
--- a/cron.yaml
+++ b/cron.yaml
@@ -17,13 +17,13 @@
   schedule: every 6 hours synchronized
 - description: index issues that were modified in big batches
   url: /_cron/reindexQueue
-  schedule: every 1 minutes synchronized
+  schedule: every 10 minutes synchronized
 - description: get rid of doomed and deletable projects
   url: /_cron/reap
   schedule: every 24 hours synchronized
 - description: send ts_mon metrics
   url: /internal/cron/ts_mon/send
-  schedule: every 1 minutes
+  schedule: every 10 minutes
 - description: export spam model training examples
   url: /_cron/spamDataExport
   schedule: every day 09:00
diff --git a/dev-services.yml b/dev-services.yml
index 6b5bbe2..0c93db3 100644
--- a/dev-services.yml
+++ b/dev-services.yml
@@ -17,7 +17,7 @@
   cloud-tasks-emulator:
     # As of 9/18/2020 latest tag is built from source at
     # https://github.com/aertje/cloud-tasks-emulator/commit/ff9a1afc8f3aeedbc6ca1f468b2c53b74c18a6e6
-    image: 'us.gcr.io/monorail-dev/cloud-tasks-emulator:latest'
+    image: 'ghcr.io/aertje/cloud-tasks-emulator:latest'
     container_name: 'cloud-tasks-emulator'
     ports:
       - '9090:9090'
diff --git a/dispatch.yaml b/dispatch.yaml
deleted file mode 100644
index 975c8cb..0000000
--- a/dispatch.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-dispatch:
-
-- url: "*/_backend/*"
-  service: besearch
-
-- url: "*/_cron/*"
-  service: latency-insensitive
-
-- url: "*/_task/*"
-  service: latency-insensitive
diff --git a/search/frontendsearchpipeline.py b/search/frontendsearchpipeline.py
index 367c52f..ec0a28e 100644
--- a/search/frontendsearchpipeline.py
+++ b/search/frontendsearchpipeline.py
@@ -951,8 +951,8 @@
     UserRPC for the created RPC call.
   """
   shard_id, subquery = shard_key
-  backend_host = modules.get_hostname(module='besearch')
-  url = 'http://%s%s' % (
+  backend_host = modules.get_hostname(module='default')
+  url = 'https://%s%s' % (
       backend_host,
       framework_helpers.FormatURL(
           [],
@@ -981,8 +981,8 @@
     project_id, logged_in_user_id, shard_id, invalidation_timestep,
     deadline=None, failfast=True):
   """Ask a backend to query one shard of the database."""
-  backend_host = modules.get_hostname(module='besearch')
-  url = 'http://%s%s' % (backend_host, framework_helpers.FormatURL(
+  backend_host = modules.get_hostname(module='default')
+  url = 'https://%s%s' % (backend_host, framework_helpers.FormatURL(
       None, urls.BACKEND_NONVIEWABLE,
       project_id=project_id or '',
       logged_in_user_id=logged_in_user_id or '',
diff --git a/search/test/frontendsearchpipeline_test.py b/search/test/frontendsearchpipeline_test.py
index b2e7fb3..432a9d1 100644
--- a/search/test/frontendsearchpipeline_test.py
+++ b/search/test/frontendsearchpipeline_test.py
@@ -937,7 +937,7 @@
     a_fake_rpc = testing_helpers.Blank(callback=None)
     urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
       a_fake_rpc)
-    modules.get_hostname(module='besearch')
+    modules.get_hostname(module='default')
     urlfetch.make_fetch_call(
       a_fake_rpc, mox.StrContains(
           urls.BACKEND_SEARCH + '?groupby=cc&invalidation_timestep=12345&'
@@ -967,7 +967,7 @@
     a_fake_rpc = testing_helpers.Blank(callback=None)
     urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
       a_fake_rpc)
-    modules.get_hostname(module='besearch')
+    modules.get_hostname(module='default')
     urlfetch.make_fetch_call(
         a_fake_rpc,
         mox.StrContains(
@@ -1002,7 +1002,7 @@
     a_fake_rpc = testing_helpers.Blank(callback=None)
     urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
       a_fake_rpc)
-    modules.get_hostname(module='besearch')
+    modules.get_hostname(module='default')
     urlfetch.make_fetch_call(
       a_fake_rpc, mox.StrContains(urls.BACKEND_NONVIEWABLE),
       follow_redirects=False, headers=mox.IsA(dict))
diff --git a/settings.py b/settings.py
index e5c5bd1..d1f9b61 100644
--- a/settings.py
+++ b/settings.py
@@ -3,7 +3,7 @@
 # license that can be found in the LICENSE file or at
 # https://developers.google.com/open-source/licenses/bsd
 
-"""Defines settings for monorail."""
+"""Defines settings for the avm99963 bugs monorail instance."""
 from __future__ import print_function
 from __future__ import division
 from __future__ import absolute_import
@@ -33,19 +33,19 @@
 # Part 1: settings that you must edit before deploying your site.
 
 # Email address that is offered to users who might need help using the tool.
-feedback_email = 'jrobbins+monorail.feedback@chromium.org'
+feedback_email = 'me+monorail.feedback@avm99963.com'
 
 # For debugging when running in staging: send all outbound
 # email to this address rather than to the actual address that
 # it would normally be sent to.
-send_all_email_to = 'monorail-staging-emails+all+%(user)s+%(domain)s@google.com'
+send_all_email_to = 'monorail-staging-emails+all+%(user)s+%(domain)s@dontsend.avm99963.com'
 
 # For debugging when running the server locally: send all outbound
 # email to this address rather than to the actual address that
 # it would normally be sent to.
 send_local_email_to = (
     send_all_email_to or
-    'monorail-staging-emails+dev+%(user)s+%(domain)s@google.com')
+    'monorail-staging-emails+dev+%(user)s+%(domain)s@dontsend.avm99963.com')
 
 # User to send emails from Monorail as. The reply_to sections of emails will be
 # set to appspotmail addresses.
@@ -53,8 +53,13 @@
 #       email addresses then setting these values to
 #       'reply@${app_id}.appspotmail.com' and 'noreply@{app_id}.appspotmail.com'
 #       is likely the best option.
-send_email_as_format = 'monorail@%(domain)s'
-send_noreply_email_as_format = 'monorail+noreply@%(domain)s'
+
+# Old values (TODO: set up DNS records and uncomment this):
+#send_email_as_format = 'monorail@%(domain)s'
+#send_noreply_email_as_format = 'monorail+noreply@%(domain)s'
+
+send_email_as_format = 'reply@avm99963-bugs.appspotmail.com'
+send_noreply_email_as_format = 'noreply@avm99963-bugs.appspotmail.com'
 
 # The default is to look for a database named "monorail" in replicas
 # named "replica-00" .. "replica-09"
@@ -63,20 +68,20 @@
 db_database_name = 'monorail'
 db_primary_name = 'primary'
 db_replica_prefix = 'replica'
-db_region = 'us-central1'
+db_region = 'europe-west1'
 
 # The default connection pool size for mysql connections.
 db_cnxn_pool_size = 20
 
 # The number of logical database shards used.  Each replica is complete copy
 # of the primary, so any replica DB can answer queries about any logical shard.
-num_logical_shards = 10
+num_logical_shards = 1
 
 # "Learn more" link for the site home page
 learn_more_link = None
 
 # Site name, displayed above the search box on the site home page.
-site_name = 'Monorail'
+site_name = 'avm99963 bugs'
 
 # Who is allowed to create new projects?  Set to ANYONE or ADMIN_ONLY.
 project_creation_restriction = site_pb2.UserTypeRestriction.ADMIN_ONLY
@@ -98,20 +103,15 @@
 # Text that mentions these words as shorthand host names will be autolinked
 # regardless of the lack of "https://" or ".com".
 autolink_shorthand_hosts = [
-    'go', 'g', 'shortn', 'who', 'teams',
+    'go',
     ]
-autolink_numeric_shorthand_hosts = [
-    'b', 't', 'o', 'omg', 'cl', 'cr',
-    ]
+autolink_numeric_shorthand_hosts = []
 
 
 # We only allow self-service account linking invites when the child account is
 # linking to a parent account in an allowed domain.
 linkable_domains = {
   # Child account domain: [parent account domains]
-  'chromium.org': ['google.com'],
-  'google.com': ['chromium.org'],
-  # TODO(jrobbins): webrtc.org, etc.
 }
 
 
@@ -134,7 +134,7 @@
 banned_user_domains = []
 
 # We use this for specifying cloud task parent
-CLOUD_TASKS_REGION = 'us-central1'
+CLOUD_TASKS_REGION = 'europe-west1'
 
 # We only send subscription notifications to users who have visited the
 # site in the last 6 months.
@@ -267,44 +267,31 @@
 # Users with emails in the "priviledged" domains do NOT get any advantage
 # but they do default their preference to show unobscured email addresses.
 priviledged_user_domains = [
-  'google.com', 'chromium.org', 'webrtc.org',
+  'avm99963.com',
   ]
 
 # Branded domains:  Any UI GET to a project listed below on prod or staging
 # should have the specified host, otherwise it will be redirected such that
 # the specified host is used.
 branded_domains = {}  # defaults to empty for localhost
-branded_domains_dev = {
-    'fuchsia': 'bugs-dev.fuchsia.dev',
-    '*': 'bugs-dev.chromium.org',
-}
-branded_domains_staging = {
-    'fuchsia': 'bugs-staging.fuchsia.dev',
-    '*': 'bugs-staging.chromium.org',
-}
 branded_domains_prod = {
-    'fuchsia': 'bugs.fuchsia.dev',
-    '*': 'bugs.chromium.org',
+    '*': 'bugs.avm99963.com',
 }
 
 # The site home page will immediately redirect to a default project for these
 # domains, if the project can be viewed.  Structure is {hostport: project_name}.
 domain_to_default_project = {}  # defaults to empty for localhost
-domain_to_default_project_dev = {'bugs-dev.fuchsia.dev': 'fuchsia'}
-domain_to_default_project_staging = {'bugs-staging.fuchsia.dev': 'fuchsia'}
-domain_to_default_project_prod = {'bugs.fuchsia.dev': 'fuchsia'}
+domain_to_default_project_prod = {}
 
 
 # Names of projects on code.google.com which we allow cross-linking to.
 recognized_codesite_projects = [
-  'chromium-os',
-  'chrome-os-partner',
 ]
 
 ####
 # Part 5:  Instance-specific settings that override lines above.
 # This ID is for -staging and other misc deployments. Prod is defined below.
-analytics_id = 'UA-55762617-20'
+analytics_id = ''
 
 if unit_test_mode:
   db_cloud_project = ''  # No real database is used during unit testing.
@@ -312,13 +299,13 @@
 else:
   app_id = app_identity.get_application_id()
 
-  if app_id == 'monorail-staging':
-    site_name = 'Monorail Staging'
-    banner_message = 'This staging site does not send emails.'
+  if app_id == 'avm99963-bugs':
+    send_all_email_to = None  # Deliver it to the intended users.
     # The Google Cloud SQL databases to use.
     db_cloud_project = app_id
-    branded_domains = branded_domains_staging
-    domain_to_default_project = domain_to_default_project_staging
+    analytics_id = ''
+    branded_domains = branded_domains_prod
+    domain_to_default_project = domain_to_default_project_prod
     # For each of these redis_hosts, they must match the corresponding
     # HOST address of the redis instance for the environment. You can use
     # the following command to find it.
@@ -326,29 +313,8 @@
     # gcloud redis instances list --project monorail-staging \
     #   --region us-central1
     # ````
-    redis_host = '10.228.109.51'
-
-  elif app_id == 'monorail-dev':
-    site_name = 'Monorail Dev'
-    banner_message = 'This dev site does not send emails.'
-    # The Google Cloud SQL databases to use.
-    db_cloud_project = app_id
-    branded_domains = branded_domains_dev
-    domain_to_default_project = domain_to_default_project_dev
-    # See comment above on how to find this address.
-    redis_host = '10.150.170.251'
-    # Use replicas created when testing the restore procedures on 2021-02-24
-    db_replica_prefix = 'replica-2'
-
-  elif app_id == 'monorail-prod':
-    send_all_email_to = None  # Deliver it to the intended users.
-    # The Google Cloud SQL databases to use.
-    db_cloud_project = app_id
-    analytics_id = 'UA-55762617-14'
-    branded_domains = branded_domains_prod
-    domain_to_default_project = domain_to_default_project_prod
-    # See comment above on how to find this address.
-    redis_host = '10.190.48.180'
+    # Leaving empty for now
+    redis_host = ''
 
 if local_mode:
   site_name = 'Monorail Local'
@@ -362,19 +328,17 @@
 
 # Combine the customized info above to make the names of the replica DB
 # instances.
-db_replica_names = ['{}-{:02d}'.format(db_replica_prefix, i) for i in range(10)]
+db_replica_names = [db_primary_name for i in range(num_logical_shards)]
 
 # Format string for the name of the physical database replicas.
 physical_db_name_format = (db_cloud_project + ':' + db_region + ':%s')
 
 # preferred domains to display
 preferred_domains = {
-    'monorail-prod.appspot.com': 'bugs.chromium.org',
-    'monorail-staging.appspot.com': 'bugs-staging.chromium.org',
-    'monorail-dev.appspot.com': 'bugs-dev.chromium.org'}
+    'avm99963-bugs.appspot.com': 'bugs.avm99963.com'}
 
 # Borg robot service account
-borg_service_account = 'chrome-infra-prod-borg@system.gserviceaccount.com'
+borg_service_account = 'chrome-infra-prod-borg-NOTUSEDBYAVM99963BUGS@system.gserviceaccount.com'
 
 # Prediction API params.
 classifier_project_id = 'project-id-testing-only'
@@ -413,49 +377,28 @@
     '.gserviceaccount.com',
     '@google.com',
     '@webrtc.org',
+    '@avm99963.com',
 )
 
 # New issues filed by these users in these groups
 # automatically get the Restrict-View-Google label.
-restrict_new_issues_user_groups = [
-    'chromeos-all@google.com',
-    'chromeos-acl@google.com',
-    'chromeos-fte-tvc@google.com',
-    'chromeos-fte-tvc@chromium.org',
-    'create-team@google.com',
-    'test-corp-mode@google.com',
-]
+restrict_new_issues_user_groups = []
 
 # Users in these groups see a "corp mode" warning dialog when commenting
 # on public issues, informing them that their comments are public by default.
-public_issue_notice_user_groups = [
-    'chromeos-all@google.com',
-    'chromeos-acl@google.com',
-    'chromeos-fte-tvc@google.com',
-    'chromeos-fte-tvc@chromium.org',
-    'create-team@google.com',
-    'test-corp-mode@google.com',
-    'tq-team@google.com',
-]
+public_issue_notice_user_groups = []
 
-full_emails_perm_groups = [
-    # Synced group that gives members permission to view the full
-    # emails of all users.
-    'monorail-display-names-perm@google.com',
-    # Native Monorail group that gives service account members permission
-    # to view full emails of all users.
-    'display-names-perm-sa@bugs.chromium.org'
-]
+full_emails_perm_groups = []
 
 # These email suffixes are allowed to create new alert bugs via email.
-alert_allowlisted_suffixes = ('@google.com',)
+alert_allowlisted_suffixes = ('@avm99963.com',)
 
 # The person who is notified if there is an unexpected problem in the alert
 # pipeline.
-alert_escalation_email = 'zhangtiff@google.com'
+alert_escalation_email = 'me@avm99963.com'
 
 # Bugs autogenerated from alert emails are created through this account.
-alert_service_account = 'chrome-trooper-alerts@google.com'
+alert_service_account = 'autogenerated-bug-from-alert-mails-monorail@noreply.avm99963.com'
 
 # The number of hash buckets to use when vectorizing text from Issues and
 # Comments. This should be the same value that the model was trained with.
@@ -496,12 +439,8 @@
 chart_query_max_rows = 10000
 
 # Client ID to use for loading the Google API client, gapi.js.
-if app_identity.get_application_id() == 'monorail-prod':
-  gapi_client_id = (
-    '679746765624-tqaakho939p2mc7eb65t4ecrj3gj08rt.apps.googleusercontent.com')
-else:
-  gapi_client_id = (
-    '52759169022-6918fl1hd1qoul985cs1ohgedeb8c9a0.apps.googleusercontent.com')
+# (This is not used by avm99963 bugs, this is only useful for buganizer bugs)
+gapi_client_id = ''
 
 # The pub/sub topic on which to publish issue update messages.
 if local_mode:
@@ -516,4 +455,4 @@
 
 # All users in the following domains will have API access.
 # Important: the @ symbol must be included.
-api_allowed_email_domains = ('@google.com')
+api_allowed_email_domains = ('@avm99963.com')
diff --git a/templates/framework/footer.ezt b/templates/framework/footer.ezt
index 963b61d..bc913ec 100644
--- a/templates/framework/footer.ezt
+++ b/templates/framework/footer.ezt
@@ -2,37 +2,6 @@
 
 [include "footer-shared.ezt"]
 
-<script type="text/javascript" nonce="[nonce]">
-// Google Analytics
-(function(i,s,o,g,r,a,m){i[[]'GoogleAnalyticsObject']=r;i[[]r]=i[[]r]||function(){
-(i[[]r].q=i[[]r].q||[[]]).push(arguments)},i[[]r].l=1*new Date();a=s.createElement(o),
-m=s.getElementsByTagName(o)[[]0];a.async=1;a.setAttribute('nonce','[nonce]');
-a.src=g;m.parentNode.insertBefore(a,m)
-})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
-
-(function setupGoogleAnalytics() {
-  const _EMAIL_REGEX =
-      ["/([a-zA-Z0-9_\-\.]+)@([a-zA-Z0-9_\-\.]+)\.([a-zA-Z]{2,5})/"];
-
-  ga('create', '[analytics_id]', {'siteSpeedSampleRate': 100});
-
-  [if-any logged_in_user]
-    ga('set', 'dimension1', 'Logged in');
-  [else]
-    ga('set', 'dimension1', 'Not logged in');
-  [end]
-
-  const path = window.location.href.slice(window.location.origin.length);
-  if (path.startsWith('/u')) {
-    [# Keep anything that looks like an email address out of GA.]
-    ga('set', 'title', 'A user page');
-    ga('set', 'location', path.replace(_EMAIL_REGEX, 'user@example.com'));
-  }
-
-  ga('send', 'pageview');
-})();
-</script>
-
 <ezt-app-base [if-any logged_in_user]
   userDisplayName="[logged_in_user.email]"[end]
   projectName="[projectname]"