Merge branch 'main' into avm99963-monorail
Merged commit 34d8229ae2b51fb1a15bd208e6fe6185c94f6266
GitOrigin-RevId: 7ee0917f93a577e475f8e09526dd144d245593f4
diff --git a/third_party/README.md b/third_party/README.md
new file mode 100644
index 0000000..5d92a38
--- /dev/null
+++ b/third_party/README.md
@@ -0,0 +1,3 @@
+The third_party/ directory contains sources from other projects.
+
+Check the README.monorail file in each directory for package info.
diff --git a/third_party/appengine-python-standard/LICENSE b/third_party/appengine-python-standard/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/third_party/appengine-python-standard/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/third_party/appengine-python-standard/README.monorail b/third_party/appengine-python-standard/README.monorail
new file mode 100644
index 0000000..46f9a21
--- /dev/null
+++ b/third_party/appengine-python-standard/README.monorail
@@ -0,0 +1,12 @@
+Name: Google App Engine Services SDK
+URL: https://github.com/GoogleCloudPlatform/appengine-python-standard
+Version: July 21, 2022
+License: Apache 2.0
+License File: LICENSE
+Security Critical: no
+Description:
+Google App Engine bundled services SDK for Python 3
+Local Modifications:
+Retained only default_api_stub.py. The snapshot includes commit cc19a2e,
+which fixes security-ticket handling; that fix was not in the v1.0.1rc1
+release and we didn't want to wait for the next one.
diff --git a/third_party/appengine-python-standard/default_api_stub.py b/third_party/appengine-python-standard/default_api_stub.py
new file mode 100644
index 0000000..c71727a
--- /dev/null
+++ b/third_party/appengine-python-standard/default_api_stub.py
@@ -0,0 +1,320 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""An APIProxy stub that communicates with VMEngine service bridges."""
+
+from concurrent import futures
+import imp
+import logging
+import os
+import sys
+from google.appengine.api import apiproxy_rpc
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.ext.remote_api import remote_api_bytes_pb2 as remote_api_pb2
+from google.appengine.runtime import apiproxy_errors
+from google.appengine.runtime import context
+import six.moves.urllib.parse
+import urllib3
+
+
+logging.getLogger('requests_nologs').setLevel(logging.ERROR)
+
+TICKET_HEADER = 'HTTP_X_APPENGINE_API_TICKET'
+DEV_TICKET_HEADER = 'HTTP_X_APPENGINE_DEV_REQUEST_ID'
+DAPPER_ENV_KEY = 'HTTP_X_GOOGLE_DAPPERTRACEINFO'
+SERVICE_BRIDGE_HOST = os.environ.get('API_HOST',
+ 'appengine.googleapis.internal')
+API_PORT = os.environ.get('API_PORT', '10001')
+SERVICE_ENDPOINT_NAME = 'app-engine-apis'
+APIHOST_METHOD = '/VMRemoteAPI.CallRemoteAPI'
+PROXY_PATH = '/rpc_http'
+DAPPER_HEADER = 'X-Google-DapperTraceInfo'
+SERVICE_DEADLINE_HEADER = 'X-Google-RPC-Service-Deadline'
+SERVICE_ENDPOINT_HEADER = 'X-Google-RPC-Service-Endpoint'
+SERVICE_METHOD_HEADER = 'X-Google-RPC-Service-Method'
+RPC_CONTENT_TYPE = 'application/octet-stream'
+DEFAULT_TIMEOUT = 60
+
+DEADLINE_DELTA_SECONDS = 1
+
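+# Upper bound on concurrently running API calls; used below as the size of
+# the stub's thread pool.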
+MAX_CONCURRENT_API_CALLS = 100
+
+URLLIB3_POOL_COUNT = 10
+
+URLLIB3_POOL_SIZE = 10
+
+
+_EXCEPTIONS_MAP = {
+ remote_api_pb2.RpcError.UNKNOWN:
+ (apiproxy_errors.RPCFailedError,
+ 'The remote RPC to the application server failed for call %s.%s().'),
+ remote_api_pb2.RpcError.CALL_NOT_FOUND:
+ (apiproxy_errors.CallNotFoundError,
+ 'The API package \'%s\' or call \'%s()\' was not found.'),
+ remote_api_pb2.RpcError.PARSE_ERROR:
+ (apiproxy_errors.ArgumentError,
+ 'There was an error parsing arguments for API call %s.%s().'),
+ remote_api_pb2.RpcError.OVER_QUOTA:
+ (apiproxy_errors.OverQuotaError,
+ 'The API call %s.%s() required more quota than is available.'),
+ remote_api_pb2.RpcError.REQUEST_TOO_LARGE:
+ (apiproxy_errors.RequestTooLargeError,
+ 'The request to API call %s.%s() was too large.'),
+ remote_api_pb2.RpcError.CAPABILITY_DISABLED:
+ (apiproxy_errors.CapabilityDisabledError,
+ 'The API call %s.%s() is temporarily disabled.'),
+ remote_api_pb2.RpcError.FEATURE_DISABLED:
+ (apiproxy_errors.FeatureNotEnabledError,
+ 'The API call %s.%s() is currently not enabled.'),
+ remote_api_pb2.RpcError.RESPONSE_TOO_LARGE:
+ (apiproxy_errors.ResponseTooLargeError,
+ 'The response from API call %s.%s() was too large.'),
+ remote_api_pb2.RpcError.CANCELLED:
+ (apiproxy_errors.CancelledError,
+ 'The API call %s.%s() was explicitly cancelled.'),
+ remote_api_pb2.RpcError.DEADLINE_EXCEEDED:
+ (apiproxy_errors.DeadlineExceededError,
+ 'The API call %s.%s() took too long to respond and was cancelled.')
+}
+
+_DEFAULT_EXCEPTION = _EXCEPTIONS_MAP[remote_api_pb2.RpcError.UNKNOWN]
+
+_DEADLINE_EXCEEDED_EXCEPTION = _EXCEPTIONS_MAP[
+ remote_api_pb2.RpcError.DEADLINE_EXCEEDED]
+
+
+class DefaultApiRPC(apiproxy_rpc.RPC):
+ """A class representing an RPC to a remote server."""
+
+ def _ErrorException(self, exception_class, error_details):
+ return exception_class(error_details % (self.package, self.call))
+
+ def _TranslateToError(self, response):
+ """Translates a failed APIResponse into an exception."""
+    if response.HasField('rpc_error'):
+      code = response.rpc_error.code
+      detail = response.rpc_error.detail
+      exception_type, msg = _EXCEPTIONS_MAP.get(code, _DEFAULT_EXCEPTION)
+      if detail:
+        msg = '%s -- Additional details from server: %s' % (msg, detail)
+      return self._ErrorException(exception_type, msg)
+
+    # Otherwise the server reported an application-level error.
+    return apiproxy_errors.ApplicationError(response.application_error.code,
+                                            response.application_error.detail)
+
+ def _MakeCallImpl(self):
+ """Makes an asynchronous API call over the service bridge.
+
+ For this to work the following must be set:
+ self.package: the API package name;
+ self.call: the name of the API call/method to invoke;
+ self.request: the API request body as a serialized protocol buffer.
+
+    The actual API call is made by urllib3 via a thread pool
+    (concurrent.futures.ThreadPoolExecutor). The thread pool restricts the
+    number of concurrent requests to MAX_CONCURRENT_API_CALLS, so this method
+    will block if that limit is exceeded, until other asynchronous calls
+    resolve.
+
+ If the main thread holds the import lock, waiting on thread work can cause
+ a deadlock:
+ https://docs.python.org/2/library/threading.html#importing-in-threaded-code
+
+ Therefore, we try to detect this error case and fall back to sync calls.
+ """
+ assert self._state == apiproxy_rpc.RPC.IDLE, self._state
+
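+    # The security ticket identifies the inbound request on whose behalf this
+    # API call is being made. Depending on runtime configuration it is read
+    # either from os.environ or from the request context.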
+ if context.READ_FROM_OS_ENVIRON:
+ ticket = os.environ.get(TICKET_HEADER,
+ os.environ.get(DEV_TICKET_HEADER))
+ else:
+ ticket = context.gae_headers.API_TICKET.get(
+ context.gae_headers.DEV_REQUEST_ID.get(None))
+
+ request = remote_api_pb2.Request(
+ service_name=self.package,
+ method=self.call,
+ request_id=ticket,
+ request=self.request.SerializeToString())
+
+ deadline = self.deadline or DEFAULT_TIMEOUT
+
+ body_data = request.SerializeToString()
+ headers = {
+ SERVICE_DEADLINE_HEADER: str(deadline),
+ SERVICE_ENDPOINT_HEADER: SERVICE_ENDPOINT_NAME,
+ SERVICE_METHOD_HEADER: APIHOST_METHOD,
+ 'Content-type': RPC_CONTENT_TYPE,
+ }
+
+
+ dapper_header_value = context.get(DAPPER_ENV_KEY)
+ if dapper_header_value:
+ headers[DAPPER_HEADER] = dapper_header_value
+
+ api_host = os.environ.get('API_HOST', SERVICE_BRIDGE_HOST)
+ api_port = os.environ.get('API_PORT', API_PORT)
+
+ if ':' in api_host:
+ api_host = '[{}]'.format(api_host)
+ endpoint_url = six.moves.urllib.parse.urlunparse(
+ ('http', '%s:%s' % (api_host, api_port), PROXY_PATH, '', '', ''))
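+    # With the module defaults this yields
+    # 'http://appengine.googleapis.internal:10001/rpc_http'.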
+
+ self._state = apiproxy_rpc.RPC.RUNNING
+
+ request_kwargs = dict(
+ url=endpoint_url,
+ method='POST',
+ timeout=DEADLINE_DELTA_SECONDS + deadline,
+ headers=headers,
+ body=body_data)
+
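+    # Per the docstring above: if the Python 2 import lock is held, running
+    # the request on another thread risks deadlock, so fall back to resolving
+    # the future synchronously on this thread.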
+ if six.PY2 and imp.lock_held():
+ self.future = futures.Future()
+ self.future.set_result(self._SendRequestAndFinish(**request_kwargs))
+    else:
+ self.future = self.stub.thread_pool.submit(self._SendRequestAndFinish,
+ **request_kwargs)
+
+ def _WaitImpl(self):
+
+ assert self.future is not None
+ futures.wait([self.future])
+ return True
+
+ def _SendRequest(self, **kwargs):
+ try:
+ response = self.stub.http.request(**kwargs)
+
+ if response.status != 200:
+ raise apiproxy_errors.RPCFailedError(
+ 'Proxy returned HTTP status %s %s' %
+ (response.status, response.reason))
+ except urllib3.exceptions.TimeoutError:
+ raise self._ErrorException(*_DEADLINE_EXCEEDED_EXCEPTION)
+ except (urllib3.exceptions.RequestError,
+ urllib3.exceptions.ConnectionError):
+
+ raise self._ErrorException(*_DEFAULT_EXCEPTION)
+
+
+ parsed_response = remote_api_pb2.Response.FromString(response.data)
+
+
+ if (parsed_response.HasField('application_error') or
+ parsed_response.HasField('rpc_error')):
+ raise self._TranslateToError(parsed_response)
+
+
+ self.response.ParseFromString(parsed_response.response)
+
+ def _CaptureTrace(self, f, **kwargs):
+ try:
+ f(**kwargs)
+ except Exception:
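+      # Stash the exception and traceback so that CheckSuccess() can
+      # re-raise them on the thread that waits on this RPC.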
+ _, exc, tb = sys.exc_info()
+ self._exception = exc
+ self._traceback = tb
+
+ def _SendRequestAndFinish(self, **kwargs):
+ try:
+ self._CaptureTrace(self._SendRequest, **kwargs)
+ finally:
+ if self.callback:
+ self._CaptureTrace(self.callback)
+ self._state = apiproxy_rpc.RPC.FINISHING
+
+
+class DefaultApiStub(object):
+ """A stub for calling services through a VM service bridge.
+
+ You can use this to stub out any service that the remote server supports.
+ """
+
+
+ def __init__(self):
+ self.thread_pool = futures.ThreadPoolExecutor(MAX_CONCURRENT_API_CALLS)
+ self.http = urllib3.PoolManager(
+ num_pools=URLLIB3_POOL_COUNT, maxsize=URLLIB3_POOL_SIZE)
+
+ def MakeSyncCall(self, service, call, request, response):
+ """Make a synchronous API call.
+
+ Args:
+ service: The name of the service you are trying to use.
+ call: The name of the method.
+      request: The request protocol buffer.
+ response: The response protocol buffer to be filled.
+ """
+ rpc = self.CreateRPC()
+ rpc.MakeCall(service, call, request, response)
+ rpc.Wait()
+ rpc.CheckSuccess()
+
+ def CreateRPC(self):
+ """Create a new RPC object."""
+ return DefaultApiRPC(stub=self)
+
+
+def Register(stub):
+ """Insert stubs so App Engine services are accessed via the service bridge."""
+ apiproxy_stub_map.apiproxy.SetDefaultStub(stub)
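+
+
+# Illustrative usage (not part of the upstream module): a runtime entry point
+# would typically register the stub once at startup, e.g.
+#
+#   import default_api_stub
+#   default_api_stub.Register(default_api_stub.DefaultApiStub())
+#
+# after which google.appengine.api calls made by the app are proxied over the
+# service bridge.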
diff --git a/third_party/endpoints/LICENSE.txt b/third_party/endpoints/LICENSE.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/third_party/endpoints/LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/third_party/endpoints/README.monorail b/third_party/endpoints/README.monorail
new file mode 100644
index 0000000..aa55400
--- /dev/null
+++ b/third_party/endpoints/README.monorail
@@ -0,0 +1,36 @@
+Short Name: endpoints
+URL: https://github.com/cloudendpoints/endpoints-python
+Version: 4.8.0
+License: Apache 2.0
+License File: LICENSE.txt
+Security Critical: no
+Description:
+Google Cloud Endpoints is a solution for creating RESTful web APIs.
+Local Modifications:
+1. Retain only the endpoints/ directory and LICENSE.txt file.
+2. Remove dependency on semver and move endpoints_management and
+ protorpc.wsgi imports into the functions where they're being used.
+3. Update files for Python 3.
+ Syntax changes:
+ * except Exception, e: --> except Exception as e:
+
+ Import moves:
+  * from collections import Foo --> from collections.abc import Foo
+ * import cStringIO --> from six.moves import cStringIO
+ * import httplib --> from six.moves import http_client
+ * import urllib --> from six.moves import urllib
+ * import urlparse --> from six.moves import urllib
+
+ String changes:
+ * basestring --> six.string_types
+  * if isinstance(s, unicode): s = s.encode() --> s = six.ensure_str(s)
+ * s.encode('ascii') --> six.ensure_binary(s, 'ascii')
+ * s.encode('hex') --> binascii.hexlify(s)
+
+ Integer changes:
+ * long() --> int()
+
+ Iterator changes:
+ * iteritems() --> items()
+  * iterkeys() --> keys()
+ * itervalues() --> values()
diff --git a/third_party/endpoints/__init__.py b/third_party/endpoints/__init__.py
new file mode 100644
index 0000000..76af4f8
--- /dev/null
+++ b/third_party/endpoints/__init__.py
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+#
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Google Cloud Endpoints module."""
+
+# pylint: disable=wildcard-import
+from __future__ import absolute_import
+
+import logging
+
+from protorpc import message_types
+from protorpc import messages
+from protorpc import remote
+
+from .api_config import api, method
+from .api_config import AUTH_LEVEL, EMAIL_SCOPE
+from .api_config import Issuer, LimitDefinition, Namespace
+from .api_exceptions import *
+from .apiserving import *
+from .constants import API_EXPLORER_CLIENT_ID
+from .endpoints_dispatcher import *
+from . import message_parser
+from .resource_container import ResourceContainer
+from .users_id_token import get_current_user, get_verified_jwt, convert_jwks_uri
+from .users_id_token import InvalidGetUserCall
+from .users_id_token import SKIP_CLIENT_ID_CHECK
+
+__version__ = '4.8.0'
+
+_logger = logging.getLogger(__name__)
+_logger.setLevel(logging.INFO)
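+
+# Illustrative sketch (not part of this package): the re-exported decorators
+# above are typically used like so.
+#
+#   @api(name='echo', version='v1')
+#   class EchoApi(remote.Service):
+#
+#     @method(message_types.VoidMessage, message_types.VoidMessage,
+#             path='echo', http_method='POST', name='echo')
+#     def echo(self, unused_request):
+#       return message_types.VoidMessage()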
diff --git a/third_party/endpoints/_endpointscfg_impl.py b/third_party/endpoints/_endpointscfg_impl.py
new file mode 100644
index 0000000..2d3f740
--- /dev/null
+++ b/third_party/endpoints/_endpointscfg_impl.py
@@ -0,0 +1,617 @@
+#!/usr/bin/python
+# Copyright 2017 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+r"""External script for generating Cloud Endpoints related files.
+
+The gen_discovery_doc subcommand takes a list of fully qualified ProtoRPC
+service names and calls a cloud service which generates a discovery document in
+REST style (the only style still supported; see the --format flag).
+
+Example:
+ endpointscfg.py gen_discovery_doc -o . -f rest postservice.GreetingsV1
+
+The gen_client_lib subcommand takes a discovery document and calls a cloud
+service to generate a client library for a target language (currently just
+Java).
+
+Example:
+ endpointscfg.py gen_client_lib java -o . greetings-v0.1.discovery
+
+The get_client_lib subcommand does both of the above commands at once.
+
+Example:
+ endpointscfg.py get_client_lib java -o . postservice.GreetingsV1
+
+The gen_api_config command outputs an .api configuration file for a service.
+
+Example:
+ endpointscfg.py gen_api_config -o . -a /path/to/app \
+ --hostname myhost.appspot.com postservice.GreetingsV1
+"""
+
+from __future__ import absolute_import
+
+import argparse
+import collections
+import contextlib
+import logging
+import os
+import re
+import sys
+from six.moves import urllib
+
+import yaml
+from google.appengine.ext import testbed
+
+from . import api_config
+from . import discovery_generator
+from . import openapi_generator
+from . import remote
+
+# Conditional import, pylint: disable=g-import-not-at-top
+try:
+ import json
+except ImportError:
+ # If we can't find json packaged with Python import simplejson, which is
+ # packaged with the SDK.
+ import simplejson as json
+
+
+CLIENT_LIBRARY_BASE = 'https://google-api-client-libraries.appspot.com/generate'
+_VISIBLE_COMMANDS = ('get_client_lib', 'get_discovery_doc', 'get_openapi_spec')
+
+
+class ServerRequestException(Exception):
+ """Exception for problems with the request to a server."""
+
+ def __init__(self, http_error):
+ """Create a ServerRequestException from a given urllib2.HTTPError.
+
+ Args:
+ http_error: The HTTPError that the ServerRequestException will be
+ based on.
+ """
+ error_details = None
+ error_response = None
+ if http_error.fp:
+ try:
+ error_response = http_error.fp.read()
+ error_body = json.loads(error_response)
+ error_details = ['%s: %s' % (detail['message'], detail['debug_info'])
+ for detail in error_body['error']['errors']]
+ except (ValueError, TypeError, KeyError):
+ pass
+ if error_details:
+ error_details_str = ', '.join(error_details)
+ error_message = ('HTTP %s (%s) error when communicating with URL: %s. '
+ 'Details: %s' % (http_error.code, http_error.reason,
+ http_error.filename, error_details_str))
+ else:
+ error_message = ('HTTP %s (%s) error when communicating with URL: %s. '
+ 'Response: %s' % (http_error.code, http_error.reason,
+ http_error.filename,
+ error_response))
+ super(ServerRequestException, self).__init__(error_message)
+
+
+class _EndpointsParser(argparse.ArgumentParser):
+ """Create a subclass of argparse.ArgumentParser for Endpoints."""
+
+ def error(self, message):
+ """Override superclass to support customized error message.
+
+    The error message needs to be rewritten so that it lists only the visible
+    commands when the user invokes an invalid command; otherwise, hidden
+    commands would be displayed in stderr, which is not expected.
+
+    Refer to the following argparse documentation for detailed method
+ information:
+ http://docs.python.org/2/library/argparse.html#exiting-methods
+
+ Args:
+ message: original error message that will be printed to stderr
+ """
+ # subcommands_quoted is the same as subcommands, except each value is
+ # surrounded with double quotes. This is done to match the standard
+ # output of the ArgumentParser, while hiding commands we don't want users
+ # to use, as they are no longer documented and only here for legacy use.
+ subcommands_quoted = ', '.join(
+ [repr(command) for command in _VISIBLE_COMMANDS])
+ subcommands = ', '.join(_VISIBLE_COMMANDS)
+ message = re.sub(
+ r'(argument {%s}: invalid choice: .*) \(choose from (.*)\)$'
+ % subcommands, r'\1 (choose from %s)' % subcommands_quoted, message)
+ super(_EndpointsParser, self).error(message)
+
+
+def _WriteFile(output_path, name, content):
+ """Write given content to a file in a given directory.
+
+ Args:
+ output_path: The directory to store the file in.
+ name: The name of the file to store the content in.
+    content: The content to write to the file.
+
+ Returns:
+ The full path to the written file.
+ """
+ path = os.path.join(output_path, name)
+  # Client libraries arrive as zip bytes; configs and docs are text.
+  mode = 'wb' if isinstance(content, bytes) else 'w'
+  with open(path, mode) as f:
+    f.write(content)
+ return path
+
+
+def GenApiConfig(service_class_names, config_string_generator=None,
+ hostname=None, application_path=None, **additional_kwargs):
+ """Write an API configuration for endpoints annotated ProtoRPC services.
+
+ Args:
+ service_class_names: A list of fully qualified ProtoRPC service classes.
+ config_string_generator: A generator object that produces API config strings
+ using its pretty_print_config_to_json method.
+ hostname: A string hostname which will be used as the default version
+      hostname. If no hostname is specified in the @endpoints.api decorator,
+ this value is the fallback.
+ application_path: A string with the path to the AppEngine application.
+
+ Raises:
+ TypeError: If any service classes don't inherit from remote.Service.
+ messages.DefinitionNotFoundError: If a service can't be found.
+
+ Returns:
+ A map from service names to a string containing the API configuration of the
+ service in JSON format.
+ """
+ # First, gather together all the different APIs implemented by these
+ # classes. There may be fewer APIs than service classes. Each API is
+ # uniquely identified by (name, version). Order needs to be preserved here,
+ # so APIs that were listed first are returned first.
+ api_service_map = collections.OrderedDict()
+ resolved_services = []
+
+ for service_class_name in service_class_names:
+ module_name, base_service_class_name = service_class_name.rsplit('.', 1)
+    module = __import__(module_name, fromlist=[base_service_class_name])
+ service = getattr(module, base_service_class_name)
+ if hasattr(service, 'get_api_classes'):
+ resolved_services.extend(service.get_api_classes())
+ elif (not isinstance(service, type) or
+ not issubclass(service, remote.Service)):
+ raise TypeError('%s is not a ProtoRPC service' % service_class_name)
+ else:
+ resolved_services.append(service)
+
+ for resolved_service in resolved_services:
+ services = api_service_map.setdefault(
+ (resolved_service.api_info.name, resolved_service.api_info.api_version), [])
+ services.append(resolved_service)
+
+ # If hostname isn't specified in the API or on the command line, we'll
+ # try to build it from information in app.yaml.
+ app_yaml_hostname = _GetAppYamlHostname(application_path)
+
+ service_map = collections.OrderedDict()
+ config_string_generator = (
+ config_string_generator or api_config.ApiConfigGenerator())
+ for api_info, services in api_service_map.items():
+ assert services, 'An API must have at least one ProtoRPC service'
+ # Only override hostname if None. Hostname will be the same for all
+ # services within an API, since it's stored in common info.
+ hostname = services[0].api_info.hostname or hostname or app_yaml_hostname
+
+ # Map each API by name-version.
+ service_map['%s-%s' % api_info] = (
+ config_string_generator.pretty_print_config_to_json(
+ services, hostname=hostname, **additional_kwargs))
+
+ return service_map
+
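+# Illustrative call (the service name here is hypothetical):
+#   configs = GenApiConfig(['greetings.GreetingsV1'],
+#                          hostname='my-app.appspot.com')
+#   # -> OrderedDict mapping 'greetings-v1' to a JSON API config string.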
+
+def _GetAppYamlHostname(application_path, open_func=open):
+ """Build the hostname for this app based on the name in app.yaml.
+
+ Args:
+ application_path: A string with the path to the AppEngine application. This
+ should be the directory containing the app.yaml file.
+ open_func: Function to call to open a file. Used to override the default
+ open function in unit tests.
+
+ Returns:
+ A hostname, usually in the form of "myapp.appspot.com", based on the
+ application name in the app.yaml file. If the file can't be found or
+ there's a problem building the name, this will return None.
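+
+    For example (illustrative), an app.yaml containing "application: s~my-app"
+    yields 'my-app.appspot.com'.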
+ """
+ try:
+ app_yaml_file = open_func(os.path.join(application_path or '.', 'app.yaml'))
+ config = yaml.safe_load(app_yaml_file.read())
+ except IOError:
+ # Couldn't open/read app.yaml.
+ return None
+
+ application = config.get('application')
+ if not application:
+ return None
+
+ if ':' in application:
+ # Don't try to deal with alternate domains.
+ return None
+
+ # If there's a prefix ending in a '~', strip it.
+ tilde_index = application.rfind('~')
+ if tilde_index >= 0:
+ application = application[tilde_index + 1:]
+ if not application:
+ return None
+
+ return '%s.appspot.com' % application
+
+
+def _GenDiscoveryDoc(service_class_names,
+ output_path, hostname=None,
+ application_path=None):
+ """Write discovery documents generated from the service classes to file.
+
+ Args:
+ service_class_names: A list of fully qualified ProtoRPC service names.
+ output_path: The directory to output the discovery docs to.
+ hostname: A string hostname which will be used as the default version
+      hostname. If no hostname is specified in the @endpoints.api decorator,
+ this value is the fallback. Defaults to None.
+ application_path: A string containing the path to the AppEngine app.
+
+ Returns:
+ A list of discovery doc filenames.
+ """
+ output_files = []
+ service_configs = GenApiConfig(
+ service_class_names, hostname=hostname,
+ config_string_generator=discovery_generator.DiscoveryGenerator(),
+ application_path=application_path)
+ for api_name_version, config in service_configs.items():
+ discovery_name = api_name_version + '.discovery'
+ output_files.append(_WriteFile(output_path, discovery_name, config))
+
+ return output_files
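+
+# For example (illustrative), an API named 'greetings' at version 'v1'
+# produces a file 'greetings-v1.discovery' under output_path.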
+
+
+def _GenOpenApiSpec(service_class_names, output_path, hostname=None,
+ application_path=None, x_google_api_name=False):
+ """Write openapi documents generated from the service classes to file.
+
+ Args:
+ service_class_names: A list of fully qualified ProtoRPC service names.
+ output_path: The directory to which to output the OpenAPI specs.
+ hostname: A string hostname which will be used as the default version
+ hostname. If no hostname is specified in the @endpoints.api decorator,
+ this value is the fallback. Defaults to None.
+ application_path: A string containing the path to the AppEngine app.
+
+ Returns:
+ A list of OpenAPI spec filenames.
+ """
+ output_files = []
+ service_configs = GenApiConfig(
+ service_class_names, hostname=hostname,
+ config_string_generator=openapi_generator.OpenApiGenerator(),
+ application_path=application_path,
+ x_google_api_name=x_google_api_name)
+ for api_name_version, config in service_configs.items():
+ openapi_name = api_name_version.replace('-', '') + 'openapi.json'
+ output_files.append(_WriteFile(output_path, openapi_name, config))
+
+ return output_files
+
+
+def _GenClientLib(discovery_path, language, output_path, build_system):
+ """Write a client library from a discovery doc.
+
+ Args:
+ discovery_path: Path to the discovery doc used to generate the client
+ library.
+ language: The client library language to generate. (java)
+ output_path: The directory to output the client library zip to.
+ build_system: The target build system for the client library language.
+
+ Raises:
+ IOError: If reading the discovery doc fails.
+ ServerRequestException: If fetching the generated client library fails.
+
+ Returns:
+ The path to the zipped client library.
+ """
+ with open(discovery_path) as f:
+ discovery_doc = f.read()
+
+ client_name = re.sub(r'\.discovery$', '.zip',
+ os.path.basename(discovery_path))
+
+ return _GenClientLibFromContents(discovery_doc, language, output_path,
+ build_system, client_name)
+
+
+def _GenClientLibFromContents(discovery_doc, language, output_path,
+ build_system, client_name):
+ """Write a client library from a discovery doc.
+
+ Args:
+ discovery_doc: A string, the contents of the discovery doc used to
+ generate the client library.
+ language: A string, the client library language to generate. (java)
+ output_path: A string, the directory to output the client library zip to.
+ build_system: A string, the target build system for the client language.
+ client_name: A string, the filename used to save the client lib.
+
+ Raises:
+ IOError: If reading the discovery doc fails.
+ ServerRequestException: If fetching the generated client library fails.
+
+ Returns:
+ The path to the zipped client library.
+ """
+
+  body = urllib.parse.urlencode({'lang': language, 'content': discovery_doc,
+                                 'layout': build_system}).encode('utf-8')
+  request = urllib.request.Request(CLIENT_LIBRARY_BASE, body)
+ try:
+    with contextlib.closing(urllib.request.urlopen(request)) as response:
+ content = response.read()
+ return _WriteFile(output_path, client_name, content)
+  except urllib.error.HTTPError as error:
+ raise ServerRequestException(error)
+
+
+def _GetClientLib(service_class_names, language, output_path, build_system,
+ hostname=None, application_path=None):
+ """Fetch client libraries from a cloud service.
+
+ Args:
+ service_class_names: A list of fully qualified ProtoRPC service names.
+ language: The client library language to generate. (java)
+    output_path: The directory to output the client libraries to.
+ build_system: The target build system for the client library language.
+ hostname: A string hostname which will be used as the default version
+      hostname. If no hostname is specified in the @endpoints.api decorator,
+ this value is the fallback. Defaults to None.
+ application_path: A string containing the path to the AppEngine app.
+
+ Returns:
+ A list of paths to client libraries.
+ """
+ client_libs = []
+ service_configs = GenApiConfig(
+ service_class_names, hostname=hostname,
+ config_string_generator=discovery_generator.DiscoveryGenerator(),
+ application_path=application_path)
+ for api_name_version, config in service_configs.items():
+ client_name = api_name_version + '.zip'
+ client_libs.append(
+ _GenClientLibFromContents(config, language, output_path,
+ build_system, client_name))
+ return client_libs
+
+
+def _GenApiConfigCallback(args, api_func=GenApiConfig):
+ """Generate an api file.
+
+ Args:
+ args: An argparse.Namespace object to extract parameters from.
+ api_func: A function that generates and returns an API configuration
+ for a list of services.
+ """
+ service_configs = api_func(args.service,
+ hostname=args.hostname,
+ application_path=args.application)
+
+ for api_name_version, config in service_configs.items():
+ _WriteFile(args.output, api_name_version + '.api', config)
+
+
+def _GetClientLibCallback(args, client_func=_GetClientLib):
+ """Generate discovery docs and client libraries to files.
+
+ Args:
+ args: An argparse.Namespace object to extract parameters from.
+ client_func: A function that generates client libraries and stores them to
+ files, accepting a list of service names, a client library language,
+ an output directory, a build system for the client library language, and
+ a hostname.
+ """
+ client_paths = client_func(
+ args.service, args.language, args.output, args.build_system,
+ hostname=args.hostname, application_path=args.application)
+
+ for client_path in client_paths:
+    print('API client library written to %s' % client_path)
+
+
+def _GenDiscoveryDocCallback(args, discovery_func=_GenDiscoveryDoc):
+ """Generate discovery docs to files.
+
+ Args:
+    args: An argparse.Namespace object to extract parameters from.
+ discovery_func: A function that generates discovery docs and stores them to
+ files, accepting a list of service names, a discovery doc format, and an
+ output directory.
+ """
+ discovery_paths = discovery_func(args.service, args.output,
+ hostname=args.hostname,
+ application_path=args.application)
+ for discovery_path in discovery_paths:
+    print('API discovery document written to %s' % discovery_path)
+
+
+def _GenOpenApiSpecCallback(args, openapi_func=_GenOpenApiSpec):
+ """Generate OpenAPI (Swagger) specs to files.
+
+ Args:
+    args: An argparse.Namespace object to extract parameters from.
+ openapi_func: A function that generates OpenAPI specs and stores them to
+ files, accepting a list of service names and an output directory.
+ """
+ openapi_paths = openapi_func(args.service, args.output,
+ hostname=args.hostname,
+ application_path=args.application,
+ x_google_api_name=args.x_google_api_name)
+ for openapi_path in openapi_paths:
+    print('OpenAPI spec written to %s' % openapi_path)
+
+
+def _GenClientLibCallback(args, client_func=_GenClientLib):
+ """Generate a client library to file.
+
+ Args:
+    args: An argparse.Namespace object to extract parameters from.
+ client_func: A function that generates client libraries and stores them to
+ files, accepting a path to a discovery doc, a client library language, an
+ output directory, and a build system for the client library language.
+ """
+ client_path = client_func(args.discovery_doc[0], args.language, args.output,
+ args.build_system)
+  print('API client library written to %s' % client_path)
+
+
+def MakeParser(prog):
+ """Create an argument parser.
+
+ Args:
+ prog: The name of the program to use when outputting help text.
+
+ Returns:
+ An argparse.ArgumentParser built to specification.
+ """
+
+ def AddStandardOptions(parser, *args):
+ """Add common endpoints options to a parser.
+
+ Args:
+ parser: The parser to add options to.
+ *args: A list of option names to add. Possible names are: application,
+ format, output, language, service, and discovery_doc.
+ """
+ if 'application' in args:
+ parser.add_argument('-a', '--application', default='.',
+ help='The path to the Python App Engine App')
+ if 'format' in args:
+ # This used to be a valid option, allowing the user to select 'rest' or 'rpc',
+ # but now 'rest' is the only valid type. The argument remains so scripts using it
+ # won't break.
+ parser.add_argument('-f', '--format', default='rest',
+ choices=['rest'],
+ help='The requested API protocol type (ignored)')
+ if 'hostname' in args:
+ help_text = ('Default application hostname, if none is specified '
+ 'for API service.')
+ parser.add_argument('--hostname', help=help_text)
+ if 'output' in args:
+ parser.add_argument('-o', '--output', default='.',
+ help='The directory to store output files')
+ if 'language' in args:
+ parser.add_argument('language',
+ help='The target output programming language')
+ if 'service' in args:
+ parser.add_argument('service', nargs='+',
+ help='Fully qualified service class name')
+ if 'discovery_doc' in args:
+ parser.add_argument('discovery_doc', nargs=1,
+ help='Path to the discovery document')
+ if 'build_system' in args:
+ parser.add_argument('-bs', '--build_system', default='default',
+ help='The target build system')
+
+ parser = _EndpointsParser(prog=prog)
+ subparsers = parser.add_subparsers(
+ title='subcommands', metavar='{%s}' % ', '.join(_VISIBLE_COMMANDS))
+
+ get_client_lib = subparsers.add_parser(
+ 'get_client_lib', help=('Generates discovery documents and client '
+ 'libraries from service classes'))
+ get_client_lib.set_defaults(callback=_GetClientLibCallback)
+ AddStandardOptions(get_client_lib, 'application', 'hostname', 'output',
+ 'language', 'service', 'build_system')
+
+ get_discovery_doc = subparsers.add_parser(
+ 'get_discovery_doc',
+ help='Generates discovery documents from service classes')
+ get_discovery_doc.set_defaults(callback=_GenDiscoveryDocCallback)
+ AddStandardOptions(get_discovery_doc, 'application', 'format', 'hostname',
+ 'output', 'service')
+
+ get_openapi_spec = subparsers.add_parser(
+ 'get_openapi_spec',
+ help='Generates OpenAPI (Swagger) specs from service classes')
+ get_openapi_spec.set_defaults(callback=_GenOpenApiSpecCallback)
+ AddStandardOptions(get_openapi_spec, 'application', 'hostname', 'output',
+ 'service')
+ get_openapi_spec.add_argument('--x-google-api-name', action='store_true',
+ help="Add the 'x-google-api-name' field to the generated spec")
+
+ # Create an alias for get_openapi_spec called get_swagger_spec to support
+ # the old-style naming. This won't be a visible command, but it will still
+ # function to support legacy scripts.
+ get_swagger_spec = subparsers.add_parser(
+ 'get_swagger_spec',
+ help='Generates OpenAPI (Swagger) specs from service classes')
+ get_swagger_spec.set_defaults(callback=_GenOpenApiSpecCallback)
+  AddStandardOptions(get_swagger_spec, 'application', 'hostname', 'output',
+                     'service')
+  # The alias must also accept --x-google-api-name: it shares
+  # _GenOpenApiSpecCallback, which reads args.x_google_api_name.
+  get_swagger_spec.add_argument('--x-google-api-name', action='store_true',
+                                help="Add the 'x-google-api-name' field to "
+                                     "the generated spec")
+
+ # By removing the help attribute, the following three actions won't be
+ # displayed in usage message
+ gen_api_config = subparsers.add_parser('gen_api_config')
+ gen_api_config.set_defaults(callback=_GenApiConfigCallback)
+ AddStandardOptions(gen_api_config, 'application', 'hostname', 'output',
+ 'service')
+
+ gen_discovery_doc = subparsers.add_parser('gen_discovery_doc')
+ gen_discovery_doc.set_defaults(callback=_GenDiscoveryDocCallback)
+ AddStandardOptions(gen_discovery_doc, 'application', 'format', 'hostname',
+ 'output', 'service')
+
+ gen_client_lib = subparsers.add_parser('gen_client_lib')
+ gen_client_lib.set_defaults(callback=_GenClientLibCallback)
+ AddStandardOptions(gen_client_lib, 'output', 'language', 'discovery_doc',
+ 'build_system')
+
+ return parser
+
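+# Example invocations of the parser built above (the service name
+# 'main.GreetingApi' is illustrative, not part of this module):
+#
+#   $ endpointscfg.py get_discovery_doc --hostname example.appspot.com \
+#       main.GreetingApi
+#   $ endpointscfg.py get_openapi_spec main.GreetingApi -o ./specs
+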
+
+def _SetupStubs():
+ tb = testbed.Testbed()
+ tb.setup_env(CURRENT_VERSION_ID='1.0')
+ tb.activate()
+ for k, v in testbed.INIT_STUB_METHOD_NAMES.items():
+    # The old stub initialization code didn't support the image service at
+    # all, so we just ignore it here.
+ if k != 'images':
+ getattr(tb, v)()
+
+
+def main(argv):
+ logging.basicConfig()
+ # silence warnings from endpoints.apiserving; they're not relevant
+ # to command-line operation.
+ logging.getLogger('endpoints.apiserving').setLevel(logging.ERROR)
+
+ _SetupStubs()
+
+ parser = MakeParser(argv[0])
+ args = parser.parse_args(argv[1:])
+
+ # Handle the common "application" argument here, since most of the handlers
+ # use this.
+ application_path = getattr(args, 'application', None)
+ if application_path is not None:
+ sys.path.insert(0, os.path.abspath(application_path))
+
+ args.callback(args)
diff --git a/third_party/endpoints/_endpointscfg_setup.py b/third_party/endpoints/_endpointscfg_setup.py
new file mode 100644
index 0000000..a286056
--- /dev/null
+++ b/third_party/endpoints/_endpointscfg_setup.py
@@ -0,0 +1,107 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for setting up App Engine library paths.
+
+This module searches for the root of the App Engine Python SDK or Google Cloud
+SDK and computes a list of library paths and adds them to sys.path. This is
+necessary for two reasons:
+
+1. The endpointscfg tool imports user code and therefore must be able to
+ import modules used in the app.
+2. As a consequence of the first item, we must call an App Engine method to
+   set up service stubs, in case an app's initialization code uses an App
+   Engine service. For example, there is an App Engine version of pytz that
+   uses memcache, and users may invoke it at import time because it appears
+   to be purely declarative.
+"""
+import logging
+import os
+import sys
+
+_PYTHON_EXTENSIONS_WARNING = """
+Found Cloud SDK, but App Engine Python Extensions are not
+installed. If you encounter errors, please run:
+ $ gcloud components install app-engine-python
+""".strip()
+
+
+_IMPORT_ERROR_WARNING = """
+Could not import App Engine Python libraries. If you encounter
+errors, please make sure that the SDK binary path is in your PATH environment
+variable or that the ENDPOINTS_GAE_SDK variable points to a valid SDK root.
+""".strip()
+
+
+_NOT_FOUND_WARNING = """
+Could not find either the Cloud SDK or the App Engine Python SDK.
+If you encounter errors, please make sure that the SDK binary path is in your
+PATH environment variable or that the ENDPOINTS_GAE_SDK variable points to a
+valid SDK root.""".strip()
+
+
+_NO_FIX_SYS_PATH_WARNING = """
+Could not find the fix_sys_path() function in dev_appserver.
+If you encounter errors, please make sure that your Google App Engine SDK is
+up-to-date.""".strip()
+
+
+def _FindSdkPath():
+ environ_sdk = os.environ.get('ENDPOINTS_GAE_SDK')
+ if environ_sdk:
+ maybe_cloud_sdk = os.path.join(environ_sdk, 'platform', 'google_appengine')
+ if os.path.exists(maybe_cloud_sdk):
+ return maybe_cloud_sdk
+ return environ_sdk
+
+ for path in os.environ['PATH'].split(os.pathsep):
+ if os.path.exists(os.path.join(path, 'dev_appserver.py')):
+ if (path.endswith('bin') and
+ os.path.exists(os.path.join(path, 'gcloud'))):
+ # Cloud SDK ships with dev_appserver.py in a bin directory. In the
+ # root directory, we can find the Python SDK in
+ # platform/google_appengine provided that it's installed.
+ sdk_path = os.path.join(os.path.dirname(path),
+ 'platform',
+ 'google_appengine')
+ if not os.path.exists(sdk_path):
+ logging.warning(_PYTHON_EXTENSIONS_WARNING)
+ return sdk_path
+      # App Engine SDK ships with dev_appserver.py in the root directory.
+ return path
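+
+# Illustrative layouts the search above handles (paths are examples only):
+#   Cloud SDK:      ~/google-cloud-sdk/bin/dev_appserver.py
+#     -> returns ~/google-cloud-sdk/platform/google_appengine (if installed)
+#   App Engine SDK: ~/google_appengine/dev_appserver.py
+#     -> returns ~/google_appengine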
+
+
+def _SetupPaths():
+ """Sets up the sys.path with special directories for endpointscfg.py."""
+ sdk_path = _FindSdkPath()
+ if sdk_path:
+ sys.path.append(sdk_path)
+ try:
+ import dev_appserver # pylint: disable=g-import-not-at-top
+ if hasattr(dev_appserver, 'fix_sys_path'):
+ dev_appserver.fix_sys_path()
+ else:
+ logging.warning(_NO_FIX_SYS_PATH_WARNING)
+ except ImportError:
+ logging.warning(_IMPORT_ERROR_WARNING)
+ else:
+ logging.warning(_NOT_FOUND_WARNING)
+
+ # Add the path above this directory, so we can import the endpoints package
+ # from the user's app code (rather than from another, possibly outdated SDK).
+ # pylint: disable=g-import-not-at-top
+ from google.appengine.ext import vendor
+ vendor.add(os.path.dirname(os.path.dirname(__file__)))
+
+
+_SetupPaths()
diff --git a/third_party/endpoints/api_config.py b/third_party/endpoints/api_config.py
new file mode 100644
index 0000000..e24cd57
--- /dev/null
+++ b/third_party/endpoints/api_config.py
@@ -0,0 +1,2257 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Library for generating an API configuration document for a ProtoRPC backend.
+
+The protorpc.remote.Service is inspected and a JSON document describing
+the API is returned.
+
+ class MyResponse(messages.Message):
+ bool_value = messages.BooleanField(1)
+ int32_value = messages.IntegerField(2)
+
+ class MyService(remote.Service):
+
+ @remote.method(message_types.VoidMessage, MyResponse)
+ def entries_get(self, request):
+ pass
+
+ api = ApiConfigGenerator().pretty_print_config_to_json(MyService)
+"""
+
+# pylint: disable=g-bad-name
+
+# pylint: disable=g-statement-before-imports,g-import-not-at-top
+from __future__ import absolute_import
+
+import json
+import logging
+import re
+import six
+
+from google.appengine.api import app_identity
+
+import attr
+from protorpc import util
+
+from . import api_exceptions
+from . import constants
+from . import message_parser
+from . import message_types
+from . import messages
+from . import remote
+from . import resource_container
+from . import types as endpoints_types
+# originally in this module
+from .types import Issuer, LimitDefinition, Namespace
+from . import users_id_token
+from . import util as endpoints_util
+
+_logger = logging.getLogger(__name__)
+package = 'google.appengine.endpoints'
+
+
+__all__ = [
+ 'ApiAuth',
+ 'ApiConfigGenerator',
+ 'ApiFrontEndLimitRule',
+ 'ApiFrontEndLimits',
+ 'EMAIL_SCOPE',
+ 'Issuer',
+ 'LimitDefinition',
+ 'Namespace',
+ 'api',
+ 'method',
+ 'AUTH_LEVEL',
+ 'package',
+]
+
+
+EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
+_EMAIL_SCOPE_DESCRIPTION = 'View your email address'
+_EMAIL_SCOPE_OBJ = endpoints_types.OAuth2Scope(
+ scope=EMAIL_SCOPE, description=_EMAIL_SCOPE_DESCRIPTION)
+_PATH_VARIABLE_PATTERN = r'{([a-zA-Z_][a-zA-Z_.\d]*)}'
+
+_MULTICLASS_MISMATCH_ERROR_TEMPLATE = (
+ 'Attempting to implement service %s, version %s, with multiple '
+ 'classes that aren\'t compatible. See docstring for api() for '
+ 'examples how to implement a multi-class API.')
+
+_INVALID_NAMESPACE_ERROR_TEMPLATE = (
+ 'Invalid namespace configuration. If a namespace is set, make sure to set '
+ '%s. package_path is optional.')
+
+
+_VALID_PART_RE = re.compile('^{[^{}]+}$')
+_VALID_LAST_PART_RE = re.compile('^{[^{}]+}(:)?(?(1)[^{}]+)$')
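+# For illustration: the conditional group in _VALID_LAST_PART_RE lets the
+# final path segment carry a custom verb, so '{id}' and '{id}:verb' match,
+# while '{id}:' does not.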
+
+
+
+def _Enum(docstring, *names):
+ """Utility to generate enum classes used by annotations.
+
+ Args:
+ docstring: Docstring for the generated enum class.
+ *names: Enum names.
+
+ Returns:
+ A class that contains enum names as attributes.
+ """
+ enums = dict(zip(names, range(len(names))))
+ reverse = dict((value, key) for key, value in enums.items())
+ enums['reverse_mapping'] = reverse
+ enums['__doc__'] = docstring
+ return type('Enum', (object,), enums)
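+
+# A minimal sketch of the helper above (names are illustrative):
+#
+#   Color = _Enum('Sample enum.', 'RED', 'GREEN')
+#   assert Color.RED == 0 and Color.GREEN == 1
+#   assert Color.reverse_mapping[1] == 'GREEN'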
+
+_AUTH_LEVEL_DOCSTRING = """
+ Define the enums used by the auth_level annotation to specify frontend
+ authentication requirement.
+
+ Frontend authentication is handled by a Google API server prior to the
+ request reaching backends. An early return before hitting the backend can
+ happen if the request does not fulfil the requirement specified by the
+ auth_level.
+
+ Valid values of auth_level and their meanings are:
+
+ AUTH_LEVEL.REQUIRED: Valid authentication credentials are required. Backend
+ will be called only if authentication credentials are present and valid.
+
+ AUTH_LEVEL.OPTIONAL: Authentication is optional. If authentication credentials
+ are supplied they must be valid. Backend will be called if the request
+ contains valid authentication credentials or no authentication credentials.
+
+ AUTH_LEVEL.OPTIONAL_CONTINUE: Authentication is optional and will be attempted
+ if authentication credentials are supplied. Invalid authentication
+ credentials will be removed but the request can always reach backend.
+
+ AUTH_LEVEL.NONE: Frontend authentication will be skipped. If authentication is
+ desired, it will need to be performed by the backend.
+ """
+
+AUTH_LEVEL = _Enum(_AUTH_LEVEL_DOCSTRING, 'REQUIRED', 'OPTIONAL',
+ 'OPTIONAL_CONTINUE', 'NONE')
+_AUTH_LEVEL_WARNING = ("Due to a design error, auth_level has never actually been functional. "
+ "It will likely be removed and replaced by a functioning alternative "
+ "in a future version of the framework. Please stop using auth_level now.")
+
+
+def _GetFieldAttributes(field):
+ """Decomposes field into the needed arguments to pass to the constructor.
+
+ This can be used to create copies of the field or to compare if two fields
+ are "equal" (since __eq__ is not implemented on messages.Field).
+
+ Args:
+ field: A ProtoRPC message field (potentially to be copied).
+
+ Raises:
+ TypeError: If the field is not an instance of messages.Field.
+
+ Returns:
+ A pair of relevant arguments to be passed to the constructor for the field
+ type. The first element is a list of positional arguments for the
+ constructor and the second is a dictionary of keyword arguments.
+ """
+ if not isinstance(field, messages.Field):
+ raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,))
+
+ positional_args = []
+ kwargs = {
+ 'required': field.required,
+ 'repeated': field.repeated,
+ 'variant': field.variant,
+ 'default': field._Field__default, # pylint: disable=protected-access
+ }
+
+ if isinstance(field, messages.MessageField):
+ # Message fields can't have a default
+ kwargs.pop('default')
+ if not isinstance(field, message_types.DateTimeField):
+ positional_args.insert(0, field.message_type)
+ elif isinstance(field, messages.EnumField):
+ positional_args.insert(0, field.type)
+
+ return positional_args, kwargs
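+
+# A minimal sketch of the decomposition above (the field is illustrative):
+#
+#   field = messages.StringField(1, required=True)
+#   args, kwargs = _GetFieldAttributes(field)
+#   # args == []; kwargs == {'required': True, 'repeated': False,
+#   #                        'variant': messages.Variant.STRING,
+#   #                        'default': None}
+#   clone = messages.StringField(1, *args, **kwargs)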
+
+
+def _CheckType(value, check_type, name, allow_none=True):
+ """Check that the type of an object is acceptable.
+
+ Args:
+ value: The object whose type is to be checked.
+ check_type: The type that the object must be an instance of.
+ name: Name of the object, to be placed in any error messages.
+      allow_none: True if value can be None, False if not.
+
+ Raises:
+ TypeError: If value is not an acceptable type.
+ """
+ if value is None and allow_none:
+ return
+ if not isinstance(value, check_type):
+ raise TypeError('%s type doesn\'t match %s.' % (name, check_type))
+
+
+def _CheckEnum(value, check_type, name):
+ if value is None:
+ return
+ if value not in check_type.reverse_mapping:
+ raise TypeError('%s is not a valid value for %s' % (value, name))
+
+
+def _CheckNamespace(namespace):
+ _CheckType(namespace, Namespace, 'namespace')
+ if namespace:
+ if not namespace.owner_domain:
+ raise api_exceptions.InvalidNamespaceException(
+ _INVALID_NAMESPACE_ERROR_TEMPLATE % 'owner_domain')
+ if not namespace.owner_name:
+ raise api_exceptions.InvalidNamespaceException(
+ _INVALID_NAMESPACE_ERROR_TEMPLATE % 'owner_name')
+
+ _CheckType(namespace.owner_domain, six.string_types, 'namespace.owner_domain')
+ _CheckType(namespace.owner_name, six.string_types, 'namespace.owner_name')
+ _CheckType(namespace.package_path, six.string_types, 'namespace.package_path')
+
+
+def _CheckAudiences(audiences):
+ # Audiences can either be a list of audiences using the google_id_token
+ # or a dict mapping auth issuer name to the list of audiences.
+ if audiences is None or isinstance(audiences, dict):
+ return
+ else:
+ endpoints_util.check_list_type(audiences, six.string_types, 'audiences')
+
+
+def _CheckLimitDefinitions(limit_definitions):
+ _CheckType(limit_definitions, list, 'limit_definitions')
+ if limit_definitions:
+ for ld in limit_definitions:
+ if not ld.metric_name:
+ raise api_exceptions.InvalidLimitDefinitionException(
+ "Metric name must be set in all limit definitions.")
+ if not ld.display_name:
+ raise api_exceptions.InvalidLimitDefinitionException(
+ "Display name must be set in all limit definitions.")
+
+ _CheckType(ld.metric_name, six.string_types, 'limit_definition.metric_name')
+ _CheckType(ld.display_name, six.string_types, 'limit_definition.display_name')
+ _CheckType(ld.default_limit, int, 'limit_definition.default_limit')
+
+
+# pylint: disable=g-bad-name
+class _ApiInfo(object):
+ """Configurable attributes of an API.
+
+ A structured data object used to store API information associated with each
+ remote.Service-derived class that implements an API. This stores properties
+ that could be different for each class (such as the path or
+ collection/resource name), as well as properties common to all classes in
+ the API (such as API name and version).
+ """
+
+ @util.positional(2)
+ def __init__(self, common_info, resource_name=None, path=None, audiences=None,
+ scopes=None, allowed_client_ids=None, auth_level=None,
+ api_key_required=None):
+ """Constructor for _ApiInfo.
+
+ Args:
+ common_info: _ApiDecorator.__ApiCommonInfo, Information that's common for
+ all classes that implement an API.
+ resource_name: string, The collection that the annotated class will
+ implement in the API. (Default: None)
+ path: string, Base request path for all methods in this API.
+ (Default: None)
+ audiences: list of strings, Acceptable audiences for authentication.
+ (Default: None)
+ scopes: list of strings, Acceptable scopes for authentication.
+ (Default: None)
+ allowed_client_ids: list of strings, Acceptable client IDs for auth.
+ (Default: None)
+ auth_level: enum from AUTH_LEVEL, Frontend authentication level.
+ (Default: None)
+ api_key_required: bool, whether a key is required to call this API.
+ """
+ _CheckType(resource_name, six.string_types, 'resource_name')
+ _CheckType(path, six.string_types, 'path')
+ endpoints_util.check_list_type(audiences, six.string_types, 'audiences')
+ endpoints_util.check_list_type(scopes, six.string_types, 'scopes')
+ endpoints_util.check_list_type(allowed_client_ids, six.string_types,
+ 'allowed_client_ids')
+ _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')
+ _CheckType(api_key_required, bool, 'api_key_required')
+
+ self.__common_info = common_info
+ self.__resource_name = resource_name
+ self.__path = path
+ self.__audiences = audiences
+ self.__scopes = endpoints_types.OAuth2Scope.convert_list(scopes)
+ self.__allowed_client_ids = allowed_client_ids
+ self.__auth_level = auth_level
+ self.__api_key_required = api_key_required
+
+ def is_same_api(self, other):
+ """Check if this implements the same API as another _ApiInfo instance."""
+ if not isinstance(other, _ApiInfo):
+ return False
+ # pylint: disable=protected-access
+ return self.__common_info is other.__common_info
+
+ @property
+ def name(self):
+ """Name of the API."""
+ return self.__common_info.name
+
+ @property
+ def api_version(self):
+ """Version of the API."""
+ return self.__common_info.api_version
+
+ @property
+ def path_version(self):
+ """Version of the API for putting in the path."""
+ return self.__common_info.path_version
+
+ @property
+ def description(self):
+ """Description of the API."""
+ return self.__common_info.description
+
+ @property
+ def hostname(self):
+ """Hostname for the API."""
+ return self.__common_info.hostname
+
+ @property
+ def audiences(self):
+ """List of audiences accepted for the API, overriding the defaults."""
+ if self.__audiences is not None:
+ return self.__audiences
+ return self.__common_info.audiences
+
+ @property
+ def scope_objs(self):
+ """List of scopes (as OAuth2Scopes) accepted for the API, overriding the defaults."""
+ if self.__scopes is not None:
+ return self.__scopes
+ return self.__common_info.scope_objs
+
+ @property
+ def scopes(self):
+ """List of scopes (as strings) accepted for the API, overriding the defaults."""
+ if self.scope_objs is not None:
+ return [_s.scope for _s in self.scope_objs]
+
+ @property
+ def allowed_client_ids(self):
+ """List of client IDs accepted for the API, overriding the defaults."""
+ if self.__allowed_client_ids is not None:
+ return self.__allowed_client_ids
+ return self.__common_info.allowed_client_ids
+
+ @property
+ def issuers(self):
+ """Dict mapping auth issuer names to auth issuers for the API."""
+ return self.__common_info.issuers
+
+ @property
+ def namespace(self):
+ """Namespace for the API."""
+ return self.__common_info.namespace
+
+ @property
+ def auth_level(self):
+ """Enum from AUTH_LEVEL specifying the frontend authentication level."""
+ if self.__auth_level is not None:
+ return self.__auth_level
+ return self.__common_info.auth_level
+
+ @property
+ def api_key_required(self):
+ """bool specifying whether a key is required to call into this API."""
+ if self.__api_key_required is not None:
+ return self.__api_key_required
+ return self.__common_info.api_key_required
+
+ @property
+ def canonical_name(self):
+ """Canonical name for the API."""
+ return self.__common_info.canonical_name
+
+ @property
+ def auth(self):
+ """Authentication configuration information for this API."""
+ return self.__common_info.auth
+
+ @property
+ def owner_domain(self):
+ """Domain of the owner of this API."""
+ return self.__common_info.owner_domain
+
+ @property
+ def owner_name(self):
+ """Name of the owner of this API."""
+ return self.__common_info.owner_name
+
+ @property
+ def package_path(self):
+ """Package this API belongs to, '/' delimited. Used by client libs."""
+ return self.__common_info.package_path
+
+ @property
+ def frontend_limits(self):
+ """Optional query limits for unregistered developers."""
+ return self.__common_info.frontend_limits
+
+ @property
+ def title(self):
+ """Human readable name of this API."""
+ return self.__common_info.title
+
+ @property
+ def documentation(self):
+ """Link to the documentation for this version of the API."""
+ return self.__common_info.documentation
+
+ @property
+ def resource_name(self):
+ """Resource name for the class this decorates."""
+ return self.__resource_name
+
+ @property
+ def path(self):
+ """Base path prepended to any method paths in the class this decorates."""
+ return self.__path
+
+ @property
+ def base_path(self):
+ """Base path for the entire API prepended before the path property."""
+ return self.__common_info.base_path
+
+ @property
+ def limit_definitions(self):
+ """Rate limiting metric definitions for this API."""
+ return self.__common_info.limit_definitions
+
+ @property
+ def use_request_uri(self):
+ """Match request paths based on the REQUEST_URI instead of PATH_INFO."""
+ return self.__common_info.use_request_uri
+
+
+class _ApiDecorator(object):
+ """Decorator for single- or multi-class APIs.
+
+ An instance of this class can be used directly as a decorator for a
+ single-class API. Or call the api_class() method to decorate a multi-class
+ API.
+ """
+
+ @util.positional(3)
+ def __init__(self, name, version, description=None, hostname=None,
+ audiences=None, scopes=None, allowed_client_ids=None,
+ canonical_name=None, auth=None, owner_domain=None,
+ owner_name=None, package_path=None, frontend_limits=None,
+ title=None, documentation=None, auth_level=None, issuers=None,
+ namespace=None, api_key_required=None, base_path=None,
+ limit_definitions=None, use_request_uri=None):
+ """Constructor for _ApiDecorator.
+
+ Args:
+ name: string, Name of the API.
+ version: string, Version of the API.
+ description: string, Short description of the API (Default: None)
+ hostname: string, Hostname of the API (Default: app engine default host)
+ audiences: list of strings, Acceptable audiences for authentication.
+ scopes: list of strings, Acceptable scopes for authentication.
+ allowed_client_ids: list of strings, Acceptable client IDs for auth.
+ canonical_name: string, the canonical name for the API, a more human
+ readable version of the name.
+ auth: ApiAuth instance, the authentication configuration information
+ for this API.
+ owner_domain: string, the domain of the person or company that owns
+ this API. Along with owner_name, this provides hints to properly
+ name client libraries for this API.
+ owner_name: string, the name of the owner of this API. Along with
+ owner_domain, this provides hints to properly name client libraries
+ for this API.
+ package_path: string, the "package" this API belongs to. This '/'
+ delimited value specifies logical groupings of APIs. This is used by
+ client libraries of this API.
+ frontend_limits: ApiFrontEndLimits, optional query limits for unregistered
+ developers.
+ title: string, the human readable title of your API. It is exposed in the
+ discovery service.
+ documentation: string, a URL where users can find documentation about this
+ version of the API. This will be surfaced in the API Explorer and GPE
+ plugin to allow users to learn about your service.
+ auth_level: enum from AUTH_LEVEL, Frontend authentication level.
+ issuers: dict, mapping auth issuer names to endpoints.Issuer objects.
+ namespace: endpoints.Namespace, the namespace for the API.
+ api_key_required: bool, whether a key is required to call this API.
+ base_path: string, the base path for all endpoints in this API.
+ limit_definitions: list of LimitDefinition tuples used in this API.
+ use_request_uri: if true, match requests against REQUEST_URI instead of PATH_INFO
+ """
+ self.__common_info = self.__ApiCommonInfo(
+ name, version, description=description, hostname=hostname,
+ audiences=audiences, scopes=scopes,
+ allowed_client_ids=allowed_client_ids,
+ canonical_name=canonical_name, auth=auth, owner_domain=owner_domain,
+ owner_name=owner_name, package_path=package_path,
+ frontend_limits=frontend_limits, title=title,
+ documentation=documentation, auth_level=auth_level, issuers=issuers,
+ namespace=namespace, api_key_required=api_key_required,
+ base_path=base_path, limit_definitions=limit_definitions,
+ use_request_uri=use_request_uri)
+ self.__classes = []
+
+ class __ApiCommonInfo(object):
+ """API information that's common among all classes that implement an API.
+
+ When a remote.Service-derived class implements part of an API, there is
+ some common information that remains constant across all such classes
+ that implement the same API. This includes things like name, version,
+    hostname, and so on. __ApiCommonInfo stores that common information, and
+ a single __ApiCommonInfo instance is shared among all classes that
+ implement the same API, guaranteeing that they share the same common
+ information.
+
+ Some of these values can be overridden (such as audiences and scopes),
+ while some can't and remain the same for all classes that implement
+ the API (such as name and version).
+ """
+
+ @util.positional(3)
+ def __init__(self, name, version, description=None, hostname=None,
+ audiences=None, scopes=None, allowed_client_ids=None,
+ canonical_name=None, auth=None, owner_domain=None,
+ owner_name=None, package_path=None, frontend_limits=None,
+ title=None, documentation=None, auth_level=None, issuers=None,
+ namespace=None, api_key_required=None, base_path=None,
+ limit_definitions=None, use_request_uri=None):
+ """Constructor for _ApiCommonInfo.
+
+ Args:
+ name: string, Name of the API.
+ version: string, Version of the API.
+ description: string, Short description of the API (Default: None)
+ hostname: string, Hostname of the API (Default: app engine default host)
+ audiences: list of strings, Acceptable audiences for authentication.
+ scopes: list of strings, Acceptable scopes for authentication.
+ allowed_client_ids: list of strings, Acceptable client IDs for auth.
+ canonical_name: string, the canonical name for the API, a more human
+ readable version of the name.
+ auth: ApiAuth instance, the authentication configuration information
+ for this API.
+ owner_domain: string, the domain of the person or company that owns
+ this API. Along with owner_name, this provides hints to properly
+ name client libraries for this API.
+ owner_name: string, the name of the owner of this API. Along with
+ owner_domain, this provides hints to properly name client libraries
+ for this API.
+ package_path: string, the "package" this API belongs to. This '/'
+ delimited value specifies logical groupings of APIs. This is used by
+ client libraries of this API.
+ frontend_limits: ApiFrontEndLimits, optional query limits for
+ unregistered developers.
+ title: string, the human readable title of your API. It is exposed in
+ the discovery service.
+ documentation: string, a URL where users can find documentation about
+ this version of the API. This will be surfaced in the API Explorer and
+ GPE plugin to allow users to learn about your service.
+ auth_level: enum from AUTH_LEVEL, Frontend authentication level.
+ issuers: dict, mapping auth issuer names to endpoints.Issuer objects.
+ namespace: endpoints.Namespace, the namespace for the API.
+ api_key_required: bool, whether a key is required to call into this API.
+ base_path: string, the base path for all endpoints in this API.
+ limit_definitions: list of LimitDefinition tuples used in this API.
+ use_request_uri: if true, match requests against REQUEST_URI instead of PATH_INFO
+ """
+ _CheckType(name, six.string_types, 'name', allow_none=False)
+ _CheckType(version, six.string_types, 'version', allow_none=False)
+ _CheckType(description, six.string_types, 'description')
+ _CheckType(hostname, six.string_types, 'hostname')
+ endpoints_util.check_list_type(scopes, (six.string_types, endpoints_types.OAuth2Scope), 'scopes')
+ endpoints_util.check_list_type(allowed_client_ids, six.string_types,
+ 'allowed_client_ids')
+ _CheckType(canonical_name, six.string_types, 'canonical_name')
+ _CheckType(auth, ApiAuth, 'auth')
+ _CheckType(owner_domain, six.string_types, 'owner_domain')
+ _CheckType(owner_name, six.string_types, 'owner_name')
+ _CheckType(package_path, six.string_types, 'package_path')
+ _CheckType(frontend_limits, ApiFrontEndLimits, 'frontend_limits')
+ _CheckType(title, six.string_types, 'title')
+ _CheckType(documentation, six.string_types, 'documentation')
+ _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')
+ _CheckType(api_key_required, bool, 'api_key_required')
+ _CheckType(base_path, six.string_types, 'base_path')
+
+ _CheckType(issuers, dict, 'issuers')
+ if issuers:
+ for issuer_name, issuer_value in issuers.items():
+ _CheckType(issuer_name, six.string_types, 'issuer %s' % issuer_name)
+ _CheckType(issuer_value, Issuer, 'issuer value for %s' % issuer_name)
+
+ _CheckNamespace(namespace)
+
+ _CheckAudiences(audiences)
+
+ _CheckLimitDefinitions(limit_definitions)
+ _CheckType(use_request_uri, bool, 'use_request_uri')
+
+ if hostname is None:
+ hostname = app_identity.get_default_version_hostname()
+ if scopes is None:
+ scopes = [_EMAIL_SCOPE_OBJ]
+ else:
+ scopes = endpoints_types.OAuth2Scope.convert_list(scopes)
+ if allowed_client_ids is None:
+ allowed_client_ids = [constants.API_EXPLORER_CLIENT_ID]
+ if auth_level is None:
+ auth_level = AUTH_LEVEL.NONE
+ if api_key_required is None:
+ api_key_required = False
+ if base_path is None:
+ base_path = '/_ah/api/'
+ if use_request_uri is None:
+ use_request_uri = False
+
+ self.__name = name
+ self.__api_version = version
+ self.__path_version = version
+ self.__description = description
+ self.__hostname = hostname
+ self.__audiences = audiences
+ self.__scopes = scopes
+ self.__allowed_client_ids = allowed_client_ids
+ self.__canonical_name = canonical_name
+ self.__auth = auth
+ self.__owner_domain = owner_domain
+ self.__owner_name = owner_name
+ self.__package_path = package_path
+ self.__frontend_limits = frontend_limits
+ self.__title = title
+ self.__documentation = documentation
+ self.__auth_level = auth_level
+ self.__issuers = issuers
+ self.__namespace = namespace
+ self.__api_key_required = api_key_required
+ self.__base_path = base_path
+ self.__limit_definitions = limit_definitions
+ self.__use_request_uri = use_request_uri
+
+ @property
+ def name(self):
+ """Name of the API."""
+ return self.__name
+
+ @property
+ def api_version(self):
+ """Version of the API."""
+ return self.__api_version
+
+ @property
+ def path_version(self):
+ """Version of the API for putting in the path."""
+ return self.__path_version
+
+ @property
+ def description(self):
+ """Description of the API."""
+ return self.__description
+
+ @property
+ def hostname(self):
+ """Hostname for the API."""
+ return self.__hostname
+
+ @property
+ def audiences(self):
+ """List of audiences accepted by default for the API."""
+ return self.__audiences
+
+ @property
+ def scope_objs(self):
+ """List of scopes (as OAuth2Scopes) accepted by default for the API."""
+ return self.__scopes
+
+ @property
+ def scopes(self):
+ """List of scopes (as strings) accepted by default for the API."""
+ if self.scope_objs is not None:
+ return [_s.scope for _s in self.scope_objs]
+
+ @property
+ def allowed_client_ids(self):
+ """List of client IDs accepted by default for the API."""
+ return self.__allowed_client_ids
+
+ @property
+ def issuers(self):
+ """List of auth issuers for the API."""
+ return self.__issuers
+
+ @property
+ def namespace(self):
+ """Namespace of the API."""
+ return self.__namespace
+
+ @property
+ def auth_level(self):
+ """Enum from AUTH_LEVEL specifying default frontend auth level."""
+ return self.__auth_level
+
+ @property
+ def canonical_name(self):
+ """Canonical name for the API."""
+ return self.__canonical_name
+
+ @property
+ def auth(self):
+ """Authentication configuration for this API."""
+ return self.__auth
+
+ @property
+ def api_key_required(self):
+ """Whether a key is required to call into this API."""
+ return self.__api_key_required
+
+ @property
+ def owner_domain(self):
+ """Domain of the owner of this API."""
+ return self.__owner_domain
+
+ @property
+ def owner_name(self):
+ """Name of the owner of this API."""
+ return self.__owner_name
+
+ @property
+ def package_path(self):
+ """Package this API belongs to, '/' delimited. Used by client libs."""
+ return self.__package_path
+
+ @property
+ def frontend_limits(self):
+ """Optional query limits for unregistered developers."""
+ return self.__frontend_limits
+
+ @property
+ def title(self):
+ """Human readable name of this API."""
+ return self.__title
+
+ @property
+ def documentation(self):
+ """Link to the documentation for this version of the API."""
+ return self.__documentation
+
+ @property
+ def base_path(self):
+ """The base path for all endpoints in this API."""
+ return self.__base_path
+
+ @property
+ def limit_definitions(self):
+ """Rate limiting metric definitions for this API."""
+ return self.__limit_definitions
+
+ @property
+ def use_request_uri(self):
+ """Match request paths based on the REQUEST_URI instead of PATH_INFO."""
+ return self.__use_request_uri
+
+ def __call__(self, service_class):
+ """Decorator for ProtoRPC class that configures Google's API server.
+
+ Args:
+ service_class: remote.Service class, ProtoRPC service class being wrapped.
+
+ Returns:
+ Same class with API attributes assigned in api_info.
+ """
+ return self.api_class()(service_class)
+
+ def api_class(self, resource_name=None, path=None, audiences=None,
+ scopes=None, allowed_client_ids=None, auth_level=None,
+ api_key_required=None):
+ """Get a decorator for a class that implements an API.
+
+ This can be used for single-class or multi-class implementations. It's
+ used implicitly in simple single-class APIs that only use @api directly.
+
+ Args:
+ resource_name: string, Resource name for the class this decorates.
+ (Default: None)
+ path: string, Base path prepended to any method paths in the class this
+ decorates. (Default: None)
+ audiences: list of strings, Acceptable audiences for authentication.
+ (Default: None)
+ scopes: list of strings, Acceptable scopes for authentication.
+ (Default: None)
+ allowed_client_ids: list of strings, Acceptable client IDs for auth.
+ (Default: None)
+ auth_level: enum from AUTH_LEVEL, Frontend authentication level.
+ (Default: None)
+ api_key_required: bool, Whether a key is required to call into this API.
+ (Default: None)
+
+ Returns:
+ A decorator function to decorate a class that implements an API.
+ """
+ if auth_level is not None:
+ _logger.warn(_AUTH_LEVEL_WARNING)
+
+ def apiserving_api_decorator(api_class):
+ """Decorator for ProtoRPC class that configures Google's API server.
+
+ Args:
+ api_class: remote.Service class, ProtoRPC service class being wrapped.
+
+ Returns:
+ Same class with API attributes assigned in api_info.
+ """
+ self.__classes.append(api_class)
+ api_class.api_info = _ApiInfo(
+ self.__common_info, resource_name=resource_name,
+ path=path, audiences=audiences, scopes=scopes,
+ allowed_client_ids=allowed_client_ids, auth_level=auth_level,
+ api_key_required=api_key_required)
+ return api_class
+
+ return apiserving_api_decorator
+
+ def get_api_classes(self):
+ """Get the list of remote.Service classes that implement this API."""
+ return self.__classes
+
+
+class ApiAuth(object):
+ """Optional authorization configuration information for an API."""
+
+ def __init__(self, allow_cookie_auth=None, blocked_regions=None):
+ """Constructor for ApiAuth, authentication information for an API.
+
+ Args:
+      allow_cookie_auth: boolean, whether cookie auth is allowed. By
+ default, API methods do not allow cookie authentication, and
+ require the use of OAuth2 or ID tokens. Setting this field to
+ True will allow cookies to be used to access the API, with
+ potentially dangerous results. Please be very cautious in enabling
+ this setting, and make sure to require appropriate XSRF tokens to
+ protect your API.
+ blocked_regions: list of Strings, a list of 2-letter ISO region codes
+ to block.
+ """
+ _CheckType(allow_cookie_auth, bool, 'allow_cookie_auth')
+ endpoints_util.check_list_type(blocked_regions, six.string_types,
+ 'blocked_regions')
+
+ self.__allow_cookie_auth = allow_cookie_auth
+ self.__blocked_regions = blocked_regions
+
+ @property
+ def allow_cookie_auth(self):
+ """Whether cookie authentication is allowed for this API."""
+ return self.__allow_cookie_auth
+
+ @property
+ def blocked_regions(self):
+ """List of 2-letter ISO region codes to block."""
+ return self.__blocked_regions
+
+
+class ApiFrontEndLimitRule(object):
+ """Custom rule to limit unregistered traffic."""
+
+ def __init__(self, match=None, qps=None, user_qps=None, daily=None,
+ analytics_id=None):
+ """Constructor for ApiFrontEndLimitRule.
+
+ Args:
+ match: string, the matching rule that defines this traffic segment.
+ qps: int, the aggregate QPS for this segment.
+ user_qps: int, the per-end-user QPS for this segment.
+ daily: int, the aggregate daily maximum for this segment.
+ analytics_id: string, the project ID under which traffic for this segment
+ will be logged.
+ """
+ _CheckType(match, six.string_types, 'match')
+ _CheckType(qps, int, 'qps')
+ _CheckType(user_qps, int, 'user_qps')
+ _CheckType(daily, int, 'daily')
+ _CheckType(analytics_id, six.string_types, 'analytics_id')
+
+ self.__match = match
+ self.__qps = qps
+ self.__user_qps = user_qps
+ self.__daily = daily
+ self.__analytics_id = analytics_id
+
+ @property
+ def match(self):
+ """The matching rule that defines this traffic segment."""
+ return self.__match
+
+ @property
+ def qps(self):
+ """The aggregate QPS for this segment."""
+ return self.__qps
+
+ @property
+ def user_qps(self):
+ """The per-end-user QPS for this segment."""
+ return self.__user_qps
+
+ @property
+ def daily(self):
+ """The aggregate daily maximum for this segment."""
+ return self.__daily
+
+ @property
+ def analytics_id(self):
+ """Project ID under which traffic for this segment will be logged."""
+ return self.__analytics_id
+
+
+class ApiFrontEndLimits(object):
+ """Optional front end limit information for an API."""
+
+ def __init__(self, unregistered_user_qps=None, unregistered_qps=None,
+ unregistered_daily=None, rules=None):
+ """Constructor for ApiFrontEndLimits, front end limit info for an API.
+
+ Args:
+ unregistered_user_qps: int, the per-end-user QPS. Users are identified
+ by their IP address. A value of 0 will block unregistered requests.
+ unregistered_qps: int, an aggregate QPS upper-bound for all unregistered
+ traffic. A value of 0 currently means unlimited, though it might change
+ in the future. To block unregistered requests, use unregistered_user_qps
+ or unregistered_daily instead.
+ unregistered_daily: int, an aggregate daily upper-bound for all
+ unregistered traffic. A value of 0 will block unregistered requests.
+ rules: A list or tuple of ApiFrontEndLimitRule instances: custom rules
+ used to apply limits to unregistered traffic.
+ """
+ _CheckType(unregistered_user_qps, int, 'unregistered_user_qps')
+ _CheckType(unregistered_qps, int, 'unregistered_qps')
+ _CheckType(unregistered_daily, int, 'unregistered_daily')
+ endpoints_util.check_list_type(rules, ApiFrontEndLimitRule, 'rules')
+
+ self.__unregistered_user_qps = unregistered_user_qps
+ self.__unregistered_qps = unregistered_qps
+ self.__unregistered_daily = unregistered_daily
+ self.__rules = rules
+
+ @property
+ def unregistered_user_qps(self):
+ """Per-end-user QPS limit."""
+ return self.__unregistered_user_qps
+
+ @property
+ def unregistered_qps(self):
+ """Aggregate QPS upper-bound for all unregistered traffic."""
+ return self.__unregistered_qps
+
+ @property
+ def unregistered_daily(self):
+ """Aggregate daily upper-bound for all unregistered traffic."""
+ return self.__unregistered_daily
+
+ @property
+ def rules(self):
+ """Custom rules used to apply limits to unregistered traffic."""
+ return self.__rules
+
+
+@util.positional(2)
+def api(name, version, description=None, hostname=None, audiences=None,
+ scopes=None, allowed_client_ids=None, canonical_name=None,
+ auth=None, owner_domain=None, owner_name=None, package_path=None,
+ frontend_limits=None, title=None, documentation=None, auth_level=None,
+ issuers=None, namespace=None, api_key_required=None, base_path=None,
+ limit_definitions=None, use_request_uri=None):
+ """Decorate a ProtoRPC Service class for use by the framework above.
+
+ This decorator can be used to specify an API name, version, description, and
+ hostname for your API.
+
+ Sample usage (python 2.7):
+ @endpoints.api(name='guestbook', version='v0.2',
+ description='Guestbook API')
+ class PostService(remote.Service):
+ ...
+
+ Sample usage (python 2.5):
+ class PostService(remote.Service):
+ ...
+ endpoints.api(name='guestbook', version='v0.2',
+ description='Guestbook API')(PostService)
+
+ Sample usage if multiple classes implement one API:
+ api_root = endpoints.api(name='library', version='v1.0')
+
+ @api_root.api_class(resource_name='shelves')
+ class Shelves(remote.Service):
+ ...
+
+ @api_root.api_class(resource_name='books', path='books')
+ class Books(remote.Service):
+ ...
+
+ Args:
+ name: string, Name of the API.
+ version: string, Version of the API.
+ description: string, Short description of the API (Default: None)
+ hostname: string, Hostname of the API (Default: app engine default host)
+ audiences: list of strings, Acceptable audiences for authentication.
+ scopes: list of strings, Acceptable scopes for authentication.
+ allowed_client_ids: list of strings, Acceptable client IDs for auth.
+ canonical_name: string, the canonical name for the API, a more human
+ readable version of the name.
+ auth: ApiAuth instance, the authentication configuration information
+ for this API.
+ owner_domain: string, the domain of the person or company that owns
+ this API. Along with owner_name, this provides hints to properly
+ name client libraries for this API.
+ owner_name: string, the name of the owner of this API. Along with
+ owner_domain, this provides hints to properly name client libraries
+ for this API.
+ package_path: string, the "package" this API belongs to. This '/'
+ delimited value specifies logical groupings of APIs. This is used by
+ client libraries of this API.
+ frontend_limits: ApiFrontEndLimits, optional query limits for unregistered
+ developers.
+ title: string, the human readable title of your API. It is exposed in the
+ discovery service.
+ documentation: string, a URL where users can find documentation about this
+ version of the API. This will be surfaced in the API Explorer and GPE
+ plugin to allow users to learn about your service.
+ auth_level: enum from AUTH_LEVEL, frontend authentication level.
+ issuers: dict, mapping auth issuer names to endpoints.Issuer objects.
+ namespace: endpoints.Namespace, the namespace for the API.
+ api_key_required: bool, whether a key is required to call into this API.
+ base_path: string, the base path for all endpoints in this API.
+ limit_definitions: list of endpoints.LimitDefinition objects, quota metric
+ definitions for this API.
+ use_request_uri: if true, match requests against REQUEST_URI instead of PATH_INFO
+
+
+ Returns:
+ Class decorated with api_info attribute, an instance of ApiInfo.
+ """
+ if auth_level is not None:
+ _logger.warn(_AUTH_LEVEL_WARNING)
+
+ return _ApiDecorator(name, version, description=description,
+ hostname=hostname, audiences=audiences, scopes=scopes,
+ allowed_client_ids=allowed_client_ids,
+ canonical_name=canonical_name, auth=auth,
+ owner_domain=owner_domain, owner_name=owner_name,
+ package_path=package_path,
+ frontend_limits=frontend_limits, title=title,
+ documentation=documentation, auth_level=auth_level,
+ issuers=issuers, namespace=namespace,
+ api_key_required=api_key_required, base_path=base_path,
+ limit_definitions=limit_definitions,
+ use_request_uri=use_request_uri)
+
+
+class _MethodInfo(object):
+ """Configurable attributes of an API method.
+
+  Consolidates settings from the @method decorator and/or any settings that
+  were calculated from the ProtoRPC method name, so they only need to be
+  calculated once.
+ """
+
+ @util.positional(1)
+ def __init__(self, name=None, path=None, http_method=None,
+ scopes=None, audiences=None, allowed_client_ids=None,
+ auth_level=None, api_key_required=None, request_body_class=None,
+ request_params_class=None, metric_costs=None, use_request_uri=None):
+ """Constructor.
+
+ Args:
+ name: string, Name of the method, prepended with <apiname>. to make it
+ unique.
+ path: string, Path portion of the URL to the method, for RESTful methods.
+ http_method: string, HTTP method supported by the method.
+ scopes: list of string, OAuth2 token must contain one of these scopes.
+ audiences: list of string, IdToken must contain one of these audiences.
+ allowed_client_ids: list of string, Client IDs allowed to call the method.
+ auth_level: enum from AUTH_LEVEL, Frontend auth level for the method.
+ api_key_required: bool, whether a key is required to call the method.
+      request_body_class: The type for the request body when using a
+          ResourceContainer. Otherwise, None.
+      request_params_class: The type for the request parameters when using a
+          ResourceContainer. Otherwise, None.
+ metric_costs: dict with keys matching an API limit metric and values
+ representing the cost for each successful call against that metric.
+ use_request_uri: if true, match requests against REQUEST_URI instead of PATH_INFO
+ """
+ self.__name = name
+ self.__path = path
+ self.__http_method = http_method
+ self.__scopes = endpoints_types.OAuth2Scope.convert_list(scopes)
+ self.__audiences = audiences
+ self.__allowed_client_ids = allowed_client_ids
+ self.__auth_level = auth_level
+ self.__api_key_required = api_key_required
+ self.__request_body_class = request_body_class
+ self.__request_params_class = request_params_class
+ self.__metric_costs = metric_costs
+ self.__use_request_uri = use_request_uri
+
+ def __safe_name(self, method_name):
+ """Restrict method name to a-zA-Z0-9_, first char lowercase."""
+ # Endpoints backend restricts what chars are allowed in a method name.
+ safe_name = re.sub(r'[^\.a-zA-Z0-9_]', '', method_name)
+
+ # Strip any number of leading underscores.
+ safe_name = safe_name.lstrip('_')
+
+ # Ensure the first character is lowercase.
+ # Slice from 0:1 rather than indexing [0] in case safe_name is length 0.
+ return safe_name[0:1].lower() + safe_name[1:]
+
+ @property
+ def name(self):
+ """Method name as specified in decorator or derived."""
+ return self.__name
+
+ def get_path(self, api_info):
+ """Get the path portion of the URL to the method (for RESTful methods).
+
+ Request path can be specified in the method, and it could have a base
+ path prepended to it.
+
+ Args:
+ api_info: API information for this API, possibly including a base path.
+ This is the api_info property on the class that's been annotated for
+ this API.
+
+ Returns:
+ This method's request path (not including the http://.../{base_path}
+ prefix).
+
+ Raises:
+ ApiConfigurationError: If the path isn't properly formatted.
+ """
+ path = self.__path or ''
+ if path and path[0] == '/':
+ # Absolute path, ignoring any prefixes. Just strip off the leading /.
+ path = path[1:]
+ else:
+ # Relative path.
+ if api_info.path:
+ path = '%s%s%s' % (api_info.path, '/' if path else '', path)
+
+ # Verify that the path seems valid.
+ parts = path.split('/')
+ for n, part in enumerate(parts):
+ r = _VALID_PART_RE if n < len(parts) - 1 else _VALID_LAST_PART_RE
+ if part and '{' in part and '}' in part:
+ if not r.match(part):
+ raise api_exceptions.ApiConfigurationError(
+ 'Invalid path segment: %s (part of %s)' % (part, path))
+ return path
+
+ @property
+ def http_method(self):
+ """HTTP method supported by the method (e.g. GET, POST)."""
+ return self.__http_method
+
+ @property
+ def scope_objs(self):
+ """List of scopes (as OAuth2Scopes) accepted for the API method."""
+ return self.__scopes
+
+ @property
+ def scopes(self):
+ """List of scopes (as strings) accepted for the API method."""
+ if self.scope_objs is not None:
+ return [_s.scope for _s in self.scope_objs]
+
+ @property
+ def audiences(self):
+ """List of audiences for the API method."""
+ return self.__audiences
+
+ @property
+ def allowed_client_ids(self):
+ """List of allowed client IDs for the API method."""
+ return self.__allowed_client_ids
+
+ @property
+ def auth_level(self):
+ """Enum from AUTH_LEVEL specifying default frontend auth level."""
+ return self.__auth_level
+
+ @property
+ def api_key_required(self):
+ """bool whether a key is required to call the API method."""
+ return self.__api_key_required
+
+ @property
+ def metric_costs(self):
+ """Dict mapping API limit metric names to costs against that metric."""
+ return self.__metric_costs
+
+ @property
+ def request_body_class(self):
+ """Type of request body when using a ResourceContainer."""
+ return self.__request_body_class
+
+ @property
+ def request_params_class(self):
+ """Type of request parameter message when using a ResourceContainer."""
+ return self.__request_params_class
+
+ def is_api_key_required(self, api_info):
+ if self.api_key_required is not None:
+ return self.api_key_required
+ else:
+ return api_info.api_key_required
+
+ def use_request_uri(self, api_info):
+ if self.__use_request_uri is not None:
+ return self.__use_request_uri
+ else:
+ return api_info.use_request_uri
+
+ def method_id(self, api_info):
+ """Computed method name."""
+ # This is done here for now because at __init__ time, the method is known
+ # but not the api, and thus not the api name. Later, in
+ # ApiConfigGenerator.__method_descriptor, the api name is known.
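+    # e.g. (illustrative): api name 'library', resource_name 'shelves' and a
+    # method named 'list' yield the method id 'library.shelves.list'.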
+ if api_info.resource_name:
+ resource_part = '.%s' % self.__safe_name(api_info.resource_name)
+ else:
+ resource_part = ''
+ return '%s%s.%s' % (self.__safe_name(api_info.name), resource_part,
+ self.__safe_name(self.name))
+
+
+@util.positional(2)
+def method(request_message=message_types.VoidMessage,
+ response_message=message_types.VoidMessage,
+ name=None,
+ path=None,
+ http_method='POST',
+ scopes=None,
+ audiences=None,
+ allowed_client_ids=None,
+ auth_level=None,
+ api_key_required=None,
+ metric_costs=None,
+ use_request_uri=None):
+ """Decorate a ProtoRPC Method for use by the framework above.
+
+ This decorator can be used to specify a method name, path, http method,
+ scopes, audiences, client ids and auth_level.
+
+ Sample usage:
+ @api_config.method(RequestMessage, ResponseMessage,
+ name='insert', http_method='PUT')
+ def greeting_insert(request):
+ ...
+ return response
+
+ Args:
+ request_message: Message type of expected request.
+ response_message: Message type of expected response.
+ name: string, Name of the method, prepended with <apiname>. to make it
+ unique. (Default: python method name)
+ path: string, Path portion of the URL to the method, for RESTful methods.
+ http_method: string, HTTP method supported by the method. (Default: POST)
+ scopes: list of string, OAuth2 token must contain one of these scopes.
+ audiences: list of string, IdToken must contain one of these audiences.
+ allowed_client_ids: list of string, Client IDs allowed to call the method.
+ If None and auth_level is REQUIRED, no calls will be allowed.
+ auth_level: enum from AUTH_LEVEL, Frontend auth level for the method.
+ api_key_required: bool, whether a key is required to call the method
+ metric_costs: dict with keys matching an API limit metric and values
+ representing the cost for each successful call against that metric.
+ use_request_uri: if true, match requests against REQUEST_URI instead of PATH_INFO
+
+ Returns:
+ 'apiserving_method_wrapper' function.
+
+ Raises:
+ TypeError: if the request_type or response_type parameters are not
+ proper subclasses of messages.Message.
+ """
+ if auth_level is not None:
+ _logger.warn(_AUTH_LEVEL_WARNING)
+
+ # Default HTTP method if one is not specified.
+ DEFAULT_HTTP_METHOD = 'POST'
+
+ def apiserving_method_decorator(api_method):
+ """Decorator for ProtoRPC method that configures Google's API server.
+
+ Args:
+ api_method: Original method being wrapped.
+
+ Returns:
+ Function responsible for actual invocation.
+ Assigns the following attributes to invocation function:
+ remote: Instance of RemoteInfo, contains remote method information.
+ remote.request_type: Expected request type for remote method.
+ remote.response_type: Response type returned from remote method.
+ method_info: Instance of _MethodInfo, api method configuration.
+ It is also assigned attributes corresponding to the aforementioned kwargs.
+
+ Raises:
+ TypeError: if the request_type or response_type parameters are not
+ proper subclasses of messages.Message.
+ KeyError: if the request_message is a ResourceContainer and the newly
+        created remote method has been referenced by the container before. This
+ should never occur because a remote method is created once.
+ """
+ request_body_class = None
+ request_params_class = None
+ if isinstance(request_message, resource_container.ResourceContainer):
+ remote_decorator = remote.method(request_message.combined_message_class,
+ response_message)
+ request_body_class = request_message.body_message_class()
+ request_params_class = request_message.parameters_message_class()
+ else:
+ remote_decorator = remote.method(request_message, response_message)
+ remote_method = remote_decorator(api_method)
+
+ def invoke_remote(service_instance, request):
+ # If the server didn't specify any auth information, build it now.
+ # pylint: disable=protected-access
+ users_id_token._maybe_set_current_user_vars(
+ invoke_remote, api_info=getattr(service_instance, 'api_info', None),
+ request=request)
+ # pylint: enable=protected-access
+ return remote_method(service_instance, request)
+
+ invoke_remote.remote = remote_method.remote
+ if isinstance(request_message, resource_container.ResourceContainer):
+ resource_container.ResourceContainer.add_to_cache(
+ invoke_remote.remote, request_message)
+
+ invoke_remote.method_info = _MethodInfo(
+ name=name or api_method.__name__, path=path or api_method.__name__,
+ http_method=http_method or DEFAULT_HTTP_METHOD,
+ scopes=scopes, audiences=audiences,
+ allowed_client_ids=allowed_client_ids, auth_level=auth_level,
+ api_key_required=api_key_required, metric_costs=metric_costs,
+ use_request_uri=use_request_uri,
+ request_body_class=request_body_class,
+ request_params_class=request_params_class)
+ invoke_remote.__name__ = invoke_remote.method_info.name
+ return invoke_remote
+
+ endpoints_util.check_list_type(scopes, (six.string_types, endpoints_types.OAuth2Scope), 'scopes')
+ endpoints_util.check_list_type(allowed_client_ids, six.string_types,
+ 'allowed_client_ids')
+ _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')
+
+ _CheckAudiences(audiences)
+
+ _CheckType(metric_costs, dict, 'metric_costs')
+
+ return apiserving_method_decorator
+
+
+class ApiConfigGenerator(object):
+ """Generates an API configuration from a ProtoRPC service.
+
+ Example:
+
+ class HelloRequest(messages.Message):
+ my_name = messages.StringField(1, required=True)
+
+ class HelloResponse(messages.Message):
+ hello = messages.StringField(1, required=True)
+
+ class HelloService(remote.Service):
+
+ @remote.method(HelloRequest, HelloResponse)
+ def hello(self, request):
+ return HelloResponse(hello='Hello there, %s!' %
+ request.my_name)
+
+ api_config = ApiConfigGenerator().pretty_print_config_to_json(HelloService)
+
+ The resulting api_config will be a JSON document describing the API
+ implemented by HelloService.
+ """
+
+ # Constants for categorizing a request method.
+ # __NO_BODY - Request without a request body, such as GET and DELETE methods.
+ # __HAS_BODY - Request (such as POST/PUT/PATCH) with info in the request body.
+ __NO_BODY = 1
+ __HAS_BODY = 2
+
+ def __init__(self):
+ self.__parser = message_parser.MessageTypeToJsonSchema()
+
+ # Maps method id to the request schema id.
+ self.__request_schema = {}
+
+ # Maps method id to the response schema id.
+ self.__response_schema = {}
+
+ # Maps from ProtoRPC name to method id.
+ self.__id_from_name = {}
+
+ def __get_request_kind(self, method_info):
+ """Categorize the type of the request.
+
+ Args:
+ method_info: _MethodInfo, method information.
+
+ Returns:
+ The kind of request.
+ """
+ if method_info.http_method in ('GET', 'DELETE'):
+ return self.__NO_BODY
+ else:
+ return self.__HAS_BODY
+
+ def __field_to_subfields(self, field):
+ """Fully describes data represented by field, including the nested case.
+
+ In the case that the field is not a message field, we have no fields nested
+ within a message definition, so we can simply return that field. However, in
+ the nested case, we can't simply describe the data with one field or even
+ with one chain of fields.
+
+ For example, if we have a message field
+
+ m_field = messages.MessageField(RefClass, 1)
+
+ which references a class with two fields:
+
+ class RefClass(messages.Message):
+ one = messages.StringField(1)
+ two = messages.IntegerField(2)
+
+ then we would need to include both one and two to represent all the
+ data contained.
+
+ Calling __field_to_subfields(m_field) would return:
+ [
+ [<MessageField "m_field">, <StringField "one">],
+ [<MessageField "m_field">, <StringField "two">],
+ ]
+
+ If the second field was instead a message field
+
+ class RefClass(messages.Message):
+ one = messages.StringField(1)
+ two = messages.MessageField(OtherRefClass, 2)
+
+ referencing another class with two fields
+
+ class OtherRefClass(messages.Message):
+ three = messages.BooleanField(1)
+ four = messages.FloatField(2)
+
+ then we would need to recurse one level deeper for two.
+
+ With this change, calling __field_to_subfields(m_field) would return:
+ [
+ [<MessageField "m_field">, <StringField "one">],
+ [<MessageField "m_field">, <StringField "two">, <StringField "three">],
+ [<MessageField "m_field">, <StringField "two">, <StringField "four">],
+ ]
+
+ Args:
+ field: An instance of a subclass of messages.Field.
+
+ Returns:
+ A list of lists, where each sublist is a list of fields.
+ """
+ # Termination condition
+ if not isinstance(field, messages.MessageField):
+ return [[field]]
+
+ result = []
+ for subfield in sorted(field.message_type.all_fields(),
+ key=lambda f: f.number):
+ subfield_results = self.__field_to_subfields(subfield)
+ for subfields_list in subfield_results:
+ subfields_list.insert(0, field)
+ result.append(subfields_list)
+ return result
+
+ # TODO(dhermes): Support all the parameter types
+ # Currently missing DATE and ETAG
+ def __field_to_parameter_type(self, field):
+ """Converts the field variant type into a string describing the parameter.
+
+ Args:
+ field: An instance of a subclass of messages.Field.
+
+ Returns:
+ A string corresponding to the variant enum of the field, with a few
+ exceptions. In the case of signed ints, the 's' is dropped; for the BOOL
+ variant, 'boolean' is used; and for the ENUM variant, 'string' is used.
+
+ Raises:
+ TypeError: if the field variant is a message variant.
+ """
+ # We use lowercase values for types (e.g. 'string' instead of 'STRING').
+ variant = field.variant
+ if variant == messages.Variant.MESSAGE:
+ raise TypeError('A message variant can\'t be used in a parameter.')
+
+ custom_variant_map = {
+ messages.Variant.SINT32: 'int32',
+ messages.Variant.SINT64: 'int64',
+ messages.Variant.BOOL: 'boolean',
+ messages.Variant.ENUM: 'string',
+ }
+ return custom_variant_map.get(variant) or variant.name.lower()
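+
+  # For illustration (hypothetical fields; behavior follows the mapping
+  # above): variant SINT64 maps to 'int64', BOOL to 'boolean', ENUM to
+  # 'string', and anything else falls through to variant.name.lower(),
+  # e.g. STRING -> 'string'.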
+
+ def __get_path_parameters(self, path):
+ """Parses path paremeters from a URI path and organizes them by parameter.
+
+ Some of the parameters may correspond to message fields, and so will be
+ represented as segments corresponding to each subfield; e.g. first.second if
+ the field "second" in the message field "first" is pulled from the path.
+
+    The resulting dictionary uses the first segments as keys; each key maps to
+    the list of full parameter names whose first segment equals that key.
+
+    If a matched path parameter is empty, that part of the path template is
+    ignored; this occurs if '{}' is used in a template.
+
+ Args:
+ path: String; a URI path, potentially with some parameters.
+
+ Returns:
+ A dictionary with strings as keys and list of strings as values.
+ """
+ path_parameters_by_segment = {}
+ for format_var_name in re.findall(_PATH_VARIABLE_PATTERN, path):
+ first_segment = format_var_name.split('.', 1)[0]
+ matches = path_parameters_by_segment.setdefault(first_segment, [])
+ matches.append(format_var_name)
+
+ return path_parameters_by_segment
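+
+  # Example (hypothetical template, not from the original source): for the
+  # path 'items/{x.y}/{x.z}/{id}', re.findall(_PATH_VARIABLE_PATTERN, ...)
+  # extracts ['x.y', 'x.z', 'id'], so the returned dict is
+  # {'x': ['x.y', 'x.z'], 'id': ['id']}.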
+
+ def __validate_simple_subfield(self, parameter, field, segment_list,
+ _segment_index=0):
+ """Verifies that a proposed subfield actually exists and is a simple field.
+
+ Here, simple means it is not a MessageField (nested).
+
+ Args:
+ parameter: String; the '.' delimited name of the current field being
+ considered. This is relative to some root.
+ field: An instance of a subclass of messages.Field. Corresponds to the
+ previous segment in the path (previous relative to _segment_index),
+ since this field should be a message field with the current segment
+ as a field in the message class.
+ segment_list: The full list of segments from the '.' delimited subfield
+ being validated.
+ _segment_index: Integer; used to hold the position of current segment so
+ that segment_list can be passed as a reference instead of having to
+ copy using segment_list[1:] at each step.
+
+ Raises:
+ TypeError: If the final subfield (indicated by _segment_index relative
+ to the length of segment_list) is a MessageField.
+      TypeError: If at any stage the lookup at a segment fails, e.g. if a.b
+ exists but a.b.c does not exist. This can happen either if a.b is not
+ a message field or if a.b.c is not a property on the message class from
+ a.b.
+ """
+ if _segment_index >= len(segment_list):
+ # In this case, the field is the final one, so should be simple type
+ if isinstance(field, messages.MessageField):
+ field_class = field.__class__.__name__
+ raise TypeError('Can\'t use messages in path. Subfield %r was '
+ 'included but is a %s.' % (parameter, field_class))
+ return
+
+ segment = segment_list[_segment_index]
+ parameter += '.' + segment
+ try:
+ field = field.type.field_by_name(segment)
+ except (AttributeError, KeyError):
+ raise TypeError('Subfield %r from path does not exist.' % (parameter,))
+
+ self.__validate_simple_subfield(parameter, field, segment_list,
+ _segment_index=_segment_index + 1)
+
+ def __validate_path_parameters(self, field, path_parameters):
+ """Verifies that all path parameters correspond to an existing subfield.
+
+ Args:
+ field: An instance of a subclass of messages.Field. Should be the root
+ level property name in each path parameter in path_parameters. For
+ example, if the field is called 'foo', then each path parameter should
+ begin with 'foo.'.
+ path_parameters: A list of Strings representing URI parameter variables.
+
+ Raises:
+ TypeError: If one of the path parameters does not start with field.name.
+ """
+ for param in path_parameters:
+ segment_list = param.split('.')
+ if segment_list[0] != field.name:
+ raise TypeError('Subfield %r can\'t come from field %r.'
+ % (param, field.name))
+ self.__validate_simple_subfield(field.name, field, segment_list[1:])
+
+ def __parameter_default(self, final_subfield):
+ """Returns default value of final subfield if it has one.
+
+ If this subfield comes from a field list returned from __field_to_subfields,
+ none of the fields in the subfield list can have a default except the final
+ one since they all must be message fields.
+
+ Args:
+ final_subfield: A simple field from the end of a subfield list.
+
+ Returns:
+ The default value of the subfield, if any exists, with the exception of an
+ enum field, which will have its value cast to a string.
+ """
+    # Compare against None explicitly so falsy defaults (0, False, '') are
+    # still reported.
+    if final_subfield.default is not None:
+ if isinstance(final_subfield, messages.EnumField):
+ return final_subfield.default.name
+ else:
+ return final_subfield.default
+
+ def __parameter_enum(self, final_subfield):
+ """Returns enum descriptor of final subfield if it is an enum.
+
+    An enum descriptor is a dictionary with the enum value names as keys; each
+    value is a dictionary with a single key "backendValue" whose value is the
+    same enum name used to store it in the descriptor.
+
+ The key "description" can also be used next to "backendValue", but protorpc
+ Enum classes have no way of supporting a description for each value.
+
+ Args:
+ final_subfield: A simple field from the end of a subfield list.
+
+ Returns:
+      The enum descriptor for the field if it's an enum field, else None.
+ """
+ if isinstance(final_subfield, messages.EnumField):
+ enum_descriptor = {}
+ for enum_value in final_subfield.type.to_dict().keys():
+ enum_descriptor[enum_value] = {'backendValue': enum_value}
+ return enum_descriptor
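+
+  # Sketch of the returned descriptor for a hypothetical enum with values
+  # RED and BLUE (names assumed for illustration):
+  #   {'RED': {'backendValue': 'RED'}, 'BLUE': {'backendValue': 'BLUE'}}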
+
+ def __parameter_descriptor(self, subfield_list):
+ """Creates descriptor for a parameter using the subfields that define it.
+
+ Each parameter is defined by a list of fields, with all but the last being
+ a message field and the final being a simple (non-message) field.
+
+ Many of the fields in the descriptor are determined solely by the simple
+ field at the end, though some (such as repeated and required) take the whole
+ chain of fields into consideration.
+
+ Args:
+ subfield_list: List of fields describing the parameter.
+
+ Returns:
+ Dictionary containing a descriptor for the parameter described by the list
+ of fields.
+ """
+ descriptor = {}
+ final_subfield = subfield_list[-1]
+
+ # Required
+ if all(subfield.required for subfield in subfield_list):
+ descriptor['required'] = True
+
+ # Type
+ descriptor['type'] = self.__field_to_parameter_type(final_subfield)
+
+ # Default
+ default = self.__parameter_default(final_subfield)
+ if default is not None:
+ descriptor['default'] = default
+
+ # Repeated
+ if any(subfield.repeated for subfield in subfield_list):
+ descriptor['repeated'] = True
+
+ # Enum
+ enum_descriptor = self.__parameter_enum(final_subfield)
+ if enum_descriptor is not None:
+ descriptor['enum'] = enum_descriptor
+
+ return descriptor
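+
+  # For a hypothetical chain ending in a required StringField with default
+  # 'foo', the resulting descriptor would be roughly:
+  #   {'required': True, 'type': 'string', 'default': 'foo'}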
+
+ def __add_parameters_from_field(self, field, path_parameters,
+ params, param_order):
+ """Adds all parameters in a field to a method parameters descriptor.
+
+ Simple fields will only have one parameter, but a message field 'x' that
+ corresponds to a message class with fields 'y' and 'z' will result in
+ parameters 'x.y' and 'x.z', for example. The mapping from field to
+ parameters is mostly handled by __field_to_subfields.
+
+ Args:
+ field: Field from which parameters will be added to the method descriptor.
+ path_parameters: A list of parameters matched from a path for this field.
+ For example for the hypothetical 'x' from above if the path was
+ '/a/{x.z}/b/{other}' then this list would contain only the element
+ 'x.z' since 'other' does not match to this field.
+ params: Dictionary with parameter names as keys and parameter descriptors
+ as values. This will be updated for each parameter in the field.
+ param_order: List of required parameter names to give them an order in the
+ descriptor. All required parameters in the field will be added to this
+ list.
+ """
+ for subfield_list in self.__field_to_subfields(field):
+ descriptor = self.__parameter_descriptor(subfield_list)
+
+ qualified_name = '.'.join(subfield.name for subfield in subfield_list)
+ in_path = qualified_name in path_parameters
+ if descriptor.get('required', in_path):
+ descriptor['required'] = True
+ param_order.append(qualified_name)
+
+ params[qualified_name] = descriptor
+
+ def __params_descriptor_without_container(self, message_type,
+ request_kind, path):
+ """Describe parameters of a method which does not use a ResourceContainer.
+
+ Makes sure that the path parameters are included in the message definition
+ and adds any required fields and URL query parameters.
+
+ This method is to preserve backwards compatibility and will be removed in
+ a future release.
+
+ Args:
+ message_type: messages.Message class, Message with parameters to describe.
+ request_kind: The type of request being made.
+ path: string, HTTP path to method.
+
+ Returns:
+ A tuple (dict, list of string): Descriptor of the parameters, Order of the
+ parameters.
+ """
+ params = {}
+ param_order = []
+
+ path_parameter_dict = self.__get_path_parameters(path)
+ for field in sorted(message_type.all_fields(), key=lambda f: f.number):
+ matched_path_parameters = path_parameter_dict.get(field.name, [])
+ self.__validate_path_parameters(field, matched_path_parameters)
+ if matched_path_parameters or request_kind == self.__NO_BODY:
+ self.__add_parameters_from_field(field, matched_path_parameters,
+ params, param_order)
+
+ return params, param_order
+
+ # TODO(user): request_kind is only used by
+ # __params_descriptor_without_container so can be removed
+ # once that method is fully deprecated.
+ def __params_descriptor(self, message_type, request_kind, path, method_id):
+ """Describe the parameters of a method.
+
+ If the message_type is not a ResourceContainer, will fall back to
+ __params_descriptor_without_container (which will eventually be deprecated).
+
+    If the message type is a ResourceContainer, then all path/query parameters
+    will come from the ResourceContainer. This method will also make sure all
+    path parameters are covered by the message fields.
+
+ Args:
+ message_type: messages.Message or ResourceContainer class, Message with
+ parameters to describe.
+ request_kind: The type of request being made.
+ path: string, HTTP path to method.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+
+ Returns:
+ A tuple (dict, list of string): Descriptor of the parameters, Order of the
+ parameters.
+ """
+ path_parameter_dict = self.__get_path_parameters(path)
+
+ if not isinstance(message_type, resource_container.ResourceContainer):
+ if path_parameter_dict:
+ _logger.warning('Method %s specifies path parameters but you are not '
+ 'using a ResourceContainer; instead, you are using %r. '
+ 'This will fail in future releases; please switch to '
+ 'using ResourceContainer as soon as possible.',
+ method_id, type(message_type))
+ return self.__params_descriptor_without_container(
+ message_type, request_kind, path)
+
+ # From here, we can assume message_type is a ResourceContainer
+ message_type = message_type.parameters_message_class()
+
+ params = {}
+ param_order = []
+
+ # Make sure all path parameters are covered.
+ for field_name, matched_path_parameters in path_parameter_dict.items():
+ field = message_type.field_by_name(field_name)
+ self.__validate_path_parameters(field, matched_path_parameters)
+
+ # Add all fields, sort by field.number since we have parameterOrder.
+ for field in sorted(message_type.all_fields(), key=lambda f: f.number):
+ matched_path_parameters = path_parameter_dict.get(field.name, [])
+ self.__add_parameters_from_field(field, matched_path_parameters,
+ params, param_order)
+
+ return params, param_order
+
+ def __request_message_descriptor(self, request_kind, message_type, method_id,
+ path):
+ """Describes the parameters and body of the request.
+
+ Args:
+ request_kind: The type of request being made.
+ message_type: messages.Message or ResourceContainer class. The message to
+ describe.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+ path: string, HTTP path to method.
+
+ Returns:
+ Dictionary describing the request.
+
+ Raises:
+ ValueError: if the method path and request required fields do not match
+ """
+ descriptor = {}
+
+ params, param_order = self.__params_descriptor(message_type, request_kind,
+ path, method_id)
+
+ if isinstance(message_type, resource_container.ResourceContainer):
+ message_type = message_type.body_message_class()
+
+ if (request_kind == self.__NO_BODY or
+ message_type == message_types.VoidMessage()):
+ descriptor['body'] = 'empty'
+ else:
+ descriptor['body'] = 'autoTemplate(backendRequest)'
+ descriptor['bodyName'] = 'resource'
+ self.__request_schema[method_id] = self.__parser.add_message(
+ message_type.__class__)
+
+ if params:
+ descriptor['parameters'] = params
+
+ if param_order:
+ descriptor['parameterOrder'] = param_order
+
+ return descriptor
+
+ def __response_message_descriptor(self, message_type, method_id):
+ """Describes the response.
+
+ Args:
+ message_type: messages.Message class, The message to describe.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+
+ Returns:
+ Dictionary describing the response.
+ """
+ descriptor = {}
+
+ self.__parser.add_message(message_type.__class__)
+ if message_type == message_types.VoidMessage():
+ descriptor['body'] = 'empty'
+ else:
+ descriptor['body'] = 'autoTemplate(backendResponse)'
+ descriptor['bodyName'] = 'resource'
+ self.__response_schema[method_id] = self.__parser.ref_for_message_type(
+ message_type.__class__)
+
+ return descriptor
+
+ def __method_descriptor(self, service, method_info,
+ rosy_method, protorpc_method_info):
+ """Describes a method.
+
+ Args:
+ service: endpoints.Service, Implementation of the API as a service.
+ method_info: _MethodInfo, Configuration for the method.
+ rosy_method: string, ProtoRPC method name prefixed with the
+ name of the service.
+ protorpc_method_info: protorpc.remote._RemoteMethodInfo, ProtoRPC
+ description of the method.
+
+ Returns:
+ Dictionary describing the method.
+ """
+ descriptor = {}
+
+ request_message_type = (resource_container.ResourceContainer.
+ get_request_message(protorpc_method_info.remote))
+ request_kind = self.__get_request_kind(method_info)
+ remote_method = protorpc_method_info.remote
+
+ descriptor['path'] = method_info.get_path(service.api_info)
+ descriptor['httpMethod'] = method_info.http_method
+ descriptor['rosyMethod'] = rosy_method
+ descriptor['request'] = self.__request_message_descriptor(
+ request_kind, request_message_type,
+ method_info.method_id(service.api_info),
+ descriptor['path'])
+ descriptor['response'] = self.__response_message_descriptor(
+ remote_method.response_type(), method_info.method_id(service.api_info))
+
+ # Audiences, scopes, allowed_client_ids and auth_level could be set at
+ # either the method level or the API level. Allow an empty list at the
+ # method level to override the setting at the API level.
+ scopes = (method_info.scopes
+ if method_info.scopes is not None
+ else service.api_info.scopes)
+ if scopes:
+ descriptor['scopes'] = scopes
+ audiences = (method_info.audiences
+ if method_info.audiences is not None
+ else service.api_info.audiences)
+ if audiences:
+ descriptor['audiences'] = audiences
+ allowed_client_ids = (method_info.allowed_client_ids
+ if method_info.allowed_client_ids is not None
+ else service.api_info.allowed_client_ids)
+ if allowed_client_ids:
+ descriptor['clientIds'] = allowed_client_ids
+
+ if remote_method.method.__doc__:
+ descriptor['description'] = remote_method.method.__doc__
+
+ auth_level = (method_info.auth_level
+ if method_info.auth_level is not None
+ else service.api_info.auth_level)
+ if auth_level is not None:
+ descriptor['authLevel'] = AUTH_LEVEL.reverse_mapping[auth_level]
+
+ descriptor['useRequestUri'] = method_info.use_request_uri(service.api_info)
+
+ return descriptor
+
+ def __schema_descriptor(self, services):
+ """Descriptor for the all the JSON Schema used.
+
+ Args:
+ services: List of protorpc.remote.Service instances implementing an
+ api/version.
+
+ Returns:
+ Dictionary containing all the JSON Schema used in the service.
+ """
+ methods_desc = {}
+
+ for service in services:
+ protorpc_methods = service.all_remote_methods()
+ for protorpc_method_name in protorpc_methods.keys():
+ rosy_method = '%s.%s' % (service.__name__, protorpc_method_name)
+ method_id = self.__id_from_name[rosy_method]
+
+ request_response = {}
+
+ request_schema_id = self.__request_schema.get(method_id)
+ if request_schema_id:
+ request_response['request'] = {
+ '$ref': request_schema_id
+ }
+
+ response_schema_id = self.__response_schema.get(method_id)
+ if response_schema_id:
+ request_response['response'] = {
+ '$ref': response_schema_id
+ }
+
+ methods_desc[rosy_method] = request_response
+
+ descriptor = {
+ 'methods': methods_desc,
+ 'schemas': self.__parser.schemas(),
+ }
+
+ return descriptor
+
+ def __get_merged_api_info(self, services):
+ """Builds a description of an API.
+
+ Args:
+ services: List of protorpc.remote.Service instances implementing an
+ api/version.
+
+ Returns:
+ The _ApiInfo object to use for the API that the given services implement.
+
+ Raises:
+ ApiConfigurationError: If there's something wrong with the API
+ configuration, such as a multiclass API decorated with different API
+ descriptors (see the docstring for api()).
+ """
+ merged_api_info = services[0].api_info
+
+ # Verify that, if there are multiple classes here, they're allowed to
+ # implement the same API.
+ for service in services[1:]:
+ if not merged_api_info.is_same_api(service.api_info):
+ raise api_exceptions.ApiConfigurationError(
+ _MULTICLASS_MISMATCH_ERROR_TEMPLATE % (service.api_info.name,
+ service.api_info.api_version))
+
+ return merged_api_info
+
+ def __auth_descriptor(self, api_info):
+ """Builds an auth descriptor from API info.
+
+ Args:
+ api_info: An _ApiInfo object.
+
+ Returns:
+ A dictionary with 'allowCookieAuth' and/or 'blockedRegions' keys.
+ """
+ if api_info.auth is None:
+ return None
+
+ auth_descriptor = {}
+ if api_info.auth.allow_cookie_auth is not None:
+ auth_descriptor['allowCookieAuth'] = api_info.auth.allow_cookie_auth
+ if api_info.auth.blocked_regions:
+ auth_descriptor['blockedRegions'] = api_info.auth.blocked_regions
+
+ return auth_descriptor
+
+ def __frontend_limit_descriptor(self, api_info):
+ """Builds a frontend limit descriptor from API info.
+
+ Args:
+ api_info: An _ApiInfo object.
+
+ Returns:
+ A dictionary with frontend limit information.
+ """
+ if api_info.frontend_limits is None:
+ return None
+
+ descriptor = {}
+ for propname, descname in (('unregistered_user_qps', 'unregisteredUserQps'),
+ ('unregistered_qps', 'unregisteredQps'),
+ ('unregistered_daily', 'unregisteredDaily')):
+ if getattr(api_info.frontend_limits, propname) is not None:
+ descriptor[descname] = getattr(api_info.frontend_limits, propname)
+
+ rules = self.__frontend_limit_rules_descriptor(api_info)
+ if rules:
+ descriptor['rules'] = rules
+
+ return descriptor
+
+ def __frontend_limit_rules_descriptor(self, api_info):
+ """Builds a frontend limit rules descriptor from API info.
+
+ Args:
+ api_info: An _ApiInfo object.
+
+ Returns:
+ A list of dictionaries with frontend limit rules information.
+ """
+ if not api_info.frontend_limits.rules:
+ return None
+
+ rules = []
+ for rule in api_info.frontend_limits.rules:
+ descriptor = {}
+ for propname, descname in (('match', 'match'),
+ ('qps', 'qps'),
+ ('user_qps', 'userQps'),
+ ('daily', 'daily'),
+ ('analytics_id', 'analyticsId')):
+ if getattr(rule, propname) is not None:
+ descriptor[descname] = getattr(rule, propname)
+ if descriptor:
+ rules.append(descriptor)
+
+ return rules
+
+ def __api_descriptor(self, services, hostname=None):
+ """Builds a description of an API.
+
+ Args:
+ services: List of protorpc.remote.Service instances implementing an
+ api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ A dictionary that can be deserialized into JSON and stored as an API
+ description document.
+
+ Raises:
+ ApiConfigurationError: If there's something wrong with the API
+ configuration, such as a multiclass API decorated with different API
+ descriptors (see the docstring for api()), or a repeated method
+ signature.
+ """
+ merged_api_info = self.__get_merged_api_info(services)
+ descriptor = self.get_descriptor_defaults(merged_api_info,
+ hostname=hostname)
+ description = merged_api_info.description
+ if not description and len(services) == 1:
+ description = services[0].__doc__
+ if description:
+ descriptor['description'] = description
+
+ auth_descriptor = self.__auth_descriptor(merged_api_info)
+ if auth_descriptor:
+ descriptor['auth'] = auth_descriptor
+
+ frontend_limit_descriptor = self.__frontend_limit_descriptor(
+ merged_api_info)
+ if frontend_limit_descriptor:
+ descriptor['frontendLimits'] = frontend_limit_descriptor
+
+ method_map = {}
+ method_collision_tracker = {}
+ rest_collision_tracker = {}
+
+ for service in services:
+ remote_methods = service.all_remote_methods()
+ for protorpc_meth_name, protorpc_meth_info in remote_methods.items():
+ method_info = getattr(protorpc_meth_info, 'method_info', None)
+ # Skip methods that are not decorated with @method
+ if method_info is None:
+ continue
+ method_id = method_info.method_id(service.api_info)
+ rosy_method = '%s.%s' % (service.__name__, protorpc_meth_name)
+ self.__id_from_name[rosy_method] = method_id
+ method_map[method_id] = self.__method_descriptor(
+ service, method_info, rosy_method, protorpc_meth_info)
+
+ # Make sure the same method name isn't repeated.
+ if method_id in method_collision_tracker:
+ raise api_exceptions.ApiConfigurationError(
+ 'Method %s used multiple times, in classes %s and %s' %
+ (method_id, method_collision_tracker[method_id],
+ service.__name__))
+ else:
+ method_collision_tracker[method_id] = service.__name__
+
+ # Make sure the same HTTP method & path aren't repeated.
+ rest_identifier = (method_info.http_method,
+ method_info.get_path(service.api_info))
+ if rest_identifier in rest_collision_tracker:
+ raise api_exceptions.ApiConfigurationError(
+ '%s path "%s" used multiple times, in classes %s and %s' %
+ (method_info.http_method, method_info.get_path(service.api_info),
+ rest_collision_tracker[rest_identifier],
+ service.__name__))
+ else:
+ rest_collision_tracker[rest_identifier] = service.__name__
+
+ if method_map:
+ descriptor['methods'] = method_map
+ descriptor['descriptor'] = self.__schema_descriptor(services)
+
+ return descriptor
+
+ def get_descriptor_defaults(self, api_info, hostname=None):
+ """Gets a default configuration for a service.
+
+ Args:
+ api_info: _ApiInfo object for this service.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ A dictionary with the default configuration.
+ """
+ hostname = (hostname or endpoints_util.get_app_hostname() or
+ api_info.hostname)
+ protocol = 'http' if ((hostname and hostname.startswith('localhost')) or
+ endpoints_util.is_running_on_devserver()) else 'https'
+ base_path = api_info.base_path.strip('/')
+ defaults = {
+ 'extends': 'thirdParty.api',
+ 'root': '{0}://{1}/{2}'.format(protocol, hostname, base_path),
+ 'name': api_info.name,
+ 'version': api_info.api_version,
+ 'api_version': api_info.api_version,
+ 'path_version': api_info.path_version,
+ 'defaultVersion': True,
+ 'abstract': False,
+ 'adapter': {
+ 'bns': '{0}://{1}/{2}'.format(protocol, hostname, base_path),
+ 'type': 'lily',
+ 'deadline': 10.0
+ }
+ }
+ if api_info.canonical_name:
+ defaults['canonicalName'] = api_info.canonical_name
+ if api_info.owner_domain:
+ defaults['ownerDomain'] = api_info.owner_domain
+ if api_info.owner_name:
+ defaults['ownerName'] = api_info.owner_name
+ if api_info.package_path:
+ defaults['packagePath'] = api_info.package_path
+ if api_info.title:
+ defaults['title'] = api_info.title
+ if api_info.documentation:
+ defaults['documentation'] = api_info.documentation
+ return defaults
+
+ def get_config_dict(self, services, hostname=None):
+ """JSON dict description of a protorpc.remote.Service in API format.
+
+ Args:
+ services: Either a single protorpc.remote.Service or a list of them
+ that implements an api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ dict, The API descriptor document as a JSON dict.
+ """
+ if not isinstance(services, (tuple, list)):
+ services = [services]
+ # The type of a class that inherits from remote.Service is actually
+ # remote._ServiceClass, thanks to metaclass strangeness.
+ # pylint: disable=protected-access
+ endpoints_util.check_list_type(services, remote._ServiceClass, 'services',
+ allow_none=False)
+
+ return self.__api_descriptor(services, hostname=hostname)
+
+ def pretty_print_config_to_json(self, services, hostname=None):
+ """JSON string description of a protorpc.remote.Service in API format.
+
+ Args:
+ services: Either a single protorpc.remote.Service or a list of them
+ that implements an api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ string, The API descriptor document as a JSON string.
+ """
+ descriptor = self.get_config_dict(services, hostname)
+ return json.dumps(descriptor, sort_keys=True, indent=2,
+ separators=(',', ': '))
diff --git a/third_party/endpoints/api_config_manager.py b/third_party/endpoints/api_config_manager.py
new file mode 100644
index 0000000..08f2c8e
--- /dev/null
+++ b/third_party/endpoints/api_config_manager.py
@@ -0,0 +1,350 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Configuration manager to store API configurations."""
+
+# pylint: disable=g-bad-name
+from __future__ import absolute_import
+
+import base64
+import functools
+import logging
+import re
+import threading
+from six.moves import urllib
+
+from . import discovery_service
+
+_logger = logging.getLogger(__name__)
+
+# Internal constants
+_PATH_VARIABLE_PATTERN = r'[a-zA-Z_][a-zA-Z_.\d]*'
+_PATH_VALUE_PATTERN = r'[^/?#\[\]{}]*'
+
+
+class ApiConfigManager(object):
+ """Manages loading api configs and method lookup."""
+
+ def __init__(self):
+ self._rest_methods = []
+ self._configs = {}
+ self._config_lock = threading.Lock()
+
+ @property
+ def configs(self):
+ """Return a dict with the current configuration mappings.
+
+ Returns:
+ A dict with the current configuration mappings.
+ """
+ with self._config_lock:
+ return self._configs.copy()
+
+ def process_api_config_response(self, config_json):
+ """Parses a JSON API config and registers methods for dispatch.
+
+ Side effects:
+ Parses method name, etc. for all methods and updates the indexing
+ data structures with the information.
+
+ Args:
+ config_json: A dict, the JSON body of the getApiConfigs response.
+ """
+ with self._config_lock:
+ self._add_discovery_config()
+ for config in config_json.get('items', []):
+ lookup_key = config.get('name', ''), config.get('version', '')
+ self._configs[lookup_key] = config
+
+ for config in self._configs.values():
+ name = config.get('name', '')
+ api_version = config.get('api_version', '')
+ path_version = config.get('path_version', '')
+ sorted_methods = self._get_sorted_methods(config.get('methods', {}))
+
+ for method_name, method in sorted_methods:
+ self._save_rest_method(method_name, name, path_version, method)
+
+ def _get_sorted_methods(self, methods):
+ """Get a copy of 'methods' sorted the way they would be on the live server.
+
+ Args:
+ methods: JSON configuration of an API's methods.
+
+ Returns:
+ The same configuration with the methods sorted based on what order
+ they'll be checked by the server.
+ """
+ if not methods:
+ return methods
+
+ # Comparison function we'll use to sort the methods:
+ def _sorted_methods_comparison(method_info1, method_info2):
+ """Sort method info by path and http_method.
+
+ Args:
+ method_info1: Method name and info for the first method to compare.
+ method_info2: Method name and info for the method to compare to.
+
+ Returns:
+ Negative if the first method should come first, positive if the
+ first method should come after the second. Zero if they're
+ equivalent.
+ """
+
+ def _score_path(path):
+ """Calculate the score for this path, used for comparisons.
+
+ Higher scores have priority, and if scores are equal, the path text
+ is sorted alphabetically. Scores are based on the number and location
+ of the constant parts of the path. The server has some special handling
+ for variables with regexes, which we don't handle here.
+
+ Args:
+ path: The request path that we're calculating a score for.
+
+ Returns:
+ The score for the given path.
+ """
+ score = 0
+ parts = path.split('/')
+ for part in parts:
+ score <<= 1
+ if not part or part[0] != '{':
+ # Found a constant.
+ score += 1
+ # Shift by 31 instead of 32 because some (!) versions of Python like
+ # to convert the int to a long if we shift by 32, and the sorted()
+ # function that uses this blows up if it receives anything but an int.
+ score <<= 31 - len(parts)
+ return score
+
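+      # Worked example (hypothetical path, for illustration only): for
+      # 'a/{b}/c' the loop yields the bits 1, 0, 1 (constant, variable,
+      # constant), so score == 0b101 == 5, which is then shifted left by
+      # 31 - 3 == 28 bits.
+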
+ # Higher path scores come first.
+ path_score1 = _score_path(method_info1[1].get('path', ''))
+ path_score2 = _score_path(method_info2[1].get('path', ''))
+ if path_score1 != path_score2:
+ return path_score2 - path_score1
+
+      # Compare by path text next, sorted alphabetically. Python 3 has no
+      # built-in cmp(), so the equivalent (a > b) - (a < b) idiom is used.
+      path1 = method_info1[1].get('path', '')
+      path2 = method_info2[1].get('path', '')
+      if path1 != path2:
+        return (path1 > path2) - (path1 < path2)
+
+      # All else being equal, sort by HTTP method.
+      http1 = method_info1[1].get('httpMethod', '')
+      http2 = method_info2[1].get('httpMethod', '')
+      return (http1 > http2) - (http1 < http2)
+
+    # sorted() in Python 3 only accepts a key function, so adapt the
+    # comparison function with functools.cmp_to_key (available since 2.7).
+    return sorted(methods.items(),
+                  key=functools.cmp_to_key(_sorted_methods_comparison))
+
+ @staticmethod
+ def _get_path_params(match):
+ """Gets path parameters from a regular expression match.
+
+ Args:
+ match: A regular expression Match object for a path.
+
+ Returns:
+      A dictionary mapping the decoded (base32) variable names to their
+      URL-unquoted values.
+ """
+ result = {}
+ for var_name, value in match.groupdict().items():
+ actual_var_name = ApiConfigManager._from_safe_path_param_name(var_name)
+ result[actual_var_name] = urllib.parse.unquote_plus(value)
+ return result
+
+ def lookup_rest_method(self, path, request_uri, http_method):
+ """Look up the rest method at call time.
+
+    The method is looked up in self._rest_methods, the list populated by
+    _save_rest_method.
+
+    Args:
+      path: A string containing the path from the URL of the request.
+      request_uri: A string containing the full request URI (possibly None),
+        matched instead of path for methods configured with useRequestUri.
+      http_method: A string containing the HTTP method of the request.
+
+ Returns:
+ Tuple of (<method name>, <method>, <params>)
+ Where:
+ <method name> is the string name of the method that was matched.
+ <method> is the descriptor as specified in the API configuration. -and-
+ <params> is a dict of path parameters matched in the rest request.
+ """
+ method_key = http_method.lower()
+ with self._config_lock:
+ for compiled_path_pattern, unused_path, methods in self._rest_methods:
+ if method_key not in methods:
+ continue
+ candidate_method_info = methods[method_key]
+        match_against = (request_uri
+                         if candidate_method_info[1].get('useRequestUri')
+                         else path)
+ match = compiled_path_pattern.match(match_against)
+ if match:
+ params = self._get_path_params(match)
+ method_name, method = candidate_method_info
+ break
+ else:
+        _logger.warning('No endpoint found for path: %r, method: %r',
+                        path, http_method)
+ method_name = None
+ method = None
+ params = None
+ return method_name, method, params
+
+ def _add_discovery_config(self):
+ """Add the Discovery configuration to our list of configs.
+
+    This should only be called while holding self._config_lock; the code here
+    assumes the lock is held.
+ """
+ lookup_key = (discovery_service.DiscoveryService.API_CONFIG['name'],
+ discovery_service.DiscoveryService.API_CONFIG['version'])
+ self._configs[lookup_key] = discovery_service.DiscoveryService.API_CONFIG
+
+ def save_config(self, lookup_key, config):
+ """Save a configuration to the cache of configs.
+
+ Args:
+ lookup_key: A string containing the cache lookup key.
+ config: The dict containing the configuration to save to the cache.
+ """
+ with self._config_lock:
+ self._configs[lookup_key] = config
+
+ @staticmethod
+ def _to_safe_path_param_name(matched_parameter):
+ """Creates a safe string to be used as a regex group name.
+
+    Only alphanumeric characters and underscores are allowed in variable name
+    tokens, and a digit is not allowed as the first character.
+
+    We encode the matched_parameter as base32 (since that alphabet is safe),
+    strip the padding ('=' is not safe) and prepend an underscore, since we
+    know a token can begin with one.
+
+ Args:
+ matched_parameter: A string containing the parameter matched from the URL
+ template.
+
+ Returns:
+ A string that's safe to be used as a regex group name.
+ """
+    # b32encode operates on bytes in Python 3, so encode/decode around it.
+    return '_' + base64.b32encode(
+        matched_parameter.encode('utf-8')).decode('utf-8').rstrip('=')
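+
+  # For example (values computed for illustration): base64.b32encode(b'x.y')
+  # is b'PAXHS===', so the safe group name for the path variable 'x.y' is
+  # '_PAXHS'.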
+
+ @staticmethod
+ def _from_safe_path_param_name(safe_parameter):
+ """Takes a safe regex group name and converts it back to the original value.
+
+    Only alphanumeric characters and underscores are allowed in variable name
+    tokens, and a digit is not allowed as the first character.
+
+ The safe_parameter is a base32 representation of the actual value.
+
+ Args:
+ safe_parameter: A string that was generated by _to_safe_path_param_name.
+
+ Returns:
+ A string, the parameter matched from the URL template.
+ """
+ assert safe_parameter.startswith('_')
+ safe_parameter_as_base32 = safe_parameter[1:]
+
+ padding_length = - len(safe_parameter_as_base32) % 8
+ padding = '=' * padding_length
+    # b32decode returns bytes in Python 3; decode back to a string.
+    return base64.b32decode(
+        safe_parameter_as_base32 + padding).decode('utf-8')
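+
+  # Round-trip sketch: '_PAXHS' -> 'PAXHS', padded with '=' * (-5 % 8 == 3)
+  # to 'PAXHS===', which base32-decodes back to 'x.y'.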
+
+ @staticmethod
+ def _compile_path_pattern(pattern):
+ r"""Generates a compiled regex pattern for a path pattern.
+
+ e.g. '/MyApi/v1/notes/{id}'
+ returns re.compile(r'/MyApi/v1/notes/(?P<id>[^/?#\[\]{}]*)')
+
+ Args:
+ pattern: A string, the parameterized path pattern to be checked.
+
+ Returns:
+ A compiled regex object to match this path pattern.
+ """
+
+ def replace_variable(match):
+ """Replaces a {variable} with a regex to match it by name.
+
+ Changes the string corresponding to the variable name to the base32
+ representation of the string, prepended by an underscore. This is
+ necessary because we can have message variable names in URL patterns
+ (e.g. via {x.y}) but the character '.' can't be in a regex group name.
+
+ Args:
+ match: A regex match object, the matching regex group as sent by
+ re.sub().
+
+ Returns:
+ A string regex to match the variable by name, if the full pattern was
+ matched.
+ """
+ if match.lastindex > 1:
+ var_name = ApiConfigManager._to_safe_path_param_name(match.group(2))
+ return '%s(?P<%s>%s)' % (match.group(1), var_name,
+ _PATH_VALUE_PATTERN)
+ return match.group(0)
+
+ pattern = re.sub('(/|^){(%s)}(?=/|$|:)' % _PATH_VARIABLE_PATTERN,
+ replace_variable, pattern)
+ return re.compile(pattern + '/?$')
+
+ def _save_rest_method(self, method_name, api_name, version, method):
+ """Store Rest api methods in a list for lookup at call time.
+
+ The list is self._rest_methods, a list of tuples:
+ [(<compiled_path>, <path_pattern>, <method_dict>), ...]
+ where:
+ <compiled_path> is a compiled regex to match against the incoming URL
+ <path_pattern> is a string representing the original path pattern,
+ checked on insertion to prevent duplicates. -and-
+ <method_dict> is a dict of httpMethod => (method_name, method)
+
+    This structure is a bit complex because it supports use in two contexts:
+    Creation time:
+      - _save_rest_method is called repeatedly; each method will have a path,
+        which we want to be compiled for fast lookup at call time.
+      - We want to prevent duplicate incoming path patterns, so store the
+        un-compiled path, not counting on a compiled regex being a stable
+        comparison as it is not documented as being stable for this use.
+      - Need to store the method that will be mapped at call time.
+ - Different methods may have the same path but different http method.
+ Call time:
+ - Quickly scan through the list attempting .match(path) on each
+ compiled regex to find the path that matches.
+ - When a path is matched, look up the API method from the request
+ and get the method name and method config for the matching
+ API method and method name.
+
+ Args:
+ method_name: A string containing the name of the API method.
+ api_name: A string containing the name of the API.
+ version: A string containing the version of the API.
+ method: A dict containing the method descriptor (as in the api config
+ file).
+ """
+ path_pattern = '/'.join((api_name, version, method.get('path', '')))
+ http_method = method.get('httpMethod', '').lower()
+ for _, path, methods in self._rest_methods:
+ if path == path_pattern:
+ methods[http_method] = method_name, method
+ break
+ else:
+ self._rest_methods.append(
+ (self._compile_path_pattern(path_pattern),
+ path_pattern,
+ {http_method: (method_name, method)}))
diff --git a/third_party/endpoints/api_exceptions.py b/third_party/endpoints/api_exceptions.py
new file mode 100644
index 0000000..66fbe86
--- /dev/null
+++ b/third_party/endpoints/api_exceptions.py
@@ -0,0 +1,94 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library containing exception types used by Endpoints."""
+
+from __future__ import absolute_import
+
+from six.moves import http_client
+
+from . import remote
+
+
+class ServiceException(remote.ApplicationError):
+ """Base class for request/service exceptions in Endpoints."""
+
+ def __init__(self, message=None):
+ super(ServiceException, self).__init__(message,
+ http_client.responses[self.http_status])
+
+
+class BadRequestException(ServiceException):
+ """Bad request exception that is mapped to a 400 response."""
+ http_status = http_client.BAD_REQUEST
+
+
+class UnauthorizedException(ServiceException):
+ """Unauthorized exception that is mapped to a 401 response."""
+ http_status = http_client.UNAUTHORIZED
+
+
+class ForbiddenException(ServiceException):
+ """Forbidden exception that is mapped to a 403 response."""
+ http_status = http_client.FORBIDDEN
+
+
+class NotFoundException(ServiceException):
+ """Not found exception that is mapped to a 404 response."""
+ http_status = http_client.NOT_FOUND
+
+
+class ConflictException(ServiceException):
+ """Conflict exception that is mapped to a 409 response."""
+ http_status = http_client.CONFLICT
+
+
+class GoneException(ServiceException):
+ """Resource Gone exception that is mapped to a 410 response."""
+ http_status = http_client.GONE
+
+
+class PreconditionFailedException(ServiceException):
+ """Precondition Failed exception that is mapped to a 412 response."""
+ http_status = http_client.PRECONDITION_FAILED
+
+
+class RequestEntityTooLargeException(ServiceException):
+ """Request entity too large exception that is mapped to a 413 response."""
+ http_status = http_client.REQUEST_ENTITY_TOO_LARGE
+
+
+class InternalServerErrorException(ServiceException):
+ """Internal server exception that is mapped to a 500 response."""
+ http_status = http_client.INTERNAL_SERVER_ERROR
+
+
+class ApiConfigurationError(Exception):
+ """Exception thrown if there's an error in the configuration/annotations."""
+
+
+class InvalidNamespaceException(Exception):
+ """Exception thrown if there's an invalid namespace declaration."""
+
+
+class InvalidLimitDefinitionException(Exception):
+ """Exception thrown if there's an invalid rate limit definition."""
+
+
+class InvalidApiNameException(Exception):
+ """Exception thrown if the api name does not match the required character set."""
+
+
+class ToolError(Exception):
+ """Exception thrown if there's a general error in the endpointscfg.py tool."""
diff --git a/third_party/endpoints/api_request.py b/third_party/endpoints/api_request.py
new file mode 100644
index 0000000..8b95047
--- /dev/null
+++ b/third_party/endpoints/api_request.py
@@ -0,0 +1,193 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Cloud Endpoints API request-related data and functions."""
+
+from __future__ import absolute_import
+
+# pylint: disable=g-bad-name
+import copy
+import json
+import logging
+from six.moves import urllib
+import zlib
+
+from . import util
+
+_logger = logging.getLogger(__name__)
+
+_METHOD_OVERRIDE = 'X-HTTP-METHOD-OVERRIDE'
+
+
+class ApiRequest(object):
+ """Simple data object representing an API request.
+
+ Parses the request from environment variables into convenient pieces
+ and stores them as members.
+ """
+ def __init__(self, environ, base_paths=None):
+ """Constructor.
+
+ Args:
+      environ: An environ dict for the request as defined in PEP-333.
+      base_paths: A list or set of API base paths; the matching base path is
+        stripped from the request path before dispatch.
+
+ Raises:
+ ValueError: If the path for the request is invalid.
+ """
+ self.headers = util.get_headers_from_environ(environ)
+ self.http_method = environ['REQUEST_METHOD']
+ self.url_scheme = environ['wsgi.url_scheme']
+ self.server = environ['SERVER_NAME']
+ self.port = environ['SERVER_PORT']
+ self.path = environ['PATH_INFO']
+ self.request_uri = environ.get('REQUEST_URI')
+ if self.request_uri is not None and len(self.request_uri) < len(self.path):
+ self.request_uri = None
+ self.query = environ.get('QUERY_STRING')
+ self.body = environ['wsgi.input'].read()
+ if self.body and self.headers.get('CONTENT-ENCODING') == 'gzip':
+ # Increasing wbits to 16 + MAX_WBITS is necessary to be able to decode
+ # gzipped content (as opposed to zlib-encoded content).
+ # If there's an error in the decompression, it could be due to another
+ # part of the serving chain that already decompressed it without clearing
+ # the header. If so, just ignore it and continue.
+ try:
+ self.body = zlib.decompress(self.body, 16 + zlib.MAX_WBITS)
+ except zlib.error:
+ pass
+ if _METHOD_OVERRIDE in self.headers:
+ # the query arguments in the body will be handled by ._process_req_body()
+ self.http_method = self.headers[_METHOD_OVERRIDE]
+ del self.headers[_METHOD_OVERRIDE] # wsgiref.headers.Headers doesn't implement .pop()
+ self.source_ip = environ.get('REMOTE_ADDR')
+ self.relative_url = self._reconstruct_relative_url(environ)
+
+ if not base_paths:
+ base_paths = set()
+ elif isinstance(base_paths, list):
+ base_paths = set(base_paths)
+
+ # Find a base_path in the path
+ for base_path in base_paths:
+ if self.path.startswith(base_path):
+ self.path = self.path[len(base_path):]
+ if self.request_uri is not None:
+ self.request_uri = self.request_uri[len(base_path):]
+ self.base_path = base_path
+ break
+ else:
+ raise ValueError('Invalid request path: %s' % self.path)
+
+ if self.query:
+ self.parameters = urllib.parse.parse_qs(self.query, keep_blank_values=True)
+ else:
+ self.parameters = {}
+ self.body_json = self._process_req_body(self.body) if self.body else {}
+ self.request_id = None
+
+ # Check if it's a batch request. We'll only handle single-element batch
+ # requests on the dev server (and we need to handle them because that's
+ # what RPC and JS calls typically show up as). Pull the request out of the
+ # list and record the fact that we're processing a batch.
+ if isinstance(self.body_json, list):
+ if len(self.body_json) != 1:
+ _logger.warning('Batch requests with more than 1 element aren\'t '
+ 'supported in devappserver2. Only the first element '
+ 'will be handled. Found %d elements.',
+ len(self.body_json))
+ else:
+ _logger.info('Converting batch request to single request.')
+ self.body_json = self.body_json[0]
+ self.body = json.dumps(self.body_json)
+ self._is_batch = True
+ else:
+ self._is_batch = False
+
+ def _process_req_body(self, body):
+ """Process the body of the HTTP request.
+
+ If the body is valid JSON, return the JSON as a dict.
+ Else, convert the key=value format to a dict and return that.
+
+    Args:
+      body: The body of the HTTP request.
+
+    Returns:
+      A dict parsed from the body, either as JSON or as key=value form data.
+    """
+ try:
+ return json.loads(body)
+ except ValueError:
+ return urllib.parse.parse_qs(body, keep_blank_values=True)
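+
+  # A minimal sketch of both branches (payloads assumed for illustration):
+  #   _process_req_body('{"a": 1}')  ->  {'a': 1}
+  #   _process_req_body('a=1&b=')    ->  {'a': ['1'], 'b': ['']}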
+
+ def _reconstruct_relative_url(self, environ):
+ """Reconstruct the relative URL of this request.
+
+ This is based on the URL reconstruction code in Python PEP 333:
+ http://www.python.org/dev/peps/pep-0333/#url-reconstruction. Rebuild the
+ URL from the pieces available in the environment.
+
+ Args:
+ environ: An environ dict for the request as defined in PEP-333
+
+ Returns:
+ The portion of the URL from the request after the server and port.
+ """
+ url = urllib.parse.quote(environ.get('SCRIPT_NAME', ''))
+ url += urllib.parse.quote(environ.get('PATH_INFO', ''))
+ if environ.get('QUERY_STRING'):
+ url += '?' + environ['QUERY_STRING']
+ return url
+
+ def reconstruct_hostname(self, port_override=None):
+ """Reconstruct the hostname of a request.
+
+ This is based on the URL reconstruction code in Python PEP 333:
+ http://www.python.org/dev/peps/pep-0333/#url-reconstruction. Rebuild the
+ hostname from the pieces available in the environment.
+
+ Args:
+ port_override: str, An override for the port on the returned hostname.
+
+ Returns:
+ The hostname portion of the URL from the request, not including the
+ URL scheme.
+ """
+ url = self.server
+ port = port_override or self.port
+ if port and ((self.url_scheme == 'https' and str(port) != '443') or
+ (self.url_scheme != 'https' and str(port) != '80')):
+ url += ':{0}'.format(port)
+
+ return url
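+
+  # For example (names assumed): with url_scheme 'https', server
+  # 'example.com' and port '8443' this returns 'example.com:8443'; the
+  # default ports 443 (https) and 80 (http) are omitted.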
+
+ def reconstruct_full_url(self, port_override=None):
+ """Reconstruct the full URL of a request.
+
+ This is based on the URL reconstruction code in Python PEP 333:
+ http://www.python.org/dev/peps/pep-0333/#url-reconstruction. Rebuild the
+ hostname from the pieces available in the environment.
+
+ Args:
+ port_override: str, An override for the port on the returned full URL.
+
+ Returns:
+ The full URL from the request, including the URL scheme.
+ """
+ return '{0}://{1}{2}'.format(self.url_scheme,
+ self.reconstruct_hostname(port_override),
+ self.relative_url)
+
+ def copy(self):
+ return copy.deepcopy(self)
+
+ def is_batch(self):
+ return self._is_batch
diff --git a/third_party/endpoints/apiserving.py b/third_party/endpoints/apiserving.py
new file mode 100644
index 0000000..9c8cfca
--- /dev/null
+++ b/third_party/endpoints/apiserving.py
@@ -0,0 +1,606 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library supporting use of the Google API Server.
+
+This library helps you configure a set of ProtoRPC services to act as
+Endpoints backends. In addition to translating ProtoRPC to Endpoints
+compatible errors, it exposes a helper service that describes your services.
+
+ Usage:
+ 1) Create an endpoints.api_server instead of a webapp.WSGIApplication.
+ 2) Annotate your ProtoRPC Service class with @endpoints.api to give your
+ API a name, version, and short description.
+ 3) To return an error from the Google API Server, raise an
+ endpoints.*Exception. The ServiceException classes specify the HTTP
+ status code returned.
+
+ For example:
+ raise endpoints.UnauthorizedException("Please log in as an admin user")
+
+
+ Sample usage:
+ - - - - app.yaml - - - -
+
+ handlers:
+ # Path to your API backend.
+ # /_ah/api/.* is the default. Using the base_path parameter, you can
+ # customize this to whichever base path you desire.
+ - url: /_ah/api/.*
+ # For the legacy python runtime this would be "script: services.py"
+ script: services.app
+
+ - - - - services.py - - - -
+
+ import endpoints
+ import postservice
+
+ app = endpoints.api_server([postservice.PostService], debug=True)
+
+ - - - - postservice.py - - - -
+
+ @endpoints.api(name='guestbook', version='v0.2', description='Guestbook API')
+ class PostService(remote.Service):
+ ...
+ @endpoints.method(GetNotesRequest, Notes, name='notes.list', path='notes',
+ http_method='GET')
+ def list(self, request):
+ raise endpoints.UnauthorizedException("Please log in as an admin user")
+"""
+
+from __future__ import absolute_import
+
+import cgi
+from six.moves import http_client
+import json
+import logging
+import os
+
+from google.appengine.api import app_identity
+
+from . import api_config
+from . import api_exceptions
+from . import endpoints_dispatcher
+from . import message_types
+from . import messages
+from . import protojson
+from . import remote
+from . import util
+
+_logger = logging.getLogger(__name__)
+package = 'google.appengine.endpoints'
+
+
+__all__ = [
+ 'ApiConfigRegistry',
+ 'api_server',
+ 'EndpointsErrorMessage',
+ 'package',
+]
+
+
+class _Remapped405Exception(api_exceptions.ServiceException):
+ """Method Not Allowed (405) ends up being remapped to 501.
+
+ This is included here for compatibility with the Java implementation. The
+ Google Cloud Endpoints server remaps HTTP 405 to 501.
+ """
+ http_status = http_client.METHOD_NOT_ALLOWED
+
+
+class _Remapped408Exception(api_exceptions.ServiceException):
+ """Request Timeout (408) ends up being remapped to 503.
+
+ This is included here for compatibility with the Java implementation. The
+ Google Cloud Endpoints server remaps HTTP 408 to 503.
+ """
+ http_status = http_client.REQUEST_TIMEOUT
+
+
+_ERROR_NAME_MAP = dict((http_client.responses[c.http_status], c) for c in [
+ api_exceptions.BadRequestException,
+ api_exceptions.UnauthorizedException,
+ api_exceptions.ForbiddenException,
+ api_exceptions.NotFoundException,
+ _Remapped405Exception,
+ _Remapped408Exception,
+ api_exceptions.ConflictException,
+ api_exceptions.GoneException,
+ api_exceptions.PreconditionFailedException,
+ api_exceptions.RequestEntityTooLargeException,
+ api_exceptions.InternalServerErrorException
+ ])
+
+_ALL_JSON_CONTENT_TYPES = frozenset(
+ [protojson.EndpointsProtoJson.CONTENT_TYPE] +
+ protojson.EndpointsProtoJson.ALTERNATIVE_CONTENT_TYPES)
+
+
+# Message format for returning error back to Google Endpoints frontend.
+class EndpointsErrorMessage(messages.Message):
+ """Message for returning error back to Google Endpoints frontend.
+
+ Fields:
+ state: State of RPC, should be 'APPLICATION_ERROR'.
+ error_message: Error message associated with status.
+ """
+
+ class State(messages.Enum):
+ """Enumeration of possible RPC states.
+
+ Values:
+ OK: Completed successfully.
+ RUNNING: Still running, not complete.
+ REQUEST_ERROR: Request was malformed or incomplete.
+ SERVER_ERROR: Server experienced an unexpected error.
+      NETWORK_ERROR: An error occurred on the network.
+ APPLICATION_ERROR: The application is indicating an error.
+ When in this state, RPC should also set application_error.
+ """
+ OK = 0
+ RUNNING = 1
+
+ REQUEST_ERROR = 2
+ SERVER_ERROR = 3
+ NETWORK_ERROR = 4
+ APPLICATION_ERROR = 5
+ METHOD_NOT_FOUND_ERROR = 6
+
+ state = messages.EnumField(State, 1, required=True)
+ error_message = messages.StringField(2)
+
+
+# pylint: disable=g-bad-name
+def _get_app_revision(environ=None):
+ """Gets the app revision (minor app version) of the current app.
+
+ Args:
+ environ: A dictionary with a key CURRENT_VERSION_ID that maps to a version
+ string of the format <major>.<minor>.
+
+ Returns:
+ The app revision (minor version) of the current app, or None if one couldn't
+ be found.
+ """
+ if environ is None:
+ environ = os.environ
+ if 'CURRENT_VERSION_ID' in environ:
+ return environ['CURRENT_VERSION_ID'].split('.')[1]
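+
+# For example (version string assumed for illustration):
+#   _get_app_revision({'CURRENT_VERSION_ID': '1.356912'})  ->  '356912'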
+
+
+class ApiConfigRegistry(object):
+ """Registry of active APIs"""
+
+ def __init__(self):
+ # Set of API classes that have been registered.
+ self.__registered_classes = set()
+ # Set of API config contents served by this App Engine AppId/version
+ self.__api_configs = []
+ # Map of API method name to ProtoRPC method name.
+ self.__api_methods = {}
+
+ # pylint: disable=g-bad-name
+ def register_backend(self, config_contents):
+ """Register a single API and its config contents.
+
+ Args:
+ config_contents: Dict containing API configuration.
+ """
+ if config_contents is None:
+ return
+ self.__register_class(config_contents)
+ self.__api_configs.append(config_contents)
+ self.__register_methods(config_contents)
+
+ def __register_class(self, parsed_config):
+ """Register the class implementing this config, so we only add it once.
+
+ Args:
+ parsed_config: The JSON object with the API configuration being added.
+
+ Raises:
+ ApiConfigurationError: If the class has already been registered.
+ """
+ methods = parsed_config.get('methods')
+ if not methods:
+ return
+
+ # Determine the name of the class that implements this configuration.
+ service_classes = set()
+ for method in methods.values():
+ rosy_method = method.get('rosyMethod')
+ if rosy_method and '.' in rosy_method:
+ method_class = rosy_method.split('.', 1)[0]
+ service_classes.add(method_class)
+
+ for service_class in service_classes:
+ if service_class in self.__registered_classes:
+ raise api_exceptions.ApiConfigurationError(
+ 'API class %s has already been registered.' % service_class)
+ self.__registered_classes.add(service_class)
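+  # For illustration (hypothetical names): a config whose methods carry
+  # 'rosyMethod': 'GreetingsV1.list' is implemented by class 'GreetingsV1';
+  # registering a second config for that class raises ApiConfigurationError.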
+
+ def __register_methods(self, parsed_config):
+ """Register all methods from the given api config file.
+
+ Methods are stored in a map from method_name to rosyMethod,
+ the name of the ProtoRPC method to be called on the backend.
+ If no rosyMethod was specified the value will be None.
+
+ Args:
+ parsed_config: The JSON object with the API configuration being added.
+ """
+ methods = parsed_config.get('methods')
+ if not methods:
+ return
+
+ for method_name, method in methods.items():
+ self.__api_methods[method_name] = method.get('rosyMethod')
+
+ def lookup_api_method(self, api_method_name):
+ """Looks an API method up by name to find the backend method to call.
+
+ Args:
+ api_method_name: Name of the method in the API that was called.
+
+ Returns:
+ Name of the ProtoRPC method called on the backend, or None if not found.
+ """
+ return self.__api_methods.get(api_method_name)
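+  # For illustration (hypothetical names): after registering a config with
+  # methods={'myapi.items.list': {'rosyMethod': 'ItemsService.list'}},
+  # lookup_api_method('myapi.items.list') returns 'ItemsService.list'.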
+
+ def all_api_configs(self):
+ """Return a list of all API configration specs as registered above."""
+ return self.__api_configs
+
+
+class _ApiServer(object):
+ """ProtoRPC wrapper, registers APIs and formats errors for Google API Server.
+
+ - - - - ProtoRPC error format - - - -
+ HTTP/1.0 400 Please log in as an admin user.
+ content-type: application/json
+
+ {
+ "state": "APPLICATION_ERROR",
+ "error_message": "Please log in as an admin user",
+ "error_name": "unauthorized",
+ }
+
+ - - - - Reformatted error format - - - -
+ HTTP/1.0 401 UNAUTHORIZED
+ content-type: application/json
+
+ {
+ "state": "APPLICATION_ERROR",
+ "error_message": "Please log in as an admin user"
+ }
+ """
+ # Silence lint warning about invalid const name
+ # pylint: disable=g-bad-name
+ __SERVER_SOFTWARE = 'SERVER_SOFTWARE'
+ __HEADER_NAME_PEER = 'HTTP_X_APPENGINE_PEER'
+ __GOOGLE_PEER = 'apiserving'
+  # A common EndpointsProtoJson for all _ApiServer instances. At the moment,
+  # EndpointsProtoJson appears to be thread-safe.
+ __PROTOJSON = protojson.EndpointsProtoJson()
+
+ def __init__(self, api_services, **kwargs):
+ """Initialize an _ApiServer instance.
+
+ The primary function of this method is to set up the WSGIApplication
+ instance for the service handlers described by the services passed in.
+ Additionally, it registers each API in ApiConfigRegistry for later use
+ in the BackendService.getApiConfigs() (API config enumeration service).
+
+ Args:
+ api_services: List of protorpc.remote.Service classes implementing the API
+ or a list of _ApiDecorator instances that decorate the service classes
+ for an API.
+ **kwargs: Passed through to protorpc.wsgi.service.service_handlers except:
+ protocols - ProtoRPC protocols are not supported, and are disallowed.
+
+ Raises:
+ TypeError: if protocols are configured (this feature is not supported).
+ ApiConfigurationError: if there's a problem with the API config.
+ """
+ self.base_paths = set()
+
+ for entry in api_services[:]:
+ # pylint: disable=protected-access
+ if isinstance(entry, api_config._ApiDecorator):
+ api_services.remove(entry)
+ api_services.extend(entry.get_api_classes())
+
+ # Record the API services for quick discovery doc generation
+ self.api_services = api_services
+
+ # Record the base paths
+ for entry in api_services:
+ self.base_paths.add(entry.api_info.base_path)
+
+ self.api_config_registry = ApiConfigRegistry()
+ self.api_name_version_map = self.__create_name_version_map(api_services)
+ protorpc_services = self.__register_services(self.api_name_version_map,
+ self.api_config_registry)
+
+ # Disallow protocol configuration for now, Lily is json-only.
+ if 'protocols' in kwargs:
+ raise TypeError('__init__() got an unexpected keyword argument '
+ "'protocols'")
+ protocols = remote.Protocols()
+ protocols.add_protocol(self.__PROTOJSON, 'protojson')
+ remote.Protocols.set_default(protocols)
+
+ # This variable is not used in Endpoints 1.1, but let's pop it out here
+ # so it doesn't result in an unexpected keyword argument downstream.
+ kwargs.pop('restricted', None)
+
+ from protorpc.wsgi import service as wsgi_service
+ self.service_app = wsgi_service.service_mappings(protorpc_services,
+ **kwargs)
+
+ @staticmethod
+ def __create_name_version_map(api_services):
+ """Create a map from API name/version to Service class/factory.
+
+ This creates a map from an API name and version to a list of remote.Service
+ factories that implement that API.
+
+ Args:
+ api_services: A list of remote.Service-derived classes or factories
+ created with remote.Service.new_factory.
+
+ Returns:
+ A mapping from (api name, api version) to a list of service factories,
+ for service classes that implement that API.
+
+ Raises:
+ ApiConfigurationError: If a Service class appears more than once
+ in api_services.
+ """
+ api_name_version_map = {}
+ for service_factory in api_services:
+ try:
+ service_class = service_factory.service_class
+ except AttributeError:
+ service_class = service_factory
+ service_factory = service_class.new_factory()
+
+ key = service_class.api_info.name, service_class.api_info.api_version
+ service_factories = api_name_version_map.setdefault(key, [])
+ if service_factory in service_factories:
+ raise api_config.ApiConfigurationError(
+ 'Can\'t add the same class to an API twice: %s' %
+ service_factory.service_class.__name__)
+
+ service_factories.append(service_factory)
+ return api_name_version_map
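+  # Sketch of the resulting shape, with hypothetical names:
+  #   {('greetings', 'v1'): [GreetingsV1.new_factory(),
+  #                          GreetingsDelegate.new_factory()]}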
+
+ @staticmethod
+ def __register_services(api_name_version_map, api_config_registry):
+ """Register & return a list of each URL and class that handles that URL.
+
+ This finds every service class in api_name_version_map, registers it with
+ the given ApiConfigRegistry, builds the URL for that class, and adds
+ the URL and its factory to a list that's returned.
+
+ Args:
+ api_name_version_map: A mapping from (api name, api version) to a list of
+ service factories, as returned by __create_name_version_map.
+ api_config_registry: The ApiConfigRegistry where service classes will
+ be registered.
+
+ Returns:
+ A list of (URL, service_factory) for each service class in
+ api_name_version_map.
+
+ Raises:
+ ApiConfigurationError: If a Service class appears more than once
+ in api_name_version_map. This could happen if one class is used to
+ implement multiple APIs.
+ """
+ generator = api_config.ApiConfigGenerator()
+ protorpc_services = []
+ for service_factories in api_name_version_map.values():
+ service_classes = [service_factory.service_class
+ for service_factory in service_factories]
+ config_dict = generator.get_config_dict(service_classes)
+ api_config_registry.register_backend(config_dict)
+
+ for service_factory in service_factories:
+ protorpc_class_name = service_factory.service_class.__name__
+ root = '%s%s' % (service_factory.service_class.api_info.base_path,
+ protorpc_class_name)
+ if any(service_map[0] == root or service_map[1] == service_factory
+ for service_map in protorpc_services):
+ raise api_config.ApiConfigurationError(
+ 'Can\'t reuse the same class in multiple APIs: %s' %
+ protorpc_class_name)
+ protorpc_services.append((root, service_factory))
+ return protorpc_services
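+  # For illustration: a hypothetical class ItemsService whose
+  # api_info.base_path is '/_ah/api/' maps to the URL root
+  # '/_ah/api/ItemsService'; reusing the same class in a second API raises
+  # ApiConfigurationError.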
+
+ def __is_json_error(self, status, headers):
+ """Determine if response is an error.
+
+ Args:
+ status: HTTP status code.
+ headers: Dictionary of (lowercase) header name to value.
+
+ Returns:
+ True if the response was an error, else False.
+ """
+ content_header = headers.get('content-type', '')
+ content_type, unused_params = cgi.parse_header(content_header)
+ return (status.startswith('400') and
+ content_type.lower() in _ALL_JSON_CONTENT_TYPES)
+
+ def __write_error(self, status_code, error_message=None):
+ """Return the HTTP status line and body for a given error code and message.
+
+ Args:
+ status_code: HTTP status code to be returned.
+ error_message: Error message to be returned.
+
+ Returns:
+ Tuple (http_status, body):
+        http_status: HTTP status line, e.g. '200 OK'.
+        body: Body of the HTTP response.
+ """
+ if error_message is None:
+ error_message = http_client.responses[status_code]
+ status = '%d %s' % (status_code, http_client.responses[status_code])
+ message = EndpointsErrorMessage(
+ state=EndpointsErrorMessage.State.APPLICATION_ERROR,
+ error_message=error_message)
+ return status, self.__PROTOJSON.encode_message(message)
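+  # For illustration: __write_error(404) returns roughly
+  # ('404 Not Found',
+  #  '{"state": "APPLICATION_ERROR", "error_message": "Not Found"}').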
+
+ def protorpc_to_endpoints_error(self, status, body):
+ """Convert a ProtoRPC error to the format expected by Google Endpoints.
+
+    If the body does not contain a ProtoRPC message in state APPLICATION_ERROR,
+    the status and body are returned unchanged.
+
+ Args:
+      status: HTTP status of the response from the backend.
+      body: JSON-encoded error response body from the ProtoRPC backend.
+
+ Returns:
+ Tuple of (http status, body)
+ """
+ try:
+ rpc_error = self.__PROTOJSON.decode_message(remote.RpcStatus, body)
+ except (ValueError, messages.ValidationError):
+ rpc_error = remote.RpcStatus()
+
+ if rpc_error.state == remote.RpcStatus.State.APPLICATION_ERROR:
+
+ # Try to map to HTTP error code.
+ error_class = _ERROR_NAME_MAP.get(rpc_error.error_name)
+ if error_class:
+ status, body = self.__write_error(error_class.http_status,
+ rpc_error.error_message)
+ return status, body
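+  # For illustration: a backend body of
+  # '{"state": "APPLICATION_ERROR", "error_name": "Not Found",
+  #   "error_message": "no such item"}' is remapped from ProtoRPC's 400
+  # status to '404 Not Found'; non-APPLICATION_ERROR bodies pass through
+  # unchanged.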
+
+ def get_api_configs(self):
+ return {
+ 'items': self.api_config_registry.all_api_configs()}
+
+ def __call__(self, environ, start_response):
+ """Wrapper for the Endpoints server app.
+
+ Args:
+ environ: WSGI request environment.
+ start_response: WSGI start response function.
+
+ Returns:
+ Response from service_app or appropriately transformed error response.
+ """
+ # Call the ProtoRPC App and capture its response
+ with util.StartResponseProxy() as start_response_proxy:
+ body_iter = self.service_app(environ, start_response_proxy.Proxy)
+ status = start_response_proxy.response_status
+ headers = start_response_proxy.response_headers
+ exception = start_response_proxy.response_exc_info
+
+ # Get response body
+ body = start_response_proxy.response_body
+ # In case standard WSGI behavior is implemented later...
+ if not body:
+ body = ''.join(body_iter)
+
+ # Transform ProtoRPC error into format expected by endpoints.
+ headers_dict = dict([(k.lower(), v) for k, v in headers])
+ if self.__is_json_error(status, headers_dict):
+ status, body = self.protorpc_to_endpoints_error(status, body)
+ # If the content-length header is present, update it with the new
+ # body length.
+ if 'content-length' in headers_dict:
+ for index, (header_name, _) in enumerate(headers):
+ if header_name.lower() == 'content-length':
+ headers[index] = (header_name, str(len(body)))
+ break
+
+ start_response(status, headers, exception)
+ return [body]
+
+
+# Silence lint warning about invalid function name
+# pylint: disable=g-bad-name
+def api_server(api_services, **kwargs):
+ """Create an api_server.
+
+ The primary function of this method is to set up the WSGIApplication
+ instance for the service handlers described by the services passed in.
+ Additionally, it registers each API in ApiConfigRegistry for later use
+ in the BackendService.getApiConfigs() (API config enumeration service).
+ It also configures service control.
+
+ Args:
+ api_services: List of protorpc.remote.Service classes implementing the API
+ or a list of _ApiDecorator instances that decorate the service classes
+ for an API.
+ **kwargs: Passed through to protorpc.wsgi.service.service_handlers except:
+ protocols - ProtoRPC protocols are not supported, and are disallowed.
+
+ Returns:
+ A new WSGIApplication that serves the API backend and config registry.
+
+ Raises:
+ TypeError: if protocols are configured (this feature is not supported).
+ """
+ # Disallow protocol configuration for now, Lily is json-only.
+ if 'protocols' in kwargs:
+ raise TypeError("__init__() got an unexpected keyword argument 'protocols'")
+
+ from . import _logger as endpoints_logger
+ from . import __version__ as endpoints_version
+ endpoints_logger.info('Initializing Endpoints Framework version %s', endpoints_version)
+
+ # Construct the api serving app
+ apis_app = _ApiServer(api_services, **kwargs)
+ dispatcher = endpoints_dispatcher.EndpointsDispatcherMiddleware(apis_app)
+
+ # Determine the service name
+ service_name = os.environ.get('ENDPOINTS_SERVICE_NAME')
+ if not service_name:
+    _logger.warn('Did not specify the ENDPOINTS_SERVICE_NAME environment'
+                 ' variable, so service control is disabled. Please specify'
+                 ' the name of the service in ENDPOINTS_SERVICE_NAME to'
+                 ' enable it.')
+ return dispatcher
+
+ from endpoints_management.control import client as control_client
+ from endpoints_management.control import wsgi as control_wsgi
+
+  # If we're using a local server, just return the dispatcher now to bypass
+  # the control client.
+ if control_wsgi.running_on_devserver():
+ _logger.warn('Running on local devserver, so service control is disabled.')
+ return dispatcher
+
+ from endpoints_management import _logger as management_logger
+ from endpoints_management import __version__ as management_version
+ management_logger.info('Initializing Endpoints Management Framework version %s', management_version)
+
+ # The DEFAULT 'config' should be tuned so that it's always OK for python
+ # App Engine workloads. The config can be adjusted, but that's probably
+ # unnecessary on App Engine.
+ controller = control_client.Loaders.DEFAULT.load(service_name)
+
+ # Start the GAE background thread that powers the control client's cache.
+ control_client.use_gae_thread()
+ controller.start()
+
+ return control_wsgi.add_all(
+ dispatcher,
+ app_identity.get_application_id(),
+ controller)
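+
+# A minimal usage sketch (the service class below is hypothetical):
+#
+#   @endpoints.api(name='greetings', version='v1')
+#   class GreetingsApi(remote.Service):
+#     ...
+#
+#   app = api_server([GreetingsApi])
+#
+# With ENDPOINTS_SERVICE_NAME unset, the dispatcher is returned without
+# service control wrappers.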
diff --git a/third_party/endpoints/constants.py b/third_party/endpoints/constants.py
new file mode 100644
index 0000000..29b683e
--- /dev/null
+++ b/third_party/endpoints/constants.py
@@ -0,0 +1,29 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provide various constants needed by Endpoints Framework.
+
+Putting them in this file makes it easier to avoid circular imports,
+as well as keep from complicating tests due to importing code that
+uses App Engine apis.
+"""
+
+from __future__ import absolute_import
+
+__all__ = [
+ 'API_EXPLORER_CLIENT_ID',
+]
+
+
+API_EXPLORER_CLIENT_ID = '292824132082.apps.googleusercontent.com'
diff --git a/third_party/endpoints/directory_list_generator.py b/third_party/endpoints/directory_list_generator.py
new file mode 100644
index 0000000..40f26b6
--- /dev/null
+++ b/third_party/endpoints/directory_list_generator.py
@@ -0,0 +1,162 @@
+# Copyright 2017 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library for converting service configs to discovery directory lists."""
+
+from __future__ import absolute_import
+
+import collections
+import json
+import re
+from six.moves import urllib
+
+from . import util
+
+
+class DirectoryListGenerator(object):
+ """Generates a discovery directory list from a ProtoRPC service.
+
+ Example:
+
+ class HelloRequest(messages.Message):
+ my_name = messages.StringField(1, required=True)
+
+ class HelloResponse(messages.Message):
+ hello = messages.StringField(1, required=True)
+
+ class HelloService(remote.Service):
+
+ @remote.method(HelloRequest, HelloResponse)
+ def hello(self, request):
+ return HelloResponse(hello='Hello there, %s!' %
+ request.my_name)
+
+ api_config = DirectoryListGenerator().pretty_print_config_to_json(
+ HelloService)
+
+ The resulting document will be a JSON directory list describing the APIs
+ implemented by HelloService.
+ """
+
+ def __init__(self, request=None):
+ # The ApiRequest that called this generator
+ self.__request = request
+
+ def __item_descriptor(self, config):
+ """Builds an item descriptor for a service configuration.
+
+ Args:
+ config: A dictionary containing the service configuration to describe.
+
+ Returns:
+ A dictionary that describes the service configuration.
+ """
+ descriptor = {
+ 'kind': 'discovery#directoryItem',
+ 'icons': {
+ 'x16': 'https://www.gstatic.com/images/branding/product/1x/'
+ 'googleg_16dp.png',
+ 'x32': 'https://www.gstatic.com/images/branding/product/1x/'
+ 'googleg_32dp.png',
+ },
+ 'preferred': True,
+ }
+
+ description = config.get('description')
+ root_url = config.get('root')
+ name = config.get('name')
+ version = config.get('api_version')
+ relative_path = '/apis/{0}/{1}/rest'.format(name, version)
+
+ if description:
+ descriptor['description'] = description
+
+ descriptor['name'] = name
+ descriptor['version'] = version
+ descriptor['discoveryLink'] = '.{0}'.format(relative_path)
+
+ root_url_port = urllib.parse.urlparse(root_url).port
+
+ original_path = self.__request.reconstruct_full_url(
+ port_override=root_url_port)
+ descriptor['discoveryRestUrl'] = '{0}/{1}/{2}/rest'.format(
+ original_path, name, version)
+
+ if name and version:
+ descriptor['id'] = '{0}:{1}'.format(name, version)
+
+ return descriptor
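+  # For illustration (hypothetical config): name='greetings' and
+  # api_version='v1' yield 'id': 'greetings:v1' and
+  # 'discoveryLink': './apis/greetings/v1/rest'.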
+
+ def __directory_list_descriptor(self, configs):
+ """Builds a directory list for an API.
+
+ Args:
+ configs: List of dicts containing the service configurations to list.
+
+ Returns:
+ A dictionary that can be deserialized into JSON in discovery list format.
+
+ Raises:
+ ApiConfigurationError: If there's something wrong with the API
+ configuration, such as a multiclass API decorated with different API
+ descriptors (see the docstring for api()), or a repeated method
+ signature.
+ """
+ descriptor = {
+ 'kind': 'discovery#directoryList',
+ 'discoveryVersion': 'v1',
+ }
+
+ items = []
+ for config in configs:
+ item_descriptor = self.__item_descriptor(config)
+ if item_descriptor:
+ items.append(item_descriptor)
+
+ if items:
+ descriptor['items'] = items
+
+ return descriptor
+
+ def get_directory_list_doc(self, configs):
+ """JSON dict description of a protorpc.remote.Service in list format.
+
+ Args:
+ configs: Either a single dict or a list of dicts containing the service
+ configurations to list.
+
+ Returns:
+ dict, The directory list document as a JSON dict.
+ """
+
+ if not isinstance(configs, (tuple, list)):
+ configs = [configs]
+
+ util.check_list_type(configs, dict, 'configs', allow_none=False)
+
+ return self.__directory_list_descriptor(configs)
+
+ def pretty_print_config_to_json(self, configs):
+ """JSON string description of a protorpc.remote.Service in a discovery doc.
+
+ Args:
+ configs: Either a single dict or a list of dicts containing the service
+ configurations to list.
+
+ Returns:
+ string, The directory list document as a JSON string.
+ """
+ descriptor = self.get_directory_list_doc(configs)
+ return json.dumps(descriptor, sort_keys=True, indent=2,
+ separators=(',', ': '))
diff --git a/third_party/endpoints/discovery_generator.py b/third_party/endpoints/discovery_generator.py
new file mode 100644
index 0000000..72c0533
--- /dev/null
+++ b/third_party/endpoints/discovery_generator.py
@@ -0,0 +1,1057 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library for converting service configs to discovery docs."""
+
+from __future__ import absolute_import
+
+import collections
+import json
+import logging
+import re
+
+from . import api_exceptions
+from . import message_parser
+from . import message_types
+from . import messages
+from . import remote
+from . import resource_container
+from . import util
+
+_logger = logging.getLogger(__name__)
+_PATH_VARIABLE_PATTERN = r'{([a-zA-Z_][a-zA-Z_.\d]*)}'
+
+_MULTICLASS_MISMATCH_ERROR_TEMPLATE = (
+ 'Attempting to implement service %s, version %s, with multiple '
+ 'classes that are not compatible. See docstring for api() for '
+ 'examples how to implement a multi-class API.')
+
+_INVALID_AUTH_ISSUER = 'No auth issuer named %s defined in this Endpoints API.'
+
+_API_KEY = 'api_key'
+_API_KEY_PARAM = 'key'
+
+CUSTOM_VARIANT_MAP = {
+ messages.Variant.DOUBLE: ('number', 'double'),
+ messages.Variant.FLOAT: ('number', 'float'),
+ messages.Variant.INT64: ('string', 'int64'),
+ messages.Variant.SINT64: ('string', 'int64'),
+ messages.Variant.UINT64: ('string', 'uint64'),
+ messages.Variant.INT32: ('integer', 'int32'),
+ messages.Variant.SINT32: ('integer', 'int32'),
+ messages.Variant.UINT32: ('integer', 'uint32'),
+ messages.Variant.BOOL: ('boolean', None),
+ messages.Variant.STRING: ('string', None),
+ messages.Variant.BYTES: ('string', 'byte'),
+ messages.Variant.ENUM: ('string', None),
+}
+
+
+class DiscoveryGenerator(object):
+ """Generates a discovery doc from a ProtoRPC service.
+
+ Example:
+
+ class HelloRequest(messages.Message):
+ my_name = messages.StringField(1, required=True)
+
+ class HelloResponse(messages.Message):
+ hello = messages.StringField(1, required=True)
+
+ class HelloService(remote.Service):
+
+ @remote.method(HelloRequest, HelloResponse)
+ def hello(self, request):
+ return HelloResponse(hello='Hello there, %s!' %
+ request.my_name)
+
+ api_config = DiscoveryGenerator().pretty_print_config_to_json(HelloService)
+
+ The resulting api_config will be a JSON discovery document describing the API
+ implemented by HelloService.
+ """
+
+ # Constants for categorizing a request method.
+ # __NO_BODY - Request without a request body, such as GET and DELETE methods.
+ # __HAS_BODY - Request (such as POST/PUT/PATCH) with info in the request body.
+ __NO_BODY = 1 # pylint: disable=invalid-name
+ __HAS_BODY = 2 # pylint: disable=invalid-name
+
+ def __init__(self, request=None):
+ self.__parser = message_parser.MessageTypeToJsonSchema()
+
+ # Maps method id to the request schema id.
+ self.__request_schema = {}
+
+ # Maps method id to the response schema id.
+ self.__response_schema = {}
+
+ # The ApiRequest that called this generator
+ self.__request = request
+
+ def _get_resource_path(self, method_id):
+ """Return the resource path for a method or an empty array if none."""
+ return method_id.split('.')[1:-1]
+
+ def _get_canonical_method_id(self, method_id):
+ return method_id.split('.')[-1]
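+  # For illustration: a method_id of 'myapi.items.list' (hypothetical) has
+  # resource path ['items'] and canonical method id 'list'.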
+
+ def __get_request_kind(self, method_info):
+ """Categorize the type of the request.
+
+ Args:
+ method_info: _MethodInfo, method information.
+
+ Returns:
+ The kind of request.
+ """
+ if method_info.http_method in ('GET', 'DELETE'):
+ return self.__NO_BODY
+ else:
+ return self.__HAS_BODY
+
+ def __field_to_subfields(self, field, cycle=tuple()):
+ """Fully describes data represented by field, including the nested case.
+
+ In the case that the field is not a message field, we have no fields nested
+ within a message definition, so we can simply return that field. However, in
+ the nested case, we can't simply describe the data with one field or even
+ with one chain of fields.
+
+ For example, if we have a message field
+
+ m_field = messages.MessageField(RefClass, 1)
+
+ which references a class with two fields:
+
+ class RefClass(messages.Message):
+ one = messages.StringField(1)
+ two = messages.IntegerField(2)
+
+ then we would need to include both one and two to represent all the
+ data contained.
+
+ Calling __field_to_subfields(m_field) would return:
+ [
+ [<MessageField "m_field">, <StringField "one">],
+ [<MessageField "m_field">, <StringField "two">],
+ ]
+
+ If the second field was instead a message field
+
+ class RefClass(messages.Message):
+ one = messages.StringField(1)
+ two = messages.MessageField(OtherRefClass, 2)
+
+ referencing another class with two fields
+
+ class OtherRefClass(messages.Message):
+ three = messages.BooleanField(1)
+ four = messages.FloatField(2)
+
+ then we would need to recurse one level deeper for two.
+
+ With this change, calling __field_to_subfields(m_field) would return:
+ [
+ [<MessageField "m_field">, <StringField "one">],
+ [<MessageField "m_field">, <StringField "two">, <StringField "three">],
+ [<MessageField "m_field">, <StringField "two">, <StringField "four">],
+ ]
+
+ Args:
+ field: An instance of a subclass of messages.Field.
+
+ Returns:
+ A list of lists, where each sublist is a list of fields.
+ """
+ # Termination condition
+ if not isinstance(field, messages.MessageField):
+ return [[field]]
+
+ if field.message_type.__name__ in cycle:
+ # We have a recursive cycle of messages. Call it quits.
+ return []
+
+ result = []
+ for subfield in sorted(field.message_type.all_fields(),
+ key=lambda f: f.number):
+ cycle = cycle + (field.message_type.__name__, )
+ subfield_results = self.__field_to_subfields(subfield, cycle=cycle)
+ for subfields_list in subfield_results:
+ subfields_list.insert(0, field)
+ result.append(subfields_list)
+ return result
+
+ def __field_to_parameter_type_and_format(self, field):
+ """Converts the field variant type into a tuple describing the parameter.
+
+ Args:
+ field: An instance of a subclass of messages.Field.
+
+ Returns:
+ A tuple with the type and format of the field, respectively.
+
+ Raises:
+ TypeError: if the field variant is a message variant.
+ """
+ # We use lowercase values for types (e.g. 'string' instead of 'STRING').
+ variant = field.variant
+ if variant == messages.Variant.MESSAGE:
+ raise TypeError('A message variant cannot be used in a parameter.')
+
+ # Note that the 64-bit integers are marked as strings -- this is to
+ # accommodate JavaScript, which would otherwise demote them to 32-bit
+ # integers.
+
+ return CUSTOM_VARIANT_MAP.get(variant) or (variant.name.lower(), None)
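+  # For illustration: an INT64 field maps to ('string', 'int64') per
+  # CUSTOM_VARIANT_MAP, while a plain STRING field maps to ('string', None).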
+
+ def __get_path_parameters(self, path):
+ """Parses path paremeters from a URI path and organizes them by parameter.
+
+ Some of the parameters may correspond to message fields, and so will be
+ represented as segments corresponding to each subfield; e.g. first.second if
+ the field "second" in the message field "first" is pulled from the path.
+
+ The resulting dictionary uses the first segments as keys and each key has as
+ value the list of full parameter values with first segment equal to the key.
+
+ If the match path parameter is null, that part of the path template is
+ ignored; this occurs if '{}' is used in a template.
+
+ Args:
+ path: String; a URI path, potentially with some parameters.
+
+ Returns:
+ A dictionary with strings as keys and list of strings as values.
+ """
+ path_parameters_by_segment = {}
+ for format_var_name in re.findall(_PATH_VARIABLE_PATTERN, path):
+ first_segment = format_var_name.split('.', 1)[0]
+ matches = path_parameters_by_segment.setdefault(first_segment, [])
+ matches.append(format_var_name)
+
+ return path_parameters_by_segment
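+  # For illustration: for the hypothetical path 'items/{x.y}/{x.z}/{other}',
+  # this returns {'x': ['x.y', 'x.z'], 'other': ['other']}.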
+
+ def __validate_simple_subfield(self, parameter, field, segment_list,
+ segment_index=0):
+ """Verifies that a proposed subfield actually exists and is a simple field.
+
+ Here, simple means it is not a MessageField (nested).
+
+ Args:
+ parameter: String; the '.' delimited name of the current field being
+ considered. This is relative to some root.
+ field: An instance of a subclass of messages.Field. Corresponds to the
+ previous segment in the path (previous relative to _segment_index),
+ since this field should be a message field with the current segment
+ as a field in the message class.
+ segment_list: The full list of segments from the '.' delimited subfield
+ being validated.
+ segment_index: Integer; used to hold the position of current segment so
+ that segment_list can be passed as a reference instead of having to
+ copy using segment_list[1:] at each step.
+
+ Raises:
+ TypeError: If the final subfield (indicated by _segment_index relative
+ to the length of segment_list) is a MessageField.
+ TypeError: If at any stage the lookup at a segment fails, e.g if a.b
+ exists but a.b.c does not exist. This can happen either if a.b is not
+ a message field or if a.b.c is not a property on the message class from
+ a.b.
+ """
+ if segment_index >= len(segment_list):
+ # In this case, the field is the final one, so should be simple type
+ if isinstance(field, messages.MessageField):
+ field_class = field.__class__.__name__
+ raise TypeError('Can\'t use messages in path. Subfield %r was '
+ 'included but is a %s.' % (parameter, field_class))
+ return
+
+ segment = segment_list[segment_index]
+ parameter += '.' + segment
+ try:
+ field = field.type.field_by_name(segment)
+ except (AttributeError, KeyError):
+ raise TypeError('Subfield %r from path does not exist.' % (parameter,))
+
+ self.__validate_simple_subfield(parameter, field, segment_list,
+ segment_index=segment_index + 1)
+
+ def __validate_path_parameters(self, field, path_parameters):
+ """Verifies that all path parameters correspond to an existing subfield.
+
+ Args:
+ field: An instance of a subclass of messages.Field. Should be the root
+ level property name in each path parameter in path_parameters. For
+ example, if the field is called 'foo', then each path parameter should
+ begin with 'foo.'.
+ path_parameters: A list of Strings representing URI parameter variables.
+
+ Raises:
+ TypeError: If one of the path parameters does not start with field.name.
+ """
+ for param in path_parameters:
+ segment_list = param.split('.')
+ if segment_list[0] != field.name:
+ raise TypeError('Subfield %r can\'t come from field %r.'
+ % (param, field.name))
+ self.__validate_simple_subfield(field.name, field, segment_list[1:])
+
+ def __parameter_default(self, field):
+ """Returns default value of field if it has one.
+
+ Args:
+ field: A simple field.
+
+ Returns:
+ The default value of the field, if any exists, with the exception of an
+ enum field, which will have its value cast to a string.
+ """
+ if field.default:
+ if isinstance(field, messages.EnumField):
+ return field.default.name
+ elif isinstance(field, messages.BooleanField):
+ # The Python standard representation of a boolean value causes problems
+ # when generating client code.
+ return 'true' if field.default else 'false'
+ else:
+ return str(field.default)
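+  # For illustration: a BooleanField with default=True yields 'true', an
+  # EnumField default yields its name, and an IntegerField default of 7
+  # yields '7'. Falsy defaults (e.g. 0 or False) fail the 'if field.default:'
+  # check and yield None.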
+
+ def __parameter_enum(self, param):
+ """Returns enum descriptor of a parameter if it is an enum.
+
+ An enum descriptor is a list of keys.
+
+ Args:
+ param: A simple field.
+
+ Returns:
+ The enum descriptor for the field, if it's an enum descriptor, else
+ returns None.
+ """
+ if isinstance(param, messages.EnumField):
+ return [enum_entry[0] for enum_entry in sorted(
+ param.type.to_dict().items(), key=lambda v: v[1])]
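+  # For illustration: an EnumField over a hypothetical enum with RED = 1 and
+  # GREEN = 2 yields ['RED', 'GREEN'], i.e. names ordered by enum number.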
+
+ def __parameter_descriptor(self, param):
+ """Creates descriptor for a parameter.
+
+ Args:
+ param: The parameter to be described.
+
+ Returns:
+ Dictionary containing a descriptor for the parameter.
+ """
+ descriptor = {}
+
+ param_type, param_format = self.__field_to_parameter_type_and_format(param)
+
+ # Required
+ if param.required:
+ descriptor['required'] = True
+
+ # Type
+ descriptor['type'] = param_type
+
+ # Format (optional)
+ if param_format:
+ descriptor['format'] = param_format
+
+ # Default
+ default = self.__parameter_default(param)
+ if default is not None:
+ descriptor['default'] = default
+
+ # Repeated
+ if param.repeated:
+ descriptor['repeated'] = True
+
+ # Enum
+ # Note that enumDescriptions are not currently supported using the
+ # framework's annotations, so just insert blank strings.
+ enum_descriptor = self.__parameter_enum(param)
+ if enum_descriptor is not None:
+ descriptor['enum'] = enum_descriptor
+ descriptor['enumDescriptions'] = [''] * len(enum_descriptor)
+
+ return descriptor
+
+ def __add_parameter(self, param, path_parameters, params):
+ """Adds all parameters in a field to a method parameters descriptor.
+
+ Simple fields will only have one parameter, but a message field 'x' that
+ corresponds to a message class with fields 'y' and 'z' will result in
+ parameters 'x.y' and 'x.z', for example. The mapping from field to
+ parameters is mostly handled by __field_to_subfields.
+
+ Args:
+ param: Parameter to be added to the descriptor.
+ path_parameters: A list of parameters matched from a path for this field.
+ For example for the hypothetical 'x' from above if the path was
+ '/a/{x.z}/b/{other}' then this list would contain only the element
+ 'x.z' since 'other' does not match to this field.
+ params: List of parameters. Each parameter in the field.
+ """
+ # If this is a simple field, just build the descriptor and append it.
+ # Otherwise, build a schema and assign it to this descriptor
+ descriptor = None
+ if not isinstance(param, messages.MessageField):
+ name = param.name
+ descriptor = self.__parameter_descriptor(param)
+ descriptor['location'] = 'path' if name in path_parameters else 'query'
+
+ if descriptor:
+ params[name] = descriptor
+ else:
+ for subfield_list in self.__field_to_subfields(param):
+ name = '.'.join(subfield.name for subfield in subfield_list)
+ descriptor = self.__parameter_descriptor(subfield_list[-1])
+ if name in path_parameters:
+ descriptor['required'] = True
+ descriptor['location'] = 'path'
+ else:
+ descriptor.pop('required', None)
+ descriptor['location'] = 'query'
+
+ if descriptor:
+ params[name] = descriptor
+
+
+ def __params_descriptor_without_container(self, message_type,
+ request_kind, path):
+ """Describe parameters of a method which does not use a ResourceContainer.
+
+ Makes sure that the path parameters are included in the message definition
+ and adds any required fields and URL query parameters.
+
+    This method exists to preserve backwards compatibility and will be removed
+    in a future release.
+
+ Args:
+ message_type: messages.Message class, Message with parameters to describe.
+ request_kind: The type of request being made.
+ path: string, HTTP path to method.
+
+ Returns:
+      A dict mapping parameter names to their descriptors.
+ """
+ params = {}
+
+ path_parameter_dict = self.__get_path_parameters(path)
+ for field in sorted(message_type.all_fields(), key=lambda f: f.number):
+ matched_path_parameters = path_parameter_dict.get(field.name, [])
+ self.__validate_path_parameters(field, matched_path_parameters)
+ if matched_path_parameters or request_kind == self.__NO_BODY:
+ self.__add_parameter(field, matched_path_parameters, params)
+
+ return params
+
+ def __params_descriptor(self, message_type, request_kind, path, method_id,
+ request_params_class):
+ """Describe the parameters of a method.
+
+ If the message_type is not a ResourceContainer, will fall back to
+ __params_descriptor_without_container (which will eventually be deprecated).
+
+ If the message type is a ResourceContainer, then all path/query parameters
+ will come from the ResourceContainer. This method will also make sure all
+ path parameters are covered by the message fields.
+
+ Args:
+ message_type: messages.Message or ResourceContainer class, Message with
+ parameters to describe.
+ request_kind: The type of request being made.
+ path: string, HTTP path to method.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+ request_params_class: messages.Message, the original params message when
+ using a ResourceContainer. Otherwise, this should be null.
+
+ Returns:
+      A dict mapping parameter names to their descriptors.
+ """
+ path_parameter_dict = self.__get_path_parameters(path)
+
+ if request_params_class is None:
+ if path_parameter_dict:
+ _logger.warning('Method %s specifies path parameters but you are not '
+ 'using a ResourceContainer; instead, you are using %r. '
+ 'This will fail in future releases; please switch to '
+ 'using ResourceContainer as soon as possible.',
+ method_id, type(message_type))
+ return self.__params_descriptor_without_container(
+ message_type, request_kind, path)
+
+ # From here, we can assume message_type is from a ResourceContainer.
+ message_type = request_params_class
+
+ params = {}
+
+ # Make sure all path parameters are covered.
+ for field_name, matched_path_parameters in path_parameter_dict.items():
+ field = message_type.field_by_name(field_name)
+ self.__validate_path_parameters(field, matched_path_parameters)
+
+ # Add all fields, sort by field.number since we have parameterOrder.
+ for field in sorted(message_type.all_fields(), key=lambda f: f.number):
+ matched_path_parameters = path_parameter_dict.get(field.name, [])
+ self.__add_parameter(field, matched_path_parameters, params)
+
+ return params
+
+ def __params_order_descriptor(self, message_type, path, is_params_class=False):
+ """Describe the order of path parameters.
+
+ Args:
+ message_type: messages.Message class, Message with parameters to describe.
+ path: string, HTTP path to method.
+ is_params_class: boolean, Whether the message represents URL parameters.
+
+ Returns:
+ Descriptor list for the parameter order.
+ """
+ path_params = []
+ query_params = []
+ path_parameter_dict = self.__get_path_parameters(path)
+
+ for field in sorted(message_type.all_fields(), key=lambda f: f.number):
+ matched_path_parameters = path_parameter_dict.get(field.name, [])
+ if not isinstance(field, messages.MessageField):
+ name = field.name
+ if name in matched_path_parameters:
+ path_params.append(name)
+ elif is_params_class and field.required:
+ query_params.append(name)
+ else:
+ for subfield_list in self.__field_to_subfields(field):
+ name = '.'.join(subfield.name for subfield in subfield_list)
+ if name in matched_path_parameters:
+ path_params.append(name)
+ elif is_params_class and field.required:
+ query_params.append(name)
+
+ return path_params + sorted(query_params)
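+  # For illustration (hypothetical fields): with path 'items/{id}' and
+  # required params-class fields 'limit' and 'filter', the result is
+  # ['id', 'filter', 'limit']: path parameters in field-number order,
+  # then required query parameters sorted by name.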
+
+ def __schemas_descriptor(self):
+ """Describes the schemas section of the discovery document.
+
+ Returns:
+ Dictionary describing the schemas of the document.
+ """
+    # Filter out any keys that aren't 'properties', 'type', 'id', or
+    # 'description'
+ result = {}
+ for schema_key, schema_value in self.__parser.schemas().items():
+ field_keys = schema_value.keys()
+ key_result = {}
+
+ # Some special processing for the properties value
+ if 'properties' in field_keys:
+ key_result['properties'] = schema_value['properties'].copy()
+ # Add in enumDescriptions for any enum properties and strip out
+      # the required tag for consistency with the Java framework
+ for prop_key, prop_value in schema_value['properties'].items():
+ if 'enum' in prop_value:
+ num_enums = len(prop_value['enum'])
+ key_result['properties'][prop_key]['enumDescriptions'] = (
+ [''] * num_enums)
+ elif 'default' in prop_value:
+ # stringify default values
+ if prop_value.get('type') == 'boolean':
+ prop_value['default'] = 'true' if prop_value['default'] else 'false'
+ else:
+ prop_value['default'] = str(prop_value['default'])
+ key_result['properties'][prop_key].pop('required', None)
+
+ for key in ('type', 'id', 'description'):
+ if key in field_keys:
+ key_result[key] = schema_value[key]
+
+ if key_result:
+ result[schema_key] = key_result
+
+ # Add 'type': 'object' to all object properties
+ for schema_value in result.values():
+ for field_value in schema_value.values():
+ if isinstance(field_value, dict):
+ if '$ref' in field_value:
+ field_value['type'] = 'object'
+
+ return result
+
+ def __request_message_descriptor(self, request_kind, message_type, method_id,
+ request_body_class):
+ """Describes the parameters and body of the request.
+
+ Args:
+ request_kind: The type of request being made.
+ message_type: messages.Message or ResourceContainer class. The message to
+ describe.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+ request_body_class: messages.Message of the original body when using
+ a ResourceContainer. Otherwise, this should be null.
+
+ Returns:
+ Dictionary describing the request.
+
+ Raises:
+ ValueError: if the method path and request required fields do not match
+ """
+ if request_body_class:
+ message_type = request_body_class
+
+ if (request_kind != self.__NO_BODY and
+ message_type != message_types.VoidMessage()):
+ self.__request_schema[method_id] = self.__parser.add_message(
+ message_type.__class__)
+ return {
+ '$ref': self.__request_schema[method_id],
+ 'parameterName': 'resource',
+ }
+
+ def __response_message_descriptor(self, message_type, method_id):
+ """Describes the response.
+
+ Args:
+ message_type: messages.Message class, The message to describe.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+
+ Returns:
+ Dictionary describing the response.
+ """
+ if message_type != message_types.VoidMessage():
+ self.__parser.add_message(message_type.__class__)
+ self.__response_schema[method_id] = self.__parser.ref_for_message_type(
+ message_type.__class__)
+ return {'$ref': self.__response_schema[method_id]}
+ else:
+ return None
+
+ def __method_descriptor(self, service, method_info,
+ protorpc_method_info):
+ """Describes a method.
+
+ Args:
+ service: endpoints.Service, Implementation of the API as a service.
+ method_info: _MethodInfo, Configuration for the method.
+ protorpc_method_info: protorpc.remote._RemoteMethodInfo, ProtoRPC
+ description of the method.
+
+ Returns:
+ Dictionary describing the method.
+ """
+ descriptor = {}
+
+ request_message_type = (resource_container.ResourceContainer.
+ get_request_message(protorpc_method_info.remote))
+ request_kind = self.__get_request_kind(method_info)
+ remote_method = protorpc_method_info.remote
+
+ method_id = method_info.method_id(service.api_info)
+
+ path = method_info.get_path(service.api_info)
+
+ description = protorpc_method_info.remote.method.__doc__
+
+ descriptor['id'] = method_id
+ descriptor['path'] = path
+ descriptor['httpMethod'] = method_info.http_method
+
+ if description:
+ descriptor['description'] = description
+
+ descriptor['scopes'] = [
+ 'https://www.googleapis.com/auth/userinfo.email'
+ ]
+
+ parameters = self.__params_descriptor(
+ request_message_type, request_kind, path, method_id,
+ method_info.request_params_class)
+ if parameters:
+ descriptor['parameters'] = parameters
+
+ if method_info.request_params_class:
+ parameter_order = self.__params_order_descriptor(
+ method_info.request_params_class, path, is_params_class=True)
+ else:
+ parameter_order = self.__params_order_descriptor(
+ request_message_type, path, is_params_class=False)
+ if parameter_order:
+ descriptor['parameterOrder'] = parameter_order
+
+ request_descriptor = self.__request_message_descriptor(
+ request_kind, request_message_type, method_id,
+ method_info.request_body_class)
+ if request_descriptor is not None:
+ descriptor['request'] = request_descriptor
+
+ response_descriptor = self.__response_message_descriptor(
+ remote_method.response_type(), method_info.method_id(service.api_info))
+ if response_descriptor is not None:
+ descriptor['response'] = response_descriptor
+
+ return descriptor
+
+ def __resource_descriptor(self, resource_path, methods):
+ """Describes a resource.
+
+ Args:
+ resource_path: string, the path of the resource (e.g., 'entries.items')
+ methods: list of tuples of type
+ (endpoints.Service, protorpc.remote._RemoteMethodInfo), the methods
+ that serve this resource.
+
+ Returns:
+ Dictionary describing the resource.
+ """
+ descriptor = {}
+ method_map = {}
+ sub_resource_index = collections.defaultdict(list)
+ sub_resource_map = {}
+
+ resource_path_tokens = resource_path.split('.')
+ for service, protorpc_meth_info in methods:
+ method_info = getattr(protorpc_meth_info, 'method_info', None)
+ path = method_info.get_path(service.api_info)
+ method_id = method_info.method_id(service.api_info)
+ canonical_method_id = self._get_canonical_method_id(method_id)
+
+ current_resource_path = self._get_resource_path(method_id)
+
+ # Sanity-check that this method belongs to the resource path
+ if (current_resource_path[:len(resource_path_tokens)] !=
+ resource_path_tokens):
+ raise api_exceptions.ToolError(
+ 'Internal consistency error in resource path {0}'.format(
+ current_resource_path))
+
+ # Remove the portion of the current method's resource path that's already
+ # part of the resource path at this level.
+ effective_resource_path = current_resource_path[
+ len(resource_path_tokens):]
+
+ # If this method is part of a sub-resource, note it and skip it for now
+ if effective_resource_path:
+ sub_resource_name = effective_resource_path[0]
+ new_resource_path = '.'.join([resource_path, sub_resource_name])
+ sub_resource_index[new_resource_path].append(
+ (service, protorpc_meth_info))
+ else:
+ method_map[canonical_method_id] = self.__method_descriptor(
+ service, method_info, protorpc_meth_info)
+
+ # Process any sub-resources
+ for sub_resource, sub_resource_methods in sub_resource_index.items():
+ sub_resource_name = sub_resource.split('.')[-1]
+ sub_resource_map[sub_resource_name] = self.__resource_descriptor(
+ sub_resource, sub_resource_methods)
+
+ if method_map:
+ descriptor['methods'] = method_map
+
+ if sub_resource_map:
+ descriptor['resources'] = sub_resource_map
+
+ return descriptor
+
+ def __standard_parameters_descriptor(self):
+ return {
+ 'alt': {
+ 'type': 'string',
+ 'description': 'Data format for the response.',
+ 'default': 'json',
+ 'enum': ['json'],
+ 'enumDescriptions': [
+ 'Responses with Content-Type of application/json'
+ ],
+ 'location': 'query',
+ },
+ 'fields': {
+ 'type': 'string',
+ 'description': 'Selector specifying which fields to include in a '
+ 'partial response.',
+ 'location': 'query',
+ },
+ 'key': {
+ 'type': 'string',
+ 'description': 'API key. Your API key identifies your project and '
+ 'provides you with API access, quota, and reports. '
+ 'Required unless you provide an OAuth 2.0 token.',
+ 'location': 'query',
+ },
+ 'oauth_token': {
+ 'type': 'string',
+ 'description': 'OAuth 2.0 token for the current user.',
+ 'location': 'query',
+ },
+ 'prettyPrint': {
+ 'type': 'boolean',
+ 'description': 'Returns response with indentations and line '
+ 'breaks.',
+ 'default': 'true',
+ 'location': 'query',
+ },
+ 'quotaUser': {
+ 'type': 'string',
+ 'description': 'Available to use for quota purposes for '
+ 'server-side applications. Can be any arbitrary '
+ 'string assigned to a user, but should not exceed '
+ '40 characters. Overrides userIp if both are '
+ 'provided.',
+ 'location': 'query',
+ },
+ 'userIp': {
+ 'type': 'string',
+ 'description': 'IP address of the site where the request '
+ 'originates. Use this if you want to enforce '
+ 'per-user limits.',
+ 'location': 'query',
+ },
+ }
+
+ def __standard_auth_descriptor(self, services):
+ scopes = {}
+ for service in services:
+ for scope in service.api_info.scope_objs:
+ scopes[scope.scope] = {'description': scope.description}
+ return {
+ 'oauth2': {
+ 'scopes': scopes
+ }
+ }
+
+ def __get_merged_api_info(self, services):
+ """Builds a description of an API.
+
+ Args:
+ services: List of protorpc.remote.Service instances implementing an
+ api/version.
+
+ Returns:
+ The _ApiInfo object to use for the API that the given services implement.
+ """
+ base_paths = sorted(set(s.api_info.base_path for s in services))
+ if len(base_paths) != 1:
+ raise api_exceptions.ApiConfigurationError(
+ 'Multiple base_paths found: {!r}'.format(base_paths))
+ names_versions = sorted(set(
+ (s.api_info.name, s.api_info.api_version) for s in services))
+ if len(names_versions) != 1:
+ raise api_exceptions.ApiConfigurationError(
+ 'Multiple apis/versions found: {!r}'.format(names_versions))
+ return services[0].api_info
+
+ def __discovery_doc_descriptor(self, services, hostname=None):
+ """Builds a discovery doc for an API.
+
+ Args:
+ services: List of protorpc.remote.Service instances implementing an
+ api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ A dictionary that can be deserialized into JSON in discovery doc format.
+
+ Raises:
+ ApiConfigurationError: If there's something wrong with the API
+ configuration, such as a multiclass API decorated with different API
+ descriptors (see the docstring for api()), or a repeated method
+ signature.
+ """
+ merged_api_info = self.__get_merged_api_info(services)
+ descriptor = self.get_descriptor_defaults(merged_api_info,
+ hostname=hostname)
+
+ description = merged_api_info.description
+ if not description and len(services) == 1:
+ description = services[0].__doc__
+ if description:
+ descriptor['description'] = description
+
+ descriptor['parameters'] = self.__standard_parameters_descriptor()
+ descriptor['auth'] = self.__standard_auth_descriptor(services)
+
+ # Add namespace information, if provided
+ if merged_api_info.namespace:
+ descriptor['ownerDomain'] = merged_api_info.namespace.owner_domain
+ descriptor['ownerName'] = merged_api_info.namespace.owner_name
+ descriptor['packagePath'] = merged_api_info.namespace.package_path or ''
+ else:
+ if merged_api_info.owner_domain is not None:
+ descriptor['ownerDomain'] = merged_api_info.owner_domain
+ if merged_api_info.owner_name is not None:
+ descriptor['ownerName'] = merged_api_info.owner_name
+ if merged_api_info.package_path is not None:
+ descriptor['packagePath'] = merged_api_info.package_path
+
+ method_map = {}
+ method_collision_tracker = {}
+ rest_collision_tracker = {}
+
+ resource_index = collections.defaultdict(list)
+ resource_map = {}
+
+ # For the first pass, only process top-level methods (that is, those methods
+ # that are unattached to a resource).
+ for service in services:
+ remote_methods = service.all_remote_methods()
+
+ for protorpc_meth_name, protorpc_meth_info in remote_methods.items():
+ method_info = getattr(protorpc_meth_info, 'method_info', None)
+ # Skip methods that are not decorated with @method
+ if method_info is None:
+ continue
+ path = method_info.get_path(service.api_info)
+ method_id = method_info.method_id(service.api_info)
+ canonical_method_id = self._get_canonical_method_id(method_id)
+ resource_path = self._get_resource_path(method_id)
+
+ # Make sure the same method name isn't repeated.
+ if method_id in method_collision_tracker:
+ raise api_exceptions.ApiConfigurationError(
+ 'Method %s used multiple times, in classes %s and %s' %
+ (method_id, method_collision_tracker[method_id],
+ service.__name__))
+ else:
+ method_collision_tracker[method_id] = service.__name__
+
+ # Make sure the same HTTP method & path aren't repeated.
+ rest_identifier = (method_info.http_method, path)
+ if rest_identifier in rest_collision_tracker:
+ raise api_exceptions.ApiConfigurationError(
+ '%s path "%s" used multiple times, in classes %s and %s' %
+ (method_info.http_method, path,
+ rest_collision_tracker[rest_identifier],
+ service.__name__))
+ else:
+ rest_collision_tracker[rest_identifier] = service.__name__
+
+ # If this method is part of a resource, note it and skip it for now
+ if resource_path:
+ resource_index[resource_path[0]].append((service, protorpc_meth_info))
+ else:
+ method_map[canonical_method_id] = self.__method_descriptor(
+ service, method_info, protorpc_meth_info)
+
+ # Do another pass for methods attached to resources
+ for resource, resource_methods in resource_index.items():
+ resource_map[resource] = self.__resource_descriptor(resource,
+ resource_methods)
+
+ if method_map:
+ descriptor['methods'] = method_map
+
+ if resource_map:
+ descriptor['resources'] = resource_map
+
+ # Add schemas, if any
+ schemas = self.__schemas_descriptor()
+ if schemas:
+ descriptor['schemas'] = schemas
+
+ return descriptor
+
+ def get_descriptor_defaults(self, api_info, hostname=None):
+ """Gets a default configuration for a service.
+
+ Args:
+ api_info: _ApiInfo object for this service.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ A dictionary with the default configuration.
+ """
+ if self.__request:
+ hostname = self.__request.reconstruct_hostname()
+ protocol = self.__request.url_scheme
+ else:
+ hostname = (hostname or util.get_app_hostname() or
+ api_info.hostname)
+ protocol = 'http' if ((hostname and hostname.startswith('localhost')) or
+ util.is_running_on_devserver()) else 'https'
+ full_base_path = '{0}{1}/{2}/'.format(api_info.base_path,
+ api_info.name,
+ api_info.path_version)
+ base_url = '{0}://{1}{2}'.format(protocol, hostname, full_base_path)
+ root_url = '{0}://{1}{2}'.format(protocol, hostname, api_info.base_path)
+ defaults = {
+ 'kind': 'discovery#restDescription',
+ 'discoveryVersion': 'v1',
+ 'id': '{0}:{1}'.format(api_info.name, api_info.path_version),
+ 'name': api_info.name,
+ 'version': api_info.api_version,
+ 'icons': {
+ 'x16': 'https://www.gstatic.com/images/branding/product/1x/googleg_16dp.png',
+ 'x32': 'https://www.gstatic.com/images/branding/product/1x/googleg_32dp.png'
+ },
+ 'protocol': 'rest',
+ 'servicePath': '{0}/{1}/'.format(api_info.name, api_info.path_version),
+ 'batchPath': 'batch',
+ 'basePath': full_base_path,
+ 'rootUrl': root_url,
+ 'baseUrl': base_url,
+ 'description': 'This is an API',
+ }
+ if api_info.description:
+ defaults['description'] = api_info.description
+ if api_info.title:
+ defaults['title'] = api_info.title
+ if api_info.documentation:
+ defaults['documentationLink'] = api_info.documentation
+ if api_info.canonical_name:
+ defaults['canonicalName'] = api_info.canonical_name
+
+ return defaults
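+  # For illustration (hypothetical values): name='greetings',
+  # path_version='v1', base_path='/_ah/api/' and hostname
+  # 'example.appspot.com' yield basePath '/_ah/api/greetings/v1/', rootUrl
+  # 'https://example.appspot.com/_ah/api/', and baseUrl
+  # 'https://example.appspot.com/_ah/api/greetings/v1/'.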
+
+ def get_discovery_doc(self, services, hostname=None):
+ """JSON dict description of a protorpc.remote.Service in discovery format.
+
+ Args:
+ services: Either a single protorpc.remote.Service or a list of them
+ that implements an api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ dict, The discovery document as a JSON dict.
+ """
+
+ if not isinstance(services, (tuple, list)):
+ services = [services]
+
+ # The type of a class that inherits from remote.Service is actually
+ # remote._ServiceClass, thanks to metaclass strangeness.
+ # pylint: disable=protected-access
+ util.check_list_type(services, remote._ServiceClass, 'services',
+ allow_none=False)
+
+ return self.__discovery_doc_descriptor(services, hostname=hostname)
+
+ def pretty_print_config_to_json(self, services, hostname=None):
+ """JSON string description of a protorpc.remote.Service in a discovery doc.
+
+ Args:
+ services: Either a single protorpc.remote.Service or a list of them
+ that implements an api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+
+ Returns:
+ string, The discovery doc descriptor document as a JSON string.
+ """
+ descriptor = self.get_discovery_doc(services, hostname)
+ return json.dumps(descriptor, sort_keys=True, indent=2,
+ separators=(',', ': '))
diff --git a/third_party/endpoints/discovery_service.py b/third_party/endpoints/discovery_service.py
new file mode 100644
index 0000000..51409a5
--- /dev/null
+++ b/third_party/endpoints/discovery_service.py
@@ -0,0 +1,220 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Hook into the live Discovery service and get API configuration info."""
+
+# pylint: disable=g-bad-name
+from __future__ import absolute_import
+
+import json
+import logging
+
+from . import api_config
+from . import directory_list_generator
+from . import discovery_generator
+from . import util
+
+_logger = logging.getLogger(__name__)
+
+
+class DiscoveryService(object):
+ """Implements the local discovery service.
+
+ This has a static minimal version of the discoverable part of the
+ discovery .api file.
+
+ It only handles returning the discovery doc and directory, and ignores
+ directory parameters to filter the results.
+
+ The discovery docs/directory are created by calling a Cloud Endpoints
+ discovery service to generate the discovery docs/directory from an .api
+ file/set of .api files.
+ """
+
+ _GET_REST_API = 'apisdev.getRest'
+ _GET_RPC_API = 'apisdev.getRpc'
+ _LIST_API = 'apisdev.list'
+ API_CONFIG = {
+ 'name': 'discovery',
+ 'version': 'v1',
+ 'api_version': 'v1',
+ 'path_version': 'v1',
+ 'methods': {
+ 'discovery.apis.getRest': {
+ 'path': 'apis/{api}/{version}/rest',
+ 'httpMethod': 'GET',
+ 'rosyMethod': _GET_REST_API,
+ },
+ 'discovery.apis.getRpc': {
+ 'path': 'apis/{api}/{version}/rpc',
+ 'httpMethod': 'GET',
+ 'rosyMethod': _GET_RPC_API,
+ },
+ 'discovery.apis.list': {
+ 'path': 'apis',
+ 'httpMethod': 'GET',
+ 'rosyMethod': _LIST_API,
+ },
+ }
+ }
+
+ def __init__(self, config_manager, backend):
+ """Initializes an instance of the DiscoveryService.
+
+ Args:
+ config_manager: An instance of ApiConfigManager.
+ backend: An _ApiServer instance for API config generation.
+ """
+ self._config_manager = config_manager
+ self._backend = backend
+
+ def _send_success_response(self, response, start_response):
+ """Sends an HTTP 200 json success response.
+
+ This calls start_response and returns the response body.
+
+ Args:
+ response: A string containing the response body to return.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string, the response body.
+ """
+ headers = [('Content-Type', 'application/json; charset=UTF-8')]
+ return util.send_wsgi_response('200 OK', headers, response, start_response)
+
+ def _get_rest_doc(self, request, start_response):
+ """Sends back HTTP response with API directory.
+
+ This calls start_response and returns the response body. It will return
+ the discovery doc for the requested api/version.
+
+ Args:
+ request: An ApiRequest, the transformed request sent to the Discovery API.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string, the response body.
+ """
+ api = request.body_json['api']
+ version = request.body_json['version']
+
+ generator = discovery_generator.DiscoveryGenerator(request=request)
+ services = [s for s in self._backend.api_services if
+ s.api_info.name == api and s.api_info.api_version == version]
+ doc = generator.pretty_print_config_to_json(services)
+ if not doc:
+ error_msg = ('Failed to convert .api to discovery doc for '
+ 'version %s of api %s') % (version, api)
+ _logger.error('%s', error_msg)
+ return util.send_wsgi_error_response(error_msg, start_response)
+ return self._send_success_response(doc, start_response)
+
+ def _generate_api_config_with_root(self, request):
+ """Generate an API config with a specific root hostname.
+
+ This uses the backend object and the ApiConfigGenerator to create an API
+ config specific to the hostname of the incoming request. This allows for
+ flexible API configs for non-standard environments, such as localhost.
+
+ Args:
+ request: An ApiRequest, the transformed request sent to the Discovery API.
+
+ Returns:
+ A string representation of the generated API config.
+ """
+ actual_root = self._get_actual_root(request)
+ generator = api_config.ApiConfigGenerator()
+ api = request.body_json['api']
+ version = request.body_json['version']
+ lookup_key = (api, version)
+
+ service_factories = self._backend.api_name_version_map.get(lookup_key)
+ if not service_factories:
+ return None
+
+ service_classes = [service_factory.service_class
+ for service_factory in service_factories]
+ config_dict = generator.get_config_dict(
+ service_classes, hostname=actual_root)
+
+ # Save to cache
+ for config in config_dict.get('items', []):
+ lookup_key_with_root = (
+ config.get('name', ''), config.get('version', ''), actual_root)
+ self._config_manager.save_config(lookup_key_with_root, config)
+
+ return config_dict
+
+ def _get_actual_root(self, request):
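+    """Return the host (plus non-default port, if any) for this request.
+
+    For example, a devserver request with server 'localhost', scheme 'http'
+    and port '8080' yields 'localhost:8080'; the default ports (80 and 443)
+    are omitted.
+    """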
+ url = request.server
+
+ # Append the port if not the default
+ if ((request.url_scheme == 'https' and request.port != '443') or
+ (request.url_scheme != 'https' and request.port != '80')):
+ url += ':%s' % request.port
+
+ return url
+
+ def _list(self, request, start_response):
+ """Sends HTTP response containing the API directory.
+
+ This calls start_response and returns the response body.
+
+ Args:
+ request: An ApiRequest, the transformed request sent to the Discovery API.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string containing the response body.
+ """
+ configs = []
+ generator = directory_list_generator.DirectoryListGenerator(request)
+ for config in self._config_manager.configs.values():
+ if config != self.API_CONFIG:
+ configs.append(config)
+ directory = generator.pretty_print_config_to_json(configs)
+ if not directory:
+ _logger.error('Failed to get API directory')
+ # By returning a 404, code explorer still works if you select the
+ # API in the URL
+ return util.send_wsgi_not_found_response(start_response)
+ return self._send_success_response(directory, start_response)
+
+ def handle_discovery_request(self, path, request, start_response):
+ """Returns the result of a discovery service request.
+
+ This calls start_response and returns the response body.
+
+ Args:
+ path: A string containing the API path (the portion of the path
+ after /_ah/api/).
+ request: An ApiRequest, the transformed request sent to the Discovery API.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ The response body. Or returns False if the request wasn't handled by
+ DiscoveryService.
+ """
+ if path == self._GET_REST_API:
+ return self._get_rest_doc(request, start_response)
+ elif path == self._GET_RPC_API:
+ error_msg = ('RPC format documents are no longer supported with the '
+ 'Endpoints Framework for Python. Please use the REST '
+ 'format.')
+ _logger.error('%s', error_msg)
+ return util.send_wsgi_error_response(error_msg, start_response)
+ elif path == self._LIST_API:
+ return self._list(request, start_response)
+ return False
diff --git a/third_party/endpoints/endpoints_dispatcher.py b/third_party/endpoints/endpoints_dispatcher.py
new file mode 100644
index 0000000..83e7acb
--- /dev/null
+++ b/third_party/endpoints/endpoints_dispatcher.py
@@ -0,0 +1,718 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Dispatcher middleware for Cloud Endpoints API server.
+
+This middleware does simple transforms on requests that come into the base path
+and then re-dispatches them to the main backend. It does not do any
+authentication, quota checking, DoS checking, etc.
+
+In addition, the middleware loads API configs prior to each call, in case the
+configuration has changed.
+"""
+
+# pylint: disable=g-bad-name
+from __future__ import absolute_import
+
+from six.moves import cStringIO
+from six.moves import http_client
+import json
+import logging
+import re
+import six
+from six.moves import urllib
+import wsgiref.headers
+
+import pkg_resources
+
+from . import api_config_manager
+from . import api_exceptions
+from . import api_request
+from . import discovery_service
+from . import errors
+from . import parameter_converter
+from . import util
+
+_logger = logging.getLogger(__name__)
+
+
+__all__ = ['EndpointsDispatcherMiddleware']
+
+_SERVER_SOURCE_IP = '0.2.0.3'
+
+# Internal constants
+_CORS_HEADER_ORIGIN = 'Origin'
+_CORS_HEADER_REQUEST_METHOD = 'Access-Control-Request-Method'
+_CORS_HEADER_REQUEST_HEADERS = 'Access-Control-Request-Headers'
+_CORS_HEADER_ALLOW_ORIGIN = 'Access-Control-Allow-Origin'
+_CORS_HEADER_ALLOW_METHODS = 'Access-Control-Allow-Methods'
+_CORS_HEADER_ALLOW_HEADERS = 'Access-Control-Allow-Headers'
+_CORS_HEADER_ALLOW_CREDS = 'Access-Control-Allow-Credentials'
+_CORS_HEADER_EXPOSE_HEADERS = 'Access-Control-Expose-Headers'
+_CORS_ALLOWED_METHODS = frozenset(('DELETE', 'GET', 'PATCH', 'POST', 'PUT'))
+_CORS_EXPOSED_HEADERS = frozenset(
+ ('Content-Encoding', 'Content-Length', 'Date', 'ETag', 'Server')
+)
+
+PROXY_HTML = pkg_resources.resource_string('endpoints', 'proxy.html')
+PROXY_PATH = 'static/proxy.html'
+
+
+class EndpointsDispatcherMiddleware(object):
+ """Dispatcher that handles requests to the built-in apiserver handlers."""
+
+ _API_EXPLORER_URL = 'https://apis-explorer.appspot.com/apis-explorer/?base='
+
+ def __init__(self, backend_wsgi_app, config_manager=None):
+ """Constructor for EndpointsDispatcherMiddleware.
+
+ Args:
+ backend_wsgi_app: A WSGI server that serves the app's endpoints.
+ config_manager: An ApiConfigManager instance that allows a caller to
+ set up an existing configuration for testing.
+ """
+ if config_manager is None:
+ config_manager = api_config_manager.ApiConfigManager()
+ self.config_manager = config_manager
+
+ self._backend = backend_wsgi_app
+ self._dispatchers = []
+ for base_path in self._backend.base_paths:
+ self._add_dispatcher('%sexplorer/?$' % base_path,
+ self.handle_api_explorer_request)
+ self._add_dispatcher('%sstatic/.*$' % base_path,
+ self.handle_api_static_request)
+
+ # Get API configuration so we know how to call the backend.
+ api_config_response = self.get_api_configs()
+ if api_config_response:
+ self.config_manager.process_api_config_response(api_config_response)
+ else:
+ raise api_exceptions.ApiConfigurationError('get_api_configs() returned no configs')
+
+ def _add_dispatcher(self, path_regex, dispatch_function):
+ """Add a request path and dispatch handler.
+
+ Args:
+ path_regex: A string regex, the path to match against incoming requests.
+ dispatch_function: The function to call for these requests. The function
+ should take (request, start_response) as arguments and
+ return the contents of the response body.
+ """
+ self._dispatchers.append((re.compile(path_regex), dispatch_function))
+
+ def _get_explorer_base_url(self, protocol, server, port, base_path):
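+    # For example, ('http', 'localhost', 8080, '/_ah/api/') yields
+    # 'http://localhost:8080/_ah/api'; the port is included only when it
+    # is not the default for the protocol.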
+ show_port = ((protocol == 'http' and port != 80) or
+ (protocol != 'http' and port != 443))
+ url = ('{0}://{1}:{2}/{3}'.format(
+ protocol, server, port, base_path.lstrip('/\\')) if show_port else
+ '{0}://{1}/{2}'.format(protocol, server, base_path.lstrip('/\\')))
+
+ return url.rstrip('/\\')
+
+ def _get_explorer_redirect_url(self, server, port, base_path):
+ protocol = 'http' if 'localhost' in server else 'https'
+ base_url = self._get_explorer_base_url(protocol, server, port, base_path)
+ return self._API_EXPLORER_URL + base_url
+
+ def __call__(self, environ, start_response):
+ """Handle an incoming request.
+
+ Args:
+ environ: An environ dict for the request as defined in PEP-333.
+ start_response: A function used to begin the response to the caller.
+ This follows the semantics defined in PEP-333. In particular, it's
+ called with (status, response_headers, exc_info=None), and it returns
+ an object with a write(body_data) function that can be used to write
+ the body of the response.
+
+ Yields:
+ An iterable over strings containing the body of the HTTP response.
+ """
+ request = api_request.ApiRequest(environ,
+ base_paths=self._backend.base_paths)
+
+ # PEP-333 requires that we return an iterator that iterates over the
+ # response body. Yielding the returned body accomplishes this.
+ yield self.dispatch(request, start_response)
+
+ def dispatch(self, request, start_response):
+ """Handles dispatch to apiserver handlers.
+
+ This typically ends up calling start_response and returning the entire
+ body of the response.
+
+ Args:
+ request: An ApiRequest, the request from the user.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string, the body of the response.
+ """
+ # Check if this matches any of our special handlers.
+ dispatched_response = self.dispatch_non_api_requests(request,
+ start_response)
+ if dispatched_response is not None:
+ return dispatched_response
+
+ # Call the service.
+ try:
+ return self.call_backend(request, start_response)
+ except errors.RequestError as error:
+ return self._handle_request_error(request, error, start_response)
+
+ def dispatch_non_api_requests(self, request, start_response):
+ """Dispatch this request if this is a request to a reserved URL.
+
+ If the request matches one of our reserved URLs, this calls
+ start_response and returns the response body. This also handles OPTIONS
+ CORS requests.
+
+ Args:
+ request: An ApiRequest, the request from the user.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ None if the request doesn't match one of the reserved URLs this
+ handles. Otherwise, returns the response body.
+ """
+ for path_regex, dispatch_function in self._dispatchers:
+ if path_regex.match(request.relative_url):
+ return dispatch_function(request, start_response)
+
+ if request.http_method == 'OPTIONS':
+ cors_handler = self._create_cors_handler(request)
+ if cors_handler.allow_cors_request:
+ # The server returns 200 rather than 204, for some reason.
+ return util.send_wsgi_response('200', [], '', start_response,
+ cors_handler)
+
+ return None
+
+ def handle_api_explorer_request(self, request, start_response):
+ """Handler for requests to {base_path}/explorer.
+
+ This calls start_response and returns the response body.
+
+ Args:
+ request: An ApiRequest, the request from the user.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string containing the response body (which is empty, in this case).
+ """
+ redirect_url = self._get_explorer_redirect_url(
+ request.server, request.port, request.base_path)
+ return util.send_wsgi_redirect_response(redirect_url, start_response)
+
+ def handle_api_static_request(self, request, start_response):
+ """Handler for requests to {base_path}/static/.*.
+
+ This calls start_response and returns the response body.
+
+ Args:
+ request: An ApiRequest, the request from the user.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string containing the response body.
+ """
+ if request.path == PROXY_PATH:
+ return util.send_wsgi_response('200 OK',
+ [('Content-Type',
+ 'text/html')],
+ PROXY_HTML, start_response)
+ else:
+ _logger.debug('Unknown static url requested: %s',
+ request.relative_url)
+ return util.send_wsgi_response('404 Not Found', [('Content-Type',
+ 'text/plain')], 'Not Found',
+ start_response)
+
+ def get_api_configs(self):
+ return self._backend.get_api_configs()
+
+ @staticmethod
+ def verify_response(response, status_code, content_type=None):
+ """Verifies that a response has the expected status and content type.
+
+ Args:
+ response: The ResponseTuple to be checked.
+ status_code: An int, the HTTP status code to be compared with response
+ status.
+ content_type: A string with the acceptable Content-Type header value.
+ None allows any content type.
+
+ Returns:
+ True if both status_code and content_type match, else False.
+ """
+ status = int(response.status.split(' ', 1)[0])
+ if status != status_code:
+ return False
+
+ if content_type is None:
+ return True
+
+ for header, value in response.headers:
+ if header.lower() == 'content-type':
+ return value == content_type
+
+ # If we fall through to here, the verification has failed, so return False.
+ return False
+
+ def prepare_backend_environ(self, host, method, relative_url, headers, body,
+ source_ip, port):
+ """Build an environ object for the backend to consume.
+
+ Args:
+ host: A string containing the host serving the request.
+ method: A string containing the HTTP method of the request.
+ relative_url: A string containing path and query string of the request.
+ headers: A list of (key, value) tuples where key and value are both
+ strings.
+ body: A string containing the request body.
+ source_ip: The source IP address for the request.
+ port: The port to which to direct the request.
+
+ Returns:
+ An environ object with all the information necessary for the backend to
+ process the request.
+ """
+ body = six.ensure_str(body, 'ascii')
+
+ url = urllib.parse.urlsplit(relative_url)
+    if port != 80:
+      host = '%s:%s' % (host, port)
+ environ = {'CONTENT_LENGTH': str(len(body)),
+ 'PATH_INFO': url.path,
+ 'QUERY_STRING': url.query,
+ 'REQUEST_METHOD': method,
+ 'REMOTE_ADDR': source_ip,
+ 'SERVER_NAME': host,
+ 'SERVER_PORT': str(port),
+ 'SERVER_PROTOCOL': 'HTTP/1.1',
+ 'wsgi.version': (1, 0),
+ 'wsgi.url_scheme': 'http',
+               'wsgi.errors': cStringIO(),
+ 'wsgi.multithread': True,
+ 'wsgi.multiprocess': True,
+               'wsgi.input': cStringIO(body)}
+ util.put_headers_in_environ(headers, environ)
+ environ['HTTP_HOST'] = host
+ return environ
+
+ def call_backend(self, orig_request, start_response):
+ """Generate API call (from earlier-saved request).
+
+ This calls start_response and returns the response body.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string containing the response body.
+ """
+ method_config, params = self.lookup_rest_method(orig_request)
+ if not method_config:
+ cors_handler = self._create_cors_handler(orig_request)
+ return util.send_wsgi_not_found_response(start_response,
+ cors_handler=cors_handler)
+
+ # Prepare the request for the back end.
+ transformed_request = self.transform_request(
+ orig_request, params, method_config)
+
+ # Check if this call is for the Discovery service. If so, route
+ # it to our Discovery handler.
+ discovery = discovery_service.DiscoveryService(
+ self.config_manager, self._backend)
+ discovery_response = discovery.handle_discovery_request(
+ transformed_request.path, transformed_request, start_response)
+ if discovery_response:
+ return discovery_response
+
+ url = transformed_request.base_path + transformed_request.path
+ transformed_request.headers['Content-Type'] = 'application/json'
+ transformed_environ = self.prepare_backend_environ(
+ orig_request.server, 'POST', url, transformed_request.headers.items(),
+ transformed_request.body, transformed_request.source_ip,
+ orig_request.port)
+
+ # Send the transformed request to the backend app and capture the response.
+ with util.StartResponseProxy() as start_response_proxy:
+ body_iter = self._backend(transformed_environ, start_response_proxy.Proxy)
+ status = start_response_proxy.response_status
+ headers = start_response_proxy.response_headers
+
+ # Get response body
+ body = start_response_proxy.response_body
+ # In case standard WSGI behavior is implemented later...
+ if not body:
+ body = ''.join(body_iter)
+
+ return self.handle_backend_response(orig_request, transformed_request,
+ status, headers, body, method_config,
+ start_response)
+
+ class __CheckCorsHeaders(object):
+ """Track information about CORS headers and our response to them."""
+
+ def __init__(self, request):
+ self.allow_cors_request = False
+ self.origin = None
+ self.cors_request_method = None
+ self.cors_request_headers = None
+
+ self.__check_cors_request(request)
+
+ def __check_cors_request(self, request):
+ """Check for a CORS request, and see if it gets a CORS response."""
+ # Check for incoming CORS headers.
+ self.origin = request.headers[_CORS_HEADER_ORIGIN]
+ self.cors_request_method = request.headers[_CORS_HEADER_REQUEST_METHOD]
+ self.cors_request_headers = request.headers[
+ _CORS_HEADER_REQUEST_HEADERS]
+
+ # Check if the request should get a CORS response.
+ if (self.origin and
+ ((self.cors_request_method is None) or
+ (self.cors_request_method.upper() in _CORS_ALLOWED_METHODS))):
+ self.allow_cors_request = True
+
+ def update_headers(self, headers_in):
+ """Add CORS headers to the response, if needed."""
+ if not self.allow_cors_request:
+ return
+
+ # Add CORS headers.
+ headers = wsgiref.headers.Headers(headers_in)
+ headers[_CORS_HEADER_ALLOW_CREDS] = 'true'
+ headers[_CORS_HEADER_ALLOW_ORIGIN] = self.origin
+ headers[_CORS_HEADER_ALLOW_METHODS] = ','.join(tuple(
+ _CORS_ALLOWED_METHODS))
+ headers[_CORS_HEADER_EXPOSE_HEADERS] = ','.join(tuple(
+ _CORS_EXPOSED_HEADERS))
+ if self.cors_request_headers is not None:
+ headers[_CORS_HEADER_ALLOW_HEADERS] = self.cors_request_headers
+
+ def _create_cors_handler(self, request):
+ return EndpointsDispatcherMiddleware.__CheckCorsHeaders(request)
+
+ def handle_backend_response(self, orig_request, backend_request,
+ response_status, response_headers,
+ response_body, method_config, start_response):
+ """Handle backend response, transforming output as needed.
+
+ This calls start_response and returns the response body.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+ backend_request: An ApiRequest, the transformed request that was
+ sent to the backend handler.
+ response_status: A string, the status from the response.
+ response_headers: A dict, the headers from the response.
+ response_body: A string, the body of the response.
+ method_config: A dict, the API config of the method to be called.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string containing the response body.
+ """
+    # Verify that the response is JSON. If it isn't, treat the body as an
+    # error message and wrap it in a JSON error response.
+ for header, value in response_headers:
+ if (header.lower() == 'content-type' and
+ not value.lower().startswith('application/json')):
+ return self.fail_request(orig_request,
+ 'Non-JSON reply: %s' % response_body,
+ start_response)
+
+ self.check_error_response(response_body, response_status)
+
+ # Check if the response from the API was empty. Empty REST responses
+    # generate an HTTP 204.
+ empty_response = self.check_empty_response(orig_request, method_config,
+ start_response)
+ if empty_response is not None:
+ return empty_response
+
+ body = self.transform_rest_response(response_body)
+
+ cors_handler = self._create_cors_handler(orig_request)
+ return util.send_wsgi_response(response_status, response_headers, body,
+ start_response, cors_handler=cors_handler)
+
+ def fail_request(self, orig_request, message, start_response):
+ """Write an immediate failure response to outfile, no redirect.
+
+ This calls start_response and returns the error body.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+ message: A string containing the error message to be displayed to user.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string containing the body of the error response.
+ """
+ cors_handler = self._create_cors_handler(orig_request)
+ return util.send_wsgi_error_response(
+ message, start_response, cors_handler=cors_handler)
+
+ def lookup_rest_method(self, orig_request):
+ """Looks up and returns rest method for the currently-pending request.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+
+ Returns:
+ A tuple of (method descriptor, parameters), or (None, None) if no method
+ was found for the current request.
+ """
+ method_name, method, params = self.config_manager.lookup_rest_method(
+ orig_request.path, orig_request.request_uri, orig_request.http_method)
+ orig_request.method_name = method_name
+ return method, params
+
+ def transform_request(self, orig_request, params, method_config):
+ """Transforms orig_request to apiserving request.
+
+ This method uses orig_request to determine the currently-pending request
+ and returns a new transformed request ready to send to the backend. This
+    method accepts a REST-style or RPC-style request.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+ params: A dictionary containing path parameters for rest requests, or
+ None for an RPC request.
+ method_config: A dict, the API config of the method to be called.
+
+ Returns:
+ An ApiRequest that's a copy of the current request, modified so it can
+ be sent to the backend. The path is updated and parts of the body or
+ other properties may also be changed.
+ """
+ method_params = method_config.get('request', {}).get('parameters', {})
+ request = self.transform_rest_request(orig_request, params, method_params)
+ request.path = method_config.get('rosyMethod', '')
+ return request
+
+ def _add_message_field(self, field_name, value, params):
+ """Converts a . delimitied field name to a message field in parameters.
+
+ This adds the field to the params dict, broken out so that message
+ parameters appear as sub-dicts within the outer param.
+
+ For example:
+ {'a.b.c': ['foo']}
+ becomes:
+ {'a': {'b': {'c': ['foo']}}}
+
+ Args:
+      field_name: A string containing the '.'-delimited name to be converted
+ into a dictionary.
+ value: The value to be set.
+ params: The dictionary holding all the parameters, where the value is
+ eventually set.
+ """
+ if '.' not in field_name:
+ params[field_name] = value
+ return
+
+ root, remaining = field_name.split('.', 1)
+ sub_params = params.setdefault(root, {})
+ self._add_message_field(remaining, value, sub_params)
+
+ def _update_from_body(self, destination, source):
+ """Updates the dictionary for an API payload with the request body.
+
+ The values from the body should override those already in the payload, but
+ for nested fields (message objects) the values can be combined
+ recursively.
+
+ Args:
+ destination: A dictionary containing an API payload parsed from the
+ path and query parameters in a request.
+ source: A dictionary parsed from the body of the request.
+ """
+ for key, value in source.items():
+ destination_value = destination.get(key)
+ if isinstance(value, dict) and isinstance(destination_value, dict):
+ self._update_from_body(destination_value, value)
+ else:
+ destination[key] = value
+
+ def transform_rest_request(self, orig_request, params, method_parameters):
+ """Translates a Rest request into an apiserving request.
+
+ This makes a copy of orig_request and transforms it to apiserving
+ format (moving request parameters to the body).
+
+ The request can receive values from the path, query and body and combine
+ them before sending them along to the backend. In cases of collision,
+ objects from the body take precedence over those from the query, which in
+ turn take precedence over those from the path.
+
+ In the case that a repeated value occurs in both the query and the path,
+ those values can be combined, but if that value also occurred in the body,
+ it would override any other values.
+
+ In the case of nested values from message fields, non-colliding values
+ from subfields can be combined. For example, if '?a.c=10' occurs in the
+ query string and "{'a': {'b': 11}}" occurs in the body, then they will be
+ combined as
+
+ {
+ 'a': {
+ 'b': 11,
+ 'c': 10,
+ }
+ }
+
+ before being sent to the backend.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+ params: A dict with URL path parameters extracted by the config_manager
+ lookup.
+ method_parameters: A dictionary containing the API configuration for the
+ parameters for the request.
+
+ Returns:
+ A copy of the current request that's been modified so it can be sent
+ to the backend. The body is updated to include parameters from the
+ URL.
+ """
+ request = orig_request.copy()
+ body_json = {}
+
+ # Handle parameters from the URL path.
+ for key, value in params.items():
+      # Values need to be in a list to interact with query parameter values
+      # and to account for the case of repeated parameters.
+ body_json[key] = [value]
+
+ # Add in parameters from the query string.
+ if request.parameters:
+ # For repeated elements, query and path work together
+ for key, value in request.parameters.items():
+ if key in body_json:
+ body_json[key] = value + body_json[key]
+ else:
+ body_json[key] = value
+
+    # Validate all parameters we've merged so far and convert any '.'-delimited
+    # parameters to nested parameters. We iterate over a copy of the items
+    # since we may modify body_json within the loop. For instance, 'a.b' is
+    # not a valid key and would be replaced with 'a'.
+    for key, value in list(body_json.items()):
+ current_parameter = method_parameters.get(key, {})
+ repeated = current_parameter.get('repeated', False)
+
+ if not repeated:
+ body_json[key] = body_json[key][0]
+
+ # Order is important here. Parameter names are dot-delimited in
+ # parameters instead of nested in dictionaries as a message field is, so
+ # we need to call transform_parameter_value on them before calling
+ # _add_message_field.
+ body_json[key] = parameter_converter.transform_parameter_value(
+ key, body_json[key], current_parameter)
+ # Remove the old key and try to convert to nested message value
+ message_value = body_json.pop(key)
+ self._add_message_field(key, message_value, body_json)
+
+ # Add in values from the body of the request.
+ if request.body_json:
+ self._update_from_body(body_json, request.body_json)
+
+ request.body_json = body_json
+ request.body = json.dumps(request.body_json)
+ return request
+
+ def check_error_response(self, body, status):
+ """Raise an exception if the response from the backend was an error.
+
+ Args:
+ body: A string containing the backend response body.
+ status: A string containing the backend response status.
+
+ Raises:
+ BackendError if the response is an error.
+ """
+ status_code = int(status.split(' ', 1)[0])
+ if status_code >= 300:
+ raise errors.BackendError(body, status)
+
+ def check_empty_response(self, orig_request, method_config, start_response):
+ """If the response from the backend is empty, return a HTTP 204 No Content.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+ method_config: A dict, the API config of the method to be called.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ If the backend response was empty, this returns a string containing the
+ response body that should be returned to the user. If the backend
+ response wasn't empty, this returns None, indicating that we should not
+ exit early with a 204.
+ """
+ response_config = method_config.get('response', {}).get('body')
+ if response_config == 'empty':
+ # The response to this function should be empty. We should return a 204.
+ # Note that it's possible that the backend returned something, but we'll
+ # ignore it. This matches the behavior in the Endpoints server.
+ cors_handler = self._create_cors_handler(orig_request)
+ return util.send_wsgi_no_content_response(start_response, cors_handler)
+
+ def transform_rest_response(self, response_body):
+ """Translates an apiserving REST response so it's ready to return.
+
+ Currently, the only thing that needs to be fixed here is indentation,
+ so it's consistent with what the live app will return.
+
+ Args:
+ response_body: A string containing the backend response.
+
+ Returns:
+ A reformatted version of the response JSON.
+ """
+ body_json = json.loads(response_body)
+ return json.dumps(body_json, indent=1, sort_keys=True)
+
+ def _handle_request_error(self, orig_request, error, start_response):
+ """Handle a request error, converting it to a WSGI response.
+
+ Args:
+ orig_request: An ApiRequest, the original request from the user.
+ error: A RequestError containing information about the error.
+ start_response: A function with semantics defined in PEP-333.
+
+ Returns:
+ A string containing the response body.
+ """
+ headers = [('Content-Type', 'application/json')]
+ status_code = error.status_code()
+ body = error.rest_error()
+
+ response_status = '%d %s' % (status_code,
+ http_client.responses.get(status_code,
+ 'Unknown Error'))
+ cors_handler = self._create_cors_handler(orig_request)
+ return util.send_wsgi_response(response_status, headers, body,
+ start_response, cors_handler=cors_handler)
diff --git a/third_party/endpoints/endpointscfg.py b/third_party/endpoints/endpointscfg.py
new file mode 100755
index 0000000..1557cb7
--- /dev/null
+++ b/third_party/endpoints/endpointscfg.py
@@ -0,0 +1,31 @@
+#!/usr/bin/python
+# Copyright 2017 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+r"""Wrapper script to set up import paths for endpointscfg.
+
+The actual implementation is in _endpointscfg_impl, but we have to set
+up import paths properly before we can import that module.
+
+See the docstring for endpoints._endpointscfg_impl for more
+information about this script's capabilities.
+"""
+
+import sys
+
+import _endpointscfg_setup # pylint: disable=unused-import
+from endpoints._endpointscfg_impl import main
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/third_party/endpoints/errors.py b/third_party/endpoints/errors.py
new file mode 100644
index 0000000..e98c76d
--- /dev/null
+++ b/third_party/endpoints/errors.py
@@ -0,0 +1,285 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Error handling and exceptions used in the local Cloud Endpoints server."""
+
+# pylint: disable=g-bad-name
+from __future__ import absolute_import
+
+import json
+import logging
+
+from . import generated_error_info
+
+__all__ = ['BackendError',
+ 'BasicTypeParameterError',
+ 'EnumRejectionError',
+ 'InvalidParameterError',
+ 'RequestError',
+ 'RequestRejectionError']
+
+_logger = logging.getLogger(__name__)
+
+_INVALID_ENUM_TEMPLATE = 'Invalid string value: %r. Allowed values: %r'
+_INVALID_BASIC_PARAM_TEMPLATE = 'Invalid %s value: %r.'
+
+
+class RequestError(Exception):
+ """Base class for errors that happen while processing a request."""
+
+ def status_code(self):
+ """HTTP status code number associated with this error.
+
+ Subclasses must implement this, returning an integer with the status
+ code number for the error.
+
+ Example: 400
+
+ Raises:
+ NotImplementedError: Subclasses must override this function.
+ """
+ raise NotImplementedError
+
+ def message(self):
+ """Text message explaining the error.
+
+ Subclasses must implement this, returning a string that explains the
+ error.
+
+ Raises:
+ NotImplementedError: Subclasses must override this function.
+ """
+ raise NotImplementedError
+
+ def reason(self):
+ """Get the reason for the error.
+
+ Error reason is a custom string in the Cloud Endpoints server. When
+ possible, this should match the reason that the live server will generate,
+ based on the error's status code. If this returns None, the error formatter
+ will attempt to generate a reason from the status code.
+
+    Raises:
+      NotImplementedError: Subclasses must override this function.
+ """
+ raise NotImplementedError
+
+ def domain(self):
+ """Get the domain for this error.
+
+ Returns:
+ The string 'global' by default. Subclasses can override this if they have
+ a different domain.
+ """
+ return 'global'
+
+ def extra_fields(self):
+ """Return a dict of extra fields to add to the error response.
+
+ Some errors have additional information. This provides a way for subclasses
+ to provide that information.
+
+ Returns:
+ None, by default. Subclasses can return a dict with values to add
+ to the error response.
+ """
+ return None
+
+ def __format_error(self, error_list_tag):
+ """Format this error into a JSON response.
+
+ Args:
+ error_list_tag: A string specifying the name of the tag to use for the
+ error list.
+
+ Returns:
+ A dict containing the reformatted JSON error response.
+ """
+ error = {'domain': self.domain(),
+ 'reason': self.reason(),
+ 'message': self.message()}
+ error.update(self.extra_fields() or {})
+ return {'error': {error_list_tag: [error],
+ 'code': self.status_code(),
+ 'message': self.message()}}
+
+ def rest_error(self):
+ """Format this error into a response to a REST request.
+
+ Returns:
+ A string containing the reformatted error response.
+ """
+ error_json = self.__format_error('errors')
+ return json.dumps(error_json, indent=1, sort_keys=True)
+
+ def rpc_error(self):
+ """Format this error into a response to a JSON RPC request.
+
+ Returns:
+ A dict containing the reformatted JSON error response.
+ """
+ return self.__format_error('data')
+
+
+class RequestRejectionError(RequestError):
+ """Base class for invalid/rejected requests.
+
+ To be raised when parsing the request values and comparing them against the
+ generated discovery document.
+ """
+
+ def status_code(self):
+ return 400
+
+
+class InvalidParameterError(RequestRejectionError):
+ """Base class for invalid parameter errors.
+
+ Child classes only need to implement the message() function.
+ """
+
+ def __init__(self, parameter_name, value):
+ """Constructor for InvalidParameterError.
+
+ Args:
+ parameter_name: String; the name of the parameter which had a value
+ rejected.
+ value: The actual value passed in for the parameter. Usually string.
+ """
+ super(InvalidParameterError, self).__init__()
+ self.parameter_name = parameter_name
+ self.value = value
+
+ def reason(self):
+ """Returns the server's reason for this error.
+
+ Returns:
+ A string containing a short error reason.
+ """
+ return 'invalidParameter'
+
+ def extra_fields(self):
+ """Returns extra fields to add to the error response.
+
+ Returns:
+ A dict containing extra fields to add to the error response.
+ """
+ return {'locationType': 'parameter',
+ 'location': self.parameter_name}
+
+
+class BasicTypeParameterError(InvalidParameterError):
+ """Request rejection exception for basic types (int, float)."""
+
+ def __init__(self, parameter_name, value, type_name):
+ """Constructor for BasicTypeParameterError.
+
+ Args:
+ parameter_name: String; the name of the parameter which had a value
+ rejected.
+      value: The actual value passed in for the parameter. Usually string.
+ type_name: Descriptive name of the data type expected.
+ """
+ super(BasicTypeParameterError, self).__init__(parameter_name, value)
+ self.type_name = type_name
+
+ def message(self):
+ """A descriptive message describing the error."""
+ return _INVALID_BASIC_PARAM_TEMPLATE % (self.type_name, self.value)
+
+
+class EnumRejectionError(InvalidParameterError):
+ """Custom request rejection exception for enum values."""
+
+ def __init__(self, parameter_name, value, allowed_values):
+ """Constructor for EnumRejectionError.
+
+ Args:
+ parameter_name: String; the name of the enum parameter which had a value
+ rejected.
+ value: The actual value passed in for the enum. Usually string.
+ allowed_values: List of strings allowed for the enum.
+ """
+ super(EnumRejectionError, self).__init__(parameter_name, value)
+ self.allowed_values = allowed_values
+
+ def message(self):
+ """A descriptive message describing the error."""
+ return _INVALID_ENUM_TEMPLATE % (self.value, self.allowed_values)
+
+
+class BackendError(RequestError):
+ """Exception raised when the backend returns an error code."""
+
+ def __init__(self, body, status):
+ super(BackendError, self).__init__()
+ # Convert backend error status to whatever the live server would return.
+ status_code = self._get_status_code(status)
+ self._error_info = generated_error_info.get_error_info(status_code)
+
+ try:
+ error_json = json.loads(body)
+ self._message = error_json.get('error_message')
+    except (TypeError, ValueError):
+ self._message = body
+
+ def _get_status_code(self, http_status):
+ """Get the HTTP status code from an HTTP status string.
+
+ Args:
+      http_status: A string containing an HTTP status code and reason.
+
+ Returns:
+ An integer with the status code number from http_status.
+ """
+ try:
+ return int(http_status.split(' ', 1)[0])
+    except (TypeError, ValueError):
+ _logger.warning('Unable to find status code in HTTP status %r.',
+ http_status)
+ return 500
+
+ def status_code(self):
+ """Return the HTTP status code number for this error.
+
+ Returns:
+ An integer containing the status code for this error.
+ """
+ return self._error_info.http_status
+
+ def message(self):
+ """Return a descriptive message for this error.
+
+ Returns:
+ A string containing a descriptive message for this error.
+ """
+ return self._message
+
+ def reason(self):
+ """Return the short reason for this error.
+
+ Returns:
+ A string with the reason for this error.
+ """
+ return self._error_info.reason
+
+ def domain(self):
+ """Return the remapped domain for this error.
+
+ Returns:
+ A string containing the remapped domain for this error.
+ """
+ return self._error_info.domain
diff --git a/third_party/endpoints/generated_error_info.py b/third_party/endpoints/generated_error_info.py
new file mode 100644
index 0000000..d0c31c3
--- /dev/null
+++ b/third_party/endpoints/generated_error_info.py
@@ -0,0 +1,69 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Automatically generated mapping of error codes."""
+
+# pylint: disable=g-bad-name
+
+from __future__ import absolute_import
+
+import collections
+
+_ErrorInfo = collections.namedtuple(
+ '_ErrorInfo', ['http_status', 'rpc_status', 'reason', 'domain'])
+
+_UNSUPPORTED_ERROR = _ErrorInfo(404,
+ 404,
+ 'unsupportedProtocol',
+ 'global')
+_BACKEND_ERROR = _ErrorInfo(503,
+ -32099,
+ 'backendError',
+ 'global')
+_ERROR_MAP = {
+ 400: _ErrorInfo(400, 400, 'badRequest', 'global'),
+ 401: _ErrorInfo(401, 401, 'required', 'global'),
+ 402: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ 403: _ErrorInfo(403, 403, 'forbidden', 'global'),
+ 404: _ErrorInfo(404, 404, 'notFound', 'global'),
+ 405: _ErrorInfo(501, 501, 'unsupportedMethod', 'global'),
+ 406: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ 407: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ 408: _ErrorInfo(503, -32099, 'backendError', 'global'),
+ 409: _ErrorInfo(409, 409, 'conflict', 'global'),
+ 410: _ErrorInfo(410, 410, 'deleted', 'global'),
+ 411: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ 412: _ErrorInfo(412, 412, 'conditionNotMet', 'global'),
+ 413: _ErrorInfo(413, 413, 'uploadTooLarge', 'global'),
+ 414: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ 415: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ 416: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ 417: _ErrorInfo(404, 404, 'unsupportedProtocol', 'global'),
+ }
+
+
+def get_error_info(lily_status):
+ """Get info that would be returned by the server for this HTTP status.
+
+ Args:
+ lily_status: An integer containing the HTTP status returned by the SPI.
+
+ Returns:
+ An _ErrorInfo object containing information that would be returned by the
+ live server for the provided lily_status.
+ """
+ if lily_status >= 500:
+ return _BACKEND_ERROR
+
+ return _ERROR_MAP.get(lily_status, _UNSUPPORTED_ERROR)
diff --git a/third_party/endpoints/message_parser.py b/third_party/endpoints/message_parser.py
new file mode 100644
index 0000000..28d6f47
--- /dev/null
+++ b/third_party/endpoints/message_parser.py
@@ -0,0 +1,227 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Describe ProtoRPC Messages in JSON Schema.
+
+Add protorpc.message subclasses to MessageTypeToJsonSchema and get a JSON
+Schema description of all the messages.
+"""
+
+# pylint: disable=g-bad-name
+from __future__ import absolute_import
+
+import re
+
+from . import message_types
+from . import messages
+
+__all__ = ['MessageTypeToJsonSchema']
+
+
+class MessageTypeToJsonSchema(object):
+ """Describe ProtoRPC messages in JSON Schema.
+
+ Add protorpc.message subclasses to MessageTypeToJsonSchema and get a JSON
+ Schema description of all the messages. MessageTypeToJsonSchema handles
+ all the types of fields that can appear in a message.
+ """
+
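+  # Typical usage (illustrative; MyRequest stands in for any
+  # protorpc.messages.Message subclass):
+  #   parser = MessageTypeToJsonSchema()
+  #   schema_id = parser.add_message(MyRequest)
+  #   schemas = parser.schemas()
+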
+ # Field to schema type and format. If the field maps to tuple, the
+ # first entry is set as the type, the second the format (or left alone if
+ # None). If the field maps to a dictionary, we'll grab the value from the
+ # field's Variant in that dictionary.
+ # The variant dictionary should include an element that None maps to,
+ # to fall back on as a default.
+ __FIELD_TO_SCHEMA_TYPE_MAP = {
+ messages.IntegerField: {messages.Variant.INT32: ('integer', 'int32'),
+ messages.Variant.INT64: ('string', 'int64'),
+ messages.Variant.UINT32: ('integer', 'uint32'),
+ messages.Variant.UINT64: ('string', 'uint64'),
+ messages.Variant.SINT32: ('integer', 'int32'),
+ messages.Variant.SINT64: ('string', 'int64'),
+ None: ('integer', 'int64')},
+ messages.FloatField: {messages.Variant.FLOAT: ('number', 'float'),
+ messages.Variant.DOUBLE: ('number', 'double'),
+ None: ('number', 'float')},
+ messages.BooleanField: ('boolean', None),
+ messages.BytesField: ('string', 'byte'),
+ message_types.DateTimeField: ('string', 'date-time'),
+ messages.StringField: ('string', None),
+ messages.MessageField: ('object', None),
+ messages.EnumField: ('string', None),
+ }
+
+ __DEFAULT_SCHEMA_TYPE = ('string', None)
+
+ def __init__(self):
+ # A map of schema ids to schemas.
+ self.__schemas = {}
+
+ # A map from schema id to non-normalized definition name.
+ self.__normalized_names = {}
+
+ def add_message(self, message_type):
+ """Add a new message.
+
+ Args:
+ message_type: protorpc.message.Message class to be parsed.
+
+ Returns:
+ string, The JSON Schema id.
+
+ Raises:
+ KeyError if the Schema id for this message_type would collide with the
+ Schema id of a different message_type that was already added.
+ """
+ name = self.__normalized_name(message_type)
+ if name not in self.__schemas:
+ # Set a placeholder to prevent infinite recursion.
+ self.__schemas[name] = None
+ schema = self.__message_to_schema(message_type)
+ self.__schemas[name] = schema
+ return name
+
+ def ref_for_message_type(self, message_type):
+ """Returns the JSON Schema id for the given message.
+
+ Args:
+ message_type: protorpc.message.Message class to be parsed.
+
+ Returns:
+ string, The JSON Schema id.
+
+ Raises:
+ KeyError: if the message hasn't been parsed via add_message().
+ """
+ name = self.__normalized_name(message_type)
+ if name not in self.__schemas:
+      raise KeyError('Message has not been parsed: %s' % name)
+ return name
+
+ def schemas(self):
+ """Returns the JSON Schema of all the messages.
+
+ Returns:
+ object: JSON Schema description of all messages.
+ """
+ return self.__schemas.copy()
+
+ def __normalized_name(self, message_type):
+ """Normalized schema name.
+
+ Generate a normalized schema name, taking the class name and stripping out
+ everything but alphanumerics, and camel casing the remaining words.
+ A normalized schema name is a name that matches [a-zA-Z][a-zA-Z0-9]*
+
+ Args:
+ message_type: protorpc.message.Message class being parsed.
+
+ Returns:
+ A string, the normalized schema name.
+
+ Raises:
+ KeyError: A collision was found between normalized names.
+ """
+ # Normalization is applied to match the constraints that Discovery applies
+ # to Schema names.
+ name = message_type.definition_name()
+
+ split_name = re.split(r'[^0-9a-zA-Z]', name)
+ normalized = ''.join(
+ part[0].upper() + part[1:] for part in split_name if part)
+
+ previous = self.__normalized_names.get(normalized)
+ if previous:
+ if previous != name:
+ raise KeyError('Both %s and %s normalize to the same schema name: %s' %
+ (name, previous, normalized))
+ else:
+ self.__normalized_names[normalized] = name
+
+ return normalized
+
+ def __message_to_schema(self, message_type):
+ """Parse a single message into JSON Schema.
+
+ Will recursively descend the message structure
+ and also parse other messages references via MessageFields.
+
+ Args:
+ message_type: protorpc.messages.Message class to parse.
+
+ Returns:
+ An object representation of the schema.
+ """
+ name = self.__normalized_name(message_type)
+ schema = {
+ 'id': name,
+ 'type': 'object',
+ }
+ if message_type.__doc__:
+ schema['description'] = message_type.__doc__
+ properties = {}
+ for field in message_type.all_fields():
+ descriptor = {}
+ # Info about the type of this field. This is either merged with
+ # the descriptor or it's placed within the descriptor's 'items'
+ # property, depending on whether this is a repeated field or not.
+ type_info = {}
+
+ if type(field) == messages.MessageField:
+ field_type = field.type().__class__
+ type_info['$ref'] = self.add_message(field_type)
+ if field_type.__doc__:
+ descriptor['description'] = field_type.__doc__
+ else:
+ schema_type = self.__FIELD_TO_SCHEMA_TYPE_MAP.get(
+ type(field), self.__DEFAULT_SCHEMA_TYPE)
+ # If the map pointed to a dictionary, check if the field's variant
+ # is in that dictionary and use the type specified there.
+ if isinstance(schema_type, dict):
+ variant_map = schema_type
+ variant = getattr(field, 'variant', None)
+ if variant in variant_map:
+ schema_type = variant_map[variant]
+ else:
+ # The variant map needs to specify a default value, mapped by None.
+ schema_type = variant_map[None]
+ type_info['type'] = schema_type[0]
+ if schema_type[1]:
+ type_info['format'] = schema_type[1]
+
+ if type(field) == messages.EnumField:
+ sorted_enums = sorted([enum_info for enum_info in field.type],
+ key=lambda enum_info: enum_info.number)
+ type_info['enum'] = [enum_info.name for enum_info in sorted_enums]
+
+ if field.required:
+ descriptor['required'] = True
+
+ if field.default:
+ if type(field) == messages.EnumField:
+ descriptor['default'] = str(field.default)
+ else:
+ descriptor['default'] = field.default
+
+ if field.repeated:
+ descriptor['items'] = type_info
+ descriptor['type'] = 'array'
+ else:
+ descriptor.update(type_info)
+
+ properties[field.name] = descriptor
+
+ schema['properties'] = properties
+
+ return schema
diff --git a/third_party/endpoints/openapi_generator.py b/third_party/endpoints/openapi_generator.py
new file mode 100644
index 0000000..058bf8d
--- /dev/null
+++ b/third_party/endpoints/openapi_generator.py
@@ -0,0 +1,1073 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library for converting service configs to OpenAPI (Swagger) specs."""
+from __future__ import absolute_import
+
+import hashlib
+import json
+import logging
+import re
+
+from . import api_exceptions
+from . import message_parser
+from . import message_types
+from . import messages
+from . import remote
+from . import resource_container
+from . import util
+
+_logger = logging.getLogger(__name__)
+
+_PATH_VARIABLE_PATTERN = r'{([a-zA-Z_][a-zA-Z_.\d]*)}'
+
+_MULTICLASS_MISMATCH_ERROR_TEMPLATE = (
+ 'Attempting to implement service %s, version %s, with multiple '
+ 'classes that aren\'t compatible. See docstring for api() for '
+ 'examples how to implement a multi-class API.')
+
+_INVALID_AUTH_ISSUER = 'No auth issuer named %s defined in this Endpoints API.'
+
+_API_KEY = 'api_key'
+_API_KEY_PARAM = 'key'
+_DEFAULT_SECURITY_DEFINITION = 'google_id_token'
+
+
+_VALID_API_NAME = re.compile('^[a-z][a-z0-9]{0,39}$')
+
+
+def _validate_api_name(name):
+ valid = (_VALID_API_NAME.match(name) is not None)
+ if not valid:
+ raise api_exceptions.InvalidApiNameException(
+ 'The API name must match the regular expression {}'.format(
+ _VALID_API_NAME.pattern[1:-1]))
+ return name
+
+
+class OpenApiGenerator(object):
+ """Generates an OpenAPI spec from a ProtoRPC service.
+
+ Example:
+
+ class HelloRequest(messages.Message):
+ my_name = messages.StringField(1, required=True)
+
+ class HelloResponse(messages.Message):
+ hello = messages.StringField(1, required=True)
+
+ class HelloService(remote.Service):
+
+ @remote.method(HelloRequest, HelloResponse)
+ def hello(self, request):
+ return HelloResponse(hello='Hello there, %s!' %
+ request.my_name)
+
+ api_config = OpenApiGenerator().pretty_print_config_to_json(HelloService)
+
+ The resulting api_config will be a JSON OpenAPI document describing the API
+ implemented by HelloService.
+ """
+
+ # Constants for categorizing a request method.
+ # __NO_BODY - Request without a request body, such as GET and DELETE methods.
+ # __HAS_BODY - Request (such as POST/PUT/PATCH) with info in the request body.
+ __NO_BODY = 1 # pylint: disable=invalid-name
+ __HAS_BODY = 2 # pylint: disable=invalid-name
+
+ def __init__(self):
+ self.__parser = message_parser.MessageTypeToJsonSchema()
+
+ # Maps method id to the request schema id.
+ self.__request_schema = {}
+
+ # Maps method id to the response schema id.
+ self.__response_schema = {}
+
+ def _add_def_paths(self, prop_dict):
+ """Recursive method to add relative paths for any $ref objects.
+
+ Args:
+ prop_dict: The property dict to alter.
+
+ Side Effects:
+ Alters prop_dict in-place.
+ """
+ for prop_key, prop_value in prop_dict.items():
+      if prop_key == '$ref' and not prop_value.startswith('#'):
+ prop_dict[prop_key] = '#/definitions/' + prop_dict[prop_key]
+ elif isinstance(prop_value, dict):
+ self._add_def_paths(prop_value)
+
+ def _construct_operation_id(self, service_name, protorpc_method_name):
+ """Return an operation id for a service method.
+
+ Args:
+ service_name: The name of the service.
+ protorpc_method_name: The ProtoRPC method name.
+
+ Returns:
+ A string representing the operation id.
+ """
+
+ # camelCase the ProtoRPC method name
+ method_name_camel = util.snake_case_to_headless_camel_case(
+ protorpc_method_name)
+
+ return '{0}_{1}'.format(service_name, method_name_camel)
+
+ def __get_request_kind(self, method_info):
+ """Categorize the type of the request.
+
+ Args:
+ method_info: _MethodInfo, method information.
+
+ Returns:
+ The kind of request.
+ """
+ if method_info.http_method in ('GET', 'DELETE'):
+ return self.__NO_BODY
+ else:
+ return self.__HAS_BODY
+
+ def __field_to_subfields(self, field):
+ """Fully describes data represented by field, including the nested case.
+
+ In the case that the field is not a message field, we have no fields nested
+ within a message definition, so we can simply return that field. However, in
+ the nested case, we can't simply describe the data with one field or even
+ with one chain of fields.
+
+ For example, if we have a message field
+
+ m_field = messages.MessageField(RefClass, 1)
+
+ which references a class with two fields:
+
+ class RefClass(messages.Message):
+ one = messages.StringField(1)
+ two = messages.IntegerField(2)
+
+ then we would need to include both one and two to represent all the
+ data contained.
+
+ Calling __field_to_subfields(m_field) would return:
+ [
+ [<MessageField "m_field">, <StringField "one">],
+ [<MessageField "m_field">, <StringField "two">],
+ ]
+
+ If the second field was instead a message field
+
+ class RefClass(messages.Message):
+ one = messages.StringField(1)
+ two = messages.MessageField(OtherRefClass, 2)
+
+ referencing another class with two fields
+
+ class OtherRefClass(messages.Message):
+ three = messages.BooleanField(1)
+ four = messages.FloatField(2)
+
+ then we would need to recurse one level deeper for two.
+
+ With this change, calling __field_to_subfields(m_field) would return:
+ [
+ [<MessageField "m_field">, <StringField "one">],
+ [<MessageField "m_field">, <StringField "two">, <StringField "three">],
+ [<MessageField "m_field">, <StringField "two">, <StringField "four">],
+ ]
+
+ Args:
+ field: An instance of a subclass of messages.Field.
+
+ Returns:
+ A list of lists, where each sublist is a list of fields.
+ """
+ # Termination condition
+ if not isinstance(field, messages.MessageField):
+ return [[field]]
+
+ result = []
+ for subfield in sorted(field.message_type.all_fields(),
+ key=lambda f: f.number):
+ subfield_results = self.__field_to_subfields(subfield)
+ for subfields_list in subfield_results:
+ subfields_list.insert(0, field)
+ result.append(subfields_list)
+ return result
+
+ def __field_to_parameter_type_and_format(self, field):
+ """Converts the field variant type into a tuple describing the parameter.
+
+ Args:
+ field: An instance of a subclass of messages.Field.
+
+ Returns:
+ A tuple with the type and format of the field, respectively.
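+ For example, an INT64 variant yields ('string', 'int64') and a BOOL
+ variant yields ('boolean', None).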
+
+ Raises:
+ TypeError: if the field variant is a message variant.
+ """
+ # We use lowercase values for types (e.g. 'string' instead of 'STRING').
+ variant = field.variant
+ if variant == messages.Variant.MESSAGE:
+ raise TypeError('A message variant can\'t be used in a parameter.')
+
+ # Note that the 64-bit integers are marked as strings -- this is to
+ # accommodate JavaScript, which would otherwise demote them to 32-bit
+ # integers.
+
+ custom_variant_map = {
+ messages.Variant.DOUBLE: ('number', 'double'),
+ messages.Variant.FLOAT: ('number', 'float'),
+ messages.Variant.INT64: ('string', 'int64'),
+ messages.Variant.SINT64: ('string', 'int64'),
+ messages.Variant.UINT64: ('string', 'uint64'),
+ messages.Variant.INT32: ('integer', 'int32'),
+ messages.Variant.SINT32: ('integer', 'int32'),
+ messages.Variant.UINT32: ('integer', 'uint32'),
+ messages.Variant.BOOL: ('boolean', None),
+ messages.Variant.STRING: ('string', None),
+ messages.Variant.BYTES: ('string', 'byte'),
+ messages.Variant.ENUM: ('string', None),
+ }
+ return custom_variant_map.get(variant) or (variant.name.lower(), None)
+
+ def __get_path_parameters(self, path):
+ """Parses path paremeters from a URI path and organizes them by parameter.
+
+ Some of the parameters may correspond to message fields, and so will be
+ represented as segments corresponding to each subfield; e.g. first.second if
+ the field "second" in the message field "first" is pulled from the path.
+
+ The resulting dictionary uses the first segments as keys and each key has as
+ value the list of full parameter values with first segment equal to the key.
+
+ An empty variable template ('{}') does not match the parameter pattern,
+ so that part of the path template is simply ignored.
+
+ Args:
+ path: String; a URI path, potentially with some parameters.
+
+ Returns:
+ A dictionary with strings as keys and list of strings as values.
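+ For example, the path '/a/{x.y}/b/{x.z}/{other}' yields
+ {'x': ['x.y', 'x.z'], 'other': ['other']}.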
+ """
+ path_parameters_by_segment = {}
+ for format_var_name in re.findall(_PATH_VARIABLE_PATTERN, path):
+ first_segment = format_var_name.split('.', 1)[0]
+ matches = path_parameters_by_segment.setdefault(first_segment, [])
+ matches.append(format_var_name)
+
+ return path_parameters_by_segment
+
+ def __validate_simple_subfield(self, parameter, field, segment_list,
+ segment_index=0):
+ """Verifies that a proposed subfield actually exists and is a simple field.
+
+ Here, simple means it is not a MessageField (nested).
+
+ Args:
+ parameter: String; the '.' delimited name of the current field being
+ considered. This is relative to some root.
+ field: An instance of a subclass of messages.Field. Corresponds to the
+ previous segment in the path (previous relative to segment_index),
+ since this field should be a message field with the current segment
+ as a field in the message class.
+ segment_list: The full list of segments from the '.' delimited subfield
+ being validated.
+ segment_index: Integer; used to hold the position of current segment so
+ that segment_list can be passed as a reference instead of having to
+ copy using segment_list[1:] at each step.
+
+ Raises:
+ TypeError: If the final subfield (indicated by segment_index relative
+ to the length of segment_list) is a MessageField.
+ TypeError: If at any stage the lookup at a segment fails, e.g if a.b
+ exists but a.b.c does not exist. This can happen either if a.b is not
+ a message field or if a.b.c is not a property on the message class from
+ a.b.
+ """
+ if segment_index >= len(segment_list):
+ # In this case, the field is the final one, so should be simple type
+ if isinstance(field, messages.MessageField):
+ field_class = field.__class__.__name__
+ raise TypeError('Can\'t use messages in path. Subfield %r was '
+ 'included but is a %s.' % (parameter, field_class))
+ return
+
+ segment = segment_list[segment_index]
+ parameter += '.' + segment
+ try:
+ field = field.type.field_by_name(segment)
+ except (AttributeError, KeyError):
+ raise TypeError('Subfield %r from path does not exist.' % (parameter,))
+
+ self.__validate_simple_subfield(parameter, field, segment_list,
+ segment_index=segment_index + 1)
+
+ def __validate_path_parameters(self, field, path_parameters):
+ """Verifies that all path parameters correspond to an existing subfield.
+
+ Args:
+ field: An instance of a subclass of messages.Field. Should be the root
+ level property name in each path parameter in path_parameters. For
+ example, if the field is called 'foo', then each path parameter should
+ begin with 'foo.'.
+ path_parameters: A list of Strings representing URI parameter variables.
+
+ Raises:
+ TypeError: If one of the path parameters does not start with field.name.
+ """
+ for param in path_parameters:
+ segment_list = param.split('.')
+ if segment_list[0] != field.name:
+ raise TypeError('Subfield %r can\'t come from field %r.'
+ % (param, field.name))
+ self.__validate_simple_subfield(field.name, field, segment_list[1:])
+
+ def __parameter_default(self, field):
+ """Returns default value of field if it has one.
+
+ Args:
+ field: A simple field.
+
+ Returns:
+ The default value of the field, if any exists, with the exception of an
+ enum field, which will have its value cast to a string.
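+ For example, a hypothetical EnumField default of Color.RED is returned
+ as the string 'RED'.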
+ """
+ if field.default:
+ if isinstance(field, messages.EnumField):
+ return field.default.name
+ else:
+ return field.default
+
+ def __parameter_enum(self, param):
+ """Returns enum descriptor of a parameter if it is an enum.
+
+ An enum descriptor is a list of keys.
+
+ Args:
+ param: A simple field.
+
+ Returns:
+ The enum descriptor for the field, if it's an enum descriptor, else
+ returns None.
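+ For example, an enum type with entries RED = 1 and BLUE = 2 yields
+ ['RED', 'BLUE'], ordered by enum number.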
+ """
+ if isinstance(param, messages.EnumField):
+ return [enum_entry[0] for enum_entry in sorted(
+ param.type.to_dict().items(), key=lambda v: v[1])]
+
+ def __body_parameter_descriptor(self, method_id):
+ return {
+ 'name': 'body',
+ 'in': 'body',
+ 'required': True,
+ 'schema': {
+ '$ref': '#/definitions/{0}'.format(
+ self.__request_schema[method_id])
+ }
+ }
+
+ def __non_body_parameter_descriptor(self, param):
+ """Creates descriptor for a parameter.
+
+ Args:
+ param: The parameter to be described.
+
+ Returns:
+ Dictionary containing a descriptor for the parameter.
+ """
+ descriptor = {}
+
+ descriptor['name'] = param.name
+
+ param_type, param_format = self.__field_to_parameter_type_and_format(param)
+
+ # Required
+ if param.required:
+ descriptor['required'] = True
+
+ # Type
+ descriptor['type'] = param_type
+
+ # Format (optional)
+ if param_format:
+ descriptor['format'] = param_format
+
+ # Default
+ default = self.__parameter_default(param)
+ if default is not None:
+ descriptor['default'] = default
+
+ # Repeated
+ if param.repeated:
+ descriptor['repeated'] = True
+
+ # Enum
+ enum_descriptor = self.__parameter_enum(param)
+ if enum_descriptor is not None:
+ descriptor['enum'] = enum_descriptor
+
+ return descriptor
+
+ def __path_parameter_descriptor(self, param):
+ descriptor = self.__non_body_parameter_descriptor(param)
+ descriptor['required'] = True
+ descriptor['in'] = 'path'
+
+ return descriptor
+
+ def __query_parameter_descriptor(self, param):
+ descriptor = self.__non_body_parameter_descriptor(param)
+ descriptor['in'] = 'query'
+
+ # If this is a repeated field, convert it to the collectionFormat: multi
+ # style.
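+ # For example, a repeated string parameter (hypothetically named 'tags')
+ # becomes:
+ #   {'name': 'tags', 'in': 'query', 'collectionFormat': 'multi',
+ #    'type': 'array', 'items': {'type': 'string'}}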
+ if param.repeated:
+ descriptor['collectionFormat'] = 'multi'
+ descriptor['items'] = {
+ 'type': descriptor['type']
+ }
+ descriptor['type'] = 'array'
+ descriptor.pop('repeated', None)
+
+ return descriptor
+
+ def __add_parameter(self, param, path_parameters, params):
+ """Adds all parameters in a field to a method parameters descriptor.
+
+ Simple fields will only have one parameter, but a message field 'x' that
+ corresponds to a message class with fields 'y' and 'z' will result in
+ parameters 'x.y' and 'x.z', for example. The mapping from field to
+ parameters is mostly handled by __field_to_subfields.
+
+ Args:
+ param: Parameter to be added to the descriptor.
+ path_parameters: A list of parameters matched from a path for this field.
+ For example for the hypothetical 'x' from above if the path was
+ '/a/{x.z}/b/{other}' then this list would contain only the element
+ 'x.z' since 'other' does not match to this field.
+ params: List of parameter descriptors; descriptors built for this
+ field are appended to it.
+ """
+ # If this is a simple field, just build the descriptor and append it.
+ # Otherwise, build a schema and assign it to this descriptor
+ if not isinstance(param, messages.MessageField):
+ if param.name in path_parameters:
+ descriptor = self.__path_parameter_descriptor(param)
+ else:
+ descriptor = self.__query_parameter_descriptor(param)
+
+ params.append(descriptor)
+ else:
+ # If a subfield of a MessageField is found in the path, build a descriptor
+ # for the path parameter.
+ for subfield_list in self.__field_to_subfields(param):
+ qualified_name = '.'.join(subfield.name for subfield in subfield_list)
+ if qualified_name in path_parameters:
+ descriptor = self.__path_parameter_descriptor(subfield_list[-1])
+ descriptor['required'] = True
+
+ params.append(descriptor)
+
+ def __params_descriptor_without_container(self, message_type,
+ request_kind, method_id, path):
+ """Describe parameters of a method which does not use a ResourceContainer.
+
+ Makes sure that the path parameters are included in the message definition
+ and adds any required fields and URL query parameters.
+
+ This method is to preserve backwards compatibility and will be removed in
+ a future release.
+
+ Args:
+ message_type: messages.Message class, Message with parameters to describe.
+ request_kind: The type of request being made.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+ path: string, HTTP path to method.
+
+ Returns:
+ A list of dicts: Descriptors of the parameters
+ """
+ params = []
+
+ path_parameter_dict = self.__get_path_parameters(path)
+ for field in sorted(message_type.all_fields(), key=lambda f: f.number):
+ matched_path_parameters = path_parameter_dict.get(field.name, [])
+ self.__validate_path_parameters(field, matched_path_parameters)
+
+ if matched_path_parameters or request_kind == self.__NO_BODY:
+ self.__add_parameter(field, matched_path_parameters, params)
+
+ # If the request has a body, add the body parameter
+ if (message_type != message_types.VoidMessage() and
+ request_kind == self.__HAS_BODY):
+ params.append(self.__body_parameter_descriptor(method_id))
+
+ return params
+
+ def __params_descriptor(self, message_type, request_kind, path, method_id):
+ """Describe the parameters of a method.
+
+ If the message_type is not a ResourceContainer, will fall back to
+ __params_descriptor_without_container (which will eventually be deprecated).
+
+ If the message type is a ResourceContainer, then all path/query parameters
+ will come from the ResourceContainer. This method will also make sure all
+ path parameters are covered by the message fields.
+
+ Args:
+ message_type: messages.Message or ResourceContainer class, Message with
+ parameters to describe.
+ request_kind: The type of request being made.
+ path: string, HTTP path to method.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+
+ Returns:
+ A list of dicts: Descriptors of the parameters.
+ """
+ path_parameter_dict = self.__get_path_parameters(path)
+
+ if not isinstance(message_type, resource_container.ResourceContainer):
+ if path_parameter_dict:
+ _logger.warning('Method %s specifies path parameters but you are not '
+ 'using a ResourceContainer; instead, you are using %r. '
+ 'This will fail in future releases; please switch to '
+ 'using ResourceContainer as soon as possible.',
+ method_id, type(message_type))
+ return self.__params_descriptor_without_container(
+ message_type, request_kind, method_id, path)
+
+ # From here, we can assume message_type is a ResourceContainer.
+ params = []
+
+ # Process body parameter, if any
+ if message_type.body_message_class != message_types.VoidMessage:
+ params.append(self.__body_parameter_descriptor(method_id))
+
+ # Process path/querystring parameters
+ params_message_type = message_type.parameters_message_class()
+
+ # Make sure all path parameters are covered.
+ for field_name, matched_path_parameters in path_parameter_dict.items():
+ field = params_message_type.field_by_name(field_name)
+ self.__validate_path_parameters(field, matched_path_parameters)
+
+ # Add all fields, sort by field.number since we have parameterOrder.
+ for field in sorted(params_message_type.all_fields(),
+ key=lambda f: f.number):
+ matched_path_parameters = path_parameter_dict.get(field.name, [])
+ self.__add_parameter(field, matched_path_parameters, params)
+
+ return params
+
+ def __request_message_descriptor(self, request_kind, message_type, method_id,
+ path):
+ """Describes the parameters and body of the request.
+
+ Args:
+ request_kind: The type of request being made.
+ message_type: messages.Message or ResourceContainer class. The message to
+ describe.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+ path: string, HTTP path to method.
+
+ Returns:
+ A list of dicts describing the parameters and body of the request.
+
+ Raises:
+ ValueError: if the method path and request required fields do not match
+ """
+ if isinstance(message_type, resource_container.ResourceContainer):
+ base_message_type = message_type.body_message_class()
+ if (request_kind == self.__NO_BODY and
+ base_message_type != message_types.VoidMessage()):
+ msg = ('Method %s specifies a body message in its ResourceContainer, but '
+ 'is an HTTP method type that cannot accept a body.') % method_id
+ raise api_exceptions.ApiConfigurationError(msg)
+ else:
+ base_message_type = message_type
+
+ if (request_kind != self.__NO_BODY and
+ base_message_type != message_types.VoidMessage()):
+ self.__request_schema[method_id] = self.__parser.add_message(
+ base_message_type.__class__)
+
+ params = self.__params_descriptor(message_type, request_kind, path,
+ method_id)
+
+ return params
+
+ def __definitions_descriptor(self):
+ """Describes the definitions section of the OpenAPI spec.
+
+ Returns:
+ Dictionary describing the definitions of the spec.
+ """
+ # Filter out any keys that aren't 'properties' or 'type'
+ result = {}
+ for def_key, def_value in self.__parser.schemas().items():
+ if 'properties' in def_value or 'type' in def_value:
+ key_result = {}
+ required_keys = set()
+ if 'type' in def_value:
+ key_result['type'] = def_value['type']
+ if 'properties' in def_value:
+ for prop_key, prop_value in def_value['properties'].items():
+ if isinstance(prop_value, dict) and 'required' in prop_value:
+ required_keys.add(prop_key)
+ del prop_value['required']
+ key_result['properties'] = def_value['properties']
+ # Add in the required fields, if any
+ if required_keys:
+ key_result['required'] = sorted(required_keys)
+ result[def_key] = key_result
+
+ # Add 'type': 'object' to all object properties
+ # Also, recursively add relative path to all $ref values
+ for def_value in result.values():
+ for prop_value in def_value.values():
+ if isinstance(prop_value, dict):
+ if '$ref' in prop_value:
+ prop_value['type'] = 'object'
+ self._add_def_paths(prop_value)
+
+ return result
+
+ def __response_message_descriptor(self, message_type, method_id):
+ """Describes the response.
+
+ Args:
+ message_type: messages.Message class, The message to describe.
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method')
+
+ Returns:
+ Dictionary describing the response.
+ """
+
+ # Skeleton response descriptor, common to all response objects
+ descriptor = {'200': {'description': 'A successful response'}}
+
+ if message_type != message_types.VoidMessage():
+ self.__parser.add_message(message_type.__class__)
+ self.__response_schema[method_id] = self.__parser.ref_for_message_type(
+ message_type.__class__)
+ descriptor['200']['schema'] = {'$ref': '#/definitions/{0}'.format(
+ self.__response_schema[method_id])}
+
+ return dict(descriptor)
+
+ def __x_google_quota_descriptor(self, metric_costs):
+ """Describes the metric costs for a call.
+
+ Args:
+ metric_costs: Dict of metric definitions to the integer cost value against
+ that metric.
+
+ Returns:
+ A dict descriptor describing the Quota limits for the endpoint.
+ """
+ return {
+ 'metricCosts': {
+ metric: cost for (metric, cost) in metric_costs.items()
+ }
+ } if metric_costs else None
+
+ def __x_google_quota_definitions_descriptor(self, limit_definitions):
+ """Describes the quota limit definitions for an API.
+
+ Args:
+ limit_definitions: List of endpoints.LimitDefinition tuples
+
+ Returns:
+ A dict descriptor of the API's quota limit definitions.
+ """
+ if not limit_definitions:
+ return None
+
+ definitions_list = [{
+ 'name': ld.metric_name,
+ 'metric': ld.metric_name,
+ 'unit': '1/min/{project}',
+ 'values': {'STANDARD': ld.default_limit},
+ 'displayName': ld.display_name,
+ } for ld in limit_definitions]
+
+ metrics = [{
+ 'name': ld.metric_name,
+ 'valueType': 'INT64',
+ 'metricKind': 'GAUGE',
+ } for ld in limit_definitions]
+
+ return {
+ 'quota': {'limits': definitions_list},
+ 'metrics': metrics,
+ }
+
+ def __method_descriptor(self, service, method_info, operation_id,
+ protorpc_method_info, security_definitions):
+ """Describes a method.
+
+ Args:
+ service: endpoints.Service, Implementation of the API as a service.
+ method_info: _MethodInfo, Configuration for the method.
+ operation_id: string, Operation ID of the method
+ protorpc_method_info: protorpc.remote._RemoteMethodInfo, ProtoRPC
+ description of the method.
+ security_definitions: list of dicts, security definitions for the API.
+
+ Returns:
+ Dictionary describing the method.
+ """
+ descriptor = {}
+
+ request_message_type = (resource_container.ResourceContainer.
+ get_request_message(protorpc_method_info.remote))
+ request_kind = self.__get_request_kind(method_info)
+ remote_method = protorpc_method_info.remote
+
+ path = method_info.get_path(service.api_info)
+
+ descriptor['parameters'] = self.__request_message_descriptor(
+ request_kind, request_message_type,
+ method_info.method_id(service.api_info),
+ path)
+ descriptor['responses'] = self.__response_message_descriptor(
+ remote_method.response_type(), method_info.method_id(service.api_info))
+ descriptor['operationId'] = operation_id
+
+ # Insert the auth audiences, if any
+ api_key_required = method_info.is_api_key_required(service.api_info)
+ if method_info.audiences is not None:
+ descriptor['security'] = self.__security_descriptor(
+ method_info.audiences, security_definitions,
+ api_key_required=api_key_required)
+ elif service.api_info.audiences is not None or api_key_required:
+ descriptor['security'] = self.__security_descriptor(
+ service.api_info.audiences, security_definitions,
+ api_key_required=api_key_required)
+
+ # Insert the metric costs, if any
+ if method_info.metric_costs:
+ descriptor['x-google-quota'] = self.__x_google_quota_descriptor(
+ method_info.metric_costs)
+
+ return descriptor
+
+ def __security_descriptor(self, audiences, security_definitions,
+ api_key_required=False):
+ if not audiences:
+ if not api_key_required:
+ # no security
+ return []
+ # api key only
+ return [{_API_KEY: []}]
+
+ if isinstance(audiences, (tuple, list)):
+ # security_definitions includes not just the base issuers, but also the
+ # hash-appended versions, so we need to filter them out
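+ # (e.g. an illustrative derived key like 'firebase-1a2b3c4d' is skipped
+ # when its base issuer 'firebase' is also defined).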
+ security_issuers = set()
+ for definition_key in security_definitions.keys():
+ if definition_key == _API_KEY:
+ # API key definitions don't count for these purposes
+ continue
+ if '-' in definition_key:
+ split_key = definition_key.rsplit('-', 1)[0]
+ if split_key in security_definitions:
+ continue
+ security_issuers.add(definition_key)
+
+ if security_issuers != {_DEFAULT_SECURITY_DEFINITION}:
+ raise api_exceptions.ApiConfigurationError(
+ 'audiences must be a dict when third-party issuers '
+ '(auth0, firebase, etc) are in use.'
+ )
+ audiences = {_DEFAULT_SECURITY_DEFINITION: audiences}
+
+ results = []
+ for issuer, issuer_audiences in audiences.items():
+ result_dict = {}
+ if issuer not in security_definitions:
+ raise TypeError('Missing issuer {}'.format(issuer))
+ audience_string = ','.join(sorted(issuer_audiences))
+ audience_hash = hashfunc(audience_string)
+ full_definition_key = '-'.join([issuer, audience_hash])
+ result_dict[full_definition_key] = []
+ if api_key_required:
+ result_dict[_API_KEY] = []
+ if full_definition_key not in security_definitions:
+ new_definition = dict(security_definitions[issuer])
+ new_definition['x-google-audiences'] = audience_string
+ security_definitions[full_definition_key] = new_definition
+ results.append(result_dict)
+
+ return results
+
+ def __security_definitions_descriptor(self, issuers):
+ """Create a descriptor for the security definitions.
+
+ Args:
+ issuers: dict, mapping issuer names to Issuer tuples
+
+ Returns:
+ The dict representing the security definitions descriptor.
+ """
+ if not issuers:
+ result = {
+ _DEFAULT_SECURITY_DEFINITION: {
+ 'authorizationUrl': '',
+ 'flow': 'implicit',
+ 'type': 'oauth2',
+ 'x-google-issuer': 'https://accounts.google.com',
+ 'x-google-jwks_uri': 'https://www.googleapis.com/oauth2/v3/certs',
+ }
+ }
+ return result
+
+ result = {}
+
+ for issuer_key, issuer_value in issuers.items():
+ result[issuer_key] = {
+ 'authorizationUrl': '',
+ 'flow': 'implicit',
+ 'type': 'oauth2',
+ 'x-google-issuer': issuer_value.issuer,
+ }
+
+ # If jwks_uri is omitted, the auth library will use OpenID discovery
+ # to find it. Otherwise, include it in the descriptor explicitly.
+ if issuer_value.jwks_uri:
+ result[issuer_key]['x-google-jwks_uri'] = issuer_value.jwks_uri
+
+ return result
+
+ def __get_merged_api_info(self, services):
+ """Builds a description of an API.
+
+ Args:
+ services: List of protorpc.remote.Service instances implementing an
+ api/version.
+
+ Returns:
+ The _ApiInfo object to use for the API that the given services implement.
+
+ Raises:
+ ApiConfigurationError: If there's something wrong with the API
+ configuration, such as a multiclass API decorated with different API
+ descriptors (see the docstring for api()).
+ """
+ merged_api_info = services[0].api_info
+
+ # Verify that, if there are multiple classes here, they're allowed to
+ # implement the same API.
+ for service in services[1:]:
+ if not merged_api_info.is_same_api(service.api_info):
+ raise api_exceptions.ApiConfigurationError(
+ _MULTICLASS_MISMATCH_ERROR_TEMPLATE % (service.api_info.name,
+ service.api_info.api_version))
+
+ return merged_api_info
+
+ def __api_openapi_descriptor(self, services, hostname=None, x_google_api_name=False):
+ """Builds an OpenAPI description of an API.
+
+ Args:
+ services: List of protorpc.remote.Service instances implementing an
+ api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+ x_google_api_name: Boolean; if True, include an 'x-google-api-name'
+ field (validated against the API name rules) in the descriptor.
+
+ Returns:
+ A dictionary that can be deserialized into JSON and stored as an API
+ description document in OpenAPI format.
+
+ Raises:
+ ApiConfigurationError: If there's something wrong with the API
+ configuration, such as a multiclass API decorated with different API
+ descriptors (see the docstring for api()), or a repeated method
+ signature.
+ """
+ merged_api_info = self.__get_merged_api_info(services)
+ descriptor = self.get_descriptor_defaults(merged_api_info,
+ hostname=hostname,
+ x_google_api_name=x_google_api_name)
+
+ description = merged_api_info.description
+ if not description and len(services) == 1:
+ description = services[0].__doc__
+ if description:
+ descriptor['info']['description'] = description
+
+ security_definitions = self.__security_definitions_descriptor(
+ merged_api_info.issuers)
+
+ method_map = {}
+ method_collision_tracker = {}
+ rest_collision_tracker = {}
+
+ for service in services:
+ remote_methods = service.all_remote_methods()
+
+ for protorpc_meth_name in sorted(remote_methods.keys()):
+ protorpc_meth_info = remote_methods[protorpc_meth_name]
+ method_info = getattr(protorpc_meth_info, 'method_info', None)
+ # Skip methods that are not decorated with @method
+ if method_info is None:
+ continue
+ method_id = method_info.method_id(service.api_info)
+ is_api_key_required = method_info.is_api_key_required(service.api_info)
+ path = '/{0}/{1}/{2}'.format(merged_api_info.name,
+ merged_api_info.path_version,
+ method_info.get_path(service.api_info))
+ verb = method_info.http_method.lower()
+
+ if path not in method_map:
+ method_map[path] = {}
+
+ # If an API key is required and the security definitions don't already
+ # have the apiKey issuer, add the appropriate notation now
+ if is_api_key_required and _API_KEY not in security_definitions:
+ security_definitions[_API_KEY] = {
+ 'type': 'apiKey',
+ 'name': _API_KEY_PARAM,
+ 'in': 'query'
+ }
+
+ # Derive an OperationId from the method name data
+ operation_id = self._construct_operation_id(
+ service.__name__, protorpc_meth_name)
+
+ method_map[path][verb] = self.__method_descriptor(
+ service, method_info, operation_id, protorpc_meth_info,
+ security_definitions)
+
+ # Make sure the same method name isn't repeated.
+ if method_id in method_collision_tracker:
+ raise api_exceptions.ApiConfigurationError(
+ 'Method %s used multiple times, in classes %s and %s' %
+ (method_id, method_collision_tracker[method_id],
+ service.__name__))
+ else:
+ method_collision_tracker[method_id] = service.__name__
+
+ # Make sure the same HTTP method & path aren't repeated.
+ rest_identifier = (method_info.http_method,
+ method_info.get_path(service.api_info))
+ if rest_identifier in rest_collision_tracker:
+ raise api_exceptions.ApiConfigurationError(
+ '%s path "%s" used multiple times, in classes %s and %s' %
+ (method_info.http_method, method_info.get_path(service.api_info),
+ rest_collision_tracker[rest_identifier],
+ service.__name__))
+ else:
+ rest_collision_tracker[rest_identifier] = service.__name__
+
+ if method_map:
+ descriptor['paths'] = method_map
+
+ # Add request and/or response definitions, if any
+ definitions = self.__definitions_descriptor()
+ if definitions:
+ descriptor['definitions'] = definitions
+
+ descriptor['securityDefinitions'] = security_definitions
+
+ # Add quota limit metric definitions, if any
+ limit_definitions = self.__x_google_quota_definitions_descriptor(
+ merged_api_info.limit_definitions)
+ if limit_definitions:
+ descriptor['x-google-management'] = limit_definitions
+
+ return descriptor
+
+ def get_descriptor_defaults(self, api_info, hostname=None, x_google_api_name=False):
+ """Gets a default configuration for a service.
+
+ Args:
+ api_info: _ApiInfo object for this service.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+ x_google_api_name: Boolean; if True, add an 'x-google-api-name' field
+ derived from api_info.name to the returned defaults.
+
+ Returns:
+ A dictionary with the default configuration.
+ """
+ hostname = (hostname or util.get_app_hostname() or
+ api_info.hostname)
+ protocol = 'http' if ((hostname and hostname.startswith('localhost')) or
+ util.is_running_on_devserver()) else 'https'
+ base_path = api_info.base_path
+ if base_path != '/':
+ base_path = base_path.rstrip('/')
+ defaults = {
+ 'swagger': '2.0',
+ 'info': {
+ 'version': api_info.api_version,
+ 'title': api_info.name
+ },
+ 'host': hostname,
+ 'consumes': ['application/json'],
+ 'produces': ['application/json'],
+ 'schemes': [protocol],
+ 'basePath': base_path,
+ }
+
+ if x_google_api_name:
+ defaults['x-google-api-name'] = _validate_api_name(api_info.name)
+
+ return defaults
+
+ def get_openapi_dict(self, services, hostname=None, x_google_api_name=False):
+ """JSON dict description of a protorpc.remote.Service in OpenAPI format.
+
+ Args:
+ services: Either a single protorpc.remote.Service or a list of them
+ that implements an api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+ x_google_api_name: Boolean; whether to emit the 'x-google-api-name'
+ field in the descriptor.
+
+ Returns:
+ dict, The OpenAPI descriptor document as a JSON dict.
+ """
+
+ if not isinstance(services, (tuple, list)):
+ services = [services]
+
+ # The type of a class that inherits from remote.Service is actually
+ # remote._ServiceClass, thanks to metaclass strangeness.
+ # pylint: disable=protected-access
+ util.check_list_type(services, remote._ServiceClass, 'services',
+ allow_none=False)
+
+ return self.__api_openapi_descriptor(services, hostname=hostname, x_google_api_name=x_google_api_name)
+
+ def pretty_print_config_to_json(self, services, hostname=None, x_google_api_name=False):
+ """JSON string description of a protorpc.remote.Service in OpenAPI format.
+
+ Args:
+ services: Either a single protorpc.remote.Service or a list of them
+ that implements an api/version.
+ hostname: string, Hostname of the API, to override the value set on the
+ current service. Defaults to None.
+ x_google_api_name: Boolean; passed through to get_openapi_dict.
+
+ Returns:
+ string, The OpenAPI descriptor document as a JSON string.
+ """
+ descriptor = self.get_openapi_dict(services, hostname, x_google_api_name=x_google_api_name)
+ return json.dumps(descriptor, sort_keys=True, indent=2,
+ separators=(',', ': '))
+
+
+def hashfunc(string):
+ # hashlib.md5 requires bytes on Python 3, so encode the string first.
+ return hashlib.md5(string.encode('utf-8')).hexdigest()[:8]
diff --git a/third_party/endpoints/parameter_converter.py b/third_party/endpoints/parameter_converter.py
new file mode 100644
index 0000000..5e2743f
--- /dev/null
+++ b/third_party/endpoints/parameter_converter.py
@@ -0,0 +1,200 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper that converts parameter values to the type expected by the API.
+
+Parameter values that appear in the URL and the query string are usually
+converted to native types before being passed to the backend. This code handles
+that conversion and some validation.
+"""
+
+# pylint: disable=g-bad-name
+from __future__ import absolute_import
+
+from . import errors
+
+__all__ = ['transform_parameter_value']
+
+
+def _check_enum(parameter_name, value, parameter_config):
+ """Checks if an enum value is valid.
+
+ This is called by the transform_parameter_value function and shouldn't be
+ called directly.
+
+ This verifies that the value of an enum parameter is valid.
+
+ Args:
+ parameter_name: A string containing the name of the parameter, which is
+ either just a variable name or the name with the index appended. For
+ example 'var' or 'var[2]'.
+ value: A string containing the value passed in for the parameter.
+ parameter_config: The dictionary containing information specific to the
+ parameter in question. This is retrieved from request.parameters in
+ the method config.
+
+ Raises:
+ EnumRejectionError: If the given value is not among the accepted
+ enum values in the field parameter.
+ """
+ enum_values = [enum['backendValue']
+ for enum in parameter_config['enum'].values()
+ if 'backendValue' in enum]
+ if value not in enum_values:
+ raise errors.EnumRejectionError(parameter_name, value, enum_values)
+
+
+def _check_boolean(parameter_name, value, parameter_config):
+ """Checks if a boolean value is valid.
+
+ This is called by the transform_parameter_value function and shouldn't be
+ called directly.
+
+ This checks that the string value passed in can be converted to a valid
+ boolean value.
+
+ Args:
+ parameter_name: A string containing the name of the parameter, which is
+ either just a variable name or the name with the index appended. For
+ example 'var' or 'var[2]'.
+ value: A string containing the value passed in for the parameter.
+ parameter_config: The dictionary containing information specific to the
+ parameter in question. This is retrieved from request.parameters in
+ the method config.
+
+ Raises:
+ BasicTypeParameterError: If the given value is not a valid boolean
+ value.
+ """
+ if parameter_config.get('type') != 'boolean':
+ return
+
+ if value.lower() not in ('1', 'true', '0', 'false'):
+ raise errors.BasicTypeParameterError(parameter_name, value, 'boolean')
+
+
+def _convert_boolean(value):
+ """Convert a string to a boolean value the same way the server does.
+
+ This is called by the transform_parameter_value function and shouldn't be
+ called directly.
+
+ Args:
+ value: A string value to be converted to a boolean.
+
+ Returns:
+ True or False, based on whether the value in the string would be interpreted
+ as true or false by the server. In the case of an invalid entry, this
+ returns False.
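+ For example, 'TRUE' and '1' convert to True, while 'false', '0', and
+ any unrecognized value convert to False.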
+ """
+ if value.lower() in ('1', 'true'):
+ return True
+ return False
+
+
+# Map to convert parameters from strings to their desired back-end format.
+# Anything not listed here will remain a string. Note that the server
+# keeps int64 and uint64 as strings when passed to the backend.
+# This maps a type name from the .api method configuration to a (validation
+# function, conversion function, descriptive type name) tuple. The
+# descriptive type name is only used in conversion error messages, and the
+# names here are chosen to match the error messages from the server.
+# Note that the 'enum' entry is special cased. Enums have 'type': 'string',
+# so we have special case code to recognize them and use the 'enum' map
+# entry.
+_PARAM_CONVERSION_MAP = {'boolean': (_check_boolean,
+ _convert_boolean,
+ 'boolean'),
+ 'int32': (None, int, 'integer'),
+ 'uint32': (None, int, 'integer'),
+ 'float': (None, float, 'float'),
+ 'double': (None, float, 'double'),
+ 'enum': (_check_enum, None, None)}
+
+
+def _get_parameter_conversion_entry(parameter_config):
+ """Get information needed to convert the given parameter to its API type.
+
+ Args:
+ parameter_config: The dictionary containing information specific to the
+ parameter in question. This is retrieved from request.parameters in the
+ method config.
+
+ Returns:
+ The entry from _PARAM_CONVERSION_MAP with functions/information needed to
+ validate and convert the given parameter from a string to the type expected
+ by the API.
+ """
+ entry = _PARAM_CONVERSION_MAP.get(parameter_config.get('type'))
+
+ # Special handling for enum parameters. An enum's type is 'string', so we
+ # need to detect them by the presence of an 'enum' property in their
+ # configuration.
+ if entry is None and 'enum' in parameter_config:
+ entry = _PARAM_CONVERSION_MAP['enum']
+
+ return entry
+
+
+def transform_parameter_value(parameter_name, value, parameter_config):
+ """Validates and transforms parameters to the type expected by the API.
+
+ If the value is a list this will recursively call transform_parameter_value
+ on the values in the list. Otherwise, it checks all parameter rules for the
+ current value and converts its type from a string to whatever format
+ the API expects.
+
+ In the list case, '[index-of-value]' is appended to the parameter name for
+ error reporting purposes.
+
+ Args:
+ parameter_name: A string containing the name of the parameter, which is
+ either just a variable name or the name with the index appended, in the
+ recursive case. For example 'var' or 'var[2]'.
+ value: A string or list of strings containing the value(s) passed in for
+ the parameter. These are the values from the request, to be validated,
+ transformed, and passed along to the backend.
+ parameter_config: The dictionary containing information specific to the
+ parameter in question. This is retrieved from request.parameters in the
+ method config.
+
+ Returns:
+ The converted parameter value(s). Not all types are converted, so this
+ may be the same string that's passed in.
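+ For example, the string '2' with an int32 parameter config is returned
+ as the integer 2, while int64 values deliberately stay strings.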
+ """
+ if isinstance(value, list):
+ # We're only expecting to handle path and query string parameters here.
+ # The way path and query string parameters are passed in, they'll likely
+ # only be single values or singly-nested lists (no lists nested within
+ # lists). But even if there are nested lists, we'd want to preserve that
+ # structure. These recursive calls should preserve it and convert all
+ # parameter values. See the docstring for information about the parameter
+ # renaming done here.
+ return [transform_parameter_value('%s[%d]' % (parameter_name, index),
+ element, parameter_config)
+ for index, element in enumerate(value)]
+
+ # Validate and convert the parameter value.
+ entry = _get_parameter_conversion_entry(parameter_config)
+ if entry:
+ validation_func, conversion_func, type_name = entry
+ if validation_func:
+ validation_func(parameter_name, value, parameter_config)
+ if conversion_func:
+ try:
+ return conversion_func(value)
+ except ValueError:
+ raise errors.BasicTypeParameterError(parameter_name, value, type_name)
+
+ return value
diff --git a/third_party/endpoints/protojson.py b/third_party/endpoints/protojson.py
new file mode 100644
index 0000000..6f0b2f9
--- /dev/null
+++ b/third_party/endpoints/protojson.py
@@ -0,0 +1,108 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Endpoints-specific implementation of ProtoRPC's ProtoJson class."""
+from __future__ import absolute_import
+
+import base64
+
+from protorpc import protojson
+
+from . import messages
+
+# pylint: disable=g-bad-name
+
+
+__all__ = ['EndpointsProtoJson']
+
+
+class EndpointsProtoJson(protojson.ProtoJson):
+ """Endpoints-specific implementation of ProtoRPC's ProtoJson class.
+
+ We need to adjust the way some types of data are encoded to ensure they're
+ consistent with the existing API pipeline. This class adjusts the JSON
+ encoding as needed.
+
+ This may be used in a multithreaded environment, so take care to ensure
+ that this class (and its parent, protojson.ProtoJson) remain thread-safe.
+ """
+
+ def encode_field(self, field, value):
+ """Encode a python field value to a JSON value.
+
+ Args:
+ field: A ProtoRPC field instance.
+ value: A python value supported by field.
+
+ Returns:
+ A JSON serializable value appropriate for field.
+ """
+ # Override the handling of 64-bit integers, so they're always encoded
+ # as strings.
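+ # For example, an INT64 value of 9007199254740993 (2**53 + 1) is encoded
+ # as the string '9007199254740993', which JavaScript clients can
+ # round-trip without losing precision.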
+ if (isinstance(field, messages.IntegerField) and
+ field.variant in (messages.Variant.INT64,
+ messages.Variant.UINT64,
+ messages.Variant.SINT64)):
+ if value not in (None, [], ()):
+ # Convert and replace the value.
+ if isinstance(value, list):
+ value = [str(subvalue) for subvalue in value]
+ else:
+ value = str(value)
+ return value
+
+ return super(EndpointsProtoJson, self).encode_field(field, value)
+
+ @staticmethod
+ def __pad_value(value, pad_len_multiple, pad_char):
+ """Add padding characters to the value if needed.
+
+ Args:
+ value: The string value to be padded.
+ pad_len_multiple: Pad the result so its length is a multiple
+ of pad_len_multiple.
+ pad_char: The character to use for padding.
+
+ Returns:
+ The string value with padding characters added.
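+ For example, __pad_value('abc', 4, '=') returns 'abc=', while
+ __pad_value('abcd', 4, '=') returns 'abcd' unchanged.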
+ """
+ assert pad_len_multiple > 0
+ assert len(pad_char) == 1
+ padding_length = (pad_len_multiple -
+ (len(value) % pad_len_multiple)) % pad_len_multiple
+ return value + pad_char * padding_length
+
+ def decode_field(self, field, value):
+ """Decode a JSON value to a python value.
+
+ Args:
+ field: A ProtoRPC field instance.
+ value: A serialized JSON value.
+
+ Returns:
+ A Python value compatible with field.
+ """
+ # Override BytesField handling. Client libraries typically use a url-safe
+ # encoding. b64decode doesn't handle these gracefully. urlsafe_b64decode
+ # handles both cases safely. Also add padding if the padding is incorrect.
+ if isinstance(field, messages.BytesField):
+ try:
+ # Need to call str(value) because ProtoRPC likes to pass values
+ # as unicode, and urlsafe_b64decode can only handle bytes.
+ padded_value = self.__pad_value(str(value), 4, '=')
+ return base64.urlsafe_b64decode(padded_value)
+ except (TypeError, UnicodeEncodeError) as err:
+ raise messages.DecodeError('Base64 decoding error: %s' % err)
+
+ return super(EndpointsProtoJson, self).decode_field(field, value)
diff --git a/third_party/endpoints/proxy.html b/third_party/endpoints/proxy.html
new file mode 100644
index 0000000..cb9d96f
--- /dev/null
+++ b/third_party/endpoints/proxy.html
@@ -0,0 +1,31 @@
+<!DOCTYPE html>
+<html>
+<head>
+<title></title>
+<meta http-equiv="X-UA-Compatible" content="IE=edge" />
+<!--
+Copyright 2016 Google Inc. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<script type="text/javascript">
+ window['startup'] = function() {
+ googleapis.server.init();
+ };
+</script>
+<script type="text/javascript"
+ src="https://apis.google.com/js/googleapis.proxy.js?onload=startup" async defer></script>
+</head>
+<body>
+</body>
+</html>
diff --git a/third_party/endpoints/resource_container.py b/third_party/endpoints/resource_container.py
new file mode 100644
index 0000000..19519db
--- /dev/null
+++ b/third_party/endpoints/resource_container.py
@@ -0,0 +1,218 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Module for a class that contains a request body resource and parameters."""
+from __future__ import absolute_import
+
+from . import message_types
+from . import messages
+
+
+class ResourceContainer(object):
+ """Container for a request body resource combined with parameters.
+
+ Used for API methods which may also have path or query parameters in addition
+ to a request body.
+
+ Attributes:
+ body_message_class: A message class to represent a request body.
+ parameters_message_class: A placeholder message class for request
+ parameters.
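+
+ Example (the field name here is illustrative):
+
+ ID_RESOURCE = ResourceContainer(
+ message_types.VoidMessage,
+ id=messages.IntegerField(1, required=True))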
+ """
+
+ __remote_info_cache = {} # pylint: disable=g-bad-name
+
+ __combined_message_class = None # pylint: disable=invalid-name
+
+ def __init__(self, _body_message_class=message_types.VoidMessage, **kwargs):
+ """Constructor for ResourceContainer.
+
+ Stores a request body message class and attempts to create one from the
+ keyword arguments passed in.
+
+ Args:
+ _body_message_class: A keyword argument to be treated like a positional
+ argument. This will not conflict with the potential names of fields
+ since they can't begin with underscore. We make this a keyword
+ argument since the default VoidMessage is a very common choice given
+ the prevalence of GET methods.
+ **kwargs: Keyword arguments specifying field names (the named arguments)
+ and instances of ProtoRPC fields as the values.
+ """
+ self.body_message_class = _body_message_class
+ self.parameters_message_class = type('ParameterContainer',
+ (messages.Message,), kwargs)
+
+ @property
+ def combined_message_class(self):
+ """A ProtoRPC message class with both request and parameters fields.
+
+ Caches the result in a local private variable. Uses _CopyField to create
+ copies of the fields from the existing request and parameters classes since
+ those fields are "owned" by the message classes.
+
+ Raises:
+ TypeError: If a field name is used in both the request message and the
+ parameters but the two fields do not represent the same type.
+
+ Returns:
+ Value of combined message class for this property.
+ """
+ if self.__combined_message_class is not None:
+ return self.__combined_message_class
+
+ fields = {}
+ # We don't need to preserve field.number since this combined class is only
+ # used for the protorpc remote.method and is not needed for the API config.
+ # The only place field.number matters is in parameterOrder, but this is set
+ # based on container.parameters_message_class which will use the field
+ # numbers originally passed in.
+
+ # Counter for fields.
+ field_number = 1
+ for field in self.body_message_class.all_fields():
+ fields[field.name] = _CopyField(field, number=field_number)
+ field_number += 1
+ for field in self.parameters_message_class.all_fields():
+ if field.name in fields:
+ if not _CompareFields(field, fields[field.name]):
+ raise TypeError('Field %r contained in both parameters and request '
+ 'body, but the fields differ.' % (field.name,))
+ else:
+ # Skip a field that's already there.
+ continue
+ fields[field.name] = _CopyField(field, number=field_number)
+ field_number += 1
+
+ self.__combined_message_class = type('CombinedContainer',
+ (messages.Message,), fields)
+ return self.__combined_message_class
+
+ @classmethod
+ def add_to_cache(cls, remote_info, container): # pylint: disable=g-bad-name
+ """Adds a ResourceContainer to a cache tying it to a protorpc method.
+
+ Args:
+ remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding
+ to a method.
+ container: An instance of ResourceContainer.
+
+ Raises:
+ TypeError: if the container is not an instance of cls.
+ KeyError: if the remote method has been referenced by a container
+ before. This should never occur because a remote method is created
+ only once.
+ """
+ if not isinstance(container, cls):
+ raise TypeError('%r not an instance of %r, could not be added to cache.' %
+ (container, cls))
+ if remote_info in cls.__remote_info_cache:
+ raise KeyError('Cache has collision but should not.')
+ cls.__remote_info_cache[remote_info] = container
+
+ @classmethod
+ def get_request_message(cls, remote_info): # pylint: disable=g-bad-name
+ """Gets request message or container from remote info.
+
+ Args:
+ remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding
+ to a method.
+
+ Returns:
+ Either an instance of the request type from the remote or the
+ ResourceContainer that was cached with the remote method.
+ """
+ if remote_info in cls.__remote_info_cache:
+ return cls.__remote_info_cache[remote_info]
+ else:
+ return remote_info.request_type()
+
+
+def _GetFieldAttributes(field):
+ """Decomposes field into the needed arguments to pass to the constructor.
+
+ This can be used to create copies of the field or to compare if two fields
+ are "equal" (since __eq__ is not implemented on messages.Field).
+
+ Args:
+ field: A ProtoRPC message field (potentially to be copied).
+
+ Raises:
+ TypeError: If the field is not an instance of messages.Field.
+
+ Returns:
+ A pair of relevant arguments to be passed to the constructor for the field
+ type. The first element is a list of positional arguments for the
+ constructor and the second is a dictionary of keyword arguments.
+ """
+ if not isinstance(field, messages.Field):
+ raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,))
+
+ positional_args = []
+ kwargs = {
+ 'required': field.required,
+ 'repeated': field.repeated,
+ 'variant': field.variant,
+ 'default': field._Field__default, # pylint: disable=protected-access
+ }
+
+ if isinstance(field, messages.MessageField):
+ # Message fields can't have a default
+ kwargs.pop('default')
+ if not isinstance(field, message_types.DateTimeField):
+ positional_args.insert(0, field.message_type)
+ elif isinstance(field, messages.EnumField):
+ positional_args.insert(0, field.type)
+
+ return positional_args, kwargs
+
+
+def _CompareFields(field, other_field):
+ """Checks if two ProtoRPC fields are "equal".
+
+ Compares the arguments, rather than the id of the elements (which is
+ the default __eq__ behavior) as well as the class of the fields.
+
+ Args:
+ field: A ProtoRPC message field to be compared.
+ other_field: A ProtoRPC message field to be compared.
+
+ Returns:
+ Boolean indicating whether the fields are equal.
+ """
+ field_attrs = _GetFieldAttributes(field)
+ other_field_attrs = _GetFieldAttributes(other_field)
+ if field_attrs != other_field_attrs:
+ return False
+ return field.__class__ == other_field.__class__
+
+
+def _CopyField(field, number=None):
+ """Copies a (potentially) owned ProtoRPC field instance into a new copy.
+
+ Args:
+ field: A ProtoRPC message field to be copied.
+ number: An integer for the field to override the number of the field.
+ Defaults to None.
+
+ Raises:
+ TypeError: If the field is not an instance of messages.Field.
+
+ Returns:
+ A copy of the ProtoRPC message field.
+ """
+ positional_args, kwargs = _GetFieldAttributes(field)
+ number = number or field.number
+ positional_args.append(number)
+ return field.__class__(*positional_args, **kwargs)
diff --git a/third_party/endpoints/types.py b/third_party/endpoints/types.py
new file mode 100644
index 0000000..e6291fd
--- /dev/null
+++ b/third_party/endpoints/types.py
@@ -0,0 +1,57 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provide various utility/container types needed by Endpoints Framework.
+
+Putting them in this file makes it easier to avoid circular imports,
+as well as keep from complicating tests due to importing code that
+uses App Engine APIs.
+"""
+
+from __future__ import absolute_import
+
+import six
+
+import attr
+
+__all__ = [
+ 'OAuth2Scope', 'Issuer', 'LimitDefinition', 'Namespace',
+]
+
+
+@attr.s(frozen=True, slots=True)
+class OAuth2Scope(object):
+ scope = attr.ib(validator=attr.validators.instance_of(six.string_types))
+ description = attr.ib(validator=attr.validators.instance_of(six.string_types))
+
+ @classmethod
+ def convert_scope(cls, scope):
+ "Convert string scopes into OAuth2Scope objects."
+ if isinstance(scope, cls):
+ return scope
+ return cls(scope=scope, description=scope)
+
+ @classmethod
+ def convert_list(cls, values):
+ "Convert a list of scopes into a list of OAuth2Scope objects."
+ if values is not None:
+ return [cls.convert_scope(value) for value in values]
+
+Issuer = attr.make_class('Issuer', ['issuer', 'jwks_uri'])
+LimitDefinition = attr.make_class('LimitDefinition', ['metric_name',
+ 'display_name',
+ 'default_limit'])
+Namespace = attr.make_class('Namespace', ['owner_domain',
+ 'owner_name',
+ 'package_path'])
diff --git a/third_party/endpoints/users_id_token.py b/third_party/endpoints/users_id_token.py
new file mode 100644
index 0000000..2080805
--- /dev/null
+++ b/third_party/endpoints/users_id_token.py
@@ -0,0 +1,844 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility library for reading user information from an id_token.
+
+This is an experimental library that can temporarily be used to extract
+a user from an id_token. The functionality provided by this library
+will be provided elsewhere in the future.
+"""
+
+from __future__ import absolute_import
+
+import base64
+import binascii
+import hmac
+import json
+import logging
+import os
+import re
+import six
+import time
+from six.moves import urllib
+from collections.abc import Container as _Container
+from collections.abc import Iterable as _Iterable
+from collections.abc import Mapping as _Mapping
+
+from google.appengine.api import memcache
+from google.appengine.api import oauth
+from google.appengine.api import urlfetch
+from google.appengine.api import users
+
+from . import constants
+from . import types as endpoints_types
+
+try:
+ # PyCrypto may not be installed for the import_aeta_test or in dev's
+ # individual Python installations. It is available on AppEngine in prod.
+
+ # Disable "Import not at top of file" warning.
+ # pylint: disable=g-import-not-at-top
+ from Crypto.Hash import SHA256
+ from Crypto.PublicKey import RSA
+ # pylint: enable=g-import-not-at-top
+ _CRYPTO_LOADED = True
+except ImportError:
+ _CRYPTO_LOADED = False
+
+
+__all__ = [
+ 'convert_jwks_uri',
+ 'get_current_user',
+ 'get_verified_jwt',
+ 'InvalidGetUserCall',
+ 'SKIP_CLIENT_ID_CHECK',
+]
+
+_logger = logging.getLogger(__name__)
+
+SKIP_CLIENT_ID_CHECK = ['*'] # This needs to be a list, for comparisons.
+_CLOCK_SKEW_SECS = 300 # 5 minutes in seconds
+_MAX_TOKEN_LIFETIME_SECS = 86400 # 1 day in seconds
+_DEFAULT_CERT_URI = ('https://www.googleapis.com/service_accounts/v1/metadata/'
+ 'raw/federated-signon@system.gserviceaccount.com')
+_ENDPOINTS_USER_INFO = 'google.api.auth.user_info'
+_ENV_USE_OAUTH_SCOPE = 'ENDPOINTS_USE_OAUTH_SCOPE'
+_ENV_AUTH_EMAIL = 'ENDPOINTS_AUTH_EMAIL'
+_ENV_AUTH_DOMAIN = 'ENDPOINTS_AUTH_DOMAIN'
+_EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
+_TOKENINFO_URL = 'https://www.googleapis.com/oauth2/v3/tokeninfo'
+_MAX_AGE_REGEX = re.compile(r'\s*max-age\s*=\s*(\d+)\s*')
+_CERT_NAMESPACE = '__verify_jwt'
+_ISSUERS = ('accounts.google.com', 'https://accounts.google.com')
+_DEFAULT_GOOGLE_ISSUER = {
+ 'google_id_token': endpoints_types.Issuer(_ISSUERS, _DEFAULT_CERT_URI)
+}
+
+
+class _AppIdentityError(Exception):
+ pass
+
+
+class InvalidGetUserCall(Exception):
+ """Called get_current_user when the environment was not set up for it."""
+
+
+# pylint: disable=g-bad-name
+def get_current_user():
+ """Get user information from the id_token or oauth token in the request.
+
+ This should only be called from within an Endpoints request handler,
+ decorated with an @endpoints.method decorator. The decorator should include
+ the https://www.googleapis.com/auth/userinfo.email scope.
+
+ If `endpoints_management.control.wsgi.AuthenticationMiddleware` is enabled,
+ this returns the user info decoded by the middleware. Otherwise, if the
+ current request uses an id_token, this validates and parses the token against
+  the info in the current request handler and returns the user. Or, for an
+  OAuth token, this call validates the token against the tokeninfo endpoint
+  and calls oauth.get_current_user with the scopes provided in the method's
+  decorator.
+
+ Returns:
+ None if there is no token or it's invalid. If the token was valid, this
+ returns a User. Only the user's email field is guaranteed to be set.
+ Other fields may be empty.
+
+ Raises:
+ InvalidGetUserCall: if the environment variables necessary to determine the
+ endpoints user are not set. These are typically set when processing a
+ request using an Endpoints handler. If they are not set, it likely
+ indicates that this function was called from outside an Endpoints request
+ handler.
+ """
+ if not _is_auth_info_available():
+ raise InvalidGetUserCall('No valid endpoints user in environment.')
+
+ if _ENDPOINTS_USER_INFO in os.environ:
+ user_info = os.environ[_ENDPOINTS_USER_INFO]
+ return users.User(user_info.email)
+
+ if _ENV_USE_OAUTH_SCOPE in os.environ:
+ # We can get more information from the oauth.get_current_user function,
+ # as long as we know what scope to use. Since that scope has been
+ # cached, we can just return this:
+ return oauth.get_current_user(os.environ[_ENV_USE_OAUTH_SCOPE].split())
+
+ if (_ENV_AUTH_EMAIL in os.environ and
+ _ENV_AUTH_DOMAIN in os.environ):
+ if not os.environ[_ENV_AUTH_EMAIL]:
+ # Either there was no id token or we were unable to validate it,
+ # so there's no user.
+ return None
+
+ return users.User(os.environ[_ENV_AUTH_EMAIL],
+ os.environ[_ENV_AUTH_DOMAIN] or None)
+
+ # Shouldn't hit this, because all the _is_auth_info_available cases were
+ # checked, but just in case.
+ return None
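+
+
+# A minimal usage sketch, inside a hypothetical Endpoints method (the
+# message and method names below are illustrative, not part of this module):
+#
+#   @endpoints.method(message_types.VoidMessage, GreetingResponse,
+#                     scopes=[_EMAIL_SCOPE])
+#   def greet(self, request):
+#     user = get_current_user()
+#     if user is None:
+#       raise endpoints.UnauthorizedException('Invalid token.')
+#     return GreetingResponse(text='Hello, %s' % user.email())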
+
+
+# pylint: disable=g-bad-name
+def _is_auth_info_available():
+ """Check if user auth info has been set in environment variables."""
+ return (_ENDPOINTS_USER_INFO in os.environ or
+ (_ENV_AUTH_EMAIL in os.environ and _ENV_AUTH_DOMAIN in os.environ) or
+ _ENV_USE_OAUTH_SCOPE in os.environ)
+
+
+def _maybe_set_current_user_vars(method, api_info=None, request=None):
+ """Get user information from the id_token or oauth token in the request.
+
+ Used internally by Endpoints to set up environment variables for user
+ authentication.
+
+ Args:
+ method: The class method that's handling this request. This method
+ should be annotated with @endpoints.method.
+ api_info: An api_config._ApiInfo instance. Optional. If None, will attempt
+ to parse api_info from the implicit instance of the method.
+ request: The current request, or None.
+ """
+ if _is_auth_info_available():
+ return
+
+ # By default, there's no user.
+ os.environ[_ENV_AUTH_EMAIL] = ''
+ os.environ[_ENV_AUTH_DOMAIN] = ''
+
+ # Choose settings on the method, if specified. Otherwise, choose settings
+ # from the API. Specifically check for None, so that methods can override
+ # with empty lists.
+  try:
+    api_info = api_info or method.__self__.api_info
+  except AttributeError:
+    # The most common case for this is someone passing an unbound method
+    # or plain function to this function, which most likely only happens
+    # in our unit tests. We could propagate the exception, but this results
+    # in some really difficult to debug behavior. Better to log a warning
+    # and pretend there are no API-level settings.
+    _logger.warning('AttributeError when accessing %s.__self__. An unbound '
+                    'method was probably passed as an endpoints handler.',
+                    method.__name__)
+ scopes = method.method_info.scopes
+ audiences = method.method_info.audiences
+ allowed_client_ids = method.method_info.allowed_client_ids
+ else:
+ scopes = (method.method_info.scopes
+ if method.method_info.scopes is not None
+ else api_info.scopes)
+ audiences = (method.method_info.audiences
+ if method.method_info.audiences is not None
+ else api_info.audiences)
+ allowed_client_ids = (method.method_info.allowed_client_ids
+ if method.method_info.allowed_client_ids is not None
+ else api_info.allowed_client_ids)
+
+ if not scopes and not audiences and not allowed_client_ids:
+ # The user hasn't provided any information to allow us to parse either
+ # an id_token or an Oauth token. They appear not to be interested in
+ # auth.
+ return
+
+ token = _get_token(request)
+ if not token:
+ return None
+
+ if allowed_client_ids and _is_local_dev():
+ allowed_client_ids = (constants.API_EXPLORER_CLIENT_ID,) + tuple(allowed_client_ids)
+
+ # When every item in the acceptable scopes list is
+ # "https://www.googleapis.com/auth/userinfo.email", and there is a non-empty
+ # allowed_client_ids list, the API code will first attempt OAuth 2/OpenID
+ # Connect ID token processing for any incoming bearer token.
+ if ((scopes == [_EMAIL_SCOPE] or scopes == (_EMAIL_SCOPE,)) and
+ allowed_client_ids):
+ _logger.debug('Checking for id_token.')
+ issuers = api_info.issuers
+ if issuers is None:
+ issuers = _DEFAULT_GOOGLE_ISSUER
+ elif 'google_id_token' not in issuers:
+ issuers.update(_DEFAULT_GOOGLE_ISSUER)
+ time_now = int(time.time())
+ user = _get_id_token_user(token, issuers, audiences, allowed_client_ids,
+ time_now, memcache)
+ if user:
+ os.environ[_ENV_AUTH_EMAIL] = user.email()
+ os.environ[_ENV_AUTH_DOMAIN] = user.auth_domain()
+ return
+
+ # Check if the user is interested in an oauth token.
+ if scopes:
+ _logger.debug('Checking for oauth token.')
+ if _is_local_dev():
+ _set_bearer_user_vars_local(token, allowed_client_ids, scopes)
+ else:
+ _set_bearer_user_vars(allowed_client_ids, scopes)
+
+
+def _get_token(
+ request=None, allowed_auth_schemes=('OAuth', 'Bearer'),
+ allowed_query_keys=('bearer_token', 'access_token')):
+ """Get the auth token for this request.
+
+ Auth token may be specified in either the Authorization header or
+ as a query param (either access_token or bearer_token). We'll check in
+ this order:
+ 1. Authorization header.
+ 2. bearer_token query param.
+ 3. access_token query param.
+
+ Args:
+ request: The current request, or None.
+
+ Returns:
+ The token in the request or None.
+ """
+ allowed_auth_schemes = _listlike_guard(
+ allowed_auth_schemes, 'allowed_auth_schemes', iterable_only=True)
+ # Check if the token is in the Authorization header.
+ auth_header = os.environ.get('HTTP_AUTHORIZATION')
+ if auth_header:
+ for auth_scheme in allowed_auth_schemes:
+ if auth_header.startswith(auth_scheme):
+ return auth_header[len(auth_scheme) + 1:]
+ # If an auth header was specified, even if it's an invalid one, we won't
+ # look for the token anywhere else.
+ return None
+
+ # Check if the token is in the query string.
+ if request:
+ allowed_query_keys = _listlike_guard(
+ allowed_query_keys, 'allowed_query_keys', iterable_only=True)
+ for key in allowed_query_keys:
+ token, _ = request.get_unrecognized_field_info(key)
+ if token:
+ return token
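+
+
+# A small sketch of the precedence described above (assumed header values):
+def _example_get_token_precedence():
+  # The Authorization header wins; the scheme prefix plus one space is
+  # stripped from the returned token.
+  os.environ['HTTP_AUTHORIZATION'] = 'Bearer abc123'
+  assert _get_token(request=None) == 'abc123'
+  # An unrecognized scheme yields no token at all, even if a query
+  # parameter is also present.
+  os.environ['HTTP_AUTHORIZATION'] = 'Basic xyz'
+  assert _get_token(request=None) is None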
+
+
+def _get_id_token_user(token, issuers, audiences, allowed_client_ids, time_now, cache):
+ """Get a User for the given id token, if the token is valid.
+
+ Args:
+ token: The id_token to check.
+ issuers: dict of Issuers
+ audiences: List of audiences that are acceptable.
+ allowed_client_ids: List of client IDs that are acceptable.
+ time_now: The current time as an int (eg. int(time.time())).
+ cache: Cache to use (eg. the memcache module).
+
+ Returns:
+ A User if the token is valid, None otherwise.
+ """
+ # Verify that the token is valid before we try to extract anything from it.
+ # This verifies the signature and some of the basic info in the token.
+ for issuer_key, issuer in issuers.items():
+ issuer_cert_uri = convert_jwks_uri(issuer.jwks_uri)
+ try:
+ parsed_token = _verify_signed_jwt_with_certs(
+ token, time_now, cache, cert_uri=issuer_cert_uri)
+ except Exception: # pylint: disable=broad-except
+ _logger.debug(
+ 'id_token verification failed for issuer %s', issuer_key, exc_info=True)
+ continue
+
+ issuer_values = _listlike_guard(issuer.issuer, 'issuer', log_warning=False)
+ if isinstance(audiences, _Mapping):
+ audiences = audiences[issuer_key]
+ if _verify_parsed_token(
+ parsed_token, issuer_values, audiences, allowed_client_ids,
+ # There's some special handling we do for Google issuers.
+ # ESP doesn't do this, and it's both unnecessary and invalid for other issuers.
+ # So we'll turn it off except in the Google issuer case.
+ is_legacy_google_auth=(issuer.issuer == _ISSUERS)):
+ email = parsed_token['email']
+ # The token might have an id, but it's a Gaia ID that's been
+ # obfuscated with the Focus key, rather than the AppEngine (igoogle)
+ # key. If the developer ever put this email into the user DB
+ # and retrieved the ID from that, it'd be different from the ID we'd
+ # return here, so it's safer to not return the ID.
+ # Instead, we'll only return the email.
+ return users.User(email)
+
+
+# pylint: disable=unused-argument
+def _set_oauth_user_vars(token_info, audiences, allowed_client_ids, scopes,
+ local_dev):
+ _logger.warning('_set_oauth_user_vars is deprecated and will be removed '
+ 'soon.')
+ return _set_bearer_user_vars(allowed_client_ids, scopes)
+# pylint: enable=unused-argument
+
+
+def _process_scopes(scopes):
+ """Parse a scopes list into a set of all scopes and a set of sufficient scope sets.
+
+ scopes: A list of strings, each of which is a space-separated list of scopes.
+ Examples: ['scope1']
+ ['scope1', 'scope2']
+ ['scope1', 'scope2 scope3']
+
+ Returns:
+ all_scopes: a set of strings, each of which is one scope to check for
+ sufficient_scopes: a set of sets of strings; each inner set is
+ a set of scopes which are sufficient for access.
+ Example: {{'scope1'}, {'scope2', 'scope3'}}
+ """
+ all_scopes = set()
+ sufficient_scopes = set()
+ for scope_set in scopes:
+ scope_set_scopes = frozenset(scope_set.split())
+ all_scopes.update(scope_set_scopes)
+ sufficient_scopes.add(scope_set_scopes)
+ return all_scopes, sufficient_scopes
+
+
+def _are_scopes_sufficient(authorized_scopes, sufficient_scopes):
+ """Check if a list of authorized scopes satisfies any set of sufficient scopes.
+
+ Args:
+ authorized_scopes: a list of strings, return value from oauth.get_authorized_scopes
+ sufficient_scopes: a set of sets of strings, return value from _process_scopes
+ """
+ for sufficient_scope_set in sufficient_scopes:
+ if sufficient_scope_set.issubset(authorized_scopes):
+ return True
+ return False
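+
+
+# A worked example of the two helpers above (hypothetical scope names):
+def _example_scope_matching():
+  all_scopes, sufficient = _process_scopes(['scope1', 'scope2 scope3'])
+  assert all_scopes == {'scope1', 'scope2', 'scope3'}
+  assert sufficient == {frozenset(['scope1']), frozenset(['scope2', 'scope3'])}
+  # 'scope1' alone grants access; 'scope2' alone does not.
+  assert _are_scopes_sufficient(['scope1'], sufficient)
+  assert not _are_scopes_sufficient(['scope2'], sufficient)
+  assert _are_scopes_sufficient(['scope2', 'scope3'], sufficient)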
+
+
+
+def _set_bearer_user_vars(allowed_client_ids, scopes):
+ """Validate the oauth bearer token and set endpoints auth user variables.
+
+ If the bearer token is valid, this sets ENDPOINTS_USE_OAUTH_SCOPE. This
+ provides enough information that our endpoints.get_current_user() function
+ can get the user.
+
+ Args:
+ allowed_client_ids: List of client IDs that are acceptable.
+ scopes: List of acceptable scopes.
+ """
+ all_scopes, sufficient_scopes = _process_scopes(scopes)
+ try:
+ authorized_scopes = oauth.get_authorized_scopes(sorted(all_scopes))
+ except oauth.Error:
+ _logger.debug('Unable to get authorized scopes.', exc_info=True)
+ return
+ if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes):
+ _logger.warning('Authorized scopes did not satisfy scope requirements.')
+ return
+ client_id = oauth.get_client_id(authorized_scopes)
+
+ # The client ID must be in allowed_client_ids. If allowed_client_ids is
+ # empty, don't allow any client ID. If allowed_client_ids is set to
+ # SKIP_CLIENT_ID_CHECK, all client IDs will be allowed.
+ if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
+ client_id not in allowed_client_ids):
+ _logger.warning('Client ID is not allowed: %s', client_id)
+ return
+
+ os.environ[_ENV_USE_OAUTH_SCOPE] = ' '.join(authorized_scopes)
+ _logger.debug('get_current_user() will return user from matched oauth_user.')
+
+
+def _set_bearer_user_vars_local(token, allowed_client_ids, scopes):
+ """Validate the oauth bearer token on the dev server.
+
+ Since the functions in the oauth module return only example results in local
+ development, this hits the tokeninfo endpoint and attempts to validate the
+ token. If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we
+ can get the user from the token.
+
+ Args:
+ token: String with the oauth token to validate.
+ allowed_client_ids: List of client IDs that are acceptable.
+ scopes: List of acceptable scopes.
+ """
+ # Get token info from the tokeninfo endpoint.
+ result = urlfetch.fetch(
+ '%s?%s' % (_TOKENINFO_URL, urllib.parse.urlencode({'access_token': token})))
+ if result.status_code != 200:
+ try:
+ error_description = json.loads(result.content)['error_description']
+ except (ValueError, KeyError):
+ error_description = ''
+ _logger.error('Token info endpoint returned status %s: %s',
+ result.status_code, error_description)
+ return
+ token_info = json.loads(result.content)
+
+ # Validate email.
+ if 'email' not in token_info:
+ _logger.warning('Oauth token doesn\'t include an email address.')
+ return
+ if token_info.get('email_verified') != 'true':
+ _logger.warning('Oauth token email isn\'t verified.')
+ return
+
+ # Validate client ID.
+ client_id = token_info.get('azp')
+ if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
+ client_id not in allowed_client_ids):
+ _logger.warning('Client ID is not allowed: %s', client_id)
+ return
+
+ # Verify at least one of the scopes matches.
+ _, sufficient_scopes = _process_scopes(scopes)
+ authorized_scopes = token_info.get('scope', '').split(' ')
+ if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes):
+ _logger.warning('Oauth token scopes don\'t match any acceptable scopes.')
+ return
+
+ os.environ[_ENV_AUTH_EMAIL] = token_info['email']
+ os.environ[_ENV_AUTH_DOMAIN] = ''
+ _logger.debug('Local dev returning user from token.')
+
+
+def _is_local_dev():
+ return os.environ.get('SERVER_SOFTWARE', '').startswith('Development')
+
+
+def _verify_parsed_token(parsed_token, issuers, audiences, allowed_client_ids, is_legacy_google_auth=True):
+ """Verify a parsed user ID token.
+
+ Args:
+ parsed_token: The parsed token information.
+ issuers: A list of allowed issuers
+ audiences: The allowed audiences.
+ allowed_client_ids: The allowed client IDs.
+
+ Returns:
+ True if the token is verified, False otherwise.
+ """
+ # Verify the issuer.
+ if parsed_token.get('iss') not in issuers:
+ _logger.warning('Issuer was not valid: %s', parsed_token.get('iss'))
+ return False
+
+ # Check audiences.
+ aud = parsed_token.get('aud')
+ if not aud:
+ _logger.warning('No aud field in token')
+ return False
+ # Special legacy handling if aud == cid. This occurs with iOS and browsers.
+ # As long as audience == client_id and cid is allowed, we need to accept
+ # the audience for compatibility.
+ cid = parsed_token.get('azp')
+ audience_allowed = (aud in audiences) or (is_legacy_google_auth and aud == cid)
+ if not audience_allowed:
+ _logger.warning('Audience not allowed: %s', aud)
+ return False
+
+ # Check allowed client IDs, for legacy auth.
+ if is_legacy_google_auth:
+ if list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK:
+ _logger.warning('Client ID check can\'t be skipped for ID tokens. '
+ 'Id_token cannot be verified.')
+ return False
+ elif not cid or cid not in allowed_client_ids:
+ _logger.warning('Client ID is not allowed: %s', cid)
+ return False
+
+ if 'email' not in parsed_token:
+ return False
+
+ return True
+
+
+def _urlsafe_b64decode(b64string):
+ # Guard against unicode strings, which base64 can't handle.
+ b64string = six.ensure_binary(b64string, 'ascii')
+  padded = b64string + b'=' * ((4 - len(b64string)) % 4)
+ return base64.urlsafe_b64decode(padded)
+
+
+def _get_cert_expiration_time(headers):
+ """Get the expiration time for a cert, given the response headers.
+
+ Get expiration time from the headers in the result. If we can't get
+ a time from the headers, this returns 0, indicating that the cert
+ shouldn't be cached.
+
+ Args:
+ headers: A dict containing the response headers from the request to get
+ certs.
+
+ Returns:
+ An integer with the number of seconds the cert should be cached. This
+ value is guaranteed to be >= 0.
+ """
+ # Check the max age of the cert.
+ cache_control = headers.get('Cache-Control', '')
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 indicates only
+ # a comma-separated header is valid, so it should be fine to split this on
+ # commas.
+ for entry in cache_control.split(','):
+ match = _MAX_AGE_REGEX.match(entry)
+ if match:
+ cache_time_seconds = int(match.group(1))
+ break
+ else:
+ return 0
+
+ # Subtract the cert's age.
+ age = headers.get('Age')
+ if age is not None:
+ try:
+ age = int(age)
+ except ValueError:
+ age = 0
+ cache_time_seconds -= age
+
+ return max(0, cache_time_seconds)
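+
+
+# A worked example of the computation above (assumed response headers):
+def _example_cert_expiration():
+  # max-age=600 with 100 seconds of Age leaves 500 seconds of cache time.
+  headers = {'Cache-Control': 'public, max-age=600', 'Age': '100'}
+  assert _get_cert_expiration_time(headers) == 500
+  # Without a max-age directive, the certs are not cached at all.
+  assert _get_cert_expiration_time({'Cache-Control': 'no-store'}) == 0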
+
+
+def _get_cached_certs(cert_uri, cache):
+ """Get certs from cache if present; otherwise, gets from URI and caches them.
+
+ Args:
+ cert_uri: URI from which to retrieve certs if cache is stale or empty.
+ cache: Cache of pre-fetched certs.
+
+ Returns:
+ The retrieved certs.
+ """
+ certs = cache.get(cert_uri, namespace=_CERT_NAMESPACE)
+ if certs is None:
+ _logger.debug('Cert cache miss for %s', cert_uri)
+ try:
+ result = urlfetch.fetch(cert_uri)
+ except AssertionError:
+ # This happens in unit tests. Act as if we couldn't get any certs.
+ return None
+
+ if result.status_code == 200:
+ certs = json.loads(result.content)
+ expiration_time_seconds = _get_cert_expiration_time(result.headers)
+ if expiration_time_seconds:
+ cache.set(cert_uri, certs, time=expiration_time_seconds,
+ namespace=_CERT_NAMESPACE)
+ else:
+ _logger.error(
+ 'Certs not available, HTTP request returned %d', result.status_code)
+
+ return certs
+
+
+def _b64_to_int(b):
+ b = six.ensure_binary(b, 'ascii')
+ b += b'=' * ((4 - len(b)) % 4)
+ b = base64.b64decode(b)
+ return int(binascii.hexlify(b), 16)
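+
+
+# A quick sanity check for the helper above: 'AQAB' is the base64 encoding
+# of the bytes 01 00 01, i.e. the common RSA public exponent 65537.
+def _example_b64_to_int():
+  assert _b64_to_int('AQAB') == 65537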
+
+
+def _verify_signed_jwt_with_certs(
+ jwt, time_now, cache,
+ cert_uri=_DEFAULT_CERT_URI):
+ """Verify a JWT against public certs.
+
+ See http://self-issued.info/docs/draft-jones-json-web-token.html.
+
+ The PyCrypto library included with Google App Engine is severely limited and
+ so you have to use it very carefully to verify JWT signatures. The first
+ issue is that the library can't read X.509 files, so we make a call to a
+ special URI that has the public cert in modulus/exponent form in JSON.
+
+ The second issue is that the RSA.verify method doesn't work, at least for
+ how the JWT tokens are signed, so we have to manually verify the signature
+ of the JWT, which means hashing the signed part of the JWT and comparing
+ that to the signature that's been encrypted with the public key.
+
+ Args:
+ jwt: string, A JWT.
+ time_now: The current time, as an int (eg. int(time.time())).
+ cache: Cache to use (eg. the memcache module).
+ cert_uri: string, URI to get cert modulus and exponent in JSON format.
+
+ Returns:
+ dict, The deserialized JSON payload in the JWT.
+
+ Raises:
+ _AppIdentityError: if any checks are failed.
+ """
+
+ segments = jwt.split('.')
+
+ if len(segments) != 3:
+ # Note that anywhere we print the jwt or its json body, we need to use
+ # %r instead of %s, so that non-printable characters are escaped safely.
+ raise _AppIdentityError('Token is not an id_token (Wrong number of '
+ 'segments)')
+ signed = '%s.%s' % (segments[0], segments[1])
+
+ signature = _urlsafe_b64decode(segments[2])
+
+ # pycrypto only deals in integers, so we have to convert the string of bytes
+ # into an int.
+ lsignature = int(binascii.hexlify(signature), 16)
+
+ # Verify expected header.
+ header_body = _urlsafe_b64decode(segments[0])
+ try:
+ header = json.loads(header_body)
+ except:
+ raise _AppIdentityError("Can't parse header")
+ if header.get('alg') != 'RS256':
+ raise _AppIdentityError('Unexpected encryption algorithm: %r' %
+ header.get('alg'))
+
+ # Formerly we would parse the token body here.
+ # However, it's not safe to do that without first checking the signature.
+
+ certs = _get_cached_certs(cert_uri, cache)
+ if certs is None:
+ raise _AppIdentityError(
+ 'Unable to retrieve certs needed to verify the signed JWT')
+
+ # Verify that we were able to load the Crypto libraries, before we try
+ # to use them.
+ if not _CRYPTO_LOADED:
+ raise _AppIdentityError('Unable to load pycrypto library. Can\'t verify '
+ 'id_token signature. See http://www.pycrypto.org '
+ 'for more information on pycrypto.')
+
+  # SHA256 hash of the already 'signed' segment from the JWT. Since it's a
+  # SHA256 hash, its hex digest will always have length 64.
+  local_hash = SHA256.new(six.ensure_binary(signed)).hexdigest()
+
+ # Check signature.
+ verified = False
+ for keyvalue in certs['keyvalues']:
+ try:
+ modulus = _b64_to_int(keyvalue['modulus'])
+ exponent = _b64_to_int(keyvalue['exponent'])
+ key = RSA.construct((modulus, exponent))
+
+ # Encrypt, and convert to a hex string.
+ hexsig = '%064x' % key.encrypt(lsignature, '')[0]
+      # Keep only the last 64 hex chars (the length of a SHA256 hex digest).
+ hexsig = hexsig[-64:]
+
+ # Check the signature on 'signed' by encrypting 'signature' with the
+ # public key and confirming the result matches the SHA256 hash of
+ # 'signed'. hmac.compare_digest(a, b) is used to avoid timing attacks.
+ verified = hmac.compare_digest(hexsig, local_hash)
+ if verified:
+ break
+ except Exception as e: # pylint: disable=broad-except
+ # Log the exception for debugging purpose.
+ _logger.debug(
+ 'Signature verification error: %s; continuing with the next cert.', e)
+ continue
+ if not verified:
+ raise _AppIdentityError('Invalid token signature')
+
+ # Parse token.
+ json_body = _urlsafe_b64decode(segments[1])
+ try:
+ parsed = json.loads(json_body)
+ except:
+ raise _AppIdentityError("Can't parse token body")
+
+ # Check creation timestamp.
+ iat = parsed.get('iat')
+ if iat is None:
+ raise _AppIdentityError('No iat field in token')
+ earliest = iat - _CLOCK_SKEW_SECS
+
+ # Check expiration timestamp.
+ exp = parsed.get('exp')
+ if exp is None:
+ raise _AppIdentityError('No exp field in token')
+ if exp >= time_now + _MAX_TOKEN_LIFETIME_SECS:
+ raise _AppIdentityError('exp field too far in future')
+ latest = exp + _CLOCK_SKEW_SECS
+
+ if time_now < earliest:
+ raise _AppIdentityError('Token used too early, %d < %d' %
+ (time_now, earliest))
+ if time_now > latest:
+ raise _AppIdentityError('Token used too late, %d > %d' %
+ (time_now, latest))
+
+ return parsed
+
+
+_TEXT_CERT_PREFIX = 'https://www.googleapis.com/robot/v1/metadata/x509/'
+_JSON_CERT_PREFIX = 'https://www.googleapis.com/service_accounts/v1/metadata/raw/'
+
+
+def convert_jwks_uri(jwks_uri):
+ """
+ The PyCrypto library included with Google App Engine is severely limited and
+ can't read X.509 files, so we change the URI to a special URI that has the
+ public cert in modulus/exponent form in JSON.
+ """
+ if not jwks_uri.startswith(_TEXT_CERT_PREFIX):
+ return jwks_uri
+ return jwks_uri.replace(_TEXT_CERT_PREFIX, _JSON_CERT_PREFIX)
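+
+
+# A short sketch of the conversion (hypothetical service-account name):
+def _example_convert_jwks_uri():
+  account = 'example@example.iam.gserviceaccount.com'
+  assert convert_jwks_uri(_TEXT_CERT_PREFIX + account) == (
+      _JSON_CERT_PREFIX + account)
+  # URIs that don't use the x509 prefix pass through unchanged.
+  assert convert_jwks_uri('https://example.com/jwks') == 'https://example.com/jwks'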
+
+
+def get_verified_jwt(
+ providers, audiences,
+ check_authorization_header=True, check_query_arg=True,
+ request=None, cache=memcache):
+ """
+ This function will extract, verify, and parse a JWT token from the
+ Authorization header or access_token query argument.
+
+ The JWT is assumed to contain an issuer and audience claim, as well
+ as issued-at and expiration timestamps. The signature will be
+ cryptographically verified, the claims and timestamps will be
+ checked, and the resulting parsed JWT body is returned.
+
+ If at any point the JWT is missing or found to be invalid, the
+ return result will be None.
+
+ Arguments:
+ providers - An iterable of dicts each containing 'issuer' and 'cert_uri' keys
+ audiences - An iterable of valid audiences
+
+ check_authorization_header - Boolean; check 'Authorization: Bearer' header
+ check_query_arg - Boolean; check 'access_token' query arg
+
+ request - Must be the request object if check_query_arg is true; otherwise ignored.
+ cache - In testing, override the certificate cache
+ """
+ if not (check_authorization_header or check_query_arg):
+ raise ValueError(
+ 'Either check_authorization_header or check_query_arg must be True.')
+ if check_query_arg and request is None:
+ raise ValueError('Cannot check query arg without request object.')
+ schemes = ('Bearer',) if check_authorization_header else ()
+ keys = ('access_token',) if check_query_arg else ()
+ token = _get_token(
+ request=request, allowed_auth_schemes=schemes, allowed_query_keys=keys)
+ if token is None:
+ return None
+ time_now = int(time.time())
+ for provider in providers:
+ parsed_token = _parse_and_verify_jwt(
+ token, time_now, (provider['issuer'],), audiences, provider['cert_uri'], cache)
+ if parsed_token is not None:
+ return parsed_token
+ return None
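+
+
+# A minimal usage sketch (the audience value is hypothetical); each provider
+# dict supplies 'issuer' and 'cert_uri':
+#
+#   providers = [{'issuer': 'https://accounts.google.com',
+#                 'cert_uri': _DEFAULT_CERT_URI}]
+#   jwt_body = get_verified_jwt(providers, ('my-audience',),
+#                               check_query_arg=False, request=None)
+#   # jwt_body is the parsed claims dict, or None if no valid JWT was found.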
+
+
+def _parse_and_verify_jwt(token, time_now, issuers, audiences, cert_uri, cache):
+ try:
+ parsed_token = _verify_signed_jwt_with_certs(token, time_now, cache, cert_uri)
+ except (_AppIdentityError, TypeError) as e:
+ _logger.debug('id_token verification failed: %s', e)
+ return None
+
+ issuers = _listlike_guard(issuers, 'issuers')
+ audiences = _listlike_guard(audiences, 'audiences')
+ # We can't use _verify_parsed_token because there's no client id (azp) or email in these JWTs
+ # Verify the issuer.
+ if parsed_token.get('iss') not in issuers:
+ _logger.warning('Issuer was not valid: %s', parsed_token.get('iss'))
+ return None
+
+ # Check audiences.
+ aud = parsed_token.get('aud')
+ if not aud:
+ _logger.warning('No aud field in token')
+ return None
+ if aud not in audiences:
+ _logger.warning('Audience not allowed: %s', aud)
+ return None
+
+ return parsed_token
+
+
+def _listlike_guard(obj, name, iterable_only=False, log_warning=True):
+ """
+ We frequently require passed objects to support iteration or
+ containment expressions, but not be strings. (Of course, strings
+ support iteration and containment, but not usefully.) If the passed
+ object is a string, we'll wrap it in a tuple and return it. If it's
+  already an iterable, we'll return it as-is. Otherwise, we'll raise a
+  ValueError.
+ """
+ required_type = (_Iterable,) if iterable_only else (_Container, _Iterable)
+ required_type_name = ' or '.join(t.__name__ for t in required_type)
+
+ if not isinstance(obj, required_type):
+ raise ValueError('{} must be of type {}'.format(name, required_type_name))
+ # at this point it is definitely the right type, but might be a string
+ if isinstance(obj, six.string_types):
+ if log_warning:
+ _logger.warning('{} passed as a string; should be list-like'.format(name))
+ return (obj,)
+ return obj
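+
+
+# A short sketch of the guard's behavior:
+def _example_listlike_guard():
+  # Lists and tuples come back unchanged.
+  assert _listlike_guard(['a'], 'scopes') == ['a']
+  # A bare string is wrapped in a tuple (and a warning is logged).
+  assert _listlike_guard('a', 'scopes') == ('a',)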
diff --git a/third_party/endpoints/util.py b/third_party/endpoints/util.py
new file mode 100644
index 0000000..fe883d0
--- /dev/null
+++ b/third_party/endpoints/util.py
@@ -0,0 +1,300 @@
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper utilities for the endpoints package."""
+
+# pylint: disable=g-bad-name
+from __future__ import absolute_import
+
+from six.moves import cStringIO
+import json
+import os
+import wsgiref.headers
+
+from google.appengine.api import app_identity
+from google.appengine.api.modules import modules
+
+
+class StartResponseProxy(object):
+ """Proxy for the typical WSGI start_response object."""
+
+ def __init__(self):
+ self.call_context = {}
+    self.body_buffer = cStringIO()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+    # Close out the StringIO buffer to prevent memory leakage.
+ if self.body_buffer:
+ self.body_buffer.close()
+
+ def Proxy(self, status, headers, exc_info=None):
+ """Save args, defer start_response until response body is parsed.
+
+ Create output buffer for body to be written into.
+ Note: this is not quite WSGI compliant: The body should come back as an
+ iterator returned from calling service_app() but instead, StartResponse
+ returns a writer that will be later called to output the body.
+ See google/appengine/ext/webapp/__init__.py::Response.wsgi_write()
+ write = start_response('%d %s' % self.__status, self.__wsgi_headers)
+ write(body)
+
+ Args:
+ status: Http status to be sent with this response
+ headers: Http headers to be sent with this response
+ exc_info: Exception info to be displayed for this response
+ Returns:
+ callable that takes as an argument the body content
+ """
+ self.call_context['status'] = status
+ self.call_context['headers'] = headers
+ self.call_context['exc_info'] = exc_info
+
+ return self.body_buffer.write
+
+ @property
+ def response_body(self):
+ return self.body_buffer.getvalue()
+
+ @property
+ def response_headers(self):
+ return self.call_context.get('headers')
+
+ @property
+ def response_status(self):
+ return self.call_context.get('status')
+
+ @property
+ def response_exc_info(self):
+ return self.call_context.get('exc_info')
+
+
+def send_wsgi_not_found_response(start_response, cors_handler=None):
+ return send_wsgi_response('404 Not Found', [('Content-Type', 'text/plain')],
+ 'Not Found', start_response,
+ cors_handler=cors_handler)
+
+
+def send_wsgi_error_response(message, start_response, cors_handler=None):
+ body = json.dumps({'error': {'message': message}})
+ return send_wsgi_response('500', [('Content-Type', 'application/json')], body,
+ start_response, cors_handler=cors_handler)
+
+
+def send_wsgi_rejected_response(rejection_error, start_response,
+ cors_handler=None):
+ body = rejection_error.to_json()
+ return send_wsgi_response('400', [('Content-Type', 'application/json')], body,
+ start_response, cors_handler=cors_handler)
+
+
+def send_wsgi_redirect_response(redirect_location, start_response,
+ cors_handler=None):
+ return send_wsgi_response('302', [('Location', redirect_location)], '',
+ start_response, cors_handler=cors_handler)
+
+
+def send_wsgi_no_content_response(start_response, cors_handler=None):
+ return send_wsgi_response('204 No Content', [], '', start_response,
+ cors_handler)
+
+
+def send_wsgi_response(status, headers, content, start_response,
+ cors_handler=None):
+ """Dump reformatted response to CGI start_response.
+
+ This calls start_response and returns the response body.
+
+ Args:
+ status: A string containing the HTTP status code to send.
+ headers: A list of (header, value) tuples, the headers to send in the
+ response.
+ content: A string containing the body content to write.
+ start_response: A function with semantics defined in PEP-333.
+ cors_handler: A handler to process CORS request headers and update the
+ headers in the response. Or this can be None, to bypass CORS checks.
+
+ Returns:
+ A string containing the response body.
+ """
+ if cors_handler:
+ cors_handler.update_headers(headers)
+
+ # Update content length.
+ content_len = len(content) if content else 0
+ headers = [(header, value) for header, value in headers
+ if header.lower() != 'content-length']
+ headers.append(('Content-Length', '%s' % content_len))
+
+ start_response(status, headers)
+ return content
+
+
+def get_headers_from_environ(environ):
+ """Get a wsgiref.headers.Headers object with headers from the environment.
+
+ Headers in environ are prefixed with 'HTTP_', are all uppercase, and have
+ had dashes replaced with underscores. This strips the HTTP_ prefix and
+ changes underscores back to dashes before adding them to the returned set
+ of headers.
+
+ Args:
+ environ: An environ dict for the request as defined in PEP-333.
+
+ Returns:
+ A wsgiref.headers.Headers object that's been filled in with any HTTP
+ headers found in environ.
+ """
+ headers = wsgiref.headers.Headers([])
+ for header, value in environ.items():
+ if header.startswith('HTTP_'):
+ headers[header[5:].replace('_', '-')] = value
+ # Content-Type is special; it does not start with 'HTTP_'.
+ if 'CONTENT_TYPE' in environ:
+ headers['CONTENT-TYPE'] = environ['CONTENT_TYPE']
+ return headers
+
+
+def put_headers_in_environ(headers, environ):
+ """Given a list of headers, put them into environ based on PEP-333.
+
+ This converts headers to uppercase, prefixes them with 'HTTP_', and
+ converts dashes to underscores before adding them to the environ dict.
+
+ Args:
+ headers: A list of (header, value) tuples. The HTTP headers to add to the
+ environment.
+ environ: An environ dict for the request as defined in PEP-333.
+ """
+ for key, value in headers:
+ environ['HTTP_%s' % key.upper().replace('-', '_')] = value
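+
+
+# A round-trip sketch for the two helpers above (assumed header values):
+def _example_environ_header_round_trip():
+  environ = {}
+  put_headers_in_environ([('X-Custom-Header', 'abc')], environ)
+  assert environ['HTTP_X_CUSTOM_HEADER'] == 'abc'
+  # Headers objects are case-insensitive, so the original name still works.
+  assert get_headers_from_environ(environ)['X-Custom-Header'] == 'abc'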
+
+
+def is_running_on_app_engine():
+ return os.environ.get('GAE_MODULE_NAME') is not None
+
+
+def is_running_on_devserver():
+ server_software = os.environ.get('SERVER_SOFTWARE', '')
+ return (server_software.startswith('Development/') and
+ server_software != 'Development/1.0 (testbed)')
+
+
+def is_running_on_localhost():
+ return os.environ.get('SERVER_NAME') == 'localhost'
+
+
+def get_hostname_prefix():
+ """Returns the hostname prefix of a running Endpoints service.
+
+ The prefix is the portion of the hostname that comes before the API name.
+ For example, if a non-default version and a non-default service are in use,
+ the returned result would be '{VERSION}-dot-{SERVICE}-'.
+
+ Returns:
+ str, the hostname prefix.
+ """
+ parts = []
+
+ # Check if this is the default version
+ version = modules.get_current_version_name()
+ default_version = modules.get_default_version()
+ if version != default_version:
+ parts.append(version)
+
+ # Check if this is the default module
+ module = modules.get_current_module_name()
+ if module != 'default':
+ parts.append(module)
+
+ # If there is anything to prepend, add an extra blank entry for the trailing
+ # -dot-
+ if parts:
+ parts.append('')
+
+ return '-dot-'.join(parts)
+
+
+def get_app_hostname():
+ """Return hostname of a running Endpoints service.
+
+  Returns the hostname of a running Endpoints API. It can be 1) "localhost:PORT"
+  if running on the development server, 2) "app_id.appspot.com" if running on
+  external App Engine prod, 3) "app_id.googleplex.com" if running as a Google
+  first-party Endpoints API, or 4) None if not running on App Engine
+  (e.g. Tornado Endpoints API).
+
+ Returns:
+ A string representing the hostname of the service.
+ """
+ if not is_running_on_app_engine() or is_running_on_localhost():
+ return None
+
+ app_id = app_identity.get_application_id()
+
+ prefix = get_hostname_prefix()
+ suffix = 'appspot.com'
+
+ if ':' in app_id:
+ tokens = app_id.split(':')
+ api_name = tokens[1]
+ if tokens[0] == 'google.com':
+ suffix = 'googleplex.com'
+ else:
+ api_name = app_id
+
+ return '{0}{1}.{2}'.format(prefix, api_name, suffix)
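+
+
+# Examples of the resulting hostnames (hypothetical app IDs, versions, and
+# services):
+#   app_id 'my-app', default service and version:
+#       'my-app.appspot.com'
+#   app_id 'my-app', version 'v2', service 'api':
+#       'v2-dot-api-dot-my-app.appspot.com'
+#   app_id 'google.com:my-app', defaults:
+#       'my-app.googleplex.com'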
+
+
+def check_list_type(objects, allowed_type, name, allow_none=True):
+ """Verify that objects in list are of the allowed type or raise TypeError.
+
+ Args:
+ objects: The list of objects to check.
+    allowed_type: The allowed type of items in 'objects'.
+ name: Name of the list of objects, added to the exception.
+ allow_none: If set, None is also allowed.
+
+ Raises:
+ TypeError: if object is not of the allowed type.
+
+ Returns:
+ The list of objects, for convenient use in assignment.
+ """
+ if objects is None:
+ if not allow_none:
+ raise TypeError('%s is None, which is not allowed.' % name)
+ return objects
+ if not isinstance(objects, (tuple, list)):
+ raise TypeError('%s is not a list.' % name)
+ if not all(isinstance(i, allowed_type) for i in objects):
+ type_list = sorted(list(set(type(obj) for obj in objects)))
+ raise TypeError('%s contains types that don\'t match %s: %s' %
+ (name, allowed_type.__name__, type_list))
+ return objects
+
+
+def snake_case_to_headless_camel_case(snake_string):
+ """Convert snake_case to headlessCamelCase.
+
+ Args:
+ snake_string: The string to be converted.
+ Returns:
+ The input string converted to headlessCamelCase.
+ """
+ return ''.join([snake_string.split('_')[0]] +
+ list(sub_string.capitalize()
+ for sub_string in snake_string.split('_')[1:]))
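+
+
+# For instance:
+def _example_headless_camel_case():
+  assert snake_case_to_headless_camel_case('snake_case_string') == (
+      'snakeCaseString')
+  # Single words come back unchanged.
+  assert snake_case_to_headless_camel_case('word') == 'word'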
diff --git a/third_party/google/LICENSE b/third_party/google/LICENSE
new file mode 100644
index 0000000..53d6fcb
--- /dev/null
+++ b/third_party/google/LICENSE
@@ -0,0 +1,132 @@
+GOOGLE APP ENGINE SDK
+=====================
+Copyright 2008 Google Inc.
+All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+DJANGO FRAMEWORK
+================
+Copyright (c) 2005, the Lawrence Journal-World
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of Django nor the names of its contributors may be used
+ to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+WebOb
+======
+
+Copyright (c) 2007 Ian Bicking and Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+PyYaml
+=======
+Copyright (c) 2006 Kirill Simonov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+cacerts
+=======
+Version: MPL 1.1/GPL 2.0/LGPL 2.1
+
+The contents of this file are subject to the Mozilla Public License Version
+1.1 (the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+http://www.mozilla.org/MPL/
+
+Software distributed under the License is distributed on an "AS IS" basis,
+WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+for the specific language governing rights and limitations under the
+License.
+
+The Original Code is the Netscape security libraries.
+
+The Initial Developer of the Original Code is
+Netscape Communications Corporation.
+Portions created by the Initial Developer are Copyright (C) 1994-2000
+the Initial Developer. All Rights Reserved.
+
+Contributor(s):
+
+Alternatively, the contents of this file may be used under the terms of
+either the GNU General Public License Version 2 or later (the "GPL"), or
+the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+in which case the provisions of the GPL or the LGPL are applicable instead
+of those above. If you wish to allow use of your version of this file only
+under the terms of either the GPL or the LGPL, and not to allow others to
+use your version of this file under the terms of the MPL, indicate your
+decision by deleting the provisions above and replace them with the notice
+and other provisions required by the GPL or the LGPL. If you do not delete
+the provisions above, a recipient may use your version of this file under
+the terms of any one of the MPL, the GPL or the LGPL.
+
diff --git a/third_party/google/README.monorail b/third_party/google/README.monorail
new file mode 100644
index 0000000..c9043a3
--- /dev/null
+++ b/third_party/google/README.monorail
@@ -0,0 +1,35 @@
+Name: Google App Engine SDK
+URL: https://github.com/GoogleCloudPlatform/appengine-python-standard
+Version: May 18, 2022
+License: Apache 2.0
+License File: LICENSE
+Security Critical: no
+Description:
+Development tools for Google App Engine
+Local Modifications:
+While most App Engine APIs have been updated for Python 3 in the above GitHub
+repository, ProtocolBuffer is not available. Therefore, we have copied the file
+from the old Python 2 API and are updating it ourselves for Python 3.
+
+1. Install the Google Cloud SDK (https://cloud.google.com/sdk)
+ The App Engine Python SDK is located in gcloud/platform/google_appengine/
+2. Retain only:
+ google/net/__init__.py
+ google/net/proto/__init__.py
+ google/net/proto/ProtocolBuffer.py
+ LICENSE
+3. Strip trailing whitespace from all files.
+4. Update files for Python 3.
+ Syntax changes:
+ * raise Exception, s --> raise Exception(s)
+
+ Import moves:
+ * import httplib --> from six.moves import http_client
+
+ String changes:
+ * a.fromstring(s) --> a.frombytes(six.ensure_binary(s))
+ * a.tostring() --> a.tobytes()
+
+ Integer changes:
+ * 1234L --> 1234
+ * long(1234) --> 1234
diff --git a/third_party/google/net/__init__.py b/third_party/google/net/__init__.py
new file mode 100644
index 0000000..93e6786
--- /dev/null
+++ b/third_party/google/net/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/third_party/google/net/proto/ProtocolBuffer.py b/third_party/google/net/proto/ProtocolBuffer.py
new file mode 100644
index 0000000..a45183d
--- /dev/null
+++ b/third_party/google/net/proto/ProtocolBuffer.py
@@ -0,0 +1,1203 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+import array
+import itertools
+import re
+import six
+from six.moves import http_client
+import struct
+try:
+
+
+ import google.net.proto.proto1 as proto1
+except ImportError:
+
+ class ProtocolBufferDecodeError(Exception): pass
+ class ProtocolBufferEncodeError(Exception): pass
+ class ProtocolBufferReturnError(Exception): pass
+else:
+ ProtocolBufferDecodeError = proto1.ProtocolBufferDecodeError
+ ProtocolBufferEncodeError = proto1.ProtocolBufferEncodeError
+ ProtocolBufferReturnError = proto1.ProtocolBufferReturnError
+
+__all__ = ['ProtocolMessage', 'Encoder', 'Decoder',
+ 'ExtendableProtocolMessage',
+ 'ProtocolBufferDecodeError',
+ 'ProtocolBufferEncodeError',
+ 'ProtocolBufferReturnError']
+
+URL_RE = re.compile('^(https?)://([^/]+)(/.*)$')
+
+
+class ProtocolMessage:
+  """Abstract base class for old-style App Engine protocol buffer messages.
+
+  Generated message classes subclass this and implement the hooks below
+  (Clear, IsInitialized, OutputUnchecked, TryMerge, MergeFrom, ...); the
+  base-class versions raise NotImplementedError.
+  """
+
+ def __init__(self, contents=None):
+
+
+ raise NotImplementedError
+
+ def Clear(self):
+
+
+ raise NotImplementedError
+
+ def IsInitialized(self, debug_strs=None):
+
+ raise NotImplementedError
+
+ def Encode(self):
+
+ try:
+ return self._CEncode()
+ except (NotImplementedError, AttributeError):
+ e = Encoder()
+ self.Output(e)
+ return e.buffer().tobytes()
+
+ def SerializeToString(self):
+
+ return self.Encode()
+
+ def SerializePartialToString(self):
+
+
+
+ try:
+ return self._CEncodePartial()
+ except (NotImplementedError, AttributeError):
+ e = Encoder()
+ self.OutputPartial(e)
+ return e.buffer().tobytes()
+
+ def _CEncode(self):
+    """Serialize to bytes via a native (C) implementation, if available.
+
+    The base class raises NotImplementedError, which makes Encode() fall
+    back to the pure-Python Encoder path above.
+    """
+ raise NotImplementedError
+
+ def _CEncodePartial(self):
+
+ raise NotImplementedError
+
+ def ParseFromString(self, s):
+
+
+
+ self.Clear()
+ self.MergeFromString(s)
+
+ def ParsePartialFromString(self, s):
+
+
+ self.Clear()
+ self.MergePartialFromString(s)
+
+ def MergeFromString(self, s):
+
+
+
+ self.MergePartialFromString(s)
+ dbg = []
+ if not self.IsInitialized(dbg):
+ raise ProtocolBufferDecodeError('\n\t'.join(dbg))
+
+ def MergePartialFromString(self, s):
+
+
+ try:
+ self._CMergeFromString(s)
+ except (NotImplementedError, AttributeError):
+
+
+ a = array.array('B')
+ a.frombytes(six.ensure_binary(s))
+ d = Decoder(a, 0, len(a))
+ self.TryMerge(d)
+
+ def _CMergeFromString(self, s):
+    """Merge serialized bytes via a native (C) implementation, if available.
+
+    The base class raises NotImplementedError, which makes
+    MergePartialFromString() fall back to the pure-Python Decoder path.
+    """
+ raise NotImplementedError
+
+ def __getstate__(self):
+
+
+ return self.Encode()
+
+ def __setstate__(self, contents_):
+
+
+ self.__init__(contents=contents_)
+
+ def sendCommand(self, server, url, response, follow_redirects=1,
+ secure=0, keyfile=None, certfile=None):
+    """POST this message to http(s)://server + url and parse the reply.
+
+    The encoded message is sent as the request body; up to follow_redirects
+    HTTP 302 redirects are followed. On a 200 reply, the body is parsed into
+    `response` (when not None) and `response` is returned; any other status
+    raises ProtocolBufferReturnError.
+    """
+ data = self.Encode()
+ if secure:
+ if keyfile and certfile:
+ conn = http_client.HTTPSConnection(server, key_file=keyfile,
+ cert_file=certfile)
+ else:
+ conn = http_client.HTTPSConnection(server)
+ else:
+ conn = http_client.HTTPConnection(server)
+ conn.putrequest("POST", url)
+ conn.putheader("Content-Length", "%d" %len(data))
+ conn.endheaders()
+ conn.send(data)
+ resp = conn.getresponse()
+ if follow_redirects > 0 and resp.status == 302:
+ m = URL_RE.match(resp.getheader('Location'))
+ if m:
+ protocol, server, url = m.groups()
+ return self.sendCommand(server, url, response,
+ follow_redirects=follow_redirects - 1,
+ secure=(protocol == 'https'),
+ keyfile=keyfile,
+ certfile=certfile)
+ if resp.status != 200:
+ raise ProtocolBufferReturnError(resp.status)
+ if response is not None:
+ response.ParseFromString(resp.read())
+ return response
+
+ def sendSecureCommand(self, server, keyfile, certfile, url, response,
+ follow_redirects=1):
+    """Like sendCommand, but always over HTTPS with a client key and cert.
+
+    keyfile and certfile name the client's SSL key and certificate files;
+    all other arguments behave as in sendCommand.
+    """
+ return self.sendCommand(server, url, response,
+ follow_redirects=follow_redirects,
+ secure=1, keyfile=keyfile, certfile=certfile)
+
+ def __str__(self, prefix="", printElemNumber=0):
+
+ raise NotImplementedError
+
+ def ToASCII(self):
+
+ return self._CToASCII(ProtocolMessage._SYMBOLIC_FULL_ASCII)
+
+ def ToShortASCII(self):
+
+
+
+
+ return self._CToASCII(ProtocolMessage._SYMBOLIC_SHORT_ASCII)
+
+
+
+ _NUMERIC_ASCII = 0
+ _SYMBOLIC_SHORT_ASCII = 1
+ _SYMBOLIC_FULL_ASCII = 2
+
+ def _CToASCII(self, output_format):
+
+
+
+
+
+ raise NotImplementedError
+
+ def ParseASCII(self, ascii_string):
+
+
+
+
+ raise NotImplementedError
+
+ def ParseASCIIIgnoreUnknown(self, ascii_string):
+
+
+
+
+ raise NotImplementedError
+
+ def Equals(self, other):
+
+
+
+
+ raise NotImplementedError
+
+ def __eq__(self, other):
+
+
+
+
+
+
+ if other.__class__ is self.__class__:
+ return self.Equals(other)
+ return NotImplemented
+
+ def __ne__(self, other):
+
+
+
+
+
+
+ if other.__class__ is self.__class__:
+ return not self.Equals(other)
+ return NotImplemented
+
+
+
+
+
+ def Output(self, e):
+
+ dbg = []
+ if not self.IsInitialized(dbg):
+ raise ProtocolBufferEncodeError('\n\t'.join(dbg))
+ self.OutputUnchecked(e)
+ return
+
+ def OutputUnchecked(self, e):
+
+ raise NotImplementedError
+
+ def OutputPartial(self, e):
+
+
+ raise NotImplementedError
+
+ def Parse(self, d):
+
+ self.Clear()
+ self.Merge(d)
+ return
+
+ def Merge(self, d):
+
+ self.TryMerge(d)
+ dbg = []
+ if not self.IsInitialized(dbg):
+ raise ProtocolBufferDecodeError('\n\t'.join(dbg))
+ return
+
+ def TryMerge(self, d):
+
+ raise NotImplementedError
+
+ def CopyFrom(self, pb):
+
+ if (pb == self): return
+ self.Clear()
+ self.MergeFrom(pb)
+
+ def MergeFrom(self, pb):
+
+ raise NotImplementedError
+
+
+
+
+
+ def lengthVarInt32(self, n):
+ return self.lengthVarInt64(n)
+
+ def lengthVarInt64(self, n):
+ if n < 0:
+ return 10
+ result = 0
+ while 1:
+ result += 1
+ n >>= 7
+ if n == 0:
+ break
+ return result
+
+ def lengthString(self, n):
+ return self.lengthVarInt32(n) + n
+
+ def DebugFormat(self, value):
+ return "%s" % value
+ def DebugFormatInt32(self, value):
+ if (value <= -2000000000 or value >= 2000000000):
+ return self.DebugFormatFixed32(value)
+ return "%d" % value
+ def DebugFormatInt64(self, value):
+ if (value <= -20000000000000 or value >= 20000000000000):
+ return self.DebugFormatFixed64(value)
+ return "%d" % value
+ def DebugFormatString(self, value):
+
+
+
+ def escape(c):
+ o = ord(c)
+ if o == 10: return r"\n"
+ if o == 39: return r"\'"
+
+ if o == 34: return r'\"'
+ if o == 92: return r"\\"
+
+ if o >= 127 or o < 32: return "\\%03o" % o
+ return c
+ return '"' + "".join(escape(c) for c in value) + '"'
+ def DebugFormatFloat(self, value):
+ return "%ff" % value
+ def DebugFormatFixed32(self, value):
+ if (value < 0): value += (1<<32)
+ return "0x%x" % value
+ def DebugFormatFixed64(self, value):
+ if (value < 0): value += (1<<64)
+ return "0x%x" % value
+ def DebugFormatBool(self, value):
+ if value:
+ return "true"
+ else:
+ return "false"
+
+
+TYPE_DOUBLE = 1
+TYPE_FLOAT = 2
+TYPE_INT64 = 3
+TYPE_UINT64 = 4
+TYPE_INT32 = 5
+TYPE_FIXED64 = 6
+TYPE_FIXED32 = 7
+TYPE_BOOL = 8
+TYPE_STRING = 9
+TYPE_GROUP = 10
+TYPE_FOREIGN = 11
+
+
+_TYPE_TO_DEBUG_STRING = {
+ TYPE_INT32: ProtocolMessage.DebugFormatInt32,
+ TYPE_INT64: ProtocolMessage.DebugFormatInt64,
+ TYPE_UINT64: ProtocolMessage.DebugFormatInt64,
+ TYPE_FLOAT: ProtocolMessage.DebugFormatFloat,
+ TYPE_STRING: ProtocolMessage.DebugFormatString,
+ TYPE_FIXED32: ProtocolMessage.DebugFormatFixed32,
+ TYPE_FIXED64: ProtocolMessage.DebugFormatFixed64,
+ TYPE_BOOL: ProtocolMessage.DebugFormatBool }
+
+
+
+class Encoder:
+
+
+ NUMERIC = 0
+ DOUBLE = 1
+ STRING = 2
+ STARTGROUP = 3
+ ENDGROUP = 4
+ FLOAT = 5
+ MAX_TYPE = 6
+
+ def __init__(self):
+ self.buf = array.array('B')
+ return
+
+ def buffer(self):
+ return self.buf
+
+ def put8(self, v):
+ if v < 0 or v >= (1<<8): raise ProtocolBufferEncodeError("u8 too big")
+ self.buf.append(v & 255)
+ return
+
+ def put16(self, v):
+ if v < 0 or v >= (1<<16): raise ProtocolBufferEncodeError("u16 too big")
+ self.buf.append((v >> 0) & 255)
+ self.buf.append((v >> 8) & 255)
+ return
+
+ def put32(self, v):
+ if v < 0 or v >= (1<<32): raise ProtocolBufferEncodeError("u32 too big")
+ self.buf.append((v >> 0) & 255)
+ self.buf.append((v >> 8) & 255)
+ self.buf.append((v >> 16) & 255)
+ self.buf.append((v >> 24) & 255)
+ return
+
+ def put64(self, v):
+ if v < 0 or v >= (1<<64): raise ProtocolBufferEncodeError("u64 too big")
+ self.buf.append((v >> 0) & 255)
+ self.buf.append((v >> 8) & 255)
+ self.buf.append((v >> 16) & 255)
+ self.buf.append((v >> 24) & 255)
+ self.buf.append((v >> 32) & 255)
+ self.buf.append((v >> 40) & 255)
+ self.buf.append((v >> 48) & 255)
+ self.buf.append((v >> 56) & 255)
+ return
+
+ def putVarInt32(self, v):
+    """Encode v as a base-128 varint.
+
+    Values outside [-2**31, 2**31) raise ProtocolBufferEncodeError.
+    Negative values are sign-extended to 64 bits (the v += 1 << 64 below),
+    so they always occupy ten bytes on the wire.
+    """
+ buf_append = self.buf.append
+ if v & 127 == v:
+ buf_append(v)
+ return
+ if v >= 0x80000000 or v < -0x80000000:
+ raise ProtocolBufferEncodeError("int32 too big")
+ if v < 0:
+ v += 0x10000000000000000
+ while True:
+ bits = v & 127
+ v >>= 7
+ if v:
+ bits |= 128
+ buf_append(bits)
+ if not v:
+ break
+ return
+
+ def putVarInt64(self, v):
+ buf_append = self.buf.append
+ if v >= 0x8000000000000000 or v < -0x8000000000000000:
+ raise ProtocolBufferEncodeError("int64 too big")
+ if v < 0:
+ v += 0x10000000000000000
+ while True:
+ bits = v & 127
+ v >>= 7
+ if v:
+ bits |= 128
+ buf_append(bits)
+ if not v:
+ break
+ return
+
+ def putVarUint64(self, v):
+ buf_append = self.buf.append
+ if v < 0 or v >= 0x10000000000000000:
+ raise ProtocolBufferEncodeError("uint64 too big")
+ while True:
+ bits = v & 127
+ v >>= 7
+ if v:
+ bits |= 128
+ buf_append(bits)
+ if not v:
+ break
+ return
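+
+  # A worked example of the varint wire format produced by the three
+  # methods above: 300 (0b10_0101100) is written low seven bits first,
+  # with the high bit of each byte marking continuation:
+  #
+  #   e = Encoder()
+  #   e.putVarInt32(300)
+  #   assert list(e.buffer()) == [0xAC, 0x02]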
+
+ def putFloat(self, v):
+ a = array.array('B')
+ a.frombytes(struct.pack("<f", v))
+ self.buf.extend(a)
+ return
+
+ def putDouble(self, v):
+ a = array.array('B')
+ a.frombytes(struct.pack("<d", v))
+ self.buf.extend(a)
+ return
+
+ def putBoolean(self, v):
+ if v:
+ self.buf.append(1)
+ else:
+ self.buf.append(0)
+ return
+
+ def putPrefixedString(self, v):
+
+
+
+ v = six.ensure_binary(v)
+ self.putVarInt32(len(v))
+ self.buf.frombytes(v)
+
+ def putRawString(self, v):
+ self.buf.frombytes(six.ensure_binary(v))
+
+ _TYPE_TO_METHOD = {
+ TYPE_DOUBLE: putDouble,
+ TYPE_FLOAT: putFloat,
+ TYPE_FIXED64: put64,
+ TYPE_FIXED32: put32,
+ TYPE_INT32: putVarInt32,
+ TYPE_INT64: putVarInt64,
+ TYPE_UINT64: putVarUint64,
+ TYPE_BOOL: putBoolean,
+ TYPE_STRING: putPrefixedString }
+
+ _TYPE_TO_BYTE_SIZE = {
+ TYPE_DOUBLE: 8,
+ TYPE_FLOAT: 4,
+ TYPE_FIXED64: 8,
+ TYPE_FIXED32: 4,
+ TYPE_BOOL: 1 }
+
+class Decoder:
+ def __init__(self, buf, idx, limit):
+ self.buf = buf
+ self.idx = idx
+ self.limit = limit
+ return
+
+ def avail(self):
+ return self.limit - self.idx
+
+ def buffer(self):
+ return self.buf
+
+ def pos(self):
+ return self.idx
+
+ def skip(self, n):
+ if self.idx + n > self.limit: raise ProtocolBufferDecodeError("truncated")
+ self.idx += n
+ return
+
+ def skipData(self, tag):
+ t = tag & 7
+ if t == Encoder.NUMERIC:
+ self.getVarInt64()
+ elif t == Encoder.DOUBLE:
+ self.skip(8)
+ elif t == Encoder.STRING:
+ n = self.getVarInt32()
+ self.skip(n)
+ elif t == Encoder.STARTGROUP:
+ while 1:
+ t = self.getVarInt32()
+ if (t & 7) == Encoder.ENDGROUP:
+ break
+ else:
+ self.skipData(t)
+ if (t - Encoder.ENDGROUP) != (tag - Encoder.STARTGROUP):
+ raise ProtocolBufferDecodeError("corrupted")
+ elif t == Encoder.ENDGROUP:
+ raise ProtocolBufferDecodeError("corrupted")
+ elif t == Encoder.FLOAT:
+ self.skip(4)
+ else:
+ raise ProtocolBufferDecodeError("corrupted")
+
+
+ def get8(self):
+ if self.idx >= self.limit: raise ProtocolBufferDecodeError("truncated")
+ c = self.buf[self.idx]
+ self.idx += 1
+ return c
+
+ def get16(self):
+ if self.idx + 2 > self.limit: raise ProtocolBufferDecodeError("truncated")
+ c = self.buf[self.idx]
+ d = self.buf[self.idx + 1]
+ self.idx += 2
+ return (d << 8) | c
+
+ def get32(self):
+ if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError("truncated")
+ c = self.buf[self.idx]
+ d = self.buf[self.idx + 1]
+ e = self.buf[self.idx + 2]
+ f = self.buf[self.idx + 3]
+ self.idx += 4
+ return (f << 24) | (e << 16) | (d << 8) | c
+
+ def get64(self):
+ if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError("truncated")
+ c = self.buf[self.idx]
+ d = self.buf[self.idx + 1]
+ e = self.buf[self.idx + 2]
+ f = self.buf[self.idx + 3]
+ g = self.buf[self.idx + 4]
+ h = self.buf[self.idx + 5]
+ i = self.buf[self.idx + 6]
+ j = self.buf[self.idx + 7]
+ self.idx += 8
+ return ((j << 56) | (i << 48) | (h << 40) | (g << 32) | (f << 24)
+ | (e << 16) | (d << 8) | c)
+
+  def getVarInt32(self):
+    """Decodes a varint and checks that it fits in a signed 32-bit range.
+
+    Values are decoded as 64-bit varints and then sign-folded, so a
+    negative int32 encoded as ten bytes round-trips correctly.
+    """
+ b = self.get8()
+ if not (b & 128):
+ return b
+
+ result = 0
+ shift = 0
+
+ while 1:
+ result |= ((b & 127) << shift)
+ shift += 7
+ if not (b & 128):
+ if result >= 0x10000000000000000:
+ raise ProtocolBufferDecodeError("corrupted")
+ break
+ if shift >= 64: raise ProtocolBufferDecodeError("corrupted")
+ b = self.get8()
+
+ if result >= 0x8000000000000000:
+ result -= 0x10000000000000000
+ if result >= 0x80000000 or result < -0x80000000:
+ raise ProtocolBufferDecodeError("corrupted")
+ return result
+
+ def getVarInt64(self):
+ result = self.getVarUint64()
+ if result >= (1 << 63):
+ result -= (1 << 64)
+ return result
+
+ def getVarUint64(self):
+ result = 0
+ shift = 0
+ while 1:
+ if shift >= 64: raise ProtocolBufferDecodeError("corrupted")
+ b = self.get8()
+ result |= ((b & 127) << shift)
+ shift += 7
+ if not (b & 128):
+ if result >= (1 << 64): raise ProtocolBufferDecodeError("corrupted")
+ return result
+ return result
+
+ def getFloat(self):
+ if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError("truncated")
+ a = self.buf[self.idx:self.idx+4]
+ self.idx += 4
+ return struct.unpack("<f", a)[0]
+
+ def getDouble(self):
+ if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError("truncated")
+ a = self.buf[self.idx:self.idx+8]
+ self.idx += 8
+ return struct.unpack("<d", a)[0]
+
+ def getBoolean(self):
+ b = self.get8()
+ if b != 0 and b != 1: raise ProtocolBufferDecodeError("corrupted")
+ return b
+
+ def getPrefixedString(self):
+ length = self.getVarInt32()
+ if self.idx + length > self.limit:
+ raise ProtocolBufferDecodeError("truncated")
+ r = self.buf[self.idx : self.idx + length]
+ self.idx += length
+ return r.tobytes()
+
+ def getRawString(self):
+ r = self.buf[self.idx:self.limit]
+ self.idx = self.limit
+ return r.tobytes()
+
+ _TYPE_TO_METHOD = {
+ TYPE_DOUBLE: getDouble,
+ TYPE_FLOAT: getFloat,
+ TYPE_FIXED64: get64,
+ TYPE_FIXED32: get32,
+ TYPE_INT32: getVarInt32,
+ TYPE_INT64: getVarInt64,
+ TYPE_UINT64: getVarUint64,
+ TYPE_BOOL: getBoolean,
+ TYPE_STRING: getPrefixedString }
+
+
+
+
+
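+# A minimal encode/decode round trip with the two classes above (an
+# illustrative sketch, not part of the original module):
+#
+#   enc = Encoder()
+#   enc.putVarInt32(42)
+#   enc.putPrefixedString(b'hello')
+#   dec = Decoder(enc.buffer(), 0, len(enc.buffer()))
+#   assert dec.getVarInt32() == 42
+#   assert dec.getPrefixedString() == b'hello'
+
+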
+class ExtensionIdentifier(object):
+  """Describes a single extension field of an ExtendableProtocolMessage."""
+
+ __slots__ = ('full_name', 'number', 'field_type', 'wire_tag', 'is_repeated',
+ 'default', 'containing_cls', 'composite_cls', 'message_name')
+ def __init__(self, full_name, number, field_type, wire_tag, is_repeated,
+ default):
+ self.full_name = full_name
+ self.number = number
+ self.field_type = field_type
+ self.wire_tag = wire_tag
+ self.is_repeated = is_repeated
+ self.default = default
+
+class ExtendableProtocolMessage(ProtocolMessage):
+  """A ProtocolMessage subclass that supports extension fields."""
+
+  def HasExtension(self, extension):
+    """Returns True if the extension has a value set in this message."""
+ self._VerifyExtensionIdentifier(extension)
+ return extension in self._extension_fields
+
+  def ClearExtension(self, extension):
+    """Removes the extension's value from this message, if present."""
+ self._VerifyExtensionIdentifier(extension)
+ if extension in self._extension_fields:
+ del self._extension_fields[extension]
+
+  def GetExtension(self, extension, index=None):
+    """Returns the value of the given extension.
+
+    For a singular extension, returns the stored value, or the extension's
+    default (a new empty message for composite types) when unset. For a
+    repeated extension, index selects which element to return.
+    """
+ self._VerifyExtensionIdentifier(extension)
+ if extension in self._extension_fields:
+ result = self._extension_fields[extension]
+ else:
+ if extension.is_repeated:
+ result = []
+ elif extension.composite_cls:
+ result = extension.composite_cls()
+ else:
+ result = extension.default
+ if extension.is_repeated:
+ result = result[index]
+ return result
+
+  def SetExtension(self, extension, *args):
+    """Sets the value of a non-composite extension.
+
+    Call as SetExtension(extension, value) for a singular extension, or
+    SetExtension(extension, index, value) to overwrite one element of a
+    repeated extension. Composite (message-valued) extensions must be
+    modified through MutableExtension() instead.
+    """
+ self._VerifyExtensionIdentifier(extension)
+ if extension.composite_cls:
+ raise TypeError(
+ 'Cannot assign to extension "%s" because it is a composite type.' %
+ extension.full_name)
+ if extension.is_repeated:
+ try:
+ index, value = args
+ except ValueError:
+ raise TypeError(
+ "SetExtension(extension, index, value) for repeated extension "
+ "takes exactly 4 arguments: (%d given)" % (len(args) + 2))
+ self._extension_fields[extension][index] = value
+ else:
+ try:
+ (value,) = args
+ except ValueError:
+ raise TypeError(
+ "SetExtension(extension, value) for singular extension "
+ "takes exactly 3 arguments: (%d given)" % (len(args) + 2))
+ self._extension_fields[extension] = value
+
+  def MutableExtension(self, extension, index=None):
+    """Returns a mutable message for a composite extension.
+
+    For a singular composite extension, an empty message is created and
+    stored on first access. For a repeated composite extension, index is
+    required and selects an existing element.
+    """
+ self._VerifyExtensionIdentifier(extension)
+ if extension.composite_cls is None:
+ raise TypeError(
+ 'MutableExtension() cannot be applied to "%s", because it is not a '
+ 'composite type.' % extension.full_name)
+ if extension.is_repeated:
+ if index is None:
+ raise TypeError(
+ 'MutableExtension(extension, index) for repeated extension '
+ 'takes exactly 2 arguments: (1 given)')
+ return self.GetExtension(extension, index)
+ if extension in self._extension_fields:
+ return self._extension_fields[extension]
+ else:
+ result = extension.composite_cls()
+ self._extension_fields[extension] = result
+ return result
+
+  def ExtensionList(self, extension):
+    """Returns the mutable list of values for a repeated extension."""
+ self._VerifyExtensionIdentifier(extension)
+ if not extension.is_repeated:
+ raise TypeError(
+ 'ExtensionList() cannot be applied to "%s", because it is not a '
+ 'repeated extension.' % extension.full_name)
+ if extension in self._extension_fields:
+ return self._extension_fields[extension]
+ result = []
+ self._extension_fields[extension] = result
+ return result
+
+  def ExtensionSize(self, extension):
+    """Returns the number of elements set on a repeated extension."""
+ self._VerifyExtensionIdentifier(extension)
+ if not extension.is_repeated:
+ raise TypeError(
+ 'ExtensionSize() cannot be applied to "%s", because it is not a '
+ 'repeated extension.' % extension.full_name)
+ if extension in self._extension_fields:
+ return len(self._extension_fields[extension])
+ return 0
+
+  def AddExtension(self, extension, value=None):
+    """Appends a value to a repeated extension.
+
+    For composite (message-valued) extensions, value must be None; a new
+    empty message is appended and returned so the caller can fill it in.
+    For scalar extensions, value is appended directly.
+    """
+ self._VerifyExtensionIdentifier(extension)
+ if not extension.is_repeated:
+ raise TypeError(
+ 'AddExtension() cannot be applied to "%s", because it is not a '
+ 'repeated extension.' % extension.full_name)
+ if extension in self._extension_fields:
+ field = self._extension_fields[extension]
+ else:
+ field = []
+ self._extension_fields[extension] = field
+
+ if extension.composite_cls:
+ if value is not None:
+ raise TypeError(
+ 'value must not be set in AddExtension() for "%s", because it is '
+ 'a message type extension. Set values on the returned message '
+ 'instead.' % extension.full_name)
+ msg = extension.composite_cls()
+ field.append(msg)
+ return msg
+
+ field.append(value)
+
+ def _VerifyExtensionIdentifier(self, extension):
+ if extension.containing_cls != self.__class__:
+      raise TypeError("Containing type of %s is %s, not %s."
+ % (extension.full_name,
+ extension.containing_cls.__name__,
+ self.__class__.__name__))
+
+ def _MergeExtensionFields(self, x):
+ for ext, val in x._extension_fields.items():
+ if ext.is_repeated:
+ for single_val in val:
+ if ext.composite_cls is None:
+ self.AddExtension(ext, single_val)
+ else:
+ self.AddExtension(ext).MergeFrom(single_val)
+ else:
+ if ext.composite_cls is None:
+ self.SetExtension(ext, val)
+ else:
+ self.MutableExtension(ext).MergeFrom(val)
+
+ def _ListExtensions(self):
+ return sorted(
+ (ext for ext in self._extension_fields
+ if (not ext.is_repeated) or self.ExtensionSize(ext) > 0),
+ key=lambda item: item.number)
+
+ def _ExtensionEquals(self, x):
+ extensions = self._ListExtensions()
+ if extensions != x._ListExtensions():
+ return False
+ for ext in extensions:
+ if ext.is_repeated:
+ if self.ExtensionSize(ext) != x.ExtensionSize(ext): return False
+        for e1, e2 in zip(self.ExtensionList(ext),
+                          x.ExtensionList(ext)):
+ if e1 != e2: return False
+ else:
+ if self.GetExtension(ext) != x.GetExtension(ext): return False
+ return True
+
+  def _OutputExtensionFields(self, out, partial, extensions, start_index,
+                             end_field_number):
+    """Serializes extensions whose field numbers precede end_field_number.
+
+    Iterates the sorted extensions list starting at start_index and writes
+    each extension until one with a field number >= end_field_number is
+    found. Returns the index of that extension (so a later call can resume
+    there), or len(extensions) if all remaining extensions were written.
+    """
+
+ def OutputSingleField(ext, value):
+ out.putVarInt32(ext.wire_tag)
+ if ext.field_type == TYPE_GROUP:
+ if partial:
+ value.OutputPartial(out)
+ else:
+ value.OutputUnchecked(out)
+ out.putVarInt32(ext.wire_tag + 1)
+ elif ext.field_type == TYPE_FOREIGN:
+ if partial:
+ out.putVarInt32(value.ByteSizePartial())
+ value.OutputPartial(out)
+ else:
+ out.putVarInt32(value.ByteSize())
+ value.OutputUnchecked(out)
+ else:
+ Encoder._TYPE_TO_METHOD[ext.field_type](out, value)
+
+ for ext_index, ext in enumerate(
+ itertools.islice(extensions, start_index, None), start=start_index):
+      if ext.number >= end_field_number:
+        # The extensions list is sorted, so everything from here on belongs
+        # to a later field-number range; report where to resume.
+        return ext_index
+ if ext.is_repeated:
+ for field in self._extension_fields[ext]:
+ OutputSingleField(ext, field)
+ else:
+ OutputSingleField(ext, self._extension_fields[ext])
+ return len(extensions)
+
+ def _ParseOneExtensionField(self, wire_tag, d):
+ number = wire_tag >> 3
+ if number in self._extensions_by_field_number:
+ ext = self._extensions_by_field_number[number]
+      if wire_tag != ext.wire_tag:
+        # The wire type does not match the extension's declaration, so the
+        # value cannot be parsed as this extension.
+        return
+ if ext.field_type == TYPE_FOREIGN:
+ length = d.getVarInt32()
+ tmp = Decoder(d.buffer(), d.pos(), d.pos() + length)
+ if ext.is_repeated:
+ self.AddExtension(ext).TryMerge(tmp)
+ else:
+ self.MutableExtension(ext).TryMerge(tmp)
+ d.skip(length)
+ elif ext.field_type == TYPE_GROUP:
+ if ext.is_repeated:
+ self.AddExtension(ext).TryMerge(d)
+ else:
+ self.MutableExtension(ext).TryMerge(d)
+ else:
+ value = Decoder._TYPE_TO_METHOD[ext.field_type](d)
+ if ext.is_repeated:
+ self.AddExtension(ext, value)
+ else:
+ self.SetExtension(ext, value)
+    else:
+      # Unknown extension field number: skip over its payload.
+      d.skipData(wire_tag)
+
+ def _ExtensionByteSize(self, partial):
+ size = 0
+    for extension, value in self._extension_fields.items():
+ ftype = extension.field_type
+ tag_size = self.lengthVarInt64(extension.wire_tag)
+ if ftype == TYPE_GROUP:
+ tag_size *= 2
+ if extension.is_repeated:
+ size += tag_size * len(value)
+ for single_value in value:
+ size += self._FieldByteSize(ftype, single_value, partial)
+ else:
+ size += tag_size + self._FieldByteSize(ftype, value, partial)
+ return size
+
+ def _FieldByteSize(self, ftype, value, partial):
+ size = 0
+ if ftype == TYPE_STRING:
+ size = self.lengthString(len(value))
+ elif ftype == TYPE_FOREIGN or ftype == TYPE_GROUP:
+ if partial:
+ size = self.lengthString(value.ByteSizePartial())
+ else:
+ size = self.lengthString(value.ByteSize())
+ elif ftype == TYPE_INT64 or ftype == TYPE_UINT64 or ftype == TYPE_INT32:
+ size = self.lengthVarInt64(value)
+ else:
+ if ftype in Encoder._TYPE_TO_BYTE_SIZE:
+ size = Encoder._TYPE_TO_BYTE_SIZE[ftype]
+ else:
+ raise AssertionError(
+ 'Extension type %d is not recognized.' % ftype)
+ return size
+
+ def _ExtensionDebugString(self, prefix, printElemNumber):
+ res = ''
+ extensions = self._ListExtensions()
+ for extension in extensions:
+ value = self._extension_fields[extension]
+      if extension.is_repeated:
+        cnt = 0
+        for e in value:
+          elm = ""
+          if printElemNumber: elm = "(%d)" % cnt
+          if extension.composite_cls is not None:
+            res += prefix + "[%s%s] {\n" % (extension.full_name, elm)
+            res += e.__str__(prefix + "  ", printElemNumber)
+            res += prefix + "}\n"
+          cnt += 1
+ else:
+ if extension.composite_cls is not None:
+ res += prefix + "[%s] {\n" % extension.full_name
+ res += value.__str__(
+ prefix + " ", printElemNumber)
+ res += prefix + "}\n"
+ else:
+ if extension.field_type in _TYPE_TO_DEBUG_STRING:
+ text_value = _TYPE_TO_DEBUG_STRING[
+ extension.field_type](self, value)
+ else:
+ text_value = self.DebugFormat(value)
+ res += prefix + "[%s]: %s\n" % (extension.full_name, text_value)
+ return res
+
+ @staticmethod
+ def _RegisterExtension(cls, extension, composite_cls=None):
+ extension.containing_cls = cls
+ extension.composite_cls = composite_cls
+ if composite_cls is not None:
+ extension.message_name = composite_cls._PROTO_DESCRIPTOR_NAME
+ actual_handle = cls._extensions_by_field_number.setdefault(
+ extension.number, extension)
+ if actual_handle is not extension:
+ raise AssertionError(
+ 'Extensions "%s" and "%s" both try to extend message type "%s" with '
+ 'field number %d.' %
+ (extension.full_name, actual_handle.full_name,
+ cls.__name__, extension.number))
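+
+
+# Sketch of the extension API above, assuming a hypothetical generated
+# message class MyMessage with a registered scalar ExtensionIdentifier
+# my_ext (neither exists in this module):
+#
+#   msg = MyMessage()
+#   msg.SetExtension(my_ext, 42)
+#   assert msg.HasExtension(my_ext)
+#   assert msg.GetExtension(my_ext) == 42
+#   msg.ClearExtension(my_ext)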
diff --git a/third_party/google/net/proto/__init__.py b/third_party/google/net/proto/__init__.py
new file mode 100644
index 0000000..93e6786
--- /dev/null
+++ b/third_party/google/net/proto/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/third_party/protorpc/LICENSE b/third_party/protorpc/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/third_party/protorpc/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/third_party/protorpc/README.monorail b/third_party/protorpc/README.monorail
new file mode 100644
index 0000000..5f8eaf1
--- /dev/null
+++ b/third_party/protorpc/README.monorail
@@ -0,0 +1,14 @@
+Name: ProtoRPC
+Short Name: protorpc
+URL: https://github.com/google/protorpc
+Version: 0.12.0
+License: Apache 2.0
+License File: LICENSE
+Security Critical: no
+Description:
+ProtoRPC is a framework for implementing HTTP-based remote procedure call
+(RPC) services.
+Local Modifications:
+1. Retain only the protorpc/remote.py file.
+2. Rename my_service.async to my_service.async_
+ We don't use the async feature, and it's a reserved keyword in Python 3.
+3. array.array.tostring() and array.array.fromstring() are renamed in Python 3.
+ Use array.array.tobytes() and array.array.frombytes(), respectively.
diff --git a/third_party/protorpc/protobuf.py b/third_party/protorpc/protobuf.py
new file mode 100644
index 0000000..6de3bce
--- /dev/null
+++ b/third_party/protorpc/protobuf.py
@@ -0,0 +1,360 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Protocol buffer support for message types.
+
+For more details about protocol buffer encoding and decoding please see:
+
+ http://code.google.com/apis/protocolbuffers/docs/encoding.html
+
+Public Exceptions:
+ DecodeError: Raised when a decode error occurs from incorrect protobuf format.
+
+Public Functions:
+  encode_message: Encodes a message into a protocol buffer string.
+  decode_message: Decodes a protocol buffer string into a message.
+"""
+import six
+
+__author__ = 'rafek@google.com (Rafe Kaplan)'
+
+
+import array
+
+from . import message_types
+from . import messages
+from . import util
+from .google_imports import ProtocolBuffer
+
+
+__all__ = ['ALTERNATIVE_CONTENT_TYPES',
+ 'CONTENT_TYPE',
+ 'encode_message',
+ 'decode_message',
+ ]
+
+CONTENT_TYPE = 'application/octet-stream'
+
+ALTERNATIVE_CONTENT_TYPES = ['application/x-google-protobuf']
+
+
+class _Encoder(ProtocolBuffer.Encoder):
+ """Extension of protocol buffer encoder.
+
+  The original protocol buffer encoder does not have a complete set of
+  methods for the encoding this module requires. This class adds them.
+ """
+
+ # TODO(rafek): Implement the missing encoding types.
+ def no_encoding(self, value):
+ """No encoding available for type.
+
+ Args:
+ value: Value to encode.
+
+ Raises:
+ NotImplementedError at all times.
+ """
+ raise NotImplementedError()
+
+ def encode_enum(self, value):
+ """Encode an enum value.
+
+ Args:
+ value: Enum to encode.
+ """
+ self.putVarInt32(value.number)
+
+ def encode_message(self, value):
+    """Encode a Message into an embedded message.
+
+ Args:
+ value: Message instance to encode.
+ """
+ self.putPrefixedString(encode_message(value))
+
+
+ def encode_unicode_string(self, value):
+    """Helper that encodes unicode strings to UTF-8 before writing them.
+
+ Args:
+ value: String value to encode.
+ """
+ if isinstance(value, six.text_type):
+ value = value.encode('utf-8')
+ self.putPrefixedString(value)
+
+
+class _Decoder(ProtocolBuffer.Decoder):
+ """Extension of protocol buffer decoder.
+
+  The original protocol buffer decoder does not have a complete set of
+  methods for the decoding this module requires. This class adds them.
+ """
+
+ # TODO(rafek): Implement the missing encoding types.
+ def no_decoding(self):
+ """No decoding available for type.
+
+ Raises:
+ NotImplementedError at all times.
+ """
+ raise NotImplementedError()
+
+ def decode_string(self):
+ """Decode a unicode string.
+
+ Returns:
+ Next value in stream as a unicode string.
+ """
+ return self.getPrefixedString().decode('UTF-8')
+
+ def decode_boolean(self):
+ """Decode a boolean value.
+
+ Returns:
+ Next value in stream as a boolean.
+ """
+ return bool(self.getBoolean())
+
+
+# Number of low-order bits in a field tag used to encode the wire type.
+_WIRE_TYPE_BITS = 3
+_WIRE_TYPE_MASK = 7
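+
+# For example, field number 1 carrying a STRING wire type (2) is encoded as
+# the tag (1 << _WIRE_TYPE_BITS) | 2 == 0x0A; decoding reverses this with
+# encoded_tag >> _WIRE_TYPE_BITS and encoded_tag & _WIRE_TYPE_MASK.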
+
+
+# Maps variant to underlying wire type. Many variants map to the same type.
+_VARIANT_TO_WIRE_TYPE = {
+ messages.Variant.DOUBLE: _Encoder.DOUBLE,
+ messages.Variant.FLOAT: _Encoder.FLOAT,
+ messages.Variant.INT64: _Encoder.NUMERIC,
+ messages.Variant.UINT64: _Encoder.NUMERIC,
+ messages.Variant.INT32: _Encoder.NUMERIC,
+ messages.Variant.BOOL: _Encoder.NUMERIC,
+ messages.Variant.STRING: _Encoder.STRING,
+ messages.Variant.MESSAGE: _Encoder.STRING,
+ messages.Variant.BYTES: _Encoder.STRING,
+ messages.Variant.UINT32: _Encoder.NUMERIC,
+ messages.Variant.ENUM: _Encoder.NUMERIC,
+ messages.Variant.SINT32: _Encoder.NUMERIC,
+ messages.Variant.SINT64: _Encoder.NUMERIC,
+}
+
+
+# Maps variant to encoder method.
+_VARIANT_TO_ENCODER_MAP = {
+ messages.Variant.DOUBLE: _Encoder.putDouble,
+ messages.Variant.FLOAT: _Encoder.putFloat,
+ messages.Variant.INT64: _Encoder.putVarInt64,
+ messages.Variant.UINT64: _Encoder.putVarUint64,
+ messages.Variant.INT32: _Encoder.putVarInt32,
+ messages.Variant.BOOL: _Encoder.putBoolean,
+ messages.Variant.STRING: _Encoder.encode_unicode_string,
+ messages.Variant.MESSAGE: _Encoder.encode_message,
+ messages.Variant.BYTES: _Encoder.encode_unicode_string,
+ messages.Variant.UINT32: _Encoder.no_encoding,
+ messages.Variant.ENUM: _Encoder.encode_enum,
+ messages.Variant.SINT32: _Encoder.no_encoding,
+ messages.Variant.SINT64: _Encoder.no_encoding,
+}
+
+
+# Basic wire format decoders. Used for reading unknown values.
+_WIRE_TYPE_TO_DECODER_MAP = {
+ _Encoder.NUMERIC: _Decoder.getVarInt64,
+ _Encoder.DOUBLE: _Decoder.getDouble,
+ _Encoder.STRING: _Decoder.getPrefixedString,
+ _Encoder.FLOAT: _Decoder.getFloat,
+}
+
+
+# Map wire type to variant. Used to find a variant for unknown values.
+_WIRE_TYPE_TO_VARIANT_MAP = {
+ _Encoder.NUMERIC: messages.Variant.INT64,
+ _Encoder.DOUBLE: messages.Variant.DOUBLE,
+ _Encoder.STRING: messages.Variant.STRING,
+ _Encoder.FLOAT: messages.Variant.FLOAT,
+}
+
+
+# Wire type to name mapping for error messages.
+_WIRE_TYPE_NAME = {
+ _Encoder.NUMERIC: 'NUMERIC',
+ _Encoder.DOUBLE: 'DOUBLE',
+ _Encoder.STRING: 'STRING',
+ _Encoder.FLOAT: 'FLOAT',
+}
+
+
+# Maps variant to decoder method.
+_VARIANT_TO_DECODER_MAP = {
+ messages.Variant.DOUBLE: _Decoder.getDouble,
+ messages.Variant.FLOAT: _Decoder.getFloat,
+ messages.Variant.INT64: _Decoder.getVarInt64,
+ messages.Variant.UINT64: _Decoder.getVarUint64,
+ messages.Variant.INT32: _Decoder.getVarInt32,
+ messages.Variant.BOOL: _Decoder.decode_boolean,
+ messages.Variant.STRING: _Decoder.decode_string,
+ messages.Variant.MESSAGE: _Decoder.getPrefixedString,
+ messages.Variant.BYTES: _Decoder.getPrefixedString,
+ messages.Variant.UINT32: _Decoder.no_decoding,
+ messages.Variant.ENUM: _Decoder.getVarInt32,
+ messages.Variant.SINT32: _Decoder.no_decoding,
+ messages.Variant.SINT64: _Decoder.no_decoding,
+}
+
+
+def encode_message(message):
+ """Encode Message instance to protocol buffer.
+
+  Args:
+    message: Message instance to encode into a protocol buffer.
+
+ Returns:
+ String encoding of Message instance in protocol buffer format.
+
+ Raises:
+ messages.ValidationError if message is not initialized.
+ """
+ message.check_initialized()
+ encoder = _Encoder()
+
+ # Get all fields, from the known fields we parsed and the unknown fields
+ # we saved. Note which ones were known, so we can process them differently.
+ all_fields = [(field.number, field) for field in message.all_fields()]
+ all_fields.extend((key, None)
+ for key in message.all_unrecognized_fields()
+ if isinstance(key, six.integer_types))
+ all_fields.sort()
+ for field_num, field in all_fields:
+ if field:
+ # Known field.
+ value = message.get_assigned_value(field.name)
+ if value is None:
+ continue
+ variant = field.variant
+ repeated = field.repeated
+ else:
+ # Unrecognized field.
+ value, variant = message.get_unrecognized_field_info(field_num)
+ if not isinstance(variant, messages.Variant):
+ continue
+ repeated = isinstance(value, (list, tuple))
+
+ tag = ((field_num << _WIRE_TYPE_BITS) | _VARIANT_TO_WIRE_TYPE[variant])
+
+ # Write value to wire.
+ if repeated:
+ values = value
+ else:
+ values = [value]
+    for item in values:
+      encoder.putVarInt32(tag)
+      if isinstance(field, messages.MessageField):
+        item = field.value_to_message(item)
+      field_encoder = _VARIANT_TO_ENCODER_MAP[variant]
+      field_encoder(encoder, item)
+
+ buffer = encoder.buffer()
+ return buffer.tobytes()
+
+
+def decode_message(message_type, encoded_message):
+ """Decode protocol buffer to Message instance.
+
+ Args:
+ message_type: Message type to decode data to.
+ encoded_message: Encoded version of message as string.
+
+ Returns:
+ Decoded instance of message_type.
+
+ Raises:
+ DecodeError if an error occurs during decoding, such as incompatible
+ wire format for a field.
+ messages.ValidationError if merged message is not initialized.
+ """
+ message = message_type()
+ message_array = array.array('B')
+ message_array.frombytes(encoded_message)
+ try:
+ decoder = _Decoder(message_array, 0, len(message_array))
+
+ while decoder.avail() > 0:
+ # Decode tag and variant information.
+ encoded_tag = decoder.getVarInt32()
+ tag = encoded_tag >> _WIRE_TYPE_BITS
+ wire_type = encoded_tag & _WIRE_TYPE_MASK
+ try:
+ found_wire_type_decoder = _WIRE_TYPE_TO_DECODER_MAP[wire_type]
+      except KeyError:
+        raise messages.DecodeError('No such wire type %d' % wire_type)
+
+ if tag < 1:
+ raise messages.DecodeError('Invalid tag value %d' % tag)
+
+ try:
+ field = message.field_by_number(tag)
+ except KeyError:
+ # Unexpected tags are ok.
+ field = None
+ wire_type_decoder = found_wire_type_decoder
+ else:
+ expected_wire_type = _VARIANT_TO_WIRE_TYPE[field.variant]
+ if expected_wire_type != wire_type:
+ raise messages.DecodeError('Expected wire type %s but found %s' % (
+ _WIRE_TYPE_NAME[expected_wire_type],
+ _WIRE_TYPE_NAME[wire_type]))
+
+ wire_type_decoder = _VARIANT_TO_DECODER_MAP[field.variant]
+
+ value = wire_type_decoder(decoder)
+
+ # Save unknown fields and skip additional processing.
+ if not field:
+ # When saving this, save it under the tag number (which should
+ # be unique), and set the variant and value so we know how to
+ # interpret the value later.
+ variant = _WIRE_TYPE_TO_VARIANT_MAP.get(wire_type)
+ if variant:
+ message.set_unrecognized_field(tag, value, variant)
+ continue
+
+ # Special case Enum and Message types.
+ if isinstance(field, messages.EnumField):
+ try:
+ value = field.type(value)
+ except TypeError:
+ raise messages.DecodeError('Invalid enum value %s' % value)
+ elif isinstance(field, messages.MessageField):
+ value = decode_message(field.message_type, value)
+ value = field.value_from_message(value)
+
+ # Merge value in to message.
+ if field.repeated:
+ values = getattr(message, field.name)
+ if values is None:
+ setattr(message, field.name, [value])
+ else:
+ values.append(value)
+ else:
+ setattr(message, field.name, value)
+ except ProtocolBuffer.ProtocolBufferDecodeError as err:
+ raise messages.DecodeError('Decoding error: %s' % str(err))
+
+ message.check_initialized()
+ return message
diff --git a/third_party/protorpc/remote.py b/third_party/protorpc/remote.py
new file mode 100644
index 0000000..7983573
--- /dev/null
+++ b/third_party/protorpc/remote.py
@@ -0,0 +1,1248 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Remote service library.
+
+This module contains classes that are useful for building remote services that
+conform to a standard request and response model. To conform to this model
+a service must be like the following class:
+
+ # Each service instance only handles a single request and is then discarded.
+  # Make these objects lightweight.
+ class Service(object):
+
+ # It must be possible to construct service objects without any parameters.
+ # If your constructor needs extra information you should provide a
+ # no-argument factory function to create service instances.
+ def __init__(self):
+ ...
+
+ # Each remote method must use the 'method' decorator, passing the request
+ # and response message types. The remote method itself must take a single
+ # parameter which is an instance of RequestMessage and return an instance
+ # of ResponseMessage.
+ @method(RequestMessage, ResponseMessage)
+ def remote_method(self, request):
+ # Return an instance of ResponseMessage.
+
+ # A service object may optionally implement an 'initialize_request_state'
+ # method that takes as a parameter a single instance of a RequestState. If
+ # a service does not implement this method it will not receive the request
+ # state.
+ def initialize_request_state(self, state):
+ ...
+
+The 'Service' class is provided as a convenient base class that provides the
+above functionality. It implements all required and optional methods for a
+service. It also has convenience methods for creating factory functions that
+can pass persistent global state to a new service instance.
+
+The 'method' decorator is used to declare which methods of a class are
+meant to service RPCs. While this decorator is not responsible for handling
+actual remote method invocations, such as handling sockets, handling various
+RPC protocols and checking messages for correctness, it does attach
+information to methods that responsible classes can examine to ensure the
+correctness of the RPC.
+
+When the method decorator is used on a method, the wrapper method will have a
+'remote' property associated with it. The 'remote' property contains the
+request_type and response_type expected by the method's implementation.
+
+On its own, the method decorator does not provide any support for subclassing
+remote methods. In order to extend a service, one would need to redecorate
+the subclass's methods. For example:
+
+ class MyService(Service):
+
+ @method(DoSomethingRequest, DoSomethingResponse)
+ def do_stuff(self, request):
+ ... implement do_stuff ...
+
+ class MyBetterService(MyService):
+
+ @method(DoSomethingRequest, DoSomethingResponse)
+ def do_stuff(self, request):
+ response = super(MyBetterService, self).do_stuff.remote.method(request)
+ ... do stuff with response ...
+ return response
+
+A Service subclass also has a Stub class that can be used with a transport for
+making RPCs. When a stub is created, it is capable of doing both synchronous
+and asynchronous RPCs if the underlying transport supports it. To make a stub
+using an HTTP transport do:
+
+ my_service = MyService.Stub(HttpTransport('<my service URL>'))
+
+For synchronous calls, just call the expected methods on the service stub:
+
+ request = DoSomethingRequest()
+ ...
+ response = my_service.do_something(request)
+
+Each stub instance has an async_ object that can be used for initiating
+asynchronous RPCs if the underlying protocol transport supports it. To
+make an asynchronous call, do:
+
+ rpc = my_service.async_.do_something(request)
+ response = rpc.get_response()
+"""
+
+from __future__ import with_statement
+import six
+
+__author__ = 'rafek@google.com (Rafe Kaplan)'
+
+import functools
+import logging
+import sys
+import threading
+from wsgiref import headers as wsgi_headers
+
+from . import message_types
+from . import messages
+from . import protobuf
+from . import protojson
+from . import util
+
+
+__all__ = [
+ 'ApplicationError',
+ 'MethodNotFoundError',
+ 'NetworkError',
+ 'RequestError',
+ 'RpcError',
+ 'ServerError',
+ 'ServiceConfigurationError',
+ 'ServiceDefinitionError',
+
+ 'HttpRequestState',
+ 'ProtocolConfig',
+ 'Protocols',
+ 'RequestState',
+ 'RpcState',
+ 'RpcStatus',
+ 'Service',
+ 'StubBase',
+ 'check_rpc_status',
+ 'get_remote_method_info',
+ 'is_error_status',
+ 'method',
+ 'remote',
+]
+
+
+class ServiceDefinitionError(messages.Error):
+ """Raised when a service is improperly defined."""
+
+
+class ServiceConfigurationError(messages.Error):
+ """Raised when a service is incorrectly configured."""
+
+
+# TODO: Use error_name to map to specific exception message types.
+class RpcStatus(messages.Message):
+ """Status of on-going or complete RPC.
+
+ Fields:
+ state: State of RPC.
+ error_name: Error name set by application. Only set when
+ status is APPLICATION_ERROR. For use by application to transmit
+ specific reason for error.
+ error_message: Error message associated with status.
+ """
+
+ class State(messages.Enum):
+ """Enumeration of possible RPC states.
+
+ Values:
+ OK: Completed successfully.
+ RUNNING: Still running, not complete.
+ REQUEST_ERROR: Request was malformed or incomplete.
+ SERVER_ERROR: Server experienced an unexpected error.
+      NETWORK_ERROR: An error occurred on the network.
+ APPLICATION_ERROR: The application is indicating an error.
+        When in this state, RPC should also set application_error.
+      METHOD_NOT_FOUND_ERROR: The requested method was not found on the
+        service.
+    """
+ OK = 0
+ RUNNING = 1
+
+ REQUEST_ERROR = 2
+ SERVER_ERROR = 3
+ NETWORK_ERROR = 4
+ APPLICATION_ERROR = 5
+ METHOD_NOT_FOUND_ERROR = 6
+
+ state = messages.EnumField(State, 1, required=True)
+ error_message = messages.StringField(2)
+ error_name = messages.StringField(3)
+
+
+RpcState = RpcStatus.State
+
+
+class RpcError(messages.Error):
+ """Base class for RPC errors.
+
+ Each sub-class of RpcError is associated with an error value from RpcState
+ and has an attribute STATE that refers to that value.
+ """
+
+ def __init__(self, message, cause=None):
+ super(RpcError, self).__init__(message)
+ self.cause = cause
+
+ @classmethod
+ def from_state(cls, state):
+ """Get error class from RpcState.
+
+ Args:
+ state: RpcState value. Can be enum value itself, string or int.
+
+ Returns:
+ Exception class mapped to value if state is an error. Returns None
+ if state is OK or RUNNING.
+ """
+ return _RPC_STATE_TO_ERROR.get(RpcState(state))
+
+
+class RequestError(RpcError):
+ """Raised when wrong request objects received during method invocation."""
+
+ STATE = RpcState.REQUEST_ERROR
+
+
+class MethodNotFoundError(RequestError):
+ """Raised when unknown method requested by RPC."""
+
+ STATE = RpcState.METHOD_NOT_FOUND_ERROR
+
+
+class NetworkError(RpcError):
+ """Raised when network error occurs during RPC."""
+
+ STATE = RpcState.NETWORK_ERROR
+
+
+class ServerError(RpcError):
+  """Unexpected error occurred on server."""
+
+ STATE = RpcState.SERVER_ERROR
+
+
+class ApplicationError(RpcError):
+ """Raised for application specific errors.
+
+ Attributes:
+ error_name: Application specific error name for exception.
+ """
+
+ STATE = RpcState.APPLICATION_ERROR
+
+ def __init__(self, message, error_name=None):
+ """Constructor.
+
+ Args:
+ message: Application specific error message.
+ error_name: Application specific error name. Must be None, string
+ or unicode string.
+ """
+ super(ApplicationError, self).__init__(message)
+ self.error_name = error_name
+
+ def __str__(self):
+ return self.args[0] or ''
+
+ def __repr__(self):
+ if self.error_name is None:
+ error_format = ''
+ else:
+ error_format = ', %r' % self.error_name
+ return '%s(%r%s)' % (type(self).__name__, self.args[0], error_format)
+
+
+_RPC_STATE_TO_ERROR = {
+ RpcState.REQUEST_ERROR: RequestError,
+ RpcState.NETWORK_ERROR: NetworkError,
+ RpcState.SERVER_ERROR: ServerError,
+ RpcState.APPLICATION_ERROR: ApplicationError,
+ RpcState.METHOD_NOT_FOUND_ERROR: MethodNotFoundError,
+}
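+
+# For example (illustrative; RpcError.from_state maps an RpcState value to
+# its exception class):
+#
+#   assert RpcError.from_state(RpcState.NETWORK_ERROR) is NetworkError
+#   assert RpcError.from_state(RpcState.OK) is None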
+
+class _RemoteMethodInfo(object):
+ """Object for encapsulating remote method information.
+
+  An instance of this class is attached as the 'remote' attribute of the
+  method's 'invoke_remote_method' wrapper function.
+
+  Instances of this class are created by the method decorator and should not
+  be created directly.
+ """
+
+ def __init__(self,
+ method,
+ request_type,
+ response_type):
+ """Constructor.
+
+ Args:
+ method: The method which implements the remote method. This is a
+ function that will act as an instance method of a class definition
+ that is decorated by '@method'. It must always take 'self' as its
+ first parameter.
+ request_type: Expected request type for the remote method.
+ response_type: Expected response type for the remote method.
+ """
+ self.__method = method
+ self.__request_type = request_type
+ self.__response_type = response_type
+
+ @property
+ def method(self):
+ """Original undecorated method."""
+ return self.__method
+
+ @property
+ def request_type(self):
+ """Expected request type for remote method."""
+ if isinstance(self.__request_type, six.string_types):
+ self.__request_type = messages.find_definition(
+ self.__request_type,
+ relative_to=sys.modules[self.__method.__module__])
+ return self.__request_type
+
+ @property
+ def response_type(self):
+ """Expected response type for remote method."""
+ if isinstance(self.__response_type, six.string_types):
+ self.__response_type = messages.find_definition(
+ self.__response_type,
+ relative_to=sys.modules[self.__method.__module__])
+ return self.__response_type
+
+
+def method(request_type=message_types.VoidMessage,
+ response_type=message_types.VoidMessage):
+ """Method decorator for creating remote methods.
+
+ Args:
+ request_type: Message type of expected request.
+ response_type: Message type of expected response.
+
+ Returns:
+ 'remote_method_wrapper' function.
+
+ Raises:
+ TypeError: if the request_type or response_type parameters are not
+ proper subclasses of messages.Message.
+ """
+ if (not isinstance(request_type, six.string_types) and
+ (not isinstance(request_type, type) or
+ not issubclass(request_type, messages.Message) or
+ request_type is messages.Message)):
+    raise TypeError(
+        'Must provide message class for request-type. Found %s' %
+        request_type)
+
+ if (not isinstance(response_type, six.string_types) and
+ (not isinstance(response_type, type) or
+ not issubclass(response_type, messages.Message) or
+ response_type is messages.Message)):
+    raise TypeError(
+        'Must provide message class for response-type. Found %s' %
+        response_type)
+
+ def remote_method_wrapper(method):
+ """Decorator used to wrap method.
+
+ Args:
+ method: Original method being wrapped.
+
+ Returns:
+ 'invoke_remote_method' function responsible for actual invocation.
+ This invocation function instance is assigned an attribute 'remote'
+ which contains information about the remote method:
+ request_type: Expected request type for remote method.
+ response_type: Response type returned from remote method.
+
+ Raises:
+ TypeError: If request_type or response_type is not a subclass of Message
+ or is the Message class itself.
+ """
+
+ @functools.wraps(method)
+ def invoke_remote_method(service_instance, request):
+ """Function used to replace original method.
+
+ Invoke wrapped remote method. Checks to ensure that request and
+ response objects are the correct types.
+
+ Does not check whether messages are initialized.
+
+ Args:
+ service_instance: The service object whose method is being invoked.
+ This is passed to 'self' during the invocation of the original
+ method.
+ request: Request message.
+
+ Returns:
+ Results of calling wrapped remote method.
+
+ Raises:
+ RequestError: Request object is not of the correct type.
+ ServerError: Response object is not of the correct type.
+ """
+ if not isinstance(request, remote_method_info.request_type):
+ raise RequestError('Method %s.%s expected request type %s, '
+ 'received %s' %
+ (type(service_instance).__name__,
+ method.__name__,
+ remote_method_info.request_type,
+ type(request)))
+ response = method(service_instance, request)
+ if not isinstance(response, remote_method_info.response_type):
+ raise ServerError('Method %s.%s expected response type %s, '
+ 'sent %s' %
+ (type(service_instance).__name__,
+ method.__name__,
+ remote_method_info.response_type,
+ type(response)))
+ return response
+
+ remote_method_info = _RemoteMethodInfo(method,
+ request_type,
+ response_type)
+
+ invoke_remote_method.remote = remote_method_info
+ return invoke_remote_method
+
+ return remote_method_wrapper
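+
+# Illustrative use of the decorator above (EchoRequest, EchoResponse, and
+# the 'content' field are hypothetical message types, not part of this
+# module):
+#
+#   class EchoService(Service):
+#
+#     @method(EchoRequest, EchoResponse)
+#     def echo(self, request):
+#       return EchoResponse(content=request.content)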
+
+
+def remote(request_type, response_type):
+ """Temporary backward compatibility alias for method."""
+  logging.warning('The remote decorator has been renamed to method. It will '
+                  'be removed very soon in future versions of ProtoRPC.')
+ return method(request_type, response_type)
+
+
+def get_remote_method_info(method):
+ """Get remote method info object from remote method.
+
+ Returns:
+ Remote method info object if method is a remote method, else None.
+ """
+ if not callable(method):
+ return None
+
+ try:
+ method_info = method.remote
+ except AttributeError:
+ return None
+
+ if not isinstance(method_info, _RemoteMethodInfo):
+ return None
+
+ return method_info
+
+
+class StubBase(object):
+ """Base class for client side service stubs.
+
+ The remote method stubs are created by the _ServiceClass meta-class
+ when a Service class is first created. The resulting stub will
+ extend both this class and the service class it handles communications for.
+
+ Assume that there is a service:
+
+ class NewContactRequest(messages.Message):
+
+ name = messages.StringField(1, required=True)
+ phone = messages.StringField(2)
+ email = messages.StringField(3)
+
+  class NewContactResponse(messages.Message):
+
+ contact_id = messages.StringField(1)
+
+ class AccountService(remote.Service):
+
+      @remote.method(NewContactRequest, NewContactResponse)
+ def new_contact(self, request):
+ ... implementation ...
+
+ A stub of this service can be called in two ways. The first is to pass in a
+ correctly initialized NewContactRequest message:
+
+ request = NewContactRequest()
+ request.name = 'Bob Somebody'
+ request.phone = '+1 415 555 1234'
+
+ response = account_service_stub.new_contact(request)
+
+ The second way is to pass in keyword parameters that correspond with the root
+ request message type:
+
+ account_service_stub.new_contact(name='Bob Somebody',
+ phone='+1 415 555 1234')
+
+ The second form will create a request message of the appropriate type.
+ """
+
+ def __init__(self, transport):
+ """Constructor.
+
+ Args:
+ transport: Underlying transport to communicate with remote service.
+ """
+ self.__transport = transport
+
+ @property
+ def transport(self):
+ """Transport used to communicate with remote service."""
+ return self.__transport
+
+
+class _ServiceClass(type):
+ """Meta-class for service class."""
+
+ def __new_async_method(cls, remote):
+ """Create asynchronous method for Async handler.
+
+ Args:
+ remote: RemoteInfo to create method for.
+ """
+ def async_method(self, *args, **kwargs):
+      """Asynchronous remote method.
+
+      Stub methods either take a single positional argument when a full
+      request message is passed in, or keyword arguments, but not both.
+
+      See docstring for StubBase for more information on how to use remote
+      stub methods.
+
+      Args:
+        self: Instance of StubBase.Async subclass.
+
+      Returns:
+        Rpc instance used to represent asynchronous RPC.
+      """
+ if args and kwargs:
+ raise TypeError('May not provide both args and kwargs')
+
+ if not args:
+ # Construct request object from arguments.
+ request = remote.request_type()
+ for name, value in six.iteritems(kwargs):
+ setattr(request, name, value)
+ else:
+ # First argument is request object.
+ request = args[0]
+
+ return self.transport.send_rpc(remote, request)
+
+ async_method.__name__ = remote.method.__name__
+ async_method = util.positional(2)(async_method)
+ async_method.remote = remote
+ return async_method
+
+ def __new_sync_method(cls, async_method):
+ """Create synchronous method for stub.
+
+ Args:
+ async_method: asynchronous method to delegate calls to.
+ """
+ def sync_method(self, *args, **kwargs):
+ """Synchronous remote method.
+
+ Args:
+        self: Instance of StubBase subclass.
+ args: Tuple (request,):
+ request: Request object.
+ kwargs: Field values for request. Must be empty if request object
+ is provided.
+
+ Returns:
+ Response message from synchronized RPC.
+ """
+ return async_method(self.async_, *args, **kwargs).response
+ sync_method.__name__ = async_method.__name__
+ sync_method.remote = async_method.remote
+ return sync_method
+
+ def __create_async_methods(cls, remote_methods):
+ """Construct a dictionary of asynchronous methods based on remote methods.
+
+ Args:
+ remote_methods: Dictionary of methods with associated RemoteInfo objects.
+
+ Returns:
+      Dictionary of asynchronous methods with associated RemoteInfo objects.
+ Results added to AsyncStub subclass.
+ """
+ async_methods = {}
+ for method_name, method in remote_methods.items():
+ async_methods[method_name] = cls.__new_async_method(method.remote)
+ return async_methods
+
+ def __create_sync_methods(cls, async_methods):
+ """Construct a dictionary of synchronous methods based on remote methods.
+
+ Args:
+ async_methods: Dictionary of async methods to delegate calls to.
+
+ Returns:
+      Dictionary of synchronous methods with associated RemoteInfo objects.
+ Results added to Stub subclass.
+ """
+ sync_methods = {}
+ for method_name, async_method in async_methods.items():
+ sync_methods[method_name] = cls.__new_sync_method(async_method)
+ return sync_methods
+
+ def __new__(cls, name, bases, dct):
+ """Instantiate new service class instance."""
+ if StubBase not in bases:
+ # Collect existing remote methods.
+ base_methods = {}
+ for base in bases:
+ try:
+ remote_methods = base.__remote_methods
+ except AttributeError:
+ pass
+ else:
+ base_methods.update(remote_methods)
+
+      # Set this class's private attribute so that base_methods do not have
+      # to be recalculated in __init__.
+ dct['_ServiceClass__base_methods'] = base_methods
+
+ for attribute, value in dct.items():
+ base_method = base_methods.get(attribute, None)
+ if base_method:
+ if not callable(value):
+ raise ServiceDefinitionError(
+ 'Must override %s in %s with a method.' % (
+ attribute, name))
+
+ if get_remote_method_info(value):
+ raise ServiceDefinitionError(
+ 'Do not use method decorator when overloading remote method %s '
+ 'on service %s.' %
+ (attribute, name))
+
+ base_remote_method_info = get_remote_method_info(base_method)
+ remote_decorator = method(
+ base_remote_method_info.request_type,
+ base_remote_method_info.response_type)
+ new_remote_method = remote_decorator(value)
+ dct[attribute] = new_remote_method
+
+ return type.__new__(cls, name, bases, dct)
+
+ def __init__(cls, name, bases, dct):
+ """Create uninitialized state on new class."""
+ type.__init__(cls, name, bases, dct)
+
+ # Only service implementation classes should have remote methods and stub
+ # sub classes created. Stub implementations have their own methods passed
+ # in to the type constructor.
+ if StubBase not in bases:
+ # Create list of remote methods.
+ cls.__remote_methods = dict(cls.__base_methods)
+
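+ # Look up each attribute on the class itself rather than using the raw
+ # dct value, so the stored method is the one instances will actually see.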
+ for attribute, value in dct.items():
+ value = getattr(cls, attribute)
+ remote_method_info = get_remote_method_info(value)
+ if remote_method_info:
+ cls.__remote_methods[attribute] = value
+
+ # Build asynchronous stub class.
+ stub_attributes = {'Service': cls}
+ async_methods = cls.__create_async_methods(cls.__remote_methods)
+ stub_attributes.update(async_methods)
+ async_class = type('AsyncStub', (StubBase, cls), stub_attributes)
+ cls.AsyncStub = async_class
+
+ # Constructor for synchronous stub class.
+ def __init__(self, transport):
+ """Constructor.
+
+ Args:
+ transport: Underlying transport to communicate with remote service.
+ """
+ super(cls.Stub, self).__init__(transport)
+ self.async_ = cls.AsyncStub(transport)
+
+ # Build synchronous stub class.
+ stub_attributes = {'Service': cls,
+ '__init__': __init__}
+ stub_attributes.update(cls.__create_sync_methods(async_methods))
+
+ cls.Stub = type('Stub', (StubBase, cls), stub_attributes)
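+
+ # Illustrative example (not part of the original source; names are
+ # hypothetical): for a service with a remote method 'get_item', the
+ # generated stub classes can be used as follows:
+ #
+ #   stub = MyService.Stub(transport)     # synchronous stub
+ #   response = stub.get_item(request)    # waits for the RPC response
+ #
+ #   async_stub = MyService.AsyncStub(transport)
+ #   rpc = async_stub.get_item(request)   # returns an Rpc object
+ #   response = rpc.response              # response, once the RPC completes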
+
+ @staticmethod
+ def all_remote_methods(cls):
+ """Get all remote methods of service.
+
+ Returns:
+ Dict from method name to unbound method.
+ """
+ return dict(cls.__remote_methods)
+
+
+class RequestState(object):
+ """Request state information.
+
+ Properties:
+ remote_host: Remote host name where request originated.
+ remote_address: IP address where request originated.
+ server_host: Host of server within which service resides.
+ server_port: Port on which the service received the request.
+ """
+
+ @util.positional(1)
+ def __init__(self,
+ remote_host=None,
+ remote_address=None,
+ server_host=None,
+ server_port=None):
+ """Constructor.
+
+ Args:
+ remote_host: Assigned to property.
+ remote_address: Assigned to property.
+ server_host: Assigned to property.
+ server_port: Assigned to property.
+ """
+ self.__remote_host = remote_host
+ self.__remote_address = remote_address
+ self.__server_host = server_host
+ self.__server_port = server_port
+
+ @property
+ def remote_host(self):
+ return self.__remote_host
+
+ @property
+ def remote_address(self):
+ return self.__remote_address
+
+ @property
+ def server_host(self):
+ return self.__server_host
+
+ @property
+ def server_port(self):
+ return self.__server_port
+
+ def _repr_items(self):
+ for name in ['remote_host',
+ 'remote_address',
+ 'server_host',
+ 'server_port']:
+ yield name, getattr(self, name)
+
+ def __repr__(self):
+ """String representation of state."""
+ state = [self.__class__.__name__]
+ for name, value in self._repr_items():
+ if value:
+ state.append('%s=%r' % (name, value))
+
+ return '<%s>' % (' '.join(state),)
+
+
+class HttpRequestState(RequestState):
+ """HTTP request state information.
+
+ NOTE: Does not attempt to represent certain types of information from the
+ request, such as the query string, because query strings are not permitted
+ in ProtoRPC URLs unless required by the underlying message format.
+
+ Properties:
+ headers: wsgiref.headers.Headers instance of HTTP request headers.
+ http_method: HTTP method as a string.
+ service_path: Path on HTTP service where service is mounted. This path
+ will not include the remote method name.
+ """
+
+ @util.positional(1)
+ def __init__(self,
+ http_method=None,
+ service_path=None,
+ headers=None,
+ **kwargs):
+ """Constructor.
+
+ Args:
+ Same as RequestState, including:
+ http_method: Assigned to property.
+ service_path: Assigned to property.
+ headers: HTTP request headers. If instance of Headers, assigned to
+ property without copying. If dict, will convert to name value pairs
+ for use with Headers constructor. Otherwise, passed as parameters to
+ Headers constructor.
+ """
+ super(HttpRequestState, self).__init__(**kwargs)
+
+ self.__http_method = http_method
+ self.__service_path = service_path
+
+ # Initialize headers.
+ if isinstance(headers, dict):
+ header_list = []
+ for key, value in sorted(headers.items()):
+ if not isinstance(value, list):
+ value = [value]
+ for item in value:
+ header_list.append((key, item))
+ headers = header_list
+ self.__headers = wsgi_headers.Headers(headers or [])
+
+ @property
+ def http_method(self):
+ return self.__http_method
+
+ @property
+ def service_path(self):
+ return self.__service_path
+
+ @property
+ def headers(self):
+ return self.__headers
+
+ def _repr_items(self):
+ for item in super(HttpRequestState, self)._repr_items():
+ yield item
+
+ for name in ['http_method', 'service_path']:
+ yield name, getattr(self, name)
+
+ yield 'headers', list(self.headers.items())
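+
+# Illustrative example (not part of the original source; values are
+# hypothetical): headers may be provided as a dict, with lists for
+# multi-valued headers:
+#
+#   state = HttpRequestState(
+#       http_method='POST',
+#       service_path='/my_service',
+#       headers={'content-type': 'application/json',
+#                'x-header': ['value-1', 'value-2']},
+#       remote_host='example.com')
+#   state.headers['content-type']  # -> 'application/json'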
+
+
+class Service(six.with_metaclass(_ServiceClass, object)):
+ """Service base class.
+
+ Base class used for defining remote services. Contains reflection functions,
+ useful helpers and built-in remote methods.
+
+ Services are expected to be constructed via either a constructor or factory
+ that takes no parameters. However, it is sometimes necessary to pass state
+ or configuration to a service across multiple requests.
+
+ To do this, define parameters on the constructor of the service and use
+ the 'new_factory' class method to build a factory that will forward those
+ parameters to the constructor. For example:
+
+ class MyService(Service):
+
+ def __init__(self, configuration, state):
+ self.configuration = configuration
+ self.state = state
+
+ configuration = MyServiceConfiguration()
+ global_state = MyServiceState()
+
+ my_service_factory = MyService.new_factory(configuration,
+ state=global_state)
+
+ The contract with any service handler is that a new service object is created
+ to handle each user request, and that its construction takes no parameters.
+ The factory satisfies this condition:
+
+ new_instance = my_service_factory()
+ assert new_instance.state is global_state
+
+ Attributes:
+ request_state: RequestState set via initialize_request_state.
+ """
+
+ __request_state = None
+
+ @classmethod
+ def all_remote_methods(cls):
+ """Get all remote methods for service class.
+
+ Built-in methods do not appear in the dictionary of remote methods.
+
+ Returns:
+ Dictionary mapping method name to remote method.
+ """
+ return _ServiceClass.all_remote_methods(cls)
+
+ @classmethod
+ def new_factory(cls, *args, **kwargs):
+ """Create factory for service.
+
+ Useful for passing configuration or state objects to the service. Accepts
+ arbitrary positional and keyword arguments; however, the underlying service
+ constructor must not require any other parameters.
+
+ Args:
+ args: Args to pass to service constructor.
+ kwargs: Keyword arguments to pass to service constructor.
+
+ Returns:
+ Factory function that will create a new instance and forward args and
+ keywords to the constructor.
+ """
+
+ def service_factory():
+ return cls(*args, **kwargs)
+
+ # Update docstring so that it is easier to debug.
+ full_class_name = '%s.%s' % (cls.__module__, cls.__name__)
+ service_factory.__doc__ = (
+ 'Creates new instances of service %s.\n\n'
+ 'Returns:\n'
+ ' New instance of %s.'
+ % (cls.__name__, full_class_name))
+
+ # Update name so that it is easier to debug the factory function.
+ service_factory.__name__ = '%s_service_factory' % cls.__name__
+
+ service_factory.service_class = cls
+
+ return service_factory
+
+ def initialize_request_state(self, request_state):
+ """Save request state for use in remote method.
+
+ Args:
+ request_state: RequestState instance.
+ """
+ self.__request_state = request_state
+
+ @classmethod
+ def definition_name(cls):
+ """Get definition name for Service class.
+
+ Package name is determined by the global 'package' attribute in the
+ module that contains the Service definition. If no 'package' attribute
+ is available, the module name is used. If no module is found, the class
+ name alone is used.
+
+ Returns:
+ Fully qualified service name.
+ """
+ try:
+ return cls.__definition_name
+ except AttributeError:
+ outer_definition_name = cls.outer_definition_name()
+ if outer_definition_name is None:
+ cls.__definition_name = cls.__name__
+ else:
+ cls.__definition_name = '%s.%s' % (outer_definition_name, cls.__name__)
+
+ return cls.__definition_name
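+
+ # Illustrative example (not part of the original source): for a module
+ # that defines a global 'package = "my_package"' and contains
+ # 'class MyService(Service)', MyService.definition_name() returns
+ # 'my_package.MyService'.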
+
+ @classmethod
+ def outer_definition_name(cls):
+ """Get outer definition name.
+
+ Returns:
+ Package for service. Services are never nested inside other definitions.
+ """
+ return cls.definition_package()
+
+ @classmethod
+ def definition_package(cls):
+ """Get package for service.
+
+ Returns:
+ Package name for service.
+ """
+ try:
+ return cls.__definition_package
+ except AttributeError:
+ cls.__definition_package = util.get_package_for_module(cls.__module__)
+
+ return cls.__definition_package
+
+ @property
+ def request_state(self):
+ """Request state associated with this Service instance."""
+ return self.__request_state
+
+
+def is_error_status(status):
+ """Function that determines whether the RPC status is an error.
+
+ Args:
+ status: Initialized RpcStatus message to check for errors.
+ """
+ status.check_initialized()
+ return RpcError.from_state(status.state) is not None
+
+
+def check_rpc_status(status):
+ """Function converts an error status to a raised exception.
+
+ Args:
+ status: Initialized RpcStatus message to check for errors.
+
+ Raises:
+ RpcError according to state set on status, if it is an error state.
+ """
+ status.check_initialized()
+ error_class = RpcError.from_state(status.state)
+ if error_class is not None:
+ if error_class is ApplicationError:
+ raise error_class(status.error_message, status.error_name)
+ else:
+ raise error_class(status.error_message)
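+
+# Illustrative example (not part of the original source): typical use after
+# an RPC completes, where 'status' is an initialized RpcStatus message:
+#
+#   if is_error_status(status):
+#     check_rpc_status(status)  # raises the matching RpcError subclass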
+
+
+class ProtocolConfig(object):
+ """Configuration for single protocol mapping.
+
+ A read-only protocol configuration provides a given protocol implementation
+ with a name and a set of content-types that it recognizes.
+
+ Properties:
+ protocol: The protocol implementation for configuration (usually a module,
+ for example, protojson, protobuf, etc.). This is an object that has the
+ following attributes:
+ CONTENT_TYPE: Used as the default content-type if default_content_type
+ is not set.
+ ALTERNATIVE_CONTENT_TYPES (optional): A list of alternative
+ content-types to the default that indicate the same protocol.
+ encode_message: Function that matches the signature of
+ ProtocolConfig.encode_message. Used for encoding a ProtoRPC message.
+ decode_message: Function that matches the signature of
+ ProtocolConfig.decode_message. Used for decoding a ProtoRPC message.
+ name: Name of protocol configuration.
+ default_content_type: The default content type for the protocol. Overrides
+ CONTENT_TYPE defined on protocol.
+ alternative_content_types: A list of alternative content-types supported
+ by the protocol. Must not contain the default content-type, nor
+ duplicates. Overrides ALTERNATIVE_CONTENT_TYPES defined on protocol.
+ content_types: A list of all content-types supported by configuration.
+ Combination of default content-type and alternatives.
+ """
+
+ def __init__(self,
+ protocol,
+ name,
+ default_content_type=None,
+ alternative_content_types=None):
+ """Constructor.
+
+ Args:
+ protocol: The protocol implementation for configuration.
+ name: The name of the protocol configuration.
+ default_content_type: The default content-type for the protocol. If none
+ is provided, protocol.CONTENT_TYPE is used.
+ alternative_content_types: A list of content-types. If none is provided,
+ protocol.ALTERNATIVE_CONTENT_TYPES is used; if that attribute does not
+ exist, an empty tuple is used.
+
+ Raises:
+ ServiceConfigurationError if there are any duplicate content-types.
+ """
+ self.__protocol = protocol
+ self.__name = name
+ self.__default_content_type = (default_content_type or
+ protocol.CONTENT_TYPE).lower()
+ if alternative_content_types is None:
+ alternative_content_types = getattr(protocol,
+ 'ALTERNATIVE_CONTENT_TYPES',
+ ())
+ self.__alternative_content_types = tuple(
+ content_type.lower() for content_type in alternative_content_types)
+ self.__content_types = (
+ (self.__default_content_type,) + self.__alternative_content_types)
+
+ # Detect duplicate content types in definition.
+ previous_type = None
+ for content_type in sorted(self.content_types):
+ if content_type == previous_type:
+ raise ServiceConfigurationError(
+ 'Duplicate content-type %s' % content_type)
+ previous_type = content_type
+
+ @property
+ def protocol(self):
+ return self.__protocol
+
+ @property
+ def name(self):
+ return self.__name
+
+ @property
+ def default_content_type(self):
+ return self.__default_content_type
+
+ @property
+ def alternate_content_types(self):
+ return self.__alternative_content_types
+
+ @property
+ def content_types(self):
+ return self.__content_types
+
+ def encode_message(self, message):
+ """Encode message.
+
+ Args:
+ message: Message instance to encode.
+
+ Returns:
+ String encoding of Message instance encoded in protocol's format.
+ """
+ return self.__protocol.encode_message(message)
+
+ def decode_message(self, message_type, encoded_message):
+ """Decode buffer to Message instance.
+
+ Args:
+ message_type: Message type to decode data to.
+ encoded_message: Encoded version of message as string.
+
+ Returns:
+ Decoded instance of message_type.
+ """
+ return self.__protocol.decode_message(message_type, encoded_message)
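+
+# Illustrative sketch (not part of the original source; all names are
+# hypothetical): any object exposing the attributes documented on
+# ProtocolConfig can serve as a protocol implementation, e.g.:
+#
+#   class EchoProtocol(object):
+#     CONTENT_TYPE = 'application/x-echo'
+#     ALTERNATIVE_CONTENT_TYPES = ('text/x-echo',)
+#
+#     @staticmethod
+#     def encode_message(message):
+#       return repr(message)
+#
+#     @staticmethod
+#     def decode_message(message_type, encoded_message):
+#       raise NotImplementedError('Decoding is omitted from this sketch.')
+#
+#   config = ProtocolConfig(EchoProtocol, 'echo')
+#   config.content_types  # ('application/x-echo', 'text/x-echo')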
+
+
+class Protocols(object):
+ """Collection of protocol configurations.
+
+ Used to describe a complete set of content-type mappings for multiple
+ protocol configurations.
+
+ Properties:
+ names: Sorted list of the names of registered protocols.
+ content_types: Sorted list of supported content-types.
+ """
+
+ __default_protocols = None
+ __lock = threading.Lock()
+
+ def __init__(self):
+ """Constructor."""
+ self.__by_name = {}
+ self.__by_content_type = {}
+
+ def add_protocol_config(self, config):
+ """Add a protocol configuration to protocol mapping.
+
+ Args:
+ config: A ProtocolConfig.
+
+ Raises:
+ ServiceConfigurationError if protocol.name is already registered
+ or any of its content-types are already registered.
+ """
+ if config.name in self.__by_name:
+ raise ServiceConfigurationError(
+ 'Protocol name %r is already in use' % config.name)
+ for content_type in config.content_types:
+ if content_type in self.__by_content_type:
+ raise ServiceConfigurationError(
+ 'Content type %r is already in use' % content_type)
+
+ self.__by_name[config.name] = config
+ self.__by_content_type.update((t, config) for t in config.content_types)
+
+ def add_protocol(self, *args, **kwargs):
+ """Add a protocol configuration from basic parameters.
+
+ Simple helper method that creates and registers a ProtocolConfig instance.
+ """
+ self.add_protocol_config(ProtocolConfig(*args, **kwargs))
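+
+ # Illustrative example (not part of the original source): registering the
+ # bundled JSON protocol on a fresh collection:
+ #
+ #   protocols = Protocols()
+ #   protocols.add_protocol(protojson.ProtoJson.get_default(), 'protojson')
+ #   protocols.lookup_by_name('protojson').default_content_type
+ #   # -> 'application/json'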
+
+ @property
+ def names(self):
+ return tuple(sorted(self.__by_name))
+
+ @property
+ def content_types(self):
+ return tuple(sorted(self.__by_content_type))
+
+ def lookup_by_name(self, name):
+ """Look up a ProtocolConfig by name.
+
+ Args:
+ name: Name of protocol to look for.
+
+ Returns:
+ ProtocolConfig associated with name.
+
+ Raises:
+ KeyError if there is no protocol for name.
+ """
+ return self.__by_name[name.lower()]
+
+ def lookup_by_content_type(self, content_type):
+ """Look up a ProtocolConfig by content-type.
+
+ Args:
+ content_type: Content-type to find protocol configuration for.
+
+ Returns:
+ ProtocolConfig associated with content-type.
+
+ Raises:
+ KeyError if there is no protocol for content-type.
+ """
+ return self.__by_content_type[content_type.lower()]
+
+ @classmethod
+ def new_default(cls):
+ """Create default protocols configuration.
+
+ Returns:
+ New Protocols instance configured for protobuf and protojson.
+ """
+ protocols = cls()
+ protocols.add_protocol(protobuf, 'protobuf')
+ protocols.add_protocol(protojson.ProtoJson.get_default(), 'protojson')
+ return protocols
+
+ @classmethod
+ def get_default(cls):
+ """Get the global default Protocols instance.
+
+ Returns:
+ Current global default Protocols instance.
+ """
+ default_protocols = cls.__default_protocols
+ if default_protocols is None:
+ with cls.__lock:
+ default_protocols = cls.__default_protocols
+ if default_protocols is None:
+ default_protocols = cls.new_default()
+ cls.__default_protocols = default_protocols
+ return default_protocols
+
+ @classmethod
+ def set_default(cls, protocols):
+ """Set the global default Protocols instance.
+
+ Args:
+ protocols: A Protocols instance.
+
+ Raises:
+ TypeError: If protocols is not an instance of Protocols.
+ """
+ if not isinstance(protocols, Protocols):
+ raise TypeError(
+ 'Expected value of type "Protocols", found %r' % protocols)
+ with cls.__lock:
+ cls.__default_protocols = protocols
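+
+# Illustrative example (not part of the original source; MyProtocol is a
+# hypothetical protocol implementation): installing a customized protocol
+# set as the process-wide default:
+#
+#   protocols = Protocols.new_default()
+#   protocols.add_protocol(MyProtocol, 'my-protocol')
+#   Protocols.set_default(protocols)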