From f864d257d8400bfc61db03d22c5b91d1bded11c8 Mon Sep 17 00:00:00 2001 From: Lyuyang Hu Date: Sat, 7 May 2022 23:36:06 -0400 Subject: [PATCH] feat: use openapi codegen for py client --- .github/workflows/py-client-release.yml | 17 +- .gitignore | 3 +- .../README.handlebars | 57 + .../README_common.handlebars | 111 + .../README_onlypackage.handlebars | 43 + .../__init__.handlebars | 0 .../__init__api.handlebars | 9 + .../__init__apis.handlebars | 24 + .../__init__model.handlebars | 5 + .../__init__models.handlebars | 18 + .../__init__package.handlebars | 28 + .../api.handlebars | 26 + .../api_client.handlebars | 1379 +++++++++++ .../api_doc.handlebars | 212 ++ .../api_doc_example.handlebars | 163 ++ .../api_doc_schema_type_hint.handlebars | 10 + .../api_test.handlebars | 34 + .../configuration.handlebars | 636 +++++ .../doc_auth_partial.handlebars | 109 + .../endpoint.handlebars | 549 +++++ .../endpoint_body_serialization.handlebars | 6 + .../endpoint_parameter.handlebars | 17 + .../exceptions.handlebars | 129 ++ .../git_push.sh.handlebars | 58 + .../gitignore.handlebars | 67 + .../gitlab-ci.handlebars | 29 + .../model.handlebars | 17 + .../model_doc.handlebars | 9 + .../composed_schemas.handlebars | 86 + .../model_templates/dict_partial.handlebars | 54 + .../enum_value_to_name.handlebars | 12 + .../model_templates/enums.handlebars | 16 + .../imports_schema_types.handlebars | 42 + .../imports_schemas.handlebars | 6 + .../model_templates/new.handlebars | 53 + .../model_templates/schema.handlebars | 46 + .../schema_composed_or_anytype.handlebars | 48 + .../model_templates/schema_dict.handlebars | 23 + .../model_templates/schema_list.handlebars | 30 + .../model_templates/schema_simple.handlebars | 27 + .../model_templates/validations.handlebars | 50 + .../model_templates/var_equals_cls.handlebars | 1 + .../model_templates/xbase_schema.handlebars | 51 + .../model_test.handlebars | 32 + .../partial_header.handlebars | 17 + .../requirements.handlebars | 5 + .../rest.handlebars | 251 ++ .../schema_doc.handlebars | 32 + .../schemas.handlebars | 2038 +++++++++++++++++ .../setup.handlebars | 51 + .../setup_cfg.handlebars | 13 + .../signing.handlebars | 409 ++++ .../test-requirements.handlebars | 15 + .../tox.handlebars | 9 + .../travis.handlebars | 18 + 55 files changed, 7191 insertions(+), 9 deletions(-) create mode 100644 openapi/python_explerimental_client_template/README.handlebars create mode 100644 openapi/python_explerimental_client_template/README_common.handlebars create mode 100644 openapi/python_explerimental_client_template/README_onlypackage.handlebars create mode 100644 openapi/python_explerimental_client_template/__init__.handlebars create mode 100644 openapi/python_explerimental_client_template/__init__api.handlebars create mode 100644 openapi/python_explerimental_client_template/__init__apis.handlebars create mode 100644 openapi/python_explerimental_client_template/__init__model.handlebars create mode 100644 openapi/python_explerimental_client_template/__init__models.handlebars create mode 100644 openapi/python_explerimental_client_template/__init__package.handlebars create mode 100644 openapi/python_explerimental_client_template/api.handlebars create mode 100644 openapi/python_explerimental_client_template/api_client.handlebars create mode 100644 openapi/python_explerimental_client_template/api_doc.handlebars create mode 100644 openapi/python_explerimental_client_template/api_doc_example.handlebars create mode 100644 
openapi/python_explerimental_client_template/api_doc_schema_type_hint.handlebars create mode 100644 openapi/python_explerimental_client_template/api_test.handlebars create mode 100644 openapi/python_explerimental_client_template/configuration.handlebars create mode 100644 openapi/python_explerimental_client_template/doc_auth_partial.handlebars create mode 100644 openapi/python_explerimental_client_template/endpoint.handlebars create mode 100644 openapi/python_explerimental_client_template/endpoint_body_serialization.handlebars create mode 100644 openapi/python_explerimental_client_template/endpoint_parameter.handlebars create mode 100644 openapi/python_explerimental_client_template/exceptions.handlebars create mode 100644 openapi/python_explerimental_client_template/git_push.sh.handlebars create mode 100644 openapi/python_explerimental_client_template/gitignore.handlebars create mode 100644 openapi/python_explerimental_client_template/gitlab-ci.handlebars create mode 100644 openapi/python_explerimental_client_template/model.handlebars create mode 100644 openapi/python_explerimental_client_template/model_doc.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/composed_schemas.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/dict_partial.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/enum_value_to_name.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/enums.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/imports_schema_types.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/imports_schemas.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/new.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/schema.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/schema_composed_or_anytype.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/schema_dict.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/schema_list.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/schema_simple.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/validations.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/var_equals_cls.handlebars create mode 100644 openapi/python_explerimental_client_template/model_templates/xbase_schema.handlebars create mode 100644 openapi/python_explerimental_client_template/model_test.handlebars create mode 100644 openapi/python_explerimental_client_template/partial_header.handlebars create mode 100644 openapi/python_explerimental_client_template/requirements.handlebars create mode 100644 openapi/python_explerimental_client_template/rest.handlebars create mode 100644 openapi/python_explerimental_client_template/schema_doc.handlebars create mode 100644 openapi/python_explerimental_client_template/schemas.handlebars create mode 100644 openapi/python_explerimental_client_template/setup.handlebars create mode 100644 openapi/python_explerimental_client_template/setup_cfg.handlebars create mode 100644 openapi/python_explerimental_client_template/signing.handlebars create mode 100644 
openapi/python_explerimental_client_template/test-requirements.handlebars create mode 100644 openapi/python_explerimental_client_template/tox.handlebars create mode 100644 openapi/python_explerimental_client_template/travis.handlebars diff --git a/.github/workflows/py-client-release.yml b/.github/workflows/py-client-release.yml index 28885440..c2460419 100644 --- a/.github/workflows/py-client-release.yml +++ b/.github/workflows/py-client-release.yml @@ -2,8 +2,8 @@ name: "Python API Client Release" on: push: - branches: [main] - paths: ["openapi/openapi.yaml"] + branches: [main, "feat/openapi-py-client"] + paths: ["openapi/openapi.yaml", ".github/workflows/py-client-release.yml"] jobs: build-and-release: @@ -16,23 +16,24 @@ jobs: uses: actions/setup-python@v1 with: python-version: 3.9 + + - name: "generate tag from SHA" + id: gen_tag + run: echo "::set-output name=tag::$(echo ${GITHUB_SHA} | cut -c1-7)" + - name: Install dependencies run: | apt-get update && apt-get install -y default-jre pip install wheel - name: Generate Python Client run: | - wget https://repo1.maven.org/maven2/io/swagger/codegen/v3/swagger-codegen-cli/3.0.29/swagger-codegen-cli-3.0.29.jar -O swagger-codegen-cli-3.0.29.jar - java -jar swagger-codegen-cli-3.0.29.jar generate -i openapi/openapi.yaml -l python -o python_client -DpackageName=explainaboard_api_client + wget https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/5.4.0/openapi-generator-cli-5.4.0.jar -O openapi-generator-cli.jar + java -jar openapi-generator-cli.jar generate -i openapi/openapi.yaml -g python-experimental -o python_client -t openapi/python_explerimental_client_template "--additional-properties=packageName=explainaboard_api_client,packageVersion=${{ steps.gen_tag.outputs.tag }}" - name: "build" run: | cd python_client python setup.py bdist_wheel - - name: "generate tag from SHA" - id: gen_tag - run: echo "::set-output name=tag::$(echo ${GITHUB_SHA} | cut -c1-7)" - - uses: "marvinpinto/action-automatic-releases@latest" with: repo_token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.gitignore b/.gitignore index c58871d6..b167700f 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,7 @@ __pycache__ # swagger codegen backend/src/gen swagger-codegen-cli*.jar +openapi-generator-cli*.jar frontend/src/clients/openapi - +openapi/python_client .env diff --git a/openapi/python_explerimental_client_template/README.handlebars b/openapi/python_explerimental_client_template/README.handlebars new file mode 100644 index 00000000..aee6b66d --- /dev/null +++ b/openapi/python_explerimental_client_template/README.handlebars @@ -0,0 +1,57 @@ +# {{{projectName}}} +{{#if appDescriptionWithNewLines}} +{{{appDescriptionWithNewLines}}} +{{/if}} + +This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: + +- API version: {{appVersion}} +- Package version: {{packageVersion}} +{{#unless hideGenerationTimestamp}} +- Build date: {{generatedDate}} +{{/unless}} +- Build package: {{generatorClass}} +{{#if infoUrl}} +For more information, please visit [{{{infoUrl}}}]({{{infoUrl}}}) +{{/if}} + +## Requirements. 
+ +Python {{generatorLanguageVersion}} +v3.9 is needed so one can combine classmethod and property decorators to define +object schema properties as classes + +## Installation & Usage +### pip install + +If the python package is hosted on a repository, you can install directly using: + +```sh +pip install git+https://{{gitHost}}/{{{gitUserId}}}/{{{gitRepoId}}}.git +``` +(you may need to run `pip` with root permission: `sudo pip install git+https://{{gitHost}}/{{{gitUserId}}}/{{{gitRepoId}}}.git`) + +Then import the package: +```python +import {{{packageName}}} +``` + +### Setuptools + +Install via [Setuptools](http://pypi.python.org/pypi/setuptools). + +```sh +python setup.py install --user +``` +(or `sudo python setup.py install` to install the package for all users) + +Then import the package: +```python +import {{{packageName}}} +``` + +## Getting Started + +Please follow the [installation procedure](#installation--usage) and then run the following: + +{{> README_common }} diff --git a/openapi/python_explerimental_client_template/README_common.handlebars b/openapi/python_explerimental_client_template/README_common.handlebars new file mode 100644 index 00000000..5c577351 --- /dev/null +++ b/openapi/python_explerimental_client_template/README_common.handlebars @@ -0,0 +1,111 @@ +```python +{{#with apiInfo}}{{#each apis}}{{#unless hasMore}}{{#if hasHttpSignatureMethods}}import datetime{{/if}}{{/unless}}{{/each}}{{/with}} +import time +import {{{packageName}}} +from pprint import pprint +{{#with apiInfo}} +{{#each apis}} +{{#if @first}} +from {{packageName}}.{{apiPackage}} import {{classFilename}} +{{#each imports}} +{{{import}}} +{{/each}} +{{#with operations}} +{{#each operation}} +{{#if @first}} +{{> doc_auth_partial}} + +# Enter a context with an instance of the API client +with {{{packageName}}}.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = {{classFilename}}.{{{classname}}}(api_client) + {{#each allParams}}{{paramName}} = {{{example}}} # {{{dataType}}} | {{{description}}}{{#unless required}} (optional){{/unless}}{{#if defaultValue}} (default to {{{.}}}){{/if}} + {{/each}} + + try: + {{#if summary}} # {{{summary}}} + {{/if}} {{#if returnType}}api_response = {{/if}}api_instance.{{{operationId}}}({{#each allParams}}{{#if required}}{{paramName}}{{/if}}{{#unless required}}{{paramName}}={{paramName}}{{/unless}}{{#if hasMore}}, {{/if}}{{/each}}){{#if returnType}} + pprint(api_response){{/if}} + except {{{packageName}}}.ApiException as e: + print("Exception when calling {{classname}}->{{operationId}}: %s\n" % e) +{{/if}} +{{/each}} +{{/with}} +{{/if}} +{{/each}} +{{/with}} +``` + +## Documentation for API Endpoints + +All URIs are relative to *{{basePath}}* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +{{#with apiInfo}}{{#each apis}}{{#with operations}}{{#each operation}}*{{classname}}* | [**{{operationId}}**]({{apiDocPath}}{{classname}}.md#{{operationIdLowerCase}}) | **{{httpMethod}}** {{path}} | {{#if summary}}{{summary}}{{/if}} +{{/each}}{{/with}}{{/each}}{{/with}} + +## Documentation For Models + +{{#each models}}{{#with model}} - [{{{classname}}}]({{modelDocPath}}{{{classname}}}.md) +{{/with}}{{/each}} + +## Documentation For Authorization + +{{#unless authMethods}} + All endpoints do not require authorization. 
+{{/unless}} +{{#each authMethods}} +{{#if @last}} Authentication schemes defined for the API:{{/if}} +## {{{name}}} + +{{#if isApiKey}} +- **Type**: API key +- **API key parameter name**: {{{keyParamName}}} +- **Location**: {{#if isKeyInQuery}}URL query string{{/if}}{{#if isKeyInHeader}}HTTP header{{/if}} +{{/if}} +{{#if isBasic}} +{{#if isBasicBasic}} +- **Type**: HTTP basic authentication +{{/if}} +{{#if isBasicBearer}} +- **Type**: Bearer authentication{{#if bearerFormat}} ({{{bearerFormat}}}){{/if}} +{{/if}} +{{#if isHttpSignature}} +- **Type**: HTTP signature authentication +{{/if}} +{{/if}} +{{#if isOAuth}} +- **Type**: OAuth +- **Flow**: {{{flow}}} +- **Authorization URL**: {{{authorizationUrl}}} +- **Scopes**: {{#unless scopes}}N/A{{/unless}} +{{#each scopes}} - **{{{scope}}}**: {{{description}}} +{{/each}} +{{/if}} + +{{/each}} + +## Author + +{{#with apiInfo}}{{#each apis}}{{#unless hasMore}}{{infoEmail}} +{{/unless}}{{/each}}{{/with}} + +## Notes for Large OpenAPI documents +If the OpenAPI document is large, imports in {{{packageName}}}.apis and {{{packageName}}}.models may fail with a +RecursionError indicating the maximum recursion limit has been exceeded. In that case, there are a couple of solutions: + +Solution 1: +Use specific imports for apis and models like: +- `from {{{packageName}}}.{{apiPackage}}.default_api import DefaultApi` +- `from {{{packageName}}}.{{modelPackage}}.pet import Pet` + +Solution 2: +Before importing the package, adjust the maximum recursion limit as shown below: +``` +import sys +sys.setrecursionlimit(1500) +import {{{packageName}}} +from {{{packageName}}}.apis import * +from {{{packageName}}}.models import * +``` diff --git a/openapi/python_explerimental_client_template/README_onlypackage.handlebars b/openapi/python_explerimental_client_template/README_onlypackage.handlebars new file mode 100644 index 00000000..63f95937 --- /dev/null +++ b/openapi/python_explerimental_client_template/README_onlypackage.handlebars @@ -0,0 +1,43 @@ +# {{{projectName}}} +{{#if appDescription}} +{{{appDescription}}} +{{/if}} + +The `{{packageName}}` package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: + +- API version: {{appVersion}} +- Package version: {{packageVersion}} +{{#unless hideGenerationTimestamp}} +- Build date: {{generatedDate}} +{{/unless}} +- Build package: {{generatorClass}} +{{#if infoUrl}} +For more information, please visit [{{{infoUrl}}}]({{{infoUrl}}}) +{{/if}} + +## Requirements. 
+ +Python {{generatorLanguageVersion}} + +## Installation & Usage + +This python library package is generated without supporting files like setup.py or requirements files + +To be able to use it, you will need these dependencies in your own package that uses this library: + +* urllib3 >= 1.15 +* certifi +* python-dateutil +{{#if asyncio}} +* aiohttp +{{/if}} +{{#if tornado}} +* tornado>=4.2,<5 +{{/if}} + +## Getting Started + +In your own code, to use this library to connect and interact with {{{projectName}}}, +you can run the following: + +{{> README_common }} diff --git a/openapi/python_explerimental_client_template/__init__.handlebars b/openapi/python_explerimental_client_template/__init__.handlebars new file mode 100644 index 00000000..e69de29b diff --git a/openapi/python_explerimental_client_template/__init__api.handlebars b/openapi/python_explerimental_client_template/__init__api.handlebars new file mode 100644 index 00000000..1e059f73 --- /dev/null +++ b/openapi/python_explerimental_client_template/__init__api.handlebars @@ -0,0 +1,9 @@ +{{#with apiInfo}} +{{#each apis}} +{{#if @first}} +# do not import all apis into this module because that uses a lot of memory and stack frames +# if you need the ability to import all apis from one package, import them with +# from {{packageName}}.apis import {{classname}} +{{/if}} +{{/each}} +{{/with}} diff --git a/openapi/python_explerimental_client_template/__init__apis.handlebars b/openapi/python_explerimental_client_template/__init__apis.handlebars new file mode 100644 index 00000000..06fb3361 --- /dev/null +++ b/openapi/python_explerimental_client_template/__init__apis.handlebars @@ -0,0 +1,24 @@ +{{#with apiInfo}} +{{#each apis}} +{{#if @first}} +# coding: utf-8 + +# flake8: noqa + +# Import all APIs into this package. +# If you have many APIs here with many many models used in each API this may +# raise a `RecursionError`. 
+# In order to avoid this, import only the API that you directly need like: +# +# from {{packageName}}.{{apiPackage}}.{{classFilename}} import {{classname}} +# +# or import this package, but before doing it, use: +# +# import sys +# sys.setrecursionlimit(n) + +# Import APIs into API package: +{{/if}} +from {{packageName}}.{{apiPackage}}.{{classFilename}} import {{classname}} +{{/each}} +{{/with}} diff --git a/openapi/python_explerimental_client_template/__init__model.handlebars b/openapi/python_explerimental_client_template/__init__model.handlebars new file mode 100644 index 00000000..b6b698b0 --- /dev/null +++ b/openapi/python_explerimental_client_template/__init__model.handlebars @@ -0,0 +1,5 @@ +# we can not import model classes here because that would create a circular +# reference which would not work in python2 +# do not import all models into this module because that uses a lot of memory and stack frames +# if you need the ability to import all models from one package, import them with +# from {{packageName}}.models import ModelA, ModelB diff --git a/openapi/python_explerimental_client_template/__init__models.handlebars b/openapi/python_explerimental_client_template/__init__models.handlebars new file mode 100644 index 00000000..31eac9cd --- /dev/null +++ b/openapi/python_explerimental_client_template/__init__models.handlebars @@ -0,0 +1,18 @@ +# coding: utf-8 + +# flake8: noqa + +# import all models into this package +# if you have many models here with many references from one model to another this may +# raise a RecursionError +# to avoid this, import only the models that you directly need like: +# from {{packageName}}.{{modelPackage}}.pet import Pet +# or import this package, but before doing it, use: +# import sys +# sys.setrecursionlimit(n) + +{{#each models}} +{{#with model}} +from {{packageName}}.{{modelPackage}}.{{classFilename}} import {{classname}} +{{/with}} +{{/each}} diff --git a/openapi/python_explerimental_client_template/__init__package.handlebars b/openapi/python_explerimental_client_template/__init__package.handlebars new file mode 100644 index 00000000..26350c72 --- /dev/null +++ b/openapi/python_explerimental_client_template/__init__package.handlebars @@ -0,0 +1,28 @@ +# coding: utf-8 + +# flake8: noqa + +{{>partial_header}} + +__version__ = "{{packageVersion}}" + +# import ApiClient +from {{packageName}}.api_client import ApiClient + +# import Configuration +from {{packageName}}.configuration import Configuration +{{#if hasHttpSignatureMethods}} +from {{packageName}}.signing import HttpSigningConfiguration +{{/if}} + +# import exceptions +from {{packageName}}.exceptions import OpenApiException +from {{packageName}}.exceptions import ApiAttributeError +from {{packageName}}.exceptions import ApiTypeError +from {{packageName}}.exceptions import ApiValueError +from {{packageName}}.exceptions import ApiKeyError +from {{packageName}}.exceptions import ApiException +{{#if recursionLimit}} + +__import__('sys').setrecursionlimit({{recursionLimit}}) +{{/if}} diff --git a/openapi/python_explerimental_client_template/api.handlebars b/openapi/python_explerimental_client_template/api.handlebars new file mode 100644 index 00000000..c6c0b423 --- /dev/null +++ b/openapi/python_explerimental_client_template/api.handlebars @@ -0,0 +1,26 @@ +# coding: utf-8 + +{{>partial_header}} + +from {{packageName}}.api_client import ApiClient +{{#with operations}} +{{#each operation}} +from {{packageName}}.api.{{classFilename}}_endpoints.{{operationId}} import {{operationIdCamelCase}} 
+{{/each}} +{{/with}} + + +{{#with operations}} +class {{classname}}( +{{#each operation}} + {{operationIdCamelCase}}, +{{/each}} + ApiClient, +): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + pass +{{/with}} diff --git a/openapi/python_explerimental_client_template/api_client.handlebars b/openapi/python_explerimental_client_template/api_client.handlebars new file mode 100644 index 00000000..4dfc613a --- /dev/null +++ b/openapi/python_explerimental_client_template/api_client.handlebars @@ -0,0 +1,1379 @@ +# coding: utf-8 +{{>partial_header}} + +from dataclasses import dataclass +from decimal import Decimal +import enum +import json +import os +import io +import atexit +from multiprocessing.pool import ThreadPool +import re +import tempfile +import typing +import urllib3 +from urllib3._collections import HTTPHeaderDict +from urllib.parse import quote +from urllib3.fields import RequestField as RequestFieldBase + +{{#if tornado}} +import tornado.gen +{{/if}} + +from {{packageName}} import rest +from {{packageName}}.configuration import Configuration +from {{packageName}}.exceptions import ApiTypeError, ApiValueError +from {{packageName}}.schemas import ( + NoneClass, + BoolClass, + Schema, + FileIO, + BinarySchema, + InstantiationMetadata, + date, + datetime, + none_type, + frozendict, + Unset, + unset, +) + + +class RequestField(RequestFieldBase): + def __eq__(self, other): + if not isinstance(other, RequestField): + return False + return self.__dict__ == other.__dict__ + + +class JSONEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, (str, int, float)): + # instances based on primitive classes + return obj + elif isinstance(obj, Decimal): + if obj.as_tuple().exponent >= 0: + return int(obj) + return float(obj) + elif isinstance(obj, NoneClass): + return None + elif isinstance(obj, BoolClass): + return bool(obj) + elif isinstance(obj, (dict, frozendict)): + return {key: self.default(val) for key, val in obj.items()} + elif isinstance(obj, (list, tuple)): + return [self.default(item) for item in obj] + raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__)) + + +class ParameterInType(enum.Enum): + QUERY = 'query' + HEADER = 'header' + PATH = 'path' + COOKIE = 'cookie' + + +class ParameterStyle(enum.Enum): + MATRIX = 'matrix' + LABEL = 'label' + FORM = 'form' + SIMPLE = 'simple' + SPACE_DELIMITED = 'spaceDelimited' + PIPE_DELIMITED = 'pipeDelimited' + DEEP_OBJECT = 'deepObject' + + +class ParameterSerializerBase: + @staticmethod + def __serialize_number( + in_data: typing.Union[int, float], name: str, prefix='' + ) -> typing.Tuple[typing.Tuple[str, str]]: + return tuple([(name, prefix + str(in_data))]) + + @staticmethod + def __serialize_str( + in_data: str, name: str, prefix='' + ) -> typing.Tuple[typing.Tuple[str, str]]: + return tuple([(name, prefix + quote(in_data))]) + + @staticmethod + def __serialize_bool(in_data: bool, name: str, prefix='') -> typing.Tuple[typing.Tuple[str, str]]: + if in_data: + return tuple([(name, prefix + 'true')]) + return tuple([(name, prefix + 'false')]) + + @staticmethod + def __urlencode(in_data: typing.Any) -> str: + return quote(str(in_data)) + + def __serialize_list( + self, + in_data: typing.List[typing.Any], + style: ParameterStyle, + name: str, + explode: bool, + empty_val: typing.Union[typing.Tuple[str, str], typing.Tuple] = tuple(), + prefix: str = '', + separator: str = ',', + ) -> 
typing.Tuple[typing.Union[typing.Tuple[str, str], typing.Tuple], ...]: + if not in_data: + return empty_val + if explode and style in { + ParameterStyle.FORM, + ParameterStyle.MATRIX, + ParameterStyle.SPACE_DELIMITED, + ParameterStyle.PIPE_DELIMITED + }: + if style is ParameterStyle.FORM: + return tuple((name, prefix + self.__urlencode(val)) for val in in_data) + else: + joined_vals = prefix + separator.join(name + '=' + self.__urlencode(val) for val in in_data) + else: + joined_vals = prefix + separator.join(map(self.__urlencode, in_data)) + return tuple([(name, joined_vals)]) + + def __form_item_representation(self, in_data: typing.Any) -> typing.Optional[str]: + if isinstance(in_data, none_type): + return None + elif isinstance(in_data, list): + if not in_data: + return None + raise ApiValueError('Unable to generate a form representation of {}'.format(in_data)) + elif isinstance(in_data, dict): + if not in_data: + return None + raise ApiValueError('Unable to generate a form representation of {}'.format(in_data)) + elif isinstance(in_data, (bool, bytes)): + raise ApiValueError('Unable to generate a form representation of {}'.format(in_data)) + # str, float, int + return self.__urlencode(in_data) + + def __serialize_dict( + self, + in_data: typing.Dict[str, typing.Any], + style: ParameterStyle, + name: str, + explode: bool, + empty_val: typing.Union[typing.Tuple[str, str], typing.Tuple] = tuple(), + prefix: str = '', + separator: str = ',', + ) -> typing.Tuple[typing.Tuple[str, str]]: + if not in_data: + return empty_val + if all(val is None for val in in_data.values()): + return empty_val + + form_items = {} + if style is ParameterStyle.FORM: + for key, val in in_data.items(): + new_val = self.__form_item_representation(val) + if new_val is None: + continue + form_items[key] = new_val + + if explode: + if style is ParameterStyle.FORM: + return tuple((key, prefix + val) for key, val in form_items.items()) + elif style in { + ParameterStyle.SIMPLE, + ParameterStyle.LABEL, + ParameterStyle.MATRIX, + ParameterStyle.SPACE_DELIMITED, + ParameterStyle.PIPE_DELIMITED + }: + joined_vals = prefix + separator.join(key + '=' + self.__urlencode(val) for key, val in in_data.items()) + else: + raise ApiValueError(f'Invalid style {style} for dict serialization with explode=True') + elif style is ParameterStyle.FORM: + joined_vals = prefix + separator.join(key + separator + val for key, val in form_items.items()) + else: + joined_vals = prefix + separator.join( + key + separator + self.__urlencode(val) for key, val in in_data.items()) + return tuple([(name, joined_vals)]) + + def _serialize_x( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list], + style: ParameterStyle, + name: str, + explode: bool, + empty_val: typing.Union[typing.Tuple[str, str], typing.Tuple] = (), + prefix: str = '', + separator: str = ',', + ) -> typing.Tuple[typing.Tuple[str, str], ...]: + if isinstance(in_data, none_type): + return empty_val + elif isinstance(in_data, bool): + # must be before int check + return self.__serialize_bool(in_data, name=name, prefix=prefix) + elif isinstance(in_data, (int, float)): + return self.__serialize_number(in_data, name=name, prefix=prefix) + elif isinstance(in_data, str): + return self.__serialize_str(in_data, name=name, prefix=prefix) + elif isinstance(in_data, list): + return self.__serialize_list( + in_data, + style=style, + name=name, + explode=explode, + empty_val=empty_val, + prefix=prefix, + separator=separator + ) + elif isinstance(in_data, dict): + return 
self.__serialize_dict( + in_data, + style=style, + name=name, + explode=explode, + empty_val=empty_val, + prefix=prefix, + separator=separator + ) + + +class StyleFormSerializer(ParameterSerializerBase): + + def _serialize_form( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list], + name: str, + explode: bool, + ) -> typing.Tuple[typing.Tuple[str, str], ...]: + return self._serialize_x(in_data, style=ParameterStyle.FORM, name=name, explode=explode) + + +class StyleSimpleSerializer(ParameterSerializerBase): + + def _serialize_simple_tuple( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list], + name: str, + explode: bool, + in_type: ParameterInType, + ) -> typing.Tuple[typing.Tuple[str, str], ...]: + if in_type is ParameterInType.HEADER: + empty_val = () + else: + empty_val = ((name, ''),) + return self._serialize_x(in_data, style=ParameterStyle.SIMPLE, name=name, explode=explode, empty_val=empty_val) + + +@dataclass +class ParameterBase: + name: str + in_type: ParameterInType + required: bool + style: typing.Optional[ParameterStyle] + explode: typing.Optional[bool] + allow_reserved: typing.Optional[bool] + schema: typing.Optional[typing.Type[Schema]] + content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] + + __style_to_in_type = { + ParameterStyle.MATRIX: {ParameterInType.PATH}, + ParameterStyle.LABEL: {ParameterInType.PATH}, + ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE}, + ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER}, + ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY}, + ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY}, + ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY}, + } + __in_type_to_default_style = { + ParameterInType.QUERY: ParameterStyle.FORM, + ParameterInType.PATH: ParameterStyle.SIMPLE, + ParameterInType.HEADER: ParameterStyle.SIMPLE, + ParameterInType.COOKIE: ParameterStyle.FORM, + } + __disallowed_header_names = {'Accept', 'Content-Type', 'Authorization'} + _json_encoder = JSONEncoder() + _json_content_type = 'application/json' + + @classmethod + def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType): + if style is None: + return + in_type_set = cls.__style_to_in_type[style] + if in_type not in in_type_set: + raise ValueError( + 'Invalid style and in_type combination. For style={} only in_type={} are allowed'.format( + style, in_type_set + ) + ) + + def __init__( + self, + name: str, + in_type: ParameterInType, + required: bool = False, + style: typing.Optional[ParameterStyle] = None, + explode: bool = False, + allow_reserved: typing.Optional[bool] = None, + schema: typing.Optional[typing.Type[Schema]] = None, + content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None + ): + if schema is None and content is None: + raise ValueError('Value missing; Pass in either schema or content') + if schema and content: + raise ValueError('Too many values provided. Both schema and content were provided. 
Only one may be input') + if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER: + raise ValueError('Invalid name, name may not be one of {}'.format(self.__disallowed_header_names)) + self.__verify_style_to_in_type(style, in_type) + if content is None and style is None: + style = self.__in_type_to_default_style[in_type] + if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1: + raise ValueError('Invalid content length, content length must equal 1') + self.in_type = in_type + self.name = name + self.required = required + self.style = style + self.explode = explode + self.allow_reserved = allow_reserved + self.schema = schema + self.content = content + + @staticmethod + def _remove_empty_and_cast( + in_data: typing.Tuple[typing.Tuple[str, str]], + ) -> typing.Dict[str, str]: + data = tuple(t for t in in_data if t) + if not data: + return dict() + return dict(data) + + def _serialize_json( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list] + ) -> typing.Tuple[typing.Tuple[str, str]]: + return tuple([(self.name, json.dumps(in_data))]) + + +class PathParameter(ParameterBase, StyleSimpleSerializer): + + def __init__( + self, + name: str, + required: bool = False, + style: typing.Optional[ParameterStyle] = None, + explode: bool = False, + allow_reserved: typing.Optional[bool] = None, + schema: typing.Optional[typing.Type[Schema]] = None, + content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None + ): + super().__init__( + name, + in_type=ParameterInType.PATH, + required=required, + style=style, + explode=explode, + allow_reserved=allow_reserved, + schema=schema, + content=content + ) + + def __serialize_label( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list] + ) -> typing.Dict[str, str]: + empty_val = ((self.name, ''),) + prefix = '.' + separator = '.' 
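+        # For example (illustrative values only): a list such as ['a', 'b'] for a path parameter named 'id' is rendered label-style by this method as {'id': '.a.b'}.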
+ return self._remove_empty_and_cast( + self._serialize_x( + in_data, + style=ParameterStyle.LABEL, + name=self.name, + explode=self.explode, + empty_val=empty_val, + prefix=prefix, + separator=separator + ) + ) + + def __serialize_matrix( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list] + ) -> typing.Dict[str, str]: + separator = ',' + if in_data == '': + prefix = ';' + self.name + elif isinstance(in_data, (dict, list)) and self.explode: + prefix = ';' + separator = ';' + else: + prefix = ';' + self.name + '=' + empty_val = ((self.name, ''),) + return self._remove_empty_and_cast( + self._serialize_x( + in_data, + style=ParameterStyle.MATRIX, + name=self.name, + explode=self.explode, + prefix=prefix, + empty_val=empty_val, + separator=separator + ) + ) + + def _serialize_simple( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list], + ) -> typing.Dict[str, str]: + tuple_data = self._serialize_simple_tuple(in_data, self.name, self.explode, self.in_type) + return self._remove_empty_and_cast(tuple_data) + + def serialize( + self, + in_data: typing.Union[ + Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict] + ) -> typing.Dict[str, str]: + if self.schema: + cast_in_data = self.schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + """ + simple -> path + path: + returns path_params: dict + label -> path + returns path_params + matrix -> path + returns path_params + """ + if self.style: + if self.style is ParameterStyle.SIMPLE: + return self._serialize_simple(cast_in_data) + elif self.style is ParameterStyle.LABEL: + return self.__serialize_label(cast_in_data) + elif self.style is ParameterStyle.MATRIX: + return self.__serialize_matrix(cast_in_data) + # self.content will be length one + for content_type, schema in self.content.items(): + cast_in_data = schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + if content_type == self._json_content_type: + tuple_data = self._serialize_json(cast_in_data) + return self._remove_empty_and_cast(tuple_data) + raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type)) + + +class QueryParameter(ParameterBase, StyleFormSerializer): + + def __init__( + self, + name: str, + required: bool = False, + style: typing.Optional[ParameterStyle] = None, + explode: bool = False, + allow_reserved: typing.Optional[bool] = None, + schema: typing.Optional[typing.Type[Schema]] = None, + content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None + ): + super().__init__( + name, + in_type=ParameterInType.QUERY, + required=required, + style=style, + explode=explode, + allow_reserved=allow_reserved, + schema=schema, + content=content + ) + + def __serialize_space_delimited( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list] + ) -> typing.Tuple[typing.Tuple[str, str], ...]: + separator = '%20' + empty_val = () + return self._serialize_x( + in_data, + style=ParameterStyle.SPACE_DELIMITED, + name=self.name, + explode=self.explode, + separator=separator, + empty_val=empty_val + ) + + def __serialize_pipe_delimited( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list] + ) -> typing.Tuple[typing.Tuple[str, str], ...]: + separator = '|' + empty_val = () + return self._serialize_x( + in_data, + style=ParameterStyle.PIPE_DELIMITED, + name=self.name, + explode=self.explode, + separator=separator, + empty_val=empty_val + ) + + def serialize( + self, + in_data: typing.Union[ 
+ Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict] + ) -> typing.Tuple[typing.Tuple[str, str]]: + if self.schema: + cast_in_data = self.schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + """ + form -> query + query: + - GET/HEAD/DELETE: could use fields + - PUT/POST: must use urlencode to send parameters + returns fields: tuple + spaceDelimited -> query + returns fields + pipeDelimited -> query + returns fields + deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706 + returns fields + """ + if self.style: + # TODO update query ones to omit setting values when [] {} or None is input + if self.style is ParameterStyle.FORM: + return self._serialize_form(cast_in_data, explode=self.explode, name=self.name) + elif self.style is ParameterStyle.SPACE_DELIMITED: + return self.__serialize_space_delimited(cast_in_data) + elif self.style is ParameterStyle.PIPE_DELIMITED: + return self.__serialize_pipe_delimited(cast_in_data) + # self.content will be length one + for content_type, schema in self.content.items(): + cast_in_data = schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + if content_type == self._json_content_type: + return self._serialize_json(cast_in_data) + raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type)) + + +class CookieParameter(ParameterBase, StyleFormSerializer): + + def __init__( + self, + name: str, + required: bool = False, + style: typing.Optional[ParameterStyle] = None, + explode: bool = False, + allow_reserved: typing.Optional[bool] = None, + schema: typing.Optional[typing.Type[Schema]] = None, + content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None + ): + super().__init__( + name, + in_type=ParameterInType.COOKIE, + required=required, + style=style, + explode=explode, + allow_reserved=allow_reserved, + schema=schema, + content=content + ) + + def serialize( + self, + in_data: typing.Union[ + Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict] + ) -> typing.Tuple[typing.Tuple[str, str]]: + if self.schema: + cast_in_data = self.schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + """ + form -> cookie + returns fields: tuple + """ + if self.style: + return self._serialize_form(cast_in_data, explode=self.explode, name=self.name) + # self.content will be length one + for content_type, schema in self.content.items(): + cast_in_data = schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + if content_type == self._json_content_type: + return self._serialize_json(cast_in_data) + raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type)) + + +class HeaderParameter(ParameterBase, StyleSimpleSerializer): + def __init__( + self, + name: str, + required: bool = False, + style: typing.Optional[ParameterStyle] = None, + explode: bool = False, + allow_reserved: typing.Optional[bool] = None, + schema: typing.Optional[typing.Type[Schema]] = None, + content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None + ): + super().__init__( + name, + in_type=ParameterInType.HEADER, + required=required, + style=style, + explode=explode, + allow_reserved=allow_reserved, + schema=schema, + content=content + ) + + @staticmethod + def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict[str, str]: + data = tuple(t for t in in_data if t) + headers = HTTPHeaderDict() + 
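+        # Fold the serialized (name, value) pairs into urllib3's HTTPHeaderDict, since PoolManager expects a header mapping rather than a tuple of pairs.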
if not data: + return headers + headers.extend(data) + return headers + + def _serialize_simple( + self, + in_data: typing.Union[None, int, float, str, bool, dict, list], + ) -> HTTPHeaderDict[str, str]: + tuple_data = self._serialize_simple_tuple(in_data, self.name, self.explode, self.in_type) + return self.__to_headers(tuple_data) + + def serialize( + self, + in_data: typing.Union[ + Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict] + ) -> HTTPHeaderDict[str, str]: + if self.schema: + cast_in_data = self.schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + """ + simple -> header + headers: PoolManager needs a mapping, tuple is close + returns headers: dict + """ + if self.style: + return self._serialize_simple(cast_in_data) + # self.content will be length one + for content_type, schema in self.content.items(): + cast_in_data = schema(in_data) + cast_in_data = self._json_encoder.default(cast_in_data) + if content_type == self._json_content_type: + tuple_data = self._serialize_json(cast_in_data) + return self.__to_headers(tuple_data) + raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type)) + + +class Encoding: + def __init__( + self, + content_type: str, + headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None, + style: typing.Optional[ParameterStyle] = None, + explode: bool = False, + allow_reserved: bool = False, + ): + self.content_type = content_type + self.headers = headers + self.style = style + self.explode = explode + self.allow_reserved = allow_reserved + + +class MediaType: + """ + Used to store request and response body schema information + encoding: + A map between a property name and its encoding information. + The key, being the property name, MUST exist in the schema as a property. + The encoding object SHALL only apply to requestBody objects when the media type is + multipart or application/x-www-form-urlencoded. 
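+    Illustrative sketch (the 'file' property name and SomeSchema are assumed, not generated): MediaType(schema=SomeSchema, encoding={'file': Encoding('application/octet-stream')})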
+ """ + + def __init__( + self, + schema: typing.Type[Schema], + encoding: typing.Optional[typing.Dict[str, Encoding]] = None, + ): + self.schema = schema + self.encoding = encoding + + +@dataclass +class ApiResponse: + response: urllib3.HTTPResponse + body: typing.Union[Unset, typing.Type[Schema]] + headers: typing.Union[Unset, typing.List[HeaderParameter]] + + def __init__( + self, + response: urllib3.HTTPResponse, + body: typing.Union[Unset, typing.Type[Schema]], + headers: typing.Union[Unset, typing.List[HeaderParameter]] + ): + """ + pycharm needs this to prevent 'Unexpected argument' warnings + """ + self.response = response + self.body = body + self.headers = headers + + +@dataclass +class ApiResponseWithoutDeserialization(ApiResponse): + response: urllib3.HTTPResponse + body: typing.Union[Unset, typing.Type[Schema]] = unset + headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset + + +class OpenApiResponse: + def __init__( + self, + response_cls: typing.Type[ApiResponse] = ApiResponse, + content: typing.Optional[typing.Dict[str, MediaType]] = None, + headers: typing.Optional[typing.List[HeaderParameter]] = None, + ): + self.headers = headers + if content is not None and len(content) == 0: + raise ValueError('Invalid value for content, the content dict must have >= 1 entry') + self.content = content + self.response_cls = response_cls + + @staticmethod + def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any: + decoded_data = response.data.decode("utf-8") + return json.loads(decoded_data) + + @staticmethod + def __file_name_from_content_disposition(content_disposition: typing.Optional[str]) -> typing.Optional[str]: + if content_disposition is None: + return None + match = re.search('filename="(.+?)"', content_disposition) + if not match: + return None + return match.group(1) + + def __deserialize_application_octet_stream( + self, response: urllib3.HTTPResponse + ) -> typing.Union[bytes, io.BufferedReader]: + """ + urllib3 use cases: + 1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned + 2. 
when preload_content=False (stream=True) then supports_chunked_reads is True and + a file will be written and returned + """ + if response.supports_chunked_reads(): + file_name = self.__file_name_from_content_disposition(response.headers.get('content-disposition')) + + if file_name is None: + _fd, path = tempfile.mkstemp() + else: + path = os.path.join(tempfile.gettempdir(), file_name) + # TODO get file_name from the filename at the end of the url if it exists + with open(path, 'wb') as new_file: + chunk_size = 1024 + while True: + data = response.read(chunk_size) + if not data: + break + new_file.write(data) + # release_conn is needed for streaming connections only + response.release_conn() + new_file = open(path, 'rb') + return new_file + else: + return response.data + + def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse: + content_type = response.getheader('content-type') + deserialized_body = unset + streamed = response.supports_chunked_reads() + if self.content is not None: + if content_type == 'application/json': + body_data = self.__deserialize_json(response) + elif content_type == 'application/octet-stream': + body_data = self.__deserialize_application_octet_stream(response) + else: + raise NotImplementedError('Deserialization of {} has not yet been implemented'.format(content_type)) + body_schema = self.content[content_type].schema + _instantiation_metadata = InstantiationMetadata(from_server=True, configuration=configuration) + deserialized_body = body_schema._from_openapi_data( + body_data, _instantiation_metadata=_instantiation_metadata) + elif streamed: + response.release_conn() + + deserialized_headers = unset + if self.headers is not None: + deserialized_headers = unset + + return self.response_cls( + response=response, + headers=deserialized_headers, + body=deserialized_body + ) + + +class ApiClient: + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + :param pool_threads: The number of threads to use for async requests + to the API. More threads means more concurrent API requests. + """ + + _pool = None + __json_encoder = JSONEncoder() + + def __init__( + self, + configuration: typing.Optional[Configuration] = None, + header_name: typing.Optional[str] = None, + header_value: typing.Optional[str] = None, + cookie: typing.Optional[str] = None, + pool_threads: int = 1 + ): + if configuration is None: + configuration = Configuration() + self.configuration = configuration + self.pool_threads = pool_threads + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. 
+ self.user_agent = '{{#if httpUserAgent}}{{{httpUserAgent}}}{{/if}}{{#unless httpUserAgent}}OpenAPI-Generator/{{{packageVersion}}}/python{{/unless}}' + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + if self._pool: + self._pool.close() + self._pool.join() + self._pool = None + if hasattr(atexit, 'unregister'): + atexit.unregister(self.close) + + @property + def pool(self): + """Create thread pool on first request + avoids instantiating unused threadpool for blocking clients. + """ + if self._pool is None: + atexit.register(self.close) + self._pool = ThreadPool(self.pool_threads) + return self._pool + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + {{#if tornado}} + @tornado.gen.coroutine + {{/if}} + {{#if asyncio}}async {{/if}}def __call_api( + self, + resource_path: str, + method: str, + path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, + query_params: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None, + headers: typing.Optional[HTTPHeaderDict] = None, + body: typing.Optional[typing.Union[str, bytes]] = None, + fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None, + auth_settings: typing.Optional[typing.List[str]] = None, + stream: bool = False, + timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None, + host: typing.Optional[str] = None, + ) -> urllib3.HTTPResponse: + + # header parameters + headers = headers or {} + headers.update(self.default_headers) + if self.cookie: + headers['Cookie'] = self.cookie + + # path parameters + if path_params: + for k, v in path_params.items(): + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=self.configuration.safe_chars_for_path_param) + ) + + # auth setting + self.update_params_for_auth(headers, query_params, + auth_settings, resource_path, method, body) + + # request url + if host is None: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = host + resource_path + + # perform request and return response + response = {{#if asyncio}}await {{/if}}{{#if tornado}}yield {{/if}}self.request( + method, + url, + query_params=query_params, + headers=headers, + fields=fields, + body=body, + stream=stream, + timeout=timeout, + ) + return response + + def call_api( + self, + resource_path: str, + method: str, + path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, + query_params: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None, + headers: typing.Optional[HTTPHeaderDict] = None, + body: typing.Optional[typing.Union[str, bytes]] = None, + fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None, + auth_settings: typing.Optional[typing.List[str]] = None, + async_req: typing.Optional[bool] = None, + stream: bool = False, + timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None, + host: typing.Optional[str] = None, + ) -> urllib3.HTTPResponse: + """Makes the HTTP request (synchronous) and returns deserialized data. + + To make an async_req request, set the async_req parameter. + + :param resource_path: Path to method endpoint. 
+ :param method: Method to call. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param headers: Header parameters to be + placed in the request header. + :param body: Request body. + :param fields: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings: Auth Settings names for the request. + :param async_req: execute request asynchronously + :type async_req: bool, optional TODO remove, unused + :param stream: if True, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Also when True, if the openapi spec describes a file download, + the data will be written to a local filesystem file and the BinarySchema + instance will also inherit from FileSchema and FileIO + Default is False. + :type stream: bool, optional + :param timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param host: api endpoint host + :return: + If async_req parameter is True, + the request will be called asynchronously. + The method will return the request thread. + If parameter async_req is False or missing, + then the method will return the response directly. + """ + + if not async_req: + return self.__call_api( + resource_path, + method, + path_params, + query_params, + headers, + body, + fields, + auth_settings, + stream, + timeout, + host, + ) + + return self.pool.apply_async( + self.__call_api, + ( + resource_path, + method, + path_params, + query_params, + headers, + body, + fields, + auth_settings, + stream, + timeout, + host, + ) + ) + + def request( + self, + method: str, + url: str, + query_params: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None, + headers: typing.Optional[HTTPHeaderDict] = None, + fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None, + body: typing.Optional[typing.Union[str, bytes]] = None, + stream: bool = False, + timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None, + ) -> urllib3.HTTPResponse: + """Makes the HTTP request using RESTClient.""" + if method == "GET": + return self.rest_client.GET(url, + query_params=query_params, + stream=stream, + timeout=timeout, + headers=headers) + elif method == "HEAD": + return self.rest_client.HEAD(url, + query_params=query_params, + stream=stream, + timeout=timeout, + headers=headers) + elif method == "OPTIONS": + return self.rest_client.OPTIONS(url, + query_params=query_params, + headers=headers, + fields=fields, + stream=stream, + timeout=timeout, + body=body) + elif method == "POST": + return self.rest_client.POST(url, + query_params=query_params, + headers=headers, + fields=fields, + stream=stream, + timeout=timeout, + body=body) + elif method == "PUT": + return self.rest_client.PUT(url, + query_params=query_params, + headers=headers, + fields=fields, + stream=stream, + timeout=timeout, + body=body) + elif method == "PATCH": + return self.rest_client.PATCH(url, + query_params=query_params, + headers=headers, + fields=fields, + stream=stream, + timeout=timeout, + body=body) + elif method == "DELETE": + return self.rest_client.DELETE(url, + query_params=query_params, + headers=headers, + stream=stream, + timeout=timeout, + body=body) + else: + raise ApiValueError( + "http method must be `GET`, `HEAD`, `OPTIONS`," + " `POST`, `PATCH`, `PUT` or `DELETE`." 
+ ) + + def update_params_for_auth(self, headers, querys, auth_settings, + resource_path, method, body): + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param querys: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + :param resource_path: A string representation of the HTTP request resource path. + :param method: A string representation of the HTTP request method. + :param body: An object representing the body of the HTTP request. + The object type is the return value of _encoder.default(). + """ + if not auth_settings: + return + + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + if auth_setting['in'] == 'cookie': + headers.add('Cookie', auth_setting['value']) + elif auth_setting['in'] == 'header': + if auth_setting['type'] != 'http-signature': + headers.add(auth_setting['key'], auth_setting['value']) +{{#if hasHttpSignatureMethods}} + else: + # The HTTP signature scheme requires multiple HTTP headers + # that are calculated dynamically. + signing_info = self.configuration.signing_info + auth_headers = signing_info.get_http_signature_headers( + resource_path, method, headers, body, querys) + for key, value in auth_headers.items(): + headers.add(key, value) +{{/if}} + elif auth_setting['in'] == 'query': + querys.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + +class Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client: typing.Optional[ApiClient] = None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + @staticmethod + def _verify_typed_dict_inputs(cls: typing.Type[typing.TypedDict], data: typing.Dict[str, typing.Any]): + """ + Ensures that: + - required keys are present + - additional properties are not input + - values stored under required keys do not have the value unset + Note: detailed value checking is done in schema classes + """ + missing_required_keys = [] + required_keys_with_unset_values = [] + for required_key in cls.__required_keys__: + if required_key not in data: + missing_required_keys.append(required_key) + continue + value = data[required_key] + if value is unset: + required_keys_with_unset_values.append(required_key) + if missing_required_keys: + raise ApiTypeError( + '{} missing {} required arguments: {}'.format( + cls.__name__, len(missing_required_keys), missing_required_keys + ) + ) + if required_keys_with_unset_values: + raise ApiValueError( + '{} contains invalid unset values for {} required keys: {}'.format( + cls.__name__, len(required_keys_with_unset_values), required_keys_with_unset_values + ) + ) + + disallowed_additional_keys = [] + for key in data: + if key in cls.__required_keys__ or key in cls.__optional_keys__: + continue + disallowed_additional_keys.append(key) + if disallowed_additional_keys: + raise ApiTypeError( + '{} got {} unexpected keyword arguments: {}'.format( + cls.__name__, len(disallowed_additional_keys), disallowed_additional_keys + ) + ) + + def get_host( + self, + operation_id: str, + servers: typing.Tuple[typing.Dict[str, str], ...] 
= tuple(), + host_index: typing.Optional[int] = None + ) -> typing.Optional[str]: + configuration = self.api_client.configuration + try: + if host_index is None: + index = configuration.server_operation_index.get( + operation_id, configuration.server_index + ) + else: + index = host_index + server_variables = configuration.server_operation_variables.get( + operation_id, configuration.server_variables + ) + host = configuration.get_host_from_settings( + index, variables=server_variables, servers=servers + ) + except IndexError: + if servers: + raise ApiValueError( + "Invalid host index. Must be 0 <= index < %s" % + len(servers) + ) + host = None + return host + + +class SerializedRequestBody(typing.TypedDict, total=False): + body: typing.Union[str, bytes] + fields: typing.Tuple[typing.Union[RequestField, tuple[str, str]], ...] + + +class RequestBody(StyleFormSerializer): + """ + A request body parameter + content: content_type to MediaType Schema info + """ + __json_encoder = JSONEncoder() + + def __init__( + self, + content: typing.Dict[str, MediaType], + required: bool = False, + ): + self.required = required + if len(content) == 0: + raise ValueError('Invalid value for content, the content dict must have >= 1 entry') + self.content = content + + def __serialize_json( + self, + in_data: typing.Any + ) -> typing.Dict[str, bytes]: + in_data = self.__json_encoder.default(in_data) + json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode( + "utf-8" + ) + return dict(body=json_str) + + @staticmethod + def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]: + if isinstance(in_data, frozendict): + raise ValueError('Unable to serialize type frozendict to text/plain') + elif isinstance(in_data, tuple): + raise ValueError('Unable to serialize type tuple to text/plain') + elif isinstance(in_data, NoneClass): + raise ValueError('Unable to serialize type NoneClass to text/plain') + elif isinstance(in_data, BoolClass): + raise ValueError('Unable to serialize type BoolClass to text/plain') + return dict(body=str(in_data)) + + def __multipart_json_item(self, key: str, value: Schema) -> RequestField: + json_value = self.__json_encoder.default(value) + return RequestField(name=key, data=json.dumps(json_value), headers={'Content-Type': 'application/json'}) + + def __multipart_form_item(self, key: str, value: Schema) -> RequestField: + if isinstance(value, str): + return RequestField(name=key, data=str(value), headers={'Content-Type': 'text/plain'}) + elif isinstance(value, bytes): + return RequestField(name=key, data=value, headers={'Content-Type': 'application/octet-stream'}) + elif isinstance(value, FileIO): + request_field = RequestField( + name=key, + data=value.read(), + filename=os.path.basename(value.name), + headers={'Content-Type': 'application/octet-stream'} + ) + value.close() + return request_field + else: + return self.__multipart_json_item(key=key, value=value) + + def __serialize_multipart_form_data( + self, in_data: Schema + ) -> typing.Dict[str, typing.Tuple[RequestField, ...]]: + if not isinstance(in_data, frozendict): + raise ValueError(f'Unable to serialize {in_data} to multipart/form-data because it is not a dict of data') + """ + In a multipart/form-data request body, each schema property, or each element of a schema array property, + takes a section in the payload with an internal header as defined by RFC7578. 
The serialization strategy + for each property of a multipart/form-data request body can be specified in an associated Encoding Object. + + When passing in multipart types, boundaries MAY be used to separate sections of the content being + transferred – thus, the following default Content-Types are defined for multipart: + + If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain + If the property is complex, or an array of complex values, the default Content-Type is application/json + Question: how is the array of primitives encoded? + If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream + """ + fields = [] + for key, value in in_data.items(): + if isinstance(value, tuple): + if value: + # values use explode = True, so the code makes a RequestField for each item with name=key + for item in value: + request_field = self.__multipart_form_item(key=key, value=item) + fields.append(request_field) + else: + # send an empty array as json because exploding will not send it + request_field = self.__multipart_json_item(key=key, value=value) + fields.append(request_field) + else: + request_field = self.__multipart_form_item(key=key, value=value) + fields.append(request_field) + + return dict(fields=tuple(fields)) + + def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]: + if isinstance(in_data, bytes): + return dict(body=in_data) + # FileIO type + result = dict(body=in_data.read()) + in_data.close() + return result + + def __serialize_application_x_www_form_data( + self, in_data: typing.Any + ) -> typing.Dict[str, tuple[tuple[str, str], ...]]: + if not isinstance(in_data, frozendict): + raise ValueError( + f'Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data') + cast_in_data = self.__json_encoder.default(in_data) + fields = self._serialize_form(cast_in_data, explode=True, name='') + if not fields: + return {} + return {'fields': fields} + + def serialize( + self, in_data: typing.Any, content_type: str + ) -> SerializedRequestBody: + """ + If a str is returned then the result will be assigned to data when making the request + If a tuple is returned then the result will be used as fields input in encode_multipart_formdata + Return a tuple of + + The key of the return dict is + - body for application/json + - encode_multipart and fields for multipart/form-data + """ + media_type = self.content[content_type] + if isinstance(in_data, media_type.schema): + cast_in_data = in_data + elif isinstance(in_data, (dict, frozendict)) and in_data: + cast_in_data = media_type.schema(**in_data) + else: + cast_in_data = media_type.schema(in_data) + # TODO check for and use encoding if it exists + # and content_type is multipart or application/x-www-form-urlencoded + if content_type == 'application/json': + return self.__serialize_json(cast_in_data) + elif content_type == 'text/plain': + return self.__serialize_text_plain(cast_in_data) + elif content_type == 'multipart/form-data': + return self.__serialize_multipart_form_data(cast_in_data) + elif content_type == 'application/x-www-form-urlencoded': + return self.__serialize_application_x_www_form_data(cast_in_data) + elif content_type == 'application/octet-stream': + return self.__serialize_application_octet_stream(cast_in_data) + raise NotImplementedError('Serialization has not yet been implemented for {}'.format(content_type)) diff --git 
a/openapi/python_explerimental_client_template/api_doc.handlebars b/openapi/python_explerimental_client_template/api_doc.handlebars new file mode 100644 index 00000000..6e74b8ab --- /dev/null +++ b/openapi/python_explerimental_client_template/api_doc.handlebars @@ -0,0 +1,212 @@ +# {{packageName}}.{{classname}}{{#if description}} +{{description}}{{/if}} + +All URIs are relative to *{{basePath}}* + +Method | HTTP request | Description +------------- | ------------- | ------------- +{{#with operations}}{{#each operation}}[**{{operationId}}**]({{classname}}.md#{{operationId}}) | **{{httpMethod}}** {{path}} | {{#if summary}}{{summary}}{{/if}} +{{/each}}{{/with}} + +{{#with operations}} +{{#each operation}} +# **{{{operationId}}}** +> {{#if returnType}}{{{returnType}}} {{/if}}{{{operationId}}}({{#each requiredParams}}{{#unless defaultValue}}{{paramName}}{{#if hasMore}}, {{/if}}{{/unless}}{{/each}}) + +{{{summary}}}{{#if notes}} + +{{{notes}}}{{/if}} + +### Example + +{{#if hasAuthMethods}} +{{#each authMethods}} +{{#if isBasic}} +{{#if isBasicBasic}} +* Basic Authentication ({{name}}): +{{/if}} +{{#if isBasicBearer}} +* Bearer{{#if bearerFormat}} ({{{bearerFormat}}}){{/if}} Authentication ({{name}}): +{{/if}} +{{/if}} +{{#if isApiKey}} +* Api Key Authentication ({{name}}): +{{/if}} +{{#if isOAuth}} +* OAuth Authentication ({{name}}): +{{/if}} +{{/each}} +{{/if}} +{{> api_doc_example }} +### Parameters +{{#if allParams}} + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + {{#with bodyParam}} +{{baseName}} | typing.Union[{{#each content}}{{#unless @first}}, {{/unless}}{{this.schema.baseName}}{{/each}}{{#unless required}}, Unset]{{else}}]{{/unless}} | {{#if required}}required{{else}}optional, default is unset{{/if}} | + {{/with}} + {{#if queryParams}} +query_params | RequestQueryParams | | + {{/if}} + {{#if headerParams}} +header_params | RequestHeaderParams | | + {{/if}} + {{#if pathParams}} +path_params | RequestPathParams | | + {{/if}} + {{#if cookieParams}} +cookie_params | RequestCookieParams | | + {{/if}} + {{#with bodyParam}} + {{#each content}} + {{#if @first}} +content_type | str | optional, default is '{{@key}}' | Selects the schema and serialization of the request body + {{/if}} + {{/each}} + {{/with}} + {{#if produces}} +accept_content_types | typing.Tuple[str] | default is ({{#each produces}}'{{this.mediaType}}', {{/each}}) | Tells the server the content type(s) that are accepted by the client + {{/if}} + {{#if servers}} +host_index | typing.Optional[int] | default is None | Allows one to select a different host + {{/if}} +stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. 
When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file +timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client +skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned + {{#with bodyParam}} + +### body + {{#each content}} + {{#with this.schema}} +{{> api_doc_schema_type_hint }} + {{/with}} + {{/each}} + {{/with}} + {{#if queryParams}} + +### query_params +#### RequestQueryParams + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + {{#each queryParams}} +{{baseName}} | {{#with schema}}{{baseName}}{{/with}} | | {{#unless required}}optional{{/unless}} + {{/each}} + + {{#each queryParams}} + {{#with schema}} +{{> api_doc_schema_type_hint }} + {{/with}} + {{/each}} + {{/if}} + {{#if headerParams}} + +### header_params +#### RequestHeaderParams + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + {{#each headerParams}} +{{baseName}} | {{#with schema}}{{baseName}}{{/with}} | | {{#unless required}}optional{{/unless}} + {{/each}} + {{#each headerParams}} + {{#with schema}} +{{> api_doc_schema_type_hint }} + {{/with}} + {{/each}} + {{/if}} + {{#if pathParams}} + +### path_params +#### RequestPathParams + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + {{#each pathParams}} +{{baseName}} | {{#with schema}}{{baseName}}{{/with}} | | {{#unless required}}optional{{/unless}} + {{/each}} + {{#each pathParams}} + {{#with schema}} +{{> api_doc_schema_type_hint }} + {{/with}} + {{/each}} + {{/if}} + {{#if cookieParams}} + +### cookie_params +#### RequestCookieParams + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + {{#each cookieParams}} +{{baseName}} | {{#with schema}}{{baseName}}{{/with}} | | {{#unless required}}optional{{/unless}} + {{/each}} + {{#each cookieParams}} + {{#with schema}} +{{> api_doc_schema_type_hint }} + {{/with}} + {{/each}} + {{/if}} +{{else}} +This endpoint does not need any parameter. 
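For orientation, a hedged sketch of what a call to an endpoint documented by this template might look like once rendered; every package, module, class, operation, and parameter name below is a placeholder for whatever the generator derives from the spec, not something defined in this patch:

```python
# Illustrative only: petstore_client / pet_api / PetApi / find_pets_by_status are
# placeholders for {{packageName}}, {{apiPackage}}, {{classname}} and {{operationId}}.
import petstore_client
from petstore_client.apis import pet_api

configuration = petstore_client.Configuration(host="https://petstore.example.com/v2")
with petstore_client.ApiClient(configuration) as api_client:
    api_instance = pet_api.PetApi(api_client)
    try:
        api_response = api_instance.find_pets_by_status(
            query_params={'status': 'available'},  # RequestQueryParams TypedDict
            timeout=10,                            # per-request timeout (seconds)
            skip_deserialization=False,            # True returns only the raw urllib3 response
        )
        print(api_response.body)
    except petstore_client.ApiException as e:
        print("Exception when calling PetApi->find_pets_by_status: %s\n" % e)
```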
+{{/if}} + +### Return Types, Responses + +Code | Class | Description +------------- | ------------- | ------------- +n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned +{{#each responses}} +{{#if isDefault}} +default | ApiResponseForDefault | {{message}} {{description}} +{{else}} +{{code}} | ApiResponseFor{{code}} | {{message}} {{description}} +{{/if}} +{{/each}} +{{#each responses}} +{{#if isDefault}} + +#### ApiResponseForDefault +{{else}} + +#### ApiResponseFor{{code}} +{{/if}} +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- +response | urllib3.HTTPResponse | Raw response | +body | {{#unless content}}Unset{{else}}typing.Union[{{#each content}}{{this.schema.baseName}}, {{/each}}]{{/unless}} | {{#unless content}}body was not defined{{/unless}} | +headers | {{#unless responseHeaders}}Unset{{else}}ResponseHeadersFor{{code}}{{/unless}} | {{#unless responseHeaders}}headers were not defined{{/unless}} | +{{#each content}} +{{#with this.schema}} +{{> api_doc_schema_type_hint }} +{{/with}} +{{/each}} +{{#if responseHeaders}} +#### ResponseHeadersFor{{code}} + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + {{#each responseHeaders}} +{{baseName}} | {{#with schema}}{{baseName}}{{/with}} | | {{#unless required}}optional{{/unless}} + {{/each}} + {{#each responseHeaders}} + {{#with schema}} +{{> api_doc_schema_type_hint }} + {{/with}} + {{/each}} + +{{/if}} +{{/each}} + + +{{#if returnType}}{{#if returnTypeIsPrimitive}}**{{{returnType}}}**{{/if}}{{#unless returnTypeIsPrimitive}}[**{{{returnType}}}**]({{returnBaseType}}.md){{/unless}}{{/if}}{{#unless returnType}}void (empty response body){{/unless}} + +### Authorization + +{{#unless authMethods}}No authorization required{{/unless}}{{#each authMethods}}[{{{name}}}](../README.md#{{{name}}}){{#unless @last}}, {{/unless}}{{/each}} + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +{{/each}} +{{/with}} diff --git a/openapi/python_explerimental_client_template/api_doc_example.handlebars b/openapi/python_explerimental_client_template/api_doc_example.handlebars new file mode 100644 index 00000000..e6fa0bd4 --- /dev/null +++ b/openapi/python_explerimental_client_template/api_doc_example.handlebars @@ -0,0 +1,163 @@ +```python +import {{{packageName}}} +from {{packageName}}.{{apiPackage}} import {{classFilename}} +{{#each imports}} +{{{.}}} +{{/each}} +from pprint import pprint +{{> doc_auth_partial}} +# Enter a context with an instance of the API client +with {{{packageName}}}.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = {{classFilename}}.{{{classname}}}(api_client) +{{#if requiredParams}} + + # example passing only required values which don't have defaults set +{{#if pathParams}} + path_params = { + {{#each pathParams}} + {{#if required}} + '{{baseName}}': {{{example}}}, + {{/if}} + {{/each}} + } +{{/if}} +{{#if queryParams}} + query_params = { + {{#each queryParams}} + {{#if required}} + '{{baseName}}': {{{example}}}, + {{/if}} + {{/each}} + } +{{/if}} +{{#if cookieParams}} + cookie_params = { + {{#each cookieParams}} + {{#if required}} + '{{baseName}}': {{{example}}}, + {{/if}} + {{/each}} + } +{{/if}} +{{#if headerParams}} + header_params = { + {{#each headerParams}} + {{#if required}} + '{{baseName}}': 
{{{example}}}, + {{/if}} + {{/each}} + } +{{/if}} +{{#with bodyParam}} + {{#if required}} + body = {{{example}}} + {{/if}} +{{/with}} + try: +{{#if summary}} + # {{{summary}}} +{{/if}} + api_response = api_instance.{{{operationId}}}( + {{#if pathParams}} + path_params=path_params, + {{/if}} + {{#if queryParams}} + query_params=query_params, + {{/if}} + {{#if headerParams}} + header_params=header_params, + {{/if}} + {{#if cookieParams}} + cookie_params=cookie_params, + {{/if}} + {{#with bodyParam}} + {{#if required}} + body=body, + {{/if}} + {{/with}} + ) +{{#if returnType}} + pprint(api_response) +{{/if}} + except {{{packageName}}}.ApiException as e: + print("Exception when calling {{classname}}->{{operationId}}: %s\n" % e) +{{/if}} +{{#if optionalParams}} + + # example passing only optional values +{{#if pathParams}} + path_params = { + {{#each pathParams}} + '{{baseName}}': {{{example}}}, + {{/each}} + } +{{/if}} +{{#if queryParams}} + query_params = { + {{#each queryParams}} + '{{baseName}}': {{{example}}}, + {{/each}} + } +{{/if}} +{{#if cookieParams}} + cookie_params = { + {{#each cookieParams}} + '{{baseName}}': {{{example}}}, + {{/each}} + } +{{/if}} +{{#if headerParams}} + header_params = { + {{#each headerParams}} + '{{baseName}}': {{{example}}}, + {{/each}} + } +{{/if}} +{{#with bodyParam}} + body = {{{example}}} +{{/with}} + try: +{{#if summary}} + # {{{summary}}} +{{/if}} + api_response = api_instance.{{{operationId}}}( + {{#if pathParams}} + path_params=path_params, + {{/if}} + {{#if queryParams}} + query_params=query_params, + {{/if}} + {{#if headerParams}} + header_params=header_params, + {{/if}} + {{#if cookieParams}} + cookie_params=cookie_params, + {{/if}} + {{#if bodyParam}} + body=body, + {{/if}} + ) +{{#if returnType}} + pprint(api_response) +{{/if}} + except {{{packageName}}}.ApiException as e: + print("Exception when calling {{classname}}->{{operationId}}: %s\n" % e) +{{/if}} +{{#unless requiredParams}} +{{#unless optionalParams}} + + # example, this endpoint has no required or optional parameters + try: +{{#if summary}} + # {{{summary}}} +{{/if}} + api_response = api_instance.{{{operationId}}}() +{{#if returnType}} + pprint(api_response) +{{/if}} + except {{{packageName}}}.ApiException as e: + print("Exception when calling {{classname}}->{{operationId}}: %s\n" % e) +{{/unless}} +{{/unless}} +``` diff --git a/openapi/python_explerimental_client_template/api_doc_schema_type_hint.handlebars b/openapi/python_explerimental_client_template/api_doc_schema_type_hint.handlebars new file mode 100644 index 00000000..0698320a --- /dev/null +++ b/openapi/python_explerimental_client_template/api_doc_schema_type_hint.handlebars @@ -0,0 +1,10 @@ + +#### {{baseName}} +{{#if complexType}} +Type | Description | Notes +------------- | ------------- | ------------- +[**{{dataType}}**]({{complexType}}.md) | {{description}} | {{#if isReadOnly}}[readonly] {{/if}} + +{{else}} +{{> schema_doc }} +{{/if}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/api_test.handlebars b/openapi/python_explerimental_client_template/api_test.handlebars new file mode 100644 index 00000000..2f52783c --- /dev/null +++ b/openapi/python_explerimental_client_template/api_test.handlebars @@ -0,0 +1,34 @@ +# coding: utf-8 + +{{>partial_header}} + +import unittest + +import {{packageName}} +from {{packageName}}.{{apiPackage}}.{{classFilename}} import {{classname}} # noqa: E501 + + +class {{#with operations}}Test{{classname}}(unittest.TestCase): + """{{classname}} unit test 
stubs""" + + def setUp(self): + self.api = {{classname}}() # noqa: E501 + + def tearDown(self): + pass + + {{#each operation}} + def test_{{operationId}}(self): + """Test case for {{{operationId}}} + +{{#if summary}} + {{{summary}}} # noqa: E501 +{{/if}} + """ + pass + + {{/each}} +{{/with}} + +if __name__ == '__main__': + unittest.main() diff --git a/openapi/python_explerimental_client_template/configuration.handlebars b/openapi/python_explerimental_client_template/configuration.handlebars new file mode 100644 index 00000000..c0a0dc7d --- /dev/null +++ b/openapi/python_explerimental_client_template/configuration.handlebars @@ -0,0 +1,636 @@ +# coding: utf-8 + +{{>partial_header}} + +import copy +import logging +{{#unless asyncio}} +import multiprocessing +{{/unless}} +import sys +import urllib3 + +from http import client as http_client +from {{packageName}}.exceptions import ApiValueError + + +JSON_SCHEMA_VALIDATION_KEYWORDS = { + 'multipleOf', 'maximum', 'exclusiveMaximum', + 'minimum', 'exclusiveMinimum', 'maxLength', + 'minLength', 'pattern', 'maxItems', 'minItems', + 'uniqueItems', 'maxProperties', 'minProperties', +} + +class Configuration(object): + """NOTE: This class is auto generated by OpenAPI Generator + + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication + :param password: Password for HTTP basic authentication + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + :param disabled_client_side_validations (string): Comma-separated list of + JSON schema validation keywords to disable JSON schema structural validation + rules. The following keywords may be specified: multipleOf, maximum, + exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, + maxItems, minItems. + By default, the validation is performed for data generated locally by the client + and data received from the server, independent of any validation performed by + the server side. If the input data does not satisfy the JSON schema validation + rules specified in the OpenAPI document, an exception is raised. + If disabled_client_side_validations is set, structural validation is + disabled. This can be useful to troubleshoot data validation problem, such as + when the OpenAPI document validation rules do not match the actual API data + received by the server. 
+{{#if hasHttpSignatureMethods}} + :param signing_info: Configuration parameters for the HTTP signature security scheme. + Must be an instance of {{{packageName}}}.signing.HttpSigningConfiguration +{{/if}} + :param server_index: Index to servers configuration. + :param server_variables: Mapping with string values to replace variables in + templated server configuration. The validation of enums is performed for + variables with defined enum values before. + :param server_operation_index: Mapping from operation ID to an index to server + configuration. + :param server_operation_variables: Mapping from operation ID to a mapping with + string values to replace variables in templated server configuration. + The validation of enums is performed for variables with defined enum values before. + +{{#if hasAuthMethods}} + :Example: +{{#if hasApiKeyMethods}} + + API Key Authentication Example. + Given the following security scheme in the OpenAPI specification: + components: + securitySchemes: + cookieAuth: # name for the security scheme + type: apiKey + in: cookie + name: JSESSIONID # cookie name + + You can programmatically set the cookie: + +conf = {{{packageName}}}.Configuration( + api_key={'cookieAuth': 'abc123'} + api_key_prefix={'cookieAuth': 'JSESSIONID'} +) + + The following cookie will be added to the HTTP request: + Cookie: JSESSIONID abc123 +{{/if}} +{{#if hasHttpBasicMethods}} + + HTTP Basic Authentication Example. + Given the following security scheme in the OpenAPI specification: + components: + securitySchemes: + http_basic_auth: + type: http + scheme: basic + + Configure API client with HTTP basic authentication: + +conf = {{{packageName}}}.Configuration( + username='the-user', + password='the-password', +) + +{{/if}} +{{#if hasHttpSignatureMethods}} + + HTTP Signature Authentication Example. + Given the following security scheme in the OpenAPI specification: + components: + securitySchemes: + http_basic_auth: + type: http + scheme: signature + + Configure API client with HTTP signature authentication. Use the 'hs2019' signature scheme, + sign the HTTP requests with the RSA-SSA-PSS signature algorithm, and set the expiration time + of the signature to 5 minutes after the signature has been created. + Note you can use the constants defined in the {{{packageName}}}.signing module, and you can + also specify arbitrary HTTP headers to be included in the HTTP signature, except for the + 'Authorization' header, which is used to carry the signature. + + One may be tempted to sign all headers by default, but in practice it rarely works. + This is beccause explicit proxies, transparent proxies, TLS termination endpoints or + load balancers may add/modify/remove headers. Include the HTTP headers that you know + are not going to be modified in transit. 
+ +conf = {{{packageName}}}.Configuration( + signing_info = {{{packageName}}}.signing.HttpSigningConfiguration( + key_id = 'my-key-id', + private_key_path = 'rsa.pem', + signing_scheme = {{{packageName}}}.signing.SCHEME_HS2019, + signing_algorithm = {{{packageName}}}.signing.ALGORITHM_RSASSA_PSS, + signed_headers = [{{{packageName}}}.signing.HEADER_REQUEST_TARGET, + {{{packageName}}}.signing.HEADER_CREATED, + {{{packageName}}}.signing.HEADER_EXPIRES, + {{{packageName}}}.signing.HEADER_HOST, + {{{packageName}}}.signing.HEADER_DATE, + {{{packageName}}}.signing.HEADER_DIGEST, + 'Content-Type', + 'User-Agent' + ], + signature_max_validity = datetime.timedelta(minutes=5) + ) +) +{{/if}} +{{/if}} + """ + + _default = None + + def __init__(self, host=None, + api_key=None, api_key_prefix=None, + username=None, password=None, + discard_unknown_keys=False, + disabled_client_side_validations="", +{{#if hasHttpSignatureMethods}} + signing_info=None, +{{/if}} + server_index=None, server_variables=None, + server_operation_index=None, server_operation_variables=None, + ): + """Constructor + """ + self._base_path = "{{{basePath}}}" if host is None else host + """Default Base url + """ + self.server_index = 0 if server_index is None and host is None else server_index + self.server_operation_index = server_operation_index or {} + """Default server index + """ + self.server_variables = server_variables or {} + self.server_operation_variables = server_operation_variables or {} + """Default server variables + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.discard_unknown_keys = discard_unknown_keys + self.disabled_client_side_validations = disabled_client_side_validations +{{#if hasHttpSignatureMethods}} + if signing_info is not None: + signing_info.host = host + self.signing_info = signing_info + """The HTTP signing configuration + """ +{{/if}} +{{#if hasOAuthMethods}} + self.access_token = None + """access token for OAuth/Bearer + """ +{{/if}} +{{#unless hasOAuthMethods}} +{{#if hasBearerMethods}} + self.access_token = None + """access token for OAuth/Bearer + """ +{{/if}} +{{/unless}} + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("{{packageName}}") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + self.debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = None + """Set this to customize the certificate file to verify the peer. 
+ """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + + {{#if asyncio}} + self.connection_pool_maxsize = 100 + """This value is passed to the aiohttp to limit simultaneous connections. + Default values is 100, None means no-limit. + """ + {{/if}} + {{#unless asyncio}} + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. + """ + {{/unless}} + + self.proxy = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = None + """Adding retries to override urllib3 default value 3 + """ + # Enable client side validation + self.client_side_validation = True + + # Options to pass down to the underlying urllib3 socket + self.socket_options = None + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + def __setattr__(self, name, value): + object.__setattr__(self, name, value) + if name == 'disabled_client_side_validations': + s = set(filter(None, value.split(','))) + for v in s: + if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: + raise ApiValueError( + "Invalid keyword: '{0}''".format(v)) + self._disabled_client_side_validations = s +{{#if hasHttpSignatureMethods}} + if name == "signing_info" and value is not None: + # Ensure the host paramater from signing info is the same as + # Configuration.host. + value.host = self.host +{{/if}} + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = copy.deepcopy(default) + + @classmethod + def get_default_copy(cls): + """Return new instance of configuration. + + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration passed by the set_default method. + + :return: The configuration object. + """ + if cls._default is not None: + return copy.deepcopy(cls._default) + return Configuration() + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. 
+ :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.logger.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in self.logger.items(): + logger.setLevel(logging.DEBUG) + # turn on http_client debug + http_client.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in self.logger.items(): + logger.setLevel(logging.WARNING) + # turn off http_client debug + http_client.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier, alias=None): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :param alias: The alternative identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. 
+ """ + auth = {} +{{#each authMethods}} +{{#if isApiKey}} + if '{{name}}' in self.api_key{{#each vendorExtensions.x-auth-id-alias}} or '{{.}}' in self.api_key{{/each}}: + auth['{{name}}'] = { + 'type': 'api_key', + 'in': {{#if isKeyInCookie}}'cookie'{{/if}}{{#if isKeyInHeader}}'header'{{/if}}{{#if isKeyInQuery}}'query'{{/if}}, + 'key': '{{keyParamName}}', + 'value': self.get_api_key_with_prefix( + '{{name}}',{{#each vendorExtensions.x-auth-id-alias}} + alias='{{.}}',{{/each}} + ), + } +{{/if}} +{{#if isBasic}} + {{#if isBasicBasic}} + if self.username is not None and self.password is not None: + auth['{{name}}'] = { + 'type': 'basic', + 'in': 'header', + 'key': 'Authorization', + 'value': self.get_basic_auth_token() + } + {{/if}} + {{#if isBasicBearer}} + if self.access_token is not None: + auth['{{name}}'] = { + 'type': 'bearer', + 'in': 'header', + {{#if bearerFormat}} + 'format': '{{{bearerFormat}}}', + {{/if}} + 'key': 'Authorization', + 'value': 'Bearer ' + self.access_token + } + {{/if}} + {{#if isHttpSignature}} + if self.signing_info is not None: + auth['{{name}}'] = { + 'type': 'http-signature', + 'in': 'header', + 'key': 'Authorization', + 'value': None # Signature headers are calculated for every HTTP request + } + {{/if}} +{{/if}} +{{#if isOAuth}} + if self.access_token is not None: + auth['{{name}}'] = { + 'type': 'oauth2', + 'in': 'header', + 'key': 'Authorization', + 'value': 'Bearer ' + self.access_token + } +{{/if}} +{{/each}} + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. + """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: {{version}}\n"\ + "SDK Package Version: {{packageVersion}}".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + {{#each servers}} + { + 'url': "{{{url}}}", + 'description': "{{{description}}}{{#unless description}}No description provided{{/unless}}", + {{#each variables}} + {{#if @first}} + 'variables': { + {{/if}} + '{{{name}}}': { + 'description': "{{{description}}}{{#unless description}}No description provided{{/unless}}", + 'default_value': "{{{defaultValue}}}", + {{#each enumValues}} + {{#if @first}} + 'enum_values': [ + {{/if}} + "{{{.}}}"{{#unless @last}},{{/unless}} + {{#if @last}} + ] + {{/if}} + {{/each}} + }{{#unless @last}},{{/unless}} + {{#if @last}} + } + {{/if}} + {{/each}} + }{{#unless @last}},{{/unless}} + {{/each}} + ] + + def get_host_from_settings(self, index, variables=None, servers=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. 
" + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server.get('variables', {}).items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url + + @property + def host(self): + """Return generated host.""" + return self.get_host_from_settings(self.server_index, variables=self.server_variables) + + @host.setter + def host(self, value): + """Fix base path.""" + self._base_path = value + self.server_index = None diff --git a/openapi/python_explerimental_client_template/doc_auth_partial.handlebars b/openapi/python_explerimental_client_template/doc_auth_partial.handlebars new file mode 100644 index 00000000..b16451d8 --- /dev/null +++ b/openapi/python_explerimental_client_template/doc_auth_partial.handlebars @@ -0,0 +1,109 @@ +# Defining the host is optional and defaults to {{{basePath}}} +# See configuration.py for a list of all supported configuration parameters. +configuration = {{{packageName}}}.Configuration( + host = "{{{basePath}}}" +) + +{{#if hasAuthMethods}} +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. +{{#each authMethods}} +{{#if isBasic}} +{{#if isBasicBasic}} + +# Configure HTTP basic authorization: {{{name}}} +configuration = {{{packageName}}}.Configuration( + username = 'YOUR_USERNAME', + password = 'YOUR_PASSWORD' +) +{{/if}} +{{#if isBasicBearer}} + +# Configure Bearer authorization{{#if bearerFormat}} ({{{bearerFormat}}}){{/if}}: {{{name}}} +configuration = {{{packageName}}}.Configuration( + access_token = 'YOUR_BEARER_TOKEN' +) +{{/if}} +{{#if isHttpSignature}} + +# Configure HTTP message signature: {{{name}}} +# The HTTP Signature Header mechanism that can be used by a client to +# authenticate the sender of a message and ensure that particular headers +# have not been modified in transit. +# +# You can specify the signing key-id, private key path, signing scheme, +# signing algorithm, list of signed headers and signature max validity. +# The 'key_id' parameter is an opaque string that the API server can use +# to lookup the client and validate the signature. +# The 'private_key_path' parameter should be the path to a file that +# contains a DER or base-64 encoded private key. +# The 'private_key_passphrase' parameter is optional. Set the passphrase +# if the private key is encrypted. +# The 'signed_headers' parameter is used to specify the list of +# HTTP headers included when generating the signature for the message. +# You can specify HTTP headers that you want to protect with a cryptographic +# signature. Note that proxies may add, modify or remove HTTP headers +# for legitimate reasons, so you should only add headers that you know +# will not be modified. For example, if you want to protect the HTTP request +# body, you can specify the Digest header. In that case, the client calculates +# the digest of the HTTP request body and includes the digest in the message +# signature. 
+# The 'signature_max_validity' parameter is optional. It is configured as a +# duration to express when the signature ceases to be valid. The client calculates +# the expiration date every time it generates the cryptographic signature +# of an HTTP request. The API server may have its own security policy +# that controls the maximum validity of the signature. The client max validity +# must be lower than the server max validity. +# The time on the client and server must be synchronized, otherwise the +# server may reject the client signature. +# +# The client must use a combination of private key, signing scheme, +# signing algorithm and hash algorithm that matches the security policy of +# the API server. +# +# See {{{packageName}}}.signing for a list of all supported parameters. +configuration = {{{packageName}}}.Configuration( + host = "{{{basePath}}}", + signing_info = {{{packageName}}}.signing.HttpSigningConfiguration( + key_id = 'my-key-id', + private_key_path = 'private_key.pem', + private_key_passphrase = 'YOUR_PASSPHRASE', + signing_scheme = {{{packageName}}}.signing.SCHEME_HS2019, + signing_algorithm = {{{packageName}}}.signing.ALGORITHM_ECDSA_MODE_FIPS_186_3, + hash_algorithm = {{{packageName}}}.signing.SCHEME_RSA_SHA256, + signed_headers = [ + {{{packageName}}}.signing.HEADER_REQUEST_TARGET, + {{{packageName}}}.signing.HEADER_CREATED, + {{{packageName}}}.signing.HEADER_EXPIRES, + {{{packageName}}}.signing.HEADER_HOST, + {{{packageName}}}.signing.HEADER_DATE, + {{{packageName}}}.signing.HEADER_DIGEST, + 'Content-Type', + 'Content-Length', + 'User-Agent' + ], + signature_max_validity = datetime.timedelta(minutes=5) + ) +) +{{/if}} +{{/if}} +{{#if isApiKey}} + +# Configure API key authorization: {{{name}}} +configuration.api_key['{{{name}}}'] = 'YOUR_API_KEY' + +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['{{name}}'] = 'Bearer' +{{/if}} +{{#if isOAuth}} + +# Configure OAuth2 access token for authorization: {{{name}}} +configuration = {{{packageName}}}.Configuration( + host = "{{{basePath}}}" +) +configuration.access_token = 'YOUR_ACCESS_TOKEN' +{{/if}} +{{/each}} +{{/if}} diff --git a/openapi/python_explerimental_client_template/endpoint.handlebars b/openapi/python_explerimental_client_template/endpoint.handlebars new file mode 100644 index 00000000..f6de978e --- /dev/null +++ b/openapi/python_explerimental_client_template/endpoint.handlebars @@ -0,0 +1,549 @@ +# coding: utf-8 + +{{>partial_header}} + +from dataclasses import dataclass +import re # noqa: F401 +import sys # noqa: F401 +import typing +import urllib3 +{{#with operation}} +{{#or headerParams bodyParam produces}} +from urllib3._collections import HTTPHeaderDict +{{/or}} +{{/with}} + +from {{packageName}} import api_client, exceptions +{{> model_templates/imports_schema_types }} +{{> model_templates/imports_schemas }} + +{{#with operation}} +{{#if queryParams}} +# query params +{{#each queryParams}} +{{#with schema}} +{{> model_templates/schema }} +{{/with}} +{{/each}} +RequestRequiredQueryParams = typing.TypedDict( + 'RequestRequiredQueryParams', + { +{{#each queryParams}} +{{#if required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/if}} +{{/each}} + } +) +RequestOptionalQueryParams = typing.TypedDict( + 'RequestOptionalQueryParams', + { +{{#each queryParams}} +{{#unless required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/unless}} +{{/each}} + }, + total=False +) + + +class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams): + pass + + +{{#each queryParams}} +{{> endpoint_parameter }} +{{/each}} +{{/if}} +{{#if headerParams}} +# header params +{{#each headerParams}} +{{#with schema}} +{{> model_templates/schema }} +{{/with}} +{{/each}} +RequestRequiredHeaderParams = typing.TypedDict( + 'RequestRequiredHeaderParams', + { +{{#each headerParams}} +{{#if required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/if}} +{{/each}} + } +) +RequestOptionalHeaderParams = typing.TypedDict( + 'RequestOptionalHeaderParams', + { +{{#each headerParams}} +{{#unless required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/unless}} +{{/each}} + }, + total=False +) + + +class RequestHeaderParams(RequestRequiredHeaderParams, RequestOptionalHeaderParams): + pass + + +{{#each headerParams}} +{{> endpoint_parameter }} +{{/each}} +{{/if}} +{{#if pathParams}} +# path params +{{#each pathParams}} +{{#with schema}} +{{> model_templates/schema }} +{{/with}} +{{/each}} +RequestRequiredPathParams = typing.TypedDict( + 'RequestRequiredPathParams', + { +{{#each pathParams}} +{{#if required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/if}} +{{/each}} + } +) +RequestOptionalPathParams = typing.TypedDict( + 'RequestOptionalPathParams', + { +{{#each pathParams}} +{{#unless required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/unless}} +{{/each}} + }, + total=False +) + + +class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams): + pass + + +{{#each pathParams}} +{{> endpoint_parameter }} +{{/each}} +{{/if}} +{{#if cookieParams}} +# cookie params +{{#each cookieParams}} +{{#with schema}} +{{> model_templates/schema }} +{{/with}} +{{/each}} +RequestRequiredCookieParams = typing.TypedDict( + 'RequestRequiredCookieParams', + { +{{#each cookieParams}} +{{#if 
required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/if}} +{{/each}} + } +) +RequestOptionalCookieParams = typing.TypedDict( + 'RequestOptionalCookieParams', + { +{{#each cookieParams}} +{{#unless required}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/unless}} +{{/each}} + }, + total=False +) + + +class RequestCookieParams(RequestRequiredCookieParams, RequestOptionalCookieParams): + pass + + +{{#each cookieParams}} +{{> endpoint_parameter }} +{{/each}} +{{/if}} +{{#with bodyParam}} +# body param +{{#each content}} +{{#with this.schema}} +{{> model_templates/schema }} +{{/with}} +{{/each}} + + +request_body_{{paramName}} = api_client.RequestBody( + content={ +{{#each content}} + '{{@key}}': api_client.MediaType( + schema={{this.schema.baseName}}), +{{/each}} + }, +{{#if required}} + required=True, +{{/if}} +) +{{/with}} +_path = '{{{path}}}' +_method = '{{httpMethod}}' +{{#each authMethods}} +{{#if @first}} +_auth = [ +{{/if}} + '{{name}}', +{{#if @last}} +] +{{/if}} +{{/each}} +{{#each servers}} +{{#if @first}} +_servers = ( +{{/if}} + { + 'url': "{{{url}}}", + 'description': "{{{description}}}{{#unless description}}No description provided{{/unless}}", + {{#each variables}} + {{#if @first}} + 'variables': { + {{/if}} + '{{{name}}}': { + 'description': "{{{description}}}{{#unless description}}No description provided{{/unless}}", + 'default_value': "{{{defaultValue}}}", + {{#each enumValues}} + {{#if @first}} + 'enum_values': [ + {{/if}} + "{{{.}}}"{{#unless @last}},{{/unless}} + {{#if @last}} + ] + {{/if}} + {{/each}} + }{{#unless @last}},{{/unless}} + {{#if @last}} + } + {{/if}} + {{/each}} + }, +{{#if @last}} +) +{{/if}} +{{/each}} +{{#each responses}} +{{#each responseHeaders}} +{{#with schema}} +{{> model_templates/schema }} +{{/with}} +{{paramName}}_parameter = api_client.HeaderParameter( + name="{{baseName}}", +{{#if style}} + style=api_client.ParameterStyle.{{style}}, +{{/if}} +{{#if schema}} +{{#with schema}} + schema={{baseName}}, +{{/with}} +{{/if}} +{{#if required}} + required=True, +{{/if}} +{{#if isExplode}} + explode=True, +{{/if}} +) +{{/each}} +{{#each content}} +{{#with this.schema}} +{{> model_templates/schema }} +{{/with}} +{{/each}} +{{#if responseHeaders}} +ResponseHeadersFor{{code}} = typing.TypedDict( + 'ResponseHeadersFor{{code}}', + { +{{#each responseHeaders}} + '{{baseName}}': {{#with schema}}{{baseName}},{{/with}} +{{/each}} + } +) +{{/if}} + + +@dataclass +{{#if isDefault}} +class ApiResponseForDefault(api_client.ApiResponse): +{{else}} +class ApiResponseFor{{code}}(api_client.ApiResponse): +{{/if}} + response: urllib3.HTTPResponse +{{#and responseHeaders content}} + body: typing.Union[ +{{#each content}} + {{this.schema.baseName}}, +{{/each}} + ] + headers: ResponseHeadersFor{{code}} +{{else}} +{{#or responseHeaders content}} +{{#if responseHeaders}} + headers: ResponseHeadersFor{{code}} + body: Unset = unset +{{else}} + body: typing.Union[ +{{#each content}} + {{this.schema.baseName}}, +{{/each}} + ] + headers: Unset = unset +{{/if}} +{{/or}} +{{/and}} +{{#unless responseHeaders}} +{{#unless content}} + body: Unset = unset + headers: Unset = unset +{{/unless}} +{{/unless}} + + +{{#if isDefault}} +_response_for_default = api_client.OpenApiResponse( + response_cls=ApiResponseForDefault, +{{else}} +_response_for_{{code}} = api_client.OpenApiResponse( + response_cls=ApiResponseFor{{code}}, +{{/if}} +{{#each content}} +{{#if @first}} + content={ +{{/if}} + '{{@key}}': api_client.MediaType( + schema={{this.schema.baseName}}), 
+{{#if @last}} + }, +{{/if}} +{{/each}} +{{#if responseHeaders}} + headers=[ +{{#each responseHeaders}} + {{paramName}}_parameter, +{{/each}} + ] +{{/if}} +) +{{/each}} +_status_code_to_response = { +{{#each responses}} +{{#if isDefault}} + 'default': _response_for_default, +{{else}} + '{{code}}': _response_for_{{code}}, +{{/if}} +{{/each}} +} +{{#each produces}} +{{#if @first}} +_all_accept_content_types = ( +{{/if}} + '{{this.mediaType}}', +{{#if @last}} +) +{{/if}} +{{/each}} + + +class {{operationIdCamelCase}}(api_client.Api): + + def {{operationId}}( + self: api_client.Api, + {{#if bodyParam}} + {{#with bodyParam}} + {{baseName}}: typing.Union[{{#each content}}{{#unless @first}}, {{/unless}}{{this.schema.baseName}}{{/each}}{{#unless required}}, Unset] = unset{{else}}]{{/unless}}, + {{/with}} + {{/if}} + {{#if queryParams}} + query_params: RequestQueryParams = frozendict(), + {{/if}} + {{#if headerParams}} + header_params: RequestHeaderParams = frozendict(), + {{/if}} + {{#if pathParams}} + path_params: RequestPathParams = frozendict(), + {{/if}} + {{#if cookieParams}} + cookie_params: RequestCookieParams = frozendict(), + {{/if}} + {{#with bodyParam}} + {{#each content}} + {{#if @first}} + content_type: str = '{{@key}}', + {{/if}} + {{/each}} + {{/with}} + {{#if produces}} + accept_content_types: typing.Tuple[str] = _all_accept_content_types, + {{/if}} + {{#if servers}} + host_index: typing.Optional[int] = None, + {{/if}} + stream: bool = False, + timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None, + skip_deserialization: bool = False, + ) -> typing.Union[ + {{#each responses}} + {{#if isDefault}} + ApiResponseForDefault, + {{else}} + {{#if is2xx}} + ApiResponseFor{{code}}, + {{/if}} + {{/if}} + {{/each}} + api_client.ApiResponseWithoutDeserialization + ]: + """ + {{#if summary}} + {{summary}} + {{/if}} + :param skip_deserialization: If true then api_response.response will be set but + api_response.body and api_response.headers will not be deserialized into schema + class instances + """ + {{#if queryParams}} + self._verify_typed_dict_inputs(RequestQueryParams, query_params) + {{/if}} + {{#if headerParams}} + self._verify_typed_dict_inputs(RequestHeaderParams, header_params) + {{/if}} + {{#if pathParams}} + self._verify_typed_dict_inputs(RequestPathParams, path_params) + {{/if}} + {{#if cookieParams}} + self._verify_typed_dict_inputs(RequestCookieParams, cookie_params) + {{/if}} + {{#if pathParams}} + + _path_params = {} + for parameter in ( + {{#each pathParams}} + request_path_{{paramName}}, + {{/each}} + ): + parameter_data = path_params.get(parameter.name, unset) + if parameter_data is unset: + continue + serialized_data = parameter.serialize(parameter_data) + _path_params.update(serialized_data) + {{/if}} + {{#if queryParams}} + + _query_params = [] + for parameter in ( + {{#each queryParams}} + request_query_{{paramName}}, + {{/each}} + ): + parameter_data = query_params.get(parameter.name, unset) + if parameter_data is unset: + continue + serialized_data = parameter.serialize(parameter_data) + _query_params.extend(serialized_data) + {{/if}} + {{#or headerParams bodyParam produces}} + + _headers = HTTPHeaderDict() + {{else}} + {{/or}} + {{#if headerParams}} + for parameter in ( + {{#each headerParams}} + request_header_{{paramName}}, + {{/each}} + ): + parameter_data = header_params.get(parameter.name, unset) + if parameter_data is unset: + continue + serialized_data = parameter.serialize(parameter_data) + _headers.extend(serialized_data) + {{/if}} + # TODO add 
cookie handling + {{#if produces}} + if accept_content_types: + for accept_content_type in accept_content_types: + _headers.add('Accept', accept_content_type) + {{/if}} + {{#with bodyParam}} + + {{#if required}} + if body is unset: + raise exceptions.ApiValueError( + 'The required body parameter has an invalid value of: unset. Set a valid value instead') + {{/if}} + _fields = None + _body = None + {{#if required}} + {{> endpoint_body_serialization }} + {{else}} + if body is not unset: + {{> endpoint_body_serialization }} + {{/if}} + {{/with}} + {{#if servers}} + + host = self.get_host('{{operationId}}', _servers, host_index) + {{/if}} + + response = self.api_client.call_api( + resource_path=_path, + method=_method, + {{#if pathParams}} + path_params=_path_params, + {{/if}} + {{#if queryParams}} + query_params=tuple(_query_params), + {{/if}} + {{#or headerParams bodyParam produces}} + headers=_headers, + {{/or}} + {{#if bodyParam}} + fields=_fields, + body=_body, + {{/if}} + {{#if hasAuthMethods}} + auth_settings=_auth, + {{/if}} + {{#if servers}} + host=host, + {{/if}} + stream=stream, + timeout=timeout, + ) + + if skip_deserialization: + api_response = api_client.ApiResponseWithoutDeserialization(response=response) + else: + response_for_status = _status_code_to_response.get(str(response.status)) + if response_for_status: + api_response = response_for_status.deserialize(response, self.api_client.configuration) + else: + {{#if hasDefaultResponse}} + default_response = _status_code_to_response.get('default') + if default_response: + api_response = default_response.deserialize(response, self.api_client.configuration) + else: + api_response = api_client.ApiResponseWithoutDeserialization(response=response) + {{else}} + api_response = api_client.ApiResponseWithoutDeserialization(response=response) + {{/if}} + + if not 200 <= response.status <= 299: + raise exceptions.ApiException(api_response=api_response) + + return api_response +{{/with}} diff --git a/openapi/python_explerimental_client_template/endpoint_body_serialization.handlebars b/openapi/python_explerimental_client_template/endpoint_body_serialization.handlebars new file mode 100644 index 00000000..f00d9f05 --- /dev/null +++ b/openapi/python_explerimental_client_template/endpoint_body_serialization.handlebars @@ -0,0 +1,6 @@ +serialized_data = request_body_{{paramName}}.serialize(body, content_type) +_headers.add('Content-Type', content_type) +if 'fields' in serialized_data: + _fields = serialized_data['fields'] +elif 'body' in serialized_data: + _body = serialized_data['body'] \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/endpoint_parameter.handlebars b/openapi/python_explerimental_client_template/endpoint_parameter.handlebars new file mode 100644 index 00000000..4b9d815a --- /dev/null +++ b/openapi/python_explerimental_client_template/endpoint_parameter.handlebars @@ -0,0 +1,17 @@ +request_{{#if isQueryParam}}query{{/if}}{{#if isPathParam}}path{{/if}}{{#if isHeaderParam}}header{{/if}}{{#if isCookieParam}}cookie{{/if}}_{{paramName}} = api_client.{{#if isQueryParam}}Query{{/if}}{{#if isPathParam}}Path{{/if}}{{#if isHeaderParam}}Header{{/if}}{{#if isCookieParam}}Cookie{{/if}}Parameter( + name="{{baseName}}", +{{#if style}} + style=api_client.ParameterStyle.{{style}}, +{{/if}} +{{#if schema}} +{{#with schema}} + schema={{baseName}}, +{{/with}} +{{/if}} +{{#if required}} + required=True, +{{/if}} +{{#if isExplode}} + explode=True, +{{/if}} +) diff --git 
a/openapi/python_explerimental_client_template/exceptions.handlebars b/openapi/python_explerimental_client_template/exceptions.handlebars new file mode 100644 index 00000000..fa5f7534 --- /dev/null +++ b/openapi/python_explerimental_client_template/exceptions.handlebars @@ -0,0 +1,129 @@ +# coding: utf-8 + +{{>partial_header}} + + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None): + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys and indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiAttributeError(OpenApiException, AttributeError): + def __init__(self, msg, path_to_item=None): + """ + Raised when an attribute reference or assignment fails.
+ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiAttributeError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__(self, status=None, reason=None, api_response: '{{packageName}}.api_client.ApiResponse' = None): + if api_response: + self.status = api_response.response.status + self.reason = api_response.response.reason + self.body = api_response.response.data + self.headers = api_response.response.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/openapi/python_explerimental_client_template/git_push.sh.handlebars b/openapi/python_explerimental_client_template/git_push.sh.handlebars new file mode 100644 index 00000000..8b3f689c --- /dev/null +++ b/openapi/python_explerimental_client_template/git_push.sh.handlebars @@ -0,0 +1,58 @@ +#!/bin/sh +# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ +# +# Usage example: /bin/sh ./git_push.sh wing328 openapi-pestore-perl "minor update" "gitlab.com" + +git_user_id=$1 +git_repo_id=$2 +release_note=$3 +git_host=$4 + +if [ "$git_host" = "" ]; then + git_host="{{{gitHost}}}" + echo "[INFO] No command line input provided. Set \$git_host to $git_host" +fi + +if [ "$git_user_id" = "" ]; then + git_user_id="{{{gitUserId}}}" + echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="{{{gitRepoId}}}" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="{{{releaseNote}}}" + echo "[INFO] No command line input provided. Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository. +git commit -m "$release_note" + +# Sets the new remote +git_remote=`git remote` +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." 
+ git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' + diff --git a/openapi/python_explerimental_client_template/gitignore.handlebars b/openapi/python_explerimental_client_template/gitignore.handlebars new file mode 100644 index 00000000..a62e8aba --- /dev/null +++ b/openapi/python_explerimental_client_template/gitignore.handlebars @@ -0,0 +1,67 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt +dev-requirements.txt.log + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/openapi/python_explerimental_client_template/gitlab-ci.handlebars b/openapi/python_explerimental_client_template/gitlab-ci.handlebars new file mode 100644 index 00000000..0cfe8f74 --- /dev/null +++ b/openapi/python_explerimental_client_template/gitlab-ci.handlebars @@ -0,0 +1,29 @@ +# ref: https://docs.gitlab.com/ee/ci/README.html + +stages: + - test + +.tests: + stage: test + script: + - pip install -r requirements.txt + - pip install -r test-requirements.txt + {{#if useNose}} + - nosetests + {{/if}} + {{#unless useNose}} + - pytest --cov={{{packageName}}} + {{/unless}} + +test-3.5: + extends: .tests + image: python:3.5-alpine +test-3.6: + extends: .tests + image: python:3.6-alpine +test-3.7: + extends: .tests + image: python:3.7-alpine +test-3.8: + extends: .tests + image: python:3.8-alpine diff --git a/openapi/python_explerimental_client_template/model.handlebars b/openapi/python_explerimental_client_template/model.handlebars new file mode 100644 index 00000000..d8c1e63a --- /dev/null +++ b/openapi/python_explerimental_client_template/model.handlebars @@ -0,0 +1,17 @@ +# coding: utf-8 + +{{>partial_header}} + +import re # noqa: F401 +import sys # noqa: F401 +import typing # noqa: F401 + +from frozendict import frozendict # noqa: F401 + +{{#each models}} +{{#with model}} +{{> model_templates/imports_schema_types }} +{{> model_templates/schema }} +{{> model_templates/imports_schemas }} +{{/with}} +{{/each}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/model_doc.handlebars b/openapi/python_explerimental_client_template/model_doc.handlebars new file mode 100644 index 00000000..2c4136a1 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_doc.handlebars @@ -0,0 +1,9 @@ +{{#each models}} +{{#with model}} +# {{classname}} +{{> schema_doc }} +{{/with}} +{{/each}} + +[[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + diff --git a/openapi/python_explerimental_client_template/model_templates/composed_schemas.handlebars b/openapi/python_explerimental_client_template/model_templates/composed_schemas.handlebars new file mode 100644 index 00000000..6aef3311 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/composed_schemas.handlebars @@ -0,0 +1,86 @@ +@classmethod +@property +def _composed_schemas(cls): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading +{{#with composedSchemas}} +{{#each allOf}} +{{#unless complexType}} +{{#unless isAnyType}} + {{> model_templates/schema }} +{{/unless}} +{{/unless}} +{{#if isAnyType}} + {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}} = AnyTypeSchema +{{/if}} +{{/each}} +{{#each oneOf}} +{{#unless complexType}} +{{#unless isAnyType}} + {{> model_templates/schema }} +{{/unless}} +{{/unless}} +{{#if isAnyType}} + {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}} = AnyTypeSchema +{{/if}} +{{/each}} +{{#each anyOf}} +{{#unless complexType}} +{{#unless isAnyType}} + {{> model_templates/schema }} +{{/unless}} +{{/unless}} +{{#if isAnyType}} + {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}} = AnyTypeSchema +{{/if}} +{{/each}} +{{/with}} + return { + 'allOf': [ +{{#with composedSchemas}} +{{#each allOf}} +{{#if complexType}} + {{complexType}}, +{{/if}} +{{#unless complexType}} + {{#if nameInSnakeCase}}{{name}}{{/if}}{{#unless nameInSnakeCase}}{{baseName}}{{/unless}}, +{{/unless}} +{{/each}} + ], + 'oneOf': [ +{{#each oneOf}} +{{#if complexType}} + {{complexType}}, +{{/if}} +{{#unless complexType}} +{{#if isAnyType}} + AnyTypeSchema, +{{/if}} +{{#unless isAnyType}} + {{#if nameInSnakeCase}}{{name}}{{/if}}{{#unless nameInSnakeCase}}{{baseName}}{{/unless}}, +{{/unless}} +{{/unless}} +{{/each}} + ], + 'anyOf': [ +{{#each anyOf}} +{{#if complexType}} + {{complexType}}, +{{/if}} +{{#unless complexType}} +{{#if isAnyType}} + AnyTypeSchema, +{{/if}} +{{#unless isAnyType}} + {{#if nameInSnakeCase}}{{name}}{{/if}}{{#unless nameInSnakeCase}}{{baseName}}{{/unless}}, +{{/unless}} +{{/unless}} +{{/each}} +{{/with}} + ], + } diff --git a/openapi/python_explerimental_client_template/model_templates/dict_partial.handlebars b/openapi/python_explerimental_client_template/model_templates/dict_partial.handlebars new file mode 100644 index 00000000..15338b38 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/dict_partial.handlebars @@ -0,0 +1,54 @@ +{{#if getHasRequired}} + _required_property_names = set(( + {{#each requiredVars}} + '{{baseName}}', + {{/each}} + )) +{{/if}} +{{#each vars}} +{{#if complexType}} + + @classmethod + @property + def {{baseName}}(cls) -> typing.Type['{{complexType}}']: + return {{complexType}} +{{else}} + {{> model_templates/schema }} +{{/if}} +{{/each}} +{{#if getHasDiscriminatorWithNonEmptyMapping}} +{{#with discriminator}} +{{#each mappedModels}} +{{#if 
@first}} + + @classmethod + @property + def _discriminator(cls): + return { + '{{{propertyBaseName}}}': { +{{/if}} + '{{mappingName}}': {{{modelName}}}, +{{#if @last}} + } + } +{{/if}} +{{/each}} +{{/with}} +{{/if}} +{{#with additionalProperties}} +{{#if complexType}} + + @classmethod + @property + def _additional_properties(cls) -> typing.Type['{{complexType}}']: + return {{complexType}} +{{/if}} +{{#unless complexType}} +{{#unless isAnyType}} + {{> model_templates/schema }} +{{/unless}} +{{/unless}} +{{/with}} +{{#unless additionalProperties}} + _additional_properties = None +{{/unless}} diff --git a/openapi/python_explerimental_client_template/model_templates/enum_value_to_name.handlebars b/openapi/python_explerimental_client_template/model_templates/enum_value_to_name.handlebars new file mode 100644 index 00000000..ea2cb759 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/enum_value_to_name.handlebars @@ -0,0 +1,12 @@ +_SchemaEnumMaker( + enum_value_to_name={ +{{#if isNull}} + None: "NONE", +{{/if}} +{{#with allowableValues}} +{{#each enumVars}} + {{{value}}}: "{{name}}", +{{/each}} +{{/with}} + } +), diff --git a/openapi/python_explerimental_client_template/model_templates/enums.handlebars b/openapi/python_explerimental_client_template/model_templates/enums.handlebars new file mode 100644 index 00000000..5cbc68dc --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/enums.handlebars @@ -0,0 +1,16 @@ +{{#if isNull}} + +@classmethod +@property +def NONE(cls): + return cls._enum_by_value[None](None) +{{/if}} +{{#with allowableValues}} +{{#each enumVars}} + +@classmethod +@property +def {{name}}(cls): + return cls._enum_by_value[{{{value}}}]({{{value}}}) +{{/each}} +{{/with}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/model_templates/imports_schema_types.handlebars b/openapi/python_explerimental_client_template/model_templates/imports_schema_types.handlebars new file mode 100644 index 00000000..38015e35 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/imports_schema_types.handlebars @@ -0,0 +1,42 @@ +import decimal # noqa: F401 +from datetime import date, datetime # noqa: F401 +from frozendict import frozendict # noqa: F401 + +from {{packageName}}.schemas import ( # noqa: F401 + AnyTypeSchema, + ComposedSchema, + DictSchema, + ListSchema, + StrSchema, + IntSchema, + Int32Schema, + Int64Schema, + Float32Schema, + Float64Schema, + NumberSchema, + DateSchema, + DateTimeSchema, + DecimalSchema, + BoolSchema, + BinarySchema, + NoneSchema, + none_type, + InstantiationMetadata, + Unset, + unset, + ComposedBase, + ListBase, + DictBase, + NoneBase, + StrBase, + IntBase, + NumberBase, + DateBase, + DateTimeBase, + BoolBase, + BinaryBase, + Schema, + _SchemaValidator, + _SchemaTypeChecker, + _SchemaEnumMaker +) diff --git a/openapi/python_explerimental_client_template/model_templates/imports_schemas.handlebars b/openapi/python_explerimental_client_template/model_templates/imports_schemas.handlebars new file mode 100644 index 00000000..b925cf92 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/imports_schemas.handlebars @@ -0,0 +1,6 @@ +{{#each imports}} +{{#if @first}} + +{{/if}} +{{{.}}} +{{/each}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/model_templates/new.handlebars b/openapi/python_explerimental_client_template/model_templates/new.handlebars new file mode 100644 index 00000000..e74461e8 
--- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/new.handlebars @@ -0,0 +1,53 @@ +def __new__( + cls, + *args: typing.Union[{{#if isAnyType}}dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes{{/if}}{{#if isUnboundedInteger}}int, {{/if}}{{#if isNumber}}float, {{/if}}{{#if isBoolean}}bool, {{/if}}{{#if isArray}}list, tuple, {{/if}}{{#if isMap}}dict, frozendict, {{/if}}{{#if isString}}str, {{/if}}{{#if isNull}}None, {{/if}}], +{{#unless isNull}} +{{#if getHasRequired}} +{{#each requiredVars}} +{{#unless nameInSnakeCase}} + {{baseName}}: {{baseName}}, +{{/unless}} +{{/each}} +{{/if}} +{{/unless}} +{{#each vars}} +{{#unless nameInSnakeCase}} +{{#unless getRequired}} +{{#unless complexType}} + {{baseName}}: typing.Union[{{baseName}}, Unset] = unset, +{{/unless}} +{{#if complexType}} + {{baseName}}: typing.Union['{{complexType}}', Unset] = unset, +{{/if}} +{{/unless}} +{{/unless}} +{{/each}} + _instantiation_metadata: typing.Optional[InstantiationMetadata] = None, +{{#with additionalProperties}} + **kwargs: typing.Type[Schema], +{{/with}} +) -> '{{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}}': + return super().__new__( + cls, + *args, +{{#unless isNull}} +{{#if getHasRequired}} +{{#each requiredVars}} +{{#unless nameInSnakeCase}} + {{baseName}}={{baseName}}, +{{/unless}} +{{/each}} +{{/if}} +{{/unless}} +{{#each vars}} +{{#unless getRequired}} +{{#unless nameInSnakeCase}} + {{baseName}}={{baseName}}, +{{/unless}} +{{/unless}} +{{/each}} + _instantiation_metadata=_instantiation_metadata, +{{#with additionalProperties}} + **kwargs, +{{/with}} + ) diff --git a/openapi/python_explerimental_client_template/model_templates/schema.handlebars b/openapi/python_explerimental_client_template/model_templates/schema.handlebars new file mode 100644 index 00000000..7523d39f --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/schema.handlebars @@ -0,0 +1,46 @@ +{{#if composedSchemas}} +{{> model_templates/schema_composed_or_anytype }} +{{/if}} +{{#unless composedSchemas}} + {{#if getHasMultipleTypes}} +{{> model_templates/schema_composed_or_anytype }} + {{else}} + {{#or isMap isArray isAnyType}} + {{#if isMap}} + {{#or hasVars hasValidation hasRequiredVars getHasDiscriminatorWithNonEmptyMapping}} +{{> model_templates/schema_dict }} + {{else}} + {{#if additionalPropertiesIsAnyType}} +{{> model_templates/var_equals_cls }} + {{else}} +{{> model_templates/schema_dict }} + {{/if}} + {{/or}} + {{/if}} + {{#if isArray}} + {{#or hasItems hasValidation}} +{{> model_templates/schema_list }} + {{else}} +{{> model_templates/var_equals_cls }} + {{/or}} + {{/if}} + {{#if isAnyType}} + {{#or isEnum hasVars hasValidation hasRequiredVars getHasDiscriminatorWithNonEmptyMapping items}} +{{> model_templates/schema_composed_or_anytype }} + {{else}} +{{> model_templates/var_equals_cls }} + {{/or}} + {{/if}} + {{else}} + {{#or isEnum hasValidation}} +{{> model_templates/schema_simple }} + {{else}} +{{> model_templates/var_equals_cls }} + {{/or}} + {{/or}} + {{#if nameInSnakeCase}} +locals()['{{baseName}}'] = {{name}} +del locals()['{{name}}'] + {{/if}} + {{/if}} +{{/unless}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/model_templates/schema_composed_or_anytype.handlebars b/openapi/python_explerimental_client_template/model_templates/schema_composed_or_anytype.handlebars new file mode 100644 index 00000000..763da96f --- /dev/null 
+++ b/openapi/python_explerimental_client_template/model_templates/schema_composed_or_anytype.handlebars @@ -0,0 +1,48 @@ + + +class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}}( +{{#if hasValidation}} + {{> model_templates/validations }} +{{/if}} +{{#if getIsAnyType}} + {{#if composedSchemas}} + ComposedSchema + {{else}} + AnyTypeSchema + {{/if}} +{{else}} + {{#if getHasMultipleTypes}} + _SchemaTypeChecker(typing.Union[{{#if isArray}}tuple, {{/if}}{{#if isMap}}frozendict, {{/if}}{{#if isNull}}none_type, {{/if}}{{#if isString}}str, {{/if}}{{#if isByteArray}}str, {{/if}}{{#if isUnboundedInteger}}decimal.Decimal, {{/if}}{{#if isShort}}decimal.Decimal, {{/if}}{{#if isLong}}decimal.Decimal, {{/if}}{{#if isFloat}}decimal.Decimal, {{/if}}{{#if isDouble}}decimal.Decimal, {{/if}}{{#if isNumber}}decimal.Decimal, {{/if}}{{#if isDate}}str, {{/if}}{{#if isDateTime}}str, {{/if}}{{#if isDecimal}}str, {{/if}}{{#if isBoolean}}bool, {{/if}}]), + {{/if}} + {{#if composedSchemas}} + ComposedBase, + {{/if}} + {{#if isEnum}} + {{> model_templates/enum_value_to_name }} + {{/if}} + {{> model_templates/xbase_schema }} +{{/if}} +): +{{#if this.classname}} + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. +{{#if description}} + + {{{unescapedDescription}}} +{{/if}} + """ +{{/if}} +{{#or isMap isAnyType}} +{{> model_templates/dict_partial }} +{{/or}} +{{#if composedSchemas}} + + {{> model_templates/composed_schemas }} +{{/if}} +{{#if isEnum}} + {{> model_templates/enums }} +{{/if}} + + {{> model_templates/new }} diff --git a/openapi/python_explerimental_client_template/model_templates/schema_dict.handlebars b/openapi/python_explerimental_client_template/model_templates/schema_dict.handlebars new file mode 100644 index 00000000..c17e0f57 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/schema_dict.handlebars @@ -0,0 +1,23 @@ + + +class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}}( +{{#if hasValidation}} + {{> model_templates/validations }} +{{/if}} + DictSchema +): +{{#if this.classname}} + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. +{{#if description}} + + {{{unescapedDescription}}} +{{/if}} + """ +{{/if}} +{{> model_templates/dict_partial }} + + + {{> model_templates/new }} diff --git a/openapi/python_explerimental_client_template/model_templates/schema_list.handlebars b/openapi/python_explerimental_client_template/model_templates/schema_list.handlebars new file mode 100644 index 00000000..542b486a --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/schema_list.handlebars @@ -0,0 +1,30 @@ + + +class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}}( +{{#if hasValidation}} + {{> model_templates/validations }} +{{/if}} + ListSchema +): +{{#if this.classname}} + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+{{#if description}} + + {{{unescapedDescription}}} +{{/if}} + """ +{{/if}} +{{#with items}} +{{#if complexType}} + + @classmethod + @property + def _items(cls) -> typing.Type['{{complexType}}']: + return {{complexType}} +{{else}} + {{> model_templates/schema }} +{{/if}} +{{/with}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/model_templates/schema_simple.handlebars b/openapi/python_explerimental_client_template/model_templates/schema_simple.handlebars new file mode 100644 index 00000000..ea12a00f --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/schema_simple.handlebars @@ -0,0 +1,27 @@ + + +class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}}( +{{#if hasValidation}} + {{> model_templates/validations }} +{{/if}} +{{#if isEnum}} + {{> model_templates/enum_value_to_name }} +{{/if}} + {{> model_templates/xbase_schema }} +): +{{#if this.classname}} + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. +{{#if description}} + + {{{unescapedDescription}}} +{{/if}} + """ +{{/if}} +{{#if isEnum}} + {{> model_templates/enums }} +{{else}} + pass +{{/if}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/model_templates/validations.handlebars b/openapi/python_explerimental_client_template/model_templates/validations.handlebars new file mode 100644 index 00000000..2384ac06 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/validations.handlebars @@ -0,0 +1,50 @@ +_SchemaValidator( +{{#if getUniqueItems}} + unique_items=True, +{{/if}} +{{#if maxLength}} + max_length={{maxLength}}, +{{/if}} +{{#if minLength}} + min_length={{minLength}}, +{{/if}} +{{#if maxItems}} + max_items={{maxItems}}, +{{/if}} +{{#if minItems}} + min_items={{minItems}}, +{{/if}} +{{#if maxProperties}} + max_properties={{maxProperties}}, +{{/if}} +{{#if minProperties}} + min_properties={{minProperties}}, +{{/if}} +{{#if maximum}} + {{#if exclusiveMaximum}}exclusive_maximum{{/if}}inclusive_maximum{{#unless exclusiveMaximum}}{{/unless}}={{maximum}}, +{{/if}} +{{#if minimum}} + {{#if exclusiveMinimum}}exclusive_minimum{{/if}}inclusive_minimum{{#unless exclusiveMinimum}}{{/unless}}={{minimum}}, +{{/if}} +{{#if pattern}} + regex=[{ +{{#if vendorExtensions.x-regex}} + 'pattern': r'{{{vendorExtensions.x-regex}}}', # noqa: E501 +{{else}} + 'pattern': r'{{{pattern}}}', # noqa: E501 +{{/if}} +{{#each vendorExtensions.x-modifiers}} +{{#if @first}} + 'flags': ( +{{/if}} + {{#unless @first}}| {{/unless}}re.{{.}} +{{#if @last}} + ) +{{/if}} +{{/each}} + }], +{{/if}} +{{#if multipleOf}} + multiple_of=[{{multipleOf}}], +{{/if}} +), diff --git a/openapi/python_explerimental_client_template/model_templates/var_equals_cls.handlebars b/openapi/python_explerimental_client_template/model_templates/var_equals_cls.handlebars new file mode 100644 index 00000000..24f52cac --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/var_equals_cls.handlebars @@ -0,0 +1 @@ +{{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}{{else}}{{baseName}}{{/if}}{{/if}} = {{#if complexType}}{{complexType}}{{else}}{{#if isNullable}}Nullable{{/if}}{{#if getIsNull}}None{{/if}}{{#if isAnyType}}AnyType{{/if}}{{#if isMap}}Dict{{/if}}{{#if isArray}}List{{/if}}{{#if isString}}Str{{/if}}{{#if isByteArray}}Str{{/if}}{{#if isDate}}Date{{/if}}{{#if 
isDateTime}}DateTime{{/if}}{{#if isDecimal}}Decimal{{/if}}{{#if isUnboundedInteger}}Int{{/if}}{{#if isShort}}Int32{{/if}}{{#if isLong}}Int64{{/if}}{{#if isFloat}}Float32{{/if}}{{#if isDouble}}Float64{{/if}}{{#if isNumber}}Number{{/if}}{{#if isBoolean}}Bool{{/if}}{{#if isBinary}}Binary{{/if}}Schema{{/if}} diff --git a/openapi/python_explerimental_client_template/model_templates/xbase_schema.handlebars b/openapi/python_explerimental_client_template/model_templates/xbase_schema.handlebars new file mode 100644 index 00000000..87fe3d0b --- /dev/null +++ b/openapi/python_explerimental_client_template/model_templates/xbase_schema.handlebars @@ -0,0 +1,51 @@ +{{#if isArray}} +List{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isMap}} +Dict{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isString}} +Str{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isByteArray}} +Str{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isUnboundedInteger}} +Int{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isNumber}} +Number{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#isShort}} +Int32{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/isShort}} +{{#isLong}} +Int64{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/isLong}} +{{#isFloat}} +Float32{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/isFloat}} +{{#isDouble}} +Float64{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/isDouble}} +{{#if isDate}} +Date{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isDateTime}} +DateTime{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isDecimal}} +Decimal{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isBoolean}} +Bool{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isBinary}} +Binary{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if isNull}} +None{{#if getHasMultipleTypes}}Base,{{else}}Schema{{/if}} +{{/if}} +{{#if getHasMultipleTypes}} +Schema +{{/if}} diff --git a/openapi/python_explerimental_client_template/model_test.handlebars b/openapi/python_explerimental_client_template/model_test.handlebars new file mode 100644 index 00000000..48a4a7c8 --- /dev/null +++ b/openapi/python_explerimental_client_template/model_test.handlebars @@ -0,0 +1,32 @@ +# coding: utf-8 + +{{>partial_header}} + +import unittest + +import {{packageName}} +{{#each models}} +{{#with model}} +from {{packageName}}.{{modelPackage}}.{{classFilename}} import {{classname}} + + +class Test{{classname}}(unittest.TestCase): + """{{classname}} unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def test_{{classname}}(self): + """Test {{classname}}""" + # FIXME: construct object with mandatory attributes with example values + # model = {{classname}}() # noqa: E501 + pass + +{{/with}} +{{/each}} + +if __name__ == '__main__': + unittest.main() diff --git a/openapi/python_explerimental_client_template/partial_header.handlebars b/openapi/python_explerimental_client_template/partial_header.handlebars new file mode 100644 index 00000000..19b633be --- /dev/null +++ b/openapi/python_explerimental_client_template/partial_header.handlebars @@ -0,0 +1,17 @@ +""" +{{#if appName}} + {{{appName}}} +{{/if}} + +{{#if appDescription}} + {{{appDescription}}} # noqa: E501 +{{/if}} + + {{#if version}} + The version of the OpenAPI document: {{{version}}} + {{/if}} + {{#if infoEmail}} + Contact: 
{{{infoEmail}}} + {{/if}} + Generated by: https://openapi-generator.tech +""" diff --git a/openapi/python_explerimental_client_template/requirements.handlebars b/openapi/python_explerimental_client_template/requirements.handlebars new file mode 100644 index 00000000..c9227e58 --- /dev/null +++ b/openapi/python_explerimental_client_template/requirements.handlebars @@ -0,0 +1,5 @@ +certifi >= 14.05.14 +frozendict >= 2.0.3 +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 >= 1.15.1 diff --git a/openapi/python_explerimental_client_template/rest.handlebars b/openapi/python_explerimental_client_template/rest.handlebars new file mode 100644 index 00000000..e3083193 --- /dev/null +++ b/openapi/python_explerimental_client_template/rest.handlebars @@ -0,0 +1,251 @@ +# coding: utf-8 + +{{>partial_header}} + +import logging +import ssl +from urllib.parse import urlencode +import typing + +import certifi +import urllib3 +from urllib3._collections import HTTPHeaderDict + +from {{packageName}}.exceptions import ApiException, ApiValueError + + +logger = logging.getLogger(__name__) + + +class RESTClientObject(object): + + def __init__(self, configuration, pools_size=4, maxsize=None): + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + # ca_certs + if configuration.ssl_ca_cert: + ca_certs = configuration.ssl_ca_cert + else: + # if not set certificate file, use Mozilla's root certificates. 
+ ca_certs = certifi.where() + + addition_pool_args = {} + if configuration.assert_hostname is not None: + addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + + if configuration.retries is not None: + addition_pool_args['retries'] = configuration.retries + + if configuration.socket_options is not None: + addition_pool_args['socket_options'] = configuration.socket_options + + if maxsize is None: + if configuration.connection_pool_maxsize is not None: + maxsize = configuration.connection_pool_maxsize + else: + maxsize = 4 + + # https pool manager + if configuration.proxy: + self.pool_manager = urllib3.ProxyManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + proxy_url=configuration.proxy, + proxy_headers=configuration.proxy_headers, + **addition_pool_args + ) + else: + self.pool_manager = urllib3.PoolManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + **addition_pool_args + ) + + def request( + self, + method: str, + url: str, + query_params: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None, + headers: typing.Optional[HTTPHeaderDict] = None, + fields: typing.Optional[typing.Tuple[typing.Tuple[str, typing.Any], ...]] = None, + body: typing.Optional[typing.Union[str, bytes]] = None, + stream: bool = False, + timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None, + ) -> urllib3.HTTPResponse: + """Perform requests. + + :param method: http request method + :param url: http request url + :param query_params: query parameters in the url + :param headers: http request headers + :param body: request body, for other types + :param fields: request parameters for + `application/x-www-form-urlencoded` + or `multipart/form-data` + :param stream: if True, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is False. + :param timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', + 'PATCH', 'OPTIONS'] + + if fields and body: + raise ApiValueError( + "body parameter cannot be used with fields parameter." + ) + + fields = fields or {} + headers = headers or {} + + if timeout: + if isinstance(timeout, (int, float)): # noqa: E501,F821 + timeout = urllib3.Timeout(total=timeout) + elif (isinstance(timeout, tuple) and + len(timeout) == 2): + timeout = urllib3.Timeout(connect=timeout[0], read=timeout[1]) + + if 'Content-Type' not in headers: + headers['Content-Type'] = 'application/json' + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if query_params: + url += '?' + urlencode(query_params) + if headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + r = self.pool_manager.request( + method, url, + fields=fields, + encode_multipart=False, + preload_content=not stream, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. 
+ del headers['Content-Type'] + r = self.pool_manager.request( + method, url, + fields=fields, + encode_multipart=True, + preload_content=not stream, + timeout=timeout, + headers=headers) + # Pass a `string` parameter directly in the body to support + # other content types than Json when `body` argument is + # provided in serialized form + elif isinstance(body, str) or isinstance(body, bytes): + request_body = body + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=not stream, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request(method, url, + fields=query_params, + preload_content=not stream, + timeout=timeout, + headers=headers) + except urllib3.exceptions.SSLError as e: + msg = "{0}\n{1}".format(type(e).__name__, str(e)) + raise ApiException(status=0, reason=msg) + + if not stream: + # log response body + logger.debug("response body: %s", r.data) + + return r + + def GET(self, url, headers=None, query_params=None, stream=False, + timeout=None, fields=None) -> urllib3.HTTPResponse: + return self.request("GET", url, + headers=headers, + stream=stream, + timeout=timeout, + query_params=query_params, fields=fields) + + def HEAD(self, url, headers=None, query_params=None, stream=False, + timeout=None, fields=None) -> urllib3.HTTPResponse: + return self.request("HEAD", url, + headers=headers, + stream=stream, + timeout=timeout, + query_params=query_params, fields=fields) + + def OPTIONS(self, url, headers=None, query_params=None, + body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse: + return self.request("OPTIONS", url, + headers=headers, + query_params=query_params, + stream=stream, + timeout=timeout, + body=body, fields=fields) + + def DELETE(self, url, headers=None, query_params=None, body=None, + stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse: + return self.request("DELETE", url, + headers=headers, + query_params=query_params, + stream=stream, + timeout=timeout, + body=body, fields=fields) + + def POST(self, url, headers=None, query_params=None, + body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse: + return self.request("POST", url, + headers=headers, + query_params=query_params, + stream=stream, + timeout=timeout, + body=body, fields=fields) + + def PUT(self, url, headers=None, query_params=None, + body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse: + return self.request("PUT", url, + headers=headers, + query_params=query_params, + stream=stream, + timeout=timeout, + body=body, fields=fields) + + def PATCH(self, url, headers=None, query_params=None, + body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse: + return self.request("PATCH", url, + headers=headers, + query_params=query_params, + stream=stream, + timeout=timeout, + body=body, fields=fields) diff --git a/openapi/python_explerimental_client_template/schema_doc.handlebars b/openapi/python_explerimental_client_template/schema_doc.handlebars new file mode 100644 index 00000000..556d2cbb --- /dev/null +++ b/openapi/python_explerimental_client_template/schema_doc.handlebars @@ -0,0 +1,32 @@ + +{{#if description}} +{{&description}} + +{{/if}} +{{#or vars additionalProperties}} +#### Properties +Name | 
Type | Description | Notes +------------ | ------------- | ------------- | ------------- + {{#each vars}} +**{{baseName}}** | {{#unless complexType}}**{{dataType}}**{{/unless}}{{#if complexType}}[**{{dataType}}**]({{complexType}}.md){{/if}} | {{description}} | {{#unless required}}[optional] {{/unless}}{{#if isReadOnly}}[readonly] {{/if}}{{#if defaultValue}} if omitted the server will use the default value of {{{defaultValue}}}{{/if}} + {{/each}} + {{#with additionalProperties}} +**any string name** | **{{dataType}}** | any string name can be used but the value must be the correct type | [optional] + {{/with}} +{{else}} +Type | Description | Notes +------------- | ------------- | ------------- + {{#if isAnyType}} +typing.Union[dict, frozendict, str, date, datetime, int, float, bool, Decimal, None, list, tuple, bytes] | | + {{else}} + {{#if hasMultipleTypes}} +typing.Union[{{#if isMap}}dict, frozendict, {{/if}}{{#if isString}}str, {{/if}}{{#if isDate}}date, {{/if}}{{#if isDataTime}}datetime, {{/if}}{{#or isLong isShort isUnboundedInteger}}int, {{/or}}{{#or isFloat isDouble}}float, {{/or}}{{#if isNumber}}Decimal, {{/if}}{{#if isBoolean}}bool, {{/if}}{{#if isNull}}None, {{/if}}{{#if isArray}}list, tuple, {{/if}}{{#if isBinary}}bytes{{/if}}] | | {{#with allowableValues}}{{#if defaultValue}}, {{/if}} must be one of [{{#each enumVars}}{{{value}}}, {{/each}}]{{/with}} + {{else}} + {{#if isArray}} +{{#unless arrayModelType}}**{{dataType}}**{{/unless}}{{#if arrayModelType}}[**{{dataType}}**]({{arrayModelType}}.md){{/if}} | {{description}} | {{#if defaultValue}}{{#if hasRequired}} if omitted the server will use the default value of {{/if}}{{#unless hasRequired}}defaults to {{/unless}}{{{defaultValue}}}{{/if}} + {{else}} +{{#unless arrayModelType}}**{{dataType}}**{{/unless}} | {{description}} | {{#if defaultValue}}{{#if hasRequired}} if omitted the server will use the default value of {{/if}}{{#unless hasRequired}}defaults to {{/unless}}{{{defaultValue}}}{{/if}}{{#with allowableValues}}{{#if defaultValue}}, {{/if}} must be one of [{{#each enumVars}}{{{value}}}, {{/each}}]{{/with}} + {{/if}} + {{/if}} + {{/if}} +{{/or}} diff --git a/openapi/python_explerimental_client_template/schemas.handlebars b/openapi/python_explerimental_client_template/schemas.handlebars new file mode 100644 index 00000000..9450487c --- /dev/null +++ b/openapi/python_explerimental_client_template/schemas.handlebars @@ -0,0 +1,2038 @@ +# coding: utf-8 + +{{>partial_header}} + +from collections import defaultdict +from datetime import date, datetime, timedelta # noqa: F401 +from dataclasses import dataclass +import functools +import decimal +import io +import os +import re +import tempfile +import typing + +from dateutil.parser.isoparser import isoparser, _takes_ascii +from frozendict import frozendict + +from {{packageName}}.exceptions import ( + ApiTypeError, + ApiValueError, +) +from {{packageName}}.configuration import ( + Configuration, +) + + +class Unset(object): + """ + An instance of this class is set as the default value for object type(dict) properties that are optional + When a property has an unset value, that property will not be assigned in the dict + """ + pass + +unset = Unset() + +none_type = type(None) +file_type = io.IOBase + + +class FileIO(io.FileIO): + """ + A class for storing files + Note: this class is not immutable + """ + + def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]): + if isinstance(arg, (io.FileIO, io.BufferedReader)): + arg.close() + inst = super(FileIO, cls).__new__(cls, 
arg.name) + super(FileIO, inst).__init__(arg.name) + return inst + raise ApiValueError('FileIO must be passed arg which contains the open file') + + +def update(d: dict, u: dict): + """ + Adds u to d + Where each dict is defaultdict(set) + """ + for k, v in u.items(): + d[k] = d[k].union(v) + return d + + +class InstantiationMetadata: + """ + A class to store metadata that is needed when instantiating OpenApi Schema subclasses + """ + def __init__( + self, + path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']), + from_server: bool = False, + configuration: typing.Optional[Configuration] = None, + base_classes: typing.FrozenSet[typing.Type] = frozenset(), + path_to_schemas: typing.Optional[typing.Dict[str, typing.Set[typing.Type]]] = None, + ): + """ + Args: + path_to_item: the path to the current data being instantiated. + For {'a': [1]} if the code is handling, 1, then the path is ('args[0]', 'a', 0) + from_server: whether or not this data came from the server + True when receiving server data + False when instantiating model with client side data not from the server + configuration: the Configuration instance to use + This is needed because in Configuration: + - one can disable validation checking + base_classes: when deserializing data that matches multiple schemas, this is used to store + the schemas that have been traversed. This is used to stop processing when a cycle is seen. + path_to_schemas: a dict that goes from path to a list of classes at each path location + """ + self.path_to_item = path_to_item + self.from_server = from_server + self.configuration = configuration + self.base_classes = base_classes + if path_to_schemas is None: + path_to_schemas = defaultdict(set) + self.path_to_schemas = path_to_schemas + + def __repr__(self): + return str(self.__dict__) + + def __eq__(self, other): + if not isinstance(other, InstantiationMetadata): + return False + return self.__dict__ == other.__dict__ + + +class ValidatorBase: + @staticmethod + def __is_json_validation_enabled(schema_keyword, configuration=None): + """Returns true if JSON schema validation is enabled for the specified + validation keyword. This can be used to skip JSON schema structural validation + as requested in the configuration. + + Args: + schema_keyword (string): the name of a JSON schema validation keyword. + configuration (Configuration): the configuration class.
+ """ + + return (configuration is None or + not hasattr(configuration, '_disabled_client_side_validations') or + schema_keyword not in configuration._disabled_client_side_validations) + + @staticmethod + def __raise_validation_error_message(value, constraint_msg, constraint_value, path_to_item, additional_txt=""): + raise ApiValueError( + "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format( + value=value, + constraint_msg=constraint_msg, + constraint_value=constraint_value, + additional_txt=additional_txt, + path_to_item=path_to_item, + ) + ) + + @classmethod + def __check_str_validations(cls, + validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if (cls.__is_json_validation_enabled('maxLength', _instantiation_metadata.configuration) and + 'max_length' in validations and + len(input_values) > validations['max_length']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="length must be less than or equal to", + constraint_value=validations['max_length'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minLength', _instantiation_metadata.configuration) and + 'min_length' in validations and + len(input_values) < validations['min_length']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="length must be greater than or equal to", + constraint_value=validations['min_length'], + path_to_item=_instantiation_metadata.path_to_item + ) + + checked_value = input_values + if (cls.__is_json_validation_enabled('pattern', _instantiation_metadata.configuration) and + 'regex' in validations): + for regex_dict in validations['regex']: + flags = regex_dict.get('flags', 0) + if not re.search(regex_dict['pattern'], checked_value, flags=flags): + if flags != 0: + # Don't print the regex flags if the flags are not + # specified in the OAS document. 
+ cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must match regular expression", + constraint_value=regex_dict['pattern'], + path_to_item=_instantiation_metadata.path_to_item, + additional_txt=" with flags=`{}`".format(flags) + ) + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must match regular expression", + constraint_value=regex_dict['pattern'], + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def __check_tuple_validations( + cls, validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if (cls.__is_json_validation_enabled('maxItems', _instantiation_metadata.configuration) and + 'max_items' in validations and + len(input_values) > validations['max_items']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of items must be less than or equal to", + constraint_value=validations['max_items'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minItems', _instantiation_metadata.configuration) and + 'min_items' in validations and + len(input_values) < validations['min_items']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of items must be greater than or equal to", + constraint_value=validations['min_items'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('uniqueItems', _instantiation_metadata.configuration) and + 'unique_items' in validations and validations['unique_items'] and input_values): + unique_items = [] + # print(validations) + for item in input_values: + if item not in unique_items: + unique_items.append(item) + if len(input_values) > len(unique_items): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="duplicate items were found, and the tuple must not contain duplicates because", + constraint_value='unique_items==True', + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def __check_dict_validations( + cls, validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if (cls.__is_json_validation_enabled('maxProperties', _instantiation_metadata.configuration) and + 'max_properties' in validations and + len(input_values) > validations['max_properties']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of properties must be less than or equal to", + constraint_value=validations['max_properties'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minProperties', _instantiation_metadata.configuration) and + 'min_properties' in validations and + len(input_values) < validations['min_properties']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of properties must be greater than or equal to", + constraint_value=validations['min_properties'], + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def __check_numeric_validations( + cls, validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if cls.__is_json_validation_enabled('multipleOf', + _instantiation_metadata.configuration) and 'multiple_of' in validations: + multiple_of_values = validations['multiple_of'] + for multiple_of_value in multiple_of_values: + if (isinstance(input_values, decimal.Decimal) and + not (float(input_values) / multiple_of_value).is_integer() + ): + # Note 'multipleOf' will be as 
good as the floating point arithmetic. + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="value must be a multiple of", + constraint_value=multiple_of_value, + path_to_item=_instantiation_metadata.path_to_item + ) + + checking_max_or_min_values = {'exclusive_maximum', 'inclusive_maximum', 'exclusive_minimum', + 'inclusive_minimum'}.isdisjoint(validations) is False + if not checking_max_or_min_values: + return + max_val = input_values + min_val = input_values + + if (cls.__is_json_validation_enabled('exclusiveMaximum', _instantiation_metadata.configuration) and + 'exclusive_maximum' in validations and + max_val >= validations['exclusive_maximum']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value less than", + constraint_value=validations['exclusive_maximum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('maximum', _instantiation_metadata.configuration) and + 'inclusive_maximum' in validations and + max_val > validations['inclusive_maximum']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value less than or equal to", + constraint_value=validations['inclusive_maximum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('exclusiveMinimum', _instantiation_metadata.configuration) and + 'exclusive_minimum' in validations and + min_val <= validations['exclusive_minimum']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value greater than", + constraint_value=validations['exclusive_minimum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minimum', _instantiation_metadata.configuration) and + 'inclusive_minimum' in validations and + min_val < validations['inclusive_minimum']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value greater than or equal to", + constraint_value=validations['inclusive_minimum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def _check_validations_for_types( + cls, + validations, + input_values, + _instantiation_metadata: InstantiationMetadata + ): + if isinstance(input_values, str): + cls.__check_str_validations(validations, input_values, _instantiation_metadata) + elif isinstance(input_values, tuple): + cls.__check_tuple_validations(validations, input_values, _instantiation_metadata) + elif isinstance(input_values, frozendict): + cls.__check_dict_validations(validations, input_values, _instantiation_metadata) + elif isinstance(input_values, decimal.Decimal): + cls.__check_numeric_validations(validations, input_values, _instantiation_metadata) + try: + return super()._validate_validations_pass(input_values, _instantiation_metadata) + except AttributeError: + return True + + +class Validator(typing.Protocol): + def _validate_validations_pass( + cls, + input_values, + _instantiation_metadata: InstantiationMetadata + ): + pass + + +def _SchemaValidator(**validations: typing.Union[str, bool, None, int, float, list[dict[str, typing.Union[str, int, float]]]]) -> Validator: + class SchemaValidator(ValidatorBase): + @classmethod + def _validate_validations_pass( + cls, + input_values, + _instantiation_metadata: InstantiationMetadata + ): + cls._check_validations_for_types(validations, input_values, _instantiation_metadata) + try: + return super()._validate_validations_pass(input_values,
_instantiation_metadata) + except AttributeError: + return True + + return SchemaValidator + + +class TypeChecker(typing.Protocol): + @classmethod + def _validate_type( + cls, arg_simple_class: type + ) -> typing.Tuple[type]: + pass + + +def _SchemaTypeChecker(union_type_cls: typing.Union[typing.Any]) -> TypeChecker: + if typing.get_origin(union_type_cls) is typing.Union: + union_classes = typing.get_args(union_type_cls) + else: + # note: when a union of a single class is passed in, the union disappears + union_classes = tuple([union_type_cls]) + """ + I want the type hint... union_type_cls + and to use it as a base class but when I do, I get + TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases + """ + class SchemaTypeChecker: + @classmethod + def _validate_type(cls, arg_simple_class: type): + if arg_simple_class not in union_classes: + return union_classes + try: + return super()._validate_type(arg_simple_class) + except AttributeError: + return tuple() + + return SchemaTypeChecker + + +class EnumMakerBase: + @classmethod + @property + def _enum_by_value( + cls + ) -> type: + enum_classes = {} + if not hasattr(cls, "_enum_value_to_name"): + return enum_classes + for enum_value, enum_name in cls._enum_value_to_name.items(): + base_class = type(enum_value) + if base_class is none_type: + enum_classes[enum_value] = get_new_class( + "Dynamic" + cls.__name__, (cls, NoneClass)) + log_cache_usage(get_new_class) + elif base_class is bool: + enum_classes[enum_value] = get_new_class( + "Dynamic" + cls.__name__, (cls, BoolClass)) + log_cache_usage(get_new_class) + else: + enum_classes[enum_value] = get_new_class( + "Dynamic" + cls.__name__, (cls, Singleton, base_class)) + log_cache_usage(get_new_class) + return enum_classes + + +class EnumMakerInterface(typing.Protocol): + @classmethod + @property + def _enum_value_to_name( + cls + ) -> typing.Dict[typing.Union[str, decimal.Decimal, bool, none_type], str]: + pass + + @classmethod + @property + def _enum_by_value( + cls + ) -> type: + pass + + +def _SchemaEnumMaker(enum_value_to_name: typing.Dict[typing.Union[str, decimal.Decimal, bool, none_type], str]) -> EnumMakerInterface: + class SchemaEnumMaker(EnumMakerBase): + @classmethod + @property + def _enum_value_to_name( + cls + ) -> typing.Dict[typing.Union[str, decimal.Decimal, bool, none_type], str]: + pass + try: + super_enum_value_to_name = super()._enum_value_to_name + except AttributeError: + return enum_value_to_name + intersection = dict(enum_value_to_name.items() & super_enum_value_to_name.items()) + return intersection + + return SchemaEnumMaker + + +class Singleton: + """ + Enums and singletons are the same + The same instance is returned for a given key of (cls, arg) + """ + # TODO use bidict to store this so boolean enums can move through it in reverse to get their own arg value? 
+ _instances = {} + + def __new__(cls, *args, **kwargs): + if not args: + raise ValueError('arg must be passed') + arg = args[0] + key = (cls, arg) + if key not in cls._instances: + if arg in {None, True, False}: + inst = super().__new__(cls) + # inst._value = arg + cls._instances[key] = inst + else: + cls._instances[key] = super().__new__(cls, arg) + return cls._instances[key] + + def __repr__(self): + return '({}, {})'.format(self.__class__.__name__, self) + + +class NoneClass(Singleton): + @classmethod + @property + def NONE(cls): + return cls(None) + + def is_none(self) -> bool: + return True + + def __bool__(self) -> bool: + return False + + +class BoolClass(Singleton): + @classmethod + @property + def TRUE(cls): + return cls(True) + + @classmethod + @property + def FALSE(cls): + return cls(False) + + @functools.cache + def __bool__(self) -> bool: + for key, instance in self._instances.items(): + if self is instance: + return key[1] + raise ValueError('Unable to find the boolean value of this instance') + + def is_true(self): + return bool(self) + + def is_false(self): + return bool(self) + + +class BoolBase: + pass + + +class NoneBase: + pass + + +class StrBase: + @property + def as_str(self) -> str: + return self + + @property + def as_date(self) -> date: + raise Exception('not implemented') + + @property + def as_datetime(self) -> datetime: + raise Exception('not implemented') + + @property + def as_decimal(self) -> decimal.Decimal: + raise Exception('not implemented') + + +class CustomIsoparser(isoparser): + + @_takes_ascii + def parse_isodatetime(self, dt_str): + components, pos = self._parse_isodate(dt_str) + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + if len(components) <= 3: + raise ValueError('Value is not a datetime') + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + components, pos = self._parse_isodate(datestr) + + if len(datestr) > pos: + raise ValueError('String contains invalid time components') + + if len(components) > 3: + raise ValueError('String contains invalid time components') + + return date(*components) + + +DEFAULT_ISOPARSER = CustomIsoparser() + + +class DateBase(StrBase): + @property + @functools.cache + def as_date(self) -> date: + return DEFAULT_ISOPARSER.parse_isodate(self) + + @classmethod + def _validate_format(cls, arg: typing.Optional[str], _instantiation_metadata: InstantiationMetadata): + if isinstance(arg, str): + try: + DEFAULT_ISOPARSER.parse_isodate(arg) + return True + except ValueError: + raise ApiValueError( + "Value does not conform to the required ISO-8601 date format. 
" + "Invalid value '{}' for type date at {}".format(arg, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DateBase _validate + """ + cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata) + return super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + +class DateTimeBase: + @property + @functools.cache + def as_datetime(self) -> datetime: + return DEFAULT_ISOPARSER.parse_isodatetime(self) + + @classmethod + def _validate_format(cls, arg: typing.Optional[str], _instantiation_metadata: InstantiationMetadata): + if isinstance(arg, str): + try: + DEFAULT_ISOPARSER.parse_isodatetime(arg) + return True + except ValueError: + raise ApiValueError( + "Value does not conform to the required ISO-8601 datetime format. " + "Invalid value '{}' for type datetime at {}".format(arg, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DateTimeBase _validate + """ + cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata) + return super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + +class DecimalBase(StrBase): + """ + A class for storing decimals that are sent over the wire as strings + These schemas must remain based on StrBase rather than NumberBase + because picking base classes must be deterministic + """ + + @property + @functools.cache + def as_decimal(self) -> decimal.Decimal: + return decimal.Decimal(self) + + @classmethod + def _validate_format(cls, arg: typing.Optional[str], _instantiation_metadata: InstantiationMetadata): + if isinstance(arg, str): + try: + decimal.Decimal(arg) + return True + except decimal.InvalidOperation: + raise ApiValueError( + "Value cannot be converted to a decimal. 
" + "Invalid value '{}' for type decimal at {}".format(arg, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DecimalBase _validate + """ + cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata) + return super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + +class NumberBase: + @property + def as_int(self) -> int: + try: + return self._as_int + except AttributeError: + """ + Note: for some numbers like 9.0 they could be represented as an + integer but our code chooses to store them as + >>> Decimal('9.0').as_tuple() + DecimalTuple(sign=0, digits=(9, 0), exponent=-1) + so we can tell that the value came from a float and convert it back to a float + during later serialization + """ + if self.as_tuple().exponent < 0: + # this could be represented as an integer but should be represented as a float + # because that's what it was serialized from + raise ApiValueError(f'{self} is not an integer') + self._as_int = int(self) + return self._as_int + + @property + def as_float(self) -> float: + try: + return self._as_float + except AttributeError: + if self.as_tuple().exponent >= 0: + raise ApiValueError(f'{self} is not an float') + self._as_float = float(self) + return self._as_float + + +class ListBase: + @classmethod + def _validate_items(cls, list_items, _instantiation_metadata: InstantiationMetadata): + """ + Ensures that: + - values passed in for items are valid + Exceptions will be raised if: + - invalid arguments were passed in + + Args: + list_items: the input list of items + + Raises: + ApiTypeError - for missing required arguments, or for invalid properties + """ + + # if we have definitions for an items schema, use it + # otherwise accept anything + item_cls = getattr(cls, '_items', AnyTypeSchema) + path_to_schemas = defaultdict(set) + for i, value in enumerate(list_items): + if isinstance(value, item_cls): + continue + item_instantiation_metadata = InstantiationMetadata( + from_server=_instantiation_metadata.from_server, + configuration=_instantiation_metadata.configuration, + path_to_item=_instantiation_metadata.path_to_item+(i,) + ) + other_path_to_schemas = item_cls._validate( + value, _instantiation_metadata=item_instantiation_metadata) + update(path_to_schemas, other_path_to_schemas) + return path_to_schemas + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + ListBase _validate + We return dynamic classes of different bases depending upon the inputs + This makes it so: + - the returned instance is always a subclass of our defining schema + - this allows us to check type based on whether an instance is a subclass of a schema + - the returned instance is a serializable type (except for None, True, and False) which are enums + + Returns: + new_cls (type): the new class + + Raises: + ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes + ApiTypeError: when the input type is not in the list of allowed spec types + """ + arg = args[0] + _path_to_schemas = super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + if not isinstance(arg, tuple): + return _path_to_schemas + if cls in _instantiation_metadata.base_classes: + # we have already moved through this class so stop here + return _path_to_schemas + _instantiation_metadata.base_classes |= frozenset({cls}) + other_path_to_schemas 
= cls._validate_items(arg, _instantiation_metadata=_instantiation_metadata) + update(_path_to_schemas, other_path_to_schemas) + return _path_to_schemas + + @classmethod + def _get_items(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + ''' + ListBase _get_items + ''' + _instantiation_metadata = InstantiationMetadata() if _instantiation_metadata is None else _instantiation_metadata + + list_items = args[0] + cast_items = [] + # if we have definitions for an items schema, use it + # otherwise accept anything + + cls_item_cls = getattr(cls, '_items', AnyTypeSchema) + for i, value in enumerate(list_items): + item_path_to_item = _instantiation_metadata.path_to_item+(i,) + if item_path_to_item in _instantiation_metadata.path_to_schemas: + item_cls = _instantiation_metadata.path_to_schemas[item_path_to_item] + else: + item_cls = cls_item_cls + + if isinstance(value, item_cls): + cast_items.append(value) + continue + item_instantiation_metadata = InstantiationMetadata( + configuration=_instantiation_metadata.configuration, + from_server=_instantiation_metadata.from_server, + path_to_item=item_path_to_item, + path_to_schemas=_instantiation_metadata.path_to_schemas, + ) + + if _instantiation_metadata.from_server: + new_value = item_cls._from_openapi_data(value, _instantiation_metadata=item_instantiation_metadata) + else: + new_value = item_cls(value, _instantiation_metadata=item_instantiation_metadata) + cast_items.append(new_value) + + return cast_items + + +class Discriminable: + @classmethod + def _ensure_discriminator_value_present(cls, disc_property_name: str, _instantiation_metadata: InstantiationMetadata, *args): + if not args or args and disc_property_name not in args[0]: + # The input data does not contain the discriminator property + raise ApiValueError( + "Cannot deserialize input data due to missing discriminator. 
" + "The discriminator property '{}' is missing at path: {}".format(disc_property_name, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _get_discriminated_class(cls, disc_property_name: str, disc_payload_value: str): + """ + Used in schemas with discriminators + """ + if not hasattr(cls, '_discriminator'): + return None + disc = cls._discriminator + if disc_property_name not in disc: + return None + discriminated_cls = disc[disc_property_name].get(disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + elif not hasattr(cls, '_composed_schemas'): + return None + # TODO stop traveling if a cycle is hit + for allof_cls in cls._composed_schemas['allOf']: + discriminated_cls = allof_cls._get_discriminated_class( + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + for oneof_cls in cls._composed_schemas['oneOf']: + discriminated_cls = oneof_cls._get_discriminated_class( + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + for anyof_cls in cls._composed_schemas['anyOf']: + discriminated_cls = anyof_cls._get_discriminated_class( + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + return None + + +class DictBase(Discriminable): + # subclass properties + _required_property_names = set() + + @classmethod + def _validate_arg_presence(cls, arg): + """ + Ensures that: + - all required arguments are passed in + - the input variable names are valid + - present in properties or + - accepted because additionalProperties exists + Exceptions will be raised if: + - invalid arguments were passed in + - a var_name is invalid if additionProperties == None and var_name not in _properties + - required properties were not passed in + + Args: + arg: the input dict + + Raises: + ApiTypeError - for missing required arguments, or for invalid properties + """ + seen_required_properties = set() + invalid_arguments = [] + for property_name in arg: + if property_name in cls._required_property_names: + seen_required_properties.add(property_name) + elif property_name in cls._property_names: + continue + elif cls._additional_properties: + continue + else: + invalid_arguments.append(property_name) + missing_required_arguments = list(cls._required_property_names - seen_required_properties) + if missing_required_arguments: + missing_required_arguments.sort() + raise ApiTypeError( + "{} is missing {} required argument{}: {}".format( + cls.__name__, + len(missing_required_arguments), + "s" if len(missing_required_arguments) > 1 else "", + missing_required_arguments + ) + ) + if invalid_arguments: + invalid_arguments.sort() + raise ApiTypeError( + "{} was passed {} invalid argument{}: {}".format( + cls.__name__, + len(invalid_arguments), + "s" if len(invalid_arguments) > 1 else "", + invalid_arguments + ) + ) + + @classmethod + def _validate_args(cls, arg, _instantiation_metadata: InstantiationMetadata): + """ + Ensures that: + - values passed in for properties are valid + Exceptions will be raised if: + - invalid arguments were passed in + + Args: + arg: the input dict + + Raises: + ApiTypeError - for missing required arguments, or for invalid properties + """ + path_to_schemas = defaultdict(set) + for property_name, value in arg.items(): + if property_name in cls._required_property_names or property_name in 
cls._property_names: + schema = getattr(cls, property_name) + elif cls._additional_properties: + schema = cls._additional_properties + else: + raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format( + value, cls, _instantiation_metadata.path_to_item+(property_name,) + )) + if isinstance(value, schema): + continue + arg_instantiation_metadata = InstantiationMetadata( + from_server=_instantiation_metadata.from_server, + configuration=_instantiation_metadata.configuration, + path_to_item=_instantiation_metadata.path_to_item+(property_name,) + ) + other_path_to_schemas = schema._validate(value, _instantiation_metadata=arg_instantiation_metadata) + update(path_to_schemas, other_path_to_schemas) + _instantiation_metadata.path_to_schemas.update(arg_instantiation_metadata.path_to_schemas) + return path_to_schemas + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DictBase _validate + We return dynamic classes of different bases depending upon the inputs + This makes it so: + - the returned instance is always a subclass of our defining schema + - this allows us to check type based on whether an instance is a subclass of a schema + - the returned instance is a serializable type (except for None, True, and False) which are enums + + Returns: + new_cls (type): the new class + + Raises: + ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes + ApiTypeError: when the input type is not in the list of allowed spec types + """ + if args and isinstance(args[0], cls): + # an instance of the correct type was passed in + return {} + arg = args[0] + _path_to_schemas = super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + if not isinstance(arg, frozendict): + return _path_to_schemas + cls._validate_arg_presence(args[0]) + other_path_to_schemas = cls._validate_args(args[0], _instantiation_metadata=_instantiation_metadata) + update(_path_to_schemas, other_path_to_schemas) + try: + _discriminator = cls._discriminator + except AttributeError: + return _path_to_schemas + # discriminator exists + disc_prop_name = list(_discriminator.keys())[0] + cls._ensure_discriminator_value_present(disc_prop_name, _instantiation_metadata, *args) + discriminated_cls = cls._get_discriminated_class( + disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name]) + if discriminated_cls is None: + raise ApiValueError( + "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format( + cls.__name__, + disc_prop_name, + list(_discriminator[disc_prop_name].keys()), + _instantiation_metadata.path_to_item + (disc_prop_name,) + ) + ) + if discriminated_cls in _instantiation_metadata.base_classes: + # we have already moved through this class so stop here + return _path_to_schemas + _instantiation_metadata.base_classes |= frozenset({cls}) + other_path_to_schemas = discriminated_cls._validate(*args, _instantiation_metadata=_instantiation_metadata) + update(_path_to_schemas, other_path_to_schemas) + return _path_to_schemas + + @classmethod + @property + def _additional_properties(cls): + return AnyTypeSchema + + @classmethod + @property + @functools.cache + def _property_names(cls): + property_names = set() + for var_name, var_value in cls.__dict__.items(): + # referenced models are classmethods + is_classmethod = type(var_value) is classmethod + if is_classmethod: + property_names.add(var_name) + continue + 
is_class = type(var_value) is type + if not is_class: + continue + if not issubclass(var_value, Schema): + continue + if var_name == '_additional_properties': + continue + property_names.add(var_name) + property_names = list(property_names) + property_names.sort() + return tuple(property_names) + + @classmethod + def _get_properties(cls, arg: typing.Dict[str, typing.Any], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DictBase _get_properties, this is how properties are set + These values already passed validation + """ + dict_items = {} + # if we have definitions for property schemas convert values using it + # otherwise accept anything + + _instantiation_metadata = InstantiationMetadata() if _instantiation_metadata is None else _instantiation_metadata + + for property_name_js, value in arg.items(): + property_cls = getattr(cls, property_name_js, cls._additional_properties) + property_path_to_item = _instantiation_metadata.path_to_item+(property_name_js,) + stored_property_cls = _instantiation_metadata.path_to_schemas.get(property_path_to_item) + if stored_property_cls: + property_cls = stored_property_cls + + if isinstance(value, property_cls): + dict_items[property_name_js] = value + continue + + prop_instantiation_metadata = InstantiationMetadata( + configuration=_instantiation_metadata.configuration, + from_server=_instantiation_metadata.from_server, + path_to_item=property_path_to_item, + path_to_schemas=_instantiation_metadata.path_to_schemas, + ) + if _instantiation_metadata.from_server: + new_value = property_cls._from_openapi_data(value, _instantiation_metadata=prop_instantiation_metadata) + else: + new_value = property_cls(value, _instantiation_metadata=prop_instantiation_metadata) + dict_items[property_name_js] = new_value + return dict_items + + def __setattr__(self, name, value): + if not isinstance(self, FileIO): + raise AttributeError('property setting not supported on immutable instances') + + def __getattr__(self, name): + if isinstance(self, frozendict): + # if an attribute does not exist + try: + return self[name] + except KeyError as ex: + raise AttributeError(str(ex)) + # print(('non-frozendict __getattr__', name)) + return super().__getattr__(self, name) + + def __getattribute__(self, name): + # print(('__getattribute__', name)) + # if an attribute does exist (for example as a class property but not as an instance method) + try: + return self[name] + except (KeyError, TypeError): + return super().__getattribute__(name) + + +inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict, FileIO, bytes} + + +class Schema: + """ + the base class of all swagger/openapi schemas/models + + ensures that: + - payload passes required validations + - payload is of allowed types + - payload value is an allowed enum value + """ + + @staticmethod + def __get_simple_class(input_value): + """Returns an input_value's simple class that we will use for type checking + + Args: + input_value (class/class_instance): the item for which we will return + the simple class + """ + if isinstance(input_value, tuple): + return tuple + elif isinstance(input_value, frozendict): + return frozendict + elif isinstance(input_value, none_type): + return none_type + elif isinstance(input_value, bytes): + return bytes + elif isinstance(input_value, (io.FileIO, io.BufferedReader)): + return FileIO + elif isinstance(input_value, bool): + # this must be higher than the int check because + # isinstance(True, int) == True + return bool + elif 
isinstance(input_value, int): + return int + elif isinstance(input_value, float): + return float + elif isinstance(input_value, datetime): + # this must be higher than the date check because + # isinstance(datetime_instance, date) == True + return datetime + elif isinstance(input_value, date): + return date + elif isinstance(input_value, str): + return str + return type(input_value) + + @staticmethod + def __get_valid_classes_phrase(input_classes): + """Returns a string phrase describing what types are allowed""" + all_classes = list(input_classes) + all_classes = sorted(all_classes, key=lambda cls: cls.__name__) + all_class_names = [cls.__name__ for cls in all_classes] + if len(all_class_names) == 1: + return "is {0}".format(all_class_names[0]) + return "is one of [{0}]".format(", ".join(all_class_names)) + + @classmethod + def __type_error_message( + cls, var_value=None, var_name=None, valid_classes=None, key_type=None + ): + """ + Keyword Args: + var_value (any): the variable which has the type_error + var_name (str): the name of the variable which has the typ error + valid_classes (tuple): the accepted classes for current_item's + value + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a tuple + """ + key_or_value = "value" + if key_type: + key_or_value = "key" + valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes) + msg = "Invalid type. Required {1} type {2} and " "passed type was {3}".format( + var_name, + key_or_value, + valid_classes_phrase, + type(var_value).__name__, + ) + return msg + + @classmethod + def __get_type_error(cls, var_value, path_to_item, valid_classes, key_type=False): + error_msg = cls.__type_error_message( + var_name=path_to_item[-1], + var_value=var_value, + valid_classes=valid_classes, + key_type=key_type, + ) + return ApiTypeError( + error_msg, + path_to_item=path_to_item, + valid_classes=valid_classes, + key_type=key_type, + ) + + @classmethod + def _class_by_base_class(cls, base_cls: type) -> type: + cls_name = "Dynamic"+cls.__name__ + if base_cls is bool: + new_cls = get_new_class(cls_name, (cls, BoolBase, BoolClass)) + elif base_cls is str: + new_cls = get_new_class(cls_name, (cls, StrBase, str)) + elif base_cls is decimal.Decimal: + new_cls = get_new_class(cls_name, (cls, NumberBase, decimal.Decimal)) + elif base_cls is tuple: + new_cls = get_new_class(cls_name, (cls, ListBase, tuple)) + elif base_cls is frozendict: + new_cls = get_new_class(cls_name, (cls, DictBase, frozendict)) + elif base_cls is none_type: + new_cls = get_new_class(cls_name, (cls, NoneBase, NoneClass)) + log_cache_usage(get_new_class) + return new_cls + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + Schema _validate + Runs all schema validation logic and + returns a dynamic class of different bases depending upon the input + This makes it so: + - the returned instance is always a subclass of our defining schema + - this allows us to check type based on whether an instance is a subclass of a schema + - the returned instance is a serializable type (except for None, True, and False) which are enums + + Use cases: + 1. inheritable type: string/decimal.Decimal/frozendict/tuple + 2. enum value cases: 'hi', 1 -> no base_class set because the enum includes the base class + 3. 
uninheritable type: True/False/None -> no base_class because the base class is not inheritable + _enum_by_value will handle this use case + + Required Steps: + 1. verify type of input is valid vs the allowed _types + 2. check validations that are applicable for this type of input + 3. if enums exist, check that the value exists in the enum + + Returns: + path_to_schemas: a map of path to schemas + + Raises: + ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes + ApiTypeError: when the input type is not in the list of allowed spec types + """ + arg = args[0] + + base_class = cls.__get_simple_class(arg) + failed_type_check_classes = cls._validate_type(base_class) + if failed_type_check_classes: + raise cls.__get_type_error( + arg, + _instantiation_metadata.path_to_item, + failed_type_check_classes, + key_type=False, + ) + if hasattr(cls, '_validate_validations_pass'): + cls._validate_validations_pass(arg, _instantiation_metadata) + path_to_schemas = defaultdict(set) + path_to_schemas[_instantiation_metadata.path_to_item].add(cls) + + if hasattr(cls, "_enum_by_value"): + cls._validate_enum_value(arg) + return path_to_schemas + + if base_class is none_type or base_class is bool: + return path_to_schemas + + path_to_schemas[_instantiation_metadata.path_to_item].add(base_class) + return path_to_schemas + + @classmethod + def _validate_enum_value(cls, arg): + try: + cls._enum_by_value[arg] + except KeyError: + raise ApiValueError("Invalid value {} passed in to {}, {}".format(arg, cls, cls._enum_value_to_name)) + + @classmethod + def __get_new_cls(cls, arg, _instantiation_metadata: InstantiationMetadata): + """ + PATH 1 - make a new dynamic class and return an instance of that class + We are making an instance of cls, but instead of making cls + make a new class, new_cls + which includes dynamic bases including cls + return an instance of that new class + """ + if ( + _instantiation_metadata.path_to_schemas and + _instantiation_metadata.path_to_item in _instantiation_metadata.path_to_schemas): + chosen_new_cls = _instantiation_metadata.path_to_schemas[_instantiation_metadata.path_to_item] + # print('leaving __get_new_cls early for cls {} because path_to_schemas exists'.format(cls)) + # print(_instantiation_metadata.path_to_item) + # print(chosen_new_cls) + return chosen_new_cls + """ + Dict property + List Item Assignment Use cases: + 1. value is NOT an instance of the required schema class + the value is validated by _validate + _validate returns a key value pair + where the key is the path to the item, and the value will be the required manufactured class + made out of the matching schemas + 2. 
value is an instance of the the correct schema type + the value is NOT validated by _validate, _validate only checks that the instance is of the correct schema type + for this value, _validate does NOT return an entry for it in _path_to_schemas + and in list/dict _get_items,_get_properties the value will be directly assigned + because value is of the correct type, and validation was run earlier when the instance was created + """ + _path_to_schemas = cls._validate(arg, _instantiation_metadata=_instantiation_metadata) + from pprint import pprint + pprint(dict(_path_to_schemas)) + # loop through it make a new class for each entry + for path, schema_classes in _path_to_schemas.items(): + enum_schema = any( + hasattr(this_cls, '_enum_by_value') for this_cls in schema_classes) + inheritable_primitive_type = schema_classes.intersection(inheritable_primitive_types_set) + chosen_schema_classes = schema_classes + suffix = tuple() + if inheritable_primitive_type: + chosen_schema_classes = schema_classes - inheritable_primitive_types_set + if not enum_schema: + # include the inheritable_primitive_type + suffix = tuple(inheritable_primitive_type) + + if len(chosen_schema_classes) == 1 and not suffix: + mfg_cls = tuple(chosen_schema_classes)[0] + else: + x_schema = schema_descendents & chosen_schema_classes + if x_schema: + x_schema = x_schema.pop() + if any(c is not x_schema and issubclass(c, x_schema) for c in chosen_schema_classes): + # needed to not have a mro error in get_new_class + chosen_schema_classes.remove(x_schema) + used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix + mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes) + + if inheritable_primitive_type and not enum_schema: + _instantiation_metadata.path_to_schemas[path] = mfg_cls + continue + + # Use case: value is None, True, False, or an enum value + # print('choosing enum class for path {} in arg {}'.format(path, arg)) + value = arg + for key in path[1:]: + value = value[key] + if hasattr(mfg_cls, '_enum_by_value'): + mfg_cls = mfg_cls._enum_by_value[value] + elif value in {True, False}: + mfg_cls = mfg_cls._class_by_base_class(bool) + elif value is None: + mfg_cls = mfg_cls._class_by_base_class(none_type) + else: + raise ApiValueError('Unhandled case value={} bases={}'.format(value, mfg_cls.__bases__)) + _instantiation_metadata.path_to_schemas[path] = mfg_cls + + return _instantiation_metadata.path_to_schemas[_instantiation_metadata.path_to_item] + + @classmethod + def __get_new_instance_without_conversion(cls, arg, _instantiation_metadata): + # PATH 2 - we have a Dynamic class and we are making an instance of it + if issubclass(cls, tuple): + items = cls._get_items(arg, _instantiation_metadata=_instantiation_metadata) + return super(Schema, cls).__new__(cls, items) + elif issubclass(cls, frozendict): + properties = cls._get_properties(arg, _instantiation_metadata=_instantiation_metadata) + return super(Schema, cls).__new__(cls, properties) + """ + str = openapi str, date, and datetime + decimal.Decimal = openapi int and float + FileIO = openapi binary type and the user inputs a file + bytes = openapi binary type and the user inputs bytes + """ + return super(Schema, cls).__new__(cls, arg) + + @classmethod + def _from_openapi_data( + cls, + arg: typing.Union[ + str, + date, + datetime, + int, + float, + decimal.Decimal, + bool, + None, + 'Schema', + dict, + frozendict, + tuple, + list, + io.FileIO, + io.BufferedReader, + bytes + ], + _instantiation_metadata: 
typing.Optional[InstantiationMetadata] + ): + arg = cast_to_allowed_types(arg, from_server=True) + _instantiation_metadata = InstantiationMetadata(from_server=True) if _instantiation_metadata is None else _instantiation_metadata + if not _instantiation_metadata.from_server: + raise ApiValueError( + 'from_server must be True in this code path, if you need it to be False, use cls()' + ) + new_cls = cls.__get_new_cls(arg, _instantiation_metadata) + new_inst = new_cls.__get_new_instance_without_conversion(arg, _instantiation_metadata) + return new_inst + + @staticmethod + def __get_input_dict(*args, **kwargs) -> frozendict: + input_dict = {} + if args and isinstance(args[0], (dict, frozendict)): + input_dict.update(args[0]) + if kwargs: + input_dict.update(kwargs) + return frozendict(input_dict) + + @staticmethod + def __remove_unsets(kwargs): + return {key: val for key, val in kwargs.items() if val is not unset} + + def __new__(cls, *args: typing.Union[dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None, **kwargs: typing.Union[dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]): + """ + Schema __new__ + + Args: + args (int/float/decimal.Decimal/str/list/tuple/dict/frozendict/bool/None): the value + kwargs (str, int/float/decimal.Decimal/str/list/tuple/dict/frozendict/bool/None): dict values + _instantiation_metadata: contains the needed from_server, configuration, path_to_item + """ + kwargs = cls.__remove_unsets(kwargs) + if not args and not kwargs: + raise TypeError( + 'No input given. args or kwargs must be given.' + ) + if not kwargs and args and not isinstance(args[0], dict): + arg = args[0] + else: + arg = cls.__get_input_dict(*args, **kwargs) + _instantiation_metadata = InstantiationMetadata() if _instantiation_metadata is None else _instantiation_metadata + if _instantiation_metadata.from_server: + raise ApiValueError( + 'from_server must be False in this code path, if you need it to be True, use cls._from_openapi_data()' + ) + arg = cast_to_allowed_types(arg, from_server=_instantiation_metadata.from_server) + new_cls = cls.__get_new_cls(arg, _instantiation_metadata) + return new_cls.__get_new_instance_without_conversion(arg, _instantiation_metadata) + + def __init__( + self, + *args: typing.Union[ + dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], + _instantiation_metadata: typing.Optional[InstantiationMetadata] = None, + **kwargs: typing.Union[ + dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset + ] + ): + """ + this is needed to fix 'Unexpected argument' warning in pycharm + this code does nothing because all Schema instances are immutable + this means that all input data is passed into and used in new, and after the new instance is made + no new attributes are assigned and init is not used + """ + pass + + +def cast_to_allowed_types(arg: typing.Union[str, date, datetime, decimal.Decimal, int, float, None, dict, frozendict, list, tuple, bytes, Schema], from_server=False) -> typing.Union[str, bytes, decimal.Decimal, None, frozendict, tuple, Schema]: + """ + from_server=False date, datetime -> str + int, float -> Decimal + StrSchema will convert that to bytes and remember the encoding when we pass in str input + """ + if isinstance(arg, (date, datetime)): + if not from_server: + return 
arg.isoformat() + # ApiTypeError will be thrown later by _validate_type + return arg + elif isinstance(arg, bool): + """ + this check must come before isinstance(arg, (int, float)) + because isinstance(True, int) is True + """ + return arg + elif isinstance(arg, decimal.Decimal): + return arg + elif isinstance(arg, int): + return decimal.Decimal(arg) + elif isinstance(arg, float): + decimal_from_float = decimal.Decimal(arg) + if decimal_from_float.as_integer_ratio()[1] == 1: + # 9.0 -> Decimal('9.0') + # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0') + return decimal.Decimal(str(decimal_from_float)+'.0') + return decimal_from_float + elif isinstance(arg, str): + return arg + elif isinstance(arg, bytes): + return arg + elif isinstance(arg, (io.FileIO, io.BufferedReader)): + if arg.closed: + raise ApiValueError('Invalid file state; file is closed and must be open') + return arg + elif type(arg) is list or type(arg) is tuple: + return tuple([cast_to_allowed_types(item) for item in arg]) + elif type(arg) is dict or type(arg) is frozendict: + return frozendict({key: cast_to_allowed_types(val) for key, val in arg.items() if val is not unset}) + elif arg is None: + return arg + elif isinstance(arg, Schema): + return arg + raise ValueError('Invalid type passed in got input={} type={}'.format(arg, type(arg))) + + +class ComposedBase(Discriminable): + + @classmethod + def __get_allof_classes(cls, *args, _instantiation_metadata: InstantiationMetadata): + path_to_schemas = defaultdict(set) + for allof_cls in cls._composed_schemas['allOf']: + if allof_cls in _instantiation_metadata.base_classes: + continue + other_path_to_schemas = allof_cls._validate(*args, _instantiation_metadata=_instantiation_metadata) + update(path_to_schemas, other_path_to_schemas) + return path_to_schemas + + @classmethod + def __get_oneof_class( + cls, + *args, + discriminated_cls, + _instantiation_metadata: InstantiationMetadata, + path_to_schemas: typing.Dict[typing.Tuple, typing.Set[typing.Type[Schema]]] + ): + oneof_classes = [] + chosen_oneof_cls = None + original_base_classes = _instantiation_metadata.base_classes + new_base_classes = _instantiation_metadata.base_classes + path_to_schemas = defaultdict(set) + for oneof_cls in cls._composed_schemas['oneOf']: + if oneof_cls in path_to_schemas[_instantiation_metadata.path_to_item]: + oneof_classes.append(oneof_cls) + continue + if isinstance(args[0], oneof_cls): + # passed in instance is the correct type + chosen_oneof_cls = oneof_cls + oneof_classes.append(oneof_cls) + continue + _instantiation_metadata.base_classes = original_base_classes + try: + path_to_schemas = oneof_cls._validate(*args, _instantiation_metadata=_instantiation_metadata) + new_base_classes = _instantiation_metadata.base_classes + except (ApiValueError, ApiTypeError) as ex: + if discriminated_cls is not None and oneof_cls is discriminated_cls: + raise ex + continue + chosen_oneof_cls = oneof_cls + oneof_classes.append(oneof_cls) + if not oneof_classes: + raise ApiValueError( + "Invalid inputs given to generate an instance of {}. None " + "of the oneOf schemas matched the input data.".format(cls) + ) + elif len(oneof_classes) > 1: + raise ApiValueError( + "Invalid inputs given to generate an instance of {}. 
Multiple " + "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes) + ) + _instantiation_metadata.base_classes = new_base_classes + return path_to_schemas + + @classmethod + def __get_anyof_classes( + cls, + *args, + discriminated_cls, + _instantiation_metadata: InstantiationMetadata + ): + anyof_classes = [] + chosen_anyof_cls = None + original_base_classes = _instantiation_metadata.base_classes + path_to_schemas = defaultdict(set) + for anyof_cls in cls._composed_schemas['anyOf']: + if anyof_cls in _instantiation_metadata.base_classes: + continue + if isinstance(args[0], anyof_cls): + # passed in instance is the correct type + chosen_anyof_cls = anyof_cls + anyof_classes.append(anyof_cls) + continue + + _instantiation_metadata.base_classes = original_base_classes + try: + other_path_to_schemas = anyof_cls._validate(*args, _instantiation_metadata=_instantiation_metadata) + except (ApiValueError, ApiTypeError) as ex: + if discriminated_cls is not None and anyof_cls is discriminated_cls: + raise ex + continue + original_base_classes = _instantiation_metadata.base_classes + chosen_anyof_cls = anyof_cls + anyof_classes.append(anyof_cls) + update(path_to_schemas, other_path_to_schemas) + if not anyof_classes: + raise ApiValueError( + "Invalid inputs given to generate an instance of {}. None " + "of the anyOf schemas matched the input data.".format(cls) + ) + return path_to_schemas + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + ComposedBase _validate + We return dynamic classes of different bases depending upon the inputs + This makes it so: + - the returned instance is always a subclass of our defining schema + - this allows us to check type based on whether an instance is a subclass of a schema + - the returned instance is a serializable type (except for None, True, and False) which are enums + + Returns: + new_cls (type): the new class + + Raises: + ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes + ApiTypeError: when the input type is not in the list of allowed spec types + """ + if args and isinstance(args[0], Schema) and _instantiation_metadata.from_server is False: + if isinstance(args[0], cls): + # an instance of the correct type was passed in + return {} + raise ApiTypeError( + 'Incorrect type passed in, required type was {} and passed type was {} at {}'.format( + cls, + type(args[0]), + _instantiation_metadata.path_to_item + ) + ) + + # validation checking on types, validations, and enums + path_to_schemas = super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + _instantiation_metadata.base_classes |= frozenset({cls}) + + # process composed schema + _discriminator = getattr(cls, '_discriminator', None) + discriminated_cls = None + if _discriminator and args and isinstance(args[0], frozendict): + disc_property_name = list(_discriminator.keys())[0] + cls._ensure_discriminator_value_present(disc_property_name, _instantiation_metadata, *args) + # get discriminated_cls by looking at the dict in the current class + discriminated_cls = cls._get_discriminated_class( + disc_property_name=disc_property_name, disc_payload_value=args[0][disc_property_name]) + if discriminated_cls is None: + raise ApiValueError( + "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format( + args[0][disc_property_name], + cls.__name__, + disc_property_name, + 
list(_discriminator[disc_property_name].keys()), + _instantiation_metadata.path_to_item + (disc_property_name,) + ) + ) + + if cls._composed_schemas['allOf']: + other_path_to_schemas = cls.__get_allof_classes(*args, _instantiation_metadata=_instantiation_metadata) + update(path_to_schemas, other_path_to_schemas) + if cls._composed_schemas['oneOf']: + other_path_to_schemas = cls.__get_oneof_class( + *args, + discriminated_cls=discriminated_cls, + _instantiation_metadata=_instantiation_metadata, + path_to_schemas=path_to_schemas + ) + update(path_to_schemas, other_path_to_schemas) + if cls._composed_schemas['anyOf']: + other_path_to_schemas = cls.__get_anyof_classes( + *args, + discriminated_cls=discriminated_cls, + _instantiation_metadata=_instantiation_metadata + ) + update(path_to_schemas, other_path_to_schemas) + + if discriminated_cls is not None: + # TODO use an exception from this package here + assert discriminated_cls in path_to_schemas[_instantiation_metadata.path_to_item] + return path_to_schemas + + +# DictBase, ListBase, NumberBase, StrBase, BoolBase, NoneBase +class ComposedSchema( + _SchemaTypeChecker(typing.Union[none_type, str, decimal.Decimal, bool, tuple, frozendict]), + ComposedBase, + DictBase, + ListBase, + NumberBase, + StrBase, + BoolBase, + NoneBase, + Schema +): + + # subclass properties + _composed_schemas = {} + + @classmethod + def _from_openapi_data(cls, *args: typing.Any, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None, **kwargs): + if not args: + if not kwargs: + raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__)) + args = (kwargs, ) + return super()._from_openapi_data(args[0], _instantiation_metadata=_instantiation_metadata) + + +class ListSchema( + _SchemaTypeChecker(typing.Union[tuple]), + ListBase, + Schema +): + + @classmethod + def _from_openapi_data(cls, arg: typing.List[typing.Any], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + def __new__(cls, arg: typing.Union[list, tuple], **kwargs: InstantiationMetadata): + return super().__new__(cls, arg, **kwargs) + + +class NoneSchema( + _SchemaTypeChecker(typing.Union[none_type]), + NoneBase, + Schema +): + + @classmethod + def _from_openapi_data(cls, arg: None, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + def __new__(cls, arg: None, **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg, **kwargs) + + +class NumberSchema( + _SchemaTypeChecker(typing.Union[decimal.Decimal]), + NumberBase, + Schema +): + """ + This is used for type: number with no format + Both integers AND floats are accepted + """ + + @classmethod + def _from_openapi_data(cls, arg: typing.Union[int, float, decimal.Decimal], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg, **kwargs) + + +class IntBase(NumberBase): + @property + def as_int(self) -> int: + try: + return self._as_int + except AttributeError: + self._as_int = int(self) + return self._as_int + + @classmethod + def _validate_format(cls, arg: typing.Optional[decimal.Decimal], 
_instantiation_metadata: InstantiationMetadata): + if isinstance(arg, decimal.Decimal): + exponent = arg.as_tuple().exponent + if exponent != 0: + raise ApiValueError( + "Invalid value '{}' for type integer at {}".format(arg, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + IntBase _validate + TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only + """ + cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata) + return super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + +class IntSchema(IntBase, NumberSchema): + + @classmethod + def _from_openapi_data(cls, arg: int, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg, **kwargs) + + +class Int32Schema( + _SchemaValidator( + inclusive_minimum=decimal.Decimal(-2147483648), + inclusive_maximum=decimal.Decimal(2147483647) + ), + IntSchema +): + pass + +class Int64Schema( + _SchemaValidator( + inclusive_minimum=decimal.Decimal(-9223372036854775808), + inclusive_maximum=decimal.Decimal(9223372036854775807) + ), + IntSchema +): + pass + + +class Float32Schema( + _SchemaValidator( + inclusive_minimum=decimal.Decimal(-3.4028234663852886e+38), + inclusive_maximum=decimal.Decimal(3.4028234663852886e+38) + ), + NumberSchema +): + + @classmethod + def _from_openapi_data(cls, arg: typing.Union[float, decimal.Decimal], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + # todo check format + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + +class Float64Schema( + _SchemaValidator( + inclusive_minimum=decimal.Decimal(-1.7976931348623157E+308), + inclusive_maximum=decimal.Decimal(1.7976931348623157E+308) + ), + NumberSchema +): + + @classmethod + def _from_openapi_data(cls, arg: typing.Union[float, decimal.Decimal], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + # todo check format + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + +class StrSchema( + _SchemaTypeChecker(typing.Union[str]), + StrBase, + Schema +): + """ + date + datetime string types must inherit from this class + That is because one can validate a str payload as both: + - type: string (format unset) + - type: string, format: date + """ + + @classmethod + def _from_openapi_data(cls, arg: typing.Union[str], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None) -> 'StrSchema': + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + def __new__(cls, arg: typing.Union[str, date, datetime], **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg, **kwargs) + + +class DateSchema(DateBase, StrSchema): + + def __new__(cls, arg: typing.Union[str, datetime], **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg, **kwargs) + + +class DateTimeSchema(DateTimeBase, StrSchema): + + def __new__(cls, arg: typing.Union[str, datetime], **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg, **kwargs) + + +class DecimalSchema(DecimalBase, StrSchema): + + def __new__(cls, arg: 
typing.Union[str], **kwargs: typing.Union[InstantiationMetadata]): + """ + Note: Decimals may not be passed in because cast_to_allowed_types is only invoked once for payloads + which can be simple (str) or complex (dicts or lists with nested values) + Because casting is only done once and recursively casts all values prior to validation then for a potential + client side Decimal input if Decimal was accepted as an input in DecimalSchema then one would not know + if one was using it for a StrSchema (where it should be cast to str) or one is using it for NumberSchema + where it should stay as Decimal. + """ + return super().__new__(cls, arg, **kwargs) + + +class BytesSchema( + _SchemaTypeChecker(typing.Union[bytes]), + Schema, +): + """ + this class will subclass bytes and is immutable + """ + def __new__(cls, arg: typing.Union[bytes], **kwargs: typing.Union[InstantiationMetadata]): + return super(Schema, cls).__new__(cls, arg) + + +class FileSchema( + _SchemaTypeChecker(typing.Union[FileIO]), + Schema, +): + """ + This class is NOT immutable + Dynamic classes are built using it for example when AnyType allows in binary data + Al other schema classes ARE immutable + If one wanted to make this immutable one could make this a DictSchema with required properties: + - data = BytesSchema (which would be an immutable bytes based schema) + - file_name = StrSchema + and cast_to_allowed_types would convert bytes and file instances into dicts containing data + file_name + The downside would be that data would be stored in memory which one may not want to do for very large files + + The developer is responsible for closing this file and deleting it + + This class was kept as mutable: + - to allow file reading and writing to disk + - to be able to preserve file name info + """ + + def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: typing.Union[InstantiationMetadata]): + return super(Schema, cls).__new__(cls, arg) + + +class BinaryBase: + pass + + +class BinarySchema( + _SchemaTypeChecker(typing.Union[bytes, FileIO]), + ComposedBase, + BinaryBase, + Schema, +): + + @classmethod + @property + def _composed_schemas(cls): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. 
If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + return { + 'allOf': [], + 'oneOf': [ + BytesSchema, + FileSchema, + ], + 'anyOf': [ + ], + } + + def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg) + + +class BoolSchema( + _SchemaTypeChecker(typing.Union[bool]), + BoolBase, + Schema +): + + @classmethod + def _from_openapi_data(cls, arg: bool, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + def __new__(cls, arg: bool, **kwargs: typing.Union[InstantiationMetadata]): + return super().__new__(cls, arg, **kwargs) + + +class AnyTypeSchema( + _SchemaTypeChecker( + typing.Union[frozendict, tuple, decimal.Decimal, str, bool, none_type, bytes, FileIO] + ), + DictBase, + ListBase, + NumberBase, + StrBase, + BoolBase, + NoneBase, + Schema +): + pass + + +class DictSchema( + _SchemaTypeChecker(typing.Union[frozendict]), + DictBase, + Schema +): + + @classmethod + def _from_openapi_data(cls, arg: typing.Dict[str, typing.Any], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata) + + def __new__(cls, *args: typing.Union[dict, frozendict], **kwargs: typing.Union[dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, InstantiationMetadata]): + return super().__new__(cls, *args, **kwargs) + + +schema_descendents = set([NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema]) + + +def deserialize_file(response_data, configuration, content_disposition=None): + """Deserializes body to file + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + Args: + param response_data (str): the file data to write + configuration (Configuration): the instance to use to convert files + + Keyword Args: + content_disposition (str): the value of the Content-Disposition + header + + Returns: + (file_type): the deserialized file which is open + The user is responsible for closing and reading the file + """ + fd, path = tempfile.mkstemp(dir=configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + if content_disposition: + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition).group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + if isinstance(response_data, str): + # change str to bytes so we can write it + response_data = response_data.encode('utf-8') + f.write(response_data) + + f = open(path, "rb") + return f + + +@functools.cache +def get_new_class( + class_name: str, + bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...] 
+) -> typing.Type[Schema]: + """ + Returns a new class that is made with the subclass bases + """ + return type(class_name, bases, {}) + + +LOG_CACHE_USAGE = False + + +def log_cache_usage(cache_fn): + if LOG_CACHE_USAGE: + print(cache_fn.__name__, cache_fn.cache_info()) diff --git a/openapi/python_explerimental_client_template/setup.handlebars b/openapi/python_explerimental_client_template/setup.handlebars new file mode 100644 index 00000000..fb3e16a9 --- /dev/null +++ b/openapi/python_explerimental_client_template/setup.handlebars @@ -0,0 +1,51 @@ +# coding: utf-8 + +{{>partial_header}} + +from setuptools import setup, find_namespace_packages # noqa: H301 + +NAME = "{{{projectName}}}" +VERSION = "{{packageVersion}}" +{{#with apiInfo}} +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIRES = [ + "urllib3 >= 1.15", + "certifi", + "python-dateutil", + "frozendict >= 2.0.3", +{{#if asyncio}} + "aiohttp >= 3.0.0", +{{/if}} +{{#if tornado}} + "tornado>=4.2,<5", +{{/if}} +{{#if hasHttpSignatureMethods}} + "pem>=19.3.0", + "pycryptodome>=3.9.0", +{{/if}} +] + +setup( + name=NAME, + version=VERSION, + description="{{appName}}", + author="{{#if infoName}}{{infoName}}{{/if}}{{#unless infoName}}OpenAPI Generator community{{/unless}}", + author_email="{{#if infoEmail}}{{infoEmail}}{{/if}}{{#unless infoEmail}}team@openapitools.org{{/unless}}", + url="{{packageUrl}}", + keywords=["OpenAPI", "OpenAPI-Generator", "{{{appName}}}"], + python_requires="{{{generatorLanguageVersion}}}", + install_requires=REQUIRES, + packages=find_namespace_packages(exclude=["test", "tests"]), + include_package_data=True, + {{#if licenseInfo}}license="{{licenseInfo}}", + {{/if}}long_description="""\ + {{appDescription}} # noqa: E501 + """ +) +{{/with}} diff --git a/openapi/python_explerimental_client_template/setup_cfg.handlebars b/openapi/python_explerimental_client_template/setup_cfg.handlebars new file mode 100644 index 00000000..8cb28d8c --- /dev/null +++ b/openapi/python_explerimental_client_template/setup_cfg.handlebars @@ -0,0 +1,13 @@ +{{#if useNose}} +[nosetests] +logging-clear-handlers=true +verbosity=2 +randomize=true +exe=true +with-coverage=true +cover-package={{{packageName}}} +cover-erase=true + +{{/if}} +[flake8] +max-line-length=99 diff --git a/openapi/python_explerimental_client_template/signing.handlebars b/openapi/python_explerimental_client_template/signing.handlebars new file mode 100644 index 00000000..26d2b8cb --- /dev/null +++ b/openapi/python_explerimental_client_template/signing.handlebars @@ -0,0 +1,409 @@ +# coding: utf-8 +{{>partial_header}} + +from base64 import b64encode +from Crypto.IO import PEM, PKCS8 +from Crypto.Hash import SHA256, SHA512 +from Crypto.PublicKey import RSA, ECC +from Crypto.Signature import PKCS1_v1_5, pss, DSS +from email.utils import formatdate +import json +import os +import re +from time import time +from urllib.parse import urlencode, urlparse + +# The constants below define a subset of HTTP headers that can be included in the +# HTTP signature scheme. Additional headers may be included in the signature. + +# The '(request-target)' header is a calculated field that includes the HTTP verb, +# the URL path and the URL query. +HEADER_REQUEST_TARGET = '(request-target)' +# The time when the HTTP signature was generated. +HEADER_CREATED = '(created)' +# The time when the HTTP signature expires. The API server should reject HTTP requests +# that have expired. 
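+# Note (an assumption drawn from the HTTP signatures draft referenced later in
+# this module, not stated here): the '(created)' and '(expires)' signature
+# parameters are expressed as Unix timestamps (seconds since the epoch).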
+HEADER_EXPIRES = '(expires)'
+# The 'Host' header.
+HEADER_HOST = 'Host'
+# The 'Date' header.
+HEADER_DATE = 'Date'
+# When the 'Digest' header is included in the HTTP signature, the client automatically
+# computes the digest of the HTTP request body, per RFC 3230.
+HEADER_DIGEST = 'Digest'
+# The 'Authorization' header is automatically generated by the client. It includes
+# the list of signed headers and a base64-encoded signature.
+HEADER_AUTHORIZATION = 'Authorization'
+
+# The constants below define the cryptographic schemes for the HTTP signature scheme.
+SCHEME_HS2019 = 'hs2019'
+SCHEME_RSA_SHA256 = 'rsa-sha256'
+SCHEME_RSA_SHA512 = 'rsa-sha512'
+
+# The constants below define the signature algorithms that can be used for the HTTP
+# signature scheme.
+ALGORITHM_RSASSA_PSS = 'RSASSA-PSS'
+ALGORITHM_RSASSA_PKCS1v15 = 'RSASSA-PKCS1-v1_5'
+
+ALGORITHM_ECDSA_MODE_FIPS_186_3 = 'fips-186-3'
+ALGORITHM_ECDSA_MODE_DETERMINISTIC_RFC6979 = 'deterministic-rfc6979'
+ALGORITHM_ECDSA_KEY_SIGNING_ALGORITHMS = {
+    ALGORITHM_ECDSA_MODE_FIPS_186_3,
+    ALGORITHM_ECDSA_MODE_DETERMINISTIC_RFC6979
+}
+
+# The cryptographic hash algorithm for the message signature.
+HASH_SHA256 = 'sha256'
+HASH_SHA512 = 'sha512'
+
+
+class HttpSigningConfiguration(object):
+    """The configuration parameters for the HTTP signature security scheme.
+    The HTTP signature security scheme is used to sign HTTP requests with a private key
+    which is in possession of the API client.
+    An 'Authorization' header is calculated by creating a hash of select headers,
+    and optionally the body of the HTTP request, then signing the hash value using
+    a private key. The 'Authorization' header is added to outbound HTTP requests.
+
+    NOTE: This class is auto generated by OpenAPI Generator
+
+    Ref: https://openapi-generator.tech
+    Do not edit the class manually.
+
+    :param key_id: A string value specifying the identifier of the cryptographic key,
+        when signing HTTP requests.
+    :param signing_scheme: A string value specifying the signature scheme, when
+        signing HTTP requests.
+        Supported values are hs2019, rsa-sha256, rsa-sha512.
+        Avoid using rsa-sha256, rsa-sha512 as they are deprecated. These values are
+        available for server-side applications that only support the older
+        HTTP signature algorithms.
+    :param private_key_path: A string value specifying the path of the file containing
+        a private key. The private key is used to sign HTTP requests.
+    :param private_key_passphrase: A string value specifying the passphrase to decrypt
+        the private key.
+    :param signed_headers: A list of strings. Each value is the name of an HTTP header
+        that must be included in the HTTP signature calculation.
+        The two special signature headers '(request-target)' and '(created)' SHOULD be
+        included in SignedHeaders.
+        The '(created)' header expresses when the signature was created.
+        The '(request-target)' header is a concatenation of the lowercased :method, an
+        ASCII space, and the :path pseudo-headers.
+        When signed_headers is not specified, the client defaults to a single value,
+        '(created)', in the list of HTTP headers.
+        When SignedHeaders contains the 'Digest' value, the client performs the
+        following operations:
+        1. Calculate a digest of request body, as specified in RFC 3230, section 4.3.2.
+        2. Set the 'Digest' header in the request body.
+        3. Include the 'Digest' header and value in the HTTP signature.
+    :param signing_algorithm: A string value specifying the signature algorithm, when
+        signing HTTP requests.
+        Supported values are:
+        1. For RSA keys: RSASSA-PSS, RSASSA-PKCS1-v1_5.
+        2. For ECDSA keys: fips-186-3, deterministic-rfc6979.
+        If None, the signing algorithm is inferred from the private key.
+        The default signing algorithm for RSA keys is RSASSA-PSS.
+        The default signing algorithm for ECDSA keys is fips-186-3.
+    :param hash_algorithm: The hash algorithm for the signature. Supported values are
+        sha256 and sha512.
+        If the signing_scheme is rsa-sha256, the hash algorithm must be set
+        to None or sha256.
+        If the signing_scheme is rsa-sha512, the hash algorithm must be set
+        to None or sha512.
+    :param signature_max_validity: The signature max validity, expressed as
+        a datetime.timedelta value. It must be a positive value.
+    """
+    def __init__(self, key_id, signing_scheme, private_key_path,
+                 private_key_passphrase=None,
+                 signed_headers=None,
+                 signing_algorithm=None,
+                 hash_algorithm=None,
+                 signature_max_validity=None):
+        self.key_id = key_id
+        if signing_scheme not in {SCHEME_HS2019, SCHEME_RSA_SHA256, SCHEME_RSA_SHA512}:
+            raise Exception("Unsupported security scheme: {0}".format(signing_scheme))
+        self.signing_scheme = signing_scheme
+        if not os.path.exists(private_key_path):
+            raise Exception("Private key file does not exist")
+        self.private_key_path = private_key_path
+        self.private_key_passphrase = private_key_passphrase
+        self.signing_algorithm = signing_algorithm
+        self.hash_algorithm = hash_algorithm
+        if signing_scheme == SCHEME_RSA_SHA256:
+            if self.hash_algorithm is None:
+                self.hash_algorithm = HASH_SHA256
+            elif self.hash_algorithm != HASH_SHA256:
+                raise Exception("Hash algorithm must be sha256 when security scheme is %s" %
+                                SCHEME_RSA_SHA256)
+        elif signing_scheme == SCHEME_RSA_SHA512:
+            if self.hash_algorithm is None:
+                self.hash_algorithm = HASH_SHA512
+            elif self.hash_algorithm != HASH_SHA512:
+                raise Exception("Hash algorithm must be sha512 when security scheme is %s" %
+                                SCHEME_RSA_SHA512)
+        elif signing_scheme == SCHEME_HS2019:
+            if self.hash_algorithm is None:
+                self.hash_algorithm = HASH_SHA256
+            elif self.hash_algorithm not in {HASH_SHA256, HASH_SHA512}:
+                raise Exception("Invalid hash algorithm")
+        if signature_max_validity is not None and signature_max_validity.total_seconds() < 0:
+            raise Exception("The signature max validity must be a positive value")
+        self.signature_max_validity = signature_max_validity
+        # If the user has not provided any signed_headers, the default must be set to '(created)',
+        # as specified in the 'HTTP signature' standard.
+        if signed_headers is None or len(signed_headers) == 0:
+            signed_headers = [HEADER_CREATED]
+        if self.signature_max_validity is None and HEADER_EXPIRES in signed_headers:
+            raise Exception(
+                "Signature max validity must be set when "
+                "'(expires)' signature parameter is specified")
+        if len(signed_headers) != len(set(signed_headers)):
+            raise Exception("Cannot have duplicates in the signed_headers parameter")
+        if HEADER_AUTHORIZATION in signed_headers:
+            raise Exception("'Authorization' header cannot be included in signed headers")
+        self.signed_headers = signed_headers
+        self.private_key = None
+        """The private key used to sign HTTP requests.
+        Initialized when the PEM-encoded private key is loaded from a file.
+        """
+        self.host = None
+        """The host name, optionally followed by a colon and TCP port number.
+        """
+        self._load_private_key()
+
+    def get_http_signature_headers(self, resource_path, method, headers, body, query_params):
+        """Create a cryptographic message signature for the HTTP request and add the signed headers.
+
+        :param resource_path: A string representation of the HTTP request resource path.
+        :param method: A string representation of the HTTP request method, e.g. GET, POST.
+        :param headers: A dict containing the HTTP request headers.
+        :param body: The object representing the HTTP request body.
+        :param query_params: A string representing the HTTP request query parameters.
+        :return: A dict of HTTP headers that must be added to the outbound HTTP request.
+        """
+        if method is None:
+            raise Exception("HTTP method must be set")
+        if resource_path is None:
+            raise Exception("Resource path must be set")
+
+        signed_headers_list, request_headers_dict = self._get_signed_header_info(
+            resource_path, method, headers, body, query_params)
+
+        header_items = [
+            "{0}: {1}".format(key.lower(), value) for key, value in signed_headers_list]
+        string_to_sign = "\n".join(header_items)
+
+        digest, digest_prefix = self._get_message_digest(string_to_sign.encode())
+        b64_signed_msg = self._sign_digest(digest)
+
+        request_headers_dict[HEADER_AUTHORIZATION] = self._get_authorization_header(
+            signed_headers_list, b64_signed_msg)
+
+        return request_headers_dict
+
+    def get_public_key(self):
+        """Returns the public key object associated with the private key.
+        """
+        pubkey = None
+        if isinstance(self.private_key, RSA.RsaKey):
+            pubkey = self.private_key.publickey()
+        elif isinstance(self.private_key, ECC.EccKey):
+            pubkey = self.private_key.public_key()
+        return pubkey
+
+    def _load_private_key(self):
+        """Load the private key used to sign HTTP requests.
+        The private key is used to sign HTTP requests as defined in
+        https://datatracker.ietf.org/doc/draft-cavage-http-signatures/.
+        """
+        if self.private_key is not None:
+            return
+        with open(self.private_key_path, 'r') as f:
+            pem_data = f.read()
+            # Verify PEM Pre-Encapsulation Boundary
+            r = re.compile(r"\s*-----BEGIN (.*)-----\s+")
+            m = r.match(pem_data)
+            if not m:
+                raise ValueError("Not a valid PEM pre boundary")
+            pem_header = m.group(1)
+            if pem_header == 'RSA PRIVATE KEY':
+                self.private_key = RSA.importKey(pem_data, self.private_key_passphrase)
+            elif pem_header == 'EC PRIVATE KEY':
+                self.private_key = ECC.import_key(pem_data, self.private_key_passphrase)
+            elif pem_header in {'PRIVATE KEY', 'ENCRYPTED PRIVATE KEY'}:
+                # Key is in PKCS8 format, which is capable of holding many different
+                # types of private keys, not just EC keys.
+                (key_binary, pem_header, is_encrypted) = \
+                    PEM.decode(pem_data, self.private_key_passphrase)
+                (oid, privkey, params) = \
+                    PKCS8.unwrap(key_binary, passphrase=self.private_key_passphrase)
+                if oid == '1.2.840.10045.2.1':
+                    self.private_key = ECC.import_key(pem_data, self.private_key_passphrase)
+                else:
+                    raise Exception("Unsupported key: {0}. OID: {1}".format(pem_header, oid))
+            else:
+                raise Exception("Unsupported key: {0}".format(pem_header))
+        # Validate the specified signature algorithm is compatible with the private key.
+        if self.signing_algorithm is not None:
+            supported_algs = None
+            if isinstance(self.private_key, RSA.RsaKey):
+                supported_algs = {ALGORITHM_RSASSA_PSS, ALGORITHM_RSASSA_PKCS1v15}
+            elif isinstance(self.private_key, ECC.EccKey):
+                supported_algs = ALGORITHM_ECDSA_KEY_SIGNING_ALGORITHMS
+            if supported_algs is not None and self.signing_algorithm not in supported_algs:
+                raise Exception(
+                    "Signing algorithm {0} is not compatible with private key".format(
+                        self.signing_algorithm))
+
+    def _get_signed_header_info(self, resource_path, method, headers, body, query_params):
+        """Build the HTTP headers (name, value) that need to be included in
+        the HTTP signature scheme.
+
+        :param resource_path: A string representation of the HTTP request resource path.
+        :param method: A string representation of the HTTP request method, e.g. GET, POST.
+        :param headers: A dict containing the HTTP request headers.
+        :param body: The object (e.g. a dict) representing the HTTP request body.
+        :param query_params: A string representing the HTTP request query parameters.
+        :return: A tuple containing two dict objects:
+            The first dict contains the HTTP headers that are used to calculate
+            the HTTP signature.
+            The second dict contains the HTTP headers that must be added to
+            the outbound HTTP request.
+        """
+
+        if body is None:
+            body = ''
+        else:
+            body = json.dumps(body)
+
+        # Build the '(request-target)' HTTP signature parameter.
+        target_host = urlparse(self.host).netloc
+        target_path = urlparse(self.host).path
+        request_target = method.lower() + " " + target_path + resource_path
+        if query_params:
+            request_target += "?" + urlencode(query_params)
+
+        # Get UNIX time, e.g. seconds since epoch, not including leap seconds.
+        now = time()
+        # Format date per RFC 7231 section-7.1.1.2. An example is:
+        # Date: Wed, 21 Oct 2015 07:28:00 GMT
+        cdate = formatdate(timeval=now, localtime=False, usegmt=True)
+        # The '(created)' value MUST be a Unix timestamp integer value.
+        # Subsecond precision is not supported.
+        created = int(now)
+        if self.signature_max_validity is not None:
+            expires = now + self.signature_max_validity.total_seconds()
+
+        signed_headers_list = []
+        request_headers_dict = {}
+        for hdr_key in self.signed_headers:
+            hdr_key = hdr_key.lower()
+            if hdr_key == HEADER_REQUEST_TARGET:
+                value = request_target
+            elif hdr_key == HEADER_CREATED:
+                value = '{0}'.format(created)
+            elif hdr_key == HEADER_EXPIRES:
+                value = '{0}'.format(expires)
+            elif hdr_key == HEADER_DATE.lower():
+                value = cdate
+                request_headers_dict[HEADER_DATE] = '{0}'.format(cdate)
+            elif hdr_key == HEADER_DIGEST.lower():
+                request_body = body.encode()
+                body_digest, digest_prefix = self._get_message_digest(request_body)
+                b64_body_digest = b64encode(body_digest.digest())
+                value = digest_prefix + b64_body_digest.decode('ascii')
+                request_headers_dict[HEADER_DIGEST] = '{0}{1}'.format(
+                    digest_prefix, b64_body_digest.decode('ascii'))
+            elif hdr_key == HEADER_HOST.lower():
+                value = target_host
+                request_headers_dict[HEADER_HOST] = '{0}'.format(target_host)
+            else:
+                value = next((v for k, v in headers.items() if k.lower() == hdr_key), None)
+                if value is None:
+                    raise Exception(
+                        "Cannot sign HTTP request. "
+                        "Request does not contain the '{0}' header".format(hdr_key))
+            signed_headers_list.append((hdr_key, value))
+
+        return signed_headers_list, request_headers_dict
+
+    def _get_message_digest(self, data):
+        """Calculates and returns a cryptographic digest of a specified HTTP request.
+
+        :param data: The string representation of the data to be hashed with a cryptographic hash.
+        :return: A tuple of (digest, prefix).
+            The digest is a hashing object that contains the cryptographic digest of
+            the HTTP request.
+            The prefix is a string that identifies the cryptographic hash. It is used
+            to generate the 'Digest' header as specified in RFC 3230.
+        """
+        if self.hash_algorithm == HASH_SHA512:
+            digest = SHA512.new()
+            prefix = 'SHA-512='
+        elif self.hash_algorithm == HASH_SHA256:
+            digest = SHA256.new()
+            prefix = 'SHA-256='
+        else:
+            raise Exception("Unsupported hash algorithm: {0}".format(self.hash_algorithm))
+        digest.update(data)
+        return digest, prefix
+
+    def _sign_digest(self, digest):
+        """Signs a message digest with a private key specified in the signing_info.
+
+        :param digest: A hashing object that contains the cryptographic digest of the HTTP request.
+        :return: A base-64 string representing the cryptographic signature of the input digest.
+        """
+        sig_alg = self.signing_algorithm
+        if isinstance(self.private_key, RSA.RsaKey):
+            if sig_alg is None or sig_alg == ALGORITHM_RSASSA_PSS:
+                # RSASSA-PSS in Section 8.1 of RFC8017.
+                signature = pss.new(self.private_key).sign(digest)
+            elif sig_alg == ALGORITHM_RSASSA_PKCS1v15:
+                # RSASSA-PKCS1-v1_5 in Section 8.2 of RFC8017.
+                signature = PKCS1_v1_5.new(self.private_key).sign(digest)
+            else:
+                raise Exception("Unsupported signature algorithm: {0}".format(sig_alg))
+        elif isinstance(self.private_key, ECC.EccKey):
+            if sig_alg is None:
+                sig_alg = ALGORITHM_ECDSA_MODE_FIPS_186_3
+            if sig_alg in ALGORITHM_ECDSA_KEY_SIGNING_ALGORITHMS:
+                # draft-ietf-httpbis-message-signatures-00 does not specify the ECDSA encoding.
+                # Issue: https://github.com/w3c-ccg/http-signatures/issues/107
+                signature = DSS.new(key=self.private_key, mode=sig_alg,
+                                    encoding='der').sign(digest)
+            else:
+                raise Exception("Unsupported signature algorithm: {0}".format(sig_alg))
+        else:
+            raise Exception("Unsupported private key: {0}".format(type(self.private_key)))
+        return b64encode(signature)
+
+    def _get_authorization_header(self, signed_headers, signed_msg):
+        """Calculates and returns the value of the 'Authorization' header when signing HTTP requests.
+
+        :param signed_headers: A list of tuples. Each value is the name of an HTTP header that
+            must be included in the HTTP signature calculation.
+        :param signed_msg: A base-64 encoded string representation of the signature.
+        :return: The string value of the 'Authorization' header, representing the signature
+            of the HTTP request.
+ """ + created_ts = None + expires_ts = None + for k, v in signed_headers: + if k == HEADER_CREATED: + created_ts = v + elif k == HEADER_EXPIRES: + expires_ts = v + lower_keys = [k.lower() for k, v in signed_headers] + headers_value = " ".join(lower_keys) + + auth_str = "Signature keyId=\"{0}\",algorithm=\"{1}\",".format( + self.key_id, self.signing_scheme) + if created_ts is not None: + auth_str = auth_str + "created={0},".format(created_ts) + if expires_ts is not None: + auth_str = auth_str + "expires={0},".format(expires_ts) + auth_str = auth_str + "headers=\"{0}\",signature=\"{1}\"".format( + headers_value, signed_msg.decode('ascii')) + + return auth_str diff --git a/openapi/python_explerimental_client_template/test-requirements.handlebars b/openapi/python_explerimental_client_template/test-requirements.handlebars new file mode 100644 index 00000000..3529726b --- /dev/null +++ b/openapi/python_explerimental_client_template/test-requirements.handlebars @@ -0,0 +1,15 @@ +{{#if useNose}} +coverage>=4.0.3 +nose>=1.3.7 +pluggy>=0.3.1 +py>=1.4.31 +randomize>=0.13 +{{/if}} +{{#unless useNose}} +pytest~=4.6.7 # needed for python 3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 3.4 +{{/unless}} +{{#if hasHttpSignatureMethods}} +pycryptodome>=3.9.0 +{{/if}} \ No newline at end of file diff --git a/openapi/python_explerimental_client_template/tox.handlebars b/openapi/python_explerimental_client_template/tox.handlebars new file mode 100644 index 00000000..d1b68916 --- /dev/null +++ b/openapi/python_explerimental_client_template/tox.handlebars @@ -0,0 +1,9 @@ +[tox] +envlist = py39 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + {{#unless useNose}}pytest --cov={{{packageName}}}{{/unless}}{{#if useNose}}nosetests{{/if}} diff --git a/openapi/python_explerimental_client_template/travis.handlebars b/openapi/python_explerimental_client_template/travis.handlebars new file mode 100644 index 00000000..5e4e1f0c --- /dev/null +++ b/openapi/python_explerimental_client_template/travis.handlebars @@ -0,0 +1,18 @@ +# ref: https://docs.travis-ci.com/user/languages/python +language: python +python: + - "3.5" + - "3.6" + - "3.7" + - "3.8" +# command to install dependencies +install: + - "pip install -r requirements.txt" + - "pip install -r test-requirements.txt" +# command to run tests +{{#if useNose}} +script: nosetests +{{/if}} +{{#unless useNose}} +script: pytest --cov={{{packageName}}} +{{/unless}}