[python-experimental] creates + uses MetaOapg cls to store non-properties schema keywords (#13256)

* Uses MetaOapg

* Fixes some tests

* Fixes tests

* Removes pass

* Generates unit test sample

* Samples regenerated

* Reverts version files

* Changes uniqueItems to Boolean type

* Samples regenerated

* Fixes code in addImportsForPropertyType

* Fixes codegenModel and codegenProperty

* Fixes CodegenParameter

* Adds getUniqueItemsBoolean/setUniqueItemsBoolean

* Updates template to use getUniqueItemsBoolean

* Adds printing of uniqueItemsBoolean in codegenModel

* Samples regenerated
This commit is contained in:
Justin Black
2022-08-23 17:26:43 -07:00
committed by GitHub
parent 8ee22b8e97
commit 3f2b167360
123 changed files with 2823 additions and 2539 deletions

View File

@@ -147,6 +147,7 @@ public class CodegenModel implements IJsonSchemaValidationProperties {
private Integer maxProperties;
private Integer minProperties;
private boolean uniqueItems;
private Boolean uniqueItemsBoolean;
private Integer maxItems;
private Integer minItems;
private Integer maxLength;
@@ -558,6 +559,16 @@ public class CodegenModel implements IJsonSchemaValidationProperties {
this.uniqueItems = uniqueItems;
}
@Override
public Boolean getUniqueItemsBoolean() {
return uniqueItemsBoolean;
}
@Override
public void setUniqueItemsBoolean(Boolean uniqueItemsBoolean) {
this.uniqueItemsBoolean = uniqueItemsBoolean;
}
@Override
public Integer getMinProperties() {
return minProperties;
@@ -940,6 +951,7 @@ public class CodegenModel implements IJsonSchemaValidationProperties {
getUniqueItems() == that.getUniqueItems() &&
getExclusiveMinimum() == that.getExclusiveMinimum() &&
getExclusiveMaximum() == that.getExclusiveMaximum() &&
Objects.equals(uniqueItemsBoolean, that.getUniqueItemsBoolean()) &&
Objects.equals(ref, that.getRef()) &&
Objects.equals(requiredVarsMap, that.getRequiredVarsMap()) &&
Objects.equals(composedSchemas, that.composedSchemas) &&
@@ -1014,7 +1026,8 @@ public class CodegenModel implements IJsonSchemaValidationProperties {
getMinItems(), getMaxLength(), getMinLength(), getExclusiveMinimum(), getExclusiveMaximum(), getMinimum(),
getMaximum(), getPattern(), getMultipleOf(), getItems(), getAdditionalProperties(), getIsModel(),
getAdditionalPropertiesIsAnyType(), hasDiscriminatorWithNonEmptyMapping,
isAnyType, getComposedSchemas(), hasMultipleTypes, isDecimal, isUuid, requiredVarsMap, ref);
isAnyType, getComposedSchemas(), hasMultipleTypes, isDecimal, isUuid, requiredVarsMap, ref,
uniqueItemsBoolean);
}
@Override
@@ -1089,6 +1102,7 @@ public class CodegenModel implements IJsonSchemaValidationProperties {
sb.append(", maxProperties=").append(maxProperties);
sb.append(", minProperties=").append(minProperties);
sb.append(", uniqueItems=").append(uniqueItems);
sb.append(", uniqueItemsBoolean=").append(uniqueItemsBoolean);
sb.append(", maxItems=").append(maxItems);
sb.append(", minItems=").append(minItems);
sb.append(", maxLength=").append(maxLength);

View File

@@ -99,6 +99,7 @@ public class CodegenParameter implements IJsonSchemaValidationProperties {
* See http://json-schema.org/latest/json-schema-validation.html#anchor49
*/
public boolean uniqueItems;
private Boolean uniqueItemsBoolean;
/**
* See http://json-schema.org/latest/json-schema-validation.html#anchor14
*/
@@ -148,6 +149,7 @@ public class CodegenParameter implements IJsonSchemaValidationProperties {
output.maxItems = this.maxItems;
output.minItems = this.minItems;
output.uniqueItems = this.uniqueItems;
output.setUniqueItemsBoolean(this.uniqueItemsBoolean);
output.multipleOf = this.multipleOf;
output.jsonSchema = this.jsonSchema;
output.defaultValue = this.defaultValue;
@@ -238,7 +240,7 @@ public class CodegenParameter implements IJsonSchemaValidationProperties {
@Override
public int hashCode() {
return Objects.hash(isFormParam, isQueryParam, isPathParam, isHeaderParam, isCookieParam, isBodyParam, isContainer, isCollectionFormatMulti, isPrimitiveType, isModel, isExplode, baseName, paramName, dataType, datatypeWithEnum, dataFormat, collectionFormat, description, unescapedDescription, baseType, defaultValue, enumName, style, isDeepObject, isAllowEmptyValue, example, jsonSchema, isString, isNumeric, isInteger, isLong, isNumber, isFloat, isDouble, isDecimal, isByteArray, isBinary, isBoolean, isDate, isDateTime, isUuid, isUri, isEmail, isFreeFormObject, isAnyType, isArray, isMap, isFile, isEnum, _enum, allowableValues, items, mostInnerItems, additionalProperties, vars, requiredVars, vendorExtensions, hasValidation, getMaxProperties(), getMinProperties(), isNullable, isDeprecated, required, getMaximum(), getExclusiveMaximum(), getMinimum(), getExclusiveMinimum(), getMaxLength(), getMinLength(), getPattern(), getMaxItems(), getMinItems(), getUniqueItems(), contentType, multipleOf, isNull, additionalPropertiesIsAnyType, hasVars, hasRequired, isShort, isUnboundedInteger, hasDiscriminatorWithNonEmptyMapping, composedSchemas, hasMultipleTypes, schema, content, requiredVarsMap, ref);
return Objects.hash(isFormParam, isQueryParam, isPathParam, isHeaderParam, isCookieParam, isBodyParam, isContainer, isCollectionFormatMulti, isPrimitiveType, isModel, isExplode, baseName, paramName, dataType, datatypeWithEnum, dataFormat, collectionFormat, description, unescapedDescription, baseType, defaultValue, enumName, style, isDeepObject, isAllowEmptyValue, example, jsonSchema, isString, isNumeric, isInteger, isLong, isNumber, isFloat, isDouble, isDecimal, isByteArray, isBinary, isBoolean, isDate, isDateTime, isUuid, isUri, isEmail, isFreeFormObject, isAnyType, isArray, isMap, isFile, isEnum, _enum, allowableValues, items, mostInnerItems, additionalProperties, vars, requiredVars, vendorExtensions, hasValidation, getMaxProperties(), getMinProperties(), isNullable, isDeprecated, required, getMaximum(), getExclusiveMaximum(), getMinimum(), getExclusiveMinimum(), getMaxLength(), getMinLength(), getPattern(), getMaxItems(), getMinItems(), getUniqueItems(), contentType, multipleOf, isNull, additionalPropertiesIsAnyType, hasVars, hasRequired, isShort, isUnboundedInteger, hasDiscriminatorWithNonEmptyMapping, composedSchemas, hasMultipleTypes, schema, content, requiredVarsMap, ref, uniqueItemsBoolean);
}
@Override
@@ -294,6 +296,7 @@ public class CodegenParameter implements IJsonSchemaValidationProperties {
getExclusiveMaximum() == that.getExclusiveMaximum() &&
getExclusiveMinimum() == that.getExclusiveMinimum() &&
getUniqueItems() == that.getUniqueItems() &&
Objects.equals(uniqueItemsBoolean, that.getUniqueItemsBoolean()) &&
Objects.equals(ref, that.getRef()) &&
Objects.equals(requiredVarsMap, that.getRequiredVarsMap()) &&
Objects.equals(content, that.getContent()) &&
@@ -414,6 +417,7 @@ public class CodegenParameter implements IJsonSchemaValidationProperties {
sb.append(", maxItems=").append(maxItems);
sb.append(", minItems=").append(minItems);
sb.append(", uniqueItems=").append(uniqueItems);
sb.append(", uniqueItemsBoolean=").append(uniqueItemsBoolean);
sb.append(", contentType=").append(contentType);
sb.append(", multipleOf=").append(multipleOf);
sb.append(", isNull=").append(isNull);
@@ -531,6 +535,16 @@ public class CodegenParameter implements IJsonSchemaValidationProperties {
this.uniqueItems = uniqueItems;
}
@Override
public Boolean getUniqueItemsBoolean() {
return uniqueItemsBoolean;
}
@Override
public void setUniqueItemsBoolean(Boolean uniqueItemsBoolean) {
this.uniqueItemsBoolean = uniqueItemsBoolean;
}
@Override
public Integer getMinProperties() {
return minProperties;

View File

@@ -181,6 +181,7 @@ public class CodegenProperty implements Cloneable, IJsonSchemaValidationProperti
private Integer maxProperties;
private Integer minProperties;
private boolean uniqueItems;
private Boolean uniqueItemsBoolean;
// XML
public boolean isXmlAttribute = false;
@@ -725,6 +726,16 @@ public class CodegenProperty implements Cloneable, IJsonSchemaValidationProperti
this.uniqueItems = uniqueItems;
}
@Override
public Boolean getUniqueItemsBoolean() {
return uniqueItemsBoolean;
}
@Override
public void setUniqueItemsBoolean(Boolean uniqueItemsBoolean) {
this.uniqueItemsBoolean = uniqueItemsBoolean;
}
@Override
public Integer getMinProperties() {
return minProperties;
@@ -977,6 +988,7 @@ public class CodegenProperty implements Cloneable, IJsonSchemaValidationProperti
sb.append(", maxProperties=").append(maxProperties);
sb.append(", minProperties=").append(minProperties);
sb.append(", uniqueItems=").append(uniqueItems);
sb.append(", uniqueItemsBoolean=").append(uniqueItemsBoolean);
sb.append(", multipleOf=").append(multipleOf);
sb.append(", isXmlAttribute=").append(isXmlAttribute);
sb.append(", xmlPrefix='").append(xmlPrefix).append('\'');
@@ -1050,6 +1062,7 @@ public class CodegenProperty implements Cloneable, IJsonSchemaValidationProperti
getAdditionalPropertiesIsAnyType() == that.getAdditionalPropertiesIsAnyType() &&
getHasVars() == that.getHasVars() &&
getHasRequired() == that.getHasRequired() &&
Objects.equals(uniqueItemsBoolean, that.getUniqueItemsBoolean()) &&
Objects.equals(ref, that.getRef()) &&
Objects.equals(requiredVarsMap, that.getRequiredVarsMap()) &&
Objects.equals(composedSchemas, that.composedSchemas) &&
@@ -1116,6 +1129,6 @@ public class CodegenProperty implements Cloneable, IJsonSchemaValidationProperti
nameInSnakeCase, enumName, maxItems, minItems, isXmlAttribute, xmlPrefix, xmlName,
xmlNamespace, isXmlWrapped, isNull, additionalPropertiesIsAnyType, hasVars, hasRequired,
hasDiscriminatorWithNonEmptyMapping, composedSchemas, hasMultipleTypes, requiredVarsMap,
ref);
ref, uniqueItemsBoolean);
}
}

View File

@@ -67,6 +67,7 @@ public class CodegenResponse implements IJsonSchemaValidationProperties {
private Integer maxProperties;
private Integer minProperties;
private boolean uniqueItems;
private Boolean uniqueItemsBoolean;
private Integer maxItems;
private Integer minItems;
private Integer maxLength;
@@ -103,7 +104,7 @@ public class CodegenResponse implements IJsonSchemaValidationProperties {
getMinLength(), exclusiveMinimum, exclusiveMaximum, getMinimum(), getMaximum(), getPattern(),
is1xx, is2xx, is3xx, is4xx, is5xx, additionalPropertiesIsAnyType, hasVars, hasRequired,
hasDiscriminatorWithNonEmptyMapping, composedSchemas, hasMultipleTypes, responseHeaders, content,
requiredVarsMap, ref);
requiredVarsMap, ref,uniqueItemsBoolean);
}
@Override
@@ -152,6 +153,7 @@ public class CodegenResponse implements IJsonSchemaValidationProperties {
getAdditionalPropertiesIsAnyType() == that.getAdditionalPropertiesIsAnyType() &&
getHasVars() == that.getHasVars() &&
getHasRequired() == that.getHasRequired() &&
Objects.equals(uniqueItemsBoolean, that.getUniqueItemsBoolean()) &&
Objects.equals(ref, that.getRef()) &&
Objects.equals(requiredVarsMap, that.getRequiredVarsMap()) &&
Objects.equals(content, that.getContent()) &&
@@ -301,6 +303,17 @@ public class CodegenResponse implements IJsonSchemaValidationProperties {
this.uniqueItems = uniqueItems;
}
@Override
public Boolean getUniqueItemsBoolean() {
return uniqueItemsBoolean;
}
@Override
public void setUniqueItemsBoolean(Boolean uniqueItemsBoolean) {
this.uniqueItemsBoolean = uniqueItemsBoolean;
}
@Override
public Integer getMinProperties() {
return minProperties;
@@ -519,6 +532,7 @@ public class CodegenResponse implements IJsonSchemaValidationProperties {
sb.append(", maxProperties=").append(maxProperties);
sb.append(", minProperties=").append(minProperties);
sb.append(", uniqueItems=").append(uniqueItems);
sb.append(", uniqueItemsBoolean=").append(uniqueItemsBoolean);
sb.append(", maxItems=").append(maxItems);
sb.append(", minItems=").append(minItems);
sb.append(", maxLength=").append(maxLength);

View File

@@ -5582,7 +5582,7 @@ public class DefaultCodegen implements CodegenConfig {
*/
protected void addImportsForPropertyType(CodegenModel model, CodegenProperty property) {
if (property.isArray) {
if (property.getUniqueItems()) { // set
if (Boolean.TRUE.equals(property.getUniqueItemsBoolean())) { // set
addImport(model.imports, typeMapping.get("set"));
} else { // array
addImport(model.imports, typeMapping.get("array"));

View File

@@ -50,10 +50,18 @@ public interface IJsonSchemaValidationProperties {
void setMaxItems(Integer maxItems);
// TODO update this value to Boolean in 7.0.0
boolean getUniqueItems();
// TODO update this value to Boolean in 7.0.0
void setUniqueItems(boolean uniqueItems);
// TODO remove in 7.0.0
Boolean getUniqueItemsBoolean();
// TODO remove in 7.0.0
void setUniqueItemsBoolean(Boolean uniqueItems);
Integer getMinProperties();
void setMinProperties(Integer minProperties);

View File

@@ -1677,6 +1677,7 @@ public class ModelUtils {
if (minItems != null) target.setMinItems(minItems);
if (maxItems != null) target.setMaxItems(maxItems);
if (uniqueItems != null) target.setUniqueItems(uniqueItems);
if (uniqueItems != null) target.setUniqueItemsBoolean(uniqueItems);
}
private static void setObjectValidations(Integer minProperties, Integer maxProperties, IJsonSchemaValidationProperties target) {

View File

@@ -14,11 +14,6 @@
{{> model_templates/schema }}
{{/unless}}
{{/each}}
{{#with not}}
{{#unless complexType}}
{{> model_templates/schema }}
{{/unless}}
{{/with}}
{{/with}}
{{#with composedSchemas}}
{{#if allOf}}
@@ -26,7 +21,7 @@
@classmethod
@property
@functools.cache
def _all_of(cls):
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
@@ -53,7 +48,7 @@ def _all_of(cls):
@classmethod
@property
@functools.cache
def _one_of(cls):
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
@@ -80,7 +75,7 @@ def _one_of(cls):
@classmethod
@property
@functools.cache
def _any_of(cls):
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
@@ -103,32 +98,16 @@ def _any_of(cls):
]
{{/if}}
{{#if not}}
{{#with not}}
{{#if complexType}}
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
{{#with not}}
{{#if complexType}}
{{complexType}}
def {{baseName}}(cls) -> typing.Type['{{complexType}}']:
return {{complexType}}
{{else}}
{{#if nameInSnakeCase}}
cls.{{name}}
{{else}}
cls.{{baseName}}
{{/if}}
{{/if}}
{{else}}
None
{{/with}}
)
{{> model_templates/schema }}
{{/if}}
{{/with}}
{{/if}}
{{/with}}

View File

@@ -31,16 +31,20 @@ class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}
{{/if}}
"""
{{/if}}
{{#if hasValidation}}
{{> model_templates/validations }}
{{/if}}
{{#or isMap isAnyType}}
{{> model_templates/dict_partial }}
{{/or}}
{{#if composedSchemas}}
{{#or hasValidation composedSchemas}}
{{> model_templates/composed_schemas }}
class MetaOapg:
{{#if hasValidation}}
{{> model_templates/validations }}
{{/if}}
{{#if composedSchemas}}
{{> model_templates/composed_schemas }}
{{/if}}
{{/or}}
{{#if isEnum}}
{{> model_templates/enums }}
{{/if}}

View File

@@ -14,10 +14,13 @@ class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}
{{/if}}
"""
{{/if}}
{{#if hasValidation}}
{{> model_templates/validations }}
{{/if}}
{{> model_templates/dict_partial }}
{{#if hasValidation}}
class MetaOapg:
{{> model_templates/validations }}
{{/if}}
{{> model_templates/new }}

View File

@@ -14,9 +14,6 @@ class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}
{{/if}}
"""
{{/if}}
{{#if hasValidation}}
{{> model_templates/validations }}
{{/if}}
{{#with items}}
{{#if complexType}}
@@ -27,4 +24,10 @@ class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}
{{else}}
{{> model_templates/schema }}
{{/if}}
{{/with}}
{{/with}}
{{#if hasValidation}}
class MetaOapg:
{{> model_templates/validations }}
{{/if}}

View File

@@ -18,10 +18,11 @@ class {{#if this.classname}}{{classname}}{{else}}{{#if nameInSnakeCase}}{{name}}
"""
{{/if}}
{{#if hasValidation}}
{{> model_templates/validations }}
class MetaOapg:
{{> model_templates/validations }}
{{/if}}
{{#if isEnum}}
{{> model_templates/enums }}
{{else}}
pass
{{/if}}

View File

@@ -1,32 +1,32 @@
{{#if getUniqueItems}}
_unique_items=True
{{/if}}
{{#neq getUniqueItemsBoolean null}}
unique_items = {{#if getUniqueItemsBoolean}}True{{else}}False{{/if}}
{{/neq}}
{{#neq maxLength null}}
_max_length={{maxLength}}
max_length = {{maxLength}}
{{/neq}}
{{#neq minLength null}}
_min_length={{minLength}}
min_length = {{minLength}}
{{/neq}}
{{#neq maxItems null}}
_max_items={{maxItems}}
max_items = {{maxItems}}
{{/neq}}
{{#neq minItems null}}
_min_items={{minItems}}
min_items = {{minItems}}
{{/neq}}
{{#neq maxProperties null }}
_max_properties={{maxProperties}}
max_properties = {{maxProperties}}
{{/neq}}
{{#neq minProperties null}}
_min_properties={{minProperties}}
min_properties = {{minProperties}}
{{/neq}}
{{#neq maximum null}}
_{{#if exclusiveMaximum}}exclusive_maximum{{/if}}inclusive_maximum{{#unless exclusiveMaximum}}{{/unless}}={{maximum}}
{{#if exclusiveMaximum}}exclusive_maximum{{/if}}inclusive_maximum{{#unless exclusiveMaximum}}{{/unless}} = {{maximum}}
{{/neq}}
{{#neq minimum null}}
_{{#if exclusiveMinimum}}exclusive_minimum{{/if}}inclusive_minimum{{#unless exclusiveMinimum}}{{/unless}}={{minimum}}
{{#if exclusiveMinimum}}exclusive_minimum{{/if}}inclusive_minimum{{#unless exclusiveMinimum}}{{/unless}} = {{minimum}}
{{/neq}}
{{#neq pattern null}}
_regex=[{
regex=[{
{{#if vendorExtensions.x-regex}}
'pattern': r'{{{vendorExtensions.x-regex}}}', # noqa: E501
{{else}}
@@ -44,5 +44,5 @@ _regex=[{
}]
{{/neq}}
{{#neq multipleOf null}}
_multiple_of={{multipleOf}}
multiple_of = {{multipleOf}}
{{/neq}}

View File

@@ -439,29 +439,31 @@ class StrBase(ValidatorBase):
arg: str,
validation_metadata: ValidationMetadata
):
if not hasattr(cls, 'MetaOapg'):
return
if (cls.is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and
hasattr(cls, '_max_length') and
len(arg) > cls._max_length):
hasattr(cls.MetaOapg, 'max_length') and
len(arg) > cls.MetaOapg.max_length):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="length must be less than or equal to",
constraint_value=cls._max_length,
constraint_value=cls.MetaOapg.max_length,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and
hasattr(cls, '_min_length') and
len(arg) < cls._min_length):
hasattr(cls.MetaOapg, 'min_length') and
len(arg) < cls.MetaOapg.min_length):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="length must be greater than or equal to",
constraint_value=cls._min_length,
constraint_value=cls.MetaOapg.min_length,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and
hasattr(cls, '_regex')):
for regex_dict in cls._regex:
hasattr(cls.MetaOapg, 'regex')):
for regex_dict in cls.MetaOapg.regex:
flags = regex_dict.get('flags', 0)
if not re.search(regex_dict['pattern'], arg, flags=flags):
if flags != 0:
@@ -698,9 +700,11 @@ class NumberBase(ValidatorBase):
arg,
validation_metadata: ValidationMetadata
):
if not hasattr(cls, 'MetaOapg'):
return
if cls.is_json_validation_enabled_oapg('multipleOf',
validation_metadata.configuration) and hasattr(cls, '_multiple_of'):
multiple_of_value = cls._multiple_of
validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'):
multiple_of_value = cls.MetaOapg.multiple_of
if (not (float(arg) / multiple_of_value).is_integer()):
# Note 'multipleOf' will be as good as the floating point arithmetic.
cls.raise_validation_error_message_oapg(
@@ -711,53 +715,53 @@ class NumberBase(ValidatorBase):
)
checking_max_or_min_values = any(
hasattr(cls, validation_key) for validation_key in {
'_exclusive_maximum',
'_inclusive_maximum',
'_exclusive_minimum',
'_inclusive_minimum',
hasattr(cls.MetaOapg, validation_key) for validation_key in {
'exclusive_maximum',
'inclusive_maximum',
'exclusive_minimum',
'inclusive_minimum',
}
)
if not checking_max_or_min_values:
return
if (cls.is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and
hasattr(cls, '_exclusive_maximum') and
arg >= cls._exclusive_maximum):
hasattr(cls.MetaOapg, 'exclusive_maximum') and
arg >= cls.MetaOapg.exclusive_maximum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value less than",
constraint_value=cls._exclusive_maximum,
constraint_value=cls.MetaOapg.exclusive_maximum,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and
hasattr(cls, '_inclusive_maximum') and
arg > cls._inclusive_maximum):
hasattr(cls.MetaOapg, 'inclusive_maximum') and
arg > cls.MetaOapg.inclusive_maximum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value less than or equal to",
constraint_value=cls._inclusive_maximum,
constraint_value=cls.MetaOapg.inclusive_maximum,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and
hasattr(cls, '_exclusive_minimum') and
arg <= cls._exclusive_minimum):
hasattr(cls.MetaOapg, 'exclusive_minimum') and
arg <= cls.MetaOapg.exclusive_minimum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value greater than",
constraint_value=cls._exclusive_maximum,
constraint_value=cls.MetaOapg.exclusive_maximum,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and
hasattr(cls, '_inclusive_minimum') and
arg < cls._inclusive_minimum):
hasattr(cls.MetaOapg, 'inclusive_minimum') and
arg < cls.MetaOapg.inclusive_minimum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value greater than or equal to",
constraint_value=cls._inclusive_minimum,
constraint_value=cls.MetaOapg.inclusive_minimum,
path_to_item=validation_metadata.path_to_item
)
@@ -814,29 +818,30 @@ class ListBase(ValidatorBase):
def __check_tuple_validations(
cls, arg,
validation_metadata: ValidationMetadata):
if not hasattr(cls, 'MetaOapg'):
return
if (cls.is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and
hasattr(cls, '_max_items') and
len(arg) > cls._max_items):
hasattr(cls.MetaOapg, 'max_items') and
len(arg) > cls.MetaOapg.max_items):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of items must be less than or equal to",
constraint_value=cls._max_items,
constraint_value=cls.MetaOapg.max_items,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and
hasattr(cls, '_min_items') and
len(arg) < cls._min_items):
hasattr(cls.MetaOapg, 'min_items') and
len(arg) < cls.MetaOapg.min_items):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of items must be greater than or equal to",
constraint_value=cls._min_items,
constraint_value=cls.MetaOapg.min_items,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and
hasattr(cls, '_unique_items') and cls._unique_items and arg):
hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg):
unique_items = set(arg)
if len(arg) > len(unique_items):
cls.raise_validation_error_message_oapg(
@@ -942,20 +947,26 @@ class Discriminable:
discriminated_cls = disc[disc_property_name].get(disc_payload_value)
if discriminated_cls is not None:
return discriminated_cls
elif not (hasattr(cls, '_all_of') or hasattr(cls, '_one_of') or hasattr(cls, '_any_of')):
if not hasattr(cls, 'MetaOapg'):
return None
elif not (
hasattr(cls.MetaOapg, 'all_of') or
hasattr(cls.MetaOapg, 'one_of') or
hasattr(cls.MetaOapg, 'any_of')
):
return None
# TODO stop traveling if a cycle is hit
for allof_cls in getattr(cls, '_all_of', []):
for allof_cls in getattr(cls.MetaOapg, 'all_of', []):
discriminated_cls = allof_cls._get_discriminated_class(
disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
if discriminated_cls is not None:
return discriminated_cls
for oneof_cls in getattr(cls, '_one_of', []):
for oneof_cls in getattr(cls.MetaOapg, 'one_of', []):
discriminated_cls = oneof_cls._get_discriminated_class(
disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
if discriminated_cls is not None:
return discriminated_cls
for anyof_cls in getattr(cls, '_any_of', []):
for anyof_cls in getattr(cls.MetaOapg, 'any_of', []):
discriminated_cls = anyof_cls._get_discriminated_class(
disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
if discriminated_cls is not None:
@@ -1061,23 +1072,25 @@ class DictBase(Discriminable, ValidatorBase):
arg,
validation_metadata: ValidationMetadata
):
if not hasattr(cls, 'MetaOapg'):
return
if (cls.is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and
hasattr(cls, '_max_properties') and
len(arg) > cls._max_properties):
hasattr(cls.MetaOapg, 'max_properties') and
len(arg) > cls.MetaOapg.max_properties):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of properties must be less than or equal to",
constraint_value=cls._max_properties,
constraint_value=cls.MetaOapg.max_properties,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and
hasattr(cls, '_min_properties') and
len(arg) < cls._min_properties):
hasattr(cls.MetaOapg, 'min_properties') and
len(arg) < cls.MetaOapg.min_properties):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of properties must be greater than or equal to",
constraint_value=cls._min_properties,
constraint_value=cls.MetaOapg.min_properties,
path_to_item=validation_metadata.path_to_item
)
@@ -1560,7 +1573,7 @@ class ComposedBase(Discriminable):
@classmethod
def __get_allof_classes(cls, arg, validation_metadata: ValidationMetadata):
path_to_schemas = defaultdict(set)
for allof_cls in cls._all_of:
for allof_cls in cls.MetaOapg.all_of:
if validation_metadata.validation_ran_earlier(allof_cls):
continue
other_path_to_schemas = allof_cls._validate(arg, validation_metadata=validation_metadata)
@@ -1576,7 +1589,7 @@ class ComposedBase(Discriminable):
):
oneof_classes = []
path_to_schemas = defaultdict(set)
for oneof_cls in cls._one_of:
for oneof_cls in cls.MetaOapg.one_of:
if oneof_cls in path_to_schemas[validation_metadata.path_to_item]:
oneof_classes.append(oneof_cls)
continue
@@ -1611,7 +1624,7 @@ class ComposedBase(Discriminable):
):
anyof_classes = []
path_to_schemas = defaultdict(set)
for anyof_cls in cls._any_of:
for anyof_cls in cls.MetaOapg.any_of:
if validation_metadata.validation_ran_earlier(anyof_cls):
anyof_classes.append(anyof_cls)
continue
@@ -1683,24 +1696,26 @@ class ComposedBase(Discriminable):
)
)
if hasattr(cls, '_all_of'):
if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'):
other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm)
update(path_to_schemas, other_path_to_schemas)
if hasattr(cls, '_one_of'):
if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'):
other_path_to_schemas = cls.__get_oneof_class(
arg,
discriminated_cls=discriminated_cls,
validation_metadata=updated_vm
)
update(path_to_schemas, other_path_to_schemas)
if hasattr(cls, '_any_of'):
if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'):
other_path_to_schemas = cls.__get_anyof_classes(
arg,
discriminated_cls=discriminated_cls,
validation_metadata=updated_vm
)
update(path_to_schemas, other_path_to_schemas)
not_cls = getattr(cls, '_not', None)
not_cls = None
if hasattr(cls, 'MetaOapg'):
not_cls = getattr(cls.MetaOapg, 'not_schema', None)
if not_cls:
other_path_to_schemas = None
not_exception = ApiValueError(
@@ -1841,8 +1856,10 @@ class IntSchema(IntBase, NumberSchema):
class Int32Base:
_inclusive_minimum = decimal.Decimal(-2147483648)
_inclusive_maximum = decimal.Decimal(2147483647)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-2147483648)
inclusive_maximum = decimal.Decimal(2147483647)
class Int32Schema(
@@ -1853,8 +1870,10 @@ class Int32Schema(
class Int64Base:
_inclusive_minimum = decimal.Decimal(-9223372036854775808)
_inclusive_maximum = decimal.Decimal(9223372036854775807)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-9223372036854775808)
inclusive_maximum = decimal.Decimal(9223372036854775807)
class Int64Schema(
@@ -1865,8 +1884,10 @@ class Int64Schema(
class Float32Base:
_inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
_inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
class Float32Schema(
@@ -1881,8 +1902,10 @@ class Float32Schema(
class Float64Base:
_inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
_inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
class Float64Schema(
@@ -1994,10 +2017,11 @@ class BinarySchema(
BinaryBase,
Schema,
):
_one_of = [
BytesSchema,
FileSchema,
]
class MetaOapg:
one_of = [
BytesSchema,
FileSchema,
]
def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: typing.Union[ValidationMetadata]):
return super().__new__(cls, arg)

View File

@@ -432,7 +432,7 @@ public class JavaModelTest {
Assert.assertEquals(property.containerType, "set");
Assert.assertFalse(property.required);
Assert.assertTrue(property.isContainer);
Assert.assertTrue(property.getUniqueItems());
Assert.assertTrue(property.getUniqueItemsBoolean());
}
@Test(description = "convert a model with an array property with item name")
public void arrayModelWithItemNameTest() {

View File

@@ -33,42 +33,44 @@ class AdditionalpropertiesShouldNotLookInApplicators(
"""
_additional_properties = schemas.BoolSchema
class all_of_0(
schemas.AnyTypeSchema,
):
foo = schemas.AnyTypeSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
class MetaOapg:
class all_of_0(
schemas.AnyTypeSchema,
):
foo = schemas.AnyTypeSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -32,70 +32,72 @@ class Allof(
Do not edit the class manually.
"""
class all_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
}
bar = schemas.IntSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
bar: bar,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
class MetaOapg:
class all_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
}
bar = schemas.IntSchema
def __new__(
cls,
*args,
bar=bar,
_configuration=_configuration,
**kwargs,
)
class all_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
bar: bar,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
cls,
*args,
bar=bar,
_configuration=_configuration,
**kwargs,
)
class all_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,108 +32,119 @@ class AllofCombinedWithAnyofOneof(
Do not edit the class manually.
"""
class all_of_0(
schemas.AnyTypeSchema,
):
_multiple_of=2
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
class MetaOapg:
class all_of_0(
schemas.AnyTypeSchema,
):
class MetaOapg:
multiple_of = 2
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_0(
schemas.AnyTypeSchema,
):
_multiple_of=5
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_0(
schemas.AnyTypeSchema,
):
class MetaOapg:
multiple_of = 5
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class any_of_0(
schemas.AnyTypeSchema,
):
_multiple_of=3
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class any_of_0(
schemas.AnyTypeSchema,
):
class MetaOapg:
multiple_of = 3
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
]
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
]
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
]
def __new__(
cls,

View File

@@ -32,60 +32,68 @@ class AllofSimpleTypes(
Do not edit the class manually.
"""
class all_of_0(
schemas.AnyTypeSchema,
):
_inclusive_maximum=30
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
class MetaOapg:
class all_of_0(
schemas.AnyTypeSchema,
):
class MetaOapg:
inclusive_maximum = 30
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class all_of_1(
schemas.AnyTypeSchema,
):
_inclusive_minimum=20
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class all_of_1(
schemas.AnyTypeSchema,
):
class MetaOapg:
inclusive_minimum = 20
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -36,70 +36,72 @@ class AllofWithBaseSchema(
}
bar = schemas.IntSchema
class all_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
class MetaOapg:
class all_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
class all_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"baz",
}
baz = schemas.NoneSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
baz: baz,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
class all_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"baz",
}
baz = schemas.NoneSchema
def __new__(
cls,
*args,
baz=baz,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
baz: baz,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
baz=baz,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,22 +32,24 @@ class AllofWithOneEmptySchema(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -32,24 +32,26 @@ class AllofWithTheFirstEmptySchema(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
all_of_1 = schemas.NumberSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
all_of_1 = schemas.NumberSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,24 +32,26 @@ class AllofWithTheLastEmptySchema(
Do not edit the class manually.
"""
all_of_0 = schemas.NumberSchema
all_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
class MetaOapg:
all_of_0 = schemas.NumberSchema
all_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,24 +32,26 @@ class AllofWithTwoEmptySchemas(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
all_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
all_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,42 +32,47 @@ class Anyof(
Do not edit the class manually.
"""
any_of_0 = schemas.IntSchema
class any_of_1(
schemas.AnyTypeSchema,
):
_inclusive_minimum=2
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_1':
return super().__new__(
class MetaOapg:
any_of_0 = schemas.IntSchema
class any_of_1(
schemas.AnyTypeSchema,
):
class MetaOapg:
inclusive_minimum = 2
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_1':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
def __new__(
cls,

View File

@@ -32,70 +32,72 @@ class AnyofComplexTypes(
Do not edit the class manually.
"""
class any_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
}
bar = schemas.IntSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
bar: bar,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
class MetaOapg:
class any_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
}
bar = schemas.IntSchema
def __new__(
cls,
*args,
bar=bar,
_configuration=_configuration,
**kwargs,
)
class any_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
bar: bar,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
cls,
*args,
bar=bar,
_configuration=_configuration,
**kwargs,
)
class any_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_1':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
def __new__(
cls,

View File

@@ -33,60 +33,68 @@ class AnyofWithBaseSchema(
Do not edit the class manually.
"""
class any_of_0(
schemas.AnyTypeSchema,
):
_max_length=2
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
class MetaOapg:
class any_of_0(
schemas.AnyTypeSchema,
):
class MetaOapg:
max_length = 2
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class any_of_1(
schemas.AnyTypeSchema,
):
_min_length=4
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class any_of_1(
schemas.AnyTypeSchema,
):
class MetaOapg:
min_length = 4
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_1':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
def __new__(
cls,

View File

@@ -32,24 +32,26 @@ class AnyofWithOneEmptySchema(
Do not edit the class manually.
"""
any_of_0 = schemas.NumberSchema
any_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
class MetaOapg:
any_of_0 = schemas.NumberSchema
any_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
]
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class ByInt(
Do not edit the class manually.
"""
_multiple_of=2
class MetaOapg:
multiple_of = 2
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class ByNumber(
Do not edit the class manually.
"""
_multiple_of=1.5
class MetaOapg:
multiple_of = 1.5
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class BySmallNumber(
Do not edit the class manually.
"""
_multiple_of=0.00010
class MetaOapg:
multiple_of = 0.00010
def __new__(
cls,

View File

@@ -37,22 +37,9 @@ class ForbiddenProperty(
schemas.ComposedSchema,
):
not_schema = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
class MetaOapg:
not_schema = schemas.AnyTypeSchema
def __new__(
cls,

View File

@@ -31,5 +31,7 @@ class InvalidInstanceShouldNotRaiseErrorWhenFloatDivisionInf(
Do not edit the class manually.
"""
_multiple_of=0.123456789
pass
class MetaOapg:
multiple_of = 0.123456789

View File

@@ -36,8 +36,10 @@ class InvalidStringValueForDefault(
class bar(
schemas.StrSchema
):
_min_length=4
pass
class MetaOapg:
min_length = 4
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MaximumValidation(
Do not edit the class manually.
"""
_inclusive_maximum=3.0
class MetaOapg:
inclusive_maximum = 3.0
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MaximumValidationWithUnsignedInteger(
Do not edit the class manually.
"""
_inclusive_maximum=300
class MetaOapg:
inclusive_maximum = 300
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MaxitemsValidation(
Do not edit the class manually.
"""
_max_items=2
class MetaOapg:
max_items = 2
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MaxlengthValidation(
Do not edit the class manually.
"""
_max_length=2
class MetaOapg:
max_length = 2
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class Maxproperties0MeansTheObjectIsEmpty(
Do not edit the class manually.
"""
_max_properties=0
class MetaOapg:
max_properties = 0
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MaxpropertiesValidation(
Do not edit the class manually.
"""
_max_properties=2
class MetaOapg:
max_properties = 2
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MinimumValidation(
Do not edit the class manually.
"""
_inclusive_minimum=1.1
class MetaOapg:
inclusive_minimum = 1.1
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MinimumValidationWithSignedInteger(
Do not edit the class manually.
"""
_inclusive_minimum=-2
class MetaOapg:
inclusive_minimum = -2
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MinitemsValidation(
Do not edit the class manually.
"""
_min_items=1
class MetaOapg:
min_items = 1
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MinlengthValidation(
Do not edit the class manually.
"""
_min_length=2
class MetaOapg:
min_length = 2
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class MinpropertiesValidation(
Do not edit the class manually.
"""
_min_properties=1
class MetaOapg:
min_properties = 1
def __new__(
cls,

View File

@@ -32,22 +32,9 @@ class ModelNot(
Do not edit the class manually.
"""
not_schema = schemas.IntSchema
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
class MetaOapg:
not_schema = schemas.IntSchema
def __new__(
cls,

View File

@@ -32,18 +32,50 @@ class NestedAllofToCheckValidationSemantics(
Do not edit the class manually.
"""
class all_of_0(
schemas.ComposedSchema,
):
all_of_0 = schemas.NoneSchema
class MetaOapg:
class all_of_0(
schemas.ComposedSchema,
):
class MetaOapg:
all_of_0 = schemas.NoneSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
@@ -54,34 +86,6 @@ class NestedAllofToCheckValidationSemantics(
return [
cls.all_of_0,
]
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -32,18 +32,50 @@ class NestedAnyofToCheckValidationSemantics(
Do not edit the class manually.
"""
class any_of_0(
schemas.ComposedSchema,
):
any_of_0 = schemas.NoneSchema
class MetaOapg:
class any_of_0(
schemas.ComposedSchema,
):
class MetaOapg:
any_of_0 = schemas.NoneSchema
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
]
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _any_of(cls):
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
@@ -54,34 +86,6 @@ class NestedAnyofToCheckValidationSemantics(
return [
cls.any_of_0,
]
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'any_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
]
def __new__(
cls,

View File

@@ -32,18 +32,50 @@ class NestedOneofToCheckValidationSemantics(
Do not edit the class manually.
"""
class one_of_0(
schemas.ComposedSchema,
):
one_of_0 = schemas.NoneSchema
class MetaOapg:
class one_of_0(
schemas.ComposedSchema,
):
class MetaOapg:
one_of_0 = schemas.NoneSchema
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
]
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _one_of(cls):
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
@@ -54,34 +86,6 @@ class NestedOneofToCheckValidationSemantics(
return [
cls.one_of_0,
]
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
]
def __new__(
cls,

View File

@@ -32,43 +32,30 @@ class NotMoreComplexSchema(
Do not edit the class manually.
"""
class not_schema(
schemas.DictSchema
):
foo = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'not_schema':
return super().__new__(
class MetaOapg:
class not_schema(
schemas.DictSchema
):
foo = schemas.StrSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
*args: typing.Union[dict, frozendict, ],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'not_schema':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
def __new__(
cls,

View File

@@ -32,42 +32,47 @@ class Oneof(
Do not edit the class manually.
"""
one_of_0 = schemas.IntSchema
class one_of_1(
schemas.AnyTypeSchema,
):
_inclusive_minimum=2
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
class MetaOapg:
one_of_0 = schemas.IntSchema
class one_of_1(
schemas.AnyTypeSchema,
):
class MetaOapg:
inclusive_minimum = 2
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
def __new__(
cls,

View File

@@ -32,70 +32,72 @@ class OneofComplexTypes(
Do not edit the class manually.
"""
class one_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
}
bar = schemas.IntSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
bar: bar,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
class MetaOapg:
class one_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
}
bar = schemas.IntSchema
def __new__(
cls,
*args,
bar=bar,
_configuration=_configuration,
**kwargs,
)
class one_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
bar: bar,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
cls,
*args,
bar=bar,
_configuration=_configuration,
**kwargs,
)
class one_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
}
foo = schemas.StrSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
foo: foo,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
def __new__(
cls,

View File

@@ -33,60 +33,68 @@ class OneofWithBaseSchema(
Do not edit the class manually.
"""
class one_of_0(
schemas.AnyTypeSchema,
):
_min_length=2
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
class MetaOapg:
class one_of_0(
schemas.AnyTypeSchema,
):
class MetaOapg:
min_length = 2
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_1(
schemas.AnyTypeSchema,
):
_max_length=4
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_1(
schemas.AnyTypeSchema,
):
class MetaOapg:
max_length = 4
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
def __new__(
cls,

View File

@@ -32,24 +32,26 @@ class OneofWithEmptySchema(
Do not edit the class manually.
"""
one_of_0 = schemas.NumberSchema
one_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
class MetaOapg:
one_of_0 = schemas.NumberSchema
one_of_1 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
def __new__(
cls,

View File

@@ -33,66 +33,68 @@ class OneofWithRequired(
Do not edit the class manually.
"""
class one_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
"foo",
}
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
class MetaOapg:
class one_of_0(
schemas.AnyTypeSchema,
):
_required_property_names = {
"bar",
"foo",
}
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
"baz",
}
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_0':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_1(
schemas.AnyTypeSchema,
):
_required_property_names = {
"foo",
"baz",
}
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_1':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
cls.one_of_1,
]
def __new__(
cls,

View File

@@ -31,9 +31,12 @@ class PatternIsNotAnchored(
Do not edit the class manually.
"""
_regex=[{
'pattern': r'a+', # noqa: E501
}]
class MetaOapg:
regex=[{
'pattern': r'a+', # noqa: E501
}]
def __new__(
cls,

View File

@@ -31,9 +31,12 @@ class PatternValidation(
Do not edit the class manually.
"""
_regex=[{
'pattern': r'^a*$', # noqa: E501
}]
class MetaOapg:
regex=[{
'pattern': r'^a*$', # noqa: E501
}]
def __new__(
cls,

View File

@@ -32,21 +32,23 @@ class RefInAllof(
Do not edit the class manually.
"""
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
PropertyNamedRefThatIsNotAReference,
]
class MetaOapg:
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
PropertyNamedRefThatIsNotAReference,
]
def __new__(
cls,

View File

@@ -32,21 +32,23 @@ class RefInAnyof(
Do not edit the class manually.
"""
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
PropertyNamedRefThatIsNotAReference,
]
class MetaOapg:
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
PropertyNamedRefThatIsNotAReference,
]
def __new__(
cls,

View File

@@ -32,21 +32,13 @@ class RefInNot(
Do not edit the class manually.
"""
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
PropertyNamedRefThatIsNotAReference
)
class MetaOapg:
@classmethod
@property
def not_schema(cls) -> typing.Type['PropertyNamedRefThatIsNotAReference']:
return PropertyNamedRefThatIsNotAReference
def __new__(
cls,

View File

@@ -32,21 +32,23 @@ class RefInOneof(
Do not edit the class manually.
"""
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
PropertyNamedRefThatIsNotAReference,
]
class MetaOapg:
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
PropertyNamedRefThatIsNotAReference,
]
def __new__(
cls,

View File

@@ -36,8 +36,10 @@ class TheDefaultKeywordDoesNotDoAnythingIfThePropertyIsMissing(
class alpha(
schemas.NumberSchema
):
_inclusive_maximum=3
pass
class MetaOapg:
inclusive_maximum = 3
def __new__(

View File

@@ -32,6 +32,10 @@ class UniqueitemsFalseValidation(
Do not edit the class manually.
"""
class MetaOapg:
unique_items = False
def __new__(
cls,
*args: typing.Union[dict, frozendict, str, date, datetime, int, float, decimal.Decimal, None, list, tuple, bytes],

View File

@@ -31,7 +31,10 @@ class UniqueitemsValidation(
Do not edit the class manually.
"""
_unique_items=True
class MetaOapg:
unique_items = True
def __new__(
cls,

View File

@@ -30,43 +30,30 @@ class SchemaForRequestBodyApplicationJson(
schemas.ComposedSchema,
):
class not_schema(
schemas.DictSchema
):
foo = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'not_schema':
return super().__new__(
class MetaOapg:
class not_schema(
schemas.DictSchema
):
foo = schemas.StrSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
*args: typing.Union[dict, frozendict, ],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'not_schema':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
def __new__(
cls,

View File

@@ -30,22 +30,9 @@ class SchemaForRequestBodyApplicationJson(
schemas.ComposedSchema,
):
not_schema = schemas.IntSchema
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
class MetaOapg:
not_schema = schemas.IntSchema
def __new__(
cls,

View File

@@ -32,21 +32,13 @@ class SchemaForRequestBodyApplicationJson(
schemas.ComposedSchema,
):
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
PropertyNamedRefThatIsNotAReference
)
class MetaOapg:
@classmethod
@property
def not_schema(cls) -> typing.Type['PropertyNamedRefThatIsNotAReference']:
return PropertyNamedRefThatIsNotAReference
def __new__(
cls,

View File

@@ -29,43 +29,30 @@ class SchemaFor200ResponseBodyApplicationJson(
schemas.ComposedSchema,
):
class not_schema(
schemas.DictSchema
):
foo = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'not_schema':
return super().__new__(
class MetaOapg:
class not_schema(
schemas.DictSchema
):
foo = schemas.StrSchema
def __new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
*args: typing.Union[dict, frozendict, ],
foo: typing.Union[foo, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'not_schema':
return super().__new__(
cls,
*args,
foo=foo,
_configuration=_configuration,
**kwargs,
)
def __new__(
cls,

View File

@@ -29,22 +29,9 @@ class SchemaFor200ResponseBodyApplicationJson(
schemas.ComposedSchema,
):
not_schema = schemas.IntSchema
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
class MetaOapg:
not_schema = schemas.IntSchema
def __new__(
cls,

View File

@@ -31,21 +31,13 @@ class SchemaFor200ResponseBodyApplicationJson(
schemas.ComposedSchema,
):
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
PropertyNamedRefThatIsNotAReference
)
class MetaOapg:
@classmethod
@property
def not_schema(cls) -> typing.Type['PropertyNamedRefThatIsNotAReference']:
return PropertyNamedRefThatIsNotAReference
def __new__(
cls,

View File

@@ -446,29 +446,31 @@ class StrBase(ValidatorBase):
arg: str,
validation_metadata: ValidationMetadata
):
if not hasattr(cls, 'MetaOapg'):
return
if (cls.is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and
hasattr(cls, '_max_length') and
len(arg) > cls._max_length):
hasattr(cls.MetaOapg, 'max_length') and
len(arg) > cls.MetaOapg.max_length):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="length must be less than or equal to",
constraint_value=cls._max_length,
constraint_value=cls.MetaOapg.max_length,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and
hasattr(cls, '_min_length') and
len(arg) < cls._min_length):
hasattr(cls.MetaOapg, 'min_length') and
len(arg) < cls.MetaOapg.min_length):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="length must be greater than or equal to",
constraint_value=cls._min_length,
constraint_value=cls.MetaOapg.min_length,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and
hasattr(cls, '_regex')):
for regex_dict in cls._regex:
hasattr(cls.MetaOapg, 'regex')):
for regex_dict in cls.MetaOapg.regex:
flags = regex_dict.get('flags', 0)
if not re.search(regex_dict['pattern'], arg, flags=flags):
if flags != 0:
@@ -705,9 +707,11 @@ class NumberBase(ValidatorBase):
arg,
validation_metadata: ValidationMetadata
):
if not hasattr(cls, 'MetaOapg'):
return
if cls.is_json_validation_enabled_oapg('multipleOf',
validation_metadata.configuration) and hasattr(cls, '_multiple_of'):
multiple_of_value = cls._multiple_of
validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'):
multiple_of_value = cls.MetaOapg.multiple_of
if (not (float(arg) / multiple_of_value).is_integer()):
# Note 'multipleOf' will be as good as the floating point arithmetic.
cls.raise_validation_error_message_oapg(
@@ -718,53 +722,53 @@ class NumberBase(ValidatorBase):
)
checking_max_or_min_values = any(
hasattr(cls, validation_key) for validation_key in {
'_exclusive_maximum',
'_inclusive_maximum',
'_exclusive_minimum',
'_inclusive_minimum',
hasattr(cls.MetaOapg, validation_key) for validation_key in {
'exclusive_maximum',
'inclusive_maximum',
'exclusive_minimum',
'inclusive_minimum',
}
)
if not checking_max_or_min_values:
return
if (cls.is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and
hasattr(cls, '_exclusive_maximum') and
arg >= cls._exclusive_maximum):
hasattr(cls.MetaOapg, 'exclusive_maximum') and
arg >= cls.MetaOapg.exclusive_maximum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value less than",
constraint_value=cls._exclusive_maximum,
constraint_value=cls.MetaOapg.exclusive_maximum,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and
hasattr(cls, '_inclusive_maximum') and
arg > cls._inclusive_maximum):
hasattr(cls.MetaOapg, 'inclusive_maximum') and
arg > cls.MetaOapg.inclusive_maximum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value less than or equal to",
constraint_value=cls._inclusive_maximum,
constraint_value=cls.MetaOapg.inclusive_maximum,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and
hasattr(cls, '_exclusive_minimum') and
arg <= cls._exclusive_minimum):
hasattr(cls.MetaOapg, 'exclusive_minimum') and
arg <= cls.MetaOapg.exclusive_minimum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value greater than",
constraint_value=cls._exclusive_maximum,
constraint_value=cls.MetaOapg.exclusive_maximum,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and
hasattr(cls, '_inclusive_minimum') and
arg < cls._inclusive_minimum):
hasattr(cls.MetaOapg, 'inclusive_minimum') and
arg < cls.MetaOapg.inclusive_minimum):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="must be a value greater than or equal to",
constraint_value=cls._inclusive_minimum,
constraint_value=cls.MetaOapg.inclusive_minimum,
path_to_item=validation_metadata.path_to_item
)
@@ -821,29 +825,30 @@ class ListBase(ValidatorBase):
def __check_tuple_validations(
cls, arg,
validation_metadata: ValidationMetadata):
if not hasattr(cls, 'MetaOapg'):
return
if (cls.is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and
hasattr(cls, '_max_items') and
len(arg) > cls._max_items):
hasattr(cls.MetaOapg, 'max_items') and
len(arg) > cls.MetaOapg.max_items):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of items must be less than or equal to",
constraint_value=cls._max_items,
constraint_value=cls.MetaOapg.max_items,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and
hasattr(cls, '_min_items') and
len(arg) < cls._min_items):
hasattr(cls.MetaOapg, 'min_items') and
len(arg) < cls.MetaOapg.min_items):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of items must be greater than or equal to",
constraint_value=cls._min_items,
constraint_value=cls.MetaOapg.min_items,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and
hasattr(cls, '_unique_items') and cls._unique_items and arg):
hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg):
unique_items = set(arg)
if len(arg) > len(unique_items):
cls.raise_validation_error_message_oapg(
@@ -949,20 +954,26 @@ class Discriminable:
discriminated_cls = disc[disc_property_name].get(disc_payload_value)
if discriminated_cls is not None:
return discriminated_cls
elif not (hasattr(cls, '_all_of') or hasattr(cls, '_one_of') or hasattr(cls, '_any_of')):
if not hasattr(cls, 'MetaOapg'):
return None
elif not (
hasattr(cls.MetaOapg, 'all_of') or
hasattr(cls.MetaOapg, 'one_of') or
hasattr(cls.MetaOapg, 'any_of')
):
return None
# TODO stop traveling if a cycle is hit
for allof_cls in getattr(cls, '_all_of', []):
for allof_cls in getattr(cls.MetaOapg, 'all_of', []):
discriminated_cls = allof_cls._get_discriminated_class(
disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
if discriminated_cls is not None:
return discriminated_cls
for oneof_cls in getattr(cls, '_one_of', []):
for oneof_cls in getattr(cls.MetaOapg, 'one_of', []):
discriminated_cls = oneof_cls._get_discriminated_class(
disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
if discriminated_cls is not None:
return discriminated_cls
for anyof_cls in getattr(cls, '_any_of', []):
for anyof_cls in getattr(cls.MetaOapg, 'any_of', []):
discriminated_cls = anyof_cls._get_discriminated_class(
disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
if discriminated_cls is not None:
@@ -1068,23 +1079,25 @@ class DictBase(Discriminable, ValidatorBase):
arg,
validation_metadata: ValidationMetadata
):
if not hasattr(cls, 'MetaOapg'):
return
if (cls.is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and
hasattr(cls, '_max_properties') and
len(arg) > cls._max_properties):
hasattr(cls.MetaOapg, 'max_properties') and
len(arg) > cls.MetaOapg.max_properties):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of properties must be less than or equal to",
constraint_value=cls._max_properties,
constraint_value=cls.MetaOapg.max_properties,
path_to_item=validation_metadata.path_to_item
)
if (cls.is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and
hasattr(cls, '_min_properties') and
len(arg) < cls._min_properties):
hasattr(cls.MetaOapg, 'min_properties') and
len(arg) < cls.MetaOapg.min_properties):
cls.raise_validation_error_message_oapg(
value=arg,
constraint_msg="number of properties must be greater than or equal to",
constraint_value=cls._min_properties,
constraint_value=cls.MetaOapg.min_properties,
path_to_item=validation_metadata.path_to_item
)
@@ -1567,7 +1580,7 @@ class ComposedBase(Discriminable):
@classmethod
def __get_allof_classes(cls, arg, validation_metadata: ValidationMetadata):
path_to_schemas = defaultdict(set)
for allof_cls in cls._all_of:
for allof_cls in cls.MetaOapg.all_of:
if validation_metadata.validation_ran_earlier(allof_cls):
continue
other_path_to_schemas = allof_cls._validate(arg, validation_metadata=validation_metadata)
@@ -1583,7 +1596,7 @@ class ComposedBase(Discriminable):
):
oneof_classes = []
path_to_schemas = defaultdict(set)
for oneof_cls in cls._one_of:
for oneof_cls in cls.MetaOapg.one_of:
if oneof_cls in path_to_schemas[validation_metadata.path_to_item]:
oneof_classes.append(oneof_cls)
continue
@@ -1618,7 +1631,7 @@ class ComposedBase(Discriminable):
):
anyof_classes = []
path_to_schemas = defaultdict(set)
for anyof_cls in cls._any_of:
for anyof_cls in cls.MetaOapg.any_of:
if validation_metadata.validation_ran_earlier(anyof_cls):
anyof_classes.append(anyof_cls)
continue
@@ -1690,24 +1703,26 @@ class ComposedBase(Discriminable):
)
)
if hasattr(cls, '_all_of'):
if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'):
other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm)
update(path_to_schemas, other_path_to_schemas)
if hasattr(cls, '_one_of'):
if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'):
other_path_to_schemas = cls.__get_oneof_class(
arg,
discriminated_cls=discriminated_cls,
validation_metadata=updated_vm
)
update(path_to_schemas, other_path_to_schemas)
if hasattr(cls, '_any_of'):
if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'):
other_path_to_schemas = cls.__get_anyof_classes(
arg,
discriminated_cls=discriminated_cls,
validation_metadata=updated_vm
)
update(path_to_schemas, other_path_to_schemas)
not_cls = getattr(cls, '_not', None)
not_cls = None
if hasattr(cls, 'MetaOapg'):
not_cls = getattr(cls.MetaOapg, 'not_schema', None)
if not_cls:
other_path_to_schemas = None
not_exception = ApiValueError(
@@ -1848,8 +1863,10 @@ class IntSchema(IntBase, NumberSchema):
class Int32Base:
_inclusive_minimum = decimal.Decimal(-2147483648)
_inclusive_maximum = decimal.Decimal(2147483647)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-2147483648)
inclusive_maximum = decimal.Decimal(2147483647)
class Int32Schema(
@@ -1860,8 +1877,10 @@ class Int32Schema(
class Int64Base:
_inclusive_minimum = decimal.Decimal(-9223372036854775808)
_inclusive_maximum = decimal.Decimal(9223372036854775807)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-9223372036854775808)
inclusive_maximum = decimal.Decimal(9223372036854775807)
class Int64Schema(
@@ -1872,8 +1891,10 @@ class Int64Schema(
class Float32Base:
_inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
_inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
class Float32Schema(
@@ -1888,8 +1909,10 @@ class Float32Schema(
class Float64Base:
_inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
_inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
# TODO make this run even if the inheriting class defines these
class MetaOapg:
inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
class Float64Schema(
@@ -2001,10 +2024,11 @@ class BinarySchema(
BinaryBase,
Schema,
):
_one_of = [
BytesSchema,
FileSchema,
]
class MetaOapg:
one_of = [
BytesSchema,
FileSchema,
]
def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: typing.Union[ValidationMetadata]):
return super().__new__(cls, arg)

View File

@@ -32,22 +32,9 @@ class AnyTypeNotString(
Do not edit the class manually.
"""
not_schema = schemas.StrSchema
@classmethod
@property
@functools.cache
def _not(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return (
cls.not_schema
)
class MetaOapg:
not_schema = schemas.StrSchema
def __new__(
cls,

View File

@@ -42,22 +42,26 @@ class Apple(
class cultivar(
schemas.StrSchema
):
_regex=[{
'pattern': r'^[a-zA-Z\s]*$', # noqa: E501
}]
pass
class MetaOapg:
regex=[{
'pattern': r'^[a-zA-Z\s]*$', # noqa: E501
}]
class origin(
schemas.StrSchema
):
_regex=[{
'pattern': r'^[A-Z\s]*$', # noqa: E501
'flags': (
re.IGNORECASE
)
}]
pass
class MetaOapg:
regex=[{
'pattern': r'^[A-Z\s]*$', # noqa: E501
'flags': (
re.IGNORECASE
)
}]
def __new__(
cls,

View File

@@ -31,11 +31,16 @@ class ArrayWithValidationsInItems(
Do not edit the class manually.
"""
_max_items=2
class _items(
schemas.Int64Schema
):
_inclusive_maximum=7
pass
class MetaOapg:
inclusive_maximum = 7
class MetaOapg:
max_items = 2

View File

@@ -32,44 +32,46 @@ class Cat(
Do not edit the class manually.
"""
class all_of_1(
schemas.DictSchema
):
declawed = schemas.BoolSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
declawed: typing.Union[declawed, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
class MetaOapg:
class all_of_1(
schemas.DictSchema
):
declawed = schemas.BoolSchema
def __new__(
cls,
*args,
declawed=declawed,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Animal,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, ],
declawed: typing.Union[declawed, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
declawed=declawed,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Animal,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,44 +32,46 @@ class ChildCat(
Do not edit the class manually.
"""
class all_of_1(
schemas.DictSchema
):
name = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
name: typing.Union[name, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
class MetaOapg:
class all_of_1(
schemas.DictSchema
):
name = schemas.StrSchema
def __new__(
cls,
*args,
name=name,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
ParentPet,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, ],
name: typing.Union[name, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
name=name,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
ParentPet,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,58 +32,60 @@ class ComplexQuadrilateral(
Do not edit the class manually.
"""
class all_of_1(
schemas.DictSchema
):
class MetaOapg:
class quadrilateralType(
schemas.SchemaEnumMakerClsFactory(
enum_value_to_name={
"ComplexQuadrilateral": "COMPLEX_QUADRILATERAL",
}
),
schemas.StrSchema
class all_of_1(
schemas.DictSchema
):
@classmethod
@property
def COMPLEX_QUADRILATERAL(cls):
return cls("ComplexQuadrilateral")
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
quadrilateralType: typing.Union[quadrilateralType, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
class quadrilateralType(
schemas.SchemaEnumMakerClsFactory(
enum_value_to_name={
"ComplexQuadrilateral": "COMPLEX_QUADRILATERAL",
}
),
schemas.StrSchema
):
@classmethod
@property
def COMPLEX_QUADRILATERAL(cls):
return cls("ComplexQuadrilateral")
def __new__(
cls,
*args,
quadrilateralType=quadrilateralType,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
QuadrilateralInterface,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, ],
quadrilateralType: typing.Union[quadrilateralType, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
quadrilateralType=quadrilateralType,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
QuadrilateralInterface,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,57 +32,59 @@ class ComposedAnyOfDifferentTypesNoValidations(
Do not edit the class manually.
"""
any_of_0 = schemas.DictSchema
any_of_1 = schemas.DateSchema
any_of_2 = schemas.DateTimeSchema
any_of_3 = schemas.BinarySchema
any_of_4 = schemas.StrSchema
any_of_5 = schemas.StrSchema
any_of_6 = schemas.DictSchema
any_of_7 = schemas.BoolSchema
any_of_8 = schemas.NoneSchema
class any_of_9(
schemas.ListSchema
):
_items = schemas.AnyTypeSchema
any_of_10 = schemas.NumberSchema
any_of_11 = schemas.Float32Schema
any_of_12 = schemas.Float64Schema
any_of_13 = schemas.IntSchema
any_of_14 = schemas.Int32Schema
any_of_15 = schemas.Int64Schema
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
cls.any_of_2,
cls.any_of_3,
cls.any_of_4,
cls.any_of_5,
cls.any_of_6,
cls.any_of_7,
cls.any_of_8,
cls.any_of_9,
cls.any_of_10,
cls.any_of_11,
cls.any_of_12,
cls.any_of_13,
cls.any_of_14,
cls.any_of_15,
]
class MetaOapg:
any_of_0 = schemas.DictSchema
any_of_1 = schemas.DateSchema
any_of_2 = schemas.DateTimeSchema
any_of_3 = schemas.BinarySchema
any_of_4 = schemas.StrSchema
any_of_5 = schemas.StrSchema
any_of_6 = schemas.DictSchema
any_of_7 = schemas.BoolSchema
any_of_8 = schemas.NoneSchema
class any_of_9(
schemas.ListSchema
):
_items = schemas.AnyTypeSchema
any_of_10 = schemas.NumberSchema
any_of_11 = schemas.Float32Schema
any_of_12 = schemas.Float64Schema
any_of_13 = schemas.IntSchema
any_of_14 = schemas.Int32Schema
any_of_15 = schemas.Int64Schema
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.any_of_0,
cls.any_of_1,
cls.any_of_2,
cls.any_of_3,
cls.any_of_4,
cls.any_of_5,
cls.any_of_6,
cls.any_of_7,
cls.any_of_8,
cls.any_of_9,
cls.any_of_10,
cls.any_of_11,
cls.any_of_12,
cls.any_of_13,
cls.any_of_14,
cls.any_of_15,
]
def __new__(
cls,

View File

@@ -33,22 +33,24 @@ class ComposedBool(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -33,22 +33,24 @@ class ComposedNone(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -33,22 +33,24 @@ class ComposedNumber(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -33,22 +33,24 @@ class ComposedObject(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -34,67 +34,77 @@ class ComposedOneOfDifferentTypes(
this is a model that allows payloads of type object or number
"""
one_of_2 = schemas.NoneSchema
one_of_3 = schemas.DateSchema
class one_of_4(
schemas.DictSchema
):
_max_properties=4
_min_properties=4
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_4':
return super().__new__(
class MetaOapg:
one_of_2 = schemas.NoneSchema
one_of_3 = schemas.DateSchema
class one_of_4(
schemas.DictSchema
):
class MetaOapg:
max_properties = 4
min_properties = 4
def __new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_5(
schemas.ListSchema
):
_max_items=4
_min_items=4
_items = schemas.AnyTypeSchema
class one_of_6(
schemas.DateTimeSchema
):
_regex=[{
'pattern': r'^2020.*', # noqa: E501
}]
pass
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
NumberWithValidations,
Animal,
cls.one_of_2,
cls.one_of_3,
cls.one_of_4,
cls.one_of_5,
cls.one_of_6,
]
*args: typing.Union[dict, frozendict, ],
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'one_of_4':
return super().__new__(
cls,
*args,
_configuration=_configuration,
**kwargs,
)
class one_of_5(
schemas.ListSchema
):
_items = schemas.AnyTypeSchema
class MetaOapg:
max_items = 4
min_items = 4
class one_of_6(
schemas.DateTimeSchema
):
class MetaOapg:
regex=[{
'pattern': r'^2020.*', # noqa: E501
}]
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
NumberWithValidations,
Animal,
cls.one_of_2,
cls.one_of_3,
cls.one_of_4,
cls.one_of_5,
cls.one_of_6,
]
def __new__(
cls,

View File

@@ -33,22 +33,24 @@ class ComposedString(
Do not edit the class manually.
"""
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
class MetaOapg:
all_of_0 = schemas.AnyTypeSchema
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -31,7 +31,9 @@ class DateTimeWithValidations(
Do not edit the class manually.
"""
_regex=[{
'pattern': r'^2020.*', # noqa: E501
}]
pass
class MetaOapg:
regex=[{
'pattern': r'^2020.*', # noqa: E501
}]

View File

@@ -31,7 +31,9 @@ class DateWithValidations(
Do not edit the class manually.
"""
_regex=[{
'pattern': r'^2020.*', # noqa: E501
}]
pass
class MetaOapg:
regex=[{
'pattern': r'^2020.*', # noqa: E501
}]

View File

@@ -32,44 +32,46 @@ class Dog(
Do not edit the class manually.
"""
class all_of_1(
schemas.DictSchema
):
breed = schemas.StrSchema
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
breed: typing.Union[breed, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
class MetaOapg:
class all_of_1(
schemas.DictSchema
):
breed = schemas.StrSchema
def __new__(
cls,
*args,
breed=breed,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Animal,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, ],
breed: typing.Union[breed, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
breed=breed,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Animal,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -32,58 +32,60 @@ class EquilateralTriangle(
Do not edit the class manually.
"""
class all_of_1(
schemas.DictSchema
):
class MetaOapg:
class triangleType(
schemas.SchemaEnumMakerClsFactory(
enum_value_to_name={
"EquilateralTriangle": "EQUILATERAL_TRIANGLE",
}
),
schemas.StrSchema
class all_of_1(
schemas.DictSchema
):
@classmethod
@property
def EQUILATERAL_TRIANGLE(cls):
return cls("EquilateralTriangle")
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
triangleType: typing.Union[triangleType, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
class triangleType(
schemas.SchemaEnumMakerClsFactory(
enum_value_to_name={
"EquilateralTriangle": "EQUILATERAL_TRIANGLE",
}
),
schemas.StrSchema
):
@classmethod
@property
def EQUILATERAL_TRIANGLE(cls):
return cls("EquilateralTriangle")
def __new__(
cls,
*args,
triangleType=triangleType,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
TriangleInterface,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, ],
triangleType: typing.Union[triangleType, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
triangleType=triangleType,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
TriangleInterface,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -42,37 +42,45 @@ class FormatTest(
class integer(
schemas.IntSchema
):
_inclusive_maximum=100
_inclusive_minimum=10
_multiple_of=2
pass
class MetaOapg:
inclusive_maximum = 100
inclusive_minimum = 10
multiple_of = 2
int32 = schemas.Int32Schema
class int32withValidations(
schemas.Int32Schema
):
_inclusive_maximum=200
_inclusive_minimum=20
pass
class MetaOapg:
inclusive_maximum = 200
inclusive_minimum = 20
int64 = schemas.Int64Schema
class number(
schemas.NumberSchema
):
_inclusive_maximum=543.2
_inclusive_minimum=32.1
_multiple_of=32.5
pass
class MetaOapg:
inclusive_maximum = 543.2
inclusive_minimum = 32.1
multiple_of = 32.5
class _float(
schemas.Float32Schema
):
_inclusive_maximum=987.6
_inclusive_minimum=54.3
pass
class MetaOapg:
inclusive_maximum = 987.6
inclusive_minimum = 54.3
locals()["float"] = _float
del locals()['_float']
"""
@@ -91,29 +99,36 @@ class FormatTest(
class double(
schemas.Float64Schema
):
_inclusive_maximum=123.4
_inclusive_minimum=67.8
pass
class MetaOapg:
inclusive_maximum = 123.4
inclusive_minimum = 67.8
float64 = schemas.Float64Schema
class arrayWithUniqueItems(
schemas.ListSchema
):
_unique_items=True
_items = schemas.NumberSchema
class MetaOapg:
unique_items = True
class string(
schemas.StrSchema
):
_regex=[{
'pattern': r'[a-z]', # noqa: E501
'flags': (
re.IGNORECASE
)
}]
pass
class MetaOapg:
regex=[{
'pattern': r'[a-z]', # noqa: E501
'flags': (
re.IGNORECASE
)
}]
byte = schemas.StrSchema
binary = schemas.BinarySchema
date = schemas.DateSchema
@@ -125,30 +140,36 @@ class FormatTest(
class password(
schemas.StrSchema
):
_max_length=64
_min_length=10
pass
class MetaOapg:
max_length = 64
min_length = 10
class pattern_with_digits(
schemas.StrSchema
):
_regex=[{
'pattern': r'^\d{10}$', # noqa: E501
}]
pass
class MetaOapg:
regex=[{
'pattern': r'^\d{10}$', # noqa: E501
}]
class pattern_with_digits_and_delimiter(
schemas.StrSchema
):
_regex=[{
'pattern': r'^image_\d{1,3}$', # noqa: E501
'flags': (
re.IGNORECASE
)
}]
pass
class MetaOapg:
regex=[{
'pattern': r'^image_\d{1,3}$', # noqa: E501
'flags': (
re.IGNORECASE
)
}]
noneProp = schemas.NoneSchema

View File

@@ -33,22 +33,24 @@ class Fruit(
"""
color = schemas.StrSchema
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Apple,
Banana,
]
class MetaOapg:
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Apple,
Banana,
]
def __new__(
cls,

View File

@@ -32,24 +32,26 @@ class FruitReq(
Do not edit the class manually.
"""
one_of_0 = schemas.NoneSchema
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
AppleReq,
BananaReq,
]
class MetaOapg:
one_of_0 = schemas.NoneSchema
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.one_of_0,
AppleReq,
BananaReq,
]
def __new__(
cls,

View File

@@ -33,22 +33,24 @@ class GmFruit(
"""
color = schemas.StrSchema
@classmethod
@property
@functools.cache
def _any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Apple,
Banana,
]
class MetaOapg:
@classmethod
@property
@functools.cache
def any_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Apple,
Banana,
]
def __new__(
cls,

View File

@@ -31,5 +31,7 @@ class IntegerMax10(
Do not edit the class manually.
"""
_inclusive_maximum=10
pass
class MetaOapg:
inclusive_maximum = 10

View File

@@ -31,5 +31,7 @@ class IntegerMin15(
Do not edit the class manually.
"""
_inclusive_minimum=15
pass
class MetaOapg:
inclusive_minimum = 15

View File

@@ -32,58 +32,60 @@ class IsoscelesTriangle(
Do not edit the class manually.
"""
class all_of_1(
schemas.DictSchema
):
class MetaOapg:
class triangleType(
schemas.SchemaEnumMakerClsFactory(
enum_value_to_name={
"IsoscelesTriangle": "ISOSCELES_TRIANGLE",
}
),
schemas.StrSchema
class all_of_1(
schemas.DictSchema
):
@classmethod
@property
def ISOSCELES_TRIANGLE(cls):
return cls("IsoscelesTriangle")
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
triangleType: typing.Union[triangleType, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
class triangleType(
schemas.SchemaEnumMakerClsFactory(
enum_value_to_name={
"IsoscelesTriangle": "ISOSCELES_TRIANGLE",
}
),
schemas.StrSchema
):
@classmethod
@property
def ISOSCELES_TRIANGLE(cls):
return cls("IsoscelesTriangle")
def __new__(
cls,
*args,
triangleType=triangleType,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
TriangleInterface,
cls.all_of_1,
]
*args: typing.Union[dict, frozendict, ],
triangleType: typing.Union[triangleType, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Type[schemas.Schema],
) -> 'all_of_1':
return super().__new__(
cls,
*args,
triangleType=triangleType,
_configuration=_configuration,
**kwargs,
)
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
TriangleInterface,
cls.all_of_1,
]
def __new__(
cls,

View File

@@ -43,23 +43,25 @@ class Mammal(
}
}
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Whale,
Zebra,
Pig,
]
class MetaOapg:
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Whale,
Zebra,
Pig,
]
def __new__(
cls,

View File

@@ -34,24 +34,26 @@ class NullableShape(
The value may be a shape or the 'null' value. For a composed schema to validate a null payload, one of its chosen oneOf schemas must be type null or nullable (introduced in OAS schema >= 3.0)
"""
one_of_2 = schemas.NoneSchema
@classmethod
@property
@functools.cache
def _one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Triangle,
Quadrilateral,
cls.one_of_2,
]
class MetaOapg:
one_of_2 = schemas.NoneSchema
@classmethod
@property
@functools.cache
def one_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
Triangle,
Quadrilateral,
cls.one_of_2,
]
def __new__(
cls,

View File

@@ -31,6 +31,8 @@ class NumberWithValidations(
Do not edit the class manually.
"""
_inclusive_maximum=20
_inclusive_minimum=10
pass
class MetaOapg:
inclusive_maximum = 20
inclusive_minimum = 10

View File

@@ -37,28 +37,32 @@ class ObjectWithInlineCompositionProperty(
schemas.ComposedSchema,
):
class all_of_0(
schemas.StrSchema
):
_min_length=1
pass
@classmethod
@property
@functools.cache
def _all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
class MetaOapg:
class all_of_0(
schemas.StrSchema
):
class MetaOapg:
min_length = 1
@classmethod
@property
@functools.cache
def all_of(cls):
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return [
cls.all_of_0,
]
def __new__(
cls,

View File

@@ -31,7 +31,10 @@ class ObjectWithValidations(
Do not edit the class manually.
"""
_min_properties=2
class MetaOapg:
min_properties = 2
def __new__(

Some files were not shown because too many files have changed in this diff Show More