Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions src/datamodel_code_generator/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1401,10 +1401,12 @@ def main(args: Sequence[str] | None = None) -> Exit: # noqa: PLR0911, PLR0912,
print(f"Unable to load alias mapping: {e}", file=sys.stderr) # noqa: T201
return Exit.ERROR
if not isinstance(aliases, dict) or not all(
isinstance(k, str) and isinstance(v, str) for k, v in aliases.items()
isinstance(k, str) and (isinstance(v, str) or (isinstance(v, list) and all(isinstance(i, str) for i in v)))
for k, v in aliases.items()
):
print( # noqa: T201
'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
"Alias mapping must be a JSON mapping with string keys and string or list of strings values "
'(e.g. {"from": "to", "field": ["alias1", "alias2"]})',
file=sys.stderr,
)
return Exit.ERROR
Expand Down
3 changes: 2 additions & 1 deletion src/datamodel_code_generator/arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,8 @@ def start_section(self, heading: str | None) -> None:
"Flat: {'field': 'alias'} applies to all occurrences. "
"Scoped: {'ClassName.field': 'alias'} applies to specific class. "
"Priority: scoped > flat. "
"Example: {'User.name': 'user_name', 'Address.name': 'addr_name', 'id': 'id_'}",
"Multiple aliases (Pydantic v2 only): {'field': ['alias1', 'alias2']} uses AliasChoices for validation. "
"Example: {'User.name': 'user_name', 'id': 'id_', 'field': ['my-field', 'my_field']}",
type=Path,
)
template_options.add_argument(
Expand Down
1 change: 1 addition & 0 deletions src/datamodel_code_generator/model/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@ class Config:
default: Optional[Any] = None # noqa: UP045
required: bool = False
alias: Optional[str] = None # noqa: UP045
validation_aliases: Optional[list[str]] = None # noqa: UP045 # Multiple aliases for Pydantic v2 AliasChoices
data_type: DataType
constraints: Any = None
strip_default_none: bool = False
Expand Down
31 changes: 31 additions & 0 deletions src/datamodel_code_generator/model/pydantic_v2/base_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
DataModelField as DataModelFieldV1,
)
from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_BASE_MODEL, IMPORT_CONFIG_DICT
from datamodel_code_generator.types import chain_as_tuple
from datamodel_code_generator.util import field_validator, model_validate, model_validator

if TYPE_CHECKING:
Expand All @@ -32,6 +33,18 @@
from datamodel_code_generator.reference import Reference


class _RawRepr:
"""Wrapper to prevent repr() from adding quotes around a value."""

__slots__ = ("value",)

def __init__(self, value: str) -> None:
self.value = value

def __repr__(self) -> str:
return self.value


class Constraints(_Constraints):
"""Pydantic v2 field constraints with pattern support."""

Expand Down Expand Up @@ -137,6 +150,14 @@ def _process_data_in_str(self, data: dict[str, Any]) -> None:
else:
data.pop("union_mode")

# Handle multiple aliases using AliasChoices (Pydantic v2 feature)
if self.validation_aliases:
# Remove single alias if present (validation_aliases takes precedence)
data.pop("alias", None)
# Format as AliasChoices(...) - use _RawRepr to prevent double-quoting
aliases_repr = ", ".join(repr(a) for a in self.validation_aliases)
data["validation_alias"] = _RawRepr(f"AliasChoices({aliases_repr})")

# **extra is not supported in pydantic 2.0
json_schema_extra = {k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS}
if json_schema_extra:
Expand All @@ -150,6 +171,16 @@ def _process_annotated_field_arguments( # noqa: PLR6301
) -> list[str]:
return field_arguments

@property
def imports(self) -> tuple[Import, ...]:
    """Return the imports this field needs.

    Extends the inherited import set with ``pydantic.AliasChoices`` when the
    field declares multiple validation aliases, since the rendered code then
    contains an ``AliasChoices(...)`` expression.
    """
    if not self.validation_aliases:
        return super().imports

    # Imported lazily to avoid a circular import at module load time.
    from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_ALIAS_CHOICES  # noqa: PLC0415

    return chain_as_tuple(super().imports, (IMPORT_ALIAS_CHOICES,))


class ConfigAttribute(NamedTuple):
"""Configuration attribute mapping for ConfigDict conversion."""
Expand Down
1 change: 1 addition & 0 deletions src/datamodel_code_generator/model/pydantic_v2/imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

IMPORT_BASE_MODEL = Import.from_full_path("pydantic.BaseModel")
IMPORT_CONFIG_DICT = Import.from_full_path("pydantic.ConfigDict")
IMPORT_ALIAS_CHOICES = Import.from_full_path("pydantic.AliasChoices")
IMPORT_AWARE_DATETIME = Import.from_full_path("pydantic.AwareDatetime")
IMPORT_NAIVE_DATETIME = Import.from_full_path("pydantic.NaiveDatetime")
IMPORT_PAST_DATETIME = Import.from_full_path("pydantic.PastDatetime")
Expand Down
10 changes: 9 additions & 1 deletion src/datamodel_code_generator/parser/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -1489,12 +1489,20 @@ def check_paths(
new_data_type = self._create_discriminator_data_type(
enum_from_base, type_names, discriminator_model, imports
)
# Handle multiple aliases (Pydantic v2 AliasChoices)
single_alias: str | None = None
validation_aliases: list[str] | None = None
if isinstance(alias, list):
validation_aliases = alias
else:
single_alias = alias
discriminator_model.fields.append(
self.data_model_field_type(
name=field_name,
data_type=new_data_type,
required=True,
alias=alias,
alias=single_alias,
validation_aliases=validation_aliases,
)
)
has_imported_literal = any(import_ == IMPORT_LITERAL for import_ in imports)
Expand Down
12 changes: 10 additions & 2 deletions src/datamodel_code_generator/parser/graphql.py
Original file line number Diff line number Diff line change
Expand Up @@ -559,7 +559,7 @@ def parse_enum_as_enum_class(self, enum_object: graphql.GraphQLEnumType) -> None
def parse_field(
self,
field_name: str,
alias: str | None,
alias: str | list[str] | None,
field: graphql.GraphQLField | graphql.GraphQLInputField,
) -> DataModelFieldBase:
"""Parse a GraphQL field and return a data model field."""
Expand Down Expand Up @@ -604,13 +604,21 @@ def parse_field(
if field.description is not None: # pragma: no cover
extras["description"] = field.description

# Handle multiple aliases (Pydantic v2 AliasChoices)
single_alias: str | None = None
validation_aliases: list[str] | None = None
if isinstance(alias, list):
validation_aliases = alias
else:
single_alias = alias
return self.data_model_field_type(
name=field_name,
default=default,
data_type=final_data_type,
required=required,
extras=extras,
alias=alias,
alias=single_alias,
validation_aliases=validation_aliases,
strip_default_none=self.strip_default_none,
use_annotated=self.use_annotated,
use_serialize_as_any=self.use_serialize_as_any,
Expand Down
34 changes: 29 additions & 5 deletions src/datamodel_code_generator/parser/jsonschema.py
Original file line number Diff line number Diff line change
Expand Up @@ -1107,7 +1107,7 @@ def get_object_field( # noqa: PLR0913
field: JsonSchemaObject,
required: bool,
field_type: DataType,
alias: str | None,
alias: str | list[str] | None,
original_field_name: str | None,
) -> DataModelFieldBase:
"""Create a data model field from a JSON Schema object field."""
Expand All @@ -1118,12 +1118,20 @@ def get_object_field( # noqa: PLR0913
if constraints and self._is_fixed_length_tuple(field):
constraints.pop("minItems", None)
constraints.pop("maxItems", None)
# Handle multiple aliases (Pydantic v2 AliasChoices)
single_alias: str | None = None
validation_aliases: list[str] | None = None
if isinstance(alias, list):
validation_aliases = alias
else:
single_alias = alias
return self.data_model_field_type(
name=field_name,
default=field.default,
data_type=field_type,
required=required,
alias=alias,
alias=single_alias,
validation_aliases=validation_aliases,
constraints=constraints,
nullable=field.nullable
if self.strict_nullable and field.nullable is not None
Expand Down Expand Up @@ -2024,7 +2032,7 @@ def _parse_object_common_part( # noqa: PLR0912, PLR0913, PLR0915

return self.data_type(reference=reference)

def _parse_all_of_item( # noqa: PLR0912, PLR0913, PLR0917
def _parse_all_of_item( # noqa: PLR0912, PLR0913, PLR0915, PLR0917
self,
name: str,
obj: JsonSchemaObject,
Expand Down Expand Up @@ -2095,12 +2103,20 @@ def _parse_all_of_item( # noqa: PLR0912, PLR0913, PLR0917
data_type = self._get_inherited_field_type(request, base_classes)
if data_type is None:
data_type = DataType(type=ANY, import_=IMPORT_ANY)
# Handle multiple aliases (Pydantic v2 AliasChoices)
single_alias: str | None = None
validation_aliases: list[str] | None = None
if isinstance(alias, list):
validation_aliases = alias
else:
single_alias = alias
fields.append(
self.data_model_field_type(
name=field_name,
required=True,
original_name=request,
alias=alias,
alias=single_alias,
validation_aliases=validation_aliases,
data_type=data_type,
)
)
Expand Down Expand Up @@ -2270,14 +2286,22 @@ def parse_object_fields(
exclude_field_names.add(field_name)

if isinstance(field, bool):
# Handle multiple aliases (Pydantic v2 AliasChoices)
single_alias: str | None = None
validation_aliases: list[str] | None = None
if isinstance(alias, list):
validation_aliases = alias
else:
single_alias = alias
fields.append(
self.data_model_field_type(
name=field_name,
data_type=self.data_type_manager.get_data_type(
Types.any,
),
required=False if self.force_optional_for_required_fields else original_field_name in requires,
alias=alias,
alias=single_alias,
validation_aliases=validation_aliases,
strip_default_none=self.strip_default_none,
use_annotated=self.use_annotated,
use_field_description=self.use_field_description,
Expand Down
12 changes: 10 additions & 2 deletions src/datamodel_code_generator/parser/openapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -682,7 +682,7 @@ def _get_model_name(cls, path_name: str, method: str, suffix: str) -> str:
camel_path_name = snake_to_upper_camel(normalized)
return f"{camel_path_name}{method.capitalize()}{suffix}"

def parse_all_parameters(
def parse_all_parameters( # noqa: PLR0912
self,
name: str,
parameters: list[ReferenceObject | ParameterObject],
Expand Down Expand Up @@ -751,13 +751,21 @@ def parse_all_parameters(
data_type = self.data_type(data_types=data_types)
# multiple data_type parse as non-constraints field
object_schema = None
# Handle multiple aliases (Pydantic v2 AliasChoices)
single_alias: str | None = None
validation_aliases: list[str] | None = None
if isinstance(alias, list):
validation_aliases = alias
else:
single_alias = alias
fields.append(
self.data_model_field_type(
name=field_name,
default=object_schema.default if object_schema else None,
data_type=data_type,
required=parameter.required,
alias=alias,
alias=single_alias,
validation_aliases=validation_aliases,
constraints=model_dump(object_schema, exclude_none=True)
if object_schema and self.is_constraints_field(object_schema)
else None,
Expand Down
35 changes: 28 additions & 7 deletions src/datamodel_code_generator/reference.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ class FieldNameResolver:

def __init__( # noqa: PLR0913, PLR0917
self,
aliases: Mapping[str, str] | None = None,
aliases: Mapping[str, str | list[str]] | None = None,
snake_case_field: bool = False, # noqa: FBT001, FBT002
empty_field_name: str | None = None,
original_delimiter: str | None = None,
Expand All @@ -235,7 +235,7 @@ def __init__( # noqa: PLR0913, PLR0917
no_alias: bool = False, # noqa: FBT001, FBT002
) -> None:
"""Initialize field name resolver with transformation options."""
self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
self.aliases: Mapping[str, str | list[str]] = {} if aliases is None else {**aliases}
self.empty_field_name: str = empty_field_name or "_"
self.snake_case_field = snake_case_field
self.original_delimiter: str | None = original_delimiter
Expand Down Expand Up @@ -306,7 +306,7 @@ def get_valid_field_name_and_alias(
excludes: set[str] | None = None,
path: list[str] | None = None,
class_name: str | None = None,
) -> tuple[str, str | None]:
) -> tuple[str, str | list[str] | None]:
"""Get valid field name and original alias if different.

Supports hierarchical alias resolution with the following priority:
Expand All @@ -318,15 +318,33 @@ def get_valid_field_name_and_alias(
excludes: Set of names to avoid when generating valid names.
path: Unused, kept for backward compatibility.
class_name: Optional class name for scoped alias resolution.

Returns:
A tuple of (python_field_name, alias_or_aliases) where:
- python_field_name: The valid Python identifier to use as the field name.
- alias_or_aliases: None if no alias needed, str for single alias,
or list[str] for multiple aliases (Pydantic v2 AliasChoices).
"""
del path
if class_name:
scoped_key = f"{class_name}.{field_name}"
if scoped_key in self.aliases:
return self.aliases[scoped_key], field_name
alias_value = self.aliases[scoped_key]
if isinstance(alias_value, list) and alias_value:
# Multiple aliases: validate first alias as field name, return all aliases including original
valid_name = self.get_valid_name(alias_value[0], excludes=excludes)
return valid_name, [field_name, *alias_value]
if isinstance(alias_value, str):
return alias_value, field_name

if field_name in self.aliases:
return self.aliases[field_name], field_name
alias_value = self.aliases[field_name]
if isinstance(alias_value, list) and alias_value:
# Multiple aliases: validate first alias as field name, return all aliases including original
valid_name = self.get_valid_name(alias_value[0], excludes=excludes)
return valid_name, [field_name, *alias_value]
if isinstance(alias_value, str):
return alias_value, field_name

valid_name = self.get_valid_name(field_name, excludes=excludes)
return (
Expand Down Expand Up @@ -1064,7 +1082,7 @@ def get_valid_field_name_and_alias(
model_type: ModelType = ModelType.PYDANTIC,
path: list[str] | None = None,
class_name: str | None = None,
) -> tuple[str, str | None]:
) -> tuple[str, str | list[str] | None]:
"""Get a valid field name and alias for the specified model type.

Args:
Expand All @@ -1075,7 +1093,10 @@ def get_valid_field_name_and_alias(
class_name: Optional class name for scoped alias resolution.

Returns:
A tuple of (valid_field_name, alias_or_none).
A tuple of (python_field_name, alias_or_aliases) where:
- python_field_name: The valid Python identifier to use as the field name.
- alias_or_aliases: None if no alias needed, str for single alias,
or list[str] for multiple aliases (Pydantic v2 AliasChoices).
"""
del path
return self.field_name_resolvers[model_type].get_valid_field_name_and_alias(
Expand Down
3 changes: 3 additions & 0 deletions tests/data/aliases/discriminator_multiple_aliases.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"message_type": ["messageType", "message-type"]
}
3 changes: 3 additions & 0 deletions tests/data/aliases/discriminator_no_literal.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"pet_type": ["petType", "pet-type"]
}
8 changes: 8 additions & 0 deletions tests/data/aliases/multiple_aliases.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"my_field": ["my-field", "myField"],
"User.user_name": ["user-name", "userName"],
"base_field": ["baseField", "base-field"],
"extra_field": ["extraField", "extra-field"],
"any_value_field": ["anyValueField", "any-value-field"],
"inherited_required": ["inheritedRequired", "inherited-required"]
}
6 changes: 6 additions & 0 deletions tests/data/aliases/multiple_aliases_parameters.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{
"page_size": ["pageSize", "page-size"],
"sort_order": ["sortOrder", "sort-order"],
"filter_options": ["filterOptions", "filter-options"],
"single_alias_field": "singleAliasField"
}
Loading
Loading