Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
00a3dff
Add multiple --input-model support with inheritance preservation
koxudaxi Dec 31, 2025
fd34ae2
Fix lint and coverage issues
koxudaxi Dec 31, 2025
38c777a
Simplify code to improve branch coverage
koxudaxi Dec 31, 2025
fe421ce
Extract input-model processing to separate module
koxudaxi Dec 31, 2025
2be74fd
Fix PEP8 naming violations and unused parameter
koxudaxi Dec 31, 2025
22df169
Fix lint error for multi-line import
koxudaxi Dec 31, 2025
1901f86
Add expected_output_not_contains parameter to test helper
koxudaxi Dec 31, 2025
1660177
Achieve 100% test coverage for input_model feature
koxudaxi Dec 31, 2025
2962e97
Mark pydantic v1 coercion code with pragma no cover
koxudaxi Dec 31, 2025
23eee28
Refactor input_model tests to use expected files with assert_output
koxudaxi Dec 31, 2025
9da503d
Fix expected files for Python version compatibility
koxudaxi Dec 31, 2025
f39a2d1
Fix import order
koxudaxi Dec 31, 2025
4afe19e
Fix PR review issues: duplicate imports, duplicate test, and duplicat…
koxudaxi Dec 31, 2025
ded2d40
Fix union type override for anyOf schemas and remove duplicate test
koxudaxi Dec 31, 2025
853a02d
Clarify test docstrings: input model type tests all output to default…
koxudaxi Dec 31, 2025
fae1e1d
Remove line comments
koxudaxi Dec 31, 2025
273c4e1
Add test for x-python-type with union in anyOf schema
koxudaxi Dec 31, 2025
963cac6
Remove line comments except ignore comments
koxudaxi Dec 31, 2025
6804416
Remove SKIP_PYTHON_314 and fix Union serialization for Python 3.14
koxudaxi Dec 31, 2025
0de02c0
Refactor duplicate tests using pytest.mark.parametrize
koxudaxi Dec 31, 2025
3fb45dc
Fix lint and type errors
koxudaxi Dec 31, 2025
6ab699c
Regenerate config-types
koxudaxi Dec 31, 2025
325ed3c
Fix StrictTypes import in config-types with reuse-foreign strategy
koxudaxi Dec 31, 2025
c0c7633
Use FQN for type arguments in x-python-type serialization
koxudaxi Dec 31, 2025
e45af85
Fix pyright type error in _full_type_name
koxudaxi Dec 31, 2025
12d9d82
Add unit tests for 100% patch coverage
koxudaxi Dec 31, 2025
b36b6d5
Fix lint error and improve test for generic type without args
koxudaxi Dec 31, 2025
c604da0
Fix variable naming: use 'spec' for find_spec result
koxudaxi Dec 31, 2025
28418ea
Add tests for _full_type_name branch coverage
koxudaxi Dec 31, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
250 changes: 248 additions & 2 deletions src/datamodel_code_generator/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -385,6 +385,16 @@ def validate_all_exports_collision_strategy(self: Self) -> Self: # pyright: ign
raise Error(self.__validate_all_exports_collision_strategy_err)
return self

# Local import; presumably deferred because field_validator availability
# depends on the pydantic runtime selected by the enclosing branch — TODO confirm.
from pydantic import field_validator as _field_validator  # noqa: PLC0415

@_field_validator("input_model", mode="before")
@classmethod
def coerce_input_model_to_list(cls, v: str | list[str] | None) -> list[str] | None:  # pyright: ignore[reportRedeclaration]
    """Convert string input_model to list for backwards compatibility."""
    # Older configs passed a single 'module:Name' string; wrap it so the
    # field's declared type (Optional[list[str]]) always holds.
    if isinstance(v, str):
        return [v]
    return v

else:

@model_validator() # pyright: ignore[reportArgumentType]
Expand Down Expand Up @@ -443,8 +453,16 @@ def validate_all_exports_collision_strategy(cls, values: dict[str, Any]) -> dict
raise Error(cls.__validate_all_exports_collision_strategy_err)
return values

@field_validator("input_model", mode="before")
@classmethod
def coerce_input_model_to_list(cls, v: str | list[str] | None) -> list[str] | None:
    """Convert string input_model to list for backwards compatibility."""
    # Mirrors the validator in the other runtime branch: a single
    # 'module:Name' string becomes a one-element list so downstream code
    # can always iterate.
    if isinstance(v, str):
        return [v]
    return v

input: Optional[Union[Path, str]] = None # noqa: UP007, UP045
input_model: Optional[str] = None # noqa: UP045
input_model: Optional[list[str]] = None # noqa: UP045
input_model_ref_strategy: Optional[InputModelRefStrategy] = None # noqa: UP045
input_file_type: InputFileType = InputFileType.Auto
output_model_type: DataModelType = DataModelType.PydanticBaseModel
Expand Down Expand Up @@ -1172,6 +1190,231 @@ def _try_rebuild_model(obj: type) -> None:
obj.model_rebuild()


def _get_base_model_parents(model_class: type) -> list[type]:
    """Return the direct bases of *model_class* that are proper BaseModel subclasses.

    BaseModel itself is excluded, as are non-type entries in ``__bases__``.
    """
    parents: list[type] = []
    for base in model_class.__bases__:
        if not isinstance(base, type):
            continue
        if base is BaseModel or not issubclass(base, BaseModel):
            continue
        parents.append(base)
    return parents


def _transform_single_model_to_inheritance(  # noqa: PLR0912
    schema: dict[str, object],
    model_class: type,
    schema_generator: type,
    processed_parents: dict[str, dict[str, object]] | None = None,
) -> dict[str, object]:
    """Transform a single model's schema to use allOf inheritance structure.

    Pydantic flattens inherited fields into each model's JSON schema; this
    rebuilds an explicit ``allOf`` + ``$ref`` structure so the generated
    code mirrors the Python class hierarchy.

    Args:
        schema: The JSON schema generated by Pydantic
        model_class: The Pydantic model class
        schema_generator: The schema generator class
        processed_parents: Cache of already processed parent schemas

    Returns:
        Transformed schema with allOf structure for inheritance
    """
    if processed_parents is None:
        processed_parents = {}

    direct_parents = _get_base_model_parents(model_class)

    # Models inheriting only from BaseModel need no restructuring.
    if not direct_parents:
        return schema

    # NOTE(review): only the first BaseModel parent is honored; extra bases
    # in a multiple-inheritance hierarchy are ignored — confirm intentional.
    parent = direct_parents[0]
    parent_name = parent.__name__
    parent_fields = set(parent.model_fields.keys())

    defs = dict(cast("dict[str, object]", schema.get("$defs", {})))

    if parent_name in processed_parents:
        parent_schema = processed_parents[parent_name]
    else:
        # Recursively transform the parent so grandparent chains become
        # nested allOf refs; cache so siblings sharing a parent reuse it.
        if hasattr(parent, "model_rebuild"):
            _try_rebuild_model(parent)
        parent_schema = parent.model_json_schema(schema_generator=schema_generator)
        parent_schema = _add_python_type_for_unserializable(parent_schema, parent)
        parent_schema = _add_python_type_info(parent_schema, parent)
        parent_schema = _transform_single_model_to_inheritance(
            parent_schema, parent, schema_generator, processed_parents
        )
        processed_parents[parent_name] = parent_schema

    # Hoist the parent's $defs into this schema's $defs (first writer wins).
    if "$defs" in parent_schema:
        parent_defs = cast("dict[str, object]", parent_schema["$defs"])
        for k, v in parent_defs.items():
            if k not in defs:
                defs[k] = v

    # Register the parent itself as a definition so the child can $ref it.
    parent_def = {k: v for k, v in parent_schema.items() if k != "$defs"}
    defs[parent_name] = parent_def

    # Keep only the fields the child declares itself; inherited ones come in
    # through the allOf reference.
    original_props = cast("dict[str, object]", schema.get("properties", {}))
    child_props = {k: v for k, v in original_props.items() if k not in parent_fields}

    new_schema: dict[str, object] = {}
    if defs:
        new_schema["$defs"] = defs
    new_schema["allOf"] = [{"$ref": f"#/$defs/{parent_name}"}]
    if child_props:
        new_schema["properties"] = child_props
    original_required = cast("list[str]", schema.get("required", []))
    child_required = [r for r in original_required if r not in parent_fields]
    if child_required:
        new_schema["required"] = child_required
    # NOTE(review): if the input schema has no "title" this stores None —
    # presumably pydantic always emits a title, but confirm.
    new_schema["title"] = schema.get("title")
    new_schema["type"] = "object"

    # Carry over remaining keys (description, extensions, ...) untouched.
    for key in schema:
        if key not in {"$defs", "properties", "required", "title", "type", "allOf"}:
            new_schema[key] = schema[key]

    return new_schema


def _load_multiple_model_schemas(  # noqa: PLR0912, PLR0914, PLR0915
    input_models: list[str],
    input_file_type: InputFileType,
    ref_strategy: InputModelRefStrategy | None = None,
    output_model_type: DataModelType = DataModelType.PydanticBaseModel,
) -> dict[str, object]:
    """Load and merge schemas from multiple Python import paths with inheritance support.

    Args:
        input_models: List of import paths in 'module.path:ObjectName' format
        input_file_type: Current input file type setting for validation
        ref_strategy: Strategy for handling referenced types
        output_model_type: Target output model type for reuse-foreign strategy

    Returns:
        Merged schema dict with anyOf referencing all root models
    """
    import importlib.util  # noqa: PLC0415
    import sys  # noqa: PLC0415

    # A single entry defers to the single-model loader, which also accepts
    # non-BaseModel inputs (e.g. schema dicts).
    if len(input_models) == 1:
        return _load_model_schema(
            input_models[0], input_file_type, ref_strategy, output_model_type
        )

    # Make modules importable relative to the invocation directory.
    cwd = str(Path.cwd())
    if cwd not in sys.path:
        sys.path.insert(0, cwd)

    model_classes: list[type] = []
    loaded_modules: dict[str, object] = {}

    for input_model in input_models:
        # rpartition keeps dots in the module part intact; the colon splits
        # 'module.path:Object'.
        modname, sep, qualname = input_model.rpartition(":")
        if not sep or not modname:
            msg = f"Invalid --input-model format: {input_model!r}. Expected 'module:Object' or 'path/to/file.py:Object'."
            raise Error(msg)

        if modname not in loaded_modules:
            # Heuristic: treat the module part as a filesystem path when it
            # contains a path separator, or is an existing '.py' file name.
            is_path = "/" in modname or "\\" in modname
            if not is_path and modname.endswith(".py"):
                is_path = Path(modname).exists()

            if is_path:
                file_path = Path(modname).resolve()
                if not file_path.exists():
                    msg = f"File not found: {modname!r}"
                    raise Error(msg)
                # NOTE(review): module is registered under its file stem, so
                # two input files with the same stem would collide in
                # sys.modules — confirm this is acceptable.
                module_name = file_path.stem
                spec = importlib.util.spec_from_file_location(module_name, file_path)
                if spec is None or spec.loader is None:
                    msg = f"Cannot load module from {modname!r}"
                    raise Error(msg)
                module = importlib.util.module_from_spec(spec)
                sys.modules[module_name] = module
                spec.loader.exec_module(module)
            else:
                try:
                    found_spec = importlib.util.find_spec(modname)
                    if found_spec is None:
                        msg = f"Cannot find module {modname!r}"
                        raise Error(msg)
                    module = importlib.import_module(modname)
                except ImportError as e:
                    msg = f"Cannot import module {modname!r}: {e}"
                    raise Error(msg) from e
            # Cache by the raw module spec so repeated references to the
            # same module do not re-import it.
            loaded_modules[modname] = module
        else:
            module = loaded_modules[modname]

        try:
            obj = getattr(module, qualname)
        except AttributeError as e:
            msg = f"Module {modname!r} has no attribute {qualname!r}"
            raise Error(msg) from e

        # Unlike the single-model path, the multi-model path supports only
        # Pydantic v2 BaseModel classes.
        if not (isinstance(obj, type) and issubclass(obj, BaseModel)):
            msg = f"Multiple --input-model only supports Pydantic v2 BaseModel classes, got {type(obj).__name__}"
            raise Error(msg)

        if not hasattr(obj, "model_json_schema"):
            msg = "Multiple --input-model with Pydantic model requires Pydantic v2 runtime. Please upgrade Pydantic to v2."
            raise Error(msg)

        model_classes.append(obj)

    if input_file_type not in {InputFileType.Auto, InputFileType.JsonSchema}:
        msg = (
            f"--input-file-type must be 'jsonschema' (or omitted) "
            f"when --input-model points to Pydantic models, "
            f"got '{input_file_type.value}'"
        )
        raise Error(msg)

    schema_generator = _get_input_model_json_schema_class()
    merged_defs: dict[str, object] = {}
    root_refs: list[dict[str, str]] = []
    # Shared across models so a common parent is only transformed once.
    processed_parents: dict[str, dict[str, object]] = {}

    for model_class in model_classes:
        model_name = model_class.__name__
        if hasattr(model_class, "model_rebuild"):
            _try_rebuild_model(model_class)

        schema = model_class.model_json_schema(schema_generator=schema_generator)
        schema = _add_python_type_for_unserializable(schema, model_class)
        schema = _add_python_type_info(schema, model_class)

        schema = _transform_single_model_to_inheritance(
            schema, model_class, schema_generator, processed_parents
        )

        # Merge nested $defs; the first definition of a name wins.
        if "$defs" in schema:
            schema_defs = cast("dict[str, object]", schema["$defs"])
            for k, v in schema_defs.items():
                if k not in merged_defs:
                    merged_defs[k] = v

        model_def = {k: v for k, v in schema.items() if k != "$defs"}
        merged_defs[model_name] = model_def

        root_refs.append({"$ref": f"#/$defs/{model_name}"})

    # Several roots become an anyOf union; a single root collapses to a
    # plain $ref. NOTE(review): with len(input_models) > 1 guaranteed above,
    # the single-ref branch looks unreachable — confirm.
    final_schema: dict[str, object] = {"$defs": merged_defs}
    if len(root_refs) == 1:
        final_schema.update(root_refs[0])
    else:
        final_schema["anyOf"] = root_refs

    if ref_strategy and ref_strategy != InputModelRefStrategy.RegenerateAll:
        all_nested_models: dict[str, type] = {}
        for model_class in model_classes:
            all_nested_models.update(_collect_nested_models(model_class))
        final_schema = _filter_defs_by_strategy(
            final_schema, all_nested_models, output_model_type, ref_strategy
        )

    return final_schema


def _load_model_schema( # noqa: PLR0912, PLR0914, PLR0915
input_model: str,
input_file_type: InputFileType,
Expand Down Expand Up @@ -1262,6 +1505,9 @@ def _load_model_schema( # noqa: PLR0912, PLR0914, PLR0915
schema = _add_python_type_for_unserializable(schema, obj)
schema = _add_python_type_info(schema, obj)

# Transform to inheritance structure if the model has BaseModel parents
schema = _transform_single_model_to_inheritance(schema, obj, schema_generator)

if ref_strategy and ref_strategy != InputModelRefStrategy.RegenerateAll:
nested_models = _collect_nested_models(obj)
model_name = getattr(obj, "__name__", None)
Expand Down Expand Up @@ -1890,7 +2136,7 @@ def main(args: Sequence[str] | None = None) -> Exit: # noqa: PLR0911, PLR0912,
try:
input_: Path | str | ParseResult
if config.input_model:
schema = _load_model_schema(
schema = _load_multiple_model_schemas(
config.input_model,
config.input_file_type,
config.input_model_ref_strategy,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
from datamodel_code_generator.types import DataTypeManager


class GraphQLParserConfigDict(TypedDict):
class ParserConfig(TypedDict):
data_model_type: NotRequired[type[DataModel]]
data_model_root_type: NotRequired[type[DataModel]]
data_type_manager_type: NotRequired[type[DataTypeManager]]
Expand Down Expand Up @@ -142,5 +142,8 @@ class GraphQLParserConfigDict(TypedDict):
read_only_write_only_model_type: NotRequired[ReadOnlyWriteOnlyModelType | None]
field_type_collision_strategy: NotRequired[FieldTypeCollisionStrategy | None]
target_pydantic_version: NotRequired[TargetPydanticVersion | None]


class GraphQLParserConfigDict(ParserConfig):
    # GraphQL-specific extensions on top of the shared ParserConfig keys.
    data_model_scalar_type: NotRequired[type[DataModel]]
    data_model_union_type: NotRequired[type[DataModel]]
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
from datamodel_code_generator.types import DataTypeManager


class JSONSchemaParserConfigDict(TypedDict):
class ParserConfig(TypedDict):
data_model_type: NotRequired[type[DataModel]]
data_model_root_type: NotRequired[type[DataModel]]
data_type_manager_type: NotRequired[type[DataTypeManager]]
Expand Down Expand Up @@ -142,3 +142,7 @@ class JSONSchemaParserConfigDict(TypedDict):
read_only_write_only_model_type: NotRequired[ReadOnlyWriteOnlyModelType | None]
field_type_collision_strategy: NotRequired[FieldTypeCollisionStrategy | None]
target_pydantic_version: NotRequired[TargetPydanticVersion | None]


class JSONSchemaParserConfigDict(ParserConfig):
    # Adds no keys beyond ParserConfig; the distinct name preserves the
    # public alias callers already reference.
    pass
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from datamodel_code_generator.types import DataTypeManager


class OpenAPIParserConfigDict(TypedDict):
class ParserConfig(TypedDict):
data_model_type: NotRequired[type[DataModel]]
data_model_root_type: NotRequired[type[DataModel]]
data_type_manager_type: NotRequired[type[DataTypeManager]]
Expand Down Expand Up @@ -143,6 +143,13 @@ class OpenAPIParserConfigDict(TypedDict):
read_only_write_only_model_type: NotRequired[ReadOnlyWriteOnlyModelType | None]
field_type_collision_strategy: NotRequired[FieldTypeCollisionStrategy | None]
target_pydantic_version: NotRequired[TargetPydanticVersion | None]


class JSONSchemaParserConfig(ParserConfig):
    # NOTE(review): name differs from the JSONSchemaParserConfigDict alias
    # used elsewhere; confirm this intermediate class/name is intentional.
    pass


class OpenAPIParserConfigDict(JSONSchemaParserConfig):
openapi_scopes: NotRequired[list[OpenAPIScope] | None]
include_path_parameters: NotRequired[bool]
use_status_code_in_response_name: NotRequired[bool]
2 changes: 2 additions & 0 deletions src/datamodel_code_generator/arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,8 +159,10 @@ def start_section(self, heading: str | None) -> None:
)
base_options.add_argument(
"--input-model",
action="append",
help="Python import path to a Pydantic v2 model or schema dict "
"(e.g., 'mypackage.module:ClassName' or 'mypackage.schemas:SCHEMA_DICT'). "
"Can be specified multiple times for related models with inheritance. "
"For dict input, --input-file-type is required. "
"Cannot be used with --input or --url.",
metavar="MODULE:NAME",
Expand Down
26 changes: 26 additions & 0 deletions tests/data/expected/main/input_model/forked_inheritance.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# generated by datamodel-codegen:
# filename: <stdin>
# timestamp: 1985-10-26T08:21:00+00:00

from __future__ import annotations

from typing import TypeAlias, TypedDict


class GrandParent(TypedDict):
    grand_field: str


class Parent(GrandParent):
    parent_field: int


class ChildA(Parent):
    child_a_field: float


class ChildB(Parent):
    child_b_field: bool


# Root alias: two leaf models forked from a shared parent form a union.
# NOTE(review): this is a generated expected-output fixture; any comment
# added here must also be produced by the generator or the test will fail.
Model: TypeAlias = ChildA | ChildB
Comment thread Dismissed
Loading
Loading