1111import sys
1212from collections .abc import Callable , Iterator , Mapping , Sequence
1313from datetime import datetime , timezone
14- from enum import Enum
1514from pathlib import Path
1615from typing import (
1716 IO ,
1817 TYPE_CHECKING ,
1918 Any ,
20- Final ,
2119 TextIO ,
2220 TypeAlias ,
2321 TypeVar ,
2422 cast ,
2523)
2624from urllib .parse import ParseResult
2725
28- import yaml
29- import yaml .parser
30- from typing_extensions import TypeAliasType , TypedDict
31-
32- import datamodel_code_generator .pydantic_patch # noqa: F401
26+ from typing_extensions import TypeAliasType
27+
28+ from datamodel_code_generator .enums import (
29+ DEFAULT_SHARED_MODULE_NAME ,
30+ MAX_VERSION ,
31+ MIN_VERSION ,
32+ AllExportsCollisionStrategy ,
33+ AllExportsScope ,
34+ AllOfMergeMode ,
35+ CollapseRootModelsNameStrategy ,
36+ DataclassArguments ,
37+ DataModelType ,
38+ FieldTypeCollisionStrategy ,
39+ GraphQLScope ,
40+ InputFileType ,
41+ ModuleSplitMode ,
42+ NamingStrategy ,
43+ OpenAPIScope ,
44+ ReadOnlyWriteOnlyModelType ,
45+ ReuseScope ,
46+ TargetPydanticVersion ,
47+ )
3348from datamodel_code_generator .format import (
3449 DEFAULT_FORMATTERS ,
3550 CodeFormatter ,
4055 PythonVersionMin ,
4156)
4257from datamodel_code_generator .parser import DefaultPutDict , LiteralType
43- from datamodel_code_generator .util import PYDANTIC_V2 , SafeLoader
4458
4559if TYPE_CHECKING :
4660 from collections import defaultdict
5266 YamlScalar : TypeAlias = str | int | float | bool | None
5367 YamlValue = TypeAliasType ("YamlValue" , "dict[str, YamlValue] | list[YamlValue] | YamlScalar" )
5468
55- MIN_VERSION : Final [int ] = 10
56- MAX_VERSION : Final [int ] = 13
57- DEFAULT_SHARED_MODULE_NAME : Final [str ] = "shared"
58-
5969T = TypeVar ("T" )
6070
71+ # Import is_pydantic_v2 here for module-level YamlValue type definition
72+ from datamodel_code_generator .util import is_pydantic_v2 # noqa: E402
6173
62- class DataclassArguments (TypedDict , total = False ):
63- """Arguments for @dataclass decorator."""
64-
65- init : bool
66- repr : bool
67- eq : bool
68- order : bool
69- unsafe_hash : bool
70- frozen : bool
71- match_args : bool
72- kw_only : bool
73- slots : bool
74- weakref_slot : bool
75-
76-
77- if not TYPE_CHECKING :
74+ if not TYPE_CHECKING : # pragma: no branch
7875 YamlScalar : TypeAlias = str | int | float | bool | None
79- if PYDANTIC_V2 :
76+ if is_pydantic_v2 () :
8077 YamlValue = TypeAliasType ("YamlValue" , "dict[str, YamlValue] | list[YamlValue] | YamlScalar" )
8178 else :
8279 # Pydantic v1 cannot handle TypeAliasType, use Any for recursive parts
8380 YamlValue : TypeAlias = dict [str , Any ] | list [Any ] | YamlScalar
8481
82+
8583GeneratedModules : TypeAlias = dict [tuple [str , ...], str ]
8684"""Type alias for multiple generated modules.
8785
8886Maps module path tuples (e.g., ("models", "user.py")) to generated code strings.
8987Returned by generate() when output=None and multiple modules are generated.
9088"""
9189
92- try :
93- import pysnooper
94-
95- pysnooper .tracer .DISABLED = True
96- except ImportError : # pragma: no cover
97- pysnooper = None
98-
9990DEFAULT_BASE_CLASS : str = "pydantic.BaseModel"
10091
10192
def load_yaml(stream: str | TextIO) -> YamlValue:
    """Parse YAML from a string or file-like object into plain Python values.

    Both imports are deferred so that importing this module does not pull in
    PyYAML or the project's SafeLoader until YAML loading is actually needed.
    """
    import yaml  # noqa: PLC0415

    from datamodel_code_generator.util import SafeLoader  # noqa: PLC0415

    loaded = yaml.load(stream, Loader=SafeLoader)  # noqa: S506
    return loaded
105100
106101
@@ -130,22 +125,38 @@ def get_version() -> str:
130125
def enable_debug_message() -> None:  # pragma: no cover
    """Enable debug tracing by un-disabling the pysnooper tracer.

    Raises:
        Exception: If pysnooper is not installed (the ``debug`` extra is
            missing), with installation instructions in the message.
    """
    global _pysnooper_default_state_set  # noqa: PLW0603
    # Keep the try body minimal: only the import can raise ImportError here.
    try:
        import pysnooper  # noqa: PLC0415
    except ImportError as err:
        msg = "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
        raise Exception(msg) from err  # noqa: TRY002
    else:
        # Flip the tracer on and record that its state was set explicitly,
        # so snooper_to_methods() does not re-disable it later.
        pysnooper.tracer.DISABLED = False
        _pysnooper_default_state_set = True
138137
139138
140139DEFAULT_MAX_VARIABLE_LENGTH : int = 100
141140
142141
142+ _pysnooper_default_state_set : bool = False
143+
144+
143145def snooper_to_methods () -> Callable [..., Any ]:
144146 """Class decorator to add pysnooper tracing to all methods."""
145147
146148 def inner (cls : type [T ]) -> type [T ]:
147- if not pysnooper :
149+ global _pysnooper_default_state_set # noqa: PLW0603
150+ try :
151+ import pysnooper # noqa: PLC0415
152+ except ImportError :
148153 return cls
154+
155+ # Ensure tracing is disabled by default (only enabled via --debug flag)
156+ if not _pysnooper_default_state_set :
157+ pysnooper .tracer .DISABLED = True
158+ _pysnooper_default_state_set = True
159+
149160 import inspect # noqa: PLC0415
150161
151162 methods = inspect .getmembers (cls , predicate = inspect .isfunction )
@@ -201,19 +212,6 @@ def is_schema(data: dict) -> bool:
201212 return isinstance (data .get ("properties" ), dict )
202213
203214
204- class InputFileType (Enum ):
205- """Supported input file types for schema parsing."""
206-
207- Auto = "auto"
208- OpenAPI = "openapi"
209- JsonSchema = "jsonschema"
210- Json = "json"
211- Yaml = "yaml"
212- Dict = "dict"
213- CSV = "csv"
214- GraphQL = "graphql"
215-
216-
217215RAW_DATA_TYPES : list [InputFileType ] = [
218216 InputFileType .Json ,
219217 InputFileType .Yaml ,
@@ -223,150 +221,6 @@ class InputFileType(Enum):
223221]
224222
225223
226- class DataModelType (Enum ):
227- """Supported output data model types."""
228-
229- PydanticBaseModel = "pydantic.BaseModel"
230- PydanticV2BaseModel = "pydantic_v2.BaseModel"
231- PydanticV2Dataclass = "pydantic_v2.dataclass"
232- DataclassesDataclass = "dataclasses.dataclass"
233- TypingTypedDict = "typing.TypedDict"
234- MsgspecStruct = "msgspec.Struct"
235-
236-
237- class ReuseScope (Enum ):
238- """Scope for model reuse deduplication.
239-
240- module: Deduplicate identical models within each module (default).
241- tree: Deduplicate identical models across all modules, placing shared models in shared.py.
242- """
243-
244- Module = "module"
245- Tree = "tree"
246-
247-
248- class OpenAPIScope (Enum ):
249- """Scopes for OpenAPI model generation."""
250-
251- Schemas = "schemas"
252- Paths = "paths"
253- Tags = "tags"
254- Parameters = "parameters"
255- Webhooks = "webhooks"
256- RequestBodies = "requestbodies"
257-
258-
259- class AllExportsScope (Enum ):
260- """Scope for __all__ exports in __init__.py.
261-
262- children: Export models from direct child modules only.
263- recursive: Export models from all descendant modules recursively.
264- """
265-
266- Children = "children"
267- Recursive = "recursive"
268-
269-
270- class AllExportsCollisionStrategy (Enum ):
271- """Strategy for handling name collisions in recursive exports.
272-
273- error: Raise an error when name collision is detected.
274- minimal_prefix: Add module prefix only to colliding names.
275- full_prefix: Add full module path prefix to all colliding names.
276- """
277-
278- Error = "error"
279- MinimalPrefix = "minimal-prefix"
280- FullPrefix = "full-prefix"
281-
282-
283- class FieldTypeCollisionStrategy (Enum ):
284- """Strategy for handling field name and type name collisions.
285-
286- rename_field: Rename the field with a suffix and add alias (default).
287- rename_type: Rename the type class with a suffix to preserve field name.
288- """
289-
290- RenameField = "rename-field"
291- RenameType = "rename-type"
292-
293-
294- class NamingStrategy (Enum ):
295- """Strategy for generating unique model names when duplicates occur.
296-
297- numbered: Append numeric suffix (Address1, Address2) [default].
298- parent_prefixed: Prefix with parent model name (CustomerAddress, UserAddress).
299- full_path: Use full schema path for unique names (OrdersItemsAddress).
300- primary_first: Prioritize primary schema definitions, others get suffix.
301- """
302-
303- Numbered = "numbered"
304- ParentPrefixed = "parent-prefixed"
305- FullPath = "full-path"
306- PrimaryFirst = "primary-first"
307-
308-
309- class CollapseRootModelsNameStrategy (Enum ):
310- """Strategy for naming when collapsing root models with object references.
311-
312- child: Keep the inner (child) model's name, remove the wrapper.
313- parent: Rename inner model to wrapper's name, remove the wrapper.
314- """
315-
316- Child = "child"
317- Parent = "parent"
318-
319-
320- class AllOfMergeMode (Enum ):
321- """Mode for field merging in allOf schemas.
322-
323- constraints: Merge only constraint fields (minItems, maxItems, pattern, etc.) from parent.
324- all: Merge constraints plus annotation fields (default, examples) from parent.
325- none: Do not merge any fields from parent properties.
326- """
327-
328- Constraints = "constraints"
329- All = "all"
330- NoMerge = "none"
331-
332-
333- class GraphQLScope (Enum ):
334- """Scopes for GraphQL model generation."""
335-
336- Schema = "schema"
337-
338-
339- class ReadOnlyWriteOnlyModelType (Enum ):
340- """Model generation strategy for readOnly/writeOnly fields.
341-
342- RequestResponse: Generate only Request/Response model variants (no base model).
343- All: Generate Base, Request, and Response models.
344- """
345-
346- RequestResponse = "request-response"
347- All = "all"
348-
349-
350- class ModuleSplitMode (Enum ):
351- """Mode for splitting generated models into separate files.
352-
353- Single: Generate one file per model class.
354- """
355-
356- Single = "single"
357-
358-
359- class TargetPydanticVersion (Enum ):
360- """Target Pydantic version for generated code.
361-
362- V2: Generate code compatible with Pydantic 2.0+ (uses populate_by_name).
363- V2_11: Generate code for Pydantic 2.11+ (uses validate_by_name).
364- """
365-
366- V2 = "2"
367- V2_11 = "2.11"
368-
369-
370224class Error (Exception ):
371225 """Base exception for datamodel-code-generator errors."""
372226
@@ -1061,6 +915,8 @@ def get_header_and_first_line(csv_file: IO[str]) -> dict[str, Any]:
1061915
1062916def infer_input_type (text : str ) -> InputFileType :
1063917 """Automatically detect the input file type from text content."""
918+ import yaml .parser # noqa: PLC0415
919+
1064920 try :
1065921 data = load_yaml (text )
1066922 except yaml .parser .ParserError :
0 commit comments