Skip to content

Commit 926021e

Browse files
authored
Add coverage pragmas to unsupported data model error messages and remove unused load_yaml_from_path function (#2558)
1 parent c234691 commit 926021e

9 files changed

Lines changed: 42 additions & 43 deletions

File tree

src/datamodel_code_generator/__init__.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -88,12 +88,6 @@ def load_yaml_dict(stream: str | TextIO) -> dict[str, YamlValue]:
8888
return result
8989

9090

91-
def load_yaml_from_path(path: Path, encoding: str) -> YamlValue:
92-
"""Load YAML content from a file path."""
93-
with path.open(encoding=encoding) as f:
94-
return load_yaml(f)
95-
96-
9791
def load_yaml_dict_from_path(path: Path, encoding: str) -> dict[str, YamlValue]:
9892
"""Load YAML and return as dict from a file path."""
9993
with path.open(encoding=encoding) as f:

src/datamodel_code_generator/__main__.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -87,11 +87,11 @@ class Config(BaseModel):
8787
if PYDANTIC_V2:
8888
model_config = ConfigDict(arbitrary_types_allowed=True) # pyright: ignore[reportAssignmentType]
8989

90-
def get(self, item: str) -> Any:
90+
def get(self, item: str) -> Any: # pragma: no cover
9191
"""Get attribute value by name."""
9292
return getattr(self, item)
9393

94-
def __getitem__(self, item: str) -> Any:
94+
def __getitem__(self, item: str) -> Any: # pragma: no cover
9595
"""Get item by key."""
9696
return self.get(item)
9797

@@ -128,7 +128,7 @@ def validate_file(cls, value: Any) -> TextIOBase | None: # noqa: N805
128128
if path.is_file():
129129
return cast("TextIOBase", path.expanduser().resolve().open("rt"))
130130

131-
msg = f"A file was expected but {value} is not a file."
131+
msg = f"A file was expected but {value} is not a file." # pragma: no cover
132132
raise Error(msg) # pragma: no cover
133133

134134
@field_validator(
@@ -151,7 +151,7 @@ def validate_url(cls, value: Any) -> ParseResult | None: # noqa: N805
151151
return urlparse(value)
152152
if value is None: # pragma: no cover
153153
return None
154-
msg = f"This protocol doesn't support only http/https. --input={value}"
154+
msg = f"This protocol doesn't support only http/https. --input={value}" # pragma: no cover
155155
raise Error(msg) # pragma: no cover
156156

157157
# Pydantic 1.5.1 doesn't support each_item=True correctly
@@ -433,7 +433,9 @@ def _get_pyproject_toml_config(source: Path) -> dict[str, Any]:
433433
# Convert options from kebab- to snake-case
434434
pyproject_config = {k.replace("-", "_"): v for k, v in pyproject_config.items()}
435435
# Replace US-American spelling if present (ignore if British spelling is present)
436-
if "capitalize_enum_members" in pyproject_config and "capitalise_enum_members" not in pyproject_config:
436+
if (
437+
"capitalize_enum_members" in pyproject_config and "capitalise_enum_members" not in pyproject_config
438+
): # pragma: no cover
437439
pyproject_config["capitalise_enum_members"] = pyproject_config.pop("capitalize_enum_members")
438440
return pyproject_config
439441

src/datamodel_code_generator/model/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,7 @@ def get_data_model_types(
133133
scalar_model=scalar_class,
134134
union_model=union_class,
135135
)
136-
msg = f"{data_model_type} is unsupported data model type"
136+
msg = f"{data_model_type} is unsupported data model type" # pragma: no cover
137137
raise ValueError(msg) # pragma: no cover
138138

139139

src/datamodel_code_generator/model/pydantic/base_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -326,7 +326,7 @@ def __init__( # noqa: PLR0913
326326
if config_attribute in self.extra_template_data:
327327
config_parameters[config_attribute] = self.extra_template_data[config_attribute]
328328
for data_type in self.all_data_types:
329-
if data_type.is_custom_type:
329+
if data_type.is_custom_type: # pragma: no cover
330330
config_parameters["arbitrary_types_allowed"] = True
331331
break
332332

src/datamodel_code_generator/parser/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ def get_or_put(
3939
if default_factory:
4040
value = self[key] = default_factory(key)
4141
return value
42-
msg = "Not found default and default_factory"
42+
msg = "Not found default and default_factory" # pragma: no cover
4343
raise ValueError(msg) # pragma: no cover
4444

4545

src/datamodel_code_generator/parser/base.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -574,7 +574,7 @@ def _append_additional_imports(self, additional_imports: list[str] | None) -> No
574574
additional_imports = []
575575

576576
for additional_import_string in additional_imports:
577-
if additional_import_string is None:
577+
if additional_import_string is None: # pragma: no cover
578578
continue
579579
new_import = Import.from_full_path(additional_import_string)
580580
self.imports.append(new_import)
@@ -730,7 +730,7 @@ def __change_from_import(
730730
if imports.use_exact: # pragma: no cover
731731
from_, import_ = exact_import(from_, import_, data_type.reference.short_name)
732732
import_ = import_.replace("-", "_")
733-
if (
733+
if ( # pragma: no cover
734734
len(model.module_path) > 1
735735
and model.module_path[-1].count(".") > 0
736736
and not self.treat_dot_as_module
@@ -1033,7 +1033,7 @@ def __collapse_root_models( # noqa: PLR0912
10331033
model_field.constraints = ConstraintsBase.merge_constraints(
10341034
root_type_field.constraints, model_field.constraints
10351035
)
1036-
if (
1036+
if ( # pragma: no cover
10371037
isinstance(
10381038
root_type_field,
10391039
pydantic_model.DataModelField,

src/datamodel_code_generator/parser/jsonschema.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -339,7 +339,7 @@ def __init__(self, **data: Any) -> None:
339339
"""Initialize JsonSchemaObject with extra fields handling."""
340340
super().__init__(**data)
341341
self.extras = {k: v for k, v in data.items() if k not in EXCLUDE_FIELD_KEYS}
342-
if "const" in data.get(self.__extra_key__, {}):
342+
if "const" in data.get(self.__extra_key__, {}): # pragma: no cover
343343
self.extras["const"] = data[self.__extra_key__]["const"]
344344

345345
@cached_property
@@ -1178,7 +1178,7 @@ def parse_item( # noqa: PLR0911, PLR0912
11781178
)
11791179
if item.ref:
11801180
return self.get_ref_data_type(item.ref)
1181-
if item.custom_type_path:
1181+
if item.custom_type_path: # pragma: no cover
11821182
return self.data_type_manager.get_data_type_from_full_path(item.custom_type_path, is_custom_type=True)
11831183
if item.is_array:
11841184
return self.parse_array_fields(name, item, get_special_path("array", path)).data_type
@@ -1840,7 +1840,7 @@ def _parse_file(
18401840
try:
18411841
if definitions := get_model_by_path(raw, split_schema_path):
18421842
break
1843-
except KeyError:
1843+
except KeyError: # pragma: no cover
18441844
continue
18451845

18461846
for key, model in definitions.items():

src/datamodel_code_generator/types.py

Lines changed: 0 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -180,29 +180,6 @@ def chain_as_tuple(*iterables: Iterable[T]) -> tuple[T, ...]:
180180
return tuple(chain(*iterables))
181181

182182

183-
@lru_cache
184-
def _remove_none_from_type(type_: str, split_pattern: Pattern[str], delimiter: str) -> list[str]:
185-
"""Remove None from a type string and return the remaining types."""
186-
types: list[str] = []
187-
split_type: str = ""
188-
inner_count: int = 0
189-
for part in re.split(split_pattern, type_):
190-
if part == NONE:
191-
continue
192-
inner_count += part.count("[") - part.count("]")
193-
if split_type:
194-
split_type += delimiter
195-
if inner_count == 0:
196-
if split_type:
197-
types.append(f"{split_type}{part}")
198-
else:
199-
types.append(part)
200-
split_type = ""
201-
continue
202-
split_type += part
203-
return types
204-
205-
206183
def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str: # noqa: PLR0912
207184
"""Remove None from a Union type string, handling nested unions."""
208185
if use_union_operator:

tests/main/test_main_general.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010
from datamodel_code_generator import (
1111
DataModelType,
12+
Error,
1213
InputFileType,
1314
generate,
1415
snooper_to_methods,
@@ -246,3 +247,28 @@ def test_filename_with_various_control_characters(tmp_path: Path) -> None:
246247
), f"System call found for {test_name}"
247248

248249
compile(generated_content, str(output_path), "exec")
250+
251+
252+
def test_generate_with_nonexistent_file(tmp_path: Path) -> None:
253+
"""Test that generating from a nonexistent file raises an error."""
254+
nonexistent_file = tmp_path / "nonexistent.json"
255+
output_file = tmp_path / "output.py"
256+
257+
with pytest.raises(Error, match="File not found"):
258+
generate(
259+
input_=nonexistent_file,
260+
output=output_file,
261+
)
262+
263+
264+
def test_generate_with_invalid_file_format(tmp_path: Path) -> None:
265+
"""Test that generating from an invalid file format raises an error."""
266+
invalid_file = tmp_path / "invalid.txt"
267+
invalid_file.write_text("this is not valid json or yaml or anything")
268+
output_file = tmp_path / "output.py"
269+
270+
with pytest.raises(Error, match="Invalid file format"):
271+
generate(
272+
input_=invalid_file,
273+
output=output_file,
274+
)

0 commit comments

Comments
 (0)