Rather than using a validator, you can also override __init__
so that the offending `None` values are dropped before validation, letting each field fall back to its default:
from pydantic import BaseModel


class PreferDefaultsModel(BaseModel):
    """
    Pydantic model that will use default values in place of an explicitly passed `None` value.
    This is helpful when consuming API payloads which may explicitly define a field as `null`
    rather than omitting it.
    """

    def _field_allows_none(self, field_name):
        """
        Returns True if the field exists in the model's __fields__ and its
        allow_none property is True. Returns False otherwise.
        """
        # __fields__ and allow_none are Pydantic v1 APIs.
        field = self.__fields__.get(field_name)
        if field is None:
            return False
        return field.allow_none

    def __init__(self, **data):
        """
        Removes any fields from the data which are None and are not allowed to be None.
        The result is then passed to the superclass's __init__.
        """
        data_without_null_fields = {
            k: v
            for k, v in data.items()
            if v is not None or self._field_allows_none(k)
        }
        super().__init__(**data_without_null_fields)
This can then be used in place of BaseModel like so:
from typing import Optional

from pydantic import Field


class Foo(PreferDefaultsModel):
    automatic_field = 1
    explicit_field: int = Field(default=2)
    default_factory_field: int = Field(default_factory=lambda: 3)
    optional_field: Optional[int] = Field(default=4)


f = Foo(automatic_field=None, explicit_field=None, default_factory_field=None, optional_field=None)
print(f.json(indent=2))
{
  "explicit_field": 2,
  "default_factory_field": 3,
  "optional_field": null,
  "automatic_field": 1
}
Note that the optional field is not overwritten: `None` is an allowed value for it, so the explicit `null` is kept.
This approach simply activates each field's default behavior, no matter how that default is defined (a plain value, `Field(default=...)`, or a `default_factory`). This is far less likely to produce unexpected results than using a validator.
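For comparison, here is a minimal sketch of the validator-based approach mentioned above, assuming the Pydantic v1 validator API (`ValidatorFoo` and `none_to_default` are illustrative names, not part of the original). It hints at why validators are more error-prone here: a naive validator that just returns `field.default` would silently re-introduce `None` for `default_factory` fields, since their `default` attribute is `None`:

from pydantic import BaseModel, Field, validator


class ValidatorFoo(BaseModel):
    explicit_field: int = Field(default=2)
    default_factory_field: int = Field(default_factory=lambda: 3)

    @validator("*", pre=True)
    def none_to_default(cls, value, field):
        # Replace an explicitly passed None with the field's default.
        if value is None and not field.allow_none:
            # field.default is None for default_factory fields,
            # so the factory case must be handled explicitly.
            if field.default_factory is not None:
                return field.default_factory()
            return field.default
        return value

Every such edge case has to be re-implemented by hand in the validator, whereas the __init__ override simply lets Pydantic apply whatever default logic the field already carries.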