sqlmesh.utils.pydantic
"""Compatibility layer that lets SQLMesh run against both Pydantic v1 and Pydantic v2."""
from __future__ import annotations

import json
import sys
import typing as t
from functools import cached_property, wraps

import pydantic
from pydantic.fields import FieldInfo
from sqlglot import exp, parse_one
from sqlglot.helper import ensure_list
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
from sqlglot.optimizer.qualify_columns import quote_identifiers

from sqlmesh.core import dialect as d
from sqlmesh.utils import str_to_bool

if sys.version_info >= (3, 9):
    from typing import Annotated
else:
    from typing_extensions import Annotated

if t.TYPE_CHECKING:
    Model = t.TypeVar("Model", bound="PydanticModel")


T = t.TypeVar("T")
# Serialization defaults applied by PydanticModel.dict() / PydanticModel.json().
DEFAULT_ARGS = {"exclude_none": True, "by_alias": True}
# E.g. "2.5.3" -> (2, 5); used throughout the module to branch between the v1 and v2 APIs.
PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION = [int(p) for p in pydantic.__version__.split(".")][
    :2
]


if PYDANTIC_MAJOR_VERSION >= 2:

    def field_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        """Version-agnostic field validator decorator (Pydantic v2 backend)."""
        # Pydantic v2 doesn't support "always" argument. The validator behaves as if "always" is True.
        kwargs.pop("always", None)
        return pydantic.field_validator(*args, **kwargs)  # type: ignore

    def model_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        """Version-agnostic model validator decorator (Pydantic v2 backend)."""
        # Pydantic v2 doesn't support "always" argument. The validator behaves as if "always" is True.
        kwargs.pop("always", None)
        return pydantic.model_validator(*args, **kwargs)  # type: ignore

    def field_serializer(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        """Version-agnostic field serializer decorator (Pydantic v2 backend)."""
        return pydantic.field_serializer(*args, **kwargs)  # type: ignore

else:

    def field_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        """Version-agnostic field validator decorator (Pydantic v1 backend)."""
        # Translate the v2-style "mode" argument into v1's "pre" flag.
        mode = kwargs.pop("mode", "after")
        return pydantic.validator(*args, **kwargs, pre=mode.lower() == "before", allow_reuse=True)

    def model_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        """Version-agnostic model validator decorator (Pydantic v1 backend)."""
        # Translate the v2-style "mode" argument into v1's "pre" flag.
        mode = kwargs.pop("mode", "after")
        return pydantic.root_validator(
            *args, **kwargs, pre=mode.lower() == "before", allow_reuse=True
        )

    def field_serializer(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        """Version-agnostic field serializer decorator.

        Pydantic v1 has no field_serializer; the returned decorator leaves the
        function's behavior unchanged (a plain passthrough wrapper).
        """

        def _decorator(func: t.Callable[[t.Any], t.Any]) -> t.Callable[[t.Any], t.Any]:
            @wraps(func)
            def _wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any:
                return func(*args, **kwargs)

            return _wrapper

        return _decorator


def parse_obj_as(type_: T, obj: t.Any) -> T:
    """Parse ``obj`` into ``type_`` using the version-appropriate Pydantic API."""
    if PYDANTIC_MAJOR_VERSION >= 2:
        return pydantic.TypeAdapter(type_).validate_python(obj)  # type: ignore
    return pydantic.tools.parse_obj_as(type_, obj)  # type: ignore


def get_dialect(values: t.Any) -> str:
    """Extracts dialect from a dict or pydantic obj, defaulting to the globally set dialect.

    Python models allow users to instantiate pydantic models by hand. This is problematic
    because the validators kick in with the SQLGlot dialect. To instantiate Pydantic Models used
    in python models using the project default dialect, we set a class variable on the model
    registry and use that here.
    """

    from sqlmesh.core.model import model

    # Pydantic v1 passes a plain dict of values; v2 passes an object exposing them via .data.
    dialect = (values if isinstance(values, dict) else values.data).get("dialect")
    return model._dialect if dialect is None else dialect


def _expression_encoder(e: exp.Expression) -> str:
    """JSON-encode a sqlglot expression, preferring the original SQL stored in its meta."""
    return e.meta.get("sql") or e.sql(dialect=e.meta.get("dialect"))


AuditQueryTypes = t.Union[exp.Query, d.JinjaQuery]
ModelQueryTypes = t.Union[exp.Query, d.JinjaQuery, d.MacroFunc]


class PydanticModel(pydantic.BaseModel):
    """Base model exposing a uniform, v1-style API on both Pydantic v1 and v2."""

    if PYDANTIC_MAJOR_VERSION >= 2:
        model_config = pydantic.ConfigDict(  # type: ignore
            arbitrary_types_allowed=True,
            extra="forbid",  # type: ignore
            # Even though Pydantic v2 kept support for json_encoders, the functionality has been
            # crippled badly. Here we need to enumerate all different ways of how sqlglot expressions
            # show up in pydantic models.
            json_encoders={
                exp.Expression: _expression_encoder,
                exp.DataType: _expression_encoder,
                exp.Tuple: _expression_encoder,
                AuditQueryTypes: _expression_encoder,  # type: ignore
                ModelQueryTypes: _expression_encoder,  # type: ignore
            },
            protected_namespaces=(),
        )
    else:

        class Config:
            arbitrary_types_allowed = True
            extra = "forbid"
            json_encoders = {exp.Expression: _expression_encoder}
            underscore_attrs_are_private = True
            smart_union = True
            keep_untouched = (cached_property,)

    # Per-class cache for pydantic's generated hash functions (see __hash__).
    _hash_func_mapping: t.ClassVar[t.Dict[t.Type[t.Any], t.Callable[[t.Any], int]]] = {}

    def dict(
        self,
        **kwargs: t.Any,
    ) -> t.Dict[str, t.Any]:
        """Serialize the model to a dict, applying DEFAULT_ARGS unless overridden."""
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            return super().model_dump(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore

        mode = kwargs.pop("mode", None)
        if mode == "json":
            # Pydantic v1 doesn't support the 'json' mode for dict().
            return json.loads(super().json(include=include, **kwargs))
        return super().dict(include=include, **kwargs)  # type: ignore

    def json(
        self,
        **kwargs: t.Any,
    ) -> str:
        """Serialize the model to a JSON string, applying DEFAULT_ARGS unless overridden."""
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            # Pydantic v2 doesn't support arbitrary arguments for json.dump().
            if kwargs.pop("sort_keys", False):
                return json.dumps(super().model_dump(mode="json", **kwargs), sort_keys=True)  # type: ignore
            else:
                return super().model_dump_json(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore
        return super().json(include=include, **kwargs)  # type: ignore

    def copy(self: "Model", **kwargs: t.Any) -> "Model":
        """Return a copy of the model via the version-appropriate API."""
        return (
            super().model_copy(**kwargs) if PYDANTIC_MAJOR_VERSION >= 2 else super().copy(**kwargs)  # type: ignore
        )

    @property
    def fields_set(self: "Model") -> t.Set[str]:
        """Names of the fields explicitly set on this instance."""
        return self.__pydantic_fields_set__ if PYDANTIC_MAJOR_VERSION >= 2 else self.__fields_set__  # type: ignore

    @classmethod
    def parse_obj(cls: t.Type["Model"], obj: t.Any) -> "Model":
        """Validate ``obj`` into a model instance (v1-style entry point)."""
        return (
            super().model_validate(obj) if PYDANTIC_MAJOR_VERSION >= 2 else super().parse_obj(obj)  # type: ignore
        )

    @classmethod
    def parse_raw(cls: t.Type["Model"], b: t.Union[str, bytes], **kwargs: t.Any) -> "Model":
        """Validate a JSON string/bytes payload into a model instance."""
        return (
            super().model_validate_json(b, **kwargs)  # type: ignore
            if PYDANTIC_MAJOR_VERSION >= 2
            else super().parse_raw(b, **kwargs)
        )

    @classmethod
    def missing_required_fields(
        cls: t.Type["PydanticModel"], provided_fields: t.Set[str]
    ) -> t.Set[str]:
        """Required field names that are absent from ``provided_fields``."""
        return cls.required_fields() - provided_fields

    @classmethod
    def extra_fields(cls: t.Type["PydanticModel"], provided_fields: t.Set[str]) -> t.Set[str]:
        """Names in ``provided_fields`` that the model does not declare."""
        return provided_fields - cls.all_fields()

    @classmethod
    def all_fields(cls: t.Type["PydanticModel"]) -> t.Set[str]:
        """All field names of the model (aliases preferred)."""
        return cls._fields()

    @classmethod
    def all_field_infos(cls: t.Type["PydanticModel"]) -> t.Dict[str, FieldInfo]:
        """Mapping of field name to FieldInfo, regardless of Pydantic version."""
        return cls.model_fields if PYDANTIC_MAJOR_VERSION >= 2 else cls.__fields__  # type: ignore

    @classmethod
    def required_fields(cls: t.Type["PydanticModel"]) -> t.Set[str]:
        """Field names that have no default and must be provided."""
        return cls._fields(
            lambda field: field.is_required() if PYDANTIC_MAJOR_VERSION >= 2 else field.required
        )  # type: ignore

    @classmethod
    def _fields(
        cls: t.Type["PydanticModel"],
        predicate: t.Callable[[t.Any], bool] = lambda _: True,
    ) -> t.Set[str]:
        """Field names (alias preferred) whose FieldInfo satisfies ``predicate``."""
        return {
            field_info.alias if field_info.alias else field_name
            for field_name, field_info in cls.all_field_infos().items()  # type: ignore
            if predicate(field_info)
        }

    def __eq__(self, other: t.Any) -> bool:
        # Before pydantic 2.6, compare serialized contents; this also allows
        # comparing a model against a plain dict.
        if (PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION) < (2, 6):
            if isinstance(other, pydantic.BaseModel):
                return self.dict() == other.dict()
            else:
                return self.dict() == other
        return super().__eq__(other)

    def __hash__(self) -> int:
        if (PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION) < (2, 6):
            obj = {k: v for k, v in self.__dict__.items() if k in self.all_field_infos()}
            return hash(self.__class__) + hash(tuple(obj.values()))

        # NOTE(review): relies on a pydantic-internal helper; the generated hash
        # function is cached per class to avoid rebuilding it on every call.
        from pydantic._internal._model_construction import (  # type: ignore
            make_hash_func,
        )

        if self.__class__ not in PydanticModel._hash_func_mapping:
            PydanticModel._hash_func_mapping[self.__class__] = make_hash_func(self.__class__)
        return PydanticModel._hash_func_mapping[self.__class__](self)

    def __str__(self) -> str:
        # Render only fields that differ from their declared defaults.
        args = []

        for k, info in self.all_field_infos().items():
            v = getattr(self, k)

            if v != info.default:
                args.append(f"{k}: {v}")

        return f"{self.__class__.__name__}<{', '.join(args)}>"

    def __repr__(self) -> str:
        return str(self)


def model_validator_v1_args(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    """Adapt a v1-style model validator (which expects a values dict) so it also
    works when Pydantic passes a model object instead of a dict."""

    @wraps(func)
    def wrapper(cls: t.Type, values: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
        is_values_dict = isinstance(values, dict)
        values_dict = values if is_values_dict else values.__dict__
        result = func(cls, values_dict, *args, **kwargs)
        if is_values_dict:
            return result
        else:
            # Write the validator's result back onto the model object.
            values.__dict__.update(result)
            return values

    return wrapper


def field_validator_v1_args(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    """Adapt a v1-style field validator (which expects a values dict) so it also
    works when ``values`` is an object exposing the data via ``.data``."""

    @wraps(func)
    def wrapper(cls: t.Type, v: t.Any, values: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
        values_dict = values if isinstance(values, dict) else values.data
        return func(cls, v, values_dict, *args, **kwargs)

    return wrapper


def validate_list_of_strings(v: t.Any) -> t.List[str]:
    """Coerce an identifier, tuple/array expression, or iterable into a list of strings."""
    if isinstance(v, exp.Identifier):
        return [v.name]
    if isinstance(v, (exp.Tuple, exp.Array)):
        return [e.name for e in v.expressions]
    return [i.name if isinstance(i, exp.Identifier) else str(i) for i in v]


def validate_string(v: t.Any) -> str:
    """Coerce a sqlglot expression (via its name) or arbitrary value into a string."""
    if isinstance(v, exp.Expression):
        return v.name
    return str(v)


def bool_validator(v: t.Any) -> bool:
    """Coerce sqlglot boolean literals, expressions, or arbitrary values into a bool."""
    if isinstance(v, exp.Boolean):
        return v.this
    if isinstance(v, exp.Expression):
        return str_to_bool(v.name)
    return str_to_bool(str(v or ""))


def positive_int_validator(v: t.Any) -> int:
    """Validate that ``v`` is (or unwraps to) a strictly positive integer.

    Raises:
        ValueError: if ``v`` is not an integer or is not positive.
    """
    # Unwrap sqlglot integer literal expressions into a plain int.
    if isinstance(v, exp.Expression) and v.is_int:
        v = int(v.name)
    if not isinstance(v, int):
        raise ValueError(f"Invalid num {v}. Value must be an integer value")
    if v <= 0:
        raise ValueError(f"Invalid num {v}. Value must be a positive integer")
    return v


def _get_field(
    v: t.Any,
    values: t.Any,
) -> exp.Expression:
    """Parse, normalize, and quote a single field expression, recording the dialect in meta."""
    dialect = get_dialect(values)

    if isinstance(v, exp.Expression):
        expression = v
    else:
        expression = parse_one(v, dialect=dialect)

    # Promote a bare identifier to a column so downstream consumers get a uniform shape.
    expression = exp.column(expression) if isinstance(expression, exp.Identifier) else expression
    expression = quote_identifiers(
        normalize_identifiers(expression, dialect=dialect), dialect=dialect
    )
    expression.meta["dialect"] = dialect

    return expression


def _get_fields(
    v: t.Any,
    values: t.Any,
) -> t.List[exp.Expression]:
    """Normalize ``v`` (expression, tuple/array, string, or list thereof) into field expressions."""
    dialect = get_dialect(values)

    if isinstance(v, (exp.Tuple, exp.Array)):
        expressions: t.List[exp.Expression] = v.expressions
    elif isinstance(v, exp.Expression):
        expressions = [v]
    else:
        expressions = [
            parse_one(entry, dialect=dialect) if isinstance(entry, str) else entry
            for entry in ensure_list(v)
        ]

    results = []

    for expr in expressions:
        results.append(_get_field(expr, values))

    return results


def list_of_fields_validator(v: t.Any, values: t.Any) -> t.List[exp.Expression]:
    """Validator wrapper around :func:`_get_fields`."""
    return _get_fields(v, values)


def column_validator(v: t.Any, values: t.Any) -> exp.Column:
    """Validate that ``v`` normalizes to a single column expression.

    Raises:
        ValueError: if the parsed expression is not a column.
    """
    expression = _get_field(v, values)
    if not isinstance(expression, exp.Column):
        raise ValueError(f"Invalid column {expression}. Value must be a column")
    return expression


def list_of_columns_or_star_validator(
    v: t.Any, values: t.Any
) -> t.Union[exp.Star, t.List[exp.Column]]:
    """Normalize ``v`` into either a lone star ("*") expression or a list of columns."""
    expressions = _get_fields(v, values)
    if len(expressions) == 1 and isinstance(expressions[0], exp.Star):
        return t.cast(exp.Star, expressions[0])
    return t.cast(t.List[exp.Column], expressions)


# Field types that run the validators above: plain aliases for static type checking,
# Annotated + BeforeValidator on Pydantic v2, and __get_validators__ proxies on v1.
if t.TYPE_CHECKING:
    SQLGlotListOfStrings = t.List[str]
    SQLGlotString = str
    SQLGlotBool = bool
    SQLGlotPositiveInt = int
    SQLGlotColumn = exp.Column
    SQLGlotListOfFields = t.List[exp.Expression]
    SQLGlotListOfColumnsOrStar = t.Union[t.List[exp.Column], exp.Star]
elif PYDANTIC_MAJOR_VERSION >= 2:
    from pydantic.functional_validators import BeforeValidator  # type: ignore

    SQLGlotListOfStrings = Annotated[t.List[str], BeforeValidator(validate_list_of_strings)]
    SQLGlotString = Annotated[str, BeforeValidator(validate_string)]
    SQLGlotBool = Annotated[bool, BeforeValidator(bool_validator)]
    SQLGlotPositiveInt = Annotated[int, BeforeValidator(positive_int_validator)]
    SQLGlotColumn = Annotated[exp.Expression, BeforeValidator(column_validator)]
    SQLGlotListOfFields = Annotated[
        t.List[exp.Expression], BeforeValidator(list_of_fields_validator)
    ]
    SQLGlotListOfColumnsOrStar = Annotated[
        t.Union[t.List[exp.Column], exp.Star], BeforeValidator(list_of_columns_or_star_validator)
    ]
else:

    class PydanticTypeProxy(t.Generic[T]):
        """Pydantic v1 custom-type shim that exposes ``validate`` via __get_validators__."""

        validate: t.Callable[[t.Any], T]

        @classmethod
        def __get_validators__(cls) -> t.Iterator[t.Callable[[t.Any], T]]:
            yield cls.validate

    class SQLGlotListOfStrings(PydanticTypeProxy[t.List[str]]):
        validate = validate_list_of_strings

    class SQLGlotString(PydanticTypeProxy[str]):
        validate = validate_string

    class SQLGlotBool(PydanticTypeProxy[bool]):
        validate = bool_validator

    class SQLGlotPositiveInt(PydanticTypeProxy[int]):
        validate = positive_int_validator

    class SQLGlotColumn(PydanticTypeProxy[exp.Column]):
        validate = column_validator

    class SQLGlotListOfFields(PydanticTypeProxy[t.List[exp.Expression]]):
        validate = list_of_fields_validator

    class SQLGlotListOfColumnsOrStar(PydanticTypeProxy[t.Union[exp.Star, t.List[exp.Column]]]):
        validate = list_of_columns_or_star_validator
def get_dialect(values: t.Any) -> str:
    """Extract the SQL dialect from a dict or pydantic object, defaulting to the project dialect.

    Python models allow users to instantiate pydantic models by hand. This is problematic
    because the validators kick in with the SQLGlot dialect. To instantiate Pydantic Models used
    in python models using the project default dialect, we set a class variable on the model
    registry and use that here.
    """

    from sqlmesh.core.model import model

    # Pydantic v1 hands validators a plain dict; v2 hands an object exposing it via .data.
    source = values if isinstance(values, dict) else values.data
    dialect = source.get("dialect")
    if dialect is None:
        return model._dialect
    return dialect
Extracts dialect from a dict or pydantic obj, defaulting to the globally set dialect.
Python models allow users to instantiate pydantic models by hand. This is problematic because the validators would otherwise run with the default SQLGlot dialect. To instantiate Pydantic Models used in python models using the project default dialect, we set a class variable on the model registry and use that here.
class PydanticModel(pydantic.BaseModel):
    """Base model exposing a uniform, v1-style API on both Pydantic v1 and v2."""

    if PYDANTIC_MAJOR_VERSION >= 2:
        model_config = pydantic.ConfigDict(  # type: ignore
            arbitrary_types_allowed=True,
            extra="forbid",  # type: ignore
            # Even though Pydantic v2 kept support for json_encoders, the functionality has been
            # crippled badly. Here we need to enumerate all different ways of how sqlglot expressions
            # show up in pydantic models.
            json_encoders={
                exp.Expression: _expression_encoder,
                exp.DataType: _expression_encoder,
                exp.Tuple: _expression_encoder,
                AuditQueryTypes: _expression_encoder,  # type: ignore
                ModelQueryTypes: _expression_encoder,  # type: ignore
            },
            protected_namespaces=(),
        )
    else:

        class Config:
            arbitrary_types_allowed = True
            extra = "forbid"
            json_encoders = {exp.Expression: _expression_encoder}
            underscore_attrs_are_private = True
            smart_union = True
            keep_untouched = (cached_property,)

    # Per-class cache for pydantic's generated hash functions (see __hash__).
    _hash_func_mapping: t.ClassVar[t.Dict[t.Type[t.Any], t.Callable[[t.Any], int]]] = {}

    def dict(
        self,
        **kwargs: t.Any,
    ) -> t.Dict[str, t.Any]:
        """Serialize the model to a dict, applying DEFAULT_ARGS unless overridden."""
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            return super().model_dump(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore

        mode = kwargs.pop("mode", None)
        if mode == "json":
            # Pydantic v1 doesn't support the 'json' mode for dict().
            return json.loads(super().json(include=include, **kwargs))
        return super().dict(include=include, **kwargs)  # type: ignore

    def json(
        self,
        **kwargs: t.Any,
    ) -> str:
        """Serialize the model to a JSON string, applying DEFAULT_ARGS unless overridden."""
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            # Pydantic v2 doesn't support arbitrary arguments for json.dump().
            if kwargs.pop("sort_keys", False):
                return json.dumps(super().model_dump(mode="json", **kwargs), sort_keys=True)  # type: ignore
            else:
                return super().model_dump_json(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore
        return super().json(include=include, **kwargs)  # type: ignore

    def copy(self: "Model", **kwargs: t.Any) -> "Model":
        """Return a copy of the model via the version-appropriate API."""
        return (
            super().model_copy(**kwargs) if PYDANTIC_MAJOR_VERSION >= 2 else super().copy(**kwargs)  # type: ignore
        )

    @property
    def fields_set(self: "Model") -> t.Set[str]:
        """Names of the fields explicitly set on this instance."""
        return self.__pydantic_fields_set__ if PYDANTIC_MAJOR_VERSION >= 2 else self.__fields_set__  # type: ignore

    @classmethod
    def parse_obj(cls: t.Type["Model"], obj: t.Any) -> "Model":
        """Validate ``obj`` into a model instance (v1-style entry point)."""
        return (
            super().model_validate(obj) if PYDANTIC_MAJOR_VERSION >= 2 else super().parse_obj(obj)  # type: ignore
        )

    @classmethod
    def parse_raw(cls: t.Type["Model"], b: t.Union[str, bytes], **kwargs: t.Any) -> "Model":
        """Validate a JSON string/bytes payload into a model instance."""
        return (
            super().model_validate_json(b, **kwargs)  # type: ignore
            if PYDANTIC_MAJOR_VERSION >= 2
            else super().parse_raw(b, **kwargs)
        )

    @classmethod
    def missing_required_fields(
        cls: t.Type["PydanticModel"], provided_fields: t.Set[str]
    ) -> t.Set[str]:
        """Required field names that are absent from ``provided_fields``."""
        return cls.required_fields() - provided_fields

    @classmethod
    def extra_fields(cls: t.Type["PydanticModel"], provided_fields: t.Set[str]) -> t.Set[str]:
        """Names in ``provided_fields`` that the model does not declare."""
        return provided_fields - cls.all_fields()

    @classmethod
    def all_fields(cls: t.Type["PydanticModel"]) -> t.Set[str]:
        """All field names of the model (aliases preferred)."""
        return cls._fields()

    @classmethod
    def all_field_infos(cls: t.Type["PydanticModel"]) -> t.Dict[str, FieldInfo]:
        """Mapping of field name to FieldInfo, regardless of Pydantic version."""
        return cls.model_fields if PYDANTIC_MAJOR_VERSION >= 2 else cls.__fields__  # type: ignore

    @classmethod
    def required_fields(cls: t.Type["PydanticModel"]) -> t.Set[str]:
        """Field names that have no default and must be provided."""
        return cls._fields(
            lambda field: field.is_required() if PYDANTIC_MAJOR_VERSION >= 2 else field.required
        )  # type: ignore

    @classmethod
    def _fields(
        cls: t.Type["PydanticModel"],
        predicate: t.Callable[[t.Any], bool] = lambda _: True,
    ) -> t.Set[str]:
        """Field names (alias preferred) whose FieldInfo satisfies ``predicate``."""
        return {
            field_info.alias if field_info.alias else field_name
            for field_name, field_info in cls.all_field_infos().items()  # type: ignore
            if predicate(field_info)
        }

    def __eq__(self, other: t.Any) -> bool:
        # Before pydantic 2.6, compare serialized contents; this also allows
        # comparing a model against a plain dict.
        if (PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION) < (2, 6):
            if isinstance(other, pydantic.BaseModel):
                return self.dict() == other.dict()
            else:
                return self.dict() == other
        return super().__eq__(other)

    def __hash__(self) -> int:
        if (PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION) < (2, 6):
            obj = {k: v for k, v in self.__dict__.items() if k in self.all_field_infos()}
            return hash(self.__class__) + hash(tuple(obj.values()))

        # NOTE(review): relies on a pydantic-internal helper; the generated hash
        # function is cached per class to avoid rebuilding it on every call.
        from pydantic._internal._model_construction import (  # type: ignore
            make_hash_func,
        )

        if self.__class__ not in PydanticModel._hash_func_mapping:
            PydanticModel._hash_func_mapping[self.__class__] = make_hash_func(self.__class__)
        return PydanticModel._hash_func_mapping[self.__class__](self)

    def __str__(self) -> str:
        # Render only fields that differ from their declared defaults.
        args = []

        for k, info in self.all_field_infos().items():
            v = getattr(self, k)

            if v != info.default:
                args.append(f"{k}: {v}")

        return f"{self.__class__.__name__}<{', '.join(args)}>"

    def __repr__(self) -> str:
        return str(self)
Usage docs: https://docs.pydantic.dev/2.7/concepts/models/
A base class for creating Pydantic models.
Attributes:
- __class_vars__: The names of classvars defined on the model.
- __private_attributes__: Metadata about the private attributes of the model.
- __signature__: The signature for instantiating the model.
- __pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
- __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer.
- __pydantic_custom_init__: Whether the model has a custom `__init__` function.
- __pydantic_decorators__: Metadata containing the decorators defined on the model.
This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
- __pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
- __pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
- __pydantic_post_init__: The name of the post-init method for the model, if defined.
- __pydantic_root_model__: Whether the model is a `RootModel`.
- __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model.
- __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model.
- __pydantic_extra__: An instance attribute with the values of extra fields from validation when `model_config['extra'] == 'allow'`.
- __pydantic_fields_set__: An instance attribute with the names of fields explicitly set.
- __pydantic_private__: Instance attribute with the values of private attributes set on the model instance.
    def dict(
        self,
        **kwargs: t.Any,
    ) -> t.Dict[str, t.Any]:
        """Serialize the model to a dict, smoothing over Pydantic v1/v2 differences.

        The module-level ``DEFAULT_ARGS`` are applied first, so callers can
        override them via ``kwargs``.
        """
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            return super().model_dump(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore

        mode = kwargs.pop("mode", None)
        if mode == "json":
            # Pydantic v1 doesn't support the 'json' mode for dict().
            return json.loads(super().json(include=include, **kwargs))
        return super().dict(include=include, **kwargs)  # type: ignore
    def json(
        self,
        **kwargs: t.Any,
    ) -> str:
        """Serialize the model to a JSON string, smoothing over Pydantic v1/v2 differences.

        The module-level ``DEFAULT_ARGS`` are applied first, so callers can
        override them via ``kwargs``. A ``sort_keys=True`` kwarg is honored on v2
        by round-tripping through ``model_dump(mode="json")``.
        """
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            # Pydantic v2 doesn't support arbitrary arguments for json.dump().
            if kwargs.pop("sort_keys", False):
                return json.dumps(super().model_dump(mode="json", **kwargs), sort_keys=True)  # type: ignore
            else:
                return super().model_dump_json(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore
        return super().json(include=include, **kwargs)  # type: ignore
168 def copy(self: "Model", **kwargs: t.Any) -> "Model": 169 return ( 170 super().model_copy(**kwargs) if PYDANTIC_MAJOR_VERSION >= 2 else super().copy(**kwargs) # type: ignore 171 )
Returns a copy of the model.
!!! warning "Deprecated"
This method is now deprecated; use `model_copy` instead.
If you need `include` or `exclude`, use:
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
data = {**data, **(update or {})}
copied = self.model_validate(data)
Arguments:
- include: Optional set or mapping specifying which fields to include in the copied model.
- exclude: Optional set or mapping specifying which fields to exclude in the copied model.
- update: Optional dictionary of field-value pairs to override field values in the copied model.
- deep: If True, the values of fields that are Pydantic models will be deep-copied.
Returns:
A copy of the model with included, excluded and updated fields as specified.
def init_private_attributes(self: BaseModel, __context: Any) -> None:
    """This function is meant to behave like a BaseModel method to initialise private attributes.

    It takes context as an argument since that's what pydantic-core passes when calling it.

    Args:
        self: The BaseModel instance.
        __context: The context.
    """
    # Only initialize once: skip if __pydantic_private__ has already been populated.
    if getattr(self, '__pydantic_private__', None) is None:
        pydantic_private = {}
        for name, private_attr in self.__private_attributes__.items():
            default = private_attr.get_default()
            # Attributes without a declared default are left unset rather than
            # stored as PydanticUndefined.
            if default is not PydanticUndefined:
                pydantic_private[name] = default
        object_setattr(self, '__pydantic_private__', pydantic_private)
Override this method to perform additional initialization after `__init__` and `model_construct`.
This is useful if you want to do some validation that requires the entire model to be initialized.
Inherited Members
- pydantic.main.BaseModel
- BaseModel
- model_extra
- model_fields_set
- model_construct
- model_copy
- model_dump
- model_dump_json
- model_json_schema
- model_parametrized_name
- model_rebuild
- model_validate
- model_validate_json
- model_validate_strings
- parse_file
- from_orm
- construct
- schema
- schema_json
- validate
- update_forward_refs
def model_validator_v1_args(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    """Adapt a model validator written against v1-style arguments (a values dict).

    When the validator is handed a plain dict, its result is returned directly.
    When it is handed a model object instead, the object's ``__dict__`` is passed
    to the validator and the returned mapping is written back onto the object.
    """

    @wraps(func)
    def wrapper(cls: t.Type, values: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
        if isinstance(values, dict):
            return func(cls, values, *args, **kwargs)
        updates = func(cls, values.__dict__, *args, **kwargs)
        values.__dict__.update(updates)
        return values

    return wrapper
def field_validator_v1_args(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    """Adapt a field validator written against v1-style arguments (a values dict).

    If ``values`` is not a dict, the values dict is taken from its ``.data``
    attribute (the shape Pydantic v2 passes) before delegating to ``func``.
    """

    @wraps(func)
    def wrapper(cls: t.Type, v: t.Any, values: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
        if isinstance(values, dict):
            return func(cls, v, values, *args, **kwargs)
        return func(cls, v, values.data, *args, **kwargs)

    return wrapper
def positive_int_validator(v: t.Any) -> int:
    """Validate that ``v`` is (or unwraps to) a strictly positive integer.

    Raises:
        ValueError: if ``v`` is not an integer after unwrapping, or is not positive.
    """
    # Unwrap sqlglot integer literal expressions into a plain int.
    if isinstance(v, exp.Expression) and v.is_int:
        v = int(v.name)
    if not isinstance(v, int):
        raise ValueError(f"Invalid num {v}. Value must be an integer value")
    if v <= 0:
        raise ValueError(f"Invalid num {v}. Value must be a positive integer")
    return v
def list_of_columns_or_star_validator(
    v: t.Any, values: t.Any
) -> t.Union[exp.Star, t.List[exp.Column]]:
    """Normalize ``v`` into either a lone star ("*") expression or a list of columns."""
    expressions = _get_fields(v, values)
    # A single star means "all columns" and is returned unwrapped rather than in a list.
    if len(expressions) == 1 and isinstance(expressions[0], exp.Star):
        return t.cast(exp.Star, expressions[0])
    return t.cast(t.List[exp.Column], expressions)