
sqlmesh.utils.pydantic

from __future__ import annotations

import json
import sys
import typing as t
from functools import cached_property, wraps

import pydantic
from pydantic.fields import FieldInfo
from sqlglot import exp, parse_one
from sqlglot.helper import ensure_list
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
from sqlglot.optimizer.qualify_columns import quote_identifiers

from sqlmesh.core import dialect as d
from sqlmesh.utils import str_to_bool

if sys.version_info >= (3, 9):
    from typing import Annotated
else:
    from typing_extensions import Annotated

if t.TYPE_CHECKING:
    Model = t.TypeVar("Model", bound="PydanticModel")


T = t.TypeVar("T")
DEFAULT_ARGS = {"exclude_none": True, "by_alias": True}
PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION = [int(p) for p in pydantic.__version__.split(".")][
    :2
]


if PYDANTIC_MAJOR_VERSION >= 2:

    def field_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        # Pydantic v2 doesn't support "always" argument. The validator behaves as if "always" is True.
        kwargs.pop("always", None)
        return pydantic.field_validator(*args, **kwargs)  # type: ignore

    def model_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        # Pydantic v2 doesn't support "always" argument. The validator behaves as if "always" is True.
        kwargs.pop("always", None)
        return pydantic.model_validator(*args, **kwargs)  # type: ignore

    def field_serializer(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        return pydantic.field_serializer(*args, **kwargs)  # type: ignore

else:

    def field_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        mode = kwargs.pop("mode", "after")
        return pydantic.validator(*args, **kwargs, pre=mode.lower() == "before", allow_reuse=True)

    def model_validator(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        mode = kwargs.pop("mode", "after")
        return pydantic.root_validator(
            *args, **kwargs, pre=mode.lower() == "before", allow_reuse=True
        )

    def field_serializer(*args: t.Any, **kwargs: t.Any) -> t.Callable[[t.Any], t.Any]:
        def _decorator(func: t.Callable[[t.Any], t.Any]) -> t.Callable[[t.Any], t.Any]:
            @wraps(func)
            def _wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any:
                return func(*args, **kwargs)

            return _wrapper

        return _decorator

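These shims let the rest of SQLMesh declare validators and serializers once and run them under either Pydantic major version; mode="before" maps to pre=True on v1. A minimal usage sketch, assuming the PydanticModel base class defined later in this module (the Endpoint model itself is hypothetical):

class Endpoint(PydanticModel):
    url: str

    @field_validator("url", mode="before")
    def _strip_url(cls, v: t.Any) -> str:
        # Runs before type coercion on both Pydantic v1 (pre=True) and v2 (mode="before").
        return str(v).strip()
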
def parse_obj_as(type_: T, obj: t.Any) -> T:
    if PYDANTIC_MAJOR_VERSION >= 2:
        return pydantic.TypeAdapter(type_).validate_python(obj)  # type: ignore
    return pydantic.tools.parse_obj_as(type_, obj)  # type: ignore

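parse_obj_as validates an arbitrary value against a type annotation in a version-agnostic way, for example:

assert parse_obj_as(t.List[int], ["1", 2]) == [1, 2]
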
def get_dialect(values: t.Any) -> str:
    """Extracts the dialect from a dict or pydantic object, defaulting to the globally set dialect.

    Python models allow users to instantiate pydantic models by hand. This is problematic
    because the validators kick in with the default SQLGlot dialect. To instantiate pydantic models used
    in Python models using the project default dialect, we set a class variable on the model
    registry and use that here.
    """

    from sqlmesh.core.model import model

    dialect = (values if isinstance(values, dict) else values.data).get("dialect")
    return model._dialect if dialect is None else dialect

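In a "before" validator, values is a plain dict on Pydantic v1 and an object exposing .data on v2; get_dialect handles both and falls back to the project default dialect that the Python model decorator records on the model registry. For example:

assert get_dialect({"dialect": "snowflake"}) == "snowflake"
# Without an explicit dialect, the registry default (model._dialect) is returned instead.
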
def _expression_encoder(e: exp.Expression) -> str:
    return e.meta.get("sql") or e.sql(dialect=e.meta.get("dialect"))


AuditQueryTypes = t.Union[exp.Query, d.JinjaQuery]
ModelQueryTypes = t.Union[exp.Query, d.JinjaQuery, d.MacroFunc]


class PydanticModel(pydantic.BaseModel):
    if PYDANTIC_MAJOR_VERSION >= 2:
        model_config = pydantic.ConfigDict(  # type: ignore
            arbitrary_types_allowed=True,
            extra="forbid",  # type: ignore
            # Even though Pydantic v2 kept support for json_encoders, the functionality has been
            # crippled badly. Here we need to enumerate all different ways of how sqlglot expressions
            # show up in pydantic models.
            json_encoders={
                exp.Expression: _expression_encoder,
                exp.DataType: _expression_encoder,
                exp.Tuple: _expression_encoder,
                AuditQueryTypes: _expression_encoder,  # type: ignore
                ModelQueryTypes: _expression_encoder,  # type: ignore
            },
            protected_namespaces=(),
        )
    else:

        class Config:
            arbitrary_types_allowed = True
            extra = "forbid"
            json_encoders = {exp.Expression: _expression_encoder}
            underscore_attrs_are_private = True
            smart_union = True
            keep_untouched = (cached_property,)

    _hash_func_mapping: t.ClassVar[t.Dict[t.Type[t.Any], t.Callable[[t.Any], int]]] = {}

    def dict(
        self,
        **kwargs: t.Any,
    ) -> t.Dict[str, t.Any]:
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            return super().model_dump(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore
        return super().dict(include=include, **kwargs)  # type: ignore

    def json(
        self,
        **kwargs: t.Any,
    ) -> str:
        kwargs = {**DEFAULT_ARGS, **kwargs}
        if PYDANTIC_MAJOR_VERSION >= 2:
            # Pydantic v2 doesn't support arbitrary arguments for json.dump().
            if kwargs.pop("sort_keys", False):
                return json.dumps(super().model_dump(mode="json", **kwargs), sort_keys=True)  # type: ignore
            else:
                return super().model_dump_json(**kwargs)  # type: ignore

        include = kwargs.pop("include", None)
        if include is None and self.__config__.extra != "allow":  # type: ignore
            # Workaround to support @cached_property in Pydantic v1.
            include = {f.name for f in self.all_field_infos().values()}  # type: ignore
        return super().json(include=include, **kwargs)  # type: ignore

    def copy(self: "Model", **kwargs: t.Any) -> "Model":
        return (
            super().model_copy(**kwargs) if PYDANTIC_MAJOR_VERSION >= 2 else super().copy(**kwargs)  # type: ignore
        )

    @property
    def fields_set(self: "Model") -> t.Set[str]:
        return self.__pydantic_fields_set__ if PYDANTIC_MAJOR_VERSION >= 2 else self.__fields_set__  # type: ignore

    @classmethod
    def parse_obj(cls: t.Type["Model"], obj: t.Any) -> "Model":
        return (
            super().model_validate(obj) if PYDANTIC_MAJOR_VERSION >= 2 else super().parse_obj(obj)  # type: ignore
        )

    @classmethod
    def parse_raw(cls: t.Type["Model"], b: t.Union[str, bytes], **kwargs: t.Any) -> "Model":
        return (
            super().model_validate_json(b, **kwargs)  # type: ignore
            if PYDANTIC_MAJOR_VERSION >= 2
            else super().parse_raw(b, **kwargs)
        )

    @classmethod
    def missing_required_fields(
        cls: t.Type["PydanticModel"], provided_fields: t.Set[str]
    ) -> t.Set[str]:
        return cls.required_fields() - provided_fields

    @classmethod
    def extra_fields(cls: t.Type["PydanticModel"], provided_fields: t.Set[str]) -> t.Set[str]:
        return provided_fields - cls.all_fields()

    @classmethod
    def all_fields(cls: t.Type["PydanticModel"]) -> t.Set[str]:
        return cls._fields()

    @classmethod
    def all_field_infos(cls: t.Type["PydanticModel"]) -> t.Dict[str, FieldInfo]:
        return cls.model_fields if PYDANTIC_MAJOR_VERSION >= 2 else cls.__fields__  # type: ignore

    @classmethod
    def required_fields(cls: t.Type["PydanticModel"]) -> t.Set[str]:
        return cls._fields(
            lambda field: field.is_required() if PYDANTIC_MAJOR_VERSION >= 2 else field.required
        )  # type: ignore

    @classmethod
    def _fields(
        cls: t.Type["PydanticModel"],
        predicate: t.Callable[[t.Any], bool] = lambda _: True,
    ) -> t.Set[str]:
        return {
            field_info.alias if field_info.alias else field_name
            for field_name, field_info in cls.all_field_infos().items()  # type: ignore
            if predicate(field_info)
        }

    def __eq__(self, other: t.Any) -> bool:
        if (PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION) < (2, 6):
            if isinstance(other, pydantic.BaseModel):
                return self.dict() == other.dict()
            else:
                return self.dict() == other
        return super().__eq__(other)

    def __hash__(self) -> int:
        if (PYDANTIC_MAJOR_VERSION, PYDANTIC_MINOR_VERSION) < (2, 6):
            obj = {k: v for k, v in self.__dict__.items() if k in self.all_field_infos()}
            return hash(self.__class__) + hash(tuple(obj.values()))

        from pydantic._internal._model_construction import (  # type: ignore
            make_hash_func,
        )

        if self.__class__ not in PydanticModel._hash_func_mapping:
            PydanticModel._hash_func_mapping[self.__class__] = make_hash_func(self.__class__)
        return PydanticModel._hash_func_mapping[self.__class__](self)

    def __str__(self) -> str:
        args = []

        for k, info in self.all_field_infos().items():
            v = getattr(self, k)

            if v != info.default:
                args.append(f"{k}: {v}")

        return f"{self.__class__.__name__}<{', '.join(args)}>"

    def __repr__(self) -> str:
        return str(self)

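PydanticModel is the base class for most SQLMesh configuration and state objects: extra fields are forbidden, sqlglot expressions are allowed as field types and serialized back to SQL, and the Pydantic v1-style dict() / json() / parse_obj() / parse_raw() API keeps working on v2. A minimal sketch (the Window model is hypothetical):

class Window(PydanticModel):
    size: int
    unit: str = "day"

w = Window.parse_obj({"size": 7})
assert w.dict() == {"size": 7, "unit": "day"}  # exclude_none and by_alias are applied by default
assert Window.parse_raw(w.json()) == w
assert Window.required_fields() == {"size"}
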
def model_validator_v1_args(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    @wraps(func)
    def wrapper(cls: t.Type, values: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
        is_values_dict = isinstance(values, dict)
        values_dict = values if is_values_dict else values.__dict__
        result = func(cls, values_dict, *args, **kwargs)
        if is_values_dict:
            return result
        else:
            values.__dict__.update(result)
            return values

    return wrapper


def field_validator_v1_args(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    @wraps(func)
    def wrapper(cls: t.Type, v: t.Any, values: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
        values_dict = values if isinstance(values, dict) else values.data
        return func(cls, v, values_dict, *args, **kwargs)

    return wrapper

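These decorators keep validator bodies written against the Pydantic v1 calling convention (a plain dict of field values) working on v2, where validators receive the model instance or a ValidationInfo object instead. They are meant to be stacked under the field_validator / model_validator shims defined above, roughly like this (the Interval model is hypothetical):

class Interval(PydanticModel):
    start: int
    end: int

    @field_validator("end", mode="before")
    @field_validator_v1_args
    def _validate_end(cls, v: t.Any, values: t.Dict[str, t.Any]) -> t.Any:
        # values is a plain dict of previously validated fields on both Pydantic versions.
        if "start" in values and int(v) < values["start"]:
            raise ValueError("end must not be less than start")
        return v
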
def validate_list_of_strings(v: t.Any) -> t.List[str]:
    if isinstance(v, exp.Identifier):
        return [v.name]
    if isinstance(v, (exp.Tuple, exp.Array)):
        return [e.name for e in v.expressions]
    return [i.name if isinstance(i, exp.Identifier) else str(i) for i in v]


def validate_string(v: t.Any) -> str:
    if isinstance(v, exp.Expression):
        return v.name
    return str(v)


def bool_validator(v: t.Any) -> bool:
    if isinstance(v, exp.Boolean):
        return v.this
    if isinstance(v, exp.Expression):
        return str_to_bool(v.name)
    return str_to_bool(str(v or ""))


def positive_int_validator(v: t.Any) -> int:
    if isinstance(v, exp.Expression) and v.is_int:
        v = int(v.name)
    if not isinstance(v, int):
        raise ValueError(f"Invalid num {v}. Value must be an integer value")
    if v <= 0:
        raise ValueError(f"Invalid num {v}. Value must be a positive integer")
    return v

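These validators normalize values that may arrive either as plain Python objects or as sqlglot expressions parsed from a model definition. A few illustrative examples of the expected behavior:

assert validate_list_of_strings(["a", exp.to_identifier("b")]) == ["a", "b"]
assert bool_validator(exp.true()) is True
assert bool_validator("false") is False
assert positive_int_validator(parse_one("5")) == 5
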
def _get_field(
    v: t.Any,
    values: t.Any,
) -> exp.Expression:
    dialect = get_dialect(values)

    if isinstance(v, exp.Expression):
        expression = v
    else:
        expression = parse_one(v, dialect=dialect)

    expression = exp.column(expression) if isinstance(expression, exp.Identifier) else expression
    expression = quote_identifiers(
        normalize_identifiers(expression, dialect=dialect), dialect=dialect
    )
    expression.meta["dialect"] = dialect

    return expression


def _get_fields(
    v: t.Any,
    values: t.Any,
) -> t.List[exp.Expression]:
    dialect = get_dialect(values)

    if isinstance(v, (exp.Tuple, exp.Array)):
        expressions: t.List[exp.Expression] = v.expressions
    elif isinstance(v, exp.Expression):
        expressions = [v]
    else:
        expressions = [
            parse_one(entry, dialect=dialect) if isinstance(entry, str) else entry
            for entry in ensure_list(v)
        ]

    results = []

    for expr in expressions:
        results.append(_get_field(expr, values))

    return results


def list_of_fields_validator(v: t.Any, values: t.Any) -> t.List[exp.Expression]:
    return _get_fields(v, values)


def column_validator(v: t.Any, values: t.Any) -> exp.Column:
    expression = _get_field(v, values)
    if not isinstance(expression, exp.Column):
        raise ValueError(f"Invalid column {expression}. Value must be a column")
    return expression


def list_of_columns_or_star_validator(
    v: t.Any, values: t.Any
) -> t.Union[exp.Star, t.List[exp.Column]]:
    expressions = _get_fields(v, values)
    if len(expressions) == 1 and isinstance(expressions[0], exp.Star):
        return t.cast(exp.Star, expressions[0])
    return t.cast(t.List[exp.Column], expressions)

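The field validators parse strings using the resolved dialect, normalize and quote identifiers, and record the dialect on the expression's metadata. A sketch of the expected behavior (the exact output strings assume the duckdb dialect):

col = column_validator("event_ts", {"dialect": "duckdb"})
assert isinstance(col, exp.Column)
assert col.sql(dialect="duckdb") == '"event_ts"'

fields = list_of_fields_validator(["event_ts", "amount + 1"], {"dialect": "duckdb"})
assert [f.sql(dialect="duckdb") for f in fields] == ['"event_ts"', '"amount" + 1']
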
if t.TYPE_CHECKING:
    SQLGlotListOfStrings = t.List[str]
    SQLGlotString = str
    SQLGlotBool = bool
    SQLGlotPositiveInt = int
    SQLGlotColumn = exp.Column
    SQLGlotListOfFields = t.List[exp.Expression]
    SQLGlotListOfColumnsOrStar = t.Union[t.List[exp.Column], exp.Star]
elif PYDANTIC_MAJOR_VERSION >= 2:
    from pydantic.functional_validators import BeforeValidator  # type: ignore

    SQLGlotListOfStrings = Annotated[t.List[str], BeforeValidator(validate_list_of_strings)]
    SQLGlotString = Annotated[str, BeforeValidator(validate_string)]
    SQLGlotBool = Annotated[bool, BeforeValidator(bool_validator)]
    SQLGlotPositiveInt = Annotated[int, BeforeValidator(positive_int_validator)]
    SQLGlotColumn = Annotated[exp.Expression, BeforeValidator(column_validator)]
    SQLGlotListOfFields = Annotated[
        t.List[exp.Expression], BeforeValidator(list_of_fields_validator)
    ]
    SQLGlotListOfColumnsOrStar = Annotated[
        t.Union[t.List[exp.Column], exp.Star], BeforeValidator(list_of_columns_or_star_validator)
    ]
else:

    class PydanticTypeProxy(t.Generic[T]):
        validate: t.Callable[[t.Any], T]

        @classmethod
        def __get_validators__(cls) -> t.Iterator[t.Callable[[t.Any], T]]:
            yield cls.validate

    class SQLGlotListOfStrings(PydanticTypeProxy[t.List[str]]):
        validate = validate_list_of_strings

    class SQLGlotString(PydanticTypeProxy[str]):
        validate = validate_string

    class SQLGlotBool(PydanticTypeProxy[bool]):
        validate = bool_validator

    class SQLGlotPositiveInt(PydanticTypeProxy[int]):
        validate = positive_int_validator

    class SQLGlotColumn(PydanticTypeProxy[exp.Column]):
        validate = column_validator

    class SQLGlotListOfFields(PydanticTypeProxy[t.List[exp.Expression]]):
        validate = list_of_fields_validator

    class SQLGlotListOfColumnsOrStar(PydanticTypeProxy[t.Union[exp.Star, t.List[exp.Column]]]):
        validate = list_of_columns_or_star_validator
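
On Pydantic v2 these aliases combine Annotated with BeforeValidator; on v1 the PydanticTypeProxy subclasses provide the same coercion through __get_validators__. Either way, a model field declared with one of these types accepts raw Python values or sqlglot expressions. A minimal sketch (the ExampleConfig model is hypothetical):

class ExampleConfig(PydanticModel):
    blocking: SQLGlotBool = True
    tags: SQLGlotListOfStrings = []

cfg = ExampleConfig(blocking=exp.false(), tags=exp.Tuple(expressions=[exp.to_identifier("finance")]))
assert cfg.blocking is False
assert cfg.tags == ["finance"]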