# postgresql.py — PostgreSQL dialect implementation
  1. # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
  2. # mypy: no-warn-return-any, allow-any-generics
  3. from __future__ import annotations
  4. import logging
  5. import re
  6. from typing import Any
  7. from typing import cast
  8. from typing import Dict
  9. from typing import List
  10. from typing import Optional
  11. from typing import Sequence
  12. from typing import Tuple
  13. from typing import TYPE_CHECKING
  14. from typing import Union
  15. from sqlalchemy import Column
  16. from sqlalchemy import Float
  17. from sqlalchemy import Identity
  18. from sqlalchemy import literal_column
  19. from sqlalchemy import Numeric
  20. from sqlalchemy import select
  21. from sqlalchemy import text
  22. from sqlalchemy import types as sqltypes
  23. from sqlalchemy.dialects.postgresql import BIGINT
  24. from sqlalchemy.dialects.postgresql import ExcludeConstraint
  25. from sqlalchemy.dialects.postgresql import INTEGER
  26. from sqlalchemy.schema import CreateIndex
  27. from sqlalchemy.sql.elements import ColumnClause
  28. from sqlalchemy.sql.elements import TextClause
  29. from sqlalchemy.sql.functions import FunctionElement
  30. from sqlalchemy.types import NULLTYPE
  31. from .base import alter_column
  32. from .base import alter_table
  33. from .base import AlterColumn
  34. from .base import ColumnComment
  35. from .base import format_column_name
  36. from .base import format_table_name
  37. from .base import format_type
  38. from .base import IdentityColumnDefault
  39. from .base import RenameTable
  40. from .impl import ComparisonResult
  41. from .impl import DefaultImpl
  42. from .. import util
  43. from ..autogenerate import render
  44. from ..operations import ops
  45. from ..operations import schemaobj
  46. from ..operations.base import BatchOperations
  47. from ..operations.base import Operations
  48. from ..util import sqla_compat
  49. from ..util.sqla_compat import compiles
  50. if TYPE_CHECKING:
  51. from typing import Literal
  52. from sqlalchemy import Index
  53. from sqlalchemy import UniqueConstraint
  54. from sqlalchemy.dialects.postgresql.array import ARRAY
  55. from sqlalchemy.dialects.postgresql.base import PGDDLCompiler
  56. from sqlalchemy.dialects.postgresql.hstore import HSTORE
  57. from sqlalchemy.dialects.postgresql.json import JSON
  58. from sqlalchemy.dialects.postgresql.json import JSONB
  59. from sqlalchemy.sql.elements import ClauseElement
  60. from sqlalchemy.sql.elements import ColumnElement
  61. from sqlalchemy.sql.elements import quoted_name
  62. from sqlalchemy.sql.schema import MetaData
  63. from sqlalchemy.sql.schema import Table
  64. from sqlalchemy.sql.type_api import TypeEngine
  65. from .base import _ServerDefault
  66. from ..autogenerate.api import AutogenContext
  67. from ..autogenerate.render import _f_name
  68. from ..runtime.migration import MigrationContext
  69. log = logging.getLogger(__name__)
  70. class PostgresqlImpl(DefaultImpl):
  71. __dialect__ = "postgresql"
  72. transactional_ddl = True
  73. type_synonyms = DefaultImpl.type_synonyms + (
  74. {"FLOAT", "DOUBLE PRECISION"},
  75. )
  76. def create_index(self, index: Index, **kw: Any) -> None:
  77. # this likely defaults to None if not present, so get()
  78. # should normally not return the default value. being
  79. # defensive in any case
  80. postgresql_include = index.kwargs.get("postgresql_include", None) or ()
  81. for col in postgresql_include:
  82. if col not in index.table.c: # type: ignore[union-attr]
  83. index.table.append_column( # type: ignore[union-attr]
  84. Column(col, sqltypes.NullType)
  85. )
  86. self._exec(CreateIndex(index, **kw))
  87. def prep_table_for_batch(self, batch_impl, table):
  88. for constraint in table.constraints:
  89. if (
  90. constraint.name is not None
  91. and constraint.name in batch_impl.named_constraints
  92. ):
  93. self.drop_constraint(constraint)
  94. def compare_server_default(
  95. self,
  96. inspector_column,
  97. metadata_column,
  98. rendered_metadata_default,
  99. rendered_inspector_default,
  100. ):
  101. # don't do defaults for SERIAL columns
  102. if (
  103. metadata_column.primary_key
  104. and metadata_column is metadata_column.table._autoincrement_column
  105. ):
  106. return False
  107. conn_col_default = rendered_inspector_default
  108. defaults_equal = conn_col_default == rendered_metadata_default
  109. if defaults_equal:
  110. return False
  111. if None in (
  112. conn_col_default,
  113. rendered_metadata_default,
  114. metadata_column.server_default,
  115. ):
  116. return not defaults_equal
  117. metadata_default = metadata_column.server_default.arg
  118. if isinstance(metadata_default, str):
  119. if not isinstance(inspector_column.type, (Numeric, Float)):
  120. metadata_default = re.sub(r"^'|'$", "", metadata_default)
  121. metadata_default = f"'{metadata_default}'"
  122. metadata_default = literal_column(metadata_default)
  123. # run a real compare against the server
  124. conn = self.connection
  125. assert conn is not None
  126. return not conn.scalar(
  127. select(literal_column(conn_col_default) == metadata_default)
  128. )
  129. def alter_column(
  130. self,
  131. table_name: str,
  132. column_name: str,
  133. *,
  134. nullable: Optional[bool] = None,
  135. server_default: Optional[
  136. Union[_ServerDefault, Literal[False]]
  137. ] = False,
  138. name: Optional[str] = None,
  139. type_: Optional[TypeEngine] = None,
  140. schema: Optional[str] = None,
  141. autoincrement: Optional[bool] = None,
  142. existing_type: Optional[TypeEngine] = None,
  143. existing_server_default: Optional[_ServerDefault] = None,
  144. existing_nullable: Optional[bool] = None,
  145. existing_autoincrement: Optional[bool] = None,
  146. **kw: Any,
  147. ) -> None:
  148. using = kw.pop("postgresql_using", None)
  149. if using is not None and type_ is None:
  150. raise util.CommandError(
  151. "postgresql_using must be used with the type_ parameter"
  152. )
  153. if type_ is not None:
  154. self._exec(
  155. PostgresqlColumnType(
  156. table_name,
  157. column_name,
  158. type_,
  159. schema=schema,
  160. using=using,
  161. existing_type=existing_type,
  162. existing_server_default=existing_server_default,
  163. existing_nullable=existing_nullable,
  164. )
  165. )
  166. super().alter_column(
  167. table_name,
  168. column_name,
  169. nullable=nullable,
  170. server_default=server_default,
  171. name=name,
  172. schema=schema,
  173. autoincrement=autoincrement,
  174. existing_type=existing_type,
  175. existing_server_default=existing_server_default,
  176. existing_nullable=existing_nullable,
  177. existing_autoincrement=existing_autoincrement,
  178. **kw,
  179. )
  180. def autogen_column_reflect(self, inspector, table, column_info):
  181. if column_info.get("default") and isinstance(
  182. column_info["type"], (INTEGER, BIGINT)
  183. ):
  184. seq_match = re.match(
  185. r"nextval\('(.+?)'::regclass\)", column_info["default"]
  186. )
  187. if seq_match:
  188. info = sqla_compat._exec_on_inspector(
  189. inspector,
  190. text(
  191. "select c.relname, a.attname "
  192. "from pg_class as c join "
  193. "pg_depend d on d.objid=c.oid and "
  194. "d.classid='pg_class'::regclass and "
  195. "d.refclassid='pg_class'::regclass "
  196. "join pg_class t on t.oid=d.refobjid "
  197. "join pg_attribute a on a.attrelid=t.oid and "
  198. "a.attnum=d.refobjsubid "
  199. "where c.relkind='S' and "
  200. "c.oid=cast(:seqname as regclass)"
  201. ),
  202. seqname=seq_match.group(1),
  203. ).first()
  204. if info:
  205. seqname, colname = info
  206. if colname == column_info["name"]:
  207. log.info(
  208. "Detected sequence named '%s' as "
  209. "owned by integer column '%s(%s)', "
  210. "assuming SERIAL and omitting",
  211. seqname,
  212. table.name,
  213. colname,
  214. )
  215. # sequence, and the owner is this column,
  216. # its a SERIAL - whack it!
  217. del column_info["default"]
  218. def correct_for_autogen_constraints(
  219. self,
  220. conn_unique_constraints,
  221. conn_indexes,
  222. metadata_unique_constraints,
  223. metadata_indexes,
  224. ):
  225. doubled_constraints = {
  226. index
  227. for index in conn_indexes
  228. if index.info.get("duplicates_constraint")
  229. }
  230. for ix in doubled_constraints:
  231. conn_indexes.remove(ix)
  232. if not sqla_compat.sqla_2:
  233. self._skip_functional_indexes(metadata_indexes, conn_indexes)
  234. # pg behavior regarding modifiers
  235. # | # | compiled sql | returned sql | regexp. group is removed |
  236. # | - | ---------------- | -----------------| ------------------------ |
  237. # | 1 | nulls first | nulls first | - |
  238. # | 2 | nulls last | | (?<! desc)( nulls last)$ |
  239. # | 3 | asc | | ( asc)$ |
  240. # | 4 | asc nulls first | nulls first | ( asc) nulls first$ |
  241. # | 5 | asc nulls last | | ( asc nulls last)$ |
  242. # | 6 | desc | desc | - |
  243. # | 7 | desc nulls first | desc | desc( nulls first)$ |
  244. # | 8 | desc nulls last | desc nulls last | - |
  245. _default_modifiers_re = ( # order of case 2 and 5 matters
  246. re.compile("( asc nulls last)$"), # case 5
  247. re.compile("(?<! desc)( nulls last)$"), # case 2
  248. re.compile("( asc)$"), # case 3
  249. re.compile("( asc) nulls first$"), # case 4
  250. re.compile(" desc( nulls first)$"), # case 7
  251. )
  252. def _cleanup_index_expr(self, index: Index, expr: str) -> str:
  253. expr = expr.lower().replace('"', "").replace("'", "")
  254. if index.table is not None:
  255. # should not be needed, since include_table=False is in compile
  256. expr = expr.replace(f"{index.table.name.lower()}.", "")
  257. if "::" in expr:
  258. # strip :: cast. types can have spaces in them
  259. expr = re.sub(r"(::[\w ]+\w)", "", expr)
  260. while expr and expr[0] == "(" and expr[-1] == ")":
  261. expr = expr[1:-1]
  262. # NOTE: when parsing the connection expression this cleanup could
  263. # be skipped
  264. for rs in self._default_modifiers_re:
  265. if match := rs.search(expr):
  266. start, end = match.span(1)
  267. expr = expr[:start] + expr[end:]
  268. break
  269. while expr and expr[0] == "(" and expr[-1] == ")":
  270. expr = expr[1:-1]
  271. # strip casts
  272. cast_re = re.compile(r"cast\s*\(")
  273. if cast_re.match(expr):
  274. expr = cast_re.sub("", expr)
  275. # remove the as type
  276. expr = re.sub(r"as\s+[^)]+\)", "", expr)
  277. # remove spaces
  278. expr = expr.replace(" ", "")
  279. return expr
  280. def _dialect_options(
  281. self, item: Union[Index, UniqueConstraint]
  282. ) -> Tuple[Any, ...]:
  283. # only the positive case is returned by sqlalchemy reflection so
  284. # None and False are threated the same
  285. if item.dialect_kwargs.get("postgresql_nulls_not_distinct"):
  286. return ("nulls_not_distinct",)
  287. return ()
  288. def compare_indexes(
  289. self,
  290. metadata_index: Index,
  291. reflected_index: Index,
  292. ) -> ComparisonResult:
  293. msg = []
  294. unique_msg = self._compare_index_unique(
  295. metadata_index, reflected_index
  296. )
  297. if unique_msg:
  298. msg.append(unique_msg)
  299. m_exprs = metadata_index.expressions
  300. r_exprs = reflected_index.expressions
  301. if len(m_exprs) != len(r_exprs):
  302. msg.append(f"expression number {len(r_exprs)} to {len(m_exprs)}")
  303. if msg:
  304. # no point going further, return early
  305. return ComparisonResult.Different(msg)
  306. skip = []
  307. for pos, (m_e, r_e) in enumerate(zip(m_exprs, r_exprs), 1):
  308. m_compile = self._compile_element(m_e)
  309. m_text = self._cleanup_index_expr(metadata_index, m_compile)
  310. # print(f"META ORIG: {m_compile!r} CLEANUP: {m_text!r}")
  311. r_compile = self._compile_element(r_e)
  312. r_text = self._cleanup_index_expr(metadata_index, r_compile)
  313. # print(f"CONN ORIG: {r_compile!r} CLEANUP: {r_text!r}")
  314. if m_text == r_text:
  315. continue # expressions these are equal
  316. elif m_compile.strip().endswith("_ops") and (
  317. " " in m_compile or ")" in m_compile # is an expression
  318. ):
  319. skip.append(
  320. f"expression #{pos} {m_compile!r} detected "
  321. "as including operator clause."
  322. )
  323. util.warn(
  324. f"Expression #{pos} {m_compile!r} in index "
  325. f"{reflected_index.name!r} detected to include "
  326. "an operator clause. Expression compare cannot proceed. "
  327. "Please move the operator clause to the "
  328. "``postgresql_ops`` dict to enable proper compare "
  329. "of the index expressions: "
  330. "https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#operator-classes", # noqa: E501
  331. )
  332. else:
  333. msg.append(f"expression #{pos} {r_compile!r} to {m_compile!r}")
  334. m_options = self._dialect_options(metadata_index)
  335. r_options = self._dialect_options(reflected_index)
  336. if m_options != r_options:
  337. msg.extend(f"options {r_options} to {m_options}")
  338. if msg:
  339. return ComparisonResult.Different(msg)
  340. elif skip:
  341. # if there are other changes detected don't skip the index
  342. return ComparisonResult.Skip(skip)
  343. else:
  344. return ComparisonResult.Equal()
  345. def compare_unique_constraint(
  346. self,
  347. metadata_constraint: UniqueConstraint,
  348. reflected_constraint: UniqueConstraint,
  349. ) -> ComparisonResult:
  350. metadata_tup = self._create_metadata_constraint_sig(
  351. metadata_constraint
  352. )
  353. reflected_tup = self._create_reflected_constraint_sig(
  354. reflected_constraint
  355. )
  356. meta_sig = metadata_tup.unnamed
  357. conn_sig = reflected_tup.unnamed
  358. if conn_sig != meta_sig:
  359. return ComparisonResult.Different(
  360. f"expression {conn_sig} to {meta_sig}"
  361. )
  362. metadata_do = self._dialect_options(metadata_tup.const)
  363. conn_do = self._dialect_options(reflected_tup.const)
  364. if metadata_do != conn_do:
  365. return ComparisonResult.Different(
  366. f"expression {conn_do} to {metadata_do}"
  367. )
  368. return ComparisonResult.Equal()
  369. def adjust_reflected_dialect_options(
  370. self, reflected_options: Dict[str, Any], kind: str
  371. ) -> Dict[str, Any]:
  372. options: Dict[str, Any]
  373. options = reflected_options.get("dialect_options", {}).copy()
  374. if not options.get("postgresql_include"):
  375. options.pop("postgresql_include", None)
  376. return options
  377. def _compile_element(self, element: Union[ClauseElement, str]) -> str:
  378. if isinstance(element, str):
  379. return element
  380. return element.compile(
  381. dialect=self.dialect,
  382. compile_kwargs={"literal_binds": True, "include_table": False},
  383. ).string
  384. def render_ddl_sql_expr(
  385. self,
  386. expr: ClauseElement,
  387. is_server_default: bool = False,
  388. is_index: bool = False,
  389. **kw: Any,
  390. ) -> str:
  391. """Render a SQL expression that is typically a server default,
  392. index expression, etc.
  393. """
  394. # apply self_group to index expressions;
  395. # see https://github.com/sqlalchemy/sqlalchemy/blob/
  396. # 82fa95cfce070fab401d020c6e6e4a6a96cc2578/
  397. # lib/sqlalchemy/dialects/postgresql/base.py#L2261
  398. if is_index and not isinstance(expr, ColumnClause):
  399. expr = expr.self_group()
  400. return super().render_ddl_sql_expr(
  401. expr, is_server_default=is_server_default, is_index=is_index, **kw
  402. )
  403. def render_type(
  404. self, type_: TypeEngine, autogen_context: AutogenContext
  405. ) -> Union[str, Literal[False]]:
  406. mod = type(type_).__module__
  407. if not mod.startswith("sqlalchemy.dialects.postgresql"):
  408. return False
  409. if hasattr(self, "_render_%s_type" % type_.__visit_name__):
  410. meth = getattr(self, "_render_%s_type" % type_.__visit_name__)
  411. return meth(type_, autogen_context)
  412. return False
  413. def _render_HSTORE_type(
  414. self, type_: HSTORE, autogen_context: AutogenContext
  415. ) -> str:
  416. return cast(
  417. str,
  418. render._render_type_w_subtype(
  419. type_, autogen_context, "text_type", r"(.+?\(.*text_type=)"
  420. ),
  421. )
  422. def _render_ARRAY_type(
  423. self, type_: ARRAY, autogen_context: AutogenContext
  424. ) -> str:
  425. return cast(
  426. str,
  427. render._render_type_w_subtype(
  428. type_, autogen_context, "item_type", r"(.+?\()"
  429. ),
  430. )
  431. def _render_JSON_type(
  432. self, type_: JSON, autogen_context: AutogenContext
  433. ) -> str:
  434. return cast(
  435. str,
  436. render._render_type_w_subtype(
  437. type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)"
  438. ),
  439. )
  440. def _render_JSONB_type(
  441. self, type_: JSONB, autogen_context: AutogenContext
  442. ) -> str:
  443. return cast(
  444. str,
  445. render._render_type_w_subtype(
  446. type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)"
  447. ),
  448. )
class PostgresqlColumnType(AlterColumn):
    """DDL element for ``ALTER COLUMN ... TYPE`` with an optional
    PostgreSQL ``USING`` conversion expression."""

    def __init__(
        self, name: str, column_name: str, type_: TypeEngine, **kw
    ) -> None:
        # "using" is consumed here; all remaining kw pass to AlterColumn
        using = kw.pop("using", None)
        super().__init__(name, column_name, **kw)
        self.type_ = sqltypes.to_instance(type_)
        self.using = using
  457. @compiles(RenameTable, "postgresql")
  458. def visit_rename_table(
  459. element: RenameTable, compiler: PGDDLCompiler, **kw
  460. ) -> str:
  461. return "%s RENAME TO %s" % (
  462. alter_table(compiler, element.table_name, element.schema),
  463. format_table_name(compiler, element.new_table_name, None),
  464. )
  465. @compiles(PostgresqlColumnType, "postgresql")
  466. def visit_column_type(
  467. element: PostgresqlColumnType, compiler: PGDDLCompiler, **kw
  468. ) -> str:
  469. return "%s %s %s %s" % (
  470. alter_table(compiler, element.table_name, element.schema),
  471. alter_column(compiler, element.column_name),
  472. "TYPE %s" % format_type(compiler, element.type_),
  473. "USING %s" % element.using if element.using else "",
  474. )
  475. @compiles(ColumnComment, "postgresql")
  476. def visit_column_comment(
  477. element: ColumnComment, compiler: PGDDLCompiler, **kw
  478. ) -> str:
  479. ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}"
  480. comment = (
  481. compiler.sql_compiler.render_literal_value(
  482. element.comment, sqltypes.String()
  483. )
  484. if element.comment is not None
  485. else "NULL"
  486. )
  487. return ddl.format(
  488. table_name=format_table_name(
  489. compiler, element.table_name, element.schema
  490. ),
  491. column_name=format_column_name(compiler, element.column_name),
  492. comment=comment,
  493. )
  494. @compiles(IdentityColumnDefault, "postgresql")
  495. def visit_identity_column(
  496. element: IdentityColumnDefault, compiler: PGDDLCompiler, **kw
  497. ):
  498. text = "%s %s " % (
  499. alter_table(compiler, element.table_name, element.schema),
  500. alter_column(compiler, element.column_name),
  501. )
  502. if element.default is None:
  503. # drop identity
  504. text += "DROP IDENTITY"
  505. return text
  506. elif element.existing_server_default is None:
  507. # add identity options
  508. text += "ADD "
  509. text += compiler.visit_identity_column(element.default)
  510. return text
  511. else:
  512. # alter identity
  513. diff, _, _ = element.impl._compare_identity_default(
  514. element.default, element.existing_server_default
  515. )
  516. identity = element.default
  517. for attr in sorted(diff):
  518. if attr == "always":
  519. text += "SET GENERATED %s " % (
  520. "ALWAYS" if identity.always else "BY DEFAULT"
  521. )
  522. else:
  523. text += "SET %s " % compiler.get_identity_options(
  524. Identity(**{attr: getattr(identity, attr)})
  525. )
  526. return text
@Operations.register_operation("create_exclude_constraint")
@BatchOperations.register_operation(
    "create_exclude_constraint", "batch_create_exclude_constraint"
)
@ops.AddConstraintOp.register_add_constraint("exclude_constraint")
class CreateExcludeConstraintOp(ops.AddConstraintOp):
    """Represent a create exclude constraint operation."""

    constraint_type = "exclude"

    def __init__(
        self,
        constraint_name: sqla_compat._ConstraintName,
        table_name: Union[str, quoted_name],
        elements: Union[
            Sequence[Tuple[str, str]],
            Sequence[Tuple[ColumnClause[Any], str]],
        ],
        where: Optional[Union[ColumnElement[bool], str]] = None,
        schema: Optional[str] = None,
        _orig_constraint: Optional[ExcludeConstraint] = None,
        **kw,
    ) -> None:
        self.constraint_name = constraint_name
        self.table_name = table_name
        self.elements = elements
        self.where = where
        self.schema = schema
        # when set, to_constraint() short-circuits and returns it as-is
        self._orig_constraint = _orig_constraint
        self.kw = kw

    @classmethod
    def from_constraint(  # type:ignore[override]
        cls, constraint: ExcludeConstraint
    ) -> CreateExcludeConstraintOp:
        """Build this op from an existing :class:`.ExcludeConstraint`."""
        constraint_table = sqla_compat._table_for_constraint(constraint)
        return cls(
            constraint.name,
            constraint_table.name,
            [  # type: ignore
                (expr, op) for expr, name, op in constraint._render_exprs
            ],
            where=cast("ColumnElement[bool] | None", constraint.where),
            schema=constraint_table.schema,
            _orig_constraint=constraint,
            deferrable=constraint.deferrable,
            initially=constraint.initially,
            using=constraint.using,
        )

    def to_constraint(
        self, migration_context: Optional[MigrationContext] = None
    ) -> ExcludeConstraint:
        """Return an :class:`.ExcludeConstraint` for this op, creating a
        placeholder table and columns when not built from an original
        constraint."""
        if self._orig_constraint is not None:
            return self._orig_constraint
        schema_obj = schemaobj.SchemaObjects(migration_context)
        t = schema_obj.table(self.table_name, schema=self.schema)
        excl = ExcludeConstraint(
            *self.elements,
            name=self.constraint_name,
            where=self.where,
            **self.kw,
        )
        for (
            expr,
            name,
            oper,
        ) in excl._render_exprs:
            t.append_column(Column(name, NULLTYPE))
        t.append_constraint(excl)
        return excl

    @classmethod
    def create_exclude_constraint(
        cls,
        operations: Operations,
        constraint_name: str,
        table_name: str,
        *elements: Any,
        **kw: Any,
    ) -> Optional[Table]:
        """Issue an alter to create an EXCLUDE constraint using the
        current migration context.

        .. note:: This method is Postgresql specific, and additionally
           requires at least SQLAlchemy 1.0.

        e.g.::

            from alembic import op

            op.create_exclude_constraint(
                "user_excl",
                "user",
                ("period", "&&"),
                ("group", "="),
                where=("group != 'some group'"),
            )

        Note that the expressions work the same way as that of
        the ``ExcludeConstraint`` object itself; if plain strings are
        passed, quoting rules must be applied manually.

        :param name: Name of the constraint.
        :param table_name: String name of the source table.
        :param elements: exclude conditions.
        :param where: SQL expression or SQL string with optional WHERE
         clause.
        :param deferrable: optional bool. If set, emit DEFERRABLE or
         NOT DEFERRABLE when issuing DDL for this constraint.
        :param initially: optional string. If set, emit INITIALLY <value>
         when issuing DDL for this constraint.
        :param schema: Optional schema name to operate within.

        """
        op = cls(constraint_name, table_name, elements, **kw)
        return operations.invoke(op)

    @classmethod
    def batch_create_exclude_constraint(
        cls,
        operations: BatchOperations,
        constraint_name: str,
        *elements: Any,
        **kw: Any,
    ) -> Optional[Table]:
        """Issue a "create exclude constraint" instruction using the
        current batch migration context.

        .. note:: This method is Postgresql specific, and additionally
           requires at least SQLAlchemy 1.0.

        .. seealso::

            :meth:`.Operations.create_exclude_constraint`

        """
        kw["schema"] = operations.impl.schema
        op = cls(constraint_name, operations.impl.table_name, elements, **kw)
        return operations.invoke(op)
  650. @render.renderers.dispatch_for(CreateExcludeConstraintOp)
  651. def _add_exclude_constraint(
  652. autogen_context: AutogenContext, op: CreateExcludeConstraintOp
  653. ) -> str:
  654. return _exclude_constraint(op.to_constraint(), autogen_context, alter=True)
  655. @render._constraint_renderers.dispatch_for(ExcludeConstraint)
  656. def _render_inline_exclude_constraint(
  657. constraint: ExcludeConstraint,
  658. autogen_context: AutogenContext,
  659. namespace_metadata: MetaData,
  660. ) -> str:
  661. rendered = render._user_defined_render(
  662. "exclude", constraint, autogen_context
  663. )
  664. if rendered is not False:
  665. return rendered
  666. return _exclude_constraint(constraint, autogen_context, False)
  667. def _postgresql_autogenerate_prefix(autogen_context: AutogenContext) -> str:
  668. imports = autogen_context.imports
  669. if imports is not None:
  670. imports.add("from sqlalchemy.dialects import postgresql")
  671. return "postgresql."
def _exclude_constraint(
    constraint: ExcludeConstraint,
    autogen_context: AutogenContext,
    alter: bool,
) -> str:
    """Render an ExcludeConstraint either as an
    ``op.create_exclude_constraint(...)`` call (``alter=True``) or as an
    inline ``postgresql.ExcludeConstraint(...)`` (``alter=False``)."""
    opts: List[Tuple[str, Union[quoted_name, str, _f_name, None]]] = []

    has_batch = autogen_context._has_batch

    if constraint.deferrable:
        opts.append(("deferrable", str(constraint.deferrable)))
    if constraint.initially:
        opts.append(("initially", str(constraint.initially)))
    if constraint.using:
        opts.append(("using", str(constraint.using)))
    # batch mode supplies the schema itself; inline mode has no schema kw
    if not has_batch and alter and constraint.table.schema:
        opts.append(("schema", render._ident(constraint.table.schema)))
    # the op form passes the name positionally; inline form as a kw
    if not alter and constraint.name:
        opts.append(
            ("name", render._render_gen_name(autogen_context, constraint.name))
        )

    def do_expr_where_opts():
        # render the (expression, operator) tuples, then where=, then
        # the accumulated keyword options, in that order
        args = [
            "(%s, %r)"
            % (
                _render_potential_column(
                    sqltext,  # type:ignore[arg-type]
                    autogen_context,
                ),
                opstring,
            )
            for sqltext, name, opstring in constraint._render_exprs
        ]
        if constraint.where is not None:
            args.append(
                "where=%s"
                % render._render_potential_expr(
                    constraint.where, autogen_context
                )
            )
        args.extend(["%s=%r" % (k, v) for k, v in opts])
        return args

    if alter:
        args = [
            repr(render._render_gen_name(autogen_context, constraint.name))
        ]
        if not has_batch:
            args += [repr(render._ident(constraint.table.name))]
        args.extend(do_expr_where_opts())
        return "%(prefix)screate_exclude_constraint(%(args)s)" % {
            "prefix": render._alembic_autogenerate_prefix(autogen_context),
            "args": ", ".join(args),
        }
    else:
        args = do_expr_where_opts()
        return "%(prefix)sExcludeConstraint(%(args)s)" % {
            "prefix": _postgresql_autogenerate_prefix(autogen_context),
            "args": ", ".join(args),
        }
  729. def _render_potential_column(
  730. value: Union[
  731. ColumnClause[Any], Column[Any], TextClause, FunctionElement[Any]
  732. ],
  733. autogen_context: AutogenContext,
  734. ) -> str:
  735. if isinstance(value, ColumnClause):
  736. if value.is_literal:
  737. # like literal_column("int8range(from, to)") in ExcludeConstraint
  738. template = "%(prefix)sliteral_column(%(name)r)"
  739. else:
  740. template = "%(prefix)scolumn(%(name)r)"
  741. return template % {
  742. "prefix": render._sqlalchemy_autogenerate_prefix(autogen_context),
  743. "name": value.name,
  744. }
  745. else:
  746. return render._render_potential_expr(
  747. value,
  748. autogen_context,
  749. wrap_in_element=isinstance(value, (TextClause, FunctionElement)),
  750. )