# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics

from __future__ import annotations

import logging
import re
from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterable
from typing import List
from typing import Mapping
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import Union

from sqlalchemy import cast
from sqlalchemy import Column
from sqlalchemy import MetaData
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import schema
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import text

from . import _autogen
from . import base
from ._autogen import _constraint_sig as _constraint_sig
from ._autogen import ComparisonResult as ComparisonResult
from .. import util
from ..util import sqla_compat

if TYPE_CHECKING:
    from typing import Literal
    from typing import TextIO

    from sqlalchemy.engine import Connection
    from sqlalchemy.engine import Dialect
    from sqlalchemy.engine.cursor import CursorResult
    from sqlalchemy.engine.reflection import Inspector
    from sqlalchemy.sql import ClauseElement
    from sqlalchemy.sql import Executable
    from sqlalchemy.sql.elements import quoted_name
    from sqlalchemy.sql.schema import Constraint
    from sqlalchemy.sql.schema import ForeignKeyConstraint
    from sqlalchemy.sql.schema import Index
    from sqlalchemy.sql.schema import UniqueConstraint
    from sqlalchemy.sql.selectable import TableClause
    from sqlalchemy.sql.type_api import TypeEngine

    from .base import _ServerDefault
    from ..autogenerate.api import AutogenContext
    from ..operations.batch import ApplyBatchImpl
    from ..operations.batch import BatchOperationsImpl

log = logging.getLogger(__name__)


class ImplMeta(type):
    def __init__(
        cls,
        classname: str,
        bases: Tuple[Type[DefaultImpl]],
        dict_: Dict[str, Any],
    ):
        newtype = type.__init__(cls, classname, bases, dict_)
        if "__dialect__" in dict_:
            _impls[dict_["__dialect__"]] = cls  # type: ignore[assignment]
        return newtype


_impls: Dict[str, Type[DefaultImpl]] = {}


class DefaultImpl(metaclass=ImplMeta):
    """Provide the entrypoint for major migration operations,
    including database-specific behavioral variances.

    While individual SQL/DDL constructs already provide
    for database-specific implementations, variances here
    allow for entirely different sequences of operations
    to take place for a particular migration, such as
    SQL Server's special 'IDENTITY INSERT' step for
    bulk inserts.

    """

    __dialect__ = "default"
    transactional_ddl = False
    command_terminator = ";"
    type_synonyms: Tuple[Set[str], ...] = ({"NUMERIC", "DECIMAL"},)
    type_arg_extract: Sequence[str] = ()
    # These attributes are deprecated in SQLAlchemy via #10247. They need to
    # be ignored to support older versions that did not use dialect kwargs.
    # They only apply to Oracle and are replaced by oracle_order,
    # oracle_on_null
    identity_attrs_ignore: Tuple[str, ...] = ("order", "on_null")

    def __init__(
        self,
        dialect: Dialect,
        connection: Optional[Connection],
        as_sql: bool,
        transactional_ddl: Optional[bool],
        output_buffer: Optional[TextIO],
        context_opts: Dict[str, Any],
    ) -> None:
        self.dialect = dialect
        self.connection = connection
        self.as_sql = as_sql
        self.literal_binds = context_opts.get("literal_binds", False)
        self.output_buffer = output_buffer
        self.memo: dict = {}
        self.context_opts = context_opts
        if transactional_ddl is not None:
            self.transactional_ddl = transactional_ddl

        if self.literal_binds:
            if not self.as_sql:
                raise util.CommandError(
                    "Can't use literal_binds setting without as_sql mode"
                )

    @classmethod
    def get_by_dialect(cls, dialect: Dialect) -> Type[DefaultImpl]:
        return _impls[dialect.name]

    def static_output(self, text: str) -> None:
        assert self.output_buffer is not None
        self.output_buffer.write(text + "\n\n")
        self.output_buffer.flush()

    def version_table_impl(
        self,
        *,
        version_table: str,
        version_table_schema: Optional[str],
        version_table_pk: bool,
        **kw: Any,
    ) -> Table:
        """Generate a :class:`.Table` object which will be used as the
        structure for the Alembic version table.

        Third party dialects may override this hook to provide an alternate
        structure for this :class:`.Table`; requirements are only that it
        be named based on the ``version_table`` parameter and contain
        at least a single string-holding column named ``version_num``.

        .. versionadded:: 1.14

        """
        vt = Table(
            version_table,
            MetaData(),
            Column("version_num", String(32), nullable=False),
            schema=version_table_schema,
        )
        if version_table_pk:
            vt.append_constraint(
                PrimaryKeyConstraint(
                    "version_num", name=f"{version_table}_pkc"
                )
            )

        return vt
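
    # Illustrative sketch (an assumption, not from the original source): a
    # third-party dialect could override version_table_impl() to add extra
    # columns, as long as the table keeps the required name and a string
    # ``version_num`` column, e.g.::
    #
    #     class MyDBImpl(DefaultImpl):
    #         __dialect__ = "mydb"
    #
    #         def version_table_impl(self, **kw):
    #             vt = super().version_table_impl(**kw)
    #             vt.append_column(Column("applied_at", String(64)))
    #             return vt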

    def requires_recreate_in_batch(
        self, batch_op: BatchOperationsImpl
    ) -> bool:
        """Return True if the given :class:`.BatchOperationsImpl`
        would need the table to be recreated and copied in order to
        proceed.

        Normally, only returns True on SQLite when operations other
        than add_column are present.

        """
        return False

    def prep_table_for_batch(
        self, batch_impl: ApplyBatchImpl, table: Table
    ) -> None:
        """perform any operations needed on a table before a new
        one is created to replace it in batch mode.

        the PG dialect uses this to drop constraints on the table
        before the new one uses those same names.

        """

    @property
    def bind(self) -> Optional[Connection]:
        return self.connection

    def _exec(
        self,
        construct: Union[Executable, str],
        execution_options: Optional[Mapping[str, Any]] = None,
        multiparams: Optional[Sequence[Mapping[str, Any]]] = None,
        params: Mapping[str, Any] = util.immutabledict(),
    ) -> Optional[CursorResult]:
        if isinstance(construct, str):
            construct = text(construct)
        if self.as_sql:
            if multiparams is not None or params:
                raise TypeError("SQL parameters not allowed with as_sql")

            compile_kw: dict[str, Any]
            if self.literal_binds and not isinstance(
                construct, schema.DDLElement
            ):
                compile_kw = dict(compile_kwargs={"literal_binds": True})
            else:
                compile_kw = {}

            if TYPE_CHECKING:
                assert isinstance(construct, ClauseElement)
            compiled = construct.compile(dialect=self.dialect, **compile_kw)
            self.static_output(
                str(compiled).replace("\t", " ").strip()
                + self.command_terminator
            )
            return None
        else:
            conn = self.connection
            assert conn is not None
            if execution_options:
                conn = conn.execution_options(**execution_options)
            if params and multiparams is not None:
                raise TypeError(
                    "Can't send params and multiparams at the same time"
                )
            if multiparams:
                return conn.execute(construct, multiparams)
            else:
                return conn.execute(construct, params)

    def execute(
        self,
        sql: Union[Executable, str],
        execution_options: Optional[dict[str, Any]] = None,
    ) -> None:
        self._exec(sql, execution_options)

    def alter_column(
        self,
        table_name: str,
        column_name: str,
        *,
        nullable: Optional[bool] = None,
        server_default: Optional[
            Union[_ServerDefault, Literal[False]]
        ] = False,
        name: Optional[str] = None,
        type_: Optional[TypeEngine] = None,
        schema: Optional[str] = None,
        autoincrement: Optional[bool] = None,
        comment: Optional[Union[str, Literal[False]]] = False,
        existing_comment: Optional[str] = None,
        existing_type: Optional[TypeEngine] = None,
        existing_server_default: Optional[_ServerDefault] = None,
        existing_nullable: Optional[bool] = None,
        existing_autoincrement: Optional[bool] = None,
        **kw: Any,
    ) -> None:
        if autoincrement is not None or existing_autoincrement is not None:
            util.warn(
                "autoincrement and existing_autoincrement "
                "only make sense for MySQL",
                stacklevel=3,
            )
        if nullable is not None:
            self._exec(
                base.ColumnNullable(
                    table_name,
                    column_name,
                    nullable,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                )
            )
        if server_default is not False:
            kw = {}
            cls_: Type[
                Union[
                    base.ComputedColumnDefault,
                    base.IdentityColumnDefault,
                    base.ColumnDefault,
                ]
            ]
            if sqla_compat._server_default_is_computed(
                server_default, existing_server_default
            ):
                cls_ = base.ComputedColumnDefault
            elif sqla_compat._server_default_is_identity(
                server_default, existing_server_default
            ):
                cls_ = base.IdentityColumnDefault
                kw["impl"] = self
            else:
                cls_ = base.ColumnDefault
            self._exec(
                cls_(
                    table_name,
                    column_name,
                    server_default,  # type:ignore[arg-type]
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                    **kw,
                )
            )
        if type_ is not None:
            self._exec(
                base.ColumnType(
                    table_name,
                    column_name,
                    type_,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                )
            )
        if comment is not False:
            self._exec(
                base.ColumnComment(
                    table_name,
                    column_name,
                    comment,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                    existing_comment=existing_comment,
                )
            )
        # do the new name last ;)
        if name is not None:
            self._exec(
                base.ColumnName(
                    table_name,
                    column_name,
                    name,
                    schema=schema,
                    existing_type=existing_type,
                    existing_server_default=existing_server_default,
                    existing_nullable=existing_nullable,
                )
            )

    def add_column(
        self,
        table_name: str,
        column: Column[Any],
        *,
        schema: Optional[Union[str, quoted_name]] = None,
        if_not_exists: Optional[bool] = None,
    ) -> None:
        self._exec(
            base.AddColumn(
                table_name,
                column,
                schema=schema,
                if_not_exists=if_not_exists,
            )
        )

    def drop_column(
        self,
        table_name: str,
        column: Column[Any],
        *,
        schema: Optional[str] = None,
        if_exists: Optional[bool] = None,
        **kw,
    ) -> None:
        self._exec(
            base.DropColumn(
                table_name, column, schema=schema, if_exists=if_exists
            )
        )

    def add_constraint(self, const: Any) -> None:
        if const._create_rule is None or const._create_rule(self):
            self._exec(schema.AddConstraint(const))

    def drop_constraint(self, const: Constraint, **kw: Any) -> None:
        self._exec(schema.DropConstraint(const, **kw))

    def rename_table(
        self,
        old_table_name: str,
        new_table_name: Union[str, quoted_name],
        schema: Optional[Union[str, quoted_name]] = None,
    ) -> None:
        self._exec(
            base.RenameTable(old_table_name, new_table_name, schema=schema)
        )

    def create_table(self, table: Table, **kw: Any) -> None:
        table.dispatch.before_create(
            table, self.connection, checkfirst=False, _ddl_runner=self
        )
        self._exec(schema.CreateTable(table, **kw))
        table.dispatch.after_create(
            table, self.connection, checkfirst=False, _ddl_runner=self
        )
        for index in table.indexes:
            self._exec(schema.CreateIndex(index))

        with_comment = (
            self.dialect.supports_comments
            and not self.dialect.inline_comments
        )
        comment = table.comment
        if comment and with_comment:
            self.create_table_comment(table)

        for column in table.columns:
            comment = column.comment
            if comment and with_comment:
                self.create_column_comment(column)

    def drop_table(self, table: Table, **kw: Any) -> None:
        table.dispatch.before_drop(
            table, self.connection, checkfirst=False, _ddl_runner=self
        )
        self._exec(schema.DropTable(table, **kw))
        table.dispatch.after_drop(
            table, self.connection, checkfirst=False, _ddl_runner=self
        )

    def create_index(self, index: Index, **kw: Any) -> None:
        self._exec(schema.CreateIndex(index, **kw))

    def create_table_comment(self, table: Table) -> None:
        self._exec(schema.SetTableComment(table))

    def drop_table_comment(self, table: Table) -> None:
        self._exec(schema.DropTableComment(table))

    def create_column_comment(self, column: Column[Any]) -> None:
        self._exec(schema.SetColumnComment(column))

    def drop_index(self, index: Index, **kw: Any) -> None:
        self._exec(schema.DropIndex(index, **kw))

    def bulk_insert(
        self,
        table: Union[TableClause, Table],
        rows: List[dict],
        multiinsert: bool = True,
    ) -> None:
        if not isinstance(rows, list):
            raise TypeError("List expected")
        elif rows and not isinstance(rows[0], dict):
            raise TypeError("List of dictionaries expected")
        if self.as_sql:
            for row in rows:
                self._exec(
                    table.insert()
                    .inline()
                    .values(
                        **{
                            k: (
                                sqla_compat._literal_bindparam(
                                    k, v, type_=table.c[k].type
                                )
                                if not isinstance(
                                    v, sqla_compat._literal_bindparam
                                )
                                else v
                            )
                            for k, v in row.items()
                        }
                    )
                )
        else:
            if rows:
                if multiinsert:
                    self._exec(table.insert().inline(), multiparams=rows)
                else:
                    for row in rows:
                        self._exec(table.insert().inline().values(**row))

    def _tokenize_column_type(self, column: Column) -> Params:
        definition: str
        definition = self.dialect.type_compiler.process(column.type).lower()

        # tokenize the SQLAlchemy-generated version of a type, so that
        # the two can be compared.
        #
        # examples:
        # NUMERIC(10, 5)
        # TIMESTAMP WITH TIMEZONE
        # INTEGER UNSIGNED
        # INTEGER (10) UNSIGNED
        # INTEGER(10) UNSIGNED
        # varchar character set utf8
        #

        tokens: List[str] = re.findall(r"[\w\-_]+|\(.+?\)", definition)

        term_tokens: List[str] = []
        paren_term = None

        for token in tokens:
            if re.match(r"^\(.*\)$", token):
                paren_term = token
            else:
                term_tokens.append(token)

        params = Params(term_tokens[0], term_tokens[1:], [], {})

        if paren_term:
            term: str
            for term in re.findall("[^(),]+", paren_term):
                if "=" in term:
                    key, val = term.split("=")
                    params.kwargs[key.strip()] = val.strip()
                else:
                    params.args.append(term.strip())

        return params
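
    # Worked example of the tokenization above (illustrative, derived from
    # the comment examples; not part of the original source):
    #
    #     "numeric(10, 5)"        -> Params("numeric", [], ["10", "5"], {})
    #     "integer (10) unsigned" -> Params("integer", ["unsigned"], ["10"], {})
    #     "varchar character set utf8"
    #         -> Params("varchar", ["character", "set", "utf8"], [], {})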

    def _column_types_match(
        self, inspector_params: Params, metadata_params: Params
    ) -> bool:
        if inspector_params.token0 == metadata_params.token0:
            return True

        synonyms = [
            {t.lower() for t in batch} for batch in self.type_synonyms
        ]
        inspector_all_terms = " ".join(
            [inspector_params.token0] + inspector_params.tokens
        )
        metadata_all_terms = " ".join(
            [metadata_params.token0] + metadata_params.tokens
        )

        for batch in synonyms:
            if {inspector_all_terms, metadata_all_terms}.issubset(batch) or {
                inspector_params.token0,
                metadata_params.token0,
            }.issubset(batch):
                return True
        return False

    def _column_args_match(
        self, inspected_params: Params, meta_params: Params
    ) -> bool:
        """We want to compare column parameters. However, we only want
        to compare parameters that are set. If they both have `collation`,
        we want to make sure they are the same. However, if only one
        specifies it, don't flag it for being less specific
        """
        if (
            len(meta_params.tokens) == len(inspected_params.tokens)
            and meta_params.tokens != inspected_params.tokens
        ):
            return False

        if (
            len(meta_params.args) == len(inspected_params.args)
            and meta_params.args != inspected_params.args
        ):
            return False

        insp = " ".join(inspected_params.tokens).lower()
        meta = " ".join(meta_params.tokens).lower()

        for reg in self.type_arg_extract:
            mi = re.search(reg, insp)
            mm = re.search(reg, meta)
            if mi and mm and mi.group(1) != mm.group(1):
                return False

        return True

    def compare_type(
        self, inspector_column: Column[Any], metadata_column: Column
    ) -> bool:
        """Returns True if there ARE differences between the types of the two
        columns. Takes impl.type_synonyms into account between reflected
        and metadata types
        """
        inspector_params = self._tokenize_column_type(inspector_column)
        metadata_params = self._tokenize_column_type(metadata_column)

        if not self._column_types_match(inspector_params, metadata_params):
            return True
        if not self._column_args_match(inspector_params, metadata_params):
            return True
        return False
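
    # Illustrative note (an assumption about typical behavior, not from the
    # original source): with the default type_synonyms, a reflected
    # NUMERIC(10, 2) compared against a metadata DECIMAL(10, 2) tokenizes to
    # matching synonym sets with identical args, so compare_type() returns
    # False (no difference), whereas VARCHAR(30) vs. VARCHAR(40) differs in
    # args and returns True.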

    def compare_server_default(
        self,
        inspector_column,
        metadata_column,
        rendered_metadata_default,
        rendered_inspector_default,
    ):
        return rendered_inspector_default != rendered_metadata_default

    def correct_for_autogen_constraints(
        self,
        conn_uniques: Set[UniqueConstraint],
        conn_indexes: Set[Index],
        metadata_unique_constraints: Set[UniqueConstraint],
        metadata_indexes: Set[Index],
    ) -> None:
        pass

    def cast_for_batch_migrate(self, existing, existing_transfer, new_type):
        if existing.type._type_affinity is not new_type._type_affinity:
            existing_transfer["expr"] = cast(
                existing_transfer["expr"], new_type
            )

    def render_ddl_sql_expr(
        self, expr: ClauseElement, is_server_default: bool = False, **kw: Any
    ) -> str:
        """Render a SQL expression that is typically a server default,
        index expression, etc.

        """
        compile_kw = {"literal_binds": True, "include_table": False}

        return str(
            expr.compile(dialect=self.dialect, compile_kwargs=compile_kw)
        )

    def _compat_autogen_column_reflect(self, inspector: Inspector) -> Callable:
        return self.autogen_column_reflect

    def correct_for_autogen_foreignkeys(
        self,
        conn_fks: Set[ForeignKeyConstraint],
        metadata_fks: Set[ForeignKeyConstraint],
    ) -> None:
        pass

    def autogen_column_reflect(self, inspector, table, column_info):
        """A hook that is attached to the 'column_reflect' event for when
        a Table is reflected from the database during the autogenerate
        process.

        Dialects can elect to modify the information gathered here.

        """

    def start_migrations(self) -> None:
        """A hook called when :meth:`.EnvironmentContext.run_migrations`
        is called.

        Implementations can set up per-migration-run state here.

        """

    def emit_begin(self) -> None:
        """Emit the string ``BEGIN``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("BEGIN" + self.command_terminator)

    def emit_commit(self) -> None:
        """Emit the string ``COMMIT``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("COMMIT" + self.command_terminator)

    def render_type(
        self, type_obj: TypeEngine, autogen_context: AutogenContext
    ) -> Union[str, Literal[False]]:
        return False

    def _compare_identity_default(
        self, metadata_identity, inspector_identity
    ):
        # ignored contains the attributes that were not considered
        # because they are assumed to be at their default values in the db.
        diff, ignored = _compare_identity_options(
            metadata_identity,
            inspector_identity,
            schema.Identity(),
            skip={"always"},
        )

        meta_always = getattr(metadata_identity, "always", None)
        inspector_always = getattr(inspector_identity, "always", None)
        # None and False are the same in this comparison
        if bool(meta_always) != bool(inspector_always):
            diff.add("always")

        diff.difference_update(self.identity_attrs_ignore)

        # returns 3 values:
        return (
            # different identity attributes
            diff,
            # ignored identity attributes
            ignored,
            # whether the two identities should be considered different
            bool(diff) or bool(metadata_identity) != bool(inspector_identity),
        )

    def _compare_index_unique(
        self, metadata_index: Index, reflected_index: Index
    ) -> Optional[str]:
        conn_unique = bool(reflected_index.unique)
        meta_unique = bool(metadata_index.unique)
        if conn_unique != meta_unique:
            return f"unique={conn_unique} to unique={meta_unique}"
        else:
            return None

    def _create_metadata_constraint_sig(
        self, constraint: _autogen._C, **opts: Any
    ) -> _constraint_sig[_autogen._C]:
        return _constraint_sig.from_constraint(True, self, constraint, **opts)

    def _create_reflected_constraint_sig(
        self, constraint: _autogen._C, **opts: Any
    ) -> _constraint_sig[_autogen._C]:
        return _constraint_sig.from_constraint(False, self, constraint, **opts)

    def compare_indexes(
        self,
        metadata_index: Index,
        reflected_index: Index,
    ) -> ComparisonResult:
        """Compare two indexes by comparing the signature generated by
        ``create_index_sig``.

        This method returns a ``ComparisonResult``.
        """
        msg: List[str] = []
        unique_msg = self._compare_index_unique(
            metadata_index, reflected_index
        )
        if unique_msg:
            msg.append(unique_msg)
        m_sig = self._create_metadata_constraint_sig(metadata_index)
        r_sig = self._create_reflected_constraint_sig(reflected_index)

        assert _autogen.is_index_sig(m_sig)
        assert _autogen.is_index_sig(r_sig)

        # The assumption is that the indexes have no expressions
        for sig in m_sig, r_sig:
            if sig.has_expressions:
                log.warning(
                    "Generating approximate signature for index %s. "
                    "The dialect "
                    "implementation should either skip expression indexes "
                    "or provide a custom implementation.",
                    sig.const,
                )

        if m_sig.column_names != r_sig.column_names:
            msg.append(
                f"expression {r_sig.column_names} to {m_sig.column_names}"
            )

        if msg:
            return ComparisonResult.Different(msg)
        else:
            return ComparisonResult.Equal()

    def compare_unique_constraint(
        self,
        metadata_constraint: UniqueConstraint,
        reflected_constraint: UniqueConstraint,
    ) -> ComparisonResult:
        """Compare two unique constraints by comparing the two signatures.

        The arguments are two tuples that contain the unique constraint and
        the signatures generated by ``create_unique_constraint_sig``.

        This method returns a ``ComparisonResult``.
        """
        metadata_tup = self._create_metadata_constraint_sig(
            metadata_constraint
        )
        reflected_tup = self._create_reflected_constraint_sig(
            reflected_constraint
        )

        meta_sig = metadata_tup.unnamed
        conn_sig = reflected_tup.unnamed
        if conn_sig != meta_sig:
            return ComparisonResult.Different(
                f"expression {conn_sig} to {meta_sig}"
            )
        else:
            return ComparisonResult.Equal()

    def _skip_functional_indexes(self, metadata_indexes, conn_indexes):
        conn_indexes_by_name = {c.name: c for c in conn_indexes}

        for idx in list(metadata_indexes):
            if idx.name in conn_indexes_by_name:
                continue
            iex = sqla_compat.is_expression_index(idx)
            if iex:
                util.warn(
                    "autogenerate skipping metadata-specified "
                    "expression-based index "
                    f"{idx.name!r}; dialect {self.__dialect__!r} under "
                    f"SQLAlchemy {sqla_compat.sqlalchemy_version} can't "
                    "reflect these indexes so they can't be compared"
                )
                metadata_indexes.discard(idx)

    def adjust_reflected_dialect_options(
        self, reflected_object: Dict[str, Any], kind: str
    ) -> Dict[str, Any]:
        return reflected_object.get("dialect_options", {})


class Params(NamedTuple):
    token0: str
    tokens: List[str]
    args: List[str]
    kwargs: Dict[str, str]


def _compare_identity_options(
    metadata_io: Union[schema.Identity, schema.Sequence, None],
    inspector_io: Union[schema.Identity, schema.Sequence, None],
    default_io: Union[schema.Identity, schema.Sequence],
    skip: Set[str],
):
    # this can be used for identity or sequence compare.
    # default_io is an instance of IdentityOption with all attributes set to
    # their default values.
    meta_d = sqla_compat._get_identity_options_dict(metadata_io)
    insp_d = sqla_compat._get_identity_options_dict(inspector_io)

    diff = set()
    ignored_attr = set()

    def check_dicts(
        meta_dict: Mapping[str, Any],
        insp_dict: Mapping[str, Any],
        default_dict: Mapping[str, Any],
        attrs: Iterable[str],
    ):
        for attr in set(attrs).difference(skip):
            meta_value = meta_dict.get(attr)
            insp_value = insp_dict.get(attr)
            if insp_value != meta_value:
                default_value = default_dict.get(attr)
                if meta_value == default_value:
                    ignored_attr.add(attr)
                else:
                    diff.add(attr)

    check_dicts(
        meta_d,
        insp_d,
        sqla_compat._get_identity_options_dict(default_io),
        set(meta_d).union(insp_d),
    )
    if sqla_compat.identity_has_dialect_kwargs:
        assert hasattr(default_io, "dialect_kwargs")
        # use only the dialect kwargs in inspector_io since metadata_io
        # can have options for many backends
        check_dicts(
            getattr(metadata_io, "dialect_kwargs", {}),
            getattr(inspector_io, "dialect_kwargs", {}),
            default_io.dialect_kwargs,
            getattr(inspector_io, "dialect_kwargs", {}),
        )

    return diff, ignored_attr