compare.py

from sqlalchemy import schema as sa_schema, types as sqltypes
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy import event
from ..operations import ops
import logging
from .. import util
from ..util import compat
from ..util import sqla_compat
from sqlalchemy.util import OrderedSet
import re
from .render import _user_defined_render
import contextlib
from alembic.ddl.base import _fk_spec

log = logging.getLogger(__name__)
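
# Top-level helper: populate the script's upgrade operations by comparing
# the model metadata against the database, then derive the downgrade
# operations by reversing them.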
def _populate_migration_script(autogen_context, migration_script):
    upgrade_ops = migration_script.upgrade_ops_list[-1]
    downgrade_ops = migration_script.downgrade_ops_list[-1]

    _produce_net_changes(autogen_context, upgrade_ops)
    upgrade_ops.reverse_into(downgrade_ops)
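
# Registry of comparison hooks, dispatched by target kind ("schema",
# "table", "column"); uselist=True means every comparator registered for a
# given key is invoked, rather than only a single most-specific one.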
comparators = util.Dispatcher(uselist=True)

def _produce_net_changes(autogen_context, upgrade_ops):

    connection = autogen_context.connection
    include_schemas = autogen_context.opts.get('include_schemas', False)

    inspector = Inspector.from_engine(connection)

    default_schema = connection.dialect.default_schema_name
    if include_schemas:
        schemas = set(inspector.get_schema_names())
        # remove the "information_schema" schema, which is never compared
        schemas.discard("information_schema")
        # replace the "default" schema with None
        schemas.discard(default_schema)
        schemas.add(None)
    else:
        schemas = [None]

    comparators.dispatch("schema", autogen_context.dialect.name)(
        autogen_context, upgrade_ops, schemas
    )

@comparators.dispatch_for("schema")
def _autogen_for_tables(autogen_context, upgrade_ops, schemas):
    inspector = autogen_context.inspector

    conn_table_names = set()

    version_table_schema = \
        autogen_context.migration_context.version_table_schema
    version_table = autogen_context.migration_context.version_table

    for s in schemas:
        tables = set(inspector.get_table_names(schema=s))
        if s == version_table_schema:
            tables = tables.difference(
                [autogen_context.migration_context.version_table]
            )
        conn_table_names.update(zip([s] * len(tables), tables))

    metadata_table_names = OrderedSet(
        [(table.schema, table.name) for table in autogen_context.sorted_tables]
    ).difference([(version_table_schema, version_table)])

    _compare_tables(conn_table_names, metadata_table_names,
                    inspector, upgrade_ops, autogen_context)
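
# Compare the (schema, tablename) keys present in the database against those
# in the MetaData: emit CreateTableOp / DropTableOp for the differences and
# a ModifyTableOps container for each table that exists on both sides.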
def _compare_tables(conn_table_names, metadata_table_names,
                    inspector, upgrade_ops, autogen_context):

    default_schema = inspector.bind.dialect.default_schema_name

    # tables coming from the connection will not have "schema"
    # set if it matches default_schema_name; so we need a list
    # of table names from local metadata that also have "None" if schema
    # == default_schema_name.  Most setups will be like this anyway but
    # some are not (see #170)
    metadata_table_names_no_dflt_schema = OrderedSet([
        (schema if schema != default_schema else None, tname)
        for schema, tname in metadata_table_names
    ])

    # to adjust for the MetaData collection storing the tables either
    # as "schemaname.tablename" or just "tablename", create a new lookup
    # which will match the "non-default-schema" keys to the Table object.
    tname_to_table = dict(
        (
            no_dflt_schema,
            autogen_context.table_key_to_table[
                sa_schema._get_table_key(tname, schema)]
        )
        for no_dflt_schema, (schema, tname) in zip(
            metadata_table_names_no_dflt_schema,
            metadata_table_names)
    )
    metadata_table_names = metadata_table_names_no_dflt_schema

    for s, tname in metadata_table_names.difference(conn_table_names):
        name = '%s.%s' % (s, tname) if s else tname
        metadata_table = tname_to_table[(s, tname)]
        if autogen_context.run_filters(
                metadata_table, tname, "table", False, None):
            upgrade_ops.ops.append(
                ops.CreateTableOp.from_table(metadata_table))
            log.info("Detected added table %r", name)
            modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)

            comparators.dispatch("table")(
                autogen_context, modify_table_ops,
                s, tname, None, metadata_table
            )
            if not modify_table_ops.is_empty():
                upgrade_ops.ops.append(modify_table_ops)

    removal_metadata = sa_schema.MetaData()
    for s, tname in conn_table_names.difference(metadata_table_names):
        name = sa_schema._get_table_key(tname, s)
        exists = name in removal_metadata.tables
        t = sa_schema.Table(tname, removal_metadata, schema=s)

        if not exists:
            event.listen(
                t,
                "column_reflect",
                autogen_context.migration_context.impl.
                _compat_autogen_column_reflect(inspector))
            inspector.reflecttable(t, None)
        if autogen_context.run_filters(t, tname, "table", True, None):
            upgrade_ops.ops.append(
                ops.DropTableOp.from_table(t)
            )
            log.info("Detected removed table %r", name)

    existing_tables = conn_table_names.intersection(metadata_table_names)

    existing_metadata = sa_schema.MetaData()
    conn_column_info = {}
    for s, tname in existing_tables:
        name = sa_schema._get_table_key(tname, s)
        exists = name in existing_metadata.tables
        t = sa_schema.Table(tname, existing_metadata, schema=s)
        if not exists:
            event.listen(
                t,
                "column_reflect",
                autogen_context.migration_context.impl.
                _compat_autogen_column_reflect(inspector))
            inspector.reflecttable(t, None)
        conn_column_info[(s, tname)] = t

    for s, tname in sorted(existing_tables, key=lambda x: (x[0] or '', x[1])):
        s = s or None
        name = '%s.%s' % (s, tname) if s else tname
        metadata_table = tname_to_table[(s, tname)]
        conn_table = existing_metadata.tables[name]

        if autogen_context.run_filters(
                metadata_table, tname, "table", False,
                conn_table):

            modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
            with _compare_columns(
                s, tname,
                conn_table,
                metadata_table,
                modify_table_ops, autogen_context, inspector):

                comparators.dispatch("table")(
                    autogen_context, modify_table_ops,
                    s, tname, conn_table, metadata_table
                )

            if not modify_table_ops.is_empty():
                upgrade_ops.ops.append(modify_table_ops)
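
# The _make_* helpers below turn the raw dictionaries returned by the
# Inspector into real SQLAlchemy schema constructs attached to the reflected
# Table, so that they can be compared against the metadata-side objects.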
def _make_index(params, conn_table):
    # TODO: add .info such as 'duplicates_constraint'
    return sa_schema.Index(
        params['name'],
        *[conn_table.c[cname] for cname in params['column_names']],
        unique=params['unique']
    )

def _make_unique_constraint(params, conn_table):
    uq = sa_schema.UniqueConstraint(
        *[conn_table.c[cname] for cname in params['column_names']],
        name=params['name']
    )
    if 'duplicates_index' in params:
        uq.info['duplicates_index'] = params['duplicates_index']

    return uq

def _make_foreign_key(params, conn_table):
    tname = params['referred_table']
    if params['referred_schema']:
        tname = "%s.%s" % (params['referred_schema'], tname)

    options = params.get('options', {})

    const = sa_schema.ForeignKeyConstraint(
        [conn_table.c[cname] for cname in params['constrained_columns']],
        ["%s.%s" % (tname, n) for n in params['referred_columns']],
        onupdate=options.get('onupdate'),
        ondelete=options.get('ondelete'),
        deferrable=options.get('deferrable'),
        initially=options.get('initially'),
        name=params['name']
    )
    # needed by 0.7
    conn_table.append_constraint(const)
    return const
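
# Column comparison is a context manager: added and altered columns are
# collected before the yield, the caller runs the table-level comparators
# (indexes, uniques, foreign keys) inside its "with" block, and dropped
# columns are collected after the yield.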
@contextlib.contextmanager
def _compare_columns(schema, tname, conn_table, metadata_table,
                     modify_table_ops, autogen_context, inspector):
    name = '%s.%s' % (schema, tname) if schema else tname
    metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
    conn_col_names = dict((c.name, c) for c in conn_table.c)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        if autogen_context.run_filters(
                metadata_cols_by_name[cname], cname,
                "column", False, None):
            modify_table_ops.ops.append(
                ops.AddColumnOp.from_column_and_tablename(
                    schema, tname, metadata_cols_by_name[cname])
            )
            log.info("Detected added column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table.c[colname]
        if not autogen_context.run_filters(
                metadata_col, colname, "column", False,
                conn_col):
            continue
        alter_column_op = ops.AlterColumnOp(
            tname, colname, schema=schema)

        comparators.dispatch("column")(
            autogen_context, alter_column_op,
            schema, tname, colname, conn_col, metadata_col
        )

        if alter_column_op.has_changes():
            modify_table_ops.ops.append(alter_column_op)

    yield

    for cname in set(conn_col_names).difference(metadata_col_names):
        if autogen_context.run_filters(
                conn_table.c[cname], cname,
                "column", True, None):
            modify_table_ops.ops.append(
                ops.DropColumnOp.from_column_and_tablename(
                    schema, tname, conn_table.c[cname]
                )
            )
            log.info("Detected removed column '%s.%s'", name, cname)
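
# Signature wrappers: the classes below give Index, UniqueConstraint and
# ForeignKeyConstraint a common facade (a name plus a column/target "sig"
# tuple) so the comparison logic can treat them interchangeably.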
class _constraint_sig(object):

    def md_name_to_sql_name(self, context):
        return self.name

    def __eq__(self, other):
        return self.const == other.const

    def __ne__(self, other):
        return self.const != other.const

    def __hash__(self):
        return hash(self.const)

class _uq_constraint_sig(_constraint_sig):
    is_index = False
    is_unique = True

    def __init__(self, const):
        self.const = const
        self.name = const.name
        self.sig = tuple(sorted([col.name for col in const.columns]))

    @property
    def column_names(self):
        return [col.name for col in self.const.columns]

class _ix_constraint_sig(_constraint_sig):
    is_index = True

    def __init__(self, const):
        self.const = const
        self.name = const.name
        self.sig = tuple(sorted([col.name for col in const.columns]))
        self.is_unique = bool(const.unique)

    def md_name_to_sql_name(self, context):
        return sqla_compat._get_index_final_name(context.dialect, self.const)

    @property
    def column_names(self):
        return sqla_compat._get_index_column_names(self.const)

class _fk_constraint_sig(_constraint_sig):
    def __init__(self, const, include_options=False):
        self.const = const
        self.name = const.name

        (
            self.source_schema, self.source_table,
            self.source_columns, self.target_schema, self.target_table,
            self.target_columns,
            onupdate, ondelete,
            deferrable, initially) = _fk_spec(const)

        self.sig = (
            self.source_schema, self.source_table, tuple(self.source_columns),
            self.target_schema, self.target_table, tuple(self.target_columns)
        )
        if include_options:
            self.sig += (
                (None if onupdate.lower() == 'no action'
                 else onupdate.lower())
                if onupdate else None,
                (None if ondelete.lower() == 'no action'
                 else ondelete.lower())
                if ondelete else None,
                # convert initially + deferrable into one three-state value
                "initially_deferrable"
                if initially and initially.lower() == "deferred"
                else "deferrable" if deferrable
                else "not deferrable"
            )
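
# Table-level comparator for indexes and unique constraints; the numbered
# steps below reconcile metadata-side and connection-side objects first by
# name and then by column signature.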
@comparators.dispatch_for("table")
def _compare_indexes_and_uniques(
        autogen_context, modify_ops, schema, tname, conn_table,
        metadata_table):

    inspector = autogen_context.inspector
    is_create_table = conn_table is None

    # 1a. get raw indexes and unique constraints from metadata ...
    metadata_unique_constraints = set(
        uq for uq in metadata_table.constraints
        if isinstance(uq, sa_schema.UniqueConstraint)
    )
    metadata_indexes = set(metadata_table.indexes)

    conn_uniques = conn_indexes = frozenset()

    supports_unique_constraints = False

    unique_constraints_duplicate_unique_indexes = False

    if conn_table is not None:
        # 1b. ... and from connection, if the table exists
        if hasattr(inspector, "get_unique_constraints"):
            try:
                conn_uniques = inspector.get_unique_constraints(
                    tname, schema=schema)
                supports_unique_constraints = True
            except NotImplementedError:
                pass
            except TypeError:
                # number of arguments is off for the base
                # method in SQLAlchemy due to the cache decorator
                # not being present
                pass
            else:
                for uq in conn_uniques:
                    if uq.get('duplicates_index'):
                        unique_constraints_duplicate_unique_indexes = True
        try:
            conn_indexes = inspector.get_indexes(tname, schema=schema)
        except NotImplementedError:
            pass

        # 2. convert conn-level objects from raw inspector records
        # into schema objects
        conn_uniques = set(_make_unique_constraint(uq_def, conn_table)
                           for uq_def in conn_uniques)
        conn_indexes = set(_make_index(ix, conn_table) for ix in conn_indexes)

        # 2a. if the dialect dupes unique indexes as unique constraints
        # (mysql and oracle), correct for that
        if unique_constraints_duplicate_unique_indexes:
            _correct_for_uq_duplicates_uix(
                conn_uniques, conn_indexes,
                metadata_unique_constraints,
                metadata_indexes
            )

    # 3. give the dialect a chance to omit indexes and constraints that
    # we know are either added implicitly by the DB or that the DB
    # can't accurately report on
    autogen_context.migration_context.impl.\
        correct_for_autogen_constraints(
            conn_uniques, conn_indexes,
            metadata_unique_constraints,
            metadata_indexes)

    # 4. organize the constraints into "signature" collections, the
    # _constraint_sig() objects provide a consistent facade over both
    # Index and UniqueConstraint so we can easily work with them
    # interchangeably
    metadata_unique_constraints = set(_uq_constraint_sig(uq)
                                      for uq in metadata_unique_constraints
                                      )
    metadata_indexes = set(_ix_constraint_sig(ix) for ix in metadata_indexes)

    conn_unique_constraints = set(
        _uq_constraint_sig(uq) for uq in conn_uniques)

    conn_indexes = set(_ix_constraint_sig(ix) for ix in conn_indexes)

    # 5. index things by name, for those objects that have names
    metadata_names = dict(
        (c.md_name_to_sql_name(autogen_context), c) for c in
        metadata_unique_constraints.union(metadata_indexes)
        if c.name is not None)

    conn_uniques_by_name = dict((c.name, c) for c in conn_unique_constraints)
    conn_indexes_by_name = dict((c.name, c) for c in conn_indexes)

    conn_names = dict((c.name, c) for c in
                      conn_unique_constraints.union(conn_indexes)
                      if c.name is not None)

    doubled_constraints = dict(
        (name, (conn_uniques_by_name[name], conn_indexes_by_name[name]))
        for name in set(
            conn_uniques_by_name).intersection(conn_indexes_by_name)
    )

    # 6. index things by "column signature", to help with unnamed unique
    # constraints.
    conn_uniques_by_sig = dict((uq.sig, uq) for uq in conn_unique_constraints)
    metadata_uniques_by_sig = dict(
        (uq.sig, uq) for uq in metadata_unique_constraints)
    metadata_indexes_by_sig = dict(
        (ix.sig, ix) for ix in metadata_indexes)
    unnamed_metadata_uniques = dict(
        (uq.sig, uq) for uq in
        metadata_unique_constraints if uq.name is None)

    # assumptions:
    # 1. a unique constraint or an index from the connection *always*
    #    has a name.
    # 2. an index on the metadata side *always* has a name.
    # 3. a unique constraint on the metadata side *might* have a name.
    # 4. The backend may double up indexes as unique constraints and
    #    vice versa (e.g. MySQL, Postgresql)

    def obj_added(obj):
        if obj.is_index:
            if autogen_context.run_filters(
                    obj.const, obj.name, "index", False, None):
                modify_ops.ops.append(
                    ops.CreateIndexOp.from_index(obj.const)
                )
                log.info("Detected added index '%s' on %s",
                         obj.name, ', '.join([
                             "'%s'" % obj.column_names
                         ]))
        else:
            if not supports_unique_constraints:
                # can't report unique indexes as added if we don't
                # detect them
                return
            if is_create_table:
                # unique constraints are created inline with table defs
                return
            if autogen_context.run_filters(
                    obj.const, obj.name,
                    "unique_constraint", False, None):
                modify_ops.ops.append(
                    ops.AddConstraintOp.from_constraint(obj.const)
                )
                log.info("Detected added unique constraint '%s' on %s",
                         obj.name, ', '.join([
                             "'%s'" % obj.column_names
                         ]))

    def obj_removed(obj):
        if obj.is_index:
            if obj.is_unique and not supports_unique_constraints:
                # many databases double up unique constraints
                # as unique indexes.  without that list we can't
                # be sure what we're doing here
                return

            if autogen_context.run_filters(
                    obj.const, obj.name, "index", True, None):
                modify_ops.ops.append(
                    ops.DropIndexOp.from_index(obj.const)
                )
                log.info(
                    "Detected removed index '%s' on '%s'", obj.name, tname)
        else:
            if autogen_context.run_filters(
                    obj.const, obj.name,
                    "unique_constraint", True, None):
                modify_ops.ops.append(
                    ops.DropConstraintOp.from_constraint(obj.const)
                )
                log.info("Detected removed unique constraint '%s' on '%s'",
                         obj.name, tname
                         )

    def obj_changed(old, new, msg):
        if old.is_index:
            if autogen_context.run_filters(
                    new.const, new.name, "index",
                    False, old.const):
                log.info("Detected changed index '%s' on '%s':%s",
                         old.name, tname, ', '.join(msg)
                         )
                modify_ops.ops.append(
                    ops.DropIndexOp.from_index(old.const)
                )
                modify_ops.ops.append(
                    ops.CreateIndexOp.from_index(new.const)
                )
        else:
            if autogen_context.run_filters(
                    new.const, new.name,
                    "unique_constraint", False, old.const):
                log.info("Detected changed unique constraint '%s' on '%s':%s",
                         old.name, tname, ', '.join(msg)
                         )
                modify_ops.ops.append(
                    ops.DropConstraintOp.from_constraint(old.const)
                )
                modify_ops.ops.append(
                    ops.AddConstraintOp.from_constraint(new.const)
                )

    for added_name in sorted(set(metadata_names).difference(conn_names)):
        obj = metadata_names[added_name]
        obj_added(obj)

    for existing_name in sorted(set(metadata_names).intersection(conn_names)):
        metadata_obj = metadata_names[existing_name]

        if existing_name in doubled_constraints:
            conn_uq, conn_idx = doubled_constraints[existing_name]
            if metadata_obj.is_index:
                conn_obj = conn_idx
            else:
                conn_obj = conn_uq
        else:
            conn_obj = conn_names[existing_name]

        if conn_obj.is_index != metadata_obj.is_index:
            obj_removed(conn_obj)
            obj_added(metadata_obj)
        else:
            msg = []
            if conn_obj.is_unique != metadata_obj.is_unique:
                msg.append(' unique=%r to unique=%r' % (
                    conn_obj.is_unique, metadata_obj.is_unique
                ))
            if conn_obj.sig != metadata_obj.sig:
                msg.append(' columns %r to %r' % (
                    conn_obj.sig, metadata_obj.sig
                ))

            if msg:
                obj_changed(conn_obj, metadata_obj, msg)

    for removed_name in sorted(set(conn_names).difference(metadata_names)):
        conn_obj = conn_names[removed_name]
        if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques:
            continue
        elif removed_name in doubled_constraints:
            if conn_obj.sig not in metadata_indexes_by_sig and \
                    conn_obj.sig not in metadata_uniques_by_sig:
                conn_uq, conn_idx = doubled_constraints[removed_name]
                obj_removed(conn_uq)
                obj_removed(conn_idx)
        else:
            obj_removed(conn_obj)

    for uq_sig in unnamed_metadata_uniques:
        if uq_sig not in conn_uniques_by_sig:
            obj_added(unnamed_metadata_uniques[uq_sig])

def _correct_for_uq_duplicates_uix(
        conn_unique_constraints,
        conn_indexes,
        metadata_unique_constraints,
        metadata_indexes):
    # dedupe unique indexes vs. constraints, since MySQL / Oracle
    # doesn't really have unique constraints as a separate construct.
    # but look in the metadata and try to maintain constructs
    # that already seem to be defined one way or the other
    # on that side.  This logic was formerly local to MySQL dialect,
    # generalized to Oracle and others. See #276
    metadata_uq_names = set([
        cons.name for cons in metadata_unique_constraints
        if cons.name is not None])

    unnamed_metadata_uqs = set([
        _uq_constraint_sig(cons).sig
        for cons in metadata_unique_constraints
        if cons.name is None
    ])

    metadata_ix_names = set([
        cons.name for cons in metadata_indexes if cons.unique])
    conn_ix_names = dict(
        (cons.name, cons) for cons in conn_indexes if cons.unique
    )

    uqs_dupe_indexes = dict(
        (cons.name, cons) for cons in conn_unique_constraints
        if cons.info['duplicates_index']
    )

    for overlap in uqs_dupe_indexes:
        if overlap not in metadata_uq_names:
            if _uq_constraint_sig(uqs_dupe_indexes[overlap]).sig \
                    not in unnamed_metadata_uqs:
                conn_unique_constraints.discard(uqs_dupe_indexes[overlap])
        elif overlap not in metadata_ix_names:
            conn_indexes.discard(conn_ix_names[overlap])
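
# Column-level comparators: each hook below inspects one aspect of a column
# (nullability, autoincrement, type, server default) and records what it
# finds on the shared AlterColumnOp.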
@comparators.dispatch_for("column")
def _compare_nullable(
        autogen_context, alter_column_op, schema, tname, cname, conn_col,
        metadata_col):

    # work around SQLAlchemy issue #3023
    if metadata_col.primary_key:
        return

    metadata_col_nullable = metadata_col.nullable
    conn_col_nullable = conn_col.nullable
    alter_column_op.existing_nullable = conn_col_nullable

    if conn_col_nullable is not metadata_col_nullable:
        alter_column_op.modify_nullable = metadata_col_nullable
        log.info("Detected %s on column '%s.%s'",
                 "NULL" if metadata_col_nullable else "NOT NULL",
                 tname,
                 cname
                 )

@comparators.dispatch_for("column")
def _setup_autoincrement(
        autogen_context, alter_column_op, schema, tname, cname, conn_col,
        metadata_col):

    if metadata_col.table._autoincrement_column is metadata_col:
        alter_column_op.kw['autoincrement'] = True
    elif util.sqla_110 and metadata_col.autoincrement is True:
        alter_column_op.kw['autoincrement'] = True
    elif metadata_col.autoincrement is False:
        alter_column_op.kw['autoincrement'] = False

@comparators.dispatch_for("column")
def _compare_type(
        autogen_context, alter_column_op, schema, tname, cname, conn_col,
        metadata_col):

    conn_type = conn_col.type
    alter_column_op.existing_type = conn_type
    metadata_type = metadata_col.type
    if conn_type._type_affinity is sqltypes.NullType:
        log.info("Couldn't determine database type "
                 "for column '%s.%s'", tname, cname)
        return
    if metadata_type._type_affinity is sqltypes.NullType:
        log.info("Column '%s.%s' has no type within "
                 "the model; can't compare", tname, cname)
        return

    isdiff = autogen_context.migration_context._compare_type(
        conn_col, metadata_col)

    if isdiff:
        alter_column_op.modify_type = metadata_type
        log.info("Detected type change from %r to %r on '%s.%s'",
                 conn_type, metadata_type, tname, cname
                 )
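
# Render the metadata-side server default into a plain string so that it can
# be compared against the default text reflected from the database; a
# user-defined renderer, if configured, takes precedence.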
def _render_server_default_for_compare(metadata_default,
                                       metadata_col, autogen_context):
    rendered = _user_defined_render(
        "server_default", metadata_default, autogen_context)
    if rendered is not False:
        return rendered

    if isinstance(metadata_default, sa_schema.DefaultClause):
        if isinstance(metadata_default.arg, compat.string_types):
            metadata_default = metadata_default.arg
        else:
            metadata_default = str(metadata_default.arg.compile(
                dialect=autogen_context.dialect))
    if isinstance(metadata_default, compat.string_types):
        if metadata_col.type._type_affinity is sqltypes.String:
            metadata_default = re.sub(r"^'|'$", "", metadata_default)
            return repr(metadata_default)
        else:
            return metadata_default
    else:
        return None

@comparators.dispatch_for("column")
def _compare_server_default(
        autogen_context, alter_column_op, schema, tname, cname,
        conn_col, metadata_col):

    metadata_default = metadata_col.server_default
    conn_col_default = conn_col.server_default
    if conn_col_default is None and metadata_default is None:
        return False
    rendered_metadata_default = _render_server_default_for_compare(
        metadata_default, metadata_col, autogen_context)

    rendered_conn_default = conn_col.server_default.arg.text \
        if conn_col.server_default else None

    alter_column_op.existing_server_default = conn_col_default

    isdiff = autogen_context.migration_context._compare_server_default(
        conn_col, metadata_col,
        rendered_metadata_default,
        rendered_conn_default
    )
    if isdiff:
        alter_column_op.modify_server_default = metadata_default
        log.info(
            "Detected server default on column '%s.%s'",
            tname, cname)
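
# Table-level comparator for foreign keys; constraints are matched by their
# column signature (source/target schema, table and columns) rather than by
# name, since some backends (e.g. SQLite) don't preserve constraint names.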
@comparators.dispatch_for("table")
def _compare_foreign_keys(
        autogen_context, modify_table_ops, schema, tname, conn_table,
        metadata_table):

    # if we're doing CREATE TABLE, all FKs are created
    # inline within the table def
    if conn_table is None:
        return

    inspector = autogen_context.inspector
    metadata_fks = set(
        fk for fk in metadata_table.constraints
        if isinstance(fk, sa_schema.ForeignKeyConstraint)
    )

    conn_fks = inspector.get_foreign_keys(tname, schema=schema)

    backend_reflects_fk_options = conn_fks and 'options' in conn_fks[0]

    conn_fks = set(_make_foreign_key(const, conn_table) for const in conn_fks)

    # give the dialect a chance to correct the FKs to match more
    # closely
    autogen_context.migration_context.impl.\
        correct_for_autogen_foreignkeys(
            conn_fks, metadata_fks,
        )

    metadata_fks = set(
        _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
        for fk in metadata_fks
    )

    conn_fks = set(
        _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
        for fk in conn_fks
    )

    conn_fks_by_sig = dict(
        (c.sig, c) for c in conn_fks
    )
    metadata_fks_by_sig = dict(
        (c.sig, c) for c in metadata_fks
    )

    metadata_fks_by_name = dict(
        (c.name, c) for c in metadata_fks if c.name is not None
    )
    conn_fks_by_name = dict(
        (c.name, c) for c in conn_fks if c.name is not None
    )

    def _add_fk(obj, compare_to):
        if autogen_context.run_filters(
                obj.const, obj.name, "foreign_key_constraint", False,
                compare_to):
            modify_table_ops.ops.append(
                ops.CreateForeignKeyOp.from_constraint(obj.const)
            )
            log.info(
                "Detected added foreign key (%s)(%s) on table %s%s",
                ", ".join(obj.source_columns),
                ", ".join(obj.target_columns),
                "%s." % obj.source_schema if obj.source_schema else "",
                obj.source_table)

    def _remove_fk(obj, compare_to):
        if autogen_context.run_filters(
                obj.const, obj.name, "foreign_key_constraint", True,
                compare_to):
            modify_table_ops.ops.append(
                ops.DropConstraintOp.from_constraint(obj.const)
            )
            log.info(
                "Detected removed foreign key (%s)(%s) on table %s%s",
                ", ".join(obj.source_columns),
                ", ".join(obj.target_columns),
                "%s." % obj.source_schema if obj.source_schema else "",
                obj.source_table)

    # so far it appears we don't need to do this by name at all.
    # SQLite doesn't preserve constraint names anyway
    for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig):
        const = conn_fks_by_sig[removed_sig]
        if removed_sig not in metadata_fks_by_sig:
            compare_to = metadata_fks_by_name[const.name].const \
                if const.name in metadata_fks_by_name else None
            _remove_fk(const, compare_to)

    for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig):
        const = metadata_fks_by_sig[added_sig]
        if added_sig not in conn_fks_by_sig:
            compare_to = conn_fks_by_name[const.name].const \
                if const.name in conn_fks_by_name else None
            _add_fk(const, compare_to)