# impl.py
from sqlalchemy import schema, text
from sqlalchemy import types as sqltypes

from ..util.compat import (
    string_types, text_type, with_metaclass
)
from ..util import sqla_compat
from .. import util
from . import base


class ImplMeta(type):

    def __init__(cls, classname, bases, dict_):
        newtype = type.__init__(cls, classname, bases, dict_)
        if '__dialect__' in dict_:
            _impls[dict_['__dialect__']] = cls
        return newtype

_impls = {}
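
# ImplMeta registers every subclass that declares a ``__dialect__`` name in
# the ``_impls`` dict above, so the dialect-specific implementation can be
# looked up at runtime.  A rough sketch of the lookup (the engine/URL below
# are illustrative only):
#
#     engine = create_engine("postgresql://...")
#     impl_cls = DefaultImpl.get_by_dialect(engine.dialect)
#     # -> the class registered under __dialect__ == "postgresql"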


class DefaultImpl(with_metaclass(ImplMeta)):

    """Provide the entrypoint for major migration operations,
    including database-specific behavioral variances.

    While individual SQL/DDL constructs already provide
    for database-specific implementations, variances here
    allow for entirely different sequences of operations
    to take place for a particular migration, such as
    SQL Server's special 'IDENTITY INSERT' step for
    bulk inserts.

    """
    __dialect__ = 'default'

    transactional_ddl = False
    command_terminator = ";"

    def __init__(self, dialect, connection, as_sql,
                 transactional_ddl, output_buffer,
                 context_opts):
        self.dialect = dialect
        self.connection = connection
        self.as_sql = as_sql
        self.literal_binds = context_opts.get('literal_binds', False)
        if self.literal_binds and not util.sqla_08:
            util.warn("'literal_binds' flag not supported in SQLAlchemy 0.7")
            self.literal_binds = False

        self.output_buffer = output_buffer
        self.memo = {}
        self.context_opts = context_opts
        if transactional_ddl is not None:
            self.transactional_ddl = transactional_ddl

        if self.literal_binds:
            if not self.as_sql:
                raise util.CommandError(
                    "Can't use literal_binds setting without as_sql mode")
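
        # Note: ``literal_binds`` only makes sense when emitting SQL to a
        # script (as_sql / "offline" mode), since parameter values must be
        # rendered inline rather than sent to a live DBAPI cursor; hence the
        # guard above.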

    @classmethod
    def get_by_dialect(cls, dialect):
        return _impls[dialect.name]

    def static_output(self, text):
        self.output_buffer.write(text_type(text + "\n\n"))
        self.output_buffer.flush()

    def requires_recreate_in_batch(self, batch_op):
        """Return True if the given :class:`.BatchOperationsImpl`
        would need the table to be recreated and copied in order to
        proceed.

        Normally, only returns True on SQLite when operations other
        than add_column are present.

        """
        return False

    def prep_table_for_batch(self, table):
        """Perform any operations needed on a table before a new
        one is created to replace it in batch mode.

        The PG dialect uses this to drop constraints on the table
        before the new one uses those same names.

        """

    @property
    def bind(self):
        return self.connection

    def _exec(self, construct, execution_options=None,
              multiparams=(),
              params=util.immutabledict()):
        if isinstance(construct, string_types):
            construct = text(construct)
        if self.as_sql:
            if multiparams or params:
                # TODO: coverage
                raise Exception("Execution arguments not allowed with as_sql")

            if self.literal_binds and not isinstance(
                    construct, schema.DDLElement):
                compile_kw = dict(compile_kwargs={"literal_binds": True})
            else:
                compile_kw = {}

            self.static_output(text_type(
                construct.compile(dialect=self.dialect, **compile_kw)
            ).replace("\t", "    ").strip() + self.command_terminator)
        else:
            conn = self.connection
            if execution_options:
                conn = conn.execution_options(**execution_options)
            return conn.execute(construct, *multiparams, **params)
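
    # _exec() is the single funnel for statement emission: in offline
    # (as_sql) mode the construct is compiled against the target dialect and
    # written to the output buffer, rendering literal binds when requested;
    # in online mode it is executed on the live connection, optionally with
    # per-statement execution_options.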

    def execute(self, sql, execution_options=None):
        self._exec(sql, execution_options)

    def alter_column(self, table_name, column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     schema=None,
                     autoincrement=None,
                     existing_type=None,
                     existing_server_default=None,
                     existing_nullable=None,
                     existing_autoincrement=None
                     ):
        if autoincrement is not None or existing_autoincrement is not None:
            util.warn(
                "autoincrement and existing_autoincrement "
                "only make sense for MySQL")

        if nullable is not None:
            self._exec(base.ColumnNullable(
                table_name, column_name,
                nullable, schema=schema,
                existing_type=existing_type,
                existing_server_default=existing_server_default,
                existing_nullable=existing_nullable,
            ))
        if server_default is not False:
            self._exec(base.ColumnDefault(
                table_name, column_name, server_default,
                schema=schema,
                existing_type=existing_type,
                existing_server_default=existing_server_default,
                existing_nullable=existing_nullable,
            ))
        if type_ is not None:
            self._exec(base.ColumnType(
                table_name, column_name, type_, schema=schema,
                existing_type=existing_type,
                existing_server_default=existing_server_default,
                existing_nullable=existing_nullable,
            ))
        # do the new name last ;)
        if name is not None:
            self._exec(base.ColumnName(
                table_name, column_name, name, schema=schema,
                existing_type=existing_type,
                existing_server_default=existing_server_default,
                existing_nullable=existing_nullable,
            ))
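
    # alter_column() intentionally emits one DDL construct per changed
    # aspect (nullability, server default, type, and the rename last), so a
    # single op.alter_column() call may translate into several ALTER
    # statements on this default implementation.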

    def add_column(self, table_name, column, schema=None):
        self._exec(base.AddColumn(table_name, column, schema=schema))

    def drop_column(self, table_name, column, schema=None, **kw):
        self._exec(base.DropColumn(table_name, column, schema=schema))

    def add_constraint(self, const):
        if const._create_rule is None or \
                const._create_rule(self):
            self._exec(schema.AddConstraint(const))

    def drop_constraint(self, const):
        self._exec(schema.DropConstraint(const))

    def rename_table(self, old_table_name, new_table_name, schema=None):
        self._exec(base.RenameTable(old_table_name,
                                    new_table_name, schema=schema))

    def create_table(self, table):
        if util.sqla_07:
            table.dispatch.before_create(table, self.connection,
                                         checkfirst=False,
                                         _ddl_runner=self)
        self._exec(schema.CreateTable(table))
        if util.sqla_07:
            table.dispatch.after_create(table, self.connection,
                                        checkfirst=False,
                                        _ddl_runner=self)
        for index in table.indexes:
            self._exec(schema.CreateIndex(index))

    def drop_table(self, table):
        self._exec(schema.DropTable(table))

    def create_index(self, index):
        self._exec(schema.CreateIndex(index))

    def drop_index(self, index):
        self._exec(schema.DropIndex(index))

    def bulk_insert(self, table, rows, multiinsert=True):
        if not isinstance(rows, list):
            raise TypeError("List expected")
        elif rows and not isinstance(rows[0], dict):
            raise TypeError("List of dictionaries expected")
        if self.as_sql:
            for row in rows:
                self._exec(table.insert(inline=True).values(**dict(
                    (k,
                     sqla_compat._literal_bindparam(
                         k, v, type_=table.c[k].type)
                     if not isinstance(
                         v, sqla_compat._literal_bindparam) else v)
                    for k, v in row.items()
                )))
        else:
            # work around http://www.sqlalchemy.org/trac/ticket/2461
            if not hasattr(table, '_autoincrement_column'):
                table._autoincrement_column = None
            if rows:
                if multiinsert:
                    self._exec(table.insert(inline=True), multiparams=rows)
                else:
                    for row in rows:
                        self._exec(table.insert(inline=True).values(**row))
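
    # In offline (as_sql) mode bulk_insert() renders each row as its own
    # INSERT with literal values (via _literal_bindparam), since there is no
    # DBAPI connection to receive bound parameters; in online mode it prefers
    # a single executemany-style call unless ``multiinsert`` is disabled.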

    def compare_type(self, inspector_column, metadata_column):

        conn_type = inspector_column.type
        metadata_type = metadata_column.type

        metadata_impl = metadata_type.dialect_impl(self.dialect)

        # work around SQLAlchemy bug "stale value for type affinity"
        # fixed in 0.7.4
        metadata_impl.__dict__.pop('_type_affinity', None)

        if hasattr(metadata_impl, "compare_against_backend"):
            comparison = metadata_impl.compare_against_backend(
                self.dialect, conn_type)
            if comparison is not None:
                return not comparison

        if conn_type._compare_type_affinity(
            metadata_impl
        ):
            comparator = _type_comparators.get(conn_type._type_affinity, None)

            return comparator and comparator(metadata_impl, conn_type)
        else:
            return True
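
    # compare_type() returns True when the reflected (database) type and the
    # metadata type should be treated as different for autogenerate.  Types
    # sharing a type affinity are delegated to the per-affinity helpers in
    # _type_comparators below; types with differing affinities are always
    # reported as changed.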

    def compare_server_default(self, inspector_column,
                               metadata_column,
                               rendered_metadata_default,
                               rendered_inspector_default):
        return rendered_inspector_default != rendered_metadata_default

    def correct_for_autogen_constraints(self, conn_uniques, conn_indexes,
                                        metadata_unique_constraints,
                                        metadata_indexes):
        pass

    def _compat_autogen_column_reflect(self, inspector):
        if util.sqla_08:
            return self.autogen_column_reflect
        else:
            def adapt(table, column_info):
                return self.autogen_column_reflect(
                    inspector, table, column_info)
            return adapt

    def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks):
        pass

    def autogen_column_reflect(self, inspector, table, column_info):
        """A hook that is attached to the 'column_reflect' event for when
        a Table is reflected from the database during the autogenerate
        process.

        Dialects can elect to modify the information gathered here.

        """
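        # Dialect implementations may mutate ``column_info`` in place here,
        # e.g. to normalize a reflected default or type before autogenerate
        # compares it against the model metadata; the base implementation is
        # a no-op.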

    def start_migrations(self):
        """A hook called when :meth:`.EnvironmentContext.run_migrations`
        is called.

        Implementations can set up per-migration-run state here.

        """

    def emit_begin(self):
        """Emit the string ``BEGIN``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("BEGIN" + self.command_terminator)

    def emit_commit(self):
        """Emit the string ``COMMIT``, or the backend-specific
        equivalent, on the current connection context.

        This is used in offline mode and typically
        via :meth:`.EnvironmentContext.begin_transaction`.

        """
        self.static_output("COMMIT" + self.command_terminator)
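
    # In offline mode these hooks simply write ``BEGIN;`` / ``COMMIT;`` (or a
    # backend-specific equivalent in subclasses) into the generated script,
    # so the migration's statements can run inside a transaction where
    # transactional DDL is supported.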

    def render_type(self, type_obj, autogen_context):
        return False


def _string_compare(t1, t2):
    return \
        t1.length is not None and \
        t1.length != t2.length


def _numeric_compare(t1, t2):
    return \
        (
            t1.precision is not None and
            t1.precision != t2.precision
        ) or \
        (
            t1.scale is not None and
            t1.scale != t2.scale
        )


def _integer_compare(t1, t2):
    t1_small_or_big = (
        'S' if isinstance(t1, sqltypes.SmallInteger)
        else 'B' if isinstance(t1, sqltypes.BigInteger) else 'I'
    )
    t2_small_or_big = (
        'S' if isinstance(t2, sqltypes.SmallInteger)
        else 'B' if isinstance(t2, sqltypes.BigInteger) else 'I'
    )
    return t1_small_or_big != t2_small_or_big


def _datetime_compare(t1, t2):
    return (
        t1.timezone != t2.timezone
    )


_type_comparators = {
    sqltypes.String: _string_compare,
    sqltypes.Numeric: _numeric_compare,
    sqltypes.Integer: _integer_compare,
    sqltypes.DateTime: _datetime_compare,
}
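
# The comparators above flag only coarse, per-affinity differences: a String
# length change, a Numeric precision/scale change, a switch between
# SmallInteger/Integer/BigInteger, or a DateTime timezone change.  A minimal
# sketch of a direct call (the types here are illustrative only):
#
#     from sqlalchemy import types as sqltypes
#     _type_comparators[sqltypes.String](sqltypes.String(50), sqltypes.String(30))
#     # -> True, i.e. treated as a changed type by compare_type()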