# postgresql/psycopg2.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+psycopg2
    :name: psycopg2
    :dbapi: psycopg2
    :connectstring: postgresql+psycopg2://user:password@host:port/dbname\
[?key=value&key=value...]
    :url: http://pypi.python.org/pypi/psycopg2/

psycopg2 Connect Arguments
-----------------------------------

psycopg2-specific keyword arguments which are accepted by
:func:`.create_engine()` are:

* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL
  statements which support this feature. What this essentially means from a
  psycopg2 point of view is that the cursor is created using a name, e.g.
  ``connection.cursor('some name')``, which has the effect that result rows
  are not immediately pre-fetched and buffered after statement execution, but
  are instead left on the server and only retrieved as needed.  SQLAlchemy's
  :class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering
  behavior when this feature is enabled, such that groups of 100 rows at a
  time are fetched over the wire to reduce conversational overhead.
  Note that the :paramref:`.Connection.execution_options.stream_results`
  execution option is a more targeted way of enabling this mode on a
  per-execution basis.

* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
  per connection.  True by default.

  .. seealso::

      :ref:`psycopg2_disable_native_unicode`

* ``isolation_level``: This option, available for all PostgreSQL dialects,
  includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
  dialect.

  .. seealso::

      :ref:`psycopg2_isolation_level`

* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
  using psycopg2's ``set_client_encoding()`` method.

  .. seealso::

      :ref:`psycopg2_unicode`
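
For illustration, several of these arguments may be combined in a single
:func:`.create_engine` call; the connection details and argument values
below are placeholders, not recommendations::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        server_side_cursors=True,
        client_encoding='utf8',
        isolation_level='AUTOCOMMIT'
    )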

Unix Domain Connections
------------------------

psycopg2 supports connecting via Unix domain connections.  When the ``host``
portion of the URL is omitted, SQLAlchemy passes ``None`` to psycopg2,
which specifies Unix-domain communication rather than TCP/IP communication::

    create_engine("postgresql+psycopg2://user:password@/dbname")

By default, psycopg2 connects to a Unix-domain socket in ``/tmp``, or
whatever socket directory was specified when PostgreSQL was built.  This
value can be overridden by passing a pathname to psycopg2, using ``host``
as an additional keyword argument::

    create_engine("postgresql+psycopg2://user:password@/dbname?\
host=/var/lib/postgresql")

See also:

`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/\
libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_

.. _psycopg2_execution_options:

Per-Statement/Connection Execution Options
-------------------------------------------

The following DBAPI-specific options are respected when used with
:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:

* ``isolation_level`` - Set the transaction isolation level for the lifespan
  of a :class:`.Connection` (can only be set on a connection, not a statement
  or query).  See :ref:`psycopg2_isolation_level`.

* ``stream_results`` - Enable or disable usage of psycopg2 server side
  cursors - this feature makes use of "named" cursors in combination with
  special result handling methods so that result rows are not fully buffered.
  If ``None`` or not set, the ``server_side_cursors`` option of the
  :class:`.Engine` is used.

* ``max_row_buffer`` - when using ``stream_results``, an integer value that
  specifies the maximum number of rows to buffer at a time.  This is
  interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the
  buffer will grow to ultimately store 1000 rows at a time.

  .. versionadded:: 1.0.6
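
For illustration, a minimal sketch of streaming a large result set using
these options; the table name and the ``process()`` handler below are
hypothetical::

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    with engine.connect() as conn:
        # request a server side ("named") cursor and buffer at most
        # 100 rows at a time on the client
        result = conn.execution_options(
            stream_results=True, max_row_buffer=100
        ).execute(text("SELECT * FROM big_table"))
        for row in result:
            process(row)  # hypothetical per-row handler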

.. _psycopg2_unicode:

Unicode with Psycopg2
----------------------

By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
Unicode objects directly - SQLAlchemy passes these values through without
change.  Psycopg2 here will encode/decode string values based on the
current "client encoding" setting; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf8``, as a more useful default::

    # postgresql.conf file

    # client_encoding = sql_ascii # actually, defaults to database
                                  # encoding
    client_encoding = utf8

A second way to affect the client encoding is to set it within Psycopg2
locally.  SQLAlchemy will call psycopg2's
:meth:`psycopg2:connection.set_client_encoding` method
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::

    # set_client_encoding() setting;
    # works for *all* PostgreSQL versions
    engine = create_engine("postgresql://user:pass@host/dbname",
                           client_encoding='utf8')

This overrides the encoding specified in the PostgreSQL client configuration.
When using the parameter in this way, the psycopg2 driver emits
``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
in all PostgreSQL versions.

Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
is **not the same** as the more recently added ``client_encoding`` parameter
now supported by libpq directly.  This is enabled when ``client_encoding``
is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
using the :paramref:`.create_engine.connect_args` parameter::

    # libpq direct parameter setting;
    # only works for PostgreSQL **9.1 and above**
    engine = create_engine("postgresql://user:pass@host/dbname",
                           connect_args={'client_encoding': 'utf8'})

    # using the query string is equivalent
    engine = create_engine(
        "postgresql://user:pass@host/dbname?client_encoding=utf8")

The above parameter was only added to libpq as of version 9.1 of PostgreSQL,
so using the previous method is better for cross-version support.

.. _psycopg2_disable_native_unicode:

Disabling Native Unicode
^^^^^^^^^^^^^^^^^^^^^^^^

SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
services, which are normally reserved only for those DBAPIs that don't
fully support unicode directly.  Passing ``use_native_unicode=False`` to
:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
SQLAlchemy will instead encode data itself into Python bytestrings on the way
in and coerce from bytes on the way back,
using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
obsolete as most DBAPIs now support unicode fully.
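
A minimal sketch of disabling the extension while also specifying the
encoding SQLAlchemy should use for its own encode/decode step; the encoding
value here is illustrative::

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        use_native_unicode=False,
        encoding='utf-8'  # SQLAlchemy-level encoding; 'utf-8' is the default
    )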

Bound Parameter Styles
----------------------

The default parameter style for the psycopg2 dialect is "pyformat", where
SQL is rendered using ``%(paramname)s`` style.  This format has the
limitation that it does not accommodate the unusual case of parameter names
that actually contain percent or parenthesis symbols; as SQLAlchemy in many
cases generates bound parameter names based on the name of a column, the
presence of these characters in a column name can lead to problems.

There are two solutions to the issue of a :class:`.schema.Column` that
contains one of these characters in its name.  One is to specify the
:paramref:`.schema.Column.key` for columns that have such names::

    measurement = Table('measurement', metadata,
        Column('Size (meters)', Integer, key='size_meters')
    )

Above, an INSERT statement such as ``measurement.insert()`` will use
``size_meters`` as the parameter name, and a SQL expression such as
``measurement.c.size_meters > 10`` will derive the bound parameter name
from the ``size_meters`` key as well.

.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
   as the source of naming when anonymous bound parameters are created
   in SQL expressions; previously, this behavior only applied to
   :meth:`.Table.insert` and :meth:`.Table.update` parameter names.

The other solution is to use a positional format; psycopg2 allows use of the
"format" paramstyle, which can be passed to
:paramref:`.create_engine.paramstyle`::

    engine = create_engine(
        'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')

With the above engine, instead of a statement like::

    INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
    {'Size (meters)': 1}

we instead see::

    INSERT INTO measurement ("Size (meters)") VALUES (%s)
    (1, )

Where above, the dictionary style is converted into a tuple with positional
style.

Transactions
------------

The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
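
A minimal sketch of a SAVEPOINT in use via :meth:`.Connection.begin_nested`;
the ``engine``, table, and values below are hypothetical::

    with engine.begin() as conn:
        conn.execute(some_table.insert(), {"id": 1})
        savepoint = conn.begin_nested()  # emits SAVEPOINT
        try:
            conn.execute(some_table.insert(), {"id": 2})
        except Exception:
            savepoint.rollback()  # emits ROLLBACK TO SAVEPOINT
        else:
            savepoint.commit()  # emits RELEASE SAVEPOINT

Two-phase transactions are available via :meth:`.Connection.begin_twophase`.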

.. _psycopg2_isolation_level:

Psycopg2 Transaction Isolation Level
-------------------------------------

As discussed in :ref:`postgresql_isolation_level`,
all PostgreSQL dialects support setting of transaction isolation level
both via the ``isolation_level`` parameter passed to :func:`.create_engine`,
as well as the ``isolation_level`` argument used by
:meth:`.Connection.execution_options`.  When using the psycopg2 dialect, these
options make use of psycopg2's ``set_isolation_level()`` connection method,
rather than emitting a PostgreSQL directive; this is because psycopg2's
API-level setting is always emitted at the start of each transaction in any
case.

The psycopg2 dialect supports these constants for isolation level:

* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``

.. versionadded:: 0.8.2 support for AUTOCOMMIT isolation level when using
   psycopg2.

.. seealso::

    :ref:`postgresql_isolation_level`

    :ref:`pg8000_isolation_level`
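
For illustration, the isolation level may be set engine-wide or per
connection; the connection details below are placeholders::

    # engine-wide setting via create_engine()
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        isolation_level="AUTOCOMMIT"
    )

    # per-connection setting via execution_options()
    with engine.connect() as conn:
        conn = conn.execution_options(isolation_level="SERIALIZABLE")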

NOTICE logging
---------------

The psycopg2 dialect will log PostgreSQL NOTICE messages via the
``sqlalchemy.dialects.postgresql`` logger::

    import logging
    logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)

.. _psycopg2_hstore:

HSTORE type
------------

The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
the HSTORE type.  The SQLAlchemy psycopg2 dialect will enable this extension
by default when psycopg2 version 2.4 or greater is used, and
it is detected that the target database has the HSTORE type set up for use.
In other words, when the dialect makes the first
connection, a sequence like the following is performed:

1. Request the available HSTORE oids using
   ``psycopg2.extras.HstoreAdapter.get_oids()``.
   If this function returns a list of HSTORE identifiers, we then determine
   that the ``HSTORE`` extension is present.
   This function is **skipped** if the version of psycopg2 installed is
   less than version 2.4.

2. If the ``use_native_hstore`` flag is at its default of ``True``, and
   we've detected that ``HSTORE`` oids are available, the
   ``psycopg2.extensions.register_hstore()`` extension is invoked for all
   connections.

The ``register_hstore()`` extension has the effect of **all Python
dictionaries being accepted as parameters regardless of the type of target
column in SQL**.  The dictionaries are converted by this extension into a
textual HSTORE expression.  If this behavior is not desired, disable the
use of the hstore extension by setting ``use_native_hstore`` to ``False`` as
follows::

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
                           use_native_hstore=False)

The ``HSTORE`` type is **still supported** when the
``psycopg2.extensions.register_hstore()`` extension is not used.  It merely
means that the coercion between Python dictionaries and the HSTORE
string format, on both the parameter side and the result side, will take
place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2``
which may be more performant.
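
A minimal sketch of round-tripping a Python dictionary through an ``HSTORE``
column, given an ``engine`` created as in the examples above; the table and
column names here are hypothetical::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import HSTORE

    metadata = MetaData()
    data_table = Table('data_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', HSTORE)
    )

    with engine.begin() as conn:
        conn.execute(
            data_table.insert(),
            {"id": 1, "data": {"key1": "value1", "key2": "value2"}}
        )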
  231. """
  232. from __future__ import absolute_import
  233. import re
  234. import logging
  235. from ... import util, exc
  236. import decimal
  237. from ... import processors
  238. from ...engine import result as _result
  239. from ...sql import expression
  240. from ... import types as sqltypes
  241. from .base import PGDialect, PGCompiler, \
  242. PGIdentifierPreparer, PGExecutionContext, \
  243. ENUM, _DECIMAL_TYPES, _FLOAT_TYPES,\
  244. _INT_TYPES, UUID
  245. from .hstore import HSTORE
  246. from .json import JSON, JSONB
  247. try:
  248. from uuid import UUID as _python_UUID
  249. except ImportError:
  250. _python_UUID = None
  251. logger = logging.getLogger('sqlalchemy.dialects.postgresql')


class _PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect):
        return None

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self._effective_decimal_return_scale)
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype)


class _PGEnum(ENUM):
    def result_processor(self, dialect, coltype):
        if self.native_enum and util.py2k and self.convert_unicode is True:
            # we can't easily use PG's extensions here because
            # the OID is on the fly, and we need to give it a python
            # function anyway - not really worth it.
            self.convert_unicode = "force_nocheck"
        return super(_PGEnum, self).result_processor(dialect, coltype)


class _PGHStore(HSTORE):
    def bind_processor(self, dialect):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).bind_processor(dialect)

    def result_processor(self, dialect, coltype):
        if dialect._has_native_hstore:
            return None
        else:
            return super(_PGHStore, self).result_processor(dialect, coltype)


class _PGJSON(JSON):
    def result_processor(self, dialect, coltype):
        if dialect._has_native_json:
            return None
        else:
            return super(_PGJSON, self).result_processor(dialect, coltype)


class _PGJSONB(JSONB):
    def result_processor(self, dialect, coltype):
        if dialect._has_native_jsonb:
            return None
        else:
            return super(_PGJSONB, self).result_processor(dialect, coltype)


class _PGUUID(UUID):
    def bind_processor(self, dialect):
        if not self.as_uuid and dialect.use_native_uuid:
            nonetype = type(None)

            def process(value):
                if value is not None:
                    value = _python_UUID(value)
                return value
            return process

    def result_processor(self, dialect, coltype):
        if not self.as_uuid and dialect.use_native_uuid:
            def process(value):
                if value is not None:
                    value = str(value)
                return value
            return process


_server_side_id = util.counter()


class PGExecutionContext_psycopg2(PGExecutionContext):
    def create_server_side_cursor(self):
        # use server-side cursors:
        # http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
        ident = "c_%s_%s" % (hex(id(self))[2:],
                             hex(_server_side_id())[2:])
        return self._dbapi_connection.cursor(ident)

    def get_result_proxy(self):
        # TODO: ouch
        if logger.isEnabledFor(logging.INFO):
            self._log_notices(self.cursor)

        if self._is_server_side:
            return _result.BufferedRowResultProxy(self)
        else:
            return _result.ResultProxy(self)

    def _log_notices(self, cursor):
        for notice in cursor.connection.notices:
            # NOTICE messages have a
            # newline character at the end
            logger.info(notice.rstrip())

        cursor.connection.notices[:] = []


class PGCompiler_psycopg2(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        return self.process(binary.left, **kw) + " %% " + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        return text.replace('%', '%%')


class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        return value.replace('%', '%%')


class PGDialect_psycopg2(PGDialect):
    driver = 'psycopg2'
    if util.py2k:
        supports_unicode_statements = False

    supports_server_side_cursors = True

    default_paramstyle = 'pyformat'
    # set to true based on psycopg2 version
    supports_sane_multi_rowcount = False
    execution_ctx_cls = PGExecutionContext_psycopg2
    statement_compiler = PGCompiler_psycopg2
    preparer = PGIdentifierPreparer_psycopg2
    psycopg2_version = (0, 0)

    FEATURE_VERSION_MAP = dict(
        native_json=(2, 5),
        native_jsonb=(2, 5, 4),
        sane_multi_rowcount=(2, 0, 9),
        array_oid=(2, 4, 3),
        hstore_adapter=(2, 4)
    )

    _has_native_hstore = False
    _has_native_json = False
    _has_native_jsonb = False

    engine_config_types = PGDialect.engine_config_types.union([
        ('use_native_unicode', util.asbool),
    ])

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: _PGNumeric,
            ENUM: _PGEnum,  # needs force_unicode
            sqltypes.Enum: _PGEnum,  # needs force_unicode
            HSTORE: _PGHStore,
            JSON: _PGJSON,
            sqltypes.JSON: _PGJSON,
            JSONB: _PGJSONB,
            UUID: _PGUUID
        }
    )

    def __init__(self, server_side_cursors=False, use_native_unicode=True,
                 client_encoding=None,
                 use_native_hstore=True, use_native_uuid=True,
                 **kwargs):
        PGDialect.__init__(self, **kwargs)
        self.server_side_cursors = server_side_cursors
        self.use_native_unicode = use_native_unicode
        self.use_native_hstore = use_native_hstore
        self.use_native_uuid = use_native_uuid
        self.supports_unicode_binds = use_native_unicode
        self.client_encoding = client_encoding
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                self.psycopg2_version = tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)

    def initialize(self, connection):
        super(PGDialect_psycopg2, self).initialize(connection)
        self._has_native_hstore = self.use_native_hstore and \
            self._hstore_oids(connection.connection) \
            is not None
        self._has_native_json = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json']
        self._has_native_jsonb = \
            self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb']

        # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
        self.supports_sane_multi_rowcount = \
            self.psycopg2_version >= \
            self.FEATURE_VERSION_MAP['sane_multi_rowcount']

    @classmethod
    def dbapi(cls):
        import psycopg2
        return psycopg2

    @classmethod
    def _psycopg2_extensions(cls):
        from psycopg2 import extensions
        return extensions

    @classmethod
    def _psycopg2_extras(cls):
        from psycopg2 import extras
        return extras

    @util.memoized_property
    def _isolation_lookup(self):
        extensions = self._psycopg2_extensions()
        return {
            'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
            'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
            'READ UNCOMMITTED': extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
            'REPEATABLE READ': extensions.ISOLATION_LEVEL_REPEATABLE_READ,
            'SERIALIZABLE': extensions.ISOLATION_LEVEL_SERIALIZABLE
        }

    def set_isolation_level(self, connection, level):
        try:
            level = self._isolation_lookup[level.replace('_', ' ')]
        except KeyError:
            raise exc.ArgumentError(
                "Invalid value '%s' for isolation_level. "
                "Valid isolation levels for %s are %s" %
                (level, self.name, ", ".join(self._isolation_lookup))
            )

        connection.set_isolation_level(level)

    def on_connect(self):
        extras = self._psycopg2_extras()
        extensions = self._psycopg2_extensions()

        fns = []
        if self.client_encoding is not None:
            def on_connect(conn):
                conn.set_client_encoding(self.client_encoding)
            fns.append(on_connect)

        if self.isolation_level is not None:
            def on_connect(conn):
                self.set_isolation_level(conn, self.isolation_level)
            fns.append(on_connect)

        if self.dbapi and self.use_native_uuid:
            def on_connect(conn):
                extras.register_uuid(None, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_unicode:
            def on_connect(conn):
                extensions.register_type(extensions.UNICODE, conn)
                extensions.register_type(extensions.UNICODEARRAY, conn)
            fns.append(on_connect)

        if self.dbapi and self.use_native_hstore:
            def on_connect(conn):
                hstore_oids = self._hstore_oids(conn)
                if hstore_oids is not None:
                    oid, array_oid = hstore_oids
                    kw = {'oid': oid}
                    if util.py2k:
                        kw['unicode'] = True
                    if self.psycopg2_version >= \
                            self.FEATURE_VERSION_MAP['array_oid']:
                        kw['array_oid'] = array_oid
                    extras.register_hstore(conn, **kw)
            fns.append(on_connect)

        if self.dbapi and self._json_deserializer:
            def on_connect(conn):
                if self._has_native_json:
                    extras.register_default_json(
                        conn, loads=self._json_deserializer)
                if self._has_native_jsonb:
                    extras.register_default_jsonb(
                        conn, loads=self._json_deserializer)
            fns.append(on_connect)

        if fns:
            def on_connect(conn):
                for fn in fns:
                    fn(conn)
            return on_connect
        else:
            return None

    @util.memoized_instancemethod
    def _hstore_oids(self, conn):
        if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']:
            extras = self._psycopg2_extras()
            oids = extras.HstoreAdapter.get_oids(conn)
            if oids is not None and oids[0]:
                return oids[0:2]
        return None

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.Error):
            # check the "closed" flag.  this might not be
            # present on old psycopg2 versions.  Also,
            # this flag doesn't actually help in a lot of disconnect
            # situations, so don't rely on it.
            if getattr(connection, 'closed', False):
                return True

            # checks based on strings.  in the case that .closed
            # didn't cut it, fall back onto these.
            str_e = str(e).partition("\n")[0]
            for msg in [
                # these error messages from libpq: interfaces/libpq/fe-misc.c
                # and interfaces/libpq/fe-secure.c.
                'terminating connection',
                'closed the connection',
                'connection not open',
                'could not receive data from server',
                'could not send data to server',
                # psycopg2 client errors, psycopg2/connection.h,
                # psycopg2/cursor.h
                'connection already closed',
                'cursor already closed',
                # not sure where this path is originally from, it may
                # be obsolete.  It really says "losed", not "closed".
                'losed the connection unexpectedly',
                # these can occur in newer SSL
                'connection has been closed unexpectedly',
                'SSL SYSCALL error: Bad file descriptor',
                'SSL SYSCALL error: EOF detected',
                'SSL error: decryption failed or bad record mac',
            ]:
                idx = str_e.find(msg)
                if idx >= 0 and '"' not in str_e[:idx]:
                    return True
        return False

dialect = PGDialect_psycopg2