result.py 52 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435
  1. # engine/result.py
  2. # Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
  3. # <see AUTHORS file>
  4. #
  5. # This module is part of SQLAlchemy and is released under
  6. # the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Define result set constructs including :class:`.ResultProxy`
and :class:`.RowProxy`."""
  9. from .. import exc, util
  10. from ..sql import expression, sqltypes, util as sql_util
  11. import collections
  12. import operator
# This reconstructor is necessary so that pickles with the C extension or
# without use the same Binary format.
try:
    # We need a different reconstructor on the C extension so that we can
    # add extra checks that fields have correctly been initialized by
    # __setstate__.
    from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor

    # The extra function embedding is needed so that the
    # reconstructor function has the same signature whether or not
    # the extension is present.
    def rowproxy_reconstructor(cls, state):
        """Rebuild a row of class ``cls`` from pickled ``state`` using the
        C-level reconstructor, which validates the restored fields."""
        return safe_rowproxy_reconstructor(cls, state)
except ImportError:
    def rowproxy_reconstructor(cls, state):
        """Pure-Python fallback: rebuild a row of class ``cls`` from
        pickled ``state`` without invoking ``__init__``."""
        obj = cls.__new__(cls)
        obj.__setstate__(state)
        return obj
try:
    # Prefer the C implementation of BaseRowProxy when the compiled
    # extension is available; the flag records which implementation is
    # active so ResultMetaData can adapt its keymap construction.
    from sqlalchemy.cresultproxy import BaseRowProxy
    _baserowproxy_usecext = True
except ImportError:
    _baserowproxy_usecext = False

    class BaseRowProxy(object):
        """Pure-Python base for :class:`.RowProxy`.

        Holds one raw DBAPI row together with the per-column result
        processors and the key lookup map supplied by the owning
        result metadata.
        """

        __slots__ = ('_parent', '_row', '_processors', '_keymap')

        def __init__(self, parent, row, processors, keymap):
            """RowProxy objects are constructed by ResultProxy objects."""
            # parent: the ResultMetaData owning keymap / processors
            self._parent = parent
            # row: raw value sequence from the DBAPI cursor
            self._row = row
            # processors: per-column converters; None means "use as-is"
            self._processors = processors
            # keymap: key -> (processor, obj, index); index is None for
            # names flagged as ambiguous
            self._keymap = keymap

        def __reduce__(self):
            # delegate to the module-level reconstructor so the pickle
            # format is the same with or without the C extension
            return (rowproxy_reconstructor,
                    (self.__class__, self.__getstate__()))

        def values(self):
            """Return the values represented by this RowProxy as a list."""
            return list(self)

        def __iter__(self):
            # apply each column's result processor as values are yielded
            for processor, value in zip(self._processors, self._row):
                if processor is None:
                    yield value
                else:
                    yield processor(value)

        def __len__(self):
            return len(self._row)

        def __getitem__(self, key):
            try:
                processor, obj, index = self._keymap[key]
            except KeyError:
                # unknown key: let the parent metadata attempt a fallback
                # lookup (string-case / ColumnElement matching); this
                # raises if the key truly cannot be located
                processor, obj, index = self._parent._key_fallback(key)
            except TypeError:
                # unhashable key: a slice lands here; anything else is a
                # genuine error and is re-raised
                if isinstance(key, slice):
                    l = []
                    for processor, value in zip(self._processors[key],
                                                self._row[key]):
                        if processor is None:
                            l.append(value)
                        else:
                            l.append(processor(value))
                    return tuple(l)
                else:
                    raise
            if index is None:
                # record was rewritten as "ambiguous" when the metadata
                # detected duplicate column names
                raise exc.InvalidRequestError(
                    "Ambiguous column name '%s' in "
                    "result set column descriptions" % obj)
            if processor is not None:
                return processor(self._row[index])
            else:
                return self._row[index]

        def __getattr__(self, name):
            # attribute access falls through to item access; a KeyError
            # from the lookup surfaces as AttributeError
            try:
                return self[name]
            except KeyError as e:
                raise AttributeError(e.args[0])
  87. class RowProxy(BaseRowProxy):
  88. """Proxy values from a single cursor row.
  89. Mostly follows "ordered dictionary" behavior, mapping result
  90. values to the string-based column name, the integer position of
  91. the result in the row, as well as Column instances which can be
  92. mapped to the original Columns that produced this result set (for
  93. results that correspond to constructed SQL expressions).
  94. """
  95. __slots__ = ()
  96. def __contains__(self, key):
  97. return self._parent._has_key(key)
  98. def __getstate__(self):
  99. return {
  100. '_parent': self._parent,
  101. '_row': tuple(self)
  102. }
  103. def __setstate__(self, state):
  104. self._parent = parent = state['_parent']
  105. self._row = state['_row']
  106. self._processors = parent._processors
  107. self._keymap = parent._keymap
  108. __hash__ = None
  109. def _op(self, other, op):
  110. return op(tuple(self), tuple(other)) \
  111. if isinstance(other, RowProxy) \
  112. else op(tuple(self), other)
  113. def __lt__(self, other):
  114. return self._op(other, operator.lt)
  115. def __le__(self, other):
  116. return self._op(other, operator.le)
  117. def __ge__(self, other):
  118. return self._op(other, operator.ge)
  119. def __gt__(self, other):
  120. return self._op(other, operator.gt)
  121. def __eq__(self, other):
  122. return self._op(other, operator.eq)
  123. def __ne__(self, other):
  124. return self._op(other, operator.ne)
  125. def __repr__(self):
  126. return repr(sql_util._repr_row(self))
  127. def has_key(self, key):
  128. """Return True if this RowProxy contains the given key."""
  129. return self._parent._has_key(key)
  130. def items(self):
  131. """Return a list of tuples, each tuple containing a key/value pair."""
  132. # TODO: no coverage here
  133. return [(key, self[key]) for key in self.keys()]
  134. def keys(self):
  135. """Return the list of keys as strings represented by this RowProxy."""
  136. return self._parent.keys
  137. def iterkeys(self):
  138. return iter(self._parent.keys)
  139. def itervalues(self):
  140. return iter(self)
  141. try:
  142. # Register RowProxy with Sequence,
  143. # so sequence protocol is implemented
  144. from collections import Sequence
  145. Sequence.register(RowProxy)
  146. except ImportError:
  147. pass
class ResultMetaData(object):
    """Handle cursor.description, applying additional info from an execution
    context.

    Builds the ``_keymap`` lookup used by row objects: a mapping of
    string names, integer positions and (for compiled constructs)
    column/label objects to records of the form
    ``(processor, obj, index)``, where ``index is None`` marks an
    ambiguous name.
    """

    __slots__ = (
        '_keymap', 'case_sensitive', 'matched_on_name',
        '_processors', 'keys', '_orig_processors')

    def __init__(self, parent, cursor_description):
        context = parent.context
        dialect = context.dialect
        self.case_sensitive = dialect.case_sensitive
        self.matched_on_name = False
        self._orig_processors = None

        if context.result_column_struct:
            result_columns, cols_are_ordered, textual_ordered = \
                context.result_column_struct
            num_ctx_cols = len(result_columns)
        else:
            result_columns = cols_are_ordered = \
                num_ctx_cols = textual_ordered = False

        # merge cursor.description with the column info
        # present in the compiled structure, if any;
        # each "raw" record is
        # (idx, key, name, processor, obj, untranslated)
        raw = self._merge_cursor_description(
            context, cursor_description, result_columns,
            num_ctx_cols, cols_are_ordered, textual_ordered)

        self._keymap = {}
        if not _baserowproxy_usecext:
            # keymap indexes by integer index: this is only used
            # in the pure Python BaseRowProxy.__getitem__
            # implementation to avoid an expensive
            # isinstance(key, util.int_types) in the most common
            # case path
            len_raw = len(raw)
            self._keymap.update([
                (elem[0], (elem[3], elem[4], elem[0]))
                for elem in raw
            ] + [
                # negative integer keys index relative to the row's end
                (elem[0] - len_raw, (elem[3], elem[4], elem[0]))
                for elem in raw
            ])

        # processors in key order for certain per-row
        # views like __iter__ and slices
        self._processors = [elem[3] for elem in raw]

        # keymap by primary string...
        by_key = dict([
            (elem[2], (elem[3], elem[4], elem[0]))
            for elem in raw
        ])

        # for compiled SQL constructs, copy additional lookup keys into
        # the key lookup map, such as Column objects, labels,
        # column keys and other names
        if num_ctx_cols:

            # if by-primary-string dictionary smaller (or bigger?!) than
            # number of columns, assume we have dupes, rewrite
            # dupe records with "None" for index which results in
            # ambiguous column exception when accessed.
            if len(by_key) != num_ctx_cols:
                seen = set()
                for rec in raw:
                    key = rec[1]
                    if key in seen:
                        # this is an "ambiguous" element, replacing
                        # the full record in the map
                        key = key.lower() if not self.case_sensitive else key
                        by_key[key] = (None, key, None)
                    seen.add(key)

                # copy secondary elements from compiled columns
                # into self._keymap, write in the potentially "ambiguous"
                # element
                self._keymap.update([
                    (obj_elem, by_key[elem[2]])
                    for elem in raw if elem[4]
                    for obj_elem in elem[4]
                ])

                # if we did a pure positional match, then reset the
                # original "expression element" back to the "unambiguous"
                # entry.  This is a new behavior in 1.1 which impacts
                # TextAsFrom but also straight compiled SQL constructs.
                if not self.matched_on_name:
                    self._keymap.update([
                        (elem[4][0], (elem[3], elem[4], elem[0]))
                        for elem in raw if elem[4]
                    ])
            else:
                # no dupes - copy secondary elements from compiled
                # columns into self._keymap
                self._keymap.update([
                    (obj_elem, (elem[3], elem[4], elem[0]))
                    for elem in raw if elem[4]
                    for obj_elem in elem[4]
                ])

        # update keymap with primary string names taking
        # precedence
        self._keymap.update(by_key)

        # update keymap with "translated" names (sqlite-only thing)
        if not num_ctx_cols and context._translate_colname:
            self._keymap.update([
                (elem[5], self._keymap[elem[2]])
                for elem in raw if elem[5]
            ])

    def _merge_cursor_description(
            self, context, cursor_description, result_columns,
            num_ctx_cols, cols_are_ordered, textual_ordered):
        """Merge a cursor.description with compiled result column information.

        There are at least four separate strategies used here, selected
        depending on the type of SQL construct used to start with.

        The most common case is that of the compiled SQL expression construct,
        which generated the column names present in the raw SQL string and
        which has the identical number of columns as were reported by
        cursor.description.  In this case, we assume a 1-1 positional mapping
        between the entries in cursor.description and the compiled object.
        This is also the most performant case as we disregard extracting /
        decoding the column names present in cursor.description since we
        already have the desired name we generated in the compiled SQL
        construct.

        The next common case is that of the completely raw string SQL,
        such as passed to connection.execute().  In this case we have no
        compiled construct to work with, so we extract and decode the
        names from cursor.description and index those as the primary
        result row target keys.

        The remaining fairly common case is that of the textual SQL
        that includes at least partial column information; this is when
        we use a :class:`.TextAsFrom` construct.  This construct may have
        unordered or ordered column information.  In the ordered case, we
        merge the cursor.description and the compiled construct's information
        positionally, and warn if there are additional description names
        present, however we still decode the names in cursor.description
        as we don't have a guarantee that the names in the columns match
        on these.  In the unordered case, we match names in cursor.description
        to that of the compiled construct based on name matching.
        In both of these cases, the cursor.description names and the column
        expression objects and names are indexed as result row target keys.

        The final case is much less common, where we have a compiled
        non-textual SQL expression construct, but the number of columns
        in cursor.description doesn't match what's in the compiled
        construct.  We make the guess here that there might be textual
        column expressions in the compiled construct that themselves include
        a comma in them causing them to split.  We do the same name-matching
        as with textual non-ordered columns.

        The name-matched system of merging is the same as that used by
        SQLAlchemy for all cases up through the 0.9 series.  Positional
        matching for compiled SQL expressions was introduced in 1.0 as a
        major performance feature, and positional matching for textual
        :class:`.TextAsFrom` objects in 1.1.  As name matching is no longer
        a common case, it was acceptable to factor it into smaller generator-
        oriented methods that are easier to understand, but incur slightly
        more performance overhead.

        """

        case_sensitive = context.dialect.case_sensitive

        if num_ctx_cols and \
                cols_are_ordered and \
                not textual_ordered and \
                num_ctx_cols == len(cursor_description):
            self.keys = [elem[0] for elem in result_columns]
            # pure positional 1-1 case; doesn't need to read
            # the names from cursor.description
            return [
                (
                    idx,
                    key,
                    name.lower() if not case_sensitive else name,
                    context.get_result_processor(
                        type_, key, cursor_description[idx][1]
                    ),
                    obj,
                    None
                ) for idx, (key, name, obj, type_)
                in enumerate(result_columns)
            ]
        else:
            # name-based or text-positional cases, where we need
            # to read cursor.description names
            if textual_ordered:
                # textual positional case
                raw_iterator = self._merge_textual_cols_by_position(
                    context, cursor_description, result_columns)
            elif num_ctx_cols:
                # compiled SQL with a mismatch of description cols
                # vs. compiled cols, or textual w/ unordered columns
                raw_iterator = self._merge_cols_by_name(
                    context, cursor_description, result_columns)
            else:
                # no compiled SQL, just a raw string
                raw_iterator = self._merge_cols_by_none(
                    context, cursor_description)

            return [
                (
                    idx, colname, colname,
                    context.get_result_processor(
                        mapped_type, colname, coltype),
                    obj, untranslated)
                for idx, colname, mapped_type, coltype, obj, untranslated
                in raw_iterator
            ]

    def _colnames_from_description(self, context, cursor_description):
        """Extract column names and data types from a cursor.description.

        Applies unicode decoding, column translation, "normalization",
        and case sensitivity rules to the names based on the dialect.

        Generator of ``(idx, colname, untranslated, coltype)`` tuples;
        also populates ``self.keys`` with the original-case names as a
        side effect.
        """

        dialect = context.dialect
        case_sensitive = dialect.case_sensitive
        translate_colname = context._translate_colname
        description_decoder = dialect._description_decoder \
            if dialect.description_encoding else None
        normalize_name = dialect.normalize_name \
            if dialect.requires_name_normalize else None
        untranslated = None

        self.keys = []

        for idx, rec in enumerate(cursor_description):
            colname = rec[0]
            coltype = rec[1]

            if description_decoder:
                colname = description_decoder(colname)

            if translate_colname:
                colname, untranslated = translate_colname(colname)

            if normalize_name:
                colname = normalize_name(colname)

            # keys retain the case-preserved name; the yielded colname
            # may be lower-cased for case-insensitive matching
            self.keys.append(colname)
            if not case_sensitive:
                colname = colname.lower()

            yield idx, colname, untranslated, coltype

    def _merge_textual_cols_by_position(
            self, context, cursor_description, result_columns):
        # positional merge of a textual (TextAsFrom-style) construct's
        # declared columns against cursor.description
        dialect = context.dialect
        typemap = dialect.dbapi_type_map

        # NOTE(review): num_ctx_cols can be None when result_columns is
        # empty; the comparison below assumes callers always supply at
        # least one declared column in the textual-ordered case — confirm
        num_ctx_cols = len(result_columns) if result_columns else None

        if num_ctx_cols > len(cursor_description):
            util.warn(
                "Number of columns in textual SQL (%d) is "
                "smaller than number of columns requested (%d)" % (
                    num_ctx_cols, len(cursor_description)
                ))

        seen = set()
        for idx, colname, untranslated, coltype in \
                self._colnames_from_description(context, cursor_description):
            if idx < num_ctx_cols:
                ctx_rec = result_columns[idx]
                obj = ctx_rec[2]
                mapped_type = ctx_rec[3]
                # the same column expression may not be requested twice
                if obj[0] in seen:
                    raise exc.InvalidRequestError(
                        "Duplicate column expression requested "
                        "in textual SQL: %r" % obj[0])
                seen.add(obj[0])
            else:
                # extra description columns beyond those declared fall
                # back to the DBAPI type map
                mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
                obj = None

            yield idx, colname, mapped_type, coltype, obj, untranslated

    def _merge_cols_by_name(self, context, cursor_description, result_columns):
        # name-based merge: match description names against the compiled
        # construct's result map
        dialect = context.dialect
        typemap = dialect.dbapi_type_map
        case_sensitive = dialect.case_sensitive
        result_map = self._create_result_map(result_columns, case_sensitive)

        self.matched_on_name = True
        for idx, colname, untranslated, coltype in \
                self._colnames_from_description(context, cursor_description):
            try:
                ctx_rec = result_map[colname]
            except KeyError:
                # description name not present in the compiled construct
                mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
                obj = None
            else:
                obj = ctx_rec[1]
                mapped_type = ctx_rec[2]
            yield idx, colname, mapped_type, coltype, obj, untranslated

    def _merge_cols_by_none(self, context, cursor_description):
        # raw string SQL: no compiled info, types come only from the
        # DBAPI type map
        dialect = context.dialect
        typemap = dialect.dbapi_type_map
        for idx, colname, untranslated, coltype in \
                self._colnames_from_description(context, cursor_description):
            mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
            yield idx, colname, mapped_type, coltype, None, untranslated

    @classmethod
    def _create_result_map(cls, result_columns, case_sensitive=True):
        """Build a name -> (name, objects, type) map from compiled
        result columns, doubling up object lists on duplicate names so
        later access raises "ambiguous name"."""
        d = {}
        for elem in result_columns:
            key, rec = elem[0], elem[1:]
            if not case_sensitive:
                key = key.lower()
            if key in d:
                # conflicting keyname, just double up the list
                # of objects.  this will cause an "ambiguous name"
                # error if an attempt is made by the result set to
                # access.
                e_name, e_obj, e_type = d[key]
                d[key] = e_name, e_obj + rec[1], e_type
            else:
                d[key] = rec
        return d

    def _key_fallback(self, key, raiseerr=True):
        """Locate ``key`` in the keymap by secondary means: string
        case-folding or ColumnElement label/name matching.  Successful
        lookups are cached back into the keymap."""
        map = self._keymap
        result = None
        if isinstance(key, util.string_types):
            result = map.get(key if self.case_sensitive else key.lower())
        # fallback for targeting a ColumnElement to a textual expression
        # this is a rare use case which only occurs when matching text()
        # or column('name') constructs to ColumnElements, or after a
        # pickle/unpickle roundtrip
        elif isinstance(key, expression.ColumnElement):
            if key._label and (
                    key._label
                    if self.case_sensitive
                    else key._label.lower()) in map:
                result = map[key._label
                             if self.case_sensitive
                             else key._label.lower()]
            elif hasattr(key, 'name') and (
                    key.name
                    if self.case_sensitive
                    else key.name.lower()) in map:
                # match is only on name.
                result = map[key.name
                             if self.case_sensitive
                             else key.name.lower()]
            # search extra hard to make sure this
            # isn't a column/label name overlap.
            # this check isn't currently available if the row
            # was unpickled.
            if result is not None and \
                    result[1] is not None:
                for obj in result[1]:
                    if key._compare_name_for_result(obj):
                        break
                else:
                    result = None
        if result is None:
            if raiseerr:
                raise exc.NoSuchColumnError(
                    "Could not locate column in row for column '%s'" %
                    expression._string_or_unprintable(key))
            else:
                return None
        else:
            # cache the resolved record so subsequent lookups hit the
            # keymap directly
            map[key] = result
            return result

    def _has_key(self, key):
        # direct hit first, then the (non-raising) fallback path
        if key in self._keymap:
            return True
        else:
            return self._key_fallback(key, False) is not None

    def _getter(self, key, raiseerr=True):
        """Return an itemgetter for ``key``'s column index, or None when
        not found and ``raiseerr`` is False."""
        if key in self._keymap:
            processor, obj, index = self._keymap[key]
        else:
            ret = self._key_fallback(key, raiseerr)
            if ret is None:
                return None
            processor, obj, index = ret

        if index is None:
            # index None marks a name rewritten as ambiguous
            raise exc.InvalidRequestError(
                "Ambiguous column name '%s' in "
                "result set column descriptions" % obj)

        return operator.itemgetter(index)

    def __getstate__(self):
        # only string/int keys survive pickling; object keys (Columns)
        # cannot be reliably restored
        return {
            '_pickled_keymap': dict(
                (key, index)
                for key, (processor, obj, index) in self._keymap.items()
                if isinstance(key, util.string_types + util.int_types)
            ),
            'keys': self.keys,
            "case_sensitive": self.case_sensitive,
            "matched_on_name": self.matched_on_name
        }

    def __setstate__(self, state):
        # the row has been processed at pickling time so we don't need any
        # processor anymore
        self._processors = [None for _ in range(len(state['keys']))]
        self._keymap = keymap = {}
        for key, index in state['_pickled_keymap'].items():
            # not preserving "obj" here, unfortunately our
            # proxy comparison fails with the unpickle
            keymap[key] = (None, None, index)
        self.keys = state['keys']
        self.case_sensitive = state['case_sensitive']
        self.matched_on_name = state['matched_on_name']
  523. class ResultProxy(object):
  524. """Wraps a DB-API cursor object to provide easier access to row columns.
  525. Individual columns may be accessed by their integer position,
  526. case-insensitive column name, or by ``schema.Column``
  527. object. e.g.::
  528. row = fetchone()
  529. col1 = row[0] # access via integer position
  530. col2 = row['col2'] # access via name
  531. col3 = row[mytable.c.mycol] # access via Column object.
  532. ``ResultProxy`` also handles post-processing of result column
  533. data using ``TypeEngine`` objects, which are referenced from
  534. the originating SQL statement that produced this result set.
  535. """
  536. _process_row = RowProxy
  537. out_parameters = None
  538. _autoclose_connection = False
  539. _metadata = None
  540. _soft_closed = False
  541. closed = False
    def __init__(self, context):
        self.context = context
        self.dialect = context.dialect
        # keep a second reference to the cursor: ``cursor`` is set to
        # None on soft close, while ``_saved_cursor`` remains available
        # for post-close accessors such as lastrowid / description
        self.cursor = self._saved_cursor = context.cursor
        self.connection = context.root_connection
        # echo result columns only when both the connection and the
        # engine are configured for debug logging
        self._echo = self.connection._echo and \
            context.engine._should_log_debug()
        self._init_metadata()
  550. def _getter(self, key, raiseerr=True):
  551. try:
  552. getter = self._metadata._getter
  553. except AttributeError:
  554. return self._non_result(None)
  555. else:
  556. return getter(key, raiseerr)
  557. def _has_key(self, key):
  558. try:
  559. has_key = self._metadata._has_key
  560. except AttributeError:
  561. return self._non_result(None)
  562. else:
  563. return has_key(key)
    def _init_metadata(self):
        # a None cursor.description (e.g. DML statements) leaves
        # _metadata at its class-level default of None
        cursor_description = self._cursor_description()
        if cursor_description is not None:
            if self.context.compiled and \
                    'compiled_cache' in self.context.execution_options:
                # reuse (or populate) the metadata cached on the
                # compiled statement when a compiled cache is in use
                if self.context.compiled._cached_metadata:
                    self._metadata = self.context.compiled._cached_metadata
                else:
                    self._metadata = self.context.compiled._cached_metadata = \
                        ResultMetaData(self, cursor_description)
            else:
                self._metadata = ResultMetaData(self, cursor_description)
            if self._echo:
                self.context.engine.logger.debug(
                    "Col %r", tuple(x[0] for x in cursor_description))
    def keys(self):
        """Return the current set of string keys for rows."""
        # _metadata is None for results that don't return rows
        if self._metadata:
            return self._metadata.keys
        else:
            return []
    @util.memoized_property
    def rowcount(self):
        """Return the 'rowcount' for this result.

        The 'rowcount' reports the number of rows *matched*
        by the WHERE criterion of an UPDATE or DELETE statement.

        .. note::

           Notes regarding :attr:`.ResultProxy.rowcount`:


           * This attribute returns the number of rows *matched*,
             which is not necessarily the same as the number of rows
             that were actually *modified* - an UPDATE statement, for example,
             may have no net change on a given row if the SET values
             given are the same as those present in the row already.
             Such a row would be matched but not modified.
             On backends that feature both styles, such as MySQL,
             rowcount is configured by default to return the match
             count in all cases.

           * :attr:`.ResultProxy.rowcount` is *only* useful in conjunction
             with an UPDATE or DELETE statement.  Contrary to what the Python
             DBAPI says, it does *not* return the
             number of rows available from the results of a SELECT statement
             as DBAPIs cannot support this functionality when rows are
             unbuffered.

           * :attr:`.ResultProxy.rowcount` may not be fully implemented by
             all dialects.  In particular, most DBAPIs do not support an
             aggregate rowcount result from an executemany call.
             The :meth:`.ResultProxy.supports_sane_rowcount` and
             :meth:`.ResultProxy.supports_sane_multi_rowcount` methods
             will report from the dialect if each usage is known to be
             supported.

           * Statements that use RETURNING may not return a correct
             rowcount.

        """
        try:
            return self.context.rowcount
        except BaseException as e:
            # route DBAPI errors through the connection's error handler
            # (which may wrap, re-raise, or invalidate the connection)
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context)
    @property
    def lastrowid(self):
        """return the 'lastrowid' accessor on the DBAPI cursor.

        This is a DBAPI specific method and is only functional
        for those backends which support it, for statements
        where it is appropriate.  Its behavior is not
        consistent across backends.

        Usage of this method is normally unnecessary when
        using insert() expression constructs; the
        :attr:`~ResultProxy.inserted_primary_key` attribute provides a
        tuple of primary key values for a newly inserted row,
        regardless of database backend.

        """
        try:
            # use _saved_cursor: still valid after a soft close
            return self._saved_cursor.lastrowid
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None,
                self._saved_cursor, self.context)
    @property
    def returns_rows(self):
        """True if this :class:`.ResultProxy` returns rows.

        I.e. if it is legal to call the methods
        :meth:`~.ResultProxy.fetchone`,
        :meth:`~.ResultProxy.fetchmany`
        :meth:`~.ResultProxy.fetchall`.

        """
        # _metadata is only constructed when cursor.description is present
        return self._metadata is not None
    @property
    def is_insert(self):
        """True if this :class:`.ResultProxy` is the result
        of a executing an expression language compiled
        :func:`.expression.insert` construct.

        When True, this implies that the
        :attr:`inserted_primary_key` attribute is accessible,
        assuming the statement did not include
        a user defined "returning" construct.

        """
        return self.context.isinsert
    def _cursor_description(self):
        """May be overridden by subclasses."""
        # read from _saved_cursor so this works after a soft close
        return self._saved_cursor.description
    def _soft_close(self):
        """Soft close this :class:`.ResultProxy`.

        This releases all DBAPI cursor resources, but leaves the
        ResultProxy "open" from a semantic perspective, meaning the
        fetchXXX() methods will continue to return empty results.

        This method is called automatically when:

        * all result rows are exhausted using the fetchXXX() methods.
        * cursor.description is None.

        This method is **not public**, but is documented in order to clarify
        the "autoclose" process used.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.ResultProxy.close`

        """
        # idempotent: only the first call releases resources
        if self._soft_closed:
            return
        self._soft_closed = True
        cursor = self.cursor
        self.connection._safe_close_cursor(cursor)
        if self._autoclose_connection:
            self.connection.close()
        # _saved_cursor retains a reference for post-close accessors
        self.cursor = None
  686. def close(self):
  687. """Close this ResultProxy.
  688. This closes out the underlying DBAPI cursor corresonding
  689. to the statement execution, if one is still present. Note that the
  690. DBAPI cursor is automatically released when the :class:`.ResultProxy`
  691. exhausts all available rows. :meth:`.ResultProxy.close` is generally
  692. an optional method except in the case when discarding a
  693. :class:`.ResultProxy` that still has additional rows pending for fetch.
  694. In the case of a result that is the product of
  695. :ref:`connectionless execution <dbengine_implicit>`,
  696. the underlying :class:`.Connection` object is also closed, which
  697. :term:`releases` DBAPI connection resources.
  698. After this method is called, it is no longer valid to call upon
  699. the fetch methods, which will raise a :class:`.ResourceClosedError`
  700. on subsequent use.
  701. .. versionchanged:: 1.0.0 - the :meth:`.ResultProxy.close` method
  702. has been separated out from the process that releases the underlying
  703. DBAPI cursor resource. The "auto close" feature of the
  704. :class:`.Connection` now performs a so-called "soft close", which
  705. releases the underlying DBAPI cursor, but allows the
  706. :class:`.ResultProxy` to still behave as an open-but-exhausted
  707. result set; the actual :meth:`.ResultProxy.close` method is never
  708. called. It is still safe to discard a :class:`.ResultProxy`
  709. that has been fully exhausted without calling this method.
  710. .. seealso::
  711. :ref:`connections_toplevel`
  712. :meth:`.ResultProxy._soft_close`
  713. """
  714. if not self.closed:
  715. self._soft_close()
  716. self.closed = True
  717. def __iter__(self):
  718. while True:
  719. row = self.fetchone()
  720. if row is None:
  721. return
  722. else:
  723. yield row
  724. @util.memoized_property
  725. def inserted_primary_key(self):
  726. """Return the primary key for the row just inserted.
  727. The return value is a list of scalar values
  728. corresponding to the list of primary key columns
  729. in the target table.
  730. This only applies to single row :func:`.insert`
  731. constructs which did not explicitly specify
  732. :meth:`.Insert.returning`.
  733. Note that primary key columns which specify a
  734. server_default clause,
  735. or otherwise do not qualify as "autoincrement"
  736. columns (see the notes at :class:`.Column`), and were
  737. generated using the database-side default, will
  738. appear in this list as ``None`` unless the backend
  739. supports "returning" and the insert statement executed
  740. with the "implicit returning" enabled.
  741. Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
  742. statement is not a compiled expression construct
  743. or is not an insert() construct.
  744. """
  745. if not self.context.compiled:
  746. raise exc.InvalidRequestError(
  747. "Statement is not a compiled "
  748. "expression construct.")
  749. elif not self.context.isinsert:
  750. raise exc.InvalidRequestError(
  751. "Statement is not an insert() "
  752. "expression construct.")
  753. elif self.context._is_explicit_returning:
  754. raise exc.InvalidRequestError(
  755. "Can't call inserted_primary_key "
  756. "when returning() "
  757. "is used.")
  758. return self.context.inserted_primary_key
  759. def last_updated_params(self):
  760. """Return the collection of updated parameters from this
  761. execution.
  762. Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
  763. statement is not a compiled expression construct
  764. or is not an update() construct.
  765. """
  766. if not self.context.compiled:
  767. raise exc.InvalidRequestError(
  768. "Statement is not a compiled "
  769. "expression construct.")
  770. elif not self.context.isupdate:
  771. raise exc.InvalidRequestError(
  772. "Statement is not an update() "
  773. "expression construct.")
  774. elif self.context.executemany:
  775. return self.context.compiled_parameters
  776. else:
  777. return self.context.compiled_parameters[0]
  778. def last_inserted_params(self):
  779. """Return the collection of inserted parameters from this
  780. execution.
  781. Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
  782. statement is not a compiled expression construct
  783. or is not an insert() construct.
  784. """
  785. if not self.context.compiled:
  786. raise exc.InvalidRequestError(
  787. "Statement is not a compiled "
  788. "expression construct.")
  789. elif not self.context.isinsert:
  790. raise exc.InvalidRequestError(
  791. "Statement is not an insert() "
  792. "expression construct.")
  793. elif self.context.executemany:
  794. return self.context.compiled_parameters
  795. else:
  796. return self.context.compiled_parameters[0]
  797. @property
  798. def returned_defaults(self):
  799. """Return the values of default columns that were fetched using
  800. the :meth:`.ValuesBase.return_defaults` feature.
  801. The value is an instance of :class:`.RowProxy`, or ``None``
  802. if :meth:`.ValuesBase.return_defaults` was not used or if the
  803. backend does not support RETURNING.
  804. .. versionadded:: 0.9.0
  805. .. seealso::
  806. :meth:`.ValuesBase.return_defaults`
  807. """
  808. return self.context.returned_defaults
  809. def lastrow_has_defaults(self):
  810. """Return ``lastrow_has_defaults()`` from the underlying
  811. :class:`.ExecutionContext`.
  812. See :class:`.ExecutionContext` for details.
  813. """
  814. return self.context.lastrow_has_defaults()
  815. def postfetch_cols(self):
  816. """Return ``postfetch_cols()`` from the underlying
  817. :class:`.ExecutionContext`.
  818. See :class:`.ExecutionContext` for details.
  819. Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
  820. statement is not a compiled expression construct
  821. or is not an insert() or update() construct.
  822. """
  823. if not self.context.compiled:
  824. raise exc.InvalidRequestError(
  825. "Statement is not a compiled "
  826. "expression construct.")
  827. elif not self.context.isinsert and not self.context.isupdate:
  828. raise exc.InvalidRequestError(
  829. "Statement is not an insert() or update() "
  830. "expression construct.")
  831. return self.context.postfetch_cols
  832. def prefetch_cols(self):
  833. """Return ``prefetch_cols()`` from the underlying
  834. :class:`.ExecutionContext`.
  835. See :class:`.ExecutionContext` for details.
  836. Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
  837. statement is not a compiled expression construct
  838. or is not an insert() or update() construct.
  839. """
  840. if not self.context.compiled:
  841. raise exc.InvalidRequestError(
  842. "Statement is not a compiled "
  843. "expression construct.")
  844. elif not self.context.isinsert and not self.context.isupdate:
  845. raise exc.InvalidRequestError(
  846. "Statement is not an insert() or update() "
  847. "expression construct.")
  848. return self.context.prefetch_cols
  849. def supports_sane_rowcount(self):
  850. """Return ``supports_sane_rowcount`` from the dialect.
  851. See :attr:`.ResultProxy.rowcount` for background.
  852. """
  853. return self.dialect.supports_sane_rowcount
  854. def supports_sane_multi_rowcount(self):
  855. """Return ``supports_sane_multi_rowcount`` from the dialect.
  856. See :attr:`.ResultProxy.rowcount` for background.
  857. """
  858. return self.dialect.supports_sane_multi_rowcount
  859. def _fetchone_impl(self):
  860. try:
  861. return self.cursor.fetchone()
  862. except AttributeError:
  863. return self._non_result(None)
  864. def _fetchmany_impl(self, size=None):
  865. try:
  866. if size is None:
  867. return self.cursor.fetchmany()
  868. else:
  869. return self.cursor.fetchmany(size)
  870. except AttributeError:
  871. return self._non_result([])
  872. def _fetchall_impl(self):
  873. try:
  874. return self.cursor.fetchall()
  875. except AttributeError:
  876. return self._non_result([])
  877. def _non_result(self, default):
  878. if self._metadata is None:
  879. raise exc.ResourceClosedError(
  880. "This result object does not return rows. "
  881. "It has been closed automatically.",
  882. )
  883. elif self.closed:
  884. raise exc.ResourceClosedError("This result object is closed.")
  885. else:
  886. return default
  887. def process_rows(self, rows):
  888. process_row = self._process_row
  889. metadata = self._metadata
  890. keymap = metadata._keymap
  891. processors = metadata._processors
  892. if self._echo:
  893. log = self.context.engine.logger.debug
  894. l = []
  895. for row in rows:
  896. log("Row %r", sql_util._repr_row(row))
  897. l.append(process_row(metadata, row, processors, keymap))
  898. return l
  899. else:
  900. return [process_row(metadata, row, processors, keymap)
  901. for row in rows]
  902. def fetchall(self):
  903. """Fetch all rows, just like DB-API ``cursor.fetchall()``.
  904. After all rows have been exhausted, the underlying DBAPI
  905. cursor resource is released, and the object may be safely
  906. discarded.
  907. Subsequent calls to :meth:`.ResultProxy.fetchall` will return
  908. an empty list. After the :meth:`.ResultProxy.close` method is
  909. called, the method will raise :class:`.ResourceClosedError`.
  910. .. versionchanged:: 1.0.0 - Added "soft close" behavior which
  911. allows the result to be used in an "exhausted" state prior to
  912. calling the :meth:`.ResultProxy.close` method.
  913. """
  914. try:
  915. l = self.process_rows(self._fetchall_impl())
  916. self._soft_close()
  917. return l
  918. except BaseException as e:
  919. self.connection._handle_dbapi_exception(
  920. e, None, None,
  921. self.cursor, self.context)
  922. def fetchmany(self, size=None):
  923. """Fetch many rows, just like DB-API
  924. ``cursor.fetchmany(size=cursor.arraysize)``.
  925. After all rows have been exhausted, the underlying DBAPI
  926. cursor resource is released, and the object may be safely
  927. discarded.
  928. Calls to :meth:`.ResultProxy.fetchmany` after all rows have been
  929. exhausted will return
  930. an empty list. After the :meth:`.ResultProxy.close` method is
  931. called, the method will raise :class:`.ResourceClosedError`.
  932. .. versionchanged:: 1.0.0 - Added "soft close" behavior which
  933. allows the result to be used in an "exhausted" state prior to
  934. calling the :meth:`.ResultProxy.close` method.
  935. """
  936. try:
  937. l = self.process_rows(self._fetchmany_impl(size))
  938. if len(l) == 0:
  939. self._soft_close()
  940. return l
  941. except BaseException as e:
  942. self.connection._handle_dbapi_exception(
  943. e, None, None,
  944. self.cursor, self.context)
  945. def fetchone(self):
  946. """Fetch one row, just like DB-API ``cursor.fetchone()``.
  947. After all rows have been exhausted, the underlying DBAPI
  948. cursor resource is released, and the object may be safely
  949. discarded.
  950. Calls to :meth:`.ResultProxy.fetchone` after all rows have
  951. been exhausted will return ``None``.
  952. After the :meth:`.ResultProxy.close` method is
  953. called, the method will raise :class:`.ResourceClosedError`.
  954. .. versionchanged:: 1.0.0 - Added "soft close" behavior which
  955. allows the result to be used in an "exhausted" state prior to
  956. calling the :meth:`.ResultProxy.close` method.
  957. """
  958. try:
  959. row = self._fetchone_impl()
  960. if row is not None:
  961. return self.process_rows([row])[0]
  962. else:
  963. self._soft_close()
  964. return None
  965. except BaseException as e:
  966. self.connection._handle_dbapi_exception(
  967. e, None, None,
  968. self.cursor, self.context)
  969. def first(self):
  970. """Fetch the first row and then close the result set unconditionally.
  971. Returns None if no row is present.
  972. After calling this method, the object is fully closed,
  973. e.g. the :meth:`.ResultProxy.close` method will have been called.
  974. """
  975. if self._metadata is None:
  976. return self._non_result(None)
  977. try:
  978. row = self._fetchone_impl()
  979. except BaseException as e:
  980. self.connection._handle_dbapi_exception(
  981. e, None, None,
  982. self.cursor, self.context)
  983. try:
  984. if row is not None:
  985. return self.process_rows([row])[0]
  986. else:
  987. return None
  988. finally:
  989. self.close()
  990. def scalar(self):
  991. """Fetch the first column of the first row, and close the result set.
  992. Returns None if no row is present.
  993. After calling this method, the object is fully closed,
  994. e.g. the :meth:`.ResultProxy.close` method will have been called.
  995. """
  996. row = self.first()
  997. if row is not None:
  998. return row[0]
  999. else:
  1000. return None
class BufferedRowResultProxy(ResultProxy):
    """A ResultProxy with row buffering behavior.

    ``ResultProxy`` that buffers the contents of a selection of rows
    before ``fetchone()`` is called.  This is to allow the results of
    ``cursor.description`` to be available immediately, when
    interfacing with a DB-API that requires rows to be consumed before
    this information is available (currently psycopg2, when used with
    server-side cursors).

    The pre-fetching behavior fetches only one row initially, and then
    grows its buffer size by a fixed amount with each successive need
    for additional rows up to a size of 1000.

    The size argument is configurable using the ``max_row_buffer``
    execution option::

        with psycopg2_engine.connect() as conn:

            result = conn.execution_options(
                stream_results=True, max_row_buffer=50
                ).execute("select * from table")

    .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option.

    .. seealso::

        :ref:`psycopg2_execution_options`
    """

    def _init_metadata(self):
        # optional cap on buffer growth; None means uncapped (max 1000
        # via size_growth below)
        self._max_row_buffer = self.context.execution_options.get(
            'max_row_buffer', None)
        # buffer at least one row BEFORE metadata init so that
        # cursor.description is populated (psycopg2 server-side cursors)
        self.__buffer_rows()
        super(BufferedRowResultProxy, self)._init_metadata()

    # this is a "growth chart" for the buffering of rows.
    # each successive __buffer_rows call will use the next
    # value in the list for the buffer size until the max
    # is reached
    size_growth = {
        1: 5,
        5: 10,
        10: 20,
        20: 50,
        50: 100,
        100: 250,
        250: 500,
        500: 1000
    }

    def __buffer_rows(self):
        # no-op once the cursor has been released
        if self.cursor is None:
            return
        # _bufsize starts at 1 (first call) and follows size_growth
        # on each subsequent call
        size = getattr(self, '_bufsize', 1)
        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
        self._bufsize = self.size_growth.get(size, size)
        if self._max_row_buffer is not None:
            # never let the growth chart exceed the configured cap
            self._bufsize = min(self._max_row_buffer, self._bufsize)

    def _soft_close(self, **kw):
        # drop any buffered rows before releasing the cursor
        self.__rowbuffer.clear()
        super(BufferedRowResultProxy, self)._soft_close(**kw)

    def _fetchone_impl(self):
        if self.cursor is None:
            return self._non_result(None)
        if not self.__rowbuffer:
            # refill; if still empty, the result is exhausted
            self.__buffer_rows()
            if not self.__rowbuffer:
                return None
        return self.__rowbuffer.popleft()

    def _fetchmany_impl(self, size=None):
        if size is None:
            return self._fetchall_impl()
        result = []
        for x in range(0, size):
            row = self._fetchone_impl()
            if row is None:
                break
            result.append(row)
        return result

    def _fetchall_impl(self):
        if self.cursor is None:
            return self._non_result([])
        # drain the cursor into the buffer, then hand the whole
        # buffer over and start a fresh (empty) one
        self.__rowbuffer.extend(self.cursor.fetchall())
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret
  1077. class FullyBufferedResultProxy(ResultProxy):
  1078. """A result proxy that buffers rows fully upon creation.
  1079. Used for operations where a result is to be delivered
  1080. after the database conversation can not be continued,
  1081. such as MSSQL INSERT...OUTPUT after an autocommit.
  1082. """
  1083. def _init_metadata(self):
  1084. super(FullyBufferedResultProxy, self)._init_metadata()
  1085. self.__rowbuffer = self._buffer_rows()
  1086. def _buffer_rows(self):
  1087. return collections.deque(self.cursor.fetchall())
  1088. def _soft_close(self, **kw):
  1089. self.__rowbuffer.clear()
  1090. super(FullyBufferedResultProxy, self)._soft_close(**kw)
  1091. def _fetchone_impl(self):
  1092. if self.__rowbuffer:
  1093. return self.__rowbuffer.popleft()
  1094. else:
  1095. return self._non_result(None)
  1096. def _fetchmany_impl(self, size=None):
  1097. if size is None:
  1098. return self._fetchall_impl()
  1099. result = []
  1100. for x in range(0, size):
  1101. row = self._fetchone_impl()
  1102. if row is None:
  1103. break
  1104. result.append(row)
  1105. return result
  1106. def _fetchall_impl(self):
  1107. if not self.cursor:
  1108. return self._non_result([])
  1109. ret = self.__rowbuffer
  1110. self.__rowbuffer = collections.deque()
  1111. return ret
  1112. class BufferedColumnRow(RowProxy):
  1113. def __init__(self, parent, row, processors, keymap):
  1114. # preprocess row
  1115. row = list(row)
  1116. # this is a tad faster than using enumerate
  1117. index = 0
  1118. for processor in parent._orig_processors:
  1119. if processor is not None:
  1120. row[index] = processor(row[index])
  1121. index += 1
  1122. row = tuple(row)
  1123. super(BufferedColumnRow, self).__init__(parent, row,
  1124. processors, keymap)
  1125. class BufferedColumnResultProxy(ResultProxy):
  1126. """A ResultProxy with column buffering behavior.
  1127. ``ResultProxy`` that loads all columns into memory each time
  1128. fetchone() is called. If fetchmany() or fetchall() are called,
  1129. the full grid of results is fetched. This is to operate with
  1130. databases where result rows contain "live" results that fall out
  1131. of scope unless explicitly fetched. Currently this includes
  1132. cx_Oracle LOB objects.
  1133. """
  1134. _process_row = BufferedColumnRow
  1135. def _init_metadata(self):
  1136. super(BufferedColumnResultProxy, self)._init_metadata()
  1137. metadata = self._metadata
  1138. # don't double-replace the processors, in the case
  1139. # of a cached ResultMetaData
  1140. if metadata._orig_processors is None:
  1141. # orig_processors will be used to preprocess each row when
  1142. # they are constructed.
  1143. metadata._orig_processors = metadata._processors
  1144. # replace the all type processors by None processors.
  1145. metadata._processors = [None for _ in range(len(metadata.keys))]
  1146. keymap = {}
  1147. for k, (func, obj, index) in metadata._keymap.items():
  1148. keymap[k] = (None, obj, index)
  1149. metadata._keymap = keymap
  1150. def fetchall(self):
  1151. # can't call cursor.fetchall(), since rows must be
  1152. # fully processed before requesting more from the DBAPI.
  1153. l = []
  1154. while True:
  1155. row = self.fetchone()
  1156. if row is None:
  1157. break
  1158. l.append(row)
  1159. return l
  1160. def fetchmany(self, size=None):
  1161. # can't call cursor.fetchmany(), since rows must be
  1162. # fully processed before requesting more from the DBAPI.
  1163. if size is None:
  1164. return self.fetchall()
  1165. l = []
  1166. for i in range(size):
  1167. row = self.fetchone()
  1168. if row is None:
  1169. break
  1170. l.append(row)
  1171. return l