util.py 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535
  1. # testing/util.py
  2. # Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
  3. # <see AUTHORS file>
  4. #
  5. # This module is part of SQLAlchemy and is released under
  6. # the MIT License: https://www.opensource.org/licenses/mit-license.php
  7. # mypy: ignore-errors
  8. from __future__ import annotations
  9. from collections import deque
  10. from collections import namedtuple
  11. import contextlib
  12. import decimal
  13. import gc
  14. from itertools import chain
  15. import pickle
  16. import random
  17. import sys
  18. from sys import getsizeof
  19. import time
  20. import types
  21. from typing import Any
  22. from . import config
  23. from . import mock
  24. from .. import inspect
  25. from ..engine import Connection
  26. from ..schema import Column
  27. from ..schema import DropConstraint
  28. from ..schema import DropTable
  29. from ..schema import ForeignKeyConstraint
  30. from ..schema import MetaData
  31. from ..schema import Table
  32. from ..sql import schema
  33. from ..sql.sqltypes import Integer
  34. from ..util import decorator
  35. from ..util import defaultdict
  36. from ..util import has_refcount_gc
  37. from ..util import inspect_getfullargspec
  38. if not has_refcount_gc:
  39. def non_refcount_gc_collect(*args):
  40. gc.collect()
  41. gc.collect()
  42. gc_collect = lazy_gc = non_refcount_gc_collect
  43. else:
  44. # assume CPython - straight gc.collect, lazy_gc() is a pass
  45. gc_collect = gc.collect
  46. def lazy_gc():
  47. pass
  48. def picklers():
  49. nt = namedtuple("picklers", ["loads", "dumps"])
  50. for protocol in range(-2, pickle.HIGHEST_PROTOCOL + 1):
  51. yield nt(pickle.loads, lambda d: pickle.dumps(d, protocol))
  52. def random_choices(population, k=1):
  53. return random.choices(population, k=k)
  54. def round_decimal(value, prec):
  55. if isinstance(value, float):
  56. return round(value, prec)
  57. # can also use shift() here but that is 2.6 only
  58. return (value * decimal.Decimal("1" + "0" * prec)).to_integral(
  59. decimal.ROUND_FLOOR
  60. ) / pow(10, prec)
  61. class RandomSet(set):
  62. def __iter__(self):
  63. l = list(set.__iter__(self))
  64. random.shuffle(l)
  65. return iter(l)
  66. def pop(self):
  67. index = random.randint(0, len(self) - 1)
  68. item = list(set.__iter__(self))[index]
  69. self.remove(item)
  70. return item
  71. def union(self, other):
  72. return RandomSet(set.union(self, other))
  73. def difference(self, other):
  74. return RandomSet(set.difference(self, other))
  75. def intersection(self, other):
  76. return RandomSet(set.intersection(self, other))
  77. def copy(self):
  78. return RandomSet(self)
  79. def conforms_partial_ordering(tuples, sorted_elements):
  80. """True if the given sorting conforms to the given partial ordering."""
  81. deps = defaultdict(set)
  82. for parent, child in tuples:
  83. deps[parent].add(child)
  84. for i, node in enumerate(sorted_elements):
  85. for n in sorted_elements[i:]:
  86. if node in deps[n]:
  87. return False
  88. else:
  89. return True
  90. def all_partial_orderings(tuples, elements):
  91. edges = defaultdict(set)
  92. for parent, child in tuples:
  93. edges[child].add(parent)
  94. def _all_orderings(elements):
  95. if len(elements) == 1:
  96. yield list(elements)
  97. else:
  98. for elem in elements:
  99. subset = set(elements).difference([elem])
  100. if not subset.intersection(edges[elem]):
  101. for sub_ordering in _all_orderings(subset):
  102. yield [elem] + sub_ordering
  103. return iter(_all_orderings(elements))
  104. def function_named(fn, name):
  105. """Return a function with a given __name__.
  106. Will assign to __name__ and return the original function if possible on
  107. the Python implementation, otherwise a new function will be constructed.
  108. This function should be phased out as much as possible
  109. in favor of @decorator. Tests that "generate" many named tests
  110. should be modernized.
  111. """
  112. try:
  113. fn.__name__ = name
  114. except TypeError:
  115. fn = types.FunctionType(
  116. fn.__code__, fn.__globals__, name, fn.__defaults__, fn.__closure__
  117. )
  118. return fn
  119. def run_as_contextmanager(ctx, fn, *arg, **kw):
  120. """Run the given function under the given contextmanager,
  121. simulating the behavior of 'with' to support older
  122. Python versions.
  123. This is not necessary anymore as we have placed 2.6
  124. as minimum Python version, however some tests are still using
  125. this structure.
  126. """
  127. obj = ctx.__enter__()
  128. try:
  129. result = fn(obj, *arg, **kw)
  130. ctx.__exit__(None, None, None)
  131. return result
  132. except:
  133. exc_info = sys.exc_info()
  134. raise_ = ctx.__exit__(*exc_info)
  135. if not raise_:
  136. raise
  137. else:
  138. return raise_
  139. def rowset(results):
  140. """Converts the results of sql execution into a plain set of column tuples.
  141. Useful for asserting the results of an unordered query.
  142. """
  143. return {tuple(row) for row in results}
  144. def fail(msg):
  145. assert False, msg
@decorator
def provide_metadata(fn, *args, **kw):
    """Provide bound MetaData for a single test, dropping afterwards.

    Legacy; use the "metadata" pytest fixture.

    """
    # local import; presumably avoids a circular import at module load —
    # confirm before moving to top of file
    from . import fixtures

    metadata = schema.MetaData()
    # NOTE(review): assumes args[0] is the test-class instance ("self");
    # the MetaData is installed as self.metadata for the test's duration
    self = args[0]
    prev_meta = getattr(self, "metadata", None)
    self.metadata = metadata
    try:
        return fn(*args, **kw)
    finally:
        # close out some things that get in the way of dropping tables.
        # when using the "metadata" fixture, there is a set ordering
        # of things that makes sure things are cleaned up in order, however
        # the simple "decorator" nature of this legacy function means
        # we have to hardcode some of that cleanup ahead of time.

        # close ORM sessions
        fixtures.close_all_sessions()

        # integrate with the "connection" fixture as there are many
        # tests where it is used along with provide_metadata
        cfc = fixtures.base._connection_fixture_connection
        if cfc:
            # TODO: this warning can be used to find all the places
            # this is used with connection fixture
            # warn("mixing legacy provide metadata with connection fixture")
            drop_all_tables_from_metadata(metadata, cfc)
            # as the provide_metadata fixture is often used with "testing.db",
            # when we do the drop we have to commit the transaction so that
            # the DB is actually updated as the CREATE would have been
            # committed
            cfc.get_transaction().commit()
        else:
            drop_all_tables_from_metadata(metadata, config.db)
        # restore whatever self.metadata pointed to before the test
        self.metadata = prev_meta
  182. def flag_combinations(*combinations):
  183. """A facade around @testing.combinations() oriented towards boolean
  184. keyword-based arguments.
  185. Basically generates a nice looking identifier based on the keywords
  186. and also sets up the argument names.
  187. E.g.::
  188. @testing.flag_combinations(
  189. dict(lazy=False, passive=False),
  190. dict(lazy=True, passive=False),
  191. dict(lazy=False, passive=True),
  192. dict(lazy=False, passive=True, raiseload=True),
  193. )
  194. def test_fn(lazy, passive, raiseload): ...
  195. would result in::
  196. @testing.combinations(
  197. ("", False, False, False),
  198. ("lazy", True, False, False),
  199. ("lazy_passive", True, True, False),
  200. ("lazy_passive", True, True, True),
  201. id_="iaaa",
  202. argnames="lazy,passive,raiseload",
  203. )
  204. def test_fn(lazy, passive, raiseload): ...
  205. """
  206. keys = set()
  207. for d in combinations:
  208. keys.update(d)
  209. keys = sorted(keys)
  210. return config.combinations(
  211. *[
  212. ("_".join(k for k in keys if d.get(k, False)),)
  213. + tuple(d.get(k, False) for k in keys)
  214. for d in combinations
  215. ],
  216. id_="i" + ("a" * len(keys)),
  217. argnames=",".join(keys),
  218. )
  219. def lambda_combinations(lambda_arg_sets, **kw):
  220. args = inspect_getfullargspec(lambda_arg_sets)
  221. arg_sets = lambda_arg_sets(*[mock.Mock() for arg in args[0]])
  222. def create_fixture(pos):
  223. def fixture(**kw):
  224. return lambda_arg_sets(**kw)[pos]
  225. fixture.__name__ = "fixture_%3.3d" % pos
  226. return fixture
  227. return config.combinations(
  228. *[(create_fixture(i),) for i in range(len(arg_sets))], **kw
  229. )
  230. def resolve_lambda(__fn, **kw):
  231. """Given a no-arg lambda and a namespace, return a new lambda that
  232. has all the values filled in.
  233. This is used so that we can have module-level fixtures that
  234. refer to instance-level variables using lambdas.
  235. """
  236. pos_args = inspect_getfullargspec(__fn)[0]
  237. pass_pos_args = {arg: kw.pop(arg) for arg in pos_args}
  238. glb = dict(__fn.__globals__)
  239. glb.update(kw)
  240. new_fn = types.FunctionType(__fn.__code__, glb)
  241. return new_fn(**pass_pos_args)
def metadata_fixture(ddl="function"):
    """Provide MetaData for a pytest fixture.

    :param ddl: fixture scope passed to ``config.fixture`` (e.g.
     "function"); tables are created and dropped at that scope.
    """

    def decorate(fn):
        # generator-style fixture: create tables, yield the decorated
        # function's result, drop tables on teardown
        def run_ddl(self):
            metadata = self.metadata = schema.MetaData()
            try:
                result = fn(self, metadata)
                metadata.create_all(config.db)
                # TODO:
                # somehow get a per-function dml erase fixture here
                yield result
            finally:
                # always drop, even if the test body raised
                metadata.drop_all(config.db)

        return config.fixture(scope=ddl)(run_ddl)

    return decorate
def force_drop_names(*names):
    """Force the given table names to be dropped after test complete,
    isolating for foreign key cycles

    """

    @decorator
    def go(fn, *args, **kw):
        try:
            return fn(*args, **kw)
        finally:
            # drop only the named tables, even when the test itself failed
            drop_all_tables(config.db, inspect(config.db), include_names=names)

    return go
  268. class adict(dict):
  269. """Dict keys available as attributes. Shadows."""
  270. def __getattribute__(self, key):
  271. try:
  272. return self[key]
  273. except KeyError:
  274. return dict.__getattribute__(self, key)
  275. def __call__(self, *keys):
  276. return tuple([self[key] for key in keys])
  277. get_all = __call__
def drop_all_tables_from_metadata(metadata, engine_or_connection):
    """Drop all tables in *metadata*, accepting either an Engine or an
    already-established Connection."""
    from . import engines

    def go(connection):
        # let the test reaper release anything that would block the drops
        engines.testing_reaper.prepare_for_drop_tables(connection)

        if not connection.dialect.supports_alter:
            # without ALTER, FK cycles can't be broken up-front; tolerate
            # the "Can't sort tables" warning and drop anyway
            from . import assertions

            with assertions.expect_warnings(
                "Can't sort tables", assert_=False
            ):
                metadata.drop_all(connection)
        else:
            metadata.drop_all(connection)

    if not isinstance(engine_or_connection, Connection):
        # given an Engine: open our own transaction-scoped connection
        with engine_or_connection.begin() as connection:
            go(connection)
    else:
        go(engine_or_connection)
def drop_all_tables(
    engine,
    inspector,
    schema=None,
    consider_schemas=(None,),
    include_names=None,
):
    """Drop all tables reported by *inspector*, in FK-dependency order.

    :param engine: Engine on which DROP statements are emitted.
    :param inspector: inspector providing
     ``sort_tables_on_foreign_key_dependency``.
    :param schema: optional single schema name; mutually exclusive with
     ``consider_schemas``.
    :param consider_schemas: tuple of schema names to cover.
    :param include_names: if given, restrict drops to these table names.
    """
    if include_names is not None:
        include_names = set(include_names)

    if schema is not None:
        assert consider_schemas == (
            None,
        ), "consider_schemas and schema are mutually exclusive"
        consider_schemas = (schema,)

    with engine.begin() as conn:
        # reversed: drop dependents before the tables they reference
        for table_key, fkcs in reversed(
            inspector.sort_tables_on_foreign_key_dependency(
                consider_schemas=consider_schemas
            )
        ):
            if table_key:
                if (
                    include_names is not None
                    and table_key[1] not in include_names
                ):
                    continue
                conn.execute(
                    DropTable(
                        # throwaway Table: only name/schema are needed to
                        # render DROP TABLE
                        Table(table_key[1], MetaData(), schema=table_key[0])
                    )
                )
            elif fkcs:
                # a None table_key with fkcs indicates a dependency cycle;
                # break it by dropping the constraints, if the backend can
                if not engine.dialect.supports_alter:
                    continue
                for t_key, fkc in fkcs:
                    if (
                        include_names is not None
                        and t_key[1] not in include_names
                    ):
                        continue
                    # throwaway table/columns; only the names matter for
                    # rendering ALTER TABLE ... DROP CONSTRAINT
                    tb = Table(
                        t_key[1],
                        MetaData(),
                        Column("x", Integer),
                        Column("y", Integer),
                        schema=t_key[0],
                    )
                    conn.execute(
                        DropConstraint(
                            ForeignKeyConstraint([tb.c.x], [tb.c.y], name=fkc)
                        )
                    )
def teardown_events(event_cls):
    """Decorator that invokes ``event_cls._clear()`` after the decorated
    test completes, whether or not it raised."""

    @decorator
    def decorate(fn, *arg, **kw):
        try:
            return fn(*arg, **kw)
        finally:
            event_cls._clear()

    return decorate
  355. def total_size(o):
  356. """Returns the approximate memory footprint an object and all of its
  357. contents.
  358. source: https://code.activestate.com/recipes/577504/
  359. """
  360. def dict_handler(d):
  361. return chain.from_iterable(d.items())
  362. all_handlers = {
  363. tuple: iter,
  364. list: iter,
  365. deque: iter,
  366. dict: dict_handler,
  367. set: iter,
  368. frozenset: iter,
  369. }
  370. seen = set() # track which object id's have already been seen
  371. default_size = getsizeof(0) # estimate sizeof object without __sizeof__
  372. def sizeof(o):
  373. if id(o) in seen: # do not double count the same object
  374. return 0
  375. seen.add(id(o))
  376. s = getsizeof(o, default_size)
  377. for typ, handler in all_handlers.items():
  378. if isinstance(o, typ):
  379. s += sum(map(sizeof, handler(o)))
  380. break
  381. return s
  382. return sizeof(o)
  383. def count_cache_key_tuples(tup):
  384. """given a cache key tuple, counts how many instances of actual
  385. tuples are found.
  386. used to alert large jumps in cache key complexity.
  387. """
  388. stack = [tup]
  389. sentinel = object()
  390. num_elements = 0
  391. while stack:
  392. elem = stack.pop(0)
  393. if elem is sentinel:
  394. num_elements += 1
  395. elif isinstance(elem, tuple):
  396. if elem:
  397. stack = list(elem) + [sentinel] + stack
  398. return num_elements
  399. @contextlib.contextmanager
  400. def skip_if_timeout(seconds: float, cleanup: Any = None):
  401. now = time.time()
  402. yield
  403. sec = time.time() - now
  404. if sec > seconds:
  405. try:
  406. cleanup()
  407. finally:
  408. config.skip_test(
  409. f"test took too long ({sec:.4f} seconds > {seconds})"
  410. )