Lib/bsddb/dbutils.py
#------------------------------------------------------------------------
#
# Copyright (C) 2000 Autonomous Zone Industries
#
# License:      This is free software.  You may use this software for any
#               purpose including modification/redistribution, so long as
#               this header remains intact and that you do not claim any
#               rights of ownership or authorship of this software.  This
#               software has been tested, but no warranty is expressed or
#               implied.
#
# Author: Gregory P. Smith <greg@krypto.org>
#
# Note: I don't know how useful this is in reality since when a
#       DBLockDeadlockError happens the current transaction is supposed to be
#       aborted.  If it doesn't then when the operation is attempted again
#       the deadlock is still happening...
#       --Robin
#
#------------------------------------------------------------------------
#
# import the time.sleep function in a namespace safe way to allow
#   "from bsddb.dbutils import *"
#
from time import sleep as _sleep
import sys
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
    # Because this syntax is not valid before Python 2.5
    exec("from . import db")
else :
    import db
# always sleep at least N seconds between retries
_deadlock_MinSleepTime = 1.0/128
# never sleep more than N seconds between retries
_deadlock_MaxSleepTime = 3.14159

# Assign a file object to this for a "sleeping" message to be written to it
# each retry
_deadlock_VerboseFile = None
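
# A minimal illustration (not part of the original module): client code can
# route the per-retry "sleeping" messages to stderr by assigning a file
# object here, for example:
#
#     import sys
#     import bsddb.dbutils
#     bsddb.dbutils._deadlock_VerboseFile = sys.stderr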


def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This function is intended to be used to wrap database calls so that
    they are retried, with exponentially backing-off sleeps in between,
    whenever a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time, capped at the maximum
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            # max_retries defaults to -1 (retry forever); when a limit was
            # given, re-raise the deadlock once the retry budget is used up
            max_retries -= 1
            if max_retries == -1:
                raise


#------------------------------------------------------------------------
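
A brief usage sketch (an illustration, not part of the file): it wraps put/get
calls on a plain hash database so that any DBLockDeadlockError would be retried
at most three times before being re-raised. The filename 'example.db' and the
key/value strings are made up, and without a transactional/locking DBEnv a real
deadlock will not actually occur, in which case the wrapper simply calls the
function once and returns its result.

    from bsddb import db, dbutils

    # open (or create) a simple hash-format database file
    d = db.DB()
    d.open('example.db', db.DB_HASH, db.DB_CREATE)

    # retry on deadlock, giving up after 3 retries
    dbutils.DeadlockWrap(d.put, 'foo', 'bar', max_retries=3)
    print(dbutils.DeadlockWrap(d.get, 'foo'))   # prints 'bar'

    d.close()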