Mirror of https://github.com/python/cpython.git
Fix dbshelve and much of dbtables.

commit cccc58d993
parent 9b01862d32

4 changed files with 66 additions and 53 deletions
Lib/bsddb/dbshelve.py

@@ -37,6 +37,8 @@ except ImportError:
     class DictMixin: pass
 from . import db
 
+_unspecified = object()
+
 #------------------------------------------------------------------------
 
 
@@ -163,18 +165,19 @@ class DBShelf(DictMixin):
         return self.db.associate(secondaryDB, _shelf_callback, flags)
 
 
-    #def get(self, key, default=None, txn=None, flags=0):
-    def get(self, *args, **kw):
-        # We do it with *args and **kw so if the default value wasn't
-        # given nothing is passed to the extension module. That way
-        # an exception can be raised if set_get_returns_none is turned
-        # off.
-        data = self.db.get(*args, **kw)
-        try:
-            return pickle.loads(data)
-        except (TypeError, pickle.UnpicklingError, EOFError):
-            return data  # we may be getting the default value, or None,
-                         # so it doesn't need unpickled.
+    def get(self, key, default=_unspecified, txn=None, flags=0):
+        # If no default is given, we must not pass one to the
+        # extension module, so that an exception can be raised if
+        # set_get_returns_none is turned off.
+        if default is _unspecified:
+            data = self.db.get(key, txn=txn, flags=flags)
+            # if this returns, the default value would be None
+            default = None
+        else:
+            data = self.db.get(key, default, txn=txn, flags=flags)
+        if data is default:
+            return data
+        return pickle.loads(data)
 
     def get_both(self, key, value, txn=None, flags=0):
         data = pickle.dumps(value, self.binary)
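
The rewritten get() leans on the module-level _unspecified sentinel so that "no default was given" can be told apart from "the caller explicitly passed None". A minimal, self-contained sketch of that idiom; the dict-backed lookup() helper below is illustrative only, not code from this commit:

_unspecified = object()              # unique marker: no default supplied

def lookup(mapping, key, default=_unspecified):
    if default is _unspecified:
        return mapping[key]          # no default: let a missing key raise
    return mapping.get(key, default)

d = {"a": 1}
print(lookup(d, "a"))                # -> 1
print(lookup(d, "b", default=None))  # -> None (an explicit default)
# lookup(d, "b") raises KeyError, loosely mirroring a DB handle with
# set_get_returns_none turned off

In the method itself the same comparison also decides whether to unpickle: if db.get() handed back the default unchanged, it is returned as-is.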
Lib/bsddb/dbtables.py

@@ -24,12 +24,16 @@ import xdrlib
 import random
 import pickle
 
-try:
-    # For Pythons w/distutils pybsddb
-    from bsddb3.db import *
-except ImportError:
-    # For Python 2.3
-    from bsddb.db import *
+from bsddb.db import *
+
+# All table names, row names etc. must be ASCII strings
+def _E(s):
+    return s.encode("ascii")
+
+# Yet, rowid are arbitrary bytes; if there is a need to hash
+# them, convert them to Latin-1 first
+def _D(s):
+    return s.decode("latin-1")
 
 # XXX(nnorwitz): is this correct? DBIncompleteError is conditional in _bsddb.c
 try:
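
A quick standalone check of what the two new helpers do: _E() turns ASCII-only table and column names into the bytes keys Berkeley DB expects, while _D() gives arbitrary rowid bytes a lossless str form. The sample values below are invented:

def _E(s):                       # table/column names: must be ASCII
    return s.encode("ascii")

def _D(s):                       # rowids: arbitrary bytes, Latin-1 is lossless
    return s.decode("latin-1")

print(_E("tab01._COLUMNS__"))                  # b'tab01._COLUMNS__'

rowid = bytes([0, 7, 200, 255, 1, 2, 3, 4])    # 8 arbitrary bytes
assert _D(rowid).encode("latin-1") == rowid    # round-trips every byte value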
@@ -90,11 +94,11 @@ class LikeCond(Cond):
 #
 # keys used to store database metadata
 #
-_table_names_key = b'__TABLE_NAMES__'  # list of the tables in this db
-_columns = b'._COLUMNS__'  # table_name+this key contains a list of columns
+_table_names_key = '__TABLE_NAMES__'  # list of the tables in this db
+_columns = '._COLUMNS__'  # table_name+this key contains a list of columns
 
 def _columns_key(table):
-    return table + _columns
+    return _E(table + _columns)
 
 #
 # these keys are found within table sub databases
@@ -105,19 +109,19 @@ _rowid = '._ROWID_.'  # this+rowid+this key contains a unique entry for each
 _rowid_str_len = 8  # length in bytes of the unique rowid strings
 
 def _data_key(table, col, rowid):
-    return table + _data + col + _data + rowid
+    return _E(table + _data + col + _data) + rowid
 
 def _search_col_data_key(table, col):
-    return table + _data + col + _data
+    return _E(table + _data + col + _data)
 
 def _search_all_data_key(table):
-    return table + _data
+    return _E(table + _data)
 
 def _rowid_key(table, rowid):
-    return table + _rowid + rowid + _rowid
+    return _E(table + _rowid) + rowid + _E(_rowid)
 
 def _search_rowid_key(table):
-    return table + _rowid
+    return _E(table + _rowid)
 
 def contains_metastrings(s) :
     """Verify that the given string does not contain any
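
With the helpers in place, a per-cell key is built by encoding only the str parts and appending the raw rowid bytes. A rough sketch of _data_key() in isolation; the '._DATA_.' separator value is assumed here, and the table, column and rowid values are made up:

_data = '._DATA_.'               # assumed separator constant from this module

def _E(s):
    return s.encode("ascii")

def _data_key(table, col, rowid):
    # table and col are str, rowid is raw bytes: encode only the str part
    return _E(table + _data + col + _data) + rowid

print(_data_key("tab01", "size", b"\x00\x01\xc8\xff\x01\x02\x03\x04"))
# -> b'tab01._DATA_.size._DATA_.\x00\x01\xc8\xff\x01\x02\x03\x04'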
@@ -171,8 +175,8 @@ class bsdTableDB :
             # Initialize the table names list if this is a new database
             txn = self.env.txn_begin()
             try:
-                if not self.db.has_key(_table_names_key, txn):
-                    self.db.put(_table_names_key, pickle.dumps([], 1), txn=txn)
+                if not self.db.has_key(_E(_table_names_key), txn):
+                    self.db.put(_E(_table_names_key), pickle.dumps([], 1), txn=txn)
             # Yes, bare except
             except:
                 txn.abort()
@@ -250,12 +254,12 @@ class bsdTableDB :
             self.db.put(columnlist_key, pickle.dumps(columns, 1), txn=txn)
 
             # add the table name to the tablelist
-            tablelist = pickle.loads(self.db.get(_table_names_key, txn=txn,
+            tablelist = pickle.loads(self.db.get(_E(_table_names_key), txn=txn,
                                                  flags=DB_RMW))
             tablelist.append(table)
             # delete 1st, in case we opened with DB_DUP
-            self.db.delete(_table_names_key, txn)
-            self.db.put(_table_names_key, pickle.dumps(tablelist, 1), txn=txn)
+            self.db.delete(_E(_table_names_key), txn)
+            self.db.put(_E(_table_names_key), pickle.dumps(tablelist, 1), txn=txn)
 
             txn.commit()
             txn = None
@@ -284,7 +288,7 @@ class bsdTableDB :
 
     def ListTables(self):
        """Return a list of tables in this database."""
-        pickledtablelist = self.db.get(_table_names_key)
+        pickledtablelist = self.db.get(_E(_table_names_key))
         if pickledtablelist:
             return pickle.loads(pickledtablelist)
         else:
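
The table-name bookkeeping keeps its old shape: a pickled Python list stored under one fixed key; the fix in the hunks above is simply that the key is passed through _E() first. A toy version with a plain dict standing in for the Berkeley DB handle (all names below are illustrative):

import pickle

_table_names_key = '__TABLE_NAMES__'
db = {}                                    # stand-in for the DB handle
key = _table_names_key.encode("ascii")     # what _E(_table_names_key) yields

db.setdefault(key, pickle.dumps([], 1))    # new database: empty table list

tables = pickle.loads(db[key])
tables.append("tab01")
db[key] = pickle.dumps(tables, 1)          # the real code deletes, then puts

print(pickle.loads(db[key]))               # -> ['tab01']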
@@ -435,6 +439,7 @@ class bsdTableDB :
             # modify only requested columns
             columns = mappings.keys()
             for rowid in matching_rowids.keys():
+                rowid = rowid.encode("latin-1")
                 txn = None
                 try:
                     for column in columns:
@@ -598,7 +603,7 @@ class bsdTableDB :
             key, data = cur.set_range(searchkey)
             while key[:len(searchkey)] == searchkey:
                 # extract the rowid from the key
-                rowid = key[-_rowid_str_len:]
+                rowid = _D(key[-_rowid_str_len:])
 
                 if rowid not in rejected_rowids:
                     # if no condition was specified or the condition
@@ -629,6 +634,7 @@ class bsdTableDB :
         # database for the matching rows.
         if len(columns) > 0:
             for rowid, rowdata in matching_rowids.items():
+                rowid = rowid.encode("latin-1")
                 for column in columns:
                     if column in rowdata:
                         continue
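
The two new .encode("latin-1") lines are the other half of _D(): matching_rowids is keyed by the str form of each rowid, so the key has to be converted back to the identical bytes before it is used against the database. Latin-1 works because it maps every byte value 0-255 to exactly one character. A tiny standalone check with random values (nothing here is taken from the commit):

import random

raw = bytes(random.randrange(256) for _ in range(8))   # rowid-sized bytes
as_str = raw.decode("latin-1")      # what _D() produces for the dict key
back = as_str.encode("latin-1")     # what the loops above do before DB access
assert back == raw                  # lossless for every possible byte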
@@ -683,15 +689,15 @@ class bsdTableDB :
 
             # delete the tablename from the table name list
             tablelist = pickle.loads(
-                self.db.get(_table_names_key, txn=txn, flags=DB_RMW))
+                self.db.get(_E(_table_names_key), txn=txn, flags=DB_RMW))
             try:
                 tablelist.remove(table)
             except ValueError:
                 # hmm, it wasn't there, oh well, that's what we want.
                 pass
             # delete 1st, incase we opened with DB_DUP
-            self.db.delete(_table_names_key, txn)
-            self.db.put(_table_names_key, pickle.dumps(tablelist, 1), txn=txn)
+            self.db.delete(_E(_table_names_key), txn)
+            self.db.put(_E(_table_names_key), pickle.dumps(tablelist, 1), txn=txn)
 
             txn.commit()
             txn = None
Lib/bsddb/test/test_dbtables.py

@@ -65,7 +65,11 @@ class TableDBTestCase(unittest.TestCase):
         except dbtables.TableDBError:
             pass
         self.tdb.CreateTable(tabname, [colname])
-        self.tdb.Insert(tabname, {colname: pickle.dumps(3.14159, 1)})
+        try:
+            self.tdb.Insert(tabname, {colname: pickle.dumps(3.14159, 1)})
+        except Exception:
+            import traceback
+            traceback.print_exc()
 
         if verbose:
             self.tdb._db_print()
@@ -109,7 +113,7 @@ class TableDBTestCase(unittest.TestCase):
             else :
                 if verbose:
                     print("values= %r" % (values,))
-                raise "Wrong values returned!"
+                self.fail("Wrong values returned!")
 
     def test03(self):
         tabname = "test03"
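
Raising a bare string was already deprecated in Python 2 and is a plain TypeError on Python 3, so the check now reports the problem through unittest instead. A minimal standalone test showing the replacement idiom (a toy case, not taken from the suite):

import unittest

class DemoCase(unittest.TestCase):
    def test_values(self):
        values = [0, 1]                  # pretend result from a query
        if values != [3, 4]:
            # registers a normal test failure instead of raising a string
            self.fail("Wrong values returned! got %r" % (values,))

if __name__ == "__main__":
    unittest.main()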
Lib/bsddb/test/test_all.py

@@ -31,22 +31,22 @@ def suite():
         unlink(f)
 
     test_modules = [
-        #'test_associate',
-        #'test_basics',
-        #'test_compat',
-        #'test_dbobj',
+        'test_associate',
+        'test_basics',
+        'test_compat',
+        'test_dbobj',
         'test_dbshelve',
-        #'test_dbtables',
-        #'test_env_close',
-        #'test_get_none',
-        #'test_join',
-        #'test_lock',
-        #'test_misc',
-        #'test_queue',
-        #'test_recno',
-        #'test_thread',
-        #'test_sequence',
-        #'test_cursor_pget_bug',
+        'test_dbtables',
+        'test_env_close',
+        'test_get_none',
+        'test_join',
+        'test_lock',
+        'test_misc',
+        'test_queue',
+        'test_recno',
+        'test_thread',
+        'test_sequence',
+        'test_cursor_pget_bug',
         ]
 
     alltests = unittest.TestSuite()
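
For context, a name list like test_modules is usually turned into one suite by importing each module and asking it for its own suite(). The sketch below is written from scratch rather than copied from test_all, with a trimmed module list, and it assumes each listed module is importable and exposes a suite() function:

import unittest

test_modules = [
    'test_dbshelve',
    'test_dbtables',
    ]

def suite():
    alltests = unittest.TestSuite()
    for name in test_modules:
        module = __import__(name)        # assumed importable on sys.path
        alltests.addTest(module.suite())
    return alltests

if __name__ == '__main__':
    unittest.main(defaultTest='suite')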