SF 563203. Replaced 'has_key()' with 'in'.

Raymond Hettinger 2002-06-01 14:18:47 +00:00
parent 9d5e4aa414
commit 54f0222547
54 changed files with 243 additions and 222 deletions
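The change is mechanical throughout: every test of the form 'd.has_key(k)' becomes the equivalent 'k in d'. Where a mapping-like class previously offered only a 'has_key()' method (the os.environ wrapper, cgi.FieldStorage, rfc822.Message, and weakref.WeakKeyDictionary in the hunks below), the commit also adds a matching '__contains__' method so that the 'in' operator works on instances of that class. A minimal sketch of the pattern, using a hypothetical Headers class purely for illustration:

    class Headers:
        """Toy mapping wrapper (hypothetical, for illustration only)."""

        def __init__(self):
            self.dict = {}

        def has_key(self, name):
            # Old spelling of the membership test, kept for compatibility.
            return name.lower() in self.dict

        def __contains__(self, name):
            # New hook: lets callers write "name in headers" directly.
            return name.lower() in self.dict

    h = Headers()
    h.dict['content-type'] = 'text/plain'

    # Before this commit: if h.has_key('Content-Type'): ...
    # After this commit:  if 'Content-Type' in h: ...
    print 'Content-Type' in h    # prints a true value

For plain dictionaries no extra method is needed: 'k in d' has been supported since Python 2.2 and avoids the method-call overhead of 'has_key()'.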


@ -191,7 +191,7 @@ class ConfigParser:
Raise DuplicateSectionError if a section by the specified name
already exists.
"""
if self.__sections.has_key(section):
if section in self.__sections:
raise DuplicateSectionError(section)
self.__sections[section] = {}
@ -209,7 +209,7 @@ class ConfigParser:
except KeyError:
raise NoSectionError(section)
opts.update(self.__defaults)
if opts.has_key('__name__'):
if '__name__' in opts:
del opts['__name__']
return opts.keys()
@ -310,7 +310,7 @@ class ConfigParser:
states = {'1': 1, 'yes': 1, 'true': 1, 'on': 1,
'0': 0, 'no': 0, 'false': 0, 'off': 0}
v = self.get(section, option)
if not states.has_key(v.lower()):
if not v.lower() in states:
raise ValueError, 'Not a boolean: %s' % v
return states[v.lower()]
@ -320,12 +320,12 @@ class ConfigParser:
def has_option(self, section, option):
"""Check for the existence of a given option in a given section."""
if not section or section == "DEFAULT":
return self.__defaults.has_key(option)
return option in self.__defaults
elif not self.has_section(section):
return 0
else:
option = self.optionxform(option)
return self.__sections[section].has_key(option)
return option in self.__sections[section]
def set(self, section, option, value):
"""Set an option."""
@ -365,14 +365,14 @@ class ConfigParser:
except KeyError:
raise NoSectionError(section)
option = self.optionxform(option)
existed = sectdict.has_key(option)
existed = option in sectdict
if existed:
del sectdict[option]
return existed
def remove_section(self, section):
"""Remove a file section."""
if self.__sections.has_key(section):
if section in self.__sections:
del self.__sections[section]
return True
else:
@ -433,7 +433,7 @@ class ConfigParser:
mo = self.SECTCRE.match(line)
if mo:
sectname = mo.group('header')
if self.__sections.has_key(sectname):
if sectname in self.__sections:
cursect = self.__sections[sectname]
elif sectname == DEFAULTSECT:
cursect = self.__defaults


@ -175,10 +175,10 @@ class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""
base, ext = posixpath.splitext(path)
if self.extensions_map.has_key(ext):
if ext in self.extensions_map:
return self.extensions_map[ext]
ext = ext.lower()
if self.extensions_map.has_key(ext):
if ext in self.extensions_map:
return self.extensions_map[ext]
else:
return self.extensions_map['']


@ -25,7 +25,7 @@ It has the following interface (key and data are strings):
# such key)
del d[key] # delete data stored at key (raises KeyError
# if no such key)
flag = d.has_key(key) # true if the key exists
flag = key in d # true if the key exists
list = d.keys() # return a list of all existing keys (slow!)
Future versions may change the order in which implementations are


@ -103,7 +103,7 @@ class Bdb:
def break_here(self, frame):
filename = self.canonic(frame.f_code.co_filename)
if not self.breaks.has_key(filename):
if not filename in self.breaks:
return False
lineno = frame.f_lineno
if not lineno in self.breaks[filename]:
@ -211,7 +211,7 @@ class Bdb:
if not line:
return 'Line %s:%d does not exist' % (filename,
lineno)
if not self.breaks.has_key(filename):
if not filename in self.breaks:
self.breaks[filename] = []
list = self.breaks[filename]
if not lineno in list:
@ -220,7 +220,7 @@ class Bdb:
def clear_break(self, filename, lineno):
filename = self.canonic(filename)
if not self.breaks.has_key(filename):
if not filename in self.breaks:
return 'There are no breakpoints in %s' % filename
if lineno not in self.breaks[filename]:
return 'There is no breakpoint at %s:%d' % (filename,
@ -249,7 +249,7 @@ class Bdb:
def clear_all_file_breaks(self, filename):
filename = self.canonic(filename)
if not self.breaks.has_key(filename):
if not filename in self.breaks:
return 'There are no breakpoints in %s' % filename
for line in self.breaks[filename]:
blist = Breakpoint.bplist[filename, line]
@ -267,18 +267,18 @@ class Bdb:
def get_break(self, filename, lineno):
filename = self.canonic(filename)
return self.breaks.has_key(filename) and \
return filename in self.breaks and \
lineno in self.breaks[filename]
def get_breaks(self, filename, lineno):
filename = self.canonic(filename)
return self.breaks.has_key(filename) and \
return filename in self.breaks and \
lineno in self.breaks[filename] and \
Breakpoint.bplist[filename, lineno] or []
def get_file_breaks(self, filename):
filename = self.canonic(filename)
if self.breaks.has_key(filename):
if filename in self.breaks:
return self.breaks[filename]
else:
return []
@ -316,7 +316,7 @@ class Bdb:
s = s + frame.f_code.co_name
else:
s = s + "<lambda>"
if frame.f_locals.has_key('__args__'):
if '__args__' in frame.f_locals:
args = frame.f_locals['__args__']
else:
args = None
@ -324,7 +324,7 @@ class Bdb:
s = s + repr.repr(args)
else:
s = s + '()'
if frame.f_locals.has_key('__return__'):
if '__return__' in frame.f_locals:
rv = frame.f_locals['__return__']
s = s + '->'
s = s + repr.repr(rv)


@ -130,7 +130,7 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
"""
if fp is None:
fp = sys.stdin
if not environ.has_key('REQUEST_METHOD'):
if not 'REQUEST_METHOD' in environ:
environ['REQUEST_METHOD'] = 'GET' # For testing stand-alone
if environ['REQUEST_METHOD'] == 'POST':
ctype, pdict = parse_header(environ['CONTENT_TYPE'])
@ -143,14 +143,14 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
qs = fp.read(clength)
else:
qs = '' # Unknown content-type
if environ.has_key('QUERY_STRING'):
if 'QUERY_STRING' in environ:
if qs: qs = qs + '&'
qs = qs + environ['QUERY_STRING']
elif sys.argv[1:]:
if qs: qs = qs + '&'
qs = qs + sys.argv[1]
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
elif environ.has_key('QUERY_STRING'):
elif 'QUERY_STRING' in environ:
qs = environ['QUERY_STRING']
else:
if sys.argv[1:]:
@ -181,7 +181,7 @@ def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
"""
dict = {}
for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
if dict.has_key(name):
if name in dict:
dict[name].append(value)
else:
dict[name] = [value]
@ -244,7 +244,7 @@ def parse_multipart(fp, pdict):
"""
boundary = ""
if pdict.has_key('boundary'):
if 'boundary' in pdict:
boundary = pdict['boundary']
if not valid_boundary(boundary):
raise ValueError, ('Invalid boundary in multipart form: %s'
@ -304,11 +304,11 @@ def parse_multipart(fp, pdict):
key, params = parse_header(line)
if key != 'form-data':
continue
if params.has_key('name'):
if 'name' in params:
name = params['name']
else:
continue
if partdict.has_key(name):
if name in partdict:
partdict[name].append(data)
else:
partdict[name] = [data]
@ -440,10 +440,10 @@ class FieldStorage:
method = 'GET'
self.keep_blank_values = keep_blank_values
self.strict_parsing = strict_parsing
if environ.has_key('REQUEST_METHOD'):
if 'REQUEST_METHOD' in environ:
method = environ['REQUEST_METHOD'].upper()
if method == 'GET' or method == 'HEAD':
if environ.has_key('QUERY_STRING'):
if 'QUERY_STRING' in environ:
qs = environ['QUERY_STRING']
elif sys.argv[1:]:
qs = sys.argv[1]
@ -458,9 +458,9 @@ class FieldStorage:
if method == 'POST':
# Set default content-type for POST to what's traditional
headers['content-type'] = "application/x-www-form-urlencoded"
if environ.has_key('CONTENT_TYPE'):
if 'CONTENT_TYPE' in environ:
headers['content-type'] = environ['CONTENT_TYPE']
if environ.has_key('CONTENT_LENGTH'):
if 'CONTENT_LENGTH' in environ:
headers['content-length'] = environ['CONTENT_LENGTH']
self.fp = fp or sys.stdin
self.headers = headers
@ -468,15 +468,15 @@ class FieldStorage:
# Process content-disposition header
cdisp, pdict = "", {}
if self.headers.has_key('content-disposition'):
if 'content-disposition' in self.headers:
cdisp, pdict = parse_header(self.headers['content-disposition'])
self.disposition = cdisp
self.disposition_options = pdict
self.name = None
if pdict.has_key('name'):
if 'name' in pdict:
self.name = pdict['name']
self.filename = None
if pdict.has_key('filename'):
if 'filename' in pdict:
self.filename = pdict['filename']
# Process content-type header
@ -491,7 +491,7 @@ class FieldStorage:
#
# See below for what we do if there does exist a content-type header,
# but it happens to be something we don't understand.
if self.headers.has_key('content-type'):
if 'content-type' in self.headers:
ctype, pdict = parse_header(self.headers['content-type'])
elif self.outerboundary or method != 'POST':
ctype, pdict = "text/plain", {}
@ -500,10 +500,10 @@ class FieldStorage:
self.type = ctype
self.type_options = pdict
self.innerboundary = ""
if pdict.has_key('boundary'):
if 'boundary' in pdict:
self.innerboundary = pdict['boundary']
clen = -1
if self.headers.has_key('content-length'):
if 'content-length' in self.headers:
try:
clen = int(self.headers['content-length'])
except ValueError:
@ -555,7 +555,7 @@ class FieldStorage:
def getvalue(self, key, default=None):
"""Dictionary style get() method, including 'value' lookup."""
if self.has_key(key):
if key in self:
value = self[key]
if type(value) is type([]):
return map(lambda v: v.value, value)
@ -566,7 +566,7 @@ class FieldStorage:
def getfirst(self, key, default=None):
""" Return the first value received."""
if self.has_key(key):
if key in self:
value = self[key]
if type(value) is type([]):
return value[0].value
@ -577,7 +577,7 @@ class FieldStorage:
def getlist(self, key):
""" Return list of received values."""
if self.has_key(key):
if key in self:
value = self[key]
if type(value) is type([]):
return map(lambda v: v.value, value)
@ -603,6 +603,14 @@ class FieldStorage:
if item.name == key: return True
return False
def __contains__(self, key):
"""Dictionary style __contains__ method."""
if self.list is None:
raise TypeError, "not indexable"
for item in self.list:
if item.name == key: return True
return False
def __len__(self):
"""Dictionary style len(x) support."""
return len(self.keys())
@ -770,7 +778,7 @@ class FormContentDict(UserDict.UserDict):
form = FormContentDict()
form[key] -> [value, value, ...]
form.has_key(key) -> Boolean
key in form -> Boolean
form.keys() -> [key, key, ...]
form.values() -> [[val, val, ...], [val, val, ...], ...]
form.items() -> [(key, [val, val, ...]), (key, [val, val, ...]), ...]
@ -847,21 +855,21 @@ class InterpFormContentDict(SvFormContentDict):
class FormContent(FormContentDict):
"""This class is present for backwards compatibility only."""
def values(self, key):
if self.dict.has_key(key) :return self.dict[key]
if key in self.dict :return self.dict[key]
else: return None
def indexed_value(self, key, location):
if self.dict.has_key(key):
if key in self.dict:
if len(self.dict[key]) > location:
return self.dict[key][location]
else: return None
else: return None
def value(self, key):
if self.dict.has_key(key): return self.dict[key][0]
if key in self.dict: return self.dict[key][0]
else: return None
def length(self, key):
return len(self.dict[key])
def stripped(self, key):
if self.dict.has_key(key): return self.dict[key][0].strip()
if key in self.dict: return self.dict[key][0].strip()
else: return None
def pars(self):
return self.dict


@ -304,7 +304,7 @@ class Cmd:
continue
prevname = name
cmd=name[3:]
if help.has_key(cmd):
if cmd in help:
cmds_doc.append(cmd)
del help[cmd]
elif getattr(self, name).__doc__:


@ -611,7 +611,7 @@ def make_encoding_map(decoding_map):
"""
m = {}
for k,v in decoding_map.items():
if not m.has_key(v):
if not v in m:
m[v] = k
else:
m[v] = None


@ -158,7 +158,7 @@ def deepcopy(x, memo = None):
if memo is None:
memo = {}
d = id(x)
if memo.has_key(d):
if d in memo:
return memo[d]
try:
copierfunction = _deepcopy_dispatch[type(x)]


@ -321,7 +321,7 @@ class SequenceMatcher:
junkdict[elt] = 1
del d[elt]
# Now for x in b, isjunk(x) == junkdict.has_key(x), but the
# Now for x in b, isjunk(x) == x in junkdict, but the
# latter is much faster. Note too that while there may be a
# lot of junk in the sequence, the number of *unique* junk
# elements is probably small. So the memory burden of keeping


@ -1015,7 +1015,7 @@ see its docs for details.
d = self.name2ft
for name, (f, t) in other.name2ft.items():
if d.has_key(name):
if name in d:
print "*** Tester.merge: '" + name + "' in both" \
" testers; summing outcomes."
f2, t2 = d[name]
@ -1024,7 +1024,7 @@ see its docs for details.
d[name] = f, t
def __record_outcome(self, name, f, t):
if self.name2ft.has_key(name):
if name in self.name2ft:
print "*** Warning: '" + name + "' was tested before;", \
"summing outcomes."
f2, t2 = self.name2ft[name]


@ -226,7 +226,7 @@ def expanduser(path):
while i < n and path[i] not in '/\\':
i = i+1
if i == 1:
if not os.environ.has_key('HOME'):
if not 'HOME' in os.environ:
return path
userhome = os.environ['HOME']
else:
@ -272,7 +272,7 @@ def expandvars(path):
try:
index = path.index('}')
var = path[:index]
if os.environ.has_key(var):
if var in os.environ:
res = res + os.environ[var]
except ValueError:
res = res + path
@ -285,7 +285,7 @@ def expandvars(path):
var = var + c
index = index + 1
c = path[index:index + 1]
if os.environ.has_key(var):
if var in os.environ:
res = res + os.environ[var]
if c != '':
res = res + c


@ -42,7 +42,7 @@ def filter(names, pat):
import os,posixpath
result=[]
pat=os.path.normcase(pat)
if not _cache.has_key(pat):
if not pat in _cache:
res = translate(pat)
_cache[pat] = re.compile(res)
match=_cache[pat].match
@ -64,7 +64,7 @@ def fnmatchcase(name, pat):
its arguments.
"""
if not _cache.has_key(pat):
if not pat in _cache:
res = translate(pat)
_cache[pat] = re.compile(res)
return _cache[pat].match(name) is not None


@ -660,7 +660,7 @@ class Netrc:
def __init__(self, filename=None):
if filename is None:
if os.environ.has_key("HOME"):
if "HOME" in os.environ:
filename = os.path.join(os.environ["HOME"],
".netrc")
else:
@ -714,7 +714,7 @@ class Netrc:
self.__defpasswd = passwd or self.__defpasswd
self.__defacct = acct or self.__defacct
if host:
if self.__hosts.has_key(host):
if host in self.__hosts:
ouser, opasswd, oacct = \
self.__hosts[host]
user = user or ouser
@ -736,7 +736,7 @@ class Netrc:
"""
host = host.lower()
user = passwd = acct = None
if self.__hosts.has_key(host):
if host in self.__hosts:
user, passwd, acct = self.__hosts[host]
user = user or self.__defuser
passwd = passwd or self.__defpasswd


@ -45,7 +45,7 @@ def type_to_name(gtype):
for name in _names:
if name[:2] == 'A_':
_type_to_name_map[eval(name)] = name[2:]
if _type_to_name_map.has_key(gtype):
if gtype in _type_to_name_map:
return _type_to_name_map[gtype]
return 'TYPE=' + `gtype`


@ -549,7 +549,7 @@ class HTTPConnection:
# If headers already contains a host header, then define the
# optional skip_host argument to putrequest(). The check is
# harder because field names are case insensitive.
if (headers.has_key('Host')
if ('Host' in headers
or [k for k in headers.iterkeys() if k.lower() == "host"]):
self.putrequest(method, url, skip_host=1)
else:


@ -175,7 +175,7 @@ class Hooks(_Verbose):
def add_module(self, name):
d = self.modules_dict()
if d.has_key(name): return d[name]
if name in d: return d[name]
d[name] = m = self.new_module(name)
return m
@ -352,7 +352,7 @@ class BasicModuleImporter(_Verbose):
return self.loader.set_hooks(hooks)
def import_module(self, name, globals={}, locals={}, fromlist=[]):
if self.modules.has_key(name):
if name in self.modules:
return self.modules[name] # Fast path
stuff = self.loader.find_module(name)
if not stuff:
@ -403,10 +403,10 @@ class ModuleImporter(BasicModuleImporter):
return m
def determine_parent(self, globals):
if not globals or not globals.has_key("__name__"):
if not globals or not "__name__" in globals:
return None
pname = globals['__name__']
if globals.has_key("__path__"):
if "__path__" in globals:
parent = self.modules[pname]
assert globals is parent.__dict__
return parent


@ -168,16 +168,16 @@ class IMAP4:
self._mesg('new IMAP4 connection, tag=%s' % self.tagpre)
self.welcome = self._get_response()
if self.untagged_responses.has_key('PREAUTH'):
if 'PREAUTH' in self.untagged_responses:
self.state = 'AUTH'
elif self.untagged_responses.has_key('OK'):
elif 'OK' in self.untagged_responses:
self.state = 'NONAUTH'
else:
raise self.error(self.welcome)
cap = 'CAPABILITY'
self._simple_command(cap)
if not self.untagged_responses.has_key(cap):
if not cap in self.untagged_responses:
raise self.error('no CAPABILITY response from server')
self.capabilities = tuple(self.untagged_responses[cap][-1].upper().split())
@ -196,7 +196,7 @@ class IMAP4:
def __getattr__(self, attr):
# Allow UPPERCASE variants of IMAP4 command methods.
if Commands.has_key(attr):
if attr in Commands:
return getattr(self, attr.lower())
raise AttributeError("Unknown IMAP4 command: '%s'" % attr)
@ -454,7 +454,7 @@ class IMAP4:
try: typ, dat = self._simple_command('LOGOUT')
except: typ, dat = 'NO', ['%s: %s' % sys.exc_info()[:2]]
self.shutdown()
if self.untagged_responses.has_key('BYE'):
if 'BYE' in self.untagged_responses:
return 'BYE', self.untagged_responses['BYE']
return typ, dat
@ -548,7 +548,7 @@ class IMAP4:
self.state = 'AUTH' # Might have been 'SELECTED'
return typ, dat
self.state = 'SELECTED'
if self.untagged_responses.has_key('READ-ONLY') \
if 'READ-ONLY' in self.untagged_responses \
and not readonly:
if __debug__:
if self.debug >= 1:
@ -619,7 +619,7 @@ class IMAP4:
Returns response appropriate to 'command'.
"""
command = command.upper()
if not Commands.has_key(command):
if not command in Commands:
raise self.error("Unknown IMAP4 UID command: %s" % command)
if self.state not in Commands[command]:
raise self.error('command %s illegal in state %s'
@ -654,7 +654,7 @@ class IMAP4:
name = name.upper()
#if not name in self.capabilities: # Let the server decide!
# raise self.error('unknown extension command: %s' % name)
if not Commands.has_key(name):
if not name in Commands:
Commands[name] = (self.state,)
return apply(self._simple_command, (name,) + args)
@ -671,7 +671,7 @@ class IMAP4:
if self.debug >= 5:
self._mesg('untagged_responses[%s] %s += ["%s"]' %
(typ, len(ur.get(typ,'')), dat))
if ur.has_key(typ):
if typ in ur:
ur[typ].append(dat)
else:
ur[typ] = [dat]
@ -691,10 +691,10 @@ class IMAP4:
'command %s illegal in state %s' % (name, self.state))
for typ in ('OK', 'NO', 'BAD'):
if self.untagged_responses.has_key(typ):
if typ in self.untagged_responses:
del self.untagged_responses[typ]
if self.untagged_responses.has_key('READ-ONLY') \
if 'READ-ONLY' in self.untagged_responses \
and not self.is_readonly:
raise self.readonly('mailbox status changed to READ-ONLY')
@ -782,7 +782,7 @@ class IMAP4:
if self._match(self.tagre, resp):
tag = self.mo.group('tag')
if not self.tagged_commands.has_key(tag):
if not tag in self.tagged_commands:
raise self.abort('unexpected tagged response: %s' % resp)
typ = self.mo.group('type')
@ -935,7 +935,7 @@ class IMAP4:
if typ == 'NO':
return typ, dat
if not self.untagged_responses.has_key(name):
if not name in self.untagged_responses:
return typ, [None]
data = self.untagged_responses[name]
if __debug__:


@ -356,12 +356,12 @@ def getmodule(object):
file = getabsfile(object)
except TypeError:
return None
if modulesbyfile.has_key(file):
if file in modulesbyfile:
return sys.modules[modulesbyfile[file]]
for module in sys.modules.values():
if hasattr(module, '__file__'):
modulesbyfile[getabsfile(module)] = module.__name__
if modulesbyfile.has_key(file):
if file in modulesbyfile:
return sys.modules[modulesbyfile[file]]
main = sys.modules['__main__']
if hasattr(main, object.__name__):
@ -529,7 +529,7 @@ def walktree(classes, children, parent):
classes.sort(lambda a, b: cmp(a.__name__, b.__name__))
for c in classes:
results.append((c, c.__bases__))
if children.has_key(c):
if c in children:
results.append(walktree(children[c], children, c))
return results
@ -547,7 +547,7 @@ def getclasstree(classes, unique=0):
for c in classes:
if c.__bases__:
for parent in c.__bases__:
if not children.has_key(parent):
if not parent in children:
children[parent] = []
children[parent].append(c)
if unique and parent in classes: break


@ -35,7 +35,7 @@ def getlines(filename):
"""Get the lines for a file from the cache.
Update the cache if it doesn't contain an entry for this file already."""
if cache.has_key(filename):
if filename in cache:
return cache[filename][2]
else:
return updatecache(filename)
@ -61,7 +61,7 @@ def updatecache(filename):
If something's wrong, print a message, discard the cache entry,
and return an empty list."""
if cache.has_key(filename):
if filename in cache:
del cache[filename]
if not filename or filename[0] + filename[-1] == '<>':
return []


@ -265,7 +265,7 @@ def _test():
args = sys.argv[1:]
if not args:
for key in 'MAILDIR', 'MAIL', 'LOGNAME', 'USER':
if os.environ.has_key(key):
if key in os.environ:
mbox = os.environ[key]
break
else:


@ -25,7 +25,7 @@ def getcaps():
morecaps = readmailcapfile(fp)
fp.close()
for key in morecaps.keys():
if not caps.has_key(key):
if not key in caps:
caps[key] = morecaps[key]
else:
caps[key] = caps[key] + morecaps[key]
@ -34,11 +34,11 @@ def getcaps():
def listmailcapfiles():
"""Return a list of all mailcap files found on the system."""
# XXX Actually, this is Unix-specific
if os.environ.has_key('MAILCAPS'):
if 'MAILCAPS' in os.environ:
str = os.environ['MAILCAPS']
mailcaps = str.split(':')
else:
if os.environ.has_key('HOME'):
if 'HOME' in os.environ:
home = os.environ['HOME']
else:
# Don't bother with getpwuid()
@ -82,7 +82,7 @@ def readmailcapfile(fp):
types[j] = types[j].strip()
key = '/'.join(types).lower()
# Update the database
if caps.has_key(key):
if key in caps:
caps[key].append(fields)
else:
caps[key] = [fields]
@ -112,7 +112,7 @@ def parseline(line):
else:
fkey = field[:i].strip()
fvalue = field[i+1:].strip()
if fields.has_key(fkey):
if fkey in fields:
# Ignore it
pass
else:
@ -147,7 +147,7 @@ def findmatch(caps, MIMEtype, key='view', filename="/dev/null", plist=[]):
entries = lookup(caps, MIMEtype, key)
# XXX This code should somehow check for the needsterminal flag.
for e in entries:
if e.has_key('test'):
if 'test' in e:
test = subst(e['test'], filename, plist)
if test and os.system(test) != 0:
continue
@ -157,14 +157,14 @@ def findmatch(caps, MIMEtype, key='view', filename="/dev/null", plist=[]):
def lookup(caps, MIMEtype, key=None):
entries = []
if caps.has_key(MIMEtype):
if MIMEtype in caps:
entries = entries + caps[MIMEtype]
MIMEtypes = MIMEtype.split('/')
MIMEtype = MIMEtypes[0] + '/*'
if caps.has_key(MIMEtype):
if MIMEtype in caps:
entries = entries + caps[MIMEtype]
if key is not None:
entries = filter(lambda e, key=key: e.has_key(key), entries)
entries = filter(lambda e, key=key: key in e, entries)
return entries
def subst(field, MIMEtype, filename, plist=[]):


@ -375,7 +375,7 @@ class Folder:
anchor = self._parseindex(head, all)
except Error, msg:
seqs = self.getsequences()
if not seqs.has_key(head):
if not head in seqs:
if not msg:
msg = "bad message list %s" % seq
raise Error, msg, sys.exc_info()[2]
@ -412,7 +412,7 @@ class Folder:
n = self._parseindex(seq, all)
except Error, msg:
seqs = self.getsequences()
if not seqs.has_key(seq):
if not seq in seqs:
if not msg:
msg = "bad message list %s" % seq
raise Error, msg


@ -142,7 +142,7 @@ def decode(input, output, encoding):
return uu.decode(input, output)
if encoding in ('7bit', '8bit'):
return output.write(input.read())
if decodetab.has_key(encoding):
if encoding in decodetab:
pipethrough(input, decodetab[encoding], output)
else:
raise ValueError, \
@ -161,7 +161,7 @@ def encode(input, output, encoding):
return uu.encode(input, output)
if encoding in ('7bit', '8bit'):
return output.write(input.read())
if encodetab.has_key(encoding):
if encoding in encodetab:
pipethrough(input, encodetab[encoding], output)
else:
raise ValueError, \


@ -96,24 +96,24 @@ class MimeTypes:
type = 'text/plain'
return type, None # never compressed, so encoding is None
base, ext = posixpath.splitext(url)
while self.suffix_map.has_key(ext):
while ext in self.suffix_map:
base, ext = posixpath.splitext(base + self.suffix_map[ext])
if self.encodings_map.has_key(ext):
if ext in self.encodings_map:
encoding = self.encodings_map[ext]
base, ext = posixpath.splitext(base)
else:
encoding = None
types_map = self.types_map
common_types = self.common_types
if types_map.has_key(ext):
if ext in types_map:
return types_map[ext], encoding
elif types_map.has_key(ext.lower()):
elif ext.lower() in types_map:
return types_map[ext.lower()], encoding
elif strict:
return None, encoding
elif common_types.has_key(ext):
elif ext in common_types:
return common_types[ext], encoding
elif common_types.has_key(ext.lower()):
elif ext.lower() in common_types:
return common_types[ext.lower()], encoding
else:
return None, encoding


@ -84,9 +84,9 @@ class netrc:
def authenticators(self, host):
"""Return a (user, account, password) tuple for given host."""
if self.hosts.has_key(host):
if host in self.hosts:
return self.hosts[host]
elif self.hosts.has_key('default'):
elif 'default' in self.hosts:
return self.hosts['default']
else:
return None


@ -343,9 +343,9 @@ def expanduser(path):
while i < n and path[i] not in '/\\':
i = i + 1
if i == 1:
if os.environ.has_key('HOME'):
if 'HOME' in os.environ:
userhome = os.environ['HOME']
elif not os.environ.has_key('HOMEPATH'):
elif not 'HOMEPATH' in os.environ:
return path
else:
try:
@ -399,7 +399,7 @@ def expandvars(path):
try:
index = path.index('}')
var = path[:index]
if os.environ.has_key(var):
if var in os.environ:
res = res + os.environ[var]
except ValueError:
res = res + path
@ -412,7 +412,7 @@ def expandvars(path):
var = var + c
index = index + 1
c = path[index:index + 1]
if os.environ.has_key(var):
if var in os.environ:
res = res + os.environ[var]
if c != '':
res = res + c


@ -333,7 +333,7 @@ def _execvpe(file, args, env=None):
if head:
apply(func, (file,) + argrest)
return
if env.has_key('PATH'):
if 'PATH' in env:
envpath = env['PATH']
else:
envpath = defpath
@ -406,7 +406,9 @@ else:
unsetenv(key)
del self.data[key.upper()]
def has_key(self, key):
return self.data.has_key(key.upper())
return key.upper() in self.data
def __contains__(self, key):
return key.upper() in self.data
def get(self, key, failobj=None):
return self.data.get(key.upper(), failobj)
def update(self, dict):


@ -291,9 +291,9 @@ def expanduser(path):
while i < n and path[i] not in '/\\':
i = i + 1
if i == 1:
if os.environ.has_key('HOME'):
if 'HOME' in os.environ:
userhome = os.environ['HOME']
elif not os.environ.has_key('HOMEPATH'):
elif not 'HOMEPATH' in os.environ:
return path
else:
try:
@ -347,7 +347,7 @@ def expandvars(path):
try:
index = path.index('}')
var = path[:index]
if os.environ.has_key(var):
if var in os.environ:
res = res + os.environ[var]
except ValueError:
res = res + path
@ -360,7 +360,7 @@ def expandvars(path):
var = var + c
index = index + 1
c = path[index:index + 1]
if os.environ.has_key(var):
if var in os.environ:
res = res + os.environ[var]
if c != '':
res = res + c


@ -58,7 +58,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
# Read $HOME/.pdbrc and ./.pdbrc
self.rcLines = []
if os.environ.has_key('HOME'):
if 'HOME' in os.environ:
envHome = os.environ['HOME']
try:
rcFile = open(os.path.join(envHome, ".pdbrc"))
@ -154,7 +154,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
if not line:
return line
args = line.split()
while self.aliases.has_key(args[0]):
while args[0] in self.aliases:
line = self.aliases[args[0]]
ii = 1
for tmpArg in args[1:]:
@ -509,12 +509,12 @@ class Pdb(bdb.Bdb, cmd.Cmd):
for i in range(n):
name = co.co_varnames[i]
print name, '=',
if dict.has_key(name): print dict[name]
if name in dict: print dict[name]
else: print "*** undefined ***"
do_a = do_args
def do_retval(self, arg):
if self.curframe.f_locals.has_key('__return__'):
if '__return__' in self.curframe.f_locals:
print self.curframe.f_locals['__return__']
else:
print '*** Not yet returned!'
@ -614,7 +614,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
for alias in keys:
print "%s = %s" % (alias, self.aliases[alias])
return
if self.aliases.has_key(args[0]) and len (args) == 1:
if args[0] in self.aliases and len (args) == 1:
print "%s = %s" % (args[0], self.aliases[args[0]])
else:
self.aliases[args[0]] = ' '.join(args[1:])
@ -622,7 +622,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
def do_unalias(self, arg):
args = arg.split()
if len(args) == 0: return
if self.aliases.has_key(args[0]):
if args[0] in self.aliases:
del self.aliases[args[0]]
# Print a traceback starting at the top stack frame.


@ -206,7 +206,7 @@ class Pickler:
self.save_tuple(object)
return
if memo.has_key(d):
if d in memo:
self.write(self.get(memo[d][0]))
return
@ -430,7 +430,7 @@ class Pickler:
for element in object:
save(element)
if len(object) and memo.has_key(d):
if len(object) and d in memo:
if self.bin:
write(POP_MARK + self.get(memo[d][0]))
return
@ -620,7 +620,7 @@ def whichmodule(cls, clsname):
Return a module name.
If the class cannot be found, return __main__.
"""
if classmap.has_key(cls):
if cls in classmap:
return classmap[cls]
for name, module in sys.modules.items():
@ -913,7 +913,7 @@ class Unpickler:
del stack[-2:]
if type(callable) is not ClassType:
if not safe_constructors.has_key(callable):
if not callable in safe_constructors:
try:
safe = callable.__safe_for_unpickling__
except AttributeError:


@ -305,7 +305,7 @@ def expanduser(path):
while i < n and path[i] != '/':
i = i + 1
if i == 1:
if not os.environ.has_key('HOME'):
if not 'HOME' in os.environ:
return path
userhome = os.environ['HOME']
else:
@ -343,7 +343,7 @@ def expandvars(path):
name = m.group(1)
if name[:1] == '{' and name[-1:] == '}':
name = name[1:-1]
if os.environ.has_key(name):
if name in os.environ:
tail = path[j:]
path = path[:i] + os.environ[name]
i = len(path)


@ -268,7 +268,7 @@ class Profile:
fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
self.cur = (t, 0, 0, fn, frame, self.cur)
timings = self.timings
if timings.has_key(fn):
if fn in timings:
cc, ns, tt, ct, callers = timings[fn]
timings[fn] = cc, ns + 1, tt, ct, callers
else:
@ -300,7 +300,7 @@ class Profile:
ct = ct + frame_total
cc = cc + 1
if callers.has_key(pfn):
if pfn in callers:
callers[pfn] = callers[pfn] + 1 # hack: gather more
# stats such as the amount of time added to ct courtesy
# of this specific call, and the contribution to cc


@ -151,7 +151,7 @@ class Stats:
self.fcn_list = None
for func in other.stats.keys():
if self.stats.has_key(func):
if func in self.stats:
old_func_stat = self.stats[func]
else:
old_func_stat = (0, 0, 0, 0, {},)
@ -183,7 +183,7 @@ class Stats:
while fragment:
if not fragment:
break
if dict.has_key(fragment):
if fragment in dict:
bad_list[fragment] = 0
break
dict[fragment] = self.sort_arg_dict_default[word]
@ -243,7 +243,7 @@ class Stats:
for func2 in callers.keys():
newcallers[func_strip_path(func2)] = callers[func2]
if newstats.has_key(newfunc):
if newfunc in newstats:
newstats[newfunc] = add_func_stats(
newstats[newfunc],
(cc, nc, tt, ct, newcallers))
@ -264,11 +264,11 @@ class Stats:
if self.all_callees: return
self.all_callees = all_callees = {}
for func in self.stats.keys():
if not all_callees.has_key(func):
if not func in all_callees:
all_callees[func] = {}
cc, nc, tt, ct, callers = self.stats[func]
for func2 in callers.keys():
if not all_callees.has_key(func2):
if not func2 in all_callees:
all_callees[func2] = {}
all_callees[func2][func] = callers[func2]
return
@ -354,7 +354,7 @@ class Stats:
self.print_call_heading(width, "called...")
for func in list:
if self.all_callees.has_key(func):
if func in self.all_callees:
self.print_call_line(width, func, self.all_callees[func])
else:
self.print_call_line(width, func, {})
@ -471,7 +471,7 @@ def add_callers(target, source):
for func in target.keys():
new_callers[func] = target[func]
for func in source.keys():
if new_callers.has_key(func):
if func in new_callers:
new_callers[func] = source[func] + new_callers[func]
else:
new_callers[func] = source[func]


@ -171,7 +171,7 @@ def readmodule_ex(module, path=[], inpackage=0):
child = readmodule_ex(submodule, parent['__path__'], 1)
return child
if _modules.has_key(module):
if module in _modules:
# we've seen this module before...
return _modules[module]
if module in sys.builtin_module_names:
@ -265,7 +265,7 @@ def readmodule_ex(module, path=[], inpackage=0):
names = []
for n in inherit.split(','):
n = n.strip()
if dict.has_key(n):
if n in dict:
# we know this super class
n = dict[n]
else:
@ -278,9 +278,9 @@ def readmodule_ex(module, path=[], inpackage=0):
# module for class
m = c[-2]
c = c[-1]
if _modules.has_key(m):
if m in _modules:
d = _modules[m]
if d.has_key(c):
if c in d:
n = d[c]
names.append(n)
inherit = names
@ -316,7 +316,7 @@ def readmodule_ex(module, path=[], inpackage=0):
# were mentioned in the list
for n in names:
n = n.strip()
if d.has_key(n):
if n in d:
dict[n] = d[n]
elif n == '*':
# only add a name if not
@ -326,7 +326,7 @@ def readmodule_ex(module, path=[], inpackage=0):
# start with _
for n in d.keys():
if n[0] != '_' and \
not dict.has_key(n):
not n in dict:
dict[n] = d[n]
else:
assert 0, "regexp _getnext found something unexpected"


@ -223,7 +223,7 @@ def safeimport(path, forceload=0, cache={}):
package path is specified, the module at the end of the path is returned,
not the package at the beginning. If the optional 'forceload' argument
is 1, we reload the module from disk (unless it's a dynamic extension)."""
if forceload and sys.modules.has_key(path):
if forceload and path in sys.modules:
# This is the only way to be sure. Checking the mtime of the file
# isn't good enough (e.g. what if the module contains a class that
# inherits from another module that has changed?).
@ -241,7 +241,7 @@ def safeimport(path, forceload=0, cache={}):
except:
# Did the error occur before or after the module was found?
(exc, value, tb) = info = sys.exc_info()
if sys.modules.has_key(path):
if path in sys.modules:
# An error occured while executing the imported module.
raise ErrorDuringImport(sys.modules[path].__file__, info)
elif exc is SyntaxError:
@ -403,7 +403,7 @@ TT { font-family: lucidatypewriter, lucida console, courier }
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if dict.has_key(name):
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
@ -536,7 +536,7 @@ TT { font-family: lucidatypewriter, lucida console, courier }
module = sys.modules.get(modname)
if modname != name and module and hasattr(module, key):
if getattr(module, key) is base:
if not cdict.has_key(key):
if not key in cdict:
cdict[key] = cdict[base] = modname + '.html#' + key
funcs, fdict = [], {}
for key, value in inspect.getmembers(object, inspect.isroutine):
@ -778,7 +778,7 @@ TT { font-family: lucidatypewriter, lucida console, courier }
if name == realname:
title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
else:
if (cl and cl.__dict__.has_key(realname) and
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
reallink = '<a href="#%s">%s</a>' % (
cl.__name__ + '-' + realname, realname)
@ -822,8 +822,8 @@ TT { font-family: lucidatypewriter, lucida console, courier }
def found(name, ispackage,
modpkgs=modpkgs, shadowed=shadowed, seen=seen):
if not seen.has_key(name):
modpkgs.append((name, '', ispackage, shadowed.has_key(name)))
if not name in seen:
modpkgs.append((name, '', ispackage, name in shadowed))
seen[name] = 1
shadowed[name] = 1
@ -1140,7 +1140,7 @@ class TextDoc(Doc):
if name == realname:
title = self.bold(realname)
else:
if (cl and cl.__dict__.has_key(realname) and
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
skipdocs = 1
title = self.bold(name) + ' = ' + realname
@ -1189,7 +1189,7 @@ def getpager():
return plainpager
if os.environ.get('TERM') in ['dumb', 'emacs']:
return plainpager
if os.environ.has_key('PAGER'):
if 'PAGER' in os.environ:
if sys.platform == 'win32': # pipes completely broken in Windows
return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
elif os.environ.get('TERM') in ['dumb', 'emacs']:
@ -1375,7 +1375,7 @@ def writedocs(dir, pkgpath='', done=None):
modname = inspect.getmodulename(path)
if modname:
modname = pkgpath + modname
if not done.has_key(modname):
if not modname in done:
done[modname] = 1
writedoc(modname)
@ -1546,8 +1546,8 @@ has the same effect as typing a particular string at the help> prompt.
elif request == 'modules': self.listmodules()
elif request[:8] == 'modules ':
self.listmodules(split(request)[1])
elif self.keywords.has_key(request): self.showtopic(request)
elif self.topics.has_key(request): self.showtopic(request)
elif request in self.keywords: self.showtopic(request)
elif request in self.topics: self.showtopic(request)
elif request: doc(request, 'Help on %s:')
elif isinstance(request, Helper): self()
else: doc(request, 'Help on %s:')
@ -1740,7 +1740,7 @@ class ModuleScanner(Scanner):
modname = inspect.getmodulename(path)
if os.path.isfile(path) and modname:
modname = package + (package and '.') + modname
if not seen.has_key(modname):
if not modname in seen:
seen[modname] = 1 # if we see spam.py, skip spam.pyc
if key is None:
callback(path, modname, '')


@ -134,7 +134,7 @@ def compile(pat):
if type(pat) != type(''):
return pat # Assume it is a compiled regex
key = (pat, regex.get_syntax())
if cache.has_key(key):
if key in cache:
prog = cache[key] # Get it from the cache
else:
prog = cache[key] = regex.compile(pat)


@ -206,7 +206,7 @@ class RExec(ihooks._Verbose):
def load_dynamic(self, name, filename, file):
if name not in self.ok_dynamic_modules:
raise ImportError, "untrusted dynamic module: %s" % name
if sys.modules.has_key(name):
if name in sys.modules:
src = sys.modules[name]
else:
src = imp.load_dynamic(name, filename, file)
@ -288,7 +288,7 @@ class RExec(ihooks._Verbose):
# Add a module -- return an existing module or create one
def add_module(self, mname):
if self.modules.has_key(mname):
if mname in self.modules:
return self.modules[mname]
self.modules[mname] = m = self.hooks.new_module(mname)
m.__builtins__ = self.modules['__builtin__']


@ -406,7 +406,7 @@ class Message:
def __delitem__(self, name):
"""Delete all occurrences of a specific header, if it is present."""
name = name.lower()
if not self.dict.has_key(name):
if not name in self.dict:
return
del self.dict[name]
name = name + ':'
@ -427,14 +427,14 @@ class Message:
def get(self, name, default=""):
name = name.lower()
if self.dict.has_key(name):
if name in self.dict:
return self.dict[name]
else:
return default
def setdefault(self, name, default=""):
lowername = name.lower()
if self.dict.has_key(lowername):
if lowername in self.dict:
return self.dict[lowername]
else:
text = name + ": " + default
@ -446,7 +446,11 @@ class Message:
def has_key(self, name):
"""Determine whether a message contains the named header."""
return self.dict.has_key(name.lower())
return name.lower() in self.dict
def __contains__(self, name):
"""Determine whether a message contains the named header."""
return name.lower() in self.dict
def keys(self):
"""Get all of a message's header field names."""
@ -919,7 +923,7 @@ def parsedate_tz(data):
return None
tzoffset = None
tz = tz.upper()
if _timezones.has_key(tz):
if tz in _timezones:
tzoffset = _timezones[tz]
else:
try:
@ -1010,8 +1014,8 @@ if __name__ == '__main__':
print 'Lines:', n
print '-'*70
print 'len =', len(m)
if m.has_key('Date'): print 'Date =', m['Date']
if m.has_key('X-Nonsense'): pass
if 'Date' in m: print 'Date =', m['Date']
if 'X-Nonsense' in m: pass
print 'keys =', m.keys()
print 'values =', m.values()
print 'items =', m.items()


@ -396,7 +396,7 @@ class SGMLParser(markupbase.ParserBase):
tailored by setting up the self.entitydefs mapping appropriately.
"""
table = self.entitydefs
if table.has_key(name):
if name in table:
self.handle_data(table[name])
else:
self.unknown_entityref(name)


@ -84,7 +84,7 @@ for dir in sys.path:
if dir and not os.path.exists(dir):
continue
dir, dircase = makepath(dir)
if not _dirs_in_sys_path.has_key(dircase):
if not dircase in _dirs_in_sys_path:
L.append(dir)
_dirs_in_sys_path[dircase] = 1
sys.path[:] = L
@ -116,7 +116,7 @@ def addsitedir(sitedir):
else:
reset = 0
sitedir, sitedircase = makepath(sitedir)
if not _dirs_in_sys_path.has_key(sitedircase):
if not sitedircase in _dirs_in_sys_path:
sys.path.append(sitedir) # Add path component
try:
names = os.listdir(sitedir)
@ -153,7 +153,7 @@ def addpackage(sitedir, name):
if dir[-1] == '\n':
dir = dir[:-1]
dir, dircase = makepath(sitedir, dir)
if not _dirs_in_sys_path.has_key(dircase) and os.path.exists(dir):
if not dircase in _dirs_in_sys_path and os.path.exists(dir):
sys.path.append(dir)
_dirs_in_sys_path[dircase] = 1
if reset:


@ -411,7 +411,7 @@ class SMTP:
def has_extn(self, opt):
"""Does the server support a given SMTP service extension?"""
return self.esmtp_features.has_key(opt.lower())
return opt.lower() in self.esmtp_features
def help(self, args=''):
"""SMTP 'help' command.


@ -568,9 +568,9 @@ def _parse(source, state):
continue
else:
# flags
if not FLAGS.has_key(source.next):
if not source.next in FLAGS:
raise error, "unexpected end of pattern"
while FLAGS.has_key(source.next):
while source.next in FLAGS:
state.flags = state.flags | FLAGS[source.get()]
if group:
# parse group contents


@ -16,7 +16,7 @@ __all__ = ["stat","reset","forget","forget_prefix","forget_dir",
# The cache. Keys are pathnames, values are os.stat outcomes.
# Remember that multiple threads may be calling this! So, e.g., that
# cache.has_key(path) returns 1 doesn't mean the cache will still contain
# path in cache returns 1 doesn't mean the cache will still contain
# path on the next line. Code defensively.
cache = {}


@ -63,7 +63,7 @@ def _gettempdir_inner():
if scrapdir:
attempdirs.insert(0, scrapdir)
for envname in 'TMPDIR', 'TEMP', 'TMP':
if os.environ.has_key(envname):
if envname in os.environ:
attempdirs.insert(0, os.environ[envname])
testfile = gettempprefix() + 'test'
for dir in attempdirs:


@ -95,7 +95,7 @@ def _toaiff(filename, temps):
raise error, filename + ': ' + msg
if ftype == 'aiff':
return fname
if ftype is None or not table.has_key(ftype):
if ftype is None or not ftype in table:
raise error, \
filename + ': unsupported audio file type ' + `ftype`
temp = tempfile.mktemp()


@ -153,14 +153,14 @@ class URLopener:
def open(self, fullurl, data=None):
"""Use URLopener().open(file) instead of open(file, 'r')."""
fullurl = unwrap(toBytes(fullurl))
if self.tempcache and self.tempcache.has_key(fullurl):
if self.tempcache and fullurl in self.tempcache:
filename, headers = self.tempcache[fullurl]
fp = open(filename, 'rb')
return addinfourl(fp, headers, fullurl)
urltype, url = splittype(fullurl)
if not urltype:
urltype = 'file'
if self.proxies.has_key(urltype):
if urltype in self.proxies:
proxy = self.proxies[urltype]
urltype, proxyhost = splittype(proxy)
host, selector = splithost(proxyhost)
@ -200,7 +200,7 @@ class URLopener:
"""retrieve(url) returns (filename, None) for a local object
or (tempfilename, headers) for a remote object."""
url = unwrap(toBytes(url))
if self.tempcache and self.tempcache.has_key(url):
if self.tempcache and url in self.tempcache:
return self.tempcache[url]
type, url1 = splittype(url)
if not filename and (not type or type == 'file'):
@ -230,7 +230,7 @@ class URLopener:
size = -1
blocknum = 1
if reporthook:
if headers.has_key("content-length"):
if "content-length" in headers:
size = int(headers["Content-Length"])
reporthook(0, bs, size)
block = fp.read(bs)
@ -473,7 +473,7 @@ class URLopener:
del self.ftpcache[k]
v.close()
try:
if not self.ftpcache.has_key(key):
if not key in self.ftpcache:
self.ftpcache[key] = \
ftpwrapper(user, passwd, host, port, dirs)
if not file: type = 'D'
@ -566,9 +566,9 @@ class FancyURLopener(URLopener):
return result
def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
if headers.has_key('location'):
if 'location' in headers:
newurl = headers['location']
elif headers.has_key('uri'):
elif 'uri' in headers:
newurl = headers['uri']
else:
return
@ -589,7 +589,7 @@ class FancyURLopener(URLopener):
"""Error 401 -- authentication required.
See this URL for a description of the basic authentication scheme:
http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt"""
if not headers.has_key('www-authenticate'):
if not 'www-authenticate' in headers:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
stuff = headers['www-authenticate']
@ -633,7 +633,7 @@ class FancyURLopener(URLopener):
def get_user_passwd(self, host, realm, clear_cache = 0):
key = realm + '@' + host.lower()
if self.auth_cache.has_key(key):
if key in self.auth_cache:
if clear_cache:
del self.auth_cache[key]
else:
@ -1108,7 +1108,7 @@ def _fast_quote(s):
res = list(s)
for i in range(len(res)):
c = res[i]
if not _fast_safe.has_key(c):
if not c in _fast_safe:
res[i] = '%%%02X' % ord(c)
return ''.join(res)
@ -1253,7 +1253,7 @@ if os.name == 'mac':
return {}
proxies = {}
# HTTP:
if config.has_key('UseHTTPProxy') and config['UseHTTPProxy']:
if 'UseHTTPProxy' in config and config['UseHTTPProxy']:
try:
value = config['HTTPProxyHost']
except ic.error:


@ -255,7 +255,7 @@ class OpenerDirector:
for meth in dir(handler):
if meth[-5:] == '_open':
protocol = meth[:-5]
if self.handle_open.has_key(protocol):
if protocol in self.handle_open:
self.handle_open[protocol].append(handler)
else:
self.handle_open[protocol] = [handler]
@ -271,7 +271,7 @@ class OpenerDirector:
except ValueError:
pass
dict = self.handle_error.get(proto, {})
if dict.has_key(kind):
if kind in dict:
dict[kind].append(handler)
else:
dict[kind] = [handler]
@ -404,9 +404,9 @@ class HTTPRedirectHandler(BaseHandler):
# have already seen. Do this by adding a handler-specific
# attribute to the Request object.
def http_error_302(self, req, fp, code, msg, headers):
if headers.has_key('location'):
if 'location' in headers:
newurl = headers['location']
elif headers.has_key('uri'):
elif 'uri' in headers:
newurl = headers['uri']
else:
return
@ -419,7 +419,7 @@ class HTTPRedirectHandler(BaseHandler):
new.error_302_dict = {}
if hasattr(req, 'error_302_dict'):
if len(req.error_302_dict)>10 or \
req.error_302_dict.has_key(newurl):
newurl in req.error_302_dict:
raise HTTPError(req.get_full_url(), code,
self.inf_msg + msg, headers, fp)
new.error_302_dict.update(req.error_302_dict)
@ -505,7 +505,7 @@ class CustomProxyHandler(BaseHandler):
return self.parent.open(req)
def add_proxy(self, cpo):
if self.proxies.has_key(cpo.proto):
if cpo.proto in self.proxies:
self.proxies[cpo.proto].append(cpo)
else:
self.proxies[cpo.proto] = [cpo]
@ -519,7 +519,7 @@ class HTTPPasswordMgr:
if isinstance(uri, types.StringTypes):
uri = [uri]
uri = tuple(map(self.reduce_uri, uri))
if not self.passwd.has_key(realm):
if not realm in self.passwd:
self.passwd[realm] = {}
self.passwd[realm][uri] = (user, passwd)
@ -751,10 +751,10 @@ class AbstractHTTPHandler(BaseHandler):
if req.has_data():
data = req.get_data()
h.putrequest('POST', req.get_selector())
if not req.headers.has_key('Content-type'):
if not 'Content-type' in req.headers:
h.putheader('Content-type',
'application/x-www-form-urlencoded')
if not req.headers.has_key('Content-length'):
if not 'Content-length' in req.headers:
h.putheader('Content-length', '%d' % len(data))
else:
h.putrequest('GET', req.get_selector())
@ -954,7 +954,7 @@ class CacheFTPHandler(FTPHandler):
def connect_ftp(self, user, passwd, host, port, dirs):
key = user, passwd, host, port
if self.cache.has_key(key):
if key in self.cache:
self.timeout[key] = time.time() + self.delay
else:
self.cache[key] = ftpwrapper(user, passwd, host, port, dirs)


@ -24,11 +24,11 @@ wishes to do different things depending on the Python version.
import os
home = os.curdir # Default
if os.environ.has_key('HOME'):
if 'HOME' in os.environ:
home = os.environ['HOME']
elif os.name == 'nt': # Contributed by Jeff Bauer
if os.environ.has_key('HOMEPATH'):
if os.environ.has_key('HOMEDRIVE'):
if 'HOMEPATH' in os.environ:
if 'HOMEDRIVE' in os.environ:
home = os.environ['HOMEDRIVE'] + os.environ['HOMEPATH']
else:
home = os.environ['HOMEPATH']


@ -27,7 +27,7 @@ def warn(message, category=None, stacklevel=1):
else:
globals = caller.f_globals
lineno = caller.f_lineno
if globals.has_key('__name__'):
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"


@ -183,7 +183,14 @@ class WeakKeyDictionary(UserDict.UserDict):
wr = ref(key)
except TypeError:
return 0
return self.data.has_key(wr)
return wr in self.data
def __contains__(self, key):
try:
wr = ref(key)
except TypeError:
return 0
return wr in self.data
def items(self):
L = []


@ -312,19 +312,19 @@ if sys.platform[:3] == "os2" and _iscommand("netscape.exe"):
# OK, now that we know what the default preference orders for each
# platform are, allow user to override them with the BROWSER variable.
#
if os.environ.has_key("BROWSER"):
if "BROWSER" in os.environ:
# It's the user's responsibility to register handlers for any unknown
# browser referenced by this value, before calling open().
_tryorder = os.environ["BROWSER"].split(os.pathsep)
for cmd in _tryorder:
if not _browsers.has_key(cmd.lower()):
if not cmd.lower() in _browsers:
if _iscommand(cmd.lower()):
register(cmd.lower(), None, GenericBrowser(
"%s '%%s'" % cmd.lower()))
cmd = None # to make del work if _tryorder was empty
del cmd
_tryorder = filter(lambda x: _browsers.has_key(x.lower())
_tryorder = filter(lambda x: x.lower() in _browsers
or x.find("%s") > -1, _tryorder)
# what to do if _tryorder is now empty?


@ -102,15 +102,15 @@ class XMLParser:
# Interface -- initialize and reset this instance
def __init__(self, **kw):
self.__fixed = 0
if kw.has_key('accept_unquoted_attributes'):
if 'accept_unquoted_attributes' in kw:
self.__accept_unquoted_attributes = kw['accept_unquoted_attributes']
if kw.has_key('accept_missing_endtag_name'):
if 'accept_missing_endtag_name' in kw:
self.__accept_missing_endtag_name = kw['accept_missing_endtag_name']
if kw.has_key('map_case'):
if 'map_case' in kw:
self.__map_case = kw['map_case']
if kw.has_key('accept_utf8'):
if 'accept_utf8' in kw:
self.__accept_utf8 = kw['accept_utf8']
if kw.has_key('translate_attribute_references'):
if 'translate_attribute_references' in kw:
self.__translate_attribute_references = kw['translate_attribute_references']
self.reset()
@ -206,7 +206,7 @@ class XMLParser:
self.syntax_error("`;' missing after char reference")
i = i-1
elif all:
if self.entitydefs.has_key(str):
if str in self.entitydefs:
str = self.entitydefs[str]
rescan = 1
elif data[i - 1] != ';':
@ -375,7 +375,7 @@ class XMLParser:
name = res.group('name')
if self.__map_case:
name = name.lower()
if self.entitydefs.has_key(name):
if name in self.entitydefs:
self.rawdata = rawdata = rawdata[:res.start(0)] + self.entitydefs[name] + rawdata[i:]
n = len(rawdata)
i = res.start(0)
@ -533,15 +533,15 @@ class XMLParser:
if namespace:
self.syntax_error('namespace declaration inside namespace declaration')
for attrname in attrdict.keys():
if not self.__xml_namespace_attributes.has_key(attrname):
if not attrname in self.__xml_namespace_attributes:
self.syntax_error("unknown attribute `%s' in xml:namespace tag" % attrname)
if not attrdict.has_key('ns') or not attrdict.has_key('prefix'):
if not 'ns' in attrdict or not 'prefix' in attrdict:
self.syntax_error('xml:namespace without required attributes')
prefix = attrdict.get('prefix')
if ncname.match(prefix) is None:
self.syntax_error('xml:namespace illegal prefix value')
return end.end(0)
if self.__namespaces.has_key(prefix):
if prefix in self.__namespaces:
self.syntax_error('xml:namespace prefix not unique')
self.__namespaces[prefix] = attrdict['ns']
else:
@ -581,7 +581,7 @@ class XMLParser:
continue
if '<' in attrvalue:
self.syntax_error("`<' illegal in attribute value")
if attrdict.has_key(attrname):
if attrname in attrdict:
self.syntax_error("attribute `%s' specified twice" % attrname)
attrvalue = attrvalue.translate(attrtrans)
attrdict[attrname] = self.translate_references(attrvalue)
@ -619,7 +619,7 @@ class XMLParser:
prefix = ''
ns = None
for t, d, nst in self.stack:
if d.has_key(prefix):
if prefix in d:
ns = d[prefix]
if ns is None and prefix != '':
ns = self.__namespaces.get(prefix)
@ -645,7 +645,7 @@ class XMLParser:
aprefix = ''
ans = None
for t, d, nst in self.stack:
if d.has_key(aprefix):
if aprefix in d:
ans = d[aprefix]
if ans is None and aprefix != '':
ans = self.__namespaces.get(aprefix)
@ -661,10 +661,10 @@ class XMLParser:
attributes = self.attributes.get(nstag)
if attributes is not None:
for key in attrdict.keys():
if not attributes.has_key(key):
if not key in attributes:
self.syntax_error("unknown attribute `%s' in tag `%s'" % (attrnamemap[key], tagname))
for key, val in attributes.items():
if val is not None and not attrdict.has_key(key):
if val is not None and not key in attrdict:
attrdict[key] = val
method = self.elements.get(nstag, (None, None))[0]
self.finish_starttag(nstag, attrdict, method)


@ -496,7 +496,7 @@ class Marshaller:
def opencontainer(self, value):
if value:
i = id(value)
if self.memo.has_key(i):
if i in self.memo:
raise TypeError, "cannot marshal recursive data structures"
self.memo[i] = None


@ -354,7 +354,7 @@ class ZipFile:
def _writecheck(self, zinfo):
"""Check for errors before writing a file to the archive."""
if self.NameToInfo.has_key(zinfo.filename):
if zinfo.filename in self.NameToInfo:
if self.debug: # Warning for duplicate names
print "Duplicate name:", zinfo.filename
if self.mode not in ("w", "a"):