mirror of https://github.com/python/cpython.git
synced 2025-08-15 22:30:42 +00:00
Merged revisions 86925 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/py3k

........
  r86925 | r.david.murray | 2010-12-01 21:58:07 -0500 (Wed, 01 Dec 2010) | 4 lines

  #10464: fix netrc handling of lines with embedded '#' characters.
  Patch by Xuanji Li.
........
This commit is contained in:
parent 27f5a7e462
commit d75cc91647

4 changed files with 23 additions and 7 deletions
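The patch below clears '#' from the shlex commenters set and skips full-line comments by hand, so a password containing '#' is no longer truncated. As a rough illustration of the fixed behaviour (the machine name, credentials, and temporary file here are made up for the example and do not appear in the patch):

import netrc
import os
import tempfile

# Hypothetical .netrc content: a full-line comment plus a password that
# contains '#'.  Before this fix the shlex-based parser treated '#' as a
# comment marker and returned only the part of the password before it.
sample = """\
# this is a comment
machine example.org login alice password se#ret account acct1
"""

fd, path = tempfile.mkstemp()
try:
    with os.fdopen(fd, 'w') as fp:
        fp.write(sample)
    info = netrc.netrc(path)
    # netrc.hosts maps machine name -> (login, account, password)
    print(info.hosts['example.org'])   # expected: ('alice', 'acct1', 'se#ret')
finally:
    os.unlink(path)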
Lib/netrc.py
@@ -34,11 +34,15 @@ class netrc:
     def _parse(self, file, fp):
         lexer = shlex.shlex(fp)
         lexer.wordchars += r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
+        lexer.commenters = lexer.commenters.replace('#', '')
         while 1:
             # Look for a machine, default, or macdef top-level keyword
             toplevel = tt = lexer.get_token()
             if not tt:
                 break
+            elif tt[0] == '#':
+                fp.readline();
+                continue;
             elif tt == 'machine':
                 entryname = lexer.get_token()
             elif tt == 'default':
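For reference, this is the shlex behaviour the hunk above works around: with the default commenters setting, '#' starts a comment and the rest of the line is discarded, so a token such as pass# comes back as just pass. The snippet below only illustrates that mechanism, reusing the sample line from the new test data:

import shlex
try:
    from StringIO import StringIO   # Python 2
except ImportError:
    from io import StringIO         # Python 3

raw = "machine bar login log1 password pass# account acct1\n"

# Default shlex: '#' is a commenter, so tokenizing stops at the '#'
# and the password token is truncated to 'pass'.
plain = shlex.shlex(StringIO(raw))
print(list(plain))

# With the patched settings -- '#' added to wordchars and removed from
# commenters -- 'pass#' survives as a single token.
patched = shlex.shlex(StringIO(raw))
patched.wordchars += r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
patched.commenters = patched.commenters.replace('#', '')
print(list(patched))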
Lib/test/test_netrc.py
@@ -3,7 +3,13 @@ import netrc, os, unittest, sys
 from test import test_support
 
 TEST_NETRC = """
+
+ #this is a comment
+#this is a comment
+# this is a comment
+
 machine foo login log1 password pass1 account acct1
+machine bar login log1 password pass# account acct1
 
 macdef macro1
 line1
@@ -28,18 +34,21 @@ class NetrcTestCase(unittest.TestCase):
         fp = open(temp_filename, mode)
         fp.write(TEST_NETRC)
         fp.close()
-        self.netrc = netrc.netrc(temp_filename)
+        self.nrc = netrc.netrc(temp_filename)
 
     def tearDown (self):
-        del self.netrc
         os.unlink(temp_filename)
 
     def test_case_1(self):
-        self.assertTrue(self.netrc.macros == {'macro1':['line1\n', 'line2\n'],
-                                              'macro2':['line3\n', 'line4\n']}
-                                              )
-        self.assertTrue(self.netrc.hosts['foo'] == ('log1', 'acct1', 'pass1'))
-        self.assertTrue(self.netrc.hosts['default'] == ('log2', None, 'pass2'))
+        self.assertEqual(self.nrc.hosts['foo'], ('log1', 'acct1', 'pass1'))
+        self.assertEqual(self.nrc.hosts['default'], ('log2', None, 'pass2'))
+
+    def test_macros(self):
+        self.assertEqual(self.nrc.macros, {'macro1':['line1\n', 'line2\n'],
+                                           'macro2':['line3\n', 'line4\n']})
+
+    def test_parses_passwords_with_hash_character(self):
+        self.assertEqual(self.nrc.hosts['bar'], ('log1', 'acct1', 'pass#'))
 
 def test_main():
     test_support.run_unittest(NetrcTestCase)
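If needed, the updated tests can be exercised directly through the module's test_main() hook from a checkout where the test package is importable, for example:

from test import test_netrc
test_netrc.test_main()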
Misc/ACKS
@@ -472,6 +472,7 @@ John Lenton
 Christopher Tur Lesniewski-Laas
 Mark Levinson
 William Lewis
+Xuanji Li
 Robert van Liere
 Ross Light
 Shawn Ligocki
Misc/NEWS
@@ -13,6 +13,8 @@ Core and Builtins
 Library
 -------
 
+- Issue #10464: netrc now correctly handles lines with embedded '#' characters.
+
 
 What's New in Python 2.7.1?
 ===========================