Mirror of https://github.com/python/cpython.git (synced 2025-08-04 00:48:58 +00:00)
merge from 3.3
Issue #12411: Fix to cgi.parse_multipart to correctly use bytes boundaries and bytes data. Patch by Jonas Wagner.
commit 42d8773df4
3 changed files with 30 additions and 9 deletions
Lib/cgi.py: 18 lines changed
@@ -223,17 +223,17 @@ def parse_multipart(fp, pdict):
     """
     import http.client
 
-    boundary = ""
+    boundary = b""
     if 'boundary' in pdict:
         boundary = pdict['boundary']
     if not valid_boundary(boundary):
         raise ValueError('Invalid boundary in multipart form: %r'
                             % (boundary,))
 
-    nextpart = "--" + boundary
-    lastpart = "--" + boundary + "--"
+    nextpart = b"--" + boundary
+    lastpart = b"--" + boundary + b"--"
     partdict = {}
-    terminator = ""
+    terminator = b""
 
     while terminator != lastpart:
         bytes = -1
@@ -252,7 +252,7 @@ def parse_multipart(fp, pdict):
                     raise ValueError('Maximum content length exceeded')
                 data = fp.read(bytes)
             else:
-                data = ""
+                data = b""
         # Read lines until end of part.
         lines = []
         while 1:
@@ -260,7 +260,7 @@ def parse_multipart(fp, pdict):
             if not line:
                 terminator = lastpart # End outer loop
                 break
-            if line.startswith("--"):
+            if line.startswith(b"--"):
                 terminator = line.rstrip()
                 if terminator in (nextpart, lastpart):
                     break
@@ -272,12 +272,12 @@ def parse_multipart(fp, pdict):
             if lines:
                 # Strip final line terminator
                 line = lines[-1]
-                if line[-2:] == "\r\n":
+                if line[-2:] == b"\r\n":
                     line = line[:-2]
-                elif line[-1:] == "\n":
+                elif line[-1:] == b"\n":
                     line = line[:-1]
                 lines[-1] = line
-                data = "".join(lines)
+                data = b"".join(lines)
         line = headers['content-disposition']
         if not line:
             continue
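For reference, a minimal sketch (not part of this commit) of calling parse_multipart after the change. It assumes a bytes-oriented file object and a bytes boundary in pdict; the Content-Type value, field name, sample body, and expected result are illustrative.

    import io
    import cgi

    # Parse the request's Content-Type header; parse_header returns str values.
    ctype = 'multipart/form-data; boundary=BOUNDARY'
    key, pdict = cgi.parse_header(ctype)

    # After this fix, parse_multipart builds its part markers as bytes
    # (b"--" + boundary), so the boundary must be supplied as bytes.
    pdict['boundary'] = pdict['boundary'].encode('ascii')

    body = (b'--BOUNDARY\r\n'
            b'Content-Disposition: form-data; name="field1"\r\n'
            b'\r\n'
            b'value1\r\n'
            b'--BOUNDARY--\r\n')

    fields = cgi.parse_multipart(io.BytesIO(body), pdict)
    print(fields)   # expected with this version of the module: {'field1': [b'value1']}

With the pre-fix code, the str markers ("--" + boundary) could never match the bytes lines read from a binary stream, so the parser failed to recognize part boundaries; the patch makes all comparisons and concatenations operate on bytes.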