Mirror of https://github.com/anthropics/claude-code-sdk-python.git (synced 2025-07-07 14:45:00 +00:00)
chore: Remove obvious comments from code

Removed redundant comments that simply restate what the code is doing. Kept only comments that provide valuable context or explain complex behavior.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
parent 3ab62b617d
commit 1791031d20

2 changed files with 6 additions and 34 deletions
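To make the commit's distinction concrete before the hunks: the first comment below is one this diff deletes, since it only restates the statement it sits on; the second is a hypothetical example (the diff contains only deletions) of the kind of "why" comment the message says was kept.

    # Removed by this commit (restates the code):
    json_buffer += json_line  # Add to buffer

    # Worth keeping (illustrative, not from this diff; explains why):
    json_buffer += json_line  # a single stream read may carry only a fragment of one JSON object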
@@ -184,7 +184,6 @@ class SubprocessCLITransport(Transport):
         async with anyio.create_task_group() as tg:
             tg.start_soon(read_stderr)

-            # Buffer for incomplete JSON
             json_buffer = ""

             try:
@@ -193,7 +192,6 @@ class SubprocessCLITransport(Transport):
                     if not line_str:
                         continue

-                    # Split on newlines in case multiple JSON objects are buffered together
                     json_lines = line_str.split("\n")

                     for json_line in json_lines:
@@ -201,12 +199,10 @@ class SubprocessCLITransport(Transport):
                         if not json_line:
                             continue

-                        # Add to buffer
                         json_buffer += json_line

-                        # Check buffer size
                         if len(json_buffer) > _MAX_BUFFER_SIZE:
-                            json_buffer = ""  # Clear buffer to prevent repeated errors
+                            json_buffer = ""
                             raise SDKJSONDecodeError(
                                 f"JSON message exceeded maximum buffer size of {_MAX_BUFFER_SIZE} bytes",
                                 ValueError(
@@ -216,14 +212,12 @@ class SubprocessCLITransport(Transport):

                         try:
                             data = json.loads(json_buffer)
-                            json_buffer = ""  # Clear buffer on successful parse
+                            json_buffer = ""
                             try:
                                 yield data
                             except GeneratorExit:
-                                # Handle generator cleanup gracefully
                                 return
                         except json.JSONDecodeError:
-                            # Continue accumulating in buffer
                             continue

         except anyio.ClosedResourceError:
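That is the last hunk in the transport module; the remaining hunks apply the same comment cleanup to the buffering tests (class TestSubprocessBuffering). For context, the loop these hunks trim accumulates partial JSON across stream reads and splits reads that carry several objects at once. A minimal standalone sketch of that pattern, with simplified names (MAX_BUFFER_SIZE and parse_stream are illustrative, not the SDK's API):

    import json
    from collections.abc import AsyncIterator

    MAX_BUFFER_SIZE = 1024 * 1024  # illustrative cap, not the SDK's actual constant

    async def parse_stream(reads: AsyncIterator[str]) -> AsyncIterator[dict]:
        """Yield each complete JSON object carried by a chunked text stream."""
        buffer = ""
        async for chunk in reads:
            # One read may hold several newline-separated objects, or a fragment of one.
            for piece in chunk.split("\n"):
                piece = piece.strip()
                if not piece:
                    continue
                buffer += piece
                if len(buffer) > MAX_BUFFER_SIZE:
                    buffer = ""  # reset so one oversized message cannot poison later reads
                    raise ValueError("JSON message exceeded maximum buffer size")
                try:
                    data = json.loads(buffer)
                except json.JSONDecodeError:
                    continue  # incomplete so far; keep accumulating
                buffer = ""
                yield data

Note the ordering the diff preserves: the buffer is cleared before raising, which is exactly what the deleted "Clear buffer to prevent repeated errors" comment was pointing at.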
@@ -45,34 +45,27 @@ class TestSubprocessBuffering:
         """

         async def _test() -> None:
-            # Two valid JSON objects separated by a newline character
             json_obj1 = {"type": "message", "id": "msg1", "content": "First message"}
             json_obj2 = {"type": "result", "id": "res1", "status": "completed"}

-            # Simulate buffered output where both objects appear on one line
             buffered_line = json.dumps(json_obj1) + "\n" + json.dumps(json_obj2)

-            # Create transport
             transport = SubprocessCLITransport(
                 prompt="test", options=ClaudeCodeOptions(), cli_path="/usr/bin/claude"
             )

-            # Mock the process and streams
             mock_process = MagicMock()
             mock_process.returncode = None
             mock_process.wait = AsyncMock(return_value=None)
             transport._process = mock_process

-            # Create mock stream that returns the buffered line
             transport._stdout_stream = MockTextReceiveStream([buffered_line])  # type: ignore[assignment]
             transport._stderr_stream = MockTextReceiveStream([])  # type: ignore[assignment]

-            # Collect all messages
             messages: list[Any] = []
             async for msg in transport.receive_messages():
                 messages.append(msg)

-            # Verify both JSON objects were successfully parsed
             assert len(messages) == 2
             assert messages[0]["type"] == "message"
             assert messages[0]["id"] == "msg1"
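These tests drive receive_messages() through MockTextReceiveStream, a helper defined elsewhere in the test suite and not shown in this diff. Since the transport only consumes the stream with async for, a plausible minimal version needs nothing beyond the async-iterator protocol (a sketch under that assumption; the real helper may do more):

    class MockTextReceiveStream:
        """Async-iterable stand-in for the transport's stdout/stderr text streams."""

        def __init__(self, lines: list[str]) -> None:
            self._iter = iter(lines)

        def __aiter__(self) -> "MockTextReceiveStream":
            return self

        async def __anext__(self) -> str:
            try:
                return next(self._iter)
            except StopIteration:
                raise StopAsyncIteration from None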
@@ -87,7 +80,6 @@ class TestSubprocessBuffering:
         """Test parsing JSON objects that contain newline characters in string values."""

         async def _test() -> None:
-            # JSON objects with newlines in string values
             json_obj1 = {"type": "message", "content": "Line 1\nLine 2\nLine 3"}
             json_obj2 = {"type": "result", "data": "Some\nMultiline\nContent"}

@@ -121,7 +113,6 @@ class TestSubprocessBuffering:
             json_obj1 = {"type": "message", "id": "msg1"}
             json_obj2 = {"type": "result", "id": "res1"}

-            # Multiple newlines between objects
             buffered_line = json.dumps(json_obj1) + "\n\n\n" + json.dumps(json_obj2)

             transport = SubprocessCLITransport(
@@ -149,7 +140,6 @@ class TestSubprocessBuffering:
         """Test parsing when a single JSON object is split across multiple stream reads."""

         async def _test() -> None:
-            # Large JSON object that simulates being split
             json_obj = {
                 "type": "assistant",
                 "message": {
@@ -167,7 +157,6 @@ class TestSubprocessBuffering:

             complete_json = json.dumps(json_obj)

-            # Split at arbitrary points to simulate stream chunking
             part1 = complete_json[:100]
             part2 = complete_json[100:250]
             part3 = complete_json[250:]
@@ -187,7 +176,6 @@ class TestSubprocessBuffering:
             async for msg in transport.receive_messages():
                 messages.append(msg)

-            # Should reconstruct the complete JSON
             assert len(messages) == 1
             assert messages[0]["type"] == "assistant"
             assert len(messages[0]["message"]["content"]) == 2
@@ -198,7 +186,6 @@ class TestSubprocessBuffering:
         """Test parsing a large minified JSON (simulating the reported issue)."""

         async def _test() -> None:
-            # Create a large minified JSON similar to what caused the issue
             large_data = {"data": [{"id": i, "value": "x" * 100} for i in range(1000)]}
             json_obj = {
                 "type": "user",
@@ -216,8 +203,7 @@ class TestSubprocessBuffering:

             complete_json = json.dumps(json_obj)

-            # Split into chunks simulating 64KB buffer limit
-            chunk_size = 64 * 1024  # 64KB
+            chunk_size = 64 * 1024
             chunks = [
                 complete_json[i : i + chunk_size]
                 for i in range(0, len(complete_json), chunk_size)
@@ -251,7 +237,6 @@ class TestSubprocessBuffering:
         """Test that exceeding buffer size raises an appropriate error."""

         async def _test() -> None:
-            # Create incomplete JSON larger than buffer limit
             huge_incomplete = '{"data": "' + "x" * (_MAX_BUFFER_SIZE + 1000)

             transport = SubprocessCLITransport(
@@ -270,7 +255,6 @@ class TestSubprocessBuffering:
                 async for msg in transport.receive_messages():
                     messages.append(msg)

-            # The exception is wrapped in ExceptionGroup by anyio
             assert len(exc_info.value.exceptions) == 1
             assert isinstance(exc_info.value.exceptions[0], CLIJSONDecodeError)
             assert "exceeded maximum buffer size" in str(exc_info.value.exceptions[0])
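The exc_info unwrapping above exists because the transport raises from inside anyio.create_task_group(), and anyio surfaces task-group failures as an ExceptionGroup rather than the bare error (as the deleted comment noted). A self-contained sketch of that behavior (assuming Python 3.11+ and anyio 4.x; boom and run_in_group are illustrative names):

    import anyio
    import pytest

    async def boom() -> None:
        raise ValueError("exceeded maximum buffer size")

    async def run_in_group() -> None:
        # Anything raised inside a task group is re-raised wrapped in an ExceptionGroup.
        async with anyio.create_task_group() as tg:
            tg.start_soon(boom)

    def test_group_wrapping() -> None:
        with pytest.raises(ExceptionGroup) as exc_info:
            anyio.run(run_in_group)
        assert len(exc_info.value.exceptions) == 1
        assert isinstance(exc_info.value.exceptions[0], ValueError)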
@@ -281,27 +265,21 @@ class TestSubprocessBuffering:
         """Test handling a mix of complete and split JSON messages."""

         async def _test() -> None:
-            # First: complete JSON
             msg1 = json.dumps({"type": "system", "subtype": "start"})

-            # Second: large JSON split across reads
             large_msg = {
                 "type": "assistant",
                 "message": {"content": [{"type": "text", "text": "y" * 5000}]},
             }
             large_json = json.dumps(large_msg)

-            # Third: another complete JSON
             msg3 = json.dumps({"type": "system", "subtype": "end"})

-            # Simulate streaming with mixed complete and partial messages
             lines = [
                 msg1 + "\n",
-                large_json[:1000],  # First part of large message
-                large_json[1000:3000],  # Middle part
-                large_json[3000:]
-                + "\n"
-                + msg3,  # End of large message + complete message
+                large_json[:1000],
+                large_json[1000:3000],
+                large_json[3000:] + "\n" + msg3,
             ]

             transport = SubprocessCLITransport(
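None of the hunks show how the inner async def _test() coroutines are executed; presumably each test hands its coroutine to anyio, along these lines (test name illustrative):

    import anyio

    def test_example() -> None:
        async def _test() -> None:
            ...  # arrange the transport, collect messages, assert

        anyio.run(_test)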