mirror of
https://github.com/denoland/deno.git
synced 2025-10-03 15:44:36 +00:00
Reorganize repos, examples and tests (denoland/deno_std#105)
Original: c5e6e015b5
This commit is contained in:
parent
7d6a0f64f2
commit
f626b04ebe
44 changed files with 54 additions and 45 deletions
223
net/http_test.ts
223
net/http_test.ts
|
@ -1,223 +0,0 @@
|
|||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Ported from
|
||||
// https://github.com/golang/go/blob/master/src/net/http/responsewrite_test.go
|
||||
|
||||
import { Buffer } from "deno";
|
||||
import { test, assert, assertEqual } from "../testing/mod.ts";
|
||||
import {
|
||||
listenAndServe,
|
||||
ServerRequest,
|
||||
setContentLength,
|
||||
Response
|
||||
} from "./http.ts";
|
||||
import { BufWriter, BufReader } from "./bufio.ts";
|
||||
|
||||
// One table-driven case for responseWrite: the Response to serialize and
// the exact raw HTTP bytes it is expected to produce.
interface ResponseTest {
  // Response object handed to ServerRequest.respond().
  response: Response;
  // Expected serialized output, including status line, headers and CRLFs.
  raw: string;
}
|
||||
|
||||
// Shared UTF-8 encoder/decoder instances used by all tests below.
const enc = new TextEncoder();
const dec = new TextDecoder();
|
||||
|
||||
// Table of (Response, expected raw bytes) pairs exercised by responseWrite.
const responseTests: ResponseTest[] = [
  // Default response
  {
    response: {},
    raw: "HTTP/1.1 200 OK\r\n" + "\r\n"
  },
  // HTTP/1.1, chunked coding; empty trailer; close
  {
    response: {
      status: 200,
      body: new Buffer(new TextEncoder().encode("abcdef"))
    },

    // NOTE(review): no content-length is set above, so the expected output
    // presumably reflects a fallback to chunked coding — 6-byte chunk then
    // the zero-length terminator.
    raw:
      "HTTP/1.1 200 OK\r\n" +
      "transfer-encoding: chunked\r\n\r\n" +
      "6\r\nabcdef\r\n0\r\n\r\n"
  }
];
|
||||
|
||||
test(async function responseWrite() {
|
||||
for (const testCase of responseTests) {
|
||||
const buf = new Buffer();
|
||||
const bufw = new BufWriter(buf);
|
||||
const request = new ServerRequest();
|
||||
request.w = bufw;
|
||||
|
||||
await request.respond(testCase.response);
|
||||
assertEqual(buf.toString(), testCase.raw);
|
||||
}
|
||||
});
|
||||
|
||||
test(async function requestBodyWithContentLength() {
|
||||
{
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("content-length", "5");
|
||||
const buf = new Buffer(enc.encode("Hello"));
|
||||
req.r = new BufReader(buf);
|
||||
const body = dec.decode(await req.body());
|
||||
assertEqual(body, "Hello");
|
||||
}
|
||||
|
||||
// Larger than internal buf
|
||||
{
|
||||
const longText = "1234\n".repeat(1000);
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("Content-Length", "5000");
|
||||
const buf = new Buffer(enc.encode(longText));
|
||||
req.r = new BufReader(buf);
|
||||
const body = dec.decode(await req.body());
|
||||
assertEqual(body, longText);
|
||||
}
|
||||
});
|
||||
|
||||
test(async function requestBodyWithTransferEncoding() {
|
||||
{
|
||||
const shortText = "Hello";
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("transfer-encoding", "chunked");
|
||||
let chunksData = "";
|
||||
let chunkOffset = 0;
|
||||
const maxChunkSize = 70;
|
||||
while (chunkOffset < shortText.length) {
|
||||
const chunkSize = Math.min(maxChunkSize, shortText.length - chunkOffset);
|
||||
chunksData += `${chunkSize.toString(16)}\r\n${shortText.substr(
|
||||
chunkOffset,
|
||||
chunkSize
|
||||
)}\r\n`;
|
||||
chunkOffset += chunkSize;
|
||||
}
|
||||
chunksData += "0\r\n\r\n";
|
||||
const buf = new Buffer(enc.encode(chunksData));
|
||||
req.r = new BufReader(buf);
|
||||
const body = dec.decode(await req.body());
|
||||
assertEqual(body, shortText);
|
||||
}
|
||||
|
||||
// Larger than internal buf
|
||||
{
|
||||
const longText = "1234\n".repeat(1000);
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("transfer-encoding", "chunked");
|
||||
let chunksData = "";
|
||||
let chunkOffset = 0;
|
||||
const maxChunkSize = 70;
|
||||
while (chunkOffset < longText.length) {
|
||||
const chunkSize = Math.min(maxChunkSize, longText.length - chunkOffset);
|
||||
chunksData += `${chunkSize.toString(16)}\r\n${longText.substr(
|
||||
chunkOffset,
|
||||
chunkSize
|
||||
)}\r\n`;
|
||||
chunkOffset += chunkSize;
|
||||
}
|
||||
chunksData += "0\r\n\r\n";
|
||||
const buf = new Buffer(enc.encode(chunksData));
|
||||
req.r = new BufReader(buf);
|
||||
const body = dec.decode(await req.body());
|
||||
assertEqual(body, longText);
|
||||
}
|
||||
});
|
||||
|
||||
test(async function requestBodyStreamWithContentLength() {
|
||||
{
|
||||
const shortText = "Hello";
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("content-length", "" + shortText.length);
|
||||
const buf = new Buffer(enc.encode(shortText));
|
||||
req.r = new BufReader(buf);
|
||||
const it = await req.bodyStream();
|
||||
let offset = 0;
|
||||
for await (const chunk of it) {
|
||||
const s = dec.decode(chunk);
|
||||
assertEqual(shortText.substr(offset, s.length), s);
|
||||
offset += s.length;
|
||||
}
|
||||
}
|
||||
|
||||
// Larger than internal buf
|
||||
{
|
||||
const longText = "1234\n".repeat(1000);
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("Content-Length", "5000");
|
||||
const buf = new Buffer(enc.encode(longText));
|
||||
req.r = new BufReader(buf);
|
||||
const it = await req.bodyStream();
|
||||
let offset = 0;
|
||||
for await (const chunk of it) {
|
||||
const s = dec.decode(chunk);
|
||||
assertEqual(longText.substr(offset, s.length), s);
|
||||
offset += s.length;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test(async function requestBodyStreamWithTransferEncoding() {
|
||||
{
|
||||
const shortText = "Hello";
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("transfer-encoding", "chunked");
|
||||
let chunksData = "";
|
||||
let chunkOffset = 0;
|
||||
const maxChunkSize = 70;
|
||||
while (chunkOffset < shortText.length) {
|
||||
const chunkSize = Math.min(maxChunkSize, shortText.length - chunkOffset);
|
||||
chunksData += `${chunkSize.toString(16)}\r\n${shortText.substr(
|
||||
chunkOffset,
|
||||
chunkSize
|
||||
)}\r\n`;
|
||||
chunkOffset += chunkSize;
|
||||
}
|
||||
chunksData += "0\r\n\r\n";
|
||||
const buf = new Buffer(enc.encode(chunksData));
|
||||
req.r = new BufReader(buf);
|
||||
const it = await req.bodyStream();
|
||||
let offset = 0;
|
||||
for await (const chunk of it) {
|
||||
const s = dec.decode(chunk);
|
||||
assertEqual(shortText.substr(offset, s.length), s);
|
||||
offset += s.length;
|
||||
}
|
||||
}
|
||||
|
||||
// Larger than internal buf
|
||||
{
|
||||
const longText = "1234\n".repeat(1000);
|
||||
const req = new ServerRequest();
|
||||
req.headers = new Headers();
|
||||
req.headers.set("transfer-encoding", "chunked");
|
||||
let chunksData = "";
|
||||
let chunkOffset = 0;
|
||||
const maxChunkSize = 70;
|
||||
while (chunkOffset < longText.length) {
|
||||
const chunkSize = Math.min(maxChunkSize, longText.length - chunkOffset);
|
||||
chunksData += `${chunkSize.toString(16)}\r\n${longText.substr(
|
||||
chunkOffset,
|
||||
chunkSize
|
||||
)}\r\n`;
|
||||
chunkOffset += chunkSize;
|
||||
}
|
||||
chunksData += "0\r\n\r\n";
|
||||
const buf = new Buffer(enc.encode(chunksData));
|
||||
req.r = new BufReader(buf);
|
||||
const it = await req.bodyStream();
|
||||
let offset = 0;
|
||||
for await (const chunk of it) {
|
||||
const s = dec.decode(chunk);
|
||||
assertEqual(longText.substr(offset, s.length), s);
|
||||
offset += s.length;
|
||||
}
|
||||
}
|
||||
});
|
Loading…
Add table
Add a link
Reference in a new issue