Mirror of https://github.com/denoland/deno.git, synced 2025-09-26 12:19:12 +00:00

fix(ext/node): enable Buffer pool for strings (#29592)

Part 1 towards enabling `parallel/test-buffer-pool-untransferable.js`.
parent 1e6aca57e8
commit 8b81644b59
6 changed files with 83 additions and 24 deletions
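The user-visible effect of the pool is that small `Buffer.from(string)` allocations are carved out of one shared `ArrayBuffer` instead of each getting a dedicated one. A minimal sketch of what the new test added below exercises (the 8192-byte pool size is the current default, not a guaranteed constant):

```js
import { Buffer } from "node:buffer";

// Two small strings should now be written into the same shared pool,
// so their views share one backing ArrayBuffer at different offsets.
const a = Buffer.from("hello world");
const b = Buffer.from("hello world");

console.log(a.buffer === b.buffer);             // true: both are pooled
console.log(a.byteOffset, b.byteOffset);        // different offsets into the pool
console.log(a.byteLength, a.buffer.byteLength); // 11 vs. the pool size (e.g. 8192)
```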
@@ -550,7 +550,7 @@ Object.defineProperties(
     if (data instanceof Buffer) {
       data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
     }
-    if (data.buffer.byteLength > 0) {
+    if (data.byteLength > 0) {
       this._bodyWriter.ready.then(() => {
         if (this._bodyWriter.desiredSize > 0) {
           this._bodyWriter.write(data).then(() => {

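This check changes because, for a pooled Buffer, `data.buffer` is the whole pool rather than just the chunk, so the byte length of the backing `ArrayBuffer` says nothing about the chunk itself. A small illustration with a plain `Uint8Array` view (no Node APIs assumed):

```js
// A zero-length view over a larger backing buffer, similar to an empty
// chunk carved out of the 8 KiB Buffer pool.
const pool = new ArrayBuffer(8192);
const emptyChunk = new Uint8Array(pool, 64, 0);

console.log(emptyChunk.buffer.byteLength); // 8192 -- size of the backing pool
console.log(emptyChunk.byteLength);        // 0    -- size of the chunk itself

// The old `data.buffer.byteLength > 0` test would treat this empty chunk as
// writable data; `data.byteLength > 0` does not.
```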
@@ -207,9 +207,8 @@ function createBuffer(length) {
       'The value "' + length + '" is invalid for option "size"',
     );
   }
-  const buf = new Uint8Array(length);
-  ObjectSetPrototypeOf(buf, BufferPrototype);
-  return buf;
+  return new FastBuffer(length);
 }
 
 /**

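`FastBuffer` itself is not part of this hunk; as a rough sketch only, it can be thought of as a `Uint8Array` subclass whose prototype is wired to `Buffer.prototype` once, so each allocation avoids the per-call `ObjectSetPrototypeOf` the removed lines performed. The class below is a hypothetical stand-in for illustration, not the polyfill's actual implementation:

```js
import { Buffer } from "node:buffer";

// Hypothetical FastBuffer-like helper (an assumption for illustration only).
class FastBufferLike extends Uint8Array {}
// Prototype wiring happens once at definition time instead of once per
// allocation, which is the point of the createBuffer() change above.
Object.setPrototypeOf(FastBufferLike.prototype, Buffer.prototype);

const buf = new FastBufferLike(16);
console.log(buf instanceof Buffer); // true: Buffer.prototype is on its chain
console.log(buf.byteLength);        // 16
```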
@@ -238,7 +237,24 @@ export function Buffer(arg, encodingOrOffset, length) {
   return _from(arg, encodingOrOffset, length);
 }
 
-Buffer.poolSize = 8192;
+Buffer.poolSize = 8 * 1024;
+let poolSize, poolOffset, allocPool, allocBuffer;
+
+function createPool() {
+  poolSize = Buffer.poolSize;
+  allocBuffer = new Uint8Array(poolSize);
+  allocPool = TypedArrayPrototypeGetBuffer(allocBuffer);
+  poolOffset = 0;
+}
+createPool();
+
+function alignPool() {
+  // Ensure aligned slices
+  if (poolOffset & 0x7) {
+    poolOffset |= 0x7;
+    poolOffset++;
+  }
+}
 
 function _from(value, encodingOrOffset, length) {
   if (typeof value === "string") {

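`alignPool()` rounds the pool offset up to the next multiple of 8 so every pooled slice starts 8-byte aligned. A standalone check of that `|= 0x7; ++` arithmetic (re-declared here only for the demo):

```js
// Same rounding logic as the added alignPool(), extracted for illustration.
function alignUpTo8(offset) {
  if (offset & 0x7) {
    offset |= 0x7; // fill the low three bits: 13 -> 15
    offset++;      // ...then step to the next multiple of 8: 15 -> 16
  }
  return offset;
}

for (const n of [0, 1, 7, 8, 9, 13, 16]) {
  console.log(n, "->", alignUpTo8(n)); // 0->0, 1->8, 7->8, 8->8, 9->16, 13->16, 16->16
}
```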
@@ -396,20 +412,36 @@ function fromString(string, encoding) {
   if (!BufferIsEncoding(encoding)) {
     throw new codes.ERR_UNKNOWN_ENCODING(encoding);
   }
+  const maxLength = Buffer.poolSize >>> 1;
   const length = byteLength(string, encoding) | 0;
-  let buf = createBuffer(length);
-  const actual = buf.write(string, encoding);
-  if (actual !== length) {
-    // deno-lint-ignore prefer-primordials
-    buf = buf.slice(0, actual);
+  if (length >= maxLength) {
+    let buf = createBuffer(length);
+    const actual = buf.write(string, encoding);
+    if (actual !== length) {
+      // deno-lint-ignore prefer-primordials
+      buf = buf.slice(0, actual);
+    }
+    return buf;
   }
-  return buf;
+
+  if (length > (poolSize - poolOffset)) {
+    createPool();
+  }
+  const ops = getEncodingOps(encoding);
+  let b = new FastBuffer(allocPool, poolOffset, length);
+  const actual = ops.write(b, string, 0, length);
+  if (actual !== length) {
+    // byteLength() may overestimate the length, so we slice it down.
+    b = new FastBuffer(allocPool, poolOffset, actual);
+  }
+
+  poolOffset += actual;
+  alignPool();
+  return b;
 }
 
 function fromArrayLike(obj) {
-  const buf = new Uint8Array(obj);
-  ObjectSetPrototypeOf(buf, BufferPrototype);
-  return buf;
+  return new FastBuffer(obj);
 }
 
 function fromObject(obj) {

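To make the new control flow concrete: strings whose encoded length is at least half the pool (`Buffer.poolSize >>> 1`) keep getting a dedicated buffer via `createBuffer()`, while smaller strings are written straight into the shared pool. The sketch below models that decision with only standard web APIs; the names and the `TextEncoder`-based write are assumptions for illustration, not the polyfill's internals:

```js
// Simplified model of the pooled fromString() path shown above.
const POOL_SIZE = 8 * 1024;
let pool = new ArrayBuffer(POOL_SIZE);
let poolOffset = 0;

function fromStringSketch(string) {
  const bytes = new TextEncoder().encode(string);
  if (bytes.length >= POOL_SIZE >>> 1) {
    // Large string: dedicated allocation, like the old code path.
    return new Uint8Array(bytes);
  }
  if (bytes.length > POOL_SIZE - poolOffset) {
    // Pool exhausted: start a fresh one, as createPool() does.
    pool = new ArrayBuffer(POOL_SIZE);
    poolOffset = 0;
  }
  const view = new Uint8Array(pool, poolOffset, bytes.length);
  view.set(bytes);
  poolOffset = (poolOffset + bytes.length + 7) & ~7; // advance + 8-byte align
  return view;
}

console.log(fromStringSketch("hello").buffer === pool);          // true: pooled
console.log(fromStringSketch("x".repeat(5000)).buffer === pool); // false: dedicated
```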
@@ -447,7 +479,7 @@ ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype);
 ObjectSetPrototypeOf(SlowBuffer, Uint8Array);
 
 const BufferIsBuffer = Buffer.isBuffer = function isBuffer(b) {
-  return b != null && b._isBuffer === true && b !== BufferPrototype;
+  return ObjectPrototypeIsPrototypeOf(Buffer.prototype, b);
 };
 
 const BufferCompare = Buffer.compare = function compare(a, b) {

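The rewritten `isBuffer` relies on the prototype chain instead of the legacy `_isBuffer` marker, which matches Node's public behavior: plain objects merely carrying `_isBuffer: true` are rejected, while real Buffer instances (including pooled views) pass. A quick check using only the public `node:buffer` API:

```js
import { Buffer } from "node:buffer";

const real = Buffer.from("abc");
const impostor = { _isBuffer: true, length: 3 };

console.log(Buffer.isBuffer(real));     // true: Buffer.prototype is on its chain
console.log(Buffer.isBuffer(impostor)); // false: the old duck-typed
                                        // `b._isBuffer === true` check would
                                        // have accepted this object
```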
@@ -1072,9 +1104,7 @@ function fromArrayBuffer(obj, byteOffset, length) {
     }
   }
 
-  const buffer = new Uint8Array(obj, byteOffset, length);
-  ObjectSetPrototypeOf(buffer, BufferPrototype);
-  return buffer;
+  return new FastBuffer(obj, byteOffset, length);
 }
 
 function _base64Slice(buf, start, end) {

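Behavior here is unchanged in spirit: `Buffer.from(arrayBuffer, byteOffset, length)` returns a view over the given memory rather than a copy; the rewrite just builds that view through `FastBuffer` instead of patching a raw `Uint8Array`'s prototype. A quick check of the view semantics:

```js
import { Buffer } from "node:buffer";

const ab = new ArrayBuffer(16);
const view = Buffer.from(ab, 4, 8); // view over bytes 4..11, no copy

new Uint8Array(ab)[4] = 0xff;       // mutate the underlying ArrayBuffer...
console.log(view[0]);               // 255 -- ...and the Buffer view sees it
console.log(view.buffer === ab);    // true
console.log(view.byteOffset, view.byteLength); // 4 8
```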
@@ -39,10 +39,15 @@ const {
   Symbol,
   MathMin,
   DataViewPrototypeGetBuffer,
+  DataViewPrototypeGetByteLength,
+  DataViewPrototypeGetByteOffset,
   ObjectPrototypeIsPrototypeOf,
   String,
   TypedArrayPrototypeGetBuffer,
+  TypedArrayPrototypeGetByteLength,
+  TypedArrayPrototypeGetByteOffset,
   StringPrototypeToLowerCase,
+  Uint8Array,
 } = primordials;
 const { isTypedArray } = core;
 

@@ -84,10 +89,19 @@ function normalizeBuffer(buf: Buffer) {
   if (isBufferType(buf)) {
     return buf;
   } else {
+    const isTA = isTypedArray(buf);
     return Buffer.from(
-      isTypedArray(buf)
-        ? TypedArrayPrototypeGetBuffer(buf)
-        : DataViewPrototypeGetBuffer(buf),
+      new Uint8Array(
+        isTA
+          ? TypedArrayPrototypeGetBuffer(buf)
+          : DataViewPrototypeGetBuffer(buf),
+        isTA
+          ? TypedArrayPrototypeGetByteOffset(buf)
+          : DataViewPrototypeGetByteOffset(buf),
+        isTA
+          ? TypedArrayPrototypeGetByteLength(buf)
+          : DataViewPrototypeGetByteLength(buf),
+      ),
     );
   }
 }

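The `normalizeBuffer` fix matters because passing only the raw backing `ArrayBuffer` to `Buffer.from` covers the entire backing store and ignores the view's `byteOffset`/`byteLength`; with pooled Buffers that can mean kilobytes of unrelated pool bytes. Rebuilding a bounded `Uint8Array` first keeps exactly the view's bytes. A minimal illustration with a `DataView` over an offset region (public APIs only):

```js
import { Buffer } from "node:buffer";

// A DataView covering only 4 bytes in the middle of a larger buffer,
// much like a pooled Buffer or any other offset view.
const backing = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]);
const dv = new DataView(backing.buffer, 2, 4); // bytes 3, 4, 5, 6

// Only the backing ArrayBuffer: offset and length are lost.
const wrong = Buffer.from(dv.buffer);
console.log(wrong.length); // 8 -- the whole backing store

// Fixed approach: rebuild a bounded Uint8Array first.
const right = Buffer.from(
  new Uint8Array(dv.buffer, dv.byteOffset, dv.byteLength),
);
console.log(right.length); // 4
console.log([...right]);   // [ 3, 4, 5, 6 ]
```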
@@ -1,6 +1,7 @@
 // Copyright 2018-2025 the Deno authors. MIT license.
 import { Buffer } from "node:buffer";
 import { assertEquals, assertThrows } from "@std/assert";
+import { strictEqual } from "node:assert";
 
 Deno.test({
   name: "[node/buffer] alloc fails if size is not a number",

@@ -651,3 +652,12 @@ Deno.test({
     assertEquals([...buf], [0x61, 0x62, 0x63, 0, 0, 0, 0, 0]);
   },
 });
+
+Deno.test({
+  name: "[node/buffer] Buffer.from pool",
+  fn() {
+    const a = Buffer.from("hello world");
+    const b = Buffer.from("hello world");
+    strictEqual(a.buffer, b.buffer);
+  },
+});

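As a complement to the new test: strings at or above half the pool size take the non-pooled `createBuffer()` branch in `fromString`, so they do not share a backing buffer. A hedged follow-up check along the same lines (assumes a fresh pool with room left; the 4096-byte threshold is the current default `poolSize >>> 1`, not a guaranteed constant):

```js
import { Buffer } from "node:buffer";
import { notStrictEqual, strictEqual } from "node:assert";

// Small strings: written into the shared pool, same backing ArrayBuffer.
strictEqual(Buffer.from("hello").buffer, Buffer.from("world").buffer);

// Large strings (>= poolSize >>> 1 bytes): dedicated allocations.
const big = "x".repeat(8192);
notStrictEqual(Buffer.from(big).buffer, Buffer.from(big).buffer);
```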
@@ -1153,8 +1153,9 @@ Deno.test({
     assertEquals(stdout.toString(), expected);
   }
   {
+    const b = Buffer.from(text);
     const { stdout } = spawnSync(Deno.execPath(), ["fmt", "-"], {
-      input: new DataView(Buffer.from(text).buffer),
+      input: new DataView(b.buffer, b.byteOffset, b.byteLength),
     });
     assertEquals(stdout.toString(), expected);
   }

@@ -133,7 +133,9 @@ Deno.test(
 
 Deno.test("should work with dataview", () => {
   const buf = Buffer.from("hello world");
-  const compressed = brotliCompressSync(new DataView(buf.buffer));
+  const compressed = brotliCompressSync(
+    new DataView(buf.buffer, buf.byteOffset, buf.byteLength),
+  );
   const decompressed = brotliDecompressSync(compressed);
   assertEquals(decompressed.toString(), "hello world");
 });

@@ -164,7 +166,9 @@ Deno.test(
   "zlib compression with dataview",
   () => {
     const buf = Buffer.from("hello world");
-    const compressed = gzipSync(new DataView(buf.buffer));
+    const compressed = gzipSync(
+      new DataView(buf.buffer, buf.byteOffset, buf.byteLength),
+    );
     const decompressed = unzipSync(compressed);
     assertEquals(decompressed.toString(), "hello world");
   },

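These zlib tweaks (and the child_process one above) follow the same rule: once `Buffer.from(string)` may return a pooled view, `new DataView(buf.buffer)` spans the whole pool, so the old tests would have fed kilobytes of pool bytes to the compressor instead of just "hello world". Constructing the `DataView` with the Buffer's own offset and length keeps the input bounded:

```js
import { Buffer } from "node:buffer";

const buf = Buffer.from("hello world");

// Spans the entire backing store -- for a pooled Buffer, the whole pool.
const tooWide = new DataView(buf.buffer);

// Bounded to exactly the Buffer's own 11 bytes.
const bounded = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);

console.log(bounded.byteLength);                       // 11
console.log(tooWide.byteLength >= bounded.byteLength); // true
```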