Mirror of https://github.com/denoland/deno.git (synced 2025-08-02 10:02:23 +00:00)
perf(cli): use faster_hex (#22761)
`cli::util::checksum` was showing up on flame graphs because it was concatenating allocated strings. We can use `faster-hex` to improve it.
parent 594d8397ad
commit 4791d16a8e
11 changed files with 32 additions and 24 deletions
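To illustrate the hot path named in the commit message, here is a minimal sketch (illustrative, not code from the Deno repository) contrasting the per-byte string concatenation that showed up on flame graphs with the single `faster_hex::hex_string` call, assuming faster-hex 0.9 is available as a dependency:

fn hex_slow(bytes: &[u8]) -> String {
  // Old pattern: one heap-allocated String per byte, then a join.
  let out: Vec<String> = bytes.iter().map(|byte| format!("{byte:02x}")).collect();
  out.join("")
}

fn hex_fast(bytes: &[u8]) -> String {
  // New pattern: faster-hex fills a single pre-sized String.
  faster_hex::hex_string(bytes)
}

fn main() {
  let digest = [0xdeu8, 0xad, 0xbe, 0xef];
  assert_eq!(hex_slow(&digest), hex_fast(&digest)); // both produce "deadbeef"
}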
Cargo.lock (generated): 19 lines changed
@@ -1064,12 +1064,12 @@ dependencies = [
  "env_logger",
  "eszip",
  "fancy-regex",
+ "faster-hex",
  "flate2",
  "fs3",
  "fwdansi",
  "glibc_version",
  "glob",
- "hex",
  "ignore",
  "import_map",
  "indexmap",
@@ -1534,7 +1534,7 @@ dependencies = [
  "denokv_proto",
  "denokv_remote",
  "denokv_sqlite",
- "hex",
+ "faster-hex",
  "log",
  "num-bigint",
  "prost",
@@ -1650,8 +1650,8 @@ dependencies = [
  "ecb",
  "elliptic-curve",
  "errno 0.2.8",
+ "faster-hex",
  "h2 0.3.22",
- "hex",
  "hkdf",
  "http 0.2.11",
  "idna 0.3.0",
@@ -2508,6 +2508,15 @@ dependencies = [
  "regex",
 ]
 
+[[package]]
+name = "faster-hex"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.0.1"
@@ -7276,7 +7285,7 @@ dependencies = [
  "codespan-reporting",
  "log",
  "naga",
- "parking_lot 0.11.2",
+ "parking_lot 0.12.1",
  "profiling",
  "raw-window-handle",
  "ron",
@@ -7317,7 +7326,7 @@ dependencies = [
  "naga",
  "objc",
  "once_cell",
- "parking_lot 0.11.2",
+ "parking_lot 0.12.1",
  "profiling",
  "range-alloc",
  "raw-window-handle",

@@ -103,6 +103,7 @@ dlopen2 = "0.6.1"
 ecb = "=0.1.2"
 elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
 encoding_rs = "=0.8.33"
+faster-hex = "0.9"
 fastwebsockets = { version = "0.6", features = ["upgrade", "unstable-split"] }
 filetime = "0.2.16"
 flate2 = { version = "1.0.26", default-features = false }
@@ -110,7 +111,6 @@ fs3 = "0.5.0"
 futures = "0.3.21"
 glob = "0.3.1"
 h2 = "0.4"
-hex = "0.4"
 http = "1.0"
 http-body-util = "0.1"
 http_v02 = { package = "http", version = "0.2.9" }

@@ -101,11 +101,11 @@ dprint-plugin-markdown = "=0.16.4"
 dprint-plugin-typescript = "=0.89.3"
 env_logger = "=0.10.0"
 fancy-regex = "=0.10.0"
+faster-hex.workspace = true
 # If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`.
 flate2.workspace = true
 fs3.workspace = true
 glob = "0.3.1"
-hex.workspace = true
 ignore = "0.4"
 import_map = { version = "=0.19.0", features = ["ext"] }
 indexmap.workspace = true

@@ -61,7 +61,7 @@ fn verify_tarball_integrity(
       let mut hash_ctx = Context::new(&ring::digest::SHA1_FOR_LEGACY_USE_ONLY);
       hash_ctx.update(data);
       let digest = hash_ctx.finish();
-      let tarball_checksum = hex::encode(digest.as_ref());
+      let tarball_checksum = faster_hex::hex_string(digest.as_ref());
       (tarball_checksum, hex)
     }
     NpmPackageVersionDistInfoIntegrity::UnknownIntegrity(integrity) => {

@@ -688,7 +688,7 @@ async fn publish_package(
        package.scope, package.package, package.version
      ),
      digest: provenance::SubjectDigest {
-       sha256: hex::encode(sha2::Sha256::digest(&meta_bytes)),
+       sha256: faster_hex::hex_string(&sha2::Sha256::digest(&meta_bytes)),
      },
    };
    let bundle = provenance::generate_provenance(subject).await?;

@@ -622,12 +622,12 @@ async fn testify(
   // Rekor "intoto" entry for the given DSSE envelope and signature.
   //
   // Calculate the value for the payloadHash field into the Rekor entry
-  let payload_hash = hex::encode(sha2::Sha256::digest(
+  let payload_hash = faster_hex::hex_string(&sha2::Sha256::digest(
     content.dsse_envelope.payload.as_bytes(),
   ));
 
   // Calculate the value for the hash field into the Rekor entry
-  let envelope_hash = hex::encode({
+  let envelope_hash = faster_hex::hex_string(&{
     let dsse = DsseEnvelope {
       payload: content.dsse_envelope.payload.clone(),
       payload_type: content.dsse_envelope.payload_type.clone(),

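Both registry hunks above pair sha2::Sha256::digest with faster_hex::hex_string; a minimal standalone sketch of that combination (function name and input are illustrative, not Deno code):

use sha2::{Digest, Sha256};

fn sha256_hex(bytes: &[u8]) -> String {
  // Sha256::digest returns a fixed-size digest value; borrowing it
  // coerces to &[u8], which is what hex_string expects.
  faster_hex::hex_string(&Sha256::digest(bytes))
}

fn main() {
  println!("{}", sha256_hex(b"example payload"));
}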
@@ -3,18 +3,13 @@
 use ring::digest::Context;
 use ring::digest::SHA256;
 
+/// Generate a SHA256 checksum of a slice of byte-slice-like things.
 pub fn gen(v: &[impl AsRef<[u8]>]) -> String {
   let mut ctx = Context::new(&SHA256);
   for src in v {
     ctx.update(src.as_ref());
   }
-  let digest = ctx.finish();
-  let out: Vec<String> = digest
-    .as_ref()
-    .iter()
-    .map(|byte| format!("{byte:02x}"))
-    .collect();
-  out.join("")
+  faster_hex::hex_string(ctx.finish().as_ref())
 }
 
 #[cfg(test)]

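A small usage sketch of the rewritten cli::util::checksum helper above (reproduced here so it runs standalone; the inputs are illustrative and assume the ring and faster-hex crates are available):

use ring::digest::{Context, SHA256};

/// Same shape as the helper above: hash every chunk, then hex-encode once.
pub fn gen(v: &[impl AsRef<[u8]>]) -> String {
  let mut ctx = Context::new(&SHA256);
  for src in v {
    ctx.update(src.as_ref());
  }
  faster_hex::hex_string(ctx.finish().as_ref())
}

fn main() {
  // The context is fed incrementally, so chunking the input does not change the digest.
  assert_eq!(gen(&["hello"]), gen(&["he", "llo"]));
  // SHA-256 digests are 32 bytes, i.e. 64 lowercase hex characters.
  assert_eq!(gen(&["hello"]).len(), 64);
}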
@@ -26,7 +26,7 @@ deno_unsync = "0.1.1"
 denokv_proto.workspace = true
 denokv_remote.workspace = true
 denokv_sqlite.workspace = true
-hex.workspace = true
+faster-hex.workspace = true
 log.workspace = true
 num-bigint.workspace = true
 prost.workspace = true

@@ -12,6 +12,7 @@ use std::num::NonZeroU32;
 use std::rc::Rc;
 use std::time::Duration;
 
+use anyhow::bail;
 use base64::prelude::BASE64_URL_SAFE;
 use base64::Engine;
 use chrono::DateTime;
@@ -234,7 +235,7 @@ impl TryFrom<KvEntry> for ToV8KvEntry {
         .map(key_part_to_v8)
         .collect(),
       value: entry.value.into(),
-      versionstamp: hex::encode(entry.versionstamp).into(),
+      versionstamp: faster_hex::hex_string(&entry.versionstamp).into(),
     })
   }
 }
@@ -511,7 +512,10 @@ fn check_from_v8(value: V8KvCheck) -> Result<Check, AnyError> {
   let versionstamp = match value.1 {
     Some(data) => {
       let mut out = [0u8; 10];
-      hex::decode_to_slice(data, &mut out)
+      if data.len() != out.len() * 2 {
+        bail!(type_error("invalid versionstamp"));
+      }
+      faster_hex::hex_decode(&data, &mut out)
         .map_err(|_| type_error("invalid versionstamp"))?;
       Some(out)
     }
@@ -856,7 +860,7 @@ where
 
   let result = db.atomic_write(atomic_write).await?;
 
-  Ok(result.map(|res| hex::encode(res.versionstamp)))
+  Ok(result.map(|res| faster_hex::hex_string(&res.versionstamp)))
 }
 
 // (prefix, start, end)

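The KV decode path above now checks the input length explicitly before letting faster_hex::hex_decode fill the fixed 10-byte buffer. A standalone sketch of that pattern (helper name and error type are simplified, not Deno code):

fn decode_versionstamp(data: &[u8]) -> Result<[u8; 10], &'static str> {
  let mut out = [0u8; 10];
  // Mirror the diff: reject anything that is not exactly 20 hex characters,
  // then let faster-hex decode into the buffer.
  if data.len() != out.len() * 2 {
    return Err("invalid versionstamp");
  }
  faster_hex::hex_decode(data, &mut out).map_err(|_| "invalid versionstamp")?;
  Ok(out)
}

fn main() {
  assert!(decode_versionstamp(b"000000000000000000ff").is_ok());
  assert!(decode_versionstamp(b"00ff").is_err()); // too short
}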
@@ -32,8 +32,8 @@ dsa = "0.6.1"
 ecb.workspace = true
 elliptic-curve.workspace = true
 errno = "0.2.8"
+faster-hex.workspace = true
 h2 = { version = "0.3.17", features = ["unstable"] }
-hex.workspace = true
 hkdf.workspace = true
 http_v02.workspace = true
 idna = "0.3.0"

@@ -163,7 +163,7 @@ pub fn op_node_hash_digest_hex(
   let context = Rc::try_unwrap(context)
     .map_err(|_| type_error("Hash context is already in use"))?;
   let digest = context.digest()?;
-  Ok(hex::encode(digest))
+  Ok(faster_hex::hex_string(&digest))
 }
 
 #[op2(fast)]