Revert "refactor: merge deno_cache into deno_web (#31183)" (#31205)

This reverts commit 5eccca8717.

Turns out this extension requires `ext/fetch` to be merged into `ext/web`
first, so reverting for now to keep `main` green.
Bartek Iwańczuk 2025-11-06 10:25:55 +01:00 committed by GitHub
parent 982658e8ec
commit 004b85703a
25 changed files with 670 additions and 630 deletions

Cargo.lock

@ -1807,6 +1807,33 @@ dependencies = [
"serde",
]
[[package]]
name = "deno_cache"
version = "0.153.0"
dependencies = [
"async-stream",
"async-trait",
"base64 0.22.1",
"bytes",
"chrono",
"deno_core",
"deno_error",
"futures",
"http 1.1.0",
"http-body 1.0.0",
"http-body-util",
"hyper 1.6.0",
"hyper-util",
"log",
"rusqlite",
"serde",
"sha2",
"slab",
"thiserror 2.0.12",
"tokio",
"tokio-util",
]
[[package]]
name = "deno_cache_dir"
version = "0.25.0"
@ -2787,6 +2814,7 @@ dependencies = [
"color-print",
"deno_ast",
"deno_bundle_runtime",
"deno_cache",
"deno_canvas",
"deno_core",
"deno_cron",
@ -3049,11 +3077,9 @@ version = "0.222.0"
name = "deno_web"
version = "0.252.0"
dependencies = [
"async-stream",
"async-trait",
"base64-simd",
"bytes",
"chrono",
"deno_bench_util",
"deno_core",
"deno_error",
@ -3063,19 +3089,9 @@ dependencies = [
"encoding_rs",
"flate2",
"futures",
"http 1.1.0",
"http-body 1.0.0",
"http-body-util",
"hyper 1.6.0",
"hyper-util",
"log",
"rusqlite",
"serde",
"sha2",
"slab",
"thiserror 2.0.12",
"tokio",
"tokio-util",
"urlpattern",
"uuid",
]

View file

@ -9,6 +9,7 @@ members = [
"cli/rt",
"cli/snapshot",
"ext/bundle",
"ext/cache",
"ext/canvas",
"ext/cron",
"ext/crypto",
@ -86,6 +87,7 @@ denokv_sqlite = { default-features = false, version = "0.12.0" }
# exts
deno_bundle_runtime = { version = "0.8.0", path = "./ext/bundle" }
deno_cache = { version = "0.153.0", path = "./ext/cache" }
deno_canvas = { version = "0.90.0", path = "./ext/canvas" }
deno_cron = { version = "0.101.0", path = "./ext/cron" }
deno_crypto = { version = "0.235.0", path = "./ext/crypto" }

ext/cache/Cargo.toml

@ -2,10 +2,36 @@
[package]
name = "deno_cache"
version = "0.154.0"
version = "0.153.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "DEPRECATED: Use deno_web instead"
description = "Implementation of Cache API for Deno"
[lib]
path = "lib.rs"
[dependencies]
async-stream.workspace = true
async-trait.workspace = true
base64.workspace = true
bytes.workspace = true
chrono = { workspace = true, features = ["now"] }
deno_core.workspace = true
deno_error.workspace = true
futures.workspace = true
http.workspace = true
http-body.workspace = true
http-body-util.workspace = true
hyper.workspace = true
hyper-util.workspace = true
log.workspace = true
rusqlite.workspace = true
serde.workspace = true
sha2.workspace = true
slab.workspace = true
thiserror.workspace = true
tokio.workspace = true
tokio-util.workspace = true

ext/cache/README.md

@ -1,4 +1,24 @@
# deno_cache
This crate has been deprecated, use
[deno_web](https://crates.io/crates/deno_web) instead.
This crate implements the Cache API for Deno.
The following APIs are implemented:
- [`CacheStorage::open()`][cache_storage_open]
- [`CacheStorage::has()`][cache_storage_has]
- [`CacheStorage::delete()`][cache_storage_delete]
- [`Cache::match()`][cache_match]
- [`Cache::put()`][cache_put]
- [`Cache::delete()`][cache_delete]
Cache APIs don't support the [query options][query_options] yet.
Spec: https://w3c.github.io/ServiceWorker/#cache-interface
[query_options]: https://w3c.github.io/ServiceWorker/#dictdef-cachequeryoptions
[cache_storage_open]: https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage/open
[cache_storage_has]: https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage/has
[cache_storage_delete]: https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage/delete
[cache_match]: https://developer.mozilla.org/en-US/docs/Web/API/Cache/match
[cache_put]: https://developer.mozilla.org/en-US/docs/Web/API/Cache/put
[cache_delete]: https://developer.mozilla.org/en-US/docs/Web/API/Cache/delete

ext/cache/lib.rs

@ -1 +1,547 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::path::PathBuf;
use std::pin::Pin;
use std::rc::Rc;
use std::sync::Arc;
use async_trait::async_trait;
use bytes::Bytes;
use deno_core::AsyncRefCell;
use deno_core::AsyncResult;
use deno_core::ByteString;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
use deno_core::op2;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_error::JsErrorBox;
use futures::Stream;
use tokio::io::AsyncRead;
use tokio::io::AsyncReadExt;
mod lsc_shard;
mod lscache;
mod sqlite;
pub use lsc_shard::CacheShard;
pub use lscache::LscBackend;
pub use sqlite::SqliteBackedCache;
use tokio_util::io::StreamReader;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CacheError {
#[class(type)]
#[error("CacheStorage is not available in this context")]
ContextUnsupported,
#[class(type)]
#[error("Cache name cannot be empty")]
EmptyName,
#[class(type)]
#[error("Cache is not available")]
NotAvailable,
#[class(type)]
#[error("Cache not found")]
NotFound,
#[class(type)]
#[error("Cache deletion is not supported")]
DeletionNotSupported,
#[class(type)]
#[error("Content-Encoding is not allowed in response headers")]
ContentEncodingNotAllowed,
#[class(generic)]
#[error(transparent)]
Sqlite(#[from] rusqlite::Error),
#[class(generic)]
#[error(transparent)]
JoinError(#[from] tokio::task::JoinError),
#[class(inherit)]
#[error(transparent)]
Resource(#[from] deno_core::error::ResourceError),
#[class(inherit)]
#[error(transparent)]
Other(JsErrorBox),
#[class(inherit)]
#[error("{0}")]
Io(#[from] std::io::Error),
#[class(type)]
#[error(transparent)]
InvalidHeaderName(#[from] hyper::header::InvalidHeaderName),
#[class(type)]
#[error(transparent)]
InvalidHeaderValue(#[from] hyper::header::InvalidHeaderValue),
#[class(type)]
#[error(transparent)]
Hyper(#[from] hyper::Error),
#[class(generic)]
#[error(transparent)]
ClientError(#[from] hyper_util::client::legacy::Error),
#[class(generic)]
#[error("Failed to create cache storage directory {}", .dir.display())]
CacheStorageDirectory {
dir: PathBuf,
#[source]
source: std::io::Error,
},
#[class(generic)]
#[error("cache {method} request failed: {status}")]
RequestFailed {
method: &'static str,
status: hyper::StatusCode,
},
}
#[derive(Clone)]
pub struct CreateCache(pub Arc<dyn Fn() -> Result<CacheImpl, CacheError>>);
deno_core::extension!(deno_cache,
deps = [ deno_webidl, deno_web, deno_fetch ],
ops = [
op_cache_storage_open,
op_cache_storage_has,
op_cache_storage_delete,
op_cache_put,
op_cache_match,
op_cache_delete,
],
esm = [ "01_cache.js" ],
options = {
maybe_create_cache: Option<CreateCache>,
},
state = |state, options| {
if let Some(create_cache) = options.maybe_create_cache {
state.put(create_cache);
}
},
);
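For context on the `maybe_create_cache` option declared above: embedders pass a lazily-invoked callback that builds the backend. A minimal sketch, assuming the same `LscBackend`/`CacheShard` wiring this commit restores in `runtime/worker.rs` further down (the helper name here is made up for illustration):

use std::rc::Rc;
use std::sync::Arc;

use deno_cache::CacheError;
use deno_cache::CacheImpl;
use deno_cache::CacheShard;
use deno_cache::CreateCache;
use deno_cache::LscBackend;

// Builds the CreateCache callback the extension stores in OpState; the
// backend itself is only constructed the first time `get_cache` runs.
fn build_create_cache(endpoint: String, token: String) -> CreateCache {
  let shard = Rc::new(CacheShard::new(endpoint, token));
  CreateCache(Arc::new(move || -> Result<CacheImpl, CacheError> {
    let backend = LscBackend::default();
    backend.set_shard(shard.clone());
    Ok(CacheImpl::Lsc(backend))
  }))
}

// Registration then passes the callback through the option above, e.g.
// `deno_cache::deno_cache::init(Some(build_create_cache(endpoint, token)))`.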
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct CachePutRequest {
pub cache_id: i64,
pub request_url: String,
pub request_headers: Vec<(ByteString, ByteString)>,
pub response_headers: Vec<(ByteString, ByteString)>,
pub response_status: u16,
pub response_status_text: String,
pub response_rid: Option<ResourceId>,
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct CacheMatchRequest {
pub cache_id: i64,
pub request_url: String,
pub request_headers: Vec<(ByteString, ByteString)>,
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheMatchResponse(CacheMatchResponseMeta, Option<ResourceId>);
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheMatchResponseMeta {
pub response_status: u16,
pub response_status_text: String,
pub request_headers: Vec<(ByteString, ByteString)>,
pub response_headers: Vec<(ByteString, ByteString)>,
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct CacheDeleteRequest {
pub cache_id: i64,
pub request_url: String,
}
#[async_trait(?Send)]
pub trait Cache: Clone + 'static {
type CacheMatchResourceType: Resource;
async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError>;
async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError>;
async fn storage_delete(
&self,
cache_name: String,
) -> Result<bool, CacheError>;
/// Put a resource into the cache.
async fn put(
&self,
request_response: CachePutRequest,
resource: Option<Rc<dyn Resource>>,
) -> Result<(), CacheError>;
async fn r#match(
&self,
request: CacheMatchRequest,
) -> Result<
Option<(CacheMatchResponseMeta, Option<Self::CacheMatchResourceType>)>,
CacheError,
>;
async fn delete(
&self,
request: CacheDeleteRequest,
) -> Result<bool, CacheError>;
}
#[derive(Clone)]
pub enum CacheImpl {
Sqlite(SqliteBackedCache),
Lsc(LscBackend),
}
#[async_trait(?Send)]
impl Cache for CacheImpl {
type CacheMatchResourceType = CacheResponseResource;
async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError> {
match self {
Self::Sqlite(cache) => cache.storage_open(cache_name).await,
Self::Lsc(cache) => cache.storage_open(cache_name).await,
}
}
async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError> {
match self {
Self::Sqlite(cache) => cache.storage_has(cache_name).await,
Self::Lsc(cache) => cache.storage_has(cache_name).await,
}
}
async fn storage_delete(
&self,
cache_name: String,
) -> Result<bool, CacheError> {
match self {
Self::Sqlite(cache) => cache.storage_delete(cache_name).await,
Self::Lsc(cache) => cache.storage_delete(cache_name).await,
}
}
async fn put(
&self,
request_response: CachePutRequest,
resource: Option<Rc<dyn Resource>>,
) -> Result<(), CacheError> {
match self {
Self::Sqlite(cache) => cache.put(request_response, resource).await,
Self::Lsc(cache) => cache.put(request_response, resource).await,
}
}
async fn r#match(
&self,
request: CacheMatchRequest,
) -> Result<
Option<(CacheMatchResponseMeta, Option<Self::CacheMatchResourceType>)>,
CacheError,
> {
match self {
Self::Sqlite(cache) => cache.r#match(request).await,
Self::Lsc(cache) => cache.r#match(request).await,
}
}
async fn delete(
&self,
request: CacheDeleteRequest,
) -> Result<bool, CacheError> {
match self {
Self::Sqlite(cache) => cache.delete(request).await,
Self::Lsc(cache) => cache.delete(request).await,
}
}
}
pub enum CacheResponseResource {
Sqlite(AsyncRefCell<tokio::fs::File>),
Lsc(AsyncRefCell<Pin<Box<dyn AsyncRead>>>),
}
impl CacheResponseResource {
fn sqlite(file: tokio::fs::File) -> Self {
Self::Sqlite(AsyncRefCell::new(file))
}
fn lsc(
body: impl Stream<Item = Result<Bytes, std::io::Error>> + 'static,
) -> Self {
Self::Lsc(AsyncRefCell::new(Box::pin(StreamReader::new(body))))
}
async fn read(
self: Rc<Self>,
data: &mut [u8],
) -> Result<usize, std::io::Error> {
let nread = match &*self {
CacheResponseResource::Sqlite(_) => {
let resource = deno_core::RcRef::map(&self, |r| match r {
Self::Sqlite(r) => r,
_ => unreachable!(),
});
let mut file = resource.borrow_mut().await;
file.read(data).await?
}
CacheResponseResource::Lsc(_) => {
let resource = deno_core::RcRef::map(&self, |r| match r {
Self::Lsc(r) => r,
_ => unreachable!(),
});
let mut file = resource.borrow_mut().await;
file.read(data).await?
}
};
Ok(nread)
}
}
impl Resource for CacheResponseResource {
deno_core::impl_readable_byob!();
fn name(&self) -> Cow<'_, str> {
"CacheResponseResource".into()
}
}
#[op2(async)]
#[number]
pub async fn op_cache_storage_open(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<i64, CacheError> {
let cache = get_cache(&state)?;
cache.storage_open(cache_name).await
}
#[op2(async)]
pub async fn op_cache_storage_has(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<bool, CacheError> {
let cache = get_cache(&state)?;
cache.storage_has(cache_name).await
}
#[op2(async)]
pub async fn op_cache_storage_delete(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<bool, CacheError> {
let cache = get_cache(&state)?;
cache.storage_delete(cache_name).await
}
#[op2(async)]
pub async fn op_cache_put(
state: Rc<RefCell<OpState>>,
#[serde] request_response: CachePutRequest,
) -> Result<(), CacheError> {
let cache = get_cache(&state)?;
let resource = match request_response.response_rid {
Some(rid) => Some(
state
.borrow_mut()
.resource_table
.take_any(rid)
.map_err(CacheError::Resource)?,
),
None => None,
};
cache.put(request_response, resource).await
}
#[op2(async)]
#[serde]
pub async fn op_cache_match(
state: Rc<RefCell<OpState>>,
#[serde] request: CacheMatchRequest,
) -> Result<Option<CacheMatchResponse>, CacheError> {
let cache = get_cache(&state)?;
match cache.r#match(request).await? {
Some((meta, None)) => Ok(Some(CacheMatchResponse(meta, None))),
Some((meta, Some(resource))) => {
let rid = state.borrow_mut().resource_table.add(resource);
Ok(Some(CacheMatchResponse(meta, Some(rid))))
}
None => Ok(None),
}
}
#[op2(async)]
pub async fn op_cache_delete(
state: Rc<RefCell<OpState>>,
#[serde] request: CacheDeleteRequest,
) -> Result<bool, CacheError> {
let cache = get_cache(&state)?;
cache.delete(request).await
}
pub fn get_cache(
state: &Rc<RefCell<OpState>>,
) -> Result<CacheImpl, CacheError> {
let mut state = state.borrow_mut();
if let Some(cache) = state.try_borrow::<CacheImpl>() {
Ok(cache.clone())
} else if let Some(create_cache) = state.try_borrow::<CreateCache>() {
let cache = create_cache.0()?;
state.put(cache);
Ok(state.borrow::<CacheImpl>().clone())
} else {
Err(CacheError::ContextUnsupported)
}
}
/// Check if headers, mentioned in the vary header, of query request
/// and cached request are equal.
pub fn vary_header_matches(
vary_header: &ByteString,
query_request_headers: &[(ByteString, ByteString)],
cached_request_headers: &[(ByteString, ByteString)],
) -> bool {
let vary_header = match std::str::from_utf8(vary_header) {
Ok(vary_header) => vary_header,
Err(_) => return false,
};
let headers = get_headers_from_vary_header(vary_header);
for header in headers {
let query_header = get_header(&header, query_request_headers);
let cached_header = get_header(&header, cached_request_headers);
if query_header != cached_header {
return false;
}
}
true
}
#[test]
fn test_vary_header_matches() {
let vary_header = ByteString::from("accept-encoding");
let query_request_headers = vec![(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
)];
let cached_request_headers = vec![(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
)];
assert!(vary_header_matches(
&vary_header,
&query_request_headers,
&cached_request_headers
));
let vary_header = ByteString::from("accept-encoding");
let query_request_headers = vec![(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
)];
let cached_request_headers =
vec![(ByteString::from("accept-encoding"), ByteString::from("br"))];
assert!(!vary_header_matches(
&vary_header,
&query_request_headers,
&cached_request_headers
));
}
/// Get headers from the vary header.
pub fn get_headers_from_vary_header(vary_header: &str) -> Vec<String> {
vary_header
.split(',')
.map(|s| s.trim().to_lowercase())
.collect()
}
#[test]
fn test_get_headers_from_vary_header() {
let headers = get_headers_from_vary_header("accept-encoding");
assert_eq!(headers, vec!["accept-encoding"]);
let headers = get_headers_from_vary_header("accept-encoding, user-agent");
assert_eq!(headers, vec!["accept-encoding", "user-agent"]);
}
/// Get value for the header with the given name.
pub fn get_header(
name: &str,
headers: &[(ByteString, ByteString)],
) -> Option<ByteString> {
headers
.iter()
.find(|(k, _)| {
if let Ok(k) = std::str::from_utf8(k) {
k.eq_ignore_ascii_case(name)
} else {
false
}
})
.map(|(_, v)| v.to_owned())
}
#[test]
fn test_get_header() {
let headers = vec![
(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
),
(
ByteString::from("content-type"),
ByteString::from("application/json"),
),
(
ByteString::from("vary"),
ByteString::from("accept-encoding"),
),
];
let value = get_header("accept-encoding", &headers);
assert_eq!(value, Some(ByteString::from("gzip")));
let value = get_header("content-type", &headers);
assert_eq!(value, Some(ByteString::from("application/json")));
let value = get_header("vary", &headers);
assert_eq!(value, Some(ByteString::from("accept-encoding")));
}
/// Serialize headers into bytes.
pub fn serialize_headers(headers: &[(ByteString, ByteString)]) -> Vec<u8> {
let mut serialized_headers = Vec::new();
for (name, value) in headers {
serialized_headers.extend_from_slice(name);
serialized_headers.extend_from_slice(b"\r\n");
serialized_headers.extend_from_slice(value);
serialized_headers.extend_from_slice(b"\r\n");
}
serialized_headers
}
/// Deserialize bytes into headers.
pub fn deserialize_headers(
serialized_headers: &[u8],
) -> Vec<(ByteString, ByteString)> {
let mut headers = Vec::new();
let mut piece = None;
let mut start = 0;
for (i, byte) in serialized_headers.iter().enumerate() {
if byte == &b'\r' && serialized_headers.get(i + 1) == Some(&b'\n') {
if piece.is_none() {
piece = Some(start..i);
} else {
let name = piece.unwrap();
let value = start..i;
headers.push((
ByteString::from(&serialized_headers[name]),
ByteString::from(&serialized_headers[value]),
));
piece = None;
}
start = i + 2;
}
}
assert!(piece.is_none());
assert_eq!(start, serialized_headers.len());
headers
}
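The two helpers above store each header as `name\r\nvalue\r\n`; a small round-trip test sketch (not part of the original file) makes the layout concrete:

#[test]
fn test_serialize_deserialize_headers_roundtrip() {
  // Two headers serialize to four CRLF-terminated pieces and parse back
  // into the same (name, value) pairs.
  let headers = vec![
    (
      ByteString::from("content-type"),
      ByteString::from("application/json"),
    ),
    (ByteString::from("vary"), ByteString::from("accept-encoding")),
  ];
  let bytes = serialize_headers(&headers);
  assert_eq!(
    bytes,
    b"content-type\r\napplication/json\r\nvary\r\naccept-encoding\r\n".to_vec()
  );
  assert_eq!(deserialize_headers(&bytes), headers);
}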

View file

@ -18,7 +18,7 @@ use hyper_util::client::legacy::Client;
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::rt::tokio::TokioExecutor;
use super::CacheError;
use crate::CacheError;
type ClientBody =
Either<UnsyncBoxBody<Bytes, CacheError>, UnsyncBoxBody<Bytes, Infallible>>;

View file

@ -4,6 +4,7 @@ use std::cell::RefCell;
use std::rc::Rc;
use async_stream::try_stream;
use base64::Engine;
use bytes::Bytes;
use deno_core::BufMutView;
use deno_core::ByteString;
@ -18,15 +19,15 @@ use http::header::VARY;
use http_body_util::combinators::UnsyncBoxBody;
use slab::Slab;
use super::CacheDeleteRequest;
use super::CacheError;
use super::CacheMatchRequest;
use super::CacheMatchResponseMeta;
use super::CachePutRequest;
use super::CacheResponseResource;
use super::get_header;
use super::get_headers_from_vary_header;
use super::lsc_shard::CacheShard;
use crate::CacheDeleteRequest;
use crate::CacheError;
use crate::CacheMatchRequest;
use crate::CacheMatchResponseMeta;
use crate::CachePutRequest;
use crate::CacheResponseResource;
use crate::get_header;
use crate::get_headers_from_vary_header;
use crate::lsc_shard::CacheShard;
const REQHDR_PREFIX: &str = "x-lsc-meta-reqhdr-";
@ -325,7 +326,7 @@ fn vary_header_matches(
fn build_cache_object_key(cache_name: &[u8], request_url: &[u8]) -> String {
format!(
"v1/{}/{}",
base64_simd::URL_SAFE_NO_PAD.encode_to_string(cache_name),
base64_simd::URL_SAFE_NO_PAD.encode_to_string(request_url),
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(cache_name),
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(request_url),
)
}
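The hunk above replaces `base64_simd` with the `base64` crate. Both encoders use the URL-safe alphabet without padding, so the object-key format should be unchanged; a quick standalone check (illustrative, not part of this diff):

use base64::Engine;

fn main() {
  let cache_name: &[u8] = b"default";
  let request_url: &[u8] = b"https://example.com/";
  // Same format string as build_cache_object_key above.
  let key = format!(
    "v1/{}/{}",
    base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(cache_name),
    base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(request_url),
  );
  assert_eq!(key, "v1/ZGVmYXVsdA/aHR0cHM6Ly9leGFtcGxlLmNvbS8");
}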

View file

@ -19,16 +19,16 @@ use rusqlite::params;
use tokio::io::AsyncWrite;
use tokio::io::AsyncWriteExt;
use super::CacheDeleteRequest;
use super::CacheError;
use super::CacheMatchRequest;
use super::CacheMatchResponseMeta;
use super::CachePutRequest;
use super::CacheResponseResource;
use super::deserialize_headers;
use super::get_header;
use super::serialize_headers;
use super::vary_header_matches;
use crate::CacheDeleteRequest;
use crate::CacheError;
use crate::CacheMatchRequest;
use crate::CacheMatchResponseMeta;
use crate::CachePutRequest;
use crate::CacheResponseResource;
use crate::deserialize_headers;
use crate::get_header;
use crate::serialize_headers;
use crate::vary_header_matches;
#[derive(Clone)]
pub struct SqliteBackedCache {

View file

@ -14,11 +14,9 @@ description = "Collection of Web APIs"
path = "lib.rs"
[dependencies]
async-stream.workspace = true
async-trait.workspace = true
base64-simd.workspace = true
bytes.workspace = true
chrono = { workspace = true, features = ["now"] }
deno_core.workspace = true
deno_error.workspace = true
deno_features.workspace = true
@ -26,19 +24,9 @@ deno_permissions.workspace = true
encoding_rs.workspace = true
flate2 = { workspace = true, features = ["default"] }
futures.workspace = true
http.workspace = true
http-body.workspace = true
http-body-util.workspace = true
hyper.workspace = true
hyper-util.workspace = true
log.workspace = true
rusqlite.workspace = true
serde.workspace = true
sha2.workspace = true
slab.workspace = true
thiserror.workspace = true
tokio.workspace = true
tokio-util.workspace = true
urlpattern.workspace = true
uuid = { workspace = true, features = ["serde"] }

View file

@ -36,7 +36,6 @@ fn setup() -> Vec<Extension> {
deno_web::deno_web::init::<Permissions, deno_web::InMemoryBroadcastChannel>(
Default::default(),
None,
None,
Default::default(),
),
bench_setup::init(),

View file

@ -35,7 +35,6 @@ fn setup() -> Vec<Extension> {
deno_web::deno_web::init::<Permissions, deno_web::InMemoryBroadcastChannel>(
Default::default(),
None,
None,
Default::default(),
),
bench_setup::init(),

View file

@ -32,7 +32,6 @@ fn setup() -> Vec<Extension> {
deno_web::deno_web::init::<Permissions, deno_web::InMemoryBroadcastChannel>(
Default::default(),
None,
None,
Default::default(),
),
bench_setup::init(),

View file

@ -1,24 +0,0 @@
# deno_cache
This crate implements the Cache API for Deno.
The following APIs are implemented:
- [`CacheStorage::open()`][cache_storage_open]
- [`CacheStorage::has()`][cache_storage_has]
- [`CacheStorage::delete()`][cache_storage_delete]
- [`Cache::match()`][cache_match]
- [`Cache::put()`][cache_put]
- [`Cache::delete()`][cache_delete]
Cache APIs don't support the [query options][query_options] yet.
Spec: https://w3c.github.io/ServiceWorker/#cache-interface
[query_options]: https://w3c.github.io/ServiceWorker/#dictdef-cachequeryoptions
[cache_storage_open]: https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage/open
[cache_storage_has]: https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage/has
[cache_storage_delete]: https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage/delete
[cache_match]: https://developer.mozilla.org/en-US/docs/Web/API/Cache/match
[cache_put]: https://developer.mozilla.org/en-US/docs/Web/API/Cache/put
[cache_delete]: https://developer.mozilla.org/en-US/docs/Web/API/Cache/delete

ext/web/cache/mod.rs

@ -1,526 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::cell::RefCell;
use std::path::PathBuf;
use std::pin::Pin;
use std::rc::Rc;
use std::sync::Arc;
use async_trait::async_trait;
use bytes::Bytes;
use deno_core::AsyncRefCell;
use deno_core::AsyncResult;
use deno_core::ByteString;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
use deno_core::op2;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_error::JsErrorBox;
use futures::Stream;
use tokio::io::AsyncRead;
use tokio::io::AsyncReadExt;
mod lsc_shard;
mod lscache;
mod sqlite;
pub use lsc_shard::CacheShard;
pub use lscache::LscBackend;
pub use sqlite::SqliteBackedCache;
use tokio_util::io::StreamReader;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CacheError {
#[class(type)]
#[error("CacheStorage is not available in this context")]
ContextUnsupported,
#[class(type)]
#[error("Cache name cannot be empty")]
EmptyName,
#[class(type)]
#[error("Cache is not available")]
NotAvailable,
#[class(type)]
#[error("Cache not found")]
NotFound,
#[class(type)]
#[error("Cache deletion is not supported")]
DeletionNotSupported,
#[class(type)]
#[error("Content-Encoding is not allowed in response headers")]
ContentEncodingNotAllowed,
#[class(generic)]
#[error(transparent)]
Sqlite(#[from] rusqlite::Error),
#[class(generic)]
#[error(transparent)]
JoinError(#[from] tokio::task::JoinError),
#[class(inherit)]
#[error(transparent)]
Resource(#[from] deno_core::error::ResourceError),
#[class(inherit)]
#[error(transparent)]
Other(JsErrorBox),
#[class(inherit)]
#[error("{0}")]
Io(#[from] std::io::Error),
#[class(type)]
#[error(transparent)]
InvalidHeaderName(#[from] hyper::header::InvalidHeaderName),
#[class(type)]
#[error(transparent)]
InvalidHeaderValue(#[from] hyper::header::InvalidHeaderValue),
#[class(type)]
#[error(transparent)]
Hyper(#[from] hyper::Error),
#[class(generic)]
#[error(transparent)]
ClientError(#[from] hyper_util::client::legacy::Error),
#[class(generic)]
#[error("Failed to create cache storage directory {}", .dir.display())]
CacheStorageDirectory {
dir: PathBuf,
#[source]
source: std::io::Error,
},
#[class(generic)]
#[error("cache {method} request failed: {status}")]
RequestFailed {
method: &'static str,
status: hyper::StatusCode,
},
}
#[derive(Clone)]
pub struct CreateCache(pub Arc<dyn Fn() -> Result<CacheImpl, CacheError>>);
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct CachePutRequest {
pub cache_id: i64,
pub request_url: String,
pub request_headers: Vec<(ByteString, ByteString)>,
pub response_headers: Vec<(ByteString, ByteString)>,
pub response_status: u16,
pub response_status_text: String,
pub response_rid: Option<ResourceId>,
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct CacheMatchRequest {
pub cache_id: i64,
pub request_url: String,
pub request_headers: Vec<(ByteString, ByteString)>,
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheMatchResponse(CacheMatchResponseMeta, Option<ResourceId>);
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheMatchResponseMeta {
pub response_status: u16,
pub response_status_text: String,
pub request_headers: Vec<(ByteString, ByteString)>,
pub response_headers: Vec<(ByteString, ByteString)>,
}
#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct CacheDeleteRequest {
pub cache_id: i64,
pub request_url: String,
}
#[async_trait(?Send)]
pub trait Cache: Clone + 'static {
type CacheMatchResourceType: Resource;
async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError>;
async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError>;
async fn storage_delete(
&self,
cache_name: String,
) -> Result<bool, CacheError>;
/// Put a resource into the cache.
async fn put(
&self,
request_response: CachePutRequest,
resource: Option<Rc<dyn Resource>>,
) -> Result<(), CacheError>;
async fn r#match(
&self,
request: CacheMatchRequest,
) -> Result<
Option<(CacheMatchResponseMeta, Option<Self::CacheMatchResourceType>)>,
CacheError,
>;
async fn delete(
&self,
request: CacheDeleteRequest,
) -> Result<bool, CacheError>;
}
#[derive(Clone)]
pub enum CacheImpl {
Sqlite(SqliteBackedCache),
Lsc(LscBackend),
}
#[async_trait(?Send)]
impl Cache for CacheImpl {
type CacheMatchResourceType = CacheResponseResource;
async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError> {
match self {
Self::Sqlite(cache) => cache.storage_open(cache_name).await,
Self::Lsc(cache) => cache.storage_open(cache_name).await,
}
}
async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError> {
match self {
Self::Sqlite(cache) => cache.storage_has(cache_name).await,
Self::Lsc(cache) => cache.storage_has(cache_name).await,
}
}
async fn storage_delete(
&self,
cache_name: String,
) -> Result<bool, CacheError> {
match self {
Self::Sqlite(cache) => cache.storage_delete(cache_name).await,
Self::Lsc(cache) => cache.storage_delete(cache_name).await,
}
}
async fn put(
&self,
request_response: CachePutRequest,
resource: Option<Rc<dyn Resource>>,
) -> Result<(), CacheError> {
match self {
Self::Sqlite(cache) => cache.put(request_response, resource).await,
Self::Lsc(cache) => cache.put(request_response, resource).await,
}
}
async fn r#match(
&self,
request: CacheMatchRequest,
) -> Result<
Option<(CacheMatchResponseMeta, Option<Self::CacheMatchResourceType>)>,
CacheError,
> {
match self {
Self::Sqlite(cache) => cache.r#match(request).await,
Self::Lsc(cache) => cache.r#match(request).await,
}
}
async fn delete(
&self,
request: CacheDeleteRequest,
) -> Result<bool, CacheError> {
match self {
Self::Sqlite(cache) => cache.delete(request).await,
Self::Lsc(cache) => cache.delete(request).await,
}
}
}
pub enum CacheResponseResource {
Sqlite(AsyncRefCell<tokio::fs::File>),
Lsc(AsyncRefCell<Pin<Box<dyn AsyncRead>>>),
}
impl CacheResponseResource {
fn sqlite(file: tokio::fs::File) -> Self {
Self::Sqlite(AsyncRefCell::new(file))
}
fn lsc(
body: impl Stream<Item = Result<Bytes, std::io::Error>> + 'static,
) -> Self {
Self::Lsc(AsyncRefCell::new(Box::pin(StreamReader::new(body))))
}
async fn read(
self: Rc<Self>,
data: &mut [u8],
) -> Result<usize, std::io::Error> {
let nread = match &*self {
CacheResponseResource::Sqlite(_) => {
let resource = deno_core::RcRef::map(&self, |r| match r {
Self::Sqlite(r) => r,
_ => unreachable!(),
});
let mut file = resource.borrow_mut().await;
file.read(data).await?
}
CacheResponseResource::Lsc(_) => {
let resource = deno_core::RcRef::map(&self, |r| match r {
Self::Lsc(r) => r,
_ => unreachable!(),
});
let mut file = resource.borrow_mut().await;
file.read(data).await?
}
};
Ok(nread)
}
}
impl Resource for CacheResponseResource {
deno_core::impl_readable_byob!();
fn name(&self) -> Cow<'_, str> {
"CacheResponseResource".into()
}
}
#[op2(async)]
#[number]
pub async fn op_cache_storage_open(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<i64, CacheError> {
let cache = get_cache(&state)?;
cache.storage_open(cache_name).await
}
#[op2(async)]
pub async fn op_cache_storage_has(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<bool, CacheError> {
let cache = get_cache(&state)?;
cache.storage_has(cache_name).await
}
#[op2(async)]
pub async fn op_cache_storage_delete(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<bool, CacheError> {
let cache = get_cache(&state)?;
cache.storage_delete(cache_name).await
}
#[op2(async)]
pub async fn op_cache_put(
state: Rc<RefCell<OpState>>,
#[serde] request_response: CachePutRequest,
) -> Result<(), CacheError> {
let cache = get_cache(&state)?;
let resource = match request_response.response_rid {
Some(rid) => Some(
state
.borrow_mut()
.resource_table
.take_any(rid)
.map_err(CacheError::Resource)?,
),
None => None,
};
cache.put(request_response, resource).await
}
#[op2(async)]
#[serde]
pub async fn op_cache_match(
state: Rc<RefCell<OpState>>,
#[serde] request: CacheMatchRequest,
) -> Result<Option<CacheMatchResponse>, CacheError> {
let cache = get_cache(&state)?;
match cache.r#match(request).await? {
Some((meta, None)) => Ok(Some(CacheMatchResponse(meta, None))),
Some((meta, Some(resource))) => {
let rid = state.borrow_mut().resource_table.add(resource);
Ok(Some(CacheMatchResponse(meta, Some(rid))))
}
None => Ok(None),
}
}
#[op2(async)]
pub async fn op_cache_delete(
state: Rc<RefCell<OpState>>,
#[serde] request: CacheDeleteRequest,
) -> Result<bool, CacheError> {
let cache = get_cache(&state)?;
cache.delete(request).await
}
pub fn get_cache(
state: &Rc<RefCell<OpState>>,
) -> Result<CacheImpl, CacheError> {
let mut state = state.borrow_mut();
if let Some(cache) = state.try_borrow::<CacheImpl>() {
Ok(cache.clone())
} else if let Some(create_cache) = state.try_borrow::<CreateCache>() {
let cache = create_cache.0()?;
state.put(cache);
Ok(state.borrow::<CacheImpl>().clone())
} else {
Err(CacheError::ContextUnsupported)
}
}
/// Check if headers, mentioned in the vary header, of query request
/// and cached request are equal.
pub fn vary_header_matches(
vary_header: &ByteString,
query_request_headers: &[(ByteString, ByteString)],
cached_request_headers: &[(ByteString, ByteString)],
) -> bool {
let vary_header = match std::str::from_utf8(vary_header) {
Ok(vary_header) => vary_header,
Err(_) => return false,
};
let headers = get_headers_from_vary_header(vary_header);
for header in headers {
let query_header = get_header(&header, query_request_headers);
let cached_header = get_header(&header, cached_request_headers);
if query_header != cached_header {
return false;
}
}
true
}
#[test]
fn test_vary_header_matches() {
let vary_header = ByteString::from("accept-encoding");
let query_request_headers = vec![(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
)];
let cached_request_headers = vec![(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
)];
assert!(vary_header_matches(
&vary_header,
&query_request_headers,
&cached_request_headers
));
let vary_header = ByteString::from("accept-encoding");
let query_request_headers = vec![(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
)];
let cached_request_headers =
vec![(ByteString::from("accept-encoding"), ByteString::from("br"))];
assert!(!vary_header_matches(
&vary_header,
&query_request_headers,
&cached_request_headers
));
}
/// Get headers from the vary header.
pub fn get_headers_from_vary_header(vary_header: &str) -> Vec<String> {
vary_header
.split(',')
.map(|s| s.trim().to_lowercase())
.collect()
}
#[test]
fn test_get_headers_from_vary_header() {
let headers = get_headers_from_vary_header("accept-encoding");
assert_eq!(headers, vec!["accept-encoding"]);
let headers = get_headers_from_vary_header("accept-encoding, user-agent");
assert_eq!(headers, vec!["accept-encoding", "user-agent"]);
}
/// Get value for the header with the given name.
pub fn get_header(
name: &str,
headers: &[(ByteString, ByteString)],
) -> Option<ByteString> {
headers
.iter()
.find(|(k, _)| {
if let Ok(k) = std::str::from_utf8(k) {
k.eq_ignore_ascii_case(name)
} else {
false
}
})
.map(|(_, v)| v.to_owned())
}
#[test]
fn test_get_header() {
let headers = vec![
(
ByteString::from("accept-encoding"),
ByteString::from("gzip"),
),
(
ByteString::from("content-type"),
ByteString::from("application/json"),
),
(
ByteString::from("vary"),
ByteString::from("accept-encoding"),
),
];
let value = get_header("accept-encoding", &headers);
assert_eq!(value, Some(ByteString::from("gzip")));
let value = get_header("content-type", &headers);
assert_eq!(value, Some(ByteString::from("application/json")));
let value = get_header("vary", &headers);
assert_eq!(value, Some(ByteString::from("accept-encoding")));
}
/// Serialize headers into bytes.
pub fn serialize_headers(headers: &[(ByteString, ByteString)]) -> Vec<u8> {
let mut serialized_headers = Vec::new();
for (name, value) in headers {
serialized_headers.extend_from_slice(name);
serialized_headers.extend_from_slice(b"\r\n");
serialized_headers.extend_from_slice(value);
serialized_headers.extend_from_slice(b"\r\n");
}
serialized_headers
}
/// Deserialize bytes into headers.
pub fn deserialize_headers(
serialized_headers: &[u8],
) -> Vec<(ByteString, ByteString)> {
let mut headers = Vec::new();
let mut piece = None;
let mut start = 0;
for (i, byte) in serialized_headers.iter().enumerate() {
if byte == &b'\r' && serialized_headers.get(i + 1) == Some(&b'\n') {
if piece.is_none() {
piece = Some(start..i);
} else {
let name = piece.unwrap();
let value = start..i;
headers.push((
ByteString::from(&serialized_headers[name]),
ByteString::from(&serialized_headers[value]),
));
piece = None;
}
start = i + 2;
}
}
assert!(piece.is_none());
assert_eq!(start, serialized_headers.len());
headers
}

View file

@ -1,8 +1,8 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod blob;
mod broadcast_channel;
pub mod cache;
mod compression;
mod console;
mod message_port;
@ -43,7 +43,6 @@ use crate::blob::op_blob_revoke_object_url;
use crate::blob::op_blob_slice_part;
pub use crate::broadcast_channel::BroadcastChannel;
pub use crate::broadcast_channel::InMemoryBroadcastChannel;
pub use crate::cache::CreateCache;
pub use crate::message_port::JsMessageData;
pub use crate::message_port::MessagePort;
pub use crate::message_port::Transferable;
@ -99,12 +98,6 @@ deno_core::extension!(deno_web,
stream_resource::op_readable_stream_resource_write_sync,
stream_resource::op_readable_stream_resource_close,
stream_resource::op_readable_stream_resource_await_close,
cache::op_cache_storage_open,
cache::op_cache_storage_has,
cache::op_cache_storage_delete,
cache::op_cache_put,
cache::op_cache_match,
cache::op_cache_delete,
url::op_url_reparse,
url::op_url_parse,
url::op_url_get_serialization,
@ -138,7 +131,6 @@ deno_core::extension!(deno_web,
"14_compression.js",
"15_performance.js",
"16_image_data.js",
"17_cache.js",
"00_url.js",
"01_urlpattern.js",
"01_console.js",
@ -148,7 +140,6 @@ deno_core::extension!(deno_web,
options = {
blob_store: Arc<BlobStore>,
maybe_location: Option<Url>,
maybe_create_cache: Option<CreateCache>,
bc: BC,
},
state = |state, options| {
@ -156,9 +147,6 @@ deno_core::extension!(deno_web,
if let Some(location) = options.maybe_location {
state.put(Location(location));
}
if let Some(create_cache) = options.maybe_create_cache {
state.put(create_cache);
}
state.put(StartTime::default());
state.put(options.bc);
}

View file

@ -40,6 +40,7 @@ path = "lib.rs"
[dependencies]
deno_ast = { workspace = true, optional = true }
deno_bundle_runtime.workspace = true
deno_cache.workspace = true
deno_canvas.workspace = true
deno_core.workspace = true
deno_cron.workspace = true

View file

@ -7,7 +7,7 @@ import * as timers from "ext:deno_web/02_timers.js";
import * as base64 from "ext:deno_web/05_base64.js";
import * as encoding from "ext:deno_web/08_text_encoding.js";
import * as console from "ext:deno_web/01_console.js";
import * as caches from "ext:deno_web/17_cache.js";
import * as caches from "ext:deno_cache/01_cache.js";
import * as compression from "ext:deno_web/14_compression.js";
import * as worker from "ext:runtime/11_workers.js";
import * as performance from "ext:deno_web/15_performance.js";

View file

@ -1,5 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub use deno_cache;
pub use deno_canvas;
pub use deno_core;
pub use deno_cron;

View file

@ -10,6 +10,7 @@ extension!(runtime,
deno_tls,
deno_web,
deno_fetch,
deno_cache,
deno_websocket,
deno_webstorage,
deno_crypto,

View file

@ -34,6 +34,7 @@ pub fn create_runtime_snapshot(
deno_webgpu::deno_webgpu::lazy_init(),
deno_canvas::deno_canvas::lazy_init(),
deno_fetch::deno_fetch::lazy_init::<Permissions>(),
deno_cache::deno_cache::lazy_init(),
deno_websocket::deno_websocket::lazy_init::<Permissions>(),
deno_webstorage::deno_webstorage::lazy_init(),
deno_crypto::deno_crypto::lazy_init(),

View file

@ -220,12 +220,12 @@ pub fn get_extensions_in_snapshot() -> Vec<Extension> {
deno_web::deno_web::init::<Permissions, deno_web::InMemoryBroadcastChannel>(
Default::default(),
Default::default(),
None,
deno_web::InMemoryBroadcastChannel::default(),
),
deno_webgpu::deno_webgpu::init(),
deno_canvas::deno_canvas::init(),
deno_fetch::deno_fetch::init::<Permissions>(Default::default()),
deno_cache::deno_cache::init(None),
deno_websocket::deno_websocket::init::<Permissions>(),
deno_webstorage::deno_webstorage::init(None),
deno_crypto::deno_crypto::init(None),

View file

@ -11,6 +11,9 @@ use std::sync::atomic::Ordering;
use std::task::Context;
use std::task::Poll;
use deno_cache::CacheImpl;
use deno_cache::CreateCache;
use deno_cache::SqliteBackedCache;
use deno_core::CancelHandle;
use deno_core::CompiledWasmModuleStore;
use deno_core::DetachedBuffer;
@ -52,9 +55,6 @@ use deno_web::InMemoryBroadcastChannel;
use deno_web::JsMessageData;
use deno_web::MessagePort;
use deno_web::Transferable;
use deno_web::cache::CacheImpl;
use deno_web::cache::CreateCache;
use deno_web::cache::SqliteBackedCache;
use deno_web::create_entangled_message_port;
use deno_web::serialize_transferables;
use log::debug;
@ -487,12 +487,12 @@ impl WebWorker {
if elems.len() == 2 {
let endpoint = elems[0];
let token = elems[1];
use deno_web::cache::CacheShard;
use deno_cache::CacheShard;
let shard =
Rc::new(CacheShard::new(endpoint.to_string(), token.to_string()));
let create_cache_fn = move || {
let x = deno_web::cache::LscBackend::default();
let x = deno_cache::LscBackend::default();
x.set_shard(shard.clone());
Ok(CacheImpl::Lsc(x))
@ -525,7 +525,6 @@ impl WebWorker {
deno_web::deno_web::init::<PermissionsContainer, InMemoryBroadcastChannel>(
services.blob_store,
Some(options.main_module.clone()),
create_cache,
services.broadcast_channel,
),
deno_webgpu::deno_webgpu::init(),
@ -541,6 +540,7 @@ impl WebWorker {
..Default::default()
},
),
deno_cache::deno_cache::init(create_cache),
deno_websocket::deno_websocket::init::<PermissionsContainer>(),
deno_webstorage::deno_webstorage::init(None).disable(),
deno_crypto::deno_crypto::init(options.seed),

View file

@ -10,6 +10,9 @@ use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::Instant;
use deno_cache::CacheImpl;
use deno_cache::CreateCache;
use deno_cache::SqliteBackedCache;
use deno_core::CompiledWasmModuleStore;
use deno_core::Extension;
use deno_core::InspectorSessionKind;
@ -45,10 +48,7 @@ use deno_process::NpmProcessStateProviderRc;
use deno_tls::RootCertStoreProvider;
use deno_tls::TlsKeys;
use deno_web::BlobStore;
use deno_web::CreateCache;
use deno_web::InMemoryBroadcastChannel;
use deno_web::cache::CacheImpl;
use deno_web::cache::SqliteBackedCache;
use log::debug;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmPackageFolderResolver;
@ -399,12 +399,12 @@ impl MainWorker {
if elems.len() == 2 {
let endpoint = elems[0];
let token = elems[1];
use deno_web::cache::CacheShard;
use deno_cache::CacheShard;
let shard =
Rc::new(CacheShard::new(endpoint.to_string(), token.to_string()));
let create_cache_fn = move || {
let x = deno_web::cache::LscBackend::default();
let x = deno_cache::LscBackend::default();
x.set_shard(shard.clone());
Ok(CacheImpl::Lsc(x))
@ -536,7 +536,6 @@ impl MainWorker {
>(
services.blob_store.clone(),
options.bootstrap.location.clone(),
create_cache,
services.broadcast_channel.clone(),
),
deno_fetch::deno_fetch::args::<PermissionsContainer>(
@ -551,6 +550,7 @@ impl MainWorker {
..Default::default()
},
),
deno_cache::deno_cache::args(create_cache),
deno_websocket::deno_websocket::args::<PermissionsContainer>(),
deno_webstorage::deno_webstorage::args(
options.origin_storage_dir.clone(),
@ -1058,6 +1058,7 @@ fn common_extensions<
deno_webgpu::deno_webgpu::init(),
deno_canvas::deno_canvas::init(),
deno_fetch::deno_fetch::lazy_init::<PermissionsContainer>(),
deno_cache::deno_cache::lazy_init(),
deno_websocket::deno_websocket::lazy_init::<PermissionsContainer>(),
deno_webstorage::deno_webstorage::lazy_init(),
deno_crypto::deno_crypto::lazy_init(),

View file

@ -505,6 +505,7 @@
"ext:core/mod.js": "../../deno_core/core/core.d.ts",
"ext:core/ops": "./ops.d.ts",
"ext:deno_web/01_broadcast_channel.js": "../ext/deno_web/01_broadcast_channel.js",
"ext:deno_cache/01_cache.js": "../ext/cache/01_cache.js",
"ext:deno_canvas/01_image.js": "../ext/canvas/01_image.js",
"ext:deno_web/01_console.js": "../ext/console/01_console.js",
"ext:deno_cron/01_cron.ts": "../ext/cron/01_cron.ts",