fix: bump cache sqlite dbs to v2 for WAL journal mode change (#24030)
In https://github.com/denoland/deno/pull/23955 we changed the sqlite db journal mode to WAL. This causes issues when someone runs an old version of Deno (which still uses TRUNCATE) alongside a new version, because the two journal modes fight against each other.
parent fada25b0dd
commit 94f040ac28
22 changed files with 347 additions and 263 deletions
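As a rough illustration of why bumping the cache dbs to v2 sidesteps the conflict (a sketch only; the constant and helper below are hypothetical, not Deno's actual code): if the on-disk cache file name encodes the version, an old Deno using TRUNCATE and a new Deno using WAL simply open different sqlite files instead of contending over the same one.

use std::path::{Path, PathBuf};

// Hypothetical version marker; this commit bumps the cache dbs to "v2".
const CACHE_DB_VERSION: &str = "v2";

// Build a versioned path such as <cache_dir>/node_analysis_cache_v2.sqlite so
// binaries expecting the old TRUNCATE-journal layout and binaries expecting
// the new WAL layout never open the same file.
fn versioned_cache_db_path(cache_dir: &Path, name: &str) -> PathBuf {
  cache_dir.join(format!("{}_{}.sqlite", name, CACHE_DB_VERSION))
}

fn main() {
  let path =
    versioned_cache_db_path(Path::new("/tmp/deno_cache"), "node_analysis_cache");
  println!("{}", path.display()); // /tmp/deno_cache/node_analysis_cache_v2.sqlite
}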
cli/cache/node.rs | 61 lines changed
@@ -9,15 +9,17 @@ use crate::node::CliCjsAnalysis;
 use super::cache_db::CacheDB;
 use super::cache_db::CacheDBConfiguration;
 use super::cache_db::CacheFailure;
-use super::FastInsecureHasher;
+use super::CacheDBHash;
 
 pub static NODE_ANALYSIS_CACHE_DB: CacheDBConfiguration =
   CacheDBConfiguration {
-    table_initializer: "CREATE TABLE IF NOT EXISTS cjsanalysiscache (
-      specifier TEXT PRIMARY KEY,
-      source_hash TEXT NOT NULL,
-      data TEXT NOT NULL
-    );",
+    table_initializer: concat!(
+      "CREATE TABLE IF NOT EXISTS cjsanalysiscache (",
+      "specifier TEXT PRIMARY KEY,",
+      "source_hash INTEGER NOT NULL,",
+      "data TEXT NOT NULL",
+      ");"
+    ),
     on_version_change: "DELETE FROM cjsanalysiscache;",
     preheat_queries: &[],
     on_failure: CacheFailure::InMemory,
@@ -35,10 +37,6 @@ impl NodeAnalysisCache {
     }
   }
 
-  pub fn compute_source_hash(text: &str) -> String {
-    FastInsecureHasher::hash(text).to_string()
-  }
-
   fn ensure_ok<T: Default>(res: Result<T, AnyError>) -> T {
     match res {
       Ok(x) => x,
@@ -59,7 +57,7 @@ impl NodeAnalysisCache {
   pub fn get_cjs_analysis(
     &self,
     specifier: &str,
-    expected_source_hash: &str,
+    expected_source_hash: CacheDBHash,
   ) -> Option<CliCjsAnalysis> {
     Self::ensure_ok(
       self.inner.get_cjs_analysis(specifier, expected_source_hash),
@@ -69,7 +67,7 @@ impl NodeAnalysisCache {
   pub fn set_cjs_analysis(
     &self,
     specifier: &str,
-    source_hash: &str,
+    source_hash: CacheDBHash,
     cjs_analysis: &CliCjsAnalysis,
   ) {
     Self::ensure_ok(self.inner.set_cjs_analysis(
@@ -93,7 +91,7 @@ impl NodeAnalysisCacheInner {
   pub fn get_cjs_analysis(
     &self,
     specifier: &str,
-    expected_source_hash: &str,
+    expected_source_hash: CacheDBHash,
   ) -> Result<Option<CliCjsAnalysis>, AnyError> {
     let query = "
       SELECT
@@ -106,7 +104,7 @@ impl NodeAnalysisCacheInner {
       LIMIT 1";
     let res = self.conn.query_row(
       query,
-      params![specifier, &expected_source_hash],
+      params![specifier, expected_source_hash],
       |row| {
         let analysis_info: String = row.get(0)?;
         Ok(serde_json::from_str(&analysis_info)?)
@@ -118,7 +116,7 @@ impl NodeAnalysisCacheInner {
   pub fn set_cjs_analysis(
     &self,
     specifier: &str,
-    source_hash: &str,
+    source_hash: CacheDBHash,
     cjs_analysis: &CliCjsAnalysis,
   ) -> Result<(), AnyError> {
     let sql = "
@@ -130,7 +128,7 @@ impl NodeAnalysisCacheInner {
       sql,
       params![
         specifier,
-        &source_hash.to_string(),
+        source_hash,
        &serde_json::to_string(&cjs_analysis)?,
       ],
     )?;
@@ -147,34 +145,47 @@ mod test {
     let conn = CacheDB::in_memory(&NODE_ANALYSIS_CACHE_DB, "1.0.0");
     let cache = NodeAnalysisCacheInner::new(conn);
 
-    assert!(cache.get_cjs_analysis("file.js", "2").unwrap().is_none());
+    assert!(cache
+      .get_cjs_analysis("file.js", CacheDBHash::new(2))
+      .unwrap()
+      .is_none());
     let cjs_analysis = CliCjsAnalysis::Cjs {
       exports: vec!["export1".to_string()],
       reexports: vec!["re-export1".to_string()],
     };
     cache
-      .set_cjs_analysis("file.js", "2", &cjs_analysis)
+      .set_cjs_analysis("file.js", CacheDBHash::new(2), &cjs_analysis)
       .unwrap();
-    assert!(cache.get_cjs_analysis("file.js", "3").unwrap().is_none()); // different hash
-    let actual_cjs_analysis =
-      cache.get_cjs_analysis("file.js", "2").unwrap().unwrap();
+    assert!(cache
+      .get_cjs_analysis("file.js", CacheDBHash::new(3))
+      .unwrap()
+      .is_none()); // different hash
+    let actual_cjs_analysis = cache
+      .get_cjs_analysis("file.js", CacheDBHash::new(2))
+      .unwrap()
+      .unwrap();
     assert_eq!(actual_cjs_analysis, cjs_analysis);
 
     // adding when already exists should not cause issue
     cache
-      .set_cjs_analysis("file.js", "2", &cjs_analysis)
+      .set_cjs_analysis("file.js", CacheDBHash::new(2), &cjs_analysis)
       .unwrap();
 
     // recreating with same cli version should still have it
     let conn = cache.conn.recreate_with_version("1.0.0");
    let cache = NodeAnalysisCacheInner::new(conn);
-    let actual_analysis =
-      cache.get_cjs_analysis("file.js", "2").unwrap().unwrap();
+    let actual_analysis = cache
+      .get_cjs_analysis("file.js", CacheDBHash::new(2))
+      .unwrap()
+      .unwrap();
     assert_eq!(actual_analysis, cjs_analysis);
 
     // now changing the cli version should clear it
     let conn = cache.conn.recreate_with_version("2.0.0");
     let cache = NodeAnalysisCacheInner::new(conn);
-    assert!(cache.get_cjs_analysis("file.js", "2").unwrap().is_none());
+    assert!(cache
+      .get_cjs_analysis("file.js", CacheDBHash::new(2))
+      .unwrap()
+      .is_none());
   }
 }
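Beyond the version bump, the diff above also moves source hashes from a TEXT column and &str parameters to an INTEGER column and a CacheDBHash value. A minimal sketch of that idea, using the standard library hasher rather than Deno's FastInsecureHasher (the names and details here are assumptions, not the real implementation):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for a cache db hash: a u64 that can be stored in a
// sqlite INTEGER column (as i64) instead of a TEXT column.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct CacheDbHash(u64);

impl CacheDbHash {
  // Hash arbitrary source text with a fast, non-cryptographic hasher.
  fn from_source(text: &str) -> Self {
    let mut hasher = DefaultHasher::new();
    text.hash(&mut hasher);
    Self(hasher.finish())
  }

  // Sqlite INTEGER is a signed 64-bit value, so reinterpret the bits.
  fn as_sqlite_int(self) -> i64 {
    self.0 as i64
  }
}

fn main() {
  let hash = CacheDbHash::from_source("module.exports = 1;");
  println!("source_hash = {}", hash.as_sqlite_int());
}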