chore: fix clippy warnings

Nils Koch 2025-06-13 21:20:26 +01:00
parent 1a0d4f500f
commit 2827b86917
95 changed files with 902 additions and 992 deletions


@ -21,6 +21,7 @@ impl LimboConnection {
LimboConnection { conn, io }
}
#[allow(clippy::wrong_self_convention)]
pub fn to_ptr(self) -> jlong {
Box::into_raw(Box::new(self)) as jlong
}
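A note on the allow above: clippy::wrong_self_convention expects to_* methods to take &self, but these JNI handles must consume self so ownership can cross the FFI boundary as a raw pointer, hence silencing the lint rather than renaming the Java-facing API. A minimal sketch of the pattern (Handle is a stand-in type, not the real binding):

struct Handle(u64);

impl Handle {
    #[allow(clippy::wrong_self_convention)]
    fn to_ptr(self) -> i64 {
        // Leak the box; the foreign caller now owns the allocation.
        Box::into_raw(Box::new(self)) as i64
    }
}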


@ -17,6 +17,7 @@ impl LimboDB {
LimboDB { db, io }
}
#[allow(clippy::wrong_self_convention)]
pub fn to_ptr(self) -> jlong {
Box::into_raw(Box::new(self)) as jlong
}


@ -26,6 +26,7 @@ impl LimboStatement {
LimboStatement { stmt, connection }
}
#[allow(clippy::wrong_self_convention)]
pub fn to_ptr(self) -> jlong {
Box::into_raw(Box::new(self)) as jlong
}
@ -66,7 +67,7 @@ pub extern "system" fn Java_tech_turso_core_LimboStatement_step<'local>(
match step_result {
StepResult::Row => {
let row = stmt.stmt.row().unwrap();
return match row_to_obj_array(&mut env, &row) {
return match row_to_obj_array(&mut env, row) {
Ok(row) => to_limbo_step_result(&mut env, STEP_RESULT_ID_ROW, Some(row)),
Err(e) => {
set_err_msg_and_throw_exception(&mut env, obj, LIMBO_ETC, e.to_string());
@ -114,7 +115,7 @@ fn row_to_obj_array<'local>(
env.new_object("java/lang/Double", "(D)V", &[JValue::Double(*f)])?
}
limbo_core::Value::Text(s) => env.new_string(s.as_str())?.into(),
limbo_core::Value::Blob(b) => env.byte_array_from_slice(&b.as_slice())?.into(),
limbo_core::Value::Blob(b) => env.byte_array_from_slice(b.as_slice())?.into(),
};
if let Err(e) = env.set_object_array_element(&obj_array, i as i32, obj) {
eprintln!("Error on parsing row: {:?}", e);
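The &row → row and &b.as_slice() → b.as_slice() changes are clippy::needless_borrow fixes: the extra & creates a double reference that auto-deref immediately undoes. A hypothetical reproduction:

fn takes_slice(b: &[u8]) -> usize {
    b.len()
}

fn main() {
    let v = vec![1u8, 2, 3];
    let _ = takes_slice(&v.as_slice()); // lints: &&[u8] only auto-derefs back to &[u8]
    let _ = takes_slice(v.as_slice()); // fixed
}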


@ -6,7 +6,6 @@ use std::num::NonZeroUsize;
use std::rc::Rc;
use std::sync::Arc;
use limbo_core::types::Text;
use limbo_core::{maybe_init_database_file, LimboError, StepResult};
use napi::iterator::Generator;
use napi::{bindgen_prelude::ObjectFinalize, Env, JsUnknown};
@ -529,9 +528,7 @@ fn from_js_value(value: JsUnknown) -> napi::Result<limbo_core::Value> {
}
napi::ValueType::String => {
let s = value.coerce_to_string()?;
Ok(limbo_core::Value::Text(Text::from_str(
s.into_utf8()?.as_str()?,
)))
Ok(limbo_core::Value::Text(s.into_utf8()?.as_str()?.into()))
}
napi::ValueType::Symbol
| napi::ValueType::Object


@ -1,6 +1,5 @@
use anyhow::Result;
use errors::*;
use limbo_core::types::Text;
use limbo_core::Value;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyList, PyTuple};
@ -96,17 +95,15 @@ impl Cursor {
// For DDL and DML statements,
// we need to execute the statement immediately
if stmt_is_ddl || stmt_is_dml {
loop {
match stmt.borrow_mut().step().map_err(|e| {
PyErr::new::<OperationalError, _>(format!("Step error: {:?}", e))
})? {
limbo_core::StepResult::IO => {
self.conn.io.run_once().map_err(|e| {
PyErr::new::<OperationalError, _>(format!("IO error: {:?}", e))
})?;
}
_ => break,
}
while let limbo_core::StepResult::IO = stmt
.borrow_mut()
.step()
.map_err(|e| PyErr::new::<OperationalError, _>(format!("Step error: {:?}", e)))?
{
self.conn
.io
.run_once()
.map_err(|e| PyErr::new::<OperationalError, _>(format!("IO error: {:?}", e)))?;
}
}
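This is clippy::while_let_loop: a loop whose match breaks on every arm except one collapses into a while let on that arm. A reduced sketch with stand-in types (Step mirrors limbo_core::StepResult):

enum Step { Io, Done }

fn poll(remaining: &mut u32) -> Step {
    if *remaining > 0 { *remaining -= 1; Step::Io } else { Step::Done }
}

fn main() {
    let mut pending_io = 3;
    // Before: loop { match poll(..) { Step::Io => { /* run_once */ } _ => break } }
    while let Step::Io = poll(&mut pending_io) {
        // run one IO step, then poll again
    }
    assert_eq!(pending_io, 0);
}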
@ -130,7 +127,7 @@ impl Cursor {
})? {
limbo_core::StepResult::Row => {
let row = stmt.row().unwrap();
let py_row = row_to_py(py, &row)?;
let py_row = row_to_py(py, row)?;
return Ok(Some(py_row));
}
limbo_core::StepResult::IO => {
@ -166,7 +163,7 @@ impl Cursor {
})? {
limbo_core::StepResult::Row => {
let row = stmt.row().unwrap();
let py_row = row_to_py(py, &row)?;
let py_row = row_to_py(py, row)?;
results.push(py_row);
}
limbo_core::StepResult::IO => {
@ -342,13 +339,13 @@ fn row_to_py(py: Python, row: &limbo_core::Row) -> Result<PyObject> {
/// Converts a Python object to a Limbo Value
fn py_to_owned_value(obj: &Bound<PyAny>) -> Result<limbo_core::Value> {
if obj.is_none() {
return Ok(Value::Null);
Ok(Value::Null)
} else if let Ok(integer) = obj.extract::<i64>() {
return Ok(Value::Integer(integer));
} else if let Ok(float) = obj.extract::<f64>() {
return Ok(Value::Float(float));
} else if let Ok(string) = obj.extract::<String>() {
return Ok(Value::Text(Text::from_str(string)));
return Ok(Value::Text(string.into()));
} else if let Ok(bytes) = obj.downcast::<PyBytes>() {
return Ok(Value::Blob(bytes.as_bytes().to_vec()));
} else {


@ -445,14 +445,14 @@ mod tests {
.query("SELECT data FROM test_large_persistence ORDER BY id;", ())
.await?;
for i in 0..NUM_INSERTS {
for (i, value) in original_data.iter().enumerate().take(NUM_INSERTS) {
let row = rows
.next()
.await?
.unwrap_or_else(|| panic!("Expected row {} but found None", i));
assert_eq!(
row.get_value(0)?,
Value::Text(original_data[i].clone()),
Value::Text(value.clone()),
"Mismatch in retrieved data for row {}",
i
);


@ -110,9 +110,9 @@ impl Value {
}
}
impl Into<limbo_core::Value> for Value {
fn into(self) -> limbo_core::Value {
match self {
impl From<Value> for limbo_core::Value {
fn from(val: Value) -> Self {
match val {
Value::Null => limbo_core::Value::Null,
Value::Integer(n) => limbo_core::Value::Integer(n),
Value::Real(n) => limbo_core::Value::Float(n),
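Swapping impl Into for impl From is clippy::from_over_into: From is the more general trait, the blanket impl derives Into automatically, and the value stays usable in T::from(x) position. Standalone sketch:

struct Wrapper(i64);

impl From<Wrapper> for i64 {
    fn from(val: Wrapper) -> Self {
        val.0
    }
}

fn main() {
    let n: i64 = Wrapper(7).into(); // Into comes from the blanket impl
    assert_eq!(n, 7);
}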


@ -1,8 +1,12 @@
#[cfg(all(feature = "web", feature = "nodejs"))]
compile_error!("Features 'web' and 'nodejs' cannot be enabled at the same time");
use js_sys::{Array, Object};
use limbo_core::{maybe_init_database_file, Clock, Instant, OpenFlags, Result};
use std::cell::RefCell;
use std::sync::Arc;
use wasm_bindgen::prelude::*;
#[allow(dead_code)]
#[wasm_bindgen]
pub struct Database {
@ -48,6 +52,7 @@ impl RowIterator {
}
#[wasm_bindgen]
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> JsValue {
let mut stmt = self.inner.borrow_mut();
match stmt.step() {
@ -364,10 +369,7 @@ impl limbo_core::DatabaseStorage for DatabaseFile {
}
}
#[cfg(all(feature = "web", feature = "nodejs"))]
compile_error!("Features 'web' and 'nodejs' cannot be enabled at the same time");
#[cfg(feature = "web")]
#[cfg(all(feature = "web", not(feature = "nodejs")))]
#[wasm_bindgen(module = "/web/src/web-vfs.js")]
extern "C" {
type VFS;
@ -393,7 +395,7 @@ extern "C" {
fn sync(this: &VFS, fd: i32);
}
#[cfg(feature = "nodejs")]
#[cfg(all(feature = "nodejs", not(feature = "web")))]
#[wasm_bindgen(module = "/node/src/vfs.cjs")]
extern "C" {
type VFS;
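The duplicate compile_error! guard moves to the top of the file, and the two extern blocks gain mutually exclusive cfg gates so that neither is compiled when both features are (incorrectly) enabled together. A reduced sketch of the gating pattern (vfs_backend is a hypothetical name):

#[cfg(all(feature = "web", feature = "nodejs"))]
compile_error!("Features 'web' and 'nodejs' cannot be enabled at the same time");

#[cfg(all(feature = "web", not(feature = "nodejs")))]
fn vfs_backend() -> &'static str { "web" }

#[cfg(all(feature = "nodejs", not(feature = "web")))]
fn vfs_backend() -> &'static str { "nodejs" }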


@ -509,8 +509,8 @@ impl Limbo {
if line.is_empty() {
return Ok(());
}
if line.starts_with('.') {
self.handle_dot_command(&line[1..]);
if let Some(command) = line.strip_prefix('.') {
self.handle_dot_command(command);
let _ = self.reset_line(line);
return Ok(());
}
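starts_with followed by manual [1..] slicing is clippy::manual_strip; strip_prefix performs the test and the slicing in one step, so the offset can never drift out of sync with the prefix length:

fn dispatch(line: &str) {
    if let Some(command) = line.strip_prefix('.') {
        // handle_dot_command(command) in the real CLI
        println!("dot command: {command}");
    }
}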
@ -747,7 +747,7 @@ impl Limbo {
let name = rows.get_column_name(i);
Cell::new(name)
.add_attribute(Attribute::Bold)
.fg(config.table.header_color.into_comfy_table_color())
.fg(config.table.header_color.as_comfy_table_color())
})
.collect::<Vec<_>>();
table.set_header(header);
@ -785,7 +785,7 @@ impl Limbo {
.set_alignment(alignment)
.fg(config.table.column_colors
[idx % config.table.column_colors.len()]
.into_comfy_table_color()),
.as_comfy_table_color()),
);
}
table.add_row(row);
@ -1060,10 +1060,9 @@ impl Limbo {
Ok(rl.readline(&self.prompt)?)
} else {
let mut input = String::new();
println!("");
let mut reader = std::io::stdin().lock();
if reader.read_line(&mut input)? == 0 {
return Err(ReadlineError::Eof.into());
return Err(ReadlineError::Eof);
}
// Remove trailing newline
if input.ends_with('\n') {


@ -174,7 +174,7 @@ impl<'de> Deserialize<'de> for LimboColor {
{
struct LimboColorVisitor;
impl<'de> Visitor<'de> for LimboColorVisitor {
impl Visitor<'_> for LimboColorVisitor {
type Value = LimboColor;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@ -228,7 +228,7 @@ impl Validate for LimboColor {
}
impl LimboColor {
pub fn into_comfy_table_color(&self) -> comfy_table::Color {
pub fn as_comfy_table_color(&self) -> comfy_table::Color {
match self.0 {
Color::Black => comfy_table::Color::Black,
Color::Red => comfy_table::Color::Red,
@ -247,7 +247,7 @@ impl LimboColor {
Color::Fixed(7) => comfy_table::Color::Grey,
Color::Fixed(8) => comfy_table::Color::DarkGrey,
Color::DarkGray => comfy_table::Color::AnsiValue(241),
Color::LightRed => comfy_table::Color::AnsiValue(09),
Color::LightRed => comfy_table::Color::AnsiValue(9),
Color::LightGreen => comfy_table::Color::AnsiValue(10),
Color::LightYellow => comfy_table::Color::AnsiValue(11),
Color::LightBlue => comfy_table::Color::AnsiValue(12),
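Two lints meet in this file: renaming into_comfy_table_color to as_comfy_table_color satisfies clippy::wrong_self_convention (an into_* method should consume self; one that borrows is an as_* view), and AnsiValue(09) → AnsiValue(9) fixes clippy::zero_prefixed_literal, where a leading zero reads as octal in C-family languages and is rejected as ambiguous. The naming convention in miniature (Shade is a made-up type):

struct Shade(u8);

impl Shade {
    fn as_ansi(&self) -> u8 { self.0 } // borrows: as_*
    fn into_ansi(self) -> u8 { self.0 } // consumes: into_*
}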


@ -154,7 +154,7 @@ impl<C: Parser + Send + Sync + 'static> SqlCompleter<C> {
conn,
io,
cmd: C::command().into(),
_cmd_phantom: PhantomData::default(),
_cmd_phantom: PhantomData,
}
}
@ -165,7 +165,7 @@ impl<C: Parser + Send + Sync + 'static> SqlCompleter<C> {
) -> rustyline::Result<(usize, Vec<Pair>)> {
// TODO maybe check to see if the line is empty and then just output the command names
line = &line[1..];
pos = pos - 1;
pos -= 1;
let (prefix_pos, _) = extract_word(line, pos, ESCAPE_CHAR, default_break_chars);
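Two small idiom fixes: PhantomData::default() becomes the plain PhantomData literal (clippy's default_constructed_unit_structs covers PhantomData), and pos = pos - 1 becomes pos -= 1 (clippy::assign_op_pattern). For instance:

use std::marker::PhantomData;

struct Tagged<T> {
    _marker: PhantomData<T>, // construct the marker directly, no default() call
}

fn main() {
    let _t: Tagged<u32> = Tagged { _marker: PhantomData };
    let mut pos = 5;
    pos -= 1; // rather than pos = pos - 1
    assert_eq!(pos, 4);
}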


@ -4,7 +4,7 @@ use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criteri
use limbo_core::{Database, PlatformIO, IO as _};
use pprof::criterion::{Output, PProfProfiler};
const TPC_H_PATH: &'static str = "../perf/tpc-h/TPC-H.db";
const TPC_H_PATH: &str = "../perf/tpc-h/TPC-H.db";
macro_rules! tpc_query {
($num:literal) => {


@ -61,7 +61,7 @@ pub unsafe extern "C" fn execute(
return ResultCode::Error;
}
Ok(StepResult::Done) => {
*last_insert_rowid = conn.last_insert_rowid() as i64;
*last_insert_rowid = conn.last_insert_rowid();
return ResultCode::OK;
}
Ok(StepResult::IO) => {


@ -14,7 +14,7 @@ pub struct SpinLockGuard<'a, T> {
lock: &'a SpinLock<T>,
}
impl<'a, T> Drop for SpinLockGuard<'a, T> {
impl<T> Drop for SpinLockGuard<'_, T> {
fn drop(&mut self) {
self.lock.locked.store(false, Ordering::Release);
}
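The named lifetime in impl<'a, T> Drop for SpinLockGuard<'a, T> is never referenced in the body, so it elides to '_ — this appears to be clippy::needless_lifetimes, which recent clippy versions also apply to impl headers (the Visitor and SmallVecIter impls in this commit get the same treatment). Sketch with a stand-in type:

struct Guard<'a, T> {
    value: &'a T,
}

impl<T> Drop for Guard<'_, T> {
    fn drop(&mut self) {
        // release/unlock logic would run here
    }
}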


@ -664,11 +664,7 @@ pub fn exec_timediff(values: &[Register]) -> Value {
fn format_time_duration(duration: &chrono::Duration) -> Value {
let is_negative = duration.num_seconds() < 0;
let abs_duration = if is_negative {
-duration.clone()
} else {
duration.clone()
};
let abs_duration = if is_negative { -*duration } else { *duration };
let total_seconds = abs_duration.num_seconds();
let hours = (total_seconds % 86400) / 3600;
@ -695,7 +691,7 @@ fn format_time_duration(duration: &chrono::Duration) -> Value {
millis
);
Value::build_text(&result)
Value::build_text(result)
}
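chrono::Duration is Copy, so -duration.clone() collapses to -*duration (clippy::clone_on_copy), and build_text now takes the String by value rather than through a redundant borrow. The Copy fix with a hypothetical newtype:

#[derive(Clone, Copy)]
struct Dur(i64);

impl std::ops::Neg for Dur {
    type Output = Dur;
    fn neg(self) -> Dur { Dur(-self.0) }
}

fn abs(d: &Dur) -> Dur {
    if d.0 < 0 { -*d } else { *d } // copy out of the reference; no clone() needed
}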
#[cfg(test)]
@ -839,7 +835,7 @@ mod tests {
Value::Float(f64::NAN), // NaN
Value::Float(f64::INFINITY), // Infinity
Value::Null, // Null value
Value::Blob(vec![1, 2, 3].into()), // Blob (unsupported type)
Value::Blob(vec![1, 2, 3]), // Blob (unsupported type)
// Invalid timezone tests
Value::build_text("2024-07-21T12:00:00+24:00"), // Invalid timezone offset (too large)
Value::build_text("2024-07-21T12:00:00-24:00"), // Invalid timezone offset (too small)
@ -974,7 +970,7 @@ mod tests {
Value::Float(f64::NAN), // NaN
Value::Float(f64::INFINITY), // Infinity
Value::Null, // Null value
Value::Blob(vec![1, 2, 3].into()), // Blob (unsupported type)
Value::Blob(vec![1, 2, 3]), // Blob (unsupported type)
// Invalid timezone tests
Value::build_text("2024-07-21T12:00:00+24:00"), // Invalid timezone offset (too large)
Value::build_text("2024-07-21T12:00:00-24:00"), // Invalid timezone offset (too small)


@ -36,7 +36,7 @@ pub fn exec_printf(values: &[Register]) -> crate::Result<Value> {
match value {
Value::Integer(_) => result.push_str(&format!("{}", value)),
Value::Float(_) => result.push_str(&format!("{}", value)),
_ => result.push_str("0".into()),
_ => result.push('0'),
}
args_index += 1;
}
@ -59,7 +59,7 @@ pub fn exec_printf(values: &[Register]) -> crate::Result<Value> {
match value {
Value::Float(f) => result.push_str(&format!("{:.6}", f)),
Value::Integer(i) => result.push_str(&format!("{:.6}", *i as f64)),
_ => result.push_str("0.0".into()),
_ => result.push_str("0.0"),
}
args_index += 1;
}
@ -75,7 +75,7 @@ pub fn exec_printf(values: &[Register]) -> crate::Result<Value> {
}
}
}
Ok(Value::build_text(&result))
Ok(Value::build_text(result))
}
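push_str("0".into()) carried a no-op &str-to-&str conversion and pushes a single character, so it becomes push('0') (clippy::useless_conversion plus clippy::single_char_add_str); the same one-character push_str fix recurs in the JSON tests later in this commit. For example:

fn main() {
    let mut s = String::from("[1,2");
    s.push(','); // not s.push_str(",")
    s.push(']'); // not s.push_str("]")
    assert_eq!(s, "[1,2,]");
}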
#[cfg(test)]


@ -90,7 +90,7 @@ impl JsonCacheCell {
#[cfg(test)]
pub fn lookup(&self, key: &Value) -> Option<Jsonb> {
assert_eq!(self.accessed.get(), false);
assert!(!self.accessed.get());
self.accessed.set(true);
@ -116,7 +116,7 @@ impl JsonCacheCell {
key: &Value,
value: impl Fn(&Value) -> crate::Result<Jsonb>,
) -> crate::Result<Jsonb> {
assert_eq!(self.accessed.get(), false);
assert!(!self.accessed.get());
self.accessed.set(true);
let result = unsafe {
@ -139,8 +139,7 @@ impl JsonCacheCell {
}
}
} else {
let result = value(key);
result
value(key)
}
};
self.accessed.set(false);
@ -149,7 +148,7 @@ impl JsonCacheCell {
}
pub fn clear(&mut self) {
assert_eq!(self.accessed.get(), false);
assert!(!self.accessed.get());
self.accessed.set(true);
unsafe {
let cache_ptr = self.inner.get();
@ -325,7 +324,7 @@ mod tests {
let cache_cell = JsonCacheCell::new();
// Access flag should be false initially
assert_eq!(cache_cell.accessed.get(), false);
assert!(!cache_cell.accessed.get());
// Inner cache should be None initially
unsafe {
@ -350,7 +349,7 @@ mod tests {
}
// Access flag should be reset to false
assert_eq!(cache_cell.accessed.get(), false);
assert!(!cache_cell.accessed.get());
// Insert the value using get_or_insert_with
let insert_result = cache_cell.get_or_insert_with(&key, |k| {
@ -363,7 +362,7 @@ mod tests {
assert_eq!(insert_result.unwrap(), value);
// Access flag should be reset to false
assert_eq!(cache_cell.accessed.get(), false);
assert!(!cache_cell.accessed.get());
// Lookup should now return the value
let lookup_result = cache_cell.lookup(&key);
@ -426,7 +425,7 @@ mod tests {
assert!(error_result.is_err());
// Access flag should be reset to false
assert_eq!(cache_cell.accessed.get(), false);
assert!(!cache_cell.accessed.get());
// The entry should not be cached
let lookup_result = cache_cell.lookup(&key);
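Every assert_eq!(flag.get(), false) in this file becomes assert!(!flag.get()) — clippy::bool_assert_comparison, which prefers asserting a boolean directly over comparing it to a literal:

fn main() {
    let accessed = false;
    assert!(!accessed); // preferred
    // assert_eq!(accessed, false); // what the lint flags
}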


@ -179,6 +179,7 @@ pub struct Jsonb {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[allow(clippy::enum_variant_names, clippy::upper_case_acronyms)]
pub enum ElementType {
NULL = 0,
TRUE = 1,
@ -203,7 +204,7 @@ pub enum JsonIndentation<'a> {
None,
}
impl<'a> JsonIndentation<'a> {
impl JsonIndentation<'_> {
pub fn is_pretty(&self) -> bool {
match self {
Self::Indentation(_) => true,
@ -2067,10 +2068,10 @@ impl Jsonb {
return Ok(pos);
}
return Err(PError::Message {
Err(PError::Message {
msg: "Expected null or nan".to_string(),
location: Some(pos),
});
})
}
fn write_element_header(
@ -2108,13 +2109,17 @@ impl Jsonb {
let new_len = header_bytes.len();
if new_len > old_len {
self.data.splice(
cursor + old_len..cursor + old_len,
std::iter::repeat(0).take(new_len - old_len),
);
} else if new_len < old_len {
self.data.drain(cursor + new_len..cursor + old_len);
match new_len.cmp(&old_len) {
std::cmp::Ordering::Greater => {
self.data.splice(
cursor + old_len..cursor + old_len,
std::iter::repeat(0).take(new_len - old_len),
);
}
std::cmp::Ordering::Less => {
self.data.drain(cursor + new_len..cursor + old_len);
}
std::cmp::Ordering::Equal => {}
}
for (i, &byte) in header_bytes.iter().enumerate() {
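The splice/drain branches above are restructured per clippy::comparison_chain: an if/else-if testing both > and < on the same operands becomes a single match on Ordering, which also makes the Equal case explicit. Minimal form:

use std::cmp::Ordering;

fn resize_action(new_len: usize, old_len: usize) -> &'static str {
    match new_len.cmp(&old_len) {
        Ordering::Greater => "pad with zeroes",
        Ordering::Less => "drain the excess",
        Ordering::Equal => "leave as-is",
    }
}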
@ -2365,7 +2370,7 @@ impl Jsonb {
} else {
if root_type == ElementType::OBJECT
&& root_size == 0
&& (*idx == Some(0) || *idx == None)
&& (*idx == Some(0) || idx.is_none())
&& mode.allows_insert()
{
let array = JsonbHeader::new(ElementType::ARRAY, 0).into_bytes();
@ -3093,7 +3098,7 @@ mod tests {
// Test round-trip
let reparsed = Jsonb::from_str("null").unwrap();
assert_eq!(reparsed.data[0] as u8, ElementType::NULL as u8);
assert_eq!(reparsed.data[0], ElementType::NULL as u8);
}
#[test]
@ -3110,10 +3115,10 @@ mod tests {
// Round-trip
let true_parsed = Jsonb::from_str("true").unwrap();
assert_eq!(true_parsed.data[0] as u8, ElementType::TRUE as u8);
assert_eq!(true_parsed.data[0], ElementType::TRUE as u8);
let false_parsed = Jsonb::from_str("false").unwrap();
assert_eq!(false_parsed.data[0] as u8, ElementType::FALSE as u8);
assert_eq!(false_parsed.data[0], ElementType::FALSE as u8);
}
#[test]
@ -3398,12 +3403,12 @@ world""#,
// Create a JSON string that exceeds MAX_JSON_DEPTH
let mut deep_json = String::from("[");
for _ in 0..MAX_JSON_DEPTH + 1 {
deep_json.push_str("[");
deep_json.push('[');
}
for _ in 0..MAX_JSON_DEPTH + 1 {
deep_json.push_str("]");
deep_json.push(']');
}
deep_json.push_str("]");
deep_json.push(']');
// Should fail due to exceeding depth limit
assert!(Jsonb::from_str(&deep_json).is_err());
@ -3559,10 +3564,10 @@ world""#,
for i in 0..1000 {
large_array.push_str(&format!("{}", i));
if i < 999 {
large_array.push_str(",");
large_array.push(',');
}
}
large_array.push_str("]");
large_array.push(']');
let parsed = Jsonb::from_str(&large_array).unwrap();
assert!(parsed.to_string().unwrap().starts_with("[0,1,2,"));


@ -208,13 +208,13 @@ pub fn json_set(args: &[Register], json_cache: &JsonCacheCell) -> crate::Result<
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
let other = args[1..].chunks_exact(2);
for chunk in other {
let path = json_path_from_owned_value(&chunk[0].get_owned_value(), true)?;
let path = json_path_from_owned_value(chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
let mut op = SetOperation::new(value);
if let Some(path) = path {
let _ = json.operate_on_path(&path, &mut op);
@ -232,13 +232,13 @@ pub fn jsonb_set(args: &[Register], json_cache: &JsonCacheCell) -> crate::Result
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
let other = args[1..].chunks_exact(2);
for chunk in other {
let path = json_path_from_owned_value(&chunk[0].get_owned_value(), true)?;
let path = json_path_from_owned_value(chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
let mut op = SetOperation::new(value);
if let Some(path) = path {
let _ = json.operate_on_path(&path, &mut op);
@ -360,7 +360,7 @@ pub fn jsonb_extract(
fn jsonb_extract_internal(value: Jsonb, paths: &[Register]) -> crate::Result<(Jsonb, ElementType)> {
let null = Jsonb::from_raw_data(JsonbHeader::make_null().into_bytes().as_bytes());
if paths.len() == 1 {
if let Some(path) = json_path_from_owned_value(&paths[0].get_owned_value(), true)? {
if let Some(path) = json_path_from_owned_value(paths[0].get_owned_value(), true)? {
let mut json = value;
let mut op = SearchOperation::new(json.len());
@ -559,9 +559,9 @@ pub fn json_object(values: &[Register]) -> crate::Result<Value> {
if chunk[0].get_owned_value().value_type() != ValueType::Text {
bail_constraint_error!("json_object() labels must be TEXT")
}
let key = convert_dbtype_to_jsonb(&chunk[0].get_owned_value(), Conv::ToString)?;
let key = convert_dbtype_to_jsonb(chunk[0].get_owned_value(), Conv::ToString)?;
json.append_jsonb_to_end(key.data());
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
json.append_jsonb_to_end(value.data());
}
@ -580,9 +580,9 @@ pub fn jsonb_object(values: &[Register]) -> crate::Result<Value> {
if chunk[0].get_owned_value().value_type() != ValueType::Text {
bail_constraint_error!("json_object() labels must be TEXT")
}
let key = convert_dbtype_to_jsonb(&chunk[0].get_owned_value(), Conv::ToString)?;
let key = convert_dbtype_to_jsonb(chunk[0].get_owned_value(), Conv::ToString)?;
json.append_jsonb_to_end(key.data());
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
json.append_jsonb_to_end(value.data());
}
@ -624,7 +624,7 @@ pub fn json_quote(value: &Value) -> crate::Result<Value> {
}
escaped_value.push('"');
Ok(Value::build_text(&escaped_value))
Ok(Value::build_text(escaped_value))
}
// Numbers are unquoted in json
Value::Integer(ref int) => Ok(Value::Integer(int.to_owned())),


@ -54,7 +54,7 @@ pub fn json_remove(args: &[Register], json_cache: &JsonCacheCell) -> crate::Resu
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
for arg in &args[1..] {
if let Some(path) = json_path_from_owned_value(arg.get_owned_value(), true)? {
let mut op = DeleteOperation::new();
@ -73,7 +73,7 @@ pub fn jsonb_remove(args: &[Register], json_cache: &JsonCacheCell) -> crate::Res
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
for arg in &args[1..] {
if let Some(path) = json_path_from_owned_value(arg.get_owned_value(), true)? {
let mut op = DeleteOperation::new();
@ -90,12 +90,12 @@ pub fn json_replace(args: &[Register], json_cache: &JsonCacheCell) -> crate::Res
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
let other = args[1..].chunks_exact(2);
for chunk in other {
let path = json_path_from_owned_value(&chunk[0].get_owned_value(), true)?;
let path = json_path_from_owned_value(chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
if let Some(path) = path {
let mut op = ReplaceOperation::new(value);
@ -114,11 +114,11 @@ pub fn jsonb_replace(args: &[Register], json_cache: &JsonCacheCell) -> crate::Re
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
let other = args[1..].chunks_exact(2);
for chunk in other {
let path = json_path_from_owned_value(&chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let path = json_path_from_owned_value(chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
if let Some(path) = path {
let mut op = ReplaceOperation::new(value);
@ -137,11 +137,11 @@ pub fn json_insert(args: &[Register], json_cache: &JsonCacheCell) -> crate::Resu
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
let other = args[1..].chunks_exact(2);
for chunk in other {
let path = json_path_from_owned_value(&chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let path = json_path_from_owned_value(chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
if let Some(path) = path {
let mut op = InsertOperation::new(value);
@ -160,11 +160,11 @@ pub fn jsonb_insert(args: &[Register], json_cache: &JsonCacheCell) -> crate::Res
}
let make_jsonb_fn = curry_convert_dbtype_to_jsonb(Conv::Strict);
let mut json = json_cache.get_or_insert_with(&args[0].get_owned_value(), make_jsonb_fn)?;
let mut json = json_cache.get_or_insert_with(args[0].get_owned_value(), make_jsonb_fn)?;
let other = args[1..].chunks_exact(2);
for chunk in other {
let path = json_path_from_owned_value(&chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(&chunk[1].get_owned_value(), Conv::NotStrict)?;
let path = json_path_from_owned_value(chunk[0].get_owned_value(), true)?;
let value = convert_dbtype_to_jsonb(chunk[1].get_owned_value(), Conv::NotStrict)?;
if let Some(path) = path {
let mut op = InsertOperation::new(value);
@ -184,7 +184,7 @@ mod tests {
use super::*;
fn create_text(s: &str) -> Value {
Value::Text(Text::from_str(s))
Value::Text(s.into())
}
fn create_json(s: &str) -> Value {


@ -168,6 +168,7 @@ fn handle_after_root(
}
}
#[allow(clippy::too_many_arguments)]
fn handle_in_key<'a>(
ch: (usize, char),
parser_state: &mut PPState,


@ -237,7 +237,7 @@ impl Database {
mv_transactions: RefCell::new(Vec::new()),
transaction_state: Cell::new(TransactionState::None),
last_change: Cell::new(0),
syms: RefCell::new(SymbolTable::new()),
syms: RefCell::new(SymbolTable::default()),
total_changes: Cell::new(0),
_shared_cache: false,
cache_size: Cell::new(self.header.lock().default_page_cache_size),
@ -375,7 +375,7 @@ impl Connection {
self.clone(),
&syms,
QueryMode::Normal,
&input,
input,
)?);
Ok(Statement::new(
program,
@ -494,7 +494,7 @@ impl Connection {
self.clone(),
&syms,
QueryMode::Explain,
&input,
input,
)?;
let _ = std::io::stdout().write_all(program.explain().as_bytes());
}
@ -511,7 +511,7 @@ impl Connection {
self.clone(),
&syms,
QueryMode::Normal,
&input,
input,
)?;
let mut state =
@ -648,12 +648,7 @@ impl Connection {
loop {
match stmt.step()? {
vdbe::StepResult::Row => {
let row: Vec<Value> = stmt
.row()
.unwrap()
.get_values()
.map(|v| v.clone())
.collect();
let row: Vec<Value> = stmt.row().unwrap().get_values().cloned().collect();
results.push(row);
}
vdbe::StepResult::Interrupt | vdbe::StepResult::Busy => {
@ -681,12 +676,7 @@ impl Connection {
loop {
match stmt.step()? {
vdbe::StepResult::Row => {
let row: Vec<Value> = stmt
.row()
.unwrap()
.get_values()
.map(|v| v.clone())
.collect();
let row: Vec<Value> = stmt.row().unwrap().get_values().cloned().collect();
results.push(row);
}
vdbe::StepResult::Interrupt | vdbe::StepResult::Busy => {
@ -716,12 +706,7 @@ impl Connection {
loop {
match stmt.step()? {
vdbe::StepResult::Row => {
let row: Vec<Value> = stmt
.row()
.unwrap()
.get_values()
.map(|v| v.clone())
.collect();
let row: Vec<Value> = stmt.row().unwrap().get_values().cloned().collect();
results.push(row);
}
vdbe::StepResult::Interrupt | vdbe::StepResult::Busy => {
@ -815,6 +800,7 @@ pub type Row = vdbe::Row;
pub type StepResult = vdbe::StepResult;
#[derive(Default)]
pub struct SymbolTable {
pub functions: HashMap<String, Rc<function::ExternalFunc>>,
pub vtabs: HashMap<String, Rc<VirtualTable>>,
@ -857,14 +843,6 @@ pub fn resolve_ext_path(extpath: &str) -> Result<std::path::PathBuf> {
}
impl SymbolTable {
pub fn new() -> Self {
Self {
functions: HashMap::new(),
vtabs: HashMap::new(),
vtab_modules: HashMap::new(),
}
}
pub fn resolve_function(
&self,
name: &str,
@ -903,7 +881,7 @@ impl Iterator for QueryRunner<'_> {
.unwrap()
.trim();
self.last_offset = byte_offset_end;
Some(self.conn.run_cmd(cmd, &input))
Some(self.conn.run_cmd(cmd, input))
}
Ok(None) => None,
Err(err) => {


@ -387,8 +387,7 @@ impl<Clock: LogicalClock> MvStore<Clock> {
if let Some(rv) = row_versions
.iter()
.rev()
.filter(|rv| rv.is_visible_to(&tx, &self.txs))
.next()
.find(|rv| rv.is_visible_to(&tx, &self.txs))
{
tx.insert_to_read_set(id);
return Ok(Some(rv.row.clone()));
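.filter(p).next() is clippy::filter_next — find expresses "first match" directly and short-circuits the same way:

fn main() {
    let v = [1, 2, 3, 4];
    // equivalent to v.iter().filter(|&&x| x % 2 == 0).next()
    assert_eq!(v.iter().find(|&&x| x % 2 == 0), Some(&2));
}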


@ -721,7 +721,7 @@ fn test_lazy_scan_cursor_basic() {
assert_eq!(count, 5);
// After the last row, is_empty should return true
assert!(cursor.forward() == false);
assert!(!cursor.forward());
assert!(cursor.is_empty());
}
@ -840,7 +840,7 @@ fn test_scan_cursor_basic() {
assert_eq!(count, 5);
// After the last row, is_empty should return true
assert!(cursor.forward() == false);
assert!(!cursor.forward());
assert!(cursor.is_empty());
}


@ -413,7 +413,7 @@ impl std::ops::Mul for DoubleDouble {
impl std::ops::MulAssign for DoubleDouble {
fn mul_assign(&mut self, rhs: Self) {
*self = self.clone() * rhs;
*self = *self * rhs;
}
}
@ -539,7 +539,7 @@ pub fn str_to_f64(input: impl AsRef<str>) -> Option<StrToF64> {
if exponent > 0 {
while exponent >= 100 {
exponent -= 100;
result *= DoubleDouble(1.0e+100, -1.5902891109759918046e+83);
result *= DoubleDouble(1.0e+100, -1.590_289_110_975_991_8e83);
}
while exponent >= 10 {
exponent -= 10;
@ -552,15 +552,15 @@ pub fn str_to_f64(input: impl AsRef<str>) -> Option<StrToF64> {
} else {
while exponent <= -100 {
exponent += 100;
result *= DoubleDouble(1.0e-100, -1.99918998026028836196e-117);
result *= DoubleDouble(1.0e-100, -1.999_189_980_260_288_3e-117);
}
while exponent <= -10 {
exponent += 10;
result *= DoubleDouble(1.0e-10, -3.6432197315497741579e-27);
result *= DoubleDouble(1.0e-10, -3.643_219_731_549_774e-27);
}
while exponent <= -1 {
exponent += 1;
result *= DoubleDouble(1.0e-01, -5.5511151231257827021e-18);
result *= DoubleDouble(1.0e-01, -5.551_115_123_125_783e-18);
}
}
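Besides another clone_on_copy fix (self.clone() * rhs → *self * rhs), the constant rewrites are clippy::excessive_precision: the old literals carried more decimal digits than an f64 can represent, so clippy truncates them to the shortest spelling of the same value, with underscore grouping for readability. The bit pattern should be unchanged:

fn main() {
    // Both spellings should parse to the identical f64.
    assert_eq!(1.5902891109759918046e+83_f64, 1.590_289_110_975_991_8e83_f64);
}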


@ -1,14 +1,11 @@
use crate::types::ImmutableRecord;
#[derive(Default)]
pub struct PseudoCursor {
current: Option<ImmutableRecord>,
}
impl PseudoCursor {
pub fn new() -> Self {
Self { current: None }
}
pub fn record(&self) -> Option<&ImmutableRecord> {
self.current.as_ref()
}
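The hand-written new() that filled every field with its default is replaced by #[derive(Default)], the shape clippy::new_without_default nudges toward. Sketch (String stands in for ImmutableRecord):

#[derive(Default)]
struct PseudoCursor {
    current: Option<String>,
}

fn main() {
    let c = PseudoCursor::default(); // replaces the hand-written new()
    assert!(c.current.is_none());
}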


@ -67,7 +67,7 @@ impl Schema {
pub fn get_table(&self, name: &str) -> Option<Arc<Table>> {
let name = normalize_ident(name);
let name = if name.eq_ignore_ascii_case(&SCHEMA_TABLE_NAME_ALT) {
let name = if name.eq_ignore_ascii_case(SCHEMA_TABLE_NAME_ALT) {
SCHEMA_TABLE_NAME
} else {
&name
@ -500,7 +500,7 @@ fn create_table(
} => {
primary_key = true;
if let Some(o) = o {
order = o.clone();
order = *o;
}
}
limbo_sqlite3_parser::ast::ColumnConstraint::NotNull { .. } => {
@ -859,7 +859,7 @@ impl Affinity {
}
}
pub fn to_char_code(&self) -> u8 {
pub fn as_char_code(&self) -> u8 {
self.aff_mask() as u8
}
@ -1168,7 +1168,7 @@ impl Index {
.all(|col| set.contains(col))
{
// skip unique columns that are satisfied with pk constraint
return false;
false
} else {
true
}
@ -1463,7 +1463,7 @@ mod tests {
let sql = r#"CREATE TABLE t1 (a INTEGER NOT NULL);"#;
let table = BTreeTable::from_sql(sql, 0)?;
let column = table.get_column("a").unwrap().1;
assert_eq!(column.notnull, true);
assert!(column.notnull);
Ok(())
}
@ -1472,7 +1472,7 @@ mod tests {
let sql = r#"CREATE TABLE t1 (a INTEGER);"#;
let table = BTreeTable::from_sql(sql, 0)?;
let column = table.get_column("a").unwrap().1;
assert_eq!(column.notnull, false);
assert!(!column.notnull);
Ok(())
}


@ -580,7 +580,7 @@ impl BTreeCursor {
return None;
}
let rowid = match self.get_immutable_record().as_ref().unwrap().last_value() {
Some(RefValue::Integer(rowid)) => *rowid as i64,
Some(RefValue::Integer(rowid)) => *rowid,
_ => unreachable!(
"index where has_rowid() is true should have an integer rowid as the last value"
),
@ -929,8 +929,8 @@ impl BTreeCursor {
CursorState::ReadWritePayload(PayloadOverflowWithOffset::SkipOverflowPages {
next_page: first_overflow_page.unwrap(),
pages_left_to_skip: pages_to_skip,
page_offset: page_offset,
amount: amount,
page_offset,
amount,
buffer_offset: bytes_processed as usize,
is_write,
});
@ -964,7 +964,7 @@ impl BTreeCursor {
CursorState::ReadWritePayload(PayloadOverflowWithOffset::ProcessPage {
next_page: *next_page,
remaining_to_read: *amount,
page: page,
page,
current_offset: *page_offset as usize,
buffer_offset: *buffer_offset,
is_write: *is_write,
@ -1102,7 +1102,7 @@ impl BTreeCursor {
payload_offset: u32,
num_bytes: u32,
payload: &[u8],
buffer: &mut Vec<u8>,
buffer: &mut [u8],
page: BTreePage,
) {
page.get().set_dirty();
@ -1353,9 +1353,8 @@ impl BTreeCursor {
let max = max_cell_idx.get();
if min > max {
if let Some(nearest_matching_cell) = nearest_matching_cell.get() {
let left_child_page = contents.cell_table_interior_read_left_child_page(
nearest_matching_cell as usize,
)?;
let left_child_page = contents
.cell_table_interior_read_left_child_page(nearest_matching_cell)?;
self.stack.set_cell_index(nearest_matching_cell as i32);
let mem_page = self.read_page(left_child_page as usize)?;
self.stack.push(mem_page);
@ -1737,19 +1736,17 @@ impl BTreeCursor {
min_cell_idx.set(cur_cell_idx + 1);
}
}
} else if cmp.is_gt() {
max_cell_idx.set(cur_cell_idx - 1);
} else if cmp.is_lt() {
min_cell_idx.set(cur_cell_idx + 1);
} else {
if cmp.is_gt() {
max_cell_idx.set(cur_cell_idx - 1);
} else if cmp.is_lt() {
min_cell_idx.set(cur_cell_idx + 1);
} else {
match iter_dir {
IterationDirection::Forwards => {
min_cell_idx.set(cur_cell_idx + 1);
}
IterationDirection::Backwards => {
max_cell_idx.set(cur_cell_idx - 1);
}
match iter_dir {
IterationDirection::Forwards => {
min_cell_idx.set(cur_cell_idx + 1);
}
IterationDirection::Backwards => {
max_cell_idx.set(cur_cell_idx - 1);
}
}
}
@ -1964,19 +1961,17 @@ impl BTreeCursor {
min_cell_idx.set(cur_cell_idx + 1);
}
}
} else if cmp.is_gt() {
max_cell_idx.set(cur_cell_idx - 1);
} else if cmp.is_lt() {
min_cell_idx.set(cur_cell_idx + 1);
} else {
if cmp.is_gt() {
max_cell_idx.set(cur_cell_idx - 1);
} else if cmp.is_lt() {
min_cell_idx.set(cur_cell_idx + 1);
} else {
match iter_dir {
IterationDirection::Forwards => {
min_cell_idx.set(cur_cell_idx + 1);
}
IterationDirection::Backwards => {
max_cell_idx.set(cur_cell_idx - 1);
}
match iter_dir {
IterationDirection::Forwards => {
min_cell_idx.set(cur_cell_idx + 1);
}
IterationDirection::Backwards => {
max_cell_idx.set(cur_cell_idx - 1);
}
}
}
@ -2744,7 +2739,7 @@ impl BTreeCursor {
} else {
size_of_cell_to_remove_from_left
};
new_page_sizes[i + 1] += size_of_cell_to_move_right as i64;
new_page_sizes[i + 1] += size_of_cell_to_move_right;
cell_array.number_of_cells_per_page[i] -= 1;
}
@ -2967,7 +2962,7 @@ impl BTreeCursor {
if !is_leaf_page {
// Interior
// Make this page's rightmost pointer point to pointer of divider cell before modification
let previous_pointer_divider = read_u32(&divider_cell, 0);
let previous_pointer_divider = read_u32(divider_cell, 0);
page.get()
.get_contents()
.write_u32(offset::BTREE_RIGHTMOST_PTR, previous_pointer_divider);
@ -2992,7 +2987,7 @@ impl BTreeCursor {
let (rowid, _) = read_varint(&divider_cell[n_bytes_payload..])?;
new_divider_cell
.extend_from_slice(&(page.get().get().id as u32).to_be_bytes());
write_varint_to_vec(rowid as u64, &mut new_divider_cell);
write_varint_to_vec(rowid, &mut new_divider_cell);
} else {
// Leaf index
new_divider_cell
@ -3225,7 +3220,7 @@ impl BTreeCursor {
i: usize,
page: &std::sync::Arc<crate::Page>,
) {
let left_pointer = if parent_contents.overflow_cells.len() == 0 {
let left_pointer = if parent_contents.overflow_cells.is_empty() {
let (cell_start, cell_len) = parent_contents.cell_get_raw_region(
balance_info.first_divider_cell + i,
payload_overflow_threshold_max(
@ -3265,6 +3260,7 @@ impl BTreeCursor {
}
#[cfg(debug_assertions)]
#[allow(clippy::too_many_arguments)]
fn post_balance_non_root_validation(
&self,
parent_page: &BTreePage,
@ -3420,7 +3416,7 @@ impl BTreeCursor {
let rightmost = read_u32(rightmost_pointer, 0);
debug_validate_cells!(parent_contents, self.usable_space() as u16);
if !pages_to_balance_new[0].is_some() {
if pages_to_balance_new[0].is_none() {
tracing::error!(
"balance_non_root(balance_shallower_incorrect_page, page_idx={})",
0
@ -3428,8 +3424,13 @@ impl BTreeCursor {
valid = false;
}
for i in 1..sibling_count_new {
if pages_to_balance_new[i].is_some() {
for (i, value) in pages_to_balance_new
.iter()
.enumerate()
.take(sibling_count_new)
.skip(1)
{
if value.is_some() {
tracing::error!(
"balance_non_root(balance_shallower_incorrect_page, page_idx={})",
i
@ -3490,7 +3491,9 @@ impl BTreeCursor {
valid = false
}
for parent_cell_idx in 0..contents.cell_count() {
for (parent_cell_idx, cell_buf_in_array) in
cells_debug.iter().enumerate().take(contents.cell_count())
{
let (parent_cell_start, parent_cell_len) = parent_contents.cell_get_raw_region(
parent_cell_idx,
payload_overflow_threshold_max(
@ -3522,7 +3525,6 @@ impl BTreeCursor {
let parent_cell_buf = to_static_buf(
&mut parent_buf[parent_cell_start..parent_cell_start + parent_cell_len],
);
let cell_buf_in_array = &cells_debug[parent_cell_idx];
if cell_buf != cell_buf_in_array || cell_buf != parent_cell_buf {
tracing::error!("balance_non_root(balance_shallower_cell_not_found_debug, page_id={}, cell_in_cell_array_idx={})",
@ -4225,11 +4227,9 @@ impl BTreeCursor {
return Ok(CursorResult::Ok(()));
}
};
} else {
if self.reusable_immutable_record.borrow().is_none() {
self.state = CursorState::None;
return Ok(CursorResult::Ok(()));
}
} else if self.reusable_immutable_record.borrow().is_none() {
self.state = CursorState::None;
return Ok(CursorResult::Ok(()));
}
let delete_info = self.state.mut_delete_info().unwrap();
@ -4934,7 +4934,7 @@ impl BTreeCursor {
return_if_locked!(page_ref.get());
let page_ref = page_ref.get();
let buf = page_ref.get().contents.as_mut().unwrap().as_ptr();
buf[dest_offset..dest_offset + new_payload.len()].copy_from_slice(&new_payload);
buf[dest_offset..dest_offset + new_payload.len()].copy_from_slice(new_payload);
Ok(CursorResult::Ok(()))
}
@ -4952,10 +4952,7 @@ impl BTreeCursor {
}
pub fn is_write_in_progress(&self) -> bool {
match self.state {
CursorState::Write(_) => true,
_ => false,
}
matches!(self.state, CursorState::Write(_))
}
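A two-arm match returning true/false is clippy::match_like_matches_macro; matches! keeps the pattern and drops the boilerplate:

enum State { Write(u8), Idle }

fn is_write(s: &State) -> bool {
    matches!(s, State::Write(_))
}

fn main() {
    assert!(is_write(&State::Write(1)));
    assert!(!is_write(&State::Idle));
}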
/// Count the number of entries in the b-tree
@ -6503,7 +6500,6 @@ mod tests {
storage::{
database::DatabaseFile,
page_cache::DumbLruPageCache,
pager::CreateBTreeFlags,
sqlite3_ondisk::{self, DatabaseHeader},
},
types::Text,
@ -7098,6 +7094,8 @@ mod tests {
#[cfg(feature = "index_experimental")]
fn btree_index_insert_fuzz_run(attempts: usize, inserts: usize) {
use crate::storage::pager::CreateBTreeFlags;
let (mut rng, seed) = if std::env::var("SEED").is_ok() {
let seed = std::env::var("SEED").unwrap();
let seed = seed.parse::<u64>().unwrap();
@ -7825,7 +7823,7 @@ mod tests {
// allow appends with extra place to insert
let cell_idx = rng.next_u64() as usize % (page.cell_count() + 1);
let free = compute_free_space(page, usable_space);
let regs = &[Register::Value(Value::Integer(i as i64))];
let regs = &[Register::Value(Value::Integer(i))];
let record = ImmutableRecord::from_registers(regs, regs.len());
let mut payload: Vec<u8> = Vec::new();
fill_cell_payload(
@ -8339,9 +8337,8 @@ mod tests {
.unwrap();
}
match validate_btree(pager.clone(), root_page) {
(_, false) => panic!("Invalid B-tree after insertion"),
_ => {}
if let (_, false) = validate_btree(pager.clone(), root_page) {
panic!("Invalid B-tree after insertion");
}
// Delete records with 500 <= key <= 3500
@ -8362,7 +8359,7 @@ mod tests {
// Verify that records with key < 500 and key > 3500 still exist in the BTree.
for i in 1..=10000 {
if i >= 500 && i <= 3500 {
if (500..=3500).contains(&i) {
continue;
}
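i >= 500 && i <= 3500 becomes (500..=3500).contains(&i) — clippy::manual_range_contains; the range form states each bound once and cannot mix up comparison directions (the WAL page-size check later in the commit gets the same rewrite):

fn in_deleted_window(i: i64) -> bool {
    (500..=3500).contains(&i)
}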
@ -8395,11 +8392,11 @@ mod tests {
let (pager, root_page) = empty_btree();
for i in 0..iterations {
for (i, huge_text) in huge_texts.iter().enumerate().take(iterations) {
let mut cursor = BTreeCursor::new_table(None, pager.clone(), root_page);
tracing::info!("INSERT INTO t VALUES ({});", i,);
let regs = &[Register::Value(Value::Text(Text {
value: huge_texts[i].as_bytes().to_vec(),
value: huge_text.as_bytes().to_vec(),
subtype: crate::types::TextSubtype::Text,
}))];
let value = ImmutableRecord::from_registers(regs, regs.len());
@ -8674,7 +8671,7 @@ mod tests {
let removed = page_free_array(
contents,
start,
size as usize,
size,
&cell_array,
pager.usable_space() as u16,
)


@ -514,12 +514,12 @@ impl PageHashMap {
}
pub fn contains_key(&self, key: &PageCacheKey) -> bool {
let bucket = self.hash(&key);
let bucket = self.hash(key);
self.buckets[bucket].iter().any(|node| node.key == *key)
}
pub fn get(&self, key: &PageCacheKey) -> Option<&NonNull<PageCacheEntry>> {
let bucket = self.hash(&key);
let bucket = self.hash(key);
let bucket = &self.buckets[bucket];
let mut idx = 0;
while let Some(node) = bucket.get(idx) {
@ -532,7 +532,7 @@ impl PageHashMap {
}
pub fn remove(&mut self, key: &PageCacheKey) -> Option<NonNull<PageCacheEntry>> {
let bucket = self.hash(&key);
let bucket = self.hash(key);
let bucket = &mut self.buckets[bucket];
let mut idx = 0;
while let Some(node) = bucket.get(idx) {
@ -996,7 +996,7 @@ mod tests {
let key = PageCacheKey::new(id_page as usize);
#[allow(clippy::arc_with_non_send_sync)]
let page = Arc::new(Page::new(id_page as usize));
if let Some(_) = cache.peek(&key, false) {
if cache.peek(&key, false).is_some() {
continue; // skip duplicate page ids
}
tracing::debug!("inserting page {:?}", key);
@ -1017,11 +1017,11 @@ mod tests {
let random = rng.next_u64() % 2 == 0;
let key = if random || lru.is_empty() {
let id_page: u64 = rng.next_u64() % max_pages;
let key = PageCacheKey::new(id_page as usize);
key
PageCacheKey::new(id_page as usize)
} else {
let i = rng.next_u64() as usize % lru.len();
let key: PageCacheKey = lru.iter().skip(i).next().unwrap().0.clone();
let key: PageCacheKey = lru.iter().nth(i).unwrap().0.clone();
key
};
tracing::debug!("removing page {:?}", key);
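Two lints in this test: iter().skip(i).next() is clippy::iter_skip_next (nth(i) is the direct form), and binding a value only to return it is clippy::let_and_return. For example:

fn main() {
    let v = [10, 20, 30];
    assert_eq!(v.iter().nth(1), Some(&20)); // replaces v.iter().skip(1).next()
}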
@ -1038,7 +1038,7 @@ mod tests {
cache.verify_list_integrity();
for (key, page) in &lru {
println!("getting page {:?}", key);
cache.peek(&key, false).unwrap();
cache.peek(key, false).unwrap();
assert_eq!(page.get().id, key.pgno);
}
}


@ -449,7 +449,7 @@ impl Pager {
{
let page = self.do_allocate_page(page_type, 0, BtreePageAllocMode::Any);
let page_id = page.get().get().id;
return Ok(CursorResult::Ok(page_id as u32));
Ok(CursorResult::Ok(page_id as u32))
}
// If autovacuum is enabled, we need to allocate a new page number that is greater than the largest root page number
@ -460,7 +460,7 @@ impl Pager {
AutoVacuumMode::None => {
let page = self.do_allocate_page(page_type, 0, BtreePageAllocMode::Any);
let page_id = page.get().get().id;
return Ok(CursorResult::Ok(page_id as u32));
Ok(CursorResult::Ok(page_id as u32))
}
AutoVacuumMode::Full => {
let mut root_page_num = self.db_header.lock().vacuum_mode_largest_root_page;
@ -560,14 +560,14 @@ impl Pager {
pub fn end_tx(&self) -> Result<PagerCacheflushStatus> {
let cacheflush_status = self.cacheflush()?;
return match cacheflush_status {
match cacheflush_status {
PagerCacheflushStatus::IO => Ok(PagerCacheflushStatus::IO),
PagerCacheflushStatus::Done(_) => {
self.wal.borrow().end_write_tx()?;
self.wal.borrow().end_read_tx()?;
Ok(cacheflush_status)
}
};
}
}
pub fn end_read_tx(&self) -> Result<()> {
@ -645,7 +645,7 @@ impl Pager {
self.add_dirty(DATABASE_HEADER_PAGE_ID);
let contents = header_page.get().contents.as_ref().unwrap();
contents.write_database_header(&header);
contents.write_database_header(header);
Ok(())
}
@ -756,12 +756,12 @@ impl Pager {
frame_len: u32,
) -> Result<Arc<Completion>> {
let wal = self.wal.borrow();
return wal.read_frame_raw(
wal.read_frame_raw(
frame_no.into(),
self.buffer_pool.clone(),
p_frame,
frame_len,
);
)
}
pub fn checkpoint(&self) -> Result<CheckpointStatus> {
@ -978,7 +978,7 @@ impl Pager {
}
// update database size
self.write_database_header(&mut header)?;
self.write_database_header(&header)?;
// FIXME: should reserve page cache entry before modifying the database
let page = allocate_page(header.database_size as usize, &self.buffer_pool, 0);
@ -1190,7 +1190,7 @@ mod ptrmap {
if db_page_no == FIRST_PTRMAP_PAGE_NO {
return true;
}
return get_ptrmap_page_no_for_db_page(db_page_no, page_size) == db_page_no;
get_ptrmap_page_no_for_db_page(db_page_no, page_size) == db_page_no
}
/// Calculates which pointer map page (1-indexed) contains the entry for `db_page_no_to_query` (1-indexed).


@ -1095,14 +1095,13 @@ pub struct SmallVecIter<'a, T, const N: usize> {
pos: usize,
}
impl<'a, T: Default + Copy, const N: usize> Iterator for SmallVecIter<'a, T, N> {
impl<T: Default + Copy, const N: usize> Iterator for SmallVecIter<'_, T, N> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
self.vec.get(self.pos).map(|item| {
self.pos += 1;
item
})
let next = self.vec.get(self.pos)?;
self.pos += 1;
Some(next)
}
}
@ -1401,7 +1400,7 @@ pub fn read_entire_wal_dumb(file: &Arc<dyn File>) -> Result<Arc<UnsafeCell<WalFi
let calculated_header_checksum = checksum_wal(
&buf_slice[0..24],
&*header_locked,
&header_locked,
(0, 0),
use_native_endian_checksum,
);
@ -1419,8 +1418,7 @@ pub fn read_entire_wal_dumb(file: &Arc<dyn File>) -> Result<Arc<UnsafeCell<WalFi
let mut cumulative_checksum = (header_locked.checksum_1, header_locked.checksum_2);
let page_size_u32 = header_locked.page_size;
if page_size_u32 < MIN_PAGE_SIZE
|| page_size_u32 > MAX_PAGE_SIZE
if !(MIN_PAGE_SIZE..=MAX_PAGE_SIZE).contains(&page_size_u32)
|| page_size_u32.count_ones() != 1
{
panic!("Invalid page size in WAL header: {}", page_size_u32);
@ -1462,13 +1460,13 @@ pub fn read_entire_wal_dumb(file: &Arc<dyn File>) -> Result<Arc<UnsafeCell<WalFi
let checksum_after_fh_meta = checksum_wal(
&frame_header_slice[0..8],
&*header_locked,
&header_locked,
cumulative_checksum,
use_native_endian_checksum,
);
let calculated_frame_checksum = checksum_wal(
page_data_slice,
&*header_locked,
&header_locked,
checksum_after_fh_meta,
use_native_endian_checksum,
);
@ -1516,7 +1514,7 @@ pub fn begin_read_wal_frame(
io: &Arc<dyn File>,
offset: usize,
buffer_pool: Rc<BufferPool>,
complete: Box<dyn Fn(Arc<RefCell<Buffer>>) -> ()>,
complete: Box<dyn Fn(Arc<RefCell<Buffer>>)>,
) -> Result<Arc<Completion>> {
tracing::trace!("begin_read_wal_frame(offset={})", offset);
let buf = buffer_pool.get();
@ -1532,6 +1530,7 @@ pub fn begin_read_wal_frame(
}
#[instrument(skip(io, page, write_counter, wal_header, checksums), level = Level::TRACE)]
#[allow(clippy::too_many_arguments)]
pub fn begin_write_wal_frame(
io: &Arc<dyn File>,
offset: usize,
@ -1752,8 +1751,8 @@ mod tests {
#[case(&[0x40, 0x09, 0x21, 0xFB, 0x54, 0x44, 0x2D, 0x18], SerialType::f64(), Value::Float(std::f64::consts::PI))]
#[case(&[1, 2], SerialType::const_int0(), Value::Integer(0))]
#[case(&[65, 66], SerialType::const_int1(), Value::Integer(1))]
#[case(&[1, 2, 3], SerialType::blob(3), Value::Blob(vec![1, 2, 3].into()))]
#[case(&[], SerialType::blob(0), Value::Blob(vec![].into()))] // empty blob
#[case(&[1, 2, 3], SerialType::blob(3), Value::Blob(vec![1, 2, 3]))]
#[case(&[], SerialType::blob(0), Value::Blob(vec![]))] // empty blob
#[case(&[65, 66, 67], SerialType::text(3), Value::build_text("ABC"))]
#[case(&[0x80], SerialType::i8(), Value::Integer(-128))]
#[case(&[0x80, 0], SerialType::i16(), Value::Integer(-32768))]


@ -86,7 +86,7 @@ pub fn handle_distinct(program: &mut ProgramBuilder, agg: &Aggregate, agg_arg_re
});
program.emit_insn(Insn::IdxInsert {
cursor_id: distinct_ctx.cursor_id,
record_reg: record_reg,
record_reg,
unpacked_start: None,
unpacked_count: None,
flags: IdxInsertFlags::new(),


@ -69,11 +69,7 @@ pub fn prepare_delete_plan(
crate::bail_parse_error!("Table is neither a virtual table nor a btree table");
};
let name = tbl_name.name.0.as_str().to_string();
let indexes = schema
.get_indices(table.get_name())
.iter()
.cloned()
.collect();
let indexes = schema.get_indices(table.get_name()).to_vec();
let joined_tables = vec![JoinedTable {
table,
identifier: name,
@ -83,7 +79,7 @@ pub fn prepare_delete_plan(
index: None,
},
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
}];
let mut table_references = TableReferences::new(joined_tables, vec![]);


@ -262,7 +262,7 @@ pub fn emit_query<'a>(
t_ctx: &mut TranslateCtx<'a>,
) -> Result<usize> {
if !plan.values.is_empty() {
let reg_result_cols_start = emit_values(program, &plan, &t_ctx.resolver)?;
let reg_result_cols_start = emit_values(program, plan, &t_ctx.resolver)?;
return Ok(reg_result_cols_start);
}
@ -309,7 +309,7 @@ pub fn emit_query<'a>(
program,
t_ctx,
group_by,
&plan,
plan,
&plan.result_columns,
&plan.order_by,
)?;
@ -348,7 +348,7 @@ pub fn emit_query<'a>(
t_ctx,
&plan.table_references,
&plan.join_order,
&mut plan.where_clause,
&plan.where_clause,
None,
)?;
@ -398,7 +398,7 @@ pub fn emit_query<'a>(
#[instrument(skip_all, level = Level::TRACE)]
fn emit_program_for_delete(
program: &mut ProgramBuilder,
mut plan: DeletePlan,
plan: DeletePlan,
schema: &Schema,
syms: &SymbolTable,
) -> Result<()> {
@ -446,7 +446,7 @@ fn emit_program_for_delete(
&mut t_ctx,
&plan.table_references,
&[JoinOrderMember::default()],
&mut plan.where_clause,
&plan.where_clause,
None,
)?;
@ -501,7 +501,7 @@ fn emit_delete_insns(
dest: key_reg,
});
if let Some(_) = table_reference.virtual_table() {
if table_reference.virtual_table().is_some() {
let conflict_action = 0u16;
let start_reg = key_reg;
@ -672,7 +672,7 @@ fn emit_program_for_update(
&mut t_ctx,
&plan.table_references,
&[JoinOrderMember::default()],
&mut plan.where_clause,
&plan.where_clause,
temp_cursor_id,
)?;
@ -935,7 +935,7 @@ fn emit_update_insns(
if idx > 0 {
accum.push_str(", ");
}
accum.push_str(&table_ref.table.get_name());
accum.push_str(table_ref.table.get_name());
accum.push('.');
accum.push_str(&col.name);
@ -1060,7 +1060,7 @@ fn emit_update_insns(
// Insert new index key (filled further above with values from set_clauses)
program.emit_insn(Insn::IdxInsert {
cursor_id: idx_cursor_id,
record_reg: record_reg,
record_reg,
unpacked_start: Some(start),
unpacked_count: Some((index.columns.len() + 1) as u16),
flags: IdxInsertFlags::new(),
@ -1074,7 +1074,7 @@ fn emit_update_insns(
flag: InsertFlags::new().update(true),
table_name: table_ref.identifier.clone(),
});
} else if let Some(_) = table_ref.virtual_table() {
} else if table_ref.virtual_table().is_some() {
let arg_count = table_ref.columns().len() + 2;
program.emit_insn(Insn::VUpdate {
cursor_id,


@ -2211,6 +2211,7 @@ pub fn translate_expr(
Ok(target_register)
}
#[allow(clippy::too_many_arguments)]
fn emit_binary_insn(
program: &mut ProgramBuilder,
op: &ast::Operator,
@ -2490,7 +2491,7 @@ fn translate_like_base(
};
match op {
ast::LikeOperator::Like | ast::LikeOperator::Glob => {
let arg_count = if matches!(escape, Some(_)) { 3 } else { 2 };
let arg_count = if escape.is_some() { 3 } else { 2 };
let start_reg = program.alloc_registers(arg_count);
let mut constant_mask = 0;
translate_expr(program, referenced_tables, lhs, start_reg + 1, resolver)?;


@ -86,7 +86,7 @@ pub fn init_group_by<'a>(
t_ctx: &mut TranslateCtx<'a>,
group_by: &'a GroupBy,
plan: &SelectPlan,
result_columns: &'a Vec<ResultSetColumn>,
result_columns: &'a [ResultSetColumn],
order_by: &'a Option<Vec<(ast::Expr, ast::SortOrder)>>,
) -> Result<()> {
collect_non_aggregate_expressions(
@ -239,7 +239,7 @@ fn collect_non_aggregate_expressions<'a>(
non_aggregate_expressions: &mut Vec<(&'a ast::Expr, bool)>,
group_by: &'a GroupBy,
plan: &SelectPlan,
root_result_columns: &'a Vec<ResultSetColumn>,
root_result_columns: &'a [ResultSetColumn],
order_by: &'a Option<Vec<(ast::Expr, ast::SortOrder)>>,
) -> Result<()> {
let mut result_columns = Vec::new();
@ -512,11 +512,11 @@ impl<'a> GroupByAggArgumentSource<'a> {
}
/// Emits bytecode for processing a single GROUP BY group.
pub fn group_by_process_single_group<'a>(
pub fn group_by_process_single_group(
program: &mut ProgramBuilder,
group_by: &GroupBy,
plan: &SelectPlan,
t_ctx: &mut TranslateCtx<'a>,
t_ctx: &mut TranslateCtx,
) -> Result<()> {
let GroupByMetadata {
registers,
@ -663,16 +663,16 @@ pub fn group_by_process_single_group<'a>(
start_reg_dest,
..
} => {
let mut sorter_column_index = 0;
let mut next_reg = *start_reg_dest;
for (expr, in_result) in t_ctx.non_aggregate_expressions.iter() {
for (sorter_column_index, (expr, in_result)) in
t_ctx.non_aggregate_expressions.iter().enumerate()
{
if *in_result {
program.emit_column(*pseudo_cursor, sorter_column_index, next_reg);
t_ctx.resolver.expr_to_reg_cache.push((expr, next_reg));
next_reg += 1;
}
sorter_column_index += 1;
}
}
GroupByRowSource::MainLoop { start_reg_dest, .. } => {
@ -712,12 +712,12 @@ pub fn group_by_process_single_group<'a>(
/// Emits the bytecode for processing the aggregation phase of a GROUP BY clause.
/// This is called either when:
/// 1. the main query execution loop has finished processing,
/// and we now have data in the GROUP BY sorter.
/// and we now have data in the GROUP BY sorter.
/// 2. the rows are already sorted in the order that the GROUP BY keys are defined,
/// and we can start aggregating inside the main loop.
pub fn group_by_agg_phase<'a>(
/// and we can start aggregating inside the main loop.
pub fn group_by_agg_phase(
program: &mut ProgramBuilder,
t_ctx: &mut TranslateCtx<'a>,
t_ctx: &mut TranslateCtx,
plan: &SelectPlan,
) -> Result<()> {
let GroupByMetadata {


@ -110,7 +110,7 @@ pub fn translate_create_index(
);
// determine the order of the columns in the index for the sorter
let order = idx.columns.iter().map(|c| c.order.clone()).collect();
let order = idx.columns.iter().map(|c| c.order).collect();
// open the sorter and the pseudo table
program.emit_insn(Insn::SorterOpen {
cursor_id: sorter_cursor_id,


@ -67,7 +67,7 @@ impl LoopLabels {
}
pub fn init_distinct(program: &mut ProgramBuilder, plan: &SelectPlan) -> DistinctCtx {
let index_name = format!("distinct_{}", program.offset().to_offset_int()); // we don't really care about the name that much, just enough that we don't get name collisions
let index_name = format!("distinct_{}", program.offset().as_offset_int()); // we don't really care about the name that much, just enough that we don't get name collisions
let index = Arc::new(Index {
name: index_name.clone(),
table_name: String::new(),
@ -100,7 +100,7 @@ pub fn init_distinct(program: &mut ProgramBuilder, plan: &SelectPlan) -> Distinc
is_table: false,
});
return ctx;
ctx
}
/// Initialize resources needed for the source operators (tables, joins, etc)
@ -345,7 +345,7 @@ pub fn init_loop(
jump_target_when_true: jump_target,
jump_target_when_false: t_ctx.label_main_loop_end.unwrap(),
};
translate_condition_expr(program, &tables, &cond.expr, meta, &t_ctx.resolver)?;
translate_condition_expr(program, tables, &cond.expr, meta, &t_ctx.resolver)?;
program.preassign_label_to_next_insn(jump_target);
}
@ -631,7 +631,7 @@ pub fn open_loop(
};
Some(emit_autoindex(
program,
&index,
index,
table_cursor_id
.expect("an ephemeral index must have a source table cursor"),
index_cursor_id
@ -747,9 +747,9 @@ enum LoopEmitTarget {
/// Emits the bytecode for the inner loop of a query.
/// At this point the cursors for all tables have been opened and rewound.
pub fn emit_loop<'a>(
pub fn emit_loop(
program: &mut ProgramBuilder,
t_ctx: &mut TranslateCtx<'a>,
t_ctx: &mut TranslateCtx,
plan: &SelectPlan,
) -> Result<()> {
// if we have a group by, we emit a record into the group by sorter,
@ -773,9 +773,9 @@ pub fn emit_loop<'a>(
/// This is a helper function for inner_loop_emit,
/// which does a different thing depending on the emit target.
/// See the InnerLoopEmitTarget enum for more details.
fn emit_loop_source<'a>(
fn emit_loop_source(
program: &mut ProgramBuilder,
t_ctx: &mut TranslateCtx<'a>,
t_ctx: &mut TranslateCtx,
plan: &SelectPlan,
emit_target: LoopEmitTarget,
) -> Result<()> {
@ -1182,7 +1182,7 @@ fn emit_seek(
translate_expr_no_constant_opt(
program,
Some(tables),
&expr,
expr,
reg,
&t_ctx.resolver,
NoConstantOptReason::RegisterReuse,


@ -54,6 +54,7 @@ use transaction::{translate_tx_begin, translate_tx_commit};
use update::translate_update;
#[instrument(skip_all, level = Level::TRACE)]
#[allow(clippy::too_many_arguments)]
pub fn translate(
schema: &Schema,
stmt: ast::Stmt,
@ -151,7 +152,7 @@ pub fn translate_inner(
ast::Stmt::CreateTrigger { .. } => bail_parse_error!("CREATE TRIGGER not supported yet"),
ast::Stmt::CreateView { .. } => bail_parse_error!("CREATE VIEW not supported yet"),
ast::Stmt::CreateVirtualTable(vtab) => {
translate_create_virtual_table(*vtab, schema, query_mode, &syms, program)?
translate_create_virtual_table(*vtab, schema, query_mode, syms, program)?
}
ast::Stmt::Delete(delete) => {
let Delete {


@ -31,7 +31,7 @@ pub struct AccessMethod<'a> {
pub constraint_refs: &'a [ConstraintRef],
}
impl<'a> AccessMethod<'a> {
impl AccessMethod<'_> {
pub fn is_scan(&self) -> bool {
self.constraint_refs.is_empty()
}
@ -81,7 +81,7 @@ pub fn find_best_access_method_for_join_order<'a>(
let cost = estimate_cost_for_scan_or_seek(
Some(index_info),
&rhs_constraints.constraints,
&usable_constraint_refs,
usable_constraint_refs,
input_cardinality,
);
@ -139,7 +139,7 @@ pub fn find_best_access_method_for_join_order<'a>(
cost,
index: candidate.index.clone(),
iter_dir,
constraint_refs: &usable_constraint_refs,
constraint_refs: usable_constraint_refs,
};
}
}


@ -33,7 +33,7 @@ pub const ESTIMATED_HARDCODED_ROWS_PER_TABLE: usize = 1000000;
pub const ESTIMATED_HARDCODED_ROWS_PER_PAGE: usize = 50; // roughly 80 bytes per 4096 byte page
pub fn estimate_page_io_cost(rowcount: f64) -> Cost {
Cost((rowcount as f64 / ESTIMATED_HARDCODED_ROWS_PER_PAGE as f64).ceil())
Cost((rowcount / ESTIMATED_HARDCODED_ROWS_PER_PAGE as f64).ceil())
}
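
As a quick sanity check of the formula above, here is a standalone sketch (not part of this change) with the hardcoded estimates plugged in:

// Worked example: scanning the assumed 1,000,000-row table at ~50 rows per
// page costs ceil(1_000_000 / 50) = 20_000 page IOs.
fn main() {
    let rowcount = 1_000_000.0_f64;
    let rows_per_page = 50.0_f64;
    assert_eq!((rowcount / rows_per_page).ceil(), 20_000.0);
}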
/// Estimate the cost of a scan or seek operation.

View file

@ -60,7 +60,7 @@ pub fn join_lhs_and_rhs<'a>(
let best_access_method = find_best_access_method_for_join_order(
rhs_table_reference,
rhs_constraints,
&join_order,
join_order,
maybe_order_target,
input_cardinality as f64,
)?;
@ -135,17 +135,17 @@ pub fn compute_best_join_order<'a>(
joined_tables,
maybe_order_target,
access_methods_arena,
&constraints,
constraints,
)?;
// Keep track of both 1. the best plan overall (not considering sorting), and 2. the best ordered plan (which might not be the same).
// We assign Some Cost (tm) to any required sort operation, so the best ordered plan may end up being
// the one we choose, if the cost reduction from avoiding sorting brings it below the cost of the overall best one.
let mut best_ordered_plan: Option<JoinN> = None;
let mut best_plan_is_also_ordered = if let Some(ref order_target) = maybe_order_target {
let mut best_plan_is_also_ordered = if let Some(order_target) = maybe_order_target {
plan_satisfies_order_target(
&naive_plan,
&access_methods_arena,
access_methods_arena,
joined_tables,
order_target,
)
@ -226,12 +226,8 @@ pub fn compute_best_join_order<'a>(
let mut left_join_illegal_map: HashMap<usize, TableMask> =
HashMap::with_capacity(left_join_count);
for (i, _) in joined_tables.iter().enumerate() {
for j in i + 1..joined_tables.len() {
if joined_tables[j]
.join_info
.as_ref()
.map_or(false, |j| j.outer)
{
for (j, joined_table) in joined_tables.iter().enumerate().skip(i + 1) {
if joined_table.join_info.as_ref().map_or(false, |j| j.outer) {
// bitwise OR the masks
if let Some(illegal_lhs) = left_join_illegal_map.get_mut(&i) {
illegal_lhs.add_table(j);
@ -329,10 +325,10 @@ pub fn compute_best_join_order<'a>(
continue;
};
let satisfies_order_target = if let Some(ref order_target) = maybe_order_target {
let satisfies_order_target = if let Some(order_target) = maybe_order_target {
plan_satisfies_order_target(
&rel,
&access_methods_arena,
access_methods_arena,
joined_tables,
order_target,
)
@ -1039,7 +1035,7 @@ mod tests {
.unwrap();
// Verify that t2 is chosen first due to its equality filter
assert_eq!(best_plan.table_numbers().nth(0).unwrap(), 1);
assert_eq!(best_plan.table_numbers().next().unwrap(), 1);
// Verify table scan is used since there are no indexes
let access_method = &access_methods_arena.borrow()[best_plan.data[0].1];
assert!(access_method.is_scan());
@ -1148,11 +1144,11 @@ mod tests {
// Expected optimal order: fact table as outer, with rowid seeks in any order on each dimension table
// Verify fact table is selected as the outer table as all the other tables can use SeekRowid
assert_eq!(
best_plan.table_numbers().nth(0).unwrap(),
best_plan.table_numbers().next().unwrap(),
FACT_TABLE_IDX,
"First table should be fact (table {}) due to available index, got table {} instead",
FACT_TABLE_IDX,
best_plan.table_numbers().nth(0).unwrap()
best_plan.table_numbers().next().unwrap()
);
// Verify access methods
@ -1187,7 +1183,7 @@ mod tests {
for i in 0..NUM_TABLES {
let mut columns = vec![_create_column_rowid_alias("id")];
if i < NUM_TABLES - 1 {
columns.push(_create_column_of_type(&format!("next_id"), Type::Integer));
columns.push(_create_column_of_type("next_id", Type::Integer));
}
tables.push(_create_btree_table(&format!("t{}", i + 1), columns));
}
@ -1250,14 +1246,19 @@ mod tests {
assert!(access_method.constraint_refs.is_empty());
// all of the rest should use rowid equality
for i in 1..NUM_TABLES {
for (i, table_constraints) in table_constraints
.iter()
.enumerate()
.take(NUM_TABLES)
.skip(1)
{
let access_method = &access_methods_arena.borrow()[best_plan.data[i].1];
assert!(!access_method.is_scan());
assert!(access_method.iter_dir == IterationDirection::Forwards);
assert!(access_method.index.is_none());
assert!(access_method.constraint_refs.len() == 1);
let constraint = &table_constraints[i].constraints
[access_method.constraint_refs[0].constraint_vec_pos];
let constraint =
&table_constraints.constraints[access_method.constraint_refs[0].constraint_vec_pos];
assert!(constraint.lhs_mask.contains_table(i - 1));
assert!(constraint.operator == ast::Operator::Equals);
}
@ -1311,7 +1312,7 @@ mod tests {
},
identifier: "t1".to_string(),
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
});
// Create where clause that only references second column
@ -1402,7 +1403,7 @@ mod tests {
},
identifier: "t1".to_string(),
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
});
// Create where clause that references first and third columns
@ -1518,7 +1519,7 @@ mod tests {
},
identifier: "t1".to_string(),
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
});
// Create where clause: c1 = 5 AND c2 > 10 AND c3 = 7
@ -1666,7 +1667,7 @@ mod tests {
identifier: name,
internal_id,
join_info,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
}
}

View file

@ -20,6 +20,7 @@ use crate::{
/// 1. (c AND d) OR (e AND f)
/// 2. a,
/// 3. b,
///
/// where `a` and `b` become separate WhereTerms, and the original WhereTerm
/// is updated to `(c AND d) OR (e AND f)`.
///
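
A minimal sketch of the split described above, using a hypothetical Expr type rather than the real ast::Expr/WhereTerm machinery: only top-level ANDs are split, and OR-groups survive as a single term.

#[derive(Debug)]
enum Expr {
    And(Box<Expr>, Box<Expr>),
    Or(Box<Expr>, Box<Expr>),
    Atom(&'static str),
}

// Recursively flatten top-level ANDs; anything else is one conjunct.
fn split_conjuncts(expr: Expr, out: &mut Vec<Expr>) {
    match expr {
        Expr::And(lhs, rhs) => {
            split_conjuncts(*lhs, out);
            split_conjuncts(*rhs, out);
        }
        other => out.push(other),
    }
}

fn main() {
    // a AND (b OR c)  ->  two terms: `a` and `(b OR c)`
    let e = Expr::And(
        Box::new(Expr::Atom("a")),
        Box::new(Expr::Or(Box::new(Expr::Atom("b")), Box::new(Expr::Atom("c")))),
    );
    let mut terms = Vec::new();
    split_conjuncts(e, &mut terms);
    assert_eq!(terms.len(), 2);
}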

View file

@ -10,7 +10,7 @@ use limbo_sqlite3_parser::{
ast::{self, Expr, SortOrder},
to_sql_string::ToSqlString as _,
};
use order::{compute_order_target, plan_satisfies_order_target, EliminatesSort};
use order::{compute_order_target, plan_satisfies_order_target, EliminatesSortBy};
use crate::{
parameters::PARAM_PREFIX,
@ -152,14 +152,14 @@ fn optimize_subqueries(plan: &mut SelectPlan, schema: &Schema) -> Result<()> {
fn optimize_table_access(
table_references: &mut TableReferences,
available_indexes: &HashMap<String, Vec<Arc<Index>>>,
where_clause: &mut Vec<WhereTerm>,
where_clause: &mut [WhereTerm],
order_by: &mut Option<Vec<(ast::Expr, SortOrder)>>,
group_by: &mut Option<GroupBy>,
) -> Result<Option<Vec<JoinOrderMember>>> {
let access_methods_arena = RefCell::new(Vec::new());
let maybe_order_target = compute_order_target(order_by, group_by.as_mut());
let constraints_per_table =
constraints_from_where_clause(where_clause, &table_references, available_indexes)?;
constraints_from_where_clause(where_clause, table_references, available_indexes)?;
let Some(best_join_order_result) = compute_best_join_order(
table_references.joined_tables_mut(),
maybe_order_target.as_ref(),
@ -204,13 +204,13 @@ fn optimize_table_access(
);
if satisfies_order_target {
match order_target.1 {
EliminatesSort::GroupBy => {
EliminatesSortBy::Group => {
let _ = group_by.as_mut().and_then(|g| g.sort_order.take());
}
EliminatesSort::OrderBy => {
EliminatesSortBy::Order => {
let _ = order_by.take();
}
EliminatesSort::GroupByAndOrderBy => {
EliminatesSortBy::GroupByAndOrder => {
let _ = group_by.as_mut().and_then(|g| g.sort_order.take());
let _ = order_by.take();
}
@ -294,14 +294,14 @@ fn optimize_table_access(
let ephemeral_index = ephemeral_index_build(
&joined_tables[table_idx],
&table_constraints.constraints,
&usable_constraint_refs,
usable_constraint_refs,
);
let ephemeral_index = Arc::new(ephemeral_index);
joined_tables[table_idx].op = Operation::Search(Search::Seek {
index: Some(ephemeral_index),
seek_def: build_seek_def_from_constraints(
&table_constraints.constraints,
&usable_constraint_refs,
usable_constraint_refs,
access_method.iter_dir,
where_clause,
)?,
@ -326,7 +326,7 @@ fn optimize_table_access(
index: Some(index.clone()),
seek_def: build_seek_def_from_constraints(
&constraints_per_table[table_idx].constraints,
&constraint_refs,
constraint_refs,
access_method.iter_dir,
where_clause,
)?,
@ -348,7 +348,7 @@ fn optimize_table_access(
index: None,
seek_def: build_seek_def_from_constraints(
&constraints_per_table[table_idx].constraints,
&constraint_refs,
constraint_refs,
access_method.iter_dir,
where_clause,
)?,
@ -370,7 +370,7 @@ enum ConstantConditionEliminationResult {
/// Returns a [ConstantConditionEliminationResult] indicating whether any predicates are always false.
/// This is used to determine whether the query can be aborted early.
fn eliminate_constant_conditions(
where_clause: &mut Vec<WhereTerm>,
where_clause: &mut [WhereTerm],
) -> Result<ConstantConditionEliminationResult> {
let mut i = 0;
while i < where_clause.len() {
@ -530,7 +530,7 @@ impl Optimizable for ast::Expr {
let table_ref = tables.find_joined_table_by_internal_id(*table).unwrap();
let columns = table_ref.columns();
let column = &columns[*column];
return column.primary_key || column.notnull;
column.primary_key || column.notnull
}
Expr::RowId { .. } => true,
Expr::InList { lhs, rhs, .. } => {
@ -864,11 +864,11 @@ pub fn build_seek_def_from_constraints(
/// But to illustrate the general idea, consider the following examples:
///
/// 1. For example, having two conditions like (x>10 AND y>20) cannot be used as a valid [SeekKey] GT(x:10, y:20)
/// because the first row greater than (x:10, y:20) might be (x:10, y:21), which does not satisfy the where clause.
/// In this case, only GT(x:10) must be used as the [SeekKey], and rows with y <= 20 must be filtered as a regular condition expression for each value of x.
/// because the first row greater than (x:10, y:20) might be (x:10, y:21), which does not satisfy the where clause.
/// In this case, only GT(x:10) must be used as the [SeekKey], and rows with y <= 20 must be filtered as a regular condition expression for each value of x.
///
/// 2. In contrast, having (x=10 AND y>20) forms a valid index key GT(x:10, y:20) because after the seek, we can simply terminate as soon as x > 10,
/// i.e. use GT(x:10, y:20) as the [SeekKey] and GT(x:10) as the [TerminationKey].
/// i.e. use GT(x:10, y:20) as the [SeekKey] and GT(x:10) as the [TerminationKey].
///
/// The preceding examples are for an ascending index. The logic is similar for descending indexes, but an important distinction is that
/// since a descending index is laid out in reverse order, the comparison operators are reversed, e.g. LT becomes GT, LE becomes GE, etc.
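
Example 1 can be checked concretely with plain tuples standing in for index keys (a standalone sketch, not Limbo's seek machinery):

fn main() {
    // Ascending (x, y) index, as plain tuples.
    let index = [(10, 19), (10, 21), (11, 5), (12, 30)];
    // Seeking GT(x:10, y:20) lands on (10, 21), which violates x > 10:
    let pos = index.iter().position(|&row| row > (10, 20)).unwrap();
    assert_eq!(index[pos], (10, 21));
    // So GT(x:10) must be the seek key, and y > 20 stays a per-row filter:
    let matching: Vec<_> = index.iter().filter(|&&(x, y)| x > 10 && y > 20).collect();
    assert_eq!(matching, vec![&(12, 30)]);
}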

View file

@ -19,22 +19,22 @@ pub struct ColumnOrder {
#[derive(Debug, PartialEq, Clone)]
/// If an [OrderTarget] is satisfied, then [EliminatesSortBy] describes which part of the query no longer requires sorting.
pub enum EliminatesSort {
GroupBy,
OrderBy,
GroupByAndOrderBy,
pub enum EliminatesSortBy {
Group,
Order,
GroupByAndOrder,
}
#[derive(Debug, PartialEq, Clone)]
/// An [OrderTarget] is considered in join optimization and index selection,
/// so that if a given join ordering and its access methods satisfy the [OrderTarget],
/// then the join ordering and its access methods are preferred, all other things being equal.
pub struct OrderTarget(pub Vec<ColumnOrder>, pub EliminatesSort);
pub struct OrderTarget(pub Vec<ColumnOrder>, pub EliminatesSortBy);
impl OrderTarget {
fn maybe_from_iterator<'a>(
list: impl Iterator<Item = (&'a ast::Expr, SortOrder)> + Clone,
eliminates_sort: EliminatesSort,
eliminates_sort: EliminatesSortBy,
) -> Option<Self> {
if list.clone().count() == 0 {
return None;
@ -79,12 +79,12 @@ pub fn compute_order_target(
// Only ORDER BY - we would like the joined result rows to be in the order specified by the ORDER BY
(Some(order_by), None) => OrderTarget::maybe_from_iterator(
order_by.iter().map(|(expr, order)| (expr, *order)),
EliminatesSort::OrderBy,
EliminatesSortBy::Order,
),
// Only GROUP BY - we would like the joined result rows to be in the order specified by the GROUP BY
(None, Some(group_by)) => OrderTarget::maybe_from_iterator(
group_by.exprs.iter().map(|expr| (expr, SortOrder::Asc)),
EliminatesSort::GroupBy,
EliminatesSortBy::Group,
),
// Both ORDER BY and GROUP BY:
// If the GROUP BY does not contain all the expressions in the ORDER BY,
@ -107,7 +107,7 @@ pub fn compute_order_target(
if !group_by_contains_all {
return OrderTarget::maybe_from_iterator(
group_by.exprs.iter().map(|expr| (expr, SortOrder::Asc)),
EliminatesSort::GroupBy,
EliminatesSortBy::Group,
);
}
// If yes, let's try to target an ordering that matches the GROUP BY columns,
@ -146,7 +146,7 @@ pub fn compute_order_target(
.iter(),
)
.map(|(expr, dir)| (expr, *dir)),
EliminatesSort::GroupByAndOrderBy,
EliminatesSortBy::GroupByAndOrder,
)
}
}
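
Reduced to plain data, the decision table this function implements looks roughly like the following sketch (illustrative names and booleans, not the real signatures):

#[derive(Debug, PartialEq)]
enum EliminatesSortBy {
    Group,
    Order,
    GroupByAndOrder,
}

fn order_target(
    has_order_by: bool,
    has_group_by: bool,
    group_covers_order: bool,
) -> Option<EliminatesSortBy> {
    match (has_order_by, has_group_by) {
        (false, false) => None,
        (true, false) => Some(EliminatesSortBy::Order),
        (false, true) => Some(EliminatesSortBy::Group),
        // With both, target the GROUP BY ordering; if it also covers the
        // ORDER BY expressions, one well-ordered plan eliminates both sorts.
        (true, true) if group_covers_order => Some(EliminatesSortBy::GroupByAndOrder),
        (true, true) => Some(EliminatesSortBy::Group),
    }
}

fn main() {
    use EliminatesSortBy::*;
    assert_eq!(order_target(true, false, false), Some(Order));
    assert_eq!(order_target(false, true, false), Some(Group));
    assert_eq!(order_target(true, true, true), Some(GroupByAndOrder));
    assert_eq!(order_target(true, true, false), Some(Group));
    assert_eq!(order_target(false, false, false), None);
}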

View file

@ -114,7 +114,7 @@ impl WhereTerm {
}
fn eval_at(&self, join_order: &[JoinOrderMember]) -> Result<EvalAt> {
determine_where_to_eval_term(&self, join_order)
determine_where_to_eval_term(self, join_order)
}
}
@ -335,7 +335,7 @@ pub enum QueryDestination {
},
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub struct JoinOrderMember {
/// The internal ID of the [TableReference]
pub table_id: TableInternalId,
@ -346,16 +346,6 @@ pub struct JoinOrderMember {
pub is_outer: bool,
}
impl Default for JoinOrderMember {
fn default() -> Self {
Self {
table_id: TableInternalId::default(),
original_idx: 0,
is_outer: false,
}
}
}
#[derive(Debug, Clone, PartialEq)]
/// Whether a column is DISTINCT or not.
@ -414,7 +404,7 @@ impl DistinctCtx {
});
program.emit_insn(Insn::IdxInsert {
cursor_id: self.cursor_id,
record_reg: record_reg,
record_reg,
unpacked_start: None,
unpacked_count: None,
flags: IdxInsertFlags::new(),
@ -472,7 +462,7 @@ impl SelectPlan {
QueryDestination::CoroutineYield { .. }
)
|| self.table_references.joined_tables().len() != 1
|| self.table_references.outer_query_refs().len() != 0
|| !self.table_references.outer_query_refs().is_empty()
|| self.result_columns.len() != 1
|| self.group_by.is_some()
|| self.contains_constant_false_condition
@ -837,15 +827,11 @@ impl TableReferences {
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct ColumnUsedMask(u128);
impl ColumnUsedMask {
pub fn new() -> Self {
Self(0)
}
pub fn set(&mut self, index: usize) {
assert!(
index < 128,
@ -950,7 +936,7 @@ impl JoinedTable {
identifier,
internal_id,
join_info,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
}
}
@ -987,14 +973,12 @@ impl JoinedTable {
CursorType::BTreeTable(btree.clone()),
))
};
let index_cursor_id = if let Some(index) = index {
Some(program.alloc_cursor_id_keyed(
let index_cursor_id = index.map(|index| {
program.alloc_cursor_id_keyed(
CursorKey::index(self.internal_id, index.clone()),
CursorType::BTreeIndex(index.clone()),
))
} else {
None
};
)
});
Ok((table_cursor_id, index_cursor_id))
}
Table::Virtual(virtual_table) => {
@ -1031,7 +1015,7 @@ impl JoinedTable {
if self.col_used_mask.is_empty() {
return false;
}
let mut index_cols_mask = ColumnUsedMask::new();
let mut index_cols_mask = ColumnUsedMask::default();
for col in index.columns.iter() {
index_cols_mask.set(col.pos_in_table);
}
@ -1040,7 +1024,7 @@ impl JoinedTable {
if btree.has_rowid {
if let Some(pos_of_rowid_alias_col) = btree.get_rowid_alias_column().map(|(pos, _)| pos)
{
let mut empty_mask = ColumnUsedMask::new();
let mut empty_mask = ColumnUsedMask::default();
empty_mask.set(pos_of_rowid_alias_col);
if self.col_used_mask == empty_mask {
// However if the index would be ONLY used for the rowid, then let's not bother using it to cover the query.
@ -1079,6 +1063,7 @@ pub struct SeekDef {
/// For example, given:
/// - CREATE INDEX i ON t (x, y desc)
/// - SELECT * FROM t WHERE x = 1 AND y >= 30
///
/// The key is [(1, ASC), (30, DESC)]
pub key: Vec<(ast::Expr, SortOrder)>,
/// The condition to use when seeking. See [SeekKey] for more details.
@ -1100,6 +1085,7 @@ pub struct SeekKey {
/// For example, given:
/// - CREATE INDEX i ON t (x, y)
/// - SELECT * FROM t WHERE x = 1 AND y < 30
///
/// We want to seek to the first row where x = 1, and then iterate forwards.
/// In this case, the seek key is GT(1, NULL) since NULL is always LT in index key comparisons.
/// We can't use just GT(1) because in index key comparisons, only the given number of columns are compared,
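
The NULL-padding argument can be verified with Option tuples, where None plays the role of NULL and sorts lowest (a standalone sketch):

fn main() {
    let index: [(Option<i32>, Option<i32>); 4] = [
        (Some(0), Some(5)),
        (Some(1), Some(10)),
        (Some(1), Some(29)),
        (Some(2), Some(1)),
    ];
    // GT(1, NULL) lands on the first x = 1 row:
    let pos = index.iter().position(|&k| k > (Some(1), None)).unwrap();
    assert_eq!(index[pos], (Some(1), Some(10)));
    // A one-column GT(1) would instead skip every x = 1 row entirely:
    let pos = index.iter().position(|&(x, _)| x > Some(1)).unwrap();
    assert_eq!(index[pos], (Some(2), Some(1)));
}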

View file

@ -240,7 +240,7 @@ pub fn bind_column_references(
})
}
fn parse_from_clause_table<'a>(
fn parse_from_clause_table(
schema: &Schema,
table: ast::SelectTable,
table_references: &mut TableReferences,
@ -288,7 +288,7 @@ fn parse_from_clause_table<'a>(
identifier: alias.unwrap_or(normalized_qualified_name),
internal_id: table_ref_counter.next(),
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
});
return Ok(());
};
@ -313,7 +313,7 @@ fn parse_from_clause_table<'a>(
identifier: outer_ref.identifier.clone(),
internal_id: table_ref_counter.next(),
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
});
return Ok(());
}
@ -371,7 +371,7 @@ fn parse_from_clause_table<'a>(
table: Table::Virtual(vtab),
identifier: alias,
internal_id: table_ref_counter.next(),
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
});
Ok(())
@ -380,7 +380,7 @@ fn parse_from_clause_table<'a>(
}
}
pub fn parse_from<'a>(
pub fn parse_from(
schema: &Schema,
mut from: Option<FromClause>,
syms: &SymbolTable,
@ -435,7 +435,7 @@ pub fn parse_from<'a>(
identifier: t.identifier.clone(),
internal_id: t.internal_id,
table: t.table.clone(),
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
}
}));
@ -535,7 +535,7 @@ pub fn determine_where_to_eval_term(
));
}
return determine_where_to_eval_expr(&term.expr, join_order);
determine_where_to_eval_expr(&term.expr, join_order)
}
/// A bitmask representing a set of tables in a query plan.
@ -667,8 +667,8 @@ pub fn table_mask_from_expr(
Ok(mask)
}
pub fn determine_where_to_eval_expr<'a>(
top_level_expr: &'a Expr,
pub fn determine_where_to_eval_expr(
top_level_expr: &Expr,
join_order: &[JoinOrderMember],
) -> Result<EvalAt> {
let mut eval_at: EvalAt = EvalAt::BeforeLoop;
@ -689,7 +689,7 @@ pub fn determine_where_to_eval_expr<'a>(
Ok(eval_at)
}
fn parse_join<'a>(
fn parse_join(
schema: &Schema,
join: ast::JoinedSelectTable,
syms: &SymbolTable,

View file

@ -29,6 +29,7 @@ fn list_pragmas(program: &mut ProgramBuilder) {
program.epilogue(crate::translate::emitter::TransactionMode::None);
}
#[allow(clippy::too_many_arguments)]
pub fn translate_pragma(
query_mode: QueryMode,
schema: &Schema,

View file

@ -17,6 +17,7 @@ use super::{
/// - all result columns
/// - result row (or if a subquery, yields to the parent query)
/// - limit
#[allow(clippy::too_many_arguments)]
pub fn emit_select_result(
program: &mut ProgramBuilder,
resolver: &Resolver,

View file

@ -800,7 +800,7 @@ pub fn translate_drop_table(
});
program.emit_insn(Insn::OpenRead {
cursor_id: sqlite_schema_cursor_id_1,
root_page: 1usize.into(),
root_page: 1usize,
});
let schema_column_0_register = program.alloc_register();

View file

@ -88,7 +88,7 @@ pub fn translate_select(
})
}
pub fn prepare_select_plan<'a>(
pub fn prepare_select_plan(
schema: &Schema,
mut select: ast::Select,
syms: &SymbolTable,
@ -181,7 +181,8 @@ pub fn prepare_select_plan<'a>(
}
}
fn prepare_one_select_plan<'a>(
#[allow(clippy::too_many_arguments)]
fn prepare_one_select_plan(
schema: &Schema,
select: ast::OneSelect,
limit: Option<&ast::Limit>,
@ -284,7 +285,7 @@ fn prepare_one_select_plan<'a>(
match column {
ResultColumn::Star => {
select_star(
&plan.table_references.joined_tables(),
plan.table_references.joined_tables(),
&mut plan.result_columns,
);
for table in plan.table_references.joined_tables_mut() {
@ -574,7 +575,7 @@ fn prepare_one_select_plan<'a>(
}
// Parse the LIMIT/OFFSET clause
(plan.limit, plan.offset) = limit.map_or(Ok((None, None)), |l| parse_limit(l))?;
(plan.limit, plan.offset) = limit.map_or(Ok((None, None)), parse_limit)?;
// Return the unoptimized query plan
Ok(plan)
@ -676,13 +677,7 @@ fn estimate_num_instructions(select: &SelectPlan) -> usize {
let order_by_instructions = select.order_by.is_some() as usize * 10;
let condition_instructions = select.where_clause.len() * 3;
let num_instructions = 20
+ table_instructions
+ group_by_instructions
+ order_by_instructions
+ condition_instructions;
num_instructions
20 + table_instructions + group_by_instructions + order_by_instructions + condition_instructions
}
fn estimate_num_labels(select: &SelectPlan) -> usize {
@ -706,20 +701,17 @@ fn estimate_num_labels(select: &SelectPlan) -> usize {
let order_by_labels = select.order_by.is_some() as usize * 10;
let condition_labels = select.where_clause.len() * 2;
let num_labels =
init_halt_labels + table_labels + group_by_labels + order_by_labels + condition_labels;
num_labels
init_halt_labels + table_labels + group_by_labels + order_by_labels + condition_labels
}
pub fn emit_simple_count<'a>(
pub fn emit_simple_count(
program: &mut ProgramBuilder,
_t_ctx: &mut TranslateCtx<'a>,
_t_ctx: &mut TranslateCtx,
plan: &SelectPlan,
) -> Result<()> {
let cursors = plan
.joined_tables()
.get(0)
.first()
.unwrap()
.resolve_cursors(program)?;

View file

@ -44,10 +44,10 @@ pub fn emit_subqueries(
///
/// Since a subquery has its own SelectPlan, it can contain nested subqueries,
/// which can contain even more nested subqueries, etc.
pub fn emit_subquery<'a>(
pub fn emit_subquery(
program: &mut ProgramBuilder,
plan: &mut SelectPlan,
t_ctx: &mut TranslateCtx<'a>,
t_ctx: &mut TranslateCtx,
) -> Result<usize> {
let yield_reg = program.alloc_register();
let coroutine_implementation_start_offset = program.allocate_label();

View file

@ -144,7 +144,7 @@ pub fn prepare_update_plan(
index: None,
},
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
}];
let mut table_references = TableReferences::new(joined_tables, vec![]);
let set_clauses = body
@ -229,7 +229,7 @@ pub fn prepare_update_plan(
index: None,
},
join_info: None,
col_used_mask: ColumnUsedMask::new(),
col_used_mask: ColumnUsedMask::default(),
}];
let mut table_references = TableReferences::new(joined_tables, vec![]);

View file

@ -39,7 +39,7 @@ fn emit_values_when_single_row(
translate_expr_no_constant_opt(
program,
None,
&v,
v,
start_reg + i,
resolver,
NoConstantOptReason::RegisterReuse,
@ -131,7 +131,7 @@ fn emit_values_in_subquery(
translate_expr_no_constant_opt(
program,
None,
&v,
v,
start_reg + i,
resolver,
NoConstantOptReason::RegisterReuse,

View file

@ -58,6 +58,12 @@ pub struct Text {
pub subtype: TextSubtype,
}
impl Display for Text {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.as_str())
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct TextRef {
pub value: RawSlice,
@ -65,10 +71,6 @@ pub struct TextRef {
}
impl Text {
pub fn from_str<S: Into<String>>(value: S) -> Self {
Self::new(&value.into())
}
pub fn new(value: &str) -> Self {
Self {
value: value.as_bytes().to_vec(),
@ -84,10 +86,6 @@ impl Text {
}
}
pub fn to_string(&self) -> String {
self.as_str().to_string()
}
pub fn as_str(&self) -> &str {
unsafe { std::str::from_utf8_unchecked(self.value.as_ref()) }
}
@ -99,6 +97,15 @@ impl AsRef<str> for Text {
}
}
impl From<&str> for Text {
fn from(value: &str) -> Self {
Text {
value: value.as_bytes().to_vec(),
subtype: TextSubtype::Text,
}
}
}
impl From<String> for Text {
fn from(value: String) -> Self {
Text {
@ -108,14 +115,16 @@ impl From<String> for Text {
}
}
impl Display for TextRef {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.as_str())
}
}
impl TextRef {
pub fn as_str(&self) -> &str {
unsafe { std::str::from_utf8_unchecked(self.value.to_slice()) }
}
pub fn to_string(&self) -> String {
self.as_str().to_string()
}
}
#[cfg(feature = "serde")]
@ -331,11 +340,12 @@ impl Display for Value {
} else {
format!("{}.{}", whole, fraction)
};
let (prefix, exponent) = if exponent.starts_with('-') {
("-0", &exponent[1..])
} else {
("+", exponent)
};
let (prefix, exponent) =
    if let Some(stripped_exponent) = exponent.strip_prefix('-') {
        // strip_prefix already removed the '-', so use the remainder as-is
        ("-0", stripped_exponent)
    } else {
        ("+", exponent)
    };
return write!(f, "{}e{}{}", trimmed_mantissa, prefix, exponent);
}
}
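
A standalone check of the corrected branch above, extracted into a helper (the zero-padding of negative exponents mirrors this code, not a verified SQLite guarantee):

fn format_exp(mantissa: &str, exponent: &str) -> String {
    let (prefix, exponent) = if let Some(stripped) = exponent.strip_prefix('-') {
        ("-0", stripped)
    } else {
        ("+", exponent)
    };
    format!("{mantissa}e{prefix}{exponent}")
}

fn main() {
    assert_eq!(format_exp("9.1", "-5"), "9.1e-05");
    assert_eq!(format_exp("1.5", "12"), "1.5e+12");
}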
@ -514,10 +524,6 @@ impl PartialEq<Value> for Value {
_ => false,
}
}
fn ne(&self, other: &Value) -> bool {
!self.eq(other)
}
}
#[allow(clippy::non_canonical_partial_ord_impl)]
@ -782,6 +788,10 @@ impl Record {
pub fn len(&self) -> usize {
self.values.len()
}
pub fn is_empty(&self) -> bool {
self.values.is_empty()
}
}
struct AppendWriter<'a> {
buf: &'a mut Vec<u8>,
@ -859,6 +869,10 @@ impl ImmutableRecord {
self.values.len()
}
pub fn is_empty(&self) -> bool {
self.values.is_empty()
}
pub fn from_registers<'a>(
registers: impl IntoIterator<Item = &'a Register> + Copy,
len: usize,
@ -1152,7 +1166,7 @@ impl PartialOrd<RefValue> for RefValue {
/// A bitfield that represents the comparison spec for index keys.
/// Since indexed columns can individually specify ASC/DESC, each key must
/// be compared differently.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct IndexKeySortOrder(u64);
@ -1180,16 +1194,6 @@ impl IndexKeySortOrder {
}
IndexKeySortOrder(spec)
}
pub fn default() -> Self {
Self(0)
}
}
impl Default for IndexKeySortOrder {
fn default() -> Self {
Self::default()
}
}
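
For illustration, a one-bit-per-column layout like the following would support the per-key ASC/DESC comparisons described above (a sketch; the actual bit assignment is not shown in this hunk):

#[derive(Debug, Default, Clone, Copy)]
struct SortOrderBits(u64);

impl SortOrderBits {
    fn set_desc(&mut self, i: usize) {
        self.0 |= 1 << i; // bit i set => key column i is DESC
    }
    fn is_desc(&self, i: usize) -> bool {
        self.0 & (1 << i) != 0
    }
}

fn main() {
    // CREATE INDEX i ON t (x, y DESC): column 0 ASC, column 1 DESC.
    let mut order = SortOrderBits::default();
    order.set_desc(1);
    assert!(!order.is_desc(0) && order.is_desc(1));
}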
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
@ -1715,8 +1719,6 @@ mod tests {
assert_eq!(
buf.len(),
header_length // 9 bytes (header size + 8 serial types)
+ 0 // ConstInt0: 0 bytes
+ 0 // ConstInt1: 0 bytes
+ size_of::<i8>() // I8: 1 byte
+ size_of::<i16>() // I16: 2 bytes
+ (size_of::<i32>() - 1) // I24: 3 bytes

View file

@ -147,7 +147,7 @@ pub fn parse_schema_rows(
.unwrap();
let index = schema::Index::from_sql(
&unparsed_sql_from_index.sql,
unparsed_sql_from_index.root_page as usize,
unparsed_sql_from_index.root_page,
table.as_ref(),
)?;
schema.add_index(Arc::new(index));
@ -432,7 +432,7 @@ pub fn exprs_are_equivalent(expr1: &Expr, expr2: &Expr) -> bool {
}
// Variables that are not bound to a specific value, are treated as NULL
// https://sqlite.org/lang_expr.html#varparam
(Expr::Variable(var), Expr::Variable(var2)) if var == "" && var2 == "" => false,
(Expr::Variable(var), Expr::Variable(var2)) if var.is_empty() && var2.is_empty() => false,
// Named variables can be compared by their name
(Expr::Variable(val), Expr::Variable(val2)) => val == val2,
(Expr::Parenthesized(exprs1), Expr::Parenthesized(exprs2)) => {
@ -1366,7 +1366,7 @@ pub mod tests {
assert_eq!(opts.path, "/home/user/db.sqlite");
assert_eq!(opts.vfs, Some("unix".to_string()));
assert_eq!(opts.mode, OpenMode::ReadOnly);
assert_eq!(opts.immutable, true);
assert!(opts.immutable);
}
#[test]
@ -1448,7 +1448,7 @@ pub mod tests {
assert_eq!(opts.vfs, Some("unix".to_string()));
assert_eq!(opts.mode, OpenMode::ReadWrite);
assert_eq!(opts.cache, CacheMode::Private);
assert_eq!(opts.immutable, false);
assert!(!opts.immutable);
}
#[test]
@ -2012,12 +2012,12 @@ pub mod tests {
// > i64::MAX, convert to float
assert_eq!(
parse_numeric_literal("9223372036854775808").unwrap(),
Value::Float(9.223372036854775808e+18)
Value::Float(9.223_372_036_854_776e18)
);
// < i64::MIN, convert to float
assert_eq!(
parse_numeric_literal("-9223372036854775809").unwrap(),
Value::Float(-9.223372036854775809e+18)
Value::Float(-9.223_372_036_854_776e18)
);
}
}

View file

@ -14,9 +14,10 @@ use crate::{
emitter::TransactionMode,
plan::{ResultSetColumn, TableReferences},
},
types::Text,
Connection, Value, VirtualTable,
};
#[derive(Default)]
pub struct TableRefIdCounter {
next_free: TableInternalId,
}
@ -356,7 +357,7 @@ impl ProgramBuilder {
pub fn add_comment(&mut self, insn_index: BranchOffset, comment: &'static str) {
if let Some(comments) = &mut self.comments {
comments.push((insn_index.to_offset_int(), comment));
comments.push((insn_index.as_offset_int(), comment));
}
}
@ -387,8 +388,8 @@ impl ProgramBuilder {
.constant_spans
.iter()
.find(|span| span.0 <= *index_b && span.1 >= *index_b);
if a_span.is_some() && b_span.is_some() {
a_span.unwrap().0.cmp(&b_span.unwrap().0)
if let (Some(a_span), Some(b_span)) = (a_span, b_span) {
a_span.0.cmp(&b_span.0)
} else if a_span.is_some() {
Ordering::Greater
} else if b_span.is_some() {
@ -466,7 +467,7 @@ impl ProgramBuilder {
unreachable!("Label is not a label");
};
self.label_to_resolved_offset[label_number as usize] =
Some((to_offset.to_offset_int(), target));
Some((to_offset.as_offset_int(), target));
}
/// Resolve unresolved labels to a specific offset in the instruction list.
@ -730,7 +731,7 @@ impl ProgramBuilder {
}
/// Initialize the program with basic setup and return initial metadata and labels
pub fn prologue<'a>(&mut self) {
pub fn prologue(&mut self) {
if self.nested_level == 0 {
self.init_label = self.allocate_label();
@ -823,7 +824,7 @@ impl ProgramBuilder {
Numeric::Float(v) => Value::Float(v.into()),
},
ast::Literal::Null => Value::Null,
ast::Literal::String(s) => Value::Text(Text::from_str(sanitize_string(s))),
ast::Literal::String(s) => Value::Text(sanitize_string(s).into()),
ast::Literal::Blob(s) => Value::Blob(
// Taken from `translate_expr`
s.as_bytes()

File diff suppressed because it is too large

View file

@ -27,11 +27,11 @@ pub fn insn_to_str(
Insn::Init { target_pc } => (
"Init",
0,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
0,
Value::build_text(""),
0,
format!("Start at {}", target_pc.to_debug_int()),
format!("Start at {}", target_pc.as_debug_int()),
),
Insn::Add { lhs, rhs, dest } => (
"Add",
@ -141,11 +141,11 @@ pub fn insn_to_str(
Insn::NotNull { reg, target_pc } => (
"NotNull",
*reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
0,
Value::build_text(""),
0,
format!("r[{}]!=NULL -> goto {}", reg, target_pc.to_debug_int()),
format!("r[{}]!=NULL -> goto {}", reg, target_pc.as_debug_int()),
),
Insn::Compare {
start_reg_a,
@ -157,7 +157,7 @@ pub fn insn_to_str(
*start_reg_a as i32,
*start_reg_b as i32,
*count as i32,
Value::build_text(&format!("k({count}, {})", collation.unwrap_or_default())),
Value::build_text(format!("k({count}, {})", collation.unwrap_or_default())),
0,
format!(
"r[{}..{}]==r[{}..{}]",
@ -173,9 +173,9 @@ pub fn insn_to_str(
target_pc_gt,
} => (
"Jump",
target_pc_lt.to_debug_int(),
target_pc_eq.to_debug_int(),
target_pc_gt.to_debug_int(),
target_pc_lt.as_debug_int(),
target_pc_eq.as_debug_int(),
target_pc_gt.as_debug_int(),
Value::build_text(""),
0,
"".to_string(),
@ -206,7 +206,7 @@ pub fn insn_to_str(
} => (
"IfPos",
*reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
0,
Value::build_text(""),
0,
@ -215,7 +215,7 @@ pub fn insn_to_str(
reg,
reg,
decrement_by,
target_pc.to_debug_int()
target_pc.as_debug_int()
),
),
Insn::Eq {
@ -228,14 +228,14 @@ pub fn insn_to_str(
"Eq",
*lhs as i32,
*rhs as i32,
target_pc.to_debug_int(),
Value::build_text(&collation.map_or("".to_string(), |c| c.to_string())),
target_pc.as_debug_int(),
Value::build_text(collation.map_or("".to_string(), |c| c.to_string())),
0,
format!(
"if r[{}]==r[{}] goto {}",
lhs,
rhs,
target_pc.to_debug_int()
target_pc.as_debug_int()
),
),
Insn::Ne {
@ -248,14 +248,14 @@ pub fn insn_to_str(
"Ne",
*lhs as i32,
*rhs as i32,
target_pc.to_debug_int(),
Value::build_text(&collation.map_or("".to_string(), |c| c.to_string())),
target_pc.as_debug_int(),
Value::build_text(collation.map_or("".to_string(), |c| c.to_string())),
0,
format!(
"if r[{}]!=r[{}] goto {}",
lhs,
rhs,
target_pc.to_debug_int()
target_pc.as_debug_int()
),
),
Insn::Lt {
@ -268,10 +268,10 @@ pub fn insn_to_str(
"Lt",
*lhs as i32,
*rhs as i32,
target_pc.to_debug_int(),
Value::build_text(&collation.map_or("".to_string(), |c| c.to_string())),
target_pc.as_debug_int(),
Value::build_text(collation.map_or("".to_string(), |c| c.to_string())),
0,
format!("if r[{}]<r[{}] goto {}", lhs, rhs, target_pc.to_debug_int()),
format!("if r[{}]<r[{}] goto {}", lhs, rhs, target_pc.as_debug_int()),
),
Insn::Le {
lhs,
@ -283,14 +283,14 @@ pub fn insn_to_str(
"Le",
*lhs as i32,
*rhs as i32,
target_pc.to_debug_int(),
Value::build_text(&collation.map_or("".to_string(), |c| c.to_string())),
target_pc.as_debug_int(),
Value::build_text(collation.map_or("".to_string(), |c| c.to_string())),
0,
format!(
"if r[{}]<=r[{}] goto {}",
lhs,
rhs,
target_pc.to_debug_int()
target_pc.as_debug_int()
),
),
Insn::Gt {
@ -303,10 +303,10 @@ pub fn insn_to_str(
"Gt",
*lhs as i32,
*rhs as i32,
target_pc.to_debug_int(),
Value::build_text(&collation.map_or("".to_string(), |c| c.to_string())),
target_pc.as_debug_int(),
Value::build_text(collation.map_or("".to_string(), |c| c.to_string())),
0,
format!("if r[{}]>r[{}] goto {}", lhs, rhs, target_pc.to_debug_int()),
format!("if r[{}]>r[{}] goto {}", lhs, rhs, target_pc.as_debug_int()),
),
Insn::Ge {
lhs,
@ -318,14 +318,14 @@ pub fn insn_to_str(
"Ge",
*lhs as i32,
*rhs as i32,
target_pc.to_debug_int(),
Value::build_text(&collation.map_or("".to_string(), |c| c.to_string())),
target_pc.as_debug_int(),
Value::build_text(collation.map_or("".to_string(), |c| c.to_string())),
0,
format!(
"if r[{}]>=r[{}] goto {}",
lhs,
rhs,
target_pc.to_debug_int()
target_pc.as_debug_int()
),
),
Insn::If {
@ -335,11 +335,11 @@ pub fn insn_to_str(
} => (
"If",
*reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
*jump_if_null as i32,
Value::build_text(""),
0,
format!("if r[{}] goto {}", reg, target_pc.to_debug_int()),
format!("if r[{}] goto {}", reg, target_pc.as_debug_int()),
),
Insn::IfNot {
reg,
@ -348,11 +348,11 @@ pub fn insn_to_str(
} => (
"IfNot",
*reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
*jump_if_null as i32,
Value::build_text(""),
0,
format!("if !r[{}] goto {}", reg, target_pc.to_debug_int()),
format!("if !r[{}] goto {}", reg, target_pc.as_debug_int()),
),
Insn::OpenRead {
cursor_id,
@ -427,7 +427,7 @@ pub fn insn_to_str(
} => (
"VFilter",
*cursor_id as i32,
pc_if_empty.to_debug_int(),
pc_if_empty.as_debug_int(),
*arg_count as i32,
Value::build_text(""),
0,
@ -466,7 +466,7 @@ pub fn insn_to_str(
} => (
"VNext",
*cursor_id as i32,
pc_if_next.to_debug_int(),
pc_if_next.as_debug_int(),
0,
Value::build_text(""),
0,
@ -500,7 +500,7 @@ pub fn insn_to_str(
} => (
"Rewind",
*cursor_id as i32,
pc_if_empty.to_debug_int(),
pc_if_empty.as_debug_int(),
0,
Value::build_text(""),
0,
@ -617,7 +617,7 @@ pub fn insn_to_str(
} => (
"Next",
*cursor_id as i32,
pc_if_next.to_debug_int(),
pc_if_next.as_debug_int(),
0,
Value::build_text(""),
0,
@ -631,7 +631,7 @@ pub fn insn_to_str(
*err_code as i32,
0,
0,
Value::build_text(&description),
Value::build_text(description),
0,
"".to_string(),
),
@ -644,7 +644,7 @@ pub fn insn_to_str(
*err_code as i32,
0,
*target_reg as i32,
Value::build_text(&description),
Value::build_text(description),
0,
"".to_string(),
),
@ -660,7 +660,7 @@ pub fn insn_to_str(
Insn::Goto { target_pc } => (
"Goto",
0,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
0,
Value::build_text(""),
0,
@ -672,7 +672,7 @@ pub fn insn_to_str(
} => (
"Gosub",
*return_reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
0,
Value::build_text(""),
0,
@ -779,7 +779,7 @@ pub fn insn_to_str(
"SeekRowid",
*cursor_id as i32,
*src_reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
Value::build_text(""),
0,
format!(
@ -795,7 +795,7 @@ pub fn insn_to_str(
get_table_or_index_name(*cursor_id),
))
.unwrap_or(format!("cursor {}", cursor_id)),
target_pc.to_debug_int()
target_pc.as_debug_int()
),
),
Insn::DeferredSeek {
@ -848,7 +848,7 @@ pub fn insn_to_str(
_ => unreachable!(),
},
*cursor_id as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
*start_reg as i32,
Value::build_text(""),
0,
@ -910,7 +910,7 @@ pub fn insn_to_str(
_ => unreachable!(),
},
*cursor_id as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
*start_reg as i32,
Value::build_text(""),
0,
@ -919,11 +919,11 @@ pub fn insn_to_str(
Insn::DecrJumpZero { reg, target_pc } => (
"DecrJumpZero",
*reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
0,
Value::build_text(""),
0,
format!("if (--r[{}]==0) goto {}", reg, target_pc.to_debug_int()),
format!("if (--r[{}]==0) goto {}", reg, target_pc.as_debug_int()),
),
Insn::AggStep {
func,
@ -975,7 +975,7 @@ pub fn insn_to_str(
*cursor_id as i32,
*columns as i32,
0,
Value::build_text(&(format!("k({},{})", order.len(), to_print.join(",")))),
Value::build_text(format!("k({},{})", order.len(), to_print.join(","))),
0,
format!("cursor={}", cursor_id),
)
@ -1011,7 +1011,7 @@ pub fn insn_to_str(
} => (
"SorterSort",
*cursor_id as i32,
pc_if_empty.to_debug_int(),
pc_if_empty.as_debug_int(),
0,
Value::build_text(""),
0,
@ -1023,7 +1023,7 @@ pub fn insn_to_str(
} => (
"SorterNext",
*cursor_id as i32,
pc_if_next.to_debug_int(),
pc_if_next.as_debug_int(),
0,
Value::build_text(""),
0,
@ -1045,7 +1045,7 @@ pub fn insn_to_str(
} else {
func.func.to_string()
};
Value::build_text(&s)
Value::build_text(s)
},
0,
if func.arg_count == 0 {
@ -1068,8 +1068,8 @@ pub fn insn_to_str(
} => (
"InitCoroutine",
*yield_reg as i32,
jump_on_definition.to_debug_int(),
start_offset.to_debug_int(),
jump_on_definition.as_debug_int(),
start_offset.as_debug_int(),
Value::build_text(""),
0,
"".to_string(),
@ -1089,7 +1089,7 @@ pub fn insn_to_str(
} => (
"Yield",
*yield_reg as i32,
end_offset.to_debug_int(),
end_offset.as_debug_int(),
0,
Value::build_text(""),
0,
@ -1106,7 +1106,7 @@ pub fn insn_to_str(
*cursor as i32,
*record_reg as i32,
*key_reg as i32,
Value::build_text(&table_name),
Value::build_text(table_name),
flag.0 as u16,
format!("intkey=r[{}] data=r[{}]", key_reg, record_reg),
),
@ -1177,9 +1177,9 @@ pub fn insn_to_str(
(
"NoConflict",
*cursor_id as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
*record_reg as i32,
Value::build_text(&format!("{num_regs}")),
Value::build_text(format!("{num_regs}")),
0,
key,
)
@ -1191,7 +1191,7 @@ pub fn insn_to_str(
} => (
"NotExists",
*cursor as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
*rowid_reg as i32,
Value::build_text(""),
0,
@ -1306,7 +1306,7 @@ pub fn insn_to_str(
} => (
"Last",
*cursor_id as i32,
pc_if_empty.to_debug_int(),
pc_if_empty.as_debug_int(),
0,
Value::build_text(""),
0,
@ -1315,11 +1315,11 @@ pub fn insn_to_str(
Insn::IsNull { reg, target_pc } => (
"IsNull",
*reg as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
0,
Value::build_text(""),
0,
format!("if (r[{}]==NULL) goto {}", reg, target_pc.to_debug_int()),
format!("if (r[{}]==NULL) goto {}", reg, target_pc.as_debug_int()),
),
Insn::ParseSchema { db, where_clause } => (
"ParseSchema",
@ -1336,7 +1336,7 @@ pub fn insn_to_str(
} => (
"Prev",
*cursor_id as i32,
pc_if_prev.to_debug_int(),
pc_if_prev.as_debug_int(),
0,
Value::build_text(""),
0,
@ -1491,12 +1491,12 @@ pub fn insn_to_str(
target_pc_when_reentered,
} => (
"Once",
target_pc_when_reentered.to_debug_int(),
target_pc_when_reentered.as_debug_int(),
0,
0,
Value::build_text(""),
0,
format!("goto {}", target_pc_when_reentered.to_debug_int()),
format!("goto {}", target_pc_when_reentered.as_debug_int()),
),
Insn::BeginSubrtn { dest, dest_end } => (
"BeginSubrtn",
@ -1527,7 +1527,7 @@ pub fn insn_to_str(
"Found"
},
*cursor_id as i32,
target_pc.to_debug_int(),
target_pc.as_debug_int(),
*record_reg as i32,
Value::build_text(""),
0,
@ -1538,7 +1538,7 @@ pub fn insn_to_str(
} else {
""
},
target_pc.to_debug_int()
target_pc.as_debug_int()
),
),
Insn::Affinity {

View file

@ -46,7 +46,7 @@ impl CmpInsFlags {
}
pub fn with_affinity(mut self, affinity: Affinity) -> Self {
let aff_code = affinity.to_char_code() as usize;
let aff_code = affinity.as_char_code() as usize;
self.0 = (self.0 & !Self::AFFINITY_MASK) | aff_code;
self
}

View file

@ -35,7 +35,7 @@ use crate::{
use crate::{
storage::{btree::BTreeCursor, pager::Pager, sqlite3_ondisk::DatabaseHeader},
translate::plan::ResultSetColumn,
types::{AggContext, Cursor, CursorResult, ImmutableRecord, SeekKey, SeekOp, Value},
types::{AggContext, Cursor, CursorResult, ImmutableRecord, Value},
vdbe::{builder::CursorType, insn::Insn},
};
@ -45,10 +45,7 @@ use crate::{Connection, MvStore, Result, TransactionState};
use builder::CursorKey;
use execute::{InsnFunction, InsnFunctionStepResult, OpIdxDeleteState, OpIntegrityCheckState};
use rand::{
distributions::{Distribution, Uniform},
Rng,
};
use rand::Rng;
use regex::Regex;
use std::{
cell::{Cell, RefCell},
@ -65,10 +62,11 @@ use tracing::{instrument, Level};
///
/// In some cases, we want to jump to EXACTLY a specific instruction.
/// - Example: a condition is not met, so we want to jump to wherever Halt is.
///
/// In other cases, we don't care what the exact instruction is, but we know that we
/// want to jump to whatever comes AFTER a certain instruction.
/// - Example: a Next instruction will want to jump to "whatever the start of the loop is",
/// but it doesn't care what instruction that is.
/// but it doesn't care what instruction that is.
///
/// The reason this distinction is important is that we might reorder instructions that are
/// constant at compile time, and when we do that, we need to change the offsets of any impacted
@ -106,7 +104,7 @@ impl BranchOffset {
}
/// Returns the offset value. Panics if the branch offset is a label or placeholder.
pub fn to_offset_int(&self) -> InsnReference {
pub fn as_offset_int(&self) -> InsnReference {
match self {
BranchOffset::Label(v) => unreachable!("Unresolved label: {}", v),
BranchOffset::Offset(v) => *v,
@ -117,7 +115,7 @@ impl BranchOffset {
/// Returns the branch offset as a signed integer.
/// Used in explain output, where we don't want to panic in case we have an unresolved
/// label or placeholder.
pub fn to_debug_int(&self) -> i32 {
pub fn as_debug_int(&self) -> i32 {
match self {
BranchOffset::Label(v) => *v as i32,
BranchOffset::Offset(v) => *v as i32,
@ -129,11 +127,11 @@ impl BranchOffset {
/// Returns a new branch offset.
/// Panics if the branch offset is a label or placeholder.
pub fn add<N: Into<u32>>(self, n: N) -> BranchOffset {
BranchOffset::Offset(self.to_offset_int() + n.into())
BranchOffset::Offset(self.as_offset_int() + n.into())
}
pub fn sub<N: Into<u32>>(self, n: N) -> BranchOffset {
BranchOffset::Offset(self.to_offset_int() - n.into())
BranchOffset::Offset(self.as_offset_int() - n.into())
}
}
@ -206,9 +204,9 @@ impl<const N: usize> Bitfield<N> {
/// The commit state of the program.
/// There are two states:
/// - Ready: The program is ready to run the next instruction, or has shut down after
/// the last instruction.
/// the last instruction.
/// - Committing: The program is committing a write transaction. It is waiting for the pager to finish flushing the cache to disk,
/// primarily to the WAL, but also possibly checkpointing the WAL to the database file.
/// primarily to the WAL, but also possibly checkpointing the WAL to the database file.
enum CommitState {
Ready,
Committing,
@ -488,37 +486,38 @@ impl Program {
}
}
fn get_new_rowid<R: Rng>(cursor: &mut BTreeCursor, mut rng: R) -> Result<CursorResult<i64>> {
fn get_new_rowid<R: Rng>(cursor: &mut BTreeCursor, mut _rng: R) -> Result<CursorResult<i64>> {
match cursor.seek_to_last()? {
CursorResult::Ok(()) => {}
CursorResult::IO => return Ok(CursorResult::IO),
}
let mut rowid = match cursor.rowid()? {
let rowid = match cursor.rowid()? {
CursorResult::Ok(Some(rowid)) => rowid.checked_add(1).unwrap_or(i64::MAX), // add 1, but guard against overflow; on overflow, fall back to i64::MAX
CursorResult::Ok(None) => 1,
CursorResult::IO => return Ok(CursorResult::IO),
};
if rowid > i64::MAX.try_into().unwrap() {
let distribution = Uniform::from(1..=i64::MAX);
let max_attempts = 100;
for count in 0..max_attempts {
rowid = distribution.sample(&mut rng).try_into().unwrap();
match cursor.seek(SeekKey::TableRowId(rowid), SeekOp::GE { eq_only: true })? {
CursorResult::Ok(false) => break, // Found a non-existing rowid
CursorResult::Ok(true) => {
if count == max_attempts - 1 {
return Err(LimboError::InternalError(
"Failed to generate a new rowid".to_string(),
));
} else {
continue; // Try next random rowid
}
}
CursorResult::IO => return Ok(CursorResult::IO),
}
}
}
Ok(CursorResult::Ok(rowid.try_into().unwrap()))
// NOTE(nilskch): This block is commented out because `rowid` is already an i64,
// so the `rowid > i64::MAX` guard can never be true.
// if rowid > i64::MAX {
// let distribution = Uniform::from(1..=i64::MAX);
// let max_attempts = 100;
// for count in 0..max_attempts {
// rowid = distribution.sample(&mut rng);
// match cursor.seek(SeekKey::TableRowId(rowid), SeekOp::GE { eq_only: true })? {
// CursorResult::Ok(false) => break, // Found a non-existing rowid
// CursorResult::Ok(true) => {
// if count == max_attempts - 1 {
// return Err(LimboError::InternalError(
// "Failed to generate a new rowid".to_string(),
// ));
// } else {
// continue; // Try next random rowid
// }
// }
// CursorResult::IO => return Ok(CursorResult::IO),
// }
// }
// }
Ok(CursorResult::Ok(rowid))
}
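
The checked_add guard on the happy path behaves as follows (standalone check):

// Bumping the maximum rowid saturates at i64::MAX instead of overflowing.
fn main() {
    let last: i64 = i64::MAX;
    assert_eq!(last.checked_add(1).unwrap_or(i64::MAX), i64::MAX);
    let last: i64 = 41;
    assert_eq!(last.checked_add(1).unwrap_or(i64::MAX), 42);
}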
fn make_record(registers: &[Register], start_reg: &usize, count: &usize) -> ImmutableRecord {
@ -575,7 +574,7 @@ fn print_insn(program: &Program, addr: InsnReference, insn: &Insn, indent: Strin
// Yield SeekGt SeekLt RowSetRead Rewind
// or if the P1 parameter is one instead of zero, then increase the indent number for all
// opcodes between the earlier instruction and "Goto"
fn get_indent_counts(insns: &Vec<(Insn, InsnFunction)>) -> Vec<usize> {
fn get_indent_counts(insns: &[(Insn, InsnFunction)]) -> Vec<usize> {
let mut indents = vec![0; insns.len()];
for (i, (insn, _)) in insns.iter().enumerate() {
@ -583,14 +582,14 @@ fn get_indent_counts(insns: &Vec<(Insn, InsnFunction)>) -> Vec<usize> {
let mut end = 0;
match insn {
Insn::Next { pc_if_next, .. } | Insn::VNext { pc_if_next, .. } => {
let dest = pc_if_next.to_debug_int() as usize;
let dest = pc_if_next.as_debug_int() as usize;
if dest < i {
start = dest;
end = i;
}
}
Insn::Prev { pc_if_prev, .. } => {
let dest = pc_if_prev.to_debug_int() as usize;
let dest = pc_if_prev.as_debug_int() as usize;
if dest < i {
start = dest;
end = i;
@ -598,7 +597,7 @@ fn get_indent_counts(insns: &Vec<(Insn, InsnFunction)>) -> Vec<usize> {
}
Insn::Goto { target_pc } => {
let dest = target_pc.to_debug_int() as usize;
let dest = target_pc.as_debug_int() as usize;
if dest < i
&& matches!(
insns.get(dest).map(|(insn, _)| insn),
@ -615,8 +614,8 @@ fn get_indent_counts(insns: &Vec<(Insn, InsnFunction)>) -> Vec<usize> {
_ => {}
}
for i in start..end {
indents[i] += 1;
for indent in indents.iter_mut().take(end).skip(start) {
*indent += 1;
}
}
@ -681,7 +680,7 @@ impl Row {
T::from_value(value)
}
pub fn get_value<'a>(&'a self, idx: usize) -> &'a Value {
pub fn get_value(&self, idx: usize) -> &Value {
let value = unsafe { self.values.add(idx).as_ref().unwrap() };
match value {
Register::Value(owned_value) => owned_value,

View file

@ -62,7 +62,7 @@ pub fn vector_extract(args: &[Register]) -> Result<Value> {
let vector_type = vector_type(blob)?;
let vector = vector_deserialize(vector_type, blob)?;
Ok(Value::build_text(&vector_to_text(&vector)))
Ok(Value::build_text(vector_to_text(&vector)))
}
pub fn vector_distance_cos(args: &[Register]) -> Result<Value> {

View file

@ -116,7 +116,7 @@ pub fn parse_vector(value: &Register, vec_ty: Option<VectorType>) -> Result<Vect
"Invalid vector value".to_string(),
));
};
let vector_type = vector_type(&blob)?;
let vector_type = vector_type(blob)?;
if let Some(vec_ty) = vec_ty {
if vec_ty != vector_type {
return Err(LimboError::ConversionError(
@ -124,7 +124,7 @@ pub fn parse_vector(value: &Register, vec_ty: Option<VectorType>) -> Result<Vect
));
}
}
vector_deserialize(vector_type, &blob)
vector_deserialize(vector_type, blob)
}
_ => Err(LimboError::ConversionError(
"Invalid vector type".to_string(),
@ -138,8 +138,8 @@ pub fn vector_to_text(vector: &Vector) -> String {
match vector.vector_type {
VectorType::Float32 => {
let data = vector.as_f32_slice();
for i in 0..vector.dims {
text.push_str(&data[i].to_string());
for (i, value) in data.iter().enumerate().take(vector.dims) {
text.push_str(&value.to_string());
if i < vector.dims - 1 {
text.push(',');
}
@ -147,8 +147,8 @@ pub fn vector_to_text(vector: &Vector) -> String {
}
VectorType::Float64 => {
let data = vector.as_f64_slice();
for i in 0..vector.dims {
text.push_str(&data[i].to_string());
for (i, value) in data.iter().enumerate().take(vector.dims) {
text.push_str(&value.to_string());
if i < vector.dims - 1 {
text.push(',');
}
@ -555,7 +555,7 @@ mod tests {
// Skip test if types are different
return true;
}
match do_vector_distance_cos(&v1, &v2) {
match do_vector_distance_cos(v1, v2) {
Ok(distance) => {
// Cosine distance is always between 0 and 2
(0.0..=2.0).contains(&distance)

View file

@ -32,7 +32,7 @@ impl VirtualTable {
syms: &SymbolTable,
) -> crate::Result<Rc<VirtualTable>> {
let module = syms.vtab_modules.get(name);
let (vtab_type, schema) = if let Some(_) = module {
let (vtab_type, schema) = if module.is_some() {
let ext_args = match args {
Some(ref args) => vtable_args(args),
None => vec![],

View file

@ -41,10 +41,10 @@ enum CompletionPhase {
Eof = 11,
}
impl Into<i64> for CompletionPhase {
fn into(self) -> i64 {
impl From<CompletionPhase> for i64 {
fn from(val: CompletionPhase) -> Self {
use self::CompletionPhase::*;
match self {
match val {
Keywords => 1,
// Pragmas => 2,
// Functions => 3,

View file

@ -119,7 +119,7 @@ pub fn decode(data: &Value, format: &Value) -> Result<Value, Error> {
))
}
"base85" => {
let decoded = decode_ascii85(&input_text).map_err(|_| Error::DecodeFailed)?;
let decoded = decode_ascii85(input_text).map_err(|_| Error::DecodeFailed)?;
Ok(Value::from_text(
String::from_utf8(decoded).map_err(|_| Error::InvalidUtf8)?,
@ -157,7 +157,7 @@ fn decode_ascii85(input: &str) -> Result<Vec<u8>, Box<dyn StdError>> {
}
}
if digit < 33 || digit > 117 {
if !(33..=117).contains(&digit) {
return Err("Input char is out of range for Ascii85".into());
}
@ -205,8 +205,8 @@ fn encode_ascii85(input: &[u8]) -> String {
let number = u32::from_be_bytes(chunk.as_ref().try_into().expect("Internal Error"));
for i in 0..count {
let digit = (((number / TABLE[i]) % 85) + 33) as u8;
for value in TABLE.iter().take(count) {
let digit = (((number / value) % 85) + 33) as u8;
result.push(digit as char);
}
}
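
A standalone sketch of one 4-byte chunk passing through the encode loop above, with powers of 85 standing in for the module's TABLE constant (an assumption):

fn main() {
    // 85^4, 85^3, 85^2, 85, 1 — the divisors for the five base-85 digits.
    const POW85: [u32; 5] = [52_200_625, 614_125, 7_225, 85, 1];
    let number = u32::from_be_bytes(*b"Man ");
    let encoded: String = POW85
        .iter()
        .map(|p| ((((number / p) % 85) + 33) as u8) as char)
        .collect();
    assert_eq!(encoded, "9jqo^"); // classic example from the Ascii85 spec
}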

View file

@ -182,10 +182,7 @@ impl VTabModule for CsvVTabModule {
if table.header {
let headers = reader.headers().map_err(|_| ResultCode::Error)?;
if column_count.is_none() && schema.is_none() {
columns = headers
.into_iter()
.map(|header| Self::escape_double_quote(header))
.collect();
columns = headers.into_iter().map(Self::escape_double_quote).collect();
}
if columns.is_empty() {
columns.push("(NULL)".to_owned());

View file

@ -64,7 +64,7 @@ fn regexp_replace(&self, args: &[Value]) -> Value {
None => "", // If args[2] does not exist, use an empty string
};
match (args.get(0), args.get(1)) {
match (args.first(), args.get(1)) {
(Some(haystack), Some(pattern)) => {
let Some(haystack_text) = haystack.to_text() else {
return Value::from_text("".to_string()); // Return an empty string if haystack is not valid
@ -73,11 +73,11 @@ fn regexp_replace(&self, args: &[Value]) -> Value {
return Value::from_text("".to_string()); // Return an empty string if pattern is not valid
};
let re = match Regex::new(&pattern_text) {
let re = match Regex::new(pattern_text) {
Ok(re) => re,
Err(_) => return Value::from_text("".to_string()), // Return an empty string if regex compilation fails
};
Value::from_text(re.replace(&haystack_text, replacement).to_string())
Value::from_text(re.replace(haystack_text, replacement).to_string())
}
_ => Value::from_text("".to_string()), // Return an empty string for invalid value types
}

View file

@ -103,6 +103,12 @@ pub enum TimeRoundField {
Micro,
}
impl Default for Time {
fn default() -> Self {
Self::new()
}
}
impl Time {
/// Returns a new instance of Time with tracking UTC::now
pub fn new() -> Self {

View file

@ -50,10 +50,7 @@ where
{
fn arbitrary_from<R: rand::Rng>(rng: &mut R, t: A) -> Self {
let size = rng.gen_range(0..5);
(0..size)
.into_iter()
.map(|_| T::arbitrary_from(rng, t))
.collect()
(0..size).map(|_| T::arbitrary_from(rng, t)).collect()
}
}
@ -61,7 +58,8 @@ where
impl ArbitraryFrom<&SimulatorEnv> for Expr {
fn arbitrary_from<R: rand::Rng>(rng: &mut R, t: &SimulatorEnv) -> Self {
let choice = rng.gen_range(0..13);
let expr = match choice {
match choice {
0 => Expr::Between {
lhs: Box::arbitrary_from(rng, t),
not: rng.gen_bool(0.5),
@ -78,7 +76,6 @@ impl ArbitraryFrom<&SimulatorEnv> for Expr {
when_then_pairs: {
let size = rng.gen_range(0..5);
(0..size)
.into_iter()
.map(|_| (Self::arbitrary_from(rng, t), Self::arbitrary_from(rng, t)))
.collect()
},
@ -136,8 +133,7 @@ impl ArbitraryFrom<&SimulatorEnv> for Expr {
// TODO: skip Raise
// TODO: skip subquery
_ => unreachable!(),
};
expr
}
}
}
@ -257,7 +253,7 @@ impl ArbitraryFrom<&Vec<&SimValue>> for ast::Expr {
return Self::Literal(ast::Literal::Null);
}
// TODO: for now just convert the value to an ast::Literal
let value = pick(&values, rng);
let value = pick(values, rng);
Expr::Literal((*value).into())
}
}

View file

@ -10,6 +10,9 @@ pub mod property;
pub mod query;
pub mod table;
type ArbitraryFromFunc<'a, R, T> = Box<dyn Fn(&mut R) -> T + 'a>;
type Choice<'a, R, T> = (usize, Box<dyn Fn(&mut R) -> Option<T> + 'a>);
/// Arbitrary trait for generating random values
/// An implementation of arbitrary is assumed to be a uniform sampling of
/// the possible values of the type, with a bias towards smaller values for
@ -42,12 +45,11 @@ pub trait ArbitraryFromMaybe<T> {
// todo: switch to a simpler type signature that can accommodate all integer and float types, which
// should be enough for our purposes.
pub(crate) fn frequency<
'a,
T,
R: Rng,
N: Sum + PartialOrd + Copy + Default + SampleUniform + SubAssign,
>(
choices: Vec<(N, Box<dyn Fn(&mut R) -> T + 'a>)>,
choices: Vec<(N, ArbitraryFromFunc<R, T>)>,
rng: &mut R,
) -> T {
let total = choices.iter().map(|(weight, _)| *weight).sum::<N>();
@ -64,7 +66,7 @@ pub(crate) fn frequency<
}
/// one_of is a helper function for composing different generators with equal probability of occurrence.
pub(crate) fn one_of<'a, T, R: Rng>(choices: Vec<Box<dyn Fn(&mut R) -> T + 'a>>, rng: &mut R) -> T {
pub(crate) fn one_of<T, R: Rng>(choices: Vec<ArbitraryFromFunc<R, T>>, rng: &mut R) -> T {
let index = rng.gen_range(0..choices.len());
choices[index](rng)
}
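
For context, a usage sketch of the one_of shape (hypothetical generators; the boxed closure type matches the ArbitraryFromFunc alias added in this change):

use rand::Rng;

// Pick uniformly between two ways of generating an i64.
fn small_or_large<R: Rng>(rng: &mut R) -> i64 {
    let choices: Vec<Box<dyn Fn(&mut R) -> i64>> = vec![
        Box::new(|rng| rng.gen_range(0..10)),
        Box::new(|rng| rng.gen_range(1_000..2_000)),
    ];
    let index = rng.gen_range(0..choices.len()); // equal odds, as in one_of
    choices[index](rng)
}

fn main() {
    let v = small_or_large(&mut rand::thread_rng());
    assert!((0..10).contains(&v) || (1_000..2_000).contains(&v));
}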
@ -72,10 +74,7 @@ pub(crate) fn one_of<'a, T, R: Rng>(choices: Vec<Box<dyn Fn(&mut R) -> T + 'a>>,
/// backtrack is a helper function for composing different "failable" generators.
/// The function takes a list of functions that return an Option<T>, along with number of retries
/// to make before giving up.
pub(crate) fn backtrack<'a, T, R: Rng>(
mut choices: Vec<(usize, Box<dyn Fn(&mut R) -> Option<T> + 'a>)>,
rng: &mut R,
) -> Option<T> {
pub(crate) fn backtrack<T, R: Rng>(mut choices: Vec<Choice<R, T>>, rng: &mut R) -> Option<T> {
loop {
// If there are no more choices left, we give up
let choices_ = choices

View file

@ -54,7 +54,7 @@ impl Predicate {
}
/// Produces a true [ast::Expr::Binary] [Predicate] that is true for the provided row in the given table
pub fn true_binary<R: rand::Rng>(rng: &mut R, t: &Table, row: &Vec<SimValue>) -> Predicate {
pub fn true_binary<R: rand::Rng>(rng: &mut R, t: &Table, row: &[SimValue]) -> Predicate {
// Pick a column
let column_index = rng.gen_range(0..t.columns.len());
let column = &t.columns[column_index];
@ -146,7 +146,7 @@ impl Predicate {
}
/// Produces an [ast::Expr::Binary] [Predicate] that is false for the provided row in the given table
pub fn false_binary<R: rand::Rng>(rng: &mut R, t: &Table, row: &Vec<SimValue>) -> Predicate {
pub fn false_binary<R: rand::Rng>(rng: &mut R, t: &Table, row: &[SimValue]) -> Predicate {
// Pick a column
let column_index = rng.gen_range(0..t.columns.len());
let column = &t.columns[column_index];
@ -321,11 +321,11 @@ impl CompoundPredicate {
) -> Self {
// Cannot pick a row if the table is empty
if table.rows.is_empty() {
return Self(
predicate_value
.then_some(Predicate::true_())
.unwrap_or(Predicate::false_()),
);
return Self(if predicate_value {
Predicate::true_()
} else {
Predicate::false_()
});
}
let row = pick(&table.rows, rng);
let predicate = if rng.gen_bool(0.7) {
@ -449,7 +449,7 @@ mod tests {
let predicate = Predicate::true_binary(&mut rng, &table, row);
let value = expr_to_value(&predicate.0, row, &table);
assert!(
value.as_ref().map_or(false, |value| value.into_bool()),
value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
@ -478,7 +478,7 @@ mod tests {
let predicate = Predicate::false_binary(&mut rng, &table, row);
let value = expr_to_value(&predicate.0, row, &table);
assert!(
!value.as_ref().map_or(false, |value| value.into_bool()),
!value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,

View file

@ -268,7 +268,7 @@ mod tests {
let predicate = SimplePredicate::arbitrary_from(&mut rng, (&table, row, true)).0;
let value = expr_to_value(&predicate.0, row, &table);
assert!(
value.as_ref().map_or(false, |value| value.into_bool()),
value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
@ -297,7 +297,7 @@ mod tests {
let predicate = SimplePredicate::arbitrary_from(&mut rng, (&table, row, false)).0;
let value = expr_to_value(&predicate.0, row, &table);
assert!(
!value.as_ref().map_or(false, |value| value.into_bool()),
!value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
@ -326,7 +326,7 @@ mod tests {
let predicate = Predicate::arbitrary_from(&mut rng, (&table, row));
let value = expr_to_value(&predicate.0, row, &table);
assert!(
value.as_ref().map_or(false, |value| value.into_bool()),
value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,

View file

@ -20,7 +20,7 @@ impl ArbitraryFromMaybe<&SimValue> for TrueValue {
Self: Sized,
{
// If the Value is a true value, return it; otherwise a true Value cannot be produced
value.into_bool().then_some(Self(value.clone()))
value.as_bool().then_some(Self(value.clone()))
}
}
@ -46,7 +46,7 @@ impl ArbitraryFromMaybe<&SimValue> for FalseValue {
Self: Sized,
{
// If the Value is a false value, return it; otherwise a false Value cannot be produced
(!value.into_bool()).then_some(Self(value.clone()))
(!value.as_bool()).then_some(Self(value.clone()))
}
}
@ -76,7 +76,7 @@ impl ArbitraryFromMaybe<(&SimValue, bool)> for BitNotValue {
{
let bit_not_val = value.unary_exec(ast::UnaryOperator::BitwiseNot);
// If the bitwise NOT of the Value meets the predicate, return Some; otherwise None
(bit_not_val.into_bool() == predicate).then_some(BitNotValue(value.clone()))
(bit_not_val.as_bool() == predicate).then_some(BitNotValue(value.clone()))
}
}
@ -115,7 +115,7 @@ impl SimplePredicate {
num_retries,
Box::new(|rng| {
TrueValue::arbitrary_from_maybe(rng, column_value).map(|value| {
assert!(value.0.into_bool());
assert!(value.0.as_bool());
// Positive is a no-op in SQLite
Expr::unary(ast::UnaryOperator::Positive, Expr::Literal(value.0.into()))
})
@ -125,7 +125,7 @@ impl SimplePredicate {
num_retries,
Box::new(|rng| {
TrueValue::arbitrary_from_maybe(rng, column_value).map(|value| {
assert!(value.0.into_bool());
assert!(value.0.as_bool());
// True Value with negative is still True
Expr::unary(ast::UnaryOperator::Negative, Expr::Literal(value.0.into()))
})
@ -146,7 +146,7 @@ impl SimplePredicate {
num_retries,
Box::new(|rng| {
FalseValue::arbitrary_from_maybe(rng, column_value).map(|value| {
assert!(!value.0.into_bool());
assert!(!value.0.as_bool());
Expr::unary(ast::UnaryOperator::Not, Expr::Literal(value.0.into()))
})
}),
@ -176,7 +176,7 @@ impl SimplePredicate {
num_retries,
Box::new(|rng| {
FalseValue::arbitrary_from_maybe(rng, column_value).map(|value| {
assert!(!value.0.into_bool());
assert!(!value.0.as_bool());
// Positive is a no-op in SQLite
Expr::unary(ast::UnaryOperator::Positive, Expr::Literal(value.0.into()))
})
@ -186,7 +186,7 @@ impl SimplePredicate {
num_retries,
Box::new(|rng| {
FalseValue::arbitrary_from_maybe(rng, column_value).map(|value| {
assert!(!value.0.into_bool());
assert!(!value.0.as_bool());
// False Value with negative is still False
Expr::unary(ast::UnaryOperator::Negative, Expr::Literal(value.0.into()))
})
@ -207,7 +207,7 @@ impl SimplePredicate {
num_retries,
Box::new(|rng| {
TrueValue::arbitrary_from_maybe(rng, column_value).map(|value| {
assert!(value.0.into_bool());
assert!(value.0.as_bool());
Expr::unary(ast::UnaryOperator::Not, Expr::Literal(value.0.into()))
})
}),

View file

@ -416,7 +416,7 @@ impl Property {
.iter()
.filter(|vs| {
let v = vs.first().unwrap();
v.into_bool()
v.as_bool()
})
.count();
Ok(rows1_count == rows2.len())
@ -582,15 +582,12 @@ fn property_double_create_failure<R: rand::Rng>(
// - [ ] Table `t` will not be renamed or dropped. (todo: add this constraint once ALTER or DROP is implemented)
for _ in 0..rng.gen_range(0..3) {
let query = Query::arbitrary_from(rng, (env, remaining));
match &query {
Query::Create(Create { table: t }) => {
// There will be no errors in the middle interactions.
// - Creating the same table is an error
if t.name == table.name {
continue;
}
if let Query::Create(Create { table: t }) = &query {
// There will be no errors in the middle interactions.
// - Creating the same table is an error
if t.name == table.name {
continue;
}
_ => (),
}
queries.push(query);
}
@ -658,14 +655,11 @@ fn property_drop_select<R: rand::Rng>(
// - [-] The table `t` will not be created, no table will be renamed to `t`. (todo: update this constraint once ALTER is implemented)
for _ in 0..rng.gen_range(0..3) {
let query = Query::arbitrary_from(rng, (env, remaining));
match &query {
Query::Create(Create { table: t }) => {
// - The table `t` will not be created
if t.name == table.name {
continue;
}
if let Query::Create(Create { table: t }) = &query {
// - The table `t` will not be created
if t.name == table.name {
continue;
}
_ => (),
}
queries.push(query);
}
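
Both property rewrites above are clippy's single_match lint: a match whose only meaningful arm sits next to a catch-all _ => () arm reads more directly as if let. In miniature, with a hypothetical Query enum and handler:

enum Query {
    Create(String),
    Other,
}

fn handle(q: &Query) {
    // was:
    //   match q {
    //       Query::Create(name) => println!("create {}", name),
    //       _ => (),
    //   }
    // becomes:
    if let Query::Create(name) = q {
        println!("create {}", name);
    }
}

fn main() {
    handle(&Query::Create("t1".into()));
    handle(&Query::Other);
}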

View file

@ -55,12 +55,7 @@ impl ArbitraryFrom<&SimulatorEnv> for Insert {
let _gen_select = |rng: &mut R| {
// Find a non-empty table
let table = env.tables.iter().find(|t| !t.rows.is_empty());
if table.is_none() {
return None;
}
let select_table = table.unwrap();
let select_table = env.tables.iter().find(|t| !t.rows.is_empty())?;
let row = pick(&select_table.rows, rng);
let predicate = Predicate::arbitrary_from(rng, (select_table, row));
// Pick another table to insert into
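
The collapsed lookup above relies on the ? operator applying to Option inside any function or closure that itself returns Option; a minimal sketch with a hypothetical helper:

fn first_non_empty(tables: &[Vec<u8>]) -> Option<&Vec<u8>> {
    // ? early-returns None when find comes up empty
    let t = tables.iter().find(|t| !t.is_empty())?;
    Some(t)
}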
@ -81,7 +76,7 @@ impl ArbitraryFrom<&SimulatorEnv> for Insert {
// Backtrack here cannot return None
backtrack(
vec![
(1, Box::new(|rng| gen_values(rng))),
(1, Box::new(gen_values)),
// todo: test and enable this once `INSERT INTO <table> SELECT * FROM <table>` is supported
// (1, Box::new(|rng| gen_select(rng))),
],

View file

@ -122,8 +122,8 @@ impl ArbitraryFrom<&SimValue> for LTValue {
let index = rng.gen_range(0..t.len());
t[index] -= 1;
// Mutate the rest of the string
for i in (index + 1)..t.len() {
t[i] = rng.gen_range('a' as u32..='z' as u32);
for val in t.iter_mut().skip(index + 1) {
*val = rng.gen_range('a' as u32..='z' as u32);
}
let t = t
.into_iter()
@ -142,8 +142,8 @@ impl ArbitraryFrom<&SimValue> for LTValue {
let index = rng.gen_range(0..b.len());
b[index] -= 1;
// Mutate the rest of the blob
for i in (index + 1)..b.len() {
b[i] = rng.gen_range(0..=255);
for val in b.iter_mut().skip(index + 1) {
*val = rng.gen_range(0..=255);
}
Value::Blob(b)
}
@ -184,8 +184,8 @@ impl ArbitraryFrom<&SimValue> for GTValue {
let index = rng.gen_range(0..t.len());
t[index] += 1;
// Mutate the rest of the string
for i in (index + 1)..t.len() {
t[i] = rng.gen_range('a' as u32..='z' as u32);
for val in t.iter_mut().skip(index + 1) {
*val = rng.gen_range('a' as u32..='z' as u32);
}
let t = t
.into_iter()
@ -204,8 +204,8 @@ impl ArbitraryFrom<&SimValue> for GTValue {
let index = rng.gen_range(0..b.len());
b[index] += 1;
// Mutate the rest of the blob
for i in (index + 1)..b.len() {
b[i] = rng.gen_range(0..=255);
for val in b.iter_mut().skip(index + 1) {
*val = rng.gen_range(0..=255);
}
Value::Blob(b)
}
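
These loop rewrites are clippy's needless_range_loop: indexing over a computed range becomes an iterator over the same elements, dropping the manual bounds arithmetic. The pattern in isolation:

fn zero_suffix(b: &mut [u8], index: usize) {
    // was: for i in (index + 1)..b.len() { b[i] = 0; }
    for val in b.iter_mut().skip(index + 1) {
        *val = 0;
    }
}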

View file

@ -22,7 +22,7 @@ impl Predicate {
pub(crate) fn test(&self, row: &[SimValue], table: &Table) -> bool {
let value = expr_to_value(&self.0, row, table);
value.map_or(false, |value| value.into_bool())
value.map_or(false, |value| value.as_bool())
}
}

View file

@ -106,7 +106,7 @@ impl SimValue {
pub const FALSE: Self = SimValue(types::Value::Integer(0));
pub const TRUE: Self = SimValue(types::Value::Integer(1));
pub fn into_bool(&self) -> bool {
pub fn as_bool(&self) -> bool {
Numeric::from(&self.0).try_into_bool().unwrap_or_default()
}
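
The into_bool to as_bool rename is clippy's wrong_self_convention: an into_ prefix promises a consuming conversion (self by value), while this method only borrows. The convention in miniature:

struct Wrapper(i64);

impl Wrapper {
    fn as_bool(&self) -> bool { self.0 != 0 }  // borrows self: as_/to_ prefix
    fn into_inner(self) -> i64 { self.0 }      // consumes self: into_ prefix
}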
@ -197,7 +197,7 @@ impl From<&ast::Literal> for SimValue {
ast::Literal::Null => types::Value::Null,
ast::Literal::Numeric(number) => Numeric::from(number).into(),
// TODO: see how to avoid sanitizing here
ast::Literal::String(string) => types::Value::build_text(sanitize_string(&string)),
ast::Literal::String(string) => types::Value::build_text(sanitize_string(string)),
ast::Literal::Blob(blob) => types::Value::Blob(
blob.as_bytes()
.chunks_exact(2)
@ -235,7 +235,11 @@ impl From<&SimValue> for ast::Literal {
impl From<bool> for SimValue {
fn from(value: bool) -> Self {
value.then_some(SimValue::TRUE).unwrap_or(SimValue::FALSE)
if value {
SimValue::TRUE
} else {
SimValue::FALSE
}
}
}
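
This swaps cond.then_some(a).unwrap_or(b) for a plain if/else (clippy's obfuscated_if_else); the same change appears in the trigger and virtual-table formatters later in this commit. In miniature:

fn main() {
    let flag = true;
    // was: let s = flag.then_some("yes").unwrap_or("no");
    let s = if flag { "yes" } else { "no" };
    assert_eq!(s, "yes");
}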

View file

@ -338,7 +338,7 @@ impl BugBase {
}
pub(crate) fn load_bugs(&mut self) -> anyhow::Result<Vec<LoadedBug>> {
let seeds = self.bugs.keys().map(|seed| *seed).collect::<Vec<_>>();
let seeds = self.bugs.keys().copied().collect::<Vec<_>>();
seeds
.iter()

View file

@ -57,8 +57,8 @@ impl SimulatorFile {
"--------- -------- --------".to_string(),
format!("total {:8} {:8}", sum_calls, sum_faults),
];
let table = stats_table.join("\n");
table
stats_table.join("\n")
}
}
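
This is clippy's let_and_return: binding a value only to return it on the next line collapses into the expression itself. For example:

fn render(lines: &[String]) -> String {
    // was: let table = lines.join("\n"); table
    lines.join("\n")
}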

View file

@ -5,7 +5,6 @@ use antithesis_sdk::random::{get_random, AntithesisRng};
use antithesis_sdk::*;
use clap::Parser;
use core::panic;
use hex;
use limbo::Builder;
use opts::Opts;
use std::collections::HashSet;
@ -323,14 +322,12 @@ fn generate_plan(opts: &Opts) -> Result<Plan, Box<dyn std::error::Error + Send +
for _ in 0..opts.nr_threads {
let mut queries = vec![];
for i in 0..opts.nr_iterations {
if !opts.silent && !opts.verbose {
if i % 100 == 0 {
print!(
"\r{} %",
(i as f64 / opts.nr_iterations as f64 * 100.0) as usize
);
std::io::stdout().flush().unwrap();
}
if !opts.silent && !opts.verbose && i % 100 == 0 {
print!(
"\r{} %",
(i as f64 / opts.nr_iterations as f64 * 100.0) as usize
);
std::io::stdout().flush().unwrap();
}
let sql = generate_random_statement(&schema);
if !opts.skip_log {
@ -407,18 +404,15 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let _g = init_tracing()?;
antithesis_init();
let mut opts = Opts::parse();
let opts = Opts::parse();
if opts.nr_threads > 1 {
println!("ERROR: Multi-threaded data access is not yet supported: https://github.com/tursodatabase/limbo/issues/1552");
return Ok(());
}
let plan = if opts.load_log {
println!("Loading plan from log file...");
read_plan_from_log_file(&mut opts)?
read_plan_from_log_file(&opts)?
} else {
println!("Generating plan...");
generate_plan(&opts)?
};
@ -461,23 +455,10 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let handle = tokio::spawn(async move {
let conn = db.connect()?;
println!("\rExecuting queries...");
for query_index in 0..nr_iterations {
let sql = &plan.queries_per_thread[thread][query_index];
if !opts.silent {
if opts.verbose {
println!("executing query {}", sql);
} else {
if query_index % 100 == 0 {
print!(
"\r{:.2} %",
(query_index as f64 / nr_iterations as f64 * 100.0)
);
std::io::stdout().flush().unwrap();
}
}
}
if let Err(e) = conn.execute(&sql, ()).await {
println!("executing: {}", sql);
if let Err(e) = conn.execute(sql, ()).await {
match e {
limbo::Error::SqlExecutionFailure(e) => {
if e.contains("Corrupt database") {

View file

@ -7,6 +7,13 @@ extern "C" {
}
#[no_mangle]
/// Initialize the Limbo SQLite Test Extension.
///
/// # Safety
///
/// This function is unsafe because it interacts with raw pointers and FFI.
/// Caller must ensure that `db`, `err_msg`, and `api` are valid pointers,
/// and that the SQLite database handle is properly initialized.
pub unsafe extern "C" fn sqlite3_limbosqlitetestext_init(
db: *mut std::ffi::c_void,
err_msg: *mut *mut i8,

View file

@ -142,7 +142,7 @@ pub(crate) fn sqlite_exec_rows(
conn: &rusqlite::Connection,
query: &str,
) -> Vec<Vec<rusqlite::types::Value>> {
let mut stmt = conn.prepare(&query).unwrap();
let mut stmt = conn.prepare(query).unwrap();
let mut rows = stmt.query(params![]).unwrap();
let mut results = Vec::new();
while let Some(row) = rows.next().unwrap() {
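
The prepare(query) fix is clippy's needless_borrow: query is already a &str, so &query builds a &&str that only auto-derefs back again. In miniature:

fn shout(s: &str) -> String {
    s.to_uppercase()
}

fn main() {
    let q: &str = "select 1";
    let _a = shout(q);  // preferred
    let _b = shout(&q); // compiles via auto-deref, but clippy flags the &&str
}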
@ -221,8 +221,6 @@ pub(crate) fn limbo_exec_rows_error(
#[cfg(test)]
mod tests {
use std::vec;
use rand::Rng;
use tempfile::TempDir;
use super::{limbo_exec_rows, limbo_exec_rows_error, TempDatabase};
@ -297,6 +295,8 @@ mod tests {
#[test]
#[cfg(feature = "index_experimental")]
fn test_unique_index_ordering() -> anyhow::Result<()> {
use rand::Rng;
let db = TempDatabase::new_empty();
let conn = db.connect_limbo();

View file

@ -6,7 +6,7 @@
/// 2. Symbol -> [Int]: generate terminals which form an integer from the specified range
/// 3. Symbol -> (Inner)?: generate expansion for Inner symbol with some probability
/// 4. Symbol -> (Inner){n..m}: generate k expansions for Inner symbol where k \in [n..m) with uniform distribution
/// (note that every repetition will be expanded independently)
///    (note that every repetition will be expanded independently)
/// 5. Symbol -> Inner1 Inner2 .. Inner[n]: concatenate expansions from inner symbols and insert separator string between them
/// 6. Symbol -> Choice1 | Choice2 | .. | Choice[n]: pick random choice according to their weights randomly and generate expansion for it
///
@ -123,7 +123,7 @@ impl GrammarGenerator {
root: SymbolHandle,
is_recursive: &mut HashMap<SymbolHandle, bool>,
) -> bool {
if let Some(_) = is_recursive.get(&root) {
if is_recursive.get(&root).is_some() {
is_recursive.insert(root, true);
return true;
}
@ -233,10 +233,10 @@ impl GrammarGenerator {
values
.iter()
.filter(|x| is_recursive.get(&x.0) != Some(&true))
.map(|x| *x)
.copied()
.collect::<Vec<_>>()
};
if handles.len() == 0 {
if handles.is_empty() {
handles = values.clone();
}
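
Two small lints in one hunk: map(|x| *x) over an iterator of references is copied() (map_clone), and len() == 0 is is_empty() (len_zero). In miniature:

fn main() {
    let xs = vec![1, 2, 3];
    let ys: Vec<i32> = xs.iter().copied().collect(); // was .map(|x| *x)
    assert!(!ys.is_empty());                         // was ys.len() == 0
}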

View file

@ -2,9 +2,12 @@ pub mod grammar_generator;
#[cfg(test)]
mod tests {
#[cfg(feature = "index_experimental")]
use rand::seq::IndexedRandom;
#[cfg(feature = "index_experimental")]
use std::collections::HashSet;
use rand::{seq::IndexedRandom, Rng, SeedableRng};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use rusqlite::params;
@ -400,7 +403,7 @@ mod tests {
comp3.map(|x| format!("z {} {}", x, col_val_third.unwrap())),
]
.into_iter()
.filter_map(|x| x)
.flatten()
.collect::<Vec<_>>();
let where_clause = if where_clause_components.is_empty() {
"".to_string()
@ -415,7 +418,7 @@ mod tests {
order_by3.map(|x| format!("z {}", x)),
]
.into_iter()
.filter_map(|x| x)
.flatten()
.collect::<Vec<_>>();
let order_by = if order_by_components.is_empty() {
"".to_string()
@ -436,7 +439,7 @@ mod tests {
// Execute the query on all databases and compare the results
for (i, sqlite_conn) in sqlite_conns.iter().enumerate() {
let limbo = limbo_exec_rows(&dbs[i], &limbo_conns[i], &query);
let sqlite = sqlite_exec_rows(&sqlite_conn, &query);
let sqlite = sqlite_exec_rows(sqlite_conn, &query);
if limbo != sqlite {
// if the order by contains exclusively components that are constrained by an equality (=),
// SQLite sometimes doesn't bother with ASC/DESC because it doesn't semantically matter
@ -457,7 +460,7 @@ mod tests {
let query_no_limit =
format!("SELECT * FROM t {} {} {}", where_clause, order_by, "");
let limbo_no_limit = limbo_exec_rows(&dbs[i], &limbo_conns[i], &query_no_limit);
let sqlite_no_limit = sqlite_exec_rows(&sqlite_conn, &query_no_limit);
let sqlite_no_limit = sqlite_exec_rows(sqlite_conn, &query_no_limit);
let limbo_rev = limbo_no_limit.iter().cloned().rev().collect::<Vec<_>>();
if limbo_rev == sqlite_no_limit && order_by_only_equalities {
continue;
@ -988,6 +991,7 @@ mod tests {
pub cast_expr: SymbolHandle,
pub case_expr: SymbolHandle,
pub cmp_op: SymbolHandle,
#[cfg(feature = "index_experimental")]
pub number: SymbolHandle,
}
@ -1222,10 +1226,12 @@ mod tests {
cast_expr,
case_expr,
cmp_op,
#[cfg(feature = "index_experimental")]
number,
}
}
#[cfg(feature = "index_experimental")]
fn predicate_builders(g: &GrammarGenerator, tables: Option<&[TestTable]>) -> PredicateBuilders {
let (in_op, in_op_builder) = g.create_handle();
let (column, column_builder) = g.create_handle();
@ -1439,7 +1445,7 @@ mod tests {
i += 1;
}
// verify the same number of rows in both tables
let query = format!("SELECT COUNT(*) FROM t");
let query = "SELECT COUNT(*) FROM t".to_string();
let limbo = limbo_exec_rows(&db, &limbo_conn, &query);
let sqlite = sqlite_exec_rows(&sqlite_conn, &query);
assert_eq!(limbo, sqlite, "seed: {}", seed);

View file

@ -80,7 +80,7 @@ fn test_statement_bind() -> anyhow::Result<()> {
}
if let limbo_core::Value::Blob(v) = row.get::<&Value>(3).unwrap() {
assert_eq!(v.as_slice(), &vec![0x1 as u8, 0x2, 0x3])
assert_eq!(v.as_slice(), &vec![0x1_u8, 0x2, 0x3])
}
if let limbo_core::Value::Float(f) = row.get::<&Value>(4).unwrap() {

View file

@ -96,8 +96,7 @@ fn test_sequential_overflow_page() -> anyhow::Result<()> {
huge_texts.push(huge_text);
}
for i in 0..iterations {
let huge_text = &huge_texts[i];
for (i, huge_text) in huge_texts.iter().enumerate().take(iterations) {
let insert_query = format!("INSERT INTO test VALUES ({}, '{}')", i, huge_text.as_str());
match conn.query(insert_query) {
Ok(Some(ref mut rows)) => loop {
@ -168,7 +167,7 @@ fn test_sequential_write() -> anyhow::Result<()> {
run_query(&tmp_db, &conn, &insert_query)?;
let mut current_read_index = 0;
run_query_on_row(&tmp_db, &conn, &list_query, |row: &Row| {
run_query_on_row(&tmp_db, &conn, list_query, |row: &Row| {
let first_value = row.get::<&Value>(0).expect("missing id");
let id = match first_value {
limbo_core::Value::Integer(i) => *i as i32,

View file

@ -105,18 +105,15 @@ impl Stmt {
match self {
Self::AlterTable(alter_table) => {
let (_, body) = &**alter_table;
match body {
AlterTableBody::AddColumn(cd) => {
for c in cd {
if let ColumnConstraint::PrimaryKey { .. } = c {
return Err(custom_err!("Cannot add a PRIMARY KEY column"));
}
if let ColumnConstraint::Unique(..) = c {
return Err(custom_err!("Cannot add a UNIQUE column"));
}
if let AlterTableBody::AddColumn(cd) = body {
for c in cd {
if let ColumnConstraint::PrimaryKey { .. } = c {
return Err(custom_err!("Cannot add a PRIMARY KEY column"));
}
if let ColumnConstraint::Unique(..) = c {
return Err(custom_err!("Cannot add a UNIQUE column"));
}
}
_ => {}
}
Ok(())
}
@ -164,10 +161,8 @@ impl Stmt {
let Delete {
order_by, limit, ..
} = &**delete;
if let Some(_) = order_by {
if limit.is_none() {
return Err(custom_err!("ORDER BY without LIMIT on DELETE"));
}
if order_by.is_some() && limit.is_none() {
return Err(custom_err!("ORDER BY without LIMIT on DELETE"));
}
Ok(())
}
@ -177,7 +172,7 @@ impl Stmt {
return Ok(());
}
let columns = columns.as_ref().unwrap();
match &*body {
match body {
InsertBody::Select(select, ..) => match select.body.select.column_count() {
ColumnCount::Fixed(n) if n != columns.len() => {
Err(custom_err!("{} values for {} columns", n, columns.len()))
@ -193,10 +188,8 @@ impl Stmt {
let Update {
order_by, limit, ..
} = &**update;
if let Some(_) = order_by {
if limit.is_none() {
return Err(custom_err!("ORDER BY without LIMIT on UPDATE"));
}
if order_by.is_some() && limit.is_none() {
return Err(custom_err!("ORDER BY without LIMIT on UPDATE"));
}
Ok(())

View file

@ -71,7 +71,7 @@ impl ToSqlString for Expr {
Expr::Collate(expr, name) => {
ret.push_str(&expr.to_sql_string(context));
ret.push_str(" COLLATE ");
ret.push_str(&name);
ret.push_str(name);
}
Expr::DoublyQualified(name, name1, name2) => {
ret.push_str(&name.0);

View file

@ -23,7 +23,7 @@ pub trait ToSqlString {
impl<T: ToSqlString> ToSqlString for Box<T> {
fn to_sql_string<C: ToSqlContext>(&self, context: &C) -> String {
T::to_sql_string(&self, context)
T::to_sql_string(self, context)
}
}

View file

@ -44,7 +44,7 @@ impl ToSqlString for ast::NamedTableConstraint {
self.constraint.to_sql_string(context)
)
} else {
format!("{}", self.constraint.to_sql_string(context))
self.constraint.to_sql_string(context).to_string()
}
}
}
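
A format! call with a lone {} placeholder is clippy's useless_format; to_string() produces the same string with less machinery:

fn main() {
    let n = 7;
    assert_eq!(format!("{}", n), n.to_string()); // prefer to_string()
}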

View file

@ -9,14 +9,22 @@ impl ToSqlString for ast::CreateTrigger {
fn to_sql_string<C: crate::to_sql_string::ToSqlContext>(&self, context: &C) -> String {
format!(
"CREATE{} TRIGGER {}{}{} {} ON {}{}{} BEGIN {} END;",
self.temporary.then_some(" TEMP").unwrap_or(""),
self.if_not_exists.then_some("IF NOT EXISTS ").unwrap_or(""),
if self.temporary { " TEMP" } else { "" },
if self.if_not_exists {
"IF NOT EXISTS "
} else {
""
},
self.trigger_name.to_sql_string(context),
self.time
.map_or("".to_string(), |time| format!(" {}", time)),
self.event,
self.tbl_name.to_sql_string(context),
self.for_each_row.then_some(" FOR EACH ROW").unwrap_or(""),
if self.for_each_row {
" FOR EACH ROW"
} else {
""
},
self.when_clause
.as_ref()
.map_or("".to_string(), |expr| format!(

View file

@ -4,7 +4,11 @@ impl ToSqlString for ast::CreateVirtualTable {
fn to_sql_string<C: crate::to_sql_string::ToSqlContext>(&self, context: &C) -> String {
format!(
"CREATE VIRTUAL TABLE {}{} USING {}{};",
self.if_not_exists.then_some("IF NOT EXISTS ").unwrap_or(""),
if self.if_not_exists {
"IF NOT EXISTS "
} else {
""
},
self.tbl_name.to_sql_string(context),
self.module_name.0,
self.args

View file

@ -26,7 +26,7 @@ impl ToSqlString for ast::Stmt {
if let Some(name) = name {
format!("ANALYZE {};", name.to_sql_string(context))
} else {
format!("ANALYZE;")
"ANALYZE;".to_string()
}
}
Self::Attach {
@ -211,15 +211,15 @@ mod tests {
($test_name:ident, $input:expr) => {
#[test]
fn $test_name() {
let context = crate::to_sql_string::stmt::tests::TestContext;
let context = $crate::to_sql_string::stmt::tests::TestContext;
let input = $input.split_whitespace().collect::<Vec<&str>>().join(" ");
let mut parser = crate::lexer::sql::Parser::new(input.as_bytes());
let mut parser = $crate::lexer::sql::Parser::new(input.as_bytes());
let cmd = fallible_iterator::FallibleIterator::next(&mut parser)
.unwrap()
.unwrap();
assert_eq!(
input,
crate::to_sql_string::ToSqlString::to_sql_string(cmd.stmt(), &context)
$crate::to_sql_string::ToSqlString::to_sql_string(cmd.stmt(), &context)
);
}
};
@ -227,15 +227,15 @@ mod tests {
#[test]
$(#[$attribute])*
fn $test_name() {
let context = crate::to_sql_string::stmt::tests::TestContext;
let context = $crate::to_sql_string::stmt::tests::TestContext;
let input = $input.split_whitespace().collect::<Vec<&str>>().join(" ");
let mut parser = crate::lexer::sql::Parser::new(input.as_bytes());
let mut parser = $crate::lexer::sql::Parser::new(input.as_bytes());
let cmd = fallible_iterator::FallibleIterator::next(&mut parser)
.unwrap()
.unwrap();
assert_eq!(
input,
crate::to_sql_string::ToSqlString::to_sql_string(cmd.stmt(), &context)
$crate::to_sql_string::ToSqlString::to_sql_string(cmd.stmt(), &context)
);
}
}
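
The crate:: to $crate:: switch is clippy's crate_in_macro_def lint: $crate always resolves against the crate that defines the macro, so paths inside the expansion keep working wherever the macro is invoked. A hypothetical illustration:

pub fn answer() -> i32 { 42 }

#[macro_export]
macro_rules! answer {
    () => {
        $crate::answer() // resolves in the defining crate, not the caller's
    };
}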