ruff lint fix

pedrocarlo 2025-06-19 16:59:49 -03:00
parent 50c8b2ca2e
commit 80ccca8827
29 changed files with 375 additions and 434 deletions
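The hunks below all apply a small set of mechanical rewrites: f-string prefixes dropped from strings that contain no placeholders, single quotes normalized to double quotes, import blocks sorted, multi-line call arguments collapsed, and over-long lines either reflowed or tagged # noqa: E501. The ruff config is not part of this commit, so the rule codes are inferred (likely F541 for the pointless f prefixes and I001 for import order). A self-contained sketch of the most common rewrite, with sqlite3 standing in for the limbo driver so it runs anywhere:

import sqlite3

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE accounts (balance REAL)")

# Before: the f prefix does nothing because the string has no placeholders,
# and the single-quote style is inconsistent with the rest of the codebase.
total = cur.execute(f'''
SELECT SUM(balance) AS total FROM accounts;
''').fetchone()

# After: plain triple-quoted string with double quotes, as in the hunks below.
total = cur.execute("""
SELECT SUM(balance) AS total FROM accounts;
""").fetchone()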

View file

@@ -1,7 +1,6 @@
#!/usr/bin/env -S python3 -u
import limbo
from antithesis.random import get_random
from antithesis.assertions import always
try:
@@ -12,20 +11,16 @@ except Exception as e:
cur = con.cursor()
initial_state = cur.execute(f'''
initial_state = cur.execute("""
SELECT * FROM initial_state
''').fetchone()
""").fetchone()
curr_total = cur.execute(f'''
curr_total = cur.execute("""
SELECT SUM(balance) AS total FROM accounts;
''').fetchone()
""").fetchone()
always(
initial_state[1] == curr_total[0],
'[Anytime] Initial balance always equals current balance',
{
'init_bal': initial_state[1],
'curr_bal': curr_total[0]
}
initial_state[1] == curr_total[0],
"[Anytime] Initial balance always equals current balance",
{"init_bal": initial_state[1], "curr_bal": curr_total[0]},
)

View file

@@ -1,7 +1,6 @@
#!/usr/bin/env -S python3 -u
import limbo
from antithesis.random import get_random
from antithesis.assertions import always
try:
@@ -12,20 +11,16 @@ except Exception as e:
cur = con.cursor()
initial_state = cur.execute(f'''
initial_state = cur.execute("""
SELECT * FROM initial_state
''').fetchone()
""").fetchone()
curr_total = cur.execute(f'''
curr_total = cur.execute("""
SELECT SUM(balance) AS total FROM accounts;
''').fetchone()
""").fetchone()
always(
initial_state[1] == curr_total[0],
'[Eventually] Initial balance always equals current balance',
{
'init_bal': initial_state[1],
'curr_bal': curr_total[0]
}
initial_state[1] == curr_total[0],
"[Eventually] Initial balance always equals current balance",
{"init_bal": initial_state[1], "curr_bal": curr_total[0]},
)

View file

@@ -1,7 +1,6 @@
#!/usr/bin/env -S python3 -u
import limbo
from antithesis.random import get_random
from antithesis.assertions import always
try:
@@ -12,20 +11,16 @@ except Exception as e:
cur = con.cursor()
initial_state = cur.execute(f'''
initial_state = cur.execute("""
SELECT * FROM initial_state
''').fetchone()
""").fetchone()
curr_total = cur.execute(f'''
curr_total = cur.execute("""
SELECT SUM(balance) AS total FROM accounts;
''').fetchone()
""").fetchone()
always(
initial_state[1] == curr_total[0],
'[Finally] Initial balance always equals current balance',
{
'init_bal': initial_state[1],
'curr_bal': curr_total[0]
}
initial_state[1] == curr_total[0],
"[Finally] Initial balance always equals current balance",
{"init_bal": initial_state[1], "curr_bal": curr_total[0]},
)

View file

@@ -12,16 +12,16 @@ except Exception as e:
cur = con.cursor()
# drop accounts table if it exists and create a new table
cur.execute(f'''
cur.execute("""
DROP TABLE IF EXISTS accounts;
''')
""")
cur.execute(f'''
cur.execute("""
CREATE TABLE accounts (
account_id INTEGER PRIMARY KEY AUTOINCREMENT,
balance REAL NOT NULL DEFAULT 0.0
);
''')
""")
# randomly create up to 100 accounts with a balance up to 1e9
total = 0
@@ -29,24 +29,24 @@ num_accts = get_random() % 100 + 1
for i in range(num_accts):
bal = get_random() % 1e9
total += bal
cur.execute(f'''
cur.execute(f"""
INSERT INTO accounts (balance)
VALUES ({bal})
''')
""")
# drop initial_state table if it exists and create a new table
cur.execute(f'''
cur.execute("""
DROP TABLE IF EXISTS initial_state;
''')
cur.execute(f'''
""")
cur.execute("""
CREATE TABLE initial_state (
num_accts INTEGER,
total REAL
);
''')
""")
# store initial state in the table
cur.execute(f'''
cur.execute(f"""
INSERT INTO initial_state (num_accts, total)
VALUES ({num_accts}, {total})
''')
""")

View file

@@ -1,14 +1,17 @@
#!/usr/bin/env -S python3 -u
import limbo
import logging
from logging.handlers import RotatingFileHandler
import limbo
from antithesis.random import get_random
handler = RotatingFileHandler(filename='bank_test.log', mode='a', maxBytes=1*1024*1024, backupCount=5, encoding=None, delay=0)
handler = RotatingFileHandler(
filename="bank_test.log", mode="a", maxBytes=1 * 1024 * 1024, backupCount=5, encoding=None, delay=0
)
handler.setLevel(logging.INFO)
logger = logging.getLogger('root')
logger = logging.getLogger("root")
logger.setLevel(logging.INFO)
logger.addHandler(handler)
@@ -23,6 +26,7 @@ cur = con.cursor()
length = cur.execute("SELECT num_accts FROM initial_state").fetchone()[0]
def transaction():
# check that sender and recipient are different
sender = get_random() % length + 1
@@ -34,23 +38,24 @@ def transaction():
logger.info(f"Sender ID: {sender} | Recipient ID: {recipient} | Txn Val: {value}")
cur.execute("BEGIN TRANSACTION;")
# subtract value from balance of the sender account
cur.execute(f'''
UPDATE accounts
cur.execute(f"""
UPDATE accounts
SET balance = balance - {value}
WHERE account_id = {sender};
''')
""")
# add value to balance of the recipient account
cur.execute(f'''
UPDATE accounts
cur.execute(f"""
UPDATE accounts
SET balance = balance + {value}
WHERE account_id = {recipient};
''')
""")
cur.execute("COMMIT;")
# run up to 100 transactions
iterations = get_random() % 100
# logger.info(f"Starting {iterations} iterations")
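A side note the linter cannot catch: the two UPDATE statements above interpolate value, sender, and recipient straight into the SQL text. A hedged alternative sketch, assuming the limbo cursor accepts sqlite3-style qmark parameters (not verified against the driver here; the three variables come from the surrounding file):

cur.execute("BEGIN TRANSACTION;")
# hypothetical parameterized form of the same transfer
cur.execute("UPDATE accounts SET balance = balance - ? WHERE account_id = ?", (value, sender))
cur.execute("UPDATE accounts SET balance = balance + ? WHERE account_id = ?", (value, recipient))
cur.execute("COMMIT;")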

View file

@@ -1,22 +1,23 @@
#!/usr/bin/env -S python3 -u
import json
import glob
import json
import os
import limbo
from antithesis.random import get_random, random_choice
constraints = ['NOT NULL', '']
data_type = ['INTEGER', 'REAL', 'TEXT', 'BLOB', 'NUMERIC']
constraints = ["NOT NULL", ""]
data_type = ["INTEGER", "REAL", "TEXT", "BLOB", "NUMERIC"]
# remove any existing db files
for f in glob.glob('*.db'):
for f in glob.glob("*.db"):
try:
os.remove(f)
except OSError:
pass
for f in glob.glob('*.db-wal'):
for f in glob.glob("*.db-wal"):
try:
os.remove(f)
except OSError:
@@ -24,17 +25,17 @@ for f in glob.glob('*.db-wal'):
# store initial states in a separate db
try:
con_init = limbo.connect('init_state.db')
con_init = limbo.connect("init_state.db")
except Exception as e:
print(f"Error connecting to database: {e}")
exit(0)
cur_init = con_init.cursor()
cur_init.execute('CREATE TABLE schemas (schema TEXT, tbl INT)')
cur_init.execute('CREATE TABLE tables (count INT)')
cur_init.execute("CREATE TABLE schemas (schema TEXT, tbl INT)")
cur_init.execute("CREATE TABLE tables (count INT)")
try:
con = limbo.connect('stress_composer.db')
con = limbo.connect("stress_composer.db")
except Exception as e:
print(f"Error connecting to database: {e}")
exit(0)
@@ -43,43 +44,43 @@ cur = con.cursor()
tbl_count = max(1, get_random() % 10)
cur_init.execute(f'INSERT INTO tables (count) VALUES ({tbl_count})')
cur_init.execute(f"INSERT INTO tables (count) VALUES ({tbl_count})")
schemas = []
for i in range(tbl_count):
col_count = max(1, get_random() % 10)
pk = get_random() % col_count
schema = {
'table': i,
'colCount': col_count,
'pk': pk
}
schema = {"table": i, "colCount": col_count, "pk": pk}
cols = []
cols_str = ''
cols_str = ""
for j in range(col_count):
col_data_type = random_choice(data_type)
col_constraint_1 = random_choice(constraints)
col_constraint_2 = random_choice(constraints)
col = f'col_{j} {col_data_type} {col_constraint_1} {col_constraint_2 if col_constraint_2 != col_constraint_1 else ""}' if j != pk else f'col_{j} {col_data_type}'
col = (
f"col_{j} {col_data_type} {col_constraint_1} {col_constraint_2 if col_constraint_2 != col_constraint_1 else ''}" # noqa: E501
if j != pk
else f"col_{j} {col_data_type}"
)
cols.append(col)
schema[f'col_{j}'] = {
'data_type': col_data_type,
'constraint1': col_constraint_1 if j != pk else '',
'constraint2': col_constraint_2 if col_constraint_1 != col_constraint_2 else "" if j != pk else 'NOT NULL',
schema[f"col_{j}"] = {
"data_type": col_data_type,
"constraint1": col_constraint_1 if j != pk else "",
"constraint2": col_constraint_2 if col_constraint_1 != col_constraint_2 else "" if j != pk else "NOT NULL",
}
cols_str = ', '.join(cols)
schemas.append(schema)
cols_str = ", ".join(cols)
schemas.append(schema)
cur_init.execute(f"INSERT INTO schemas (schema, tbl) VALUES ('{json.dumps(schema)}', {i})")
cur.execute(f'''
cur.execute(f"""
CREATE TABLE tbl_{i} ({cols_str})
''')
""")
print(f'DB Schemas\n------------\n{json.dumps(schemas, indent=2)}')
print(f"DB Schemas\n------------\n{json.dumps(schemas, indent=2)}")

View file

@@ -1,42 +1,42 @@
#!/usr/bin/env -S python3 -u
import json
import limbo
from utils import generate_random_value
from antithesis.random import get_random
from utils import generate_random_value
# Get initial state
try:
con_init = limbo.connect('init_state.db')
con_init = limbo.connect("init_state.db")
except Exception as e:
print(f"Error connecting to database: {e}")
exit(0)
cur_init = con_init.cursor()
tbl_len = cur_init.execute('SELECT count FROM tables').fetchone()[0]
tbl_len = cur_init.execute("SELECT count FROM tables").fetchone()[0]
selected_tbl = get_random() % tbl_len
tbl_schema = json.loads(cur_init.execute(f'SELECT schema FROM schemas WHERE tbl = {selected_tbl}').fetchone()[0])
tbl_schema = json.loads(cur_init.execute(f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
# get primary key column
pk = tbl_schema['pk']
pk = tbl_schema["pk"]
# get non-pk columns
cols = [f'col_{col}' for col in range(tbl_schema['colCount']) if col != pk]
cols = [f"col_{col}" for col in range(tbl_schema["colCount"]) if col != pk]
try:
con = limbo.connect('stress_composer.db')
con = limbo.connect("stress_composer.db")
except limbo.OperationalError as e:
print(f'Failed to open stress_composer.db. Exiting... {e}')
print(f"Failed to open stress_composer.db. Exiting... {e}")
exit(0)
cur = con.cursor()
deletions = get_random() % 100
print(f'Attempt to delete {deletions} rows in tbl_{selected_tbl}...')
print(f"Attempt to delete {deletions} rows in tbl_{selected_tbl}...")
for i in range(deletions):
where_clause = f"col_{pk} = {generate_random_value(tbl_schema[f'col_{pk}']['data_type'])}"
cur.execute(f'''
cur.execute(f"""
DELETE FROM tbl_{selected_tbl} WHERE {where_clause}
''')
""")

View file

@@ -1,44 +1,44 @@
#!/usr/bin/env -S python3 -u
import json
import limbo
from utils import generate_random_value
from antithesis.random import get_random
import limbo
from antithesis.random import get_random
from utils import generate_random_value
# Get initial state
try:
con_init = limbo.connect('init_state.db')
con_init = limbo.connect("init_state.db")
except Exception as e:
print(f"Error connecting to database: {e}")
exit(0)
cur_init = con_init.cursor()
tbl_len = cur_init.execute('SELECT count FROM tables').fetchone()[0]
tbl_len = cur_init.execute("SELECT count FROM tables").fetchone()[0]
selected_tbl = get_random() % tbl_len
tbl_schema = json.loads(cur_init.execute(f'SELECT schema FROM schemas WHERE tbl = {selected_tbl}').fetchone()[0])
cols = ', '.join([f'col_{col}' for col in range(tbl_schema['colCount'])])
tbl_schema = json.loads(cur_init.execute(f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
cols = ", ".join([f"col_{col}" for col in range(tbl_schema["colCount"])])
try:
con = limbo.connect('stress_composer.db')
con = limbo.connect("stress_composer.db")
except limbo.OperationalError as e:
print(f'Failed to open stress_composer.db. Exiting... {e}')
print(f"Failed to open stress_composer.db. Exiting... {e}")
exit(0)
cur = con.cursor()
# insert up to 100 rows in the selected table
insertions = get_random() % 100
print(f'Inserting {insertions} rows...')
print(f"Inserting {insertions} rows...")
for i in range(insertions):
values = [generate_random_value(tbl_schema[f'col_{col}']['data_type']) for col in range(tbl_schema['colCount'])]
values = [generate_random_value(tbl_schema[f"col_{col}"]["data_type"]) for col in range(tbl_schema["colCount"])]
try:
cur.execute(f'''
cur.execute(f"""
INSERT INTO tbl_{selected_tbl} ({cols})
VALUES ({', '.join(values)})
''')
VALUES ({", ".join(values)})
""")
except limbo.OperationalError as e:
if "UNIQUE constraint failed" in str(e):
# Ignore UNIQUE constraint violations
@@ -46,4 +46,3 @@ for i in range(insertions):
else:
# Re-raise other operational errors
raise

View file

@@ -1,33 +1,34 @@
#!/usr/bin/env -S python3 -u
import json
import limbo
from antithesis.random import get_random
from antithesis.assertions import always
from antithesis.random import get_random
# Get initial state
try:
con_init = limbo.connect('init_state.db')
con_init = limbo.connect("init_state.db")
except Exception as e:
print(f"Error connecting to database: {e}")
exit(0)
cur_init = con_init.cursor()
tbl_len = cur_init.execute('SELECT count FROM tables').fetchone()[0]
tbl_len = cur_init.execute("SELECT count FROM tables").fetchone()[0]
selected_tbl = get_random() % tbl_len
tbl_schema = json.loads(cur_init.execute(f'SELECT schema FROM schemas WHERE tbl = {selected_tbl}').fetchone()[0])
cols = ', '.join([f'col_{col}' for col in range(tbl_schema['colCount'])])
tbl_schema = json.loads(cur_init.execute(f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
cols = ", ".join([f"col_{col}" for col in range(tbl_schema["colCount"])])
try:
con = limbo.connect('stress_composer.db')
con = limbo.connect("stress_composer.db")
except limbo.OperationalError as e:
print(f'Failed to open stress_composer.db. Exiting... {e}')
print(f"Failed to open stress_composer.db. Exiting... {e}")
exit(0)
cur = con.cursor();
cur = con.cursor()
print('Running integrity check...')
print("Running integrity check...")
result = cur.execute("PRAGMA integrity_check")
row = result.fetchone()
always(row == ("ok",), f"Integrity check failed: {row}", {})
always(row == ("ok",), f"Integrity check failed: {row}", {})

View file

@@ -1,57 +1,58 @@
#!/usr/bin/env -S python3 -u
import json
import limbo
from utils import generate_random_value
from antithesis.random import get_random
from utils import generate_random_value
# Get initial state
try:
con_init = limbo.connect('init_state.db')
con_init = limbo.connect("init_state.db")
except Exception as e:
print(f"Error connecting to database: {e}")
exit(0)
cur_init = con_init.cursor()
tbl_len = cur_init.execute('SELECT count FROM tables').fetchone()[0]
tbl_len = cur_init.execute("SELECT count FROM tables").fetchone()[0]
selected_tbl = get_random() % tbl_len
tbl_schema = json.loads(cur_init.execute(f'SELECT schema FROM schemas WHERE tbl = {selected_tbl}').fetchone()[0])
tbl_schema = json.loads(cur_init.execute(f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
# get primary key column
pk = tbl_schema['pk']
pk = tbl_schema["pk"]
# get non-pk columns
cols = [f'col_{col}' for col in range(tbl_schema['colCount']) if col != pk]
cols = [f"col_{col}" for col in range(tbl_schema["colCount"]) if col != pk]
# print(cols)
try:
con = limbo.connect('stress_composer.db')
con = limbo.connect("stress_composer.db")
except limbo.OperationalError as e:
print(f'Failed to open stress_composer.db. Exiting... {e}')
print(f"Failed to open stress_composer.db. Exiting... {e}")
exit(0)
cur = con.cursor()
# insert up to 100 rows in the selected table
updates = get_random() % 100
print(f'Attempt to update {updates} rows in tbl_{selected_tbl}...')
print(f"Attempt to update {updates} rows in tbl_{selected_tbl}...")
for i in range(updates):
set_clause = ''
if tbl_schema['colCount'] == 1:
set_clause = ""
if tbl_schema["colCount"] == 1:
set_clause = f"col_{pk} = {generate_random_value(tbl_schema[f'col_{pk}']['data_type'])}"
else:
values = []
for col in cols:
# print(col)
values.append(f"{col} = {generate_random_value(tbl_schema[col]['data_type'])}")
set_clause = ', '.join(values)
set_clause = ", ".join(values)
where_clause = f"col_{pk} = {generate_random_value(tbl_schema[f'col_{pk}']['data_type'])}"
# print(where_clause)
try:
cur.execute(f'''
cur.execute(f"""
UPDATE tbl_{selected_tbl} SET {set_clause} WHERE {where_clause}
''')
""")
except limbo.OperationalError as e:
if "UNIQUE constraint failed" in str(e):
# Ignore UNIQUE constraint violations
@@ -59,4 +60,3 @@ for i in range(updates):
else:
# Re-raise other operational errors
raise

View file

@@ -1,19 +1,22 @@
import string
from antithesis.random import get_random, random_choice
def generate_random_identifier(type: str, num: int):
return ''.join(type, '_', get_random() % num)
return "".join(type, "_", get_random() % num)
def generate_random_value(type_str):
if type_str == 'INTEGER':
if type_str == "INTEGER":
return str(get_random() % 100)
elif type_str == 'REAL':
return '{:.2f}'.format(get_random() % 100 / 100.0)
elif type_str == 'TEXT':
elif type_str == "REAL":
return "{:.2f}".format(get_random() % 100 / 100.0)
elif type_str == "TEXT":
return f"'{''.join(random_choice(string.ascii_lowercase) for _ in range(5))}'"
elif type_str == 'BLOB':
elif type_str == "BLOB":
return f"x'{''.join(random_choice(string.ascii_lowercase) for _ in range(5)).encode().hex()}'"
elif type_str == 'NUMERIC':
elif type_str == "NUMERIC":
return str(get_random() % 100)
else:
return NULL
return "NULL"
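One thing the lint pass leaves behind: generate_random_identifier still raises TypeError when called, because str.join takes a single iterable and here it receives three arguments, one of them an int (the type parameter also shadows a builtin). The function is not called in any hunk shown, so nothing breaks, but a possible repair, assuming the intent is identifiers like tbl_3, might be:

from antithesis.random import get_random

def generate_random_identifier(prefix: str, num: int) -> str:
    # str.join expects one iterable of strings, so build the parts explicitly
    return "_".join([prefix, str(get_random() % num)])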

View file

@@ -1,9 +1,8 @@
import os
import sqlite3
import pytest
import limbo
import pytest
@pytest.fixture(autouse=True)

View file

@@ -2,11 +2,12 @@
import argparse
import sqlite3
from faker import Faker
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-c', '--count', type=int)
parser.add_argument("filename")
parser.add_argument("-c", "--count", type=int)
args = parser.parse_args()
@@ -14,7 +15,7 @@ conn = sqlite3.connect(args.filename)
cursor = conn.cursor()
# Create the user table
cursor.execute('''
cursor.execute("""
CREATE TABLE IF NOT EXISTS user (
id INTEGER PRIMARY KEY,
first_name TEXT,
@@ -26,7 +27,7 @@ cursor.execute('''
state TEXT,
zipcode TEXT
)
''')
""")
fake = Faker()
for _ in range(args.count):
@@ -39,10 +40,13 @@ for _ in range(args.count):
state = fake.state_abbr()
zipcode = fake.zipcode()
cursor.execute('''
cursor.execute(
"""
INSERT INTO user (first_name, last_name, email, phone_number, address, city, state, zipcode)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
''', (first_name, last_name, email, phone_number, address, city, state, zipcode))
""",
(first_name, last_name, email, phone_number, address, city, state, zipcode),
)
conn.commit()
conn.close()

View file

@@ -1,16 +1,15 @@
#!/usr/bin/env python3
import matplotlib.pyplot as plt
import matplotlib
import csv
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 22}
import matplotlib
import matplotlib.pyplot as plt
matplotlib.rcParams.update({'font.size': 22})
font = {"family": "normal", "weight": "bold", "size": 22}
file_name = 'results.csv'
matplotlib.rcParams.update({"font.size": 22})
file_name = "results.csv"
threads = []
p50_values = []
p95_values = []
@@ -22,34 +21,34 @@ p99_limbo = []
p999_limbo = []
# Parse the CSV file
with open(file_name, 'r') as csvfile:
with open(file_name, "r") as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
if row['system'] == 'rusqlite':
threads.append(int(row['count']))
p50_values.append(float(row['p50']) / 1e3)
p95_values.append(float(row['p95']) / 1e3)
p99_values.append(float(row['p99']) / 1e3)
p999_values.append(float(row['p999']) / 1e3)
if row["system"] == "rusqlite":
threads.append(int(row["count"]))
p50_values.append(float(row["p50"]) / 1e3)
p95_values.append(float(row["p95"]) / 1e3)
p99_values.append(float(row["p99"]) / 1e3)
p999_values.append(float(row["p999"]) / 1e3)
else:
p95_limbo.append(float(row['p95']) / 1e3)
p99_limbo.append(float(row['p99']) / 1e3)
p999_limbo.append(float(row['p999']) / 1e3)
p95_limbo.append(float(row["p95"]) / 1e3)
p99_limbo.append(float(row["p99"]) / 1e3)
p999_limbo.append(float(row["p999"]) / 1e3)
plt.figure(figsize=(10, 6))
plt.plot(threads, p999_values, label='rusqlite (p999)', linestyle='solid', marker='$\u2217$')
plt.plot(threads, p999_limbo, label='limbo (p999)', linestyle='solid', marker='$\u2217$')
plt.plot(threads, p99_values, label='rusqlite (p99)', linestyle='solid', marker='$\u002B$')
plt.plot(threads, p99_limbo, label='limbo (p99)', linestyle='solid', marker='$\u002B$')
#plt.plot(threads, p95_values, label='p95', linestyle='solid', marker="$\u25FE$")
#plt.plot(threads, p50_values, label='p50', linestyle='solid', marker="$\u25B2$")
plt.plot(threads, p999_values, label="rusqlite (p999)", linestyle="solid", marker="$\u2217$")
plt.plot(threads, p999_limbo, label="limbo (p999)", linestyle="solid", marker="$\u2217$")
plt.plot(threads, p99_values, label="rusqlite (p99)", linestyle="solid", marker="$\u002b$")
plt.plot(threads, p99_limbo, label="limbo (p99)", linestyle="solid", marker="$\u002b$")
# plt.plot(threads, p95_values, label='p95', linestyle='solid', marker="$\u25FE$")
# plt.plot(threads, p50_values, label='p50', linestyle='solid', marker="$\u25B2$")
plt.yscale("log")
plt.xlabel('Number of Tenants')
plt.ylabel('Latency (µs)')
plt.xlabel("Number of Tenants")
plt.ylabel("Latency (µs)")
plt.grid(True)
plt.legend()
plt.tight_layout()
plt.savefig('latency_distribution.pdf')
plt.savefig("latency_distribution.pdf")

View file

@@ -2,11 +2,12 @@
import argparse
import sqlite3
from faker import Faker
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('-c', '--count', type=int)
parser.add_argument("filename")
parser.add_argument("-c", "--count", type=int)
args = parser.parse_args()
@@ -14,7 +15,7 @@ conn = sqlite3.connect(args.filename)
cursor = conn.cursor()
# Create the user table
cursor.execute('''
cursor.execute("""
CREATE TABLE IF NOT EXISTS user (
id INTEGER PRIMARY KEY,
first_name TEXT,
@@ -26,7 +27,7 @@ cursor.execute('''
state TEXT,
zipcode TEXT
)
''')
""")
fake = Faker()
for _ in range(args.count):
@@ -39,10 +40,13 @@ for _ in range(args.count):
state = fake.state_abbr()
zipcode = fake.zipcode()
cursor.execute('''
cursor.execute(
"""
INSERT INTO user (first_name, last_name, email, phone_number, address, city, state, zipcode)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
''', (first_name, last_name, email, phone_number, address, city, state, zipcode))
""",
(first_name, last_name, email, phone_number, address, city, state, zipcode),
)
conn.commit()
conn.close()

View file

@@ -1,16 +1,15 @@
#!/usr/bin/env python3
import matplotlib.pyplot as plt
import matplotlib
import csv
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 22}
import matplotlib
import matplotlib.pyplot as plt
matplotlib.rcParams.update({'font.size': 22})
font = {"family": "normal", "weight": "bold", "size": 22}
file_name = 'results.csv'
matplotlib.rcParams.update({"font.size": 22})
file_name = "results.csv"
threads = []
p50_values = []
p95_values = []
@@ -18,27 +17,27 @@ p99_values = []
p999_values = []
# Parse the CSV file
with open(file_name, 'r') as csvfile:
with open(file_name, "r") as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
threads.append(int(row['count']))
p50_values.append(float(row['p50']) / 1e3)
p95_values.append(float(row['p95']) / 1e3)
p99_values.append(float(row['p99']) / 1e3)
p999_values.append(float(row['p999']) / 1e3)
threads.append(int(row["count"]))
p50_values.append(float(row["p50"]) / 1e3)
p95_values.append(float(row["p95"]) / 1e3)
p99_values.append(float(row["p99"]) / 1e3)
p999_values.append(float(row["p999"]) / 1e3)
plt.figure(figsize=(10, 6))
plt.plot(threads, p999_values, label='p999', linestyle='solid', marker='$\u2217$')
plt.plot(threads, p99_values, label='p99', linestyle='solid', marker='$\u002B$')
plt.plot(threads, p95_values, label='p95', linestyle='solid', marker="$\u25FE$")
plt.plot(threads, p50_values, label='p50', linestyle='solid', marker="$\u25B2$")
plt.plot(threads, p999_values, label="p999", linestyle="solid", marker="$\u2217$")
plt.plot(threads, p99_values, label="p99", linestyle="solid", marker="$\u002b$")
plt.plot(threads, p95_values, label="p95", linestyle="solid", marker="$\u25fe$")
plt.plot(threads, p50_values, label="p50", linestyle="solid", marker="$\u25b2$")
plt.yscale("log")
plt.xlabel('Number of Threads')
plt.ylabel('Latency (µs)')
plt.xlabel("Number of Threads")
plt.ylabel("Latency (µs)")
plt.grid(True)
plt.legend()
plt.tight_layout()
plt.savefig('latency_distribution.pdf')
plt.savefig("latency_distribution.pdf")

View file

@@ -9,29 +9,33 @@
# ```
# pip install PyGithub
# ```
import sys
import re
from github import Github
import json
import os
import re
import subprocess
import sys
import tempfile
import textwrap
import json
from github import Github
def run_command(command):
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, error = process.communicate()
return output.decode('utf-8').strip(), error.decode('utf-8').strip(), process.returncode
return output.decode("utf-8").strip(), error.decode("utf-8").strip(), process.returncode
def load_user_mapping(file_path='.github.json'):
def load_user_mapping(file_path=".github.json"):
if os.path.exists(file_path):
with open(file_path, 'r') as f:
with open(file_path, "r") as f:
return json.load(f)
return {}
user_mapping = load_user_mapping()
def get_user_email(g, username):
if username in user_mapping:
return f"{user_mapping[username]['name']} <{user_mapping[username]['email']}>"
@@ -48,6 +52,7 @@ def get_user_email(g, username):
# If we couldn't find an email, return a noreply address
return f"{username} <{username}@users.noreply.github.com>"
def get_pr_info(g, repo, pr_number):
pr = repo.get_pull(int(pr_number))
author = pr.user
@@ -57,41 +62,43 @@ def get_pr_info(g, repo, pr_number):
reviewed_by = []
reviews = pr.get_reviews()
for review in reviews:
if review.state == 'APPROVED':
if review.state == "APPROVED":
reviewer = review.user
reviewed_by.append(get_user_email(g, reviewer.login))
return {
'number': pr.number,
'title': pr.title,
'author': author_name,
'head': pr.head.ref,
'head_sha': pr.head.sha,
'body': pr.body.strip() if pr.body else '',
'reviewed_by': reviewed_by
"number": pr.number,
"title": pr.title,
"author": author_name,
"head": pr.head.ref,
"head_sha": pr.head.sha,
"body": pr.body.strip() if pr.body else "",
"reviewed_by": reviewed_by,
}
def wrap_text(text, width=72):
lines = text.split('\n')
lines = text.split("\n")
wrapped_lines = []
in_code_block = False
for line in lines:
if line.strip().startswith('```'):
if line.strip().startswith("```"):
in_code_block = not in_code_block
wrapped_lines.append(line)
elif in_code_block:
wrapped_lines.append(line)
else:
wrapped_lines.extend(textwrap.wrap(line, width=width))
return '\n'.join(wrapped_lines)
return "\n".join(wrapped_lines)
def merge_pr(pr_number):
# GitHub authentication
token = os.getenv('GITHUB_TOKEN')
token = os.getenv("GITHUB_TOKEN")
g = Github(token)
# Get the repository
repo_name = os.getenv('GITHUB_REPOSITORY')
repo_name = os.getenv("GITHUB_REPOSITORY")
if not repo_name:
print("Error: GITHUB_REPOSITORY environment variable not set")
sys.exit(1)
@@ -102,19 +109,19 @@ def merge_pr(pr_number):
# Format commit message
commit_title = f"Merge '{pr_info['title']}' from {pr_info['author']}"
commit_body = wrap_text(pr_info['body'])
commit_body = wrap_text(pr_info["body"])
commit_message = f"{commit_title}\n\n{commit_body}\n"
# Add Reviewed-by lines
for approver in pr_info['reviewed_by']:
for approver in pr_info["reviewed_by"]:
commit_message += f"\nReviewed-by: {approver}"
# Add Closes line
commit_message += f"\n\nCloses #{pr_info['number']}"
# Create a temporary file for the commit message
with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file:
with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_file:
temp_file.write(commit_message)
temp_file_path = temp_file.name
@@ -147,13 +154,14 @@ def merge_pr(pr_number):
# Clean up the temporary file
os.unlink(temp_file_path)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: python merge_pr.py <pr_number>")
sys.exit(1)
pr_number = sys.argv[1]
if not re.match(r'^\d+$', pr_number):
if not re.match(r"^\d+$", pr_number):
print("Error: PR number must be a positive integer")
sys.exit(1)

View file

@@ -6,22 +6,21 @@ updates the JavaScript and WebAssembly bindings package.json and package-lock.js
uses cargo update to update Cargo.lock, creates a git commit, and adds a version tag.
"""
import re
import argparse
import sys
import json
import subprocess
import os
import re
import subprocess
import sys
from pathlib import Path
# Define all npm package paths in one place
NPM_PACKAGES = [
"bindings/javascript",
"bindings/javascript/npm/darwin-universal",
"bindings/javascript/npm/linux-x64-gnu",
"bindings/javascript/npm/linux-x64-gnu",
"bindings/javascript/npm/win32-x64-msvc",
"bindings/wasm"
"bindings/wasm",
]
@@ -29,10 +28,7 @@ def parse_args():
parser = argparse.ArgumentParser(description="Update version in project files")
# Version argument
parser.add_argument(
"version",
help="The new version to set (e.g., 0.1.0)"
)
parser.add_argument("version", help="The new version to set (e.g., 0.1.0)")
return parser.parse_args()
@@ -58,7 +54,7 @@ def update_cargo_toml(new_version):
# Pattern to match version in various contexts while maintaining the quotes
pattern = r'(version\s*=\s*)"' + re.escape(current_version) + r'"'
updated_content = re.sub(pattern, fr'\1"{new_version}"', content)
updated_content = re.sub(pattern, rf'\1"{new_version}"', content)
cargo_path.write_text(updated_content)
return True
@@ -66,7 +62,7 @@
sys.exit(1)
def update_package_json(dir_path, new_version):
def update_package_json(dir_path, new_version): # noqa: C901
"""Update version in package.json and package-lock.json files."""
dir_path = Path(dir_path)
@@ -77,14 +73,14 @@ def update_package_json(dir_path, new_version):
return False
# Read and parse the package.json file
with open(package_path, 'r') as f:
with open(package_path, "r") as f:
package_data = json.load(f)
# Update version regardless of current value
package_data['version'] = new_version
package_data["version"] = new_version
# Write updated package.json
with open(package_path, 'w') as f:
with open(package_path, "w") as f:
json.dump(package_data, f, indent=2)
except Exception:
return False
@@ -96,27 +92,27 @@
return True # package.json was updated successfully
# Read and parse the package-lock.json file
with open(lock_path, 'r') as f:
with open(lock_path, "r") as f:
lock_data = json.load(f)
# Update version in multiple places in package-lock.json
if 'version' in lock_data:
lock_data['version'] = new_version
if "version" in lock_data:
lock_data["version"] = new_version
# Update version in packages section if it exists (npm >= 7)
if 'packages' in lock_data:
if '' in lock_data['packages']: # Root package
if 'version' in lock_data['packages']['']:
lock_data['packages']['']['version'] = new_version
if "packages" in lock_data:
if "" in lock_data["packages"]: # Root package
if "version" in lock_data["packages"][""]:
lock_data["packages"][""]["version"] = new_version
# Update version in dependencies section if it exists (older npm)
package_name = package_data.get('name', '')
if 'dependencies' in lock_data and package_name in lock_data['dependencies']:
if 'version' in lock_data['dependencies'][package_name]:
lock_data['dependencies'][package_name]['version'] = new_version
package_name = package_data.get("name", "")
if "dependencies" in lock_data and package_name in lock_data["dependencies"]:
if "version" in lock_data["dependencies"][package_name]:
lock_data["dependencies"][package_name]["version"] = new_version
# Write updated package-lock.json
with open(lock_path, 'w') as f:
with open(lock_path, "w") as f:
json.dump(lock_data, f, indent=2)
return True
@@ -137,10 +133,7 @@
"""Run cargo update to update the Cargo.lock file."""
try:
# Run cargo update showing its output with verbose flag
subprocess.run(
["cargo", "update", "--workspace", "--verbose"],
check=True
)
subprocess.run(["cargo", "update", "--workspace", "--verbose"], check=True)
return True
except Exception:
return False
@@ -156,7 +149,7 @@ def create_git_commit_and_tag(version):
for package_path in NPM_PACKAGES:
package_json = f"{package_path}/package.json"
package_lock = f"{package_path}/package-lock.json"
if os.path.exists(package_json):
files_to_add.append(package_json)
if os.path.exists(package_lock):
@@ -165,26 +158,17 @@
# Add each file individually
for file in files_to_add:
try:
subprocess.run(
["git", "add", file],
check=True
)
subprocess.run(["git", "add", file], check=True)
except subprocess.CalledProcessError:
print(f"Warning: Could not add {file} to git")
# Create commit
commit_message = f"Limbo {version}"
subprocess.run(
["git", "commit", "-m", commit_message],
check=True
)
subprocess.run(["git", "commit", "-m", commit_message], check=True)
# Create tag
tag_name = f"v{version}"
subprocess.run(
["git", "tag", "-a", tag_name, "-m", f"Version {version}"],
check=True
)
subprocess.run(["git", "tag", "-a", tag_name, "-m", f"Version {version}"], check=True)
return True
except Exception as e:

View file

@@ -1,9 +1,10 @@
#!/usr/bin/env python3
from cli_tests.test_limbo_cli import TestLimboShell
from pathlib import Path
import time
import os
import time
from pathlib import Path
from cli_tests import console
from cli_tests.test_limbo_cli import TestLimboShell
def test_basic_queries():
@@ -62,7 +63,7 @@ def test_joins():
shell.run_test(
"file-cross-join",
"select * from users, products limit 1;",
"1|Jamie|Foster|dylan00@example.com|496-522-9493|62375 Johnson Rest Suite 322|West Lauriestad|IL|35865|94|1|hat|79.0",
"1|Jamie|Foster|dylan00@example.com|496-522-9493|62375 Johnson Rest Suite 322|West Lauriestad|IL|35865|94|1|hat|79.0", # noqa: E501
)
shell.quit()
@@ -76,7 +77,7 @@ def test_left_join_self():
shell.run_test(
"file-left-join-self",
"select u1.first_name as user_name, u2.first_name as neighbor_name from users u1 left join users as u2 on u1.id = u2.id + 1 limit 2;",
"select u1.first_name as user_name, u2.first_name as neighbor_name from users u1 left join users as u2 on u1.id = u2.id + 1 limit 2;", # noqa: E501
"Jamie|\nCindy|Jamie",
)
shell.quit()
@@ -99,9 +100,7 @@ def test_switch_back_to_in_memory():
shell.run_test("open-testing-db-file", ".open testing/testing.db", "")
# Then switch back to :memory:
shell.run_test("switch-back", ".open :memory:", "")
shell.run_test(
"schema-in-memory", ".schema users", "-- Error: Table 'users' not found."
)
shell.run_test("schema-in-memory", ".schema users", "-- Error: Table 'users' not found.")
shell.quit()
@@ -172,9 +171,7 @@ SELECT 2;"""
def test_comments():
shell = TestLimboShell()
shell.run_test("single-line-comment", "-- this is a comment\nSELECT 1;", "1")
shell.run_test(
"multi-line-comments", "-- First comment\n-- Second comment\nSELECT 2;", "2"
)
shell.run_test("multi-line-comments", "-- First comment\n-- Second comment\nSELECT 2;", "2")
shell.run_test("block-comment", "/*\nMulti-line block comment\n*/\nSELECT 3;", "3")
shell.run_test(
"inline-comments",
@@ -187,9 +184,7 @@ def test_comments():
def test_import_csv():
shell = TestLimboShell()
shell.run_test("memory-db", ".open :memory:", "")
shell.run_test(
"create-csv-table", "CREATE TABLE csv_table (c1 INT, c2 REAL, c3 String);", ""
)
shell.run_test("create-csv-table", "CREATE TABLE csv_table (c1 INT, c2 REAL, c3 String);", "")
shell.run_test(
"import-csv-no-options",
".import --csv ./testing/test_files/test.csv csv_table",
@@ -206,9 +201,7 @@ def test_import_csv():
def test_import_csv_verbose():
shell = TestLimboShell()
shell.run_test("open-memory", ".open :memory:", "")
shell.run_test(
"create-csv-table", "CREATE TABLE csv_table (c1 INT, c2 REAL, c3 String);", ""
)
shell.run_test("create-csv-table", "CREATE TABLE csv_table (c1 INT, c2 REAL, c3 String);", "")
shell.run_test(
"import-csv-verbose",
".import --csv -v ./testing/test_files/test.csv csv_table",
@@ -225,9 +218,7 @@ def test_import_csv_verbose():
def test_import_csv_skip():
shell = TestLimboShell()
shell.run_test("open-memory", ".open :memory:", "")
shell.run_test(
"create-csv-table", "CREATE TABLE csv_table (c1 INT, c2 REAL, c3 String);", ""
)
shell.run_test("create-csv-table", "CREATE TABLE csv_table (c1 INT, c2 REAL, c3 String);", "")
shell.run_test(
"import-csv-skip",
".import --csv --skip 1 ./testing/test_files/test.csv csv_table",
@@ -250,51 +241,33 @@ def test_update_with_limit():
limbo.run_test("update-limit", "UPDATE t SET a = 10 LIMIT 1;", "")
limbo.run_test("update-limit-result", "SELECT COUNT(*) from t WHERE a = 10;", "1")
limbo.run_test("update-limit-zero", "UPDATE t SET a = 100 LIMIT 0;", "")
limbo.run_test(
"update-limit-zero-result", "SELECT COUNT(*) from t WHERE a = 100;", "0"
)
limbo.run_test("update-limit-zero-result", "SELECT COUNT(*) from t WHERE a = 100;", "0")
limbo.run_test("update-limit-all", "UPDATE t SET a = 100 LIMIT -1;", "")
# negative limit is treated as no limit in sqlite due to check for --val = 0
limbo.run_test("update-limit-result", "SELECT COUNT(*) from t WHERE a = 100;", "6")
limbo.run_test(
"udpate-limit-where", "UPDATE t SET a = 333 WHERE b = 5 LIMIT 1;", ""
)
limbo.run_test(
"update-limit-where-result", "SELECT COUNT(*) from t WHERE a = 333;", "1"
)
limbo.run_test("udpate-limit-where", "UPDATE t SET a = 333 WHERE b = 5 LIMIT 1;", "")
limbo.run_test("update-limit-where-result", "SELECT COUNT(*) from t WHERE a = 333;", "1")
limbo.quit()
def test_update_with_limit_and_offset():
limbo = TestLimboShell(
"CREATE TABLE t (a,b,c); insert into t values (1,2,3), (4,5,6), (7,8,9), (1,2,3),(4,5,6), (7,8,9);"
)
limbo.run_test("update-limit-offset", "UPDATE t SET a = 10 LIMIT 1 OFFSET 3;", "")
limbo.run_test(
"update-limit-offset-result", "SELECT COUNT(*) from t WHERE a = 10;", "1"
)
limbo.run_test("update-limit-offset-result", "SELECT COUNT(*) from t WHERE a = 10;", "1")
limbo.run_test("update-limit-result", "SELECT a from t LIMIT 4;", "1\n4\n7\n10")
limbo.run_test(
"update-limit-offset-zero", "UPDATE t SET a = 100 LIMIT 0 OFFSET 0;", ""
)
limbo.run_test(
"update-limit-zero-result", "SELECT COUNT(*) from t WHERE a = 100;", "0"
)
limbo.run_test("update-limit-offset-zero", "UPDATE t SET a = 100 LIMIT 0 OFFSET 0;", "")
limbo.run_test("update-limit-zero-result", "SELECT COUNT(*) from t WHERE a = 100;", "0")
limbo.run_test("update-limit-all", "UPDATE t SET a = 100 LIMIT -1 OFFSET 1;", "")
limbo.run_test("update-limit-result", "SELECT COUNT(*) from t WHERE a = 100;", "5")
limbo.run_test(
"udpate-limit-where", "UPDATE t SET a = 333 WHERE b = 5 LIMIT 1 OFFSET 2;", ""
)
limbo.run_test(
"update-limit-where-result", "SELECT COUNT(*) from t WHERE a = 333;", "0"
)
limbo.run_test("udpate-limit-where", "UPDATE t SET a = 333 WHERE b = 5 LIMIT 1 OFFSET 2;", "")
limbo.run_test("update-limit-where-result", "SELECT COUNT(*) from t WHERE a = 333;", "0")
limbo.quit()
def test_insert_default_values():
limbo = TestLimboShell(
"CREATE TABLE t (a integer default(42),b integer default (43),c integer default(44));"
)
limbo = TestLimboShell("CREATE TABLE t (a integer default(42),b integer default (43),c integer default(44));")
for _ in range(1, 10):
limbo.execute_dot("INSERT INTO t DEFAULT VALUES;")
limbo.run_test("insert-default-values", "SELECT * FROM t;", "42|43|44\n" * 9)

View file

@@ -1,9 +1,9 @@
#!/usr/bin/env python3
import os
from cli_tests import console
from cli_tests.test_limbo_cli import TestLimboShell
from pydantic import BaseModel
from cli_tests import console
sqlite_flags = os.getenv("SQLITE_FLAGS", "-q").split(" ")
@@ -81,13 +81,13 @@ class CollateTest(BaseModel):
)
limbo.run_test(
"Grouping is performed using the NOCASE collating sequence (Values 'abc', 'ABC', and 'Abc' are placed in the same group).",
"Grouping is performed using the NOCASE collating sequence (Values 'abc', 'ABC', and 'Abc' are placed in the same group).", # noqa: E501
"SELECT count(*) FROM t1 GROUP BY d ORDER BY 1;",
"\n".join(map(lambda x: str(x), [4])),
)
limbo.run_test(
"Grouping is performed using the BINARY collating sequence. 'abc' and 'ABC' and 'Abc' form different groups",
"Grouping is performed using the BINARY collating sequence. 'abc' and 'ABC' and 'Abc' form different groups", # noqa: E501
"SELECT count(*) FROM t1 GROUP BY (d || '') ORDER BY 1;",
"\n".join(map(lambda x: str(x), [1, 1, 2])),
)

View file

@@ -1,8 +1,8 @@
from typing import Any, Optional, Union
from rich.console import Console, JustifyMethod
from rich.theme import Theme
from rich.style import Style
from rich.console import Console, JustifyMethod
from rich.style import Style
from rich.theme import Theme
custom_theme = Theme(
{
@@ -95,6 +95,7 @@ def debug(
_stack_offset=_stack_offset + 1,
)
def test(
*objects: Any,
sep: str = " ",
@@ -119,4 +120,4 @@ def test(
highlight=highlight,
log_locals=log_locals,
_stack_offset=_stack_offset + 1,
)
)

View file

@@ -2,15 +2,16 @@
# Eventually extract these tests to be in the fuzzing integration tests
import os
import tempfile
from faker import Faker
from faker.providers.lorem.en_US import Provider as P
from cli_tests.test_limbo_cli import TestLimboShell
from pydantic import BaseModel
from cli_tests import console
from enum import Enum
import random
import sqlite3
import tempfile
from enum import Enum
from cli_tests import console
from cli_tests.test_limbo_cli import TestLimboShell
from faker import Faker
from faker.providers.lorem.en_US import Provider as P
from pydantic import BaseModel
sqlite_flags = os.getenv("SQLITE_FLAGS", "-q").split(" ")
@@ -233,11 +234,7 @@ class Table(BaseModel):
# These statements should always cause a constraint error as there is no where clause here
def generate_update(self) -> str:
vals = [
f"{col.name} = {col.col_type.generate(fake)}"
for col in self.columns
if col.primary_key
]
vals = [f"{col.name} = {col.col_type.generate(fake)}" for col in self.columns if col.primary_key]
vals = ", ".join(vals)
return f"UPDATE {self.name} SET {vals};"
@@ -374,7 +371,7 @@ def main():
tests = all_tests()
for test in tests:
console.info(test.table)
with tempfile.NamedTemporaryFile(suffix='.db') as tmp:
with tempfile.NamedTemporaryFile(suffix=".db") as tmp:
try:
# Use with syntax to automatically close shell on error
with TestLimboShell("") as limbo:
@@ -387,7 +384,7 @@
tests = [custom_test_2, regression_test_update_single_key]
for test in tests:
with tempfile.NamedTemporaryFile(suffix='.db') as tmp:
with tempfile.NamedTemporaryFile(suffix=".db") as tmp:
try:
with TestLimboShell("") as limbo:
limbo.execute_dot(f".open {tmp.name}")

View file

@@ -1,7 +1,8 @@
#!/usr/bin/env python3
import os
from cli_tests.test_limbo_cli import TestLimboShell
from cli_tests import console
from cli_tests.test_limbo_cli import TestLimboShell
sqlite_exec = "./scripts/limbo-sqlite3"
sqlite_flags = os.getenv("SQLITE_FLAGS", "-q").split(" ")
@@ -40,14 +41,10 @@ def test_uuid():
)
limbo.run_test_fn("SELECT uuid4_str();", lambda res: len(res) == 36)
limbo.run_test_fn("SELECT hex(uuid7());", lambda res: int(res, 16) is not None)
limbo.run_test_fn(
"SELECT uuid7_timestamp_ms(uuid7()) / 1000;", lambda res: res.isdigit()
)
limbo.run_test_fn("SELECT uuid7_timestamp_ms(uuid7()) / 1000;", lambda res: res.isdigit())
limbo.run_test_fn("SELECT uuid7_str();", validate_string_uuid)
limbo.run_test_fn("SELECT uuid_str(uuid7());", validate_string_uuid)
limbo.run_test_fn(
"SELECT hex(uuid_blob(uuid7_str()));", lambda res: int(res, 16) is not None
)
limbo.run_test_fn("SELECT hex(uuid_blob(uuid7_str()));", lambda res: int(res, 16) is not None)
limbo.run_test_fn("SELECT uuid_str(uuid_blob(uuid7_str()));", validate_string_uuid)
limbo.run_test_fn(
f"SELECT uuid7_timestamp_ms('{specific_time}') / 1000;",
@@ -160,12 +157,8 @@ def test_aggregates():
validate_percentile2,
"test aggregate percentile function with 1 argument works",
)
limbo.run_test_fn(
"SELECT percentile_cont(value, 0.25) from test;", validate_percentile1
)
limbo.run_test_fn(
"SELECT percentile_disc(value, 0.55) from test;", validate_percentile_disc
)
limbo.run_test_fn("SELECT percentile_cont(value, 0.25) from test;", validate_percentile1)
limbo.run_test_fn("SELECT percentile_disc(value, 0.55) from test;", validate_percentile_disc)
limbo.quit()
@@ -223,8 +216,7 @@ def test_crypto():
# Hashing and Decode
limbo.run_test_fn(
"SELECT crypto_encode(crypto_blake3('abc'), 'hex');",
lambda res: res
== "6437b3ac38465133ffb63b75273a8db548c558465d79db03fd359c6cd5bd9d85",
lambda res: res == "6437b3ac38465133ffb63b75273a8db548c558465d79db03fd359c6cd5bd9d85",
"blake3 should encrypt correctly",
)
limbo.run_test_fn(
@@ -239,8 +231,7 @@
)
limbo.run_test_fn(
"SELECT crypto_encode(crypto_sha256('abc'), 'hex');",
lambda a: a
== "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
lambda a: a == "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
"sha256 should encrypt correctly",
)
limbo.run_test_fn(
@@ -252,7 +243,7 @@
limbo.run_test_fn(
"SELECT crypto_encode(crypto_sha512('abc'), 'hex');",
lambda a: a
== "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f",
== "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f", # noqa: E501
"sha512 should encrypt correctly",
)
@@ -401,9 +392,7 @@ def test_kv():
)
for i in range(100):
limbo.execute_dot(f"insert into t values ('key{i}', 'val{i}');")
limbo.run_test_fn(
"select count(*) from t;", lambda res: "100" == res, "can insert 100 rows"
)
limbo.run_test_fn("select count(*) from t;", lambda res: "100" == res, "can insert 100 rows")
limbo.run_test_fn("update t set value = 'updated' where key = 'key33';", null)
limbo.run_test_fn(
"select * from t where key = 'key33';",
@@ -422,12 +411,8 @@
"can update all rows",
)
limbo.run_test_fn("delete from t limit 96;", null, "can delete 96 rows")
limbo.run_test_fn(
"select count(*) from t;", lambda res: "4" == res, "four rows remain"
)
limbo.run_test_fn(
"update t set key = '100' where 1;", null, "where clause evaluates properly"
)
limbo.run_test_fn("select count(*) from t;", lambda res: "4" == res, "four rows remain")
limbo.run_test_fn("update t set key = '100' where 1;", null, "where clause evaluates properly")
limbo.run_test_fn(
"select * from t where key = '100';",
lambda res: res == "100|updated2",
@@ -509,9 +494,7 @@ def test_vfs():
ext_path = "target/debug/liblimbo_ext_tests"
limbo.run_test_fn(".vfslist", lambda x: "testvfs" not in x, "testvfs not loaded")
limbo.execute_dot(f".load {ext_path}")
limbo.run_test_fn(
".vfslist", lambda res: "testvfs" in res, "testvfs extension loaded"
)
limbo.run_test_fn(".vfslist", lambda res: "testvfs" in res, "testvfs extension loaded")
limbo.execute_dot(".open testing/vfs.db testvfs")
limbo.execute_dot("create table test (id integer primary key, value float);")
limbo.execute_dot("create table vfs (id integer primary key, value blob);")
@@ -742,8 +725,7 @@ def test_tablestats():
limbo.run_test_fn(
"SELECT * FROM stats ORDER BY name;",
lambda res: sorted(_split(res))
== sorted(["logs|1", "people|3", "products|11", "users|10000"]),
lambda res: sorted(_split(res)) == sorted(["logs|1", "people|3", "products|11", "users|10000"]),
"stats shows correct initial counts (and skips itself)",
)

View file

@@ -1,7 +1,8 @@
#!/usr/bin/env python3
import os
from cli_tests.test_limbo_cli import TestLimboShell
from cli_tests import console
from cli_tests.test_limbo_cli import TestLimboShell
sqlite_flags = os.getenv("SQLITE_FLAGS", "-q").split(" ")

View file

@@ -1,12 +1,12 @@
#!/usr/bin/env python3
import os
import select
from time import sleep
import subprocess
from pathlib import Path
from time import sleep
from typing import Callable, List, Optional
from cli_tests import console
from cli_tests import console
PIPE_BUF = 4096
@@ -107,7 +107,7 @@ class TestLimboShell:
flags="",
):
if exec_name is None:
exec_name = os.environ.get('SQLITE_EXEC')
exec_name = os.environ.get("SQLITE_EXEC")
if exec_name is None:
exec_name = "./scripts/limbo-sqlite3"
if flags == "":
@@ -142,10 +142,7 @@ INSERT INTO t VALUES (zeroblob(1024 - 1), zeroblob(1024 - 2), zeroblob(1024 - 3)
console.test(f"Running test: {name}", _stack_offset=2)
actual = self.shell.execute(sql)
assert actual == expected, (
f"Test failed: {name}\n"
f"SQL: {sql}\n"
f"Expected:\n{repr(expected)}\n"
f"Actual:\n{repr(actual)}"
f"Test failed: {name}\nSQL: {sql}\nExpected:\n{repr(expected)}\nActual:\n{repr(actual)}"
)
def run_debug(self, sql: str):
@@ -153,9 +150,7 @@ INSERT INTO t VALUES (zeroblob(1024 - 1), zeroblob(1024 - 2), zeroblob(1024 - 3)
actual = self.shell.execute(sql)
console.debug(f"OUTPUT:\n{repr(actual)}", _stack_offset=2)
def run_test_fn(
self, sql: str, validate: Callable[[str], bool], desc: str = ""
) -> None:
def run_test_fn(self, sql: str, validate: Callable[[str], bool], desc: str = "") -> None:
# Print the test that is executing before executing the sql command
# Printing later confuses the user of the code what test has actually failed
if desc:

View file

@@ -1,9 +1,9 @@
#!/usr/bin/env python3
import os
from cli_tests import console
from cli_tests.test_limbo_cli import TestLimboShell
from pydantic import BaseModel
from cli_tests import console
sqlite_flags = os.getenv("SQLITE_FLAGS", "-q").split(" ")
@@ -39,10 +39,7 @@ class UpdateTest(BaseModel):
f"{self.vals}",
)
stmt = [
f"SELECT hex(t1), t2, t3 FROM test LIMIT 1 OFFSET {i};"
for i in range(self.vals)
]
stmt = [f"SELECT hex(t1), t2, t3 FROM test LIMIT 1 OFFSET {i};" for i in range(self.vals)]
expected = [f"{zero_blob}|{t2_val}|{t3_val}" for _ in range(self.vals)]
sqlite.run_test(
@@ -84,15 +81,10 @@ class UpdateTest(BaseModel):
f"{self.vals}",
)
stmt = [
f"SELECT hex(t1), t2, t3 FROM test LIMIT 1 OFFSET {i};"
for i in range(self.vals)
]
stmt = [f"SELECT hex(t1), t2, t3 FROM test LIMIT 1 OFFSET {i};" for i in range(self.vals)]
expected = [
f"{zero_blob}|{t2_val}|{t3_val}"
if i != 0
else f"{zero_blob}|{t2_update_val}|{t3_val}"
f"{zero_blob}|{t2_val}|{t3_val}" if i != 0 else f"{zero_blob}|{t2_update_val}|{t3_val}"
for i in range(self.vals)
]
sqlite.run_test(

View file

@@ -1,16 +1,16 @@
#!/usr/bin/env python3
# vfs benchmarking/comparison
import os
from pathlib import Path
import subprocess
import statistics
import argparse
import os
import statistics
import subprocess
from pathlib import Path
from time import perf_counter, sleep
from typing import Dict
from cli_tests.console import error, info, test
from cli_tests.test_limbo_cli import TestLimboShell
from cli_tests.console import info, error, test
LIMBO_BIN = Path("./target/release/limbo")
DB_FILE = Path("testing/temp.db")
@@ -37,9 +37,7 @@ def bench_one(vfs: str, sql: str, iterations: int) -> list[float]:
for i in range(1, iterations + 1):
start = perf_counter()
_ = shell.run_test_fn(
sql, lambda x: x is not None and append_time(times, start, perf_counter)
)
_ = shell.run_test_fn(sql, lambda x: x is not None and append_time(times, start, perf_counter))
test(f" {vfs} | run {i:>3}: {times[-1]:.6f}s")
shell.quit()
@@ -60,9 +58,7 @@ def cleanup_temp_db() -> None:
def main() -> None:
parser = argparse.ArgumentParser(
description="Benchmark a SQL statement against all Limbo VFS backends."
)
parser = argparse.ArgumentParser(description="Benchmark a SQL statement against all Limbo VFS backends.")
parser.add_argument("sql", help="SQL statement to execute (quote it)")
parser.add_argument("iterations", type=int, help="number of repetitions")
args = parser.parse_args()
@@ -105,9 +101,7 @@ def main() -> None:
else:
pct = (avg - baseline_avg) / baseline_avg * 100.0
faster_slower = "slower" if pct > 0 else "faster"
info(
f"{vfs:<{name_pad}} : {avg:.6f} ({abs(pct):.1f}% {faster_slower} than {baseline})"
)
info(f"{vfs:<{name_pad}} : {avg:.6f} ({abs(pct):.1f}% {faster_slower} than {baseline})")
info("-" * 60)
cleanup_temp_db()

View file

@@ -1,11 +1,11 @@
#!/usr/bin/env python3
import os
import tempfile
from cli_tests.test_limbo_cli import TestLimboShell
from pydantic import BaseModel
from cli_tests import console
from time import sleep
from cli_tests import console
from cli_tests.test_limbo_cli import TestLimboShell
from pydantic import BaseModel
sqlite_flags = os.getenv("SQLITE_FLAGS", "-q").split(" ")
@@ -46,9 +46,7 @@ class InsertTest(BaseModel):
big_stmt = "".join(big_stmt)
expected = "\n".join(expected)
limbo.run_test_fn(
big_stmt, lambda res: validate_with_expected(res, expected), self.name
)
limbo.run_test_fn(big_stmt, lambda res: validate_with_expected(res, expected), self.name)
def test_compat(self):
console.info("Testing in SQLite\n")

View file

@@ -1,13 +1,14 @@
#!/usr/bin/env python3
import sqlite3
from faker import Faker
conn = sqlite3.connect('database.db')
conn = sqlite3.connect("database.db")
cursor = conn.cursor()
# Create the user table
cursor.execute('''
cursor.execute("""
CREATE TABLE IF NOT EXISTS users (
id INTEGER PRIMARY KEY,
first_name TEXT,
@@ -20,18 +21,29 @@ cursor.execute('''
zipcode TEXT,
age INTEGER
)
''')
""")
cursor.execute('''
cursor.execute("""
CREATE TABLE IF NOT EXISTS products (
id INTEGER PRIMARY KEY,
name TEXT,
price REAL
)
''')
""")
product_list = ["hat", "cap", "shirt", "sweater", "sweatshirt",
"shorts", "jeans", "sneakers", "boots", "coat", "accessories"]
product_list = [
"hat",
"cap",
"shirt",
"sweater",
"sweatshirt",
"shorts",
"jeans",
"sneakers",
"boots",
"coat",
"accessories",
]
fake = Faker()
for _ in range(10000):
@@ -45,18 +57,23 @@ for _ in range(10000):
zipcode = fake.zipcode()
age = fake.random_int(min=1, max=100)
cursor.execute('''
cursor.execute(
"""
INSERT INTO users (first_name, last_name, email, phone_number, address, city, state, zipcode, age)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
''', (first_name, last_name, email, phone_number, address, city, state, zipcode, age))
""",
(first_name, last_name, email, phone_number, address, city, state, zipcode, age),
)
for product in product_list:
price = fake.random_int(min=1, max=100)
cursor.execute('''
cursor.execute(
"""
INSERT INTO products (name, price)
VALUES (?, ?)
''', (product, price))
""",
(product, price),
)
conn.commit()