feat(ai): implement AI API

This commit introduces a new AI module that includes schemas for AI requests and responses, as well as event handling for streaming AI responses. It adds functions to manage AI API keys, log usage, and handle streaming data from the AI service.
This commit is contained in:
ByteAtATime 2025-06-27 15:58:05 -07:00
parent 99f6319a7e
commit 87751b30ca
No known key found for this signature in database
12 changed files with 658 additions and 5 deletions

View file

@ -0,0 +1,48 @@
import { z } from 'zod/v4';
// Payload for a plugin-initiated AI request. `modelMappings` carries the
// Raycast model key -> OpenRouter slug mapping resolved by the sidecar.
export const AiAskStreamPayloadSchema = z.object({
  requestId: z.string(),
  prompt: z.string(),
  options: z
    .object({
      model: z.string().optional(),
      creativity: z.string().optional(),
      modelMappings: z.record(z.string(), z.string()).optional()
    })
    .optional()
});

// Sidecar -> host message asking the backend to start a streaming completion.
export const AiAskStreamMessageSchema = z.object({
  type: z.literal('ai-ask-stream'),
  payload: AiAskStreamPayloadSchema
});

// One incremental piece of generated text for the request `requestId`.
export const AiStreamChunkPayloadSchema = z.object({
  requestId: z.string(),
  text: z.string()
});

export const AiStreamChunkMessageSchema = z.object({
  type: z.literal('ai-stream-chunk'),
  payload: AiStreamChunkPayloadSchema
});

// Terminal event carrying the fully concatenated response text.
export const AiStreamEndPayloadSchema = z.object({
  requestId: z.string(),
  fullText: z.string()
});

export const AiStreamEndMessageSchema = z.object({
  type: z.literal('ai-stream-end'),
  payload: AiStreamEndPayloadSchema
});

// Terminal event reporting a failure; `error` is a human-readable message.
export const AiStreamErrorPayloadSchema = z.object({
  requestId: z.string(),
  error: z.string()
});

export const AiStreamErrorMessageSchema = z.object({
  type: z.literal('ai-stream-error'),
  payload: AiStreamErrorPayloadSchema
});

View file

@ -1,3 +1,4 @@
export * from './ai';
export * from './api';
export * from './command';
export * from './common';

View file

@ -19,6 +19,12 @@ import {
SystemShowInFinderMessageSchema,
SystemTrashMessageSchema
} from './api';
import {
AiAskStreamMessageSchema,
AiStreamChunkMessageSchema,
AiStreamEndMessageSchema,
AiStreamErrorMessageSchema
} from './ai';
import { CommandSchema } from './command';
import { ShowHudMessageSchema } from './hud';
import { GoBackToPluginListSchema, PluginListSchema, PreferenceValuesSchema } from './plugin';
@ -62,6 +68,10 @@ export const SidecarMessageWithPluginsSchema = z.union([
SystemGetDefaultApplicationMessageSchema,
SystemGetFrontmostApplicationMessageSchema,
SystemShowInFinderMessageSchema,
SystemTrashMessageSchema
SystemTrashMessageSchema,
AiAskStreamMessageSchema,
AiStreamChunkMessageSchema,
AiStreamEndMessageSchema,
AiStreamErrorMessageSchema
]);
export type SidecarMessageWithPlugins = z.infer<typeof SidecarMessageWithPluginsSchema>;

169
sidecar/src/api/ai.ts Normal file
View file

@ -0,0 +1,169 @@
import { EventEmitter } from 'events';
import { writeLog, writeOutput } from '../io';
import { inspect } from 'util';
// Catalogue of supported models: maps Raycast-style model keys (the values
// accepted by `AskOptions.model`) to OpenRouter model slugs.
export const Model = {
  'OpenAI_GPT4.1': 'openai/gpt-4.1',
  'OpenAI_GPT4.1-mini': 'openai/gpt-4.1-mini',
  'OpenAI_GPT4.1-nano': 'openai/gpt-4.1-nano',
  OpenAI_GPT4: 'openai/gpt-4',
  'OpenAI_GPT4-turbo': 'openai/gpt-4-turbo',
  OpenAI_GPT4o: 'openai/gpt-4o',
  'OpenAI_GPT4o-mini': 'openai/gpt-4o-mini',
  OpenAI_o3: 'openai/o3',
  'OpenAI_o4-mini': 'openai/o4-mini',
  OpenAI_o1: 'openai/o1',
  'OpenAI_o3-mini': 'openai/o3-mini',
  Anthropic_Claude_Haiku: 'anthropic/claude-3-haiku',
  Anthropic_Claude_Sonnet: 'anthropic/claude-3-sonnet',
  'Anthropic_Claude_Sonnet_3.7': 'anthropic/claude-3.7-sonnet',
  Anthropic_Claude_Opus: 'anthropic/claude-3-opus',
  Anthropic_Claude_4_Sonnet: 'anthropic/claude-sonnet-4',
  Anthropic_Claude_4_Opus: 'anthropic/claude-opus-4',
  Perplexity_Sonar: 'perplexity/sonar',
  Perplexity_Sonar_Pro: 'perplexity/sonar-pro',
  Perplexity_Sonar_Reasoning: 'perplexity/sonar-reasoning',
  Perplexity_Sonar_Reasoning_Pro: 'perplexity/sonar-reasoning-pro',
  Llama4_Scout: 'meta-llama/llama-4-scout',
  'Llama3.3_70B': 'meta-llama/llama-3.3-70b-instruct',
  'Llama3.1_8B': 'meta-llama/llama-3.1-8b-instruct',
  'Llama3.1_405B': 'meta-llama/llama-3.1-405b-instruct',
  Mistral_Nemo: 'mistralai/mistral-nemo',
  Mistral_Large: 'mistralai/mistral-large',
  Mistral_Medium: 'mistralai/mistral-medium-3',
  Mistral_Small: 'mistralai/mistral-small',
  Mistral_Codestral: 'mistralai/codestral-2501',
  'DeepSeek_R1_Distill_Llama_3.3_70B': 'deepseek/deepseek-r1-distill-llama-70b',
  DeepSeek_R1: 'deepseek/deepseek-r1',
  DeepSeek_V3: 'deepseek/deepseek-chat',
  'Google_Gemini_2.5_Pro': 'google/gemini-2.5-pro',
  'Google_Gemini_2.5_Flash': 'google/gemini-2.5-flash',
  'Google_Gemini_2.0_Flash': 'google/gemini-2.0-flash-001',
  xAI_Grok_3: 'x-ai/grok-3',
  xAI_Grok_3_Mini: 'x-ai/grok-3-mini',
  xAI_Grok_2: 'x-ai/grok-2-1212'
} as const;
// Creativity accepted by ask(); named presets are translated to a sampling
// temperature by the backend. NOTE(review): a raw number is allowed by this
// type, but only the named presets appear to be mapped by the backend —
// confirm before relying on numeric values.
export type Creativity = 'none' | 'low' | 'medium' | 'high' | 'maximum' | number;

// Options for ask(). `model` must be a key of `Model`; `signal` lets the
// caller abort an in-flight request.
export interface AskOptions {
  creativity?: Creativity;
  model?: keyof typeof Model;
  signal?: AbortSignal;
}

// The value returned by ask(): a promise of the full response text that also
// exposes EventEmitter-style subscriptions for streaming consumption.
// `on`/`off` return `this` so calls can be chained.
interface AskResult extends Promise<string> {
  on(event: 'data', listener: (chunk: string) => void): this;
  on(event: 'end', listener: (fullText: string) => void): this;
  on(event: 'error', listener: (error: Error) => void): this;
  off(event: 'data', listener: (chunk: string) => void): this;
  off(event: 'end', listener: (fullText: string) => void): this;
  off(event: 'error', listener: (error: Error) => void): this;
}
// In-flight ask() requests keyed by requestId; the handlers below route
// stream events arriving from the host into the matching emitter.
const pendingRequests = new Map<string, EventEmitter>();

/** Routes an incremental text chunk to the matching pending request. */
export function handleAiStreamChunk(data: { requestId: string; text: string }) {
  pendingRequests.get(data.requestId)?.emit('data', data.text);
}

/**
 * Completes a pending request with the full response text.
 *
 * The Rust backend serializes its payload with camelCase keys (`fullText`,
 * via serde `rename_all`), while some call sites type the payload as
 * snake_case (`full_text`). Accept either key so the final text is never
 * silently dropped as `undefined`.
 */
export function handleAiStreamEnd(data: {
  requestId: string;
  fullText?: string;
  full_text?: string;
}) {
  const emitter = pendingRequests.get(data.requestId);
  if (emitter) {
    emitter.emit('end', data.fullText ?? data.full_text ?? '');
    pendingRequests.delete(data.requestId);
  }
}

/** Fails a pending request with an Error built from the payload message. */
export function handleAiStreamError(data: { requestId: string; error: string }) {
  const emitter = pendingRequests.get(data.requestId);
  if (emitter) {
    emitter.emit('error', new Error(data.error));
    pendingRequests.delete(data.requestId);
  }
}
/**
 * Sends `prompt` to the host AI service and returns an `AskResult`: a promise
 * resolving to the complete response text that also exposes `on`/`off` for
 * incremental consumption ('data' chunks, 'end' with the full text, 'error').
 *
 * @param prompt - The user prompt to send.
 * @param options - Optional model key, creativity preset and abort signal.
 */
export function ask(prompt: string, options: AskOptions = {}): AskResult {
  const emitter = new EventEmitter();
  const requestId = crypto.randomUUID();

  // Resolve the chosen Raycast model key to its OpenRouter slug so the host
  // does not need its own copy of the model catalogue.
  const modelMappings: Record<string, string> = {};
  if (options.model && Model[options.model]) {
    modelMappings[options.model] = Model[options.model];
  }

  let fullText = '';
  let isResolved = false;
  pendingRequests.set(requestId, emitter);

  const promise = new Promise<string>((resolve, reject) => {
    const handleChunk = (chunk: string) => {
      fullText += chunk;
    };
    const handleEnd = (finalText: string) => {
      if (!isResolved) {
        isResolved = true;
        // Prefer the authoritative final text from the host over the locally
        // accumulated chunks.
        fullText = finalText;
        resolve(fullText);
      }
    };
    const handleError = (error: Error) => {
      if (!isResolved) {
        isResolved = true;
        reject(error);
      }
    };

    emitter.on('data', handleChunk);
    emitter.on('end', handleEnd);
    emitter.on('error', handleError);

    if (options.signal) {
      options.signal.addEventListener('abort', () => {
        if (!isResolved) {
          // Do NOT mark resolved here: handleError must still observe
          // !isResolved to reject the promise. (Previously isResolved was set
          // first, leaving the promise forever pending on abort.)
          pendingRequests.delete(requestId);
          emitter.emit('error', new Error('Request aborted'));
        }
      });
    }

    writeOutput({
      type: 'ai-ask-stream',
      payload: {
        requestId,
        prompt,
        options: {
          model: options.model,
          creativity: options.creativity,
          modelMappings
        }
      }
    });
  });

  // Event-only consumers may never await the promise; attach a no-op rejection
  // handler so a stream error does not surface as an unhandled promise
  // rejection. Callers that do await still observe the rejection normally.
  promise.catch(() => {});

  const result = promise as AskResult;
  // AskResult declares that on/off return `this` so calls can be chained;
  // binding the emitter's methods directly would return the EventEmitter.
  const chain =
    (method: 'on' | 'off') =>
    (event: 'data' | 'end' | 'error', listener: (...args: unknown[]) => void) => {
      emitter[method](event, listener);
      return result;
    };
  result.on = chain('on') as AskResult['on'];
  result.off = chain('off') as AskResult['off'];
  return result;
}
// Aggregate export mirroring Raycast's `AI` namespace: the ask() entry point,
// the model catalogue, and the named creativity presets.
export const AI = {
  ask,
  Model,
  Creativity: {
    none: 'none' as const,
    low: 'low' as const,
    medium: 'medium' as const,
    high: 'high' as const,
    maximum: 'maximum' as const
  }
};

View file

@ -20,6 +20,7 @@ export interface FileSystemItem {
}
export const BrowserExtension = { name: 'BrowserExtension' };
export const AI = { name: 'AI' };
export const environment = {
appearance: 'dark' as const,
@ -37,6 +38,9 @@ export const environment = {
if (feature && feature.name === 'BrowserExtension') {
return browserExtensionState.isConnected;
}
if (feature && feature.name === 'AI') {
return true;
}
return true;
}
};

View file

@ -19,7 +19,8 @@ import {
getDefaultApplication,
getFrontmostApplication,
showInFinder,
trash
trash,
AI as AIConstant
} from './environment';
import { preferencesStore } from '../preferences';
import { showToast } from './toast';
@ -27,6 +28,7 @@ import { showHUD } from './hud';
import { BrowserExtensionAPI } from './browserExtension';
import { Clipboard } from './clipboard';
import * as OAuth from './oauth';
import { AI } from './ai';
import type { Preference } from '@raycast-linux/protocol';
const Image = {
@ -56,6 +58,10 @@ export const getRaycastApi = () => {
LaunchType,
Toast,
OAuth,
AI: {
...AI,
...AIConstant
},
Action,
ActionPanel,
Detail,

View file

@ -7,6 +7,7 @@ import { preferencesStore } from './preferences';
import type { RaycastInstance } from './types';
import { handleResponse } from './api/rpc';
import { handleOAuthResponse, handleTokenResponse } from './api/oauth';
import { handleAiStreamChunk, handleAiStreamEnd, handleAiStreamError } from './api/ai';
process.on('unhandledRejection', (reason: unknown) => {
writeLog(`--- UNHANDLED PROMISE REJECTION ---`);
@ -31,7 +32,9 @@ rl.on('line', (line) => {
};
if (command.action === 'oauth-authorize-response') {
handleOAuthResponse(state!, code!, state, error);
if (state && code) {
handleOAuthResponse(requestId, code, state, error);
}
} else if (command.action.startsWith('oauth-')) {
handleTokenResponse(requestId, result, error);
} else {
@ -40,6 +43,24 @@ rl.on('line', (line) => {
return;
}
if (command.action === 'ai-stream-chunk') {
const payload = command.payload as { requestId: string; text: string };
handleAiStreamChunk(payload);
return;
}
if (command.action === 'ai-stream-end') {
const payload = command.payload as { requestId: string; full_text: string };
handleAiStreamEnd(payload);
return;
}
if (command.action === 'ai-stream-error') {
const payload = command.payload as { requestId: string; error: string };
handleAiStreamError(payload);
return;
}
switch (command.action) {
case 'request-plugin-list':
sendPluginList();

View file

@ -32,7 +32,7 @@ selection = "1.2.0"
url = "2.5.4"
arboard = "3.5.0"
zbus = "5.7.1"
reqwest = "0.12.20"
reqwest = { version = "0.12.20", features = ["json", "stream"] }
zip = "4.1.0"
bytes = "1.10.1"
tokio-tungstenite = "^0.27"

325
src-tauri/src/ai.rs Normal file
View file

@ -0,0 +1,325 @@
use crate::error::AppError;
use futures_util::StreamExt;
use rusqlite::{params, Connection, Result as RusqliteResult};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
use std::sync::Mutex;
use tauri::{AppHandle, Emitter, Manager, State};
const AI_KEYRING_SERVICE: &str = "dev.byteatatime.raycast.ai";
const AI_KEYRING_USERNAME: &str = "openrouter_api_key";
// --- Structs for API and Events ---
/// Options forwarded from the sidecar's `ask()` call (camelCase on the wire).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AskOptions {
    /// Raycast model key chosen by the plugin (e.g. "OpenAI_GPT4o").
    pub model: Option<String>,
    /// Creativity preset name; mapped to a sampling temperature.
    pub creativity: Option<String>,
    /// Raycast model key -> OpenRouter slug mapping resolved by the sidecar.
    #[serde(default)]
    model_mappings: HashMap<String, String>,
}

/// One incremental piece of generated text, emitted as an `ai-stream-chunk`
/// Tauri event.
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct StreamChunk {
    request_id: String,
    text: String,
}

/// Terminal event emitted as `ai-stream-end`. Serialized with camelCase keys,
/// so `full_text` appears as `fullText` on the JavaScript side.
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct StreamEnd {
    request_id: String,
    full_text: String,
}

/// Usage/cost record as returned by OpenRouter's generation endpoint and
/// stored in the local SQLite usage log. Numeric fields default to zero when
/// absent from the API response.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct GenerationData {
    pub id: String,
    pub created: i64,
    pub model: String,
    #[serde(default)]
    pub tokens_prompt: i64,
    #[serde(default)]
    pub tokens_completion: i64,
    #[serde(default)]
    pub native_tokens_prompt: i64,
    #[serde(default)]
    pub native_tokens_completion: i64,
    #[serde(default)]
    pub total_cost: f64,
}
// --- Key Management Commands ---
/// Builds the OS-keyring entry under which the OpenRouter API key is stored.
fn get_keyring_entry() -> Result<keyring::Entry, AppError> {
    keyring::Entry::new(AI_KEYRING_SERVICE, AI_KEYRING_USERNAME).map_err(AppError::from)
}
/// Tauri command: stores the OpenRouter API key in the OS keyring.
#[tauri::command]
pub fn set_ai_api_key(key: String) -> Result<(), String> {
    let entry = get_keyring_entry().map_err(|e| e.to_string())?;
    entry
        .set_password(&key)
        .map_err(|e| AppError::from(e).to_string())
}
#[tauri::command]
pub fn is_ai_api_key_set() -> Result<bool, String> {
match get_keyring_entry().and_then(|entry| entry.get_password().map_err(AppError::from)) {
Ok(_) => Ok(true),
Err(AppError::Keyring(keyring::Error::NoEntry)) => Ok(false),
Err(e) => Err(e.to_string()),
}
}
/// Tauri command: removes the stored OpenRouter API key from the keyring.
#[tauri::command]
pub fn clear_ai_api_key() -> Result<(), String> {
    let entry = get_keyring_entry().map_err(|e| e.to_string())?;
    entry
        .delete_credential()
        .map_err(|e| AppError::from(e).to_string())
}
// --- Usage Tracking ---
/// Persists per-generation AI usage records in a local SQLite database.
pub struct AiUsageManager {
    // Mutex-guarded so the connection can be shared via Tauri's managed state.
    db: Mutex<Connection>,
}

impl AiUsageManager {
    /// Opens (or creates) `ai_usage.sqlite` in the app's local data directory.
    pub fn new(app_handle: &AppHandle) -> Result<Self, AppError> {
        let data_dir = app_handle
            .path()
            .app_local_data_dir()
            .map_err(|_| AppError::DirectoryNotFound)?;
        let db_path = data_dir.join("ai_usage.sqlite");
        let db = Connection::open(db_path)?;
        Ok(Self { db: Mutex::new(db) })
    }

    /// Creates the `ai_generations` table if it does not already exist.
    pub fn init_db(&self) -> RusqliteResult<()> {
        let db = self.db.lock().unwrap();
        db.execute(
            "CREATE TABLE IF NOT EXISTS ai_generations (
                id TEXT PRIMARY KEY,
                created INTEGER NOT NULL,
                model TEXT NOT NULL,
                tokens_prompt INTEGER NOT NULL,
                tokens_completion INTEGER NOT NULL,
                native_tokens_prompt INTEGER NOT NULL,
                native_tokens_completion INTEGER NOT NULL,
                total_cost REAL NOT NULL
            )",
            [],
        )?;
        Ok(())
    }

    /// Upserts one generation record (INSERT OR REPLACE keyed by `id`).
    pub fn log_generation(&self, data: &GenerationData) -> Result<(), AppError> {
        let db = self.db.lock().unwrap();
        db.execute(
            "INSERT OR REPLACE INTO ai_generations (id, created, model, tokens_prompt, tokens_completion, native_tokens_prompt, native_tokens_completion, total_cost)
             VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)",
            params![
                data.id,
                data.created,
                data.model,
                data.tokens_prompt,
                data.tokens_completion,
                data.native_tokens_prompt,
                data.native_tokens_completion,
                data.total_cost
            ],
        )?;
        Ok(())
    }

    /// Returns up to `limit` records, newest first, skipping `offset` rows.
    pub fn get_history(&self, limit: u32, offset: u32) -> Result<Vec<GenerationData>, AppError> {
        let db = self.db.lock().unwrap();
        let mut stmt = db.prepare(
            "SELECT id, created, model, tokens_prompt, tokens_completion, native_tokens_prompt, native_tokens_completion, total_cost FROM ai_generations ORDER BY created DESC LIMIT ?1 OFFSET ?2",
        )?;
        let iter = stmt.query_map(params![limit, offset], |row| {
            Ok(GenerationData {
                id: row.get(0)?,
                created: row.get(1)?,
                model: row.get(2)?,
                tokens_prompt: row.get(3)?,
                tokens_completion: row.get(4)?,
                native_tokens_prompt: row.get(5)?,
                native_tokens_completion: row.get(6)?,
                total_cost: row.get(7)?,
            })
        })?;
        iter.collect::<RusqliteResult<Vec<_>>>()
            .map_err(|e| e.into())
    }
}
/// Tauri command: pages through logged generations, newest first.
#[tauri::command]
pub fn get_ai_usage_history(
    manager: State<AiUsageManager>,
    limit: u32,
    offset: u32,
) -> Result<Vec<GenerationData>, String> {
    match manager.get_history(limit, offset) {
        Ok(history) => Ok(history),
        Err(e) => Err(e.to_string()),
    }
}
/// Queries OpenRouter's generation endpoint for the token/cost data of a
/// finished request and persists it through the usage manager.
async fn fetch_and_log_usage(
    open_router_request_id: String,
    api_key: String,
    manager: &AiUsageManager,
) -> Result<(), AppError> {
    let url = format!(
        "https://openrouter.ai/api/v1/generation?id={}",
        open_router_request_id
    );
    let response = reqwest::Client::new()
        .get(url)
        .header("Authorization", format!("Bearer {}", api_key))
        .send()
        .await
        .map_err(|e| AppError::Ai(e.to_string()))?;

    // Bail out early on HTTP-level failure, forwarding the response body.
    if !response.status().is_success() {
        let error_text = response.text().await.unwrap_or_default();
        return Err(AppError::Ai(format!(
            "Failed to fetch usage data: {}",
            error_text
        )));
    }

    // The record of interest lives under the top-level "data" key.
    let generation_response: Value = response
        .json()
        .await
        .map_err(|e| AppError::Ai(e.to_string()))?;
    let generation_data: GenerationData =
        serde_json::from_value(generation_response["data"].clone())
            .map_err(|e| AppError::Ai(format!("Failed to parse generation data: {}", e)))?;
    manager.log_generation(&generation_data)
}
// --- Core Stream Command ---
/// Tauri command: streams a chat completion for `prompt` from OpenRouter.
///
/// Emits an `ai-stream-chunk` Tauri event per content delta and a final
/// `ai-stream-end` event with the concatenated text; usage data is fetched and
/// logged afterwards in a detached background task. Errors are returned as
/// strings so the frontend can surface them as `ai-stream-error`.
#[tauri::command]
pub async fn ai_ask_stream(
    app_handle: AppHandle,
    manager: State<'_, AiUsageManager>,
    request_id: String,
    prompt: String,
    options: AskOptions,
) -> Result<(), String> {
    let api_key =
        match get_keyring_entry().and_then(|entry| entry.get_password().map_err(AppError::from)) {
            Ok(key) => key,
            Err(e) => return Err(e.to_string()),
        };

    let model_key = options.model.unwrap_or_else(|| "default".to_string());
    // For testing, use a free model when the key has no explicit mapping.
    let model_id = options.model_mappings.get(&model_key).map_or_else(
        || "mistralai/mistral-7b-instruct:free".to_string(),
        |id| id.clone(),
    );

    // Map creativity presets to sampling temperature.
    // NOTE(review): "maximum" is not matched and falls through to the 0.7
    // default — confirm whether it should map to a higher temperature.
    let temperature = match options.creativity.as_deref() {
        Some("none") => 0.0,
        Some("low") => 0.4,
        Some("medium") => 0.7,
        Some("high") => 1.0,
        _ => 0.7,
    };

    let body = serde_json::json!({
        "model": model_id,
        "messages": [{"role": "user", "content": prompt}],
        "stream": true,
        "temperature": temperature,
    });

    let client = reqwest::Client::new();
    let res = client
        .post("https://openrouter.ai/api/v1/chat/completions")
        .header("Authorization", format!("Bearer {}", api_key))
        .header("HTTP-Referer", "http://localhost") // Required by OpenRouter
        .json(&body)
        .send()
        .await
        .map_err(|e| e.to_string())?;

    // Captured before the body is consumed; used below to query usage data.
    let open_router_request_id = res
        .headers()
        .get("x-request-id")
        .and_then(|v| v.to_str().ok())
        .map(|s| s.to_string());

    if !res.status().is_success() {
        let error_body = res.text().await.unwrap_or_else(|_| "Unknown error".into());
        return Err(format!("API Error: {}", error_body));
    }

    let mut stream = res.bytes_stream();
    let mut full_text = String::new();
    // SSE events ("data: ...\n\n") and multi-byte UTF-8 sequences can both be
    // split across network chunks, so accumulate raw bytes and parse only the
    // events that are complete (terminated by a blank line). The previous
    // implementation split each chunk directly and silently dropped any event
    // that straddled a chunk boundary.
    let mut buffer: Vec<u8> = Vec::new();
    'read: while let Some(item) = stream.next().await {
        let chunk = item.map_err(|e| e.to_string())?;
        buffer.extend_from_slice(&chunk);
        while let Some(pos) = buffer.windows(2).position(|w| w == b"\n\n") {
            let event_bytes: Vec<u8> = buffer.drain(..pos + 2).collect();
            let event_text = String::from_utf8_lossy(&event_bytes);
            let line = event_text.trim();
            if !line.starts_with("data: ") {
                continue; // comments / blank keep-alives
            }
            let json_str = &line[6..];
            if json_str.trim() == "[DONE]" {
                break 'read; // stop reading entirely, not just this batch
            }
            let json: Value = match serde_json::from_str(json_str) {
                Ok(v) => v,
                Err(_) => continue,
            };
            let content = json
                .get("choices")
                .and_then(|c| c.get(0))
                .and_then(|c0| c0.get("delta"))
                .and_then(|d| d.get("content"))
                .and_then(|c| c.as_str());
            if let Some(content) = content {
                full_text.push_str(content);
                app_handle
                    .emit(
                        "ai-stream-chunk",
                        StreamChunk {
                            request_id: request_id.clone(),
                            text: content.to_string(),
                        },
                    )
                    .map_err(|e| e.to_string())?;
            }
        }
    }

    app_handle
        .emit(
            "ai-stream-end",
            StreamEnd {
                request_id: request_id.clone(),
                full_text: full_text.clone(),
            },
        )
        .map_err(|e| e.to_string())?;

    if let Some(or_req_id) = open_router_request_id {
        // The managed connection cannot move into the spawned task, so open a
        // second connection to the same database file. Failures here affect
        // only usage logging and must not panic the background task (the
        // previous code unwrap()ed both the path and the open).
        let db_path = manager.db.lock().unwrap().path().map(|p| p.to_owned());
        if let Some(path) = db_path {
            tokio::spawn(async move {
                match Connection::open(path) {
                    Ok(db) => {
                        let manager_clone = AiUsageManager { db: Mutex::new(db) };
                        if let Err(e) =
                            fetch_and_log_usage(or_req_id, api_key, &manager_clone).await
                        {
                            eprintln!("[AI Usage Tracking] Error: {}", e);
                        }
                    }
                    Err(e) => eprintln!("[AI Usage Tracking] Could not open usage DB: {}", e),
                }
            });
        }
    }

    Ok(())
}

View file

@ -10,6 +10,7 @@ pub enum AppError {
ClipboardHistory(String),
Frecency(String),
FileSearch(String),
Ai(String),
}
impl From<io::Error> for AppError {
@ -53,6 +54,7 @@ impl std::fmt::Display for AppError {
AppError::ClipboardHistory(msg) => write!(f, "Clipboard history error: {}", msg),
AppError::Frecency(msg) => write!(f, "Frecency error: {}", msg),
AppError::FileSearch(msg) => write!(f, "File search error: {}", msg),
AppError::Ai(msg) => write!(f, "AI error: {}", msg),
}
}
}

View file

@ -1,3 +1,4 @@
mod ai;
mod app;
mod browser_extension;
mod cache;
@ -14,8 +15,10 @@ mod quicklinks;
mod snippets;
mod system;
use crate::ai::{ai_ask_stream, AskOptions};
use crate::snippets::input_manager::{EvdevInputManager, InputManager};
use crate::{app::App, cache::AppCache};
use ai::AiUsageManager;
use browser_extension::WsState;
use frecency::FrecencyManager;
use quicklinks::QuicklinkManager;
@ -247,7 +250,12 @@ pub fn run() {
snippets::import_snippets,
snippets::paste_snippet_content,
snippets::snippet_was_used,
file_search::search_files
file_search::search_files,
ai::set_ai_api_key,
ai::is_ai_api_key_set,
ai::clear_ai_api_key,
ai::ai_ask_stream,
ai::get_ai_usage_history
])
.setup(|app| {
let app_handle = app.handle().clone();
@ -270,6 +278,10 @@ pub fn run() {
let app_handle_for_file_search = app.handle().clone();
file_search::init(app_handle_for_file_search);
let ai_usage_manager = AiUsageManager::new(app.handle())?;
ai_usage_manager.init_db()?;
app.manage(ai_usage_manager);
setup_background_refresh();
setup_global_shortcut(app)?;
setup_input_listener(app.handle());

View file

@ -4,6 +4,7 @@ import { uiStore } from '$lib/ui.svelte';
import { CommandSchema, SidecarMessageWithPluginsSchema } from '@raycast-linux/protocol';
import { invoke } from '@tauri-apps/api/core';
import { appCacheDir, appLocalDataDir } from '@tauri-apps/api/path';
import { listen } from '@tauri-apps/api/event';
type OauthState = {
url: string;
@ -18,6 +19,7 @@ class SidecarService {
#unpackr = new Unpackr();
#onGoBackToPluginList: (() => void) | null = null;
#browserExtensionConnectionInterval: ReturnType<typeof setInterval> | null = null;
#aiEventUnlisten: (() => void)[] = [];
oauthState: OauthState = $state(null);
logs: string[] = $state([]);
@ -55,6 +57,7 @@ class SidecarService {
this.#log(`Sidecar spawned with PID: ${this.#sidecarChild.pid}`);
this.requestPluginList();
this.#setupAiEventListeners();
this.#browserExtensionConnectionInterval = setInterval(async () => {
try {
@ -81,6 +84,29 @@ class SidecarService {
clearInterval(this.#browserExtensionConnectionInterval);
this.#browserExtensionConnectionInterval = null;
}
this.#aiEventUnlisten.forEach((unlisten) => unlisten());
this.#aiEventUnlisten = [];
};
#setupAiEventListeners = async () => {
	// Forward the backend's Tauri 'ai-stream-*' events to the sidecar process.
	// Unlisten callbacks are collected so #aiEventUnlisten cleanup can detach
	// them when the sidecar is torn down.
	try {
		const chunkUnlisten = await listen('ai-stream-chunk', (event) => {
			// (leftover debug console.log removed — listeners now behave uniformly)
			this.dispatchEvent('ai-stream-chunk', event.payload as object);
		});
		const endUnlisten = await listen('ai-stream-end', (event) => {
			this.dispatchEvent('ai-stream-end', event.payload as object);
		});
		const errorUnlisten = await listen('ai-stream-error', (event) => {
			this.dispatchEvent('ai-stream-error', event.payload as object);
		});
		this.#aiEventUnlisten.push(chunkUnlisten, endUnlisten, errorUnlisten);
	} catch (error) {
		this.#log(`Error setting up AI event listeners: ${error}`);
	}
};
dispatchEvent = (action: string, payload?: object) => {
@ -166,6 +192,35 @@ class SidecarService {
return;
}
if (typedMessage.type === 'ai-ask-stream') {
const { requestId, prompt, options } = typedMessage.payload as {
requestId: string;
prompt: string;
options: {
model?: string;
creativity?: string;
modelMappings?: Record<string, string>;
};
};
try {
await invoke('ai_ask_stream', {
requestId,
prompt,
options: {
model: options.model,
creativity: options.creativity,
model_mappings: options.modelMappings || {}
}
});
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.#log(`ERROR from AI ask stream: ${errorMessage}`);
this.dispatchEvent('ai-stream-error', { requestId, error: errorMessage });
}
return;
}
if (typedMessage.type.startsWith('system-')) {
const { requestId, ...params } = typedMessage.payload as {
requestId: string;