chore: refactoring status updates

adamdottv 2025-05-08 12:03:59 -05:00
parent e35ea2d448
commit f41b7bbd0a
No known key found for this signature in database
GPG key ID: 9CB48779AF150E75
24 changed files with 367 additions and 265 deletions


@ -229,6 +229,7 @@ func setupSubscriptions(app *app.App, parentCtx context.Context) (chan tea.Msg,
setupSubscriber(ctx, &wg, "sessions", app.Sessions.Subscribe, ch)
setupSubscriber(ctx, &wg, "messages", app.Messages.Subscribe, ch)
setupSubscriber(ctx, &wg, "permissions", app.Permissions.Subscribe, ch)
setupSubscriber(ctx, &wg, "status", app.Status.Subscribe, ch)
cleanupFunc := func() {
logging.Info("Cancelling all subscriptions")


@ -16,6 +16,7 @@ import (
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/permission"
"github.com/opencode-ai/opencode/internal/session"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/theme"
)
@ -24,6 +25,7 @@ type App struct {
Messages message.Service
History history.Service
Permissions permission.Service
Status status.Service
CoderAgent agent.Service
@ -38,18 +40,24 @@ type App struct {
func New(ctx context.Context, conn *sql.DB) (*App, error) {
q := db.New(conn)
sessions := session.NewService(q)
messages := message.NewService(q)
files := history.NewService(q, conn)
sessionService := session.NewService(q)
messageService := message.NewService(q)
historyService := history.NewService(q, conn)
permissionService := permission.NewPermissionService()
statusService := status.NewService()
// Initialize session manager
session.InitManager(sessions)
session.InitManager(sessionService)
// Initialize status service
status.InitManager(statusService)
app := &App{
Sessions: sessions,
Messages: messages,
History: files,
Permissions: permission.NewPermissionService(),
Sessions: sessionService,
Messages: messageService,
History: historyService,
Permissions: permissionService,
Status: statusService,
LSPClients: make(map[string]*lsp.Client),
}


@ -17,6 +17,7 @@ import (
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/permission"
"github.com/opencode-ai/opencode/internal/session"
"github.com/opencode-ai/opencode/internal/status"
)
// Common errors
@ -96,7 +97,7 @@ func NewAgent(
func (a *agent) Cancel(sessionID string) {
if cancelFunc, exists := a.activeRequests.LoadAndDelete(sessionID); exists {
if cancel, ok := cancelFunc.(context.CancelFunc); ok {
logging.InfoPersist(fmt.Sprintf("Request cancellation initiated for session: %s", sessionID))
status.Info(fmt.Sprintf("Request cancellation initiated for session: %s", sessionID))
cancel()
}
}
@ -186,7 +187,7 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
}
result := a.processGeneration(genCtx, sessionID, content, attachmentParts)
if result.Err() != nil && !errors.Is(result.Err(), ErrRequestCancelled) && !errors.Is(result.Err(), context.Canceled) {
logging.ErrorPersist(result.Err().Error())
status.Error(result.Err().Error())
}
logging.Debug("Request completed", "sessionID", sessionID)
a.activeRequests.Delete(sessionID)
@ -224,11 +225,11 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
if len(sessionMessages) == 0 && currentSession.Summary == "" {
go func() {
defer logging.RecoverPanic("agent.Run", func() {
logging.ErrorPersist("panic while generating title")
status.Error("panic while generating title")
})
titleErr := a.generateTitle(context.Background(), sessionID, content)
if titleErr != nil {
logging.ErrorPersist(fmt.Sprintf("failed to generate title: %v", titleErr))
status.Error(fmt.Sprintf("failed to generate title: %v", titleErr))
}
}()
}
@ -308,11 +309,11 @@ func (a *agent) streamAndHandleEvents(ctx context.Context, sessionID string, msg
// If we're approaching the context window limit, trigger auto-compaction
if false && (*usage+maxTokens) >= threshold {
logging.InfoPersist(fmt.Sprintf("Auto-compaction triggered for session %s. Estimated tokens: %d, Threshold: %d", sessionID, usage, threshold))
status.Info(fmt.Sprintf("Auto-compaction triggered for session %s. Estimated tokens: %d, Threshold: %d", sessionID, usage, threshold))
// Perform compaction with pause/resume to ensure safety
if err := a.CompactSession(ctx, sessionID); err != nil {
logging.ErrorPersist(fmt.Sprintf("Auto-compaction failed: %v", err))
status.Error(fmt.Sprintf("Auto-compaction failed: %v", err))
// Continue with the request even if compaction fails
} else {
// Re-fetch session details after compaction
@ -495,10 +496,10 @@ func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg
return a.messages.Update(ctx, *assistantMsg)
case provider.EventError:
if errors.Is(event.Error, context.Canceled) {
logging.InfoPersist(fmt.Sprintf("Event processing canceled for session: %s", sessionID))
status.Info(fmt.Sprintf("Event processing canceled for session: %s", sessionID))
return context.Canceled
}
logging.ErrorPersist(event.Error.Error())
status.Error(event.Error.Error())
return event.Error
case provider.EventComplete:
assistantMsg.SetToolCalls(event.Response.ToolCalls)
@ -570,7 +571,7 @@ func (a *agent) PauseSession(sessionID string) error {
return nil // Session is not active, no need to pause
}
logging.InfoPersist(fmt.Sprintf("Pausing session: %s", sessionID))
status.Info(fmt.Sprintf("Pausing session: %s", sessionID))
a.pauseLock.Lock() // Acquire write lock to block new operations
return nil
}
@ -578,7 +579,7 @@ func (a *agent) PauseSession(sessionID string) error {
// ResumeSession resumes message processing for a session
// This should be called after completing operations that required exclusive access
func (a *agent) ResumeSession(sessionID string) error {
logging.InfoPersist(fmt.Sprintf("Resuming session: %s", sessionID))
status.Info(fmt.Sprintf("Resuming session: %s", sessionID))
a.pauseLock.Unlock() // Release write lock to allow operations to continue
return nil
}
@ -592,7 +593,7 @@ func (a *agent) CompactSession(ctx context.Context, sessionID string) error {
}
// Make sure to resume the session when we're done
defer a.ResumeSession(sessionID)
logging.InfoPersist(fmt.Sprintf("Session %s paused for compaction", sessionID))
status.Info(fmt.Sprintf("Session %s paused for compaction", sessionID))
}
// Create a cancellable context


@ -17,6 +17,7 @@ import (
"github.com/opencode-ai/opencode/internal/llm/tools"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/status"
)
type anthropicOptions struct {
@ -227,7 +228,7 @@ func (a *anthropicClient) send(ctx context.Context, messages []message.Message,
return nil, retryErr
}
if retry {
logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
status.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
return nil, ctx.Err()
@ -365,7 +366,7 @@ func (a *anthropicClient) stream(ctx context.Context, messages []message.Message
return
}
if retry {
logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
status.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
// context cancelled


@ -15,6 +15,7 @@ import (
"github.com/opencode-ai/opencode/internal/llm/tools"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/status"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
)
@ -195,7 +196,7 @@ func (g *geminiClient) send(ctx context.Context, messages []message.Message, too
return nil, retryErr
}
if retry {
logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
status.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
return nil, ctx.Err()
@ -297,7 +298,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
return
}
if retry {
logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
status.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
if ctx.Err() != nil {


@ -16,6 +16,7 @@ import (
"github.com/opencode-ai/opencode/internal/llm/tools"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/status"
)
type openaiOptions struct {
@ -214,7 +215,7 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
return nil, retryErr
}
if retry {
logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
status.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
return nil, ctx.Err()
@ -320,7 +321,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
return
}
if retry {
logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
status.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
// context cancelled


@ -12,7 +12,7 @@ import (
"syscall"
"time"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/status"
)
type PersistentShell struct {
@ -101,7 +101,7 @@ func newPersistentShell(cwd string) *PersistentShell {
go func() {
err := cmd.Wait()
if err != nil {
logging.ErrorPersist(fmt.Sprintf("Shell process exited with error: %v", err))
status.Error(fmt.Sprintf("Shell process exited with error: %v", err))
}
shell.isAlive = false
close(shell.commandQueue)


@ -6,6 +6,8 @@ import (
"os"
"runtime/debug"
"time"
"github.com/opencode-ai/opencode/internal/status"
)
func Info(msg string, args ...any) {
@ -24,33 +26,15 @@ func Error(msg string, args ...any) {
slog.Error(msg, args...)
}
func InfoPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Info(msg, args...)
}
func DebugPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Debug(msg, args...)
}
func WarnPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Warn(msg, args...)
}
func ErrorPersist(msg string, args ...any) {
args = append(args, persistKeyArg, true)
slog.Error(msg, args...)
}
// RecoverPanic is a common function to handle panics gracefully.
// It logs the error, creates a panic log file with stack trace,
// and executes an optional cleanup function before returning.
func RecoverPanic(name string, cleanup func()) {
if r := recover(); r != nil {
// Log the panic
ErrorPersist(fmt.Sprintf("Panic in %s: %v", name, r))
errorMsg := fmt.Sprintf("Panic in %s: %v", name, r)
Error(errorMsg)
status.Error(errorMsg)
// Create a timestamped panic log file
timestamp := time.Now().Format("20060102-150405")
@ -58,7 +42,9 @@ func RecoverPanic(name string, cleanup func()) {
file, err := os.Create(filename)
if err != nil {
ErrorPersist(fmt.Sprintf("Failed to create panic log: %v", err))
errMsg := fmt.Sprintf("Failed to create panic log: %v", err)
Error(errMsg)
status.Error(errMsg)
} else {
defer file.Close()
@ -67,7 +53,9 @@ func RecoverPanic(name string, cleanup func()) {
fmt.Fprintf(file, "Time: %s\n\n", time.Now().Format(time.RFC3339))
fmt.Fprintf(file, "Stack Trace:\n%s\n", debug.Stack())
InfoPersist(fmt.Sprintf("Panic details written to %s", filename))
infoMsg := fmt.Sprintf("Panic details written to %s", filename)
Info(infoMsg)
status.Info(infoMsg)
}
// Execute cleanup function if provided


@ -6,13 +6,11 @@ import (
// LogMessage is the event payload for a log message
type LogMessage struct {
ID string
Time time.Time
Level string
Persist bool // used when we want to show the mesage in the status bar
PersistTime time.Duration // used when we want to show the mesage in the status bar
Message string `json:"msg"`
Attributes []Attr
ID string
Time time.Time
Level string
Message string `json:"msg"`
Attributes []Attr
}
type Attr struct {


@ -12,11 +12,6 @@ import (
"github.com/opencode-ai/opencode/internal/pubsub"
)
const (
persistKeyArg = "$_persist"
PersistTimeArg = "$_persist_time"
)
const (
// Maximum number of log messages to keep in memory
maxLogMessages = 1000
@ -76,20 +71,10 @@ func (w *writer) Write(p []byte) (int, error) {
case "msg":
msg.Message = string(d.Value())
default:
if string(d.Key()) == persistKeyArg {
msg.Persist = true
} else if string(d.Key()) == PersistTimeArg {
parsed, err := time.ParseDuration(string(d.Value()))
if err != nil {
continue
}
msg.PersistTime = parsed
} else {
msg.Attributes = append(msg.Attributes, Attr{
Key: string(d.Key()),
Value: string(d.Value()),
})
}
msg.Attributes = append(msg.Attributes, Attr{
Key: string(d.Key()),
Value: string(d.Value()),
})
}
}
defaultLogData.Add(msg)


@ -17,6 +17,7 @@ import (
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/lsp/protocol"
"github.com/opencode-ai/opencode/internal/status"
)
type Client struct {
@ -106,7 +107,7 @@ func NewClient(ctx context.Context, command string, args ...string) (*Client, er
// Start message handling loop
go func() {
defer logging.RecoverPanic("LSP-message-handler", func() {
logging.ErrorPersist("LSP message handler crashed, LSP functionality may be impaired")
status.Error("LSP message handler crashed, LSP functionality may be impaired")
})
client.handleMessages()
}()


@ -0,0 +1,64 @@
package status
import (
"log/slog"
"sync"
)
// Manager handles status message management
type Manager struct {
service Service
mu sync.RWMutex
}
// Global instance of the status manager
var globalManager *Manager
// InitManager initializes the global status manager with the provided service
func InitManager(service Service) {
globalManager = &Manager{
service: service,
}
// Subscribe to status events for any global handling if needed
// go func() {
// ctx := context.Background()
// _ = service.Subscribe(ctx)
// }()
slog.Debug("Status manager initialized")
}
// GetService returns the status service from the global manager
func GetService() Service {
if globalManager == nil {
slog.Warn("Status manager not initialized, initializing with default service")
InitManager(NewService())
}
globalManager.mu.RLock()
defer globalManager.mu.RUnlock()
return globalManager.service
}
// Info publishes an info level status message using the global manager
func Info(message string) {
GetService().Info(message)
}
// Warn publishes a warning level status message using the global manager
func Warn(message string) {
GetService().Warn(message)
}
// Error publishes an error level status message using the global manager
func Error(message string) {
GetService().Error(message)
}
// Debug publishes a debug level status message using the global manager
func Debug(message string) {
GetService().Debug(message)
}
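
Usage sketch, not part of the diff: once InitManager has run (as in app.New above), any package can report through the package-level helpers, and GetService lazily falls back to a default service if the manager was never initialized. Only the status API shown above is used; doWork and step are hypothetical names.

package example // hypothetical package, for illustration only

import (
	"fmt"

	"github.com/opencode-ai/opencode/internal/status"
)

func doWork() error {
	status.Info("starting work")
	if err := step(); err != nil {
		// Mirrors the status.Error(fmt.Sprintf(...)) pattern used throughout this commit.
		status.Error(fmt.Sprintf("work failed: %v", err))
		return err
	}
	status.Debug("work finished")
	return nil
}

func step() error { return nil } // placeholder for real work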

internal/status/status.go (new file)

@ -0,0 +1,80 @@
package status
import (
"time"
"github.com/opencode-ai/opencode/internal/pubsub"
)
// Level represents the severity level of a status message
type Level string
const (
// LevelInfo represents an informational status message
LevelInfo Level = "info"
// LevelWarn represents a warning status message
LevelWarn Level = "warn"
// LevelError represents an error status message
LevelError Level = "error"
// LevelDebug represents a debug status message
LevelDebug Level = "debug"
)
// StatusMessage represents a status update to be displayed in the UI
type StatusMessage struct {
Level Level `json:"level"`
Message string `json:"message"`
Timestamp time.Time `json:"timestamp"`
}
// Service defines the interface for the status service
type Service interface {
pubsub.Suscriber[StatusMessage]
Info(message string)
Warn(message string)
Error(message string)
Debug(message string)
}
type service struct {
*pubsub.Broker[StatusMessage]
}
// Info publishes an info level status message
func (s *service) Info(message string) {
s.publish(LevelInfo, message)
}
// Warn publishes a warning level status message
func (s *service) Warn(message string) {
s.publish(LevelWarn, message)
}
// Error publishes an error level status message
func (s *service) Error(message string) {
s.publish(LevelError, message)
}
// Debug publishes a debug level status message
func (s *service) Debug(message string) {
s.publish(LevelDebug, message)
}
// publish creates and publishes a status message with the given level and message
func (s *service) publish(level Level, message string) {
statusMsg := StatusMessage{
Level: level,
Message: message,
Timestamp: time.Now(),
}
s.Publish(pubsub.CreatedEvent, statusMsg)
}
// NewService creates a new status service
func NewService() Service {
broker := pubsub.NewBroker[StatusMessage]()
return &service{
Broker: broker,
}
}
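
Consumer sketch, not part of the diff: because Service embeds the pubsub subscriber interface, status messages can also be consumed directly. This assumes Subscribe(ctx) yields pubsub.Event[StatusMessage] values, consistent with how app.Status.Subscribe is wired into setupSubscriber and how the status bar component handles pubsub.Event[status.StatusMessage] later in this commit.

package example // hypothetical package, for illustration only

import (
	"context"
	"fmt"

	"github.com/opencode-ai/opencode/internal/pubsub"
	"github.com/opencode-ai/opencode/internal/status"
)

// watchStatus prints each newly published status message until the channel closes.
func watchStatus(ctx context.Context, svc status.Service) {
	for ev := range svc.Subscribe(ctx) {
		if ev.Type == pubsub.CreatedEvent {
			fmt.Printf("[%s] %s (%s)\n", ev.Payload.Level, ev.Payload.Message,
				ev.Payload.Timestamp.Format("15:04:05"))
		}
	}
}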


@ -12,9 +12,9 @@ import (
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/opencode-ai/opencode/internal/app"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/session"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/components/dialog"
"github.com/opencode-ai/opencode/internal/tui/layout"
"github.com/opencode-ai/opencode/internal/tui/styles"
@ -87,7 +87,8 @@ func (m *editorCmp) openEditor(value string) tea.Cmd {
tmpfile, err := os.CreateTemp("", "msg_*.md")
tmpfile.WriteString(value)
if err != nil {
return util.ReportError(err)
status.Error(err.Error())
return nil
}
tmpfile.Close()
c := exec.Command(editor, tmpfile.Name()) //nolint:gosec
@ -96,14 +97,17 @@ func (m *editorCmp) openEditor(value string) tea.Cmd {
c.Stderr = os.Stderr
return tea.ExecProcess(c, func(err error) tea.Msg {
if err != nil {
return util.ReportError(err)
status.Error(err.Error())
return nil
}
content, err := os.ReadFile(tmpfile.Name())
if err != nil {
return util.ReportError(err)
status.Error(err.Error())
return nil
}
if len(content) == 0 {
return util.ReportWarn("Message is empty")
status.Warn("Message is empty")
return nil
}
os.Remove(tmpfile.Name())
attachments := m.attachments
@ -121,7 +125,8 @@ func (m *editorCmp) Init() tea.Cmd {
func (m *editorCmp) send() tea.Cmd {
if m.app.CoderAgent.IsSessionBusy(m.session.ID) {
return util.ReportWarn("Agent is working, please wait...")
status.Warn("Agent is working, please wait...")
return nil
}
value := m.textarea.Value()
@ -153,7 +158,7 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, nil
case dialog.AttachmentAddedMsg:
if len(m.attachments) >= maxAttachments {
logging.ErrorPersist(fmt.Sprintf("cannot add more than %d images", maxAttachments))
status.Error(fmt.Sprintf("cannot add more than %d images", maxAttachments))
return m, cmd
}
m.attachments = append(m.attachments, msg.Attachment)
@ -185,7 +190,8 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
if key.Matches(msg, editorMaps.OpenEditor) {
if m.app.CoderAgent.IsSessionBusy(m.session.ID) {
return m, util.ReportWarn("Agent is working, please wait...")
status.Warn("Agent is working, please wait...")
return m, nil
}
value := m.textarea.Value()
m.textarea.Reset()


@ -15,10 +15,10 @@ import (
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/pubsub"
"github.com/opencode-ai/opencode/internal/session"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/components/dialog"
"github.com/opencode-ai/opencode/internal/tui/styles"
"github.com/opencode-ai/opencode/internal/tui/theme"
"github.com/opencode-ai/opencode/internal/tui/util"
)
type cacheItem struct {
@ -26,17 +26,17 @@ type cacheItem struct {
content []uiMessage
}
type messagesCmp struct {
app *app.App
width, height int
viewport viewport.Model
session session.Session
messages []message.Message
uiMessages []uiMessage
currentMsgID string
cachedContent map[string]cacheItem
spinner spinner.Model
rendering bool
attachments viewport.Model
app *app.App
width, height int
viewport viewport.Model
session session.Session
messages []message.Message
uiMessages []uiMessage
currentMsgID string
cachedContent map[string]cacheItem
spinner spinner.Model
rendering bool
attachments viewport.Model
showToolMessages bool
}
type renderFinishedMsg struct{}
@ -447,7 +447,8 @@ func (m *messagesCmp) SetSession(session session.Session) tea.Cmd {
m.session = session
messages, err := m.app.Messages.List(context.Background(), session.ID)
if err != nil {
return util.ReportError(err)
status.Error(err.Error())
return nil
}
m.messages = messages
if len(m.messages) > 0 {
@ -483,11 +484,11 @@ func NewMessagesCmp(app *app.App) tea.Model {
vp.KeyMap.HalfPageUp = messageKeys.HalfPageUp
vp.KeyMap.HalfPageDown = messageKeys.HalfPageDown
return &messagesCmp{
app: app,
cachedContent: make(map[string]cacheItem),
viewport: vp,
spinner: s,
attachments: attachmets,
app: app,
cachedContent: make(map[string]cacheItem),
viewport: vp,
spinner: s,
attachments: attachmets,
showToolMessages: true,
}
}


@ -13,10 +13,10 @@ import (
"github.com/opencode-ai/opencode/internal/lsp/protocol"
"github.com/opencode-ai/opencode/internal/pubsub"
"github.com/opencode-ai/opencode/internal/session"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/components/chat"
"github.com/opencode-ai/opencode/internal/tui/styles"
"github.com/opencode-ai/opencode/internal/tui/theme"
"github.com/opencode-ai/opencode/internal/tui/util"
)
type StatusCmp interface {
@ -25,22 +25,34 @@ type StatusCmp interface {
}
type statusCmp struct {
info util.InfoMsg
width int
messageTTL time.Duration
lspClients map[string]*lsp.Client
session session.Session
statusMessages []statusMessage
width int
messageTTL time.Duration
lspClients map[string]*lsp.Client
session session.Session
}
type statusMessage struct {
Level status.Level
Message string
Timestamp time.Time
ExpiresAt time.Time
}
// clearMessageCmd is a command that clears status messages after a timeout
func (m statusCmp) clearMessageCmd(ttl time.Duration) tea.Cmd {
return tea.Tick(ttl, func(time.Time) tea.Msg {
return util.ClearStatusMsg{}
func (m statusCmp) clearMessageCmd() tea.Cmd {
return tea.Tick(time.Second, func(t time.Time) tea.Msg {
return statusCleanupMsg{time: t}
})
}
// statusCleanupMsg is a message that triggers cleanup of expired status messages
type statusCleanupMsg struct {
time time.Time
}
func (m statusCmp) Init() tea.Cmd {
return nil
return m.clearMessageCmd()
}
func (m statusCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
@ -58,15 +70,26 @@ func (m statusCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
m.session = msg.Payload
}
}
case util.InfoMsg:
m.info = msg
ttl := msg.TTL
if ttl == 0 {
ttl = m.messageTTL
case pubsub.Event[status.StatusMessage]:
if msg.Type == pubsub.CreatedEvent {
statusMsg := statusMessage{
Level: msg.Payload.Level,
Message: msg.Payload.Message,
Timestamp: msg.Payload.Timestamp,
ExpiresAt: msg.Payload.Timestamp.Add(m.messageTTL),
}
m.statusMessages = append(m.statusMessages, statusMsg)
}
return m, m.clearMessageCmd(ttl)
case util.ClearStatusMsg:
m.info = util.InfoMsg{}
case statusCleanupMsg:
// Remove expired messages
var activeMessages []statusMessage
for _, sm := range m.statusMessages {
if sm.ExpiresAt.After(msg.time) {
activeMessages = append(activeMessages, sm)
}
}
m.statusMessages = activeMessages
return m, m.clearMessageCmd()
}
return m, nil
}
@ -128,8 +151,7 @@ func (m statusCmp) View() string {
status += tokensStyle
}
diagnostics :=
styles.Padded().Background(t.BackgroundDarker()).Render(m.projectDiagnostics())
diagnostics := styles.Padded().Background(t.BackgroundDarker()).Render(m.projectDiagnostics())
model := m.model()
@ -141,25 +163,31 @@ func (m statusCmp) View() string {
lipgloss.Width(diagnostics),
)
if m.info.Msg != "" {
// Display the first status message if available
if len(m.statusMessages) > 0 {
sm := m.statusMessages[0]
infoStyle := styles.Padded().
Foreground(t.Background()).
Width(statusWidth)
switch m.info.Type {
case util.InfoTypeInfo:
switch sm.Level {
case "info":
infoStyle = infoStyle.Background(t.Info())
case util.InfoTypeWarn:
case "warn":
infoStyle = infoStyle.Background(t.Warning())
case util.InfoTypeError:
case "error":
infoStyle = infoStyle.Background(t.Error())
case "debug":
infoStyle = infoStyle.Background(t.TextMuted())
}
// Truncate message if it's longer than available width
msg := m.info.Msg
msg := sm.Message
availWidth := statusWidth - 10
if len(msg) > availWidth && availWidth > 0 {
msg = msg[:availWidth] + "..."
}
status += infoStyle.Render(msg)
} else {
status += styles.Padded().
@ -272,8 +300,12 @@ func NewStatusCmp(lspClients map[string]*lsp.Client) StatusCmp {
// Initialize the help widget with default text
helpWidget = getHelpWidget("")
return &statusCmp{
messageTTL: 10 * time.Second,
lspClients: lspClients,
statusComponent := &statusCmp{
statusMessages: []statusMessage{},
messageTTL: 4 * time.Second,
lspClients: lspClients,
}
return statusComponent
}


@ -18,6 +18,7 @@ import (
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/image"
"github.com/opencode-ai/opencode/internal/tui/styles"
"github.com/opencode-ai/opencode/internal/tui/theme"
@ -156,7 +157,7 @@ func (f *filepickerCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
path = f.cwd.Value()
fileInfo, err := os.Stat(path)
if err != nil {
logging.ErrorPersist("Invalid path")
status.Error("Invalid path")
return f, cmd
}
isPathDir = fileInfo.IsDir()
@ -225,7 +226,7 @@ func (f *filepickerCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
func (f *filepickerCmp) addAttachmentToMessage() (tea.Model, tea.Cmd) {
modeInfo := GetSelectedModel(config.Get())
if !modeInfo.SupportsAttachments {
logging.ErrorPersist(fmt.Sprintf("Model %s doesn't support attachments", modeInfo.Name))
status.Error(fmt.Sprintf("Model %s doesn't support attachments", modeInfo.Name))
return f, nil
}
if isExtSupported(f.dirs[f.cursor].Name()) {
@ -233,17 +234,17 @@ func (f *filepickerCmp) addAttachmentToMessage() (tea.Model, tea.Cmd) {
selectedFilePath := filepath.Join(f.cwdDetails.directory, "/", f.selectedFile)
isFileLarge, err := image.ValidateFileSize(selectedFilePath, maxAttachmentSize)
if err != nil {
logging.ErrorPersist("unable to read the image")
status.Error("unable to read the image")
return f, nil
}
if isFileLarge {
logging.ErrorPersist("file too large, max 5MB")
status.Error("file too large, max 5MB")
return f, nil
}
content, err := os.ReadFile(selectedFilePath)
if err != nil {
logging.ErrorPersist("Unable read selected file")
status.Error("Unable read selected file")
return f, nil
}
@ -255,7 +256,7 @@ func (f *filepickerCmp) addAttachmentToMessage() (tea.Model, tea.Cmd) {
return f, util.CmdHandler(AttachmentAddedMsg{attachment})
}
if !isExtSupported(f.selectedFile) {
logging.ErrorPersist("Unsupported file")
status.Error("Unsupported file")
return f, nil
}
return f, nil
@ -425,7 +426,7 @@ func readDir(path string, showHidden bool) []os.DirEntry {
go func() {
dirEntries, err := os.ReadDir(path)
if err != nil {
logging.ErrorPersist(err.Error())
status.Error(err.Error())
errChan <- err
return
}
@ -457,12 +458,12 @@ func readDir(path string, showHidden bool) []os.DirEntry {
return sanitizedDirEntries
case err := <-errChan:
logging.ErrorPersist(fmt.Sprintf("Error reading directory %s", path), err)
case <-errChan:
status.Error(fmt.Sprintf("Error reading directory %s", path))
return []os.DirEntry{}
case <-time.After(5 * time.Second):
logging.ErrorPersist(fmt.Sprintf("Timeout reading directory %s", path), nil)
status.Error(fmt.Sprintf("Timeout reading directory %s", path))
return []os.DirEntry{}
}
}


@ -10,6 +10,7 @@ import (
"github.com/charmbracelet/lipgloss"
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/llm/models"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/layout"
"github.com/opencode-ai/opencode/internal/tui/styles"
"github.com/opencode-ai/opencode/internal/tui/theme"
@ -126,7 +127,7 @@ func (m *modelDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
m.switchProvider(1)
}
case key.Matches(msg, modelKeys.Enter):
util.ReportInfo(fmt.Sprintf("selected model: %s", m.models[m.selectedIdx].Name))
status.Info(fmt.Sprintf("selected model: %s", m.models[m.selectedIdx].Name))
return m, util.CmdHandler(ModelSelectedMsg{Model: m.models[m.selectedIdx]})
case key.Matches(msg, modelKeys.Escape):
return m, util.CmdHandler(CloseModelDialogMsg{})


@ -4,6 +4,7 @@ import (
"github.com/charmbracelet/bubbles/key"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/layout"
"github.com/opencode-ai/opencode/internal/tui/styles"
"github.com/opencode-ai/opencode/internal/tui/theme"
@ -106,7 +107,8 @@ func (t *themeDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return t, util.CmdHandler(CloseThemeDialogMsg{})
}
if err := theme.SetTheme(selectedTheme); err != nil {
return t, util.ReportError(err)
status.Error(err.Error())
return t, nil
}
return t, util.CmdHandler(ThemeChangedMsg{
ThemeName: selectedTheme,
@ -195,4 +197,3 @@ func NewThemeDialogCmp() ThemeDialog {
currentTheme: "",
}
}


@ -7,9 +7,9 @@ import (
"github.com/charmbracelet/bubbles/key"
tea "github.com/charmbracelet/bubbletea"
"github.com/opencode-ai/opencode/internal/app"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/message"
"github.com/opencode-ai/opencode/internal/session"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/components/chat"
"github.com/opencode-ai/opencode/internal/tui/layout"
"github.com/opencode-ai/opencode/internal/tui/util"
@ -26,9 +26,9 @@ type chatPage struct {
}
type ChatKeyMap struct {
NewSession key.Binding
Cancel key.Binding
ToggleTools key.Binding
NewSession key.Binding
Cancel key.Binding
ToggleTools key.Binding
}
var keyMap = ChatKeyMap{
@ -74,16 +74,17 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
p.session = msg
case chat.CompactSessionMsg:
if p.session.ID == "" {
return p, util.ReportWarn("No active session to compact.")
status.Warn("No active session to compact.")
return p, nil
}
// Run compaction in background
go func(sessionID string) {
err := p.app.CoderAgent.CompactSession(context.Background(), sessionID)
if err != nil {
logging.ErrorPersist(fmt.Sprintf("Compaction failed: %v", err))
status.Error(fmt.Sprintf("Compaction failed: %v", err))
} else {
logging.InfoPersist("Conversation compacted successfully.")
status.Info("Conversation compacted successfully.")
}
}(p.session.ID)
@ -130,13 +131,14 @@ func (p *chatPage) sendMessage(text string, attachments []message.Attachment) te
if p.session.ID == "" {
newSession, err := p.app.Sessions.Create(context.Background(), "New Session")
if err != nil {
return util.ReportError(err)
status.Error(err.Error())
return nil
}
p.session = newSession
// Update the current session in the session manager
session.SetCurrentSession(newSession.ID)
cmd := p.setSidebar()
if cmd != nil {
cmds = append(cmds, cmd)
@ -146,7 +148,8 @@ func (p *chatPage) sendMessage(text string, attachments []message.Attachment) te
_, err := p.app.CoderAgent.Run(context.Background(), p.session.ID, text, attachments...)
if err != nil {
return util.ReportError(err)
status.Error(err.Error())
return nil
}
return tea.Batch(cmds...)
}


@ -12,6 +12,7 @@ import (
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/permission"
"github.com/opencode-ai/opencode/internal/pubsub"
"github.com/opencode-ai/opencode/internal/status"
"github.com/opencode-ai/opencode/internal/tui/components/chat"
"github.com/opencode-ai/opencode/internal/tui/components/core"
"github.com/opencode-ai/opencode/internal/tui/components/dialog"
@ -154,10 +155,8 @@ func (a appModel) Init() tea.Cmd {
cmds = append(cmds, func() tea.Msg {
shouldShow, err := config.ShouldShowInitDialog()
if err != nil {
return util.InfoMsg{
Type: util.InfoTypeError,
Msg: "Failed to check init status: " + err.Error(),
}
status.Error("Failed to check init status: " + err.Error())
return nil
}
return dialog.ShowInitDialogMsg{Show: shouldShow}
})
@ -201,54 +200,11 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
a.initDialog.SetSize(msg.Width, msg.Height)
return a, tea.Batch(cmds...)
// Status
case util.InfoMsg:
s, cmd := a.status.Update(msg)
a.status = s.(core.StatusCmp)
cmds = append(cmds, cmd)
return a, tea.Batch(cmds...)
case pubsub.Event[logging.LogMessage]:
if msg.Payload.Persist {
switch msg.Payload.Level {
case "error":
s, cmd := a.status.Update(util.InfoMsg{
Type: util.InfoTypeError,
Msg: msg.Payload.Message,
TTL: msg.Payload.PersistTime,
})
a.status = s.(core.StatusCmp)
cmds = append(cmds, cmd)
case "info":
s, cmd := a.status.Update(util.InfoMsg{
Type: util.InfoTypeInfo,
Msg: msg.Payload.Message,
TTL: msg.Payload.PersistTime,
})
a.status = s.(core.StatusCmp)
cmds = append(cmds, cmd)
case "warn":
s, cmd := a.status.Update(util.InfoMsg{
Type: util.InfoTypeWarn,
Msg: msg.Payload.Message,
TTL: msg.Payload.PersistTime,
})
a.status = s.(core.StatusCmp)
cmds = append(cmds, cmd)
default:
s, cmd := a.status.Update(util.InfoMsg{
Type: util.InfoTypeInfo,
Msg: msg.Payload.Message,
TTL: msg.Payload.PersistTime,
})
a.status = s.(core.StatusCmp)
cmds = append(cmds, cmd)
}
}
case util.ClearStatusMsg:
s, _ := a.status.Update(msg)
a.status = s.(core.StatusCmp)
// Permission
case pubsub.Event[logging.LogMessage]:
a.pages[page.LogsPage], cmd = a.pages[page.LogsPage].Update(msg)
cmds = append(cmds, cmd)
case pubsub.Event[permission.PermissionRequest]:
a.showPermissions = true
return a, a.permissions.SetPermissions(msg.Payload)
@ -287,7 +243,8 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
case dialog.ThemeChangedMsg:
a.pages[a.currentPage], cmd = a.pages[a.currentPage].Update(msg)
a.showThemeDialog = false
return a, tea.Batch(cmd, util.ReportInfo("Theme changed to: "+msg.ThemeName))
status.Info("Theme changed to: " + msg.ThemeName)
return a, cmd
case dialog.CloseModelDialogMsg:
a.showModelDialog = false
@ -298,10 +255,12 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
model, err := a.app.CoderAgent.Update(config.AgentCoder, msg.Model.ID)
if err != nil {
return a, util.ReportError(err)
status.Error(err.Error())
return a, nil
}
return a, util.ReportInfo(fmt.Sprintf("Model changed to %s", model.Name))
status.Info(fmt.Sprintf("Model changed to %s", model.Name))
return a, nil
case dialog.ShowInitDialogMsg:
a.showInitDialog = msg.Show
@ -315,7 +274,8 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
if cmd.ID == "init" {
// Mark the project as initialized
if err := config.MarkProjectInitialized(); err != nil {
return a, util.ReportError(err)
status.Error(err.Error())
return a, nil
}
return a, cmd.Handler(cmd)
}
@ -323,7 +283,8 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
} else {
// Mark the project as initialized without running the command
if err := config.MarkProjectInitialized(); err != nil {
return a, util.ReportError(err)
status.Error(err.Error())
return a, nil
}
}
return a, nil
@ -343,11 +304,11 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
if msg.Command.Handler != nil {
return a, msg.Command.Handler(msg.Command)
}
return a, util.ReportInfo("Command selected: " + msg.Command.Title)
status.Info("Command selected: " + msg.Command.Title)
return a, nil
case tea.KeyMsg:
switch {
case key.Matches(msg, keys.Quit):
a.showQuit = !a.showQuit
if a.showHelp {
@ -372,10 +333,12 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// Load sessions and show the dialog
sessions, err := a.app.Sessions.List(context.Background())
if err != nil {
return a, util.ReportError(err)
status.Error(err.Error())
return a, nil
}
if len(sessions) == 0 {
return a, util.ReportWarn("No sessions available")
status.Warn("No sessions available")
return a, nil
}
a.sessionDialog.SetSessions(sessions)
a.showSessionDialog = true
@ -386,7 +349,8 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
if a.currentPage == page.ChatPage && !a.showQuit && !a.showPermissions && !a.showSessionDialog && !a.showThemeDialog && !a.showFilepicker {
// Show commands dialog
if len(a.commands) == 0 {
return a, util.ReportWarn("No commands available")
status.Warn("No commands available")
return a, nil
}
a.commandDialog.SetCommands(a.commands)
a.showCommandDialog = true
@ -427,7 +391,8 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
a.showInitDialog = false
// Mark the project as initialized without running the command
if err := config.MarkProjectInitialized(); err != nil {
return a, util.ReportError(err)
status.Error(err.Error())
return a, nil
}
return a, nil
}
@ -466,7 +431,6 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
f, filepickerCmd := a.filepicker.Update(msg)
a.filepicker = f.(dialog.FilepickerCmp)
cmds = append(cmds, filepickerCmd)
}
if a.showFilepicker {
@ -549,8 +513,10 @@ func (a appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
}
s, _ := a.status.Update(msg)
s, cmd := a.status.Update(msg)
cmds = append(cmds, cmd)
a.status = s.(core.StatusCmp)
a.pages[a.currentPage], cmd = a.pages[a.currentPage].Update(msg)
cmds = append(cmds, cmd)
return a, tea.Batch(cmds...)
@ -565,7 +531,8 @@ func (a *appModel) moveToPage(pageID page.PageID) tea.Cmd {
// Allow navigating to logs page even when agent is busy
if a.app.CoderAgent.IsBusy() && pageID != page.LogsPage {
// Don't move to other pages if the agent is busy
return util.ReportWarn("Agent is busy, please wait...")
status.Warn("Agent is busy, please wait...")
return nil
}
return a.moveToPageUnconditional(pageID)
@ -804,13 +771,13 @@ If there are Cursor rules (in .cursor/rules/ or .cursorrules) or Copilot rules (
Handler: func(cmd dialog.Command) tea.Cmd {
// Get the current session from the appModel
if model.currentPage != page.ChatPage {
return util.ReportWarn("Please navigate to a chat session first.")
status.Warn("Please navigate to a chat session first.")
return nil
}
// Return a message that will be handled by the chat page
return tea.Batch(
util.CmdHandler(chat.CompactSessionMsg{}),
util.ReportInfo("Compacting conversation..."))
status.Info("Compacting conversation...")
return util.CmdHandler(chat.CompactSessionMsg{})
},
})


@ -1,8 +1,6 @@
package util
import (
"time"
tea "github.com/charmbracelet/bubbletea"
)
@ -12,44 +10,6 @@ func CmdHandler(msg tea.Msg) tea.Cmd {
}
}
func ReportError(err error) tea.Cmd {
return CmdHandler(InfoMsg{
Type: InfoTypeError,
Msg: err.Error(),
})
}
type InfoType int
const (
InfoTypeInfo InfoType = iota
InfoTypeWarn
InfoTypeError
)
func ReportInfo(info string) tea.Cmd {
return CmdHandler(InfoMsg{
Type: InfoTypeInfo,
Msg: info,
})
}
func ReportWarn(warn string) tea.Cmd {
return CmdHandler(InfoMsg{
Type: InfoTypeWarn,
Msg: warn,
})
}
type (
InfoMsg struct {
Type InfoType
Msg string
TTL time.Duration
}
ClearStatusMsg struct{}
)
func Clamp(v, low, high int) int {
if high < low {
low, high = high, low


@ -3,11 +3,12 @@ package main
import (
"github.com/opencode-ai/opencode/cmd"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/opencode-ai/opencode/internal/status"
)
func main() {
defer logging.RecoverPanic("main", func() {
logging.ErrorPersist("Application terminated due to unhandled panic")
status.Error("Application terminated due to unhandled panic")
})
cmd.Execute()