Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 31 additions & 12 deletions Sources/Fluid/ContentView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -1612,23 +1612,32 @@ struct ContentView: View {

self.clearActiveRecordingMode()

// Show "Transcribing..." state before calling stop() to keep overlay visible.
// The asr.stop() call performs the final transcription which can take a moment
// (especially for slower models like Whisper Medium/Large).
DebugLogger.shared.debug("Showing transcription processing state", source: "ContentView")
self.menuBarManager.setProcessing(true)
NotchOverlayManager.shared.updateTranscriptionText("Transcribing...")
let hadLivePreviewText = self.asr.partialTranscription
.trimmingCharacters(in: .whitespacesAndNewlines)
.isEmpty == false

// Give SwiftUI a chance to render the processing state before we do heavier work
// (ASR finalization + optional AI post-processing).
await Task.yield()
// Only show a processing transition when we already observed spoken text.
// If there was no spoken text, let the overlay disappear immediately on hotkey release.
if hadLivePreviewText {
Comment on lines +1615 to +1621
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Badge Keep the processing overlay for non-streaming recordings

hadLivePreviewText is only true when a streaming partial has already arrived, but ASRService.start() only produces those updates for supportsStreaming models and very short utterances can end before the first chunk completes. In those cases this new guard skips menuBarManager.setProcessing(true) before await asr.stop(), so MenuBarManager.handleOverlayState() hides the overlay as soon as isRunning flips to false and users lose all Transcribing…/Refining… feedback during the slowest part of the flow. This regresses Whisper Medium/Large and short dictations compared with the previous always-visible processing state.

Useful? React with πŸ‘ / πŸ‘Ž.

if NotchOverlayManager.shared.isBottomOverlayVisible {
BottomOverlayWindowController.shared.beginReleaseTransition()
}

DebugLogger.shared.debug("Showing transcription processing state", source: "ContentView")
self.menuBarManager.setProcessing(true)
NotchOverlayManager.shared.updateTranscriptionText("Transcribing...")

// Give SwiftUI a chance to render the processing state before heavier work.
await Task.yield()
}

// Stop the ASR service and wait for transcription to complete
// The processing indicator will stay visible during this phase
let transcribedText = await asr.stop()

// Reset the transcription text display after transcription completes
NotchOverlayManager.shared.updateTranscriptionText("")
// Reset transient status text if we showed it.
if hadLivePreviewText {
NotchOverlayManager.shared.updateTranscriptionText("")
}

guard transcribedText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty == false else {
DebugLogger.shared.debug("Transcription returned empty text", source: "ContentView")
Expand All @@ -1644,6 +1653,11 @@ struct ContentView: View {
promptTest.lastOutputText = ""
promptTest.lastError = ""

let overlayStillVisible = NotchOverlayManager.shared.isBottomOverlayVisible || NotchOverlayManager.shared.isAnyNotchVisible
if !hadLivePreviewText, overlayStillVisible {
self.menuBarManager.setProcessing(true)
}

guard DictationAIPostProcessingGate.isConfigured() else {
promptTest.lastError = "AI post-processing is not configured. Enable AI Enhancement and configure a provider/model (and API key for non-local endpoints)."
self.menuBarManager.setProcessing(false)
Expand Down Expand Up @@ -1702,6 +1716,11 @@ struct ContentView: View {
if shouldUseAI {
DebugLogger.shared.debug("Routing transcription through AI post-processing", source: "ContentView")

let overlayStillVisible = NotchOverlayManager.shared.isBottomOverlayVisible || NotchOverlayManager.shared.isAnyNotchVisible
if !hadLivePreviewText, overlayStillVisible {
self.menuBarManager.setProcessing(true)
}

// Update overlay text to show we're now refining (processing already true)
NotchOverlayManager.shared.updateTranscriptionText("Refining...")

Expand Down
4 changes: 2 additions & 2 deletions Sources/Fluid/Services/MenuBarManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -94,10 +94,10 @@ final class MenuBarManager: ObservableObject {
asrService.$partialTranscription
.receive(on: DispatchQueue.main)
.sink { [weak self] newText in
guard self != nil else { return }
guard let self else { return }
// CRITICAL FIX: Check if streaming preview is enabled before updating notch
// The "Show Live Preview" toggle in Preferences should control this behavior
if SettingsStore.shared.enableStreamingPreview {
if SettingsStore.shared.enableStreamingPreview, !self.isProcessingActive {
NotchOverlayManager.shared.updateTranscriptionText(newText)
}
}
Expand Down
4 changes: 2 additions & 2 deletions Sources/Fluid/Services/TypingService.swift
Original file line number Diff line number Diff line change
Expand Up @@ -47,11 +47,11 @@ final class TypingService {
/// if the layout data is unavailable.
private static func virtualKeyCode(for character: Character, qwertyFallback: CGKeyCode) -> CGKeyCode {
if Thread.isMainThread {
return tisLookup(for: character, qwertyFallback: qwertyFallback)
return self.tisLookup(for: character, qwertyFallback: qwertyFallback)
}
var result = qwertyFallback
DispatchQueue.main.sync {
result = tisLookup(for: character, qwertyFallback: qwertyFallback)
result = self.tisLookup(for: character, qwertyFallback: qwertyFallback)
}
return result
}
Expand Down
Loading
Loading