/// Detect whether `text` is likely French, based on common French function
/// words and greetings.
///
/// Tokenizes on non-letter boundaries and matches whole words only. The
/// previous substring approach (`contains(" le ")`, `contains("non")`, …)
/// had two defects this fixes:
/// 1. Unpadded indicators false-positived inside English words —
///    "anonymous" contains "non", "therapeutic" contains "peut".
/// 2. Space-padded indicators missed words at the start or end of the
///    text — "Le chat est noir" never matched " le ".
///
/// Apostrophe contractions (c'est, j'ai, qu'est, n'est, d'un, l'on, …) are
/// matched by elision prefix, generalizing the previous hard-coded list.
///
/// - Parameter text: The message text to classify.
/// - Returns: `true` when at least two *distinct* French indicators occur
///   (same threshold as before: a single match is too weak a signal, since
///   words like "il", "est", "ma" also appear in English names/abbreviations).
private static func detectFrench(_ text: String) -> Bool {
    // Whole-word indicators; compared against lowercased tokens.
    let frenchWords: Set<String> = [
        "le", "la", "les", "un", "une", "des",
        "je", "tu", "il", "elle", "nous", "vous", "ils", "elles",
        "est", "sont", "avoir", "être", "fait", "faire",
        "que", "qui", "quoi", "dans", "pour", "avec", "sur",
        "ce", "cette", "ces", "mon", "ma", "mes",
        "pas", "plus", "très", "bien", "aussi",
        "bonjour", "merci", "salut", "oui", "non", "peut",
        "et", "ou", "mais", "donc", "car",
    ]
    // French elision prefixes: any token starting with one of these
    // (c'est, j'ai, n'est, d'un, l'on, qu'il, …) counts as one indicator.
    let contractionPrefixes = ["qu'", "c'", "j'", "n'", "d'", "l'"]

    // Split on anything that is neither a letter nor an apostrophe, so
    // contractions survive as single tokens ("c'est", "j'ai").
    let tokens = text.lowercased()
        .split { !($0.isLetter || $0 == "'") }
        .map(String.init)

    // Count *distinct* indicators, matching the old semantics where each
    // entry of the indicator list contributed at most once.
    var matched: Set<String> = []
    for token in tokens {
        if frenchWords.contains(token) {
            matched.insert(token)
        } else if let prefix = contractionPrefixes.first(where: { token.hasPrefix($0) }) {
            matched.insert(prefix)
        }
        if matched.count >= 2 { return true }   // early exit once threshold is met
    }
    return false
}
index 495e9e8..e71f30c 100644 --- a/Sources/AppleIntelligenceApp/Views/ChatView.swift +++ b/Sources/AppleIntelligenceApp/Views/ChatView.swift @@ -278,17 +278,7 @@ struct ChatView: View { .foregroundStyle(.secondary) } .buttonStyle(.plain) - .help("Add image") - - Button { - viewModel.addImageFromPasteboard() - } label: { - Image(systemName: "doc.on.clipboard") - .font(.title3) - .foregroundStyle(.secondary) - } - .buttonStyle(.plain) - .help("Paste image from clipboard") + .help("Add image (or paste with ⌘V)") // Language toggle for speech recognition Button { @@ -361,6 +351,26 @@ struct ChatView: View { } } .padding() + .onPasteCommand(of: [.image, .png, .jpeg, .tiff]) { providers in + for provider in providers { + // Try to load as image + if provider.hasItemConformingToTypeIdentifier(UTType.image.identifier) { + provider.loadDataRepresentation(forTypeIdentifier: UTType.image.identifier) { data, _ in + if let data = data { + DispatchQueue.main.async { + let attachment = ImageAttachment(data: data, filename: "pasted_image.png") + if viewModel.pendingImages.count < 5 { + viewModel.pendingImages.append(attachment) + } + } + } + } + return + } + } + // Fallback to pasteboard check + viewModel.addImageFromPasteboard() + } } }