swift-apple-intelligence-grpc/Sources/AppleIntelligenceApp/ViewModels/ChatViewModel.swift
import Foundation
import AppKit
import UniformTypeIdentifiers
import AppleIntelligenceCore

@MainActor
@Observable
final class ChatViewModel {
    var messages: [ChatMessage] = []
    var inputText: String = ""
    var isLoading: Bool = false
    var errorMessage: String?

    // Image attachment state
    var pendingImages: [ImageAttachment] = []

    private var service: AppleIntelligenceService?
    private var currentTask: Task<Void, Never>?

    // Maximum images per message
    private let maxImagesPerMessage = 5

    // Supported image types
    static let supportedImageTypes: [UTType] = [.png, .jpeg, .gif, .webP, .heic]

    // Recent images from Downloads and Desktop
    var recentImages: [URL] = []
    /// Creates the Apple Intelligence service and seeds the recent-images sidebar.
    func initialize() async {
        service = await AppleIntelligenceService()
        loadRecentImages()
    }
    // MARK: - Recent Images

    /// Scans the Downloads and Desktop folders for image files and keeps the
    /// 10 most recently modified ones for the recent-images sidebar.
    func loadRecentImages() {
        let fileManager = FileManager.default
        let homeDir = fileManager.homeDirectoryForCurrentUser
        let folders = [
            homeDir.appendingPathComponent("Downloads"),
            homeDir.appendingPathComponent("Desktop")
        ]
        let imageExtensions = ["png", "jpg", "jpeg", "gif", "webp", "heic", "heif"]

        var allImages: [(url: URL, date: Date)] = []
        for folder in folders {
            guard let contents = try? fileManager.contentsOfDirectory(
                at: folder,
                includingPropertiesForKeys: [.contentModificationDateKey, .isRegularFileKey],
                options: [.skipsHiddenFiles]
            ) else { continue }

            for url in contents {
                let ext = url.pathExtension.lowercased()
                guard imageExtensions.contains(ext) else { continue }

                if let attributes = try? url.resourceValues(forKeys: [.contentModificationDateKey, .isRegularFileKey]),
                   attributes.isRegularFile == true,
                   let modDate = attributes.contentModificationDate {
                    allImages.append((url: url, date: modDate))
                }
            }
        }

        // Sort by modification date descending and keep the 10 most recent
        recentImages = allImages
            .sorted { $0.date > $1.date }
            .prefix(10)
            .map { $0.url }
    }
    func addRecentImage(_ url: URL) {
        addImage(from: url)
    }
    var isServiceAvailable: Bool {
        get async {
            await service?.isAvailable ?? false
        }
    }

    var canSend: Bool {
        !inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty || !pendingImages.isEmpty
    }
    // MARK: - Image Handling

    /// Loads an image from disk and queues it as a pending attachment,
    /// enforcing the per-message image limit.
    func addImage(from url: URL) {
        guard pendingImages.count < maxImagesPerMessage else {
            errorMessage = "Maximum \(maxImagesPerMessage) images per message"
            return
        }
        do {
            let data = try Data(contentsOf: url)
            let attachment = ImageAttachment(data: data, filename: url.lastPathComponent)
            pendingImages.append(attachment)
            errorMessage = nil
        } catch {
            errorMessage = "Failed to load image: \(error.localizedDescription)"
        }
    }
    /// Reads an image from the general pasteboard, converts it to PNG,
    /// and queues it as a pending attachment.
    func addImageFromPasteboard() {
        guard let image = NSPasteboard.general.readObjects(
            forClasses: [NSImage.self],
            options: nil
        )?.first as? NSImage else {
            return
        }
        guard pendingImages.count < maxImagesPerMessage else {
            errorMessage = "Maximum \(maxImagesPerMessage) images per message"
            return
        }
        if let tiffData = image.tiffRepresentation,
           let bitmap = NSBitmapImageRep(data: tiffData),
           let pngData = bitmap.representation(using: .png, properties: [:]) {
            let attachment = ImageAttachment(data: pngData, filename: "pasted_image.png")
            pendingImages.append(attachment)
            errorMessage = nil
        }
    }
    func removePendingImage(_ attachment: ImageAttachment) {
        pendingImages.removeAll { $0.id == attachment.id }
    }

    func clearPendingImages() {
        pendingImages.removeAll()
    }
    // MARK: - Messaging

    func sendMessage() {
        let text = inputText.trimmingCharacters(in: .whitespacesAndNewlines)
        guard !text.isEmpty || !pendingImages.isEmpty else { return }
        guard !isLoading else { return }

        // Capture images before clearing
        let imagesToSend = pendingImages

        // Add user message with images
        let userMessage = ChatMessage(role: .user, content: text, images: imagesToSend)
        messages.append(userMessage)
        inputText = ""
        pendingImages = []
        errorMessage = nil

        // Add placeholder for assistant response
        let assistantMessage = ChatMessage(role: .assistant, content: "", isStreaming: true)
        messages.append(assistantMessage)
        isLoading = true

        currentTask = Task {
            do {
                guard let service = service else {
                    throw AppleIntelligenceError.modelNotAvailable
                }

                // Convert attachments to service format
                let images = imagesToSend.map { attachment in
                    (data: attachment.data, filename: attachment.filename)
                }

                let stream = await service.streamComplete(
                    prompt: text,
                    temperature: nil,
                    maxTokens: nil,
                    images: images
                )

                var fullResponse = ""
                for try await (partialResponse, _) in stream {
                    fullResponse = partialResponse
                    // Update the last message (assistant's response)
                    if let index = messages.lastIndex(where: { $0.role == .assistant }) {
                        messages[index].content = fullResponse
                    }
                }

                // Mark streaming as complete
                if let index = messages.lastIndex(where: { $0.role == .assistant }) {
                    messages[index].isStreaming = false
                }
            } catch {
                errorMessage = error.localizedDescription
                // Remove the empty assistant placeholder on error; if it already has
                // partial content, keep it but stop marking it as streaming
                if let index = messages.lastIndex(where: { $0.role == .assistant && $0.content.isEmpty }) {
                    messages.remove(at: index)
                } else if let index = messages.lastIndex(where: { $0.role == .assistant }) {
                    messages[index].isStreaming = false
                }
            }
            isLoading = false
        }
    }
    func stopGeneration() {
        currentTask?.cancel()
        currentTask = nil
        isLoading = false
        // Mark any streaming message as complete
        if let index = messages.lastIndex(where: { $0.isStreaming }) {
            messages[index].isStreaming = false
        }
    }

    func clearChat() {
        stopGeneration()
        messages.removeAll()
        errorMessage = nil
    }
}
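
// Usage sketch (added for illustration, not part of the original file): one way a
// SwiftUI view could drive this view model. This assumes ChatMessage conforms to
// Identifiable (the id-based removal above suggests it does) and a macOS 14+ target
// so @Observable pairs with @State; the view name and layout are hypothetical.
/*
import SwiftUI

struct ChatView: View {
    @State private var viewModel = ChatViewModel()

    var body: some View {
        // Render the conversation; each row reflects streaming updates automatically
        // because ChatViewModel is @Observable.
        List(viewModel.messages) { message in
            Text(message.content)
        }
        .task { await viewModel.initialize() }
        .safeAreaInset(edge: .bottom) {
            HStack {
                TextField("Message", text: $viewModel.inputText)
                Button("Send") { viewModel.sendMessage() }
                    .disabled(!viewModel.canSend || viewModel.isLoading)
            }
            .padding()
        }
    }
}
*/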