Initial implementation of SwiftDBAI
Chat with any SQLite database using natural language. Built on AnyLanguageModel (HuggingFace) for LLM-agnostic provider support and GRDB for SQLite access. Core features: - Auto schema introspection from sqlite_master (zero config) - NL → SQL generation via any AnyLanguageModel provider - Three rendering modes: text summary, data table, Swift Charts - Drop-in DataChatView (SwiftUI) and headless ChatEngine - Operation allowlist with read-only default - Mutation policy with per-table control - ToolExecutionDelegate for destructive operation confirmation - Multi-turn conversation context - 352 tests across 24 suites, all passing Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
49
Tests/SwiftDBAITests/Helpers/MockLanguageModel.swift
Normal file
49
Tests/SwiftDBAITests/Helpers/MockLanguageModel.swift
Normal file
@@ -0,0 +1,49 @@
|
||||
// MockLanguageModel.swift
|
||||
// SwiftDBAI Tests
|
||||
//
|
||||
// A mock LanguageModel for unit tests that returns canned responses.
|
||||
|
||||
import AnyLanguageModel
|
||||
import Foundation
|
||||
|
||||
/// A stand-in `LanguageModel` whose reply is fixed ahead of time.
///
/// Lets unit tests drive chat flows without contacting a real LLM provider.
struct MockLanguageModel: LanguageModel {
    typealias UnavailableReason = Never

    /// Canned text that every `respond(...)` / `streamResponse(...)` call hands back.
    let responseText: String

    /// Creates a mock that always answers with `responseText`.
    /// - Parameter responseText: The faked reply; defaults to a generic summary.
    init(responseText: String = "Mock summary response.") {
        self.responseText = responseText
    }

    /// Returns a single canned response, decoded into the requested `Generable` type.
    /// - Throws: Whatever `Content.init(_:)` throws when the plain-string payload
    ///   cannot be converted into `Content`.
    func respond<Content>(
        within session: LanguageModelSession,
        to prompt: Prompt,
        generating type: Content.Type,
        includeSchemaInPrompt: Bool,
        options: GenerationOptions
    ) async throws -> LanguageModelSession.Response<Content> where Content: Generable {
        let generated = GeneratedContent(kind: .string(responseText))
        let typed = try Content(generated)
        // NOTE(review): `[][...]` builds an empty ArraySlice — presumably the
        // parameter's declared type in AnyLanguageModel; confirm against the API.
        return LanguageModelSession.Response(
            content: typed,
            rawContent: generated,
            transcriptEntries: [][...]
        )
    }

    /// Streams the same canned content back as an already-complete response stream.
    func streamResponse<Content>(
        within session: LanguageModelSession,
        to prompt: Prompt,
        generating type: Content.Type,
        includeSchemaInPrompt: Bool,
        options: GenerationOptions
    ) -> sending LanguageModelSession.ResponseStream<Content> where Content: Generable {
        let generated = GeneratedContent(kind: .string(responseText))
        // `try!` is deliberate: this method cannot throw, and a decode failure
        // here means the test itself is misconfigured, so crashing is desired.
        let typed = try! Content(generated)
        return LanguageModelSession.ResponseStream(content: typed, rawContent: generated)
    }
}
|
||||
Reference in New Issue
Block a user