Skip to content

Commit 6119ae9

Browse files
jdishop and philippzagar authored
Swift 6 Complete Concurrency Checking (#85)
Addressing: #54 # Swift 6 - Complete Concurrency Checking ## ♻️ Current situation & Problem Xcode is throwing concurrency warnings for the `SpeziLLM`, `SpeziLLMFog`, `SpeziLLMLocal`, `SpeziLLMLocalDownload`, and `SpeziLLMLocalOpenAI` targets because they currently have concurrency concerns. ## ⚙️ Release Notes * Swift 6 - Enabling Complete Concurrency Checking using SwiftPM * Warnings in `SpeziLLM`, `SpeziLLMFog`, `SpeziLLMLocal` and `SpeziLLMLocalDownload` are resolved. For the remaining targets, it would be best to wait until #64 is merged. ## 📚 Documentation **No additional changes.** ## ✅ Testing Warnings will be resolved after #64 is merged. ## 📝 Code of Conduct & Contributing Guidelines By submitting this pull request, you agree to follow our [Code of Conduct](https://github.com/StanfordSpezi/.github/blob/main/CODE_OF_CONDUCT.md) and [Contributing Guidelines](https://github.com/StanfordSpezi/.github/blob/main/CONTRIBUTING.md): - [x] I agree to follow the [Code of Conduct](https://github.com/StanfordSpezi/.github/blob/main/CODE_OF_CONDUCT.md) and [Contributing Guidelines](https://github.com/StanfordSpezi/.github/blob/main/CONTRIBUTING.md). --------- Co-authored-by: Philipp Zagar <[email protected]>
1 parent 205564f commit 6119ae9

24 files changed

+142
-140
lines changed

Package.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// swift-tools-version:5.9
1+
// swift-tools-version:6.0
22

33
//
44
// This source file is part of the Stanford Spezi open source project

Sources/SpeziLLM/LLMPlatformState.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
/// Describes the current state of the ``LLMPlatform`` which is responsible for sending ``LLMSchema``s to execution via ``LLMSession``s.
1111
///
1212
/// The ``LLMPlatformState`` is quite minimal with only ``LLMPlatformState/idle`` and ``LLMPlatformState/processing`` states.
13-
public enum LLMPlatformState {
13+
public enum LLMPlatformState: Sendable {
1414
/// Indicates that the ``LLMPlatform`` is currently idle and doesn't execute any ``LLMSession``s.
1515
case idle
1616
/// Indicates that the ``LLMPlatform`` is currently processing and executing ``LLMSession``s.

Sources/SpeziLLM/LLMSessionProvider.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ public struct _LLMSessionProvider<Schema: LLMSchema>: DynamicProperty { // s
4545
}
4646

4747
/// Creates a `Binding` to the ``LLMSession`` that one can pass around. Useful for passing the ``LLMSession`` as a `Binding` to the ``LLMChatView``.
48-
public var projectedValue: Binding<Schema.Platform.Session> {
48+
@MainActor public var projectedValue: Binding<Schema.Platform.Session> {
4949
Binding {
5050
wrappedValue
5151
} set: {

Sources/SpeziLLM/Models/LLMContextEntity.swift

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,9 @@ import Foundation
1414
/// A ``LLMContextEntity`` can be thought of as a single message entity within a ``LLMContext``
1515
/// It consists of a ``LLMContextEntity/Role``, a unique identifier, a timestamp in the form of a `Date` as well as an `String`-based ``LLMContextEntity/content`` property which can contain Markdown-formatted text.
1616
/// Furthermore, the ``LLMContextEntity/complete`` flag indicates if the current state of the ``LLMContextEntity`` is final and the content will not be updated anymore.
17-
public struct LLMContextEntity: Codable, Equatable, Hashable, Identifiable {
17+
public struct LLMContextEntity: Codable, Equatable, Hashable, Identifiable, Sendable {
1818
/// Represents a tool call by the LLM, including its parameters
19-
public struct ToolCall: Codable, Equatable, Hashable {
19+
public struct ToolCall: Codable, Equatable, Hashable, Sendable {
2020
/// The ID of the function call, uniquely identifying the specific function call and matching the response to it.
2121
public let id: String
2222
/// The name of the function call.
@@ -39,7 +39,7 @@ public struct LLMContextEntity: Codable, Equatable, Hashable, Identifiable {
3939
}
4040

4141
/// Indicates which ``LLMContextEntity/Role`` is associated with a ``LLMContextEntity``.
42-
public enum Role: Codable, Equatable, Hashable {
42+
public enum Role: Codable, Equatable, Hashable, Sendable {
4343
case user
4444
case assistant(toolCalls: [ToolCall] = [])
4545
case system

Sources/SpeziLLM/Models/LLMState.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ import Foundation
1111
/// Describes possible states that the ``LLMSession`` can be in.
1212
///
1313
/// Based on the ``LLMState``, `SpeziLLM` performs proper actions on the model as well as state management.
14-
public enum LLMState: CustomStringConvertible, Equatable {
14+
public enum LLMState: CustomStringConvertible, Equatable, Sendable {
1515
/// The Spezi ``LLMSession`` is allocated, but the underlying model has not yet been initialized.
1616
case uninitialized
1717
/// The Spezi ``LLMSession`` is in the process of being initialized.

Sources/SpeziLLMFog/Connection/URLSession+CertVerification.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
import Foundation
1010
import os
11-
import Security
11+
@preconcurrency import Security
1212

1313

1414
final class TransportCertificateValidationDelegate: NSObject, URLSessionDelegate {

Sources/SpeziLLMFog/LLMFogError.swift

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -112,13 +112,7 @@ public enum LLMFogError: LLMError {
112112

113113

114114
extension LLMFogSession {
115-
private static let modelNotFoundRegex: Regex = {
116-
guard let regex = try? Regex("model '([\\w:]+)' not found, try pulling it first") else {
117-
preconditionFailure("SpeziLLMFog: Error Regex could not be parsed")
118-
}
119-
120-
return regex
121-
}()
115+
private static let modelNotFoundRegex = "model '([\\w:]+)' not found, try pulling it first"
122116

123117

124118
func handleErrorCode(statusCode: Int, message: String?) -> LLMFogError {
@@ -128,7 +122,7 @@ extension LLMFogSession {
128122
return .invalidAPIToken
129123
case 404:
130124
if let message,
131-
message.contains(Self.modelNotFoundRegex) {
125+
message.range(of: Self.modelNotFoundRegex, options: .regularExpression) != nil {
132126
LLMFogSession.logger.error("SpeziLLMFog: Model could not be accessed, ensure to pull it first on the Ollama fog node: \(message)")
133127
return .modelAccessError(message)
134128
}

Sources/SpeziLLMLocal/LLMLocalSession+Generate.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
import Foundation
1010
import MLX
1111
import MLXLLM
12-
import MLXLMCommon
12+
@preconcurrency import MLXLMCommon
1313
import MLXRandom
1414
import os
1515
import SpeziChat

Sources/SpeziLLMLocalDownload/LLMLocalDownloadManager.swift

Lines changed: 22 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -79,49 +79,53 @@ public final class LLMLocalDownloadManager: NSObject {
7979
}
8080

8181
/// Starts a `URLSessionDownloadTask` to download the specified model.
82+
@MainActor
8283
public func startDownload() async {
8384
if modelExist {
84-
Task { @MainActor in
85-
self.state = .downloaded
86-
}
85+
state = .downloaded
8786
return
8887
}
8988

9089
await cancelDownload()
9190
downloadTask = Task(priority: .userInitiated) {
9291
do {
9392
try await downloadWithHub()
94-
await MainActor.run {
95-
self.state = .downloaded
96-
}
93+
state = .downloaded
9794
} catch {
98-
await MainActor.run {
99-
self.state = .error(
100-
AnyLocalizedError(
101-
error: error,
102-
defaultErrorDescription: LocalizedStringResource("LLM_DOWNLOAD_FAILED_ERROR", bundle: .atURL(from: .module))
95+
state = .error(
96+
AnyLocalizedError(
97+
error: error,
98+
defaultErrorDescription: LocalizedStringResource(
99+
"LLM_DOWNLOAD_FAILED_ERROR",
100+
bundle: .atURL(from: .module)
103101
)
104102
)
105-
}
103+
)
106104
}
107105
}
108106
}
109107

110108
/// Cancels the download of a specified model via a `URLSessionDownloadTask`.
109+
@MainActor
111110
public func cancelDownload() async {
112111
downloadTask?.cancel()
113-
await MainActor.run {
114-
self.state = .idle
115-
}
112+
state = .idle
116113
}
117-
118-
@MainActor
114+
119115
private func downloadWithHub() async throws {
116+
// Sadly, we need this workaround to make the Swift compiler (strict concurrency checking) happy
117+
@MainActor
118+
func mutate(progress: Progress) {
119+
self.state = .downloading(progress: progress)
120+
}
121+
120122
let repo = Hub.Repo(id: model.hubID)
121123
let modelFiles = ["*.safetensors", "config.json"]
122124

123125
try await HubApi.shared.snapshot(from: repo, matching: modelFiles) { progress in
124-
self.state = .downloading(progress: progress)
126+
Task { @MainActor [mutate] in
127+
mutate(progress)
128+
}
125129
}
126130
}
127131
}

Sources/SpeziLLMOpenAI/FunctionCalling/LLMFunctionParameterWrapper+ArrayTypes.swift

Lines changed: 23 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -38,14 +38,14 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: BinaryInteg
3838
"description": String(description),
3939
"items": [
4040
"type": "integer",
41-
"const": const.map { String($0) } as Any?,
42-
"multipleOf": multipleOf as Any?,
43-
"minimum": minimum.map { Double($0) } as Any?,
44-
"maximum": maximum.map { Double($0) } as Any?
41+
"const": const.map { String($0) } as (any Sendable)?,
42+
"multipleOf": multipleOf as (any Sendable)?,
43+
"minimum": minimum.map { Double($0) } as (any Sendable)?,
44+
"maximum": maximum.map { Double($0) } as (any Sendable)?
4545
].compactMapValues { $0 },
46-
"minItems": minItems as Any?,
47-
"maxItems": maxItems as Any?,
48-
"uniqueItems": uniqueItems as Any?
46+
"minItems": minItems as (any Sendable)?,
47+
"maxItems": maxItems as (any Sendable)?,
48+
"uniqueItems": uniqueItems as (any Sendable)?
4949
].compactMapValues { $0 }))
5050
} catch {
5151
preconditionFailure("SpeziLLMOpenAI: Failed to create validated function call schema definition of `LLMFunction/Parameter`: \(error)")
@@ -79,13 +79,13 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: BinaryFloat
7979
"description": String(description),
8080
"items": [
8181
"type": "number",
82-
"const": const.map { String($0) } as Any?,
83-
"minimum": minimum.map { Double($0) } as Any?,
84-
"maximum": maximum.map { Double($0) } as Any?
82+
"const": const.map { String($0) } as (any Sendable)?,
83+
"minimum": minimum.map { Double($0) } as (any Sendable)?,
84+
"maximum": maximum.map { Double($0) } as (any Sendable)?
8585
].compactMapValues { $0 },
86-
"minItems": minItems as Any?,
87-
"maxItems": maxItems as Any?,
88-
"uniqueItems": uniqueItems as Any?
86+
"minItems": minItems as (any Sendable)?,
87+
"maxItems": maxItems as (any Sendable)?,
88+
"uniqueItems": uniqueItems as (any Sendable)?
8989
].compactMapValues { $0 }))
9090
} catch {
9191
preconditionFailure("SpeziLLMOpenAI: Failed to create validated function call schema definition of `LLMFunction/Parameter`: \(error)")
@@ -115,11 +115,11 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element == Bool {
115115
"description": String(description),
116116
"items": [
117117
"type": "boolean",
118-
"const": const.map { String($0) } as Any?
118+
"const": const.map { String($0) } as (any Sendable)?
119119
].compactMapValues { $0 },
120-
"minItems": minItems as Any?,
121-
"maxItems": maxItems as Any?,
122-
"uniqueItems": uniqueItems as Any?
120+
"minItems": minItems as (any Sendable)?,
121+
"maxItems": maxItems as (any Sendable)?,
122+
"uniqueItems": uniqueItems as (any Sendable)?
123123
].compactMapValues { $0 }))
124124
} catch {
125125
preconditionFailure("SpeziLLMOpenAI: Failed to create validated function call schema definition of `LLMFunction/Parameter`: \(error)")
@@ -153,13 +153,13 @@ extension _LLMFunctionParameterWrapper where T: AnyArray, T.Element: StringProto
153153
"description": String(description),
154154
"items": [
155155
"type": "string",
156-
"pattern": pattern.map { String($0) } as Any?,
157-
"const": const.map { String($0) } as Any?,
158-
"enum": `enum`.map { $0.map { String($0) } } as Any?
156+
"pattern": pattern.map { String($0) } as (any Sendable)?,
157+
"const": const.map { String($0) } as (any Sendable)?,
158+
"enum": `enum`.map { $0.map { String($0) } } as (any Sendable)?
159159
].compactMapValues { $0 },
160-
"minItems": minItems as Any?,
161-
"maxItems": maxItems as Any?,
162-
"uniqueItems": uniqueItems as Any?
160+
"minItems": minItems as (any Sendable)?,
161+
"maxItems": maxItems as (any Sendable)?,
162+
"uniqueItems": uniqueItems as (any Sendable)?
163163
].compactMapValues { $0 }))
164164
} catch {
165165
preconditionFailure("SpeziLLMOpenAI: Failed to create validated function call schema definition of `LLMFunction/Parameter`: \(error)")

0 commit comments

Comments (0)