sdks/swift/README.md
An easy-to-install Swift package for connecting to Omi devices. Get started in seconds with local Whisper-based transcription - no cloud API required.
<CardGroup cols={3}> <Card title="Swift Package" icon="swift"> Native iOS/macOS support </Card> <Card title="Local Transcription" icon="microphone"> Whisper runs on-device </Card> <Card title="Simple API" icon="code"> Connect in minutes </Card> </CardGroup>

Get transcription working in 2 minutes:
<Steps> <Step title="Copy This Code"> Replace your `ViewController.swift` with:

```swift
import UIKit
import omi_lib
/// Minimal example: scan for an Omi device, connect, and stream live
/// transcription to the Xcode console. There is intentionally no UI.
class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        lookForDevice()
    }

    /// Starts a scan for nearby Omi devices and connects to the first one reported.
    func lookForDevice() {
        // Log once when scanning begins, not on every scan callback.
        print("starting scan")
        // [weak self]: OmiManager retains this escaping callback; a strong
        // capture would keep this view controller alive forever.
        OmiManager.startScan { [weak self] device, error in
            guard let self else { return }
            if let device = device {
                print("got device ", device)
                // Stop scanning as soon as we have a device to talk to.
                OmiManager.endScan()
                self.connectToOmiDevice(device: device)
            }
        }
    }

    /// Connects to the given device, then wires up live transcription
    /// and automatic reconnection.
    func connectToOmiDevice(device: Device) {
        OmiManager.connectToDevice(device: device)
        listenToLiveTranscript(device: device)
        reconnectIfDisconnects()
    }

    /// Restarts the scan whenever the connection drops.
    /// NOTE(review): each reconnect cycle registers another connectionUpdated
    /// callback — confirm OmiManager replaces (rather than accumulates)
    /// handlers, or guard against duplicate registration.
    func reconnectIfDisconnects() {
        OmiManager.connectionUpdated { [weak self] connected in
            if !connected {
                self?.lookForDevice()
            }
        }
    }

    /// Prints each transcription update as it arrives.
    func listenToLiveTranscript(device: Device) {
        OmiManager.getLiveTranscription(device: device) { transcription in
            print("transcription:", transcription ?? "no transcription")
        }
    }
}
```
<Note>
There's no UI in this example - transcription appears in the Xcode logs.
</Note>
| Method | Description |
|---|---|
| `startScan(callback)` | Start scanning for Omi devices |
| `endScan()` | Stop scanning |
| `connectToDevice(device)` | Connect to a discovered device |
| `connectionUpdated(callback)` | Monitor connection state changes |
| `getLiveTranscription(device, callback)` | Receive real-time transcription |
| `getLiveAudio(device, callback)` | Receive audio file URLs |