I am trying to create the model from the @main App, where the model depends on a repository with an init function. The repository has URLSession and baseURL properties. I have passed the required properties in both approaches.
Here is the first approach I tried, based on Xcode suggestions:
@main
struct HomwWorkWithSwiftUIApp: App {
@StateObject var model = FruitsModel(fruitRepository: FruitsRepository.self as! FruitsRepository)
var body: some Scene {
WindowGroup {
ContentView().environmentObject(model)
}
}
}
As a result, it crashed at run time with the error Thread 1: signal SIGABRT.
The second approach is passing the required parameters like this:
@main
struct HomwWorkWithSwiftUIApp: App {
@StateObject var model = FruitsModel(fruitRepository: RealFruitsRepository(session: URLSession, baseURL: EndPoint.baseUrl))
var body: some Scene {
WindowGroup {
ContentView().environmentObject(model)
}
}
}
It gives the error: Cannot convert value of type 'URLSession.Type' to expected argument type 'URLSession'
Here is my attempt at passing a URLSession instance:
@main
struct HomwWorkWithSwiftUIApp: App {
init() {
}
var url : URLSession
init(url: URLSession) {
self.url = url
}
@StateObject var model = FruitsModel(fruitRepository: RealFruitsRepository(session: url, baseURL: EndPoint.baseUrl))
var body: some Scene {
WindowGroup {
ContentView().environmentObject(model)
}
}
}
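For reference, a minimal sketch of an initialization that compiles, assuming URLSession.shared is an acceptable session (EndPoint.baseUrl is taken from the post); the key point is that the repository needs a URLSession instance, not the URLSession type:
@main
struct HomwWorkWithSwiftUIApp: App {
    // RealFruitsRepository needs an instance of URLSession, not the type;
    // URLSession.shared here is an assumption, any configured session works.
    @StateObject private var model = FruitsModel(
        fruitRepository: RealFruitsRepository(session: .shared,
                                              baseURL: EndPoint.baseUrl))
    var body: some Scene {
        WindowGroup {
            ContentView().environmentObject(model)
        }
    }
}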
Here is the repository code:
import Foundation
protocol FruitsRepository: WebRepository {
func loadFruits() async throws -> [Fruits]
}
struct RealFruitsRepository: FruitsRepository {
let session: URLSession
let baseURL: String
init(session: URLSession, baseURL: String) {
self.session = session
self.baseURL = baseURL
}
func loadFruits() async throws -> [Fruits] {
guard let request = try? API.allFruits.urlRequest(baseURL: baseURL) else {
throw APIError.invalidURL
}
guard let data = try? await call(request: request) else {
throw APIError.unexpectedResponse
}
guard let fruits = getDecodedFruitsResponse(from: data) else {
throw APIError.unexpectedResponse
}
return fruits
}
private func getDecodedFruitsResponse(from data: Data) -> [Fruits]? {
guard let fruits = try? JSONDecoder().decode([Fruits].self, from: data) else {
return nil
}
return fruits
}
}
extension RealFruitsRepository {
enum API {
case allFruits
case fruitDetails(Fruits)
}
}
extension RealFruitsRepository.API: APICall {
var path: String {
switch self {
case .allFruits:
return "/all"
case let .fruitDetails(fruit):
let encodedName = fruit.name.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed)
return "/name/\(encodedName ?? fruit.name)"
}
}
var method: String {
switch self {
case .allFruits, .fruitDetails:
return "GET"
}
}
var headers: [String: String]? {
return ["Accept": "application/json"]
}
func body() throws -> Data? {
return nil
}
}
Here is the model class:
import Foundation
import Combine
protocol FruitsModelInput {
func getFruits() async
}
protocol FruitsModelOutput {
var state: FruitViewStates { get }
var fruitRecordsCount: Int { get }
func getFruit(index: Int)-> Fruits
func getFruitsDetails(for row:Int)-> FruitsDetails
}
struct FruitsDetails {
let genus, name: String
}
final class FruitsModel: ObservableObject {
private var fruitsRepository: FruitsRepository
var fruits: [Fruits] = []
@Published var state: FruitViewStates = .none
private var cancellables:Set<AnyCancellable> = Set()
init(fruitRepository: FruitsRepository) {
self.fruitsRepository = fruitRepository
}
}
extension FruitsModel: FruitsModelOutput {
func getFruitsDetails(for row: Int) -> FruitsDetails {
if fruits.indices.contains(row) { // check both bounds, not just row >= 0
let fruit = fruits[row]
return FruitsDetails(genus: fruit.genus, name: fruit.name)
}
return FruitsDetails(genus: "", name: "")
}
var fruitRecordsCount: Int {
return fruits.count
}
func getFruit(index: Int) -> Fruits {
if fruits.indices.contains(index) {
return (fruits[index])
} else {
return Fruits(genus: "", name: "", id: 0, family: "", order: "", nutritions: Nutritions(carbohydrates: 0.0, protein: 0.0, fat: 0.0, calories: 0, sugar: 0.0))
}
}
}
extension FruitsModel: FruitsModelInput {
func getFruits() async {
state = .showActivityIndicator
do {
fruits = try await fruitsRepository.loadFruits()
self.state = .showFruitList
} catch let error {
fruits = []
print(error)
state = .showError((error as? APIError)?.localizedDescription ?? error.localizedDescription)
}
}
}
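As an aside, a minimal sketch of how a view might drive this model (FruitsListView is a hypothetical name; the model is assumed to arrive through environmentObject as in the App struct above):
import SwiftUI

struct FruitsListView: View {
    @EnvironmentObject var model: FruitsModel

    var body: some View {
        List(0..<model.fruitRecordsCount, id: \.self) { row in
            Text(model.getFruitsDetails(for: row).name)
        }
        .task {
            // Triggers the async load once when the view appears;
            // the @Published state change re-renders the list.
            await model.getFruits()
        }
    }
}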
The code below is from a tutorial; I want to access contacts from my app and am using it as a base.
I need to solve this error to move forward, and I do not know what additional items need initialization. Any help is very much appreciated.
import Contacts
final class ContactsViewModel: ObservableObject {
@Published
var contact: [Contact?]
@Published
var contacts: [Contact?] = []
@Published
var permissionsError: PermissionsError? = .none
init( contact: Contact, contacts: [Contact] , permissionsError: PermissionsError) {
permissions()
} // <-- Error occurs here
func openSettings() {
permissionsError = .none
guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else {return}
if UIApplication.shared.canOpenURL(settingsURL) { UIApplication.shared.open(settingsURL)}
}
func getContacts() {
Contact.fetchAll { [weak self] result in
guard let self = self else { return }
switch result {
case .success(let fetchedContacts):
DispatchQueue.main.async {
self.contacts = fetchedContacts.sorted(by: { $0.lastName < $1.lastName })
}
case .failure(let error):
self.permissionsError = .fetchError(error)
}
}
}
func permissions() {
switch CNContactStore.authorizationStatus(for: .contacts) {
case .authorized:
getContacts()
case .notDetermined, .restricted, .denied :
CNContactStore().requestAccess(for: .contacts) { [weak self] granted, error in
switch granted {
case true: self?.getContacts()
case false:
DispatchQueue.main.async {
self?.permissionsError = .userError
}
}
}
}
}
}
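For what it's worth, a Swift initializer must assign every non-optional stored property before returning, and the init above never assigns contact; a minimal sketch of a fixed initializer (the default arguments are assumptions, not the tutorial's code):
init(contact: [Contact?] = [],
     contacts: [Contact?] = [],
     permissionsError: PermissionsError? = .none) {
    // Assign all stored properties first, then do other work.
    self.contact = contact
    self.contacts = contacts
    self.permissionsError = permissionsError
    permissions()
}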
I am classifying images per frame from the ARSession delegate with the Vision framework and Core ML in an Augmented Reality app, using ARKit and RealityKit. While processing one frame.capturedImage I do not request another frame.capturedImage, for performance reasons.
The camera is not giving a smooth experience; it gets stuck from time to time. It seems like frames are being dropped.
And I am getting this warning:
[Session] ARSession <0x122cc3710>: ARSessionDelegate is retaining 14 ARFrames. This can lead to future camera frames being dropped.
My code:
import Foundation
import SwiftUI
import RealityKit
import ARKit
import CoreML
struct ARViewContainer: UIViewRepresentable {
var errorFunc: ()->Void
var frameUpdateFunc: ()->Void
@Binding var finalLabel:String
func makeUIView(context: Context) -> ARView {
let arView = ARView(frame: .zero)
let config = ARWorldTrackingConfiguration()
config.planeDetection = [.horizontal,.vertical]
config.environmentTexturing = .automatic
if ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh){
config.sceneReconstruction = .mesh
}
arView.session.delegate = context.coordinator
arView.session.run(config)
context.coordinator.myView = arView
return arView
}
func updateUIView(_ uiView: ARView, context: Context) {
}
func makeCoordinator() -> Coordinator {
Coordinator(finalLabel: $finalLabel, self, funct: self.errorFunc, frameUpdateFunc: self.frameUpdateFunc)
}
class Coordinator: NSObject, ARSessionDelegate {
var objectDetectionService = ObjectDetectionService()
var myView:ARView?
@Binding var finalLabel:String
var parent: ARViewContainer
var efunc:()->Void
var frameUpdateFunc:()->Void
var isLoopShouldContinue = true
var lastLocation: SCNVector3?
//let model = try? MobileNetV2(configuration: .init())
private let classifier = VisionClasifier(mlModel: try? MobileNetV2(configuration: .init()).model)
private var currentBuffer: CVPixelBuffer? = nil
init(finalLabel:Binding<String>,_ arView: ARViewContainer,funct: @escaping ()->Void, frameUpdateFunc: @escaping ()->Void) {
parent = arView
self.efunc = funct
self.frameUpdateFunc = frameUpdateFunc
_finalLabel = finalLabel
}
func session(_ session: ARSession, didFailWithError error: Error) {
//print("Error Tanvir: ",error)
self.efunc()
}
func session(_ session: ARSession, didUpdate frame: ARFrame) {
if isLoopShouldContinue{
self.classifyFrame(currentFrame: frame)
}
let transform = SCNMatrix4(frame.camera.transform)
let orientation = SCNVector3(-transform.m31, -transform.m32, transform.m33)
let location = SCNVector3(transform.m41, transform.m42, transform.m43)
let currentPositionOfCamera = orientation + location
if let lastLocation = lastLocation {
let speed = (lastLocation - currentPositionOfCamera).length()
isLoopShouldContinue = speed < 0.0025
}
lastLocation = currentPositionOfCamera
}
// When ARKit detects a new anchor, it will add it to the ARSession
// Whenever there is a newly added ARAnchor, you will get that anchor here.
// In this short tutorial, we will target the ARPlaneAnchor, and use the information stored
// in that anchor for visualization.
func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
guard let myView = myView else {
return
}
for anchor in anchors {
if anchor is ARPlaneAnchor {
let planeAnchor = anchor as! ARPlaneAnchor
//addPlaneEntity(with: planeAnchor, to: myView)
}
}
}
// ARKit will automatically track and update the ARPlaneAnchor.
// We use that anchor to update the `skin` of the plane.
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
guard let myView = myView else {
return
}
for anchor in anchors {
if anchor is ARPlaneAnchor {
let planeAnchor = anchor as! ARPlaneAnchor
//updatePlaneEntity(with: planeAnchor, in: myView)
}
}
}
// When ARKit removes an anchor from the ARSession, you will get the removed
// anchor here.
func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
guard let myView = myView else {
return
}
for anchor in anchors {
if anchor is ARPlaneAnchor {
let planeAnchor = anchor as! ARPlaneAnchor
//removePlaneEntity(with: planeAnchor, from: myView)
}
}
}
func addAnnotation(rectOfInterest rect: CGRect, text: String,width:Float,height:Float) {
let point = CGPoint(x: rect.midX, y: rect.midY)
print("point:", point)
//let scnHitTestResults = myView.hitTest(point,
// options: [SCNHitTestOption.searchMode: SCNHitTestSearchMode.all.rawValue])
//guard !scnHitTestResults.contains(where: { $0.node.name == BubbleNode.name }) else { return }
let raycastResult = myView!.raycast(from: point, allowing: .estimatedPlane, alignment: .any)
// guard let raycastQuery = myView!.raycastQuery(from: point,
// allowing: .existingPlaneInfinite,
// alignment: .horizontal),
// let raycastResult = myView.session.raycast(raycastQuery).first else { return }
guard let raycastResult = raycastResult.first else{
print("raycast result failed")
return
}
let anchorExists = myView!.scene.anchors.contains(where: {$0.name == text})
guard anchorExists == false else{
print("anchor Already exists")
return
}
let position = raycastResult.worldTransform.columns.3
let myEntity = create2dEntity(with: position, boundingBox: rect, raycastResult: raycastResult,width:width ,height:height)
let planeAnchorEntity = AnchorEntity()
planeAnchorEntity.name = text
planeAnchorEntity.position = simd_make_float3(position)
planeAnchorEntity.addChild(myEntity)
// Finally, add the entity to scene.
myView!.scene.addAnchor(planeAnchorEntity)
print("anchor added: ", planeAnchorEntity.name)
}
func classifyFrame(currentFrame:ARFrame){
//let currentImageName = photos[currentIndex]
// 2
// 3
print("inside Classify")
//print("CurrentBuffer", currentBuffer)
guard self.currentBuffer == nil else {
//print("CurrentBuffer: ",currentBuffer)
//self.finalLabel = "current buffer problem"
return
}
self.currentBuffer = currentFrame.capturedImage
// guard let model = self.model else {
// return "Model not Found."
// }
let img = CIImage(cvImageBuffer: currentFrame.capturedImage)
let cgImage = convertCIImageToCGImage(inputImage: img)
guard let cgImage = cgImage else{
print("can not convert CGImage")
self.finalLabel = "can not convert CGImage"
return
}
objectDetectionService.detect(on: .init(pixelBuffer: currentFrame.capturedImage)) { [weak self] result in
guard let self = self else { return }
switch result {
case .success(let response):
self.finalLabel = response.classification.description
print("Real Width: ",response.boundingBox.width)
let rectOfInterest = VNImageRectForNormalizedRect(
response.boundingBox,
Int(self.myView!.bounds.width),
Int(self.myView!.bounds.height))
self.addAnnotation(rectOfInterest: rectOfInterest, text: response.classification.description,width: Float(response.boundingBox.width),height: Float(response.boundingBox.height))
print("Success:",response.classification.description)
self.currentBuffer = nil
case .failure(let error):
self.finalLabel = "Detection Failed"
print("Detection failure: ",error.localizedDescription)
self.currentBuffer = nil
break
}
}
}
}
}
func convertCIImageToCGImage(inputImage: CIImage) -> CGImage? {
let context = CIContext(options: nil)
if let cgImage = context.createCGImage(inputImage, from: inputImage.extent) {
return cgImage
}
return nil
}
// The ARPlaneAnchor contains the information we need to create the `skin` of the plane.
func addPlaneEntity(with anchor: ARPlaneAnchor, to view: ARView) {
let planeAnchorEntity = AnchorEntity(.plane([.any],
classification: [.any],
minimumBounds: [0.01, 0.01]))
let planeModelEntity = createPlaneModelEntity(with: anchor)
// Give Entity a name for tracking.
planeAnchorEntity.name = anchor.identifier.uuidString + "_anchor"
planeModelEntity.name = anchor.identifier.uuidString + "_model"
// Add ModelEntity as a child of AnchorEntity.
// AnchorEntity handles `position` of the plane.
// ModelEntity handles the `skin` of the plane.
planeAnchorEntity.addChild(planeModelEntity)
// Finally, add the entity to scene.
view.scene.addAnchor(planeAnchorEntity)
}
func create2dEntity(with position: simd_float4, boundingBox: CGRect, raycastResult:ARRaycastResult, width:Float,height:Float ) -> ModelEntity{
var planeMesh: MeshResource
var color: UIColor
print("horizotal plane")
color = UIColor.red.withAlphaComponent(0.5)
print("Constant width: 0.1 but BoundingBox Width: ",boundingBox.width)
planeMesh = .generatePlane(width: 0.1, height: 0.1)
return ModelEntity(mesh: planeMesh, materials: [SimpleMaterial(color: color, roughness: 0.25, isMetallic: false)])
}
func createPlaneModelEntity(with anchor: ARPlaneAnchor) -> ModelEntity {
var planeMesh: MeshResource
var color: UIColor
if anchor.alignment == .horizontal {
print("horizotal plane")
color = UIColor.blue.withAlphaComponent(0.5)
planeMesh = .generatePlane(width: anchor.extent.x, depth: anchor.extent.z)
} else if anchor.alignment == .vertical {
print("vertical plane")
color = UIColor.yellow.withAlphaComponent(0.5)
planeMesh = .generatePlane(width: anchor.extent.x, height: anchor.extent.z)
} else {
fatalError("Anchor is not ARPlaneAnchor")
}
return ModelEntity(mesh: planeMesh, materials: [SimpleMaterial(color: color, roughness: 0.25, isMetallic: false)])
}
func removePlaneEntity(with anchor: ARPlaneAnchor, from arView: ARView) {
guard let planeAnchorEntity = arView.scene.findEntity(named: anchor.identifier.uuidString+"_anchor") else { return }
arView.scene.removeAnchor(planeAnchorEntity as! AnchorEntity)
}
func updatePlaneEntity(with anchor: ARPlaneAnchor, in view: ARView) {
var planeMesh: MeshResource
guard let entity = view.scene.findEntity(named: anchor.identifier.uuidString+"_model") else { return }
let modelEntity = entity as! ModelEntity
if anchor.alignment == .horizontal {
planeMesh = .generatePlane(width: anchor.extent.x, depth: anchor.extent.z)
} else if anchor.alignment == .vertical {
planeMesh = .generatePlane(width: anchor.extent.x, height: anchor.extent.z)
} else {
fatalError("Anchor is not ARPlaneAnchor")
}
modelEntity.model!.mesh = planeMesh
}
import SceneKit
extension SCNVector3 {
func length() -> Float {
return sqrtf(x * x + y * y + z * z)
}
}
func -(l: SCNVector3, r: SCNVector3) -> SCNVector3 {
return SCNVector3Make(l.x - r.x, l.y - r.y, l.z - r.z)
}
func +(l: SCNVector3, r: SCNVector3) -> SCNVector3 {
return SCNVector3(l.x + r.x, l.y + r.y, l.z + r.z)
}
func /(l: SCNVector3, r: Float) -> SCNVector3 {
return SCNVector3(l.x / r, l.y / r, l.z / r)
}
Detection (here is the problem, I guess, in the detect method):
import Foundation
import UIKit
import CoreML
import Vision
import SceneKit
class ObjectDetectionService {
var mlModel = try! VNCoreMLModel(for: YOLOv3Int8LUT().model)
//let model = try? YOLOv3Int8LUT(configuration: .init())
lazy var coreMLRequest: VNCoreMLRequest = {
return VNCoreMLRequest(model: mlModel,
completionHandler: self.coreMlRequestHandler)
}()
private var completion: ((Result<Response, Error>) -> Void)?
func detect(on request: Request, completion: @escaping (Result<Response, Error>) -> Void) {
self.completion = completion
//let orientation = .up
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: request.pixelBuffer)
do {
try imageRequestHandler.perform([coreMLRequest])
} catch {
self.complete(.failure(error))
return
}
}
}
private extension ObjectDetectionService {
func coreMlRequestHandler(_ request: VNRequest?, error: Error?) {
if let error = error {
complete(.failure(error))
return
}
guard let request = request, let results = request.results as? [VNRecognizedObjectObservation] else {
complete(.failure(RecognitionError.resultIsEmpty))
return
}
guard let result = results.first(where: { $0.confidence > 0.8 }),
let classification = result.labels.first else {
complete(.failure(RecognitionError.lowConfidence))
return
}
let response = Response(boundingBox: result.boundingBox,
classification: classification.identifier)
complete(.success(response))
}
func complete(_ result: Result<Response, Error>) {
DispatchQueue.main.async {
self.completion?(result)
self.completion = nil
}
}
}
enum RecognitionError: Error {
case unableToInitializeCoreMLModel
case resultIsEmpty
case lowConfidence
}
extension ObjectDetectionService {
struct Request {
let pixelBuffer: CVPixelBuffer
}
struct Response {
let boundingBox: CGRect
let classification: String
}
}
Why am I getting this warning, and how do I get a smooth camera experience?
The session(_ session: ARSession, didUpdate frame: ARFrame) delegate method is called very frequently, many times per second. If your classifyFrame method does too much work synchronously, it retains the ARFrame until well after the next frame has been delivered to the delegate.
ARKit warns you when too many frames are retained, typically because a queue in your delegate is blocked.
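A minimal sketch of that idea against the code above (visionQueue is an illustrative name, not from the post): keep only the pixel buffer, hand the detection to a serial background queue, and return from the delegate immediately so the ARFrame can be released:
private let visionQueue = DispatchQueue(label: "serial.vision.queue")

func session(_ session: ARSession, didUpdate frame: ARFrame) {
    guard currentBuffer == nil else { return }  // still busy: drop this frame
    currentBuffer = frame.capturedImage         // retain the buffer, not the ARFrame
    visionQueue.async { [weak self] in
        guard let self = self, let buffer = self.currentBuffer else { return }
        self.objectDetectionService.detect(on: .init(pixelBuffer: buffer)) { result in
            // handle `result` as before (labels, annotations), then
            self.currentBuffer = nil            // allow the next frame in
        }
    }
}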
I'm writing a chat application using Firebase, and I notice a memory leak in ChatView while observing database changes, i.e., when a message is sent or received.
When I comment out the database observation, the memory leak does not happen anymore, so I'm guessing this is a Firebase problem.
I'm sharing the code, so if you know what is actually causing the memory leak, please help me out.
ChatViewModel:
class ChatViewModel : ObservableObject {
/// - sub ViewModels :
@Published private(set) var messages : [MessageModel] = []
private(set) var conversationID : String? = nil
/// set shared conversationID
/// - Parameter conversationID: the shared conversationID, if it exists
func setConversationID(conversationID: String?) {
guard let conversationID = conversationID else {
print("CONVERSATION ID DOES NOT EXIST")
return
}
self.conversationID = conversationID
startObservingConversation()
}
/// start observing the conversation with viewModel conversationID
private func startObservingConversation(){
guard let conversationID = self.conversationID else {
return
}
DatabaseManager.shared.observeMessagesForConversation(conversationId: conversationID) { [weak self] message in
self?.messages += message
}
}}
ChatView :
struct ChatView: View {
@StateObject var viewModel = ChatViewModel()
var body: some View {
VStack(alignment : .leading , spacing: 0){
ScrollViewReader { scrollViewReader in
List{
ForEach(viewModel.messages) { item in
MessageView(messsage: item.text)
.id(item.id)
}
}
}
}
}}
observeMessagesForConversation:
func observeMessagesForConversation(conversationId id: String, completion: @escaping ([MessageModel]) -> Void) {
database.child(id).child("messages").observe(.childAdded) { snapshot in
guard let value = snapshot.value as? [String:Any] else {
completion([])
return
}
var messages : [MessageModel] = []
let decoder = JSONDecoder()
guard
let jsonData = try? JSONSerialization.data(withJSONObject:value),
let message = try? decoder.decode(MessageModel.self, from: jsonData) else {
completion([])
return
}
messages.append(message)
completion(messages)
}
}
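As a side note, Firebase .observe(_:with:) handlers stay registered (and retain their closures) until you remove them, which is a classic source of exactly this kind of leak; a minimal sketch of detaching the observer, assuming direct access to the database reference (observationHandle is an illustrative name, not from the original code):
private var observationHandle: DatabaseHandle?

private func startObservingConversation() {
    guard let conversationID = conversationID else { return }
    observationHandle = database.child(conversationID).child("messages")
        .observe(.childAdded) { [weak self] snapshot in
            // decode the snapshot and append to messages, as before
        }
}

deinit {
    // Detach the handler so it no longer retains its closure.
    if let handle = observationHandle, let id = conversationID {
        database.child(id).removeObserver(withHandle: handle)
    }
}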
Here is something that has kept me awake for three days already: I'm writing a little app that connects via Bluetooth to an Arduino. To get visual feedback about the connection state and the transmitted data, I use a view that lets me connect/disconnect and shows me the state and data:
VStack {
Text("Glove Training App")
.font(.title)
HStack {
Button(action: { MyBluetoothManager.shared.scan() }) {
Text("Connect")
.padding(30)
}
Text(" | ")
Button(action: { MyBluetoothManager.shared.disconnect()}) {
Text("Disconnect")
.padding(30)
}
}
Text(manager.stateChange)
.font(.subheadline)
.padding(.bottom, 30)
Text(peripheral.transmittedString)
.font(.subheadline)
.padding(.bottom, 30)
}
}
In a separate file I have all the BT management:
class MyBluetoothManager: NSObject, ObservableObject {
@Published var stateChange: String = "Initializing..." {
willSet { objectWillChange.send() }
}
static let shared = MyBluetoothManager()
let central = CBCentralManager(delegate: MyCentralManagerDelegate.shared,
queue: nil, options: [
CBCentralManagerOptionRestoreIdentifierKey: restoreIdKey,
])
(...)
func setConnected(peripheral: CBPeripheral) {
(...)
state = .connected(peripheral)
self.stateChange = "Connected"
print("Connected")
}
}
class MyPeripheralDelegate: NSObject, ObservableObject, CBPeripheralDelegate {
let objectWillChange = ObservableObjectPublisher()
var transmittedString: String = "No data" {
willSet { objectWillChange.send()
}
}
func peripheral(_ peripheral: CBPeripheral,
didUpdateValueFor characteristic: CBCharacteristic, error: Error?) {
(...)
let rxData = characteristic.value
if let str = NSString(data: rxData!, encoding: String.Encoding.utf8.rawValue) as String? {
print(str)
self.transmittedString = str
let measurement = str.components(separatedBy: "|")
(...)
} else {
print("not a valid UTF-8 sequence")
}
}
}
The values are initially set correctly, but then never updated. In the terminal I can see the printed values, and the app otherwise works as expected. I'm on the latest version of Xcode.
I looked at several tutorials, and this seems to be tricky. Any help would be highly appreciated.
Cheers,
Christian
EDIT: Here is the full MyBluetoothManager class (mostly not my code, but it works fine):
class MyBluetoothManager: NSObject, ObservableObject {
@Published var stateChange: String = "Initializing..." {
willSet { objectWillChange.send() }
}
static let shared = MyBluetoothManager()
let central = CBCentralManager(delegate: MyCentralManagerDelegate.shared,
queue: nil, options: [
CBCentralManagerOptionRestoreIdentifierKey: restoreIdKey,
])
var state = State.poweredOff
enum State {
case poweredOff
case restoringConnectingPeripheral(CBPeripheral)
case restoringConnectedPeripheral(CBPeripheral)
case disconnected
case scanning(Countdown)
case connecting(CBPeripheral, Countdown)
case discoveringServices(CBPeripheral, Countdown)
case discoveringCharacteristics(CBPeripheral, Countdown)
case connected(CBPeripheral)
case outOfRange(CBPeripheral)
var peripheral: CBPeripheral? {
switch self {
case .poweredOff: return nil
case .restoringConnectingPeripheral(let p): return p
case .restoringConnectedPeripheral(let p): return p
case .disconnected: return nil
case .scanning: return nil
case .connecting(let p, _): return p
case .discoveringServices(let p, _): return p
case .discoveringCharacteristics(let p, _): return p
case .connected(let p): return p
case .outOfRange(let p): return p
}
}
}
func scan() {
guard central.state == .poweredOn else {
self.stateChange = "Cannot scan, BT is not powered on"
print("Cannot scan, BT is not powered on")
return
}
central.scanForPeripherals(withServices: [myDesiredServiceId], options: nil)
state = .scanning(Countdown(seconds: 10, closure: {
self.central.stopScan()
self.state = .disconnected
self.stateChange = "Scan timed out"
print("Scan timed out")
}))
}
func disconnect(forget: Bool = false) {
if let peripheral = state.peripheral {
central.cancelPeripheralConnection(peripheral)
}
if forget {
UserDefaults.standard.removeObject(forKey: peripheralIdDefaultsKey)
UserDefaults.standard.synchronize()
}
self.stateChange = "Disconnected"
state = .disconnected
}
func connect(peripheral: CBPeripheral) {
central.connect(peripheral, options: nil)
state = .connecting(peripheral, Countdown(seconds: 10, closure: {
self.central.cancelPeripheralConnection(peripheral)
self.state = .disconnected
self.stateChange = "Connect timed out"
print("Connect timed out")
}))
}
func discoverServices(peripheral: CBPeripheral) {
peripheral.delegate = MyPeripheralDelegate.shared
peripheral.discoverServices([myDesiredServiceId])
state = .discoveringServices(peripheral, Countdown(seconds: 10, closure: {
self.disconnect()
self.stateChange = "Could not discover services"
print("Could not discover services")
}))
}
func discoverCharacteristics(peripheral: CBPeripheral) {
guard let myDesiredService = peripheral.myDesiredService else {
self.disconnect()
return
}
peripheral.delegate = MyPeripheralDelegate.shared
peripheral.discoverCharacteristics([myDesiredCharacteristicId],
for: myDesiredService)
state = .discoveringCharacteristics(peripheral, Countdown(seconds: 10,
closure: {
self.disconnect()
self.stateChange = "Could not discover characteristics"
print("Could not discover characteristics")
}))
}
func setConnected(peripheral: CBPeripheral) {
guard let myDesiredCharacteristic = peripheral.myDesiredCharacteristic
else {
self.stateChange = "Missing characteristic"
print("Missing characteristic")
disconnect()
return
}
UserDefaults.standard.set(peripheral.identifier.uuidString,
forKey: peripheralIdDefaultsKey)
UserDefaults.standard.synchronize()
peripheral.delegate = MyPeripheralDelegate.shared
peripheral.setNotifyValue(true, for: myDesiredCharacteristic)
state = .connected(peripheral)
self.stateChange = "Connected"
print("Connected")
}
}
Button(action: { MyBluetoothManager.shared.scan() }) {
Text("Connect")
.padding(30)
}
Text(" | ")
Button(action: { MyBluetoothManager.shared.disconnect()}) {
Text("Disconnect")
.padding(30)
}
}
Text(manager.stateChange) <- Why don't you use MyBluetoothManager.shared here? Is there a second instance? That might be the error... but unfortunately you showed us only a small piece of code.
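To illustrate that suggestion, a minimal sketch of a view observing the shared instances (the view name is hypothetical; MyPeripheralDelegate.shared is assumed, since the manager code already references it):
struct GloveTrainingView: View {
    // Observe the singletons directly; a second MyBluetoothManager()
    // created elsewhere would publish to a different object, and the
    // Text views here would never update.
    @ObservedObject var manager = MyBluetoothManager.shared
    @ObservedObject var peripheral = MyPeripheralDelegate.shared

    var body: some View {
        VStack {
            Text(manager.stateChange)
            Text(peripheral.transmittedString)
        }
    }
}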