I'm playing with some example code for the RoomPlan API, trying to combine it with SwiftUI. While I was able to use RoomCaptureView in SwiftUI, I'm struggling to get the more flexible RoomCaptureSession running together with an ARSession. The compiler says "missing argument for parameter 'frame' in call", but I rather suspect I have mixed up the delegation.
This is based on an example at it-jim.com.
Some hints would be very appreciated!
The Delegate:
import Foundation
import ARKit
import RealityKit
import RoomPlan
/// An ARView that drives a RoomPlan `RoomCaptureSession` and re-renders the
/// detected walls and objects as RealityKit content on every room update.
class CustomCaptureView: ARView, RoomCaptureSessionDelegate {
    // Fix for the compiler error "Missing argument for parameter 'frame' in
    // call": ARView has no zero-argument initializer visible here, so the
    // singleton must be created with an explicit frame.
    static var shared = CustomCaptureView(frame: .zero)

    let captureSession: RoomCaptureSession = RoomCaptureSession()
    // "\[.beautifyObjects\]" in the original was a markdown-escaping artifact;
    // Swift array literals use plain brackets.
    let roomBuilder: RoomBuilder = RoomBuilder(options: [.beautifyObjects])
    // NOTE(review): this property is never read in the visible code — the
    // session's delegate is set to `self` in initSession(). Kept so existing
    // callers that assign it still compile; consider removing it.
    var delegate: RoomCaptureSessionDelegate?

    required init(frame: CGRect) {
        super.init(frame: frame)
        initSession()
    }

    // "#MainActor" in the original was a mangled "@MainActor".
    @MainActor required dynamic init?(coder decoder: NSCoder) {
        super.init(coder: decoder)
        initSession()
    }

    /// Wires the RoomPlan capture session into this ARView so that both
    /// share the same underlying ARSession.
    func initSession() {
        self.cameraMode = .ar
        captureSession.delegate = self
        self.session = captureSession.arSession
    }

    /// RoomCaptureSessionDelegate: clears all anchors and redraws a box for
    /// every wall and object in the updated room. Dispatched to the main
    /// queue because it mutates the RealityKit scene.
    func captureSession(_ session: RoomCaptureSession, didUpdate: CapturedRoom) {
        DispatchQueue.main.async {
            self.scene.anchors.removeAll()
            for wall in didUpdate.walls {
                self.drawBox(scene: self.scene, dimensions: wall.dimensions, transform: wall.transform, confidence: wall.confidence)
            }
            for object in didUpdate.objects {
                self.drawBox(scene: self.scene, dimensions: object.dimensions, transform: object.transform, confidence: object.confidence)
            }
        }
    }

    /// Placeholder: draws a colored box representing one detected surface or object.
    func drawBox(scene: Scene, dimensions: simd_float3, transform: float4x4, confidence: CapturedRoom.Confidence) {
        // some colored boxes replacing detected objects
    }

    func captureSession(_ session: RoomCaptureSession, didEndWith data: CapturedRoomData, error: (Error)?) {}
}
And the ContentView:
import SwiftUI
import RoomPlan
import ARKit
import RealityKit
/// Bridges the shared CustomCaptureView (an ARView subclass) into SwiftUI.
struct RoomCaptureViewRep : UIViewRepresentable {
    func makeUIView(context: Context) -> CustomCaptureView {
        // Bug fix: the declared return type is CustomCaptureView, but the
        // original returned `CustomCaptureView.shared.captureSession`, which
        // is a RoomCaptureSession — a type mismatch. Return the view itself.
        CustomCaptureView.shared
    }
    func updateUIView(_ uiView: UIViewType, context: Context) {}
}
/// Wraps UIActivityViewController (the system share sheet) for SwiftUI.
struct ActivityViewControllerRep: UIViewControllerRepresentable {
    var items: [Any]
    var activities: [UIActivity]? = nil

    // Creates the UIKit view controller inside the SwiftUI context.
    func makeUIViewController(context: UIViewControllerRepresentableContext<ActivityViewControllerRep>) -> UIActivityViewController {
        UIActivityViewController(activityItems: items, applicationActivities: activities)
    }

    func updateUIViewController(_ uiViewController: UIActivityViewController, context: UIViewControllerRepresentableContext<ActivityViewControllerRep>) { }
}
// The actual scan view: hosts the room-capture representable full screen.
struct ContentView: View {
    var body: some View { RoomCaptureViewRep() }
}
Related
I have followed two tutorials on UIViewRepresentable and thought the following would work, yet it didn't and I think my situation is more complex than in the tutorials.
Hello, I am trying to turn this code
import SpriteKit
import AVFoundation
/// Presents an SKScene with an alpha-channel movie overlaid as an SKVideoNode,
/// looping the video by rewinding when it reaches the end.
class ViewController: NSViewController {
    // "#IBOutlet" in the original was a mangled "@IBOutlet".
    @IBOutlet var skView: SKView!
    var videoPlayer: AVPlayer!

    override func viewDidLoad() {
        super.viewDidLoad()
        if let view = self.skView {
            // Load the SKScene from 'backgroundScene.sks'
            guard let scene = SKScene(fileNamed: "backgroundScene") else {
                print("Could not create a background scene")
                return
            }
            // Set the scale mode to scale to fit the window
            scene.scaleMode = .aspectFill
            // Present the scene
            view.presentScene(scene)
            // Add the video node
            guard let alphaMovieURL = Bundle.main.url(forResource: "camera_city_animated", withExtension: "mov") else {
                print("Failed to overlay alpha movie on the background")
                return
            }
            videoPlayer = AVPlayer(url: alphaMovieURL)
            let video = SKVideoNode(avPlayer: videoPlayer)
            video.size = CGSize(width: view.frame.width, height: view.frame.height)
            // Bug fix: Swift's print() does not interpret printf-style "%f"
            // placeholders; use string interpolation instead.
            print("Video size is \(video.size.width) x \(video.size.height)")
            scene.addChild(video)
            // Play video and loop it: .none keeps the item alive at the end,
            // and the notification rewinds it (see playerItemDidReachEnd).
            videoPlayer.play()
            videoPlayer?.actionAtItemEnd = .none
            NotificationCenter.default.addObserver(self,
                selector: #selector(playerItemDidReachEnd(notification:)),
                name: .AVPlayerItemDidPlayToEndTime,
                object: videoPlayer?.currentItem)
        }
    }

    // "#objc" in the original was a mangled "@objc".
    /// Rewinds the finished player item to the start so the video loops.
    @objc func playerItemDidReachEnd(notification: Notification) {
        if let playerItem = notification.object as? AVPlayerItem {
            playerItem.seek(to: CMTime.zero, completionHandler: nil)
        }
    }
}
Into a SwiftUI View by placing it inside the func makeUIView(context: Context) -> UITextView {} of my struct TransparentVideoLoop: UIViewRepresentable {} struct.
What am I missing?
Full code:
/// Working rewrite: the original pasted NSViewController internals
/// (`@IBOutlet`, `override func viewDidLoad`) verbatim inside makeUIView,
/// which cannot compile — a UIViewRepresentable must build and return the
/// view itself. The view-controller lifecycle code becomes makeUIView, and
/// the end-of-playback observer lives on the Coordinator (a class, so it can
/// be an @objc notification target).
struct TransparentVideoLoop: UIViewRepresentable {
    func makeCoordinator() -> Coordinator { Coordinator() }

    func makeUIView(context: Context) -> SKView {
        let view = SKView()
        // Load the SKScene from 'backgroundScene.sks'
        guard let scene = SKScene(fileNamed: "backgroundScene") else {
            print("Could not create a background scene")
            return view
        }
        scene.scaleMode = .aspectFill
        view.presentScene(scene)
        // Add the video node
        guard let alphaMovieURL = Bundle.main.url(forResource: "camera_city_animated", withExtension: "mov") else {
            print("Failed to overlay alpha movie on the background")
            return view
        }
        let videoPlayer = AVPlayer(url: alphaMovieURL)
        context.coordinator.videoPlayer = videoPlayer
        let video = SKVideoNode(avPlayer: videoPlayer)
        // NOTE(review): view.frame is .zero at creation time, so sizing from
        // the frame (as the original did) yields a zero-sized node; size from
        // the scene instead — TODO confirm desired layout.
        video.size = scene.size
        scene.addChild(video)
        // Play and loop: .none keeps the item alive at the end, the observer
        // rewinds it.
        videoPlayer.play()
        videoPlayer.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(context.coordinator,
            selector: #selector(Coordinator.playerItemDidReachEnd(notification:)),
            name: .AVPlayerItemDidPlayToEndTime,
            object: videoPlayer.currentItem)
        return view
    }

    func updateUIView(_ uiView: SKView, context: Context) {
    }

    /// Retains the player and rewinds it when playback reaches the end.
    final class Coordinator: NSObject {
        var videoPlayer: AVPlayer?

        @objc func playerItemDidReachEnd(notification: Notification) {
            if let playerItem = notification.object as? AVPlayerItem {
                playerItem.seek(to: CMTime.zero, completionHandler: nil)
            }
        }
    }
}
I have to return the view, but this is more complex than in the tutorials.
Use UIViewControllerRepresentable instead, e.g.
import SwiftUI
/// Wraps UIImagePickerController for SwiftUI; the Coordinator owns the picker
/// and forwards the chosen image through the `imageSelected` callback.
struct ImagePicker: UIViewControllerRepresentable {
    // "#Binding" / "#Environment" in the original were mangled "@" attributes.
    @Binding var selectedImage: UIImage?
    @Environment(\.presentationMode) var presentationMode

    func makeCoordinator() -> ImagePicker.Coordinator {
        Coordinator()
    }

    func updateUIViewController(_ uiViewController: UIViewControllerType, context: Context) {}

    func makeUIViewController(context: Context) -> some UIViewController {
        context.coordinator.imagePicker
    }

    final class Coordinator: NSObject, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
        // Built lazily so `self` can be assigned as delegate.
        lazy var imagePicker: UIImagePickerController = {
            let imagePickerController = UIImagePickerController()
            imagePickerController.sourceType = .photoLibrary
            imagePickerController.delegate = self
            return imagePickerController
        }()

        // NOTE(review): the `selectedImage` binding above is never written in
        // the visible code; results are delivered only via this closure.
        var imageSelected: ((UIImage) -> Void)?

        func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
            DispatchQueue.main.async {
                // Prefer the edited image when editing was enabled.
                if let selectedImage = (info[.editedImage] ?? info[.originalImage]) as? UIImage {
                    self.imageSelected?(selectedImage)
                }
                //self.parent.presentationMode.wrappedValue.dismiss()
            }
        }
    }
}
Note this is inspired by ImagePicker.swift from an Apple sample but the developer got the Coordinator wrong so I have corrected that. It also needs the update func fixed.
I tried using the QuickLook framework.
To support editing PDFs, I implemented "previewController(_: editingModeFor: )" in the Coordinator.
In Xcode(ver 11.6) simulator, quicklook view has pencil markup tool.
In Xcode simulator
But on my iPad (iPadOS 13.6), there is no markup tool.
On my iPad device, iPadOS 13.6
Is there a bug in the QuickLook framework?
Here is my code.
PreviewController.swift
import SwiftUI
import QuickLook
/// Presents a QLPreviewController (wrapped in a navigation controller) for a
/// single file URL, with in-place editing enabled via .updateContents.
struct PreviewController: UIViewControllerRepresentable {
    let url: URL
    // "#Binding" in the original was a mangled "@Binding".
    @Binding var isPresented: Bool

    func makeUIViewController(context: Context) -> UINavigationController {
        let controller = QLPreviewController()
        controller.dataSource = context.coordinator
        controller.delegate = context.coordinator
        // Embedding in a navigation controller gives the preview a toolbar.
        let navigationController = UINavigationController(rootViewController: controller)
        return navigationController
    }

    func updateUIViewController(_ uiViewController: UINavigationController, context: Context) {}

    func makeCoordinator() -> Coordinator {
        return Coordinator(parent: self)
    }

    class Coordinator: NSObject, QLPreviewControllerDelegate, QLPreviewControllerDataSource {
        let parent: PreviewController

        init(parent: PreviewController) {
            self.parent = parent
        }

        // Exactly one item: the parent's URL.
        func numberOfPreviewItems(in controller: QLPreviewController) -> Int {
            return 1
        }

        func previewController(_ controller: QLPreviewController, previewItemAt index: Int) -> QLPreviewItem {
            return parent.url as NSURL
        }

        /*
         Return .updateContents so QLPreviewController takes care of updating the contents of the provided QLPreviewItems whenever users save changes.
         */
        func previewController(_ controller: QLPreviewController, editingModeFor previewItem: QLPreviewItem) -> QLPreviewItemEditingMode {
            return .updateContents
        }
    }
}
ContentView.swift
import SwiftUI
/// Demo view: a button that presents the QuickLook preview sheet.
struct ContentView: View {
    // Force-unwrap is acceptable here: a missing bundled resource is a
    // programmer error that should fail fast.
    let fileUrl = Bundle.main.url(forResource: "LoremIpsum", withExtension: "pdf")!
    // "#State" in the original was a mangled "@State".
    @State private var showingPreview = false

    var body: some View {
        Button("Preview File") {
            self.showingPreview = true
        }
        .sheet(isPresented: $showingPreview) {
            PreviewController(url: self.fileUrl, isPresented: self.$showingPreview)
        }
    }
}
I am trying to build out an application with the ability to use both Apple Pencil and finger input to draw. However, when I set the width of the tool to a sufficiently small number (1), the pencil input displays correctly, but when I use my finger it draws thicker lines. I would like uniform width when switching between pencil and finger. Any help would be appreciated. Below is the minimal reproducible code.
import Foundation
import SwiftUI
import PencilKit
// Hosts the PencilKit canvas full screen.
struct ContentView: View {
    var body: some View { CanvasRepresentable() }
}
/// Wraps PKCanvasView for SwiftUI with a transparent background, finger
/// drawing enabled, and a 1pt black pen.
struct CanvasRepresentable: UIViewRepresentable {
    class Coordinator: NSObject, PKCanvasViewDelegate {
        var parent: CanvasRepresentable

        init(_ parent: CanvasRepresentable) {
            self.parent = parent
        }

        func canvasViewDrawingDidChange(_ canvasView: PKCanvasView) {
        }
    }

    func makeCoordinator() -> Coordinator {
        return Coordinator(self)
    }

    func makeUIView(context: Context) -> PKCanvasView {
        let c = PKCanvasView()
        c.isOpaque = false
        // NOTE(review): allowsFingerDrawing is deprecated in iOS 14+; prefer
        // `c.drawingPolicy = .anyInput` when the deployment target allows.
        c.allowsFingerDrawing = true
        c.delegate = context.coordinator
        c.tool = PKInkingTool(.pen, color: .black, width: 1)
        return c
    }

    func updateUIView(_ uiView: PKCanvasView, context: Context) {
        // Bug fix: the original force-cast (`as!`) crashes as soon as the
        // tool is anything other than a PKInkingTool (e.g. an eraser).
        if let tool = uiView.tool as? PKInkingTool {
            print(tool.width)
        }
    }
}
I am trying to implement auto search for address in my App using SwiftUI. Google tutorial are based on UIKit, I was hoping if someone has tried and can guide me to the right direction. Thanks
Try this code; it works for me.
import Foundation
import UIKit
import SwiftUI
import GooglePlaces
/// Wraps GMSAutocompleteViewController (Google Places address autocomplete)
/// for SwiftUI; writes the picked place's name into the `address` binding
/// and dismisses the sheet.
struct PlacePicker: UIViewControllerRepresentable {
    func makeCoordinator() -> Coordinator {
        Coordinator(self)
    }

    // "#Environment" / "#Binding" in the original were mangled "@" attributes.
    @Environment(\.presentationMode) var presentationMode
    @Binding var address: String

    func makeUIViewController(context: UIViewControllerRepresentableContext<PlacePicker>) -> GMSAutocompleteViewController {
        let autocompleteController = GMSAutocompleteViewController()
        autocompleteController.delegate = context.coordinator
        // Only fetch the fields we actually use (name and placeID).
        // Bug fix: OptionSet union instead of the force-unwrapped rawValue init.
        let fields: GMSPlaceField = [.name, .placeID]
        autocompleteController.placeFields = fields
        // Restrict suggestions to street addresses in South Africa.
        let filter = GMSAutocompleteFilter()
        filter.type = .address
        filter.country = "ZA"
        autocompleteController.autocompleteFilter = filter
        return autocompleteController
    }

    func updateUIViewController(_ uiViewController: GMSAutocompleteViewController, context: UIViewControllerRepresentableContext<PlacePicker>) {
    }

    class Coordinator: NSObject, UINavigationControllerDelegate, GMSAutocompleteViewControllerDelegate {
        var parent: PlacePicker

        init(_ parent: PlacePicker) {
            self.parent = parent
        }

        func viewController(_ viewController: GMSAutocompleteViewController, didAutocompleteWith place: GMSPlace) {
            DispatchQueue.main.async {
                print(place.description.description as Any)
                // Bug fix: place.name is optional; avoid the crash-prone "!".
                self.parent.address = place.name ?? ""
                self.parent.presentationMode.wrappedValue.dismiss()
            }
        }

        func viewController(_ viewController: GMSAutocompleteViewController, didFailAutocompleteWithError error: Error) {
            print("Error: ", error.localizedDescription)
        }

        func wasCancelled(_ viewController: GMSAutocompleteViewController) {
            parent.presentationMode.wrappedValue.dismiss()
        }
    }
}
I'm trying to integrate a UISegmentedControl into my view, which works fine. But I want to update the SwiftUI ContentView when the control's index changes.
For this purpose I want to create a Coordinator for accessing the ContentView's State through a Binding.
I followed the official
WWDC video
(relevant from min 13:37), but it throws out an error saying "Cannot assign to property: '$textString' is immutable" when I try to implement the init function of the Coordinator class.
import SwiftUI
import UIKit
/// Wraps UISegmentedControl for SwiftUI and pushes a string into the bound
/// state whenever the selection changes.
struct SegControl: UIViewRepresentable {
    class Coordinator: NSObject {
        // "#Binding" in the original was a mangled "@Binding".
        @Binding var textString: String

        init(textString: Binding<String>) {
            // Fix for "Cannot assign to property: '$textString' is immutable":
            // initialize the property wrapper's storage via `_textString`,
            // not its projected value `$textString`.
            _textString = textString
        }

        // "#objc" in the original was a mangled "@objc".
        @objc func testFunc() {
            textString = "Updated!"
        }
    }

    @Binding var textString: String

    func makeCoordinator() -> Coordinator {
        return Coordinator(textString: $textString)
    }

    func makeUIView(context: Context) -> UISegmentedControl {
        let control = UISegmentedControl(items: ["One", "Two"])
        // Bug fix: one-time configuration belongs in makeUIView. The original
        // did this in updateUIView, re-adding the target and resetting the
        // selected segment to 0 on every SwiftUI update, which clobbered the
        // user's selection.
        control.selectedSegmentIndex = 0
        control.addTarget(context.coordinator, action: #selector(Coordinator.testFunc), for: .valueChanged)
        return control
    }

    func updateUIView(_ uiView: UISegmentedControl, context: Context) {
    }
}