// NOTE(review): the two lines below are GitHub Gist page chrome that was
// captured when this code was copy-pasted from the browser. They are not
// Swift source; they are preserved here as comments so the file can parse.
// Skip to content
// Instantly share code, notes, and snippets.
import SwiftUI
import MultipeerConnectivity
import Combine
/// Broadcast channel for point (e.g. touch/gaze) feedback events shared across
/// the app. NOTE(review): a module-level PassthroughSubject is a global
/// singleton; consider injecting it into the views that need it instead.
let feedback = PassthroughSubject<CGPoint, Never>()

/// Main-screen bounds, captured once at module load.
/// NOTE(review): `UIScreen.main` is deprecated as of iOS 16 for multi-scene
/// apps; prefer the active window scene's `screen` where one is available —
/// TODO confirm deployment target before relying on this.
let screenSize: CGRect = UIScreen.main.bounds

/// Width/height derived from `screenSize` rather than re-querying
/// `UIScreen.main.bounds`, so all three constants are guaranteed consistent
/// with the same snapshot of the screen geometry.
let screenWidth: CGFloat = screenSize.width
let screenHeight: CGFloat = screenSize.height
import Foundation
import MultipeerConnectivity
import SwiftUI
import os
class MultipeerSession: NSObject, ObservableObject {
static var shared = MultipeerSession()
private let serviceType = "connect"
private let myPeerId = MCPeerID(displayName: UIDevice.current.name)
func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
lastTime = time
if time > spawnTime {
if faceAnchor != nil {
DispatchQueue.main.async { [self] in
self.cubeNode.simdOrientation = faceAnchor!.leftEyeTransform.orientation
let leftOut = faceAnchor?.blendShapes[.eyeLookInLeft]?.doubleValue
let rightOut = faceAnchor?.blendShapes[.eyeLookInRight]?.doubleValue
self.cubeNode.simdOrientation = faceAnchor!.leftEyeTransform.orientation
let newX = (satelliteNode.worldPosition.x * 100).rounded() / 100
let newY = (satelliteNode.worldPosition.y * 100).rounded() / 100
let formatter = NumberFormatter()
formatter.numberStyle = .decimal
formatter.maximumFractionDigits = 2
let number = NSNumber(value: newX)
class Coordinator: NSObject, ARSCNViewDelegate, ARSessionDelegate {
private var trackingView:ARSCNView
private var sphereNode: SCNNode!
private var cubeNode: SCNNode!
init(_ view: ARSCNView) {
self.trackingView = view
super.init()
struct PassForm: View {
@ObservedObject var model: PassModel
@State private var buttonIsDisabled = true
var body: some View {
VStack {
Text("Password Form")
.font(.headline)
struct ContentView: View {
@State var image:UIImage!
var body: some View {
if image == nil {
TextView()
.onTapGesture {
image = TextView().snapshot()
let fileManager = FileManager.default
let cacheDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask).first
struct ContentView: View {
@State private var sourceType: UIImagePickerController.SourceType = .camera
@State private var selectedImage: UIImage?
@State private var isImagePickerDisplay = false
@State private var sync2 = speaking()
@State var strings:[String] = []
var body: some View {
NavigationView {
struct ImagePickerView: UIViewControllerRepresentable {
@Binding var selectedImage: UIImage?
@Environment(\.presentationMode) var isPresented
var sourceType: UIImagePickerController.SourceType
func makeUIViewController(context: Context) -> UIImagePickerController {
let imagePicker = UIImagePickerController()
imagePicker.sourceType = self.sourceType
imagePicker.delegate = context.coordinator
class speaking: NSObject, AVSpeechSynthesizerDelegate {
var synth:AVSpeechSynthesizer!
override init() {
super.init()
synth = AVSpeechSynthesizer()
}
func speaker(words:[String]) {
for word in words {