//
//  ViewController.swift
//  CCCB Display
//
//  Created by Dirk Engling on 26.05.23.
//

import UIKit
import AVFoundation
import CoreImage
import Network

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    @IBOutlet weak var frameRateLabel: UILabel!
    @IBOutlet weak var cameraView: UIView!

    var device: AVCaptureDevice?
    var input: AVCaptureDeviceInput?
    var prevLayer: AVCaptureVideoPreviewLayer?

    private let captureSession = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()
    private let sessionQueue = DispatchQueue(label: "sessionQueue")
    private let context = CIContext()

    // var hostUDP: NWEndpoint.Host = "172.23.42.29"
    var hostUDP: NWEndpoint.Host?
    var portUDP: NWEndpoint.Port = 2342
    var connectionUDP: NWConnection?
    var lastTimeStamp: CFTimeInterval = CACurrentMediaTime()

    /* Physical display control packet parameters */
    private let HEADERLEN = 10
    private let WIDTH = 448
    private let HEIGHT = 160
    private let VHEIGHT = 236

    override func viewDidLoad() {
        super.viewDidLoad()

        UserDefaults.standard.addObserver(self, forKeyPath: "Display_Address", options: .new, context: nil)
        constructSocket()

        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized: // the user has already authorized access to the camera
            DispatchQueue.main.async {
                self.createSession()
            }
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                if granted {
                    print("the user has granted access to the camera")
                    DispatchQueue.main.async {
                        self.createSession()
                    }
                } else {
                    print("the user has not granted access to the camera")
                    DispatchQueue.main.async {
                        self.presentCameraAlert()
                    }
                }
            }
        case .denied:
            print("the user has previously denied access to the camera")
            presentCameraAlert()
        case .restricted:
            print("the user can't grant camera access due to a restriction")
            presentCameraAlert()
        default:
            print("something went wrong; we can't access the camera")
            presentCameraAlert()
        }
    }

    private func presentCameraAlert() {
        let dialogMessage = UIAlertController(title: "Attention", message: "Cannot work without camera access", preferredStyle: .alert)
        dialogMessage.addAction(UIAlertAction(title: "OK", style: .default))
        present(dialogMessage, animated: true, completion: nil)
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        prevLayer?.frame.size = cameraView.frame.size
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let now = CACurrentMediaTime()
        let freq = Int(1 / (now - lastTimeStamp))
        // print("Elapsed: \(now - lastTimeStamp) - Frequency: \(1 / (now - lastTimeStamp))")
        lastTimeStamp = now

        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

        let bufferWidth = CVPixelBufferGetWidth(pixelBuffer)
        let bufferHeight = CVPixelBufferGetHeight(pixelBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let kBytesPerPixel = 4 // BGRA
        // print("\(bufferWidth) \(bufferHeight) \(bytesPerRow)")

        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else { return }
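        // One packet drives one full display frame: the 10 byte header below is
        // followed by one bit per visible pixel, packed MSB first, so the payload
        // is WIDTH * HEIGHT / 8 = 448 * 160 / 8 = 8960 bytes (8970 bytes in total).
        // The 0x12 and 0x23 header bytes are kept verbatim from the packet template;
        // their exact meaning is defined by the display firmware's protocol.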
        var packet: [UInt8] = [0, 0x12, 0, 0, 0x23, 0, 0, 0, 0, 0]
        var scratch: [Int] = Array(repeating: 0, count: WIDTH * (2 + VHEIGHT))

        let t1 = CACurrentMediaTime()

        // 160 real rows are interleaved with 19 gaps of 4 pixels height on the display,
        // so we create 20 virtual blocks of 8 real and 4 virtual pixels:
        // we overlay VHEIGHT == 236 virtual rows on the image and later skip the 4 invisible rows
        var off = 0
        for row in 0..<VHEIGHT {
            // scale the virtual row onto a source scan line and copy it into the
            // scratch buffer as 24 bit grayscale (luma widened into the top byte)
            let line = baseAddress + (row * bufferHeight / VHEIGHT) * bytesPerRow
            for column in 0..<WIDTH {
                let p = line + (column * bufferWidth / WIDTH) * kBytesPerPixel
                let b = Int(p.load(fromByteOffset: 0, as: UInt8.self))
                let g = Int(p.load(fromByteOffset: 1, as: UInt8.self))
                let r = Int(p.load(fromByteOffset: 2, as: UInt8.self))
                scratch[off] = ((r + g + b) / 3) << 16
                off += 1
            }
        }

        var acc = 0
        var accv = 0
        for row in 0..<VHEIGHT {
            // rows 8...11 of every 12 row block fall into a physical gap: they take
            // part in the error diffusion but contribute no bits to the packet
            let visible = row % 12 < 8
            for column in 0..<WIDTH {
                let pixel = scratch[row * WIDTH + column]
                let bwpixel = pixel < 0x800000 ? 0 : 0xffffff

                if visible {
                    acc = (acc << 1) | (bwpixel >> 23)
                    accv += 1
                    if accv == 8 {
                        packet.append(UInt8(acc))
                        acc = 0
                        accv = 0
                    }
                }

                let err = (pixel - bwpixel) / 42

                // add the weighted error to a neighbouring scratch pixel, saturating
                // at the 24 bit grayscale range; err << (16 - SHIFT) scales by the
                // kernel weight (shift 13 means * 8, 14 means * 4, and so on)
                func AddSatShift(_ scr: inout [Int], _ X: Int, _ Y: Int, _ SHIFT: Int) {
                    let inner_p = (row + Y) * WIDTH + column + X
                    var r = scr[inner_p] + (err << (16 - SHIFT))
                    if r < 0 { r = 0 }
                    if r > 0xffffff { r = 0xffffff }
                    scr[inner_p] = r
                }

                AddSatShift(&scratch, 0, 1, 13)
                AddSatShift(&scratch, 0, 2, 14)
                if column > 0 {
                    AddSatShift(&scratch, -1, 1, 14)
                    AddSatShift(&scratch, -1, 2, 15)
                }
                if column > 1 {
                    AddSatShift(&scratch, -2, 1, 15)
                    AddSatShift(&scratch, -2, 2, 16)
                }
                if column < WIDTH - 1 {
                    AddSatShift(&scratch, 1, 0, 13)
                    AddSatShift(&scratch, 1, 1, 14)
                    AddSatShift(&scratch, 1, 2, 15)
                }
                if column < WIDTH - 2 {
                    AddSatShift(&scratch, 2, 0, 14)
                    AddSatShift(&scratch, 2, 1, 15)
                    AddSatShift(&scratch, 2, 2, 16)
                }
            }
        }

        let t2 = CACurrentMediaTime()
        // print("dur \(t2 - t1)")
        DispatchQueue.main.async {
            self.frameRateLabel.text = String(format: "%.04f (%d Hz)", t2 - t1, freq)
        }

        connectionUDP?.send(content: Data(packet), completion: .contentProcessed({ error in
            if error == nil {
                print("Data was sent to UDP")
            } else {
                print("ERROR! Sending data failed. NWError: \n \(error!)")
                self.constructSocket()
            }
        }))
    }

    func createSession() {
        guard let device = AVCaptureDevice.default(for: .video) else { return }
        do {
            input = try AVCaptureDeviceInput(device: device)
        } catch {
            print(error)
        }

        captureSession.beginConfiguration()
        captureSession.sessionPreset = .vga640x480
        if let input = input {
            captureSession.addInput(input)
        }

        prevLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        prevLayer?.frame.size = cameraView.frame.size
        prevLayer?.videoGravity = .resizeAspectFill
        cameraView.layer.addSublayer(prevLayer!)

        // request BGRA frames so captureOutput can sample pixels directly
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        videoDataOutput.setSampleBufferDelegate(self, queue: sessionQueue)
        captureSession.addOutput(videoDataOutput)
        captureSession.commitConfiguration()

        do {
            try device.lockForConfiguration()
            device.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 60)
            device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 60)
            device.unlockForConfiguration()
        } catch {
            print(error)
        }

        let captureConnection = videoDataOutput.connection(with: .video)
        captureConnection?.isEnabled = true
        deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
        // captureConnection?.videoOrientation = .landscapeRight

        sessionQueue.async {
            self.captureSession.startRunning()
        }
    }
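    // The shift-based AddSatShift calls in captureOutput implement the Stucki
    // error-diffusion kernel (divisor 42): err << (16 - 13) is err * 8, down to
    // err << (16 - 16), which is err * 1. A table-driven equivalent is sketched
    // below for reference; it is not wired into the capture path, and the names
    // `stucki` and `diffuse` are illustrative.
    //
    //             X   8   4
    //     2   4   8   4   2
    //     1   2   4   2   1    (all divided by 42)
    private static let stucki: [(dx: Int, dy: Int, weight: Int)] = [
        (1, 0, 8), (2, 0, 4),
        (-2, 1, 2), (-1, 1, 4), (0, 1, 8), (1, 1, 4), (2, 1, 2),
        (-2, 2, 1), (-1, 2, 2), (0, 2, 4), (1, 2, 2), (2, 2, 1),
    ]

    private func diffuse(_ scratch: inout [Int], row: Int, column: Int, err: Int) {
        for tap in Self.stucki {
            let x = column + tap.dx
            guard x >= 0, x < WIDTH else { continue }
            // saturate to the 24 bit grayscale range used by the scratch buffer;
            // row + dy never overruns thanks to the two padding rows in scratch
            let p = (row + tap.dy) * WIDTH + x
            scratch[p] = min(max(scratch[p] + err * tap.weight, 0), 0xffffff)
        }
    }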
    func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        if #available(iOS 11.1, *) {
            let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(
                deviceTypes: [.builtInDualCamera, .builtInTelephotoCamera, .builtInTrueDepthCamera, .builtInWideAngleCamera],
                mediaType: .video,
                position: position)
            return deviceDiscoverySession.devices.first
        }
        return device
    }

    func transformOrientation(orientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation {
        switch orientation {
        case .landscapeLeft:
            return .landscapeLeft
        case .landscapeRight:
            return .landscapeRight
        case .portraitUpsideDown:
            return .portraitUpsideDown
        default:
            return .portrait
        }
    }

    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
        if keyPath == "Display_Address" {
            constructSocket()
        } else {
            super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        }
    }

    func constructSocket() {
        let defaults = UserDefaults.standard
        if let ip = defaults.string(forKey: "Display_Address") {
            hostUDP = NWEndpoint.Host(ip)
        } else {
            hostUDP = NWEndpoint.Host("172.23.42.29")
            // hostUDP = NWEndpoint.Host("84.200.61.9")
            // hostUDP = NWEndpoint.Host("192.168.178.69")
        }

        connectionUDP = NWConnection(host: hostUDP!, port: portUDP, using: .udp)
        connectionUDP?.start(queue: .global())
    }

    @IBAction func switchCameraSide(sender: AnyObject) {
        let currentCameraInput: AVCaptureInput = captureSession.inputs[0]
        captureSession.removeInput(currentCameraInput)

        var newCamera: AVCaptureDevice
        if (currentCameraInput as! AVCaptureDeviceInput).device.position == .back {
            newCamera = self.cameraWithPosition(position: .front)!
        } else {
            newCamera = self.cameraWithPosition(position: .back)!
        }

        var newVideoInput: AVCaptureDeviceInput?
        do {
            newVideoInput = try AVCaptureDeviceInput(device: newCamera)
        } catch {
            print(error)
        }

        if let newVideoInput = newVideoInput {
            captureSession.addInput(newVideoInput)
            deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
        }
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        NotificationCenter.default.addObserver(self, selector: #selector(deviceOrientationDidChange), name: UIDevice.orientationDidChangeNotification, object: nil)
        deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
        sessionQueue.async {
            self.captureSession.startRunning()
        }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        sessionQueue.async {
            self.captureSession.stopRunning()
        }
        NotificationCenter.default.removeObserver(self)
    }

    @objc func deviceOrientationDidChange(_ notification: Notification) {
        let orientation = UIDevice.current.orientation
        let captureConnection = videoDataOutput.connection(with: .video)
        if orientation == .landscapeLeft {
            self.prevLayer?.connection?.videoOrientation = .landscapeRight
            captureConnection?.videoOrientation = .landscapeRight
        } else {
            self.prevLayer?.connection?.videoOrientation = .landscapeLeft
            captureConnection?.videoOrientation = .landscapeLeft
        }
    }
}
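// A minimal sketch of exercising the display protocol without the camera pipeline:
// it sends a single all-dark frame (the header plus 8960 zero bytes) over a fresh
// UDP connection. The function name and the assumption that the display blanks on
// an all-zero payload are illustrative, not part of the app above.
func sendBlankFrame(to host: NWEndpoint.Host, port: NWEndpoint.Port = 2342) {
    var packet: [UInt8] = [0, 0x12, 0, 0, 0x23, 0, 0, 0, 0, 0]
    packet.append(contentsOf: [UInt8](repeating: 0, count: 448 * 160 / 8))

    let connection = NWConnection(host: host, port: port, using: .udp)
    connection.start(queue: .global())
    connection.send(content: Data(packet), completion: .contentProcessed({ error in
        if let error = error {
            print("blank frame not sent: \(error)")
        }
        connection.cancel()
    }))
}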