summaryrefslogtreecommitdiff
path: root/CCCB Display/ViewController.swift
blob: d0f8dbef467ee553207ccea5b3c8f53abb44e6a5 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
//
//  ViewController.swift
//  CCCB Display
//
//  Created by Dirk Engling on 26.05.23.
//

import UIKit
import AVFoundation
import CoreImage
import Network

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    @IBOutlet weak var frameRateLabel: UILabel!
    @IBOutlet weak var cameraView: UIView!

    var device: AVCaptureDevice?
    var input: AVCaptureDeviceInput?
    var prevLayer: AVCaptureVideoPreviewLayer?

    private let captureSession = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()
    private let sessionQueue = DispatchQueue(label: "sessionQueue")
    private let context = CIContext()

//    var hostUDP: NWEndpoint.Host = "172.23.42.29"
    var hostUDP: NWEndpoint.Host?
    var portUDP: NWEndpoint.Port = 2342
    var connectionUDP: NWConnection?

    // Timestamp of the previous frame, used to derive the capture frequency shown in the UI.
    var lastTimeStamp: CFTimeInterval = CACurrentMediaTime()

    /* Physical Display control packet parameters: */
    private let HEADERLEN = 10   // bytes of protocol header prepended to each UDP packet
    private let WIDTH     = 448  // display width in pixels
    private let HEIGHT    = 160  // physically visible rows on the display
    private let VHEIGHT   = 236  // virtual rows: 160 visible + the 4-row gaps (see captureOutput)

    override func viewDidLoad() {
        super.viewDidLoad()

        // Rebuild the UDP socket whenever the user edits the display address in Settings.
        UserDefaults.standard.addObserver(self, forKeyPath: "Display_Address", options: .new, context: nil)
        constructSocket()

        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized: // the user has already authorized to access the camera.
            DispatchQueue.main.async {
                self.createSession()
            }
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                // The completion handler runs on an arbitrary queue; hop to main for any UI work.
                DispatchQueue.main.async {
                    if granted {
                        print("the user has granted to access the camera")
                        self.createSession()
                    } else {
                        print("the user has not granted to access the camera")
                        self.presentCameraAlert()
                    }
                }
            }
        case .denied:
            print("the user has denied previously to access the camera.")
            presentCameraAlert()

        case .restricted:
            print("the user can't give camera access due to some restriction.")
            presentCameraAlert()

        default:
            print("something has wrong due to we can't access the camera.")
            presentCameraAlert()
        }
    }

    deinit {
        // Balance the KVO registration from viewDidLoad; NSObject KVO crashes on
        // dealloc if an observer is still registered.
        UserDefaults.standard.removeObserver(self, forKeyPath: "Display_Address")
    }

    /// Presents the "camera access required" alert. Previously the alert had no
    /// actions at all, so once shown it could never be dismissed.
    private func presentCameraAlert() {
        let dialogMessage = UIAlertController(title: "Attention", message: "Can not work without camera access", preferredStyle: .alert)
        dialogMessage.addAction(UIAlertAction(title: "OK", style: .default))
        self.present(dialogMessage, animated: true, completion: nil)
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Auto Layout has settled by now; size the preview layer to its container.
        prevLayer?.frame.size = cameraView.frame.size
    }

    /// Per-frame hot path: converts the camera frame to a 1-bit dithered image
    /// laid out for the display's row geometry and ships it via UDP.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        let now = CACurrentMediaTime()
        let elapsed = now - lastTimeStamp
        // Int(1 / 0.0) would trap converting the infinite Double, so guard the interval.
        let freq = elapsed > 0 ? Int(1 / elapsed) : 0
//        print ("Elapsed: \(elapsed) - Frequency: \(freq)")
        lastTimeStamp = now

        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        // The lock was previously never released (not even on the early returns
        // below), pinning every frame's backing store. defer guarantees the unlock.
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

        let bufferWidth = CVPixelBufferGetWidth(pixelBuffer)
        let bufferHeight = CVPixelBufferGetHeight(pixelBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let kBytesPerPixel = 4

//        print("\(bufferWidth) \(bufferHeight) \(bytesPerRow)")  // per-frame logging throttles the capture queue
        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else { return }

        // Display protocol header (HEADERLEN bytes); the packed pixel payload is appended below.
        var packet: [UInt8] = [0, 0x12, 0, 0, 0x23, 0, 0, 0, 0, 0]
        // Two spare rows at the bottom absorb the Y+1/Y+2 error diffusion of the last real rows.
        var scratch: [Int] = Array(repeating: 0, count: WIDTH * (2 + VHEIGHT))

        let t1 = CACurrentMediaTime()

        // 160 real rows are interleaved with 19 gaps of 4 pixels height on the display
        // so we create 20 virtual blocks of 8 real and 4 virtual pixels
        // we overlay VHEIGHT==236 virtual rows on the image and later skip the 4 invisble rows
        var off = 0
        for row in 0..<VHEIGHT {
            let zeile = Int((Double(row) / Double(VHEIGHT)) * Double(bufferHeight)) * bytesPerRow
            for column in 0..<WIDTH {
                let pixel = kBytesPerPixel * Int((Double(column) / Double(WIDTH)) * Double(bufferWidth))

                // 32BGRA stores bytes as B,G,R,A — the red/blue offsets were swapped before.
                // Weights are the ITU-R 601 luma coefficients scaled by 65536 (0.298/0.587/0.114).
                let blue  = Int(baseAddress.load(fromByteOffset: zeile + pixel + 0, as: UInt8.self)) * 7448
                let green = Int(baseAddress.load(fromByteOffset: zeile + pixel + 1, as: UInt8.self)) * 38470
                let red   = Int(baseAddress.load(fromByteOffset: zeile + pixel + 2, as: UInt8.self)) * 19535
                scratch[off] = red + blue + green
                off += 1
            }
        }

        // Threshold with error diffusion, packing 8 visible pixels per payload byte.
        var acc = 0   // bit accumulator for the current payload byte
        var accv = 0  // number of bits currently in acc

        for row in 0..<VHEIGHT {
            for column in 0..<WIDTH {
                let pixel = scratch[row * WIDTH + column]
                let bwpixel = (pixel < 0x810000) ? 0 : 0xffffff

                // account for gaps on display (virtual lines 9-12): only the first
                // 8 rows of every 12-row block are physically visible.
                if (row % 12) < 8 {
                    acc = (acc << 1) + (bwpixel >> 23)  // 0xffffff >> 23 == 1
                    accv += 1
                    if accv == 8 {
                        packet.append(UInt8(acc))
                        acc = 0
                        accv = 0
                    }
                }

                // Quantization error, pre-divided so the shifted fractions below sum to ~1.
                let err = (pixel - bwpixel) / 42

                // Adds a saturated share of the error to the neighbour at (column+X, row+Y).
                // SHIFT selects the diffusion weight: err << (16 - SHIFT).
                func AddSatShift(_ scr: inout Array<Int>, _ X: Int, _ Y: Int, _ SHIFT: Int) {
                    let inner_p = (row + Y) * WIDTH + column + X
                    var r = scr[inner_p] + (err << (16 - SHIFT))
                    if r < 0 {
                        r = 0
                    }
                    if r > 0xffffff {
                        r = 0xffffff
                    }
                    scr[inner_p] = r
                }

                AddSatShift(&scratch, 0, 1, 13)
                AddSatShift(&scratch, 0, 2, 14)
                if column > 0 {
                    AddSatShift(&scratch, -1, 1, 14)
                    AddSatShift(&scratch, -1, 2, 15)
                }

                if column > 1 {
                    AddSatShift(&scratch, -2, 1, 15)
                    AddSatShift(&scratch, -2, 2, 16)
                }

                if column < WIDTH - 1 {
                    AddSatShift(&scratch,  1, 0, 13)
                    AddSatShift(&scratch,  1, 1, 14)
                    AddSatShift(&scratch,  1, 2, 15)
                }

                if column < WIDTH - 2 {
                    AddSatShift(&scratch,  2, 0, 14)
                    AddSatShift(&scratch,  2, 1, 15)
                    AddSatShift(&scratch,  2, 2, 16)
                }
            }
        }

        let t2 = CACurrentMediaTime()

//        print("dur \(t2 - t1)")
        DispatchQueue.main.async {
            self.frameRateLabel.text = String(format: "%.04f (%d Hz)", t2 - t1, freq)
        }

        self.connectionUDP?.send(content: packet, completion: NWConnection.SendCompletion.contentProcessed(({ (NWError) in
            if (NWError == nil) {
                print("Data was sent to UDP")
            } else {
                print("ERROR! Error when data (Type: Data) sending. NWError: \n \(NWError!)")
                self.constructSocket()
            }
        })))
    }

    /// Configures the capture session: default video device, VGA preset, preview
    /// layer, BGRA sample-buffer output at 60 fps, and starts it on sessionQueue.
    func createSession() {
        guard let device = AVCaptureDevice.default(for: AVMediaType.video) else { return }
        do {
            input = try AVCaptureDeviceInput(device: device)
        } catch {
            print(error)
        }

        // commitConfiguration() was previously called without the matching begin.
        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSession.Preset.vga640x480
        if let input = input {
            captureSession.addInput(input)
        }
        captureSession.addOutput(videoDataOutput)

        // "PixelFormatType" is not a recognized settings key, so the BGRA request
        // silently never took effect; kCVPixelBufferPixelFormatTypeKey is required.
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        videoDataOutput.setSampleBufferDelegate(self, queue: sessionQueue)
        captureSession.commitConfiguration()

        let layer = AVCaptureVideoPreviewLayer(session: captureSession)
        layer.frame.size = cameraView.frame.size
        layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        prevLayer = layer
        cameraView.layer.addSublayer(layer)

        do {
            // Pin capture to 60 fps (min == max duration).
            try device.lockForConfiguration()
            device.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 60)
            device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 60)
            device.unlockForConfiguration()
        } catch {
            print(error)
        }

        let captureConnection = videoDataOutput.connection(with: .video)
        captureConnection?.isEnabled = true
        deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
        // captureConnection?.videoOrientation = .landscapeRight

        // startRunning() blocks, so keep it off the main thread.
        sessionQueue.async { self.captureSession.startRunning() }
    }

    /// Returns the first capture device at the given position, or the previously
    /// stored `device` (which may be nil) on systems older than iOS 11.1.
    func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        if #available(iOS 11.1, *) {
            let discovery = AVCaptureDevice.DiscoverySession(
                deviceTypes: [.builtInDualCamera, .builtInTelephotoCamera, .builtInTrueDepthCamera, .builtInWideAngleCamera],
                mediaType: .video,
                position: position)
            return discovery.devices.first
        }

        return device
    }

    /// Maps an interface orientation to the corresponding capture orientation
    /// (anything unhandled falls back to portrait).
    func transformOrientation(orientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation {
        switch orientation {
        case .landscapeLeft:
            return .landscapeLeft
        case .landscapeRight:
            return .landscapeRight
        case .portraitUpsideDown:
            return .portraitUpsideDown
        default:
            return .portrait
        }
    }

    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
        if keyPath == "Display_Address" {
            constructSocket()
        } else {
            // Unhandled KVO notifications must be forwarded to super.
            super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        }
    }

    /// (Re-)creates the UDP connection to the display, reading the target address
    /// from user defaults and falling back to the CCCB default.
    func constructSocket() {
        let defaults = UserDefaults.standard
        // hostUDP = NWEndpoint.Host("84.200.61.9")
        // hostUDP = NWEndpoint.Host("192.168.178.69")
        let host = NWEndpoint.Host(defaults.string(forKey: "Display_Address") ?? "172.23.42.29")
        hostUDP = host

        // Cancel any previous connection so repeated reconstruction doesn't leak sockets.
        self.connectionUDP?.cancel()
        self.connectionUDP = NWConnection(host: host, port: portUDP, using: .udp)
        self.connectionUDP?.start(queue: .global())
    }

    /// Toggles between front and back camera. Restores the previous input
    /// instead of crashing if the opposite camera cannot be found.
    @IBAction func switchCameraSide(sender: AnyObject) {
        guard let currentCameraInput = captureSession.inputs.first as? AVCaptureDeviceInput else { return }
        captureSession.removeInput(currentCameraInput)

        let newPosition: AVCaptureDevice.Position = currentCameraInput.device.position == .back ? .front : .back
        guard let newCamera = cameraWithPosition(position: newPosition) else {
            captureSession.addInput(currentCameraInput)
            return
        }

        var newVideoInput: AVCaptureDeviceInput?
        do {
            newVideoInput = try AVCaptureDeviceInput(device: newCamera)
        } catch {
            print(error)
        }

        if let newVideoInput = newVideoInput {
            captureSession.addInput(newVideoInput)
            deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
        }
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        NotificationCenter.default.addObserver(self, selector: #selector(deviceOrientationDidChange),
                                               name: UIDevice.orientationDidChangeNotification, object: nil)
        deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))

        sessionQueue.async { self.captureSession.startRunning() }
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        sessionQueue.async { self.captureSession.stopRunning() }
        NotificationCenter.default.removeObserver(self)
    }

    /// Keeps preview layer and capture connection in a landscape orientation;
    /// anything other than landscapeLeft is treated as landscapeLeft output.
    @objc func deviceOrientationDidChange(_ notification: Notification) {
        let orientation = UIDevice.current.orientation
        let captureConnection = videoDataOutput.connection(with: .video)

        if orientation == .landscapeLeft {
            self.prevLayer?.connection?.videoOrientation = .landscapeRight
            captureConnection?.videoOrientation = .landscapeRight
        } else {
            self.prevLayer?.connection?.videoOrientation = .landscapeLeft
            captureConnection?.videoOrientation = .landscapeLeft
        }
    }
}