| field | value | date |
|---|---|---|
| author | erdgeist <erdgeist@erdgeist.org> | 2023-05-31 15:39:59 +0200 |
| committer | erdgeist <erdgeist@erdgeist.org> | 2023-05-31 15:39:59 +0200 |
| commit | 32773a08eb11d286682ca8cad41171ce804f1631 | |
| tree | aee35b1dadbf06d4c09e2004f96d3a4e11716f87 /CCCB Display/ViewController.swift | |
| parent | 825d3442c320c5567317109947c8d1267704645b | |
Works
Diffstat (limited to 'CCCB Display/ViewController.swift')

    -rw-r--r--   CCCB Display/ViewController.swift | 336

1 file changed, 333 insertions(+), 3 deletions(-)
```diff
diff --git a/CCCB Display/ViewController.swift b/CCCB Display/ViewController.swift
index 53f3cee..d0f8dbe 100644
--- a/CCCB Display/ViewController.swift
+++ b/CCCB Display/ViewController.swift
@@ -6,14 +6,344 @@
 //
 
 import UIKit
+import AVFoundation
+import CoreImage
+import Network
 
-class ViewController: UIViewController {
+class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
+
+    @IBOutlet weak var frameRateLabel: UILabel!
+    @IBOutlet weak var cameraView: UIView!
+
+    var device: AVCaptureDevice?
+    var input: AVCaptureDeviceInput?
+    var prevLayer: AVCaptureVideoPreviewLayer?
+
+    private let captureSession = AVCaptureSession()
+    private let videoDataOutput = AVCaptureVideoDataOutput()
+    private let sessionQueue = DispatchQueue(label: "sessionQueue")
+    private let context = CIContext()
+
+    // var hostUDP: NWEndpoint.Host = "172.23.42.29"
+    var hostUDP: NWEndpoint.Host?
+    var portUDP: NWEndpoint.Port = 2342
+    var connectionUDP: NWConnection?
+
+    var lastTimeStamp: CFTimeInterval = CACurrentMediaTime()
+
+    /* Physical Display control packet parameters: */
+    private let HEADERLEN = 10
+    private let WIDTH = 448
+    private let HEIGHT = 160
+    private let VHEIGHT = 236
 
     override func viewDidLoad() {
         super.viewDidLoad()
-        // Do any additional setup after loading the view.
+
+        UserDefaults.standard.addObserver(self, forKeyPath: "Display_Address", options: .new, context: nil)
+        constructSocket()
+
+        switch AVCaptureDevice.authorizationStatus(for: .video) {
+        case .authorized: // the user has already authorized access to the camera.
+            DispatchQueue.main.async {
+                self.createSession()
+            }
+        case .notDetermined:
+            AVCaptureDevice.requestAccess(for: .video) { (granted) in
+                if granted {
+                    print("the user has granted access to the camera")
+                    DispatchQueue.main.async {
+                        self.createSession()
+                    }
+                } else {
+                    print("the user has not granted access to the camera")
+                    let dialogMessage = UIAlertController(title: "Attention", message: "Cannot work without camera access", preferredStyle: .alert)
+                    self.present(dialogMessage, animated: true, completion: nil)
+                }
+            }
+        case .denied:
+            print("the user has previously denied access to the camera.")
+            let dialogMessage = UIAlertController(title: "Attention", message: "Cannot work without camera access", preferredStyle: .alert)
+            self.present(dialogMessage, animated: true, completion: nil)
+
+        case .restricted:
+            print("the user can't give camera access due to some restriction.")
+            let dialogMessage = UIAlertController(title: "Attention", message: "Cannot work without camera access", preferredStyle: .alert)
+            self.present(dialogMessage, animated: true, completion: nil)
+
+        default:
+            print("something went wrong; we can't access the camera.")
+            let dialogMessage = UIAlertController(title: "Attention", message: "Cannot work without camera access", preferredStyle: .alert)
+            self.present(dialogMessage, animated: true, completion: nil)
+        }
+    }
+
+    override func viewDidAppear(_ animated: Bool) {
+        super.viewDidAppear(animated)
+        prevLayer?.frame.size = cameraView.frame.size
     }
 
+    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
 
-}
+        let now = CACurrentMediaTime()
+        let freq = (Int)(1 / (now - lastTimeStamp))
+        // print("Elapsed: \(now - lastTimeStamp) - Frequency: \(1 / (now - lastTimeStamp))")
+        lastTimeStamp = now
+
+        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+
+        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
+        let bufferWidth = CVPixelBufferGetWidth(pixelBuffer)
+        let bufferHeight = CVPixelBufferGetHeight(pixelBuffer)
+        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
+        let kBytesPerPixel = 4
+
+        print("\(bufferWidth) \(bufferHeight) \(bytesPerRow)")
+        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else { return }
+
+        var packet: [UInt8] = [0, 0x12, 0, 0, 0x23, 0, 0, 0, 0, 0]
+        var scratch: [Int] = Array(repeating: 0, count: WIDTH * (2 + VHEIGHT))
+
+        let t1 = CACurrentMediaTime()
+
+        // 160 real rows are interleaved with 19 gaps of 4 pixels height on the display,
+        // so we create 20 virtual blocks of 8 real and 4 virtual pixels:
+        // we overlay VHEIGHT == 236 virtual rows on the image and later skip the 4 invisible rows.
+        var off = 0
+        for row in 0..<VHEIGHT {
+            let zeile = (Int)(((Double)(row) / (Double)(VHEIGHT)) * (Double)(bufferHeight)) * bytesPerRow
+            for column in 0..<WIDTH {
+                let pixel = kBytesPerPixel * (Int)(((Double)(column) / (Double)(WIDTH)) * (Double)(bufferWidth))
+
+                let red = (Int)(baseAddress.load(fromByteOffset: zeile + pixel + 0, as: UInt8.self)) * 19535
+                let green = (Int)(baseAddress.load(fromByteOffset: zeile + pixel + 1, as: UInt8.self)) * 38470
+                let blue = (Int)(baseAddress.load(fromByteOffset: zeile + pixel + 2, as: UInt8.self)) * 7448
+                //scratch[row * WIDTH + column] = red + blue + green
+                scratch[off] = red + blue + green
+                off += 1
+            }
+        }
+
+        var acc = 0
+        var accv = 0
+
+        for row in 0..<VHEIGHT {
+            for column in 0..<WIDTH {
+                let pixel = scratch[row * WIDTH + column]
+                let bwpixel = (pixel < 0x810000) ? 0 : 0xffffff
+
+                // account for gaps on display (virtual lines 9-12)
+                if (row % 12) < 8 {
+                    acc = (acc << 1) + (bwpixel >> 23)
+                    accv += 1
+                    if accv == 8 {
+                        packet.append((UInt8)(acc))
+                        acc = 0
+                        accv = 0
+                    }
+                }
+
+                let err = (pixel - bwpixel) / 42
+
+                func AddSatShift(_ scr: inout Array<Int>, _ X: Int, _ Y: Int, _ SHIFT: Int) {
+                    let inner_p = (row + Y) * WIDTH + column + X
+                    var r = scr[inner_p] + (err << (16 - SHIFT))
+                    if r < 0 {
+                        r = 0
+                    }
+                    if r > 0xffffff {
+                        r = 0xffffff
+                    }
+                    scr[inner_p] = r
+                }
+
+                AddSatShift(&scratch, 0, 1, 13)
+                AddSatShift(&scratch, 0, 2, 14)
+                if (column > 0) {
+                    AddSatShift(&scratch, -1, 1, 14)
+                    AddSatShift(&scratch, -1, 2, 15)
+                }
+
+                if (column > 1) {
+                    AddSatShift(&scratch, -2, 1, 15)
+                    AddSatShift(&scratch, -2, 2, 16)
+                }
+
+                if (column < WIDTH - 1) {
+                    AddSatShift(&scratch, 1, 0, 13)
+                    AddSatShift(&scratch, 1, 1, 14)
+                    AddSatShift(&scratch, 1, 2, 15)
+                }
+
+                if (column < WIDTH - 2) {
+                    AddSatShift(&scratch, 2, 0, 14)
+                    AddSatShift(&scratch, 2, 1, 15)
+                    AddSatShift(&scratch, 2, 2, 16)
+                }
+
+            }
+        }
+
+        let t2 = CACurrentMediaTime()
+
+        // print("dur \(t2 - t1)")
+        DispatchQueue.main.async {
+            self.frameRateLabel.text = String(format: "%.04f (%d Hz)", t2 - t1, freq)
+        }
+
+        self.connectionUDP?.send(content: packet, completion: NWConnection.SendCompletion.contentProcessed({ (NWError) in
+            if (NWError == nil) {
+                print("Data was sent to UDP")
+            } else {
+                print("ERROR! Error when sending data (Type: Data). NWError: \n \(NWError!)")
+                self.constructSocket()
+            }
+        }))
+    }
 
+    func createSession() {
+        guard let device = AVCaptureDevice.default(for: AVMediaType.video) else { return }
+        do {
+            input = try AVCaptureDeviceInput(device: device)
+        }
+        catch {
+            print(error)
+        }
+
+        captureSession.sessionPreset = AVCaptureSession.Preset.vga640x480
+        if let input = input {
+            captureSession.addInput(input)
+        }
+
+        prevLayer = AVCaptureVideoPreviewLayer(session: captureSession)
+        prevLayer?.frame.size = cameraView.frame.size
+        prevLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
+
+        cameraView.layer.addSublayer(prevLayer!)
+        captureSession.addOutput(videoDataOutput)
+        captureSession.commitConfiguration()
+
+        videoDataOutput.videoSettings.updateValue(kCVPixelFormatType_32BGRA, forKey: "PixelFormatType")
+        videoDataOutput.setSampleBufferDelegate(self, queue: self.sessionQueue)
+
+        do {
+            try device.lockForConfiguration()
+            device.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 60)
+            device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 60)
+            device.unlockForConfiguration()
+        } catch {
+            print(error)
+        }
+
+        let captureConnection = videoDataOutput.connection(with: .video)
+        captureConnection?.isEnabled = true
+        deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
+        // captureConnection?.videoOrientation = .landscapeRight
+
+        sessionQueue.async { self.captureSession.startRunning() }
+    }
+
+    func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
+        if #available(iOS 11.1, *) {
+            let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInTelephotoCamera, .builtInTrueDepthCamera, .builtInWideAngleCamera], mediaType: .video, position: position)
+
+            if let device = deviceDiscoverySession.devices.first {
+                return device
+            }
+            else {
+                // add code here
+            }
+            return nil
+        }
+
+        return device
+    }
+
+    func transformOrientation(orientation: UIInterfaceOrientation) -> AVCaptureVideoOrientation {
+        switch orientation {
+        case .landscapeLeft:
+            return .landscapeLeft
+        case .landscapeRight:
+            return .landscapeRight
+        case .portraitUpsideDown:
+            return .portraitUpsideDown
+        default:
+            return .portrait
+        }
+    }
+
+    override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
+        if keyPath == "Display_Address" {
+            constructSocket()
+        }
+    }
+
+    func constructSocket() {
+        let defaults = UserDefaults.standard
+        if let ip = defaults.string(forKey: "Display_Address") {
+            hostUDP = NWEndpoint.Host(ip)
+        } else {
+            hostUDP = NWEndpoint.Host("172.23.42.29")
+            // hostUDP = NWEndpoint.Host("84.200.61.9")
+            // hostUDP = NWEndpoint.Host("192.168.178.69")
+        }
+        // hostUDP = NWEndpoint.Host("192.168.178.69")
+
+        self.connectionUDP = NWConnection(host: hostUDP!, port: portUDP, using: .udp)
+        self.connectionUDP?.start(queue: .global())
+    }
+
+    @IBAction func switchCameraSide(sender: AnyObject) {
+        let currentCameraInput: AVCaptureInput = captureSession.inputs[0]
+        captureSession.removeInput(currentCameraInput)
+        var newCamera: AVCaptureDevice
+        if (currentCameraInput as! AVCaptureDeviceInput).device.position == .back {
+            newCamera = self.cameraWithPosition(position: .front)!
+        } else {
+            newCamera = self.cameraWithPosition(position: .back)!
+        }
+
+        var newVideoInput: AVCaptureDeviceInput?
+        do {
+            newVideoInput = try AVCaptureDeviceInput(device: newCamera)
+        }
+        catch {
+            print(error)
+        }
+
+        if let newVideoInput = newVideoInput {
+            captureSession.addInput(newVideoInput)
+            deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
+        }
+    }
+
+    override func viewWillAppear(_ animated: Bool) {
+        super.viewWillAppear(animated)
+
+        NotificationCenter.default.addObserver(self, selector: #selector(deviceOrientationDidChange),
+                                               name: UIDevice.orientationDidChangeNotification, object: nil)
+        deviceOrientationDidChange(Notification(name: UIDevice.orientationDidChangeNotification))
+
+        sessionQueue.async { self.captureSession.startRunning() }
+    }
+
+    override func viewWillDisappear(_ animated: Bool) {
+        super.viewWillDisappear(animated)
+
+        sessionQueue.async { self.captureSession.stopRunning() }
+        NotificationCenter.default.removeObserver(self)
+    }
+
+    @objc func deviceOrientationDidChange(_ notification: Notification) {
+        let orientation = UIDevice.current.orientation
+        let captureConnection = videoDataOutput.connection(with: .video)
+
+        if orientation == .landscapeLeft {
+            self.prevLayer?.connection?.videoOrientation = .landscapeRight
+            captureConnection?.videoOrientation = .landscapeRight
+        } else {
+            self.prevLayer?.connection?.videoOrientation = .landscapeLeft
+            captureConnection?.videoOrientation = .landscapeLeft
+        }
+    }
+}
```
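
For reference, the per-frame work in `captureOutput` is a fixed-point greyscale conversion followed by error-diffusion dithering. The channel weights 19535/38470/7448 are BT.601-style luma coefficients scaled by 2^16 (19535 + 38470 + 7448 ≈ 65536), so 8-bit channels land on a 24-bit grey scale; pixels are thresholded at mid-grey (0x810000); and the divisor 42 together with the shift-encoded weights 8/4/2/1 (shifts 13 through 16) is exactly the Stucki kernel. Below is a minimal standalone sketch of the same arithmetic; the helper names `lumaFixedPoint` and `stuckiDither` are illustrative and not part of the commit:

```swift
import Foundation

// Fixed-point grey conversion: BT.601-style weights scaled by 2^16,
// mapping 8-bit channels onto a 0...0xffffff grey scale. Fill a buffer
// with grey[row * width + column] = lumaFixedPoint(r: r, g: g, b: b).
func lumaFixedPoint(r: Int, g: Int, b: Int) -> Int {
    return r * 19535 + g * 38470 + b * 7448
}

// Threshold `grey` in place at mid-grey and spread each pixel's
// quantisation error over its forward neighbours with the Stucki kernel
// (divisor 42, weights 8/4/2/1). The diff avoids bounds checks by
// allocating two spare scratch rows; this sketch bounds-checks instead.
func stuckiDither(_ grey: inout [Int], width: Int, height: Int) {
    for row in 0..<height {
        for column in 0..<width {
            let pixel = grey[row * width + column]
            let bw = pixel < 0x810000 ? 0 : 0xffffff
            grey[row * width + column] = bw
            let err = (pixel - bw) / 42   // one 42nd of the error per unit weight

            func add(_ dx: Int, _ dy: Int, _ weight: Int) {
                let x = column + dx, y = row + dy
                guard x >= 0, x < width, y < height else { return }
                // saturate to the 24-bit grey range, like AddSatShift above
                grey[y * width + x] = min(max(grey[y * width + x] + err * weight, 0), 0xffffff)
            }
            add( 1, 0, 8); add( 2, 0, 4)                                            // same row
            add(-2, 1, 2); add(-1, 1, 4); add(0, 1, 8); add(1, 1, 4); add(2, 1, 2)  // next row
            add(-2, 2, 1); add(-1, 2, 2); add(0, 2, 4); add(1, 2, 2); add(2, 2, 1)  // row after
        }
    }
}
```

The bounds check replaces the diff's two spare scratch rows; since those rows are written but never read back, the emitted bits come out the same either way.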
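
The 236-row virtual grid accounts for the panel geometry described in the comment above: 160 visible rows in 20 blocks of 8, separated by 19 four-row gaps (160 + 19 * 4 = 236). Sampling the camera image at 236 rows and then dropping every virtual row with `row % 12 >= 8` keeps the picture geometrically correct: content simply falls into the gaps instead of being squeezed. A sketch of that packing step under the same constants, operating on the dithered buffer (where values are already 0 or 0xffffff); the function name is illustrative:

```swift
// Pack the 448 x 236 virtual grid into one bit per *visible* pixel,
// MSB first. Virtual rows 8...11 of each 12-row block are display gaps,
// so 236 virtual rows yield the panel's 160 real rows.
func packVisibleRows(_ grey: [Int], width: Int = 448, vheight: Int = 236) -> [UInt8] {
    var bytes: [UInt8] = []
    var acc = 0, accv = 0
    for row in 0..<vheight where row % 12 < 8 {
        for column in 0..<width {
            let bit = grey[row * width + column] < 0x810000 ? 0 : 1
            acc = (acc << 1) | bit
            accv += 1
            if accv == 8 { bytes.append(UInt8(acc)); acc = 0; accv = 0 }
        }
    }
    return bytes
}
```

Because 448 is divisible by 8, the accumulator flushes exactly at each row boundary, so a frame is always 160 * 448 / 8 = 8960 bytes.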
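
Each datagram the view controller sends is therefore the 10-byte header `[0, 0x12, 0, 0, 0x23, 0, 0, 0, 0, 0]` (HEADERLEN == 10) followed by those 8960 bitmap bytes, addressed to port 2342. A self-contained framing sketch under those assumptions follows; `sendFrame` is an illustrative name, the payload is wrapped in `Data`, and unlike the diff, which keeps one persistent connection via `constructSocket` and rebuilds it on error, this opens a throwaway connection per frame:

```swift
import Foundation
import Network

// Send one display frame: 10-byte header plus 160 * 448 / 8 = 8960
// bitmap bytes, as a single UDP datagram.
func sendFrame(_ bitmap: [UInt8], to host: NWEndpoint.Host, port: NWEndpoint.Port = 2342) {
    precondition(bitmap.count == 160 * 448 / 8, "one bit per visible display pixel")
    var packet: [UInt8] = [0, 0x12, 0, 0, 0x23, 0, 0, 0, 0, 0]
    packet.append(contentsOf: bitmap)

    let connection = NWConnection(host: host, port: port, using: .udp)
    connection.start(queue: .global())
    connection.send(content: Data(packet), completion: .contentProcessed({ error in
        if let error = error {
            print("send failed: \(error)")
        }
        connection.cancel()
    }))
}
```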
