diff --git a/js/MediaStreamRenderer.js b/js/MediaStreamRenderer.js index 39c30c5e..41062488 100644 --- a/js/MediaStreamRenderer.js +++ b/js/MediaStreamRenderer.js @@ -30,6 +30,26 @@ function MediaStreamRenderer(element) { this.videoWidth = undefined; this.videoHeight = undefined; + // Support canvas to render + this.videoStopped = false; + this.webSocketUrl = undefined; + this.webSocketClient = undefined; + this.canvasCtx = undefined; + this.canvasId = (element.id || Date.now()) + '__canvas'; + + var hasCanvas = false; + var canvasElement = document.getElementById(this.canvasId); + debug('render() [canvas-id:%s, ele:%o]', this.canvasId, canvasElement); + if (!canvasElement) { + canvasElement = document.createElement('canvas'); + canvasElement.id = this.canvasId; + element.parentNode.insertBefore(canvasElement, element.nextSibling); + } + if (canvasElement) { + hasCanvas = true; + this.canvasCtx = createCanvasContext(canvasElement); + } + // Private attributes. this.id = randomNumber(); @@ -37,7 +57,7 @@ function MediaStreamRenderer(element) { onEvent.call(self, data); } - exec(onResultOK, null, 'iosrtcPlugin', 'new_MediaStreamRenderer', [this.id]); + exec(onResultOK, null, 'iosrtcPlugin', 'new_MediaStreamRenderer', [this.id, hasCanvas]); this.refresh(); @@ -62,6 +82,7 @@ MediaStreamRenderer.prototype.render = function (stream) { } self.stream = stream; + self.videoStopped = false; exec(null, null, 'iosrtcPlugin', 'MediaStreamRenderer_render', [self.id, stream.id]); @@ -364,6 +385,16 @@ MediaStreamRenderer.prototype.close = function () { } this.stream = undefined; + if (this.webSocketClient) { + this.webSocketClient.close(); + this.webSocketClient = undefined; + } + + if (this.canvasCtx) { + this.canvasCtx.fillBlack(); + this.canvasCtx = undefined; + } + exec(null, null, 'iosrtcPlugin', 'MediaStreamRenderer_close', [this.id]); if (this.refreshInterval) { clearInterval(this.refreshInterval); @@ -371,6 +402,327 @@ MediaStreamRenderer.prototype.close = function () { } }; +MediaStreamRenderer.prototype.openWebSocket = function (host, port, uuid) { + if (!this.canvasCtx) { + debug('websocket no canvas context'); + return; + } + + var self = this; + this.webSocketUrl = 'ws://' + host + ':' + port + '?uuid=' + uuid; + debug('websocket url=' + this.webSocketUrl); + + this.webSocketClient = new window.WebSocket(this.webSocketUrl); + this.webSocketClient.binaryType = 'arraybuffer'; + this.webSocketClient.onopen = function () { + debug('websocket open for uuid:' + uuid); + }; + this.webSocketClient.onerror = function (event) { + var errorStr = JSON.stringify(event, null, 4); + debug('websocket error for uuid:' + uuid + ', error:' + errorStr); + }; + this.webSocketClient.onclose = function (event) { + var errorStr = JSON.stringify(event, null, 4); + debug('websocket close for uuid:' + uuid + ', error:' + errorStr); + }; + this.webSocketClient.onmessage = function (event) { + //debug('websocket message uuid:' + uuid + ', length:' + event.data.length); + if (!self.stream) { + return; + } + if (self.videoStopped) { + return; + } + + // data format: 16B-head + body + // head: type(2B) + len(4B) + width(2B) + height(2B) + rotation(2B) + timestamp(4B) + // body: len + var headLen = 16; + var pdu = new DataView(event.data); + var pduLen = pdu.byteLength; + if (pduLen < headLen) { + return; + } + + // parse head + //var pduType = pdu.getUint16(0); + var bodyLen = pdu.getUint32(2, true); + var width = pdu.getUint16(6, true); + var height = pdu.getUint16(8, true); + //var rotation = pdu.getUint16(10, true); + 
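As a reference for the binary protocol parsed above: each websocket message is a 16-byte little-endian header followed by one raw I420 frame, and drawFrame() derives the U/V plane offsets from the width and height. The helper below is a hypothetical sketch of that layout (it is not part of the patch); the field offsets and byte order are taken from the onmessage parser.

```javascript
// Hypothetical helper illustrating the frame layout used over the websocket.
// head (16 bytes, little-endian): type(2) + bodyLen(4) + width(2) + height(2) + rotation(2) + timestamp(4)
// body: bodyLen bytes of I420 pixels (full-size Y plane, then quarter-size U and V planes)
function parseFramePdu(arrayBuffer) {
	var HEAD_LEN = 16;
	var view = new DataView(arrayBuffer);
	if (view.byteLength < HEAD_LEN) {
		return null;
	}
	var head = {
		type: view.getUint16(0, true),
		bodyLen: view.getUint32(2, true),
		width: view.getUint16(6, true),
		height: view.getUint16(8, true),
		rotation: view.getUint16(10, true),
		timestamp: view.getUint32(12, true)
	};
	if (view.byteLength !== HEAD_LEN + head.bodyLen) {
		return null; // truncated or corrupt message
	}
	var ySize = head.width * head.height; // assumes even width/height
	return {
		head: head,
		frame: new Uint8Array(arrayBuffer, HEAD_LEN, head.bodyLen),
		uOffset: ySize, // U plane starts right after Y
		vOffset: ySize + ySize / 4 // V plane starts after U
	};
}
```

The resulting frame, uOffset and vOffset can be handed straight to CanvasI420Context.render(), which is what drawFrame() effectively does.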
//var timestamp = pdu.getUint32(12, true); + //debug('websocket message: body='+bodyLen+',resloution='+width+'x'+height+",size="+pduLen); + if (pduLen !== headLen + bodyLen) { + debug('websocket message, wrong data length'); + } else { + var typedArray = new Uint8Array(event.data); + var frame = typedArray.subarray(headLen, headLen + bodyLen); + self.drawFrame(frame, width, height); + } + }; +}; + +MediaStreamRenderer.prototype.drawFrame = function (frame, width, height) { + //debug('drawFrame canvas, length=' + frame.length); + if (this.canvasCtx) { + var uOffset = parseInt(width * height); + var vOffset = parseInt(uOffset + uOffset / 4); + this.canvasCtx.render(frame, width, height, uOffset, vOffset); + } +}; + +/** + * WebGLTexture API. + */ + +function WebGLTexture(gl) { + this.gl = gl; + this.texture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, this.texture); + + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); + + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); +} + +WebGLTexture.prototype.bind = function (n, program, name) { + var gl = this.gl; + gl.activeTexture([gl.TEXTURE0, gl.TEXTURE1, gl.TEXTURE2][n]); + gl.bindTexture(gl.TEXTURE_2D, this.texture); + gl.uniform1i(gl.getUniformLocation(program, name), n); +}; + +WebGLTexture.prototype.fill = function (width, height, data) { + var gl = this.gl; + var level = 0; + var internalFormat = gl.LUMINANCE; //gl.RGBA; + var border = 0; + var srcFormat = gl.LUMINANCE; //gl.RGBA; + var srcType = gl.UNSIGNED_BYTE; + gl.bindTexture(gl.TEXTURE_2D, this.texture); + gl.texImage2D( + gl.TEXTURE_2D, + level, + internalFormat, + width, + height, + border, + srcFormat, + srcType, + data + ); +}; + +/** + * Canvas API. 
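The fragment shader in setupCanvasWebGL() below samples the three LUMINANCE textures and multiplies by a BT.601 limited-range YCbCr-to-RGB matrix. A rough scalar equivalent is sketched here for reference (rounded coefficients, not part of the patch); it also shows why fillBlack() uploads Y = 0 and U = V = 128, since 128 is the chroma zero point.

```javascript
// Approximate per-pixel equivalent of the shader's YUV2RGB matrix
// (BT.601, limited-range YCbCr in, full-range RGB out). y, u, v are
// 0..255 samples from the Y, U and V planes.
function yuvToRgb(y, u, v) {
	function clamp(x) {
		return Math.max(0, Math.min(255, Math.round(x)));
	}
	var yn = 1.164 * (y - 16);
	return [
		clamp(yn + 1.596 * (v - 128)), // R
		clamp(yn - 0.392 * (u - 128) - 0.813 * (v - 128)), // G
		clamp(yn + 2.017 * (u - 128)) // B
	];
}
```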
+ */ + +function setupCanvasWebGL(canvas, options) { + var gl = canvas.getContext('webgl', { + preserveDrawingBuffer: Boolean(options.preserveDrawingBuffer) + }); + if (!gl) { + return null; + } + var program = gl.createProgram(); + var vertexShaderSource = [ + 'attribute highp vec4 aVertexPosition;', + 'attribute vec2 aTextureCoord;', + 'varying highp vec2 vTextureCoord;', + 'void main(void) {', + ' gl_Position = aVertexPosition;', + ' vTextureCoord = aTextureCoord;', + '}' + ].join('\n'); + + var vertexShader = gl.createShader(gl.VERTEX_SHADER); + gl.shaderSource(vertexShader, vertexShaderSource); + gl.compileShader(vertexShader); + + var fragmentShaderSource = [ + 'precision highp float;', + 'varying lowp vec2 vTextureCoord;', + 'uniform sampler2D YTexture;', + 'uniform sampler2D UTexture;', + 'uniform sampler2D VTexture;', + 'const mat4 YUV2RGB = mat4', + '(', + ' 1.1643828125, 0, 1.59602734375, -.87078515625,', + ' 1.1643828125, -.39176171875, -.81296875, .52959375,', + ' 1.1643828125, 2.017234375, 0, -1.081390625,', + ' 0, 0, 0, 1', + ');', + 'void main(void) {', + ' gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;', + '}' + ].join('\n'); + + var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER); + gl.shaderSource(fragmentShader, fragmentShaderSource); + gl.compileShader(fragmentShader); + gl.attachShader(program, vertexShader); + gl.attachShader(program, fragmentShader); + gl.linkProgram(program); + gl.useProgram(program); + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + debug('gl Shader link failed.'); + } + var vertexPositionAttribute = gl.getAttribLocation(program, 'aVertexPosition'); + gl.enableVertexAttribArray(vertexPositionAttribute); + var textureCoordAttribute = gl.getAttribLocation(program, 'aTextureCoord'); + gl.enableVertexAttribArray(textureCoordAttribute); + + var verticesBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, verticesBuffer); + gl.bufferData( + gl.ARRAY_BUFFER, + new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, -1.0, -1.0, 0.0]), + gl.STATIC_DRAW + ); + gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0); + var texCoordBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.bufferData( + gl.ARRAY_BUFFER, + new Float32Array([1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0]), + gl.STATIC_DRAW + ); + gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0); + + gl.y = new WebGLTexture(gl); + gl.u = new WebGLTexture(gl); + gl.v = new WebGLTexture(gl); + gl.y.bind(0, program, 'YTexture'); + gl.u.bind(1, program, 'UTexture'); + gl.v.bind(2, program, 'VTexture'); + return gl; +} + +function CanvasI420Context(canvas, gl) { + this.canvas = canvas; + this.gl = gl; + this.width = 0; + this.height = 0; +} + +CanvasI420Context.prototype.render = function (frame, width, height, uOffset, vOffset) { + if (width === 0 || height === 0) { + return; + } + + var canvas = this.canvas; + if (width !== this.width || height !== this.height) { + var glWidth = canvas.clientWidth; + //var glHeight = canvas.clientHeight; + var glHeight = glWidth * (height / width); + + var glX = 0; + var glY = 0; + if (canvas.clientHeight > glHeight) { + glY = parseInt((canvas.clientHeight - glHeight) / 2); + } + + debug( + 'canvas render change from=' + + this.width + + 'x' + + this.height + + ' to ' + + width + + 'x' + + height + + ', clientSize=' + + canvas.clientWidth + + 'x' + + canvas.clientHeight + + ', 
offsetSize=' + + canvas.offsetWidth + + 'x' + + canvas.offsetHeight + + ', glSize=' + + glWidth + + 'x' + + glHeight + + '-' + + glX + + 'x' + + glY + ); + + this.frameSetup(glX, glY, glWidth, glHeight); + this.width = width; + this.height = height; + } + this.renderFrame(frame, uOffset, vOffset); +}; + +CanvasI420Context.prototype.frameSetup = function (glx, gly, width, height) { + var canvas = this.canvas; + var gl = this.gl; + if (canvas.width !== canvas.clientWidth || canvas.height !== canvas.clientHeight) { + canvas.width = canvas.clientWidth; + canvas.height = canvas.clientHeight; + } + gl.viewport(glx, gly, width, height); +}; + +CanvasI420Context.prototype.renderFrame = function (frame, uOffset, vOffset) { + var gl = this.gl; + var width = this.width; + var height = this.height; + gl.y.fill(width, height, frame.subarray(0, uOffset)); + gl.u.fill(width >> 1, height >> 1, frame.subarray(uOffset, vOffset)); + gl.v.fill(width >> 1, height >> 1, frame.subarray(vOffset, frame.length)); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); +}; + +CanvasI420Context.prototype.fillBlack = function () { + var gl = this.gl; + var arr1 = new Uint8Array(1), + arr2 = new Uint8Array(1); + + arr1[0] = 0; + arr2[0] = 128; + + gl.y.fill(1, 1, arr1); + gl.u.fill(1, 1, arr2); + gl.v.fill(1, 1, arr2); + + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); +}; + +function createCanvasContext(element, options) { + var canvas; + if (typeof element === 'string') { + canvas = window.document.querySelector(element); + } else if (element instanceof HTMLCanvasElement) { + canvas = element; + } + + if (!canvas) { + debug('find no canvas element=' + element); + return null; + } + + if (!options) { + options = { + preserveDrawingBuffer: false + }; + } + + var glCtx = setupCanvasWebGL(canvas, options); + if (!glCtx) { + debug('fail to setupCanvasWebGL'); + return null; + } + + var i420Ctx = new CanvasI420Context(canvas, glCtx); + i420Ctx.fillBlack(); + return i420Ctx; +} + /** * Private API. */ @@ -392,6 +744,19 @@ function onEvent(data) { event.videoHeight = data.size.height; this.dispatchEvent(event); + break; + case 'videowebsocket': + switch (data.action) { + case 'run': + if (data.ws) { + this.videoStopped = false; + this.openWebSocket('localhost', data.ws.port, data.ws.uuid); + } + break; + case 'stop': + this.videoStopped = true; + break; + } break; } } diff --git a/js/iosrtc.js b/js/iosrtc.js index 52b4d7fd..54325591 100644 --- a/js/iosrtc.js +++ b/js/iosrtc.js @@ -211,6 +211,9 @@ function registerGlobals(doNotRestoreCallbacksSupport) { ); } + // Prevent WebRTC-adapter to overide navigator.mediaDevices after shim is applied since ios 14.3 + Object.freeze(navigator.mediaDevices); + window.RTCPeerConnection = RTCPeerConnection; window.webkitRTCPeerConnection = RTCPeerConnection; window.RTCSessionDescription = RTCSessionDescription; diff --git a/lib/psocket/headers/PSWebSocket.h b/lib/psocket/headers/PSWebSocket.h new file mode 100644 index 00000000..8735403a --- /dev/null +++ b/lib/psocket/headers/PSWebSocket.h @@ -0,0 +1,161 @@ +// Copyright 2014 Zwopple Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#import +#import "PSWebSocketTypes.h" + +typedef NS_ENUM(NSInteger, PSWebSocketReadyState) { + PSWebSocketReadyStateConnecting = 0, + PSWebSocketReadyStateOpen, + PSWebSocketReadyStateClosing, + PSWebSocketReadyStateClosed +}; + +@class PSWebSocket; + +/** + * PSWebSocketDelegate + */ +@protocol PSWebSocketDelegate + +@required +- (void)webSocketDidOpen:(PSWebSocket *)webSocket; +- (void)webSocket:(PSWebSocket *)webSocket didFailWithError:(NSError *)error; +- (void)webSocket:(PSWebSocket *)webSocket didReceiveMessage:(id)message; +- (void)webSocket:(PSWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean; +@optional +- (void)webSocketIsHungry:(PSWebSocket *)webSocket; +- (BOOL)webSocket:(PSWebSocket *)webSocket validateServerTrust: (SecTrustRef)trust; +@end + +/** + * PSWebSocket + */ +@interface PSWebSocket : NSObject + +#pragma mark - Class Methods + +/** + * Given a NSURLRequest determine if it is a websocket request based on it's headers + * + * @param request request to check + * + * @return whether or not the given request is a websocket request + */ ++ (BOOL)isWebSocketRequest:(NSURLRequest *)request; + ++ (NSData*) peerAddressOfStream: (NSInputStream*)inputStream; + +#pragma mark - Properties + +@property (nonatomic, assign, readonly) PSWebSocketReadyState readyState; +@property (nonatomic, weak) id delegate; +@property (nonatomic, strong) dispatch_queue_t delegateQueue; + +@property (nonatomic, strong, readonly) NSURLRequest* URLRequest; +@property (nonatomic, strong, readonly) NSData* remoteAddress; +@property (nonatomic, strong, readonly) NSString* remoteHost; +@property (nonatomic, strong) NSArray* SSLClientCertificates; + +@property (nonatomic, strong) NSString* protocol; + +#pragma mark - Initialization + +/** + * Initialize a PSWebSocket instance in client mode. + * + * @param request that is to be used to initiate the handshake + * + * @return an initialized instance of PSWebSocket in client mode + */ ++ (instancetype)clientSocketWithRequest:(NSURLRequest *)request; + +/** + * Initialize a PSWebSocket instance in server mode + * + * @param request request that is to be used to initiate the handshake response + * @param inputStream opened input stream to be taken over by the websocket + * @param outputStream opened output stream to be taken over by the websocket + * + * @return an initialized instance of PSWebSocket in server mode + */ ++ (instancetype)serverSocketWithRequest:(NSURLRequest *)request inputStream:(NSInputStream *)inputStream outputStream:(NSOutputStream *)outputStream; + +#pragma mark - Actions + +/** + * Opens the websocket connection and initiates the handshake. Once + * opened an instance of PSWebSocket can never be opened again. The + * connection obeys any timeout interval set on the NSURLRequest used + * to initialize the websocket. + */ +- (void)open; + +/** + * Setting this property to YES stops the WebSocket from reading data from the TCP stream. + * This can be useful for flow control, if messages are arriving faster than the application + * can process them, so the messages don't pile up in memory. 
+ */ +@property BOOL readPaused; + +/** + * Send a message over the websocket + * + * @param message an instance of NSData or NSString to send + */ +- (void)send:(id)message; + +/** + * Send a ping over the websocket + * + * @param pingData data to include with the ping + * @param handler optional callback handler when the corrosponding pong is received + */ +- (void)ping:(NSData *)pingData handler:(void (^)(NSData *pongData))handler; + + +/** + * Close the websocket will default to code 1000 and nil reason + */ +- (void)close; + +/** + * Close the websocket with a specific code and/or reason + * + * @param code close code reason + * @param reason short textual reason why the connection was closed + */ +- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason; + +#pragma mark - Stream Properties + +/** + * Copy a property from the streams this websocket is backed by + * + * @param key property key - see kCFStreamProperty constants + * + * @return property value + */ +- (CFTypeRef)copyStreamPropertyForKey:(NSString *)key; + +/** + * Set a property on the streams this websocket is backed by. Calling this + * method once the websocket has been opened will raise an exception. + * + * @param property property value + * @param key property key - see kCFStreamProperty constants + */ +- (void)setStreamProperty:(CFTypeRef)property forKey:(NSString *)key; + +@end diff --git a/lib/psocket/headers/PSWebSocketServer.h b/lib/psocket/headers/PSWebSocketServer.h new file mode 100644 index 00000000..871037a3 --- /dev/null +++ b/lib/psocket/headers/PSWebSocketServer.h @@ -0,0 +1,65 @@ +// Copyright 2014 Zwopple Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import +#import "PSWebSocket.h" + +@class PSWebSocketServer; + +@protocol PSWebSocketServerDelegate + +@required + +- (void)serverDidStart:(PSWebSocketServer *)server; +- (void)server:(PSWebSocketServer *)server didFailWithError:(NSError *)error; +- (void)serverDidStop:(PSWebSocketServer *)server; + +- (void)server:(PSWebSocketServer *)server webSocketDidOpen:(PSWebSocket *)webSocket; +- (void)server:(PSWebSocketServer *)server webSocket:(PSWebSocket *)webSocket didReceiveMessage:(id)message; +- (void)server:(PSWebSocketServer *)server webSocket:(PSWebSocket *)webSocket didFailWithError:(NSError *)error; +- (void)server:(PSWebSocketServer *)server webSocket:(PSWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean; + +@optional +// Delegate may implement either one of these; variant with response is preferred: +- (BOOL)server:(PSWebSocketServer *)server + acceptWebSocketWithRequest:(NSURLRequest *)request; +- (BOOL)server:(PSWebSocketServer *)server + acceptWebSocketFrom:(NSData*)address + withRequest:(NSURLRequest *)request + trust:(SecTrustRef)trust + response:(NSHTTPURLResponse **)response; + +- (void)server:(PSWebSocketServer *)server webSocketIsHungry:(PSWebSocket *)webSocket; +@end + +@interface PSWebSocketServer : NSObject + +#pragma mark - Properties + +@property (nonatomic, weak) id delegate; +@property (nonatomic, strong) dispatch_queue_t delegateQueue; +@property (readonly) uint16_t realPort; + +#pragma mark - Initialization + ++ (instancetype)serverWithHost:(NSString *)host port:(NSUInteger)port; ++ (instancetype)serverWithHost:(NSString *)host port:(NSUInteger)port SSLCertificates:(NSArray *)SSLCertificates; + +#pragma mark - Actions + +- (void)start; +- (void)stop; +- (void)setTcpNoDelay:(BOOL)on; + +@end diff --git a/lib/psocket/headers/PSWebSocketTypes.h b/lib/psocket/headers/PSWebSocketTypes.h new file mode 100644 index 00000000..406b3353 --- /dev/null +++ b/lib/psocket/headers/PSWebSocketTypes.h @@ -0,0 +1,47 @@ +// Copyright 2014 Zwopple Limited +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import + +typedef NS_ENUM(NSInteger, PSWebSocketMode) { + PSWebSocketModeClient = 0, + PSWebSocketModeServer +}; + +typedef NS_ENUM(NSInteger, PSWebSocketErrorCodes) { + PSWebSocketErrorCodeUnknown = 0, + PSWebSocketErrorCodeTimedOut, + PSWebSocketErrorCodeHandshakeFailed, + PSWebSocketErrorCodeConnectionFailed +}; + +typedef NS_ENUM(NSInteger, PSWebSocketStatusCode) { + PSWebSocketStatusCodeNormal = 1000, + PSWebSocketStatusCodeGoingAway = 1001, + PSWebSocketStatusCodeProtocolError = 1002, + PSWebSocketStatusCodeUnhandledType = 1003, + // 1004 reserved + PSWebSocketStatusCodeNoStatusReceived = 1005, + // 1006 reserved + PSWebSocketStatusCodeInvalidUTF8 = 1007, + PSWebSocketStatusCodePolicyViolated = 1008, + PSWebSocketStatusCodeMessageTooBig = 1009 +}; + +#define PSWebSocketGUID @"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" +#define PSWebSocketErrorDomain @"PSWebSocketErrorDomain" + +// NSError userInfo keys, used with PSWebSocketErrorCodeHandshakeFailed: +#define PSHTTPStatusErrorKey @"HTTPStatus" // The HTTP status (404, etc.) +#define PSHTTPResponseErrorKey @"HTTPResponse" // The entire HTTP response as an CFHTTPMessageRef diff --git a/lib/psocket/libPocketSocket.a b/lib/psocket/libPocketSocket.a new file mode 100644 index 00000000..91421aad Binary files /dev/null and b/lib/psocket/libPocketSocket.a differ diff --git a/plugin.xml b/plugin.xml index f56a9be9..6e7b524d 100644 --- a/plugin.xml +++ b/plugin.xml @@ -83,6 +83,13 @@ + + + + + + + @@ -101,6 +108,13 @@ + + + + + + + diff --git a/src/PluginInterface.swift b/src/PluginInterface.swift new file mode 100644 index 00000000..700ed6fc --- /dev/null +++ b/src/PluginInterface.swift @@ -0,0 +1,226 @@ +// Copyright (c) 2015 Stefan van den Oord. All rights reserved. + +import Foundation + +/** + * This class represents a network interface in your system. For example, `en0` with a certain IP address. + * It is a wrapper around the `getifaddrs` system call. + * + * Typical use of this class is to first call `Interface.allInterfaces()` and then use the properties of the interface(s) that you need. + * + * - See: `/usr/include/ifaddrs.h` + */ +open class PluginInterface : CustomStringConvertible, CustomDebugStringConvertible { + + /// The network interface family (IPv4 or IPv6). + public enum Family : Int { + /// IPv4. + case ipv4 + + /// IPv6. + case ipv6 + + /// Used in case of errors. + case other + + /// String representation of the address family. + public func toString() -> String { + switch (self) { + case .ipv4: return "IPv4" + case .ipv6: return "IPv6" + default: return "other" + } + } + } + + /** + * Returns all network interfaces in your system. If you have an interface name (e.g. `en0`) that has + * multiple IP addresses (e.g. one IPv4 address and a few IPv6 addresses), then they will be returned + * as separate instances of Interface. + * - Returns: An array containing all network interfaces in your system. + */ + public static func allInterfaces() -> [PluginInterface] { + var interfaces : [PluginInterface] = [] + + var ifaddrsPtr : UnsafeMutablePointer? 
= nil + if getifaddrs(&ifaddrsPtr) == 0 { + var ifaddrPtr = ifaddrsPtr + while ifaddrPtr != nil { + let addr = ifaddrPtr?.pointee.ifa_addr.pointee + if addr?.sa_family == UInt8(AF_INET) || addr?.sa_family == UInt8(AF_INET6) { + interfaces.append(PluginInterface(data: (ifaddrPtr?.pointee)!)) + } + ifaddrPtr = ifaddrPtr?.pointee.ifa_next + } + freeifaddrs(ifaddrsPtr) + } + + return interfaces + } + + /** + * Returns a new Interface instance that does not represent a real network interface, but can be used for (unit) testing. + * - Returns: An instance of Interface that does *not* represent a real network interface. + */ + public static func createTestDummy(_ name:String, family:Family, address:String, multicastSupported:Bool, broadcastAddress:String?) -> PluginInterface + { + return PluginInterface(name: name, family: family, address: address, netmask: nil, running: true, up: true, loopback: false, multicastSupported: multicastSupported, broadcastAddress: broadcastAddress) + } + + /** + * Initialize a new Interface with the given properties. + */ + public init(name:String, family:Family, address:String?, netmask:String?, running:Bool, up:Bool, loopback:Bool, multicastSupported:Bool, broadcastAddress:String?) { + self.name = name + self.family = family + self.address = address + self.netmask = netmask + self.running = running + self.up = up + self.loopback = loopback + self.multicastSupported = multicastSupported + self.broadcastAddress = broadcastAddress + } + + convenience init(data:ifaddrs) { + let flags = Int32(data.ifa_flags) + let broadcastValid : Bool = ((flags & IFF_BROADCAST) == IFF_BROADCAST) + self.init(name: String(cString: data.ifa_name), + family: PluginInterface.extractFamily(data), + address: PluginInterface.extractAddress(data.ifa_addr.pointee), + netmask: PluginInterface.extractAddress(data.ifa_netmask.pointee), + running: ((flags & IFF_RUNNING) == IFF_RUNNING), + up: ((flags & IFF_UP) == IFF_UP), + loopback: ((flags & IFF_LOOPBACK) == IFF_LOOPBACK), + multicastSupported: ((flags & IFF_MULTICAST) == IFF_MULTICAST), + broadcastAddress: ((broadcastValid && data.ifa_dstaddr != nil) ? PluginInterface.extractAddress(data.ifa_dstaddr.pointee) : nil)) + } + + fileprivate static func extractFamily(_ data:ifaddrs) -> Family { + var family : Family = .other + let addr = data.ifa_addr.pointee + if addr.sa_family == UInt8(AF_INET) { + family = .ipv4 + } + else if addr.sa_family == UInt8(AF_INET6) { + family = .ipv6 + } + else { + family = .other + } + return family + } + + fileprivate static func extractAddress(_ address:sockaddr) -> String? { + if (address.sa_family == sa_family_t(AF_INET)) { + return extractAddress_ipv4(address) + } + else if (address.sa_family == sa_family_t(AF_INET6)) { + return extractAddress_ipv6(address) + } + else { + return nil + } + } + + fileprivate static func extractAddress_ipv4(_ address:sockaddr) -> String? { + var addr = address + var address : String? = nil + var hostname = [CChar](repeating: 0, count: Int(2049)) + if (getnameinfo(&addr, socklen_t(addr.sa_len), &hostname, + socklen_t(hostname.count), nil, socklen_t(0), NI_NUMERICHOST) == 0) { + address = String(cString: hostname) + } + else { +// var error = String.fromCString(gai_strerror(errno))! +// println("ERROR: \(error)") + } + return address + } + + fileprivate static func extractAddress_ipv6(_ address:sockaddr) -> String? 
{ + var addr = address + var ip : [Int8] = [Int8](repeating: Int8(0), count: Int(INET6_ADDRSTRLEN)) + return inetNtoP(&addr, ip: &ip) + } + + fileprivate static func inetNtoP(_ addr:UnsafeMutablePointer, ip:UnsafeMutablePointer) -> String? { + let addr6 = withUnsafeMutablePointer(to: &addr.pointee) { + $0.withMemoryRebound(to: sockaddr_in6.self, capacity: 1) { + UnsafeMutableRawPointer(&$0.pointee.sin6_addr) + } + } + let conversion:UnsafePointer = inet_ntop(AF_INET6, addr6, ip, socklen_t(INET6_ADDRSTRLEN)) + let s = String(cString: conversion) + return s + } + + /** + * Creates the network format representation of the interface's IP address. Wraps `inet_pton`. + */ + open var addressBytes: [UInt8]? { + guard let addr = address else { return nil } + + let af:Int32 + let len:Int + switch family { + case .ipv4: + af = AF_INET + len = 4 + case .ipv6: + af = AF_INET6 + len = 16 + default: + return nil + } + var bytes = [UInt8](repeating: 0, count: len) + let result = inet_pton(af, addr, &bytes) + return ( result == 1 ) ? bytes : nil + } + + /// `IFF_RUNNING` flag of `ifaddrs->ifa_flags`. + open var isRunning: Bool { return running } + + /// `IFF_UP` flag of `ifaddrs->ifa_flags`. + open var isUp: Bool { return up } + + /// `IFF_LOOPBACK` flag of `ifaddrs->ifa_flags`. + open var isLoopback: Bool { return loopback } + + /// `IFF_MULTICAST` flag of `ifaddrs->ifa_flags`. + open var supportsMulticast: Bool { return multicastSupported } + + /// Field `ifaddrs->ifa_name`. + public let name : String + + /// Field `ifaddrs->ifa_addr->sa_family`. + public let family : Family + + /// Extracted from `ifaddrs->ifa_addr`, supports both IPv4 and IPv6. + public let address : String? + + /// Extracted from `ifaddrs->ifa_netmask`, supports both IPv4 and IPv6. + public let netmask : String? + + /// Extracted from `ifaddrs->ifa_dstaddr`. Not applicable for IPv6. + public let broadcastAddress : String? + + fileprivate let running : Bool + fileprivate let up : Bool + fileprivate let loopback : Bool + fileprivate let multicastSupported : Bool + + /// Returns the interface name. + open var description: String { return name } + + /// Returns a string containing a few properties of the Interface. + open var debugDescription: String { + var s = "Interface name:\(name) family:\(family)" + if let ip = address { + s += " ip:\(ip)" + } + s += isUp ? " (up)" : " (down)" + s += isRunning ? " (running)" : "(not running)" + return s + } +} diff --git a/src/PluginMediaStreamRenderer.swift b/src/PluginMediaStreamRenderer.swift index ac4d828f..f3e1cd0a 100644 --- a/src/PluginMediaStreamRenderer.swift +++ b/src/PluginMediaStreamRenderer.swift @@ -1,22 +1,27 @@ import Foundation import AVFoundation -class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { +class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate, RTCVideoRenderer { var id: String var eventListener: (_ data: NSDictionary) -> Void var closed: Bool + var servicePort: Int + var cbData: (_ uuid: String, _ data: NSData?) -> Void + var webView: UIView - var elementView: UIView + var elementView: UIView? var pluginMediaStream: PluginMediaStream? - var videoView: RTCEAGLVideoView + var videoView: RTCEAGLVideoView? var rtcAudioTrack: RTCAudioTrack? var rtcVideoTrack: RTCVideoTrack? var pluginVideoTrack: PluginMediaStreamTrack? init( + servicePort: Int, + cbData: @escaping (_ uuid:String, _ data: NSData?) 
-> Void, webView: UIView, eventListener: @escaping (_ data: NSDictionary) -> Void ) { @@ -27,9 +32,16 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { self.closed = false // The browser HTML view. + self.servicePort = servicePort + self.cbData = cbData self.webView = webView self.eventListener = eventListener + if (self.servicePort > 0) { + // will render in canvas over websocket + return; + } + let useManualLayoutRenderer = Bundle.main.object(forInfoDictionaryKey: "UseManualLayoutRenderer") as? Bool ?? false // The video element view. @@ -41,7 +53,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { // The effective video view in which the the video stream is shown. // It's placed over the elementView. self.videoView = RTCEAGLVideoView() - self.videoView.isUserInteractionEnabled = false + self.videoView?.isUserInteractionEnabled = false self.elementView.isUserInteractionEnabled = false self.elementView.isHidden = true @@ -51,13 +63,14 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { self.elementView.translatesAutoresizingMaskIntoConstraints = false // Place the video element view inside the WebView's superview - self.webView.addSubview(self.elementView) + self.webView.addSubview(view) self.webView.isOpaque = false self.webView.backgroundColor = UIColor.clear // https://stackoverflow.com/questions/46317061/use-safe-area-layout-programmatically // https://developer.apple.com/documentation/uikit/uiview/2891102-safearealayoutguide // https://developer.apple.com/documentation/uikit/ + let view = self.elementView; if !useManualLayoutRenderer { if #available(iOS 11.0, *) { @@ -81,7 +94,18 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { func run() { NSLog("PluginMediaStreamRenderer#run()") - self.videoView.delegate = self + if (self.videoView != nil) { + self.videoView?.delegate = self + } else { + self.eventListener([ + "type": "videowebsocket", + "action": "run", + "ws" : [ + "uuid": self.id, + "port": self.servicePort + ] + ]) + } } func render(_ pluginMediaStream: PluginMediaStream) { @@ -106,7 +130,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { break } - + if self.rtcVideoTrack != nil { self.rtcVideoTrack!.add(self.videoView) self.pluginVideoTrack?.registerRender(render: self) @@ -122,11 +146,11 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { let oldPluginVideoTrack: PluginMediaStreamTrack? = self.pluginVideoTrack let oldRtcVideoTrack: RTCVideoTrack? = self.rtcVideoTrack - + self.rtcAudioTrack = nil self.rtcVideoTrack = nil self.pluginVideoTrack = nil - + // Take the first audio track. for (_, track) in self.pluginMediaStream!.audioTracks { self.rtcAudioTrack = track.rtcMediaStreamTrack as? RTCAudioTrack @@ -140,6 +164,8 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { break } + let view = getVideoView(); + // If same video track as before do nothing. 
if oldRtcVideoTrack != nil && self.rtcVideoTrack != nil && oldRtcVideoTrack!.trackId == self.rtcVideoTrack!.trackId { @@ -150,6 +176,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { else if oldRtcVideoTrack != nil && self.rtcVideoTrack != nil && oldRtcVideoTrack!.trackId != self.rtcVideoTrack!.trackId { NSLog("PluginMediaStreamRenderer#mediaStreamChanged() | has a new video track") + oldPluginVideoTrack?.unregisterRender(render: self) oldRtcVideoTrack!.remove(self.videoView) self.pluginVideoTrack?.registerRender(render: self) @@ -159,6 +186,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { // Did not have video but now it has. else if oldRtcVideoTrack == nil && self.rtcVideoTrack != nil { NSLog("PluginMediaStreamRenderer#mediaStreamChanged() | video track added") + if oldPluginVideoTrack != nil{ oldPluginVideoTrack?.unregisterRender(render: self) } @@ -169,12 +197,17 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { // Had video but now it has not. else if oldRtcVideoTrack != nil && self.rtcVideoTrack == nil { NSLog("PluginMediaStreamRenderer#mediaStreamChanged() | video track removed") + oldPluginVideoTrack?.unregisterRender(render: self) oldRtcVideoTrack!.remove(self.videoView) } } func refresh(_ data: NSDictionary) { + if (self.elementView == nil) { + return; + } + let view = self.elementView! let elementLeft = data.object(forKey: "elementLeft") as? Double ?? 0 let elementTop = data.object(forKey: "elementTop") as? Double ?? 0 @@ -198,7 +231,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { let videoViewLeft: Double = (elementWidth - videoViewWidth) / 2 let videoViewTop: Double = (elementHeight - videoViewHeight) / 2 - self.elementView.frame = CGRect( + view.frame = CGRect( x: CGFloat(elementLeft), y: CGFloat(elementTop), width: CGFloat(elementWidth), @@ -209,12 +242,12 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { if videoViewWidth == 0 || videoViewHeight == 0 { videoViewWidth = 1 videoViewHeight = 1 - self.videoView.isHidden = true + self.videoView?.isHidden = true } else { - self.videoView.isHidden = false + self.videoView?.isHidden = false } - self.videoView.frame = CGRect( + self.videoView?.frame = CGRect( x: CGFloat(videoViewLeft), y: CGFloat(videoViewTop), width: CGFloat(videoViewWidth), @@ -222,66 +255,73 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { ) if visible { - self.elementView.isHidden = false + view.isHidden = false } else { - self.elementView.isHidden = true + view.isHidden = true } - self.elementView.alpha = CGFloat(opacity) - self.elementView.layer.zPosition = CGFloat(zIndex) + view.alpha = CGFloat(opacity) + view.layer.zPosition = CGFloat(zIndex) // if the zIndex is 0 (the default) bring the view to the top, last one wins if zIndex == 0 { - self.webView.bringSubviewToFront(self.elementView) + self.webView.bringSubviewToFront(view) //self.webView?.bringSubview(toFront: self.elementView) } if !mirrored { - self.elementView.transform = CGAffineTransform.identity + view.transform = CGAffineTransform.identity } else { - self.elementView.transform = CGAffineTransform(scaleX: -1.0, y: 1.0) + view.transform = CGAffineTransform(scaleX: -1.0, y: 1.0) } if clip { - self.elementView.clipsToBounds = true + view.clipsToBounds = true } else { - self.elementView.clipsToBounds = false + view.clipsToBounds = false } - self.elementView.layer.cornerRadius = CGFloat(borderRadius) + view.layer.cornerRadius = CGFloat(borderRadius) 
let rgb = backgroundColor.components(separatedBy: ",").map{ CGFloat(($0 as NSString).floatValue) / 256.0 } let color = UIColor(red: rgb[0], green: rgb[1], blue: rgb[2], alpha: 1) - self.elementView.backgroundColor = color + view.backgroundColor = color } func save(callback: (_ data: String) -> Void, errback: (_ error: String) -> Void) { //NSLog("PluginMediaStreamRenderer#save()") - UIGraphicsBeginImageContextWithOptions(videoView.bounds.size, videoView.isOpaque, 0.0) - videoView.drawHierarchy(in: videoView.bounds, afterScreenUpdates: false) - let snapshotImageFromMyView = UIGraphicsGetImageFromCurrentImageContext() - UIGraphicsEndImageContext() - let imageData = snapshotImageFromMyView?.jpegData(compressionQuality: 1.0) - let strBase64 = imageData?.base64EncodedString(options: .lineLength64Characters) - - callback(strBase64!); + if (self.videoView != nil) { + let view = self.videoView! + UIGraphicsBeginImageContextWithOptions(view.bounds.size, view.isOpaque, 0.0) + view.drawHierarchy(in: view.bounds, afterScreenUpdates: false) + let snapshotImageFromMyView = UIGraphicsGetImageFromCurrentImageContext() + UIGraphicsEndImageContext() + let imageData = snapshotImageFromMyView?.jpegData(compressionQuality: 1.0) + let strBase64 = imageData?.base64EncodedString(options: .lineLength64Characters) + + callback(strBase64!); + } } func stop() { NSLog("PluginMediaStreamRenderer | video stop") - self.eventListener([ - "type": "videostop" - ]) + if (self.videoView != nil) { + self.eventListener([ + "type": "videowebsocket", + "action": "stop" + ]) + } } func close() { NSLog("PluginMediaStreamRenderer#close()") self.closed = true self.reset() - self.elementView.removeFromSuperview() + self.elementView?.removeFromSuperview() } + /** * Private API. */ @@ -290,7 +330,7 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { NSLog("PluginMediaStreamRenderer#reset()") if self.rtcVideoTrack != nil { - self.rtcVideoTrack!.remove(self.videoView) + self.rtcVideoTrack!.remove(getVideoView()) } if self.pluginVideoTrack != nil { self.pluginVideoTrack?.unregisterRender(render: self) @@ -301,14 +341,20 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { self.rtcVideoTrack = nil } - /** - * Methods inherited from RTCEAGLVideoViewDelegate. - */ - - func videoView(_ videoView: RTCVideoRenderer, didChangeVideoSize size: CGSize) { + fileprivate func getVideoView() -> RTCVideoRenderer { + /** + * when current is canvas render, will use self(RTCVideoRenderer) as video view + */ + if (self.videoView != nil) { + return self.videoView! + } else { + return self + } + } + fileprivate func onVideoChanged(size: CGSize) { NSLog("PluginMediaStreamRenderer | video size changed [width:%@, height:%@]", - String(describing: size.width), String(describing: size.height)) + String(describing: size.width), String(describing: size.height)) self.eventListener([ "type": "videoresize", @@ -319,32 +365,76 @@ class PluginMediaStreamRenderer : NSObject, RTCEAGLVideoViewDelegate { ]) } - func videoView(_ videoView: RTCVideoRenderer, didChange frame: RTCVideoFrame?) 
{ - // TODO save from frame buffer instead of renderer - /* - let i420: RTCI420BufferProtocol = frame!.buffer.toI420() - let YPtr: UnsafePointer = i420.dataY - let UPtr: UnsafePointer = i420.dataU - let VPtr: UnsafePointer = i420.dataV - let YSize: Int = Int(frame!.width * frame!.height) - let USize: Int = Int(YSize / 4) - let VSize: Int = Int(YSize / 4) - var frameSize:Int32 = Int32(YSize + USize + VSize) - var width: Int16 = Int16(frame!.width) - var height: Int16 = Int16(frame!.height) + /** + * Methods inherited from RTCEAGLVideoViewDelegate. + */ + + func videoView(_ videoView: RTCVideoRenderer, didChangeVideoSize size: CGSize) { + onVideoChanged(size: size); + } + + /** + * Methods inherited from RTCVideoRenderer + */ + + func setSize(_ size: CGSize) { + onVideoChanged(size: size); + } + + func renderFrame(_ frame: RTCVideoFrame?) { + if (frame == nil) { + return + } + var rotation: Int16 = Int16(frame!.rotation.rawValue) - var timestamp: Int32 = Int32(frame!.timeStamp) + //var timestamp: UInt32 = UInt32(frame!.timeStamp) - // head + body - // head: type(2B)+len(4B)+width(2B)+height(2B)+rotation(2B)+timestamp(4B) - // body: data(len) - let headSize:Int32 = 16 - let dataSize:Int32 = headSize + frameSize - let pduData: NSMutableData? = NSMutableData(length: Int(dataSize)) + // format: head + body + // head: type(2B)+len(4B)+width(2B)+height(2B)+rotation(2B)+timestamp(4B) + // body: data(len) + let headSize: Int32 = 16 + var pduType: UInt16 = 0x2401 + + // copy buffer + let i420: RTCI420BufferProtocol = frame!.buffer.toI420() + var width: Int16 = Int16(i420.width); + var height: Int16 = Int16(i420.height); + var frameSize: Int32 = Int32(width) * Int32(height) * 3 / 2; + + let pduData: NSMutableData? = NSMutableData(length: Int(headSize + frameSize)) + let headPtr: UnsafeMutableRawPointer = pduData!.mutableBytes + let YPtr = headPtr + Int(headSize); + let UPtr = YPtr + Int(i420.width*i420.height); + let VPtr = UPtr + Int(i420.chromaWidth*i420.chromaHeight); + + // copy Y: e.g, width(640),height(480),strideY(704) + if (i420.width != i420.strideY) { + for y in 0.. WSClient? { + for (_, client) in clients { + if (client.sock == sock) { + return client + } + } + return nil + } + + public func start(lport:Int, tcpNoDelay:Bool?) { + NSLog("WebSocketServer: start") + + if didStopOrDidFail { + wsserver = nil + didStopOrDidFail = false + } + + if wsserver != nil { + NSLog("Server already running") + return + } + + port = lport + realport = 0 + if port < 0 || port > 65535 { + NSLog("Port number error"); + return + } + + if let server = PSWebSocketServer(host: nil, port: UInt(port)) { + wsserver = server + server.delegate = self + + if tcpNoDelay != nil { + server.setTcpNoDelay(tcpNoDelay!) + } + + server.start() + } + } + + public func isStarted() -> Bool { + if (didStopOrDidFail || self.wsserver == nil) { + return false + } else { + return true + } + } + + @objc public func stop() { + NSLog("WebSocketServer: stop") + + if didStopOrDidFail { + wsserver = nil + didStopOrDidFail = false + } + + if wsserver == nil { + NSLog("Server is not running"); + return + } + + if let server = wsserver { + server.stop() + } + } + + @objc public func send(uuid: String?, msg: NSData?) { + //NSLog("WebSocketServer: send") + + if uuid != nil && msg != nil { + if let client = clients[uuid!] 
{ + client.sock.send(msg) + } else { + //NSLog("WebSocketServer: Send: unknown socket.") + } + } else { + NSLog("WebSocketServer: Send: UUID or msg not specified.") + } + } + + @objc public func close(uuid: String?, code: Int, reason: String?) { + NSLog("WebSocketServer: close") + + if uuid != nil { + if let client = clients[uuid!] { + if (code == -1) { + client.sock.close() + } else { + client.sock.close(withCode: code, reason: reason) + } + } else { + //NSLog("WebSocketServer: Close: unknown socket.") + } + } else { + NSLog("WebSocketServer: Close: UUID not specified.") + } + } + + + /** Events from PSWebSocketServerDelegate + */ + + public func serverDidStart(_ server: PSWebSocketServer!) { + NSLog("WebSocketServer: Server did start…") + + realport = Int(server.realPort) + + let status: NSDictionary = NSDictionary( + objects: ["0.0.0.0", Int(server.realPort)], + forKeys: ["addr" as NSCopying, "port" as NSCopying]) + NSLog("WebSocketServer: Server did start:%@", status) + } + + public func serverDidStop(_ server: PSWebSocketServer!) { + NSLog("WebSocketServer: Server did stop…") + + didStopOrDidFail = true + clients.removeAll() + + let status: NSDictionary = NSDictionary( + objects: ["0.0.0.0", Int(server.realPort)], + forKeys: ["addr" as NSCopying, "port" as NSCopying]) + NSLog("WebSocketServer: Server did stop:%@", status) + } + + public func server(_ server: PSWebSocketServer!, didFailWithError error: Error!) { + NSLog("WebSocketServer: Server did fail with error:%@", error.localizedDescription) + + // normally already stopped. just making sure! + wsserver?.stop() + didStopOrDidFail = true + clients.removeAll(); + + let status: NSDictionary = NSDictionary( + objects: ["onFailure", "0.0.0.0", port, error.localizedDescription], + forKeys: ["action" as NSCopying, "addr" as NSCopying, "port" as NSCopying, "reason" as String as NSCopying]) + NSLog("WebSocketServer: Server did fail with error:%@", status) + } + + public func server(_ server: PSWebSocketServer!, acceptWebSocketFrom address: Data, with request: URLRequest, + trust: SecTrust, response: AutoreleasingUnsafeMutablePointer) -> Bool { + NSLog("WebSocketServer: Server should accept request:%@, %@", + request.description, request.allHTTPHeaderFields!) + + if let o = origins { + let origin = request.value(forHTTPHeaderField: "Origin") + if o.index(of: origin!) == nil { + NSLog("WebSocketServer: Invalid Origin:%@", origin!) + //return false + } + } + + if let _ = protocols { + if let acceptedProtocol = getAcceptedProtocol(request) { + let headerFields = [ + "Sec-WebSocket-Protocol" : acceptedProtocol, + "Sec-WebSocket-Extensions" : "permessage-deflate" + ] + let r = HTTPURLResponse.init( + url: request.url!, statusCode: 200, httpVersion: "1.1", headerFields: headerFields)! + response.pointee = r + } else { + if let secWebSocketProtocol = request.value(forHTTPHeaderField: "Sec-WebSocket-Protocol") { + NSLog("WebSocketServer: Sec-WebSocket-Protocol denied:%@", secWebSocketProtocol) + return false + }else { + NSLog("WebSocketServer: Sec-WebSocket-Protocol not exist") + } + } + } + + return true + } + + public func server(_ server: PSWebSocketServer!, webSocketDidOpen webSocket: PSWebSocket!) { + NSLog("WebSocketServer: WebSocket did open") + + // clean previously closed sockets + var closeds: [String] = [] + for (key, client) in clients { + if (client.didclose) { + closeds.append(key) + } + } + for key in closeds { + clients.removeValue(forKey: key) + } + + let remoteAddr = webSocket.remoteHost! 
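In the webSocketDidOpen handler that continues below, the server recovers the renderer id from the `uuid=` query parameter that the JS side appends when it opens `ws://localhost:<port>?uuid=<id>`, and uses it as the key of the client table so frames can later be pushed with send(uuid:msg:). A small JS sketch of the same query parsing, shown only to make the contract explicit (the helper name is hypothetical; the Swift code does the equivalent with a substring search):

```javascript
// Hypothetical mirror of the uuid lookup done in webSocketDidOpen():
// given a request URL such as "ws://localhost:12345/?uuid=1234567" return
// "1234567", or null when no uuid query parameter is present.
function extractUuid(requestUrl) {
	var queryIndex = requestUrl.indexOf('?');
	if (queryIndex === -1) {
		return null;
	}
	var pairs = requestUrl.slice(queryIndex + 1).split('&');
	for (var i = 0; i < pairs.length; i++) {
		var kv = pairs[i].split('=');
		if (kv[0] === 'uuid' && kv[1]) {
			return decodeURIComponent(kv[1]);
		}
	}
	return null;
}
```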
+ + var acceptedProtocol = "" + if let _ = protocols { + if let proto = getAcceptedProtocol(webSocket.urlRequest) { + acceptedProtocol = proto + } + } + + var uuid: String! + var resource = "" + if (webSocket.urlRequest.url!.query != nil) { + resource = String(cString: (webSocket.urlRequest.url!.query?.cString(using: String.Encoding.utf8))! ) + let pos:Range? = resource.range(of: "uuid=") + if (pos != nil) { + //let index = resource.index(after: pos!.upperBound) + uuid = resource.substring(from: pos!.upperBound) + } + } + if uuid == nil || clients[uuid] != nil { + NSLog("uuid is null or exists, uuid=%@", uuid) + // prevent collision + uuid = UUID().uuidString + } + + let client = WSClient(uuid:uuid, sock: webSocket) + client.raddr = remoteAddr + client.protocols = acceptedProtocol + client.resource = resource + clients[uuid] = client + NSLog("WebSocketServer: WebSocket did open, uuid:%@, resource:%@", uuid, resource) + } + + public func server(_ server: PSWebSocketServer!, webSocket: PSWebSocket!, didReceiveMessage message: Any) { + NSLog("WebSocketServer: Websocket did receive message") + + if let client = getClient(sock: webSocket) { + let uuid: String = client.uuid + NSLog("WebSocketServer: Websocket did receive message from:%@", uuid) + } else { + NSLog("WebSocketServer: unknown socket") + } + } + + public func server(_ server: PSWebSocketServer!, webSocket: PSWebSocket!, didCloseWithCode code: Int, reason: String, wasClean: Bool) { + //NSLog("WebSocketServer: WebSocket did close with code: %@, reason: %@, wasClean: %@", code, reason, wasClean) + + if let client = getClient(sock: webSocket) { + client.didclose = true + let uuid: String = client.uuid + let status: NSDictionary = NSDictionary( + objects: ["onClose", uuid, code, reason, wasClean], + forKeys: ["action" as NSCopying, "uuid" as NSCopying, "code" as NSCopying, "reason" as NSCopying, + "wasClean" as NSCopying]) + NSLog("WebSocketServer: WebSocket did close:%@", status) + } else { + NSLog("WebSocketServer: unknown socket") + } + } + + public func server(_ server: PSWebSocketServer!, webSocket: PSWebSocket!, didFailWithError error: Error!) { + NSLog("WebSocketServer: WebSocket did fail with error:%@", error!.localizedDescription) + if (webSocket.readyState == PSWebSocketReadyState.open) { + webSocket.close(withCode: 1011, reason: "") + } + } + + fileprivate func getAcceptedProtocol(_ request: URLRequest) -> String? { + var acceptedProtocol: String? + if let secWebSocketProtocol = request.value(forHTTPHeaderField: "Sec-WebSocket-Protocol") { + let requestedProtocols = secWebSocketProtocol.components(separatedBy: ", ") + for requestedProtocol in requestedProtocols { + if protocols!.index(of: requestedProtocol) != nil { + // returns first matching protocol. + // assumes in order of preference. + acceptedProtocol = requestedProtocol + break + } + } + + NSLog("WebSocketServer: Sec-WebSocket-Protocol:%@", secWebSocketProtocol) + NSLog("WebSocketServer: Accepted Protocol:%@", acceptedProtocol!) 
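getAcceptedProtocol() only comes into play when the browser offers subprotocols in the handshake; the canvas renderer in this patch does not, but for completeness this is how a client would, using the standard second WebSocket constructor argument (the URL, uuid and 'binary' name are example values only):

```javascript
// A client offers subprotocols as the second WebSocket() argument; the server
// echoes the one it accepts back in the Sec-WebSocket-Protocol response header.
var ws = new WebSocket('ws://localhost:12345/?uuid=1234567', ['binary']);
ws.binaryType = 'arraybuffer';
ws.onopen = function () {
	// ws.protocol holds the subprotocol the server accepted ('' if none).
	console.log('accepted protocol: ' + ws.protocol);
};
```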
+ } + return acceptedProtocol + } +} + diff --git a/src/cordova-plugin-iosrtc-Bridging-Header.h b/src/cordova-plugin-iosrtc-Bridging-Header.h index 661de540..64c4f648 100644 --- a/src/cordova-plugin-iosrtc-Bridging-Header.h +++ b/src/cordova-plugin-iosrtc-Bridging-Header.h @@ -25,6 +25,10 @@ #import #import +#import "PSWebSocket.h" +#import "PSWebSocketServer.h" +#include + // Expose RTCEAGLVideoViewDelegate to Swift RTC_OBJC_EXPORT @protocol RTCEAGLVideoViewDelegate diff --git a/src/iosrtcPlugin.swift b/src/iosrtcPlugin.swift index 8bd1e4c0..4720dc8a 100644 --- a/src/iosrtcPlugin.swift +++ b/src/iosrtcPlugin.swift @@ -20,6 +20,9 @@ class iosrtcPlugin : CDVPlugin { var queue: DispatchQueue! // Auto selecting output speaker var audioOutputController: PluginRTCAudioController! + // Single PluginWebSocketServer instance. + var webSocketServer: PluginWebSocketServer? + // This is just called if in plugin.xml. @@ -50,6 +53,13 @@ class iosrtcPlugin : CDVPlugin { // Create a PluginRTCAudioController instance. self.audioOutputController = PluginRTCAudioController() + + // Create a PluginWebSocketServer instance. + self.webSocketServer = PluginWebSocketServer() + + // Start server here for testing. + // (there have interfaces start/stopCanvasServer which can be called from js) + startWebSocketServer(listenPort: 12345) } private func initPeerConnectionFactory() { @@ -74,6 +84,17 @@ class iosrtcPlugin : CDVPlugin { } } + private func startWebSocketServer(listenPort: Int) { + let started = self.webSocketServer?.isStarted() ?? false + if (!started) { + self.webSocketServer?.start(lport: listenPort, tcpNoDelay: true) + } + } + + private func stopWebSocketServer() { + self.webSocketServer?.stop() + } + @objc(onReset) override func onReset() { NSLog("iosrtcPlugin#onReset() | cleanup") cleanup(); @@ -82,6 +103,7 @@ class iosrtcPlugin : CDVPlugin { @objc(onAppTerminate) override func onAppTerminate() { NSLog("iosrtcPlugin#onAppTerminate() | cleanup") cleanup(); + stopWebSocketServer(); } @objc(new_RTCPeerConnection:) func new_RTCPeerConnection(_ command: CDVInvokedUrlCommand) { @@ -1053,8 +1075,19 @@ class iosrtcPlugin : CDVPlugin { NSLog("iosrtcPlugin#new_MediaStreamRenderer()") let id = command.argument(at: 0) as! Int + let isCanvas = command.argument(at: 1) as! Bool + var lport = 0 + if (isCanvas) { + lport = self.webSocketServer?.realport ?? 0 + } + + NSLog("iosrtcPlugin#new_MediaStreamRenderer(), isCanvas:%d", isCanvas) let pluginMediaStreamRenderer = PluginMediaStreamRenderer( + servicePort: lport, + cbData: { (uuid: String, data: NSData?) -> Void in + self.webSocketServer?.send(uuid: uuid, msg: data) + }, webView: self.webView!, eventListener: { (data: NSDictionary) -> Void in let result = CDVPluginResult( @@ -1171,10 +1204,21 @@ class iosrtcPlugin : CDVPlugin { pluginMediaStreamRenderer!.close() + self.webSocketServer?.close(uuid: pluginMediaStreamRenderer?.id, code: -1, reason: "active close") + // Remove from the dictionary. self.pluginMediaStreamRenderers[id] = nil } + @objc(startWebSocketServer:) func startCanvasServer(_ command: CDVInvokedUrlCommand) { + let lport = command.argument(at: 0) as! 
Int + startWebSocketServer(listenPort:lport) + } + + @objc(stopWebSocketServer:) func stopCanvasServer(_ command: CDVInvokedUrlCommand) { + stopWebSocketServer() + } + @objc(getUserMedia:) func getUserMedia(_ command: CDVInvokedUrlCommand) { NSLog("iosrtcPlugin#getUserMedia()") diff --git a/www/cordova-plugin-iosrtc.js b/www/cordova-plugin-iosrtc.js index 3a80074d..8da6e1fb 100644 --- a/www/cordova-plugin-iosrtc.js +++ b/www/cordova-plugin-iosrtc.js @@ -747,6 +747,26 @@ function MediaStreamRenderer(element) { this.videoWidth = undefined; this.videoHeight = undefined; + // Support canvas to render + this.videoStopped = false; + this.webSocketUrl = undefined; + this.webSocketClient = undefined; + this.canvasCtx = undefined; + this.canvasId = (element.id || Date.now()) + '__canvas'; + + var hasCanvas = false; + var canvasElement = document.getElementById(this.canvasId); + debug('render() [canvas-id:%s, ele:%o]', this.canvasId, canvasElement); + if (!canvasElement) { + canvasElement = document.createElement('canvas'); + canvasElement.id = this.canvasId; + element.parentNode.insertBefore(canvasElement, element.nextSibling); + } + if (canvasElement) { + hasCanvas = true; + this.canvasCtx = createCanvasContext(canvasElement); + } + // Private attributes. this.id = randomNumber(); @@ -754,7 +774,7 @@ function MediaStreamRenderer(element) { onEvent.call(self, data); } - exec(onResultOK, null, 'iosrtcPlugin', 'new_MediaStreamRenderer', [this.id]); + exec(onResultOK, null, 'iosrtcPlugin', 'new_MediaStreamRenderer', [this.id, hasCanvas]); this.refresh(); @@ -779,6 +799,7 @@ MediaStreamRenderer.prototype.render = function (stream) { } self.stream = stream; + self.videoStopped = false; exec(null, null, 'iosrtcPlugin', 'MediaStreamRenderer_render', [self.id, stream.id]); @@ -1081,6 +1102,16 @@ MediaStreamRenderer.prototype.close = function () { } this.stream = undefined; + if (this.webSocketClient) { + this.webSocketClient.close(); + this.webSocketClient = undefined; + } + + if (this.canvasCtx) { + this.canvasCtx.fillBlack(); + this.canvasCtx = undefined; + } + exec(null, null, 'iosrtcPlugin', 'MediaStreamRenderer_close', [this.id]); if (this.refreshInterval) { clearInterval(this.refreshInterval); @@ -1088,6 +1119,327 @@ MediaStreamRenderer.prototype.close = function () { } }; +MediaStreamRenderer.prototype.openWebSocket = function (host, port, uuid) { + if (!this.canvasCtx) { + debug('websocket no canvas context'); + return; + } + + var self = this; + this.webSocketUrl = 'ws://' + host + ':' + port + '?uuid=' + uuid; + debug('websocket url=' + this.webSocketUrl); + + this.webSocketClient = new window.WebSocket(this.webSocketUrl); + this.webSocketClient.binaryType = 'arraybuffer'; + this.webSocketClient.onopen = function () { + debug('websocket open for uuid:' + uuid); + }; + this.webSocketClient.onerror = function (event) { + var errorStr = JSON.stringify(event, null, 4); + debug('websocket error for uuid:' + uuid + ', error:' + errorStr); + }; + this.webSocketClient.onclose = function (event) { + var errorStr = JSON.stringify(event, null, 4); + debug('websocket close for uuid:' + uuid + ', error:' + errorStr); + }; + this.webSocketClient.onmessage = function (event) { + //debug('websocket message uuid:' + uuid + ', length:' + event.data.length); + if (!self.stream) { + return; + } + if (self.videoStopped) { + return; + } + + // data format: 16B-head + body + // head: type(2B) + len(4B) + width(2B) + height(2B) + rotation(2B) + timestamp(4B) + // body: len + var headLen = 16; + var pdu = new 
DataView(event.data); + var pduLen = pdu.byteLength; + if (pduLen < headLen) { + return; + } + + // parse head + //var pduType = pdu.getUint16(0); + var bodyLen = pdu.getUint32(2, true); + var width = pdu.getUint16(6, true); + var height = pdu.getUint16(8, true); + //var rotation = pdu.getUint16(10, true); + //var timestamp = pdu.getUint32(12, true); + //debug('websocket message: body='+bodyLen+',resloution='+width+'x'+height+",size="+pduLen); + if (pduLen !== headLen + bodyLen) { + debug('websocket message, wrong data length'); + } else { + var typedArray = new Uint8Array(event.data); + var frame = typedArray.subarray(headLen, headLen + bodyLen); + self.drawFrame(frame, width, height); + } + }; +}; + +MediaStreamRenderer.prototype.drawFrame = function (frame, width, height) { + //debug('drawFrame canvas, length=' + frame.length); + if (this.canvasCtx) { + var uOffset = parseInt(width * height); + var vOffset = parseInt(uOffset + uOffset / 4); + this.canvasCtx.render(frame, width, height, uOffset, vOffset); + } +}; + +/** + * WebGLTexture API. + */ + +function WebGLTexture(gl) { + this.gl = gl; + this.texture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, this.texture); + + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); + + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); +} + +WebGLTexture.prototype.bind = function (n, program, name) { + var gl = this.gl; + gl.activeTexture([gl.TEXTURE0, gl.TEXTURE1, gl.TEXTURE2][n]); + gl.bindTexture(gl.TEXTURE_2D, this.texture); + gl.uniform1i(gl.getUniformLocation(program, name), n); +}; + +WebGLTexture.prototype.fill = function (width, height, data) { + var gl = this.gl; + var level = 0; + var internalFormat = gl.LUMINANCE; //gl.RGBA; + var border = 0; + var srcFormat = gl.LUMINANCE; //gl.RGBA; + var srcType = gl.UNSIGNED_BYTE; + gl.bindTexture(gl.TEXTURE_2D, this.texture); + gl.texImage2D( + gl.TEXTURE_2D, + level, + internalFormat, + width, + height, + border, + srcFormat, + srcType, + data + ); +}; + +/** + * Canvas API. 
+
+/**
+ * Canvas API.
+ */
+
+function setupCanvasWebGL(canvas, options) {
+	var gl = canvas.getContext('webgl', {
+		preserveDrawingBuffer: Boolean(options.preserveDrawingBuffer)
+	});
+	if (!gl) {
+		return null;
+	}
+	var program = gl.createProgram();
+	var vertexShaderSource = [
+		'attribute highp vec4 aVertexPosition;',
+		'attribute vec2 aTextureCoord;',
+		'varying highp vec2 vTextureCoord;',
+		'void main(void) {',
+		' gl_Position = aVertexPosition;',
+		' vTextureCoord = aTextureCoord;',
+		'}'
+	].join('\n');
+
+	var vertexShader = gl.createShader(gl.VERTEX_SHADER);
+	gl.shaderSource(vertexShader, vertexShaderSource);
+	gl.compileShader(vertexShader);
+
+	var fragmentShaderSource = [
+		'precision highp float;',
+		'varying lowp vec2 vTextureCoord;',
+		'uniform sampler2D YTexture;',
+		'uniform sampler2D UTexture;',
+		'uniform sampler2D VTexture;',
+		'const mat4 YUV2RGB = mat4',
+		'(',
+		' 1.1643828125, 0, 1.59602734375, -.87078515625,',
+		' 1.1643828125, -.39176171875, -.81296875, .52959375,',
+		' 1.1643828125, 2.017234375, 0, -1.081390625,',
+		' 0, 0, 0, 1',
+		');',
+		'void main(void) {',
+		' gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;',
+		'}'
+	].join('\n');
+
+	var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
+	gl.shaderSource(fragmentShader, fragmentShaderSource);
+	gl.compileShader(fragmentShader);
+	gl.attachShader(program, vertexShader);
+	gl.attachShader(program, fragmentShader);
+	gl.linkProgram(program);
+	gl.useProgram(program);
+	if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
+		debug('gl Shader link failed.');
+	}
+	var vertexPositionAttribute = gl.getAttribLocation(program, 'aVertexPosition');
+	gl.enableVertexAttribArray(vertexPositionAttribute);
+	var textureCoordAttribute = gl.getAttribLocation(program, 'aTextureCoord');
+	gl.enableVertexAttribArray(textureCoordAttribute);
+
+	var verticesBuffer = gl.createBuffer();
+	gl.bindBuffer(gl.ARRAY_BUFFER, verticesBuffer);
+	gl.bufferData(
+		gl.ARRAY_BUFFER,
+		new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, -1.0, -1.0, 0.0]),
+		gl.STATIC_DRAW
+	);
+	gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
+	var texCoordBuffer = gl.createBuffer();
+	gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
+	gl.bufferData(
+		gl.ARRAY_BUFFER,
+		new Float32Array([1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0]),
+		gl.STATIC_DRAW
+	);
+	gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);
+
+	gl.y = new WebGLTexture(gl);
+	gl.u = new WebGLTexture(gl);
+	gl.v = new WebGLTexture(gl);
+	gl.y.bind(0, program, 'YTexture');
+	gl.u.bind(1, program, 'UTexture');
+	gl.v.bind(2, program, 'VTexture');
+	return gl;
+}
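The YUV2RGB constants in the fragment shader above appear to be the usual BT.601 video-range conversion (the 1.1643828125 factor is roughly 255/219, with the -16 and -128 black-level offsets folded into the constant terms). A rough per-pixel JavaScript equivalent, useful only for spot-checking the constants; the helper name yuvToRgb is an assumption, and the inputs are normalized 0..1 texture values:

	// Rough JS equivalent of the shader's per-pixel math (assumed helper).
	// y, u, v are the sampled texture values in the 0..1 range.
	function yuvToRgb(y, u, v) {
		var r = 1.1643828125 * y + 1.59602734375 * v - 0.87078515625;
		var g = 1.1643828125 * y - 0.39176171875 * u - 0.81296875 * v + 0.52959375;
		var b = 1.1643828125 * y + 2.017234375 * u - 1.081390625;
		return [r, g, b]; // clamp each channel to [0, 1] before use
	}

As a quick check, y = u = v = 0.5 maps to roughly (0.51, 0.51, 0.51), i.e. mid-grey, which is what the video-range matrix should produce.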
+
+function CanvasI420Context(canvas, gl) {
+	this.canvas = canvas;
+	this.gl = gl;
+	this.width = 0;
+	this.height = 0;
+}
+
+CanvasI420Context.prototype.render = function (frame, width, height, uOffset, vOffset) {
+	if (width === 0 || height === 0) {
+		return;
+	}
+
+	var canvas = this.canvas;
+	if (width !== this.width || height !== this.height) {
+		var glWidth = canvas.clientWidth;
+		//var glHeight = canvas.clientHeight;
+		var glHeight = glWidth * (height / width);
+
+		var glX = 0;
+		var glY = 0;
+		if (canvas.clientHeight > glHeight) {
+			glY = parseInt((canvas.clientHeight - glHeight) / 2);
+		}
+
+		debug(
+			'canvas render change from=' +
+				this.width +
+				'x' +
+				this.height +
+				' to ' +
+				width +
+				'x' +
+				height +
+				', clientSize=' +
+				canvas.clientWidth +
+				'x' +
+				canvas.clientHeight +
+				', offsetSize=' +
+				canvas.offsetWidth +
+				'x' +
+				canvas.offsetHeight +
+				', glSize=' +
+				glWidth +
+				'x' +
+				glHeight +
+				'-' +
+				glX +
+				'x' +
+				glY
+		);
+
+		this.frameSetup(glX, glY, glWidth, glHeight);
+		this.width = width;
+		this.height = height;
+	}
+	this.renderFrame(frame, uOffset, vOffset);
+};
+
+CanvasI420Context.prototype.frameSetup = function (glx, gly, width, height) {
+	var canvas = this.canvas;
+	var gl = this.gl;
+	if (canvas.width !== canvas.clientWidth || canvas.height !== canvas.clientHeight) {
+		canvas.width = canvas.clientWidth;
+		canvas.height = canvas.clientHeight;
+	}
+	gl.viewport(glx, gly, width, height);
+};
+
+CanvasI420Context.prototype.renderFrame = function (frame, uOffset, vOffset) {
+	var gl = this.gl;
+	var width = this.width;
+	var height = this.height;
+	gl.y.fill(width, height, frame.subarray(0, uOffset));
+	gl.u.fill(width >> 1, height >> 1, frame.subarray(uOffset, vOffset));
+	gl.v.fill(width >> 1, height >> 1, frame.subarray(vOffset, frame.length));
+	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+};
+
+CanvasI420Context.prototype.fillBlack = function () {
+	var gl = this.gl;
+	var arr1 = new Uint8Array(1),
+		arr2 = new Uint8Array(1);
+
+	arr1[0] = 0;
+	arr2[0] = 128;
+
+	gl.y.fill(1, 1, arr1);
+	gl.u.fill(1, 1, arr2);
+	gl.v.fill(1, 1, arr2);
+
+	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+};
+
+function createCanvasContext(element, options) {
+	var canvas;
+	if (typeof element === 'string') {
+		canvas = window.document.querySelector(element);
+	} else if (element instanceof HTMLCanvasElement) {
+		canvas = element;
+	}
+
+	if (!canvas) {
+		debug('no canvas element found=' + element);
+		return null;
+	}
+
+	if (!options) {
+		options = {
+			preserveDrawingBuffer: false
+		};
+	}
+
+	var glCtx = setupCanvasWebGL(canvas, options);
+	if (!glCtx) {
+		debug('failed to setupCanvasWebGL');
+		return null;
+	}
+
+	var i420Ctx = new CanvasI420Context(canvas, glCtx);
+	i420Ctx.fillBlack();
+	return i420Ctx;
+}
+
 /**
  * Private API.
  */
@@ -1109,6 +1461,19 @@ function onEvent(data) {
 			event.videoHeight = data.size.height;
 
 			this.dispatchEvent(event);
+			break;
+		case 'videowebsocket':
+			switch (data.action) {
+				case 'run':
+					if (data.ws) {
+						this.videoStopped = false;
+						this.openWebSocket('localhost', data.ws.port, data.ws.uuid);
+					}
+					break;
+				case 'stop':
+					this.videoStopped = true;
+					break;
+			}
 			break;
 	}
 }
@@ -3980,6 +4345,9 @@ function registerGlobals(doNotRestoreCallbacksSupport) {
 		);
 	}
 
+	// Prevent webrtc-adapter from overriding navigator.mediaDevices after the shim is applied (iOS 14.3+)
+	Object.freeze(navigator.mediaDevices);
+
 	window.RTCPeerConnection = RTCPeerConnection;
 	window.webkitRTCPeerConnection = RTCPeerConnection;
 	window.RTCSessionDescription = RTCSessionDescription;
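The Object.freeze(navigator.mediaDevices) call relies on the fact that assignments to properties of a frozen object are silently ignored in non-strict code (and throw a TypeError in strict mode), so a later webrtc-adapter shim cannot swap the plugin's getUserMedia back out. A minimal sketch of that effect on a stand-in object (not plugin code):

	// Stand-in object for illustration; the real call freezes navigator.mediaDevices.
	var mediaDevices = { getUserMedia: function () { return 'iosrtc'; } };
	Object.freeze(mediaDevices);
	// A later shim trying to replace the method has no effect in sloppy mode
	// (and would throw a TypeError in strict mode).
	mediaDevices.getUserMedia = function () { return 'shim'; };
	console.log(mediaDevices.getUserMedia()); // -> 'iosrtc'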