ScreenCapturer.m

// From https://github.com/react-native-webrtc/react-native-webrtc (MIT License)
// SPDX-FileCopyrightText: 2023 React-Native-WebRTC authors
// SPDX-License-Identifier: MIT

#include <mach/mach_time.h>
#import <ReplayKit/ReplayKit.h>

#import <WebRTC/RTCCVPixelBuffer.h>
#import <WebRTC/RTCVideoFrameBuffer.h>

#import "ScreenCapturer.h"
#import "SocketConnection.h"

#import "NextcloudTalk-Swift.h"
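
// Upper bound on the number of bytes read from the socket stream in one pass.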
const NSUInteger kMaxReadLength = 10 * 1024;
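
// Accumulates the bytes of one captured frame arriving over the socket (written, presumably, by
// the ReplayKit broadcast extension). Each frame is framed as an HTTP message: the headers carry
// the buffer dimensions and orientation, the body carries the encoded image data.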
@interface Message : NSObject

@property(nonatomic, assign, readonly) CVImageBufferRef imageBuffer;
@property(nonatomic, copy, nullable) void (^didComplete)(BOOL success, Message *message);

- (NSInteger)appendBytes:(UInt8 *)buffer length:(NSUInteger)length;

@end

@interface Message ()

@property(nonatomic, assign) CVImageBufferRef imageBuffer;
@property(nonatomic, assign) int imageOrientation;
@property(nonatomic, assign) CFHTTPMessageRef framedMessage;

@end

@implementation Message

- (instancetype)init {
  self = [super init];
  if (self) {
    self.imageBuffer = NULL;
  }

  return self;
}

- (void)dealloc {
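  // CVPixelBufferRelease, unlike CFRelease, is NULL-safe, so this is harmless
  // when no frame was ever unwrapped.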
  CVPixelBufferRelease(_imageBuffer);
}

/** Returns the number of bytes still missing to complete the message, or -1 when not enough bytes
 *  were provided to compute the message length. */
- (NSInteger)appendBytes:(UInt8 *)buffer length:(NSUInteger)length {
  if (!_framedMessage) {
    _framedMessage = CFHTTPMessageCreateEmpty(kCFAllocatorDefault, false);
  }

  CFHTTPMessageAppendBytes(_framedMessage, buffer, length);
  if (!CFHTTPMessageIsHeaderComplete(_framedMessage)) {
    return -1;
  }
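
  // The headers are complete, so Content-Length tells us how much body data is still outstanding.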
  NSInteger contentLength =
      [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef) @"Content-Length"))
          integerValue];
  NSInteger bodyLength = (NSInteger)[CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage)) length];

  NSInteger missingBytesCount = contentLength - bodyLength;
  if (missingBytesCount == 0) {
    BOOL success = [self unwrapMessage:self.framedMessage];
    self.didComplete(success, self);

    CFRelease(self.framedMessage);
    self.framedMessage = NULL;
  }

  return missingBytesCount;
}

// MARK: Private Methods

- (CIContext *)imageContext {
  // Initializing a CIContext object is costly, so we use a singleton instead
  static CIContext *imageContext = nil;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    imageContext = [[CIContext alloc] initWithOptions:nil];
  });

  return imageContext;
}
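
// Extracts the frame metadata (dimensions and orientation) from the message headers and decodes
// the message body into a freshly allocated 32BGRA pixel buffer.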
- (BOOL)unwrapMessage:(CFHTTPMessageRef)framedMessage {
  size_t width =
      [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef) @"Buffer-Width"))
          integerValue];
  size_t height =
      [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef) @"Buffer-Height"))
          integerValue];
  _imageOrientation = [CFBridgingRelease(
      CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef) @"Buffer-Orientation")) intValue];

  NSData *messageData = CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage));

  // Copy the pixel buffer
  CVReturn status =
      CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, NULL, &_imageBuffer);
  if (status != kCVReturnSuccess) {
    NSLog(@"CVPixelBufferCreate failed");
    return NO;
  }

  [self copyImageData:messageData toPixelBuffer:&_imageBuffer];

  return YES;
}
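
// Decodes the received image data and renders it into the pixel buffer's backing memory,
// keeping the base address locked for the duration of the write.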
- (void)copyImageData:(NSData *)data toPixelBuffer:(CVPixelBufferRef *)pixelBuffer {
  CVPixelBufferLockBaseAddress(*pixelBuffer, 0);

  CIImage *image = [CIImage imageWithData:data];
  [self.imageContext render:image toCVPixelBuffer:*pixelBuffer];

  CVPixelBufferUnlockBaseAddress(*pixelBuffer, 0);
}

@end

// MARK: -

@interface ScreenCapturer ()<NSStreamDelegate>

@property(nonatomic, strong) SocketConnection *connection;
@property(nonatomic, strong) Message *message;

@end

@implementation ScreenCapturer {
  mach_timebase_info_data_t _timebaseInfo;
  NSInteger _readLength;
  int64_t _startTimeStampNs;
}

- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  self = [super initWithDelegate:delegate];
  if (self) {
    mach_timebase_info(&_timebaseInfo);
  }

  return self;
}

- (void)setConnection:(SocketConnection *)connection {
  if (_connection != connection) {
    [_connection close];
    _connection = connection;
  }
}

- (void)startCaptureWithConnection:(SocketConnection *)connection {
  mach_timebase_info(&_timebaseInfo);
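  // -1 marks the timestamp base as unset; it is captured from the first delivered frame.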
  _startTimeStampNs = -1;

  self.connection = connection;
  self.message = nil;

  [self.connection openWithStreamDelegate:self];
}

- (void)stopCapture {
  self.connection = nil;
}

// MARK: Private Methods

- (void)readBytesFromStream:(NSInputStream *)stream {
  if (!stream.hasBytesAvailable) {
    return;
  }

  if (!self.message) {
    self.message = [[Message alloc] init];
    _readLength = kMaxReadLength;

    __weak __typeof__(self) weakSelf = self;
    self.message.didComplete = ^(BOOL success, Message *message) {
      if (success) {
        [weakSelf didCaptureVideoFrame:message.imageBuffer withOrientation:message.imageOrientation];
      }

      weakSelf.message = nil;
    };
  }

  uint8_t buffer[_readLength];
  NSInteger numberOfBytesRead = [stream read:buffer maxLength:_readLength];
  if (numberOfBytesRead < 0) {
    NSLog(@"error reading bytes from stream");
    return;
  }
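
  // appendBytes:length: returns how many bytes of the current message are still missing, so the
  // next read can be sized to exactly the remainder; fall back to kMaxReadLength when the size is
  // unknown (-1) or larger than the buffer.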
  _readLength = [self.message appendBytes:buffer length:numberOfBytesRead];
  if (_readLength == -1 || _readLength > kMaxReadLength) {
    _readLength = kMaxReadLength;
  }
}
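
// Wraps the pixel buffer in an RTCVideoFrame, stamped in nanoseconds relative to the first
// captured frame (the Mach host clock is converted to nanoseconds via the cached timebase),
// and hands it to the WebRTC capturer delegate.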
- (void)didCaptureVideoFrame:(CVPixelBufferRef)pixelBuffer withOrientation:(CGImagePropertyOrientation)orientation {
  int64_t currentTime = mach_absolute_time();
  int64_t currentTimeStampNs = currentTime * _timebaseInfo.numer / _timebaseInfo.denom;

  if (_startTimeStampNs < 0) {
    _startTimeStampNs = currentTimeStampNs;
  }

  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
  int64_t frameTimeStampNs = currentTimeStampNs - _startTimeStampNs;
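
  // Map the CGImage orientation sent with the frame onto WebRTC's rotation values.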
  RTCVideoRotation rotation;
  switch (orientation) {
    case kCGImagePropertyOrientationLeft:
      rotation = RTCVideoRotation_90;
      break;
    case kCGImagePropertyOrientationDown:
      rotation = RTCVideoRotation_180;
      break;
    case kCGImagePropertyOrientationRight:
      rotation = RTCVideoRotation_270;
      break;
    default:
      rotation = RTCVideoRotation_0;
      break;
  }

  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                           rotation:rotation
                                                        timeStampNs:frameTimeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}

@end
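
// MARK: NSStreamDelegate
// Reacts to socket stream events: reads frame bytes as they become available and tears the
// capture down when the stream ends.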

@implementation ScreenCapturer (NSStreamDelegate)

- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode {
  switch (eventCode) {
    case NSStreamEventOpenCompleted:
      NSLog(@"server stream open completed");
      break;
    case NSStreamEventHasBytesAvailable:
      [self readBytesFromStream:(NSInputStream *)aStream];
      break;
    case NSStreamEventEndEncountered:
      NSLog(@"server stream end encountered");
      [self stopCapture];
      [self.eventsDelegate capturerDidEnd:self];
      break;
    case NSStreamEventErrorOccurred:
      NSLog(@"server stream error encountered: %@", aStream.streamError.localizedDescription);
      break;
    default:
      break;
  }
}

@end