21 | 21 | #include "third_party/libyuv/include/libyuv.h"
22 | 22 |
23 | 23 | #import "components/capturer/RTCDesktopCapturer+Private.h"
   | 24 | +#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
24 | 25 |
25 | 26 | namespace webrtc {
26 | 27 |
27 | 28 | enum { kCaptureDelay = 33, kCaptureMessageId = 1000 };
28 | 29 |
29 | 30 | ObjCDesktopCapturer::ObjCDesktopCapturer(DesktopType type,
30 |    | -                      webrtc::DesktopCapturer::SourceId source_id,
31 |    | -                      id<RTC_OBJC_TYPE(DesktopCapturerDelegate)> delegate)
   | 31 | +                                         webrtc::DesktopCapturer::SourceId source_id,
   | 32 | +                                         id<RTC_OBJC_TYPE(DesktopCapturerDelegate)> delegate)
32 | 33 |     : thread_(rtc::Thread::Create()), source_id_(source_id), delegate_(delegate) {
33 | 34 |   RTC_DCHECK(thread_);
34 | 35 |   type_ = type;

38 | 39 |   options_.set_allow_iosurface(true);
39 | 40 |   thread_->BlockingCall([this, type] {
40 | 41 |     if (type == kScreen) {
41 |    | -      capturer_ = std::make_unique<DesktopAndCursorComposer>(webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_);
42 |    | -    } else {
43 |    | -      capturer_ = std::make_unique<DesktopAndCursorComposer>(webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_);
   | 42 | +      capturer_ = std::make_unique<DesktopAndCursorComposer>(
   | 43 | +          webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_);
   | 44 | +    } else {
   | 45 | +      capturer_ = std::make_unique<DesktopAndCursorComposer>(
   | 46 | +          webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_);
44 | 47 |     }
45 | 48 |   });
46 | 49 | }
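
For context (not part of the diff): the constructor above wraps the platform screen or window capturer in webrtc::DesktopAndCursorComposer so the mouse cursor is drawn into each captured frame. A minimal standalone sketch of that pattern, with hypothetical ConsoleCallback and MakeScreenCapturer names standing in for this class, might look like:

```objc
// Illustrative sketch only; ConsoleCallback and MakeScreenCapturer are
// hypothetical names, not part of this patch.
#include <memory>
#include "modules/desktop_capture/desktop_and_cursor_composer.h"
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/desktop_frame.h"

class ConsoleCallback : public webrtc::DesktopCapturer::Callback {
 public:
  void OnCaptureResult(webrtc::DesktopCapturer::Result result,
                       std::unique_ptr<webrtc::DesktopFrame> frame) override {
    // A real sink would convert and forward the frame here, as the
    // OnCaptureResult() implementation further down in this file does.
  }
};

std::unique_ptr<webrtc::DesktopCapturer> MakeScreenCapturer() {
  auto options = webrtc::DesktopCaptureOptions::CreateDefault();
  options.set_allow_iosurface(true);  // macOS-only setter, as in the patch.
  // Compose the raw screen capturer with a cursor renderer.
  return std::make_unique<webrtc::DesktopAndCursorComposer>(
      webrtc::DesktopCapturer::CreateScreenCapturer(options), options);
}

// Usage: capturer->Start(&callback), then call capturer->CaptureFrame()
// periodically; frames arrive via OnCaptureResult().
```
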
61 | 64 |     return capture_state_;
62 | 65 |   }
63 | 66 |
64 |    | -  if(fps >= 60) {
   | 67 | +  if (fps >= 60) {
65 | 68 |     capture_delay_ = uint32_t(1000.0 / 60.0);
66 | 69 |   } else {
67 | 70 |     capture_delay_ = uint32_t(1000.0 / fps);
68 | 71 |   }
69 | 72 |
70 |    | -  if(source_id_ != -1) {
71 |    | -    if(!capturer_->SelectSource(source_id_)) {
72 |    | -      capture_state_ = CS_FAILED;
73 |    | -      return capture_state_;
   | 73 | +  if (source_id_ != -1) {
   | 74 | +    if (!capturer_->SelectSource(source_id_)) {
   | 75 | +      capture_state_ = CS_FAILED;
   | 76 | +      return capture_state_;
74 | 77 |     }
75 |    | -    if(type_ == kWindow) {
76 |    | -      if(!capturer_->FocusOnSelectedSource()) {
   | 78 | +    if (type_ == kWindow) {
   | 79 | +      if (!capturer_->FocusOnSelectedSource()) {
77 | 80 |         capture_state_ = CS_FAILED;
78 | 81 |         return capture_state_;
79 | 82 |       }
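
Aside (not part of the diff): SelectSource() expects an id obtained from the capturer's own enumeration. A hedged sketch of looking one up before handing it to the code above, using a hypothetical FindSourceByTitle helper:

```objc
// Illustrative only: enumerate capture sources and pick one whose title
// matches; the returned id is valid input for SelectSource().
#include <string>
#include "modules/desktop_capture/desktop_capturer.h"

webrtc::DesktopCapturer::SourceId FindSourceByTitle(
    webrtc::DesktopCapturer* capturer, const std::string& title) {
  webrtc::DesktopCapturer::SourceList sources;
  if (!capturer->GetSourceList(&sources)) {
    return -1;  // Same "no explicit source" sentinel used above.
  }
  for (const auto& source : sources) {
    if (source.title == title) {
      return source.id;
    }
  }
  return -1;
}
```
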
103 | 106 | }
104 | 107 |
105 | 108 | void ObjCDesktopCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result,
106 |     | -                      std::unique_ptr<webrtc::DesktopFrame> frame) {
    | 109 | +                                          std::unique_ptr<webrtc::DesktopFrame> frame) {
107 | 110 |   if (result != result_) {
108 | 111 |     if (result == webrtc::DesktopCapturer::Result::ERROR_PERMANENT) {
109 | 112 |       [delegate_ didSourceCaptureError];

124 | 127 |     }
125 | 128 |
126 | 129 |   if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) {
127 |     | -      return;
    | 130 | +    return;
128 | 131 |   }
129 | 132 |
130 | 133 |   int width = frame->size().width();
131 | 134 |   int height = frame->size().height();
132 | 135 |   int real_width = width;
133 | 136 |
134 |     | -  if(type_ == kWindow) {
    | 137 | +  if (type_ == kWindow) {
135 | 138 |     int multiple = 0;
136 | 139 | #if defined(WEBRTC_ARCH_X86_FAMILY)
137 | 140 |     multiple = 16;

140 | 143 | #endif
141 | 144 |     // A multiple of $multiple must be used as the width of the src frame,
142 | 145 |     // and the right black border needs to be cropped during conversion.
143 |     | -    if( multiple != 0 && (width % multiple) != 0 ) {
    | 146 | +    if (multiple != 0 && (width % multiple) != 0) {
144 | 147 |       width = (width / multiple + 1) * multiple;
145 | 148 |     }
146 | 149 |   }
147 |     | -
148 |     | -  if (!i420_buffer_ || !i420_buffer_.get() ||
149 |     | -      i420_buffer_->width() * i420_buffer_->height() != real_width * height) {
150 |     | -    i420_buffer_ = webrtc::I420Buffer::Create(real_width, height);
151 |     | -  }
152 | 150 |
153 |     | -  libyuv::ConvertToI420(frame->data(),
154 |     | -                        0,
155 |     | -                        i420_buffer_->MutableDataY(),
156 |     | -                        i420_buffer_->StrideY(),
157 |     | -                        i420_buffer_->MutableDataU(),
158 |     | -                        i420_buffer_->StrideU(),
159 |     | -                        i420_buffer_->MutableDataV(),
160 |     | -                        i420_buffer_->StrideV(),
    | 151 | +  CVPixelBufferRef pixelBuffer = NULL;
    | 152 | +
    | 153 | +  NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    | 154 | +  CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault,
    | 155 | +                                     width,
    | 156 | +                                     height,
    | 157 | +                                     kCVPixelFormatType_32BGRA,
    | 158 | +                                     (__bridge CFDictionaryRef)(pixelAttributes),
    | 159 | +                                     &pixelBuffer);
    | 160 | +  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    | 161 | +  uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
    | 162 | +  libyuv::ConvertToARGB(reinterpret_cast<uint8_t *>(frame->data()),
    | 163 | +                        real_width * height * 4,
    | 164 | +                        reinterpret_cast<uint8_t *>(pxdata),
    | 165 | +                        width * 4,
161 | 166 |                         0,
162 | 167 |                         0,
163 | 168 |                         width,

166 | 171 |                         height,
167 | 172 |                         libyuv::kRotate0,
168 | 173 |                         libyuv::FOURCC_ARGB);
    | 174 | +  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    | 175 | +
    | 176 | +  if (res != kCVReturnSuccess) {
    | 177 | +    NSLog(@"Unable to create cvpixelbuffer %d", res);
    | 178 | +    return;
    | 179 | +  }
    | 180 | +
    | 181 | +  RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
    | 182 | +      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
169 | 183 |   NSTimeInterval timeStampSeconds = CACurrentMediaTime();
170 | 184 |   int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
171 |     | -  RTCVideoFrame* rtc_video_frame =
172 |     | -      ToObjCVideoFrame(
173 |     | -          webrtc::VideoFrame::Builder()
174 |     | -              .set_video_frame_buffer(i420_buffer_)
175 |     | -              .set_rotation(webrtc::kVideoRotation_0)
176 |     | -              .set_timestamp_us(timeStampNs / 1000)
177 |     | -              .build()
178 |     | -          );
179 |     | -  [delegate_ didCaptureVideoFrame:rtc_video_frame];
    | 185 | +  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
    | 186 | +      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
    | 187 | +                                                  rotation:RTCVideoRotation_0
    | 188 | +                                               timeStampNs:timeStampNs];
    | 189 | +  CVPixelBufferRelease(pixelBuffer);
    | 190 | +  [delegate_ didCaptureVideoFrame:videoFrame];
180 | 191 | }
181 | 192 |
182 | 193 | void ObjCDesktopCapturer::CaptureFrame() {
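
Companion note (not part of the diff): the added path allocates a BGRA, IOSurface-compatible CVPixelBuffer, converts the captured DesktopFrame into it with libyuv, and hands it to the delegate as an RTCCVPixelBuffer-backed RTCVideoFrame. Below is a hedged, self-contained sketch of the same conversion; WrapDesktopFrame is a hypothetical helper name, the CVReturn is checked before the buffer is locked, and libyuv::ARGBCopy is used in place of the patch's ConvertToARGB call so both source and destination strides are explicit:

```objc
// Illustrative sketch only, not the patch's exact code.
#import <CoreVideo/CoreVideo.h>
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#import "base/RTCVideoFrame.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#include "modules/desktop_capture/desktop_frame.h"
#include "third_party/libyuv/include/libyuv.h"

static RTC_OBJC_TYPE(RTCVideoFrame) *WrapDesktopFrame(const webrtc::DesktopFrame &frame) {
  CVPixelBufferRef pixelBuffer = NULL;
  NSDictionary *attrs = @{(__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}};
  CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault,
                                     frame.size().width(),
                                     frame.size().height(),
                                     kCVPixelFormatType_32BGRA,
                                     (__bridge CFDictionaryRef)attrs,
                                     &pixelBuffer);
  if (res != kCVReturnSuccess) {
    return nil;  // Bail out before locking or writing to the buffer.
  }
  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
  // Copy row by row; DesktopFrame and CVPixelBuffer each report their own
  // stride, so padded row widths are handled without extra cropping math.
  libyuv::ARGBCopy(frame.data(),
                   frame.stride(),
                   (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer),
                   (int)CVPixelBufferGetBytesPerRow(pixelBuffer),
                   frame.size().width(),
                   frame.size().height());
  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
  int64_t timeStampNs = lroundf(CACurrentMediaTime() * NSEC_PER_SEC);
  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
                                                  rotation:RTCVideoRotation_0
                                               timeStampNs:timeStampNs];
  CVPixelBufferRelease(pixelBuffer);  // RTCCVPixelBuffer retains its own reference.
  return videoFrame;
}
```
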