/* $Id: VideoRec.cpp 65256 2017-01-12 11:11:03Z vboxsync $ */
/** @file
 * Encodes the screen content in VPX format.
 */

/*
 * Copyright (C) 2012-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */

#define LOG_GROUP LOG_GROUP_MAIN

#include <vector>

#include <VBox/log.h>
#include <iprt/asm.h>
#include <iprt/assert.h>
#include <iprt/semaphore.h>
#include <iprt/thread.h>
#include <iprt/time.h>

#include <VBox/com/VirtualBox.h>
#include <VBox/com/com.h>
#include <VBox/com/string.h>

#include "EbmlWriter.h"
#include "VideoRec.h"

#define VPX_CODEC_DISABLE_COMPAT 1
#include <vpx/vp8cx.h>
#include <vpx/vpx_image.h>

/** Default VPX codec to use. */
#define DEFAULTCODEC (vpx_codec_vp8_cx())

static int videoRecEncodeAndWrite(PVIDEORECSTREAM pStrm);
static int videoRecRGBToYUV(PVIDEORECSTREAM pStrm);

using namespace com;
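
/*
 * Rough usage sketch of the API implemented in this file. The actual wiring
 * lives in the caller (outside of this file); all parameter values below are
 * illustrative only:
 *
 *     PVIDEORECCONTEXT pCtx;
 *     int rc = VideoRecContextCreate(1, &pCtx);                  // one screen
 *     if (RT_SUCCESS(rc))
 *         rc = VideoRecStreamInit(pCtx, 0, "capture.webm",
 *                                 800, 600, 512, 25,             // 512 kbps, 25 FPS
 *                                 0, 0, "");                     // no limits, no options
 *     ...
 *     if (   VideoRecIsEnabled(pCtx)
 *         && VideoRecIsReady(pCtx, 0, u64NowMs))
 *         VideoRecCopyToIntBuf(pCtx, 0, ..., u64NowMs);          // hand over a frame
 *     ...
 *     VideoRecContextDestroy(pCtx);
 */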

/**
 * Enumeration for a video recording state.
 */
enum
{
    /** Not initialized. */
    VIDREC_UNINITIALIZED = 0,
    /** Initialized, idle. */
    VIDREC_IDLE          = 1,
    /** Currently in VideoRecCopyToIntBuf(), delay termination. */
    VIDREC_COPYING       = 2,
    /** Signal that we are terminating. */
    VIDREC_TERMINATING   = 3
};

/* Must always be accessible and therefore cannot be part of VIDEORECCONTEXT. */
static uint32_t g_enmState = VIDREC_UNINITIALIZED;
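/* State transitions (as implemented below): VIDREC_UNINITIALIZED -> VIDREC_IDLE
 * in VideoRecContextCreate(); VIDREC_IDLE <-> VIDREC_COPYING for every frame
 * handed over via VideoRecCopyToIntBuf(); any non-terminated state ->
 * VIDREC_TERMINATING -> VIDREC_UNINITIALIZED in VideoRecContextDestroy(). */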

/**
 * Structure for keeping specific video recording codec data.
 */
typedef struct VIDEORECCODEC
{
    union
    {
        struct
        {
            /** VPX codec context. */
            vpx_codec_ctx_t     CodecCtx;
            /** VPX codec configuration. */
            vpx_codec_enc_cfg_t Config;
            /** VPX image context. */
            vpx_image_t         RawImage;
        } VPX;
    };
} VIDEORECCODEC, *PVIDEORECCODEC;

/**
 * Structure for maintaining a video recording stream.
 */
typedef struct VIDEORECSTREAM
{
    /** Container context. */
    WebMWriter         *pEBML;
    /** Codec data. */
    VIDEORECCODEC       Codec;
    /** Target X resolution (in pixels). */
    uint32_t            uTargetWidth;
    /** Target Y resolution (in pixels). */
    uint32_t            uTargetHeight;
    /** X resolution of the last encoded frame. */
    uint32_t            uLastSourceWidth;
    /** Y resolution of the last encoded frame. */
    uint32_t            uLastSourceHeight;
    /** Current frame number. */
    uint64_t            cFrame;
    /** RGB buffer containing the most recent frame of the framebuffer. */
    uint8_t            *pu8RgbBuf;
    /** YUV buffer the encode function fetches the frame from. */
    uint8_t            *pu8YuvBuf;
    /** Whether video recording is enabled or not. */
    bool                fEnabled;
    /** Whether the RGB buffer is filled or not. */
    bool                fRgbFilled;
    /** Pixel format of the current frame. */
    uint32_t            u32PixelFormat;
    /** Minimal delay (in ms) between two frames. */
    uint32_t            uDelay;
    /** Time stamp (in ms) of the last frame we encoded. */
    uint64_t            u64LastTimeStamp;
    /** Time stamp (in ms) of the current frame. */
    uint64_t            u64TimeStamp;
    /** Encoder deadline. */
    unsigned int        uEncoderDeadline;
} VIDEORECSTREAM, *PVIDEORECSTREAM;

/** Vector of video recording streams. */
typedef std::vector <PVIDEORECSTREAM> VideoRecStreams;

/**
 * Structure for keeping a video recording context.
 */
typedef struct VIDEORECCONTEXT
{
    /** Semaphore to signal the encoding worker thread. */
    RTSEMEVENT          WaitEvent;
    /** Semaphore required during termination. */
    RTSEMEVENT          TermEvent;
    /** Whether video recording is enabled or not. */
    bool                fEnabled;
    /** Worker thread. */
    RTTHREAD            Thread;
    /** Maximal time stamp. */
    uint64_t            u64MaxTimeStamp;
    /** Maximal file size in MB. */
    uint32_t            uMaxFileSize;
    /** Vector of current video recording stream contexts. */
    VideoRecStreams     vecStreams;
} VIDEORECCONTEXT, *PVIDEORECCONTEXT;


/**
 * Iterator class for running through a BGRA32 image buffer and converting
 * it to RGB.
 */
class ColorConvBGRA32Iter
{
private:
    enum { PIX_SIZE = 4 };
public:
    ColorConvBGRA32Iter(unsigned aWidth, unsigned aHeight, uint8_t *aBuf)
    {
        LogFlow(("width=%d height=%d aBuf=%p\n", aWidth, aHeight, aBuf));
        mPos = 0;
        mSize = aWidth * aHeight * PIX_SIZE;
        mBuf = aBuf;
    }
    /**
     * Convert the next pixel to RGB.
     * @returns true on success, false if we have reached the end of the buffer
     * @param aRed where to store the red value
     * @param aGreen where to store the green value
     * @param aBlue where to store the blue value
     */
    bool getRGB(unsigned *aRed, unsigned *aGreen, unsigned *aBlue)
    {
        bool rc = false;
        if (mPos + PIX_SIZE <= mSize)
        {
            *aRed   = mBuf[mPos + 2];
            *aGreen = mBuf[mPos + 1];
            *aBlue  = mBuf[mPos    ];
            mPos += PIX_SIZE;
            rc = true;
        }
        return rc;
    }

    /**
     * Skip forward by a certain number of pixels.
     * @param aPixels how many pixels to skip
     */
    void skip(unsigned aPixels)
    {
        mPos += PIX_SIZE * aPixels;
    }
private:
    /** Size of the picture buffer */
    unsigned mSize;
    /** Current position in the picture buffer */
    unsigned mPos;
    /** Address of the picture buffer */
    uint8_t *mBuf;
};

/**
 * Iterator class for running through a BGR24 image buffer and converting
 * it to RGB.
 */
class ColorConvBGR24Iter
{
private:
    enum { PIX_SIZE = 3 };
public:
    ColorConvBGR24Iter(unsigned aWidth, unsigned aHeight, uint8_t *aBuf)
    {
        mPos = 0;
        mSize = aWidth * aHeight * PIX_SIZE;
        mBuf = aBuf;
    }
    /**
     * Convert the next pixel to RGB.
     * @returns true on success, false if we have reached the end of the buffer
     * @param aRed where to store the red value
     * @param aGreen where to store the green value
     * @param aBlue where to store the blue value
     */
    bool getRGB(unsigned *aRed, unsigned *aGreen, unsigned *aBlue)
    {
        bool rc = false;
        if (mPos + PIX_SIZE <= mSize)
        {
            *aRed   = mBuf[mPos + 2];
            *aGreen = mBuf[mPos + 1];
            *aBlue  = mBuf[mPos    ];
            mPos += PIX_SIZE;
            rc = true;
        }
        return rc;
    }

    /**
     * Skip forward by a certain number of pixels.
     * @param aPixels how many pixels to skip
     */
    void skip(unsigned aPixels)
    {
        mPos += PIX_SIZE * aPixels;
    }
private:
    /** Size of the picture buffer */
    unsigned mSize;
    /** Current position in the picture buffer */
    unsigned mPos;
    /** Address of the picture buffer */
    uint8_t *mBuf;
};

/**
 * Iterator class for running through a BGR565 image buffer and converting
 * it to RGB.
 */
class ColorConvBGR565Iter
{
private:
    enum { PIX_SIZE = 2 };
public:
    ColorConvBGR565Iter(unsigned aWidth, unsigned aHeight, uint8_t *aBuf)
    {
        mPos = 0;
        mSize = aWidth * aHeight * PIX_SIZE;
        mBuf = aBuf;
    }
    /**
     * Convert the next pixel to RGB.
     * @returns true on success, false if we have reached the end of the buffer
     * @param aRed where to store the red value
     * @param aGreen where to store the green value
     * @param aBlue where to store the blue value
     */
    bool getRGB(unsigned *aRed, unsigned *aGreen, unsigned *aBlue)
    {
        bool rc = false;
        if (mPos + PIX_SIZE <= mSize)
        {
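            /* A pixel is stored as two little endian bytes; bits 15..11 hold
               red, bits 10..5 green and bits 4..0 blue (RGB565). Each
               component is scaled up to 8 bits by shifting it into the top
               bits of a byte; the low bits stay zero. */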
            unsigned uFull =  (((unsigned) mBuf[mPos + 1]) << 8)
                            | ((unsigned) mBuf[mPos]);
            *aRed   = (uFull >> 8) & ~7;
            *aGreen = (uFull >> 3) & ~3 & 0xff;
            *aBlue  = (uFull << 3) & ~7 & 0xff;
            mPos += PIX_SIZE;
            rc = true;
        }
        return rc;
    }

    /**
     * Skip forward by a certain number of pixels.
     * @param aPixels how many pixels to skip
     */
    void skip(unsigned aPixels)
    {
        mPos += PIX_SIZE * aPixels;
    }
private:
    /** Size of the picture buffer */
    unsigned mSize;
    /** Current position in the picture buffer */
    unsigned mPos;
    /** Address of the picture buffer */
    uint8_t *mBuf;
};

/**
 * Convert an image to YUV420p format.
 * @returns true on success, false on failure
 * @param aWidth    width of image
 * @param aHeight   height of image
 * @param aDestBuf  an allocated memory buffer large enough to hold the
 *                  destination image (i.e. width * height * 12 bits per
 *                  pixel, which is width * height * 3 / 2 bytes)
 * @param aSrcBuf   the source image as an array of bytes
 */
template <class T>
inline bool colorConvWriteYUV420p(unsigned aWidth, unsigned aHeight, uint8_t *aDestBuf, uint8_t *aSrcBuf)
{
    AssertReturn(!(aWidth & 1), false);
    AssertReturn(!(aHeight & 1), false);
    bool fRc = true;
    T iter1(aWidth, aHeight, aSrcBuf);
    T iter2 = iter1;
    iter2.skip(aWidth);
    unsigned cPixels = aWidth * aHeight;
    unsigned offY = 0;
    unsigned offU = cPixels;
    unsigned offV = cPixels + cPixels / 4;
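    /* Planar YUV420p layout in aDestBuf: a full resolution Y plane (cPixels
       bytes) is followed by a quarter resolution U plane and a quarter
       resolution V plane (cPixels / 4 bytes each), hence the three write
       offsets initialized above. */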
    unsigned const cyHalf = aHeight / 2;
    unsigned const cxHalf = aWidth  / 2;
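    /* The loop below processes the image in 2x2 pixel blocks: each pixel gets
       its own luma (Y) sample, while the four chroma contributions are
       averaged into one U and one V sample (hence the "/ 4" and the summing).
       The integer coefficients are the usual 8-bit fixed point approximation
       of the ITU-R BT.601 RGB to Y'CbCr conversion; pure white (255,255,255),
       for example, comes out as Y=235, U=V=128. */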
    for (unsigned i = 0; i < cyHalf && fRc; ++i)
    {
        for (unsigned j = 0; j < cxHalf; ++j)
        {
            unsigned red, green, blue;
            fRc = iter1.getRGB(&red, &green, &blue);
            AssertReturn(fRc, false);
            aDestBuf[offY] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
            unsigned u = (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
            unsigned v = (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;

            fRc = iter1.getRGB(&red, &green, &blue);
            AssertReturn(fRc, false);
            aDestBuf[offY + 1] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
            u += (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
            v += (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;

            fRc = iter2.getRGB(&red, &green, &blue);
            AssertReturn(fRc, false);
            aDestBuf[offY + aWidth] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
            u += (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
            v += (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;

            fRc = iter2.getRGB(&red, &green, &blue);
            AssertReturn(fRc, false);
            aDestBuf[offY + aWidth + 1] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
            u += (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
            v += (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;

            aDestBuf[offU] = u;
            aDestBuf[offV] = v;
            offY += 2;
            ++offU;
            ++offV;
        }

        iter1.skip(aWidth);
        iter2.skip(aWidth);
        offY += aWidth;
    }

    return true;
}

/**
 * Convert an image to RGB24 format.
 * @returns true on success, false on failure
 * @param aWidth    width of image
 * @param aHeight   height of image
 * @param aDestBuf  an allocated memory buffer large enough to hold the
 *                  destination image (i.e. width * height * 3 bytes, 24 bits
 *                  per pixel)
 * @param aSrcBuf   the source image as an array of bytes
 */
template <class T>
inline bool colorConvWriteRGB24(unsigned aWidth, unsigned aHeight,
                                uint8_t *aDestBuf, uint8_t *aSrcBuf)
{
    enum { PIX_SIZE = 3 };
    bool rc = true;
    AssertReturn(0 == (aWidth & 1), false);
    AssertReturn(0 == (aHeight & 1), false);
    T iter(aWidth, aHeight, aSrcBuf);
    unsigned cPixels = aWidth * aHeight;
    for (unsigned i = 0; i < cPixels && rc; ++i)
    {
        unsigned red, green, blue;
        rc = iter.getRGB(&red, &green, &blue);
        if (rc)
        {
            aDestBuf[i * PIX_SIZE    ] = red;
            aDestBuf[i * PIX_SIZE + 1] = green;
            aDestBuf[i * PIX_SIZE + 2] = blue;
        }
    }
    return rc;
}

/**
 * Worker thread for all streams of a video recording context.
 *
 * Does RGB/YUV conversion and encoding.
 */
static DECLCALLBACK(int) videoRecThread(RTTHREAD hThreadSelf, void *pvUser)
{
    RT_NOREF(hThreadSelf);
    PVIDEORECCONTEXT pCtx = (PVIDEORECCONTEXT)pvUser;
    for (;;)
    {
        int rc = RTSemEventWait(pCtx->WaitEvent, RT_INDEFINITE_WAIT);
        AssertRCBreak(rc);

        if (ASMAtomicReadU32(&g_enmState) == VIDREC_TERMINATING)
            break;

        for (VideoRecStreams::iterator it = pCtx->vecStreams.begin(); it != pCtx->vecStreams.end(); it++)
        {
            PVIDEORECSTREAM pStream = (*it);

            if (   pStream->fEnabled
                && ASMAtomicReadBool(&pStream->fRgbFilled))
            {
                rc = videoRecRGBToYUV(pStream);

                ASMAtomicWriteBool(&pStream->fRgbFilled, false);

                if (RT_SUCCESS(rc))
                    rc = videoRecEncodeAndWrite(pStream);

                if (RT_FAILURE(rc))
                {
                    static unsigned cErrors = 100;
                    if (cErrors > 0)
                    {
                        LogRel(("VideoRec: Error %Rrc encoding / writing video frame\n", rc));
                        cErrors--;
                    }
                }
            }
        }
    }

    return VINF_SUCCESS;
}

/**
 * Creates a video recording context.
 *
 * @returns IPRT status code.
 * @param   cScreens    Number of screens to create the context for.
 * @param   ppCtx       Pointer to created video recording context on success.
 */
int VideoRecContextCreate(uint32_t cScreens, PVIDEORECCONTEXT *ppCtx)
{
    AssertReturn(cScreens, VERR_INVALID_PARAMETER);
    AssertPtrReturn(ppCtx, VERR_INVALID_POINTER);

    Assert(ASMAtomicReadU32(&g_enmState) == VIDREC_UNINITIALIZED);

    int rc = VINF_SUCCESS;

    PVIDEORECCONTEXT pCtx = (PVIDEORECCONTEXT)RTMemAllocZ(sizeof(VIDEORECCONTEXT));
    if (!pCtx)
        return VERR_NO_MEMORY;

    for (uint32_t uScreen = 0; uScreen < cScreens; uScreen++)
    {
        PVIDEORECSTREAM pStream = (PVIDEORECSTREAM)RTMemAllocZ(sizeof(VIDEORECSTREAM));
        if (!pStream)
        {
            rc = VERR_NO_MEMORY;
            break;
        }

        try
        {
            pCtx->vecStreams.push_back(pStream);

            pStream->pEBML = new WebMWriter();
        }
        catch (std::bad_alloc &)
        {
            rc = VERR_NO_MEMORY;
            break;
        }
    }

    if (RT_SUCCESS(rc))
    {
        rc = RTSemEventCreate(&pCtx->WaitEvent);
        AssertRCReturn(rc, rc);

        rc = RTSemEventCreate(&pCtx->TermEvent);
        AssertRCReturn(rc, rc);

        rc = RTThreadCreate(&pCtx->Thread, videoRecThread, (void*)pCtx, 0,
                            RTTHREADTYPE_MAIN_WORKER, RTTHREADFLAGS_WAITABLE, "VideoRec");
        AssertRCReturn(rc, rc);

        ASMAtomicWriteU32(&g_enmState, VIDREC_IDLE);

        if (ppCtx)
            *ppCtx = pCtx;
    }
    else
    {
        /* Roll back allocations on error. */
        VideoRecStreams::iterator it = pCtx->vecStreams.begin();
        while (it != pCtx->vecStreams.end())
        {
            PVIDEORECSTREAM pStream = (*it);

            if (pStream->pEBML)
                delete pStream->pEBML;

            it = pCtx->vecStreams.erase(it);

            RTMemFree(pStream);
            pStream = NULL;
        }

        Assert(pCtx->vecStreams.empty());
    }

    return rc;
}

/**
 * Destroys a video recording context.
 *
 * @param   pCtx    Video recording context to destroy.
 */
void VideoRecContextDestroy(PVIDEORECCONTEXT pCtx)
{
    if (!pCtx)
        return;

    uint32_t enmState = VIDREC_IDLE;

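    /* Atomically flip the state to VIDREC_TERMINATING (bailing out if it is
       VIDREC_UNINITIALIZED). If the state we replaced was VIDREC_COPYING,
       VideoRecCopyToIntBuf() is still running and will signal TermEvent once
       it is done; this is what the wait below is for. */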
    for (;;) /** @todo r=andy Remove busy waiting! */
    {
        if (ASMAtomicCmpXchgExU32(&g_enmState, VIDREC_TERMINATING, enmState, &enmState))
            break;
        if (enmState == VIDREC_UNINITIALIZED)
            return;
    }

    if (enmState == VIDREC_COPYING)
    {
        int rc = RTSemEventWait(pCtx->TermEvent, RT_INDEFINITE_WAIT);
        AssertRC(rc);
    }

    RTSemEventSignal(pCtx->WaitEvent);
    RTThreadWait(pCtx->Thread, 10 * 1000, NULL);
    RTSemEventDestroy(pCtx->WaitEvent);
    RTSemEventDestroy(pCtx->TermEvent);

    VideoRecStreams::iterator it = pCtx->vecStreams.begin();
    while (it != pCtx->vecStreams.end())
    {
        PVIDEORECSTREAM pStream = (*it);

        if (pStream->fEnabled)
        {
            AssertPtr(pStream->pEBML);
            pStream->pEBML->Close();

            vpx_img_free(&pStream->Codec.VPX.RawImage);
            vpx_codec_err_t rcv = vpx_codec_destroy(&pStream->Codec.VPX.CodecCtx);
            Assert(rcv == VPX_CODEC_OK); RT_NOREF(rcv);

            if (pStream->pu8RgbBuf)
            {
                RTMemFree(pStream->pu8RgbBuf);
                pStream->pu8RgbBuf = NULL;
            }
        }

        if (pStream->pEBML)
        {
            delete pStream->pEBML;
            pStream->pEBML = NULL;
        }

        it = pCtx->vecStreams.erase(it);

        RTMemFree(pStream);
        pStream = NULL;
    }

    Assert(pCtx->vecStreams.empty());
    RTMemFree(pCtx);

    ASMAtomicWriteU32(&g_enmState, VIDREC_UNINITIALIZED);
}

/**
 * VideoRec utility function to initialize a video recording stream within the
 * given context.
 *
 * @returns IPRT status code.
 * @param   pCtx            Pointer to the video recording context.
 * @param   uScreen         Screen number.
 * @param   pszFile         File to save the recorded data to.
 * @param   uWidth          Width of the target image in the video recording file (movie).
 * @param   uHeight         Height of the target image in the video recording file.
 * @param   uRate           Target bitrate (in kbps).
 * @param   uFps            Frames per second to record with.
 * @param   uMaxTime        Maximum recording time (in s), 0 for no limit.
 * @param   uMaxFileSize    Maximum file size (in MB), 0 for no limit.
 * @param   pszOptions      Additional encoding options as a key/value string.
 */
int VideoRecStreamInit(PVIDEORECCONTEXT pCtx, uint32_t uScreen, const char *pszFile,
                       uint32_t uWidth, uint32_t uHeight, uint32_t uRate, uint32_t uFps,
                       uint32_t uMaxTime, uint32_t uMaxFileSize, const char *pszOptions)
{
    AssertPtrReturn(pCtx, VERR_INVALID_PARAMETER);
    AssertReturn(uScreen < pCtx->vecStreams.size(), VERR_INVALID_PARAMETER);

    pCtx->u64MaxTimeStamp = (uMaxTime > 0 ? RTTimeProgramMilliTS() + uMaxTime * 1000 : 0);
    pCtx->uMaxFileSize = uMaxFileSize;

    PVIDEORECSTREAM pStream = pCtx->vecStreams.at(uScreen);

    pStream->uTargetWidth  = uWidth;
    pStream->uTargetHeight = uHeight;
    pStream->pu8RgbBuf = (uint8_t *)RTMemAllocZ(uWidth * uHeight * 4);
    AssertReturn(pStream->pu8RgbBuf, VERR_NO_MEMORY);
    pStream->uEncoderDeadline = VPX_DL_REALTIME;

    /* Play safe: the file must not exist, overwriting is potentially
     * hazardous as nothing prevents the user from picking a file name of some
     * other important file, causing unintentional data loss. */

    vpx_codec_err_t rcv = vpx_codec_enc_config_default(DEFAULTCODEC, &pStream->Codec.VPX.Config, 0);
    if (rcv != VPX_CODEC_OK)
    {
        LogRel(("VideoRec: Failed to get default configuration for VPX codec: %s\n", vpx_codec_err_to_string(rcv)));
        return VERR_INVALID_PARAMETER;
    }

    com::Utf8Str options(pszOptions);
    size_t pos = 0;

    /* By default we enable everything (if available). */
    bool fHasVideoTrack = true;
#ifdef VBOX_WITH_AUDIO_VIDEOREC
    bool fHasAudioTrack = true;
#endif

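    /* Option keys recognized below (sketch only; the exact pszOptions syntax
       is whatever Utf8Str::parseKeyValue() accepts):
         vc_quality = realtime | good | best | <numeric encoder deadline>
         vc_enabled = false   (record audio only)
         ac_enabled = false   (record video only) */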
    com::Utf8Str key, value;
    while ((pos = options.parseKeyValue(key, value, pos)) != com::Utf8Str::npos)
    {
        if (key.compare("vc_quality", Utf8Str::CaseInsensitive) == 0)
        {
            if (value.compare("realtime", Utf8Str::CaseInsensitive) == 0)
            {
                pStream->uEncoderDeadline = VPX_DL_REALTIME;
            }
            else if (value.compare("good", Utf8Str::CaseInsensitive) == 0)
            {
                pStream->uEncoderDeadline = 1000000 / uFps;
            }
            else if (value.compare("best", Utf8Str::CaseInsensitive) == 0)
            {
                pStream->uEncoderDeadline = VPX_DL_BEST_QUALITY;
            }
            else
            {
                LogRel(("VideoRec: Setting quality deadline to '%s'\n", value.c_str()));
                pStream->uEncoderDeadline = value.toUInt32();
            }
        }
        else if (key.compare("vc_enabled", Utf8Str::CaseInsensitive) == 0)
        {
#ifdef VBOX_WITH_AUDIO_VIDEOREC
            if (value.compare("false", Utf8Str::CaseInsensitive) == 0) /* Disable video. */
            {
                fHasVideoTrack = false;
                LogRel(("VideoRec: Only audio will be recorded\n"));
            }
#endif
        }
        else if (key.compare("ac_enabled", Utf8Str::CaseInsensitive) == 0)
        {
#ifdef VBOX_WITH_AUDIO_VIDEOREC
            if (value.compare("false", Utf8Str::CaseInsensitive) == 0) /* Disable audio. */
            {
                fHasAudioTrack = false;
                LogRel(("VideoRec: Only video will be recorded\n"));
            }
#endif
        }
        else
            LogRel(("VideoRec: Unknown option '%s' (value '%s'), skipping\n", key.c_str(), value.c_str()));

    } /* while */

    uint64_t fOpen = RTFILE_O_WRITE | RTFILE_O_DENY_WRITE;
#ifdef DEBUG
    fOpen |= RTFILE_O_CREATE_REPLACE;
#else
    fOpen |= RTFILE_O_CREATE;
#endif

    int rc = pStream->pEBML->Create(pszFile, fOpen, WebMWriter::AudioCodec_Opus, WebMWriter::VideoCodec_VP8);
    if (RT_FAILURE(rc))
    {
        LogRel(("VideoRec: Failed to create the video capture output file '%s' (%Rrc)\n", pszFile, rc));
        return rc;
    }

    /* Target bitrate in kilobits per second. */
    pStream->Codec.VPX.Config.rc_target_bitrate = uRate;
    /* Frame width. */
    pStream->Codec.VPX.Config.g_w = uWidth;
    /* Frame height. */
    pStream->Codec.VPX.Config.g_h = uHeight;
    /* Time base: timestamps and durations are expressed in milliseconds (1/1000 s). */
    pStream->Codec.VPX.Config.g_timebase.num = 1;
    pStream->Codec.VPX.Config.g_timebase.den = 1000;
    /* Disable multithreading. */
    pStream->Codec.VPX.Config.g_threads = 0;

    pStream->uDelay = 1000 / uFps;

    if (fHasVideoTrack)
    {
        rc = pStream->pEBML->AddVideoTrack(pStream->Codec.VPX.Config.g_w, /* Width */
                                           pStream->Codec.VPX.Config.g_h, /* Height */
                                           uFps);
        if (RT_FAILURE(rc))
        {
            LogRel(("VideoRec: Failed to add video track to output file '%s' (%Rrc)\n", pszFile, rc));
            return rc;
        }
    }

    /* Initialize codec. */
    rcv = vpx_codec_enc_init(&pStream->Codec.VPX.CodecCtx, DEFAULTCODEC, &pStream->Codec.VPX.Config, 0);
    if (rcv != VPX_CODEC_OK)
    {
        LogFlow(("Failed to initialize VP8 encoder: %s\n", vpx_codec_err_to_string(rcv)));
        return VERR_INVALID_PARAMETER;
    }

    if (!vpx_img_alloc(&pStream->Codec.VPX.RawImage, VPX_IMG_FMT_I420, uWidth, uHeight, 1))
    {
        LogFlow(("Failed to allocate image %dx%d\n", uWidth, uHeight));
        return VERR_NO_MEMORY;
    }

    pStream->pu8YuvBuf = pStream->Codec.VPX.RawImage.planes[0];

    pCtx->fEnabled    = true;
    pStream->fEnabled = true;

    return VINF_SUCCESS;
}

/**
 * VideoRec utility function to check if recording is enabled.
 *
 * @returns true if recording is enabled.
 * @param   pCtx    Pointer to video recording context.
 */
bool VideoRecIsEnabled(PVIDEORECCONTEXT pCtx)
{
    RT_NOREF(pCtx);
    uint32_t enmState = ASMAtomicReadU32(&g_enmState);
    return (   enmState == VIDREC_IDLE
            || enmState == VIDREC_COPYING);
}

/**
 * VideoRec utility function to check if the recording engine is ready to accept
 * a new frame for the given screen.
 *
 * @returns true if the recording engine is ready.
 * @param   pCtx            Pointer to video recording context.
 * @param   uScreen         Screen ID.
 * @param   u64TimeStamp    Current time stamp.
 */
bool VideoRecIsReady(PVIDEORECCONTEXT pCtx, uint32_t uScreen, uint64_t u64TimeStamp)
{
    uint32_t enmState = ASMAtomicReadU32(&g_enmState);
    if (enmState != VIDREC_IDLE)
        return false;

    PVIDEORECSTREAM pStream = pCtx->vecStreams.at(uScreen);
    if (!pStream->fEnabled)
        return false;

    if (u64TimeStamp < pStream->u64LastTimeStamp + pStream->uDelay)
        return false;

    if (ASMAtomicReadBool(&pStream->fRgbFilled))
        return false;

    return true;
}

/**
 * VideoRec utility function to check if a recording limit has been reached:
 * maximum recording time, maximum file size, or insufficient free disk space.
 *
 * @returns true if any limit has been reached.
 * @param   pCtx            Pointer to video recording context.
 * @param   uScreen         Screen ID.
 * @param   u64TimeStamp    Current time stamp.
 */
bool VideoRecIsFull(PVIDEORECCONTEXT pCtx, uint32_t uScreen, uint64_t u64TimeStamp)
{
    PVIDEORECSTREAM pStream = pCtx->vecStreams.at(uScreen);
    if (!pStream->fEnabled)
        return false;

    if (pCtx->u64MaxTimeStamp > 0 && u64TimeStamp >= pCtx->u64MaxTimeStamp)
        return true;

    if (pCtx->uMaxFileSize > 0)
    {
        uint64_t sizeInMB = pStream->pEBML->GetFileSize() / (1024 * 1024);
        if (sizeInMB >= pCtx->uMaxFileSize)
            return true;
    }

    /* Check for available free disk space. */
    if (pStream->pEBML->GetAvailableSpace() < 0x100000)
    {
        LogRel(("VideoRec: Not enough free storage space available, stopping video capture\n"));
        return true;
    }

    return false;
}

/**
 * VideoRec utility function to encode the source image and write the encoded
 * image to the target file.
 *
 * @returns IPRT status code.
 * @param   pStream     Stream to encode and write.
 */
static int videoRecEncodeAndWrite(PVIDEORECSTREAM pStream)
{
    /* Presentation time stamp (PTS). */
    vpx_codec_pts_t pts = pStream->u64TimeStamp;
    vpx_codec_err_t rcv = vpx_codec_encode(&pStream->Codec.VPX.CodecCtx,
                                           &pStream->Codec.VPX.RawImage,
                                           pts                       /* Time stamp */,
                                           pStream->uDelay           /* How long to show this frame */,
                                           0                         /* Flags */,
                                           pStream->uEncoderDeadline /* Quality setting */);
    if (rcv != VPX_CODEC_OK)
    {
        LogFlow(("Failed to encode video frame: %s\n", vpx_codec_err_to_string(rcv)));
        return VERR_GENERAL_FAILURE;
    }

    vpx_codec_iter_t iter = NULL;
    int rc = VERR_NO_DATA;
    for (;;)
    {
        const vpx_codec_cx_pkt_t *pPacket = vpx_codec_get_cx_data(&pStream->Codec.VPX.CodecCtx, &iter);
        if (!pPacket)
            break;

        switch (pPacket->kind)
        {
            case VPX_CODEC_CX_FRAME_PKT:
            {
                WebMWriter::BlockData_VP8 blockData = { &pStream->Codec.VPX.Config, pPacket };
                rc = pStream->pEBML->WriteBlock(WebMWriter::BlockType_Video, &blockData, sizeof(blockData));
                break;
            }

            default:
                LogFlow(("Unexpected codec packet kind %d\n", pPacket->kind));
                break;
        }
    }

    pStream->cFrame++;
    return rc;
}

/**
 * VideoRec utility function to convert an RGB frame to YUV.
 *
 * @returns IPRT status code.
 * @param   pStrm   Stream whose current RGB frame should be converted.
 */
static int videoRecRGBToYUV(PVIDEORECSTREAM pStrm)
{
    switch (pStrm->u32PixelFormat)
    {
        case VPX_IMG_FMT_RGB32:
            LogFlow(("32 bit\n"));
            if (!colorConvWriteYUV420p<ColorConvBGRA32Iter>(pStrm->uTargetWidth,
                                                            pStrm->uTargetHeight,
                                                            pStrm->pu8YuvBuf,
                                                            pStrm->pu8RgbBuf))
                return VERR_GENERAL_FAILURE;
            break;
        case VPX_IMG_FMT_RGB24:
            LogFlow(("24 bit\n"));
            if (!colorConvWriteYUV420p<ColorConvBGR24Iter>(pStrm->uTargetWidth,
                                                           pStrm->uTargetHeight,
                                                           pStrm->pu8YuvBuf,
                                                           pStrm->pu8RgbBuf))
                return VERR_GENERAL_FAILURE;
            break;
        case VPX_IMG_FMT_RGB565:
            LogFlow(("16 bit (565)\n"));
            if (!colorConvWriteYUV420p<ColorConvBGR565Iter>(pStrm->uTargetWidth,
                                                            pStrm->uTargetHeight,
                                                            pStrm->pu8YuvBuf,
                                                            pStrm->pu8RgbBuf))
                return VERR_GENERAL_FAILURE;
            break;
        default:
            return VERR_GENERAL_FAILURE;
    }
    return VINF_SUCCESS;
}

/**
 * VideoRec utility function to copy a source image (FrameBuf) to the intermediate
 * RGB buffer. Only one frame is copied at a time; while the copy is in progress
 * the global state is set to VIDREC_COPYING.
 *
 * @thread EMT
 *
 * @returns IPRT status code.
 * @param   pCtx            Pointer to the video recording context.
 * @param   uScreen         Screen number.
 * @param   x               Starting x coordinate of the source buffer (framebuffer).
 * @param   y               Starting y coordinate of the source buffer (framebuffer).
 * @param   uPixelFormat    Pixel format.
 * @param   uBitsPerPixel   Bits per pixel.
 * @param   uBytesPerLine   Bytes per source scanline.
 * @param   uSourceWidth    Width of the source image (framebuffer).
 * @param   uSourceHeight   Height of the source image (framebuffer).
 * @param   pu8BufAddr      Pointer to the source image (framebuffer).
 * @param   u64TimeStamp    Time stamp (milliseconds).
 */
int VideoRecCopyToIntBuf(PVIDEORECCONTEXT pCtx, uint32_t uScreen, uint32_t x, uint32_t y,
                         uint32_t uPixelFormat, uint32_t uBitsPerPixel, uint32_t uBytesPerLine,
                         uint32_t uSourceWidth, uint32_t uSourceHeight, uint8_t *pu8BufAddr,
                         uint64_t u64TimeStamp)
{
    /* Do not execute during termination and guard against termination. */
    if (!ASMAtomicCmpXchgU32(&g_enmState, VIDREC_COPYING, VIDREC_IDLE))
        return VINF_TRY_AGAIN;

    int rc = VINF_SUCCESS;
    do
    {
        AssertPtrBreakStmt(pu8BufAddr, rc = VERR_INVALID_PARAMETER);
        AssertBreakStmt(uSourceWidth, rc = VERR_INVALID_PARAMETER);
        AssertBreakStmt(uSourceHeight, rc = VERR_INVALID_PARAMETER);
        AssertBreakStmt(uScreen < pCtx->vecStreams.size(), rc = VERR_INVALID_PARAMETER);

        PVIDEORECSTREAM pStream = pCtx->vecStreams.at(uScreen);
        if (!pStream->fEnabled)
        {
            rc = VINF_TRY_AGAIN; /* Not (yet) enabled. */
            break;
        }
        if (u64TimeStamp < pStream->u64LastTimeStamp + pStream->uDelay)
        {
            rc = VINF_TRY_AGAIN; /* Respect maximum frames per second. */
            break;
        }
        if (ASMAtomicReadBool(&pStream->fRgbFilled))
        {
            rc = VERR_TRY_AGAIN; /* Previous frame not yet encoded. */
            break;
        }

        pStream->u64LastTimeStamp = u64TimeStamp;

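        /* The source frame is centered in the (possibly larger or smaller)
           target frame: xDiff / yDiff are the signed offsets of the source
           relative to the target, and the code below clips the copy rectangle
           to the part that is actually visible in the target. */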
        int xDiff = ((int)pStream->uTargetWidth - (int)uSourceWidth) / 2;
        uint32_t w = uSourceWidth;
        if ((int)w + xDiff + (int)x <= 0) /* Nothing visible. */
        {
            rc = VERR_INVALID_PARAMETER;
            break;
        }

        uint32_t destX;
        if ((int)x < -xDiff)
        {
            w += xDiff + x;
            x = -xDiff;
            destX = 0;
        }
        else
            destX = x + xDiff;

        uint32_t h = uSourceHeight;
        int yDiff = ((int)pStream->uTargetHeight - (int)uSourceHeight) / 2;
        if ((int)h + yDiff + (int)y <= 0) /* Nothing visible. */
        {
            rc = VERR_INVALID_PARAMETER;
            break;
        }

        uint32_t destY;
        if ((int)y < -yDiff)
        {
            h += yDiff + (int)y;
            y = -yDiff;
            destY = 0;
        }
        else
            destY = y + yDiff;

        if (   destX > pStream->uTargetWidth
            || destY > pStream->uTargetHeight)
        {
            rc = VERR_INVALID_PARAMETER; /* Nothing visible. */
            break;
        }

        if (destX + w > pStream->uTargetWidth)
            w = pStream->uTargetWidth - destX;

        if (destY + h > pStream->uTargetHeight)
            h = pStream->uTargetHeight - destY;

        /* Calculate bytes per pixel. */
        uint32_t bpp = 1;
        if (uPixelFormat == BitmapFormat_BGR)
        {
            switch (uBitsPerPixel)
            {
                case 32:
                    pStream->u32PixelFormat = VPX_IMG_FMT_RGB32;
                    bpp = 4;
                    break;
                case 24:
                    pStream->u32PixelFormat = VPX_IMG_FMT_RGB24;
                    bpp = 3;
                    break;
                case 16:
                    pStream->u32PixelFormat = VPX_IMG_FMT_RGB565;
                    bpp = 2;
                    break;
                default:
                    AssertMsgFailed(("Unknown color depth! mBitsPerPixel=%d\n", uBitsPerPixel));
                    break;
            }
        }
        else
            AssertMsgFailed(("Unknown pixel format! mPixelFormat=%d\n", uPixelFormat));

        /* If one of the dimensions of the current frame is smaller than before,
         * clear the entire buffer to prevent artifacts from the previous frame. */
        if (   uSourceWidth  < pStream->uLastSourceWidth
            || uSourceHeight < pStream->uLastSourceHeight)
            memset(pStream->pu8RgbBuf, 0, pStream->uTargetWidth * pStream->uTargetHeight * 4);

        pStream->uLastSourceWidth  = uSourceWidth;
        pStream->uLastSourceHeight = uSourceHeight;

        /* Calculate the start offsets in the source and destination buffers. */
        uint32_t offSrc = y * uBytesPerLine + x * bpp;
        uint32_t offDst = (destY * pStream->uTargetWidth + destX) * bpp;
        /* Do the actual copying. */
        for (unsigned int i = 0; i < h; i++)
        {
            /* Overflow check. */
            Assert(offSrc + w * bpp <= uSourceHeight * uBytesPerLine);
            Assert(offDst + w * bpp <= pStream->uTargetHeight * pStream->uTargetWidth * bpp);
            memcpy(pStream->pu8RgbBuf + offDst, pu8BufAddr + offSrc, w * bpp);
            offSrc += uBytesPerLine;
            offDst += pStream->uTargetWidth * bpp;
        }

        pStream->u64TimeStamp = u64TimeStamp;

        ASMAtomicWriteBool(&pStream->fRgbFilled, true);
        RTSemEventSignal(pCtx->WaitEvent);
    } while (0);

    if (!ASMAtomicCmpXchgU32(&g_enmState, VIDREC_IDLE, VIDREC_COPYING))
    {
        rc = RTSemEventSignal(pCtx->TermEvent);
        AssertRC(rc);
    }

    return rc;
}