VirtualBox

source: vbox/trunk/src/VBox/Main/src-client/VideoRec.cpp@ 74955

Last change on this file since 74955 was 74955, checked in by vboxsync, 6 years ago

VideoRec/Main: Implemented applying audio driver configuration at runtime (when being attached).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
  • Property svn:mergeinfo set to:
    /branches/VBox-3.0/src/VBox/Frontends/VBoxHeadless/VideoCapture/EncodeAndWrite.cpp:58652,70973
    /branches/VBox-3.2/src/VBox/Frontends/VBoxHeadless/VideoCapture/EncodeAndWrite.cpp:66309,66318
    /branches/VBox-4.0/src/VBox/Frontends/VBoxHeadless/VideoCapture/EncodeAndWrite.cpp:70873
    /branches/VBox-4.1/src/VBox/Frontends/VBoxHeadless/VideoCapture/EncodeAndWrite.cpp:74233
    /branches/VBox-4.2/src/VBox/Main/src-client/VideoRec.cpp:91503-91504,91506-91508,91510,91514-91515,91521
    /branches/VBox-4.3/src/VBox/Main/src-client/VideoRec.cpp:91223
    /branches/VBox-4.3/trunk/src/VBox/Main/src-client/VideoRec.cpp:91223
    /branches/dsen/gui/src/VBox/Frontends/VBoxHeadless/VideoCapture/EncodeAndWrite.cpp:79076-79078,79089,79109-79110,79112-79113,79127-79130,79134,79141,79151,79155,79157-79159,79193,79197
    /branches/dsen/gui2/src/VBox/Frontends/VBoxHeadless/VideoCapture/EncodeAndWrite.cpp:79224,79228,79233,79235,79258,79262-79263,79273,79341,79345,79354,79357,79387-79388,79559-79569,79572-79573,79578,79581-79582,79590-79591,79598-79599,79602-79603,79605-79606,79632,79635,79637,79644
    /branches/dsen/gui3/src/VBox/Frontends/VBoxHeadless/VideoCapture/EncodeAndWrite.cpp:79645-79692
File size: 60.8 KB
1/* $Id: VideoRec.cpp 74955 2018-10-19 18:14:51Z vboxsync $ */
2/** @file
3 * Video recording (with optional audio recording) code.
4 *
5 * This code employs a separate encoding thread per recording context
6 * to keep time spent in EMT as short as possible. Each configured VM display
7 * is represented by its own recording stream, which in turn has its own rendering
8 * queue. Common recording data across all recording streams is kept in a
9 * separate queue in the recording context to minimize data duplication and
10 * multiplexing overhead in EMT.
11 */
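/*
 * Rough data flow, sketched for orientation (all names below refer to code in this file):
 *
 *   EMT:            VideoRecSendVideoFrame() / VideoRecSendAudioFrame()
 *                     -> per-stream block map (VIDEORECSTREAM::Blocks)
 *                     -> common block map (VIDEORECCONTEXT::mapBlocksCommon, audio only)
 *                     -> videoRecThreadNotify()
 *   Worker thread:  videoRecThread()
 *                     -> videoRecRGBToYUV() -> videoRecEncodeAndWrite() (VP8/VP9 via libvpx)
 *                     -> WebMWriter::WriteBlock() into the per-stream WebM file
 */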
12
13/*
14 * Copyright (C) 2012-2018 Oracle Corporation
15 *
16 * This file is part of VirtualBox Open Source Edition (OSE), as
17 * available from http://www.virtualbox.org. This file is free software;
18 * you can redistribute it and/or modify it under the terms of the GNU
19 * General Public License (GPL) as published by the Free Software
20 * Foundation, in version 2 as it comes in the "COPYING" file of the
21 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
22 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
23 */
24
25#ifdef LOG_GROUP
26# undef LOG_GROUP
27#endif
28#define LOG_GROUP LOG_GROUP_MAIN_DISPLAY
29#include "LoggingNew.h"
30
31#include <stdexcept>
32#include <vector>
33
34#include <iprt/asm.h>
35#include <iprt/assert.h>
36#include <iprt/critsect.h>
37#include <iprt/path.h>
38#include <iprt/semaphore.h>
39#include <iprt/thread.h>
40#include <iprt/time.h>
41
42#include <VBox/err.h>
43#include <VBox/com/VirtualBox.h>
44
45#include "WebMWriter.h"
46#include "VideoRec.h"
47
48#ifdef VBOX_WITH_LIBVPX
49# define VPX_CODEC_DISABLE_COMPAT 1
50# include "vpx/vp8cx.h"
51# include "vpx/vpx_image.h"
52# include "vpx/vpx_encoder.h"
53#endif /* VBOX_WITH_LIBVPX */
54
55using namespace com;
56
57#ifdef DEBUG_andy
58/** Enables dumping audio / video data for debugging reasons. */
59//# define VBOX_VIDEOREC_DUMP
60#endif
61
62/**
63 * Enumeration for a video recording state.
64 */
65enum VIDEORECSTS
66{
67 /** Not initialized. */
68 VIDEORECSTS_UNINITIALIZED = 0,
69 /** Initialized. */
70 VIDEORECSTS_INITIALIZED = 1,
71 /** The usual 32-bit hack. */
72 VIDEORECSTS_32BIT_HACK = 0x7fffffff
73};
74
75/**
76 * Enumeration for supported pixel formats.
77 */
78enum VIDEORECPIXELFMT
79{
80 /** Unknown pixel format. */
81 VIDEORECPIXELFMT_UNKNOWN = 0,
82 /** RGB 24. */
83 VIDEORECPIXELFMT_RGB24 = 1,
84 /** RGB 32. */
85 VIDEORECPIXELFMT_RGB32 = 2,
86 /** RGB 565. */
87 VIDEORECPIXELFMT_RGB565 = 3,
88 /** The usual 32-bit hack. */
89 VIDEORECPIXELFMT_32BIT_HACK = 0x7fffffff
90};
91
92/**
93 * Structure for keeping specific video recording codec data.
94 */
95typedef struct VIDEORECVIDEOCODEC
96{
97 union
98 {
99#ifdef VBOX_WITH_LIBVPX
100 struct
101 {
102 /** VPX codec context. */
103 vpx_codec_ctx_t Ctx;
104 /** VPX codec configuration. */
105 vpx_codec_enc_cfg_t Cfg;
106 /** VPX image context. */
107 vpx_image_t RawImage;
108 } VPX;
109#endif /* VBOX_WITH_LIBVPX */
110 };
111} VIDEORECVIDEOCODEC, *PVIDEORECVIDEOCODEC;
112
113/**
114 * Structure for keeping a single video recording video frame.
115 */
116typedef struct VIDEORECVIDEOFRAME
117{
118 /** X resolution of this frame. */
119 uint32_t uWidth;
120 /** Y resolution of this frame. */
121 uint32_t uHeight;
122 /** Pixel format of this frame. */
123 uint32_t uPixelFormat;
124 /** RGB buffer containing the unmodified frame buffer data from Main's display. */
125 uint8_t *pu8RGBBuf;
126 /** Size (in bytes) of the RGB buffer. */
127 size_t cbRGBBuf;
128} VIDEORECVIDEOFRAME, *PVIDEORECVIDEOFRAME;
129
130#ifdef VBOX_WITH_AUDIO_VIDEOREC
131/**
132 * Structure for keeping a single video recording audio frame.
133 */
134typedef struct VIDEORECAUDIOFRAME
135{
136 /** Pointer to audio data. */
137 uint8_t *pvBuf;
138 /** Size (in bytes) of audio data. */
139 size_t cbBuf;
140} VIDEORECAUDIOFRAME, *PVIDEORECAUDIOFRAME;
141#endif
142
143/**
144 * Enumeration for specifying a video recording block type.
145 */
146typedef enum VIDEORECBLOCKTYPE
147{
149 /** Unknown block type, do not use. */
149 VIDEORECBLOCKTYPE_UNKNOWN = 0,
150 /** The block is a video frame. */
151 VIDEORECBLOCKTYPE_VIDEO,
152#ifdef VBOX_WITH_AUDIO_VIDEOREC
153 /** The block is an audio frame. */
154 VIDEORECBLOCKTYPE_AUDIO
155#endif
156} VIDEORECBLOCKTYPE;
157
158/**
159 * Generic structure for keeping a single video recording (data) block.
160 */
161typedef struct VIDEORECBLOCK
162{
163 /** The block's type. */
164 VIDEORECBLOCKTYPE enmType;
166 /** Number of references held to this block. */
166 uint16_t cRefs;
167 /** The (absolute) time stamp (in ms, PTS) of this block. */
168 uint64_t uTimeStampMs;
170 /** Pointer to the actual (opaque) block data, depending on the block's type. */
170 void *pvData;
171 /** Size (in bytes) of the (opaque) data block. */
172 size_t cbData;
173} VIDEORECBLOCK, *PVIDEORECBLOCK;
174
175/** List for keeping video recording (data) blocks. */
176typedef std::list<PVIDEORECBLOCK> VideoRecBlockList;
177
178static void videoRecBlockFree(PVIDEORECBLOCK pBlock);
179
180/** Structure for queuing all blocks bound to a single timecode.
181 * This can happen if multiple tracks are involved. */
182struct VideoRecBlocks
183{
184 virtual ~VideoRecBlocks()
185 {
186 Clear();
187 }
188
189 /**
190 * Resets a video recording block list by removing (destroying)
191 * all current elements.
192 */
193 void Clear()
194 {
195 while (!List.empty())
196 {
197 PVIDEORECBLOCK pBlock = List.front();
198 videoRecBlockFree(pBlock);
199 List.pop_front();
200 }
201
202 Assert(List.size() == 0);
203 }
204
205 /** The actual block list for this timecode. */
206 VideoRecBlockList List;
207};
208
209/** A block map containing all currently queued blocks.
210 * The key specifies a unique timecode, whereas the value
211 * is a list of blocks which all correlate to the same key (timecode). */
212typedef std::map<uint64_t, VideoRecBlocks *> VideoRecBlockMap;
213
214/**
215 * Structure for holding a set of video recording (data) blocks.
216 */
217struct VideoRecBlockSet
218{
219 virtual ~VideoRecBlockSet()
220 {
221 Clear();
222 }
223
224 /**
225 * Resets a video recording block set by removing (destroying)
226 * all current elements.
227 */
228 void Clear()
229 {
230 VideoRecBlockMap::iterator it = Map.begin();
231 while (it != Map.end())
232 {
233 it->second->Clear();
234 delete it->second;
235 Map.erase(it);
236 it = Map.begin();
237 }
238
239 Assert(Map.size() == 0);
240 }
241
242 /** Timestamp (in ms) when this set was last processed. */
243 uint64_t tsLastProcessedMs;
244 /** All blocks related to this block set. */
245 VideoRecBlockMap Map;
246};
247
248/**
249 * Structure for maintaining a video recording stream.
250 */
251struct VIDEORECSTREAM
252{
253 /** Video recording context this stream is associated to. */
254 PVIDEORECCONTEXT pCtx;
255 /** Destination where to write the stream to. */
256 VIDEORECDEST enmDst;
257 union
258 {
259 struct
260 {
261 /** File handle to use for writing. */
262 RTFILE hFile;
263 /** File name being used for this stream. */
264 char *pszFile;
265 /** Pointer to WebM writer instance being used. */
266 WebMWriter *pWEBM;
267 } File;
268 };
269#ifdef VBOX_WITH_AUDIO_VIDEOREC
270 /** Track number of audio stream. */
271 uint8_t uTrackAudio;
272#endif
273 /** Track number of video stream. */
274 uint8_t uTrackVideo;
275 /** Screen ID. */
276 uint16_t uScreenID;
277 /** Whether video recording is enabled or not. */
278 bool fEnabled;
279 /** Critical section to serialize access. */
280 RTCRITSECT CritSect;
281
282 struct
283 {
284 /** Codec-specific data. */
285 VIDEORECVIDEOCODEC Codec;
286 /** Minimal delay (in ms) between two video frames.
287 * This value is based on the configured FPS rate. */
288 uint32_t uDelayMs;
289 /** Target X resolution (in pixels). */
290 uint32_t uWidth;
291 /** Target Y resolution (in pixels). */
292 uint32_t uHeight;
293 /** Time stamp (in ms) of the last video frame we encoded. */
294 uint64_t uLastTimeStampMs;
295 /** Pointer to the codec's internal YUV buffer. */
296 uint8_t *pu8YuvBuf;
297 /** Number of failed attempts to encode the current video frame in a row. */
298 uint16_t cFailedEncodingFrames;
299 } Video;
300
301 /** Common set of video recording (data) blocks, needed for
302 * multiplexing to all recording streams. */
303 VideoRecBlockSet Blocks;
304};
305
306/** Vector of video recording streams. */
307typedef std::vector <PVIDEORECSTREAM> VideoRecStreams;
308
309/**
310 * Structure for keeping a video recording context.
311 */
312struct VIDEORECCONTEXT
313{
314 /** Used recording configuration. */
315 VIDEORECCFG Cfg;
316 /** The current state. */
317 uint32_t enmState;
318 /** Critical section to serialize access. */
319 RTCRITSECT CritSect;
320 /** Semaphore to signal the encoding worker thread. */
321 RTSEMEVENT WaitEvent;
323 /** Whether this context is in started state or not. */
323 bool fStarted;
324 /** Shutdown indicator. */
325 bool fShutdown;
326 /** Worker thread. */
327 RTTHREAD Thread;
328 /** Vector of current recording streams.
329 * One recording stream is used per VM screen (display). */
330 VideoRecStreams vecStreams;
331 /** Timestamp (in ms) of when recording has been started. */
332 uint64_t tsStartMs;
333 /** Block map of common blocks which need to get multiplexed
334 * to all recording streams. This common block map should help
335 * reduce the time spent in EMT and avoid doing the (expensive)
336 * multiplexing work in there.
337 *
338 * For now this only affects audio, e.g. all recording streams
339 * need to have the same audio data at a specific point in time. */
340 VideoRecBlockMap mapBlocksCommon;
341};
342
343#ifdef VBOX_VIDEOREC_DUMP
344#pragma pack(push)
345#pragma pack(1)
346typedef struct
347{
348 uint16_t u16Magic;
349 uint32_t u32Size;
350 uint16_t u16Reserved1;
351 uint16_t u16Reserved2;
352 uint32_t u32OffBits;
353} VIDEORECBMPHDR, *PVIDEORECBMPHDR;
354AssertCompileSize(VIDEORECBMPHDR, 14);
355
356typedef struct
357{
358 uint32_t u32Size;
359 uint32_t u32Width;
360 uint32_t u32Height;
361 uint16_t u16Planes;
362 uint16_t u16BitCount;
363 uint32_t u32Compression;
364 uint32_t u32SizeImage;
365 uint32_t u32XPelsPerMeter;
366 uint32_t u32YPelsPerMeter;
367 uint32_t u32ClrUsed;
368 uint32_t u32ClrImportant;
369} VIDEORECBMPDIBHDR, *PVIDEORECBMPDIBHDR;
370AssertCompileSize(VIDEORECBMPDIBHDR, 40);
371
372#pragma pack(pop)
373#endif /* VBOX_VIDEOREC_DUMP */
374
375/**
376 * Iterator class for running through a BGRA32 image buffer and converting
377 * it to RGB.
378 */
379class ColorConvBGRA32Iter
380{
381private:
382 enum { PIX_SIZE = 4 };
383public:
384 ColorConvBGRA32Iter(unsigned aWidth, unsigned aHeight, uint8_t *aBuf)
385 {
386 LogFlow(("width = %d height=%d aBuf=%lx\n", aWidth, aHeight, aBuf));
387 mPos = 0;
388 mSize = aWidth * aHeight * PIX_SIZE;
389 mBuf = aBuf;
390 }
391 /**
392 * Convert the next pixel to RGB.
393 * @returns true on success, false if we have reached the end of the buffer
394 * @param aRed where to store the red value
395 * @param aGreen where to store the green value
396 * @param aBlue where to store the blue value
397 */
398 bool getRGB(unsigned *aRed, unsigned *aGreen, unsigned *aBlue)
399 {
400 bool rc = false;
401 if (mPos + PIX_SIZE <= mSize)
402 {
403 *aRed = mBuf[mPos + 2];
404 *aGreen = mBuf[mPos + 1];
405 *aBlue = mBuf[mPos ];
406 mPos += PIX_SIZE;
407 rc = true;
408 }
409 return rc;
410 }
411
412 /**
413 * Skip forward by a certain number of pixels
414 * @param aPixels how many pixels to skip
415 */
416 void skip(unsigned aPixels)
417 {
418 mPos += PIX_SIZE * aPixels;
419 }
420private:
421 /** Size of the picture buffer */
422 unsigned mSize;
423 /** Current position in the picture buffer */
424 unsigned mPos;
425 /** Address of the picture buffer */
426 uint8_t *mBuf;
427};
428
429/**
430 * Iterator class for running through a BGR24 image buffer and converting
431 * it to RGB.
432 */
433class ColorConvBGR24Iter
434{
435private:
436 enum { PIX_SIZE = 3 };
437public:
438 ColorConvBGR24Iter(unsigned aWidth, unsigned aHeight, uint8_t *aBuf)
439 {
440 mPos = 0;
441 mSize = aWidth * aHeight * PIX_SIZE;
442 mBuf = aBuf;
443 }
444 /**
445 * Convert the next pixel to RGB.
446 * @returns true on success, false if we have reached the end of the buffer
447 * @param aRed where to store the red value
448 * @param aGreen where to store the green value
449 * @param aBlue where to store the blue value
450 */
451 bool getRGB(unsigned *aRed, unsigned *aGreen, unsigned *aBlue)
452 {
453 bool rc = false;
454 if (mPos + PIX_SIZE <= mSize)
455 {
456 *aRed = mBuf[mPos + 2];
457 *aGreen = mBuf[mPos + 1];
458 *aBlue = mBuf[mPos ];
459 mPos += PIX_SIZE;
460 rc = true;
461 }
462 return rc;
463 }
464
465 /**
466 * Skip forward by a certain number of pixels
467 * @param aPixels how many pixels to skip
468 */
469 void skip(unsigned aPixels)
470 {
471 mPos += PIX_SIZE * aPixels;
472 }
473private:
474 /** Size of the picture buffer */
475 unsigned mSize;
476 /** Current position in the picture buffer */
477 unsigned mPos;
478 /** Address of the picture buffer */
479 uint8_t *mBuf;
480};
481
482/**
483 * Iterator class for running through a BGR565 image buffer and converting
484 * it to RGB.
485 */
486class ColorConvBGR565Iter
487{
488private:
489 enum { PIX_SIZE = 2 };
490public:
491 ColorConvBGR565Iter(unsigned aWidth, unsigned aHeight, uint8_t *aBuf)
492 {
493 mPos = 0;
494 mSize = aWidth * aHeight * PIX_SIZE;
495 mBuf = aBuf;
496 }
497 /**
498 * Convert the next pixel to RGB.
499 * @returns true on success, false if we have reached the end of the buffer
500 * @param aRed where to store the red value
501 * @param aGreen where to store the green value
502 * @param aBlue where to store the blue value
503 */
504 bool getRGB(unsigned *aRed, unsigned *aGreen, unsigned *aBlue)
505 {
506 bool rc = false;
507 if (mPos + PIX_SIZE <= mSize)
508 {
509 unsigned uFull = (((unsigned) mBuf[mPos + 1]) << 8)
510 | ((unsigned) mBuf[mPos]);
511 *aRed = (uFull >> 8) & ~7;
512 *aGreen = (uFull >> 3) & ~3 & 0xff;
513 *aBlue = (uFull << 3) & ~7 & 0xff;
514 mPos += PIX_SIZE;
515 rc = true;
516 }
517 return rc;
518 }
519
520 /**
521 * Skip forward by a certain number of pixels
522 * @param aPixels how many pixels to skip
523 */
524 void skip(unsigned aPixels)
525 {
526 mPos += PIX_SIZE * aPixels;
527 }
528private:
529 /** Size of the picture buffer */
530 unsigned mSize;
531 /** Current position in the picture buffer */
532 unsigned mPos;
533 /** Address of the picture buffer */
534 uint8_t *mBuf;
535};
536
537#ifdef VBOX_WITH_AUDIO_VIDEOREC
538static void videoRecAudioFrameFree(PVIDEORECAUDIOFRAME pFrame);
539#endif
540static int videoRecEncodeAndWrite(PVIDEORECSTREAM pStream, uint64_t uTimeStampMs, PVIDEORECVIDEOFRAME pFrame);
541static int videoRecRGBToYUV(uint32_t uPixelFormat,
542 uint8_t *paDst, uint32_t uDstWidth, uint32_t uDstHeight,
543 uint8_t *paSrc, uint32_t uSrcWidth, uint32_t uSrcHeight);
544static int videoRecStreamCloseFile(PVIDEORECSTREAM pStream);
545static void videoRecStreamLock(PVIDEORECSTREAM pStream);
546static void videoRecStreamUnlock(PVIDEORECSTREAM pStream);
547static void videoRecVideoFrameFree(PVIDEORECVIDEOFRAME pFrame);
548
549/**
550 * Convert an image to YUV420p format.
551 *
552 * @return true on success, false on failure.
553 * @param aDstBuf The destination image buffer.
554 * @param aDstWidth Width (in pixel) of destination buffer.
555 * @param aDstHeight Height (in pixel) of destination buffer.
556 * @param aSrcBuf The source image buffer.
557 * @param aSrcWidth Width (in pixel) of source buffer.
558 * @param aSrcHeight Height (in pixel) of source buffer.
559 */
560template <class T>
561inline bool colorConvWriteYUV420p(uint8_t *aDstBuf, unsigned aDstWidth, unsigned aDstHeight,
562 uint8_t *aSrcBuf, unsigned aSrcWidth, unsigned aSrcHeight)
563{
564 RT_NOREF(aDstWidth, aDstHeight);
565
566 AssertReturn(!(aSrcWidth & 1), false);
567 AssertReturn(!(aSrcHeight & 1), false);
568
569 bool fRc = true;
570 T iter1(aSrcWidth, aSrcHeight, aSrcBuf);
571 T iter2 = iter1;
572 iter2.skip(aSrcWidth);
573 unsigned cPixels = aSrcWidth * aSrcHeight;
574 unsigned offY = 0;
575 unsigned offU = cPixels;
576 unsigned offV = cPixels + cPixels / 4;
577 unsigned const cyHalf = aSrcHeight / 2;
578 unsigned const cxHalf = aSrcWidth / 2;
579 for (unsigned i = 0; i < cyHalf && fRc; ++i)
580 {
581 for (unsigned j = 0; j < cxHalf; ++j)
582 {
583 unsigned red, green, blue;
584 fRc = iter1.getRGB(&red, &green, &blue);
585 AssertReturn(fRc, false);
586 aDstBuf[offY] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
587 unsigned u = (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
588 unsigned v = (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;
589
590 fRc = iter1.getRGB(&red, &green, &blue);
591 AssertReturn(fRc, false);
592 aDstBuf[offY + 1] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
593 u += (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
594 v += (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;
595
596 fRc = iter2.getRGB(&red, &green, &blue);
597 AssertReturn(fRc, false);
598 aDstBuf[offY + aSrcWidth] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
599 u += (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
600 v += (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;
601
602 fRc = iter2.getRGB(&red, &green, &blue);
603 AssertReturn(fRc, false);
604 aDstBuf[offY + aSrcWidth + 1] = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
605 u += (((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128) / 4;
606 v += (((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128) / 4;
607
608 aDstBuf[offU] = u;
609 aDstBuf[offV] = v;
610 offY += 2;
611 ++offU;
612 ++offV;
613 }
614
615 iter1.skip(aSrcWidth);
616 iter2.skip(aSrcWidth);
617 offY += aSrcWidth;
618 }
619
620 return true;
621}
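/*
 * For reference, the fixed-point math above is the usual BT.601 RGB-to-YCbCr transform
 * (coefficients scaled by 256), with the chroma of each 2x2 pixel block averaged to
 * obtain 4:2:0 subsampling:
 *
 *   Y =  0.257*R + 0.504*G + 0.098*B +  16
 *   U = -0.148*R - 0.291*G + 0.439*B + 128
 *   V =  0.439*R - 0.368*G - 0.071*B + 128
 */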
622
623/**
624 * Convert an image to RGB24 format
625 * @returns true on success, false on failure
626 * @param aWidth width of image
627 * @param aHeight height of image
628 * @param aDestBuf an allocated memory buffer large enough to hold the
629 * destination image (i.e. width * height * 3 bytes)
630 * @param aSrcBuf the source image as an array of bytes
631 */
632template <class T>
633inline bool colorConvWriteRGB24(unsigned aWidth, unsigned aHeight,
634 uint8_t *aDestBuf, uint8_t *aSrcBuf)
635{
636 enum { PIX_SIZE = 3 };
637 bool rc = true;
638 AssertReturn(0 == (aWidth & 1), false);
639 AssertReturn(0 == (aHeight & 1), false);
640 T iter(aWidth, aHeight, aSrcBuf);
641 unsigned cPixels = aWidth * aHeight;
642 for (unsigned i = 0; i < cPixels && rc; ++i)
643 {
644 unsigned red, green, blue;
645 rc = iter.getRGB(&red, &green, &blue);
646 if (rc)
647 {
648 aDestBuf[i * PIX_SIZE ] = red;
649 aDestBuf[i * PIX_SIZE + 1] = green;
650 aDestBuf[i * PIX_SIZE + 2] = blue;
651 }
652 }
653 return rc;
654}
655
656#ifdef VBOX_WITH_AUDIO_VIDEOREC
657/**
658 * Frees a previously allocated video recording audio frame.
659 *
660 * @param pFrame Audio frame to free. The pointer will be invalid after return.
661 */
662static void videoRecAudioFrameFree(PVIDEORECAUDIOFRAME pFrame)
663{
664 if (!pFrame)
665 return;
666
667 if (pFrame->pvBuf)
668 {
669 Assert(pFrame->cbBuf);
670 RTMemFree(pFrame->pvBuf);
671 }
672 RTMemFree(pFrame);
673 pFrame = NULL;
674}
675#endif
676
677/**
678 * Frees a video recording (data) block.
679 *
681 * @param pBlock Video recording (data) block to free. The pointer will be invalid after return.
682 */
683static void videoRecBlockFree(PVIDEORECBLOCK pBlock)
684{
685 if (!pBlock)
686 return;
687
688 switch (pBlock->enmType)
689 {
690 case VIDEORECBLOCKTYPE_VIDEO:
691 videoRecVideoFrameFree((PVIDEORECVIDEOFRAME)pBlock->pvData);
692 break;
693
694#ifdef VBOX_WITH_AUDIO_VIDEOREC
695 case VIDEORECBLOCKTYPE_AUDIO:
696 videoRecAudioFrameFree((PVIDEORECAUDIOFRAME)pBlock->pvData);
697 break;
698#endif
699 default:
700 AssertFailed();
701 break;
702 }
703
704 RTMemFree(pBlock);
705 pBlock = NULL;
706}
707
708/**
709 * Worker thread for all streams of a video recording context.
710 *
711 * For video frames, this also does the RGB/YUV conversion and encoding.
712 */
713static DECLCALLBACK(int) videoRecThread(RTTHREAD hThreadSelf, void *pvUser)
714{
715 PVIDEORECCONTEXT pCtx = (PVIDEORECCONTEXT)pvUser;
716
717 /* Signal that we're up and rockin'. */
718 RTThreadUserSignal(hThreadSelf);
719
720 LogFunc(("Thread started\n"));
721
722 for (;;)
723 {
724 int rc = RTSemEventWait(pCtx->WaitEvent, RT_INDEFINITE_WAIT);
725 AssertRCBreak(rc);
726
727 /** @todo r=andy This is inefficient -- as we already wake up this thread
728 * for every screen from Main, we here go again (on every wake up) through
729 * all screens. */
730 for (VideoRecStreams::iterator itStream = pCtx->vecStreams.begin(); itStream != pCtx->vecStreams.end(); itStream++)
731 {
732 PVIDEORECSTREAM pStream = (*itStream);
733
734 videoRecStreamLock(pStream);
735
736 if (!pStream->fEnabled)
737 {
738 videoRecStreamUnlock(pStream);
739 continue;
740 }
741
742 VideoRecBlockMap::iterator itBlockStream = pStream->Blocks.Map.begin();
743 while (itBlockStream != pStream->Blocks.Map.end())
744 {
745 const uint64_t uTimeStampMs = itBlockStream->first;
746 VideoRecBlocks *pBlocks = itBlockStream->second;
747
748 AssertPtr(pBlocks);
749
750 while (!pBlocks->List.empty())
751 {
752 PVIDEORECBLOCK pBlock = pBlocks->List.front();
753 AssertPtr(pBlock);
754
755 if (pBlock->enmType == VIDEORECBLOCKTYPE_VIDEO)
756 {
757 PVIDEORECVIDEOFRAME pVideoFrame = (PVIDEORECVIDEOFRAME)pBlock->pvData;
758
759 rc = videoRecRGBToYUV(pVideoFrame->uPixelFormat,
760 /* Destination */
761 pStream->Video.pu8YuvBuf, pVideoFrame->uWidth, pVideoFrame->uHeight,
762 /* Source */
763 pVideoFrame->pu8RGBBuf, pStream->Video.uWidth, pStream->Video.uHeight);
764 if (RT_SUCCESS(rc))
765 rc = videoRecEncodeAndWrite(pStream, uTimeStampMs, pVideoFrame);
766 }
767
768 videoRecBlockFree(pBlock);
769 pBlock = NULL;
770
771 pBlocks->List.pop_front();
772 }
773
774#ifdef VBOX_WITH_AUDIO_VIDEOREC
775 /* As each (enabled) screen has to get the same audio data, look for common (audio) data which needs to be
776 * written to the screen's assigned recording stream. */
777 VideoRecBlockMap::iterator itCommon = pCtx->mapBlocksCommon.begin();
778 while (itCommon != pCtx->mapBlocksCommon.end())
779 {
780 VideoRecBlockList::iterator itBlockCommon = itCommon->second->List.begin();
781 while (itBlockCommon != itCommon->second->List.end())
782 {
783 PVIDEORECBLOCK pBlockCommon = (PVIDEORECBLOCK)(*itBlockCommon);
784 switch (pBlockCommon->enmType)
785 {
786 case VIDEORECBLOCKTYPE_AUDIO:
787 {
788 PVIDEORECAUDIOFRAME pAudioFrame = (PVIDEORECAUDIOFRAME)pBlockCommon->pvData;
789 AssertPtr(pAudioFrame);
790 AssertPtr(pAudioFrame->pvBuf);
791 Assert(pAudioFrame->cbBuf);
792
793 WebMWriter::BlockData_Opus blockData = { pAudioFrame->pvBuf, pAudioFrame->cbBuf,
794 pBlockCommon->uTimeStampMs };
795 rc = pStream->File.pWEBM->WriteBlock(pStream->uTrackAudio, &blockData, sizeof(blockData));
796 break;
797 }
798
799 default:
800 AssertFailed();
801 break;
802 }
803
804 Assert(pBlockCommon->cRefs);
805 if (--pBlockCommon->cRefs == 0)
806 {
807 videoRecBlockFree(pBlockCommon);
808 itCommon->second->List.erase(itBlockCommon);
809 itBlockCommon = itCommon->second->List.begin();
810 }
811 else
812 ++itBlockCommon;
813 }
814
815 /* If no entries are left over in the block map, remove it altogether. */
816 if (itCommon->second->List.empty())
817 {
818 delete itCommon->second;
819 pCtx->mapBlocksCommon.erase(itCommon);
820 }
821
822 itCommon = pCtx->mapBlocksCommon.begin();
823
824 LogFunc(("Common blocks: %zu\n", pCtx->mapBlocksCommon.size()));
825 }
826#endif
827 ++itBlockStream;
828 }
829
830 videoRecStreamUnlock(pStream);
831 }
832
833 /* Keep going in case of errors. */
834
835 if (ASMAtomicReadBool(&pCtx->fShutdown))
836 {
837 LogFunc(("Thread is shutting down ...\n"));
838 break;
839 }
840
841 } /* for */
842
843 LogFunc(("Thread ended\n"));
844 return VINF_SUCCESS;
845}
846
847/**
848 * Notifies a recording context's encoding thread.
849 *
850 * @returns IPRT status code.
851 * @param pCtx Video recording context to notify thread for.
852 */
853static int videoRecThreadNotify(PVIDEORECCONTEXT pCtx)
854{
855 AssertPtrReturn(pCtx, VERR_INVALID_POINTER);
856
857 return RTSemEventSignal(pCtx->WaitEvent);
858}
859
860/**
861 * Creates a video recording context.
862 *
863 * @returns IPRT status code.
864 * @param cScreens Number of screens to create context for.
865 * @param pVideoRecCfg Pointer to video recording configuration to use.
866 * @param ppCtx Pointer to created video recording context on success.
867 */
868int VideoRecContextCreate(uint32_t cScreens, PVIDEORECCFG pVideoRecCfg, PVIDEORECCONTEXT *ppCtx)
869{
870 AssertReturn(cScreens, VERR_INVALID_PARAMETER);
871 AssertPtrReturn(pVideoRecCfg, VERR_INVALID_POINTER);
872 AssertPtrReturn(ppCtx, VERR_INVALID_POINTER);
873
874 VIDEORECCONTEXT *pCtx = NULL;
875 try
876 {
877 pCtx = new VIDEORECCONTEXT();
878 }
879 catch (std::bad_alloc &)
880 {
881 return VERR_NO_MEMORY;
882 }
883
884 int rc = RTCritSectInit(&pCtx->CritSect);
885 if (RT_FAILURE(rc))
886 {
887 delete pCtx;
888 return rc;
889 }
890
891 for (uint32_t uScreen = 0; uScreen < cScreens; uScreen++)
892 {
893 VIDEORECSTREAM *pStream = NULL;
894 try
895 {
896 pStream = new VIDEORECSTREAM();
897 }
898 catch (std::bad_alloc &)
899 {
900 rc = VERR_NO_MEMORY;
901 break;
902 }
903
904 rc = RTCritSectInit(&pStream->CritSect);
905 if (RT_FAILURE(rc))
906 break;
907
908 try
909 {
910 pStream->uScreenID = uScreen;
911
912 pCtx->vecStreams.push_back(pStream);
913
914 pStream->File.pWEBM = new WebMWriter();
915 }
916 catch (std::bad_alloc &)
917 {
918 rc = VERR_NO_MEMORY;
919 break;
920 }
921 }
922
923 if (RT_SUCCESS(rc))
924 {
925 pCtx->tsStartMs = RTTimeMilliTS();
926 pCtx->enmState = VIDEORECSTS_UNINITIALIZED;
927 pCtx->fStarted = false;
928 pCtx->fShutdown = false;
929
930 /* Copy the configuration to our context. */
931 pCtx->Cfg = *pVideoRecCfg;
932
933 rc = RTSemEventCreate(&pCtx->WaitEvent);
934 AssertRCReturn(rc, rc);
935
936 rc = RTThreadCreate(&pCtx->Thread, videoRecThread, (void *)pCtx, 0,
937 RTTHREADTYPE_MAIN_WORKER, RTTHREADFLAGS_WAITABLE, "VideoRec");
938
939 if (RT_SUCCESS(rc)) /* Wait for the thread to start. */
940 rc = RTThreadUserWait(pCtx->Thread, 30 * RT_MS_1SEC /* 30s timeout */);
941
942 if (RT_SUCCESS(rc))
943 {
944 pCtx->enmState = VIDEORECSTS_INITIALIZED;
945 pCtx->fStarted = true;
946
947 if (ppCtx)
948 *ppCtx = pCtx;
949 }
950 }
951
952 if (RT_FAILURE(rc))
953 {
954 int rc2 = VideoRecContextDestroy(pCtx);
955 AssertRC(rc2);
956 }
957
958 return rc;
959}
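/*
 * A minimal usage sketch of the recording API implemented in this file, roughly as a
 * caller in Main would drive it. The concrete configuration values, the file name and
 * the pvFrameBuf/cbPerLine parameters are illustrative assumptions only; real callers
 * fill VIDEORECCFG from the VM settings and feed actual guest frame buffer data.
 */
static int videoRecUsageSketch(uint8_t *pvFrameBuf, uint32_t cbPerLine)
{
    VIDEORECCFG Cfg;                                /* Assumed to default-construct sanely. */
    Cfg.enmDst         = VIDEORECDEST_FILE;
    Cfg.File.strName   = "/tmp/vm-capture.webm";    /* Assumed assignable from a C string. */
    Cfg.Video.fEnabled = true;
    Cfg.Video.uWidth   = 1024;
    Cfg.Video.uHeight  = 768;
    Cfg.Video.uFPS     = 25;
    Cfg.Video.uRate    = 512; /* kbps */

    PVIDEORECCONTEXT pCtx = NULL;
    int rc = VideoRecContextCreate(1 /* cScreens */, &Cfg, &pCtx);
    if (RT_SUCCESS(rc))
        rc = VideoRecStreamInit(pCtx, 0 /* uScreen */);     /* Opens the WebM file, sets up the encoder. */
    if (RT_SUCCESS(rc))
    {
        const uint64_t tsNowMs = RTTimeMilliTS();
        if (   VideoRecIsReady(pCtx, 0 /* uScreen */, tsNowMs)
            && !VideoRecIsLimitReached(pCtx, 0 /* uScreen */, tsNowMs))
        {
            /* Queue one BGRA32 guest frame; encoding happens on the worker thread. */
            rc = VideoRecSendVideoFrame(pCtx, 0 /* uScreen */, 0 /* x */, 0 /* y */,
                                        BitmapFormat_BGR, 32 /* uBPP */, cbPerLine,
                                        1024 /* uSrcWidth */, 768 /* uSrcHeight */,
                                        pvFrameBuf, tsNowMs);
        }
    }

    int rc2 = VideoRecContextDestroy(pCtx);                 /* Stops the thread, finalizes the file. */
    if (RT_SUCCESS(rc))
        rc = rc2;
    return rc;
}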
960
961/**
962 * Destroys a video recording context.
963 * @returns IPRT status code.
964 * @param pCtx Video recording context to destroy.
965 */
966int VideoRecContextDestroy(PVIDEORECCONTEXT pCtx)
967{
968 if (!pCtx)
969 return VINF_SUCCESS;
970
971 int rc = VINF_SUCCESS;
972
973 if (pCtx->enmState == VIDEORECSTS_INITIALIZED)
974 {
975 LogFunc(("Shutting down thread ...\n"));
976
977 /* Set shutdown indicator. */
978 ASMAtomicWriteBool(&pCtx->fShutdown, true);
979
980 /* Signal the thread and wait for it to shut down. */
981 rc = videoRecThreadNotify(pCtx);
982 if (RT_SUCCESS(rc))
983 rc = RTThreadWait(pCtx->Thread, 10 * 1000 /* 10s timeout */, NULL);
984
985 if (RT_SUCCESS(rc))
986 {
987 /* Disable the context. */
988 ASMAtomicWriteBool(&pCtx->fStarted, false);
989
990 int rc2 = RTSemEventDestroy(pCtx->WaitEvent);
991 AssertRC(rc2);
992
993 pCtx->WaitEvent = NIL_RTSEMEVENT;
994 }
995 }
996
997 if (RT_FAILURE(rc))
998 {
999 AssertRC(rc);
1000 return rc;
1001 }
1002
1003 rc = RTCritSectEnter(&pCtx->CritSect);
1004 if (RT_SUCCESS(rc))
1005 {
1006 VideoRecStreams::iterator it = pCtx->vecStreams.begin();
1007 while (it != pCtx->vecStreams.end())
1008 {
1009 PVIDEORECSTREAM pStream = (*it);
1010
1011 videoRecStreamLock(pStream);
1012
1013 if (pStream->fEnabled)
1014 {
1015 switch (pStream->enmDst)
1016 {
1017 case VIDEORECDEST_FILE:
1018 {
1019 if (pStream->File.pWEBM)
1020 pStream->File.pWEBM->Close();
1021 break;
1022 }
1023
1024 default:
1025 AssertFailed(); /* Should never happen. */
1026 break;
1027 }
1028
1029 vpx_img_free(&pStream->Video.Codec.VPX.RawImage);
1030 vpx_codec_err_t rcv = vpx_codec_destroy(&pStream->Video.Codec.VPX.Ctx);
1031 Assert(rcv == VPX_CODEC_OK); RT_NOREF(rcv);
1032
1033 pStream->Blocks.Clear();
1034
1035 LogRel(("VideoRec: Recording screen #%u stopped\n", pStream->uScreenID));
1036 }
1037
1038 switch (pStream->enmDst)
1039 {
1040 case VIDEORECDEST_FILE:
1041 {
1042 int rc2 = videoRecStreamCloseFile(pStream);
1043 AssertRC(rc2);
1044
1045 if (pStream->File.pWEBM)
1046 {
1047 delete pStream->File.pWEBM;
1048 pStream->File.pWEBM = NULL;
1049 }
1050 break;
1051 }
1052
1053 default:
1054 AssertFailed(); /* Should never happen. */
1055 break;
1056 }
1057
1058 pCtx->vecStreams.erase(it);
1059 it = pCtx->vecStreams.begin();
1060
1061 videoRecStreamUnlock(pStream);
1062
1063 RTCritSectDelete(&pStream->CritSect);
1064
1065 delete pStream;
1066 pStream = NULL;
1067 }
1068
1069 /* Sanity. */
1070 Assert(pCtx->vecStreams.empty());
1071 Assert(pCtx->mapBlocksCommon.size() == 0);
1072
1073 int rc2 = RTCritSectLeave(&pCtx->CritSect);
1074 AssertRC(rc2);
1075
1076 RTCritSectDelete(&pCtx->CritSect);
1077
1078 delete pCtx;
1079 pCtx = NULL;
1080 }
1081
1082 return rc;
1083}
1084
1085/**
1086 * Retrieves a specific recording stream of a recording context.
1087 *
1088 * @returns Pointer to recording stream if found, or NULL if not found.
1089 * @param pCtx Recording context to look up stream for.
1090 * @param uScreen Screen number of recording stream to look up.
1091 */
1092DECLINLINE(PVIDEORECSTREAM) videoRecStreamGet(PVIDEORECCONTEXT pCtx, uint32_t uScreen)
1093{
1094 AssertPtrReturn(pCtx, NULL);
1095
1096 PVIDEORECSTREAM pStream;
1097
1098 try
1099 {
1100 pStream = pCtx->vecStreams.at(uScreen);
1101 }
1102 catch (std::out_of_range &)
1103 {
1104 pStream = NULL;
1105 }
1106
1107 return pStream;
1108}
1109
1110/**
1111 * Locks a recording stream.
1112 *
1113 * @param pStream Recording stream to lock.
1114 */
1115static void videoRecStreamLock(PVIDEORECSTREAM pStream)
1116{
1117 int rc = RTCritSectEnter(&pStream->CritSect);
1118 AssertRC(rc);
1119}
1120
1121/**
1122 * Unlocks a locked recording stream.
1123 *
1124 * @param pStream Recording stream to unlock.
1125 */
1126static void videoRecStreamUnlock(PVIDEORECSTREAM pStream)
1127{
1128 int rc = RTCritSectLeave(&pStream->CritSect);
1129 AssertRC(rc);
1130}
1131
1132/**
1133 * Opens a file for a given recording stream to capture to.
1134 *
1135 * @returns IPRT status code.
1136 * @param pStream Recording stream to open file for.
1137 * @param pCfg Recording configuration to use.
1138 */
1139static int videoRecStreamOpenFile(PVIDEORECSTREAM pStream, PVIDEORECCFG pCfg)
1140{
1141 AssertPtrReturn(pStream, VERR_INVALID_POINTER);
1142 AssertPtrReturn(pCfg, VERR_INVALID_POINTER);
1143
1144 Assert(pStream->enmDst == VIDEORECDEST_INVALID);
1145 Assert(pCfg->enmDst == VIDEORECDEST_FILE);
1146
1147 Assert(pCfg->File.strName.isNotEmpty());
1148
1149 char *pszAbsPath = RTPathAbsDup(com::Utf8Str(pCfg->File.strName).c_str());
1150 AssertPtrReturn(pszAbsPath, VERR_NO_MEMORY);
1151
1152 RTPathStripSuffix(pszAbsPath);
1153
1154 char *pszSuff = RTStrDup(".webm");
1155 if (!pszSuff)
1156 {
1157 RTStrFree(pszAbsPath);
1158 return VERR_NO_MEMORY;
1159 }
1160
1161 char *pszFile = NULL;
1162
1163 int rc;
1164 if (pCfg->aScreens.size() > 1)
1165 rc = RTStrAPrintf(&pszFile, "%s-%u%s", pszAbsPath, pStream->uScreenID + 1, pszSuff);
1166 else
1167 rc = RTStrAPrintf(&pszFile, "%s%s", pszAbsPath, pszSuff);
1168
1169 if (RT_SUCCESS(rc))
1170 {
1171 uint64_t fOpen = RTFILE_O_WRITE | RTFILE_O_DENY_WRITE;
1172
1173 /* Play safe: the file must not exist, overwriting is potentially
1174 * hazardous as nothing prevents the user from picking a file name of some
1175 * other important file, causing unintentional data loss. */
1176 fOpen |= RTFILE_O_CREATE;
1177
1178 RTFILE hFile;
1179 rc = RTFileOpen(&hFile, pszFile, fOpen);
1180 if (rc == VERR_ALREADY_EXISTS)
1181 {
1182 RTStrFree(pszFile);
1183 pszFile = NULL;
1184
1185 RTTIMESPEC ts;
1186 RTTimeNow(&ts);
1187 RTTIME time;
1188 RTTimeExplode(&time, &ts);
1189
1190 if (pCfg->aScreens.size() > 1)
1191 rc = RTStrAPrintf(&pszFile, "%s-%04d-%02u-%02uT%02u-%02u-%02u-%09uZ-%u%s",
1192 pszAbsPath, time.i32Year, time.u8Month, time.u8MonthDay,
1193 time.u8Hour, time.u8Minute, time.u8Second, time.u32Nanosecond,
1194 pStream->uScreenID + 1, pszSuff);
1195 else
1196 rc = RTStrAPrintf(&pszFile, "%s-%04d-%02u-%02uT%02u-%02u-%02u-%09uZ%s",
1197 pszAbsPath, time.i32Year, time.u8Month, time.u8MonthDay,
1198 time.u8Hour, time.u8Minute, time.u8Second, time.u32Nanosecond,
1199 pszSuff);
1200
1201 if (RT_SUCCESS(rc))
1202 rc = RTFileOpen(&hFile, pszFile, fOpen);
1203 }
1204
1205 if (RT_SUCCESS(rc))
1206 {
1207 pStream->enmDst = VIDEORECDEST_FILE;
1208 pStream->File.hFile = hFile;
1209 pStream->File.pszFile = pszFile; /* Assign allocated string to our stream's config. */
1210 }
1211 }
1212
1213 RTStrFree(pszSuff);
1214 RTStrFree(pszAbsPath);
1215
1216 if (RT_FAILURE(rc))
1217 {
1218 LogRel(("VideoRec: Failed to open file '%s' for screen %RU32, rc=%Rrc\n",
1219 pszFile ? pszFile : "<Unnamed>", pStream->uScreenID, rc));
1220 RTStrFree(pszFile);
1221 }
1222
1223 return rc;
1224}
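/*
 * Illustration of the naming scheme above, assuming a configured base name of
 * "VM-capture.webm": with a single recorded screen the output is "VM-capture.webm";
 * with multiple screens, screen #0 goes to "VM-capture-1.webm", screen #1 to
 * "VM-capture-2.webm", and so on. If such a file already exists, a UTC timestamp is
 * inserted instead of overwriting it, e.g. "VM-capture-2018-10-19T18-14-51-000000000Z-1.webm".
 */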
1225
1226/**
1227 * Closes a recording stream's file.
1228 *
1229 * @returns IPRT status code.
1230 * @param pStream Recording stream to close file for.
1231 */
1232static int videoRecStreamCloseFile(PVIDEORECSTREAM pStream)
1233{
1234 Assert(pStream->enmDst == VIDEORECDEST_FILE);
1235
1236 pStream->enmDst = VIDEORECDEST_INVALID;
1237
1238 AssertPtr(pStream->File.pszFile);
1239
1240 if (RTFileIsValid(pStream->File.hFile))
1241 {
1242 RTFileClose(pStream->File.hFile);
1243 LogRel(("VideoRec: Closed file '%s'\n", pStream->File.pszFile));
1244 }
1245
1246 RTStrFree(pStream->File.pszFile);
1247 pStream->File.pszFile = NULL;
1248
1249 return VINF_SUCCESS;
1250}
1251
1252/**
1253 * Initializes a video recording stream of a given video recording context.
1254 *
1255 * @returns IPRT status code.
1256 * @param pCtx Pointer to video recording context.
1257 * @param uScreen Screen number to record.
1258 */
1259int VideoRecStreamInit(PVIDEORECCONTEXT pCtx, uint32_t uScreen)
1260{
1261 AssertPtrReturn(pCtx, VERR_INVALID_POINTER);
1262
1263 PVIDEORECCFG pCfg = &pCtx->Cfg;
1264
1265#ifdef VBOX_WITH_AUDIO_VIDEOREC
1266 if (pCfg->Audio.fEnabled)
1267 {
1268 /* Sanity. */
1269 AssertReturn(pCfg->Audio.uHz, VERR_INVALID_PARAMETER);
1270 AssertReturn(pCfg->Audio.cBits, VERR_INVALID_PARAMETER);
1271 AssertReturn(pCfg->Audio.cChannels, VERR_INVALID_PARAMETER);
1272 }
1273#endif
1274
1275 PVIDEORECSTREAM pStream = videoRecStreamGet(pCtx, uScreen);
1276 if (!pStream)
1277 return VERR_NOT_FOUND;
1278
1279 int rc = videoRecStreamOpenFile(pStream, &pCtx->Cfg);
1280 if (RT_FAILURE(rc))
1281 return rc;
1282
1283 pStream->pCtx = pCtx;
1284
1285 /** @todo Make the following parameters configurable on a per-stream basis? */
1286 pStream->Video.uWidth = pCfg->Video.uWidth;
1287 pStream->Video.uHeight = pCfg->Video.uHeight;
1288 pStream->Video.cFailedEncodingFrames = 0;
1289
1290 PVIDEORECVIDEOCODEC pVC = &pStream->Video.Codec;
1291
1292 pStream->Video.uDelayMs = RT_MS_1SEC / pCfg->Video.uFPS;
1293
1294 switch (pStream->enmDst)
1295 {
1296 case VIDEORECDEST_FILE:
1297 {
1298 rc = pStream->File.pWEBM->OpenEx(pStream->File.pszFile, &pStream->File.hFile,
1299#ifdef VBOX_WITH_AUDIO_VIDEOREC
1300 pCfg->Audio.fEnabled ? WebMWriter::AudioCodec_Opus : WebMWriter::AudioCodec_None,
1301#else
1302 WebMWriter::AudioCodec_None,
1303#endif
1304 pCfg->Video.fEnabled ? WebMWriter::VideoCodec_VP8 : WebMWriter::VideoCodec_None);
1305 if (RT_FAILURE(rc))
1306 {
1307 LogRel(("VideoRec: Failed to create the capture output file '%s' (%Rrc)\n", pStream->File.pszFile, rc));
1308 break;
1309 }
1310
1311 const char *pszFile = pStream->File.pszFile;
1312
1313 if (pCfg->Video.fEnabled)
1314 {
1315 rc = pStream->File.pWEBM->AddVideoTrack(pCfg->Video.uWidth, pCfg->Video.uHeight, pCfg->Video.uFPS,
1316 &pStream->uTrackVideo);
1317 if (RT_FAILURE(rc))
1318 {
1319 LogRel(("VideoRec: Failed to add video track to output file '%s' (%Rrc)\n", pszFile, rc));
1320 break;
1321 }
1322
1323 LogRel(("VideoRec: Recording screen #%u with %RU32x%RU32 @ %RU32 kbps, %RU32 FPS\n",
1324 uScreen, pCfg->Video.uWidth, pCfg->Video.uHeight, pCfg->Video.uRate, pCfg->Video.uFPS));
1325 }
1326
1327#ifdef VBOX_WITH_AUDIO_VIDEOREC
1328 if (pCfg->Audio.fEnabled)
1329 {
1330 rc = pStream->File.pWEBM->AddAudioTrack(pCfg->Audio.uHz, pCfg->Audio.cChannels, pCfg->Audio.cBits,
1331 &pStream->uTrackAudio);
1332 if (RT_FAILURE(rc))
1333 {
1334 LogRel(("VideoRec: Failed to add audio track to output file '%s' (%Rrc)\n", pszFile, rc));
1335 break;
1336 }
1337
1338 LogRel(("VideoRec: Recording audio in %RU16Hz, %RU8 bit, %RU8 %s\n",
1339 pCfg->Audio.uHz, pCfg->Audio.cBits, pCfg->Audio.cChannels, pCfg->Audio.cChannels > 1 ? "channels" : "channel"));
1340 }
1341#endif
1342
1343 if ( pCfg->Video.fEnabled
1344#ifdef VBOX_WITH_AUDIO_VIDEOREC
1345 || pCfg->Audio.fEnabled
1346#endif
1347 )
1348 {
1349 char szWhat[32] = { 0 };
1350 if (pCfg->Video.fEnabled)
1351 RTStrCat(szWhat, sizeof(szWhat), "video");
1352#ifdef VBOX_WITH_AUDIO_VIDEOREC
1353 if (pCfg->Audio.fEnabled)
1354 {
1355 if (pCfg->Video.fEnabled)
1356 RTStrCat(szWhat, sizeof(szWhat), " + ");
1357 RTStrCat(szWhat, sizeof(szWhat), "audio");
1358 }
1359#endif
1360 LogRel(("VideoRec: Recording %s to '%s'\n", szWhat, pszFile));
1361 }
1362
1363 break;
1364 }
1365
1366 default:
1367 AssertFailed(); /* Should never happen. */
1368 rc = VERR_NOT_IMPLEMENTED;
1369 break;
1370 }
1371
1372 if (RT_FAILURE(rc))
1373 return rc;
1374
1375#ifdef VBOX_WITH_LIBVPX
1376# ifdef VBOX_WITH_LIBVPX_VP9
1377 vpx_codec_iface_t *pCodecIface = vpx_codec_vp9_cx();
1378# else /* Default is using VP8. */
1379 vpx_codec_iface_t *pCodecIface = vpx_codec_vp8_cx();
1380# endif
1381
1382 vpx_codec_err_t rcv = vpx_codec_enc_config_default(pCodecIface, &pVC->VPX.Cfg, 0 /* Reserved */);
1383 if (rcv != VPX_CODEC_OK)
1384 {
1385 LogRel(("VideoRec: Failed to get default config for VPX encoder: %s\n", vpx_codec_err_to_string(rcv)));
1386 return VERR_AVREC_CODEC_INIT_FAILED;
1387 }
1388
1389 /* Target bitrate in kilobits per second. */
1390 pVC->VPX.Cfg.rc_target_bitrate = pCfg->Video.uRate;
1391 /* Frame width. */
1392 pVC->VPX.Cfg.g_w = pCfg->Video.uWidth;
1393 /* Frame height. */
1394 pVC->VPX.Cfg.g_h = pCfg->Video.uHeight;
1395 /* Timebase of 1 millisecond, i.e. time stamps are given in ms. */
1396 pVC->VPX.Cfg.g_timebase.num = 1;
1397 pVC->VPX.Cfg.g_timebase.den = 1000;
1398 /* Disable multithreading. */
1399 pVC->VPX.Cfg.g_threads = 0;
1400
1401 /* Initialize codec. */
1402 rcv = vpx_codec_enc_init(&pVC->VPX.Ctx, pCodecIface, &pVC->VPX.Cfg, 0 /* Flags */);
1403 if (rcv != VPX_CODEC_OK)
1404 {
1405 LogRel(("VideoRec: Failed to initialize VPX encoder: %s\n", vpx_codec_err_to_string(rcv)));
1406 return VERR_AVREC_CODEC_INIT_FAILED;
1407 }
1408
1409 if (!vpx_img_alloc(&pVC->VPX.RawImage, VPX_IMG_FMT_I420, pCfg->Video.uWidth, pCfg->Video.uHeight, 1))
1410 {
1411 LogRel(("VideoRec: Failed to allocate image %RU32x%RU32\n", pCfg->Video.uWidth, pCfg->Video.uHeight));
1412 return VERR_NO_MEMORY;
1413 }
1414
1415 /* Save a pointer to the first raw YUV plane. */
1416 pStream->Video.pu8YuvBuf = pVC->VPX.RawImage.planes[0];
1417#endif
1418 pStream->fEnabled = true;
1419
1420 return VINF_SUCCESS;
1421}
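/*
 * Worked example for the timing set up above, assuming a configured rate of 25 FPS:
 * uDelayMs = RT_MS_1SEC / 25 = 40 ms, and with g_timebase = 1/1000 the VPX time stamps
 * are expressed in milliseconds. A frame captured one second into the recording is thus
 * encoded with pts = 1000 and a display duration of 40 timebase units (see
 * videoRecEncodeAndWrite()), while frames arriving less than 40 ms after the previous
 * one are skipped in VideoRecSendVideoFrame().
 */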
1422
1423/**
1424 * Returns which recording features currently are enabled for a given configuration.
1425 *
1426 * @returns Enabled video recording features.
1427 * @param pCfg Pointer to recording configuration.
1428 */
1429VIDEORECFEATURES VideoRecGetFeatures(PVIDEORECCFG pCfg)
1430{
1431 if (!pCfg)
1432 return VIDEORECFEATURE_NONE;
1433
1434 VIDEORECFEATURES fFeatures = VIDEORECFEATURE_NONE;
1435
1436 if (pCfg->Video.fEnabled)
1437 fFeatures |= VIDEORECFEATURE_VIDEO;
1438
1439#ifdef VBOX_WITH_AUDIO_VIDEOREC
1440 if (pCfg->Audio.fEnabled)
1441 fFeatures |= VIDEORECFEATURE_AUDIO;
1442#endif
1443
1444 return fFeatures;
1445}
1446
1447/**
1448 * Checks if recording engine is ready to accept new recording data for a given screen.
1449 *
1450 * @returns true if recording engine is ready, false if not.
1451 * @param pCtx Pointer to video recording context.
1452 * @param uScreen Screen ID.
1453 * @param uTimeStampMs Current time stamp (in ms). Currently not being used.
1454 */
1455bool VideoRecIsReady(PVIDEORECCONTEXT pCtx, uint32_t uScreen, uint64_t uTimeStampMs)
1456{
1457 AssertPtrReturn(pCtx, false);
1458 RT_NOREF(uTimeStampMs);
1459
1460 if (ASMAtomicReadU32(&pCtx->enmState) != VIDEORECSTS_INITIALIZED)
1461 return false;
1462
1463 bool fIsReady = false;
1464
1465 PVIDEORECSTREAM pStream = videoRecStreamGet(pCtx, uScreen);
1466 if (pStream)
1467 {
1468 videoRecStreamLock(pStream);
1469 fIsReady = pStream->fEnabled;
1470 videoRecStreamUnlock(pStream);
1471 }
1472
1473 /* Note: Do not check for other constraints like the video FPS rate here,
1474 * as this check then also would affect other (non-FPS related) stuff
1475 * like audio data. */
1476
1477 return fIsReady;
1478}
1479
1480/**
1481 * Returns whether a given recording context has been started or not.
1482 *
1483 * @returns true if active, false if not.
1484 * @param pCtx Pointer to video recording context.
1485 */
1486bool VideoRecIsStarted(PVIDEORECCONTEXT pCtx)
1487{
1488 if (!pCtx)
1489 return false;
1490
1491 return ASMAtomicReadBool(&pCtx->fStarted);
1492}
1493
1494/**
1495 * Checks if a specified limit for recording has been reached.
1496 *
1497 * @returns true if any limit has been reached.
1498 * @param pCtx Pointer to video recording context.
1499 * @param uScreen Screen ID.
1500 * @param tsNowMs Current time stamp (in ms).
1501 */
1502bool VideoRecIsLimitReached(PVIDEORECCONTEXT pCtx, uint32_t uScreen, uint64_t tsNowMs)
1503{
1504 PVIDEORECSTREAM pStream = videoRecStreamGet(pCtx, uScreen);
1505 if ( !pStream
1506 || !pStream->fEnabled)
1507 {
1508 return false;
1509 }
1510
1511 const PVIDEORECCFG pCfg = &pCtx->Cfg;
1512
1513 if ( pCfg->uMaxTimeS
1514 && tsNowMs >= pCtx->tsStartMs + (pCfg->uMaxTimeS * RT_MS_1SEC))
1515 {
1516 return true;
1517 }
1518
1519 if (pCfg->enmDst == VIDEORECDEST_FILE)
1520 {
1521
1522 if (pCfg->File.uMaxSizeMB)
1523 {
1524 uint64_t sizeInMB = pStream->File.pWEBM->GetFileSize() / _1M;
1525 if (sizeInMB >= pCfg->File.uMaxSizeMB)
1526 return true;
1527 }
1528
1529 /* Check for available free disk space */
1530 if ( pStream->File.pWEBM
1531 && pStream->File.pWEBM->GetAvailableSpace() < 0x100000) /** @todo r=andy WTF? Fix this. */
1532 {
1533 LogRel(("VideoRec: Not enough free storage space available, stopping video capture\n"));
1534 return true;
1535 }
1536 }
1537
1538 return false;
1539}
1540
1541/**
1542 * Encodes the source image and writes the encoded image to the stream's destination.
1543 *
1544 * @returns IPRT status code.
1545 * @param pStream Stream to encode and submit to.
1546 * @param uTimeStampMs Absolute timestamp (PTS) of frame (in ms) to encode.
1547 * @param pFrame Frame to encode and submit.
1548 */
1549static int videoRecEncodeAndWrite(PVIDEORECSTREAM pStream, uint64_t uTimeStampMs, PVIDEORECVIDEOFRAME pFrame)
1550{
1551 AssertPtrReturn(pStream, VERR_INVALID_POINTER);
1552 AssertPtrReturn(pFrame, VERR_INVALID_POINTER);
1553
1554 int rc;
1555
1556 AssertPtr(pStream->pCtx);
1557 PVIDEORECCFG pCfg = &pStream->pCtx->Cfg;
1558 PVIDEORECVIDEOCODEC pVC = &pStream->Video.Codec;
1559#ifdef VBOX_WITH_LIBVPX
1560 /* Presentation Time Stamp (PTS). */
1561 vpx_codec_pts_t pts = uTimeStampMs;
1562 vpx_codec_err_t rcv = vpx_codec_encode(&pVC->VPX.Ctx,
1563 &pVC->VPX.RawImage,
1564 pts /* Time stamp */,
1565 pStream->Video.uDelayMs /* How long to show this frame */,
1566 0 /* Flags */,
1567 pCfg->Video.Codec.VPX.uEncoderDeadline /* Quality setting */);
1568 if (rcv != VPX_CODEC_OK)
1569 {
1570 if (pStream->Video.cFailedEncodingFrames++ < 64)
1571 {
1572 LogRel(("VideoRec: Failed to encode video frame: %s\n", vpx_codec_err_to_string(rcv)));
1573 return VERR_GENERAL_FAILURE;
1574 }
1575 }
1576
1577 pStream->Video.cFailedEncodingFrames = 0;
1578
1579 vpx_codec_iter_t iter = NULL;
1580 rc = VERR_NO_DATA;
1581 for (;;)
1582 {
1583 const vpx_codec_cx_pkt_t *pPacket = vpx_codec_get_cx_data(&pVC->VPX.Ctx, &iter);
1584 if (!pPacket)
1585 break;
1586
1587 switch (pPacket->kind)
1588 {
1589 case VPX_CODEC_CX_FRAME_PKT:
1590 {
1591 WebMWriter::BlockData_VP8 blockData = { &pVC->VPX.Cfg, pPacket };
1592 rc = pStream->File.pWEBM->WriteBlock(pStream->uTrackVideo, &blockData, sizeof(blockData));
1593 break;
1594 }
1595
1596 default:
1597 AssertFailed();
1598 LogFunc(("Unexpected video packet type %ld\n", pPacket->kind));
1599 break;
1600 }
1601 }
1602#else
1603 RT_NOREF(pStream);
1604 rc = VERR_NOT_SUPPORTED;
1605#endif /* VBOX_WITH_LIBVPX */
1606 return rc;
1607}
1608
1609/**
1610 * Converts an RGB buffer to a YUV420p buffer.
1611 *
1612 * @returns IPRT status code.
1613 * TODO
1614 */
1615static int videoRecRGBToYUV(uint32_t uPixelFormat,
1616 uint8_t *paDst, uint32_t uDstWidth, uint32_t uDstHeight,
1617 uint8_t *paSrc, uint32_t uSrcWidth, uint32_t uSrcHeight)
1618{
1619 switch (uPixelFormat)
1620 {
1621 case VIDEORECPIXELFMT_RGB32:
1622 if (!colorConvWriteYUV420p<ColorConvBGRA32Iter>(paDst, uDstWidth, uDstHeight,
1623 paSrc, uSrcWidth, uSrcHeight))
1624 return VERR_INVALID_PARAMETER;
1625 break;
1626 case VIDEORECPIXELFMT_RGB24:
1627 if (!colorConvWriteYUV420p<ColorConvBGR24Iter>(paDst, uDstWidth, uDstHeight,
1628 paSrc, uSrcWidth, uSrcHeight))
1629 return VERR_INVALID_PARAMETER;
1630 break;
1631 case VIDEORECPIXELFMT_RGB565:
1632 if (!colorConvWriteYUV420p<ColorConvBGR565Iter>(paDst, uDstWidth, uDstHeight,
1633 paSrc, uSrcWidth, uSrcHeight))
1634 return VERR_INVALID_PARAMETER;
1635 break;
1636 default:
1637 AssertFailed();
1638 return VERR_NOT_SUPPORTED;
1639 }
1640 return VINF_SUCCESS;
1641}
1642
1643/**
1644 * Sends an audio frame to the video encoding thread.
1645 *
1646 * @thread EMT
1647 *
1648 * @returns IPRT status code.
1649 * @param pCtx Pointer to the video recording context.
1650 * @param pvData Audio frame data to send.
1651 * @param cbData Size (in bytes) of (encoded) audio frame data.
1652 * @param uTimeStampMs Time stamp (in ms) of audio playback.
1653 */
1654int VideoRecSendAudioFrame(PVIDEORECCONTEXT pCtx, const void *pvData, size_t cbData, uint64_t uTimeStampMs)
1655{
1656#ifdef VBOX_WITH_AUDIO_VIDEOREC
1657 AssertPtrReturn(pvData, VERR_INVALID_POINTER);
1658 AssertReturn(cbData, VERR_INVALID_PARAMETER);
1659
1660 /* To save time spent in EMT, do the required audio multiplexing in the encoding thread.
1661 *
1662 * The multiplexing is needed to supply all recorded (enabled) screens with the same
1663 * audio data at the same given point in time.
1664 */
1665 PVIDEORECBLOCK pBlock = (PVIDEORECBLOCK)RTMemAlloc(sizeof(VIDEORECBLOCK));
1666 AssertPtrReturn(pBlock, VERR_NO_MEMORY);
1667 pBlock->enmType = VIDEORECBLOCKTYPE_AUDIO;
1668
1669 PVIDEORECAUDIOFRAME pFrame = (PVIDEORECAUDIOFRAME)RTMemAlloc(sizeof(VIDEORECAUDIOFRAME));
1670 AssertPtrReturn(pFrame, VERR_NO_MEMORY);
1671
1672 pFrame->pvBuf = (uint8_t *)RTMemAlloc(cbData);
1673 AssertPtrReturn(pFrame->pvBuf, VERR_NO_MEMORY);
1674 pFrame->cbBuf = cbData;
1675
1676 memcpy(pFrame->pvBuf, pvData, cbData);
1677
1678 pBlock->pvData = pFrame;
1679 pBlock->cbData = sizeof(VIDEORECAUDIOFRAME) + cbData;
1680 pBlock->cRefs = (uint16_t)pCtx->vecStreams.size(); /* All streams need the same audio data. */
1681 pBlock->uTimeStampMs = uTimeStampMs;
1682
1683 int rc = RTCritSectEnter(&pCtx->CritSect);
1684 if (RT_FAILURE(rc))
1685 return rc;
1686
1687 try
1688 {
1689 VideoRecBlockMap::iterator itBlocks = pCtx->mapBlocksCommon.find(uTimeStampMs);
1690 if (itBlocks == pCtx->mapBlocksCommon.end())
1691 {
1692 VideoRecBlocks *pVideoRecBlocks = new VideoRecBlocks();
1693 pVideoRecBlocks->List.push_back(pBlock);
1694
1695 pCtx->mapBlocksCommon.insert(std::make_pair(uTimeStampMs, pVideoRecBlocks));
1696 }
1697 else
1698 itBlocks->second->List.push_back(pBlock);
1699 }
1700 catch (const std::exception &ex)
1701 {
1702 RT_NOREF(ex);
1703 rc = VERR_NO_MEMORY;
1704 }
1705
1706 int rc2 = RTCritSectLeave(&pCtx->CritSect);
1707 AssertRC(rc2);
1708
1709 if (RT_SUCCESS(rc))
1710 rc = videoRecThreadNotify(pCtx);
1711
1712 return rc;
1713#else
1714 RT_NOREF(pCtx, pvData, cbData, uTimeStampMs);
1715 return VINF_SUCCESS;
1716#endif
1717}
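/*
 * Worked example for the reference counting above, assuming two enabled screens:
 * the audio block is queued once in mapBlocksCommon with cRefs = 2. When the worker
 * thread (videoRecThread()) multiplexes the block into the WebM file of the first
 * stream, cRefs drops to 1; after the second stream has consumed it, cRefs reaches 0
 * and the block is freed and removed from the common map.
 */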
1718
1719/**
1720 * Copies a source video frame to the intermediate RGB buffer.
1721 * Frames arriving faster than the configured FPS rate are skipped (VINF_TRY_AGAIN).
1722 *
1723 * @thread EMT
1724 *
1725 * @returns IPRT status code.
1726 * @param pCtx Pointer to the video recording context.
1727 * @param uScreen Screen number.
1728 * @param x Starting x coordinate of the video frame.
1729 * @param y Starting y coordinate of the video frame.
1730 * @param uPixelFormat Pixel format.
1731 * @param uBPP Bits Per Pixel (BPP).
1732 * @param uBytesPerLine Bytes per scanline.
1733 * @param uSrcWidth Width of the video frame.
1734 * @param uSrcHeight Height of the video frame.
1735 * @param puSrcData Pointer to video frame data.
1736 * @param uTimeStampMs Time stamp (in ms).
1737 */
1738int VideoRecSendVideoFrame(PVIDEORECCONTEXT pCtx, uint32_t uScreen, uint32_t x, uint32_t y,
1739 uint32_t uPixelFormat, uint32_t uBPP, uint32_t uBytesPerLine,
1740 uint32_t uSrcWidth, uint32_t uSrcHeight, uint8_t *puSrcData,
1741 uint64_t uTimeStampMs)
1742{
1743 AssertPtrReturn(pCtx, VERR_INVALID_POINTER);
1744 AssertReturn(uSrcWidth, VERR_INVALID_PARAMETER);
1745 AssertReturn(uSrcHeight, VERR_INVALID_PARAMETER);
1746 AssertReturn(puSrcData, VERR_INVALID_POINTER);
1747
1748 int rc = RTCritSectEnter(&pCtx->CritSect);
1749 AssertRC(rc);
1750
1751 PVIDEORECSTREAM pStream = videoRecStreamGet(pCtx, uScreen);
1752 if (!pStream)
1753 {
1754 rc = RTCritSectLeave(&pCtx->CritSect);
1755 AssertRC(rc);
1756
1757 return VERR_NOT_FOUND;
1758 }
1759
1760 videoRecStreamLock(pStream);
1761
1762 PVIDEORECVIDEOFRAME pFrame = NULL;
1763
1764 do
1765 {
1766 if (!pStream->fEnabled)
1767 {
1768 rc = VINF_TRY_AGAIN; /* Not (yet) enabled. */
1769 break;
1770 }
1771
1772 if (uTimeStampMs < pStream->Video.uLastTimeStampMs + pStream->Video.uDelayMs)
1773 {
1774 rc = VINF_TRY_AGAIN; /* Respect maximum frames per second. */
1775 break;
1776 }
1777
1778 pStream->Video.uLastTimeStampMs = uTimeStampMs;
1779
1780 int xDiff = ((int)pStream->Video.uWidth - (int)uSrcWidth) / 2;
1781 uint32_t w = uSrcWidth;
1782 if ((int)w + xDiff + (int)x <= 0) /* Nothing visible. */
1783 {
1784 rc = VERR_INVALID_PARAMETER;
1785 break;
1786 }
1787
1788 uint32_t destX;
1789 if ((int)x < -xDiff)
1790 {
1791 w += xDiff + x;
1792 x = -xDiff;
1793 destX = 0;
1794 }
1795 else
1796 destX = x + xDiff;
1797
1798 uint32_t h = uSrcHeight;
1799 int yDiff = ((int)pStream->Video.uHeight - (int)uSrcHeight) / 2;
1800 if ((int)h + yDiff + (int)y <= 0) /* Nothing visible. */
1801 {
1802 rc = VERR_INVALID_PARAMETER;
1803 break;
1804 }
1805
1806 uint32_t destY;
1807 if ((int)y < -yDiff)
1808 {
1809 h += yDiff + (int)y;
1810 y = -yDiff;
1811 destY = 0;
1812 }
1813 else
1814 destY = y + yDiff;
1815
1816 if ( destX > pStream->Video.uWidth
1817 || destY > pStream->Video.uHeight)
1818 {
1819 rc = VERR_INVALID_PARAMETER; /* Nothing visible. */
1820 break;
1821 }
1822
1823 if (destX + w > pStream->Video.uWidth)
1824 w = pStream->Video.uWidth - destX;
1825
1826 if (destY + h > pStream->Video.uHeight)
1827 h = pStream->Video.uHeight - destY;
1828
1829 pFrame = (PVIDEORECVIDEOFRAME)RTMemAllocZ(sizeof(VIDEORECVIDEOFRAME));
1830 AssertBreakStmt(pFrame, rc = VERR_NO_MEMORY);
1831
1832 /* Calculate bytes per pixel and set pixel format. */
1833 const unsigned uBytesPerPixel = uBPP / 8;
1834 if (uPixelFormat == BitmapFormat_BGR)
1835 {
1836 switch (uBPP)
1837 {
1838 case 32:
1839 pFrame->uPixelFormat = VIDEORECPIXELFMT_RGB32;
1840 break;
1841 case 24:
1842 pFrame->uPixelFormat = VIDEORECPIXELFMT_RGB24;
1843 break;
1844 case 16:
1845 pFrame->uPixelFormat = VIDEORECPIXELFMT_RGB565;
1846 break;
1847 default:
1848 AssertMsgFailed(("Unknown color depth (%RU32)\n", uBPP));
1849 break;
1850 }
1851 }
1852 else
1853 AssertMsgFailed(("Unknown pixel format (%RU32)\n", uPixelFormat));
1854
1855 const size_t cbRGBBuf = pStream->Video.uWidth
1856 * pStream->Video.uHeight
1857 * uBytesPerPixel;
1858 AssertBreakStmt(cbRGBBuf, rc = VERR_INVALID_PARAMETER);
1859
1860 pFrame->pu8RGBBuf = (uint8_t *)RTMemAlloc(cbRGBBuf);
1861 AssertBreakStmt(pFrame->pu8RGBBuf, rc = VERR_NO_MEMORY);
1862 pFrame->cbRGBBuf = cbRGBBuf;
1863 pFrame->uWidth = uSrcWidth;
1864 pFrame->uHeight = uSrcHeight;
1865
1866 /* If the current video frame is smaller than video resolution we're going to encode,
1867 * clear the frame beforehand to prevent artifacts. */
1868 if ( uSrcWidth < pStream->Video.uWidth
1869 || uSrcHeight < pStream->Video.uHeight)
1870 {
1871 RT_BZERO(pFrame->pu8RGBBuf, pFrame->cbRGBBuf);
1872 }
1873
1874 /* Calculate start offset in source and destination buffers. */
1875 uint32_t offSrc = y * uBytesPerLine + x * uBytesPerPixel;
1876 uint32_t offDst = (destY * pStream->Video.uWidth + destX) * uBytesPerPixel;
1877
1878#ifdef VBOX_VIDEOREC_DUMP
1879 VIDEORECBMPHDR bmpHdr;
1880 RT_ZERO(bmpHdr);
1881
1882 VIDEORECBMPDIBHDR bmpDIBHdr;
1883 RT_ZERO(bmpDIBHdr);
1884
1885 bmpHdr.u16Magic = 0x4d42; /* Magic */
1886 bmpHdr.u32Size = (uint32_t)(sizeof(VIDEORECBMPHDR) + sizeof(VIDEORECBMPDIBHDR) + (w * h * uBytesPerPixel));
1887 bmpHdr.u32OffBits = (uint32_t)(sizeof(VIDEORECBMPHDR) + sizeof(VIDEORECBMPDIBHDR));
1888
1889 bmpDIBHdr.u32Size = sizeof(VIDEORECBMPDIBHDR);
1890 bmpDIBHdr.u32Width = w;
1891 bmpDIBHdr.u32Height = h;
1892 bmpDIBHdr.u16Planes = 1;
1893 bmpDIBHdr.u16BitCount = uBPP;
1894 bmpDIBHdr.u32XPelsPerMeter = 5000;
1895 bmpDIBHdr.u32YPelsPerMeter = 5000;
1896
1897 RTFILE fh;
1898 int rc2 = RTFileOpen(&fh, "/tmp/VideoRecFrame.bmp",
1899 RTFILE_O_CREATE_REPLACE | RTFILE_O_WRITE | RTFILE_O_DENY_NONE);
1900 if (RT_SUCCESS(rc2))
1901 {
1902 RTFileWrite(fh, &bmpHdr, sizeof(bmpHdr), NULL);
1903 RTFileWrite(fh, &bmpDIBHdr, sizeof(bmpDIBHdr), NULL);
1904 }
1905#endif
1906 Assert(pFrame->cbRGBBuf >= w * h * uBytesPerPixel);
1907
1908 /* Do the copy. */
1909 for (unsigned int i = 0; i < h; i++)
1910 {
1911 /* Overflow check. */
1912 Assert(offSrc + w * uBytesPerPixel <= uSrcHeight * uBytesPerLine);
1913 Assert(offDst + w * uBytesPerPixel <= pStream->Video.uHeight * pStream->Video.uWidth * uBytesPerPixel);
1914
1915 memcpy(pFrame->pu8RGBBuf + offDst, puSrcData + offSrc, w * uBytesPerPixel);
1916
1917#ifdef VBOX_VIDEOREC_DUMP
1918 if (RT_SUCCESS(rc2))
1919 RTFileWrite(fh, pFrame->pu8RGBBuf + offDst, w * uBytesPerPixel, NULL);
1920#endif
1921 offSrc += uBytesPerLine;
1922 offDst += pStream->Video.uWidth * uBytesPerPixel;
1923 }
1924
1925#ifdef VBOX_VIDEOREC_DUMP
1926 if (RT_SUCCESS(rc2))
1927 RTFileClose(fh);
1928#endif
1929
1930 } while (0);
1931
1932 if (rc == VINF_SUCCESS) /* Note: Also could be VINF_TRY_AGAIN. */
1933 {
1934 PVIDEORECBLOCK pBlock = (PVIDEORECBLOCK)RTMemAlloc(sizeof(VIDEORECBLOCK));
1935 if (pBlock)
1936 {
1937 AssertPtr(pFrame);
1938
1939 pBlock->enmType = VIDEORECBLOCKTYPE_VIDEO;
1940 pBlock->pvData = pFrame;
1941 pBlock->cbData = sizeof(VIDEORECVIDEOFRAME) + pFrame->cbRGBBuf;
1942
1943 try
1944 {
1945 VideoRecBlocks *pVideoRecBlocks = new VideoRecBlocks();
1946 pVideoRecBlocks->List.push_back(pBlock);
1947
1948 Assert(pStream->Blocks.Map.find(uTimeStampMs) == pStream->Blocks.Map.end());
1949 pStream->Blocks.Map.insert(std::make_pair(uTimeStampMs, pVideoRecBlocks));
1950 }
1951 catch (const std::exception &ex)
1952 {
1953 RT_NOREF(ex);
1954
1955 RTMemFree(pBlock);
1956 rc = VERR_NO_MEMORY;
1957 }
1958 }
1959 else
1960 rc = VERR_NO_MEMORY;
1961 }
1962
1963 if (RT_FAILURE(rc))
1964 videoRecVideoFrameFree(pFrame);
1965
1966 videoRecStreamUnlock(pStream);
1967
1968 int rc2 = RTCritSectLeave(&pCtx->CritSect);
1969 AssertRC(rc2);
1970
1971 if ( RT_SUCCESS(rc)
1972 && rc != VINF_TRY_AGAIN) /* Only signal the thread if operation was successful. */
1973 {
1974 videoRecThreadNotify(pCtx);
1975 }
1976
1977 return rc;
1978}
1979
1980/**
1981 * Frees a video recording video frame.
1982 *
1984 * @param pFrame Pointer to video frame to free. The pointer will be invalid after return.
1985 */
1986static void videoRecVideoFrameFree(PVIDEORECVIDEOFRAME pFrame)
1987{
1988 if (!pFrame)
1989 return;
1990
1991 if (pFrame->pu8RGBBuf)
1992 {
1993 Assert(pFrame->cbRGBBuf);
1994 RTMemFree(pFrame->pu8RGBBuf);
1995 }
1996 RTMemFree(pFrame);
1997}
1998