AJA NTV2 SDK  18.0.0.2122
NTV2 SDK 18.0.0.2122
ntv2encodehevcfileac.cpp
Go to the documentation of this file.
1 /* SPDX-License-Identifier: MIT */
8 #include <stdio.h>
9 
10 #include "ntv2encodehevcfileac.h"
11 #include "ntv2utils.h"
12 #include "ntv2devicefeatures.h"
13 #include "ajabase/system/process.h"
15 
16 using namespace std;
17 
18 #define NTV2_AUDIOSIZE_MAX (401 * 1024)
19 
20 
// Constructor: records the user-selected device/channel/preset/format options
// and zero-fills the host ring-buffer descriptor tables.  No device access or
// buffer allocation happens here -- that is deferred to Init() and
// SetupHostBuffers().
// NOTE(review): the embedded listing numbers jump 45 -> 47 below, so one
// member initializer was dropped by the documentation extraction (mVideoFormat
// is consumed later in Init() but is not visibly initialized here) -- confirm
// against the shipped source.
21 NTV2EncodeHEVCFileAc::NTV2EncodeHEVCFileAc (const string inDeviceSpecifier,
22  const NTV2Channel inChannel,
23  const M31VideoPreset inPreset,
24  const NTV2FrameBufferFormat inPixelFormat,
25  const bool inQuadMode,
26  const uint32_t inAudioChannels,
27  const bool inTimeCodeBurn,
28  const bool inInfoData,
29  const uint32_t inMaxFrames)
30 
31 : mVideoInputThread (AJAThread()),
32  mVideoProcessThread (AJAThread()),
33  mCodecRawThread (AJAThread()),
34  mCodecHevcThread (AJAThread()),
35  mVideoFileThread (AJAThread()),
36  mAudioFileThread (AJAThread()),
37  mM31 (AJA_NULL),
38  mHevcCommon (AJA_NULL),
39  mDeviceID (DEVICE_ID_NOTFOUND),
40  mDeviceSpecifier (inDeviceSpecifier),
41  mWithAudio (inAudioChannels != 0),
42  mInputChannel (inChannel),
43  mEncodeChannel (M31_CH0),
44  mPreset (inPreset),
45  mInputSource (NTV2_INPUTSOURCE_SDI1),
47  mPixelFormat (inPixelFormat),
48  mQuad (inQuadMode),
49  mInterlaced (false),
50  mMultiStream (false),
51  mWithInfo (inInfoData),
52  mWithAnc (inTimeCodeBurn),
53  mAudioSystem (NTV2_AUDIOSYSTEM_1),
54  mSavedTaskMode (NTV2_STANDARD_TASKS),
55  mNumAudioChannels (0),
56  mFileAudioChannels (inAudioChannels),
57  mMaxFrames (inMaxFrames),
58  mLastFrame (false),
59  mLastFrameInput (false),
60  mLastFrameRaw (false),
61  mLastFrameHevc (false),
62  mLastFrameVideo (false),
63  mLastFrameAudio (false),
64  mGlobalQuit (false),
65  mVideoInputFrameCount (0),
66  mVideoProcessFrameCount (0),
67  mCodecRawFrameCount (0),
68  mCodecHevcFrameCount (0),
69  mVideoFileFrameCount (0),
70  mAudioFileFrameCount (0)
71 {
// Zero the descriptor arrays so the destructor can safely test each buffer
// pointer before deleting it, even if Init() is never called.
72  ::memset (mVideoInputBuffer, 0x0, sizeof (mVideoInputBuffer));
73  ::memset (mVideoRawBuffer, 0x0, sizeof (mVideoRawBuffer));
74  ::memset (mVideoHevcBuffer, 0x0, sizeof (mVideoHevcBuffer));
75  ::memset (mAudioInputBuffer, 0x0, sizeof (mAudioInputBuffer));
76 
77 } // constructor
78 
79 
// Destructor: stops all worker threads via Quit(), releases the M31 and HEVC
// helper objects, unsubscribes from the input vertical interrupt, then frees
// every host ring buffer allocated by SetupHostBuffers().
// NOTE(review): the destructor's signature line (listing line 80) was dropped
// by the documentation extraction; identity taken from the trailing
// "} // destructor" comment.
81 {
82  // Stop my capture and consumer threads, then destroy them...
83  Quit ();
84 
85  if (mM31 != AJA_NULL)
86  {
87  delete mM31;
88  mM31 = AJA_NULL;
89  }
90 
91  if (mHevcCommon != AJA_NULL)
92  {
93  delete mHevcCommon;
94  mHevcCommon = AJA_NULL;
95  }
96 
97  // unsubscribe from input vertical event...
98  mDevice.UnsubscribeInputVerticalEvent (mInputChannel);
99 
// Free all my buffers.  Each pointer was either allocated by
// SetupHostBuffers() or left null by the constructor's memset, so the
// null-checks below make this safe in both cases.
100  // free all my buffers...
101  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++)
102  {
103  if (mVideoInputBuffer[bufferNdx].pVideoBuffer)
104  {
105  delete [] mVideoInputBuffer[bufferNdx].pVideoBuffer;
106  mVideoInputBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
107  }
108  if (mVideoInputBuffer[bufferNdx].pInfoBuffer)
109  {
110  delete [] mVideoInputBuffer[bufferNdx].pInfoBuffer;
111  mVideoInputBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
112  }
113  if (mVideoInputBuffer[bufferNdx].pAudioBuffer)
114  {
115  delete [] mVideoInputBuffer[bufferNdx].pAudioBuffer;
116  mVideoInputBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
117  }
118 
119  if (mVideoRawBuffer[bufferNdx].pVideoBuffer)
120  {
121  delete [] mVideoRawBuffer[bufferNdx].pVideoBuffer;
122  mVideoRawBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
123  }
124  if (mVideoRawBuffer[bufferNdx].pInfoBuffer)
125  {
126  delete [] mVideoRawBuffer[bufferNdx].pInfoBuffer;
127  mVideoRawBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
128  }
129  if (mVideoRawBuffer[bufferNdx].pAudioBuffer)
130  {
131  delete [] mVideoRawBuffer[bufferNdx].pAudioBuffer;
132  mVideoRawBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
133  }
134 
135  if (mVideoHevcBuffer[bufferNdx].pVideoBuffer)
136  {
137  delete [] mVideoHevcBuffer[bufferNdx].pVideoBuffer;
138  mVideoHevcBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
139  }
140  if (mVideoHevcBuffer[bufferNdx].pInfoBuffer)
141  {
142  delete [] mVideoHevcBuffer[bufferNdx].pInfoBuffer;
143  mVideoHevcBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
144  }
145  if (mVideoHevcBuffer[bufferNdx].pAudioBuffer)
146  {
147  delete [] mVideoHevcBuffer[bufferNdx].pAudioBuffer;
148  mVideoHevcBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
149  }
150  }
151 
152  if (mWithAudio)
153  {
154  for (unsigned bufferNdx = 0; bufferNdx < AUDIO_RING_SIZE; bufferNdx++)
155  {
156  if (mAudioInputBuffer[bufferNdx].pVideoBuffer)
157  {
158  delete [] mAudioInputBuffer[bufferNdx].pVideoBuffer;
159  mAudioInputBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
160  }
161  if (mAudioInputBuffer[bufferNdx].pInfoBuffer)
162  {
163  delete [] mAudioInputBuffer[bufferNdx].pInfoBuffer;
164  mAudioInputBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
165  }
166  if (mAudioInputBuffer[bufferNdx].pAudioBuffer)
167  {
168  delete [] mAudioInputBuffer[bufferNdx].pAudioBuffer;
169  mAudioInputBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
170  }
171  }
172  }
173 } // destructor
174 
175 
// Quit: orderly shutdown.  First (if encoding is active) raises mLastFrame and
// polls up to ~3 seconds for the file writers to flush the final frame, then
// walks the M31 codec down through its state machine (EH ReadyToStop -> Stop,
// VIn Stop, and -- single-stream only -- Main back to Init).  Afterwards it
// raises mGlobalQuit, spin-waits for all six worker threads to exit, stops
// capture, releases the device (single-stream only), and closes output files.
// NOTE(review): the function signature line (listing 176) was dropped by the
// doc extraction; identity taken from the trailing "} // Quit" comment.
177 {
178  if (mM31 && !mLastFrame && !mGlobalQuit)
179  {
180  // Set the last frame flag to start the quit process
181  mLastFrame = true;
182 
// Poll (300 x 10 ms = ~3 s) for the video -- and, when enabled, audio --
// file threads to report the last frame written.
183  // Wait for the last frame to be written to disk
184  int i;
185  int timeout = 300;
186  for (i = 0; i < timeout; i++)
187  {
188  if (mLastFrameVideo && (!mWithAudio || mLastFrameAudio)) break;
189  AJATime::Sleep (10);
190  }
191  if (i == timeout)
192  { cerr << "## ERROR: Wait for last frame timeout" << endl; }
193 
194  // Stop the encoder stream
195  if (!mM31->ChangeEHState(Hevc_EhState_ReadyToStop, mEncodeChannel))
196  { cerr << "## ERROR: ChangeEHState ready to stop failed" << endl; }
197 
198  if (!mM31->ChangeEHState(Hevc_EhState_Stop, mEncodeChannel))
199  { cerr << "## ERROR: ChangeEHState stop failed" << endl; }
200 
201  // stop the video input stream
202  if (!mM31->ChangeVInState(Hevc_VinState_Stop, mEncodeChannel))
203  { cerr << "## ERROR: ChangeVInState stop failed" << endl; }
204 
205  if(!mMultiStream)
206  {
207  // Now go to the init state
208  if (!mM31->ChangeMainState(Hevc_MainState_Init, Hevc_EncodeMode_Single))
209  { cerr << "## ERROR: ChangeMainState to init failed" << endl; }
210  }
211  }
212 
213  // Stop the worker threads
214  mGlobalQuit = true;
215 
216  while (mVideoInputThread.Active())
217  AJATime::Sleep(10);
218 
219  while (mVideoProcessThread.Active())
220  AJATime::Sleep(10);
221 
222  while (mCodecRawThread.Active())
223  AJATime::Sleep(10);
224 
225  while (mCodecHevcThread.Active())
226  AJATime::Sleep(10);
227 
228  while (mVideoFileThread.Active())
229  AJATime::Sleep(10);
230 
231  while (mAudioFileThread.Active())
232  AJATime::Sleep(10);
233 
234  // Stop video capture
235  mDevice.SetMode(mInputChannel, NTV2_MODE_DISPLAY, false);
236 
237  // Release board
238  if (!mMultiStream)
239  {
240  mDevice.ReleaseStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid()));
241  mDevice.SetEveryFrameServices (mSavedTaskMode); // Restore prior task mode
242  }
243 
// NOTE(review): mHevcCommon is dereferenced here without a null check.  The
// destructor calls Quit() unconditionally, so if Init() failed before
// allocating mHevcCommon this would dereference AJA_NULL -- worth verifying.
244  // Close output files
245  mHevcCommon->CloseHevcFile();
246  if (mWithInfo)
247  mHevcCommon->CloseEncFile();
248  if (mWithAudio)
249  mHevcCommon->CloseAiffFile();
250 
251 } // Quit
252 
253 
// Init: one-time setup.  Opens the device, acquires it (single-stream only),
// verifies an M31 HEVC codec is present, allocates the M31 and demo-common
// helpers, resolves preset/pixel-format/channel configuration, configures
// video/routing/audio/codec, allocates host ring buffers, and creates the
// output files (raw[_N].hevc, optional .txt info and .aiff audio).
// Returns AJA_STATUS_SUCCESS, or a failure status on the first error.
// NOTE(review): the function signature line (listing 254) was dropped by the
// doc extraction; identity taken from the trailing "} // Init" comment.
255 {
256  AJAStatus status (AJA_STATUS_SUCCESS);
257 
258  // Open the device...
259  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, mDevice))
260  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return AJA_STATUS_OPEN; }
261 
262  // Grab board in a shared environment
263  if (!mMultiStream)
264  {
265  if (!mDevice.AcquireStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid())))
266  return AJA_STATUS_BUSY; // Another app is using the device
267  mDevice.GetEveryFrameServices (mSavedTaskMode); // Save the current state before we change it
268  }
269  mDevice.SetEveryFrameServices (NTV2_OEM_TASKS); // Since this is an OEM demo, use the OEM service level
270 
271  mDeviceID = mDevice.GetDeviceID (); // Keep the device ID handy, as it's used frequently
272 
273  // Make sure this device has an M31
274  if (!mDevice.features().HasHEVCM31())
275  {
276  cerr << "## ERROR: M31 not found" << endl;
277  return AJA_STATUS_FAIL;
278  }
279 
280  // Allocate our M31 helper class and our HEVC common class
281  mM31 = new CNTV2m31 (&mDevice);
282  mHevcCommon = new CNTV2DemoHevcCommon ();
283 
284  if ((mM31 == AJA_NULL) || (mHevcCommon == AJA_NULL))
285  {
286  return AJA_STATUS_FAIL;
287  }
288 
// A valid preset fully determines the NTV2 video format, pixel format, quad
// and interlace flags; otherwise those are derived from the SDI input below.
289  // Preset specification takes precedence
290  if (mPreset < M31_NUMVIDEOPRESETS)
291  {
292  // This class only handles file based presets so make sure they didn't pass in a vif one
293  if (CNTV2m31::IsPresetVIF(mPreset))
294  return AJA_STATUS_FAIL;
295 
296  // Get NTV2 formats to match codec preset
297  mVideoFormat = CNTV2m31::GetPresetVideoFormat(mPreset);
298  mPixelFormat = CNTV2m31::GetPresetFrameBufferFormat(mPreset);
299  mQuad = CNTV2m31::IsPresetUHD(mPreset);
300  mInterlaced = CNTV2m31::IsPresetInterlaced(mPreset);
301  }
302  // Otherwise use the pixel format and SDI input format
303  else if (mPixelFormat >= NTV2_FBF_NUMFRAMEBUFFERFORMATS)
304  {
305  mPixelFormat = NTV2_FBF_8BIT_YCBCR_420PL2;
306  }
307 
308  // Quad mode must be channel 1
309  if (mQuad)
310  {
311  mInputChannel = NTV2_CHANNEL1;
312  mEncodeChannel = M31_CH0;
313  }
314  else
315  {
// Explicitly choosing an input channel implies multistream operation; each
// NTV2 channel maps 1:1 onto an M31 encode channel.
316  // When input channel specified we are multistream
317  switch (mInputChannel)
318  {
319  case NTV2_CHANNEL1: { mEncodeChannel = M31_CH0; mMultiStream = true; break; }
320  case NTV2_CHANNEL2: { mEncodeChannel = M31_CH1; mMultiStream = true; break; }
321  case NTV2_CHANNEL3: { mEncodeChannel = M31_CH2; mMultiStream = true; break; }
322  case NTV2_CHANNEL4: { mEncodeChannel = M31_CH3; mMultiStream = true; break; }
323  default: { mInputChannel = NTV2_CHANNEL1; mEncodeChannel = M31_CH0; }
324  }
325  }
326 
327  // When video format is unknown determine from SDI input
328  if (mVideoFormat >= NTV2_MAX_NUM_VIDEO_FORMATS)
329  {
330  bool is3Gb = false;
331  mDevice.GetSDIInput3GbPresent (is3Gb, mInputChannel);
332 
333  // Get SDI input format
334  status = mHevcCommon->DetermineInputFormat(mDevice.GetSDIInputVideoFormat(mInputChannel, is3Gb), mQuad, mVideoFormat);
335  if (AJA_FAILURE(status))
336  return status;
337 
338  // Get codec preset for input format
339  if(!CNTV2m31::ConvertVideoFormatToPreset(mVideoFormat, mPixelFormat, false, mPreset))
340  return AJA_STATUS_FAIL;
341 
342  mQuad = CNTV2m31::IsPresetUHD(mPreset);
343  mInterlaced = CNTV2m31::IsPresetInterlaced(mPreset);
344  }
345 
346  // Setup frame buffer
347  status = SetupVideo ();
348  if (AJA_FAILURE (status))
349  return status;
350 
351  // Route input signals to frame buffers
352  RouteInputSignal ();
353 
354  // Setup audio buffer
355  status = SetupAudio ();
356  if (AJA_FAILURE (status))
357  return status;
358 
// NOTE(review): the embedded listing numbers jump 359 -> 361 here, so the
// statement under this comment (presumably the SetupAutoCirculate() call)
// was dropped by the doc extraction -- confirm against the shipped source.
359  // Setup to capture video/audio/anc input
361 
362  // Setup codec
363  status = mHevcCommon->SetupHEVC (mM31, mPreset, mEncodeChannel, mMultiStream, mWithInfo);
364  if (AJA_FAILURE (status))
365  return status;
366 
367  // Setup the circular buffers
368  SetupHostBuffers ();
369 
370  {
// Multistream output files are suffixed with the 1-based channel number so
// concurrent instances do not clobber each other.
371  // Create encoded video output file
372  ostringstream fileName;
373  if (mMultiStream)
374  fileName << "raw_" << (mInputChannel+1) << ".hevc";
375  else
376  fileName << "raw.hevc";
377  status = mHevcCommon->CreateHevcFile (fileName.str(), mMaxFrames);
378  if (AJA_FAILURE (status))
379  return status;
380  }
381 
382  if (mWithInfo)
383  {
384  // Create encoded data output file
385  ostringstream fileName;
386  if (mMultiStream)
387  fileName << "raw_" << (mInputChannel+1) << ".txt";
388  else
389  fileName << "raw.txt";
390  status = mHevcCommon->CreateEncFile (fileName.str(), mMaxFrames);
391  if (AJA_FAILURE (status))
392  return status;
393  }
394 
395  if (mWithAudio)
396  {
397  // Create audio output file
398  ostringstream fileName;
399  if (mMultiStream)
400  fileName << "raw_" << (mInputChannel+1) << ".aiff";
401  else
402  fileName << "raw.aiff";
403  status = mHevcCommon->CreateAiffFile (fileName.str(), mFileAudioChannels, mMaxFrames, NTV2_AUDIOSIZE_MAX);
404  if (AJA_FAILURE (status))
405  return status;
406  }
407 
408  return AJA_STATUS_SUCCESS;
409 
410 } // Init
411 
412 
// Accessor: returns the M31 codec preset in use.  Note the preset may differ
// from the constructor argument -- Init() can derive it from the detected SDI
// input format when no valid preset was supplied.
// NOTE(review): the function signature line (listing 413) was dropped by the
// doc extraction; presumably this is GetCodecPreset() -- confirm against the
// shipped source.
414 {
415  return mPreset;
416 }
417 
418 
// SetupVideo: configures device video for one of three modes --
//   quad (UHD):   whole board, ch1-4 capture + ch5-8 display, input on ch1;
//   multistream:  per-channel format/capture on mInputChannel only;
//   single:       whole board, all channels display then mInputChannel capture.
// Also sets the device reference, enables/subscribes the input interrupt, and
// prepares the timecode-burn renderer.  Returns AJA_STATUS_SUCCESS or
// AJA_STATUS_FAIL (quad mode on a channel other than 1).
// NOTE(review): the function signature line (listing 419) was dropped by the
// doc extraction; identity taken from the trailing "} // SetupVideo" comment.
420 {
421  // Setup frame buffer
422  if (mQuad)
423  {
424  if (mInputChannel != NTV2_CHANNEL1)
425  return AJA_STATUS_FAIL;
426 
427  // Disable multiformat
428  if (mDevice.features().CanDoMultiFormat())
429  mDevice.SetMultiFormatMode (false);
430 
431  // Set the board video format
432  mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL1);
433 
434  // Set frame buffer format
435  mDevice.SetFrameBufferFormat (NTV2_CHANNEL1, mPixelFormat);
436  mDevice.SetFrameBufferFormat (NTV2_CHANNEL2, mPixelFormat);
437  mDevice.SetFrameBufferFormat (NTV2_CHANNEL3, mPixelFormat);
438  mDevice.SetFrameBufferFormat (NTV2_CHANNEL4, mPixelFormat);
439  mDevice.SetFrameBufferFormat (NTV2_CHANNEL5, mPixelFormat);
440  mDevice.SetFrameBufferFormat (NTV2_CHANNEL6, mPixelFormat);
441  mDevice.SetFrameBufferFormat (NTV2_CHANNEL7, mPixelFormat);
442  mDevice.SetFrameBufferFormat (NTV2_CHANNEL8, mPixelFormat);
443 
444  // Set capture mode
445  mDevice.SetMode (NTV2_CHANNEL1, NTV2_MODE_CAPTURE, false);
446  mDevice.SetMode (NTV2_CHANNEL2, NTV2_MODE_CAPTURE, false);
447  mDevice.SetMode (NTV2_CHANNEL3, NTV2_MODE_CAPTURE, false);
448  mDevice.SetMode (NTV2_CHANNEL4, NTV2_MODE_CAPTURE, false);
449  mDevice.SetMode (NTV2_CHANNEL5, NTV2_MODE_DISPLAY, false);
450  mDevice.SetMode (NTV2_CHANNEL6, NTV2_MODE_DISPLAY, false);
451  mDevice.SetMode (NTV2_CHANNEL7, NTV2_MODE_DISPLAY, false);
452  mDevice.SetMode (NTV2_CHANNEL8, NTV2_MODE_DISPLAY, false);
453 
454  // Enable frame buffers
455  mDevice.EnableChannel (NTV2_CHANNEL1);
456  mDevice.EnableChannel (NTV2_CHANNEL2);
457  mDevice.EnableChannel (NTV2_CHANNEL3);
458  mDevice.EnableChannel (NTV2_CHANNEL4);
459  mDevice.EnableChannel (NTV2_CHANNEL5);
460  mDevice.EnableChannel (NTV2_CHANNEL6);
461  mDevice.EnableChannel (NTV2_CHANNEL7);
462  mDevice.EnableChannel (NTV2_CHANNEL8);
463 
464  // Save input source
465  mInputSource = ::NTV2ChannelToInputSource (NTV2_CHANNEL1);
466  }
467  else if (mMultiStream)
468  {
469  // Configure for multiformat
470  if (mDevice.features().CanDoMultiFormat())
471  mDevice.SetMultiFormatMode (true);
472 
473  // Set the channel video format
474  mDevice.SetVideoFormat (mVideoFormat, false, false, mInputChannel);
475 
476  // Set frame buffer format
477  mDevice.SetFrameBufferFormat (mInputChannel, mPixelFormat);
478 
479  // Set capture mode
480  mDevice.SetMode (mInputChannel, NTV2_MODE_CAPTURE, false);
481 
482  // Enable frame buffer
483  mDevice.EnableChannel (mInputChannel);
484 
485  // Save input source
486  mInputSource = ::NTV2ChannelToInputSource (mInputChannel);
487  }
488  else
489  {
490  // Disable multiformat mode
491  if (mDevice.features().CanDoMultiFormat())
492  mDevice.SetMultiFormatMode (false);
493 
494  // Set the board format
495  mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL1);
496 
497  // Set frame buffer format
498  mDevice.SetFrameBufferFormat (mInputChannel, mPixelFormat);
499 
500  // Set display mode
501  mDevice.SetMode (NTV2_CHANNEL1, NTV2_MODE_DISPLAY, false);
502  mDevice.SetMode (NTV2_CHANNEL2, NTV2_MODE_DISPLAY, false);
503  mDevice.SetMode (NTV2_CHANNEL3, NTV2_MODE_DISPLAY, false);
504  mDevice.SetMode (NTV2_CHANNEL4, NTV2_MODE_DISPLAY, false);
505  mDevice.SetMode (NTV2_CHANNEL5, NTV2_MODE_DISPLAY, false);
506  mDevice.SetMode (NTV2_CHANNEL6, NTV2_MODE_DISPLAY, false);
507  mDevice.SetMode (NTV2_CHANNEL7, NTV2_MODE_DISPLAY, false);
508  mDevice.SetMode (NTV2_CHANNEL8, NTV2_MODE_DISPLAY, false);
509 
510  // Set capture mode
511  mDevice.SetMode (mInputChannel, NTV2_MODE_CAPTURE, false);
512 
513  // Enable frame buffer
514  mDevice.EnableChannel (mInputChannel);
515 
516  // Save input source
517  mInputSource = ::NTV2ChannelToInputSource (mInputChannel);
518  }
519 
520  // Set the device reference to the input...
521  if (mMultiStream)
522  {
// NOTE(review): the embedded listing numbers jump 522 -> 524, so the body of
// this branch (presumably a SetReference call, e.g. free-run for multistream)
// was dropped by the doc extraction -- confirm against the shipped source.
524  }
525  else
526  {
527  mDevice.SetReference (::NTV2InputSourceToReferenceSource (mInputSource));
528  }
529 
530  // Enable and subscribe to the interrupts for the channel to be used...
531  mDevice.EnableInputInterrupt (mInputChannel);
532  mDevice.SubscribeInputVerticalEvent (mInputChannel);
533 
// Pre-render the timecode font so the burn-in path has no per-frame setup.
534  // Setup for timecode burn
535  mTimeBase.SetAJAFrameRate (mHevcCommon->GetAJAFrameRate(GetNTV2FrameRateFromVideoFormat (mVideoFormat)));
536  mTimeCodeBurn.RenderTimeCodeFont (mHevcCommon->GetAJAPixelFormat (mPixelFormat),
537  GetDisplayWidth (mVideoFormat),
538  GetDisplayHeight (mVideoFormat));
539 
540  return AJA_STATUS_SUCCESS;
541 
542 } // SetupVideo
543 
544 
// SetupAudio: selects the audio system (per-channel in multistream when the
// device has enough audio systems), points it at the embedded SDI audio for
// the input channel, and configures channel count, 48 kHz rate and a large
// on-device capture buffer.  Always returns AJA_STATUS_SUCCESS.
// NOTE(review): the function signature line (listing 545) was dropped by the
// doc extraction; also listing numbers jump 556 -> 558, so one statement
// after SetAudioRate was dropped -- confirm against the shipped source.
546 {
547  // In multiformat mode, base the audio system on the channel...
548  if (mMultiStream && mDevice.features().GetNumAudioSystems() > 1 && UWord(mInputChannel) < mDevice.features().GetNumAudioSystems())
549  mAudioSystem = ::NTV2ChannelToAudioSystem(mInputChannel);
550 
551  // Have the audio system capture audio from the designated device input (i.e., ch1 uses SDIIn1, ch2 uses SDIIn2, etc.)...
552  mDevice.SetAudioSystemInputSource (mAudioSystem, NTV2_AUDIO_EMBEDDED, ::NTV2ChannelToEmbeddedAudioInput (mInputChannel));
553 
554  mNumAudioChannels = mDevice.features().GetMaxAudioChannels();
555  mDevice.SetNumberAudioChannels (mNumAudioChannels, mAudioSystem);
556  mDevice.SetAudioRate (NTV2_AUDIO_48K, mAudioSystem);
558 
559  // The on-device audio buffer should be 4MB to work best across all devices & platforms...
560  mDevice.SetAudioBufferSize (NTV2_AUDIO_BUFFER_BIG, mAudioSystem);
561 
562  return AJA_STATUS_SUCCESS;
563 
564 } // SetupAudio
565 
566 
// SetupHostBuffers: computes per-frame buffer sizes, then allocates and links
// the four host circular buffers (video input, raw, HEVC, and -- when audio is
// enabled -- audio input).  Each ring entry gets a video buffer plus an info
// buffer sized for two HevcPictureInfo / HevcEncodedInfo records (two fields
// per frame when interlaced).  All rings share mGlobalQuit as abort flag so
// blocked producers/consumers wake on shutdown.  Freed by the destructor.
// NOTE(review): the function signature line (listing 567) was dropped by the
// doc extraction; identity taken from the trailing "} // SetupHostBuffers".
568 {
569  mVideoBufferSize = GetVideoActiveSize (mVideoFormat, mPixelFormat, NTV2_VANCMODE_OFF);
570  mPicInfoBufferSize = sizeof(HevcPictureInfo)*2;
571  mEncInfoBufferSize = sizeof(HevcEncodedInfo)*2;
572  mAudioBufferSize = NTV2_AUDIOSIZE_MAX;
573 
574  // video input ring
575  mVideoInputCircularBuffer.SetAbortFlag (&mGlobalQuit);
576  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
577  {
578  memset (&mVideoInputBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
579  mVideoInputBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
580  mVideoInputBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
581  mVideoInputBuffer[bufferNdx].videoDataSize = 0;
582  mVideoInputBuffer[bufferNdx].videoDataSize2 = 0;
583  mVideoInputBuffer[bufferNdx].pInfoBuffer = new uint32_t [mPicInfoBufferSize/4];
584  mVideoInputBuffer[bufferNdx].infoBufferSize = mPicInfoBufferSize;
585  mVideoInputBuffer[bufferNdx].infoDataSize = 0;
586  mVideoInputBuffer[bufferNdx].infoDataSize2 = 0;
587  mVideoInputCircularBuffer.Add (& mVideoInputBuffer[bufferNdx]);
588  }
589 
590  // video raw ring
591  mVideoRawCircularBuffer.SetAbortFlag (&mGlobalQuit);
592  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
593  {
594  memset (&mVideoRawBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
595  mVideoRawBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
596  mVideoRawBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
597  mVideoRawBuffer[bufferNdx].videoDataSize = 0;
598  mVideoRawBuffer[bufferNdx].videoDataSize2 = 0;
599  mVideoRawBuffer[bufferNdx].pInfoBuffer = new uint32_t [mPicInfoBufferSize/4];
600  mVideoRawBuffer[bufferNdx].infoBufferSize = mPicInfoBufferSize;
601  mVideoRawBuffer[bufferNdx].infoDataSize = 0;
602  mVideoRawBuffer[bufferNdx].infoDataSize2 = 0;
603  mVideoRawCircularBuffer.Add (& mVideoRawBuffer[bufferNdx]);
604  }
605 
// The HEVC ring reuses the raw video buffer size (compressed output is
// always smaller) but carries HevcEncodedInfo rather than HevcPictureInfo.
606  // video hevc ring
607  mVideoHevcCircularBuffer.SetAbortFlag (&mGlobalQuit);
608  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
609  {
610  memset (&mVideoHevcBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
611  mVideoHevcBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
612  mVideoHevcBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
613  mVideoHevcBuffer[bufferNdx].videoDataSize = 0;
614  mVideoHevcBuffer[bufferNdx].videoDataSize2 = 0;
615  mVideoHevcBuffer[bufferNdx].pInfoBuffer = new uint32_t [mEncInfoBufferSize/4];
616  mVideoHevcBuffer[bufferNdx].infoBufferSize = mEncInfoBufferSize;
617  mVideoHevcBuffer[bufferNdx].infoDataSize = 0;
618  mVideoHevcBuffer[bufferNdx].infoDataSize2 = 0;
619  mVideoHevcCircularBuffer.Add (& mVideoHevcBuffer[bufferNdx]);
620  }
621 
622  if (mWithAudio)
623  {
624  // audio input ring
625  mAudioInputCircularBuffer.SetAbortFlag (&mGlobalQuit);
626  for (unsigned bufferNdx = 0; bufferNdx < AUDIO_RING_SIZE; bufferNdx++ )
627  {
628  memset (&mAudioInputBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
629  mAudioInputBuffer[bufferNdx].pAudioBuffer = new uint32_t [mAudioBufferSize/4];
630  mAudioInputBuffer[bufferNdx].audioBufferSize = mAudioBufferSize;
631  mAudioInputBuffer[bufferNdx].audioDataSize = 0;
632  mAudioInputCircularBuffer.Add (& mAudioInputBuffer[bufferNdx]);
633  }
634  }
635 
636 } // SetupHostBuffers
637 
638 
// RouteInputSignal: puts SDI 1-4 into receive and SDI 5-8 into transmit,
// waits for input lock, applies level-B -> level-A conversion when the input
// is 3Gb, then builds and applies the signal routing (replacing the existing
// routing unless in multistream mode).
// NOTE(review): the function signature line (listing 639) was dropped by the
// doc extraction; identity taken from the trailing "} // RouteInputSignal".
640 {
641  // setup sdi io
642  mDevice.SetSDITransmitEnable (NTV2_CHANNEL1, false);
643  mDevice.SetSDITransmitEnable (NTV2_CHANNEL2, false);
644  mDevice.SetSDITransmitEnable (NTV2_CHANNEL3, false);
645  mDevice.SetSDITransmitEnable (NTV2_CHANNEL4, false);
646  mDevice.SetSDITransmitEnable (NTV2_CHANNEL5, true);
647  mDevice.SetSDITransmitEnable (NTV2_CHANNEL6, true);
648  mDevice.SetSDITransmitEnable (NTV2_CHANNEL7, true);
649  mDevice.SetSDITransmitEnable (NTV2_CHANNEL8, true);
650 
651  // Give the device some time to lock to the input signal...
652  mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);
653 
654  // When input is 3Gb convert to 3Ga for capture (no RGB support?)
655  bool is3Gb = false;
656  mDevice.GetSDIInput3GbPresent (is3Gb, mInputChannel);
657 
658  if (mQuad)
659  {
// NOTE(review): the embedded listing numbers jump 659 -> 668, so the quad
// branch's body (presumably per-channel level-B-to-A conversion calls) was
// dropped by the doc extraction -- confirm against the shipped source.
668  }
669  else
670  {
671  mDevice.SetSDIInLevelBtoLevelAConversion (mInputChannel, is3Gb);
672  }
673 
674  // Use a "Routing" object, which handles the details of writing
675  // the appropriate values into the appropriate device registers...
676  CNTV2SignalRouter router;
677 
// NOTE(review): listing numbers jump 677 -> 686 here, so the statements that
// populate 'router' with input->framebuffer connections were dropped by the
// doc extraction -- as shown, an empty router would be applied.  Confirm
// against the shipped source.
686 
687  // Add this signal routing (or replace if not doing multistream)...
688  mDevice.ApplySignalRoute (router, !mMultiStream);
689 
690  // Give the device some time to lock to the input signal...
691  mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);
692 
693 } // RouteInputSignal
694 
695 
// SetupInputAutoCirculate: stops any prior AutoCirculate on the input channel
// and re-initializes capture with 16 device frame buffers, RP188 timecode,
// and the configured audio system (or none when audio is disabled).
// NOTE(review): the function signature line (listing 696) was dropped by the
// doc extraction; identity taken from "} // SetupInputAutoCirculate".
697 {
698  // Tell capture AutoCirculate to use 16 frame buffers on the device...
699  mDevice.AutoCirculateStop (mInputChannel);
700  mDevice.AutoCirculateInitForInput (mInputChannel, 16, // Frames to circulate
701  mWithAudio ? mAudioSystem : NTV2_AUDIOSYSTEM_INVALID, // Which audio system (if any)?
702  AUTOCIRCULATE_WITH_RP188); // With RP188?
703 } // SetupInputAutoCirculate
704 
705 
// Run: warns if no input signal is detected, then launches the worker
// threads.  Always returns AJA_STATUS_SUCCESS.
// NOTE(review): the function signature line (listing 706) was dropped by the
// doc extraction, as were listing lines 712-716 and 719 -- i.e. the actual
// thread-start calls (and the audio-thread start inside the if-block below).
// Confirm against the shipped source; as shown the function starts nothing.
707 {
708  if (mDevice.GetInputVideoFormat (mInputSource) == NTV2_FORMAT_UNKNOWN)
709  cout << endl << "## WARNING: No video signal present on the input connector" << endl;
710 
711  // Start the playout and capture threads...
717  if (mWithAudio)
718  {
720  }
721 
722  return AJA_STATUS_SUCCESS;
723 
724 } // Run
725 
726 
// StartVideoInputThread: attaches the static trampoline with 'this' as
// context, raises priority, and starts the capture thread.
// NOTE(review): the function signature line (listing 728) was dropped by the
// doc extraction; identity taken from "} // StartVideoInputThread".
727 // This is where we will start the video input thread
729 {
730  mVideoInputThread.Attach(VideoInputThreadStatic, this);
731  mVideoInputThread.SetPriority(AJA_ThreadPriority_High);
732  mVideoInputThread.Start();
733 
734 } // StartVideoInputThread
735 
736 
// VideoInputThreadStatic: AJAThread trampoline -- recovers the app instance
// from the opaque context and runs its capture worker loop.
// NOTE(review): the function signature line (listing 738) was dropped by the
// doc extraction; identity taken from "} // VideoInputThreadStatic".
737 // The video input thread static callback
739 {
740  (void) pThread;
741 
742  NTV2EncodeHEVCFileAc * pApp (reinterpret_cast <NTV2EncodeHEVCFileAc *> (pContext));
743  pApp->VideoInputWorker ();
744 
745 } // VideoInputThreadStatic
746 
747 
// VideoInputWorker: capture loop.  Opens its own CNTV2Card handle (so its
// AutoCirculate transfers don't contend with the main device object), starts
// AutoCirculate, and for each available input frame transfers video (and
// optionally audio) into the next ring buffer, stamps timecode and per-frame
// HevcPictureData (serial number, 90 kHz PTS, 1-based picture number; two
// records per frame when interlaced), then publishes the buffer.  Runs until
// mGlobalQuit, then stops AutoCirculate and unsubscribes.
// NOTE(review): the function signature line (listing 748) was dropped by the
// doc extraction; identity taken from "} // VideoInputWorker".
749 {
750  CNTV2Card ntv2Device;
751  AUTOCIRCULATE_TRANSFER inputXfer;
752 
753  // Open the device...
754  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
755  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
756 
757  ntv2Device.SubscribeInputVerticalEvent (mInputChannel);
758 
759  // start AutoCirculate running...
760  ntv2Device.AutoCirculateStart (mInputChannel);
761 
762  while (!mGlobalQuit)
763  {
764  AUTOCIRCULATE_STATUS acStatus;
765  ntv2Device.AutoCirculateGetStatus (mInputChannel, acStatus);
766 
767  // wait for captured frame
768  if (acStatus.IsRunning() && acStatus.HasAvailableInputFrame())
769  {
770  // At this point, there's at least one fully-formed frame available in the device's
771  // frame buffer to transfer to the host. Reserve an AvaDataBuffer to "produce", and
772  // use it in the next transfer from the device...
773  AVHevcDataBuffer * pVideoData (mVideoInputCircularBuffer.StartProduceNextBuffer ());
774  if (pVideoData)
775  {
776  // setup buffer pointers for transfer
777  inputXfer.SetBuffers (pVideoData->pVideoBuffer, pVideoData->videoBufferSize, AJA_NULL, 0, AJA_NULL, 0);
778 
// Audio rides in a separate ring; reserve its slot (if any is free) and
// attach its buffer to the same AutoCirculate transfer.
779  AVHevcDataBuffer * pAudioData = AJA_NULL;
780  if (mWithAudio)
781  {
782  pAudioData = mAudioInputCircularBuffer.StartProduceNextBuffer ();
783  if (pAudioData)
784  {
785  inputXfer.SetAudioBuffer (pAudioData->pAudioBuffer, pAudioData->audioBufferSize);
786  }
787  }
788 
789  // do the transfer from the device into our host AvaDataBuffer...
790  ntv2Device.AutoCirculateTransfer (mInputChannel, inputXfer);
791 
792  // get the video data size
793  pVideoData->videoDataSize = pVideoData->videoBufferSize;
794  pVideoData->audioDataSize = 0;
795  pVideoData->lastFrame = mLastFrame;
796 
797  if (mWithAudio && pAudioData)
798  {
799  // get the audio data size
800  pAudioData->audioDataSize = inputXfer.GetCapturedAudioByteCount();
801  pAudioData->lastFrame = mLastFrame;
802  }
803 
804  if (mWithAnc)
805  {
// Capture the RP188 timecode that rode in with this frame so the process
// stage can burn it into the picture.
806  // get the sdi input anc data
807  NTV2_RP188 timecode;
808  inputXfer.GetInputTimeCode (timecode);
809  pVideoData->timeCodeDBB = timecode.fDBB;
810  pVideoData->timeCodeLow = timecode.fLo;
811  pVideoData->timeCodeHigh = timecode.fHi;
812  }
813 
814  if (mWithInfo)
815  {
816  // get picture and additional data pointers
817  HevcPictureInfo * pInfo = (HevcPictureInfo*)pVideoData->pInfoBuffer;
818  HevcPictureData * pPicData = &pInfo->pictureData;
819 
820  // initialize info buffer to 0
821  memset(pInfo, 0, pVideoData->infoBufferSize);
822 
823  // calculate pts based on 90 Khz clock tick
824  uint64_t pts = (uint64_t)mTimeBase.FramesToMicroseconds(mVideoInputFrameCount)*90000/1000000;
825 
826  // set serial number, pts and picture number
827  pPicData->serialNumber = mVideoInputFrameCount; // can be anything
828  pPicData->ptsValueLow = (uint32_t)(pts & 0xffffffff); // (frame 5720000@60 test roll over)
829  pPicData->ptsValueHigh = (uint32_t)((pts >> 32) & 0x1); // only use 1 bit
830  pPicData->pictureNumber = mVideoInputFrameCount + 1; // must count starting with 1
831 
832  // set info data size
833  pVideoData->infoDataSize = sizeof(HevcPictureData);
834 
// Interlaced: each capture yields two fields, so write a second picture
// record (doubled serial/picture numbers, PTS advanced by half a frame).
835  if(mInterlaced)
836  {
837  pPicData->serialNumber = mVideoInputFrameCount*2;
838  pPicData->pictureNumber = mVideoInputFrameCount*2 + 1;
839 
840  // get picture and additional data pointers
841  pInfo = (HevcPictureInfo*)(pVideoData->pInfoBuffer + sizeof(HevcPictureInfo)/4);
842  pPicData = &pInfo->pictureData;
843 
844  // add half a frame time to pts
845  pts = pts + (uint64_t)mTimeBase.FramesToMicroseconds(1)*90000/1000000/2;
846 
847  // set serial number, pts and picture number
848  pPicData->serialNumber = mVideoInputFrameCount*2 + 1;
849  pPicData->ptsValueLow = (uint32_t)(pts & 0xffffffff);
850  pPicData->ptsValueHigh = (uint32_t)((pts >> 32) & 0x1);
851  pPicData->pictureNumber = mVideoInputFrameCount*2 + 2;
852 
853  // set info data size
854  pVideoData->infoDataSize2 = sizeof(HevcPictureData);
855  }
856  }
857 
858  if(pVideoData->lastFrame && !mLastFrameInput)
859  {
860  printf ( "\nCapture last frame number %d\n", mVideoInputFrameCount );
861  mLastFrameInput = true;
862  }
863 
864  mVideoInputFrameCount++;
865 
866  if (mWithAudio && pAudioData)
867  {
868  mAudioInputCircularBuffer.EndProduceNextBuffer ();
869  }
870 
871  // signal that we're done "producing" the frame, making it available for future "consumption"...
872  mVideoInputCircularBuffer.EndProduceNextBuffer ();
873  } // if A/C running and frame(s) are available for transfer
874  }
875  else
876  {
877  // Either AutoCirculate is not running, or there were no frames available on the device to transfer.
878  // Rather than waste CPU cycles spinning, waiting until a frame becomes available, it's far more
879  // efficient to wait for the next input vertical interrupt event to get signaled...
880  ntv2Device.WaitForInputVerticalInterrupt (mInputChannel);
881  }
882  } // loop til quit signaled
883 
884  // Stop AutoCirculate...
885  ntv2Device.AutoCirculateStop (mInputChannel);
886 
887  ntv2Device.UnsubscribeInputVerticalEvent (mInputChannel);
888 
889 } // VideoInputWorker
890 
891 
// StartVideoProcessThread: attaches the static trampoline with 'this' as
// context, raises priority, and starts the process thread.
// NOTE(review): the function signature line (listing 893) was dropped by the
// doc extraction; identity taken from "} // StartVideoProcessThread".
892 // This is where we start the video process thread
894 {
895  mVideoProcessThread.Attach(VideoProcessThreadStatic, this);
896  mVideoProcessThread.SetPriority(AJA_ThreadPriority_High);
897  mVideoProcessThread.Start();
898 
899 } // StartVideoProcessThread
900 
901 
// VideoProcessThreadStatic: AJAThread trampoline -- recovers the app instance
// from the opaque context and runs its video process worker loop.
// NOTE(review): the function signature line (listing 903) was dropped by the
// doc extraction; identity taken from "} // VideoProcessThreadStatic".
902 // The video process static callback
904 {
905  (void) pThread;
906 
907  NTV2EncodeHEVCFileAc * pApp (reinterpret_cast <NTV2EncodeHEVCFileAc *> (pContext));
908  pApp->VideoProcessWorker ();
909 
910 } // VideoProcessThreadStatic
911 
912 
// VideoProcessWorker: pipeline stage between capture and the codec.  Consumes
// each captured buffer, produces a raw-ring buffer, hands the pair to
// ProcessVideoFrame() (defined elsewhere in this class), and releases both.
// Blocking ring operations return null on abort, so the loop exits promptly
// when mGlobalQuit is raised.
// NOTE(review): the function signature line (listing 913) was dropped by the
// doc extraction; identity taken from "} // VideoProcessWorker".
914 {
915  while (!mGlobalQuit)
916  {
917  // wait for the next video input buffer
918  AVHevcDataBuffer * pSrcFrameData (mVideoInputCircularBuffer.StartConsumeNextBuffer ());
919  if (pSrcFrameData)
920  {
921  // wait for the next video raw buffer
922  AVHevcDataBuffer * pDstFrameData (mVideoRawCircularBuffer.StartProduceNextBuffer ());
923  if (pDstFrameData)
924  {
925  // do something useful with the frame data...
926  ProcessVideoFrame(pSrcFrameData, pDstFrameData, mVideoProcessFrameCount);
927 
928  mVideoProcessFrameCount++;
929 
930  // release the video raw buffer
931  mVideoRawCircularBuffer.EndProduceNextBuffer ();
932  }
933 
934  // release the video input buffer
935  mVideoInputCircularBuffer.EndConsumeNextBuffer ();
936 
937  }
938  } // loop til quit signaled
939 
940 } // VideoProcessWorker
941 
942 
// StartCodecRawThread: attaches the static trampoline with 'this' as context,
// raises priority, and starts the codec raw-transfer thread.
// NOTE(review): the function signature line (listing 944) was dropped by the
// doc extraction; identity taken from "} // StartCodecRawThread".
942 // This is where we start the codec raw thread
945 {
946  mCodecRawThread.Attach(CodecRawThreadStatic, this);
947  mCodecRawThread.SetPriority(AJA_ThreadPriority_High);
948  mCodecRawThread.Start();
949 
950 } // StartCodecRawThread
951 
952 
953 // The codec raw static callback
954 void NTV2EncodeHEVCFileAc::CodecRawThreadStatic (AJAThread * pThread, void * pContext)
955 {
956  (void) pThread;
957 
958  NTV2EncodeHEVCFileAc * pApp (reinterpret_cast <NTV2EncodeHEVCFileAc *> (pContext));
959  pApp->CodecRawWorker ();
960 
961 } // CodecRawThreadStatic
962 
963 
// CodecRawWorker: feeds raw frames to the M31 codec.  Opens its own device
// handle and its own CNTV2m31 helper (deleted on exit), then consumes the raw
// ring: progressive frames go in one RawTransfer, interlaced frames in two
// (second call flagged as the bottom field), each with or without the picture
// info buffer depending on mWithInfo.  Transfers stop once the buffer flagged
// lastFrame has been sent; buffers are still drained until mGlobalQuit.
// NOTE(review): the function signature line (listing 964) was dropped by the
// doc extraction; identity taken from "} // CodecRawWorker".
965 {
966  CNTV2Card ntv2Device;
967  CNTV2m31 * m31;
968 
// NOTE(review): early return here leaks nothing (m31 not yet allocated), but
// 'm31' is a raw owning pointer freed only at the bottom of the function.
969  // Open the device...
970  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
971  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
972 
973  // Allocate our M31 helper class and our HEVC common class
974  m31 = new CNTV2m31 (&ntv2Device);
975 
976  while (!mGlobalQuit)
977  {
978  // wait for the next raw video frame
979  AVHevcDataBuffer * pFrameData (mVideoRawCircularBuffer.StartConsumeNextBuffer ());
980  if (pFrameData)
981  {
982  if (!mLastFrameRaw)
983  {
984  // transfer the raw video frame to the codec
985  if (mInterlaced)
986  {
987  if (mWithInfo)
988  {
// First field: same video buffer, first HevcPictureInfo record.
989  m31->RawTransfer(mPreset, mEncodeChannel,
990  (uint8_t*)pFrameData->pVideoBuffer,
991  pFrameData->videoDataSize,
992  (uint8_t*)pFrameData->pInfoBuffer,
993  pFrameData->infoDataSize,
994  false, false);
995 
// Second field: second info record; 'true' marks the field phase and the
// lastFrame flag is forwarded only on this final transfer.
996  m31->RawTransfer(mPreset, mEncodeChannel,
997  (uint8_t*)pFrameData->pVideoBuffer,
998  pFrameData->videoDataSize,
999  ((uint8_t*)pFrameData->pInfoBuffer) + sizeof(HevcPictureInfo),
1000  pFrameData->infoDataSize2,
1001  true, pFrameData->lastFrame);
1002  }
1003  else
1004  {
1005  m31->RawTransfer(mPreset, mEncodeChannel,
1006  (uint8_t*)pFrameData->pVideoBuffer,
1007  pFrameData->videoDataSize,
1008  false, false);
1009 
1010  m31->RawTransfer(mPreset, mEncodeChannel,
1011  (uint8_t*)pFrameData->pVideoBuffer,
1012  pFrameData->videoDataSize,
1013  true, pFrameData->lastFrame);
1014  }
1015  }
1016  else
1017  {
1018  if (mWithInfo)
1019  {
1020  m31->RawTransfer(mEncodeChannel,
1021  (uint8_t*)pFrameData->pVideoBuffer,
1022  pFrameData->videoDataSize,
1023  (uint8_t*)pFrameData->pInfoBuffer,
1024  pFrameData->infoDataSize,
1025  pFrameData->lastFrame);
1026  }
1027  else
1028  {
1029  m31->RawTransfer(mEncodeChannel,
1030  (uint8_t*)pFrameData->pVideoBuffer,
1031  pFrameData->videoDataSize,
1032  pFrameData->lastFrame);
1033  }
1034  }
1035  if (pFrameData->lastFrame)
1036  {
1037  mLastFrameRaw = true;
1038  }
1039 
1040  mCodecRawFrameCount++;
1041  }
1042 
1043  // release the raw video frame
1044  mVideoRawCircularBuffer.EndConsumeNextBuffer ();
1045  }
1046  } // loop til quit signaled
1047 
1048  delete m31;
1049 } // CodecRawWorker
1050 
1051 
1052 // This is where we will start the codec hevc thread
1054 {
1055  mCodecHevcThread.Attach(CodecHevcThreadStatic, this);
1056  mCodecHevcThread.SetPriority(AJA_ThreadPriority_High);
1057  mCodecHevcThread.Start();
1058 
1059 } // StartCodecHevcThread
1060 
1061 
1062 // The codec hevc static callback
1064 {
1065  (void) pThread;
1066 
1067  NTV2EncodeHEVCFileAc * pApp (reinterpret_cast <NTV2EncodeHEVCFileAc *> (pContext));
1068  pApp->CodecHevcWorker ();
1069 
1070 } // CodecHevcThreadStatic
1071 
1072 
1074 {
1075  CNTV2Card ntv2Device;
1076  CNTV2m31 * m31;
1077 
1078  // Open the device...
1079  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
1080  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
1081 
1082  // Allocate our M31 helper class and our HEVC common class
1083  m31 = new CNTV2m31 (&ntv2Device);
1084 
1085  while (!mGlobalQuit)
1086  {
1087  // wait for the next hevc frame
1088  AVHevcDataBuffer * pFrameData (mVideoHevcCircularBuffer.StartProduceNextBuffer ());
1089  if (pFrameData)
1090  {
1091  if (!mLastFrameHevc)
1092  {
1093  if (mInterlaced)
1094  {
1095  // get field 1 video and info buffer and size
1096  uint8_t* pVideoBuffer = (uint8_t*)pFrameData->pVideoBuffer;
1097  uint8_t* pInfoBuffer = (uint8_t*)pFrameData->pInfoBuffer;
1098  uint32_t videoBufferSize = pFrameData->videoBufferSize;
1099  uint32_t infoBufferSize = sizeof(HevcEncodedInfo);
1100 
1101  // transfer an hevc field 1 from the codec including encoded information
1102  m31->EncTransfer(mEncodeChannel,
1103  pVideoBuffer,
1104  videoBufferSize,
1105  pInfoBuffer,
1106  infoBufferSize,
1107  pFrameData->videoDataSize,
1108  pFrameData->infoDataSize,
1109  pFrameData->lastFrame);
1110 
1111  // round the video size up
1112  pFrameData->videoDataSize = mHevcCommon->AlignDataBuffer(pVideoBuffer,
1113  videoBufferSize,
1114  pFrameData->videoDataSize,
1115  8, 0xff);
1116  // round the info size up
1117  pFrameData->infoDataSize = mHevcCommon->AlignDataBuffer(pInfoBuffer,
1118  infoBufferSize,
1119  pFrameData->infoDataSize,
1120  8, 0);
1121 
1122  // get field 2 video and info buffer and size
1123  pVideoBuffer = ((uint8_t*)pFrameData->pVideoBuffer) + pFrameData->videoDataSize;
1124  pInfoBuffer = ((uint8_t*)pFrameData->pInfoBuffer) + sizeof(HevcEncodedInfo);
1125  videoBufferSize = pFrameData->videoBufferSize - pFrameData->videoDataSize;
1126  infoBufferSize = sizeof(HevcEncodedInfo);
1127 
1128  // transfer an hevc field 2 from the codec including encoded information
1129  m31->EncTransfer(mEncodeChannel,
1130  pVideoBuffer,
1131  videoBufferSize,
1132  pInfoBuffer,
1133  infoBufferSize,
1134  pFrameData->videoDataSize2,
1135  pFrameData->infoDataSize2,
1136  pFrameData->lastFrame);
1137 
1138  // round the video size up
1139  pFrameData->videoDataSize2 = mHevcCommon->AlignDataBuffer(pVideoBuffer,
1140  videoBufferSize,
1141  pFrameData->videoDataSize2,
1142  8, 0xff);
1143  // round the info size up
1144  pFrameData->infoDataSize2 = mHevcCommon->AlignDataBuffer(pInfoBuffer,
1145  infoBufferSize,
1146  pFrameData->infoDataSize2,
1147  8, 0);
1148  }
1149  else
1150  {
1151  // transfer an hevc frame from the codec including encoded information
1152  m31->EncTransfer(mEncodeChannel,
1153  (uint8_t*)pFrameData->pVideoBuffer,
1154  pFrameData->videoBufferSize,
1155  (uint8_t*)pFrameData->pInfoBuffer,
1156  pFrameData->infoBufferSize,
1157  pFrameData->videoDataSize,
1158  pFrameData->infoDataSize,
1159  pFrameData->lastFrame);
1160 
1161  // round the video size up
1162  pFrameData->videoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pVideoBuffer,
1163  pFrameData->videoBufferSize,
1164  pFrameData->videoDataSize,
1165  8, 0xff);
1166  // round the info size up
1167  pFrameData->infoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pInfoBuffer,
1168  pFrameData->infoBufferSize,
1169  pFrameData->infoDataSize,
1170  8, 0);
1171  }
1172 
1173  if (pFrameData->lastFrame)
1174  {
1175  mLastFrameHevc = true;
1176  }
1177 
1178  mCodecHevcFrameCount++;
1179  }
1180 
1181  // release and recycle the buffer...
1182  mVideoHevcCircularBuffer.EndProduceNextBuffer ();
1183  }
1184  } // loop til quit signaled
1185 
1186  delete m31;
1187 } // EncTransferFrames
1188 
1189 
1190 // This is where we start the video file writer thread
1192 {
1193  mVideoFileThread.Attach(VideoFileThreadStatic, this);
1194  mVideoFileThread.SetPriority(AJA_ThreadPriority_High);
1195  mVideoFileThread.Start();
1196 
1197 } // StartVideoFileThread
1198 
1199 
1200 // The file writer static callback
1202 {
1203  (void) pThread;
1204 
1205  NTV2EncodeHEVCFileAc * pApp (reinterpret_cast <NTV2EncodeHEVCFileAc *> (pContext));
1206  pApp->VideoFileWorker ();
1207 
1208 } // VideoFileStatic
1209 
1210 
1212 {
1213  while (!mGlobalQuit)
1214  {
1215  // wait for the next codec hevc frame
1216  AVHevcDataBuffer * pFrameData (mVideoHevcCircularBuffer.StartConsumeNextBuffer ());
1217  if (pFrameData)
1218  {
1219  if (!mLastFrameVideo)
1220  {
1221  // write the frame / fields hevc to the output file
1222  mHevcCommon->WriteHevcData(pFrameData->pVideoBuffer, pFrameData->videoDataSize + pFrameData->videoDataSize2);
1223 
1224  if (mWithInfo)
1225  {
1226  // write the frame / field 1 encoded data to the output file
1227  mHevcCommon->WriteEncData(pFrameData->pInfoBuffer, pFrameData->infoDataSize);
1228  // write the field 2 encoded data to the output file
1229  mHevcCommon->WriteEncData(pFrameData->pInfoBuffer + sizeof(HevcEncodedInfo)/4, pFrameData->infoDataSize2);
1230  }
1231 
1232  if (pFrameData->lastFrame)
1233  {
1234  printf ( "Video file last frame number %d\n", mVideoFileFrameCount );
1235  mLastFrameVideo = true;
1236  }
1237 
1238  mVideoFileFrameCount++;
1239  }
1240 
1241  // release the hevc buffer
1242  mVideoHevcCircularBuffer.EndConsumeNextBuffer ();
1243  }
1244  } // loop til quit signaled
1245 
1246 } // VideoFileWorker
1247 
1248 
1249 // This is where we start the audio file writer thread
1251 {
1252  mAudioFileThread.Attach(AudioFileThreadStatic, this);
1253  mAudioFileThread.SetPriority(AJA_ThreadPriority_High);
1254  mAudioFileThread.Start();
1255 
1256 } // StartAudioFileThread
1257 
1258 
1259 // The file writer static callback
1261 {
1262  (void) pThread;
1263 
1264  NTV2EncodeHEVCFileAc * pApp (reinterpret_cast <NTV2EncodeHEVCFileAc *> (pContext));
1265  pApp->AudioFileWorker ();
1266 
1267 } // AudioFileStatic
1268 
1269 
1271 {
1272  while (!mGlobalQuit)
1273  {
1274  // wait for the next codec hevc frame
1275  AVHevcDataBuffer * pFrameData (mAudioInputCircularBuffer.StartConsumeNextBuffer ());
1276  if (pFrameData)
1277  {
1278  if (!mLastFrameAudio)
1279  {
1280  // write the audio samples to the output file
1281  mHevcCommon->WriteAiffData(pFrameData->pAudioBuffer, mNumAudioChannels, pFrameData->audioDataSize/mNumAudioChannels/4);
1282 
1283  if (pFrameData->lastFrame)
1284  {
1285  printf ( "Audio file last frame number %d\n", mAudioFileFrameCount );
1286  mLastFrameAudio = true;
1287  }
1288  }
1289 
1290  mAudioFileFrameCount++;
1291 
1292  // release the hevc buffer
1293  mAudioInputCircularBuffer.EndConsumeNextBuffer ();
1294  }
1295  } // loop til quit signaled
1296 
1297 } // AudioFileWorker
1298 
1299 
1301 
1302 
1304 {
1305  AUTOCIRCULATE_STATUS ACStatus;
1306  mDevice.AutoCirculateGetStatus (mInputChannel, ACStatus);
1307  outStatus.framesProcessed = ACStatus.GetProcessedFrameCount();
1308  outStatus.framesDropped = ACStatus.GetDroppedFrameCount();
1309  outStatus.bufferLevel = ACStatus.GetBufferLevel();
1310 } // GetStatus
1311 
1312 
1314 {
1315 
1316  // Override this function to use the frame data in the way your application requires
1317  memcpy(pDstFrame->pVideoBuffer, pSrcFrame->pVideoBuffer, pSrcFrame->videoDataSize);
1318  pDstFrame->videoDataSize = pSrcFrame->videoDataSize;
1319  pDstFrame->timeCodeDBB = pSrcFrame->timeCodeDBB;
1320  pDstFrame->timeCodeLow = pSrcFrame->timeCodeLow;
1321  pDstFrame->timeCodeHigh = pSrcFrame->timeCodeHigh;
1322  pDstFrame->lastFrame = pSrcFrame->lastFrame;
1323  if (mWithInfo)
1324  {
1325  memcpy(pDstFrame->pInfoBuffer, pSrcFrame->pInfoBuffer, pSrcFrame->infoDataSize + pSrcFrame->infoDataSize2);
1326  pDstFrame->infoDataSize = pSrcFrame->infoDataSize;
1327  pDstFrame->infoDataSize2 = pSrcFrame->infoDataSize2;
1328  }
1329 
1330  if (mWithAnc)
1331  {
1332  std::string timeString;
1333  mTimeCode.Set(frameNumber);
1334  mTimeCode.SetStdTimecodeForHfr(false);
1335  mTimeCode.QueryString(timeString, mTimeBase, false);
1336  mTimeCodeBurn.BurnTimeCode((char*)pDstFrame->pVideoBuffer, timeString.c_str(), 10);
1337  mTimeCode.SetRP188(pDstFrame->timeCodeDBB, pDstFrame->timeCodeLow, pDstFrame->timeCodeHigh, mTimeBase);
1338  mTimeCode.QueryString(timeString, mTimeBase, false);
1339  mTimeCodeBurn.BurnTimeCode((char*)pDstFrame->pVideoBuffer, timeString.c_str(), 20);
1340  }
1341 
1342  return AJA_STATUS_SUCCESS;
1343 
1344 } // ProcessVideoFrame
static void CodecRawThreadStatic(AJAThread *pThread, void *pContext)
This is the codec raw thread's static callback function that gets called when the thread starts...
virtual void RouteInputSignal(void)
Sets up device routing for capture.
NTV2FrameRate GetNTV2FrameRateFromVideoFormat(const NTV2VideoFormat inVideoFormat)
Definition: ntv2utils.cpp:3630
virtual NTV2VideoFormat GetSDIInputVideoFormat(NTV2Channel inChannel, bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given SDI input source.
virtual AJAStatus SetupAudio(void)
Sets up everything I need for capturing audio.
virtual void StartVideoProcessThread(void)
Start the video process thread.
static void VideoInputThreadStatic(AJAThread *pThread, void *pContext)
This is the video input thread&#39;s static callback function that gets called when the thread starts...
virtual void VideoInputWorker(void)
Repeatedly captures video frames using AutoCirculate and add them to the video input ring...
#define VIDEO_RING_SIZE
Specifies the device&#39;s internal clock.
Definition: ntv2enums.h:1459
virtual bool SetReference(const NTV2ReferenceSource inRefSource, const bool inKeepFramePulseSelect=(0))
Sets the device&#39;s clock reference source. See Video Output Clocking & Synchronization for more inform...
virtual bool ReleaseStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Releases exclusive use of the AJA device for the given process, permitting other processes to acquire...
void SetAJAFrameRate(AJA_FrameRate ajaFrameRate)
Definition: timebase.cpp:164
virtual bool AddConnection(const NTV2InputXptID inSignalInput, const NTV2OutputXptID inSignalOutput=NTV2_XptBlack)
Adds a connection between a widget&#39;s signal input (sink) and another widget&#39;s signal output (source)...
AJAStatus Add(FrameDataPtr pInFrameData)
Appends a new frame buffer to me, increasing my frame storage capacity by one frame.
uint32_t * pAudioBuffer
Pointer to host audio buffer.
I interrogate and control an AJA video/audio capture/playout device.
Definition: ntv2card.h:28
NTV2FrameBufferFormat
Identifies a particular video frame buffer pixel format. See Device Frame Buffer Formats for details...
Definition: ntv2enums.h:219
bool SetBuffers(ULWord *pInVideoBuffer, const ULWord inVideoByteCount, ULWord *pInAudioBuffer, const ULWord inAudioByteCount, ULWord *pInANCBuffer, const ULWord inANCByteCount, ULWord *pInANCF2Buffer=NULL, const ULWord inANCF2ByteCount=0)
Sets my buffers for use in a subsequent call to CNTV2Card::AutoCirculateTransfer. ...
virtual bool SetVideoFormat(const NTV2VideoFormat inVideoFormat, const bool inIsAJARetail=(!(0)), const bool inKeepVancSettings=(0), const NTV2Channel inChannel=NTV2_CHANNEL1)
Configures the AJA device to handle a specific video format.
AJAStatus
Definition: types.h:380
ULWord GetCapturedAudioByteCount(void) const
virtual AJAStatus ProcessVideoFrame(AVHevcDataBuffer *pSrcFrame, AVHevcDataBuffer *pDstFrame, uint32_t frameNumber)
Default do-nothing function for processing the captured frames.
void Set(uint32_t frame)
Definition: timecode.cpp:420
static uint64_t GetPid()
Definition: process.cpp:35
ULWord GetBufferLevel(void) const
virtual void StartVideoFileThread(void)
Start the video file writer thread.
#define AJA_FAILURE(_status_)
Definition: types.h:373
NTV2InputSource NTV2ChannelToInputSource(const NTV2Channel inChannel, const NTV2IOKinds inKinds=NTV2_IOKINDS_SDI)
Definition: ntv2utils.cpp:5132
Capture (input) mode, which writes into device SDRAM.
Definition: ntv2enums.h:1243
static void CodecHevcThreadStatic(AJAThread *pThread, void *pContext)
This is the codec hevc thread's static callback function that gets called when the thread starts...
uint32_t videoBufferSize
Size of host video buffer (bytes)
struct HevcEncodedInfo HevcEncodedInfo
virtual void SetupAutoCirculate(void)
Initializes AutoCirculate.
Declares the AJATime class.
virtual AJAStatus SetPriority(AJAThreadPriority priority)
Definition: thread.cpp:133
virtual bool GetSDIInput3GbPresent(bool &outValue, const NTV2Channel channel)
FrameDataPtr StartConsumeNextBuffer(void)
The thread that&#39;s responsible for processing incoming frames – the consumer – calls this function t...
Obtain audio samples from the audio that&#39;s embedded in the video HANC.
Definition: ntv2enums.h:2007
AJAStatus CreateAiffFile(const std::string &inFileName, uint32_t numChannels, uint32_t maxFrames, uint32_t bufferSize)
Definition: json.hpp:5362
virtual bool SetAudioRate(const NTV2AudioRate inRate, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2AudioRate for the given Audio System.
Definition: ntv2audio.cpp:205
virtual AJAStatus Start()
Definition: thread.cpp:91
virtual bool EnableInputInterrupt(const NTV2Channel channel=NTV2_CHANNEL1)
Allows the CNTV2Card instance to wait for and respond to input vertical blanking interrupts originati...
virtual bool SubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Causes me to be notified when an input vertical blanking interrupt occurs on the given input channel...
void EndConsumeNextBuffer(void)
The consumer thread calls this function to signal that it has finished processing the frame it obtain...
#define false
virtual bool AutoCirculateGetStatus(const NTV2Channel inChannel, AUTOCIRCULATE_STATUS &outStatus)
Returns the current AutoCirculate status for the given channel.
virtual void SetupHostBuffers(void)
Sets up my circular buffers.
virtual bool SetFrameBufferFormat(NTV2Channel inChannel, NTV2FrameBufferFormat inNewFormat, bool inIsAJARetail=(!(0)), NTV2HDRXferChars inXferChars=NTV2_VPID_TC_SDR_TV, NTV2HDRColorimetry inColorimetry=NTV2_VPID_Color_Rec709, NTV2HDRLuminance inLuminance=NTV2_VPID_Luminance_YCbCr)
Sets the frame buffer format for the given FrameStore on the AJA device.
NTV2EmbeddedAudioInput NTV2ChannelToEmbeddedAudioInput(const NTV2Channel inChannel)
Converts the given NTV2Channel value into its equivalent NTV2EmbeddedAudioInput.
Definition: ntv2utils.cpp:4861
NTV2Channel
These enum values are mostly used to identify a specific widget_framestore. They&#39;re also commonly use...
Definition: ntv2enums.h:1357
mVideoFormat
Definition: ntv2vcam.cpp:801
Instances of me capture frames in real time from a video signal provided to an input of an AJA device...
This class is a collection of widget input-to-output connections that can be applied all-at-once to a...
virtual class DeviceCapabilities & features(void)
Definition: ntv2card.h:148
virtual bool SetMultiFormatMode(const bool inEnable)
Enables or disables multi-format (per channel) device operation. If enabled, each device channel can ...
AJA_FrameRate GetAJAFrameRate(NTV2FrameRate frameRate)
AJAStatus DetermineInputFormat(NTV2VideoFormat sdiFormat, bool quad, NTV2VideoFormat &videoFormat)
int64_t FramesToMicroseconds(int64_t frames, bool round=false) const
Definition: timebase.cpp:223
#define AUDIO_RING_SIZE
uint32_t timeCodeHigh
Time code data high.
mPixelFormat
Definition: ntv2vcam.cpp:710
ULWord GetProcessedFrameCount(void) const
uint32_t audioBufferSize
Size of host audio buffer (bytes)
virtual void StartCodecRawThread(void)
Start the codec raw thread.
uint32_t infoBufferSize
Size of the host information buffer (bytes)
This struct replaces the old RP188_STRUCT.
virtual void VideoFileWorker(void)
Repeatedly removes hevc frame from the hevc ring and writes them to the hevc output file...
M31VideoPreset
Definition: ntv2m31enums.h:13
NTV2EncodeHEVCFileAc(const std::string inDeviceSpecifier="0", const NTV2Channel inChannel=NTV2_CHANNEL1, const M31VideoPreset inM31Preset=M31_FILE_1280X720_420_8_5994p, const NTV2FrameBufferFormat inPixelFormat=NTV2_FBF_10BIT_YCBCR_420PL2, const bool inQuadMode=(0), const uint32_t inAudioChannels=0, const bool inTimeCodeBurn=(0), const bool inInfoData=(0), const uint32_t inMaxFrames=0xffffffff)
Constructs me using the given settings.
virtual bool Active()
Definition: thread.cpp:116
virtual bool SetAudioSystemInputSource(const NTV2AudioSystem inAudioSystem, const NTV2AudioSource inAudioSource, const NTV2EmbeddedAudioInput inEmbeddedInput)
Sets the audio source for the given NTV2AudioSystem on the device.
Definition: ntv2audio.cpp:485
HevcPictureData pictureData
struct HevcPictureData HevcPictureData
Playout (output) mode, which reads from device SDRAM.
Definition: ntv2enums.h:1241
ULWord GetDisplayHeight(const NTV2VideoFormat videoFormat)
Definition: ntv2utils.cpp:4207
virtual bool SetMode(const NTV2Channel inChannel, const NTV2Mode inNewValue, const bool inIsRetail=(!(0)))
Determines if a given FrameStore on the AJA device will be used to capture or playout video...
AJAStatus CreateEncFile(const std::string &inFileName, uint32_t maxFrames)
Invalid or "not found".
Definition: ntv2enums.h:98
static bool GetFirstDeviceFromArgument(const std::string &inArgument, CNTV2Card &outDevice)
Rescans the host, and returns an open CNTV2Card instance for the AJA device that matches a command li...
virtual bool SetSDITransmitEnable(const NTV2Channel inChannel, const bool inEnable)
Sets the specified bidirectional SDI connector to act as an input or an output.
virtual bool AutoCirculateTransfer(const NTV2Channel inChannel, AUTOCIRCULATE_TRANSFER &transferInfo)
Transfers all or part of a frame as specified in the given AUTOCIRCULATE_TRANSFER object to/from the ...
virtual bool AcquireStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Reserves exclusive use of the AJA device for a given process, preventing other processes on the host ...
#define AJA_NULL
Definition: ajatypes.h:167
void SetAbortFlag(const bool *pAbortFlag)
Tells me the boolean variable I should monitor such that when it gets set to "true" will cause any th...
AJAStatus SetupHEVC(CNTV2m31 *pM31, M31VideoPreset preset, M31Channel encodeChannel, bool multiStream, bool withInfo)
ULWord GetVideoActiveSize(const NTV2VideoFormat inVideoFormat, const NTV2FrameBufferFormat inFBFormat, const NTV2VANCMode inVancMode=NTV2_VANCMODE_OFF)
Definition: ntv2utils.cpp:2858
virtual bool SetEveryFrameServices(const NTV2TaskMode m)
Definition: ntv2card.h:1195
virtual NTV2VideoFormat GetInputVideoFormat(const NTV2InputSource inVideoSource=NTV2_INPUTSOURCE_SDI1, const bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given input source. ...
AJA_PixelFormat GetAJAPixelFormat(NTV2FrameBufferFormat pixelFormat)
static void Sleep(const int32_t inMilliseconds)
Suspends execution of the current thread for a given number of milliseconds.
Definition: systemtime.cpp:284
void WriteEncData(void *pBuffer, uint32_t bufferSize)
2: OEM (recommended): device configured by client application(s) with some driver involvement...
uint32_t AlignDataBuffer(void *pBuffer, uint32_t bufferSize, uint32_t dataSize, uint32_t alignBytes, uint8_t fill)
static void VideoFileThreadStatic(AJAThread *pThread, void *pContext)
This is the video file writer thread&#39;s static callback function that gets called when the thread star...
virtual bool ApplySignalRoute(const CNTV2SignalRouter &inRouter, const bool inReplace=(0))
Applies the given routing table to the AJA device.
virtual AJAStatus SetupVideo(void)
Sets up everything I need for capturing video.
mInputChannel
Definition: ntv2vcam.cpp:1010
virtual void Quit(void)
Gracefully stops me from running.
void WriteAiffData(void *pBuffer, uint32_t numChannels, uint32_t numSamples)
ULWord GetDroppedFrameCount(void) const
Specifies channel or FrameStore 8 (or the 8th item).
Definition: ntv2enums.h:1366
struct HevcPictureInfo HevcPictureInfo
AJA_EXPORT bool RenderTimeCodeFont(AJA_PixelFormat pixelFormat, uint32_t numPixels, uint32_t numLines)
uint32_t timeCodeLow
Time code data low.
Specifies channel or FrameStore 2 (or the 2nd item).
Definition: ntv2enums.h:1360
virtual NTV2DeviceID GetDeviceID(void)
void WriteHevcData(void *pBuffer, uint32_t bufferSize)
virtual AJAStatus Run(void)
Runs me.
uint32_t audioDataSize
Size of audio data (bytes)
virtual bool AutoCirculateStop(const NTV2Channel inChannel, const bool inAbort=(0))
Stops AutoCirculate for the given channel, and releases the on-device frame buffers that were allocat...
Declares the AJAProcess class.
virtual bool UnsubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Unregisters me so I'm no longer notified when an input VBI is signaled on the given input channel...
This object specifies the information that will be transferred to or from the AJA device in the CNTV2...
virtual void CodecRawWorker(void)
Repeatedly removes video frames from the raw video ring and transfers them to the codec...
virtual bool SetAudioBufferSize(const NTV2AudioBufferSize inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Changes the size of the audio buffer that is used for a given Audio System in the AJA device...
Definition: ntv2audio.cpp:249
virtual bool GetEveryFrameServices(NTV2TaskMode &m)
Definition: ntv2card.h:1194
#define NTV2_AUDIOSIZE_MAX
void EndProduceNextBuffer(void)
The producer thread calls this function to signal that it has finished populating the frame it obtain...
This identifies the first Audio System.
Definition: ntv2enums.h:3897
virtual bool WaitForOutputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
static const ULWord kDemoAppSignature((((uint32_t)( 'D'))<< 24)|(((uint32_t)( 'E'))<< 16)|(((uint32_t)( 'M'))<< 8)|(((uint32_t)( 'O'))<< 0))
Declares numerous NTV2 utility functions.
uint32_t videoDataSize2
Size of field 2 video data (bytes)
virtual AJAStatus Attach(AJAThreadFunction *pThreadFunction, void *pUserContext)
Definition: thread.cpp:169
FrameDataPtr StartProduceNextBuffer(void)
The thread that&#39;s responsible for providing frames – the producer – calls this function to populate...
virtual void CodecHevcWorker(void)
Repeatedly transfers hevc frames from the codec and adds them to the hevc ring.
void SetStdTimecodeForHfr(bool bStdTc)
Definition: timecode.h:220
virtual bool SetSDIOutLevelAtoLevelBConversion(const UWord inOutputSpigot, const bool inEnable)
Enables or disables 3G level A to 3G level B conversion at the SDI output widget (assuming the AJA de...
This is returned from the CNTV2Card::AutoCirculateGetStatus function.
bool lastFrame
Indicates last captured frame.
uint16_t UWord
Definition: ajatypes.h:221
void QueryString(std::string &str, const AJATimeBase &timeBase, bool bDropFrame, bool bStdTcForHfr, AJATimecodeNotation notation=AJA_TIMECODE_LEGACY)
Definition: timecode.cpp:299
virtual void AudioFileWorker(void)
Repeatedly removes audio samples from the audio input ring and writes them to the audio output file...
1: Standard/Retail: device configured by AJA ControlPanel, service/daemon, and driver.
Specifies channel or FrameStore 1 (or the first item).
Definition: ntv2enums.h:1359
virtual bool AutoCirculateStart(const NTV2Channel inChannel, const ULWord64 inStartTime=0)
Starts AutoCirculating the specified channel that was previously initialized by CNTV2Card::AutoCircul...
Declares the NTV2EncodeHEVCFileAc class.
static void AudioFileThreadStatic(AJAThread *pThread, void *pContext)
This is the audio file writer thread&#39;s static callback function that gets called when the thread star...
virtual bool SetSDIInLevelBtoLevelAConversion(const NTV2ChannelSet &inSDIInputs, const bool inEnable)
Enables or disables 3G level B to 3G level A conversion at the SDI input(s).
uint32_t * pVideoBuffer
Pointer to host video buffer.
virtual void StartCodecHevcThread(void)
Start the codec hevc thread.
virtual M31VideoPreset GetCodecPreset(void)
Get the codec preset.
ULWord GetDisplayWidth(const NTV2VideoFormat videoFormat)
Definition: ntv2utils.cpp:4199
virtual void VideoProcessWorker(void)
Repeatedly removes video frames from the video input ring, calls a custom video process method and ad...
bool GetInputTimeCode(NTV2_RP188 &outTimeCode, const NTV2TCIndex inTCIndex=NTV2_TCINDEX_SDI1) const
Intended for capture, answers with a specific timecode captured in my acTransferStatus member&#39;s acFra...
This structure encapsulates the video and audio buffers used by the HEVC demo applications. The demo programs that employ producer/consumer threads use a fixed number of these buffers.
AJA_EXPORT bool BurnTimeCode(void *pBaseVideoAddress, const std::string &inTimeCodeStr, const uint32_t inYPercent)
virtual void GetStatus(AVHevcStatus &outStatus)
Provides status information about my input (capture) process.
NTV2AudioSystem NTV2ChannelToAudioSystem(const NTV2Channel inChannel)
Converts the given NTV2Channel value into its equivalent NTV2AudioSystem.
Definition: ntv2utils.cpp:4869
uint32_t * pInfoBuffer
Picture information (raw) or encode information (hevc)
bool HasAvailableInputFrame(void) const
Specifies channel or FrameStore 4 (or the 4th item).
Definition: ntv2enums.h:1362
Specifies channel or FrameStore 5 (or the 5th item).
Definition: ntv2enums.h:1363
bool IsRunning(void) const
uint32_t videoDataSize
Size of video data (bytes)
ULWord fLo
| BG 4 | Secs10 | BG 3 | Secs 1 | BG 2 | Frms10 | BG 1 | Frms 1 |
Specifies channel or FrameStore 6 (or the 6th item).
Definition: ntv2enums.h:1364
uint32_t infoDataSize2
Size of the field 2 information data (bytes)
virtual bool AutoCirculateInitForInput(const NTV2Channel inChannel, const UWord inFrameCount=7, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_INVALID, const ULWord inOptionFlags=0, const UByte inNumChannels=1, const UWord inStartFrameNumber=0, const UWord inEndFrameNumber=0)
Prepares for subsequent AutoCirculate ingest, designating a contiguous block of frame buffers on the ...
static void VideoProcessThreadStatic(AJAThread *pThread, void *pContext)
This is the video process thread&#39;s static callback function that gets called when the thread starts...
#define AUTOCIRCULATE_WITH_RP188
Use this to AutoCirculate with RP188.
Specifies channel or FrameStore 7 (or the 7th item).
Definition: ntv2enums.h:1365
bool SetAudioBuffer(ULWord *pInAudioBuffer, const ULWord inAudioByteCount)
Sets my audio buffer for use in a subsequent call to CNTV2Card::AutoCirculateTransfer.
8-Bit 4:2:0 2-Plane YCbCr
Definition: ntv2enums.h:253
Identifies the 1st SDI video input.
Definition: ntv2enums.h:1269
uint32_t infoDataSize
Size of the information data (bytes)
AJAStatus CreateHevcFile(const std::string &inFileName, uint32_t maxFrames)
virtual void StartAudioFileThread(void)
Start the audio file writer thread.
virtual bool SetNumberAudioChannels(const ULWord inNumChannels, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the number of audio channels to be concurrently captured or played for a given Audio System on t...
Definition: ntv2audio.cpp:146
ULWord fHi
| BG 8 | Hrs 10 | BG 7 | Hrs 1 | BG 6 | Mins10 | BG 5 | Mins 1 |
Declares device capability functions.
virtual bool WaitForInputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
This identifies the mode in which there are no VANC lines in the frame buffer.
Definition: ntv2enums.h:3799
Specifies channel or FrameStore 3 (or the 3rd item).
Definition: ntv2enums.h:1361
virtual void StartVideoInputThread(void)
Start the video input thread.
NTV2ReferenceSource NTV2InputSourceToReferenceSource(const NTV2InputSource inInputSource)
Converts a given NTV2InputSource to its equivalent NTV2ReferenceSource value.
Definition: ntv2utils.cpp:5023
Audio clock derived from the video input.
Definition: ntv2enums.h:1993
virtual bool SetEmbeddedAudioClock(const NTV2EmbeddedAudioClock inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2EmbeddedAudioClock setting for the given NTV2AudioSystem.
Definition: ntv2audio.cpp:417
virtual AJAStatus Init(void)
Initializes me and prepares me to Run.
uint32_t timeCodeDBB
Time code data dbb.
void SetRP188(const uint32_t inDBB, const uint32_t inLo, const uint32_t inHi, const AJATimeBase &inTimeBase)
Definition: timecode.cpp:570
virtual bool EnableChannel(const NTV2Channel inChannel)
Enables the given FrameStore.