AJA NTV2 SDK  18.0.0.2122
NTV2 SDK 18.0.0.2122
ntv2encodehevcvifac.cpp
Go to the documentation of this file.
1 /* SPDX-License-Identifier: MIT */
8 #include <stdio.h>
9 
10 #include "ntv2encodehevcvifac.h"
11 #include "ntv2utils.h"
12 #include "ntv2devicefeatures.h"
13 #include "ajabase/system/process.h"
15 
16 using namespace std;
17 
18 #define NTV2_AUDIOSIZE_MAX (401 * 1024)
19 
20 
// Constructor: records capture/encode configuration and puts every member into a
// known idle state.  No device I/O happens here — the device is opened in Init().
// @param inDeviceSpecifier  device name/index string passed to the device scanner
// @param inChannel          SDI input channel to capture from
// @param inPreset           M31 codec video preset (may select format/pixel format in Init)
// @param inPixelFormat      requested frame buffer format
// @param inQuadMode         true for UHD quad capture
// @param inAudioChannels    number of audio channels to write to file (0 disables audio)
// @param inTimeCodeBurn     true to capture SDI ancillary timecode (mWithAnc)
// @param inInfoData         true to generate per-picture encode info (mWithInfo)
// @param inMaxFrames        maximum number of frames to write before stopping
21 NTV2EncodeHEVCVifAc::NTV2EncodeHEVCVifAc (const string inDeviceSpecifier,
22  const NTV2Channel inChannel,
23  const M31VideoPreset inPreset,
24  const NTV2FrameBufferFormat inPixelFormat,
25  const bool inQuadMode,
26  const uint32_t inAudioChannels,
27  const bool inTimeCodeBurn,
28  const bool inInfoData,
29  const uint32_t inMaxFrames)
30 
31 : mVideoInputThread (AJAThread()),
32  mVideoProcessThread (AJAThread()),
33  mCodecRawThread (AJAThread()),
34  mCodecHevcThread (AJAThread()),
35  mVideoFileThread (AJAThread()),
36  mAudioFileThread (AJAThread()),
37  mM31 (AJA_NULL),
38  mHevcCommon (AJA_NULL),
39  mDeviceID (DEVICE_ID_NOTFOUND),
40  mDeviceSpecifier (inDeviceSpecifier),
41  mWithAudio (inAudioChannels != 0),
42  mInputChannel (inChannel),
43  mEncodeChannel (M31_CH0),
44  mPreset (inPreset),
45  mInputSource (NTV2_INPUTSOURCE_SDI1),
47  mPixelFormat (inPixelFormat),
48  mQuad (inQuadMode),
49  mInterlaced (false),
50  mMultiStream (false),
51  mWithInfo (inInfoData),
52  mWithAnc (inTimeCodeBurn),
53  mAudioSystem (NTV2_AUDIOSYSTEM_1),
54  mSavedTaskMode (NTV2_STANDARD_TASKS),
55  mNumAudioChannels (0),
56  mFileAudioChannels (inAudioChannels),
57  mMaxFrames (inMaxFrames),
58  mLastFrame (false),
59  mLastFrameInput (false),
60  mLastFrameRaw (false),
61  mLastFrameHevc (false),
62  mLastFrameVideo (false),
63  mLastFrameAudio (false),
64  mGlobalQuit (false),
65  mVideoInputFrameCount (0),
66  mVideoProcessFrameCount (0),
67  mCodecRawFrameCount (0),
68  mCodecHevcFrameCount (0),
69  mVideoFileFrameCount (0),
70  mAudioFileFrameCount (0)
71 {
// Zero the ring-buffer descriptor arrays so the destructor can safely test the
// pVideoBuffer/pInfoBuffer/pAudioBuffer pointers even if SetupHostBuffers never ran.
72  ::memset (mVideoInputBuffer, 0x0, sizeof (mVideoInputBuffer));
73  ::memset (mVideoRawBuffer, 0x0, sizeof (mVideoRawBuffer));
74  ::memset (mVideoHevcBuffer, 0x0, sizeof (mVideoHevcBuffer));
75  ::memset (mAudioInputBuffer, 0x0, sizeof (mAudioInputBuffer));
76 
77 } // constructor
78 
79 
// Destructor (signature line elided by this extraction — original line 80):
// stops all worker threads via Quit(), deletes the M31/HEVC helper objects,
// unsubscribes from vertical events, then frees every ring-buffer allocation.
81 {
82  // Stop my capture and consumer threads, then destroy them...
83  Quit ();
84 
// Helpers were allocated with new in Init(); delete and null them so a
// double-destruction path can't double-free.
85  if (mM31 != AJA_NULL)
86  {
87  delete mM31;
88  mM31 = AJA_NULL;
89  }
90 
91  if (mHevcCommon != AJA_NULL)
92  {
93  delete mHevcCommon;
94  mHevcCommon = AJA_NULL;
95  }
96 
97  // unsubscribe from input vertical event...
98  mDevice.UnsubscribeInputVerticalEvent (mInputChannel);
99 
// Free all three video rings.  Pointers were zeroed in the constructor, so
// unallocated slots are skipped safely.
100  // free all my buffers...
101  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++)
102  {
103  if (mVideoInputBuffer[bufferNdx].pVideoBuffer)
104  {
105  delete [] mVideoInputBuffer[bufferNdx].pVideoBuffer;
106  mVideoInputBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
107  }
108  if (mVideoInputBuffer[bufferNdx].pInfoBuffer)
109  {
110  delete [] mVideoInputBuffer[bufferNdx].pInfoBuffer;
111  mVideoInputBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
112  }
113  if (mVideoInputBuffer[bufferNdx].pAudioBuffer)
114  {
115  delete [] mVideoInputBuffer[bufferNdx].pAudioBuffer;
116  mVideoInputBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
117  }
118 
119  if (mVideoRawBuffer[bufferNdx].pVideoBuffer)
120  {
121  delete [] mVideoRawBuffer[bufferNdx].pVideoBuffer;
122  mVideoRawBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
123  }
124  if (mVideoRawBuffer[bufferNdx].pInfoBuffer)
125  {
126  delete [] mVideoRawBuffer[bufferNdx].pInfoBuffer;
127  mVideoRawBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
128  }
129  if (mVideoRawBuffer[bufferNdx].pAudioBuffer)
130  {
131  delete [] mVideoRawBuffer[bufferNdx].pAudioBuffer;
132  mVideoRawBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
133  }
134 
135  if (mVideoHevcBuffer[bufferNdx].pVideoBuffer)
136  {
137  delete [] mVideoHevcBuffer[bufferNdx].pVideoBuffer;
138  mVideoHevcBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
139  }
140  if (mVideoHevcBuffer[bufferNdx].pInfoBuffer)
141  {
142  delete [] mVideoHevcBuffer[bufferNdx].pInfoBuffer;
143  mVideoHevcBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
144  }
145  if (mVideoHevcBuffer[bufferNdx].pAudioBuffer)
146  {
147  delete [] mVideoHevcBuffer[bufferNdx].pAudioBuffer;
148  mVideoHevcBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
149  }
150  }
151 
// The audio ring is only allocated when audio capture was requested.
152  if (mWithAudio)
153  {
154  for (unsigned bufferNdx = 0; bufferNdx < AUDIO_RING_SIZE; bufferNdx++)
155  {
156  if (mAudioInputBuffer[bufferNdx].pVideoBuffer)
157  {
158  delete [] mAudioInputBuffer[bufferNdx].pVideoBuffer;
159  mAudioInputBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
160  }
161  if (mAudioInputBuffer[bufferNdx].pInfoBuffer)
162  {
163  delete [] mAudioInputBuffer[bufferNdx].pInfoBuffer;
164  mAudioInputBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
165  }
166  if (mAudioInputBuffer[bufferNdx].pAudioBuffer)
167  {
168  delete [] mAudioInputBuffer[bufferNdx].pAudioBuffer;
169  mAudioInputBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
170  }
171  }
172  }
173 
174 } // destructor
175 
176 
// Quit (signature line elided by this extraction — original line 177):
// performs an orderly shutdown — waits for the final frame to reach disk,
// stops the M31 encoder and video-in streams, joins all worker threads,
// stops capture, releases the board, and closes the output files.
178 {
179  if (mM31 && !mLastFrame && !mGlobalQuit)
180  {
181  // Set the last frame flag to start the quit process
182  mLastFrame = true;
183 
// Poll up to 300 * 10ms = 3 seconds for the file writers to flush the last frame.
184  // Wait for the last frame to be written to disk
185  int i;
186  int timeout = 300;
187  for (i = 0; i < timeout; i++)
188  {
189  if (mLastFrameVideo && (!mWithAudio || mLastFrameAudio)) break;
190  AJATime::Sleep (10);
191  }
192  if (i == timeout)
193  { cerr << "## ERROR: Wait for last frame timeout" << endl; }
194 
// Encoder stream must pass through ReadyToStop before Stop.
195  // Stop the encoder stream
196  if (!mM31->ChangeEHState(Hevc_EhState_ReadyToStop, mEncodeChannel))
197  { cerr << "## ERROR: ChangeEHState ready to stop failed" << endl; }
198 
199  if (!mM31->ChangeEHState(Hevc_EhState_Stop, mEncodeChannel))
200  { cerr << "## ERROR: ChangeEHState stop failed" << endl; }
201 
202  // stop the video input stream
203  if (!mM31->ChangeVInState(Hevc_VinState_Stop, mEncodeChannel))
204  { cerr << "## ERROR: ChangeVInState stop failed" << endl; }
205 
// Only the single-stream owner returns the codec to its Init main state;
// in multi-stream mode other channels may still be encoding.
206  if(!mMultiStream)
207  {
208  // Now go to the init state
209  if (!mM31->ChangeMainState(Hevc_MainState_Init, Hevc_EncodeMode_Single))
210  { cerr << "## ERROR: ChangeMainState to init failed" << endl; }
211  }
212  }
213 
// Signal every worker loop to exit, then spin-wait (10ms polls) for each
// thread to finish before touching shared state below.
214  // Stop the worker threads
215  mGlobalQuit = true;
216 
217  while (mVideoInputThread.Active())
218  AJATime::Sleep(10);
219 
220  while (mVideoProcessThread.Active())
221  AJATime::Sleep(10);
222 
223  while (mCodecRawThread.Active())
224  AJATime::Sleep(10);
225 
226  while (mCodecHevcThread.Active())
227  AJATime::Sleep(10);
228 
229  while (mVideoFileThread.Active())
230  AJATime::Sleep(10);
231 
232  while (mAudioFileThread.Active())
233  AJATime::Sleep(10);
234 
235  // Stop video capture
236  mDevice.SetMode(mInputChannel, NTV2_MODE_DISPLAY, false);
237 
// Undo the exclusive acquisition performed in Init() and restore the task
// mode saved there (single-stream mode only).
238  // Release board
239  if (!mMultiStream)
240  {
241  mDevice.ReleaseStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid()));
242  mDevice.SetEveryFrameServices (mSavedTaskMode); // Restore prior task mode
243  }
244 
// NOTE(review): mHevcCommon is dereferenced unconditionally here; Quit() is
// also called from the destructor, where mHevcCommon could still be AJA_NULL
// if Init() was never run — confirm Init() always precedes Quit().
245  // Close output files
246  mHevcCommon->CloseHevcFile ();
247  if (mWithInfo)
248  mHevcCommon->CloseEncFile ();
249  if (mWithAudio)
250  mHevcCommon->CloseAiffFile ();
251 
252 } // Quit
253 
254 
// Init (signature line elided by this extraction — original line 255):
// opens/acquires the device, verifies the M31 HEVC codec is present,
// resolves preset/format/channel configuration, sets up video, routing,
// audio, the codec, host ring buffers, and the output files.
// Returns AJA_STATUS_SUCCESS on success, or an error status on any failure.
256 {
257  AJAStatus status (AJA_STATUS_SUCCESS);
258 
259  // Open the device...
260  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, mDevice))
261  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return AJA_STATUS_OPEN; }
262 
// Single-stream mode takes exclusive ownership of the board and remembers the
// prior task mode so Quit() can restore it.
263  // Grab board in a shared environment
264  if (!mMultiStream)
265  {
266  if (!mDevice.AcquireStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid())))
267  return AJA_STATUS_BUSY; // Another app is using the device
268  mDevice.GetEveryFrameServices (mSavedTaskMode); // Save the current state before we change it
269  }
270  mDevice.SetEveryFrameServices (NTV2_OEM_TASKS); // Since this is an OEM demo, use the OEM service level
271 
272  mDeviceID = mDevice.GetDeviceID (); // Keep the device ID handy, as it's used frequently
273 
274  // Make sure this device has an M31
275  if (!mDevice.features().HasHEVCM31())
276  {
277  cerr << "## ERROR: M31 not found" << endl;
278  return AJA_STATUS_FAIL;
279  }
280 
281  // Allocate our M31 helper class and our HEVC common class
282  mM31 = new CNTV2m31 (&mDevice);
283  mHevcCommon = new CNTV2DemoHevcCommon ();
284 
285  if ((mM31 == AJA_NULL) || (mHevcCommon == AJA_NULL))
286  {
287  return AJA_STATUS_FAIL;
288  }
289 
// A valid preset overrides the caller-supplied pixel format / quad flags;
// otherwise an out-of-range pixel format falls back to 8-bit YCbCr.
290  // Preset specification takes precedence
291  if (mPreset < M31_NUMVIDEOPRESETS)
292  {
293  // This class only handles vif based presets so make sure they didn't pass in a file one
294  if (!CNTV2m31::IsPresetVIF(mPreset))
295  return AJA_STATUS_FAIL;
296 
297  // Get NTV2 formats to match codec preset
298  mVideoFormat = CNTV2m31::GetPresetVideoFormat(mPreset);
299  mPixelFormat = CNTV2m31::GetPresetFrameBufferFormat(mPreset);
300  mQuad = CNTV2m31::IsPresetUHD(mPreset);
301  mInterlaced = CNTV2m31::IsPresetInterlaced(mPreset);
302  }
303  // Otherwise use the pixel format and SDI input format
304  else if (mPixelFormat >= NTV2_FBF_NUMFRAMEBUFFERFORMATS)
305  {
306  mPixelFormat = NTV2_FBF_8BIT_YCBCR;
307  }
308 
// Channel mapping: quad forces ch1 in / ch5 out; otherwise selecting an
// explicit input channel (1-4) implies multi-stream operation and pairs it
// with output channel 5-8 and M31 encode channel 0-3.
309  // Quad mode must be channel 1
310  if (mQuad)
311  {
312  mInputChannel = NTV2_CHANNEL1;
313  mOutputChannel = NTV2_CHANNEL5;
314  mEncodeChannel = M31_CH0;
315  }
316  else
317  {
318  // When input channel specified we are multistream
319  switch (mInputChannel)
320  {
321  case NTV2_CHANNEL1: { mEncodeChannel = M31_CH0; mOutputChannel = NTV2_CHANNEL5; mMultiStream = true; break; }
322  case NTV2_CHANNEL2: { mEncodeChannel = M31_CH1; mOutputChannel = NTV2_CHANNEL6; mMultiStream = true; break; }
323  case NTV2_CHANNEL3: { mEncodeChannel = M31_CH2; mOutputChannel = NTV2_CHANNEL7; mMultiStream = true; break; }
324  case NTV2_CHANNEL4: { mEncodeChannel = M31_CH3; mOutputChannel = NTV2_CHANNEL8; mMultiStream = true; break; }
325  default: { mInputChannel = NTV2_CHANNEL1; mOutputChannel = NTV2_CHANNEL5; mEncodeChannel = M31_CH0; }
326  }
327  }
328 
329  // When video format is unknown determine from SDI input
330  if (mVideoFormat >= NTV2_MAX_NUM_VIDEO_FORMATS)
331  {
332  // Get SDI input format
333  status = mHevcCommon->DetermineInputFormat(mDevice.GetSDIInputVideoFormat(mInputChannel), mQuad, mVideoFormat);
334  if (AJA_FAILURE(status))
335  return status;
336 
337  // Get codec preset for input format
338  if(!CNTV2m31::ConvertVideoFormatToPreset(mVideoFormat, mPixelFormat, true, mPreset))
339  return AJA_STATUS_FAIL;
340 
341  mQuad = CNTV2m31::IsPresetUHD(mPreset);
342  mInterlaced = CNTV2m31::IsPresetInterlaced(mPreset);
343  }
344 
345  // Setup frame buffer
346  status = SetupVideo ();
347  if (AJA_FAILURE (status))
348  return status;
349 
350  // Route input signals to frame buffers
351  RouteInputSignal ();
352 
353  // Setup audio buffer
354  status = SetupAudio ();
355  if (AJA_FAILURE (status))
356  return status;
357 
// NOTE(review): original line 359 (hidden in this extraction) presumably
// performs the capture setup announced by the comment above — verify against
// the full source.
358  // Setup to capture video/audio/anc input
360 
361  // Setup codec
362  status = mHevcCommon->SetupHEVC (mM31, mPreset, mEncodeChannel, mMultiStream, mWithInfo);
363  if (AJA_FAILURE (status))
364  return status;
365 
366  // Setup the circular buffers
367  SetupHostBuffers ();
368 
// Output file names carry the 1-based input channel suffix in multi-stream
// mode so concurrent instances don't collide.
369  {
370  // Create encoded video output file
371  ostringstream fileName;
372  if (mMultiStream)
373  fileName << "raw_" << (mInputChannel+1) << ".hevc";
374  else
375  fileName << "raw.hevc";
376  status = mHevcCommon->CreateHevcFile (fileName.str(), mMaxFrames);
377  if (AJA_FAILURE (status))
378  return status;
379  }
380 
381  if (mWithInfo)
382  {
383  // Create encoded data output file
384  ostringstream fileName;
385  if (mMultiStream)
386  fileName << "raw_" << (mInputChannel+1) << ".txt";
387  else
388  fileName << "raw.txt";
389  status = mHevcCommon->CreateEncFile (fileName.str(), mMaxFrames);
390  if (AJA_FAILURE (status))
391  return status;
392  }
393 
394  if (mWithAudio)
395  {
396  // Create audio output file
397  ostringstream fileName;
398  if (mMultiStream)
399  fileName << "raw_" << (mInputChannel+1) << ".aiff";
400  else
401  fileName << "raw.aiff";
402  status = mHevcCommon->CreateAiffFile (fileName.str(), mFileAudioChannels, mMaxFrames, NTV2_AUDIOSIZE_MAX);
403  if (AJA_FAILURE (status))
404  return status;
405  }
406 
407  return AJA_STATUS_SUCCESS;
408 
409 } // Init
410 
411 
// GetCodecPreset (signature line elided by this extraction — original line 412):
// trivial accessor returning the M31 video preset currently in effect
// (may have been refined by Init() from the SDI input format).
413 {
414  return mPreset;
415 }
416 
417 
// SetupVideo (signature line elided by this extraction — original line 418):
// configures video format, frame buffer formats, capture/display modes and
// channel enables for the three operating modes (quad, multi-stream, single),
// sets the device reference, subscribes to input interrupts, and prepares the
// timecode-burn renderer.  Returns AJA_STATUS_SUCCESS or AJA_STATUS_FAIL.
419 {
420  // Setup frame buffer
421  if (mQuad)
422  {
// Quad (UHD) capture is only legal on channel 1; it owns the whole board,
// so multi-format is disabled and all 8 channels are configured.
423  if (mInputChannel != NTV2_CHANNEL1)
424  return AJA_STATUS_FAIL;
425 
426  // Disable multiformat
427  if (mDevice.features().CanDoMultiFormat())
428  mDevice.SetMultiFormatMode (false);
429 
430  // Set the board video format
431  mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL1);
432 
433  // Set frame buffer format
434  mDevice.SetFrameBufferFormat (NTV2_CHANNEL1, mPixelFormat);
435  mDevice.SetFrameBufferFormat (NTV2_CHANNEL2, mPixelFormat);
436  mDevice.SetFrameBufferFormat (NTV2_CHANNEL3, mPixelFormat);
437  mDevice.SetFrameBufferFormat (NTV2_CHANNEL4, mPixelFormat);
438  mDevice.SetFrameBufferFormat (NTV2_CHANNEL5, mPixelFormat);
439  mDevice.SetFrameBufferFormat (NTV2_CHANNEL6, mPixelFormat);
440  mDevice.SetFrameBufferFormat (NTV2_CHANNEL7, mPixelFormat);
441  mDevice.SetFrameBufferFormat (NTV2_CHANNEL8, mPixelFormat);
442 
// Channels 1-4 capture the four quad quadrants; 5-8 drive the output that
// feeds the M31.
443  // Set catpure mode
444  mDevice.SetMode (NTV2_CHANNEL1, NTV2_MODE_CAPTURE, false);
445  mDevice.SetMode (NTV2_CHANNEL2, NTV2_MODE_CAPTURE, false);
446  mDevice.SetMode (NTV2_CHANNEL3, NTV2_MODE_CAPTURE, false);
447  mDevice.SetMode (NTV2_CHANNEL4, NTV2_MODE_CAPTURE, false);
448  mDevice.SetMode (NTV2_CHANNEL5, NTV2_MODE_DISPLAY, false);
449  mDevice.SetMode (NTV2_CHANNEL6, NTV2_MODE_DISPLAY, false);
450  mDevice.SetMode (NTV2_CHANNEL7, NTV2_MODE_DISPLAY, false);
451  mDevice.SetMode (NTV2_CHANNEL8, NTV2_MODE_DISPLAY, false);
452 
453  // Enable frame buffers
454  mDevice.EnableChannel (NTV2_CHANNEL1);
455  mDevice.EnableChannel (NTV2_CHANNEL2);
456  mDevice.EnableChannel (NTV2_CHANNEL3);
457  mDevice.EnableChannel (NTV2_CHANNEL4);
458  mDevice.EnableChannel (NTV2_CHANNEL5);
459  mDevice.EnableChannel (NTV2_CHANNEL6);
460  mDevice.EnableChannel (NTV2_CHANNEL7);
461  mDevice.EnableChannel (NTV2_CHANNEL8);
462 
463  // Save input source
464  mInputSource = ::NTV2ChannelToInputSource (NTV2_CHANNEL1);
465  }
466  else if (mMultiStream)
467  {
// Multi-stream: each instance configures only its own input/output channel
// pair, so multi-format mode is enabled to let channels differ.
468  // Configure for multiformat
469  if (mDevice.features().CanDoMultiFormat())
470  mDevice.SetMultiFormatMode (true);
471 
472  // Set the channel video format for both input and output
473  mDevice.SetVideoFormat (mVideoFormat, false, false, mInputChannel);
474  mDevice.SetVideoFormat (mVideoFormat, false, false, mOutputChannel);
475 
476  // Set frame buffer format
477  mDevice.SetFrameBufferFormat (mInputChannel, mPixelFormat);
478  mDevice.SetFrameBufferFormat (mOutputChannel, mPixelFormat);
479 
480  // Set catpure mode
481  mDevice.SetMode (mInputChannel, NTV2_MODE_CAPTURE, false);
482  mDevice.SetMode (mOutputChannel, NTV2_MODE_DISPLAY, false);
483 
484  // Enable frame buffer
485  mDevice.EnableChannel (mInputChannel);
486  mDevice.EnableChannel (mOutputChannel);
487 
488  // Save input source
489  mInputSource = ::NTV2ChannelToInputSource (mInputChannel);
490  }
491  else
492  {
// Single-stream: board-wide setup; all channels are first forced to display
// mode, then only the input channel is switched back to capture.
493  // Disable multiformat mode
494  if (mDevice.features().CanDoMultiFormat())
495  mDevice.SetMultiFormatMode (false);
496 
497  // Set the board format for both the input and output channel
498  mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL1);
499  mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL5);
500 
501  // Set frame buffer format
502  mDevice.SetFrameBufferFormat (mInputChannel, mPixelFormat);
503  mDevice.SetFrameBufferFormat (mOutputChannel, mPixelFormat);
504 
505  // Set display mode
506  mDevice.SetMode (NTV2_CHANNEL1, NTV2_MODE_DISPLAY, false);
507  mDevice.SetMode (NTV2_CHANNEL2, NTV2_MODE_DISPLAY, false);
508  mDevice.SetMode (NTV2_CHANNEL3, NTV2_MODE_DISPLAY, false);
509  mDevice.SetMode (NTV2_CHANNEL4, NTV2_MODE_DISPLAY, false);
510  mDevice.SetMode (NTV2_CHANNEL5, NTV2_MODE_DISPLAY, false);
511  mDevice.SetMode (NTV2_CHANNEL6, NTV2_MODE_DISPLAY, false);
512  mDevice.SetMode (NTV2_CHANNEL7, NTV2_MODE_DISPLAY, false);
513  mDevice.SetMode (NTV2_CHANNEL8, NTV2_MODE_DISPLAY, false);
514 
515  // Set catpure mode
516  mDevice.SetMode (mInputChannel, NTV2_MODE_CAPTURE, false);
517 
518  // Enable frame buffer
519  mDevice.EnableChannel (mInputChannel);
520  mDevice.EnableChannel (mOutputChannel);
521 
522  // Save input source
523  mInputSource = ::NTV2ChannelToInputSource (mInputChannel);
524  }
525 
// NOTE(review): the multi-stream branch body (original line 529) is hidden in
// this extraction — only single-stream mode visibly sets the device reference.
526  // Set the device reference to the input...
527  if (mMultiStream)
528  {
530  }
531  else
532  {
533  mDevice.SetReference (::NTV2InputSourceToReferenceSource (mInputSource));
534  }
535 
536  // Enable and subscribe to the interrupts for the channel to be used...
537  mDevice.EnableInputInterrupt (mInputChannel);
538  mDevice.SubscribeInputVerticalEvent (mInputChannel);
539 
// Prepare the timecode-burn font/time base for the active format so the
// process thread can render burned-in timecode.
540  // Setup for timecode burn
541  mTimeBase.SetAJAFrameRate (mHevcCommon->GetAJAFrameRate(GetNTV2FrameRateFromVideoFormat (mVideoFormat)));
542  mTimeCodeBurn.RenderTimeCodeFont (mHevcCommon->GetAJAPixelFormat (mPixelFormat),
543  GetDisplayWidth (mVideoFormat),
544  GetDisplayHeight (mVideoFormat));
545 
546  return AJA_STATUS_SUCCESS;
547 
548 } // SetupVideo
549 
550 
// SetupAudio (signature line elided by this extraction — original line 551):
// selects the audio system, points it at the embedded SDI audio for the input
// channel, and configures channel count, sample rate and buffer size.
// Always returns AJA_STATUS_SUCCESS.
552 {
553  // In multiformat mode, base the audio system on the channel...
554  if (mMultiStream && mDevice.features().GetNumAudioSystems() > 1 && UWord(mInputChannel) < mDevice.features().GetNumAudioSystems())
555  mAudioSystem = ::NTV2ChannelToAudioSystem(mInputChannel);
556 
557  // Have the audio system capture audio from the designated device input (i.e., ch1 uses SDIIn1, ch2 uses SDIIn2, etc.)...
558  mDevice.SetAudioSystemInputSource (mAudioSystem, NTV2_AUDIO_EMBEDDED, ::NTV2ChannelToEmbeddedAudioInput (mInputChannel));
559 
// Capture the device's full channel complement; the file writer later keeps
// only mFileAudioChannels of them.
560  mNumAudioChannels = mDevice.features().GetMaxAudioChannels();
561  mDevice.SetNumberAudioChannels (mNumAudioChannels, mAudioSystem);
562  mDevice.SetAudioRate (NTV2_AUDIO_48K, mAudioSystem);
564 
565  // The on-device audio buffer should be 4MB to work best across all devices & platforms...
566  mDevice.SetAudioBufferSize (NTV2_AUDIO_BUFFER_BIG, mAudioSystem);
567 
568  return AJA_STATUS_SUCCESS;
569 
570 } // SetupAudio
571 
572 
// SetupHostBuffers (signature line elided by this extraction — original line 573):
// allocates the host-side ring buffers (video input, raw, HEVC, and optional
// audio) and registers each slot with its AJACircularBuffer, sharing
// mGlobalQuit as the abort flag so blocked producers/consumers unblock on quit.
574 {
// Buffer sizing: active video size for the negotiated format; info buffers
// hold two entries to cover both fields of an interlaced frame.
575  mVideoBufferSize = GetVideoActiveSize (mVideoFormat, mPixelFormat, NTV2_VANCMODE_OFF);
576  mPicInfoBufferSize = sizeof(HevcPictureInfo)*2;
577  mEncInfoBufferSize = sizeof(HevcEncodedInfo)*2;
578  mAudioBufferSize = NTV2_AUDIOSIZE_MAX;
579 
// Allocations are uint32_t arrays, hence the /4 on the byte sizes.
580  // video input ring
581  mVideoInputCircularBuffer.SetAbortFlag (&mGlobalQuit);
582  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
583  {
584  memset (&mVideoInputBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
585  mVideoInputBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
586  mVideoInputBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
587  mVideoInputBuffer[bufferNdx].videoDataSize = 0;
588  mVideoInputBuffer[bufferNdx].videoDataSize2 = 0;
589  mVideoInputBuffer[bufferNdx].pInfoBuffer = new uint32_t [mPicInfoBufferSize/4];
590  mVideoInputBuffer[bufferNdx].infoBufferSize = mPicInfoBufferSize;
591  mVideoInputBuffer[bufferNdx].infoDataSize = 0;
592  mVideoInputBuffer[bufferNdx].infoDataSize2 = 0;
593  mVideoInputCircularBuffer.Add (& mVideoInputBuffer[bufferNdx]);
594  }
595 
596  // video raw ring
597  mVideoRawCircularBuffer.SetAbortFlag (&mGlobalQuit);
598  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
599  {
600  memset (&mVideoRawBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
601  mVideoRawBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
602  mVideoRawBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
603  mVideoRawBuffer[bufferNdx].videoDataSize = 0;
604  mVideoRawBuffer[bufferNdx].videoDataSize2 = 0;
605  mVideoRawBuffer[bufferNdx].pInfoBuffer = new uint32_t [mPicInfoBufferSize/4];
606  mVideoRawBuffer[bufferNdx].infoBufferSize = mPicInfoBufferSize;
607  mVideoRawBuffer[bufferNdx].infoDataSize = 0;
608  mVideoRawBuffer[bufferNdx].infoDataSize2 = 0;
609  mVideoRawCircularBuffer.Add (& mVideoRawBuffer[bufferNdx]);
610  }
611 
// HEVC ring uses the encoded-info buffer size for its info slots.
612  // video hevc ring
613  mVideoHevcCircularBuffer.SetAbortFlag (&mGlobalQuit);
614  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
615  {
616  memset (&mVideoHevcBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
617  mVideoHevcBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
618  mVideoHevcBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
619  mVideoHevcBuffer[bufferNdx].videoDataSize = 0;
620  mVideoHevcBuffer[bufferNdx].videoDataSize2 = 0;
621  mVideoHevcBuffer[bufferNdx].pInfoBuffer = new uint32_t [mEncInfoBufferSize/4];
622  mVideoHevcBuffer[bufferNdx].infoBufferSize = mEncInfoBufferSize;
623  mVideoHevcBuffer[bufferNdx].infoDataSize = 0;
624  mVideoHevcBuffer[bufferNdx].infoDataSize2 = 0;
625  mVideoHevcCircularBuffer.Add (& mVideoHevcBuffer[bufferNdx]);
626  }
627 
628  if (mWithAudio)
629  {
630  // audio input ring
631  mAudioInputCircularBuffer.SetAbortFlag (&mGlobalQuit);
632  for (unsigned bufferNdx = 0; bufferNdx < AUDIO_RING_SIZE; bufferNdx++ )
633  {
634  memset (&mAudioInputBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
635  mAudioInputBuffer[bufferNdx].pAudioBuffer = new uint32_t [mAudioBufferSize/4];
636  mAudioInputBuffer[bufferNdx].audioBufferSize = mAudioBufferSize;
637  mAudioInputBuffer[bufferNdx].audioDataSize = 0;
638  mAudioInputCircularBuffer.Add (& mAudioInputBuffer[bufferNdx]);
639  }
640  }
641 
642 } // SetupHostBuffers
643 
644 
// RouteInputSignal (signature line elided by this extraction — original line 645):
// sets SDI direction (1-4 receive, 5-8 transmit), handles 3Gb->3Ga level
// conversion on the input, and (in single-stream mode) clears existing routing
// before establishing the crosspoint connections.
646 {
647  // setup sdi io
648  mDevice.SetSDITransmitEnable (NTV2_CHANNEL1, false);
649  mDevice.SetSDITransmitEnable (NTV2_CHANNEL2, false);
650  mDevice.SetSDITransmitEnable (NTV2_CHANNEL3, false);
651  mDevice.SetSDITransmitEnable (NTV2_CHANNEL4, false);
652  mDevice.SetSDITransmitEnable (NTV2_CHANNEL5, true);
653  mDevice.SetSDITransmitEnable (NTV2_CHANNEL6, true);
654  mDevice.SetSDITransmitEnable (NTV2_CHANNEL7, true);
655  mDevice.SetSDITransmitEnable (NTV2_CHANNEL8, true);
656 
657  // Give the device some time to lock to the input signal...
658  mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);
659 
660  // When input is 3Gb convert to 3Ga for capture (no RGB support?)
661  bool is3Gb = false;
662  mDevice.GetSDIInput3GbPresent (is3Gb, mInputChannel);
663 
// NOTE(review): the quad-mode conversion body (original lines 665-673) is
// hidden in this extraction.
664  if (mQuad)
665  {
674  }
675  else
676  {
677  mDevice.SetSDIInLevelBtoLevelAConversion (mInputChannel, is3Gb);
678  mDevice.SetSDIOutLevelAtoLevelBConversion (mOutputChannel, false);
679  }
680 
// NOTE(review): the actual crosspoint Connect() calls (original lines 683-691)
// are hidden in this extraction; only the ClearRouting guard is visible.
681  // Signal routing...
682  if (!mMultiStream)
683  mDevice.ClearRouting ();
692 
693  // Give the device some time to lock to the input signal...
694  mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);
695 
696 } // RouteInputSignal
697 
698 
// SetupInputAutoCirculate (signature line elided by this extraction — original
// line 699): initializes AutoCirculate — 16 device frames for capture on the
// input channel (with RP188 timecode and optional audio), and 8 frames for the
// output channel that drives the M31 in VIF mode.
700 {
701  // Tell capture AutoCirculate to use 16 frame buffers on the device...
702  mDevice.AutoCirculateStop (mInputChannel);
703  mDevice.AutoCirculateInitForInput (mInputChannel, 16, // Frames to circulate
704  mWithAudio ? mAudioSystem : NTV2_AUDIOSYSTEM_INVALID, // Which audio system (if any)?
705  AUTOCIRCULATE_WITH_RP188); // With RP188?
706 
707  // Driving output using AutoCirculate which is routed into the M31
708  mDevice.AutoCirculateStop (mOutputChannel);
709  mDevice.AutoCirculateInitForOutput (mOutputChannel, 8); // Frames to circulate
710 } // SetupInputAutoCirculate
711 
712 
// Run (signature line elided by this extraction — original line 713):
// warns if no input signal is detected, then launches the worker threads.
// Returns AJA_STATUS_SUCCESS.
714 {
715  if (mDevice.GetInputVideoFormat (mInputSource) == NTV2_FORMAT_UNKNOWN)
716  cout << endl << "## WARNING: No video signal present on the input connector" << endl;
717 
// NOTE(review): the thread-start calls (original lines 719-727) are hidden in
// this extraction; only the audio-conditional block's braces remain visible.
718  // Start the playout and capture threads...
724  if (mWithAudio)
725  {
727  }
728 
729  return AJA_STATUS_SUCCESS;
730 
731 } // Run
732 
733 
// Starts the video-input worker: attaches the static trampoline with `this`
// as context, raises priority to high, and starts the thread.
// (Signature line elided by this extraction — original line 735.)
734 // This is where we will start the video input thread
736 {
737  mVideoInputThread.Attach(VideoInputThreadStatic, this);
738  mVideoInputThread.SetPriority(AJA_ThreadPriority_High);
739  mVideoInputThread.Start();
740 
741 } // StartVideoInputThread
742 
743 
// Static trampoline for the video-input thread: recovers the instance from the
// opaque context pointer and invokes its worker loop.
// (Signature line elided by this extraction — original line 745.)
744 // The video input thread static callback
746 {
747  (void) pThread;
748 
749  NTV2EncodeHEVCVifAc * pApp (reinterpret_cast <NTV2EncodeHEVCVifAc *> (pContext));
750  pApp->VideoInputWorker ();
751 
752 } // VideoInputThreadStatic
753 
754 
// VideoInputWorker (signature line elided by this extraction — original line 755):
// capture loop.  Opens its own CNTV2Card handle (per-thread device access),
// runs input AutoCirculate, and for each captured frame fills a video-input
// ring buffer (plus optional audio buffer, RP188 timecode, and per-picture
// encode info) until mGlobalQuit is set.
756 {
757  CNTV2Card ntv2Device;
758  AUTOCIRCULATE_TRANSFER inputXfer;
759 
760  // Open the device...
761  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
762  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
763 
764  ntv2Device.SubscribeInputVerticalEvent (mInputChannel);
765 
766  // start AutoCirculate running...
767  ntv2Device.AutoCirculateStart (mInputChannel);
768 
769  while (!mGlobalQuit)
770  {
771  AUTOCIRCULATE_STATUS acStatus;
772  ntv2Device.AutoCirculateGetStatus (mInputChannel, acStatus);
773 
774  // wait for captured frame
775  if (acStatus.IsRunning() && acStatus.HasAvailableInputFrame())
776  {
777  // At this point, there's at least one fully-formed frame available in the device's
778  // frame buffer to transfer to the host. Reserve an AvaDataBuffer to "produce", and
779  // use it in the next transfer from the device...
780  AVHevcDataBuffer * pVideoData (mVideoInputCircularBuffer.StartProduceNextBuffer ());
781  if (pVideoData)
782  {
783  // setup buffer pointers for transfer
784  inputXfer.SetBuffers (pVideoData->pVideoBuffer, pVideoData->videoBufferSize, AJA_NULL, 0, AJA_NULL, 0);
785 
// Audio rides in a separate ring; only attach an audio buffer to the transfer
// when a slot could be reserved.
786  AVHevcDataBuffer * pAudioData = 0;
787  if (mWithAudio)
788  {
789  pAudioData = mAudioInputCircularBuffer.StartProduceNextBuffer ();
790  if (pAudioData)
791  {
792  inputXfer.SetAudioBuffer (pAudioData->pAudioBuffer, pAudioData->audioBufferSize);
793  }
794  }
795 
796  // do the transfer from the device into our host AvaDataBuffer...
797  ntv2Device.AutoCirculateTransfer (mInputChannel, inputXfer);
798 
// Propagate the shutdown marker so downstream consumers know when to stop.
799  // get the video data size
800  pVideoData->videoDataSize = pVideoData->videoBufferSize;
801  pVideoData->audioDataSize = 0;
802  pVideoData->lastFrame = mLastFrame;
803 
804  if (mWithAudio && pAudioData)
805  {
806  // get the audio data size
807  pAudioData->audioDataSize = inputXfer.GetCapturedAudioByteCount();
808  pAudioData->lastFrame = mLastFrame;
809  }
810 
811  if (mWithAnc)
812  {
813  // get the sdi input anc data
814  NTV2_RP188 timecode;
815  inputXfer.GetInputTimeCode (timecode);
816  pVideoData->timeCodeDBB = timecode.fDBB;
817  pVideoData->timeCodeLow = timecode.fLo;
818  pVideoData->timeCodeHigh = timecode.fHi;
819  }
820 
// Build the HevcPictureInfo the M31 expects: serial number, 90kHz PTS and
// 1-based picture number; interlaced sources get a second entry for field 2.
821  if (mWithInfo)
822  {
823  // get picture and additional data pointers
824  HevcPictureInfo * pInfo = (HevcPictureInfo*)pVideoData->pInfoBuffer;
825  HevcPictureData * pPicData = &pInfo->pictureData;
826 
827  // initialize info buffer to 0
828  memset(pInfo, 0, pVideoData->infoBufferSize);
829 
830  // calculate pts based on 90 Khz clock tick
831  uint64_t pts = (uint64_t)mTimeBase.FramesToMicroseconds(mVideoInputFrameCount)*90000/1000000;
832 
833  // set serial number, pts and picture number
834  pPicData->serialNumber = mVideoInputFrameCount; // can be anything
835  pPicData->ptsValueLow = (uint32_t)(pts & 0xffffffff); // (frame 5720000@60 test roll over)
836  pPicData->ptsValueHigh = (uint32_t)((pts >> 32) & 0x1); // only use 1 bit
837  pPicData->pictureNumber = mVideoInputFrameCount + 1; // must count starting with 1
838 
839  // set info data size
840  pVideoData->infoDataSize = sizeof(HevcPictureData);
841 
842  if(mInterlaced)
843  {
843  // Interlaced: two fields per frame, so serial/picture numbers double
844  pPicData->serialNumber = mVideoInputFrameCount*2;
845  pPicData->pictureNumber = mVideoInputFrameCount*2 + 1;
846 
// pInfoBuffer is uint32_t*, hence the /4 when advancing by one HevcPictureInfo.
847  // get picture and additional data pointers
848  pInfo = (HevcPictureInfo*)(pVideoData->pInfoBuffer + sizeof(HevcPictureInfo)/4);
849  pPicData = &pInfo->pictureData;
850 
851  // add half a frame time to pts
852  pts = pts + (uint64_t)mTimeBase.FramesToMicroseconds(1)*90000/1000000/2;
853 
854  // set serial number, pts and picture number
855  pPicData->serialNumber = mVideoInputFrameCount*2 + 1;
856  pPicData->ptsValueLow = (uint32_t)(pts & 0xffffffff);
857  pPicData->ptsValueHigh = (uint32_t)((pts >> 32) & 0x1);
858  pPicData->pictureNumber = mVideoInputFrameCount*2 + 2;
859 
860  // set info data size
861  pVideoData->infoDataSize2 = sizeof(HevcPictureData);
862  }
863  }
864 
865  if(pVideoData->lastFrame && !mLastFrameInput)
866  {
867  printf ( "\nCapture last frame number %d\n", mVideoInputFrameCount );
868  mLastFrameInput = true;
869  }
870 
871  mVideoInputFrameCount++;
872 
873  if (mWithAudio && pAudioData)
874  {
875  mAudioInputCircularBuffer.EndProduceNextBuffer ();
876  }
877 
878  // signal that we're done "producing" the frame, making it available for future "consumption"...
879  mVideoInputCircularBuffer.EndProduceNextBuffer ();
880  } // if A/C running and frame(s) are available for transfer
881  }
882  else
883  {
884  // Either AutoCirculate is not running, or there were no frames available on the device to transfer.
885  // Rather than waste CPU cycles spinning, waiting until a frame becomes available, it's far more
886  // efficient to wait for the next input vertical interrupt event to get signaled...
887  ntv2Device.WaitForInputVerticalInterrupt (mInputChannel);
888  }
889  } // loop til quit signaled
890 
891  // Stop AutoCirculate...
892  ntv2Device.AutoCirculateStop (mInputChannel);
893 
894  ntv2Device.UnsubscribeInputVerticalEvent (mInputChannel);
895 
896 } // VideoInputWorker
897 
898 
// Starts the video-process worker: attaches the static trampoline with `this`
// as context, raises priority to high, and starts the thread.
// (Signature line elided by this extraction — original line 900.)
899 // This is where we start the video process thread
901 {
902  mVideoProcessThread.Attach(VideoProcessThreadStatic, this);
903  mVideoProcessThread.SetPriority(AJA_ThreadPriority_High);
904  mVideoProcessThread.Start();
905 
906 } // StartVideoProcessThread
907 
908 
// Static trampoline for the video-process thread: recovers the instance from
// the opaque context pointer and invokes its worker loop.
// (Signature line elided by this extraction — original line 910.)
909 // The video process static callback
911 {
912  (void) pThread;
913 
914  NTV2EncodeHEVCVifAc * pApp (reinterpret_cast <NTV2EncodeHEVCVifAc *> (pContext));
915  pApp->VideoProcessWorker ();
916 
917 } // VideoProcessThreadStatic
918 
919 
// VideoProcessWorker (signature line elided by this extraction — original line
// 920): pipeline stage that consumes frames from the video-input ring,
// processes each one (ProcessVideoFrame, e.g. timecode burn — defined
// elsewhere), and produces the result into the video-raw ring until quit.
921 {
922  while (!mGlobalQuit)
923  {
// Both ring buffers block until a slot is available or the abort flag
// (mGlobalQuit) is raised, so a null return means shutdown.
924  // wait for the next video input buffer
925  AVHevcDataBuffer * pSrcFrameData (mVideoInputCircularBuffer.StartConsumeNextBuffer ());
926  if (pSrcFrameData)
927  {
928  // wait for the next video raw buffer
929  AVHevcDataBuffer * pDstFrameData (mVideoRawCircularBuffer.StartProduceNextBuffer ());
930  if (pDstFrameData)
931  {
932  // do something useful with the frame data...
933  ProcessVideoFrame(pSrcFrameData, pDstFrameData, mVideoProcessFrameCount);
934 
935  mVideoProcessFrameCount++;
936 
937  // release the video raw buffer
938  mVideoRawCircularBuffer.EndProduceNextBuffer ();
939  }
940 
941  // release the video input buffer
942  mVideoInputCircularBuffer.EndConsumeNextBuffer ();
943 
944  }
945  } // loop til quit signaled
946 
947 } // VideoProcessWorker
948 
949 
// Starts the codec-raw worker: attaches the static trampoline with `this`
// as context, raises priority to high, and starts the thread.
// (Signature line elided by this extraction — original line 951.)
950 // This is where we start the codec raw thread
952 {
953  mCodecRawThread.Attach(CodecRawThreadStatic, this);
954  mCodecRawThread.SetPriority(AJA_ThreadPriority_High);
955  mCodecRawThread.Start();
956 
957 } // StartCodecRawThread
958 
959 
960 // The codec raw static callback
961 void NTV2EncodeHEVCVifAc::CodecRawThreadStatic (AJAThread * pThread, void * pContext)
962 {
963  (void) pThread;
964 
965  NTV2EncodeHEVCVifAc * pApp (reinterpret_cast <NTV2EncodeHEVCVifAc *> (pContext));
966  pApp->CodecRawWorker ();
967 
968 } // CodecRawThreadStatic
969 
970 
// CodecRawWorker (signature line elided by this extraction — original line 971):
// consumes processed frames from the video-raw ring and transfers them to the
// output channel's AutoCirculate, which is routed into the M31 in VIF mode.
// AutoCirculate is started only after 3 frames have been queued (pre-roll).
972 {
973  CNTV2Card ntv2Device;
974  AUTOCIRCULATE_TRANSFER outputXfer;
975 
976  // Open the device...
977  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
978  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
979 
980  while (!mGlobalQuit)
981  {
982  // wait for the next raw video frame
983  AVHevcDataBuffer * pFrameData (mVideoRawCircularBuffer.StartConsumeNextBuffer ());
984  if (pFrameData)
985  {
// Once the last frame has been seen, remaining buffers are drained without
// being transferred.
986  if (!mLastFrameRaw)
987  {
988  // In vif mode we are just driving the AutoCirculate output which is routed into the M31, no need
989  // to DMA frames using RawTransfer
990  outputXfer.SetBuffers (pFrameData->pVideoBuffer, pFrameData->videoBufferSize, AJA_NULL, 0, AJA_NULL, 0);
991 
992  ntv2Device.AutoCirculateTransfer (mOutputChannel, outputXfer);
993 
994  if (mCodecRawFrameCount == 3)
995  {
// NOTE(review): this uses the member mDevice while the rest of this worker
// uses the thread-local ntv2Device handle — both address the same physical
// device, but confirm the inconsistency is intentional.
996  // start AutoCirculate running...
997  mDevice.AutoCirculateStart (mOutputChannel);
998  }
999 
1000  if (pFrameData->lastFrame)
1001  {
1002  mLastFrameRaw = true;
1003  }
1004 
1005  mCodecRawFrameCount++;
1006  }
1007 
1008  // release the raw video frame
1009  mVideoRawCircularBuffer.EndConsumeNextBuffer ();
1010  }
1011  } // loop til quit signaled
1012 
1013  // Stop AutoCirculate...
1014  ntv2Device.AutoCirculateStop (mOutputChannel);
1015 
1016 } // CodecRawWorker
1017 
1018 
1019 // This is where we will start the codec hevc thread
1021 {
1022  mCodecHevcThread.Attach(CodecHevcThreadStatic, this);
1023  mCodecHevcThread.SetPriority(AJA_ThreadPriority_High);
1024  mCodecHevcThread.Start();
1025 
1026 } // StartCodecHevcThread
1027 
1028 
1029 // The codec hevc static callback
1031 {
1032  (void) pThread;
1033 
1034  NTV2EncodeHEVCVifAc * pApp (reinterpret_cast <NTV2EncodeHEVCVifAc *> (pContext));
1035  pApp->CodecHevcWorker ();
1036 
1037 } // CodecHevcThreadStatic
1038 
1039 
1041 {
1042  CNTV2Card ntv2Device;
1043  CNTV2m31 * m31;
1044 
1045  // Open the device...
1046  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
1047  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
1048 
1049  // Allocate our M31 helper class and our HEVC common class
1050  m31 = new CNTV2m31 (&ntv2Device);
1051 
1052  while (!mGlobalQuit)
1053  {
1054  // wait for the next hevc frame
1055  AVHevcDataBuffer * pFrameData (mVideoHevcCircularBuffer.StartProduceNextBuffer ());
1056  if (pFrameData)
1057  {
1058  if (!mLastFrameHevc)
1059  {
1060  if (mInterlaced)
1061  {
1062  // get field 1 video and info buffer and size
1063  uint8_t* pVideoBuffer = (uint8_t*)pFrameData->pVideoBuffer;
1064  uint8_t* pInfoBuffer = (uint8_t*)pFrameData->pInfoBuffer;
1065  uint32_t videoBufferSize = pFrameData->videoBufferSize;
1066  uint32_t infoBufferSize = sizeof(HevcEncodedInfo);
1067 
1068  // transfer an hevc field 1 from the codec including encoded information
1069  m31->EncTransfer(mEncodeChannel,
1070  pVideoBuffer,
1071  videoBufferSize,
1072  pInfoBuffer,
1073  infoBufferSize,
1074  pFrameData->videoDataSize,
1075  pFrameData->infoDataSize,
1076  pFrameData->lastFrame);
1077 
1078  // round the video size up
1079  pFrameData->videoDataSize = mHevcCommon->AlignDataBuffer(pVideoBuffer,
1080  videoBufferSize,
1081  pFrameData->videoDataSize,
1082  8, 0xff);
1083  // round the info size up
1084  pFrameData->infoDataSize = mHevcCommon->AlignDataBuffer(pInfoBuffer,
1085  infoBufferSize,
1086  pFrameData->infoDataSize,
1087  8, 0);
1088 
1089  // get field 2 video and info buffer and size
1090  pVideoBuffer = ((uint8_t*)pFrameData->pVideoBuffer) + pFrameData->videoDataSize;
1091  pInfoBuffer = ((uint8_t*)pFrameData->pInfoBuffer) + sizeof(HevcEncodedInfo);
1092  videoBufferSize = pFrameData->videoBufferSize - pFrameData->videoDataSize;
1093  infoBufferSize = sizeof(HevcEncodedInfo);
1094 
1095  // transfer an hevc field 2 from the codec including encoded information
1096  m31->EncTransfer(mEncodeChannel,
1097  pVideoBuffer,
1098  videoBufferSize,
1099  pInfoBuffer,
1100  infoBufferSize,
1101  pFrameData->videoDataSize2,
1102  pFrameData->infoDataSize2,
1103  pFrameData->lastFrame);
1104 
1105  // round the video size up
1106  pFrameData->videoDataSize2 = mHevcCommon->AlignDataBuffer(pVideoBuffer,
1107  videoBufferSize,
1108  pFrameData->videoDataSize2,
1109  8, 0xff);
1110  // round the info size up
1111  pFrameData->infoDataSize2 = mHevcCommon->AlignDataBuffer(pInfoBuffer,
1112  infoBufferSize,
1113  pFrameData->infoDataSize2,
1114  8, 0);
1115  }
1116  else
1117  {
1118  // transfer an hevc frame from the codec including encoded information
1119  m31->EncTransfer(mEncodeChannel,
1120  (uint8_t*)pFrameData->pVideoBuffer,
1121  pFrameData->videoBufferSize,
1122  (uint8_t*)pFrameData->pInfoBuffer,
1123  pFrameData->infoBufferSize,
1124  pFrameData->videoDataSize,
1125  pFrameData->infoDataSize,
1126  pFrameData->lastFrame);
1127 
1128  // round the video size up
1129  pFrameData->videoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pVideoBuffer,
1130  pFrameData->videoBufferSize,
1131  pFrameData->videoDataSize,
1132  8, 0xff);
1133  // round the info size up
1134  pFrameData->infoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pInfoBuffer,
1135  pFrameData->infoBufferSize,
1136  pFrameData->infoDataSize,
1137  8, 0);
1138  }
1139 
1140  if (pFrameData->lastFrame)
1141  {
1142  mLastFrameHevc = true;
1143  }
1144 
1145  mCodecHevcFrameCount++;
1146  }
1147 
1148  // release and recycle the buffer...
1149  mVideoHevcCircularBuffer.EndProduceNextBuffer ();
1150  }
1151  } // loop til quit signaled
1152 
1153  delete m31;
1154 } // EncTransferFrames
1155 
1156 
1157 // This is where we start the video file writer thread
1159 {
1160  mVideoFileThread.Attach(VideoFileThreadStatic, this);
1161  mVideoFileThread.SetPriority(AJA_ThreadPriority_High);
1162  mVideoFileThread.Start();
1163 
1164 } // StartVideoFileThread
1165 
1166 
1167 // The file writer static callback
1169 {
1170  (void) pThread;
1171 
1172  NTV2EncodeHEVCVifAc * pApp (reinterpret_cast <NTV2EncodeHEVCVifAc *> (pContext));
1173  pApp->VideoFileWorker ();
1174 
1175 } // VideoFileStatic
1176 
1177 
1179 {
1180  while (!mGlobalQuit)
1181  {
1182  // wait for the next codec hevc frame
1183  AVHevcDataBuffer * pFrameData (mVideoHevcCircularBuffer.StartConsumeNextBuffer ());
1184  if (pFrameData)
1185  {
1186  if (!mLastFrameVideo)
1187  {
1188  // write the frame / fields hevc to the output file
1189  mHevcCommon->WriteHevcData(pFrameData->pVideoBuffer, pFrameData->videoDataSize + pFrameData->videoDataSize2);
1190 
1191  if (mWithInfo)
1192  {
1193  // write the frame / field 1 encoded data to the output file
1194  mHevcCommon->WriteEncData(pFrameData->pInfoBuffer, pFrameData->infoDataSize);
1195  // write the field 2 encoded data to the output file
1196  mHevcCommon->WriteEncData(pFrameData->pInfoBuffer + sizeof(HevcEncodedInfo)/4, pFrameData->infoDataSize2);
1197  }
1198 
1199  if (pFrameData->lastFrame)
1200  {
1201  printf ( "Video file last frame number %d\n", mVideoFileFrameCount );
1202  mLastFrameVideo = true;
1203  }
1204 
1205  mVideoFileFrameCount++;
1206  }
1207 
1208  // release the hevc buffer
1209  mVideoHevcCircularBuffer.EndConsumeNextBuffer ();
1210  }
1211  } // loop til quit signaled
1212 
1213 } // VideoFileWorker
1214 
1215 
1216 // This is where we start the audio file writer thread
1218 {
1219  mAudioFileThread.Attach(AudioFileThreadStatic, this);
1220  mAudioFileThread.SetPriority(AJA_ThreadPriority_High);
1221  mAudioFileThread.Start();
1222 
1223 } // StartAudioFileThread
1224 
1225 
1226 // The file writer static callback
1228 {
1229  (void) pThread;
1230 
1231  NTV2EncodeHEVCVifAc * pApp (reinterpret_cast <NTV2EncodeHEVCVifAc *> (pContext));
1232  pApp->AudioFileWorker ();
1233 
1234 } // AudioFileStatic
1235 
1236 
1238 {
1239  while (!mGlobalQuit)
1240  {
1241  // wait for the next codec hevc frame
1242  AVHevcDataBuffer * pFrameData (mAudioInputCircularBuffer.StartConsumeNextBuffer ());
1243  if (pFrameData)
1244  {
1245  if (!mLastFrameAudio)
1246  {
1247  // write the audio samples to the output file
1248  mHevcCommon->WriteAiffData(pFrameData->pAudioBuffer, mNumAudioChannels, pFrameData->audioDataSize/mNumAudioChannels/4);
1249 
1250  if (pFrameData->lastFrame)
1251  {
1252  printf ( "Audio file last frame number %d\n", mAudioFileFrameCount );
1253  mLastFrameAudio = true;
1254  }
1255  }
1256 
1257  mAudioFileFrameCount++;
1258 
1259  // release the hevc buffer
1260  mAudioInputCircularBuffer.EndConsumeNextBuffer ();
1261  }
1262  } // loop til quit signaled
1263 
1264 } // AudioFileWorker
1265 
1266 
1268 
1269 
1271 {
1272  AUTOCIRCULATE_STATUS inputACStatus;
1273 
1274  mDevice.AutoCirculateGetStatus (mInputChannel, inputACStatus);
1275  outInputStatus->framesProcessed = inputACStatus.GetProcessedFrameCount();
1276  outInputStatus->framesDropped = inputACStatus.GetDroppedFrameCount();
1277  outInputStatus->bufferLevel = inputACStatus.GetBufferLevel();
1278 
1279 } // GetStatus
1280 
1281 
1283 {
1284 
1285  // Override this function to use the frame data in the way your application requires
1286  memcpy(pDstFrame->pVideoBuffer, pSrcFrame->pVideoBuffer, pSrcFrame->videoDataSize);
1287  pDstFrame->videoDataSize = pSrcFrame->videoDataSize;
1288  pDstFrame->timeCodeDBB = pSrcFrame->timeCodeDBB;
1289  pDstFrame->timeCodeLow = pSrcFrame->timeCodeLow;
1290  pDstFrame->timeCodeHigh = pSrcFrame->timeCodeHigh;
1291  pDstFrame->lastFrame = pSrcFrame->lastFrame;
1292  if (mWithInfo)
1293  {
1294  memcpy(pDstFrame->pInfoBuffer, pSrcFrame->pInfoBuffer, pSrcFrame->infoDataSize + pSrcFrame->infoDataSize2);
1295  pDstFrame->infoDataSize = pSrcFrame->infoDataSize;
1296  pDstFrame->infoDataSize2 = pSrcFrame->infoDataSize2;
1297  }
1298 
1299  if (mWithAnc)
1300  {
1301  std::string timeString;
1302  mTimeCode.Set(frameNumber);
1303  mTimeCode.SetStdTimecodeForHfr(false);
1304  mTimeCode.QueryString(timeString, mTimeBase, false);
1305  mTimeCodeBurn.BurnTimeCode((char*)pDstFrame->pVideoBuffer, timeString.c_str(), 10);
1306  mTimeCode.SetRP188(pDstFrame->timeCodeDBB, pDstFrame->timeCodeLow, pDstFrame->timeCodeHigh, mTimeBase);
1307  mTimeCode.QueryString(timeString, mTimeBase, false);
1308  mTimeCodeBurn.BurnTimeCode((char*)pDstFrame->pVideoBuffer, timeString.c_str(), 20);
1309  }
1310 
1311  return AJA_STATUS_SUCCESS;
1312 
1313 } // ProcessFrame
NTV2FrameRate GetNTV2FrameRateFromVideoFormat(const NTV2VideoFormat inVideoFormat)
Definition: ntv2utils.cpp:3630
virtual NTV2VideoFormat GetSDIInputVideoFormat(NTV2Channel inChannel, bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given SDI input source.
virtual AJAStatus Run(void)
Runs me.
Declares the NTV2EncodeHEVCVifAc class.
#define VIDEO_RING_SIZE
Specifies the device's internal clock.
Definition: ntv2enums.h:1459
virtual bool SetReference(const NTV2ReferenceSource inRefSource, const bool inKeepFramePulseSelect=(0))
Sets the device's clock reference source. See Video Output Clocking & Synchronization for more inform...
virtual bool ReleaseStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Releases exclusive use of the AJA device for the given process, permitting other processes to acquire...
void SetAJAFrameRate(AJA_FrameRate ajaFrameRate)
Definition: timebase.cpp:164
AJAStatus Add(FrameDataPtr pInFrameData)
Appends a new frame buffer to me, increasing my frame storage capacity by one frame.
uint32_t * pAudioBuffer
Pointer to host audio buffer.
virtual void StartVideoProcessThread(void)
Start the video process thread.
I interrogate and control an AJA video/audio capture/playout device.
Definition: ntv2card.h:28
NTV2FrameBufferFormat
Identifies a particular video frame buffer pixel format. See Device Frame Buffer Formats for details...
Definition: ntv2enums.h:219
bool SetBuffers(ULWord *pInVideoBuffer, const ULWord inVideoByteCount, ULWord *pInAudioBuffer, const ULWord inAudioByteCount, ULWord *pInANCBuffer, const ULWord inANCByteCount, ULWord *pInANCF2Buffer=NULL, const ULWord inANCF2ByteCount=0)
Sets my buffers for use in a subsequent call to CNTV2Card::AutoCirculateTransfer. ...
virtual bool SetVideoFormat(const NTV2VideoFormat inVideoFormat, const bool inIsAJARetail=(!(0)), const bool inKeepVancSettings=(0), const NTV2Channel inChannel=NTV2_CHANNEL1)
Configures the AJA device to handle a specific video format.
AJAStatus
Definition: types.h:380
virtual void VideoInputWorker(void)
Repeatedly captures video frames using AutoCirculate and add them to the video input ring...
ULWord GetCapturedAudioByteCount(void) const
void Set(uint32_t frame)
Definition: timecode.cpp:420
static uint64_t GetPid()
Definition: process.cpp:35
ULWord GetBufferLevel(void) const
#define AJA_FAILURE(_status_)
Definition: types.h:373
NTV2InputSource NTV2ChannelToInputSource(const NTV2Channel inChannel, const NTV2IOKinds inKinds=NTV2_IOKINDS_SDI)
Definition: ntv2utils.cpp:5132
Capture (input) mode, which writes into device SDRAM.
Definition: ntv2enums.h:1243
uint32_t videoBufferSize
Size of host video buffer (bytes)
virtual void StartVideoFileThread(void)
Start the video file writer thread.
struct HevcEncodedInfo HevcEncodedInfo
#define NTV2_AUDIOSIZE_MAX
virtual void AudioFileWorker(void)
Repeatedly removes audio samples from the audio input ring and writes them to the audio output file...
Declares the AJATime class.
virtual AJAStatus SetPriority(AJAThreadPriority priority)
Definition: thread.cpp:133
virtual bool GetSDIInput3GbPresent(bool &outValue, const NTV2Channel channel)
FrameDataPtr StartConsumeNextBuffer(void)
The thread that&#39;s responsible for processing incoming frames – the consumer – calls this function t...
Obtain audio samples from the audio that&#39;s embedded in the video HANC.
Definition: ntv2enums.h:2007
AJAStatus CreateAiffFile(const std::string &inFileName, uint32_t numChannels, uint32_t maxFrames, uint32_t bufferSize)
static void CodecRawThreadStatic(AJAThread *pThread, void *pContext)
This is the codec raw thread's static callback function that gets called when the thread starts...
Definition: json.hpp:5362
virtual bool SetAudioRate(const NTV2AudioRate inRate, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2AudioRate for the given Audio System.
Definition: ntv2audio.cpp:205
virtual AJAStatus Start()
Definition: thread.cpp:91
virtual bool EnableInputInterrupt(const NTV2Channel channel=NTV2_CHANNEL1)
Allows the CNTV2Card instance to wait for and respond to input vertical blanking interrupts originati...
virtual bool SubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Causes me to be notified when an input vertical blanking interrupt occurs on the given input channel...
void EndConsumeNextBuffer(void)
The consumer thread calls this function to signal that it has finished processing the frame it obtain...
#define false
virtual bool AutoCirculateGetStatus(const NTV2Channel inChannel, AUTOCIRCULATE_STATUS &outStatus)
Returns the current AutoCirculate status for the given channel.
virtual AJAStatus ProcessVideoFrame(AVHevcDataBuffer *pSrcFrame, AVHevcDataBuffer *pDstFrame, uint32_t frameNumber)
Default do-nothing function for processing the captured frames.
virtual bool SetFrameBufferFormat(NTV2Channel inChannel, NTV2FrameBufferFormat inNewFormat, bool inIsAJARetail=(!(0)), NTV2HDRXferChars inXferChars=NTV2_VPID_TC_SDR_TV, NTV2HDRColorimetry inColorimetry=NTV2_VPID_Color_Rec709, NTV2HDRLuminance inLuminance=NTV2_VPID_Luminance_YCbCr)
Sets the frame buffer format for the given FrameStore on the AJA device.
NTV2EmbeddedAudioInput NTV2ChannelToEmbeddedAudioInput(const NTV2Channel inChannel)
Converts the given NTV2Channel value into its equivalent NTV2EmbeddedAudioInput.
Definition: ntv2utils.cpp:4861
NTV2Channel
These enum values are mostly used to identify a specific widget_framestore. They&#39;re also commonly use...
Definition: ntv2enums.h:1357
mVideoFormat
Definition: ntv2vcam.cpp:801
virtual class DeviceCapabilities & features(void)
Definition: ntv2card.h:148
virtual bool SetMultiFormatMode(const bool inEnable)
Enables or disables multi-format (per channel) device operation. If enabled, each device channel can ...
AJA_FrameRate GetAJAFrameRate(NTV2FrameRate frameRate)
AJAStatus DetermineInputFormat(NTV2VideoFormat sdiFormat, bool quad, NTV2VideoFormat &videoFormat)
virtual void SetupAutoCirculate(void)
Initializes AutoCirculate.
int64_t FramesToMicroseconds(int64_t frames, bool round=false) const
Definition: timebase.cpp:223
#define AUDIO_RING_SIZE
virtual void VideoProcessWorker(void)
Repeatedly removes video frames from the video input ring, calls a custom video process method and ad...
uint32_t timeCodeHigh
Time code data high.
mPixelFormat
Definition: ntv2vcam.cpp:710
ULWord GetProcessedFrameCount(void) const
virtual void VideoFileWorker(void)
Repeatedly removes hevc frame from the hevc ring and writes them to the hevc output file...
virtual bool ClearRouting(void)
Removes all existing signal path connections between any and all widgets on the AJA device...
uint32_t audioBufferSize
Size of host audio buffer (bytes)
uint32_t infoBufferSize
Size of the host information buffer (bytes)
This struct replaces the old RP188_STRUCT.
M31VideoPreset
Definition: ntv2m31enums.h:13
virtual bool Active()
Definition: thread.cpp:116
virtual bool SetAudioSystemInputSource(const NTV2AudioSystem inAudioSystem, const NTV2AudioSource inAudioSource, const NTV2EmbeddedAudioInput inEmbeddedInput)
Sets the audio source for the given NTV2AudioSystem on the device.
Definition: ntv2audio.cpp:485
HevcPictureData pictureData
struct HevcPictureData HevcPictureData
Playout (output) mode, which reads from device SDRAM.
Definition: ntv2enums.h:1241
ULWord GetDisplayHeight(const NTV2VideoFormat videoFormat)
Definition: ntv2utils.cpp:4207
virtual bool SetMode(const NTV2Channel inChannel, const NTV2Mode inNewValue, const bool inIsRetail=(!(0)))
Determines if a given FrameStore on the AJA device will be used to capture or playout video...
AJAStatus CreateEncFile(const std::string &inFileName, uint32_t maxFrames)
Invalid or "not found".
Definition: ntv2enums.h:98
static bool GetFirstDeviceFromArgument(const std::string &inArgument, CNTV2Card &outDevice)
Rescans the host, and returns an open CNTV2Card instance for the AJA device that matches a command li...
virtual bool SetSDITransmitEnable(const NTV2Channel inChannel, const bool inEnable)
Sets the specified bidirectional SDI connector to act as an input or an output.
Instances of me capture frames in real time from a video signal provided to an input of an AJA device...
virtual bool AutoCirculateTransfer(const NTV2Channel inChannel, AUTOCIRCULATE_TRANSFER &transferInfo)
Transfers all or part of a frame as specified in the given AUTOCIRCULATE_TRANSFER object to/from the ...
virtual bool AcquireStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Reserves exclusive use of the AJA device for a given process, preventing other processes on the host ...
#define AJA_NULL
Definition: ajatypes.h:167
void SetAbortFlag(const bool *pAbortFlag)
Tells me the boolean variable I should monitor such that when it gets set to "true" will cause any th...
static void VideoFileThreadStatic(AJAThread *pThread, void *pContext)
This is the video file writer thread&#39;s static callback function that gets called when the thread star...
AJAStatus SetupHEVC(CNTV2m31 *pM31, M31VideoPreset preset, M31Channel encodeChannel, bool multiStream, bool withInfo)
ULWord GetVideoActiveSize(const NTV2VideoFormat inVideoFormat, const NTV2FrameBufferFormat inFBFormat, const NTV2VANCMode inVancMode=NTV2_VANCMODE_OFF)
Definition: ntv2utils.cpp:2858
virtual bool SetEveryFrameServices(const NTV2TaskMode m)
Definition: ntv2card.h:1195
virtual NTV2VideoFormat GetInputVideoFormat(const NTV2InputSource inVideoSource=NTV2_INPUTSOURCE_SDI1, const bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given input source. ...
AJA_PixelFormat GetAJAPixelFormat(NTV2FrameBufferFormat pixelFormat)
virtual void SetupHostBuffers(void)
Sets up my circular buffers.
static void Sleep(const int32_t inMilliseconds)
Suspends execution of the current thread for a given number of milliseconds.
Definition: systemtime.cpp:284
void WriteEncData(void *pBuffer, uint32_t bufferSize)
2: OEM (recommended): device configured by client application(s) with some driver involvement...
uint32_t AlignDataBuffer(void *pBuffer, uint32_t bufferSize, uint32_t dataSize, uint32_t alignBytes, uint8_t fill)
virtual void CodecRawWorker(void)
Repeatedly removes video frames from the raw video ring and transfers them to the codec...
mInputChannel
Definition: ntv2vcam.cpp:1010
void WriteAiffData(void *pBuffer, uint32_t numChannels, uint32_t numSamples)
ULWord GetDroppedFrameCount(void) const
Specifies channel or FrameStore 8 (or the 8th item).
Definition: ntv2enums.h:1366
struct HevcPictureInfo HevcPictureInfo
AJA_EXPORT bool RenderTimeCodeFont(AJA_PixelFormat pixelFormat, uint32_t numPixels, uint32_t numLines)
uint32_t timeCodeLow
Time code data low.
virtual void StartCodecHevcThread(void)
Start the codec hevc thread.
Specifies channel or FrameStore 2 (or the 2nd item).
Definition: ntv2enums.h:1360
virtual NTV2DeviceID GetDeviceID(void)
void WriteHevcData(void *pBuffer, uint32_t bufferSize)
virtual AJAStatus Init(void)
Initializes me and prepares me to Run.
virtual void CodecHevcWorker(void)
Repeatedly transfers hevc frames from the codec and adds them to the hevc ring.
uint32_t audioDataSize
Size of audio data (bytes)
virtual bool AutoCirculateStop(const NTV2Channel inChannel, const bool inAbort=(0))
Stops AutoCirculate for the given channel, and releases the on-device frame buffers that were allocat...
Declares the AJAProcess class.
virtual bool Connect(const NTV2InputCrosspointID inInputXpt, const NTV2OutputCrosspointID inOutputXpt, const bool inValidate=(0))
Connects the given widget signal input (sink) to the given widget signal output (source).
virtual bool UnsubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Unregisters me so I&#39;m no longer notified when an input VBI is signaled on the given input channel...
virtual void Quit(void)
Gracefully stops me from running.
This object specifies the information that will be transferred to or from the AJA device in the CNTV2...
virtual bool SetAudioBufferSize(const NTV2AudioBufferSize inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Changes the size of the audio buffer that is used for a given Audio System in the AJA device...
Definition: ntv2audio.cpp:249
virtual bool GetEveryFrameServices(NTV2TaskMode &m)
Definition: ntv2card.h:1194
virtual void StartAudioFileThread(void)
Start the audio file writer thread.
void EndProduceNextBuffer(void)
The producer thread calls this function to signal that it has finished populating the frame it obtain...
This identifies the first Audio System.
Definition: ntv2enums.h:3897
virtual bool WaitForOutputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
static const ULWord kDemoAppSignature((((uint32_t)( 'D'))<< 24)|(((uint32_t)( 'E'))<< 16)|(((uint32_t)( 'M'))<< 8)|(((uint32_t)( 'O'))<< 0))
Declares numerous NTV2 utility functions.
uint32_t videoDataSize2
Size of field 2 video data (bytes)
static void CodecHevcThreadStatic(AJAThread *pThread, void *pContext)
This is the codec hevc thread&#39;s static callback function that gets called when the thread starts...
virtual AJAStatus Attach(AJAThreadFunction *pThreadFunction, void *pUserContext)
Definition: thread.cpp:169
FrameDataPtr StartProduceNextBuffer(void)
The thread that&#39;s responsible for providing frames – the producer – calls this function to populate...
virtual void GetStatus(AVHevcStatus *outInputStatus)
Provides status information about my input (capture) process.
void SetStdTimecodeForHfr(bool bStdTc)
Definition: timecode.h:220
virtual bool SetSDIOutLevelAtoLevelBConversion(const UWord inOutputSpigot, const bool inEnable)
Enables or disables 3G level A to 3G level B conversion at the SDI output widget (assuming the AJA de...
This is returned from the CNTV2Card::AutoCirculateGetStatus function.
bool lastFrame
Indicates last captured frame.
uint16_t UWord
Definition: ajatypes.h:221
void QueryString(std::string &str, const AJATimeBase &timeBase, bool bDropFrame, bool bStdTcForHfr, AJATimecodeNotation notation=AJA_TIMECODE_LEGACY)
Definition: timecode.cpp:299
1: Standard/Retail: device configured by AJA ControlPanel, service/daemon, and driver.
Specifies channel or FrameStore 1 (or the first item).
Definition: ntv2enums.h:1359
virtual bool AutoCirculateStart(const NTV2Channel inChannel, const ULWord64 inStartTime=0)
Starts AutoCirculating the specified channel that was previously initialized by CNTV2Card::AutoCircul...
virtual AJAStatus SetupVideo(void)
Sets up everything I need for capturing video.
virtual bool AutoCirculateInitForOutput(const NTV2Channel inChannel, const UWord inFrameCount=7, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_INVALID, const ULWord inOptionFlags=0, const UByte inNumChannels=1, const UWord inStartFrameNumber=0, const UWord inEndFrameNumber=0)
Prepares for subsequent AutoCirculate playout, designating a contiguous block of frame buffers on the...
virtual bool SetSDIInLevelBtoLevelAConversion(const NTV2ChannelSet &inSDIInputs, const bool inEnable)
Enables or disables 3G level B to 3G level A conversion at the SDI input(s).
uint32_t * pVideoBuffer
Pointer to host video buffer.
ULWord GetDisplayWidth(const NTV2VideoFormat videoFormat)
Definition: ntv2utils.cpp:4199
bool GetInputTimeCode(NTV2_RP188 &outTimeCode, const NTV2TCIndex inTCIndex=NTV2_TCINDEX_SDI1) const
Intended for capture, answers with a specific timecode captured in my acTransferStatus member&#39;s acFra...
This structure encapsulates the video and audio buffers used by the HEVC demo applications. The demo programs that employ producer/consumer threads use a fixed number of these buffers.
AJA_EXPORT bool BurnTimeCode(void *pBaseVideoAddress, const std::string &inTimeCodeStr, const uint32_t inYPercent)
NTV2AudioSystem NTV2ChannelToAudioSystem(const NTV2Channel inChannel)
Converts the given NTV2Channel value into its equivalent NTV2AudioSystem.
Definition: ntv2utils.cpp:4869
uint32_t * pInfoBuffer
Picture information (raw) or encode information (hevc)
bool HasAvailableInputFrame(void) const
Specifies channel or FrameStore 4 (or the 4th item).
Definition: ntv2enums.h:1362
virtual void RouteInputSignal(void)
Sets up device routing for capture.
Specifies channel or FrameStore 5 (or the 5th item).
Definition: ntv2enums.h:1363
bool IsRunning(void) const
uint32_t videoDataSize
Size of video data (bytes)
ULWord fLo
| BG 4 | Secs10 | BG 3 | Secs 1 | BG 2 | Frms10 | BG 1 | Frms 1 |
NTV2EncodeHEVCVifAc(const std::string inDeviceSpecifier="0", const NTV2Channel inChannel=NTV2_CHANNEL1, const M31VideoPreset inM31Preset=M31_FILE_1280X720_420_8_5994p, const NTV2FrameBufferFormat inPixelFormat=NTV2_FBF_10BIT_YCBCR_420PL2, const bool inQuadMode=(0), const uint32_t inAudioChannels=0, const bool inTimeCodeBurn=(0), const bool inInfoData=(0), const uint32_t inMaxFrames=0xffffffff)
Constructs me using the given settings.
virtual void StartCodecRawThread(void)
Start the codec raw thread.
virtual void StartVideoInputThread(void)
Start the video input thread.
static void VideoProcessThreadStatic(AJAThread *pThread, void *pContext)
This is the video process thread&#39;s static callback function that gets called when the thread starts...
Specifies channel or FrameStore 6 (or the 6th item).
Definition: ntv2enums.h:1364
uint32_t infoDataSize2
Size of the field 2 information data (bytes)
virtual bool AutoCirculateInitForInput(const NTV2Channel inChannel, const UWord inFrameCount=7, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_INVALID, const ULWord inOptionFlags=0, const UByte inNumChannels=1, const UWord inStartFrameNumber=0, const UWord inEndFrameNumber=0)
Prepares for subsequent AutoCirculate ingest, designating a contiguous block of frame buffers on the ...
#define AUTOCIRCULATE_WITH_RP188
Use this to AutoCirculate with RP188.
virtual AJAStatus SetupAudio(void)
Sets up everything I need for capturing audio.
Specifies channel or FrameStore 7 (or the 7th item).
Definition: ntv2enums.h:1365
bool SetAudioBuffer(ULWord *pInAudioBuffer, const ULWord inAudioByteCount)
Sets my audio buffer for use in a subsequent call to CNTV2Card::AutoCirculateTransfer.
Identifies the 1st SDI video input.
Definition: ntv2enums.h:1269
uint32_t infoDataSize
Size of the information data (bytes)
AJAStatus CreateHevcFile(const std::string &inFileName, uint32_t maxFrames)
static void AudioFileThreadStatic(AJAThread *pThread, void *pContext)
This is the audio file writer thread&#39;s static callback function that gets called when the thread star...
virtual M31VideoPreset GetCodecPreset(void)
Get the codec preset.
virtual bool SetNumberAudioChannels(const ULWord inNumChannels, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the number of audio channels to be concurrently captured or played for a given Audio System on t...
Definition: ntv2audio.cpp:146
ULWord fHi
| BG 8 | Hrs 10 | BG 7 | Hrs 1 | BG 6 | Mins10 | BG 5 | Mins 1 |
Declares device capability functions.
virtual bool WaitForInputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
This identifies the mode in which there are no VANC lines in the frame buffer.
Definition: ntv2enums.h:3799
Specifies channel or FrameStore 3 (or the 3rd item).
Definition: ntv2enums.h:1361
static void VideoInputThreadStatic(AJAThread *pThread, void *pContext)
This is the video input thread&#39;s static callback function that gets called when the thread starts...
NTV2ReferenceSource NTV2InputSourceToReferenceSource(const NTV2InputSource inInputSource)
Converts a given NTV2InputSource to its equivalent NTV2ReferenceSource value.
Definition: ntv2utils.cpp:5023
See 8-Bit YCbCr Format.
Definition: ntv2enums.h:223
Audio clock derived from the video input.
Definition: ntv2enums.h:1993
virtual bool SetEmbeddedAudioClock(const NTV2EmbeddedAudioClock inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2EmbeddedAudioClock setting for the given NTV2AudioSystem.
Definition: ntv2audio.cpp:417
uint32_t timeCodeDBB
Time code data dbb.
void SetRP188(const uint32_t inDBB, const uint32_t inLo, const uint32_t inHi, const AJATimeBase &inTimeBase)
Definition: timecode.cpp:570
virtual bool EnableChannel(const NTV2Channel inChannel)
Enables the given FrameStore.