AJA NTV2 SDK  18.0.0.2122
NTV2 SDK 18.0.0.2122
ntv2encodehevc.cpp
Go to the documentation of this file.
1 /* SPDX-License-Identifier: MIT */
8 #include <stdio.h>
9 
10 #include "ntv2encodehevc.h"
11 #include "ntv2utils.h"
12 #include "ntv2devicefeatures.h"
13 #include "ajabase/system/process.h"
15 
16 using namespace std;
17 
18 #define NTV2_AUDIOSIZE_MAX (401 * 1024)
19 
20 
//	Constructor:  records the command-line options and places every member in a
//	deterministic state.  No device access occurs here -- all hardware setup is
//	deferred to Init().
NTV2EncodeHEVC::NTV2EncodeHEVC (const string inDeviceSpecifier,
								const NTV2Channel inChannel,
								const M31VideoPreset inPreset,
								const NTV2FrameBufferFormat inPixelFormat,
								const bool inQuadMode,
								const uint32_t inAudioChannels,
								const bool inTimeCodeBurn,
								const bool inInfoMode,
								const bool inTsiMode,
								const uint32_t inMaxFrames)

	:	mACInputThread (AJAThread()),			//	AutoCirculate capture thread
		mVideoProcessThread (AJAThread()),		//	raw frame processing thread
		mCodecRawThread (AJAThread()),			//	raw-frame-to-codec thread
		mCodecHevcThread (AJAThread()),			//	codec-to-host HEVC thread
		mVideoFileThread (AJAThread()),			//	HEVC file writer thread
		mAudioFileThread (AJAThread()),			//	audio file writer thread
		mM31 (AJA_NULL),						//	allocated in Init()
		mHevcCommon (AJA_NULL),					//	allocated in Init()
		mDeviceID (DEVICE_ID_NOTFOUND),
		mDeviceSpecifier (inDeviceSpecifier),
		mWithAudio (inAudioChannels != 0),		//	zero requested channels disables audio capture
		mInputChannel (inChannel),
		mEncodeChannel (M31_CH0),
		mPreset (inPreset),
		mInputSource (NTV2_INPUTSOURCE_SDI1),
		//	NOTE(review): one initializer appears to have been dropped by the doc
		//	extraction here (likely the mVideoFormat initializer, since Init()
		//	tests mVideoFormat before assigning it) -- confirm against SDK sources.
		mPixelFormat (inPixelFormat),
		mQuad (inQuadMode),
		mTsi (inTsiMode),
		m60p (false),							//	resolved in Init() from the frame rate
		mVif (false),
		mInterlaced (false),
		mMultiStream (false),					//	becomes true in Init() when a specific channel is requested
		mWithInfo (inInfoMode),
		mWithAnc (inTimeCodeBurn),
		mAudioSystem (NTV2_AUDIOSYSTEM_1),
		mSavedTaskMode (NTV2_STANDARD_TASKS),	//	replaced with the device's prior task mode in Init()
		mNumAudioChannels (0),
		mFileAudioChannels (inAudioChannels),
		mMaxFrames (inMaxFrames),
		mLastFrame (false),
		mLastFrameInput (false),
		mLastFrameRaw (false),
		mLastFrameHevc (false),
		mLastFrameVideo (false),
		mLastFrameAudio (false),
		mGlobalQuit (false),
		mVideoInputFrameCount (0),
		mVideoProcessFrameCount (0),
		mCodecRawFrameCount (0),
		mCodecHevcFrameCount (0),
		mVideoFileFrameCount (0),
		mAudioFileFrameCount (0)
{
	//	Zero the ring-buffer descriptor arrays so the destructor can safely test
	//	each pointer even if SetupHostBuffers() never runs...
	::memset (mACInputBuffer, 0x0, sizeof (mACInputBuffer));
	::memset (mVideoRawBuffer, 0x0, sizeof (mVideoRawBuffer));
	::memset (mVideoHevcBuffer, 0x0, sizeof (mVideoHevcBuffer));
	::memset (mAudioInputBuffer, 0x0, sizeof (mAudioInputBuffer));

}	//	constructor
82 
83 
{
	//	Destructor:  stops all worker threads via Quit(), releases the codec helper
	//	objects, unsubscribes from the input vertical event, and frees every host
	//	ring buffer allocated by SetupHostBuffers().

	//	Stop my capture and consumer threads, then destroy them...
	Quit ();

	if (mM31 != AJA_NULL)
	{
		delete mM31;
		mM31 = AJA_NULL;
	}

	if (mHevcCommon != AJA_NULL)
	{
		delete mHevcCommon;
		mHevcCommon = AJA_NULL;
	}

	//	unsubscribe from input vertical event...
	mDevice.UnsubscribeInputVerticalEvent (mInputChannel);

	//	free all my buffers...
	//	(every pointer was zeroed in the constructor, so the null tests below are
	//	safe even when SetupHostBuffers() never ran)
	for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++)
	{
		if (mACInputBuffer[bufferNdx].pVideoBuffer)
		{
			delete [] mACInputBuffer[bufferNdx].pVideoBuffer;
			mACInputBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
		}
		if (mACInputBuffer[bufferNdx].pInfoBuffer)
		{
			delete [] mACInputBuffer[bufferNdx].pInfoBuffer;
			mACInputBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
		}
		if (mACInputBuffer[bufferNdx].pAudioBuffer)
		{
			delete [] mACInputBuffer[bufferNdx].pAudioBuffer;
			mACInputBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
		}

		if (mVideoRawBuffer[bufferNdx].pVideoBuffer)
		{
			delete [] mVideoRawBuffer[bufferNdx].pVideoBuffer;
			mVideoRawBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
		}
		if (mVideoRawBuffer[bufferNdx].pInfoBuffer)
		{
			delete [] mVideoRawBuffer[bufferNdx].pInfoBuffer;
			mVideoRawBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
		}
		if (mVideoRawBuffer[bufferNdx].pAudioBuffer)
		{
			delete [] mVideoRawBuffer[bufferNdx].pAudioBuffer;
			mVideoRawBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
		}

		if (mVideoHevcBuffer[bufferNdx].pVideoBuffer)
		{
			delete [] mVideoHevcBuffer[bufferNdx].pVideoBuffer;
			mVideoHevcBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
		}
		if (mVideoHevcBuffer[bufferNdx].pInfoBuffer)
		{
			delete [] mVideoHevcBuffer[bufferNdx].pInfoBuffer;
			mVideoHevcBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
		}
		if (mVideoHevcBuffer[bufferNdx].pAudioBuffer)
		{
			delete [] mVideoHevcBuffer[bufferNdx].pAudioBuffer;
			mVideoHevcBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
		}
	}

	//	The audio ring is only allocated when audio capture was requested...
	if (mWithAudio)
	{
		for (unsigned bufferNdx = 0; bufferNdx < AUDIO_RING_SIZE; bufferNdx++)
		{
			if (mAudioInputBuffer[bufferNdx].pVideoBuffer)
			{
				delete [] mAudioInputBuffer[bufferNdx].pVideoBuffer;
				mAudioInputBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
			}
			if (mAudioInputBuffer[bufferNdx].pInfoBuffer)
			{
				delete [] mAudioInputBuffer[bufferNdx].pInfoBuffer;
				mAudioInputBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
			}
			if (mAudioInputBuffer[bufferNdx].pAudioBuffer)
			{
				delete [] mAudioInputBuffer[bufferNdx].pAudioBuffer;
				mAudioInputBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
			}
		}
	}

}	//	destructor
179 
180 
182 {
183  if (mM31 && !mLastFrame && !mGlobalQuit)
184  {
185  // Set the last frame flag to start the quit process
186  mLastFrame = true;
187 
188  // Wait for the last frame to be written to disk
189  int i;
190  int timeout = 300;
191  for (i = 0; i < timeout; i++)
192  {
193  if (mLastFrameVideo && (!mWithAudio || mLastFrameAudio)) break;
194  AJATime::Sleep (10);
195  }
196  if (i == timeout)
197  { cerr << "## ERROR: Wait for last frame timeout" << endl; }
198 
199  // Stop the encoder stream
200  if (!mM31->ChangeEHState(Hevc_EhState_ReadyToStop, mEncodeChannel))
201  { cerr << "## ERROR: ChangeEHState ready to stop failed" << endl; }
202 
203  if (!mM31->ChangeEHState(Hevc_EhState_Stop, mEncodeChannel))
204  { cerr << "## ERROR: ChangeEHState stop failed" << endl; }
205 
206  // stop the video input stream
207  if (!mM31->ChangeVInState(Hevc_VinState_Stop, mEncodeChannel))
208  { cerr << "## ERROR: ChangeVInState stop failed" << endl; }
209 
210  if(!mMultiStream)
211  {
212  // Now go to the init state
213  if (!mM31->ChangeMainState(Hevc_MainState_Init, Hevc_EncodeMode_Single))
214  { cerr << "## ERROR: ChangeMainState to init failed" << endl; }
215  }
216  }
217 
218  // Stop the worker threads
219  mGlobalQuit = true;
220 
221  while (mACInputThread.Active())
222  AJATime::Sleep (10);
223 
224  while (mVideoProcessThread.Active())
225  AJATime::Sleep (10);
226 
227  while (mCodecRawThread.Active())
228  AJATime::Sleep (10);
229 
230  while (mCodecHevcThread.Active())
231  AJATime::Sleep (10);
232 
233  while (mVideoFileThread.Active())
234  AJATime::Sleep (10);
235 
236  while (mAudioFileThread.Active())
237  AJATime::Sleep (10);
238 
239  // Stop video capture
240  mDevice.SetMode(mInputChannel, NTV2_MODE_DISPLAY, false);
241 
242  // Release board
243  if (!mMultiStream)
244  {
245  mDevice.ReleaseStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid()));
246  mDevice.SetEveryFrameServices (mSavedTaskMode); // Restore prior task mode
247  }
248 
249  // Close output files
250  mHevcCommon->CloseHevcFile ();
251  if (mWithInfo)
252  mHevcCommon->CloseEncFile ();
253  if (mWithAudio)
254  mHevcCommon->CloseAiffFile ();
255 
256 } // Quit
257 
258 
{
	//	Init:  opens the device, allocates the codec helpers, resolves video/pixel
	//	formats and channel topology, acquires the board, configures video/audio/
	//	routing/codec, allocates host ring buffers, and creates the output files.
	//	Returns AJA_STATUS_SUCCESS, or a failure status at the first error.
	AJAStatus status (AJA_STATUS_SUCCESS);

	//	Open the device...
	if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, mDevice))
		{ cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return AJA_STATUS_OPEN; }

	//	Allocate our M31 helper class and our HEVC common class
	mM31 = new CNTV2m31 (&mDevice);

	mHevcCommon = new CNTV2DemoHevcCommon ();

	//	(defensive: operator new normally throws rather than returning null)
	if ((mM31 == AJA_NULL) || (mHevcCommon == AJA_NULL))
	{
		return AJA_STATUS_FAIL;
	}

	//	Preset specification takes precedence
	if (mPreset < M31_NUMVIDEOPRESETS)
	{
		//	Get NTV2 formats to match codec preset
		mVideoFormat = CNTV2m31::GetPresetVideoFormat(mPreset);
		mPixelFormat = CNTV2m31::GetPresetFrameBufferFormat(mPreset);
		mVif = CNTV2m31::IsPresetVIF(mPreset);
		mQuad = CNTV2m31::IsPresetUHD(mPreset);
		mInterlaced = CNTV2m31::IsPresetInterlaced(mPreset);
	}
	//	Otherwise use the pixel format and SDI input format
	else if (mPixelFormat >= NTV2_FBF_NUMFRAMEBUFFERFORMATS)
	{
		//	No usable pixel format given:  default to 8-bit 4:2:0 planar,
		//	or packed 8-bit YCbCr when in VIF mode
		mPixelFormat = NTV2_FBF_8BIT_YCBCR_420PL2;
		if (mVif)
		{
			mPixelFormat = NTV2_FBF_8BIT_YCBCR;
		}
	}

	//	Quad mode must be channel 1
	if (mQuad)
	{
		mInputChannel = NTV2_CHANNEL1;
		mOutputChannel = NTV2_CHANNEL5;
		mEncodeChannel = M31_CH0;
	}
	else
	{
		//	When input channel specified we are multistream
		switch (mInputChannel)
		{
			case NTV2_CHANNEL1: { mEncodeChannel = M31_CH0; mOutputChannel = NTV2_CHANNEL5; mMultiStream = true; break; }
			case NTV2_CHANNEL2: { mEncodeChannel = M31_CH1; mOutputChannel = NTV2_CHANNEL6; mMultiStream = true; break; }
			case NTV2_CHANNEL3: { mEncodeChannel = M31_CH2; mOutputChannel = NTV2_CHANNEL7; mMultiStream = true; break; }
			case NTV2_CHANNEL4: { mEncodeChannel = M31_CH3; mOutputChannel = NTV2_CHANNEL8; mMultiStream = true; break; }
			default: { mInputChannel = NTV2_CHANNEL1; mOutputChannel = NTV2_CHANNEL5; mEncodeChannel = M31_CH0; }
		}
	}

	//	Grab board in a shared environment
	if (!mMultiStream)
	{
		if (!mDevice.AcquireStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid())))
			return AJA_STATUS_BUSY;							//	Another app is using the device
		mDevice.GetEveryFrameServices (mSavedTaskMode);		//	Save the current state before we change it
	}
	mDevice.SetEveryFrameServices (NTV2_OEM_TASKS);			//	Since this is an OEM demo, use the OEM service level

	mDeviceID = mDevice.GetDeviceID ();						//	Keep the device ID handy, as it's used frequently

	//	When video format is unknown determine from SDI input
	if (mVideoFormat >= NTV2_MAX_NUM_VIDEO_FORMATS)
	{
		bool is3Gb = false;
		mDevice.GetSDIInput3GbPresent (is3Gb, mInputChannel);

		//	Get SDI input format
		status = mHevcCommon->DetermineInputFormat(mDevice.GetSDIInputVideoFormat(mInputChannel, is3Gb), mQuad, mVideoFormat);
		if (AJA_FAILURE(status))
			return status;

		//	Get codec preset for input format
		if(!CNTV2m31::ConvertVideoFormatToPreset(mVideoFormat, mPixelFormat, mVif, mPreset))
			return AJA_STATUS_FAIL;

		mQuad = CNTV2m31::IsPresetUHD(mPreset);
		mInterlaced = CNTV2m31::IsPresetInterlaced(mPreset);
	}

	//	Video format frame rate above 30
	m60p = GetFramesPerSecond(GetNTV2FrameRateFromVideoFormat(mVideoFormat)) > 31.0;

	//	Setup frame buffer
	status = SetupVideo ();
	if (AJA_FAILURE (status))
		return status;

	//	Route input signals to frame buffers
	RouteInputSignal ();

	//	Setup audio buffer
	status = SetupAudio ();
	if (AJA_FAILURE (status))
		return status;

	//	Setup to capture video/audio/anc input
	//	NOTE(review): a statement was lost in the doc extraction here (most likely
	//	the SetupAutoCirculate() call) -- confirm against the SDK sources.

	//	Setup codec
	status = mHevcCommon->SetupHEVC (mM31, mPreset, mEncodeChannel, mMultiStream, mWithInfo);
	if (AJA_FAILURE (status))
		return status;

	//	Setup the circular buffers
	SetupHostBuffers ();

	//	Create encoded video output file ("raw_<chan>.hevc" per channel when multistream)
	{
		ostringstream oss;
		if (mMultiStream) oss << "raw_" << (mInputChannel+1) << ".hevc"; else oss << "raw.hevc";
		status = mHevcCommon->CreateHevcFile (oss.str(), mMaxFrames);
		if (AJA_FAILURE (status))
			return status;
	}

	if (mWithInfo)
	{
		//	Create encoded data output file
		ostringstream oss;
		if (mMultiStream) oss << "raw_" << (mInputChannel+1) << ".txt"; else oss << "raw.txt";
		status = mHevcCommon->CreateEncFile (oss.str(), mMaxFrames);
		if (AJA_FAILURE (status))
			return status;
	}

	if (mWithAudio)
	{
		//	Create audio output file
		ostringstream oss;
		if (mMultiStream) oss << "raw_" << (mInputChannel+1) << ".aiff"; else oss << "raw.aiff";
		status = mHevcCommon->CreateAiffFile (oss.str(), mFileAudioChannels, mMaxFrames, NTV2_AUDIOSIZE_MAX);
		if (AJA_FAILURE (status))
			return status;
	}

	return AJA_STATUS_SUCCESS;

}	//	Init
406 
407 
409 {
410  return mPreset;
411 }
412 
413 
{
	//	Configures frame stores, video format, capture/display modes and the device
	//	reference for one of three topologies:  quad (UHD), multistream, or single.

	//	Setup frame buffer
	if (mQuad)
	{
		//	Quad (UHD) capture must use frame store 1
		if (mInputChannel != NTV2_CHANNEL1)
			return AJA_STATUS_FAIL;

		//	Disable multiformat
		if (mDevice.features().CanDoMultiFormat())
			mDevice.SetMultiFormatMode (false);

		//	Set the board video format
		mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL1);

		//	Set two sample interleave (otherwise quad squares)
		if(mTsi)
			mDevice.SetTsiFrameEnable(true, NTV2_CHANNEL1);
		else
			mDevice.Set4kSquaresEnable(true, NTV2_CHANNEL1);

		//	Set frame buffer format
		mDevice.SetFrameBufferFormat (NTV2_CHANNEL1, mPixelFormat);
		mDevice.SetFrameBufferFormat (NTV2_CHANNEL2, mPixelFormat);
		mDevice.SetFrameBufferFormat(NTV2_CHANNEL3, mPixelFormat);
		mDevice.SetFrameBufferFormat(NTV2_CHANNEL4, mPixelFormat);
		mDevice.SetFrameBufferFormat (NTV2_CHANNEL5, mPixelFormat);
		mDevice.SetFrameBufferFormat (NTV2_CHANNEL6, mPixelFormat);
		mDevice.SetFrameBufferFormat (NTV2_CHANNEL7, mPixelFormat);
		mDevice.SetFrameBufferFormat (NTV2_CHANNEL8, mPixelFormat);

		//	Set capture mode -- TSI uses two capture frame stores, squares use four
		mDevice.SetMode (NTV2_CHANNEL1, NTV2_MODE_CAPTURE, false);
		mDevice.SetMode (NTV2_CHANNEL2, NTV2_MODE_CAPTURE, false);
		if (!mTsi)
		{
			mDevice.SetMode(NTV2_CHANNEL3, NTV2_MODE_CAPTURE, false);
			mDevice.SetMode(NTV2_CHANNEL4, NTV2_MODE_CAPTURE, false);
		}
		else
		{
			mDevice.SetMode(NTV2_CHANNEL3, NTV2_MODE_DISPLAY, false);
			mDevice.SetMode(NTV2_CHANNEL4, NTV2_MODE_DISPLAY, false);
		}
		mDevice.SetMode (NTV2_CHANNEL5, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL6, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL7, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL8, NTV2_MODE_DISPLAY, false);

		//	Enable frame buffers
		mDevice.EnableChannel (NTV2_CHANNEL1);
		mDevice.EnableChannel (NTV2_CHANNEL2);
		mDevice.EnableChannel (NTV2_CHANNEL3);
		mDevice.EnableChannel (NTV2_CHANNEL4);
		mDevice.EnableChannel (NTV2_CHANNEL5);
		mDevice.EnableChannel (NTV2_CHANNEL6);
		mDevice.EnableChannel (NTV2_CHANNEL7);
		mDevice.EnableChannel (NTV2_CHANNEL8);

		//	Save input source
		mInputSource = ::NTV2ChannelToInputSource (NTV2_CHANNEL1);
	}
	else if (mMultiStream)
	{
		//	Configure for multiformat
		if (mDevice.features().CanDoMultiFormat())
			mDevice.SetMultiFormatMode (true);

		//	Set the channel video format
		mDevice.SetVideoFormat (mVideoFormat, false, false, mInputChannel);

		//	Set frame buffer format
		mDevice.SetFrameBufferFormat (mInputChannel, mPixelFormat);
		mDevice.SetFrameBufferFormat (mOutputChannel, mPixelFormat);

		//	Set capture mode
		mDevice.SetMode (mInputChannel, NTV2_MODE_CAPTURE, false);
		mDevice.SetMode (mOutputChannel, NTV2_MODE_DISPLAY, false);

		//	Enable frame buffer
		mDevice.EnableChannel (mInputChannel);
		mDevice.EnableChannel (mOutputChannel);

		//	Save input source
		mInputSource = ::NTV2ChannelToInputSource (mInputChannel);
	}
	else
	{
		//	Disable multiformat mode
		if (mDevice.features().CanDoMultiFormat())
			mDevice.SetMultiFormatMode (false);

		//	Set the board format
		mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL1);

		//	Set frame buffer format
		mDevice.SetFrameBufferFormat (mInputChannel, mPixelFormat);
		mDevice.SetFrameBufferFormat (mOutputChannel, mPixelFormat);

		//	Park every frame store in display mode first...
		mDevice.SetMode (NTV2_CHANNEL1, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL2, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL3, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL4, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL5, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL6, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL7, NTV2_MODE_DISPLAY, false);
		mDevice.SetMode (NTV2_CHANNEL8, NTV2_MODE_DISPLAY, false);

		//	...then switch the input channel to capture
		mDevice.SetMode (mInputChannel, NTV2_MODE_CAPTURE, false);

		//	Enable frame buffer
		mDevice.EnableChannel (mInputChannel);
		mDevice.EnableChannel (mOutputChannel);

		//	Save input source
		mInputSource = ::NTV2ChannelToInputSource (mInputChannel);
	}

	//	Set the device reference to the input...
	if (mMultiStream)
	{
		//	NOTE(review): a statement was lost in the doc extraction here (likely
		//	setting the reference to free run for multistream) -- confirm against
		//	the SDK sources.
	}
	else
	{
		mDevice.SetReference (::NTV2InputSourceToReferenceSource (mInputSource));
	}

	//	Enable and subscribe to the interrupts for the channel to be used...
	mDevice.EnableInputInterrupt (mInputChannel);
	mDevice.SubscribeInputVerticalEvent (mInputChannel);

	//	Setup for timecode burn
	mTimeBase.SetAJAFrameRate (mHevcCommon->GetAJAFrameRate(GetNTV2FrameRateFromVideoFormat (mVideoFormat)));
	mTimeCodeBurn.RenderTimeCodeFont (mHevcCommon->GetAJAPixelFormat (mPixelFormat),
									GetDisplayWidth (mVideoFormat),
									GetDisplayHeight (mVideoFormat));

	return AJA_STATUS_SUCCESS;

}	//	SetupVideo
557 
558 
{
	//	Configures the audio system that captures embedded SDI audio for the
	//	selected input channel.

	//	In multiformat mode, base the audio system on the channel...
	if (mMultiStream && mDevice.features().GetNumAudioSystems() > 1 && UWord(mInputChannel) < mDevice.features().GetNumAudioSystems())
		mAudioSystem = ::NTV2ChannelToAudioSystem(mInputChannel);

	//	Have the audio system capture audio from the designated device input (i.e., ch1 uses SDIIn1, ch2 uses SDIIn2, etc.)...
	mDevice.SetAudioSystemInputSource (mAudioSystem, NTV2_AUDIO_EMBEDDED, ::NTV2ChannelToEmbeddedAudioInput (mInputChannel));

	//	Capture the maximum number of channels the device supports, at 48kHz
	mNumAudioChannels = mDevice.features().GetMaxAudioChannels();
	mDevice.SetNumberAudioChannels (mNumAudioChannels, mAudioSystem);
	mDevice.SetAudioRate (NTV2_AUDIO_48K, mAudioSystem);
	//	NOTE(review): a statement was lost in the doc extraction here (possibly an
	//	audio loopback or embedded-audio-clock setting) -- confirm against SDK sources.

	//	The on-device audio buffer should be 4MB to work best across all devices & platforms...
	mDevice.SetAudioBufferSize (NTV2_AUDIO_BUFFER_BIG, mAudioSystem);

	return AJA_STATUS_SUCCESS;

}	//	SetupAudio
579 
580 
{
	//	Computes buffer sizes from the resolved video/pixel format, then allocates
	//	and links the host-side ring buffers consumed by the worker threads.
	mVideoBufferSize = GetVideoActiveSize (mVideoFormat, mPixelFormat, NTV2_VANCMODE_OFF);
	mPicInfoBufferSize = sizeof(HevcPictureInfo)*2;		//	two entries -- second is used for the other field when interlaced
	mEncInfoBufferSize = sizeof(HevcEncodedInfo)*2;
	mAudioBufferSize = NTV2_AUDIOSIZE_MAX;

	//	video input ring (filled by the AutoCirculate capture thread)
	mACInputCircularBuffer.SetAbortFlag (&mGlobalQuit);
	for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
	{
		memset (&mACInputBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
		mACInputBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
		mACInputBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
		mACInputBuffer[bufferNdx].videoDataSize = 0;
		mACInputBuffer[bufferNdx].videoDataSize2 = 0;
		mACInputBuffer[bufferNdx].pInfoBuffer = new uint32_t [mPicInfoBufferSize/4];
		mACInputBuffer[bufferNdx].infoBufferSize = mPicInfoBufferSize;
		mACInputBuffer[bufferNdx].infoDataSize = 0;
		mACInputBuffer[bufferNdx].infoDataSize2 = 0;
		mACInputCircularBuffer.Add (& mACInputBuffer[bufferNdx]);
	}

	//	video raw ring (processed frames awaiting transfer to the codec)
	mVideoRawCircularBuffer.SetAbortFlag (&mGlobalQuit);
	for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
	{
		memset (&mVideoRawBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
		mVideoRawBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
		mVideoRawBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
		mVideoRawBuffer[bufferNdx].videoDataSize = 0;
		mVideoRawBuffer[bufferNdx].videoDataSize2 = 0;
		mVideoRawBuffer[bufferNdx].pInfoBuffer = new uint32_t [mPicInfoBufferSize/4];
		mVideoRawBuffer[bufferNdx].infoBufferSize = mPicInfoBufferSize;
		mVideoRawBuffer[bufferNdx].infoDataSize = 0;
		mVideoRawBuffer[bufferNdx].infoDataSize2 = 0;
		mVideoRawCircularBuffer.Add (& mVideoRawBuffer[bufferNdx]);
	}

	//	video hevc ring (encoded frames; its info buffer holds HevcEncodedInfo, not picture info)
	mVideoHevcCircularBuffer.SetAbortFlag (&mGlobalQuit);
	for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
	{
		memset (&mVideoHevcBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
		mVideoHevcBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
		mVideoHevcBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
		mVideoHevcBuffer[bufferNdx].videoDataSize = 0;
		mVideoHevcBuffer[bufferNdx].videoDataSize2 = 0;
		mVideoHevcBuffer[bufferNdx].pInfoBuffer = new uint32_t [mEncInfoBufferSize/4];
		mVideoHevcBuffer[bufferNdx].infoBufferSize = mEncInfoBufferSize;
		mVideoHevcBuffer[bufferNdx].infoDataSize = 0;
		mVideoHevcBuffer[bufferNdx].infoDataSize2 = 0;
		mVideoHevcCircularBuffer.Add (& mVideoHevcBuffer[bufferNdx]);
	}

	if (mWithAudio)
	{
		//	audio input ring (only when audio capture was requested)
		mAudioInputCircularBuffer.SetAbortFlag (&mGlobalQuit);
		for (unsigned bufferNdx = 0; bufferNdx < AUDIO_RING_SIZE; bufferNdx++ )
		{
			memset (&mAudioInputBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
			mAudioInputBuffer[bufferNdx].pAudioBuffer = new uint32_t [mAudioBufferSize/4];
			mAudioInputBuffer[bufferNdx].audioBufferSize = mAudioBufferSize;
			mAudioInputBuffer[bufferNdx].audioDataSize = 0;
			mAudioInputCircularBuffer.Add (& mAudioInputBuffer[bufferNdx]);
		}
	}

}	//	SetupHostBuffers
651 
652 
{
	//	Routes the SDI input(s) to the frame store(s):  SDI 1-4 are set to receive,
	//	SDI 5-8 to transmit; level-B inputs are converted to level-A where needed,
	//	then a CNTV2SignalRouter applies the crosspoint connections.

	//	setup sdi io
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL1, false);
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL2, false);
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL3, false);
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL4, false);
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL5, true);
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL6, true);
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL7, true);
	mDevice.SetSDITransmitEnable (NTV2_CHANNEL8, true);

	//	Give the device some time to lock to the input signal...
	mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);

	//	When input is 3Gb convert to 3Ga for capture (no RGB support?)
	bool is3Gb = false;
	mDevice.GetSDIInput3GbPresent (is3Gb, mInputChannel);

	if (mQuad && mTsi)
	{
		if (m60p)
		{
			//	NOTE(review): the statements for this branch were lost in the doc
			//	extraction (presumably per-channel level conversion setup) --
			//	confirm against the SDK sources.
		}
		else
		{
			//	NOTE(review): statements lost in the doc extraction here as well.
		}
		//	NOTE(review): additional statements following the if/else were lost.
	}
	else if (mQuad)
	{
		//	NOTE(review): the quad (squares) branch statements were lost in the
		//	doc extraction -- confirm against the SDK sources.
	}
	else
	{
		mDevice.SetSDIInLevelBtoLevelAConversion (mInputChannel, is3Gb);
		mDevice.SetSDIOutLevelAtoLevelBConversion (mOutputChannel, false);
	}

	//	Use a "Routing" object, which handles the details of writing
	//	the appropriate values into the appropriate device registers...
	CNTV2SignalRouter router;

	if (mQuad && mTsi)
	{
		if (m60p || !is3Gb)
		{
			//	NOTE(review): router connection statements lost in the doc extraction.
		}
		else
		{
			//	NOTE(review): router connection statements lost in the doc extraction.
		}
		//	NOTE(review): additional router connections lost here.
	}
	else
	{
		//	NOTE(review): router connection statements lost in the doc extraction.
	}

	//	Add this signal routing (or replace if not doing multistream)...
	mDevice.ApplySignalRoute (router, !mMultiStream);

	//	Give the device some time to lock to the input signal...
	mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);

}	//	RouteInputSignal
754 
755 
{
	//	Primes AutoCirculate:  16 device frame buffers for capture on the input
	//	channel (the old comment said 8, which disagreed with the code), plus an
	//	8-frame output AutoCirculate when running in VIF mode.

	//	Tell capture AutoCirculate to use 16 frame buffers on the device...
	mDevice.AutoCirculateStop (mInputChannel);
	mDevice.AutoCirculateInitForInput (mInputChannel, 16,	//	Frames to circulate
							mWithAudio ? mAudioSystem : NTV2_AUDIOSYSTEM_INVALID,	//	Which audio system (if any)?
							AUTOCIRCULATE_WITH_RP188);		//	With RP188?

	if (mVif)
	{
		mDevice.AutoCirculateStop (mOutputChannel);
		mDevice.AutoCirculateInitForOutput (mOutputChannel, 8);		//	Frames to circulate
	}
}	//	SetupInputAutoCirculate
770 
771 
{
	//	Warn (but do not fail) when no signal is detected at start time...
	if (mDevice.GetInputVideoFormat (mInputSource) == NTV2_FORMAT_UNKNOWN)
		cout << endl << "## WARNING: No video signal present on the input connector" << endl;

	//	Start the playout and capture threads...
	//	NOTE(review): the thread-start calls (StartVideoInputThread() and the other
	//	worker-thread starters) were lost in the doc extraction -- confirm against
	//	the SDK sources.
	if (mWithAudio)
	{
		//	NOTE(review): the audio file thread start call appears to be missing here.
	}

	return AJA_STATUS_SUCCESS;

}	//	Run
791 
792 
793 // This is where we will start the video input thread
795 {
796  mACInputThread.Attach(VideoInputThreadStatic, this);
797  mACInputThread.SetPriority(AJA_ThreadPriority_High);
798  mACInputThread.Start();
799 
800 } // StartVideoInputThread
801 
802 
803 // The video input thread static callback
804 void NTV2EncodeHEVC::VideoInputThreadStatic (AJAThread * pThread, void * pContext)
805 {
806  (void) pThread;
807 
808  NTV2EncodeHEVC * pApp (reinterpret_cast <NTV2EncodeHEVC *> (pContext));
809  pApp->VideoInputWorker ();
810 
811 } // VideoInputThreadStatic
812 
813 
{
	//	Capture worker:  opens its own handle to the device, runs AutoCirculate on
	//	the input channel, and for every captured frame fills a buffer from the
	//	A/C input ring (and the audio ring when enabled) with video, audio,
	//	timecode and per-picture codec info.
	CNTV2Card ntv2Device;
	AUTOCIRCULATE_TRANSFER inputXfer;

	//	Open the device...
	if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
		{ cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }

	ntv2Device.SubscribeInputVerticalEvent (mInputChannel);

	//	start AutoCirculate running...
	ntv2Device.AutoCirculateStart (mInputChannel);

	while (!mGlobalQuit)
	{
		AUTOCIRCULATE_STATUS acStatus;
		ntv2Device.AutoCirculateGetStatus (mInputChannel, acStatus);

		//	wait for captured frame
		if (acStatus.IsRunning() && acStatus.HasAvailableInputFrame())
		{
			//	At this point, there's at least one fully-formed frame available in the device's
			//	frame buffer to transfer to the host. Reserve an AVHevcDataBuffer to "produce", and
			//	use it in the next transfer from the device...
			AVHevcDataBuffer *	pVideoData	(mACInputCircularBuffer.StartProduceNextBuffer ());
			if (pVideoData)
			{
				//	setup buffer pointers for transfer
				inputXfer.SetVideoBuffer (pVideoData->pVideoBuffer, pVideoData->videoBufferSize);
				inputXfer.SetAudioBuffer (AJA_NULL, 0);

				AVHevcDataBuffer *	pAudioData = AJA_NULL;
				if (mWithAudio)
				{
					//	NOTE: if the audio ring is full this returns null and the
					//	frame's audio is simply dropped (video still transfers)
					pAudioData = mAudioInputCircularBuffer.StartProduceNextBuffer ();
					if (pAudioData)
						inputXfer.SetAudioBuffer (pAudioData->pAudioBuffer, pAudioData->audioBufferSize);
				}

				//	do the transfer from the device into our host AvaDataBuffer...
				ntv2Device.AutoCirculateTransfer (mInputChannel, inputXfer);

				//	get the video data size
				pVideoData->videoDataSize = pVideoData->videoBufferSize;
				pVideoData->audioDataSize = 0;
				pVideoData->lastFrame = mLastFrame;

				if (mWithAudio && pAudioData)
				{
					//	get the audio data size
					pAudioData->audioDataSize = inputXfer.acTransferStatus.acAudioTransferSize;
					pAudioData->lastFrame = mLastFrame;
				}

				if (mWithAnc)
				{
					//	get the sdi input anc data (RP188 timecode) for the burn-in
					NTV2_RP188	timecode;
					inputXfer.GetInputTimeCode (timecode);
					pVideoData->timeCodeDBB = timecode.fDBB;
					pVideoData->timeCodeLow = timecode.fLo;
					pVideoData->timeCodeHigh = timecode.fHi;
				}

				if (mWithInfo)
				{
					//	get picture and additional data pointers
					HevcPictureInfo * pInfo = (HevcPictureInfo*)pVideoData->pInfoBuffer;
					HevcPictureData * pPicData = &pInfo->pictureData;

					//	initialize info buffer to 0
					memset(pInfo, 0, pVideoData->infoBufferSize);

					//	calculate pts based on 90 KHz clock tick
					uint64_t pts = ((uint64_t)mTimeBase.FramesToMicroseconds(mVideoInputFrameCount))*90000/1000000;

					//	set serial number, pts and picture number
					pPicData->serialNumber = mVideoInputFrameCount;			//	can be anything
					pPicData->ptsValueLow = (uint32_t)(pts & 0xffffffff);	//	(frame 5720000@60 test roll over)
					pPicData->ptsValueHigh = (uint32_t)((pts >> 32) & 0x1);	//	only use 1 bit
					pPicData->pictureNumber = mVideoInputFrameCount + 1;	//	must count starting with 1

					//	set info data size
					pVideoData->infoDataSize = sizeof(HevcPictureData);

					if(mInterlaced)
					{
						//	interlaced:  two picture-info entries per frame, one per field
						pPicData->serialNumber = mVideoInputFrameCount*2;
						pPicData->pictureNumber = mVideoInputFrameCount*2 + 1;

						//	get picture and additional data pointers (second entry)
						pInfo = (HevcPictureInfo*)(pVideoData->pInfoBuffer + sizeof(HevcPictureInfo)/4);
						pPicData = &pInfo->pictureData;

						//	add half a frame time to pts
						pts = pts + (uint64_t)mTimeBase.FramesToMicroseconds(1)*90000/1000000/2;

						//	set serial number, pts and picture number
						pPicData->serialNumber = mVideoInputFrameCount*2 + 1;
						pPicData->ptsValueLow = (uint32_t)(pts & 0xffffffff);
						pPicData->ptsValueHigh = (uint32_t)((pts >> 32) & 0x1);
						pPicData->pictureNumber = mVideoInputFrameCount*2 + 2;

						//	set info data size
						pVideoData->infoDataSize2 = sizeof(HevcPictureData);
					}
				}

				//	announce the final captured frame exactly once
				if(pVideoData->lastFrame && !mLastFrameInput)
				{
					printf ( "\nCapture last frame number %d\n", mVideoInputFrameCount );
					mLastFrameInput = true;
				}

				mVideoInputFrameCount++;

				if (mWithAudio && pAudioData)
				{
					mAudioInputCircularBuffer.EndProduceNextBuffer ();
				}

				//	signal that we're done "producing" the frame, making it available for future "consumption"...
				mACInputCircularBuffer.EndProduceNextBuffer ();
			}	//	if pVideoData (ring buffer slot acquired)
		}	//	if A/C running and frame(s) are available for transfer
		else
		{
			//	Either AutoCirculate is not running, or there were no frames available on the device to transfer.
			//	Rather than waste CPU cycles spinning, waiting until a frame becomes available, it's far more
			//	efficient to wait for the next input vertical interrupt event to get signaled...
			ntv2Device.WaitForInputVerticalInterrupt (mInputChannel);
		}
	}	//	loop til quit signaled

	//	Stop AutoCirculate...
	ntv2Device.AutoCirculateStop (mInputChannel);
	ntv2Device.UnsubscribeInputVerticalEvent (mInputChannel);

}	//	VideoInputWorker
954 
955 
956 // This is where we start the video process thread
958 {
959  mVideoProcessThread.Attach(VideoProcessThreadStatic, this);
960  mVideoProcessThread.SetPriority(AJA_ThreadPriority_High);
961  mVideoProcessThread.Start();
962 
963 } // StartVideoProcessThread
964 
965 
966 // The video process static callback
967 void NTV2EncodeHEVC::VideoProcessThreadStatic (AJAThread * pThread, void * pContext)
968 {
969  (void) pThread;
970 
971  NTV2EncodeHEVC * pApp (reinterpret_cast <NTV2EncodeHEVC *> (pContext));
972  pApp->VideoProcessWorker ();
973 
974 } // VideoProcessThreadStatic
975 
976 
{
	//	Processing worker:  consumes captured frames from the A/C input ring,
	//	processes each one into a slot of the raw-video ring (via
	//	ProcessVideoFrame), and releases both ring-buffer slots.
	while (!mGlobalQuit)
	{
		//	wait for the next video input buffer
		AVHevcDataBuffer *	pSrcFrameData (mACInputCircularBuffer.StartConsumeNextBuffer ());
		if (pSrcFrameData)
		{
			//	wait for the next video raw buffer
			AVHevcDataBuffer *	pDstFrameData (mVideoRawCircularBuffer.StartProduceNextBuffer ());
			if (pDstFrameData)
			{
				//	do something useful with the frame data...
				ProcessVideoFrame(pSrcFrameData, pDstFrameData, mVideoProcessFrameCount);

				mVideoProcessFrameCount++;

				//	release the video raw buffer
				mVideoRawCircularBuffer.EndProduceNextBuffer ();
			}

			//	release the video input buffer (even when no raw slot was available,
			//	in which case the frame is dropped)
			mACInputCircularBuffer.EndConsumeNextBuffer ();

		}
	}	//	loop til quit signaled

}	//	VideoProcessWorker
1005 
1006 
1007 // This is where we start the codec raw thread
1009 {
1010  mCodecRawThread.Attach(CodecRawThreadStatic, this);
1011  mCodecRawThread.SetPriority(AJA_ThreadPriority_High);
1012  mCodecRawThread.Start();
1013 
1014 } // StartCodecRawThread
1015 
1016 
1017 // The codec raw static callback
1018 void NTV2EncodeHEVC::CodecRawThreadStatic (AJAThread * pThread, void * pContext)
1019 {
1020  (void) pThread;
1021 
1022  NTV2EncodeHEVC * pApp (reinterpret_cast <NTV2EncodeHEVC *> (pContext));
1023  pApp->CodecRawWorker ();
1024 
1025 } // CodecRawThreadStatic
1026 
1027 
1029 {
1030  CNTV2Card ntv2Device;
1031  CNTV2m31 * m31;
1032  AUTOCIRCULATE_TRANSFER outputXfer;
1033 
1034  // Open the device...
1035  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
1036  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
1037 
1038  // Allocate our M31 helper class and our HEVC common class
1039  m31 = new CNTV2m31 (&ntv2Device);
1040 
1041  while (!mGlobalQuit)
1042  {
1043  // wait for the next raw video frame
1044  AVHevcDataBuffer * pFrameData (mVideoRawCircularBuffer.StartConsumeNextBuffer ());
1045  if (pFrameData)
1046  {
1047  if (!mLastFrameRaw)
1048  {
1049  if (mVif)
1050  {
1051  outputXfer.SetBuffers (pFrameData->pVideoBuffer, pFrameData->videoBufferSize, AJA_NULL, 0, AJA_NULL, 0);
1052 
1053  ntv2Device.AutoCirculateTransfer (mOutputChannel, outputXfer);
1054 
1055  if (mCodecRawFrameCount == 3)
1056  {
1057  // start AutoCirculate running...
1058  ntv2Device.AutoCirculateStart (mOutputChannel);
1059  }
1060  }
1061  else
1062  {
1063  // transfer the raw video frame to the codec
1064  if (mInterlaced)
1065  {
1066  if (mWithInfo)
1067  {
1068  m31->RawTransfer(mPreset, mEncodeChannel,
1069  (uint8_t*)pFrameData->pVideoBuffer,
1070  pFrameData->videoDataSize,
1071  (uint8_t*)pFrameData->pInfoBuffer,
1072  pFrameData->infoDataSize,
1073  false, false);
1074 
1075  m31->RawTransfer(mPreset, mEncodeChannel,
1076  (uint8_t*)pFrameData->pVideoBuffer,
1077  pFrameData->videoDataSize,
1078  ((uint8_t*)pFrameData->pInfoBuffer) + sizeof(HevcPictureInfo),
1079  pFrameData->infoDataSize2,
1080  true, pFrameData->lastFrame);
1081  }
1082  else
1083  {
1084  m31->RawTransfer(mPreset, mEncodeChannel,
1085  (uint8_t*)pFrameData->pVideoBuffer,
1086  pFrameData->videoDataSize,
1087  false, false);
1088 
1089  m31->RawTransfer(mPreset, mEncodeChannel,
1090  (uint8_t*)pFrameData->pVideoBuffer,
1091  pFrameData->videoDataSize,
1092  true, pFrameData->lastFrame);
1093  }
1094  }
1095  else
1096  {
1097  if (mWithInfo)
1098  {
1099  m31->RawTransfer(mEncodeChannel,
1100  (uint8_t*)pFrameData->pVideoBuffer,
1101  pFrameData->videoDataSize,
1102  (uint8_t*)pFrameData->pInfoBuffer,
1103  pFrameData->infoDataSize,
1104  pFrameData->lastFrame);
1105  }
1106  else
1107  {
1108  m31->RawTransfer(mEncodeChannel,
1109  (uint8_t*)pFrameData->pVideoBuffer,
1110  pFrameData->videoDataSize,
1111  pFrameData->lastFrame);
1112  }
1113  }
1114  }
1115  if (pFrameData->lastFrame)
1116  {
1117  mLastFrameRaw = true;
1118  }
1119 
1120  mCodecRawFrameCount++;
1121  }
1122 
1123  // release the raw video frame
1124  mVideoRawCircularBuffer.EndConsumeNextBuffer ();
1125  }
1126  } // loop til quit signaled
1127 
1128  if (mVif)
1129  {
1130  // Stop AutoCirculate...
1131  ntv2Device.AutoCirculateStop (mOutputChannel);
1132  }
1133 
1134  delete m31;
1135 
1136 } // CodecRawWorker
1137 
1138 
1139 // This is where we will start the codec hevc thread
1141 {
1142  mCodecHevcThread.Attach(CodecHevcThreadStatic, this);
1143  mCodecHevcThread.SetPriority(AJA_ThreadPriority_High);
1144  mCodecHevcThread.Start();
1145 
1146 } // StartCodecHevcThread
1147 
1148 
1149 // The codec hevc static callback
1150 void NTV2EncodeHEVC::CodecHevcThreadStatic (AJAThread * pThread, void * pContext)
1151 {
1152  (void) pThread;
1153 
1154  NTV2EncodeHEVC * pApp (reinterpret_cast <NTV2EncodeHEVC *> (pContext));
1155  pApp->CodecHevcWorker ();
1156 
1157 } // CodecHevcThreadStatic
1158 
1159 
1161 {
1162  CNTV2Card ntv2Device;
1163  CNTV2m31 * m31;
1164 
1165  // Open the device...
1166  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, ntv2Device))
1167  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
1168 
1169  // Allocate our M31 helper class and our HEVC common class
1170  m31 = new CNTV2m31 (&ntv2Device);
1171 
1172  while (!mGlobalQuit)
1173  {
1174  // wait for the next hevc frame
1175  AVHevcDataBuffer * pFrameData (mVideoHevcCircularBuffer.StartProduceNextBuffer ());
1176  if (pFrameData)
1177  {
1178  if (!mLastFrameHevc)
1179  {
1180  if (mInterlaced)
1181  {
1182  // get field 1 video and info buffer and size
1183  uint8_t* pVideoBuffer = (uint8_t*)pFrameData->pVideoBuffer;
1184  uint8_t* pInfoBuffer = (uint8_t*)pFrameData->pInfoBuffer;
1185  uint32_t videoBufferSize = pFrameData->videoBufferSize;
1186  uint32_t infoBufferSize = sizeof(HevcEncodedInfo);
1187 
1188  // transfer an hevc field 1 from the codec including encoded information
1189  m31->EncTransfer(mEncodeChannel,
1190  pVideoBuffer,
1191  videoBufferSize,
1192  pInfoBuffer,
1193  infoBufferSize,
1194  pFrameData->videoDataSize,
1195  pFrameData->infoDataSize,
1196  pFrameData->lastFrame);
1197 
1198  // round the video size up
1199  pFrameData->videoDataSize = mHevcCommon->AlignDataBuffer(pVideoBuffer,
1200  videoBufferSize,
1201  pFrameData->videoDataSize,
1202  8, 0xff);
1203  // round the info size up
1204  pFrameData->infoDataSize = mHevcCommon->AlignDataBuffer(pInfoBuffer,
1205  infoBufferSize,
1206  pFrameData->infoDataSize,
1207  8, 0);
1208 
1209  // get field 2 video and info buffer and size
1210  pVideoBuffer = ((uint8_t*)pFrameData->pVideoBuffer) + pFrameData->videoDataSize;
1211  pInfoBuffer = ((uint8_t*)pFrameData->pInfoBuffer) + sizeof(HevcEncodedInfo);
1212  videoBufferSize = pFrameData->videoBufferSize - pFrameData->videoDataSize;
1213  infoBufferSize = sizeof(HevcEncodedInfo);
1214 
1215  // transfer an hevc field 2 from the codec including encoded information
1216  m31->EncTransfer(mEncodeChannel,
1217  pVideoBuffer,
1218  videoBufferSize,
1219  pInfoBuffer,
1220  infoBufferSize,
1221  pFrameData->videoDataSize2,
1222  pFrameData->infoDataSize2,
1223  pFrameData->lastFrame);
1224 
1225  // round the video size up
1226  pFrameData->videoDataSize2 = mHevcCommon->AlignDataBuffer(pVideoBuffer,
1227  videoBufferSize,
1228  pFrameData->videoDataSize2,
1229  8, 0xff);
1230  // round the info size up
1231  pFrameData->infoDataSize2 = mHevcCommon->AlignDataBuffer(pInfoBuffer,
1232  infoBufferSize,
1233  pFrameData->infoDataSize2,
1234  8, 0);
1235  }
1236  else
1237  {
1238  // transfer an hevc frame from the codec including encoded information
1239  m31->EncTransfer(mEncodeChannel,
1240  (uint8_t*)pFrameData->pVideoBuffer,
1241  pFrameData->videoBufferSize,
1242  (uint8_t*)pFrameData->pInfoBuffer,
1243  pFrameData->infoBufferSize,
1244  pFrameData->videoDataSize,
1245  pFrameData->infoDataSize,
1246  pFrameData->lastFrame);
1247 
1248  // round the video size up
1249  pFrameData->videoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pVideoBuffer,
1250  pFrameData->videoBufferSize,
1251  pFrameData->videoDataSize,
1252  8, 0xff);
1253  // round the info size up
1254  pFrameData->infoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pInfoBuffer,
1255  pFrameData->infoBufferSize,
1256  pFrameData->infoDataSize,
1257  8, 0);
1258  }
1259 
1260  if (pFrameData->lastFrame)
1261  {
1262  mLastFrameHevc = true;
1263  }
1264 
1265  mCodecHevcFrameCount++;
1266  }
1267 
1268  // release and recycle the buffer...
1269  mVideoHevcCircularBuffer.EndProduceNextBuffer ();
1270  }
1271  } // loop til quit signaled
1272 
1273  delete m31;
1274 } // EncTransferFrames
1275 
1276 
1277 // This is where we start the video file writer thread
1279 {
1280  mVideoFileThread.Attach(VideoFileThreadStatic, this);
1281  mVideoFileThread.SetPriority(AJA_ThreadPriority_High);
1282  mVideoFileThread.Start();
1283 
1284 } // StartVideoFileThread
1285 
1286 
1287 // The file writer static callback
1288 void NTV2EncodeHEVC::VideoFileThreadStatic (AJAThread * pThread, void * pContext)
1289 {
1290  (void) pThread;
1291 
1292  NTV2EncodeHEVC * pApp (reinterpret_cast <NTV2EncodeHEVC *> (pContext));
1293  pApp->VideoFileWorker ();
1294 
1295 } // VideoFileStatic
1296 
1297 
1299 {
1300  while (!mGlobalQuit)
1301  {
1302  // wait for the next codec hevc frame
1303  AVHevcDataBuffer * pFrameData (mVideoHevcCircularBuffer.StartConsumeNextBuffer ());
1304  if (pFrameData)
1305  {
1306  if (!mLastFrameVideo)
1307  {
1308  // write the frame / fields hevc to the output file
1309  mHevcCommon->WriteHevcData(pFrameData->pVideoBuffer, pFrameData->videoDataSize + pFrameData->videoDataSize2);
1310 
1311  if (mWithInfo)
1312  {
1313  // write the frame / field 1 encoded data to the output file
1314  mHevcCommon->WriteEncData(pFrameData->pInfoBuffer, pFrameData->infoDataSize);
1315  // write the field 2 encoded data to the output file
1316  mHevcCommon->WriteEncData(pFrameData->pInfoBuffer + sizeof(HevcEncodedInfo)/4, pFrameData->infoDataSize2);
1317  }
1318 
1319  if (pFrameData->lastFrame)
1320  {
1321  printf ( "Video file last frame number %d\n", mVideoFileFrameCount );
1322  mLastFrameVideo = true;
1323  }
1324 
1325  mVideoFileFrameCount++;
1326  }
1327 
1328  // release the hevc buffer
1329  mVideoHevcCircularBuffer.EndConsumeNextBuffer ();
1330  }
1331  } // loop til quit signaled
1332 
1333 } // VideoFileWorker
1334 
1335 
1336 // This is where we start the audio file writer thread
1338 {
1339  mAudioFileThread.Attach(AudioFileThreadStatic, this);
1340  mAudioFileThread.SetPriority(AJA_ThreadPriority_High);
1341  mAudioFileThread.Start();
1342 
1343 } // StartAudioFileThread
1344 
1345 
1346 // The file writer static callback
1347 void NTV2EncodeHEVC::AudioFileThreadStatic (AJAThread * pThread, void * pContext)
1348 {
1349  (void) pThread;
1350 
1351  NTV2EncodeHEVC * pApp (reinterpret_cast <NTV2EncodeHEVC *> (pContext));
1352  pApp->AudioFileWorker ();
1353 
1354 } // AudioFileStatic
1355 
1356 
1358 {
1359  while (!mGlobalQuit)
1360  {
1361  // wait for the next codec hevc frame
1362  AVHevcDataBuffer * pFrameData (mAudioInputCircularBuffer.StartConsumeNextBuffer ());
1363  if (pFrameData)
1364  {
1365  if (!mLastFrameAudio)
1366  {
1367  // write the audio samples to the output file
1368  mHevcCommon->WriteAiffData(pFrameData->pAudioBuffer, mNumAudioChannels, pFrameData->audioDataSize/mNumAudioChannels/4);
1369 
1370  if (pFrameData->lastFrame)
1371  {
1372  printf ( "Audio file last frame number %d\n", mAudioFileFrameCount );
1373  mLastFrameAudio = true;
1374  }
1375  }
1376 
1377  mAudioFileFrameCount++;
1378 
1379  // release the hevc buffer
1380  mAudioInputCircularBuffer.EndConsumeNextBuffer ();
1381  }
1382  } // loop til quit signaled
1383 
1384 } // AudioFileWorker
1385 
1386 
1388 
1389 
1391 {
1392  AUTOCIRCULATE_STATUS ACStatus;
1393  mDevice.AutoCirculateGetStatus (mInputChannel, ACStatus);
1394  outStatus.framesProcessed = ACStatus.GetProcessedFrameCount();
1395  outStatus.framesDropped = ACStatus.GetDroppedFrameCount();
1396  outStatus.bufferLevel = ACStatus.GetBufferLevel();
1397 
1398 } // GetStatus
1399 
1400 
1401 AJAStatus NTV2EncodeHEVC::ProcessVideoFrame (AVHevcDataBuffer * pSrcFrame, AVHevcDataBuffer * pDstFrame, uint32_t frameNumber)
1402 {
1403  // Override this function to use the frame data in the way your application requires
1404  memcpy(pDstFrame->pVideoBuffer, pSrcFrame->pVideoBuffer, pSrcFrame->videoDataSize);
1405  pDstFrame->videoDataSize = pSrcFrame->videoDataSize;
1406  pDstFrame->timeCodeDBB = pSrcFrame->timeCodeDBB;
1407  pDstFrame->timeCodeLow = pSrcFrame->timeCodeLow;
1408  pDstFrame->timeCodeHigh = pSrcFrame->timeCodeHigh;
1409  pDstFrame->lastFrame = pSrcFrame->lastFrame;
1410  if (mWithInfo)
1411  {
1412  memcpy(pDstFrame->pInfoBuffer, pSrcFrame->pInfoBuffer, pSrcFrame->infoDataSize + pSrcFrame->infoDataSize2);
1413  pDstFrame->infoDataSize = pSrcFrame->infoDataSize;
1414  pDstFrame->infoDataSize2 = pSrcFrame->infoDataSize2;
1415  }
1416 
1417  if (mWithAnc)
1418  {
1419  std::string timeString;
1420 
1421  mTimeCode.Set(frameNumber);
1422  mTimeCode.SetStdTimecodeForHfr(false);
1423  mTimeCode.QueryString(timeString, mTimeBase, false);
1424  mTimeCodeBurn.BurnTimeCode((char*)pDstFrame->pVideoBuffer, timeString.c_str(), 10);
1425  mTimeCode.SetRP188(pDstFrame->timeCodeDBB, pDstFrame->timeCodeLow, pDstFrame->timeCodeHigh, mTimeBase);
1426  mTimeCode.QueryString(timeString, mTimeBase, false);
1427  mTimeCodeBurn.BurnTimeCode((char*)pDstFrame->pVideoBuffer, timeString.c_str(), 20);
1428  }
1429 
1430  return AJA_STATUS_SUCCESS;
1431 
1432 } // ProcessFrame
virtual void RouteInputSignal(void)
Sets up device routing for capture.
NTV2FrameRate GetNTV2FrameRateFromVideoFormat(const NTV2VideoFormat inVideoFormat)
Definition: ntv2utils.cpp:3630
virtual NTV2VideoFormat GetSDIInputVideoFormat(NTV2Channel inChannel, bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given SDI input source.
#define VIDEO_RING_SIZE
Specifies the device's internal clock.
Definition: ntv2enums.h:1459
virtual bool SetReference(const NTV2ReferenceSource inRefSource, const bool inKeepFramePulseSelect=(0))
Sets the device's clock reference source. See Video Output Clocking & Synchronization for more inform...
virtual bool ReleaseStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Releases exclusive use of the AJA device for the given process, permitting other processes to acquire...
virtual ~NTV2EncodeHEVC()
void SetAJAFrameRate(AJA_FrameRate ajaFrameRate)
Definition: timebase.cpp:164
virtual bool AddConnection(const NTV2InputXptID inSignalInput, const NTV2OutputXptID inSignalOutput=NTV2_XptBlack)
Adds a connection between a widget's signal input (sink) and another widget's signal output (source)...
AJAStatus Add(FrameDataPtr pInFrameData)
Appends a new frame buffer to me, increasing my frame storage capacity by one frame.
uint32_t * pAudioBuffer
Pointer to host audio buffer.
I interrogate and control an AJA video/audio capture/playout device.
Definition: ntv2card.h:28
NTV2FrameBufferFormat
Identifies a particular video frame buffer pixel format. See Device Frame Buffer Formats for details...
Definition: ntv2enums.h:219
bool SetBuffers(ULWord *pInVideoBuffer, const ULWord inVideoByteCount, ULWord *pInAudioBuffer, const ULWord inAudioByteCount, ULWord *pInANCBuffer, const ULWord inANCByteCount, ULWord *pInANCF2Buffer=NULL, const ULWord inANCF2ByteCount=0)
Sets my buffers for use in a subsequent call to CNTV2Card::AutoCirculateTransfer. ...
virtual bool SetVideoFormat(const NTV2VideoFormat inVideoFormat, const bool inIsAJARetail=(!(0)), const bool inKeepVancSettings=(0), const NTV2Channel inChannel=NTV2_CHANNEL1)
Configures the AJA device to handle a specific video format.
virtual bool Set4kSquaresEnable(const bool inIsEnabled, const NTV2Channel inChannel)
Enables or disables SMPTE 425 "2K quadrants" mode for the given FrameStore bank on the device...
AJAStatus
Definition: types.h:380
void Set(uint32_t frame)
Definition: timecode.cpp:420
virtual void AudioFileWorker(void)
Repeatedly removes audio samples from the audio input ring and writes them to the audio output file...
static uint64_t GetPid()
Definition: process.cpp:35
ULWord GetBufferLevel(void) const
double GetFramesPerSecond(const NTV2FrameRate inFrameRate)
Definition: ntv2utils.cpp:1833
#define AJA_FAILURE(_status_)
Definition: types.h:373
NTV2InputSource NTV2ChannelToInputSource(const NTV2Channel inChannel, const NTV2IOKinds inKinds=NTV2_IOKINDS_SDI)
Definition: ntv2utils.cpp:5132
virtual void StartVideoProcessThread(void)
Start the video process thread.
Capture (input) mode, which writes into device SDRAM.
Definition: ntv2enums.h:1243
uint32_t videoBufferSize
Size of host video buffer (bytes)
struct HevcEncodedInfo HevcEncodedInfo
Declares the AJATime class.
virtual AJAStatus SetPriority(AJAThreadPriority priority)
Definition: thread.cpp:133
virtual bool GetSDIInput3GbPresent(bool &outValue, const NTV2Channel channel)
virtual void VideoInputWorker(void)
Repeatedly captures video frames using AutoCirculate and add them to the video input ring...
FrameDataPtr StartConsumeNextBuffer(void)
The thread that&#39;s responsible for processing incoming frames – the consumer – calls this function t...
Obtain audio samples from the audio that&#39;s embedded in the video HANC.
Definition: ntv2enums.h:2007
AJAStatus CreateAiffFile(const std::string &inFileName, uint32_t numChannels, uint32_t maxFrames, uint32_t bufferSize)
virtual M31VideoPreset GetCodecPreset(void)
Get the codec preset.
Definition: json.hpp:5362
virtual bool SetAudioRate(const NTV2AudioRate inRate, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2AudioRate for the given Audio System.
Definition: ntv2audio.cpp:205
virtual AJAStatus Start()
Definition: thread.cpp:91
virtual bool EnableInputInterrupt(const NTV2Channel channel=NTV2_CHANNEL1)
Allows the CNTV2Card instance to wait for and respond to input vertical blanking interrupts originati...
virtual bool SubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Causes me to be notified when an input vertical blanking interrupt occurs on the given input channel...
Declares the NTV2EncodeHEVC class.
void EndConsumeNextBuffer(void)
The consumer thread calls this function to signal that it has finished processing the frame it obtain...
#define false
virtual bool AutoCirculateGetStatus(const NTV2Channel inChannel, AUTOCIRCULATE_STATUS &outStatus)
Returns the current AutoCirculate status for the given channel.
NTV2EncodeHEVC(const std::string inDeviceSpecifier="0", const NTV2Channel inChannel=NTV2_CHANNEL1, const M31VideoPreset inM31Preset=M31_FILE_1280X720_420_8_5994p, const NTV2FrameBufferFormat inPixelFormat=NTV2_FBF_10BIT_YCBCR_420PL2, const bool inQuadMode=(0), const uint32_t inAudioChannels=0, const bool inTimeCodeBurn=(0), const bool inInfoMode=(0), const bool inTsiMode=(0), const uint32_t inMaxFrames=0xffffffff)
Constructs me using the given settings.
virtual bool SetFrameBufferFormat(NTV2Channel inChannel, NTV2FrameBufferFormat inNewFormat, bool inIsAJARetail=(!(0)), NTV2HDRXferChars inXferChars=NTV2_VPID_TC_SDR_TV, NTV2HDRColorimetry inColorimetry=NTV2_VPID_Color_Rec709, NTV2HDRLuminance inLuminance=NTV2_VPID_Luminance_YCbCr)
Sets the frame buffer format for the given FrameStore on the AJA device.
NTV2EmbeddedAudioInput NTV2ChannelToEmbeddedAudioInput(const NTV2Channel inChannel)
Converts the given NTV2Channel value into its equivalent NTV2EmbeddedAudioInput.
Definition: ntv2utils.cpp:4861
NTV2Channel
These enum values are mostly used to identify a specific widget_framestore. They&#39;re also commonly use...
Definition: ntv2enums.h:1357
mVideoFormat
Definition: ntv2vcam.cpp:801
This class is a collection of widget input-to-output connections that can be applied all-at-once to a...
virtual class DeviceCapabilities & features(void)
Definition: ntv2card.h:148
virtual bool SetMultiFormatMode(const bool inEnable)
Enables or disables multi-format (per channel) device operation. If enabled, each device channel can ...
AJA_FrameRate GetAJAFrameRate(NTV2FrameRate frameRate)
AJAStatus DetermineInputFormat(NTV2VideoFormat sdiFormat, bool quad, NTV2VideoFormat &videoFormat)
int64_t FramesToMicroseconds(int64_t frames, bool round=false) const
Definition: timebase.cpp:223
#define AUDIO_RING_SIZE
uint32_t timeCodeHigh
Time code data high.
mPixelFormat
Definition: ntv2vcam.cpp:710
ULWord GetProcessedFrameCount(void) const
uint32_t audioBufferSize
Size of host audio buffer (bytes)
uint32_t infoBufferSize
Size of the host information buffer (bytes)
This struct replaces the old RP188_STRUCT.
M31VideoPreset
Definition: ntv2m31enums.h:13
virtual bool Active()
Definition: thread.cpp:116
virtual bool SetAudioSystemInputSource(const NTV2AudioSystem inAudioSystem, const NTV2AudioSource inAudioSource, const NTV2EmbeddedAudioInput inEmbeddedInput)
Sets the audio source for the given NTV2AudioSystem on the device.
Definition: ntv2audio.cpp:485
HevcPictureData pictureData
struct HevcPictureData HevcPictureData
Playout (output) mode, which reads from device SDRAM.
Definition: ntv2enums.h:1241
ULWord GetDisplayHeight(const NTV2VideoFormat videoFormat)
Definition: ntv2utils.cpp:4207
virtual bool SetMode(const NTV2Channel inChannel, const NTV2Mode inNewValue, const bool inIsRetail=(!(0)))
Determines if a given FrameStore on the AJA device will be used to capture or playout video...
AJAStatus CreateEncFile(const std::string &inFileName, uint32_t maxFrames)
Invalid or "not found".
Definition: ntv2enums.h:98
static bool GetFirstDeviceFromArgument(const std::string &inArgument, CNTV2Card &outDevice)
Rescans the host, and returns an open CNTV2Card instance for the AJA device that matches a command li...
virtual bool SetSDITransmitEnable(const NTV2Channel inChannel, const bool inEnable)
Sets the specified bidirectional SDI connector to act as an input or an output.
virtual bool AutoCirculateTransfer(const NTV2Channel inChannel, AUTOCIRCULATE_TRANSFER &transferInfo)
Transfers all or part of a frame as specified in the given AUTOCIRCULATE_TRANSFER object to/from the ...
virtual void CodecRawWorker(void)
Repeatedly removes video frames from the raw video ring and transfers them to the codec...
virtual bool AcquireStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Reserves exclusive use of the AJA device for a given process, preventing other processes on the host ...
#define AJA_NULL
Definition: ajatypes.h:167
void SetAbortFlag(const bool *pAbortFlag)
Tells me the boolean variable I should monitor such that when it gets set to "true" will cause any th...
virtual AJAStatus SetupVideo(void)
Sets up everything I need for capturing video.
AJAStatus SetupHEVC(CNTV2m31 *pM31, M31VideoPreset preset, M31Channel encodeChannel, bool multiStream, bool withInfo)
ULWord GetVideoActiveSize(const NTV2VideoFormat inVideoFormat, const NTV2FrameBufferFormat inFBFormat, const NTV2VANCMode inVancMode=NTV2_VANCMODE_OFF)
Definition: ntv2utils.cpp:2858
virtual bool SetEveryFrameServices(const NTV2TaskMode m)
Definition: ntv2card.h:1195
virtual NTV2VideoFormat GetInputVideoFormat(const NTV2InputSource inVideoSource=NTV2_INPUTSOURCE_SDI1, const bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given input source. ...
AJA_PixelFormat GetAJAPixelFormat(NTV2FrameBufferFormat pixelFormat)
static void Sleep(const int32_t inMilliseconds)
Suspends execution of the current thread for a given number of milliseconds.
Definition: systemtime.cpp:284
static void VideoFileThreadStatic(AJAThread *pThread, void *pContext)
This is the video file writer thread&#39;s static callback function that gets called when the thread star...
void WriteEncData(void *pBuffer, uint32_t bufferSize)
2: OEM (recommended): device configured by client application(s) with some driver involvement...
uint32_t AlignDataBuffer(void *pBuffer, uint32_t bufferSize, uint32_t dataSize, uint32_t alignBytes, uint8_t fill)
ULWord acAudioTransferSize
Number of bytes captured into the audio buffer.
virtual bool ApplySignalRoute(const CNTV2SignalRouter &inRouter, const bool inReplace=(0))
Applies the given routing table to the AJA device.
mInputChannel
Definition: ntv2vcam.cpp:1010
void WriteAiffData(void *pBuffer, uint32_t numChannels, uint32_t numSamples)
ULWord GetDroppedFrameCount(void) const
virtual bool SetTsiFrameEnable(const bool inIsEnabled, const NTV2Channel inChannel)
Enables or disables SMPTE 425 two-sample interleave (Tsi) frame mode on the device.
Specifies channel or FrameStore 8 (or the 8th item).
Definition: ntv2enums.h:1366
struct HevcPictureInfo HevcPictureInfo
AJA_EXPORT bool RenderTimeCodeFont(AJA_PixelFormat pixelFormat, uint32_t numPixels, uint32_t numLines)
uint32_t timeCodeLow
Time code data low.
virtual void Quit(void)
Gracefully stops me from running.
Specifies channel or FrameStore 2 (or the 2nd item).
Definition: ntv2enums.h:1360
virtual NTV2DeviceID GetDeviceID(void)
void WriteHevcData(void *pBuffer, uint32_t bufferSize)
virtual void GetStatus(AVHevcStatus &outStatus)
Provides status information about my input (capture) process.
virtual void SetupHostBuffers(void)
Sets up my circular buffers.
uint32_t audioDataSize
Size of audio data (bytes)
virtual void StartCodecHevcThread(void)
Start the codec hevc thread.
virtual void SetupAutoCirculate(void)
Initializes AutoCirculate.
virtual bool AutoCirculateStop(const NTV2Channel inChannel, const bool inAbort=(0))
Stops AutoCirculate for the given channel, and releases the on-device frame buffers that were allocat...
static void CodecHevcThreadStatic(AJAThread *pThread, void *pContext)
This is the codec hevc thread&#39;s static callback function that gets called when the thread starts...
Declares the AJAProcess class.
virtual bool UnsubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Unregisters me so I'm no longer notified when an input VBI is signaled on the given input channel...
virtual AJAStatus Run(void)
Runs me.
virtual void VideoProcessWorker(void)
Repeatedly removes video frames from the video input ring, calls a custom video process method and ad...
This object specifies the information that will be transferred to or from the AJA device in the CNTV2...
virtual bool SetAudioBufferSize(const NTV2AudioBufferSize inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Changes the size of the audio buffer that is used for a given Audio System in the AJA device...
Definition: ntv2audio.cpp:249
virtual bool GetEveryFrameServices(NTV2TaskMode &m)
Definition: ntv2card.h:1194
#define NTV2_AUDIOSIZE_MAX
static void VideoProcessThreadStatic(AJAThread *pThread, void *pContext)
This is the video process thread&#39;s static callback function that gets called when the thread starts...
void EndProduceNextBuffer(void)
The producer thread calls this function to signal that it has finished populating the frame it obtain...
This identifies the first Audio System.
Definition: ntv2enums.h:3897
virtual void StartAudioFileThread(void)
Start the audio file writer thread.
virtual bool WaitForOutputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
static const ULWord kDemoAppSignature((((uint32_t)( 'D'))<< 24)|(((uint32_t)( 'E'))<< 16)|(((uint32_t)( 'M'))<< 8)|(((uint32_t)( 'O'))<< 0))
Declares numerous NTV2 utility functions.
uint32_t videoDataSize2
Size of field 2 video data (bytes)
virtual AJAStatus SetupAudio(void)
Sets up everything I need for capturing audio.
virtual void CodecHevcWorker(void)
Repeatedly transfers hevc frames from the codec and adds them to the hevc ring.
virtual AJAStatus Attach(AJAThreadFunction *pThreadFunction, void *pUserContext)
Definition: thread.cpp:169
FrameDataPtr StartProduceNextBuffer(void)
The thread that&#39;s responsible for providing frames – the producer – calls this function to populate...
virtual void VideoFileWorker(void)
Repeatedly removes hevc frame from the hevc ring and writes them to the hevc output file...
static void VideoInputThreadStatic(AJAThread *pThread, void *pContext)
This is the video input thread&#39;s static callback function that gets called when the thread starts...
void SetStdTimecodeForHfr(bool bStdTc)
Definition: timecode.h:220
virtual bool SetSDIOutLevelAtoLevelBConversion(const UWord inOutputSpigot, const bool inEnable)
Enables or disables 3G level A to 3G level B conversion at the SDI output widget (assuming the AJA de...
This is returned from the CNTV2Card::AutoCirculateGetStatus function.
bool lastFrame
Indicates last captured frame.
Instances of me capture frames in real time from a video signal provided to an input of an AJA device...
uint16_t UWord
Definition: ajatypes.h:221
void QueryString(std::string &str, const AJATimeBase &timeBase, bool bDropFrame, bool bStdTcForHfr, AJATimecodeNotation notation=AJA_TIMECODE_LEGACY)
Definition: timecode.cpp:299
1: Standard/Retail: device configured by AJA ControlPanel, service/daemon, and driver.
Specifies channel or FrameStore 1 (or the first item).
Definition: ntv2enums.h:1359
virtual bool AutoCirculateStart(const NTV2Channel inChannel, const ULWord64 inStartTime=0)
Starts AutoCirculating the specified channel that was previously initialized by CNTV2Card::AutoCircul...
AUTOCIRCULATE_TRANSFER_STATUS acTransferStatus
Contains status information that&#39;s valid after CNTV2Card::AutoCirculateTransfer returns, including the driver buffer level, number of frames processed or dropped, audio and anc transfer byte counts, and a complete FRAME_STAMP that has even more detailed clocking information.
virtual bool AutoCirculateInitForOutput(const NTV2Channel inChannel, const UWord inFrameCount=7, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_INVALID, const ULWord inOptionFlags=0, const UByte inNumChannels=1, const UWord inStartFrameNumber=0, const UWord inEndFrameNumber=0)
Prepares for subsequent AutoCirculate playout, designating a contiguous block of frame buffers on the...
static void AudioFileThreadStatic(AJAThread *pThread, void *pContext)
This is the audio file writer thread&#39;s static callback function that gets called when the thread star...
virtual bool SetSDIInLevelBtoLevelAConversion(const NTV2ChannelSet &inSDIInputs, const bool inEnable)
Enables or disables 3G level B to 3G level A conversion at the SDI input(s).
static void CodecRawThreadStatic(AJAThread *pThread, void *pContext)
This is the codec raw thread&#39;s static callback function that gets called when the thread starts...
uint32_t * pVideoBuffer
Pointer to host video buffer.
ULWord GetDisplayWidth(const NTV2VideoFormat videoFormat)
Definition: ntv2utils.cpp:4199
virtual void StartVideoInputThread(void)
Start the video input thread.
bool GetInputTimeCode(NTV2_RP188 &outTimeCode, const NTV2TCIndex inTCIndex=NTV2_TCINDEX_SDI1) const
Intended for capture, answers with a specific timecode captured in my acTransferStatus member&#39;s acFra...
This structure encapsulates the video and audio buffers used by the HEVC demo applications. The demo programs that employ producer/consumer threads use a fixed number of these buffers.
AJA_EXPORT bool BurnTimeCode(void *pBaseVideoAddress, const std::string &inTimeCodeStr, const uint32_t inYPercent)
NTV2AudioSystem NTV2ChannelToAudioSystem(const NTV2Channel inChannel)
Converts the given NTV2Channel value into its equivalent NTV2AudioSystem.
Definition: ntv2utils.cpp:4869
uint32_t * pInfoBuffer
Picture information (raw) or encode information (hevc)
bool HasAvailableInputFrame(void) const
Specifies channel or FrameStore 4 (or the 4th item).
Definition: ntv2enums.h:1362
Specifies channel or FrameStore 5 (or the 5th item).
Definition: ntv2enums.h:1363
bool IsRunning(void) const
uint32_t videoDataSize
Size of video data (bytes)
ULWord fLo
| BG 4 | Secs10 | BG 3 | Secs 1 | BG 2 | Frms10 | BG 1 | Frms 1 |
Specifies channel or FrameStore 6 (or the 6th item).
Definition: ntv2enums.h:1364
uint32_t infoDataSize2
Size of the field 2 information data (bytes)
virtual void StartCodecRawThread(void)
Start the codec raw thread.
virtual bool AutoCirculateInitForInput(const NTV2Channel inChannel, const UWord inFrameCount=7, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_INVALID, const ULWord inOptionFlags=0, const UByte inNumChannels=1, const UWord inStartFrameNumber=0, const UWord inEndFrameNumber=0)
Prepares for subsequent AutoCirculate ingest, designating a contiguous block of frame buffers on the ...
virtual AJAStatus ProcessVideoFrame(AVHevcDataBuffer *pSrcFrame, AVHevcDataBuffer *pDstFrame, uint32_t frameNumber)
Default do-nothing function for processing the captured frames.
#define AUTOCIRCULATE_WITH_RP188
Use this to AutoCirculate with RP188.
Specifies channel or FrameStore 7 (or the 7th item).
Definition: ntv2enums.h:1365
bool SetAudioBuffer(ULWord *pInAudioBuffer, const ULWord inAudioByteCount)
Sets my audio buffer for use in a subsequent call to CNTV2Card::AutoCirculateTransfer.
virtual void StartVideoFileThread(void)
Start the video file writer thread.
8-Bit 4:2:0 2-Plane YCbCr
Definition: ntv2enums.h:253
Identifies the 1st SDI video input.
Definition: ntv2enums.h:1269
uint32_t infoDataSize
Size of the information data (bytes)
AJAStatus CreateHevcFile(const std::string &inFileName, uint32_t maxFrames)
virtual bool SetNumberAudioChannels(const ULWord inNumChannels, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the number of audio channels to be concurrently captured or played for a given Audio System on t...
Definition: ntv2audio.cpp:146
ULWord fHi
| BG 8 | Hrs 10 | BG 7 | Hrs 1 | BG 6 | Mins10 | BG 5 | Mins 1 |
Declares device capability functions.
virtual bool WaitForInputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
This identifies the mode in which there are no VANC lines in the frame buffer.
Definition: ntv2enums.h:3799
bool SetVideoBuffer(ULWord *pInVideoBuffer, const ULWord inVideoByteCount)
Sets my video buffer for use in a subsequent call to CNTV2Card::AutoCirculateTransfer.
Specifies channel or FrameStore 3 (or the 3rd item).
Definition: ntv2enums.h:1361
NTV2ReferenceSource NTV2InputSourceToReferenceSource(const NTV2InputSource inInputSource)
Converts a given NTV2InputSource to its equivalent NTV2ReferenceSource value.
Definition: ntv2utils.cpp:5023
See 8-Bit YCbCr Format.
Definition: ntv2enums.h:223
Audio clock derived from the video input.
Definition: ntv2enums.h:1993
virtual bool SetEmbeddedAudioClock(const NTV2EmbeddedAudioClock inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2EmbeddedAudioClock setting for the given NTV2AudioSystem.
Definition: ntv2audio.cpp:417
uint32_t timeCodeDBB
Time code data dbb.
void SetRP188(const uint32_t inDBB, const uint32_t inLo, const uint32_t inHi, const AJATimeBase &inTimeBase)
Definition: timecode.cpp:570
virtual AJAStatus Init(void)
Initializes me and prepares me to Run.
virtual bool EnableChannel(const NTV2Channel inChannel)
Enables the given FrameStore.