AJA NTV2 SDK  18.0.0.2122
NTV2 SDK 18.0.0.2122
ntv2encodehevcvif.cpp
Go to the documentation of this file.
1 /* SPDX-License-Identifier: MIT */
8 #include <stdio.h>
9 
10 #include "ntv2encodehevcvif.h"
11 #include "ntv2utils.h"
12 #include "ntv2formatdescriptor.h"
13 #include "ntv2devicefeatures.h"
14 #include "ajabase/system/process.h"
16 
17 using namespace std;
18 
// Maximum host audio buffer size per frame (bytes).
// Overlay color-bar tables. SetupHostBuffers() builds the two overlay frames
// by replicating one 32-bit word per bar: sOverlayBar0 for overlay frame 0
// (fully transparent -- all zero) and sOverlayBar1 for overlay frame 1, whose
// entries alternate transparent (0x00000000) and colored (0xc0xxxxxx) words,
// so every other bar is keyed color over the background video.
// NOTE(review): each table holds 24 entries but only the first
// NUM_OVERLAY_BARS (12) are indexed; the values appear to be packed pixels in
// the NTV2_FBF_ABGR overlay format with 0xc0 alpha -- confirm channel order.
20 #define NTV2_AUDIOSIZE_MAX (401 * 1024)
21 #define NUM_OVERLAY_BARS 12
22 //static const uint32_t sOverlayBar0[] = {
23 // 0xc00000c0, 0x00000000, 0xc000c000, 0x00000000, 0xc0c00000, 0x00000000,
24 // 0xc0c000c0, 0x00000000, 0xc0c0c000, 0x00000000, 0xc000c0c0, 0x00000000,
25 // 0xc00000c0, 0x00000000, 0xc000c000, 0x00000000, 0xc0c00000, 0x00000000,
26 // 0xc0c000c0, 0x00000000, 0xc0c0c000, 0x00000000, 0xc000c0c0, 0x00000000 };
27 static const uint32_t sOverlayBar0[] = {
28  0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
29  0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
30  0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
31  0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 };
32 static const uint32_t sOverlayBar1[] = {
33  0x00000000, 0xc00000c0, 0x00000000, 0xc000c000, 0x00000000, 0xc0c00000,
34  0x00000000, 0xc0c000c0, 0x00000000, 0xc0c0c000, 0x00000000, 0xc000c0c0,
35  0x00000000, 0xc00000c0, 0x00000000, 0xc000c000, 0x00000000, 0xc0c00000,
36  0x00000000, 0xc0c000c0, 0x00000000, 0xc0c0c000, 0x00000000, 0xc000c0c0 };
37 //static const uint32_t sOverlayBar1[] = {
38 // 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
39 // 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
40 // 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000,
41 // 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 };
43 
// Constructor -- records the caller's options and gives every member an
// explicit initial value. No device I/O happens here; the device is opened
// and configured later in Init().
// NOTE(review): this listing jumps from source line 58 to 60 and from 64 to
// 66, so two member initializers are missing from view (likely the input
// channel and the scaled capture video-format members) -- confirm against
// the original file.
44 NTV2EncodeHEVCVif::NTV2EncodeHEVCVif (const string inDeviceSpecifier,
45  const M31VideoPreset inPreset,
46  const NTV2FrameBufferFormat inPixelFormat,
47  const uint32_t inAudioChannels,
48  const bool inInfoData,
49  const uint32_t inMaxFrames)
50 
51 : mACInputThread (AJAThread()),
52  mCodecHevcThread (AJAThread()),
53  mAVFileThread (AJAThread()),
54  mM31 (AJA_NULL),
55  mHevcCommon (AJA_NULL),
56  mDeviceID (DEVICE_ID_NOTFOUND),
57  mDeviceSpecifier (inDeviceSpecifier),
58  mWithAudio (inAudioChannels != 0),
60  mOutputChannel (NTV2_CHANNEL5),
61  mEncodeChannel (M31_CH0),
62  mPreset (inPreset),
63  mInputSource (NTV2_INPUTSOURCE_SDI1),
64  mInputFormat (NTV2_MAX_NUM_VIDEO_FORMATS),
66  mCapturePixelFormat (NTV2_FBF_8BIT_YCBCR),
67  mOverlayPixelFormat (NTV2_FBF_ABGR),
68  mCodecPixelFormat (inPixelFormat),
69  mWithInfo (inInfoData),
70  mAudioSystem (NTV2_AUDIOSYSTEM_1),
71  mSavedTaskMode (NTV2_STANDARD_TASKS),
72  mNumAudioChannels (0),
73  mFileAudioChannels (inAudioChannels),
74  mMaxFrames (inMaxFrames),
75  mLastFrame (false),
76  mLastFrameInput (false),
77  mLastFrameHevc (false),
78  mLastFrameVideo (false),
79  mGlobalQuit (false),
80  mFrameData (AJA_NULL),
81  mSilentBuffer (AJA_NULL),
82  mVideoInputFrameCount (0),
83  mCodecHevcFrameCount (0),
84  mAVFileFrameCount (0),
85  mRawFrameCount (0),
86  mInfoFrameCount (0),
87  mOverlayIndex (0)
88 {
// Zero the ring-buffer descriptors and overlay bookkeeping so the destructor
// can safely delete only what SetupHostBuffers() actually allocated.
89  ::memset (mACInputBuffer, 0x0, sizeof (mACInputBuffer));
90  ::memset (mVideoHevcBuffer, 0x0, sizeof (mVideoHevcBuffer));
91  ::memset (&mOverlayBuffer, 0, sizeof (mOverlayBuffer));
92  ::memset (&mOverlayFrame, 0, sizeof (mOverlayFrame));
93 
94 } // constructor
95 
96 
98 {
99  // Stop my capture and consumer threads, then destroy them...
100  Quit ();
101 
102  // unsubscribe from input vertical event...
103  mDevice.UnsubscribeInputVerticalEvent (mInputChannel);
104 
105  // free all my buffers...
106  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++)
107  {
108  if (mACInputBuffer[bufferNdx].pVideoBuffer)
109  {
110  delete [] mACInputBuffer[bufferNdx].pVideoBuffer;
111  mACInputBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
112  }
113  if (mACInputBuffer[bufferNdx].pInfoBuffer)
114  {
115  delete [] mACInputBuffer[bufferNdx].pInfoBuffer;
116  mACInputBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
117  }
118  if (mACInputBuffer[bufferNdx].pAudioBuffer)
119  {
120  delete [] mACInputBuffer[bufferNdx].pAudioBuffer;
121  mACInputBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
122  }
123 
124  if (mVideoHevcBuffer[bufferNdx].pVideoBuffer)
125  {
126  delete [] mVideoHevcBuffer[bufferNdx].pVideoBuffer;
127  mVideoHevcBuffer[bufferNdx].pVideoBuffer = AJA_NULL;
128  }
129  if (mVideoHevcBuffer[bufferNdx].pInfoBuffer)
130  {
131  delete [] mVideoHevcBuffer[bufferNdx].pInfoBuffer;
132  mVideoHevcBuffer[bufferNdx].pInfoBuffer = AJA_NULL;
133  }
134  if (mVideoHevcBuffer[bufferNdx].pAudioBuffer)
135  {
136  delete [] mVideoHevcBuffer[bufferNdx].pAudioBuffer;
137  mVideoHevcBuffer[bufferNdx].pAudioBuffer = AJA_NULL;
138  }
139  }
140 
141  if (mSilentBuffer != AJA_NULL)
142  {
143  delete [] mSilentBuffer;
144  mSilentBuffer = AJA_NULL;
145  }
146  if (mOverlayBuffer[0] != AJA_NULL)
147  {
148  delete [] mOverlayBuffer[0];
149  mOverlayBuffer[0] = AJA_NULL;
150  }
151  if (mOverlayBuffer[1] != AJA_NULL)
152  {
153  delete [] mOverlayBuffer[1];
154  mOverlayBuffer[1] = AJA_NULL;
155  }
156 
157 } // destructor
158 
159 
// Quit -- gracefully shuts everything down: flags the last frame, walks the
// M31 encoder through ReadyToStop -> Stop, stops the video-input stream and
// returns the codec to its Init state, then stops the worker threads,
// restores the device's prior task mode, and closes the output files.
// (The function signature at source line 160 is not visible in this listing.)
161 {
162  if (mM31 && !mLastFrame && !mGlobalQuit)
163  {
164  // Set the last frame flag to start the quit process
165  mLastFrame = true;
166 
167  // Stop the encoder stream
168  if (!mM31->ChangeEHState(Hevc_EhState_ReadyToStop, mEncodeChannel))
169  { cerr << "## ERROR: ChangeEHState ready to stop failed" << endl; }
170 
171  // Wait for the last frame to be written to disk
// Poll mLastFrameVideo every 10 ms for up to 300 iterations (about 3 seconds).
172  int i;
173  int timeout = 300;
174  for (i = 0; i < timeout; i++)
175  {
176  if (mLastFrameVideo) break;
177  AJATime::Sleep (10);
178  }
179  if (i == timeout)
180  { cerr << "## ERROR: Wait for last frame timeout" << endl; }
181 
182  if (!mM31->ChangeEHState(Hevc_EhState_Stop, mEncodeChannel))
183  { cerr << "## ERROR: ChangeEHState stop failed" << endl; }
184 
185  // Stop the video input stream
186  if (!mM31->ChangeVInState(Hevc_VinState_Stop, mEncodeChannel))
187  { cerr << "## ERROR: ChangeVInState stop failed" << endl; }
188 
189  // Now go to the init state
190  if (!mM31->ChangeMainState(Hevc_MainState_Init, Hevc_EncodeMode_Single))
191  { cerr << "## ERROR: ChangeMainState to init failed" << endl; }
192  }
193 
194  // Stop the worker threads
// Setting the flag makes each worker loop exit; then spin-wait until each
// thread reports inactive before touching shared state below.
195  mGlobalQuit = true;
196 
197  while (mACInputThread.Active())
198  AJATime::Sleep(10);
199 
200  while (mCodecHevcThread.Active())
201  AJATime::Sleep(10);
202 
203  while (mAVFileThread.Active())
204  AJATime::Sleep(10);
205 
206  // Stop video capture
207  mDevice.SetMode(mInputChannel, NTV2_MODE_DISPLAY, false);
208 
209  // Release board
210  mDevice.ReleaseStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid()));
211  mDevice.SetEveryFrameServices (mSavedTaskMode); // Restore prior task mode
212 
213  // Close output files
214  mHevcCommon->CloseHevcFile();
215  mHevcCommon->CloseRawFile();
216  if (mWithInfo)
217  mHevcCommon->CloseEncFile();
218  if (mWithAudio)
219  mHevcCommon->CloseAiffFile();
220 
221 } // Quit
222 
223 
// Init -- opens and acquires the device, verifies an M31 codec is present,
// resolves the codec preset / pixel format / video format (from the preset if
// given, otherwise from the detected SDI input), allocates host buffers,
// configures video, audio and signal routing, initializes the codec, and
// creates the output files (raw.hevc / raw.yuv / raw.txt / raw.aiff).
// Returns AJA_STATUS_SUCCESS, or a failure status on any step.
// (The function signature at source line 224 is not visible in this listing.)
225 {
226  AJAStatus status (AJA_STATUS_SUCCESS);
227 
228  // Open the device...
229  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, mDevice))
230  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return AJA_STATUS_OPEN; }
231 
232  // Grab board in a shared environment
233  if (!mDevice.AcquireStreamForApplication (kDemoAppSignature, static_cast<int32_t>(AJAProcess::GetPid())))
234  return AJA_STATUS_BUSY; // Another app is using the device
235  mDevice.GetEveryFrameServices (mSavedTaskMode); // Save the current state before we change it
236  mDevice.SetEveryFrameServices (NTV2_OEM_TASKS); // Since this is an OEM demo, use the OEM service level
237 
238  mDeviceID = mDevice.GetDeviceID (); // Keep the device ID handy, as it's used frequently
239 
240  // Make sure this device has an M31
241  if (!mDevice.features().HasHEVCM31())
242  {
243  cerr << "## ERROR: M31 not found" << endl;
244  return AJA_STATUS_FAIL;
245  }
246 
247  // Allocate our M31 helper class and our HEVC common class
248  mM31 = new CNTV2m31 (&mDevice);
249  mHevcCommon = new CNTV2DemoHevcCommon ();
250 
251  if ((mM31 == AJA_NULL) || (mHevcCommon == AJA_NULL))
252  {
253  return AJA_STATUS_FAIL;
254  }
255 
256  // Preset specification takes precedence
257  if (mPreset < M31_NUMVIDEOPRESETS)
258  {
259  // This class only handles vif based presets so make sure they didn't pass in a file one
260  if (!CNTV2m31::IsPresetVIF(mPreset))
261  return AJA_STATUS_FAIL;
262 
263  // Get NTV2 formats to match codec preset
264  mInputFormat = CNTV2m31::GetPresetVideoFormat(mPreset);
265  mCodecPixelFormat = CNTV2m31::GetPresetFrameBufferFormat(mPreset);
266  }
267  // Otherwise use the pixel format and SDI input format
268  else if (mCodecPixelFormat >= NTV2_FBF_NUMFRAMEBUFFERFORMATS)
269  {
270  mCodecPixelFormat = NTV2_FBF_8BIT_YCBCR_420PL2;
271  }
272 
273  // When video format is unknown determine from SDI input
274  if (mInputFormat >= NTV2_MAX_NUM_VIDEO_FORMATS)
275  {
276  bool is3Gb = false;
277  mDevice.GetSDIInput3GbPresent (is3Gb, mInputChannel);
278 
279  // Get SDI input format
280  status = mHevcCommon->DetermineInputFormat(mDevice.GetSDIInputVideoFormat(mInputChannel, is3Gb), true, mInputFormat);
281  if (AJA_FAILURE(status))
282  return status;
283 
284  // Get codec preset for input format
285  if(!CNTV2m31::ConvertVideoFormatToPreset(mInputFormat, mCodecPixelFormat, true, mPreset))
286  return AJA_STATUS_FAIL;
287  }
288 
289  // Capture format is scaled HD
// Only square-division UHD 50/59.94/60p inputs are supported; anything else fails.
290  switch (mInputFormat)
291  {
292  case NTV2_FORMAT_4x1920x1080p_5000: mVideoFormat = NTV2_FORMAT_1080p_5000_A; break;
293  case NTV2_FORMAT_4x1920x1080p_5994: mVideoFormat = NTV2_FORMAT_1080p_5994_A; break;
294  case NTV2_FORMAT_4x1920x1080p_6000: mVideoFormat = NTV2_FORMAT_1080p_6000_A; break;
295  default:
296  return AJA_STATUS_FAIL;
297  }
298 
299  // Setup the circular buffers
300  SetupHostBuffers ();
301 
302  // Setup frame buffer
303  status = SetupVideo ();
304  if (AJA_FAILURE (status))
305  return status;
306 
307  // Setup audio buffer
308  status = SetupAudio ();
309  if (AJA_FAILURE (status))
310  return status;
311 
312  // Route input signals
313  RouteInputSignal ();
314 
315  // Setup to capture video/audio/anc input
// NOTE(review): source line 316 is missing from this listing -- presumably
// the SetupAutoCirculate() call described by the comment above; confirm
// against the original file.
317 
318  // Setup codec
319  status = mHevcCommon->SetupHEVC (mM31, mPreset, mEncodeChannel, false, mWithInfo);
320  if (AJA_FAILURE (status))
321  return status;
322 
323  // Create encoded video output file
324  status = mHevcCommon->CreateHevcFile ("raw.hevc", mMaxFrames);
325  if (AJA_FAILURE (status))
326  return status;
327 
328  // Create rw video output file
329  status = mHevcCommon->CreateRawFile ("raw.yuv", mMaxFrames);
330  if (AJA_FAILURE (status))
331  return status;
332 
333  if (mWithInfo)
334  {
335  // Create encoded data output file
336  status = mHevcCommon->CreateEncFile ("raw.txt", mMaxFrames);
337  if (AJA_FAILURE (status))
338  return status;
339  }
340 
341  if (mWithAudio)
342  {
343  // Create audio output file
344  status = mHevcCommon->CreateAiffFile ("raw.aiff", mFileAudioChannels, mMaxFrames, NTV2_AUDIOSIZE_MAX);
345  if (AJA_FAILURE (status))
346  return status;
347  }
348 
349  return AJA_STATUS_SUCCESS;
350 
351 } // Init
352 
353 
// GetCodecPreset -- returns the M31 preset that was supplied to the
// constructor or derived from the SDI input during Init().
// (The function signature at source line 354 is not visible in this listing.)
355 {
356  return mPreset;
357 }
358 
359 
// SetupAudio -- points the audio system at the embedded audio of the input
// channel, captures the device's maximum channel count at 48 kHz, and sizes
// the on-device audio buffer. Always returns AJA_STATUS_SUCCESS.
// (The function signature at source line 360 is not visible in this listing.)
// NOTE(review): source line 368 is missing from this listing (between the
// SetAudioRate and GetAudioRate calls) -- confirm against the original file.
361 {
362  // Have the audio system capture audio from the designated device input (i.e., ch1 uses SDIIn1, ch2 uses SDIIn2, etc.)...
363  mDevice.SetAudioSystemInputSource (mAudioSystem, NTV2_AUDIO_EMBEDDED, ::NTV2ChannelToEmbeddedAudioInput (mInputChannel));
364 
365  mNumAudioChannels = ::NTV2DeviceGetMaxAudioChannels (mDeviceID);
366  mDevice.SetNumberAudioChannels (mNumAudioChannels, mAudioSystem);
367  mDevice.SetAudioRate (NTV2_AUDIO_48K, mAudioSystem);
369  mDevice.GetAudioRate (mAudioRate, mAudioSystem);
370 
371  // The on-device audio buffer should be 4MB to work best across all devices & platforms...
372  mDevice.SetAudioBufferSize (NTV2_AUDIO_BUFFER_BIG, mAudioSystem);
373 
374  return AJA_STATUS_SUCCESS;
375 
376 } // SetupAudio
377 
378 
// SetupHostBuffers -- sizes and allocates every host-side buffer: the
// audio/video capture ring, the HEVC output ring, a zeroed "silent" audio
// buffer (used by AVFileWorker when no capture frame matches), and two
// overlay images pre-filled from the sOverlayBar tables.
// (The function signature at source line 379 is not visible in this listing.)
380 {
381  mVideoBufferSize = GetVideoActiveSize (mVideoFormat, mCapturePixelFormat, NTV2_VANCMODE_OFF);
382  mPicInfoBufferSize = sizeof(HevcPictureInfo)*2;
383  mEncInfoBufferSize = sizeof(HevcEncodedInfo)*2;
384  mAudioBufferSize = NTV2_AUDIOSIZE_MAX;
// x4: the overlay surface spans 2x the HD raster width and 2x its height
// (see the linePitch*2 / numLines*2 fill loop below).
385  mOverlayBufferSize = GetVideoActiveSize(mVideoFormat, mOverlayPixelFormat, NTV2_VANCMODE_OFF) * 4;
386  NTV2FormatDescriptor overlayD (mVideoFormat, mOverlayPixelFormat);
387 
388  // audio/video input ring
389  mACInputCircularBuffer.SetAbortFlag (&mGlobalQuit);
390  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
391  {
392  memset (&mACInputBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
393  mACInputBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
394  mACInputBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
395  mACInputBuffer[bufferNdx].videoDataSize = 0;
396  mACInputBuffer[bufferNdx].videoDataSize2 = 0;
397  mACInputBuffer[bufferNdx].pAudioBuffer = new uint32_t [mAudioBufferSize/4];
398  mACInputBuffer[bufferNdx].audioBufferSize = mAudioBufferSize;
399  mACInputBuffer[bufferNdx].audioDataSize = 0;
400  mACInputBuffer[bufferNdx].pInfoBuffer = new uint32_t [mPicInfoBufferSize/4];
401  mACInputBuffer[bufferNdx].infoBufferSize = mPicInfoBufferSize;
402  mACInputBuffer[bufferNdx].infoDataSize = 0;
403  mACInputBuffer[bufferNdx].infoDataSize2 = 0;
404  mACInputCircularBuffer.Add (& mACInputBuffer[bufferNdx]);
405  }
406 
407  // video hevc ring
408  mVideoHevcCircularBuffer.SetAbortFlag (&mGlobalQuit);
409  for (unsigned bufferNdx = 0; bufferNdx < VIDEO_RING_SIZE; bufferNdx++ )
410  {
411  memset (&mVideoHevcBuffer[bufferNdx], 0, sizeof(AVHevcDataBuffer));
412  mVideoHevcBuffer[bufferNdx].pVideoBuffer = new uint32_t [mVideoBufferSize/4];
413  mVideoHevcBuffer[bufferNdx].videoBufferSize = mVideoBufferSize;
414  mVideoHevcBuffer[bufferNdx].videoDataSize = 0;
415  mVideoHevcBuffer[bufferNdx].videoDataSize2 = 0;
416  mVideoHevcBuffer[bufferNdx].pInfoBuffer = new uint32_t [mEncInfoBufferSize/4];
417  mVideoHevcBuffer[bufferNdx].infoBufferSize = mEncInfoBufferSize;
418  mVideoHevcBuffer[bufferNdx].infoDataSize = 0;
419  mVideoHevcBuffer[bufferNdx].infoDataSize2 = 0;
420  mVideoHevcCircularBuffer.Add (& mVideoHevcBuffer[bufferNdx]);
421  }
422 
423  // audio silent buffer
424  mSilentBuffer = new uint32_t [mAudioBufferSize/4];
425  memset(mSilentBuffer, 0, mAudioBufferSize);
426 
427  // overlay buffers
428  mOverlayBuffer[0] = new uint32_t [mOverlayBufferSize/4];
429  mOverlayBuffer[1] = new uint32_t [mOverlayBufferSize/4];
430 
// Paint NUM_OVERLAY_BARS horizontal bars: every pixel of bar i gets the
// single 32-bit word sOverlayBarN[i].
431  uint32_t* buf0 = mOverlayBuffer[0];
432  uint32_t* buf1 = mOverlayBuffer[1];
433  uint32_t pixelsPerLine = overlayD.linePitch*2;
434  uint32_t linesPerBar = overlayD.numLines*2/NUM_OVERLAY_BARS;
435 
436  for (uint32_t i = 0; i < NUM_OVERLAY_BARS; i++)
437  {
438  for (uint32_t j = 0; j < linesPerBar; j++)
439  {
440  for (uint32_t k = 0; k < pixelsPerLine; k++)
441  {
442  *buf0++ = sOverlayBar0[i];
443  *buf1++ = sOverlayBar1[i];
444  }
445  }
446  }
447 
448 } // SetupHostBuffers
449 
450 
// SetupVideo -- puts the board in single-format quad (4K squares) mode,
// assigns frame-buffer formats (YCbCr capture on ch1-4, ABGR overlay on
// ch5-8), DMAs the two pre-built overlay images to device frames 32 and 36,
// sets channel modes and enables, configures the mixers, references the
// input, and subscribes to input vertical interrupts.
// (The function signature at source line 451 is not visible in this listing.)
452 {
453  // Disable multiformat
454  mDevice.SetMultiFormatMode (false);
455 
456  // Set the board video format
457  mDevice.SetVideoFormat (mVideoFormat, false, false, NTV2_CHANNEL1);
458  mDevice.GetFrameRate (mFrameRate, NTV2_CHANNEL1);
459  mDevice.SetQuadFrameEnable (true, NTV2_CHANNEL5);
460  mDevice.Set4kSquaresEnable (true, NTV2_CHANNEL5);
461 
462  // Set frame buffer format
463  mDevice.SetFrameBufferFormat (NTV2_CHANNEL1, mCapturePixelFormat);
464  mDevice.SetFrameBufferFormat (NTV2_CHANNEL2, mCapturePixelFormat);
465  mDevice.SetFrameBufferFormat (NTV2_CHANNEL3, mCapturePixelFormat);
466  mDevice.SetFrameBufferFormat (NTV2_CHANNEL4, mCapturePixelFormat);
467  mDevice.SetFrameBufferFormat (NTV2_CHANNEL5, mOverlayPixelFormat);
468  mDevice.SetFrameBufferFormat (NTV2_CHANNEL6, mOverlayPixelFormat);
469  mDevice.SetFrameBufferFormat (NTV2_CHANNEL7, mOverlayPixelFormat);
470  mDevice.SetFrameBufferFormat (NTV2_CHANNEL8, mOverlayPixelFormat);
471 
472  // Setup overlay
473  mOverlayFrame[0] = 32;
474  mOverlayFrame[1] = 36;
475  mDevice.DMAWriteFrame (mOverlayFrame[0], mOverlayBuffer[0], mOverlayBufferSize);
476  mDevice.DMAWriteFrame (mOverlayFrame[1], mOverlayBuffer[1], mOverlayBufferSize);
// /4: presumably the output frame index is in quad-frame units while the DMA
// target above is in HD-frame units -- TODO confirm against SDK docs.
477  mDevice.SetOutputFrame (NTV2_CHANNEL5, mOverlayFrame[0]/4);
478  mOverlayIndex = 0;
479 
480  // Set capture mode
481  mDevice.SetMode (NTV2_CHANNEL1, NTV2_MODE_CAPTURE, false);
482  mDevice.SetMode (NTV2_CHANNEL2, NTV2_MODE_DISPLAY, false);
483  mDevice.SetMode (NTV2_CHANNEL3, NTV2_MODE_DISPLAY, false);
484  mDevice.SetMode (NTV2_CHANNEL4, NTV2_MODE_DISPLAY, false);
485  mDevice.SetMode (NTV2_CHANNEL5, NTV2_MODE_DISPLAY, false);
486  mDevice.SetMode (NTV2_CHANNEL6, NTV2_MODE_DISPLAY, false);
487  mDevice.SetMode (NTV2_CHANNEL7, NTV2_MODE_DISPLAY, false);
488  mDevice.SetMode (NTV2_CHANNEL8, NTV2_MODE_DISPLAY, false);
489 
490  // Enable frame buffers
491  mDevice.EnableChannel (NTV2_CHANNEL1);
492  mDevice.DisableChannel (NTV2_CHANNEL2);
493  mDevice.DisableChannel (NTV2_CHANNEL3);
494  mDevice.DisableChannel (NTV2_CHANNEL4);
495  mDevice.EnableChannel (NTV2_CHANNEL5);
496  mDevice.EnableChannel (NTV2_CHANNEL6);
497  mDevice.EnableChannel (NTV2_CHANNEL7);
498  mDevice.EnableChannel (NTV2_CHANNEL8);
499 
500  // Setup mixers to key fg over bg
501  for (UWord i = 0; i < 4; i++)
502  {
503  mDevice.SetMixerVancOutputFromForeground (i, true);
// NOTE(review): source lines 504-505 and 507 are missing from this listing
// (likely SetMixerMode / foreground-keying calls) -- confirm against the
// original file.
506  mDevice.SetMixerCoefficient (i, 0x00000); // 0x10000 forground - 0x00000 background
508  }
509 
510  // Save input source
511  mInputSource = ::NTV2ChannelToInputSource (NTV2_CHANNEL1);
512 
513  // Set the device reference to the input...
514  mDevice.SetReference (::NTV2InputSourceToReferenceSource (mInputSource));
515 
516  // Enable and subscribe to the interrupts for the channel to be used...
517  mDevice.EnableInputInterrupt (mInputChannel);
518  mDevice.SubscribeInputVerticalEvent (mInputChannel);
519 
520  // Setup for picture info
521  mTimeBase.SetAJAFrameRate (mHevcCommon->GetAJAFrameRate(GetNTV2FrameRateFromVideoFormat (mVideoFormat)));
522 
523  return AJA_STATUS_SUCCESS;
524 
525 } // SetupVideo
526 
527 
// RouteInputSignal -- configures SDI 1-4 as inputs and SDI 5-8 as outputs,
// waits for input lock, then builds and applies the capture/overlay signal
// route (CSC -> mixer foreground, SDI in -> mixer background, mixer -> SDI
// out and 4K down-converter -> frame buffer, per the comments below).
// (The function signature at source line 528 is not visible in this listing.)
// NOTE(review): this listing is missing source lines 546-554 and 561-595 --
// the router.AddConnection(...) calls that actually populate the route. As
// shown, the router would be applied empty; confirm against the original file.
529 {
530  // setup sdi io
531  mDevice.SetSDITransmitEnable (NTV2_CHANNEL1, false);
532  mDevice.SetSDITransmitEnable (NTV2_CHANNEL2, false);
533  mDevice.SetSDITransmitEnable (NTV2_CHANNEL3, false);
534  mDevice.SetSDITransmitEnable (NTV2_CHANNEL4, false);
535  mDevice.SetSDITransmitEnable (NTV2_CHANNEL5, true);
536  mDevice.SetSDITransmitEnable (NTV2_CHANNEL6, true);
537  mDevice.SetSDITransmitEnable (NTV2_CHANNEL7, true);
538  mDevice.SetSDITransmitEnable (NTV2_CHANNEL8, true);
539 
540  // Give the device some time to lock to the input signal...
541  mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);
542 
543  // When input is 3Gb convert to 3Ga for capture (no RGB support?)
544  bool is3Gb = false;
545  mDevice.GetSDIInput3GbPresent (is3Gb, mInputChannel);
546 
555 
556  // Use a "Routing" object, which handles the details of writing
557  // the appropriate values into the appropriate device registers...
558  CNTV2SignalRouter router;
559 
560  // quad csc from frame buffer
565  // quad mixer foreground from csc
574  // quad mixer background from sdi input
583  // quad sdi output (to codec) from mixer
588  // 4k down converter from mixer
593  // frame buffer from 4k down converter
595 
596  // Add this signal routing (or replace if not doing multistream)...
597  mDevice.ApplySignalRoute (router, true);
598 
599  // Give the device some time to lock to the input signal...
600  mDevice.WaitForOutputVerticalInterrupt (mInputChannel, 8);
601 
602 } // RouteInputSignal
603 
604 
// SetupAutoCirculate -- (re)initializes capture AutoCirculate on the input
// channel. (The function signature at source line 605 is not visible here.)
606 {
607  // Tell capture AutoCirculate to use 16 frame buffers on the device...
608  mDevice.AutoCirculateStop (mInputChannel);
609  mDevice.AutoCirculateInitForInput (mInputChannel, 16, // Frames to circulate
610  mWithAudio ? mAudioSystem : NTV2_AUDIOSYSTEM_INVALID); // Which audio system (if any)?
611 } // SetupInputAutoCirculate
612 
613 
// Run -- warns if no video signal is present, starts the worker threads, and
// primes the codec with an initial batch of 32 picture-information records
// when info data is enabled. Returns AJA_STATUS_SUCCESS.
// (The function signature at source line 614 is not visible in this listing.)
615 {
616  if (mDevice.GetInputVideoFormat (mInputSource) == NTV2_FORMAT_UNKNOWN)
617  cout << endl << "## WARNING: No video signal present on the input connector" << endl;
618 
619  // Start the playout and capture threads...
// NOTE(review): source lines 620-621 are missing from this listing --
// presumably the StartVideoInputThread()/StartCodecHevcThread() calls the
// comment above describes; confirm against the original file.
622 
623  if (mWithInfo)
624  {
625  // Transfer initial picture info
626  for (int i = 0; i < 32; i++)
627  {
628  TransferPictureInfo(mM31);
629  }
630  }
631 
// NOTE(review): source line 632 is also missing here (likely
// StartAVFileThread()); confirm against the original file.
633 
634  return AJA_STATUS_SUCCESS;
635 
636 } // Run
637 
638 
639 // This is where we will start the video input thread
// StartVideoInputThread -- attaches the capture worker as a high-priority
// thread and starts it. (Signature at source line 640 not visible here.)
641 {
642  mACInputThread.Attach(VideoInputThreadStatic, this);
643  mACInputThread.SetPriority(AJA_ThreadPriority_High);
644  mACInputThread.Start();
645 
646 } // StartVideoInputThread
647 
648 
649 // The video input thread static callback
650 void NTV2EncodeHEVCVif::VideoInputThreadStatic (AJAThread * pThread, void * pContext)
651 {
652  (void) pThread;
653 
654  NTV2EncodeHEVCVif * pApp (reinterpret_cast <NTV2EncodeHEVCVif *> (pContext));
655  pApp->VideoInputWorker ();
656 
657 } // VideoInputThreadStatic
658 
659 
// VideoInputWorker -- capture loop. Opens its own CNTV2Card handle, runs
// AutoCirculate on the input channel, copies each captured frame (and audio,
// when enabled) into the A/C input ring, and flips the on-air overlay frame
// every 60 captured frames.
// (The function signature at source line 660 is not visible in this listing.)
661 {
662  CNTV2Card device;
663  CNTV2m31 * m31;
664  AUTOCIRCULATE_TRANSFER inputXfer;
665 
666  // Open the device...
667  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, device))
668  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
669 
670  // Allocate our M31 helper class and our HEVC common class
// NOTE(review): this m31 instance is never used in this worker -- it is only
// deleted at exit. Confirm whether the allocation is actually needed.
671  m31 = new CNTV2m31 (&device);
672 
673  // start AutoCirculate running...
674  device.AutoCirculateStart (mInputChannel);
675 
676  while (!mGlobalQuit)
677  {
678  AUTOCIRCULATE_STATUS acStatus;
679  device.AutoCirculateGetStatus (mInputChannel, acStatus);
680 
681  // wait for captured frame
682  if (acStatus.IsRunning() && acStatus.HasAvailableInputFrame())
683  {
684  // At this point, there's at least one fully-formed frame available in the device's
685  // frame buffer to transfer to the host. Reserve an AvaDataBuffer to "produce", and
686  // use it in the next transfer from the device...
687  AVHevcDataBuffer * pVideoData (mACInputCircularBuffer.StartProduceNextBuffer ());
688  if (pVideoData)
689  {
690  // setup buffer pointers for transfer
691  inputXfer.SetBuffers (pVideoData->pVideoBuffer, pVideoData->videoBufferSize, AJA_NULL, 0, AJA_NULL, 0);
692 
693  if (mWithAudio)
694  {
695  inputXfer.SetAudioBuffer (pVideoData->pAudioBuffer, pVideoData->audioBufferSize);
696  }
697 
698  // do the transfer from the device into our host AvaDataBuffer...
699  device.AutoCirculateTransfer (mInputChannel, inputXfer);
700 
701  // get the video data size
702  pVideoData->videoDataSize = pVideoData->videoBufferSize;
703  pVideoData->audioDataSize = 0;
// Keep the device capture timestamp -- AVFileWorker matches raw frames to
// encoded frames by this time value.
704  pVideoData->frameTime = inputXfer.GetFrameInfo().acFrameTime;
705  pVideoData->lastFrame = mLastFrame;
706 
707  if (mWithAudio)
708  {
709  // get the audio data size
710  pVideoData->audioDataSize = inputXfer.GetCapturedAudioByteCount();
711  }
712 
713  if (pVideoData->lastFrame && !mLastFrameInput)
714  {
715  printf ( "\nCapture last frame number %d\n", mVideoInputFrameCount );
716  mLastFrameInput = true;
717  }
718 
719  mVideoInputFrameCount++;
720 
721  // signal that we're done "producing" the frame, making it available for future "consumption"...
722  mACInputCircularBuffer.EndProduceNextBuffer ();
723 
// Ping-pong between the two pre-loaded overlay frames once every 60 frames.
724  if ((mVideoInputFrameCount%60) == 0)
725  {
726  if (mOverlayIndex == 0)
727  {
728  mOverlayIndex = 1;
729  }
730  else
731  {
732  mOverlayIndex = 0;
733  }
734  device.SetOutputFrame (NTV2_CHANNEL5, mOverlayFrame[mOverlayIndex]/4);
735  }
736  } // if A/C running and frame(s) are available for transfer
737  }
738  else
739  {
740  // Either AutoCirculate is not running, or there were no frames available on the device to transfer.
741  // Rather than waste CPU cycles spinning, waiting until a frame becomes available, it's far more
742  // efficient to wait for the next input vertical interrupt event to get signaled...
743  device.WaitForInputVerticalInterrupt (mInputChannel);
744  }
745  } // loop til quit signaled
746 
747  // Stop AutoCirculate...
748  device.AutoCirculateStop (mInputChannel);
749 
750  if (m31 != AJA_NULL)
751  {
752  delete m31;
753  }
754 } // VideoInputWorker
755 
756 
757 // This is where we will start the codec hevc thread
// StartCodecHevcThread -- attaches the codec-transfer worker as a
// high-priority thread and starts it. (Signature at line 758 not visible.)
759 {
760  mCodecHevcThread.Attach(CodecHevcThreadStatic, this);
761  mCodecHevcThread.SetPriority(AJA_ThreadPriority_High);
762  mCodecHevcThread.Start();
763 
764 } // StartCodecHevcThread
765 
766 
767 // The codec hevc static callback
768 void NTV2EncodeHEVCVif::CodecHevcThreadStatic (AJAThread * pThread, void * pContext)
769 {
770  (void) pThread;
771 
772  NTV2EncodeHEVCVif * pApp (reinterpret_cast <NTV2EncodeHEVCVif *> (pContext));
773  pApp->CodecHevcWorker ();
774 
775 } // CodecHevcThreadStatic
776 
777 
// CodecHevcWorker -- encoder drain loop. Opens a private device handle, then
// repeatedly pulls an encoded HEVC frame (plus its encoded-info record) from
// the M31 into the HEVC ring, pads both payloads to 8-byte multiples, and
// feeds the codec another picture-info record while still running.
// (The function signature at source line 778 is not visible in this listing.)
779 {
780  CNTV2Card device;
781  CNTV2m31 * m31;
782 
783  // Open the device...
784  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument (mDeviceSpecifier, device))
785  { cerr << "## ERROR: Device '" << mDeviceSpecifier << "' not found" << endl; return; }
786 
787  // Allocate our M31 helper class and our HEVC common class
// A thread-local CNTV2m31 bound to this thread's own device handle.
788  m31 = new CNTV2m31 (&device);
789 
790  while (!mGlobalQuit)
791  {
792  // wait for the next hevc frame
793  AVHevcDataBuffer * pFrameData (mVideoHevcCircularBuffer.StartProduceNextBuffer ());
794  if (pFrameData)
795  {
796  if (!mLastFrameHevc)
797  {
798  // transfer an hevc frame from the codec including encoded information
799  m31->EncTransfer(mEncodeChannel,
800  (uint8_t*)pFrameData->pVideoBuffer,
801  pFrameData->videoBufferSize,
802  (uint8_t*)pFrameData->pInfoBuffer,
803  pFrameData->infoBufferSize,
804  pFrameData->videoDataSize,
805  pFrameData->infoDataSize,
806  pFrameData->frameTime,
807  pFrameData->lastFrame);
808 
809  // round the video size up
// Align to 8 bytes, padding with 0xff (video) / 0x00 (info).
810  pFrameData->videoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pVideoBuffer,
811  pFrameData->videoBufferSize,
812  pFrameData->videoDataSize,
813  8, 0xff);
814  // round the info size up
815  pFrameData->infoDataSize = mHevcCommon->AlignDataBuffer(pFrameData->pInfoBuffer,
816  pFrameData->infoBufferSize,
817  pFrameData->infoDataSize,
818  8, 0);
819 
820  if (mWithInfo && !mLastFrame)
821  {
822  // transfer more picture info
823  TransferPictureInfo(m31);
824  }
825  }
826 
827  if (pFrameData->lastFrame)
828  {
829  mLastFrameHevc = true;
830  }
831  mCodecHevcFrameCount++;
832 
833  // release and recycle the buffer...
834  mVideoHevcCircularBuffer.EndProduceNextBuffer ();
835  }
836  } // loop til quit signaled
837 
838  if (m31 != AJA_NULL)
839  {
840  delete m31;
841  }
842 } // EncTransferFrames
843 
844 
845 // This is where we start the audio/video file writer thread
// StartAVFileThread -- attaches the audio/video file-writer worker as a
// high-priority thread and starts it. (Signature at line 846 not visible.)
847 {
848  mAVFileThread.Attach(AVFileThreadStatic, this);
849  mAVFileThread.SetPriority(AJA_ThreadPriority_High);
850  mAVFileThread.Start();
851 
852 } // StartAVFileThread
853 
854 
855 // The file writer static callback
856 void NTV2EncodeHEVCVif::AVFileThreadStatic (AJAThread * pThread, void * pContext)
857 {
858  (void) pThread;
859 
860  NTV2EncodeHEVCVif * pApp (reinterpret_cast <NTV2EncodeHEVCVif *> (pContext));
861  pApp->AVFileWorker ();
862 
863 } // AVFileStatic
864 
865 
// AVFileWorker -- file-writer loop. Consumes HEVC frames from the codec ring
// and writes the elementary stream (plus encoded-info records). For each
// written frame it searches the A/C capture ring for the raw buffer whose
// timestamp matches the encode time within 50000 ticks (labeled as 5 ms by
// the tick/10 = microseconds conversion in the prints below) and writes its
// audio; older raw frames are skipped, and when capture has no match yet,
// one frame of silence is written so the AIFF stays in sync.
// (The function signature at source line 866 is not visible in this listing.)
867 {
868  int64_t encodeTime;
869  bool addData;
870 
// mFrameData persists across outer iterations: a raw frame that arrived
// "early" is held here until a later encoded frame matches it.
871  mFrameData = AJA_NULL;
872 
873  while (!mGlobalQuit)
874  {
875  encodeTime = 0;
876 
877  // wait for the next codec hevc frame
878  AVHevcDataBuffer * pHevcData (mVideoHevcCircularBuffer.StartConsumeNextBuffer ());
879  if (pHevcData)
880  {
881  if (!mLastFrameVideo)
882  {
883  // write the frame / fields hevc to the output file
884  mHevcCommon->WriteHevcData(pHevcData->pVideoBuffer, pHevcData->videoDataSize);
885  encodeTime = pHevcData->frameTime;
886 
887  if (mWithInfo)
888  {
889  // write the frame encoded data to the output file
890  mHevcCommon->WriteEncData(pHevcData->pInfoBuffer, pHevcData->infoDataSize);
891  }
892 
893  if (pHevcData->lastFrame)
894  {
895  printf ( "Video file last frame number %d\n", mAVFileFrameCount );
896  mLastFrameVideo = true;
897  }
898 
899  mAVFileFrameCount++;
900  }
901 
902  // release the hevc buffer
903  mVideoHevcCircularBuffer.EndConsumeNextBuffer ();
904  }
905 
906  if (encodeTime != 0)
907  {
// Walk the capture ring until a raw frame matches (or exceeds) encodeTime.
908  while (true)
909  {
910  addData = false;
911  if (mFrameData == AJA_NULL)
912  {
913  mFrameData = mACInputCircularBuffer.StartConsumeNextBuffer ();
914  }
915  if (mFrameData)
916  {
917  if (abs(mFrameData->frameTime - encodeTime) < 50000)
918  {
919  if (mWithAudio)
920  {
921  // write the audio samples to the output file
// Byte count -> samples: divide by channel count and 4 bytes per sample.
922  mHevcCommon->WriteAiffData(mFrameData->pAudioBuffer, mNumAudioChannels,
923  mFrameData->audioDataSize/mNumAudioChannels/4);
924  }
// Only the very first matched raw frame is written to raw.yuv.
925  if (mRawFrameCount == 0)
926  {
927  mHevcCommon->WriteRawData(mFrameData->pVideoBuffer, mFrameData->videoDataSize);
928  mRawFrameCount++;
929  }
930 
931  // release the hevc buffer
932 // printf ( "Found autocirculate raw audio/video frame %d\n", (int32_t)(mFrameData->frameTime - encodeTime));
933  mACInputCircularBuffer.EndConsumeNextBuffer ();
934  mFrameData = AJA_NULL;
935  break;
936  }
937  else if (mFrameData->frameTime < encodeTime)
938  {
// Raw frame is older than the encoded frame: discard it and keep searching.
939  printf ( "Skip autocirculate raw audio/video frame - time diff %d us\n",
940  (int32_t)(mFrameData->frameTime - encodeTime)/10);
941  mACInputCircularBuffer.EndConsumeNextBuffer ();
942  mFrameData = AJA_NULL;
943  continue;
944  }
945  else
946  {
// Raw frame is newer: hold it for a later encoded frame and pad with silence.
947  printf ( "Add autocirculate raw audio/video frame - time diff %d us\n",
948  (int32_t)(mFrameData->frameTime - encodeTime)/10);
949  addData = true;
950  }
951  }
952  else
953  {
954  printf ( "Add autocirculate raw audio/video frame - no input\n");
955  addData = true;
956  }
957  if (addData)
958  {
959  if (mWithAudio)
960  {
961  uint32_t numSamples = GetAudioSamplesPerFrame (mFrameRate, mAudioRate, mAVFileFrameCount);
962  // write the audio samples to the output file
963  mHevcCommon->WriteAiffData(mSilentBuffer, mNumAudioChannels, numSamples);
964  }
965  break;
966  }
967  }
968  }
969  } // loop til quit signaled
970 
971 } // VideoFileWorker
972 
974 
975 
// TransferPictureInfo -- builds one HevcPictureData record (serial number,
// 33-bit 90 kHz PTS, 1-based picture number) and pushes it to the codec via
// RawTransfer with no video payload.
// (The function signature at source line 976 is not visible in this listing.)
977 {
978  HevcPictureData picData;
979 
980  // initialize info buffer to 0
981  memset(&picData, 0, sizeof(HevcPictureData));
982 
983  // calculate pts based on 90 Khz clock tick
984  uint64_t pts = (uint64_t)mTimeBase.FramesToMicroseconds(mInfoFrameCount)*90000/1000000;
985 
986  // set serial number, pts and picture number
987  picData.serialNumber = mInfoFrameCount; // can be anything
988  picData.ptsValueLow = (uint32_t)(pts & 0xffffffff);
989  picData.ptsValueHigh = (uint32_t)((pts >> 32) & 0x1); // roll over at 33 bits
990  picData.pictureNumber = mInfoFrameCount + 1; // must count starting with 1
991  mInfoFrameCount++;
992 
993  // transfer only picture information
994  pM31->RawTransfer(mEncodeChannel,
995  AJA_NULL,
996  0,
997  (uint8_t*)&picData,
998  sizeof(HevcPictureData),
999  false);
1000 }
1001 
1002 
// GetStatus -- snapshots the capture AutoCirculate status into the caller's
// structure: processed frame count, dropped frame count, and buffer level.
// (The function signature at source line 1003 is not visible in this listing.)
1004 {
1005  AUTOCIRCULATE_STATUS inputACStatus;
1006 
1007  mDevice.AutoCirculateGetStatus (mInputChannel, inputACStatus);
1008  outInputStatus->framesProcessed = inputACStatus.GetProcessedFrameCount();
1009  outInputStatus->framesDropped = inputACStatus.GetDroppedFrameCount();
1010  outInputStatus->bufferLevel = inputACStatus.GetBufferLevel();
1011 
1012 } // GetStatus
Instances of me capture frames in real time from a video signal provided to an input of an AJA device...
Passes only foreground video + key to the Mixer output.
Definition: ntv2enums.h:1793
NTV2FrameRate GetNTV2FrameRateFromVideoFormat(const NTV2VideoFormat inVideoFormat)
Definition: ntv2utils.cpp:3630
virtual NTV2VideoFormat GetSDIInputVideoFormat(NTV2Channel inChannel, bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given SDI input source.
const FRAME_STAMP & GetFrameInfo(void) const
Returns a constant reference to my FRAME_STAMP.
int64_t frameTime
Capture time stamp.
virtual void VideoInputWorker(void)
Repeatedly captures video frames using AutoCirculate and add them to the video input ring...
virtual void SetupHostBuffers(void)
Sets up my circular buffers.
virtual void AVFileWorker(void)
Repeatedly removes hevc frame from the hevc ring and writes them to the hevc output file...
virtual AJAStatus Init(void)
Initializes me and prepares me to Run.
#define VIDEO_RING_SIZE
virtual bool SetReference(const NTV2ReferenceSource inRefSource, const bool inKeepFramePulseSelect=(0))
Sets the device's clock reference source. See Video Output Clocking & Synchronization for more inform...
virtual bool ReleaseStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Releases exclusive use of the AJA device for the given process, permitting other processes to acquire...
void SetAJAFrameRate(AJA_FrameRate ajaFrameRate)
Definition: timebase.cpp:164
virtual bool AddConnection(const NTV2InputXptID inSignalInput, const NTV2OutputXptID inSignalOutput=NTV2_XptBlack)
Adds a connection between a widget's signal input (sink) and another widget's signal output (source)...
AJAStatus Add(FrameDataPtr pInFrameData)
Appends a new frame buffer to me, increasing my frame storage capacity by one frame.
uint32_t * pAudioBuffer
Pointer to host audio buffer.
I interrogate and control an AJA video/audio capture/playout device.
Definition: ntv2card.h:28
NTV2FrameBufferFormat
Identifies a particular video frame buffer pixel format. See Device Frame Buffer Formats for details...
Definition: ntv2enums.h:219
virtual bool SetMixerMode(const UWord inWhichMixer, const NTV2MixerKeyerMode inMode)
Sets the mode for the given mixer/keyer.
virtual AJAStatus SetupVideo(void)
Sets up everything I need for capturing video.
virtual void RouteInputSignal(void)
Sets up device routing for capture.
bool SetBuffers(ULWord *pInVideoBuffer, const ULWord inVideoByteCount, ULWord *pInAudioBuffer, const ULWord inAudioByteCount, ULWord *pInANCBuffer, const ULWord inANCByteCount, ULWord *pInANCF2Buffer=NULL, const ULWord inANCF2ByteCount=0)
Sets my buffers for use in a subsequent call to CNTV2Card::AutoCirculateTransfer. ...
virtual bool SetVideoFormat(const NTV2VideoFormat inVideoFormat, const bool inIsAJARetail=(!(0)), const bool inKeepVancSettings=(0), const NTV2Channel inChannel=NTV2_CHANNEL1)
Configures the AJA device to handle a specific video format.
virtual void StartAVFileThread(void)
Start the audio/video file writer thread.
virtual bool Set4kSquaresEnable(const bool inIsEnabled, const NTV2Channel inChannel)
Enables or disables SMPTE 425 "2K quadrants" mode for the given FrameStore bank on the device...
AJAStatus
Definition: types.h:380
ULWord GetCapturedAudioByteCount(void) const
virtual bool DisableChannel(const NTV2Channel inChannel)
Disables the given FrameStore.
static uint64_t GetPid()
Definition: process.cpp:35
ULWord GetBufferLevel(void) const
#define AJA_FAILURE(_status_)
Definition: types.h:373
virtual void StartCodecHevcThread(void)
Start the codec hevc thread.
virtual bool GetAudioRate(NTV2AudioRate &outRate, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Returns the current NTV2AudioRate for the given Audio System.
Definition: ntv2audio.cpp:226
NTV2InputSource NTV2ChannelToInputSource(const NTV2Channel inChannel, const NTV2IOKinds inKinds=NTV2_IOKINDS_SDI)
Definition: ntv2utils.cpp:5132
virtual bool DMAWriteFrame(const ULWord inFrameNumber, const ULWord *pInFrameBuffer, const ULWord inByteCount)
Transfers a single frame from the host to the AJA device.
Definition: ntv2dma.cpp:65
Capture (input) mode, which writes into device SDRAM.
Definition: ntv2enums.h:1243
uint32_t videoBufferSize
Size of host video buffer (bytes)
struct HevcEncodedInfo HevcEncodedInfo
Declares the AJATime class.
virtual AJAStatus SetPriority(AJAThreadPriority priority)
Definition: thread.cpp:133
virtual bool GetSDIInput3GbPresent(bool &outValue, const NTV2Channel channel)
LWord64 acFrameTime
(input/ingest/capture only) The absolute timestamp at the VBI when the frame started recording into d...
FrameDataPtr StartConsumeNextBuffer(void)
The thread that's responsible for processing incoming frames – the consumer – calls this function t...
Obtain audio samples from the audio that&#39;s embedded in the video HANC.
Definition: ntv2enums.h:2007
virtual bool SetMixerBGInputControl(const UWord inWhichMixer, const NTV2MixerKeyerInputControl inInputControl)
Sets the background input control value for the given mixer/keyer.
AJAStatus CreateAiffFile(const std::string &inFileName, uint32_t numChannels, uint32_t maxFrames, uint32_t bufferSize)
Definition: json.hpp:5362
virtual bool SetAudioRate(const NTV2AudioRate inRate, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2AudioRate for the given Audio System.
Definition: ntv2audio.cpp:205
virtual AJAStatus Start()
Definition: thread.cpp:91
static const uint32_t sOverlayBar1[]
virtual bool EnableInputInterrupt(const NTV2Channel channel=NTV2_CHANNEL1)
Allows the CNTV2Card instance to wait for and respond to input vertical blanking interrupts originati...
virtual bool SubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Causes me to be notified when an input vertical blanking interrupt occurs on the given input channel...
void EndConsumeNextBuffer(void)
The consumer thread calls this function to signal that it has finished processing the frame it obtain...
#define false
virtual bool AutoCirculateGetStatus(const NTV2Channel inChannel, AUTOCIRCULATE_STATUS &outStatus)
Returns the current AutoCirculate status for the given channel.
virtual bool SetFrameBufferFormat(NTV2Channel inChannel, NTV2FrameBufferFormat inNewFormat, bool inIsAJARetail=(!(0)), NTV2HDRXferChars inXferChars=NTV2_VPID_TC_SDR_TV, NTV2HDRColorimetry inColorimetry=NTV2_VPID_Color_Rec709, NTV2HDRLuminance inLuminance=NTV2_VPID_Luminance_YCbCr)
Sets the frame buffer format for the given FrameStore on the AJA device.
NTV2EmbeddedAudioInput NTV2ChannelToEmbeddedAudioInput(const NTV2Channel inChannel)
Converts the given NTV2Channel value into its equivalent NTV2EmbeddedAudioInput.
Definition: ntv2utils.cpp:4861
mVideoFormat
Definition: ntv2vcam.cpp:801
This class is a collection of widget input-to-output connections that can be applied all-at-once to a...
virtual class DeviceCapabilities & features(void)
Definition: ntv2card.h:148
virtual bool SetMultiFormatMode(const bool inEnable)
Enables or disables multi-format (per channel) device operation. If enabled, each device channel can ...
AJA_FrameRate GetAJAFrameRate(NTV2FrameRate frameRate)
AJAStatus DetermineInputFormat(NTV2VideoFormat sdiFormat, bool quad, NTV2VideoFormat &videoFormat)
int64_t FramesToMicroseconds(int64_t frames, bool round=false) const
Definition: timebase.cpp:223
ULWord GetProcessedFrameCount(void) const
uint32_t audioBufferSize
Size of host audio buffer (bytes)
uint32_t infoBufferSize
Size of the host information buffer (bytes)
virtual M31VideoPreset GetCodecPreset(void)
Get the codec preset.
virtual void Quit(void)
Gracefully stops me from running.
M31VideoPreset
Definition: ntv2m31enums.h:13
virtual bool Active()
Definition: thread.cpp:116
virtual bool SetQuadFrameEnable(const bool inValue, const NTV2Channel inChannel=NTV2_CHANNEL1)
Enables or disables quad-frame mode on the device.
virtual bool SetAudioSystemInputSource(const NTV2AudioSystem inAudioSystem, const NTV2AudioSource inAudioSource, const NTV2EmbeddedAudioInput inEmbeddedInput)
Sets the audio source for the given NTV2AudioSystem on the device.
Definition: ntv2audio.cpp:485
virtual void GetStatus(AVHevcStatus *outInputStatus)
Provides status information about my input (capture) process.
virtual bool SetOutputFrame(const NTV2Channel inChannel, const ULWord inValue)
Sets the output frame index number for the given FrameStore. This identifies which frame in device SD...
Playout (output) mode, which reads from device SDRAM.
Definition: ntv2enums.h:1241
virtual bool SetMode(const NTV2Channel inChannel, const NTV2Mode inNewValue, const bool inIsRetail=(!(0)))
Determines if a given FrameStore on the AJA device will be used to capture or playout video...
AJAStatus CreateEncFile(const std::string &inFileName, uint32_t maxFrames)
virtual AJAStatus SetupAudio(void)
Sets up everything I need for capturing audio.
Invalid or "not found".
Definition: ntv2enums.h:98
static bool GetFirstDeviceFromArgument(const std::string &inArgument, CNTV2Card &outDevice)
Rescans the host, and returns an open CNTV2Card instance for the AJA device that matches a command li...
virtual bool SetSDITransmitEnable(const NTV2Channel inChannel, const bool inEnable)
Sets the specified bidirectional SDI connector to act as an input or an output.
virtual bool AutoCirculateTransfer(const NTV2Channel inChannel, AUTOCIRCULATE_TRANSFER &transferInfo)
Transfers all or part of a frame as specified in the given AUTOCIRCULATE_TRANSFER object to/from the ...
virtual bool AcquireStreamForApplication(ULWord inApplicationType, int32_t inProcessID)
Reserves exclusive use of the AJA device for a given process, preventing other processes on the host ...
#define AJA_NULL
Definition: ajatypes.h:167
void SetAbortFlag(const bool *pAbortFlag)
Tells me the boolean variable I should monitor such that when it gets set to "true" will cause any th...
AJAStatus SetupHEVC(CNTV2m31 *pM31, M31VideoPreset preset, M31Channel encodeChannel, bool multiStream, bool withInfo)
ULWord GetVideoActiveSize(const NTV2VideoFormat inVideoFormat, const NTV2FrameBufferFormat inFBFormat, const NTV2VANCMode inVancMode=NTV2_VANCMODE_OFF)
Definition: ntv2utils.cpp:2858
virtual bool SetEveryFrameServices(const NTV2TaskMode m)
Definition: ntv2card.h:1195
virtual NTV2VideoFormat GetInputVideoFormat(const NTV2InputSource inVideoSource=NTV2_INPUTSOURCE_SDI1, const bool inIsProgressive=(0))
Returns the video format of the signal that is present on the given input source. ...
Describes a video frame for a given video standard or format and pixel format, including the total nu...
static void Sleep(const int32_t inMilliseconds)
Suspends execution of the current thread for a given number of milliseconds.
Definition: systemtime.cpp:284
void WriteEncData(void *pBuffer, uint32_t bufferSize)
2: OEM (recommended): device configured by client application(s) with some driver involvement...
uint32_t AlignDataBuffer(void *pBuffer, uint32_t bufferSize, uint32_t dataSize, uint32_t alignBytes, uint8_t fill)
virtual bool ApplySignalRoute(const CNTV2SignalRouter &inRouter, const bool inReplace=(0))
Applies the given routing table to the AJA device.
mInputChannel
Definition: ntv2vcam.cpp:1010
See 8-Bit ARGB, RGBA, ABGR Formats.
Definition: ntv2enums.h:228
virtual bool SetMixerCoefficient(const UWord inWhichMixer, const ULWord inMixCoefficient)
Sets the current mix coefficient of the given mixer/keyer.
void WriteAiffData(void *pBuffer, uint32_t numChannels, uint32_t numSamples)
ULWord GetDroppedFrameCount(void) const
Specifies channel or FrameStore 8 (or the 8th item).
Definition: ntv2enums.h:1366
struct HevcPictureInfo HevcPictureInfo
virtual void CodecHevcWorker(void)
Repeatedly transfers hevc frames from the codec and adds them to the hevc ring.
virtual AJAStatus Run(void)
Runs me.
Specifies channel or FrameStore 2 (or the 2nd item).
Definition: ntv2enums.h:1360
virtual NTV2DeviceID GetDeviceID(void)
void WriteHevcData(void *pBuffer, uint32_t bufferSize)
NTV2EncodeHEVCVif(const std::string inDeviceSpecifier="0", const M31VideoPreset inM31Preset=M31_FILE_1280X720_420_8_5994p, const NTV2FrameBufferFormat inPixelFormat=NTV2_FBF_10BIT_YCBCR_420PL2, const uint32_t inAudioChannels=0, const bool inInfoData=(0), const uint32_t inMaxFrames=0xffffffff)
Constructs me using the given settings.
uint32_t audioDataSize
Size of audio data (bytes)
virtual bool AutoCirculateStop(const NTV2Channel inChannel, const bool inAbort=(0))
Stops AutoCirculate for the given channel, and releases the on-device frame buffers that were allocat...
Declares the AJAProcess class.
virtual bool SetMixerVancOutputFromForeground(const UWord inWhichMixer, const bool inFromForegroundSource=(!(0)))
Sets the VANC source for the given mixer/keyer to the foreground video (or not). See the SDI Ancillar...
virtual bool UnsubscribeInputVerticalEvent(const NTV2Channel inChannel=NTV2_CHANNEL1)
Unregisters me so I'm no longer notified when an input VBI is signaled on the given input channel...
This object specifies the information that will be transferred to or from the AJA device in the CNTV2...
virtual bool SetAudioBufferSize(const NTV2AudioBufferSize inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Changes the size of the audio buffer that is used for a given Audio System in the AJA device...
Definition: ntv2audio.cpp:249
virtual bool GetEveryFrameServices(NTV2TaskMode &m)
Definition: ntv2card.h:1194
virtual void SetupAutoCirculate(void)
Initializes AutoCirculate.
virtual bool SetMixerFGInputControl(const UWord inWhichMixer, const NTV2MixerKeyerInputControl inInputControl)
Sets the foreground input control value for the given mixer/keyer.
void EndProduceNextBuffer(void)
The producer thread calls this function to signal that it has finished populating the frame it obtain...
This identifies the first Audio System.
Definition: ntv2enums.h:3897
virtual bool WaitForOutputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
static const uint32_t sOverlayBar0[]
static const ULWord kDemoAppSignature((((uint32_t)( 'D'))<< 24)|(((uint32_t)( 'E'))<< 16)|(((uint32_t)( 'M'))<< 8)|(((uint32_t)( 'O'))<< 0))
Declares numerous NTV2 utility functions.
uint32_t videoDataSize2
Size of field 2 video data (bytes)
ULWord numLines
Height – total number of lines.
AJAStatus CreateRawFile(const std::string &inFileName, uint32_t maxFrames)
virtual AJAStatus Attach(AJAThreadFunction *pThreadFunction, void *pUserContext)
Definition: thread.cpp:169
FrameDataPtr StartProduceNextBuffer(void)
The thread that's responsible for providing frames – the producer – calls this function to populate...
void WriteRawData(void *pBuffer, uint32_t bufferSize)
static void VideoInputThreadStatic(AJAThread *pThread, void *pContext)
This is the video input thread's static callback function that gets called when the thread starts...
virtual bool SetSDIOutLevelAtoLevelBConversion(const UWord inOutputSpigot, const bool inEnable)
Enables or disables 3G level A to 3G level B conversion at the SDI output widget (assuming the AJA de...
static void CodecHevcThreadStatic(AJAThread *pThread, void *pContext)
This is the codec hevc thread's static callback function that gets called when the thread starts...
This is returned from the CNTV2Card::AutoCirculateGetStatus function.
uint16_t UWord
Definition: ajatypes.h:221
ULWord GetAudioSamplesPerFrame(const NTV2FrameRate inFrameRate, const NTV2AudioRate inAudioRate, ULWord inCadenceFrame=0, bool inIsSMPTE372Enabled=false)
Returns the number of audio samples for a given video frame rate, audio sample rate, and frame number. This is useful since AJA devices use fixed audio sample rates (typically 48KHz), and some video frame rates will necessarily result in some frames having more audio samples than others.
Definition: ntv2utils.cpp:2889
1: Standard/Retail: device configured by AJA ControlPanel, service/daemon, and driver.
Specifies channel or FrameStore 1 (or the first item).
Definition: ntv2enums.h:1359
virtual bool AutoCirculateStart(const NTV2Channel inChannel, const ULWord64 inStartTime=0)
Starts AutoCirculating the specified channel that was previously initialized by CNTV2Card::AutoCircul...
virtual bool SetSDIInLevelBtoLevelAConversion(const NTV2ChannelSet &inSDIInputs, const bool inEnable)
Enables or disables 3G level B to 3G level A conversion at the SDI input(s).
uint32_t * pVideoBuffer
Pointer to host video buffer.
static void AVFileThreadStatic(AJAThread *pThread, void *pContext)
This is the video file writer thread's static callback function that gets called when the thread star...
virtual bool GetFrameRate(NTV2FrameRate &outValue, NTV2Channel inChannel=NTV2_CHANNEL1)
Returns the AJA device's currently configured frame rate via its "value" parameter.
Declares the NTV2FormatDescriptor class.
This structure encapsulates the video and audio buffers used by the HEVC demo applications. The demo programs that employ producer/consumer threads use a fixed number of these buffers.
virtual void TransferPictureInfo(CNTV2m31 *pM31)
Transfer picture information to the codec.
uint32_t * pInfoBuffer
Picture information (raw) or encode information (hevc)
bool HasAvailableInputFrame(void) const
Specifies channel or FrameStore 4 (or the 4th item).
Definition: ntv2enums.h:1362
Specifies channel or FrameStore 5 (or the 5th item).
Definition: ntv2enums.h:1363
bool IsRunning(void) const
Declares the NTV2EncodeHEVCVif class.
uint32_t videoDataSize
Size of video data (bytes)
ULWord linePitch
Number of 32-bit words per line – shadows mLinePitch[0] / sizeof(ULWord)
Specifies channel or FrameStore 6 (or the 6th item).
Definition: ntv2enums.h:1364
uint32_t infoDataSize2
Size of the field 2 information data (bytes)
virtual bool AutoCirculateInitForInput(const NTV2Channel inChannel, const UWord inFrameCount=7, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_INVALID, const ULWord inOptionFlags=0, const UByte inNumChannels=1, const UWord inStartFrameNumber=0, const UWord inEndFrameNumber=0)
Prepares for subsequent AutoCirculate ingest, designating a contiguous block of frame buffers on the ...
Specifies channel or FrameStore 7 (or the 7th item).
Definition: ntv2enums.h:1365
#define NUM_OVERLAY_BARS
bool SetAudioBuffer(ULWord *pInAudioBuffer, const ULWord inAudioByteCount)
Sets my audio buffer for use in a subsequent call to CNTV2Card::AutoCirculateTransfer.
UWord NTV2DeviceGetMaxAudioChannels(const NTV2DeviceID inDeviceID)
8-Bit 4:2:0 2-Plane YCbCr
Definition: ntv2enums.h:253
Identifies the 1st SDI video input.
Definition: ntv2enums.h:1269
uint32_t infoDataSize
Size of the information data (bytes)
AJAStatus CreateHevcFile(const std::string &inFileName, uint32_t maxFrames)
virtual bool SetNumberAudioChannels(const ULWord inNumChannels, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the number of audio channels to be concurrently captured or played for a given Audio System on t...
Definition: ntv2audio.cpp:146
virtual void StartVideoInputThread(void)
Start the video input thread.
#define NTV2_AUDIOSIZE_MAX
Declares device capability functions.
virtual bool WaitForInputVerticalInterrupt(const NTV2Channel inChannel=NTV2_CHANNEL1, UWord inRepeatCount=1)
Efficiently sleeps the calling thread/process until the next one or more field (interlaced video) or ...
This identifies the mode in which there are no VANC lines in the frame buffer.
Definition: ntv2enums.h:3799
Specifies channel or FrameStore 3 (or the 3rd item).
Definition: ntv2enums.h:1361
NTV2ReferenceSource NTV2InputSourceToReferenceSource(const NTV2InputSource inInputSource)
Converts a given NTV2InputSource to its equivalent NTV2ReferenceSource value.
Definition: ntv2utils.cpp:5023
See 8-Bit YCbCr Format.
Definition: ntv2enums.h:223
Audio clock derived from the video input.
Definition: ntv2enums.h:1993
virtual bool SetEmbeddedAudioClock(const NTV2EmbeddedAudioClock inValue, const NTV2AudioSystem inAudioSystem=NTV2_AUDIOSYSTEM_1)
Sets the NTV2EmbeddedAudioClock setting for the given NTV2AudioSystem.
Definition: ntv2audio.cpp:417
virtual bool EnableChannel(const NTV2Channel inChannel)
Enables the given FrameStore.