字节格式运行时媒体基础框架

问题描述:

我使用 Media Foundation 来捕获实时(live)的网络摄像头视频,是否有可能在运行时以字节流格式获得捕获的帧,并在每个时间周期之后把它们作为位流写入一个文本文件中?

我不知道是否可以得到字节格式的流(即没有容器的裸流),也不知道能否在运行时做到这一点?

这并不完全清楚你问的是什么。如果你想从网络摄像头捕捉原始帧并将它们保存到文件中,那么答案是肯定的,可以完成。 Media Foundation SDK MFCaptureToFile示例的确如此,但由于它使用SinkWriter,因此在创建容器时必须指定容器文件类型,例如mp4。

如果你真的想要逐一获取原始帧,那么你需要绕开 SinkWriter(或编写一个自定义的 SinkWriter)。下面是一段代码片段,演示如何从 IMFSourceReader 获取样本并将它们转换为字节数组(以及其他一些内容)。您可以将字节数组写入文本文件,但除非您对其做进一步处理(例如在前面加上位图文件头),否则它不会非常有用。在能够调用 ReadSample 之前,IMFSourceReader 和 IMFMediaType 都需要正确设置,但希望它能让您大致了解进一步查看的方向。

// Reads the next video sample from the source reader and copies its raw
// pixel data into a managed byte array.
//
// [out] buffer - receives the frame bytes. When Stride is negative the
//                frame is assumed to be a bottom-up 24bpp RGB/BGR bitmap
//                and is flipped to top-down order before being copied.
//
// Returns: S_OK on success; S_FALSE when ReadSample produced no sample
//          (legitimate on the first read, or when only status flags are
//          delivered); E_POINTER when the reader is not initialised; or
//          whatever failure HRESULT the CHECK_HR macro propagates.
//
// NOTE(review): if CHECK_HR bails out after videoSample/pMediaBuffer have
// been acquired, those COM references leak — same limitation as the
// original; a scoped releaser (e.g. CComPtr) would fix it properly.
HRESULT MFVideoSampler::GetSample(/* out */ array<Byte> ^% buffer) 
{ 
    if (_videoReader == NULL) { 
     // Was "return -1"; E_POINTER is still a FAILED() HRESULT but names
     // the actual problem (uninitialised reader).
     return E_POINTER; 
    } 

    IMFSample *videoSample = NULL; 
    DWORD streamIndex, flags; 
    LONGLONG llVideoTimeStamp; 

    // Initial read can legitimately yield a NULL pSample together with
    // status flags (e.g. a media-type-changed notification).
    CHECK_HR(_videoReader->ReadSample(
     //MF_SOURCE_READER_ANY_STREAM, // Stream index. 
     MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
     0,        // Flags. 
     &streamIndex,     // Receives the actual stream index. 
     &flags,       // Receives status flags. 
     &llVideoTimeStamp,     // Receives the time stamp. 
     &videoSample      // Receives the sample or NULL. 
     ), L"Error reading video sample."); 

    if (flags & MF_SOURCE_READERF_ENDOFSTREAM) 
    { 
     wprintf(L"\tEnd of stream\n"); 
    } 
    if (flags & MF_SOURCE_READERF_NEWSTREAM) 
    { 
     wprintf(L"\tNew stream\n"); 
    } 
    if (flags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED) 
    { 
     wprintf(L"\tNative type changed\n"); 
    } 
    if (flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) 
    { 
     wprintf(L"\tCurrent type changed\n"); 

     // Re-query the (new) current media type and cache the frame size.
     IMFMediaType *videoType = NULL; 
     CHECK_HR(_videoReader->GetCurrentMediaType(
      (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
      &videoType), L"Error retrieving current media type from first video stream."); 

     Console::WriteLine(GetMediaTypeDescription(videoType)); 

     // Get the frame dimensions and stride 
     UINT32 nWidth, nHeight; 
     MFGetAttributeSize(videoType, MF_MT_FRAME_SIZE, &nWidth, &nHeight); 
     _width = nWidth; 
     _height = nHeight; 

     //LONG lFrameStride; 
     //videoType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lFrameStride); 

     videoType->Release(); 
    } 
    if (flags & MF_SOURCE_READERF_STREAMTICK) 
    { 
     wprintf(L"\tStream tick\n"); 
    } 

    if (!videoSample) 
    { 
     printf("Failed to get video sample from MF.\n"); 
     // BUG FIX: the original fell off the end of a non-void function
     // here (undefined behaviour — caller received a garbage HRESULT).
     // S_FALSE = "succeeded, but no sample this time"; buffer untouched.
     return S_FALSE; 
    } 

    DWORD nCurrBufferCount = 0; 
    CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n"); 

    // BUG FIX: pMediaBuffer was uninitialised; NULL-init so a failure
    // path can never Release/Unlock a garbage pointer.
    IMFMediaBuffer * pMediaBuffer = NULL; 
    CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n"); 

    DWORD nCurrLen = 0; 
    CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n"); 

    byte *imgBuff; 
    DWORD buffCurrLen = 0; 
    DWORD buffMaxLen = 0; 
    // BUG FIX: the Lock HRESULT was ignored; on failure imgBuff and the
    // lengths are garbage and the copy below would read wild memory.
    CHECK_HR(pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen), L"Failed to lock the raw buffer holding the video sample.\n"); 

    if (Stride != -1 && Stride < 0) { 
     // Negative stride => bottom-up bitmap; flip rows so the managed
     // buffer comes out top-down.
     int bmpSize = buffCurrLen; // ToDo: Don't assume RGB/BGR 24. 
     int absStride = Stride * -1; 
     byte *flipBuf = new byte[bmpSize]; 

     for (int row = 0; row < _height; row++) { 
      for (int col = 0; col < absStride; col += 3) { 
       // Copy one BGR/RGB triplet from the mirrored source row.
       flipBuf[row * absStride + col] = imgBuff[((_height - row - 1) * absStride) + col]; 
       flipBuf[row * absStride + col + 1] = imgBuff[((_height - row - 1) * absStride) + col + 1]; 
       flipBuf[row * absStride + col + 2] = imgBuff[((_height - row - 1) * absStride) + col + 2]; 
      } 
     } 

     buffer = gcnew array<Byte>(buffCurrLen); 
     Marshal::Copy((IntPtr)flipBuf, buffer, 0, buffCurrLen); 

     // BUG FIX: was "delete flipBuf" on new[]-allocated memory (UB);
     // array new must be paired with array delete.
     delete[] flipBuf; 
    } 
    else { 
     buffer = gcnew array<Byte>(buffCurrLen); 
     Marshal::Copy((IntPtr)imgBuff, buffer, 0, buffCurrLen); 
    } 

    pMediaBuffer->Unlock(); 
    pMediaBuffer->Release(); 

    videoSample->Release(); 

    return S_OK; 
}