Getting a DirectShow VideoRender filter to respond to MediaType changes on its input pin?

Below is a code extract from my decoder transform filter, which takes data from my source filter; the source filter receives RTP network data from an IP camera. Both the source filter and the decode filter can respond dynamically to changes in the camera image dimensions, since I need to handle resolution changes in the decode library.

I've used the 'ReceiveConnection' method described in the DirectShow help, passing the new MediaType data with the next sample. However, I can't get the Video Mixing Renderer to accept the resolution change dynamically, even though it will render at the new resolution if the graph is stopped and restarted.

Can anyone point out what I need to do to get the renderer to handle dynamic resolution changes?

HRESULT CDecoder::Receive(IMediaSample* pIn)
{
   //Input data does not necessarily correspond one-to-one
   //with output frames, so we must override Receive instead
   //of Transform.
   HRESULT hr = S_OK;

   //Deliver input to library
   long cBytes = pIn->GetActualDataLength();
   BYTE* pSrc;
   pIn->GetPointer(&pSrc);

   //Fetch the sample's start time, if present; hr records whether a time was set
   REFERENCE_TIME tStart = 0, tStop = 0;
   hr = pIn->GetTime(&tStart, &tStop);

   try
   {
      hr = m_codec.Decode(pSrc, cBytes, (hr == S_OK) ? &tStart : NULL);
   } 
   catch (...)
   {
      hr = E_UNEXPECTED;
   }
   if (FAILED(hr))
   {
      if (theLog.enabled())
      {
         theLog.strm() << "Decoder Error " << hex << hr << dec << "     - resetting input";
         theLog.write();
      }
      //Force reset of decoder
      m_bReset = true;
      m_codec.ResetInput();

      //We have handled the error -- don't pass upstream or the source may stop.
      return S_OK;
   }

   //Extract and deliver any decoded frames
   hr = DeliverDecodedFrames();

   return hr;
}

HRESULT CDecoder::DeliverDecodedFrames()
{
   HRESULT hr = S_OK;  

   for (;;)
   {
      DecodedFrame frame;
      bool bFrame = m_codec.GetDecodedFrame(frame);
      if (!bFrame)
      {
         break;
      }

      CMediaType mtIn;
      CMediaType mtOut;
      GetMediaType( PINDIR_INPUT, &mtIn);
      GetMediaType( PINDIR_OUTPUT, &mtOut);

      //Get the output pin's current image resolution
      VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*)mtOut.Format();

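      //If the decoded image size no longer matches the output type, re-propose
      //the connection in place so the downstream pin can accept the new format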
      if( pvi->bmiHeader.biWidth != m_cxInput || 
          pvi->bmiHeader.biHeight != m_cyInput)
      {
         HRESULT hr = GetPin(PINDIR_OUTPUT)->GetConnected()->ReceiveConnection(GetPin(PINDIR_OUTPUT), &mtIn);

         if(SUCCEEDED(hr))
         {
            SetMediaType(PINDIR_OUTPUT, &mtIn);
         }
      } 

      IMediaSamplePtr pOut;
      hr = m_pOutput->GetDeliveryBuffer(&pOut, 0, 0, NULL);

      if (FAILED(hr))
      {
         break;
      }

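      //If the downstream pin attached a media type to the buffer (its way of
      //accepting a format change), adopt it as the current output type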
      AM_MEDIA_TYPE* pmt;
      if (pOut->GetMediaType(&pmt) == S_OK)
      {
         CMediaType mt(*pmt);
         DeleteMediaType(pmt);
         SetMediaType(PINDIR_OUTPUT, &mt);

         pOut->SetMediaType(&mt);
      }

      // crop, translate and deliver
      BYTE* pDest;
      pOut->GetPointer(&pDest);
      m_pConverter->Convert(frame.Width(), frame.Height(), frame.GetY(), frame.GetU(), frame.GetV(), pDest);
      pOut->SetActualDataLength(m_pOutput->CurrentMediaType().GetSampleSize());
      pOut->SetSyncPoint(true);

      if (frame.HasTimestamp())
      {
         REFERENCE_TIME tStart = frame.Timestamp();
         REFERENCE_TIME tStop = tStart + 1;
         pOut->SetTime(&tStart, &tStop);
      }

      m_pOutput->Deliver(pOut);
   }
   return hr;
}

Comments (1)

帅气称霸 2024-09-01 15:25:53

What error do you get when it fails? Probably you need to ensure that there are no outstanding buffers -- all the samples need to have been returned to the allocator before this will succeed. Did you compare your code to the version in www.gdcl.co.uk/gmfbridge?
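
For reference, here is a minimal sketch of the sequence that advice implies, written against the DirectShow base classes. ProposeNewOutputType is a hypothetical helper, not part of the code above: a CTransformFilter-derived decoder could call it from DeliverDecodedFrames in place of the bare ReceiveConnection call, before requesting a delivery buffer and only once every previously delivered sample has been released. The allocator renegotiation after ReceiveConnection is an assumption, on the basis that the new frames may need larger buffers than the ones originally committed.

HRESULT CDecoder::ProposeNewOutputType(const CMediaType& mtNew)
{
   IPin* pDownstream = m_pOutput->GetConnected();
   if (pDownstream == NULL)
   {
      return VFW_E_NOT_CONNECTED;
   }

   //Re-propose the existing connection with the new type. This succeeds only
   //when no buffers from the output allocator are outstanding, so call it while
   //holding no delivery buffer and after downstream has released every sample.
   HRESULT hr = pDownstream->ReceiveConnection(m_pOutput, &mtNew);
   if (FAILED(hr))
   {
      //Typically fails while samples are still in circulation
      return hr;
   }

   //The new frames may need bigger buffers, so renegotiate the allocator:
   //decommit, set the new buffer size, recommit.
   IMemInputPin* pMemInput = NULL;
   hr = pDownstream->QueryInterface(IID_IMemInputPin, (void**)&pMemInput);
   if (SUCCEEDED(hr))
   {
      IMemAllocator* pAlloc = NULL;
      hr = pMemInput->GetAllocator(&pAlloc);
      if (SUCCEEDED(hr))
      {
         ALLOCATOR_PROPERTIES props, actual;
         pAlloc->GetProperties(&props);
         props.cbBuffer = mtNew.GetSampleSize();   //assumes lSampleSize is set in mtNew
         pAlloc->Decommit();
         pAlloc->SetProperties(&props, &actual);
         hr = pAlloc->Commit();
         pAlloc->Release();
      }
      pMemInput->Release();
   }

   if (SUCCEEDED(hr))
   {
      //Record the new type for the output pin
      SetMediaType(PINDIR_OUTPUT, &mtNew);
   }
   return hr;
}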
