Below is the code extract from my decoder transform filter which takes in data from my source filter which is taking RTP network data from an IP camera. The source filter, decode filter can dynamically respond to changes in the camera image dimensions since I need to handle resolution changes in the decode library.
I've used the 'ReceiveConnection' method as described in the DirectShow help, passing the new MediaType data in the next sample. However, I can't get the Video Mixing Renderer to accept the resolution changes dynamically even though the renderer will render the different resolution if the graph is stopped and restarted.
Can anyone point out what I need to do to get the renderer to handle dynamic resolution changes?
HRESULT CDecoder::Receive(IMediaSample* pIn)
{
//Input data does not necessarily correspond one-to-one
//with output frames, so we must override Receive instead
//of Transform.
HRESULT hr = S_OK;
//Deliver input to library
long cBytes = pIn->GetActualDataLength();
BYTE* pSrc;
pIn->GetPointer(&pSrc);
try
{
hr = m_codec.Decode(pSrc, cBytes, (hr == S_OK)?&tStart : NULL);
}
catch (...)
{
hr = E_UNEXPECTED;
}
if (FAILED(hr))
{
if (theLog.enabled()){theLog.strm() << "Decoder Error " << hex << hr << dec << " - resetting input"; theLog.write();}
//Force reset of decoder
m_bReset = true;
m_codec.ResetInput();
//We have handled the error -- don't pass upstream or the source may stop.
return S_OK;
}
//Extract and deliver any decoded frames
hr = DeliverDecodedFrames();
return hr;
}
HRESULT CDecoder::DeliverDecodedFrames()
{
HRESULT hr = S_OK;
for (;;)
{
DecodedFrame frame;
bool bFrame = m_codec.GetDecodedFrame(frame);
if (!bFrame)
{
break;
}
CMediaType mtIn;
CMediaType mtOut;
GetMediaType( PINDIR_INPUT, &mtIn);
GetMediaType( PINDIR_OUTPUT, &mtOut);
//Get the output pin's current image resolution
VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*)mtOut.Format();
if( pvi->bmiHeader.biWidth != m_cxInput ||
pvi->bmiHeader.biHeight != m_cyInput)
{
HRESULT hr = GetPin(PINDIR_OUTPUT)->GetConnected()->ReceiveConnection(GetPin(PINDIR_OUTPUT), &mtIn);
if(SUCCEEDED(hr))
{
SetMediaType(PINDIR_OUTPUT, &mtIn);
}
}
IMediaSamplePtr pOut;
hr = m_pOutput->GetDeliveryBuffer(&pOut, 0, 0, NULL);
if (FAILED(hr))
{
break;
}
AM_MEDIA_TYPE* pmt;
if (pOut->GetMediaType(&pmt) == S_OK)
{
CMediaType mt(*pmt);
DeleteMediaType(pmt);
SetMediaType(PINDIR_OUTPUT, &mt);
pOut->SetMediaType(&mt);
}
// crop, tramslate and deliver
BYTE* pDest;
pOut->GetPointer(&pDest);
m_pConverter->Convert(frame.Width(), frame.Height(), frame.GetY(), frame.GetU(), frame.GetV(), pDest);
pOut->SetActualDataLength(m_pOutput->CurrentMediaType().GetSampleSize());
pOut->SetSyncPoint(true);
if (frame.HasTimestamp())
{
REFERENCE_TIME tStart = frame.Timestamp();
REFERENCE_TIME tStop = tStart+1;
pOut->SetTime(&tStart, &tStop);
}
m_pOutput->Deliver(pOut);
}
return hr;
}