Why does my application ask for a codec to play the MVI (.MOV) video files while I can play them in WMP and QuickTime?
Posted by Daniel Lip on Stack Overflow. Published on 2012-10-03T21:36:27Z.
c#
I have an application I wrote some time ago. Loading the video file works, but when I try to play/use the file I get a MessageBox saying that a codec is needed and that I should use GSpot or search the internet for it. When I play these files from my hard disk with Windows Media Player or QuickTime there are no problems.
The video files are named, for example, MVI_2483, and in the file properties I see the type: QuickTime Movie (.MOV).
In my application I'm using DirectShowLib-2005.dll. Below is the class I use to read the video file; in my application I use it to extract only the lightning frames from the video.
In Form1 I have a button click event that just starts the process:
private void button8_Click(object sender, EventArgs e)
{
    viewToolStripMenuItem.Enabled = false;
    fileToolStripMenuItem.Enabled = false;
    button2.Enabled = false;
    label14.Visible = false;
    label15.Visible = false;
    label21.Visible = false;
    label22.Visible = false;
    label24.Visible = false;
    label25.Visible = false;
    ExtractAutomatic = true;
    DirectoryInfo info = new DirectoryInfo(_videoFile);
    string dirName = info.Name;
    automaticModeDirectory = dirName + "_Automatic";
    subDirectoryName = _outputDir + "\\" + automaticModeDirectory;
    if (secondPass == true)
    {
        Start(true);
    }
    Start(false);
}
This is the Start function in Form1:
private void Start(bool secondpass)
{
    setpicture(-1);
    if (Directory.Exists(_outputDir) && secondpass == false)
    {
    }
    else
    {
        Directory.CreateDirectory(_outputDir);
    }
    if (ExtractAutomatic == true)
    {
        string subDirectory_Automatic_Name = _outputDir + "\\" + automaticModeDirectory;
        Directory.CreateDirectory(subDirectory_Automatic_Name);
        f = new WmvAdapter(_videoFile,
            Path.Combine(subDirectory_Automatic_Name));
    }
    else
    {
        string subDirectory_Manual_Name;
        if (Directory.Exists(subDirectoryName))
        {
            subDirectory_Manual_Name = subDirectoryName;
            f = new WmvAdapter(_videoFile,
                Path.Combine(subDirectory_Manual_Name));
        }
        else
        {
            subDirectory_Manual_Name = _outputDir + "\\" + averagesListTextFileDirectory + "_Manual";
            Directory.CreateDirectory(subDirectory_Manual_Name);
            f = new WmvAdapter(_videoFile,
                Path.Combine(subDirectory_Manual_Name));
        }
    }
    button1.Enabled = false;
    f.Secondpass = secondpass;
    f.FramesToSave = _fts;
    f.FrameCountAvailable += new WmvAdapter.FrameCountEventHandler(f_FrameCountAvailable);
    f.StatusChanged += new WmvAdapter.EventHandler(f_StatusChanged);
    f.ProgressChanged += new WmvAdapter.ProgressEventHandler(f_ProgressChanged);
    this.Text = "Processing Please Wait...";
    label5.ForeColor = Color.Green;
    label5.Text = "Processing Please Wait";
    button8.Enabled = false;
    button5.Enabled = false;
    label5.Visible = true;
    pictureBox1.Image = Lightnings_Extractor.Properties.Resources.Weather_Michmoret;
    Hrs = 0; //number of hours
    Min = 0; //number of Minutes
    Sec = 0; //number of Sec
    timeElapsed = 0;
    label10.Text = "00:00:00";
    label11.Visible = false;
    label12.Visible = false;
    label9.Visible = false;
    label8.Visible = false;
    this.button1.Enabled = false;
    myTrackPanelss1.trackBar1.Enabled = false;
    this.checkBox2.Enabled = false;
    this.checkBox1.Enabled = false;
    numericUpDown1.Enabled = false;
    timer1.Start();
    label2.Text = "";
    label1.Visible = true;
    label2.Visible = true;
    label3.Visible = true;
    label4.Visible = true;
    f.Start();
}
And this is the class, which is not my own class; I only changed it in a few places, and it is where the problem occurs:
using System;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Runtime.InteropServices;
using DirectShowLib;
using System.Collections.Generic;
using Extracting_Frames;
using System.Windows.Forms;
namespace Polkan.DataSource
{
internal class WmvAdapter : ISampleGrabberCB, IDisposable
{
#region Fields_Properties_and_Events
bool dis = false;
int count = 0;
const string fileName = @"d:\histogramValues.dat";
private IFilterGraph2 _filterGraph;
private IMediaControl _mediaCtrl;
private IMediaEvent _mediaEvent;
private int _width;
private int _height;
private readonly string _outFolder;
private int _frameId;
//better use a custom EventHandler that passes the results of the action to the subscriber.
public delegate void EventHandler(object sender, EventArgs e);
public event EventHandler StatusChanged;
public delegate void FrameCountEventHandler(object sender, FrameCountEventArgs e);
public event FrameCountEventHandler FrameCountAvailable;
public delegate void ProgressEventHandler(object sender, ProgressEventArgs e);
public event ProgressEventHandler ProgressChanged;
private IMediaSeeking _mSeek;
private long _duration = 0;
private long _avgFrameTime = 0;
//just save the averages to a List (not to fs)
public List<double> AveragesList { get; set; }
public List<long> histogramValuesList;
public bool Secondpass { get; set; }
public List<int> FramesToSave { get; set; }
#endregion
#region Constructors and Destructors
public WmvAdapter(string file, string outFolder)
{
_outFolder = outFolder;
try
{
SetupGraph(file);
}
catch
{
Dispose();
MessageBox.Show("A codec is required to load this video file. Please use http://www.headbands.com/gspot/ or search the web for the correct codec");
}
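// Note: this is a catch-all, so any exception thrown inside SetupGraph
// (for example while connecting the source filter to the sample grabber)
// ends up showing this "codec is required" message, not only a literally
// missing codec.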
}
~WmvAdapter()
{
CloseInterfaces();
}
#endregion
public void Dispose()
{
CloseInterfaces();
}
public void Start()
{
EstimateFrameCount();
int hr = _mediaCtrl.Run();
WaitUntilDone();
DsError.ThrowExceptionForHR(hr);
}
public void WaitUntilDone()
{
int hr;
const int eAbort = unchecked((int)0x80004004);
do
{
System.Windows.Forms.Application.DoEvents();
EventCode evCode;
if (dis == true)
{
return;
}
hr = _mediaEvent.WaitForCompletion(100, out evCode);
}while (hr == eAbort);
DsError.ThrowExceptionForHR(hr);
OnStatusChanged();
}
//Edit: added events
protected virtual void OnStatusChanged()
{
if (StatusChanged != null)
StatusChanged(this, new EventArgs());
}
protected virtual void OnFrameCountAvailable(long frameCount)
{
if (FrameCountAvailable != null)
FrameCountAvailable(this, new FrameCountEventArgs() { FrameCount = frameCount });
}
protected virtual void OnProgressChanged(int frameID)
{
if (ProgressChanged != null)
ProgressChanged(this, new ProgressEventArgs() { FrameID = frameID });
}
/// <summary> build the capture graph for grabber. </summary>
private void SetupGraph(string file)
{
ISampleGrabber sampGrabber = null;
IBaseFilter capFilter = null;
IBaseFilter nullrenderer = null;
_filterGraph = (IFilterGraph2)new FilterGraph();
_mediaCtrl = (IMediaControl)_filterGraph;
_mediaEvent = (IMediaEvent)_filterGraph;
_mSeek = (IMediaSeeking)_filterGraph;
var mediaFilt = (IMediaFilter)_filterGraph;
try
{
// Add the video source
int hr = _filterGraph.AddSourceFilter(file, "Ds.NET FileFilter", out capFilter);
DsError.ThrowExceptionForHR(hr);
// Get the SampleGrabber interface
sampGrabber = new SampleGrabber() as ISampleGrabber;
var baseGrabFlt = sampGrabber as IBaseFilter;
ConfigureSampleGrabber(sampGrabber);
// Add the frame grabber to the graph
hr = _filterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
DsError.ThrowExceptionForHR(hr);
// ---------------------------------
// Connect the file filter to the sample grabber
// Hopefully this will be the video pin, we could check by reading its mediatype
IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
// Get the input pin from the sample grabber
IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
hr = _filterGraph.Connect(iPinOut, iPinIn);
DsError.ThrowExceptionForHR(hr);
// Add the null renderer to the graph
nullrenderer = new NullRenderer() as IBaseFilter;
hr = _filterGraph.AddFilter(nullrenderer, "Null renderer");
DsError.ThrowExceptionForHR(hr);
// ---------------------------------
// Connect the sample grabber to the null renderer
iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);
hr = _filterGraph.Connect(iPinOut, iPinIn);
DsError.ThrowExceptionForHR(hr);
// Turn off the clock. This causes the frames to be sent
// thru the graph as fast as possible
hr = mediaFilt.SetSyncSource(null);
DsError.ThrowExceptionForHR(hr);
// Read and cache the image sizes
SaveSizeInfo(sampGrabber);
//Edit: get the duration
hr = _mSeek.GetDuration(out _duration);
DsError.ThrowExceptionForHR(hr);
}
finally
{
if (capFilter != null)
{
Marshal.ReleaseComObject(capFilter);
}
if (sampGrabber != null)
{
Marshal.ReleaseComObject(sampGrabber);
}
if (nullrenderer != null)
{
Marshal.ReleaseComObject(nullrenderer);
}
GC.Collect();
}
}
private void EstimateFrameCount()
{
try
{
//1sec / averageFrameTime
double fr = 10000000.0 / _avgFrameTime;
double frameCount = fr * (_duration / 10000000.0);
OnFrameCountAvailable((long)frameCount);
}
catch
{
}
}
public double framesCounts()
{
double fr = 10000000.0 / _avgFrameTime;
double frameCount = fr * (_duration / 10000000.0);
return frameCount;
}
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
// Get the media type from the SampleGrabber
var media = new AMMediaType();
int hr = sampGrabber.GetConnectedMediaType(media);
DsError.ThrowExceptionForHR(hr);
if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
{
throw new NotSupportedException("Unknown Grabber Media Format");
}
// Grab the size info
var videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
_width = videoInfoHeader.BmiHeader.Width;
_height = videoInfoHeader.BmiHeader.Height;
//Edit: get framerate
_avgFrameTime = videoInfoHeader.AvgTimePerFrame;
DsUtils.FreeAMMediaType(media);
GC.Collect();
}
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
var media = new AMMediaType
{
majorType = MediaType.Video,
subType = MediaSubType.RGB24,
formatType = FormatType.VideoInfo
};
int hr = sampGrabber.SetMediaType(media);
DsError.ThrowExceptionForHR(hr);
DsUtils.FreeAMMediaType(media);
GC.Collect();
hr = sampGrabber.SetCallback(this, 1);
DsError.ThrowExceptionForHR(hr);
}
private void CloseInterfaces()
{
try
{
if (_mediaCtrl != null)
{
_mediaCtrl.Stop();
_mediaCtrl = null;
dis = true;
}
}
catch (Exception ex)
{
Debug.WriteLine(ex);
}
if (_filterGraph != null)
{
Marshal.ReleaseComObject(_filterGraph);
_filterGraph = null;
}
GC.Collect();
}
int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
{
Marshal.ReleaseComObject(pSample);
return 0;
}
int ISampleGrabberCB.BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
{
if (Form1.ExtractAutomatic == true)
{
using (var bitmap = new Bitmap(_width, _height, _width * 3, PixelFormat.Format24bppRgb, pBuffer))
{
if (!this.Secondpass)
{
long[] HistogramValues = Form1.GetHistogram(bitmap);
long t = Form1.GetTopLumAmount(HistogramValues, 1000);
Form1.averagesTest.Add(t);
}
else
{
//this is the changed part
if (_frameId > 0)
{
if (Form1.averagesTest[_frameId] / 1000.0 - Form1.averagesTest[_frameId - 1] / 1000.0 > 150.0)
{
count = 6;
}
if (count > 0)
{
bitmap.RotateFlip(RotateFlipType.Rotate180FlipX);
bitmap.Save(Path.Combine(_outFolder, _frameId.ToString("D6") + ".bmp"));
count --;
}
}
}
_frameId++;
//only report every 100 frames for performance
if (_frameId % 100 == 0)
OnProgressChanged(_frameId);
}
}
else
{
using (var bitmap = new Bitmap(_width, _height, _width * 3, PixelFormat.Format24bppRgb, pBuffer))
{
if (!this.Secondpass)
{
//get avg
double average = GetAveragePixelValue(bitmap);
if (AveragesList == null)
AveragesList = new List<double>();
//save avg
AveragesList.Add(average);
//***************************\\
// for (int i = 0; i < (int)framesCounts(); i++)
// {
// get histogram values
long[] HistogramValues = Form1.GetHistogram(bitmap);
if (histogramValuesList == null)
histogramValuesList = new List<long>(256);
histogramValuesList.AddRange(HistogramValues);
//***************************\\
//}
}
else
{
if (FramesToSave != null && FramesToSave.Contains(_frameId))
{
bitmap.RotateFlip(RotateFlipType.Rotate180FlipX);
bitmap.Save(Path.Combine(_outFolder, _frameId.ToString("D6") + ".bmp"));
// get histogram values
long[] HistogramValues = Form1.GetHistogram(bitmap);
if (histogramValuesList == null)
histogramValuesList = new List<long>(256);
histogramValuesList.AddRange(HistogramValues);
using (BinaryWriter binWriter =
new BinaryWriter(File.Open(fileName, FileMode.Create)))
{
for (int i = 0; i < histogramValuesList.Count; i++)
{
binWriter.Write(histogramValuesList[(int)i]);
}
binWriter.Close();
}
}
}
_frameId++;
//only report every 100 frames for performance
if (_frameId % 100 == 0)
OnProgressChanged(_frameId);
}
}
return 0;
}
/* int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
{
Marshal.ReleaseComObject(pSample);
return 0;
}
int ISampleGrabberCB.BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
{
using (var bitmap = new Bitmap(_width, _height, _width * 3, PixelFormat.Format24bppRgb, pBuffer))
{
if (!this.Secondpass)
{
//get avg
double average = GetAveragePixelValue(bitmap);
if (AveragesList == null)
AveragesList = new List<double>();
//save avg
AveragesList.Add(average);
//***************************\\
// for (int i = 0; i < (int)framesCounts(); i++)
// {
// get histogram values
long[] HistogramValues = Form1.GetHistogram(bitmap);
if (histogramValuesList == null)
histogramValuesList = new List<long>(256);
histogramValuesList.AddRange(HistogramValues);
long t = Form1.GetTopLumAmount(HistogramValues, 1000);
//***************************\\
Form1.averagesTest.Add(t); // to add this list to a text file or binary file and read the averages from the file when its is Secondpass !!!!!
//}
}
else
{
if (FramesToSave != null && FramesToSave.Contains(_frameId))
{
bitmap.RotateFlip(RotateFlipType.Rotate180FlipX);
bitmap.Save(Path.Combine(_outFolder, _frameId.ToString("D6") + ".bmp"));
// get histogram values
long[] HistogramValues = Form1.GetHistogram(bitmap);
if (histogramValuesList == null)
histogramValuesList = new List<long>(256);
histogramValuesList.AddRange(HistogramValues);
using (BinaryWriter binWriter =
new BinaryWriter(File.Open(fileName, FileMode.Create)))
{
for (int i = 0; i < histogramValuesList.Count; i++)
{
binWriter.Write(histogramValuesList[(int)i]);
}
binWriter.Close();
}
}
for (int x = 1; x < Form1.averagesTest.Count; x++)
{
double fff = Form1.averagesTest[x] / 1000.0 - Form1.averagesTest[x - 1] / 1000.0;
if (Form1.averagesTest[x] / 1000.0 - Form1.averagesTest[x - 1] / 1000.0 > 180.0)
{
bitmap.RotateFlip(RotateFlipType.Rotate180FlipX);
bitmap.Save(Path.Combine(_outFolder, _frameId.ToString("D6") + ".bmp"));
_frameId++;
}
}
}
_frameId++;
//let only report each 100 frames for performance
if (_frameId % 100 == 0)
OnProgressChanged(_frameId);
}
return 0;
}*/
private unsafe double GetAveragePixelValue(Bitmap bmp)
{
BitmapData bmData = null;
try
{
bmData = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
int stride = bmData.Stride;
IntPtr scan0 = bmData.Scan0;
int w = bmData.Width;
int h = bmData.Height;
double sum = 0;
long pixels = bmp.Width * bmp.Height;
byte* p = (byte*)scan0.ToPointer();
for (int y = 0; y < h; y++)
{
p = (byte*)scan0.ToPointer();
p += y * stride;
for (int x = 0; x < w; x++)
{
double i = ((double)p[0] + p[1] + p[2]) / 3.0;
sum += i;
p += 3;
}
//no offset incrementation needed when getting
//the pointer at the start of each row
}
bmp.UnlockBits(bmData);
double result = sum / (double)pixels;
return result;
}
catch
{
try
{
bmp.UnlockBits(bmData);
}
catch
{
}
}
return -1;
}
}
public class FrameCountEventArgs
{
public long FrameCount { get; set; }
}
public class ProgressEventArgs
{
public int FrameID { get; set; }
}
}
I remember I had codec problems before and installed the codecs that were needed, but in this case both QuickTime and Windows Media Player can play the video files, so why can't the application detect and find the codecs on my computer? GSpot says the codec is AVC1, but again, WMP and QuickTime play the video files with no problems.
The video files are from my digital camera!
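For what it's worth, a bare DirectShow graph can be used to test whether DirectShow itself (as opposed to WMP's or QuickTime's own decoding paths) is able to render such a file on this machine. This is only a minimal sketch using the same DirectShowLib types as the class above; the file path is a hypothetical example:
using System;
using System.Runtime.InteropServices;
using DirectShowLib;

internal class RenderCheck
{
    private static void Main()
    {
        // Hypothetical path to one of the camera files.
        const string file = @"d:\MVI_2483.MOV";

        // Ask DirectShow to build a default playback graph for the file.
        // If this fails, no DirectShow splitter/decoder for the format is
        // registered, even if WMP or QuickTime can still play the same file.
        var graph = (IGraphBuilder)new FilterGraph();
        try
        {
            int hr = graph.RenderFile(file, null);
            Console.WriteLine(hr >= 0
                ? string.Format("DirectShow can render this file (hr=0x{0:X8}).", hr)
                : string.Format("DirectShow cannot render this file (hr=0x{0:X8}).", hr));
        }
        finally
        {
            Marshal.ReleaseComObject(graph);
        }
    }
}
If RenderFile also fails here, the missing piece would be a DirectShow filter for the file's format rather than anything in the WmvAdapter class itself.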