Wednesday, February 27, 2013

AVI CODE IN

http://dis.eafit.edu.co/areas/telematica/proyectos/jmf/src/jmf211a_scsl_src/src/share/com/sun/media/parser/video/AviParser.java
http://themonospot.googlecode.com/svn-history/r77/trunk/themonospot-base/src/components/clsParserAVI.cs

http://www.codeforge.com/s/0/mp4-parser
http://www.experts-exchange.com/Programming/Languages/CPP/Q_20884897.html
http://read.pudn.com/downloads170/sourcecode/app/787664/aviparser.cpp__.htm
http://read.pudn.com/downloads170/sourcecode/app/787664/incaviparser.cpp__.htm
http://www.codeproject.com/KB/audio-video/avifilewrapper/avifilewrapper_src.zip


AVI frame split using C or C++

Asked by: dhyanesh
Hi

I wish to split an AVI file into individual frames. I would like to use these frames and later store in a BMP file.

I have the AVI file format from

http://www.wotsit.org

I can get all the header info etc.

The problem is the main data part. The format says that the file contains DIB in compressed and/or uncompressed format starting with 'dc' and 'db' respectively. I tried to get those parts but the image does not come out proper.

I also noticed that the total no. of 'db's and 'dc's in the file are twice the no. of frames.

I am working with gcc on Linux platform. This is part of my project.

Dhyanesh

This question has been solved and asker verified All Experts Exchange premium technology solutions are available to subscription members.


#include <iostream.h>
#include <fstream.h>
#include <string.h>
#include <stdlib.h>


// Fixed-width aliases for the on-disk AVI/BMP structures.
// FIX(review): the original declared DWORD as 'unsigned long', which is
// 8 bytes on LP64 platforms (gcc on 64-bit Linux — the asker's stated
// target).  Every read below assumes 4-byte fields (e.g. the 14*4-byte
// 'avih' read), so the structs would be silently corrupted.  'unsigned int'
// is 4 bytes on all mainstream 32/64-bit platforms.
typedef unsigned char BYTE;    // 1 byte
typedef unsigned short WORD;   // 2 bytes
typedef unsigned int DWORD;    // 4 bytes (was: unsigned long)

/*RIFF ('AVI '
       LIST ('hdrl'
                   'avih'(<Main AVI Header>)
                   LIST ('strl'
                   'strh'(<Stream header>)
                   'strf'(<Stream format>)
                   'strd'(additional header data)
                       .
                       .
                       .
             )
                          .
                   .
             .
       )

LIST ('movi'
                   {SubChunk | LIST('rec '
                              SubChunk1
                              SubChunk2
                                .
                               .
                                 .
                         )

                 .
                 .
                 .
             }

                        .
                        .
                        .
   )

       ['idx1'<AVIIndex>]
)*/





// Main AVI header: payload of the 'avih' chunk.  main() reads it as
// 14 DWORDs (infile.read(...,14*4)), so this layout is correct only when
// DWORD is 4 bytes and the struct is unpadded.
// NOTE(review): this is the old wotsit.org layout; Microsoft's current
// AVIMAINHEADER (see the second listing in this thread) ends with
// dwReserved[4] instead of dwScale/dwRate/dwStart/dwLength — the total
// size (14 DWORDs) is the same either way.
typedef struct {                     //The Main AVI Header
            DWORD  dwMicroSecPerFrame;      // display period per frame, microseconds
            DWORD  dwMaxBytesPerSec;
            DWORD  dwReserved1;
            DWORD  dwFlags;
            DWORD  dwTotalFrames;
            DWORD  dwInitialFrames;
            DWORD  dwStreams;               // number of streams in the file
            DWORD  dwSuggestedBufferSize;
            DWORD  dwWidth;                 // frame width, pixels
            DWORD  dwHeight;                // frame height, pixels
            DWORD  dwScale;
            DWORD  dwRate;
            DWORD  dwStart;
            DWORD  dwLength;
} MainAVIHeader;


// Stream header: payload of a 'strh' chunk (one per stream).
// NOTE(review): the fuller definition later in this thread
// (AVISTREAMHEADER) carries wPriority/wLanguage where dwReserved1 sits
// here, plus a trailing rcFrame rectangle — confirm against the spec
// before relying on field offsets past dwFlags.
typedef struct {                          //AVI Stream Header
            char    fccType[4];           // stream type FOURCC, e.g. "vids" / "auds"
            char    fccHandler[4];        // codec FOURCC
            DWORD   dwFlags;
            DWORD   dwReserved1;
            DWORD   dwInitialFrames;
            DWORD   dwScale;
            DWORD   dwRate;
            DWORD   dwStart;
            DWORD   dwLength;
            DWORD   dwSuggestedBufferSize;
            DWORD   dwQuality;
            DWORD   dwSampleSize;
} AVIStreamHeader;


/*WAVE  Bytes   '##wb'
                  BYTE    abBytes[];*/

/*DIB  Bits   '##db'
             BYTE   abBits[];*/

// Palette-change chunk ('##pc') payload.  Not referenced by the code below.
// NOTE(review): peNew is declared as a raw byte pointer, but on disk the
// new palette entries follow the header inline — a pointer cannot be read
// from the file; verify before using this struct for parsing.
typedef struct {
            BYTE          bFirstEntry;    // first palette slot to change
            BYTE          bNumEntries;    // number of entries that follow
            WORD          wFlags;
            BYTE                  *peNew;
} AVIPALCHANGE;

// One entry of the optional 'idx1' index: the chunk's id, flags, and its
// offset/length within the file.  Not referenced by the code below.
typedef struct {
            DWORD  ckid;
            DWORD  dwFlags;
            DWORD  dwChunkOffset;
            DWORD  dwChunkLength;
} AVIINDEXENTRY;

typedef struct BMPFILEHEADER {
 char type[2];
 DWORD size;
 DWORD reserved;
 DWORD offset;
}FileHeader;


// BITMAPINFOHEADER (40 bytes on disk).  main() reads it from the 'strf'
// chunk with infile.read(...,40) and writes it unchanged into the output
// BMP — correct only when DWORD is 4 bytes wide.
typedef struct BMPINFOHEADER {
 DWORD headersize;          // biSize: size of this header in bytes
 DWORD width;               // image width, pixels
 DWORD height;              // image height, pixels
 WORD planes;
 WORD bitsperpixel;         // main() branches on 24 vs anything else (palettised)
 DWORD compression;
 DWORD sizeimage;           // biSizeImage: pixel-data size used as a read limit in main()
 DWORD xpixelspermeter;
 DWORD ypixelspermeter;
 DWORD colorsused;
 DWORD colorsimportant;
} BmpHeader;



// In-memory frame buffer: dimensions plus a heap-allocated pixel array
// (allocated in main() once the 'strf' header has been read).
typedef struct tagBITMAP {
 DWORD width;
 DWORD height;
 BYTE *data;
}BITMAP;




// Entry point: scans "filedisk.avi" character by character for the 'avih'
// main header and the video stream's 'strf' format, then copies the bytes
// of one frame (selected by 'imageno') found between '00db'/'00dc' markers
// into "testnew.bmp".
//
// NOTE(review): `infile >> ch` on a formatted stream SKIPS whitespace
// bytes (0x09, 0x0A, 0x20, ...) even in binary mode, so frame bytes with
// those values are silently dropped — a likely cause of the corrupt output
// discussed in this thread.  Use infile.get()/read() instead.
int main(void)
{
      ifstream infile;
      ofstream outfile;
      infile.open("filedisk.avi",ios::binary|ios::in);
      outfile.open("testnew.bmp",ios::binary|ios::out);
      if (!infile)
            cout << "Error opening file";
//      DWORD x;

      // BMP file header for the output; size/offset are filled in after the
      // frame has been extracted.
      FileHeader bmpf;
      bmpf.type[0] = 'B';
      bmpf.type[1] = 'M';
      bmpf.reserved = 0;


// DWORD size;

// DWORD offset;

      BmpHeader bmph;                  // BITMAPINFOHEADER read from 'strf'
      BITMAP bitmap;                   // frame pixel buffer
      char palette[768];               // 256 RGB triples (filled only when bitsperpixel != 24)
      char ch,che[4];                  // scanner: current char + 3-char lookahead
      int i=0,count;
      int imageno = 1;                 // 1-based index of the frame to extract
      int skip = 0;                    //This is no. of fields delimited by 'db' and/or 'dc' to skip
      int x=0,y=261;                   // NOTE(review): never used below
      MainAVIHeader avih;
      // Pass 1: hunt for the literal "avih" and read the main AVI header.
      while (!infile.eof())
      {
                  infile >> ch;
                  if (ch == 'a')
                  {
                        infile >> che[0];
                        infile >> che[1];
                        infile >> che[2];
                        che[3] = '\0';
                        if (!strcmp(che,"vih"))
                        {
                              infile.ignore(4);                  // skip the chunk-size field
                              infile.read((char *)&avih,14*4);   // 14 DWORDs; assumes 4-byte DWORD
                              break;
                        }
                  }
      }
      // Pass 2: find the video stream ("vids"), its format ("strf"), then
      // scan forward for the '00db'/'00dc' data chunks of the wanted frame.
      while (!infile.eof())
      {
      infile >> ch;
      if (ch == 'v')
      {
            infile >> che[0];
                       infile >> che[1];
            infile >> che[2];
            che[3] = '\0';
            if (!strcmp(che,"ids"))
            {
               while(1)
            {
                infile >> ch;
                if (ch == 's')
               {
                  infile >> che[0];
                  infile >> che[1];
                  infile >> che[2];
                  che[3] = '\0';
                  if (!strcmp(che,"trf"))
                  {
                                  infile.ignore(4);              // skip the chunk-size field
                        infile.read((char *)&bmph,40);           // BITMAPINFOHEADER; assumes no padding
//                        cout << bmph.bitsperpixel;
                        if (bmph.bitsperpixel != 24)
                        {
                   /***read palette information***/
                         // Entries are stored B,G,R,reserved; the >>2 here and
                         // <<2 on output assume 6-bit VGA-style components —
                         // TODO confirm, standard DIB palettes are 8-bit.
                         for(i=0;i<256;i++)
                         {
                                         palette[i*3+2]=infile.get()>>2;
                               palette[i*3+1]=infile.get()>>2;
                               palette[i*3+0]=infile.get()>>2;
                               infile.get();
                         }
                                         }
                                          bitmap.width=bmph.width;
                                     bitmap.height=bmph.height;
                                   // NOTE(review): allocates 1 byte per pixel — too small for 24bpp frames.
                                   bitmap.data=new BYTE[bitmap.width*bitmap.height];
                         count = bitmap.width*bitmap.height;
                         // Scan forward to the '00db'/'00dc' frame-data markers.
                         while(1)
                        {
                                infile >> ch;
                                                                   if (ch == '0')
                               {
                              infile >> che[0];
                              infile >> che[1];
                                        infile >> che[2];
                              che[3] = '\0';
                              if (!strcmp(che,"0db") || !strcmp(che,"0dc"))
                              {
                                        imageno--;
                                                        if (imageno == 0)
                                      {
                                     // Wanted frame found: copy bytes until the next
                                     // marker, or until sizeimage bytes are stored.
                                     infile >> bitmap.data[i++];
                                       while (1)
                                     {
                                                              infile >> ch;
                                          if (ch == '0')
                                          {
                                                            infile >> che[0];
                                                                 infile >> che[1];
                                                          infile >> che[2];
                                                che[3] = '\0';
                                                                    if (!strcmp(che,"0db") || !strcmp(che,"0dc"))
                                                {
                                                skip--;
                                                                     if (skip == -1)
                                                {
                                                count = i;
                                                bmpf.size = i;
                                                break;
                                                }
                                          }
                                          else
                                          {
                                                // False alarm: the four consumed
                                                // characters were frame data.
                                                bitmap.data[i++] = ch;
                                                bitmap.data[i++] = che[0];
                                                bitmap.data[i++] = che[1];
                                                bitmap.data[i++] = che[2];
                                                if (i > bmph.sizeimage)
                                                      break;
                                          }
                                                         }
                                                         else
                                    {
                                    bitmap.data[i++] = ch;
                                    if (i > bmph.sizeimage)
                                                           break;
                              }
                         }
                                            break;
                  }
            }
        }
         }
         break;
     }
  }
}
}
}
//            infile.read((char *)&x,4);
//            outfile.write((char *)&x,4);
      }
      // Emit the BMP: file header, info header, palette, then pixel data.
      bmpf.offset = 54;                // 14-byte file header + 40-byte info header
      bmpf.size += 54;
      if (bmph.bitsperpixel != 24)
      {
            bmpf.offset += 1024;       // 256 palette entries * 4 bytes
            bmpf.size += 1024;
      }
      // NOTE(review): relies on FileHeader being packed to exactly 14 bytes.
      outfile.write((char *)&bmpf,14);
      outfile.write((char *)&bmph,40);

      // NOTE(review): the palette is written even for 24bpp frames, where
      // 'palette' is uninitialised and no palette belongs in the file.
      for(i=0;i<256;i++)
      {
            outfile.put(palette[i*3+2]<<2);
            outfile.put(palette[i*3+1]<<2);
            outfile.put(palette[i*3+0]<<2);
            outfile.put(0);
      }



      // NOTE(review): bitmap.data is BYTE* (unsigned char*); write() takes
      // char* — modern compilers require a cast here.
      outfile.write(bitmap.data,count);
      delete bitmap.data;              // NOTE(review): array allocation — should be delete[]
      outfile.close();
      infile.close();
      getch();                         // Turbo C++ leftovers — the author
      closegraph();                    // asks below that these be removed
      return 0;
}
 
by: dhyaneshPosted on 2004-02-16 at 07:38:52ID: 10372764
Really sorry — the indenting is quite bad.

Also the logic I used is not very elegant. At present I am only trying to understand the AVI file format.

Dhyanesh

 
by: dhyaneshPosted on 2004-02-16 at 07:47:34ID: 10372852
please remove

closegraph();

getch();

from the end of the code.

I was actually trying out the code on Turbo C++ but will later port it to gcc

Dhyanesh
 
by: grg99Posted on 2004-02-16 at 09:05:09ID: 10373592
You might check whether your structs are getting the right packing and alignment.  You might need a #pragma pack(1) to get things lined up straight.

What I usually do in a case like this is to "tidy up" the code. Almost always find the problem this way:


#1: comment each loop and if.  It would be nice to see comments like "getting bitmap info", "reading bitmap", etc.

#2:  Add calls to a Bomb() function for impossible or unexpected input.  It would be nice to see lines like: Bomb("Not a db");
Bomb("Impossible dimension"); Bomb("Impossible length");

-----------------

A good 88% of the time I find the problem just by following these practices.  Someday I will catch on and write code like this the FIRST time and avoid a lot of headaches.

 
by: ssnkumarPosted on 2004-02-16 at 21:42:34ID: 10379322
>>I have tried reading exactly that but in between two 'db's or 'dc's the no. of bytes are much less than biSizeImage. I also tried reading until I get biSizeimage still I do not get correct bmp.

Don't you think biSizeImage includes the size of header!? Looking at your comments, I think you have not taken into consideration the header as part of data.
So, do try by taking size of header also into consideration....

-ssnkumar
 
by: dhyaneshPosted on 2004-02-17 at 02:50:50ID: 10380901
ssnkumar

I do not think biSizeimage contains the size of the header. The header of a bitmap is given only ONCE in the avi file and this header is used for all.

Even if biSizeimage contains the header the frame is still a long way from being correct.

Dhyanesh
 
by: dhyaneshPosted on 2004-02-17 at 02:58:24ID: 10380936
grg99, I am getting the structs proper. The problem is with the data part.

I agree with grg99 that the code I am using is pretty bad and not very well commented. I will re-code it after a while.

To all
---------

However I still feel there is something BIG that is amiss. This is because when I opened the AVI file in Notepad and read its contents, towards the end I saw the 'dc's placed closer to each other than at the beginning of the file. It might be only 8 - 10 bytes between two 'dc's, whereas the image size is 20,880 i.e. biSizeImage. This is not just for a single AVI. I opened 5 - 6 files and found the same pattern.

Also the pattern of total no. of 'db' + 'dc' = 2 * no of frames was true for all the avi files.

Dhyanesh
 
by: dhyaneshPosted on 2004-03-04 at 07:44:43ID: 10514847
Can anybody tell me where I can get code in C for existing AVI players? I am sure there must be some open source Linux AVI player
 
by: Posted on 2004-03-06 at 12:13:38ID: 10532319
Hi,

Sorry for delay, here is the source of program that copies frames into bmps.
====start aviparser.cpp===
// AviParser.cpp : Defines the entry point for the console application.
//

#include "stdafx.h"
#include <iostream>
#include <fstream>
#include <strstream>
using namespace std;
// Pack structures to 2-byte boundaries so the on-disk layouts below are not
// padded (BITMAPFILEHEADER must be exactly 14 bytes, not 16).
#pragma pack(2)
typedef struct tagBITMAPFILEHEADER { // bmfh
        char  bfType[2];  /* always 'BM' */
        unsigned long   bfSize;  /* size of bitmap file in bytes */
        unsigned short  bfReserved1; /* always 0 */
        unsigned short  bfReserved2; /* always 0 */
        unsigned long   bfOffBits;   /* offset to data for bitmap */
} BITMAPFILEHEADER;
// NOTE(review): written for MSVC where long is 4 bytes; on LP64 (gcc/Linux
// x86-64) 'unsigned long' is 8 bytes and every sizeof/read below is wrong.

// BITMAPINFOHEADER: geometry and format of the DIB carried in 'strf';
// written verbatim into each extracted .bmp by copyBMPtoFile.
typedef struct tagBITMAPINFOHEADER{ // bmih
   unsigned long  biSize;            // size of this header in bytes
   unsigned long   biWidth;          // image width, pixels
   unsigned long   biHeight;         // image height, pixels
   unsigned short   biPlanes;
   unsigned short   biBitCount;      // bits per pixel
   unsigned long            biCompression; /* a DIB can be compressed using run length encoding */
   unsigned long            biSizeImage;
   unsigned long            biXPelsPerMeter;
   unsigned long            biYPelsPerMeter;
   unsigned long            biClrUsed;
   unsigned long            biClrImportant;
} BITMAPINFOHEADER;
// 'strh' chunk payload: one per stream, describing its type, codec and
// timing (the chunk id/size themselves are consumed by readChunkDsc).
typedef struct _avistreamheader
{
      char fccType[4];                // "vids", "auds", ...
      char fccHandler[4];             // codec FOURCC
      unsigned long  dwFlags;
      unsigned short wPriority;
      unsigned short wLanguage;
      unsigned long  dwInitialFrames;
      unsigned long  dwScale;
      unsigned long  dwRate;          // per the AVI spec, dwRate/dwScale = samples per second
      unsigned long  dwStart;
      unsigned long  dwLength;
      unsigned long  dwSuggestedBufferSize;
      unsigned long  dwQuality;
      unsigned long  dwSampleSize;
      // Destination rectangle of this stream within the movie frame.
      struct {
            short int left;
            short int top;
            short int right;
            short int bottom;
      }  rcFrame;
} AVISTREAMHEADER;

// 12-byte file preamble: "RIFF", the file size, then the form type
// ("AVI " for these files); read once at the top of _tmain.
typedef struct _RIFF_HDR
{
      char FOURCC[4];     // "RIFF"
      long filesize;
      char filetype[4];   // "AVI "
} RIFFHDR;
// NOTE(review): unused — chunk headers are read field by field in
// readChunkDsc instead.  A RIFF chunk header on disk is only id + size
// (8 bytes); this struct's third field has no on-disk counterpart.
typedef struct _CHUNK_HDR
{
      char chunkid[4];
      long realblocksize;
      long blocksize;
} CHUNKHDR;
// 'avih' chunk payload: global properties of the AVI file; read whole into
// memory by readAviHeader.
typedef struct _avimainheader {
    unsigned long  dwMicroSecPerFrame;    // display period per frame, microseconds
    unsigned long  dwMaxBytesPerSec;
    unsigned long  dwPaddingGranularity;
    unsigned long  dwFlags;
    unsigned long  dwTotalFrames;
    unsigned long  dwInitialFrames;
    unsigned long  dwStreams;             // stream count (video + audio)
    unsigned long  dwSuggestedBufferSize;
    unsigned long  dwWidth;               // frame width, pixels
    unsigned long  dwHeight;              // frame height, pixels
    unsigned long  dwReserved[4];
} AVIMAINHEADER;
// Path of the AVI file to parse (MSVC _T() TCHAR literal).
#define AVI_FILE_NAME _T("C:\\filecopy.avi") //<<--- Change here file name.

// Forward declaration: readList dispatches LIST bodies to the readers below.
long readList(ifstream* favi,long chunk_size);
// Split a FOURCC packed into a long (as read from disk) back into its four
// characters, low byte first — i.e. little-endian byte order.
#define UNPACK_FOURCC(lfourcc,fourcc) (fourcc)[0] = (char)(lfourcc & 0x00FF); \
                                                        (fourcc)[1] = (char)((lfourcc >>  8) & 0x00FF);\
                                                        (fourcc)[2] = (char)((lfourcc >> 16) & 0x00FF);\
                                                        (fourcc)[3] = (char)((lfourcc >> 24) & 0x00FF);
void printFOURCC(char* Msg, char fourcc[4])
{
      cout<< Msg << fourcc[0]<< fourcc[1] << fourcc[2] << fourcc[3] << endl;
}
// Read one RIFF chunk descriptor: the FOURCC id followed by the chunk size.
// The id is delivered through 'fourcc' (4 chars, low byte of the packed
// value first) and the size through *chunk_size, which is also returned.
long readChunkDsc(ifstream* favi, char* fourcc, long* chunk_size)
{
      long packed = 0;
      favi->read((char*)&packed,sizeof(long));
      // Unpack the little-endian FOURCC value into its four characters.
      for (int b = 0; b < 4; ++b)
            fourcc[b] = (char)((packed >> (8 * b)) & 0x00FF);
      long size_field = 0;
      favi->read((char *)&size_field,sizeof(long));
      *chunk_size = size_field;
      return *chunk_size;
}

void printAviHeader(AVIMAINHEADER *hdr)
{
      // Placeholder dump: only a banner is printed; the header fields are
      // not yet reported.
      (void)hdr;
      std::cout << " ==== AVI HEADER ====" << std::endl;
}
void printAviStreamHeader(AVISTREAMHEADER *hdr)
{
      // Placeholder dump: only a banner is printed; the header fields are
      // not yet reported.
      (void)hdr;
      std::cout << " ==== AVI STREAM HEADER ====" << std::endl;
}
// Read the fixed-size main AVI header straight into *avimainhdr and report
// whether the stream is still healthy afterwards.
bool readAviHeader(ifstream* favi,AVIMAINHEADER* avimainhdr)
{
      const size_t header_bytes = sizeof(AVIMAINHEADER);
      favi->read(reinterpret_cast<char*>(avimainhdr), header_bytes);
      return favi->good();
}
long readhdrlList(ifstream* favi,long ch_size)
{
      char chunk_id[4];
      long chunk_size;
      long len = 0;
      while (ch_size>len)
      {
            readChunkDsc(favi, (char *)chunk_id, &chunk_size);
            long pad = favi->tellg();
            if (chunk_size == 0)
                  return 0;
            if (strncmp(chunk_id,"LIST",4) == 0)
                  readList(favi,chunk_size);
            if (strncmp(chunk_id,"avih",4) == 0)
            {
                  AVIMAINHEADER hdr;
                  readAviHeader(favi,&hdr);
                  printAviHeader(&hdr);
            }
            len += chunk_size + 8;
            favi->seekg(chunk_size + pad, ios::beg);
      }
      return chunk_size;
}
// One DIB palette entry as stored on disk: blue, green, red, reserved.
typedef struct tagRGBQUAD {
        unsigned char      rgbBlue;
        unsigned char      rgbGreen;
        unsigned char      rgbRed;
            unsigned char      rgbReserved;
} RGBQUAD;

// Header plus variable-length palette, exactly as stored in 'strf';
// bmiColors[1] is the classic flexible-array idiom — the real entry count
// is implied by the chunk size (readstrlList mallocs the whole chunk).
typedef struct tagBITMAPINFO {
    BITMAPINFOHEADER    bmiHeader;
    RGBQUAD             bmiColors[1];
} BITMAPINFO;

// Raw 'strf' payload (header + palette) of the video stream, captured by
// readstrlList and prepended to every frame written by copyBMPtoFile;
// freed once in _tmain.
BITMAPINFO *bmpinf = NULL;
long sz_BITMAPINFO = 0;
// Parse a 'strl' (stream description) LIST: print each 'strh' header and,
// when the stream is video ('vids'), capture the raw 'strf' payload into
// the globals bmpinf / sz_BITMAPINFO for later use by copyBMPtoFile.
long readstrlList(ifstream* favi,long ch_size)
{
      char chunk_id[4];
      long chunk_size;
      long len = 4;                     // the 4-byte list-type FOURCC was already consumed
      int flag_set = 0;                 // set while the current stream is 'vids'
      while (ch_size>len)
      {
            readChunkDsc(favi, (char *)chunk_id, &chunk_size);
            long pad = favi->tellg();   // start of chunk data, for the final seek
            if (chunk_size == 0)
                  return 0;             // defensive: malformed/empty chunk
            if (strncmp(chunk_id,"LIST",4) == 0)
                  readList(favi,chunk_size);
            if (strncmp(chunk_id,"strh",4) == 0)
            {
                  AVISTREAMHEADER hdr;
                  favi->read((char *)&hdr,sizeof(AVISTREAMHEADER));
                  if (strncmp(hdr.fccType, "vids",4) == 0)
                        flag_set = 1;   // the next 'strf' belongs to the video stream
                  printAviStreamHeader(&hdr);
            }
            if (flag_set && strncmp(chunk_id,"strf",4) == 0)
            {
                  // Keep the whole strf payload (BITMAPINFOHEADER + palette).
                  // NOTE(review): with more than one video stream, the
                  // previous bmpinf allocation is leaked here.
                  bmpinf = (BITMAPINFO *) malloc(chunk_size);
                  favi->read((char*)bmpinf, chunk_size);
                  sz_BITMAPINFO = chunk_size;
                  flag_set = 0;
            }
            // RIFF word alignment: odd-sized chunks carry one pad byte.
            if ((chunk_size % 2) == 1)
                  chunk_size +=1;
            len += chunk_size + 8;      // 8 = chunk id + size fields
            favi->seekg(chunk_size + pad, ios::beg);
      }
      return chunk_size;
}
void printmovichunkinfo(char chunk_id[4])
{
      cout << "Stream " << chunk_id[0] << chunk_id[1] << endl;
      if (strncmp(&chunk_id[2],"db",2) == 0)
            cout << "Uncompressed video" << endl;
      if (strncmp(&chunk_id[2],"dc",2) == 0)
            cout << "Compressed video" << endl;
      if (strncmp(&chunk_id[2],"pc",2) == 0)
            cout << "palette change" << endl;
      if (strncmp(&chunk_id[2],"wb",2) == 0)
            cout << "Audio Data" << endl;
}
bool copyBMPtoFile(ifstream *favi,long chunk_size, long fnameid)
{
      BITMAPFILEHEADER bmfh;
      memset(&bmfh,0,sizeof(BITMAPFILEHEADER));
      bmfh.bfType[0] = 'B';
      bmfh.bfType[1] = 'M';
      bmfh.bfSize = (unsigned long)(chunk_size + sizeof(BITMAPFILEHEADER) + sz_BITMAPINFO);
      bmfh.bfOffBits = sz_BITMAPINFO + sizeof(BITMAPFILEHEADER);

      ofstream fout;
      char * buf = new char[chunk_size];
      try
      {
            strstream str_str;
            str_str << "c:\\" << "bmp" << fnameid << ".bmp" << '\0';
            fout.open(str_str.str(),ios::binary|ios::out|ios::trunc);
            favi->read(buf, chunk_size);
            fout.write((char *)(void *)&bmfh,sizeof(BITMAPFILEHEADER));
            fout.write((char *)bmpinf,sz_BITMAPINFO);
            fout.write(buf, chunk_size);
            fout.close();
      }
      catch(...)
      {
            cout << "copy to bmp failed" << endl;
      }
      delete [] buf;
      return favi->good();
}

// Parse a 'movi' LIST: each non-LIST sub-chunk is reported and dumped to a
// numbered .bmp via copyBMPtoFile; nested 'rec ' LISTs recurse through
// readList.
// NOTE(review): unlike readstrlList, odd-sized chunks are NOT padded to a
// word boundary here, so parsing can drift by one byte after odd chunks.
long readmoviList(ifstream* favi,long ch_size)
{
      long len = 4;                     // list-type FOURCC already consumed
      int imgNumber = 1;                // sequence number for the output files
      long chunk_size;
      char chunk_id[4];
      while (ch_size>len)
      {
            memset(&chunk_id[0],0,4);
            readChunkDsc(favi, (char *)chunk_id, &chunk_size);
            long pad = favi->tellg();   // start of chunk data
            if (chunk_size == 0)
                  return 0;             // defensive: malformed/empty chunk
            if (strncmp(chunk_id,"LIST",4) == 0)
                  readList(favi,chunk_size);
            else
            {
                  printmovichunkinfo(chunk_id);
                  cout << "Chunk Size: " << chunk_size << endl;
                  copyBMPtoFile(favi,chunk_size,imgNumber++);
                  favi->seekg(chunk_size + pad, ios::beg);
            }
            len += chunk_size + 8;      // 8 = chunk id + size fields
      }
      return chunk_size;
}

long readIdxList(ifstream* favi,long chunk_size)
{
      return chunk_size;
}
// Parse a 'rec ' LIST inside 'movi': like readmoviList, but 'JUNK' padding
// chunks are skipped instead of being dumped.
// NOTE(review): imgNumber restarts at 1 for every 'rec ' list, so frames
// from different rec lists overwrite each other's c:\bmpN.bmp files.
long readmovirecList(ifstream* favi,long ch_size)
{
      long len = 4;                     // list-type FOURCC already consumed
      int imgNumber = 1;
      long chunk_size;
      char chunk_id[4];
      while (ch_size>len)
      {
            memset(&chunk_id[0],0,4);
            long pad = favi->tellg();   // NOTE(review): overwritten two lines below
            readChunkDsc(favi, (char *)chunk_id, &chunk_size);
            pad = favi->tellg();        // start of chunk data, for the final seek
            if (chunk_size == 0)
                  return 0;             // defensive: malformed/empty chunk
            if (strncmp(chunk_id,"LIST",4) == 0)
                  readList(favi,chunk_size);
            //      printFOURCC("Chunk_id: ",chunk_id);
            if (strncmp(chunk_id,"JUNK",4) != 0)
            {
                  printmovichunkinfo(chunk_id);
                  cout << "Chunk Size: " << chunk_size << endl;
                  copyBMPtoFile(favi,chunk_size,imgNumber++);
            }
            favi->seekg(chunk_size + pad, ios::beg);
            len += chunk_size + 8;      // 8 = chunk id + size fields
      }
      return chunk_size;
}
long readList(ifstream* favi,long chunk_size)
{
      char listtype[4];
      long pos = favi->tellg();
      favi->read((char *)&listtype[0], sizeof(listtype));
      printFOURCC("List Type: ", listtype);
      if(strncmp(listtype,"hdrl",4) == 0)
            readhdrlList(favi,chunk_size );
      else if(strncmp(listtype,"movi",4) == 0)
            readmoviList(favi,chunk_size );
      else if(strncmp(listtype,"idx1",4) == 0)
            readIdxList(favi,chunk_size );
      else if(strncmp(listtype,"indx",4) == 0)
            readIdxList(favi,chunk_size );
      else if(strncmp(listtype,"strl",4) == 0)
            readstrlList(favi,chunk_size );
      else if(strncmp(listtype,"rec ",4) == 0)
            readmovirecList(favi,chunk_size );
      favi->seekg(pos + chunk_size);
      return chunk_size;
}
int _tmain(int argc, _TCHAR* argv[])
{
      ifstream favi;
      favi.open(AVI_FILE_NAME,ios::binary);
      if (favi == NULL)
      {
            cout << "Can't open file " << AVI_FILE_NAME << endl;
      }
      try
      {
            RIFFHDR rh;
            favi.read((char *)&rh,sizeof(rh));
            printFOURCC("RIFF header: ",rh.FOURCC);
            cout << "FileSize: "<<rh.filesize << endl;
            printFOURCC("FileType: ",rh.filetype);
            char chid[4];
            memset(&chid[0],0,4);
            long chunk_size;
            long chunk_pos = favi.tellg();
            while(!favi.eof())
            {
                  if (readChunkDsc(&favi,(char *)chid,&chunk_size))
                  {
                        printFOURCC("Chunk ID: ",chid);
                        cout << "Chunk Size: " << chunk_size << endl;
                        if (strncmp(chid,"LIST",4) == 0)
                        {
                              //readlist
                              readList(&favi,chunk_size);
                        }
                        if (strncmp(chid,"hdrl",4) == 0)
                        {
                              //read hdrl
                              cout << "hdrl chunk" << endl;
                        }
                        if (strncmp(chid,"movi",4) == 0)
                        {
                              //read movi
                              cout << "movi chunk" << endl;
                        }
                        if (strncmp(chid,"idx1",4) == 0)
                        {
                              //read movi
                              cout << "idx1 chunk" << endl;
                        }
                  }
                  else
                        break;
                  favi.seekg( chunk_pos + chunk_size + 8,ios::beg);
                  chunk_pos = favi.tellg();
            }
           
           
      }
      catch(...)
      {
      }
      free(bmpinf);
      favi.close();
      char _a;
      cin >> _a;
}
====end of aviparser.cpp===

Tell me if this is what you need?
 
by: dhyaneshPosted on 2004-03-06 at 22:14:42ID: 10533968
RomanPetrenko,

Your program works great for small files like filecopy and filedisk.

However for larger files all the frames are blank. Is the size a reason or do they have a different format ?

One more thing. When the files filecopy and filedisk are split into frames, the first frame gives the entire picture. The next frames give only the difference, or something like that. What type of compression is this? Is it motion estimation and compensation?

Dhyanesh
 
by: RomanPetrenkoPosted on 2004-03-06 at 23:36:08ID: 10534104
They have a different format, I think. Larger files typically use some kind of codec (DivX, XviD and so on), so you can't extract bitmaps directly from the AVI.
this is the classic schema of how a video player works...

FileParser->Codec->Frames->Monitor(BMP), and there are no codecs in my program: FileParser->Frames->Monitor. So you can work only with "original" AVI files (created without codecs).

Is it motion estimation? I'm not sure. This is like frames stored in avi.

 
by: dhyaneshPosted on 2004-03-07 at 00:19:51ID: 10534186
>> This is like frames stored in avi.


If you extract the frames in filecopy.avi, you will notice that the first frame has a purple background. However the remaining frames do not have a purple background. They have a black background and only the page which is moving is redrawn. The rest of the image is black. It seems that somehow this frame is mixed with the previous frame to get current frame. How do I know which of AVI files use such a technique? This will be very crucial


Dhyanesh
 
by: RomanPetrenkoPosted on 2004-03-07 at 02:45:28ID: 10534352
in idx1 chunk stored following structure:
typedef struct _avioldindex {
   FOURCC  fcc;
   DWORD   cb;
   struct _avioldindex_entry {
      DWORD   dwChunkId;
      DWORD   dwFlags;
      DWORD   dwOffset;
      DWORD   dwSize;
  } aIndex[];
} AVIOLDINDEX;

where dwFlags can contain AVIIF_KEYFRAME which mean that this frame is full picture, frames between key frames contains just difference images.
AVIMAINHEADER also contains dwFlags that can be AVIF_HASINDEX - means that avi file contain idx1 chunk and AVIF_MUSTUSEINDEX - which means that you should use index instead of physical ordering of chunks.

look this three links:
http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directx9_c/directx/htm/avioldindexstructure.asp
http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directx9_c/directx/htm/avirifffilereference.asp
http://msdn.microsoft.com/library/default.asp?url=/library/en-us/directx9_c/directx/htm/avimainheaderstructure.asp

 


  1. #include "aviparser.h"   
  2.    
// just create an empty object with all the variables at their default value:
// every pointer member starts NULL and every counter starts at zero, so the
// teardown path (CloseAvi) can presumably tell what was actually acquired --
// confirm against CloseAvi's definition.
AviParser::AviParser() : //filehandle(NULL),
    VideoDecompressor(NULL), sourceVideoData(NULL), sourceAudioData(NULL),
    uncompressedVideoData(NULL), uncompressedAudioData(NULL), ACMstreamHandle(NULL),
    sourceAudioFormat(NULL), audioData(NULL), frameData(NULL), moviPos(0),
    lastAudioFrame(0), lastVideoFrame(0), totalKeys(0) ,keyFrames(NULL) {
}
  10.    
// Destructor: delegates all cleanup to the common CloseAvi() teardown path
// (which presumably releases the file handle, codec handles and buffers --
// confirm in its definition).
AviParser::~AviParser(){
    CloseAvi();
}
  14.    
// Intended to obtain the stream headers via the Video for Windows PAVI API
// (AVIStreamOpenFromFile / AVIStreamInfo / AVIFileInfo) instead of parsing
// the RIFF structure by hand.  The entire implementation is commented out,
// so the function is currently a no-op that always reports success; the
// callers therefore rely on the manual header parsing in InitFile().
bool AviParser::UsePavi( char * file ){

    /*PAVISTREAM video;
    PAVISTREAM audio;
    AVISTREAMINFO info;
    BITMAPINFO videoFormat;
    int size;
    AVIFileInit();
    if (AVIStreamOpenFromFile(&video, file, streamtypeVIDEO, 0, OF_READ, NULL))
        return false;
    if (AVIStreamInfo( video, &info,sizeof(info)) )
        return false;

    // AVISTREAMINFO contains more data than AVIStreamHeader, and in a slightly different order...
    VideoStreamHeader.fccType = info.fccType;
    VideoStreamHeader.fccHandler = info.fccHandler;
    VideoStreamHeader.dwFlags = info.dwFlags;
    VideoStreamHeader.wPriority = info.wPriority;
    VideoStreamHeader.wLanguage = info.wLanguage;
    VideoStreamHeader.dwInitialFrames = info.dwInitialFrames;
    VideoStreamHeader.dwScale = info.dwScale;
    VideoStreamHeader.dwRate = info.dwRate;
    VideoStreamHeader.dwStart = info.dwStart;
    VideoStreamHeader.dwLength = info.dwLength;
    VideoStreamHeader.dwSuggestedBufferSize = info.dwSuggestedBufferSize;
    VideoStreamHeader.dwQuality = info.dwQuality;
    VideoStreamHeader.dwSampleSize = info.dwSampleSize;
    VideoStreamHeader.rcFrame = info.rcFrame;
    FileHeader.dwTotalFrames = info.dwLength;

    PAVIFILE avifile;
    if (AVIFileOpen( &avifile, file, OF_SHARE_DENY_NONE, NULL ))
        return false;

    AVIFILEINFO header;
    if (AVIFileInfo(avifile, &header, sizeof(header)))
        return false;

    FileHeader.dwMaxBytesPerSec = header.dwMaxBytesPerSec;
    FileHeader.dwFlags = header.dwFlags;
    FileHeader.dwStreams = header.dwStreams;
    FileHeader.dwSuggestedBufferSize = header.dwSuggestedBufferSize;
    FileHeader.dwWidth = header.dwWidth;
    FileHeader.dwHeight = header.dwHeight;

    if ( FileHeader.dwStreams > 1 ) {
        if ( AVIStreamOpenFromFile(&audio, file, streamtypeAUDIO, 0, OF_READ, NULL))
            return false;
        if ( AVIStreamInfo(audio, &info, sizeof(info)) )
            return false;

        AudioStreamHeader.fccType = info.fccType;
        AudioStreamHeader.fccHandler = info.fccHandler;
        AudioStreamHeader.dwFlags = info.dwFlags;
        AudioStreamHeader.wPriority = info.wPriority;
        AudioStreamHeader.wLanguage = info.wLanguage;
        AudioStreamHeader.dwInitialFrames = info.dwInitialFrames;
        AudioStreamHeader.dwScale = info.dwScale;
        AudioStreamHeader.dwRate = info.dwRate;
        AudioStreamHeader.dwStart = info.dwStart;
        AudioStreamHeader.dwLength = info.dwLength;
        AudioStreamHeader.dwSuggestedBufferSize = info.dwSuggestedBufferSize;
        AudioStreamHeader.dwQuality = info.dwQuality;
        AudioStreamHeader.dwSampleSize = info.dwSampleSize;
        AudioStreamHeader.rcFrame = info.rcFrame;

        if (AVIStreamReadFormat(audio, AVIStreamStart(audio), NULL, &size))
            return false;
        sourceAudioFormat = (WAVEFORMATEX *)malloc(size);
        if (AVIStreamReadFormat(audio, AVIStreamStart(audio), sourceAudioFormat, &size) )
            return false;
    }
    size = sizeof(sourceVideoFormat);
    if (AVIStreamReadFormat(video, AVIStreamStart(video), &sourceVideoFormat, &size))
        return false;

    AVIStreamRelease(video); // Release The Stream
    AVIStreamRelease(audio);
    avifile->Release();
    AVIFileExit();*/ // Release The File*/
    return true;
}
  97.    
  98. //***************************************************************   
  99.    
/*  This function reads in the index at the end of the file. The index is necessary for
    efficient random access; since the size of each audio and video frame is variable,
    to access them fast the player needs to know where each one starts and how long they
    are. This is what the index does. The function also splits the index into 2 smaller
    indexes, one for video and 1 for audio */
// Parameters:
//   name     - file name (unused inside this function)
//   indexPos - suggested absolute position just past the 'idx1' tag, 0 = unknown
// Returns AVI_VALID on success, AVI_INCOMPLETE when no index could be located.
// NOTE(review): one failure path returns 'false' (0) instead of an AVI_* code.
char AviParser::ReadIndex( char * name, long indexPos){

    bool found = false;
    long idx1Pos, indexLength;

    unsigned long dwActual;
    char temp[1003];
    // if there is an index, it will at least have 1 index per frame

    if ( indexPos ){

        // Read the four bytes just before the suggested position and see
        // whether they really are the 'idx1' tag.
        SetFilePointer(filehandle, indexPos-4, NULL ,FILE_BEGIN);
        ReadFile(filehandle, &temp[0], 4, &dwActual, NULL);
    }
    // '1xdi' is the multi-char constant for "idx1" read as a little-endian long.
    // NOTE(review): when indexPos == 0 this compares uninitialized stack bytes.
    if (*(long *)&temp[0] == '1xdi' ){
        idx1Pos = indexPos;
        found = true;
        printf("Using suggested index position\n");
    } else {
        printf("Not using suggested index position\n");
        // Minimum plausible index size: 16 bytes per frame entry.
        long pos = FileHeader.dwTotalFrames * 16;

        // the index is at the back of the file, so start searching for the
        // idx1 tag at the back of the file and work forward untill 4% of the
        // file has been read or it has been found
        for ( ; (pos < fileSize/19) ; pos +=1000 ){
            SetFilePointer(filehandle, -pos, NULL ,FILE_END);
            // Read 1003 bytes but only scan 1000 start offsets so a 4-byte
            // tag straddling the block boundary is still caught.
            ReadFile(filehandle,&temp[0], 1003, &dwActual, NULL);
            for ( int i =0; i < 1000; i++ ){
                if (*(long *)&temp[i] == '1xdi' ){
                    found = true;
                    idx1Pos = fileSize-pos+i;
                    printf( "Index found at %d bytes, suggested at %d\n", idx1Pos+4, indexPos);
                    break;
                }
            }
            if ( found )
                break;
        }
        idx1Pos+=4;   // step past the 'idx1' FOURCC itself
    }

    if ( !found ) {
        printf("\nAvi index not found.\n");
        return AVI_INCOMPLETE;
    }

    // the 1st 2 bytes represent the stream number (00 for video, 01 for audio)
    // the next 2 bytes tell what kind of data it is:
    // ##db = RGB DIB bits, typically indicates a keyframes if used
    // ##dc = compressed DIB bits
    // ##wb = waveform audio bytes
    unsigned long dcFrameTag = *((long *)"00dc");
    unsigned long dbFrameTag = *((long *)"00db");
    unsigned long audioTag   = *((long *)"01wb");

    // skip any junk between idx1 (index) tag and acual indexes
    SetFilePointer(filehandle, idx1Pos, NULL ,FILE_BEGIN);
    ReadFile(filehandle, &temp[0], 1003, &dwActual, NULL );
    for ( int p = 0; p < 1000; p++){
        if ( *(long *)(&temp[p]) == dcFrameTag || *(long *)(&temp[p]) == dbFrameTag || *(long *)(&temp[p]) == audioTag){
            idx1Pos+=p;
            // First dword after 'idx1' is presumably the index byte count (cb) -- confirm.
            indexLength = *(long *)&temp[0];
            break;
        }
    }
    // NOTE(review): if no entry tag is found in the first 1000 bytes,
    // indexLength stays uninitialized and feeds the malloc below.
    //set up an index array to hold all the indexes
    //indexLength = (VideoStreamHeader.dwLength + AudioStreamHeader.dwLength/AudioStreamHeader.dwScale) *16;
    Index *indexChunks = (Index *)malloc( indexLength );
    if ( indexChunks == NULL ){
        printf( "Unable to allocate index array\n");
        return false;
    }

    // load the all the index entries into the index array
    SetFilePointer(filehandle, idx1Pos, NULL ,FILE_BEGIN);
    ReadFile(filehandle, indexChunks, indexLength, &dwActual, NULL );

    #ifdef _DEBUG
    printf( "First 4 bytes of index array = %c%c%c%c\n", ((char *)indexChunks)[0], ((char *)indexChunks)[1], ((char *)indexChunks)[2], ((char *)indexChunks)[3]);
    #endif

    indexLength/=16;   // convert byte count to entry count (16 bytes per entry)
    long framePos = 0;
    long C;

    // allocate memory for the frame indexes
    lastVideoFrame = FileHeader.dwTotalFrames;
    frameData = (unsigned long *)malloc( sizeof(unsigned long ) * lastVideoFrame );

    // copy the frame indexes from the general index.
    totalKeys = 0;
    for ( C =0; C < indexLength && framePos < lastVideoFrame; C++ ){
        if ( indexChunks[C].ckid == dcFrameTag || indexChunks[C].ckid == dbFrameTag){
            #ifdef _DEBUG
            if ( indexChunks[C].dwFlags > 255 )
                printf( "Large flag: %d\n", indexChunks[C].dwFlags);
            if( indexChunks[C].dwFlags != 16 && indexChunks[C].dwFlags != 0)
                printf( "flag == %d\n", indexChunks[C].dwFlags);
            if( indexChunks[C].ckid == dbFrameTag )
                printf("00db tag\n");
            #endif
            if ( indexChunks[C].dwFlags & AVIIF_KEYFRAME )
                totalKeys++;
            // A zero-length chunk is recorded as offset 0 (dropped frame).
            frameData[framePos] = indexChunks[C].dwChunkLength?indexChunks[C].dwChunkOffset:0;
            framePos++;
        }
    }

    lastVideoFrame = framePos;

    #ifdef _DEBUG
    if ( framePos != lastVideoFrame )
        printf("\nFramePos != lastVideo frame: %d, %d\n\n", framePos, lastVideoFrame);
    #endif

    // if it is uncompressed ("DIB ") or losslessly compressed with huffman yuv ("hfyu","HFYU")
    // every frame is a key frame, so no need to index them
    // (' BID' / 'uyfh' / 'UYFH' are the little-endian multi-char forms of those tags)
    if ( VideoStreamHeader.fccHandler != ' BID' && VideoStreamHeader.fccHandler != 'uyfh' && VideoStreamHeader.fccHandler != 'UYFH'){
        long keyPos=0;
        framePos =0;
        keyFrames = (unsigned long *)malloc( sizeof(unsigned long) * totalKeys);
        for ( C =0; C < indexLength && framePos < lastVideoFrame; C++ ){
            if ( (indexChunks[C].ckid == dcFrameTag || indexChunks[C].ckid == dbFrameTag) ){
                if (indexChunks[C].dwFlags & AVIIF_KEYFRAME)
                    keyFrames[keyPos++] = framePos;
                framePos++;
            }
        }
    }

    // If the video buffer size is not specified, the largest frame also
    // must be found here since the size data will be discarded.
    if ( !VideoStreamHeader.dwSuggestedBufferSize ){
        long bufferSize = 0;
        for ( C =0; C < indexLength && framePos <= lastVideoFrame; C++ ){
            if ( (indexChunks[C].ckid == dcFrameTag || indexChunks[C].ckid == dbFrameTag) && indexChunks[C].dwChunkLength > bufferSize)
                bufferSize = indexChunks[C].dwChunkLength;
        }
        VideoStreamHeader.dwSuggestedBufferSize = bufferSize;
    }

    framePos = 0;

    if ( indexLength-lastVideoFrame > 0 || FileHeader.dwStreams == 1){
        // allocate space for the audio indexes
        audioData = (AudioIndex *)malloc( sizeof(AudioIndex ) * (indexLength-lastVideoFrame)  );

        // copy the audio indexes from the general index
        for ( C =0; C < indexLength && framePos < (indexLength-lastVideoFrame) ; C++ ){
            if ( indexChunks[C].ckid == audioTag ){
                #ifdef _DEBUG
                if ( indexChunks[C].dwFlags != 16 && indexChunks[C].dwFlags != 0 )
                    printf( "Non 16 or 0 audio flag: %d\n", indexChunks[C].dwFlags );
                #endif
                audioData[framePos].dwChunkOffset = indexChunks[C].dwChunkOffset;
                audioData[framePos].dwChunkLength = indexChunks[C].dwChunkLength;
                framePos++;
            }
        }
    }

    lastAudioFrame = framePos;

    #ifdef _DEBUG
    printf( "Audio frames == %d\n", framePos);
    #endif

    // Ratio of audio chunks to video frames; snap to 1.0 when nearly equal.
    audioRate = ((float)framePos)/GetTotalFrames();
    if ( audioRate >= .998 && audioRate <= 1.002 )
        audioRate = 1.0;
    free( indexChunks ); // no longer needed

    return AVI_VALID;
}
  280.    
/*  This function must be called before any other member functions. Failing to do so
    will almost certainly cause the program to crash. This function reads in the avi
    and stream headers, as well as the stream formats. It also calls ReadIndex() to
    set up the audio and video indexes */
// Parameter: file - path of the AVI file to open.
// Returns FILE_INVALID on failure, otherwise the result of ReadIndex().
int AviParser::InitFile( char * file ){

    bool found = false;
    unsigned long dwActual;
    char buffer[32];

    filehandle = CreateFile(file, GENERIC_READ, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, 0, NULL);
    if ( INVALID_HANDLE_VALUE == filehandle ){
        printf( "Unable to open specified file\n");
        return FILE_INVALID;
    }
        // no further error checking for the file is done, since it was checked by VideoParser::TestFile,
        // so as long as it is still there it should be fine

    fileSize=GetFileSize(filehandle,NULL);

    if ( true ){//!UsePavi(file) ){
        //printf("PAVI not used for finding stream headers\n");

        // Skip the RIFF/LIST/hdrl preamble (assumes the standard fixed
        // layout up to the 'avih' payload -- TODO confirm for nonstandard files).
        ReadFile(filehandle, &buffer, 32, &dwActual, NULL);

        // read in the avi file header
        ReadFile(filehandle, &FileHeader, sizeof(FileHeader), &dwActual, NULL);
        ReadFile(filehandle, &buffer[0], 20, &dwActual, NULL);

        // read in the video stream header; buffer[16..19] holds the 'strh' chunk size
        ReadFile(filehandle, &VideoStreamHeader, *(long *)&buffer[16], &dwActual, NULL);
        ReadFile(filehandle, &buffer[0], 8, &dwActual, NULL);

        // read in the video format; buffer[4..7] holds the 'strf' chunk size
        int tempRead = *(long *)&buffer[4];
        if ( tempRead < sizeof( BITMAPINFO))
            sourceVideoFormat = (BITMAPINFO *)malloc (sizeof( BITMAPINFO));
        else
            sourceVideoFormat = (BITMAPINFO *)malloc(tempRead);
        //printf("Specifided source video format size is %d bytes greater than the size of BITMAPINFO, truncating the format\n", tempRead-sizeof( sourceVideoFormat));
        ReadFile(filehandle, sourceVideoFormat, tempRead, &dwActual, NULL);
        sourceVideoFormat->bmiHeader.biSize= tempRead>sizeof( BITMAPINFO)?tempRead:sizeof( BITMAPINFO);
        //SetFilePointer(filehandle, tempRead-sizeof( sourceVideoFormat), NULL, FILE_CURRENT);
        //} else {
        //  ReadFile(filehandle,  &sourceVideoFormat, tempRead, &dwActual, NULL);
        //}

        if ( FileHeader.dwStreams > 1 ){
            // find the next stream ( audio ): scan forward in 4-byte steps for
            // the 'strl' tag ('lrts' is its little-endian multi-char form),
            // giving up after 5% of the file.
            while ( !found ){
                moviPos +=4;
                if ( moviPos >= fileSize/20 ) {
                    printf( "Unable to find the audio stream header\n");
                    return FILE_INVALID;
                }
                ReadFile(filehandle, &buffer, 4, &dwActual, NULL);
                #ifdef _DEBUG
                if ( *(long *)&buffer[0] == 'drts')
                    printf("\nstrd found\n\n");
                #endif

                if ( *(long *)&buffer[0] == 'lrts')
                    found = true;
            }

            ReadFile(filehandle,  &buffer[0], 8, &dwActual, NULL);

            // read audio stream header; buffer[4..7] holds its chunk size
            ReadFile(filehandle, &AudioStreamHeader, *(long *)&buffer[4], &dwActual, NULL);
            audioBytesPerFrame = (float)AudioStreamHeader.dwScale*AudioStreamHeader.dwLength/VideoStreamHeader.dwLength;

            ReadFile(filehandle, &buffer[0], 8, &dwActual, NULL);
            // read the audio format
            printf( "Reading audio format...");
            sourceAudioFormat = (WAVEFORMATEX *)malloc( *(long *)&buffer[4]);
            if ( sourceAudioFormat == NULL ){
                printf( "\nError reading format.\n");
                return FILE_INVALID;
            }
            ReadFile(filehandle,  sourceAudioFormat, *(long *)&buffer[4], &dwActual, NULL);
            // Debug aid only: peek at the format as an MP3 header.
            MPEGLAYER3WAVEFORMAT tempDebug;
            memcpy(&tempDebug,sourceAudioFormat,30);

            printf( " format header requires %d bytes\n",*(long *)&buffer[4]);
            if ( sourceAudioFormat->wFormatTag == WAVE_FORMAT_PCM )
                printf( "\nAudio is uncompressed PCM, %d KBps\n", sourceAudioFormat->nAvgBytesPerSec/1000);
        } else {
            printf( "No audio stream\n");
        }
    } // done reading in header information
    // check the standard 'movi' tag position (10244 / 10252 are the offsets
    // produced by a common fixed header layout -- TODO confirm origin)
    SetFilePointer(filehandle, 10244, NULL ,FILE_BEGIN);
    ReadFile(filehandle, &buffer[0], 8, &dwActual, NULL );

    long indexPos = 10252 + *(long *)&buffer[0];
    // 'ivom' is the little-endian multi-char form of "movi".
    if ( *(long *)&buffer[4] != 'ivom' ){
        found = false;

        SetFilePointer(filehandle, moviPos, NULL ,FILE_BEGIN);
        char temp[500];
        bool half=false;

        // find the byte position of the 'movi' tag
        // NOTE(review): on the first loop iteration temp[496] is read before
        // temp has ever been filled -- uninitialized data.
        while ( !found && moviPos < 40000 ){
            indexPos = *(long *)&temp[496];
            ReadFile(filehandle, &temp, 500, &dwActual, NULL);
            // 'half' means the previous block ended with "mo"; check whether
            // this block starts with the remaining "vi".
            if ( half ){
                if ( *((short*)temp) == 'iv' ){
                    found = true;
                    moviPos-=2;
                    break;
                }
            }
            for ( int a =0; a < 498; a+=2){
                if ( *(long *)&temp[a] == 'ivom'){
                    found = true;
                    moviPos-=496-a;
                    if ( a )
                        indexPos = *(long *)&temp[a-4] + moviPos+500;
                    break;
                }
            }
            if ( *((short*)&temp[498])=='om' )
                half = true;
            else
                half = false;
            moviPos +=500;
        }
        if ( !found ){
            #ifdef _DEBUG
            printf( "\n\n*******************************************\nmovi pos not found\n*******************************************\n\n");
            #endif
            moviPos = 4;
            indexPos = 0;   // force ReadIndex to search for the index itself
        }
        #ifdef _DEBUG
        printf( "\nmovi pos does not equal 10252. Pos = %d\n\n", moviPos);
        #endif
    } else {
        moviPos = 10252;
    }

    char indexResults = ReadIndex(file, indexPos);
    //bufferedRead.SetBufferState(true);
    //bufferedRead.StartBufferAt(moviPos);
    return indexResults;

}
  430.    
  431. //***************************************************************   
  432.    
  433.    
// set up the video decompressor and test if it gives us valid data.
// Tries, in order: ICLocate by fccHandler, ICLocate by biCompression,
// ICOpen by fccHandler, ICOpen by biCompression, and finally a list of
// common DivX/XviD FOURCCs.  Returns true once TestDecompressor() accepts
// a candidate, false if every attempt fails.
bool AviParser::InitVideoDecompressor(){

    // the video buffer size is checked when the index is loaded, so this
    // will be a valid size.
    long size = VideoStreamHeader.dwSuggestedBufferSize;

    sourceVideoData = (unsigned char *)malloc(size );
    // ' BID' is the little-endian multi-char form of "DIB " (uncompressed).
    if ( VideoStreamHeader.fccHandler != ' BID' ){
        // +1 byte of slack on the 24bpp RGB output buffer.
        uncompressedVideoData = (unsigned char *)malloc(sourceVideoFormat->bmiHeader.biWidth * sourceVideoFormat->bmiHeader.biHeight * 3 +1);
        if ( uncompressedVideoData == NULL || sourceVideoData ==NULL){
            printf( "Unable to allocate memory for uncompressed video buffer\n");
            return false;
        }
    } else {
        // Uncompressed DIB: no codec needed, source buffer doubles as output.
        uncompressedVideoData = sourceVideoData;
        targetVideoFormat = *sourceVideoFormat;
        GetVideoFrame(0);
        return true;
    }

    LongChar bicomp;
    bicomp.value = sourceVideoFormat->bmiHeader.biCompression;
    printf( "Video format bi-compresssion = %c%c%c%c\n", bicomp.bytes[0], bicomp.bytes[1], bicomp.bytes[2], bicomp.bytes[3]);
    printf( "Video stream ffc handler = %c%c%c%c\n", ((char *)&VideoStreamHeader.fccHandler)[0], ((char *)&VideoStreamHeader.fccHandler)[1], ((char *)&VideoStreamHeader.fccHandler)[2], ((char *)&VideoStreamHeader.fccHandler)[3] );

    // Attempt 1: ICLocate with the stream header's fccHandler.
    VideoDecompressor = ICLocate(ICTYPE_VIDEO, VideoStreamHeader.fccHandler, &sourceVideoFormat->bmiHeader, 0, ICMODE_DECOMPRESS);

    if (  VideoDecompressor != NULL && TestDecompressor() ){
        printf("Video decompressor set up using ICLocate and the fccHandler\n");
        return true;
    }

    // Attempt 2: ICLocate with the format's biCompression tag.
    VideoDecompressor = ICLocate(ICTYPE_VIDEO, sourceVideoFormat->bmiHeader.biCompression, &sourceVideoFormat->bmiHeader, 0, ICMODE_DECOMPRESS);

    if (  VideoDecompressor != NULL && TestDecompressor() ){
        printf("Video decompressor set up using ICLocate and the biCompression\n");
        return true;
    }

    // Attempts 3 and 4: ICOpen with each tag.
    VideoDecompressor = ICOpen(ICTYPE_VIDEO, VideoStreamHeader.fccHandler, ICMODE_DECOMPRESS);
    if ( VideoDecompressor != NULL && TestDecompressor() ){
        printf("Video decompressor set up using fcchandler\n");
        return true;
    }
    VideoDecompressor = ICOpen(ICTYPE_VIDEO, sourceVideoFormat->bmiHeader.biCompression, ICMODE_DECOMPRESS);
    if ( VideoDecompressor != NULL && TestDecompressor() ){
        printf( "Video decompressor set up using bicompresion tag\n");
        return true;
    }

    // if the player was unable to find a valid decompressor, it tries all divx decompressors
    printf( "Unable to obtain a valid video decompressor tag based based on fcchandler or bicompression\n");
    printf( "Trying to use anouther divx decompressor...\n");


    LongChar divx;
    divx.value = *(long *)"divx";
    long xvid  = *(long *)"xvid";
    long divx5 = *(long *)"DX50";

    // try divx 5
    sourceVideoFormat->bmiHeader.biCompression = divx5;
    VideoDecompressor = ICOpen(ICTYPE_VIDEO,  sourceVideoFormat->bmiHeader.biCompression, ICMODE_DECOMPRESS);
    if ( VideoDecompressor != NULL && TestDecompressor() ){
        printf("Video decompressor set up using Divx 5.0\n");
        return true;
    }

    // try divx, div3, and div4: the last byte of the tag is rewritten at the
    // end of each iteration ("divx" -> "div3" -> "div4").
    for ( int i = 0; i < 3; i++){
        sourceVideoFormat->bmiHeader.biCompression = divx.value;
        VideoDecompressor = ICOpen(ICTYPE_VIDEO,  sourceVideoFormat->bmiHeader.biCompression, ICMODE_DECOMPRESS);
        if ( VideoDecompressor != NULL && TestDecompressor() ){
            printf("Video decompressor set up using Div%c\n", divx.bytes[3]);
            return true;
        }
        divx.bytes[3] = i+'3';
    }

    // try xvid
    sourceVideoFormat->bmiHeader.biCompression = xvid;
    VideoDecompressor = ICOpen(ICTYPE_VIDEO,  sourceVideoFormat->bmiHeader.biCompression, ICMODE_DECOMPRESS);
    if ( VideoDecompressor != NULL && TestDecompressor() ){
        printf("Video decompressor set up using Xvid\n");
        return true;
    }

    return false;
}
  524.    
  525. //***************************************************************   
  526.    
  527. // see if the video decompressor actually gives us good data
      // Returns true only if the codec both initialises for decompression AND
      // successfully decodes frame 0; used as a probe by InitVideoDecompressor
      // when trying candidate codecs (DivX, Xvid, ...).
  528. bool AviParser::TestDecompressor(){
      // Ask the codec what uncompressed format it produces; if the query
      // fails, fall back to copying the source header and forcing RGB below.
  529.     if ( ICERR_OK != ICDecompressGetFormat(VideoDecompressor, sourceVideoFormat, &targetVideoFormat)) {
  530.         printf( "Unable to get a valid uncompressed format, attempting to work around\n");
  531.         targetVideoFormat = *sourceVideoFormat;
  532.         targetVideoFormat.bmiHeader.biBitCount = 24;
  533.         targetVideoFormat.bmiHeader.biCompression = BI_RGB;
  534.         //targetVideoFormat.bmiHeader.biSizeImage = 0;
  535.     }
      // Normalise every codec's output to plain 24-bit RGB so the renderer
      // only has to handle one pixel format.
  536.     if ( targetVideoFormat.bmiHeader.biCompression != BI_RGB || targetVideoFormat.bmiHeader.biBitCount != 24){
  537.         targetVideoFormat.bmiHeader.biBitCount = 24;
  538.         targetVideoFormat.bmiHeader.biCompression = BI_RGB;
  539.     }
  540.     #ifdef _DEBUG
  541.     if ( VideoDecompressor == NULL )
  542.         printf("\nVideo decompressor nulled in test decompressor\n\n");
  543.     #endif
      // Start a decompression session with the (source -> target) format pair.
  544.     DWORD error = ICDecompressBegin( VideoDecompressor, sourceVideoFormat, &targetVideoFormat);
  545.     if ( error != ICERR_OK )
  546.         return false;
  547.
      // Final end-to-end sanity check: actually decode the first frame.
  548.     if ( GetVideoFrame(0) )
  549.         return true;
  550.     else
  551.         return false;
  552. }
  553.    
  554. //***************************************************************   
  555.    
  556. // set up the audio decompressor for the avi audio stream   
  557. bool AviParser::InitAudioDecompressor(){   
  558.    
  559.     unsigned long sourceSize = AudioStreamHeader.dwSuggestedBufferSize;   
  560.    
  561.     // if the suggested buffer size is not set, find the largest   
  562.     // chunk size and set the buffer size to that.   
  563.     // PCM audio also appears to not have correct buffers at times...   
  564.     if ( sourceSize < 1 || sourceAudioFormat->wFormatTag == WAVE_FORMAT_PCM ){   
  565.    
  566.         if ( sourceSize < 1 )   
  567.             printf( "Source audio buffer size not specified in stream header\n");   
  568.         sourceSize = 0;   
  569.         for ( long f = 0; f < lastAudioFrame; f++ ){   
  570.             if ( audioData[f].dwChunkLength > sourceSize )   
  571.                 sourceSize = audioData[f].dwChunkLength;   
  572.         }   
  573.         if ( sourceAudioFormat->wFormatTag != WAVE_FORMAT_PCM  )    
  574.             printf( "Setting source audio buffer size to %d\n", sourceSize);   
  575.     }   
  576.     sourceAudioData = (unsigned char *)malloc( sourceSize );   
  577.     if ( sourceAudioData == NULL ){   
  578.         printf( "Unable to allocate memory for source audio buffer\n");   
  579.         return false;   
  580.     }   
  581.     if ( sourceAudioFormat->wFormatTag != WAVE_FORMAT_PCM ){   
  582.    
  583.         if ( sourceAudioFormat->nBlockAlign == 1152 && sourceAudioFormat->wFormatTag == WAVE_FORMAT_MPEGLAYER3 ){   
  584.             printf( "\nThis file contains a VBR MP3 stream\n");   
  585.             printf( "This may cause problems with video sync after fast\n");   
  586.             printf( "forwarding or rewinding.\n\n");   
  587.         }   
  588.    
  589.         // the audio is compressed, so it needs to be converted to pcm to play   
  590.         targetAudioFormat.wFormatTag = WAVE_FORMAT_PCM;   
  591.         #ifdef _DEBUG   
  592.         printf("wBitsPerSample = %d\n", sourceAudioFormat->wBitsPerSample);   
  593.         #endif   
  594.         unsigned long Flags = ACM_FORMATSUGGESTF_WFORMATTAG;   
  595.         if ( !sourceAudioFormat->wBitsPerSample ){   
  596.             Flags = ACM_FORMATSUGGESTF_WFORMATTAG;   
  597.             targetAudioFormat.wBitsPerSample = 8;   
  598.         }   
  599.         DWORD maxSize;   
  600.    
  601.         //unsigned long bytes;   
  602.         //HANDLE output = CreateFile( "C:\\Documents and Settings\\Lags\\Desktop\\audiohdr.lag", GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, CREATE_NEW, 0, NULL);   
  603.         //WriteFile(output, sourceAudioFormat,sizeof(WAVEFORMATEX)+sourceAudioFormat->cbSize,&bytes,NULL);   
  604.         //CloseHandle(output);   
  605.    
  606.         // obtain an uncompressed format and open a conversion stream   
  607.         acmMetrics(NULL, ACM_METRIC_MAX_SIZE_FORMAT, (LPVOID)&maxSize);   
  608.         if ( acmFormatSuggest(NULL, sourceAudioFormat, &targetAudioFormat, maxSize, ACM_FORMATSUGGESTF_WFORMATTAG ) ){   
  609.             printf( "Failed to obtain a valid uncompressed audio format\n");   
  610.             printf( "Attempting to work around...\n");   
  611.             targetAudioFormat.nChannels = sourceAudioFormat->nChannels;   
  612.             targetAudioFormat.wBitsPerSample = 8;   
  613.             targetAudioFormat.nBlockAlign = targetAudioFormat.nChannels;   
  614.             targetAudioFormat.nSamplesPerSec = sourceAudioFormat->nSamplesPerSec;   
  615.             targetAudioFormat.nAvgBytesPerSec = targetAudioFormat.nChannels*targetAudioFormat.nSamplesPerSec;   
  616.             if ( acmStreamOpen( &ACMstreamHandle, NULL, sourceAudioFormat, &targetAudioFormat, NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME) ){   
  617.                 targetAudioFormat.wBitsPerSample = 16;   
  618.                 targetAudioFormat.nBlockAlign = 2*targetAudioFormat.nChannels;   
  619.                 targetAudioFormat.nAvgBytesPerSec = targetAudioFormat.nBlockAlign*targetAudioFormat.nSamplesPerSec;   
  620.                 if ( acmStreamOpen( &ACMstreamHandle, NULL, sourceAudioFormat, &targetAudioFormat, NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME) ){   
  621.                     printf("Unable to obtain valid uncompressed audio format.\n");   
  622.                     return false;   
  623.                 }   
  624.             }   
  625.         }  else {   
  626.             if ( acmStreamOpen( &ACMstreamHandle, NULL, sourceAudioFormat, &targetAudioFormat, NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME) ){   
  627.                 printf("Failed to open ACM conversion stream\n");   
  628.                 return false;   
  629.             }   
  630.         }   
  631.         unsigned long uncompressedSize;   
  632.         if ( acmStreamSize( ACMstreamHandle, sourceSize, &uncompressedSize, ACM_STREAMSIZEF_SOURCE ) && uncompressedSize > sourceSize){   
  633.             printf ("Unable to obtain buffer sizes for uncompressed audio, attempting to work around\n");   
  634.             unsigned long initSize=0;   
  635.             uncompressedSize = (targetAudioFormat.nSamplesPerSec*targetAudioFormat.nBlockAlign/GetFramesPerSec()/GetAudioPerFrame());   
  636.             for (; initSize < sourceSize; uncompressedSize++)   
  637.                 acmStreamSize( ACMstreamHandle, uncompressedSize, &initSize, ACM_STREAMSIZEF_DESTINATION );   
  638.    
  639.             printf ( "Setting uncompressed buffer size to %d bytes\n", uncompressedSize);   
  640.         }   
  641.         if ( audioRate < .5 )   
  642.             uncompressedSize*=1.1;   
  643.         uncompressedAudioData = (unsigned char *)malloc(uncompressedSize);   
  644.    
  645.         // set up the acm stream header   
  646.         acmHeader.fdwStatus     = 0;   
  647.         acmHeader.cbStruct      = sizeof(ACMSTREAMHEADER);   
  648.         acmHeader.pbSrc         = (LPBYTE)sourceAudioData;   
  649.         acmHeader.cbSrcLength   = sourceSize;   
  650.         acmHeader.pbDst         = (LPBYTE)uncompressedAudioData;   
  651.         acmHeader.cbDstLength   = uncompressedSize;   
  652.    
  653.         // prepare the header   
  654.         if (acmStreamPrepareHeader( ACMstreamHandle, &acmHeader, 0)){   
  655.             printf("Error preparing audio stream conversion header\n");   
  656.             return false;   
  657.         }   
  658.    
  659.         acmHeader.cbDstLengthUsed = 0;   
  660.         acmHeader.cbSrcLengthUsed = 0;   
  661.    
  662.     } else { // the avi data is already pcm, just stream it   
  663.         uncompressedAudioData = sourceAudioData;   
  664.         targetAudioFormat = *sourceAudioFormat;   
  665.     }   
  666.     printf("Audio decompression set up\n");   
  667.     return true;   
  668. }   
  669.    
  670. //***************************************************************   
  671.    
  672. char * AviParser::GetVideoBuffer(){
      // Exposes the decoded RGB frame buffer filled by GetVideoFrame().
  673.     return (char *)uncompressedVideoData;
  674. }
  675.
  676. //***************************************************************
  677.
  678. char * AviParser::GetAudioBuffer(){
      // Exposes the PCM buffer filled by GetAudioFrame().
  679.     return (char *)uncompressedAudioData;
  680. }
  681.    
  682. //***************************************************************   
  683.    
  684. // fill the uncompressedVideoData array with the uncompressed video data   
  685. // for the specified frame   
  686. bool AviParser::GetVideoFrame(long frame ){   
  687.     unsigned long bytesRead;    
  688.     // if the ofset is set to 0, the frame has a size of 0 and is simply a place-holder   
  689.     if ( frameData[frame] ){   
  690.         unsigned long bytes;   
  691.         SetFilePointer(filehandle,frameData[frame] + moviPos,NULL, FILE_BEGIN);   
  692.         ReadFile(filehandle,&bytesRead, 4, &bytes, NULL);   
  693.         ReadFile(filehandle,sourceVideoData, bytesRead, &bytes, NULL);   
  694.         //bufferedRead.BufferedRead(frameData[frame] + moviPos, &bytesRead, 4);   
  695.         //bufferedRead.BufferedRead(frameData[frame] + moviPos+4, sourceVideoData, bytesRead);   
  696.    
  697.         // if the video is compressed, uncompress it   
  698.         if ( VideoStreamHeader.fccHandler != ' BID' ){   
  699.             try {   
  700.                 if ( long error = ICDecompress( VideoDecompressor, 0, &(sourceVideoFormat->bmiHeader), sourceVideoData, &(targetVideoFormat.bmiHeader), uncompressedVideoData)){        
  701.                     #ifdef _DEBUG   
  702.                     switch ( error ){   
  703.                         case ICERR_UNSUPPORTED: printf("ICERR_UNSUPPORTED"); break;   
  704.                         case ICERR_BADFORMAT: printf("ICERR_BADFORMAT  "); break;   
  705.                         case ICERR_INTERNAL: printf(" ICERR_INTERNAL "); break;   
  706.                         case ICERR_BADFLAGS : printf(" ICERR_BADFLAGS   "); break;   
  707.                         case ICERR_BADPARAM: printf("ICERR_BADPARAM  "); break;   
  708.                         case ICERR_BADSIZE: printf(" ICERR_BADSIZE "); break;   
  709.                         case ICERR_BADHANDLE : printf(" ICERR_BADHANDLE  "); break;   
  710.                         case ICERR_ABORT: printf("  ICERR_ABORT"); break;   
  711.                         case ICERR_BADBITDEPTH: printf(" ICERR_BADBITDEPTH "); break;   
  712.                         case ICERR_BADIMAGESIZE: printf(" ICERR_BADIMAGESIZE "); break;   
  713.                         case ICERR_GOTOKEYFRAME: printf(" ICERR_GOTOKEYFRAME "); break;   
  714.                         default: printf("Video decompression error: frame %d\n", frame); break;   
  715.                     }   
  716.                     #endif   
  717.                     return false;   
  718.                 }   
  719.    
  720.             // handle any decompressor exceptions   
  721.             } catch ( ... ) {   
  722.                 printf("Exception thrown in GetVideoFrame by ICDecompress. Ignoring\n");   
  723.                 return false;   
  724.             }   
  725.         }   
  726.     }   
  727.     /*unsigned char * in = uncompressedVideoData;   
  728.     int length = targetVideoFormat.bmiHeader.biHeight*targetVideoFormat.bmiHeader.biWidth*2;   
  729.     int pos = length*1.5-1;   
  730.     for ( int a =0; a < length/2; a+=4 ){   
  731.         int y,u,v;   
  732.         int r,g,b;   
  733.         y = in[a];   
  734.         u = in[a+1];   
  735.         v = in[a+3];   
  736.         y-=16;   
  737.         u-=128;   
  738.         v-=128;   
  739.         r = 1.164383 * y + 1.596027 * v;   
  740.         g = 1.164383 * y - (0.391762 * u) - (0.812968 * v);   
  741.         b = 1.164383 * y +  2.017232 * u;   
  742.         r=r>255?255:r;   
  743.         b=b>255?255:b;   
  744.         g=g>255?255:g;   
  745.         r=r<0?0:r;   
  746.         g=g<0?0:g;   
  747.         b=b<0?0:b;   
  748.         in[pos--]=r;   
  749.         in[pos--]=g;   
  750.         in[pos--]=b;   
  751.         y=in[a+2];   
  752.         y-=16;   
  753.         r = 1.164383 * y + 1.596027 * v;   
  754.         g = 1.164383 * y - (0.391762 * u) - (0.812968 * v);   
  755.         b = 1.164383 * y +  2.017232 * u;   
  756.         r=r>255?255:r;   
  757.         b=b>255?255:b;   
  758.         g=g>255?255:g;   
  759.         r=r<0?0:r;   
  760.         g=g<0?0:g;   
  761.         b=b<0?0:b;   
  762.         in[pos--]=r;   
  763.         in[pos--]=g;   
  764.         in[pos--]=b;   
  765.     }*/   
  766.     return true;   
  767. }   
  768.    
  769. //***************************************************************   
  770.    
  771. // return a frame of uncompressed audio
      // Returns the number of valid bytes now sitting in the uncompressed
      // audio buffer (uncompressedAudioData) for chunk 'frame'.
  772. long AviParser::GetAudioFrame(long frame){
  773.     unsigned long bytes;
  774.     long value;
      // Seek to the indexed chunk; the first 4 bytes on disk are the chunk
      // length, which is read into 'value' and then ignored.
  775.     SetFilePointer(filehandle, audioData[frame].dwChunkOffset + moviPos, NULL, FILE_BEGIN);
  776.     ReadFile(filehandle, &value, 4, &bytes, NULL );
  777.     ReadFile(filehandle, sourceAudioData, audioData[frame].dwChunkLength, &bytes, NULL );
  778.     //bufferedRead.BufferedRead( audioData[frame].dwChunkOffset + moviPos, &value, 4 );
      // Deliberately trust the index's chunk length over ReadFile's count
      // (the buffered-read path below did the same).
  779.     bytes = audioData[frame].dwChunkLength;
  780.     //bufferedRead.BufferedRead( audioData[frame].dwChunkOffset + moviPos+4, sourceAudioData, audioData[frame].dwChunkLength );
  781.
      // Non-PCM data goes through the ACM conversion stream prepared in
      // InitAudioDecompressor(); PCM is returned as-is (the two buffers alias).
  782.     if ( sourceAudioFormat->wFormatTag != WAVE_FORMAT_PCM ){
  783.         acmHeader.cbSrcLength = bytes;
  784.         acmHeader.cbSrcLengthUsed = 0;
  785.         if ( acmStreamConvert(ACMstreamHandle, &acmHeader, ACM_STREAMCONVERTF_BLOCKALIGN ) ){
  786.             printf("Error decompressing audio frame %d\n", frame);
  787.         }
  788.         bytes = acmHeader.cbDstLengthUsed;
  789.         acmHeader.cbDstLengthUsed = 0;
  790.
  791.     }
  792.     return bytes;
  793. }
  794.    
  795. //***************************************************************   
  796.    
  797. long AviParser::GetTotalFrames(){
      // Number of video frames found while indexing the file.
  798.     return lastVideoFrame;
  799. }
  800.
  801. //**************************************************************
  802.
  803. long AviParser::GetTotalAudioFrames(){
      // Number of audio chunks found while indexing the file.
  804.     return lastAudioFrame;
  805. }
  806.
  807. //***************************************************************
  808.
  809. float AviParser::GetFramesPerSec(){
      // AVI stream headers express frame rate as the ratio dwRate/dwScale.
      // NOTE(review): no zero check on dwScale — presumably validated when
      // the stream header was read; confirm upstream.
  810.     return VideoStreamHeader.dwRate/(float)VideoStreamHeader.dwScale;
  811. }
  812.
  813. //**************************************************************
  814.
  815. float AviParser::GetAudioPerFrame(){
      // Average number of audio chunks per video frame.
  816.     return lastAudioFrame/(float)lastVideoFrame;
  817. }
  818.    
  819. //***************************************************************   
  820.    
  821. /* attempt to find a key frame searching forward or backward from the   
  822.    frame passed in. If no frame is found, the frame passed in is returned */   
  823.    
  824. long AviParser::GetKeyFrame( long startFrame, bool forward){   
  825.    
  826.     // if it is uncompressed ("DIB ") or losslessly compressed with huffman yuv ("hfyu')   
  827.     // every frame is a key frame   
  828.     if ( VideoStreamHeader.fccHandler == ' BID' || VideoStreamHeader.fccHandler == 'uyfh' ){   
  829.         if ( startFrame < 0 ){   
  830.             return 0;   
  831.         }   
  832.         if ( startFrame > lastVideoFrame ){   
  833.             return lastVideoFrame - 1;   
  834.         }   
  835.     }   
  836.    
  837.     // use a binary search to find the nearest key frame   
  838.     int i = startFrame;    
  839.     unsigned int times = totalKeys/2;   
  840.     unsigned int pos = times;   
  841.     while ( times > 1 ){   
  842.         times/=2;   
  843.         if ( keyFrames[pos] < startFrame )   
  844.             pos+=times;   
  845.          else    
  846.             pos-=times;    
  847.     }   
  848.    
  849.     if ( startFrame < 0 ){   
  850.         return 0;   
  851.     }   
  852.     if ( startFrame > lastVideoFrame ){   
  853.         return lastVideoFrame - 1;   
  854.         }   
  855.    
  856.     if ( keyFrames[pos] < startFrame && forward){   
  857.         return keyFrames[pos+1];   
  858.     }   
  859.    
  860.     if ( keyFrames[pos] > startFrame && !forward){   
  861.         return keyFrames[pos-1];   
  862.     }   
  863.     return keyFrames[pos];   
  864. }   
  865.    
  866. //***************************************************************   
  867.    
  868. int AviParser::GetHeight(){
      // Height in pixels of the decoded (target) frame format.
  869.     return targetVideoFormat.bmiHeader.biHeight;
  870. }
  871.
  872. //***************************************************************
  873.
  874. int AviParser::GetWidth(){
      // Width in pixels of the decoded (target) frame format.
  875.     return targetVideoFormat.bmiHeader.biWidth;
  876. }
  877.
  878. //***************************************************************
  879.
  880. int AviParser::GetSamplesPerSec(){
      // Sample rate of the *uncompressed* audio handed to the player.
  881.     return targetAudioFormat.nSamplesPerSec;
  882. }
  883.
  884. //***************************************************************
  885.
  886. int AviParser::GetChannels(){
      // Channel count of the uncompressed audio format.
  887.     return targetAudioFormat.nChannels;
  888. }
  889.
  890. //***************************************************************
  891.
  892. int AviParser::GetBitsPerSample(){
      // Bits per sample of the uncompressed audio format.
  893.     return targetAudioFormat.wBitsPerSample;
  894. }
  895.    
  896. //**************************************************************   
  897.    
  898. int AviParser::GetAudioByteDelay(){
      // For MP3 streams, convert the codec delay (a sample count stored in
      // the MPEGLAYER3WAVEFORMAT extension of the stream format) into a byte
      // count of decoded PCM, so the player can offset audio for sync.
      // All other codecs report no delay.
  899.     int samples = 0;
  900.     if (sourceAudioFormat->wFormatTag == WAVE_FORMAT_MPEGLAYER3) {
  901.         samples = ((MPEGLAYER3WAVEFORMAT *)sourceAudioFormat)->nCodecDelay;
  902.         samples *= targetAudioFormat.wBitsPerSample/8;
  903.         samples *= targetAudioFormat.nChannels;
  904.     }
  905.     return samples;
  906. }
  907.    
  908. //***************************************************************   
  909.    
      // Map a video frame number to the matching audio chunk number.
      // Both parameters are in/out: videoFrame may be adjusted so that it
      // lines up with the audio chunk boundary that was chosen.
  910. void AviParser::GetAudioPos( long &videoFrame, long &audioFrame){
  911.
  912.     // if the file has the nBlockAlign set to 1152, it was encoded
  913.     // in nandub and may be a VBR mp3 audio stream >_<
  914.     if ( sourceAudioFormat->nBlockAlign == 1152 && sourceAudioFormat->wFormatTag == WAVE_FORMAT_MPEGLAYER3 ){
  915.
      // VBR path: convert the video position to an absolute sample count,
      // then count whole 1152-sample MP3 frames up to it, rounding to the
      // nearest frame (576 = half a frame).
  916.         unsigned long samples = (videoFrame)*((sourceAudioFormat->nSamplesPerSec)/(VideoStreamHeader.dwRate/(float)VideoStreamHeader.dwScale));
  917.         unsigned long samples2=0;
  918.         int neko =0;
  919.         for( ; samples2< samples; neko++)
  920.             samples2+=1152;
  921.         if ( samples2-samples > 576)
  922.             neko--;
  923.         audioFrame = neko;
  924.         return;
  925.
  926.     }
  927.
  928.     // if the audio is a constant bit rate, the average bytes per frame is used
  929.     // to determine what audio frame the player should be on
  930.
  931.     long bytePos = videoFrame*audioBytesPerFrame;
  932.     long place = 0;
  933.     long size=0;
  934.
      // Walk the audio index until the cumulative chunk size covers bytePos.
  935.     for ( ; bytePos > size; place++)
  936.         size+= audioData[place].dwChunkLength;
  937.     audioFrame = place;
      // NOTE(review): the (int) cast binds to the numerator only, so the
      // "+.5" rounding is added after an integer division — verify this is
      // the rounding behaviour actually intended.
  938.     videoFrame -= (int)(bytePos-size+audioData[place].dwChunkLength)/audioBytesPerFrame+.5;
  939. }
  940.        
  941. //***************************************************************   
  942.    
  943. void AviParser::CloseAvi(){   
  944.     // This releases any allocated memory and nulls the pointers.    
  945.     try {    
  946.         acmStreamUnprepareHeader( ACMstreamHandle, &acmHeader, 0); // free our acm header   
  947.         if ( ACMstreamHandle != NULL ){   
  948.             acmStreamClose(ACMstreamHandle,0);  // Release the acm conversion stream   
  949.             ACMstreamHandle = NULL;   
  950.         }   
  951.         try {   
  952.             ICClose( VideoDecompressor );       // Release the video decompressor   
  953.             VideoDecompressor = NULL;   
  954.         } catch (...){}                         // catch any errors so everything else is deallocated   
  955.        
  956.         if ( sourceAudioData != NULL && sourceAudioData != uncompressedAudioData ){   
  957.             free(sourceAudioData);             
  958.         }   
  959.         if ( sourceVideoData != NULL && uncompressedVideoData != sourceVideoData ){   
  960.             free(sourceVideoData);     
  961.         }   
  962.         if ( uncompressedAudioData != NULL ){   
  963.             free(uncompressedAudioData);   
  964.             uncompressedAudioData = NULL;   
  965.             sourceAudioData = NULL;   
  966.         }   
  967.         if ( uncompressedVideoData != NULL ){   
  968.             free(uncompressedVideoData);     
  969.             uncompressedVideoData = NULL;   
  970.             sourceVideoData = NULL;   
  971.         }   
  972.         if ( sourceAudioFormat != NULL ){   
  973.             free(sourceAudioFormat);   
  974.             sourceAudioFormat = NULL;   
  975.         }   
  976.         if ( frameData!= NULL ){   
  977.             free(frameData);   
  978.             frameData = NULL;   
  979.         }   
  980.         if (audioData != NULL ){   
  981.             free(audioData);   
  982.             audioData = NULL;   
  983.         }   
  984.         if ( keyFrames != NULL ){   
  985.             free(keyFrames);   
  986.             keyFrames = NULL;   
  987.         }   
  988.         //if ( filehandle != NULL ) {   
  989.         //  CloseHandle( filehandle );   
  990.         //  filehandle = NULL;   
  991.         //}   
  992.     } catch (...){   
  993.         // ignore all unsightly exceptions since the player should be exiting    
  994.         #ifdef _DEBUG   
  995.         printf( "Exception caught in AviParser::CloseAvi()\n");   
  996.         _sleep(10000);   
  997.         #endif   
  998.     }   
  999. }  

*********************************************************************************

www.pudn.com > aviplayer-source.zip > incaviparser.cpp, change:2003-09-30,size:5292b
 

  1. #include "incaviparser.h"   
  2. #include "reindex.cpp"   
  3.    
  4. IncompleteAviParser::IncompleteAviParser(): hasKeyFrames(false) {}   // no key frames known until BuildIndex sees '00db' tags
  5.
  6. IncompleteAviParser::~IncompleteAviParser(){}   // cleanup is done in CloseAvi(), not here
  7.    
      // Replacement for the normal 'idx1' index read, used when the file is
      // truncated or still downloading: spawn a background thread that scans
      // the movi data and builds the index while playback starts.
      // NOTE(review): 'indexPos' is unused here; the char return is used as
      // a bool by the caller.
  8. char IncompleteAviParser::ReadIndex(char * name, long indexPos){
  9.
  10.     printf("\nThe file appears to be incomplete.\n");
  11.
  12.     printf("Attempting to use the file data to build an index.\n\n");
  13.
      // Ownership of 'reference' is handed to the spawned thread; it is not
      // freed in this function. NOTE(review): confirm the thread releases it.
  14.     IncompleteAviObject * reference;
  15.     reference = (IncompleteAviObject *)malloc( sizeof(IncompleteAviObject));
  16.     reference->aviObjPtr = this;
  17.     reference->fileName = name;
  18.     reference->handle = &indexThread;
  19.     _beginthread( IncompleteTimerThread, 0, reference );
  20.
  21.     // wait for a few seconds of frame data to be indexed
      // (polls up to 255 * 10 ms, or until 200 frames / the whole file is in)
  22.     for( unsigned char bot =0; bot < 255 && lastVideoFrame < 200 && lastVideoFrame < FileHeader.dwTotalFrames; bot++)
  23.         _sleep(10);
  24.
  25.     // if there is not a specified buffer size, load some more frames before
  26.     // so the calculated video buffer size is more accurate
  27.     if ( !VideoStreamHeader.dwSuggestedBufferSize )
  28.         _sleep(500);
  29.
  30.     return true;
  31. }
  32.    
  33. long IncompleteAviParser::GetKeyFrame(long startFrame, bool forward ){   
  34.     if ( hasKeyFrames)   
  35.         return AviParser::GetKeyFrame(startFrame, forward);   
  36.     return startFrame;   
  37. }   
  38.    
  39. //***************************************************************   
  40.    
  41. void IncompleteAviParser::BuildIndex( void * fileName ){   
  42.     /* this spawns a seperate thread that will scan the avi file while the    
  43.      * player plays the avi. This thread will build the index for files that   
  44.      * are incomplete and/or being downloaded.   
  45.      */   
  46.    
  47.     long totalFrames = FileHeader.dwTotalFrames;   
  48.     long currAudioFrame = 0;   
  49.     long currVideoFrame = 0;   
  50.     long filePos = moviPos;   
  51.     long pos = 4;   
  52.     long size = 0;   
  53.     bool videoFrame = false;   
  54.     char data[8];   
  55.     DWORD dwActual;   
  56.     HANDLE file;   
  57.    
  58.     // give it its own copy of the avi file handle, so it won't interfere   
  59.     // with its parent's reading   
  60.     file = CreateFile((char *)fileName, GENERIC_READ, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, 0, NULL);   
  61.     fileSize = GetFileSize( file, NULL );   
  62.    
  63.     frameData = (unsigned long *)malloc( sizeof(unsigned long) * totalFrames );   
  64.     keyFrames = (unsigned long *)malloc( sizeof(unsigned long) * totalFrames/40);   
  65.     if ( frameData == NULL ){   
  66.         printf( "Unable to allocate memory for the frame index\n");   
  67.         _sleep(10000);   
  68.         exit(1);   
  69.     }   
  70.     audioData = (AudioIndex *)malloc( sizeof(AudioIndex ) * totalFrames * 2 );   
  71.     if ( audioData == NULL ){   
  72.         printf( "Unable to allocate memory for the audio index\n");   
  73.         _sleep(10000);   
  74.         exit(1);   
  75.     }   
  76.        
  77.     SetFilePointer( file, filePos, NULL ,FILE_BEGIN);   
  78.     ReadFile( file, data, 8, &dwActual, NULL );   
  79.    
  80.     whiletrue ){   
  81.         // if the file size has changed, or if it is a .antifrag file   
  82.         // attempt to read more frames every 30 seconds   
  83.         while ( filePos+8 < fileSize || ( strcmp( (char *)fileName + strlen((char *)fileName)-8, "antifrag") == 0 ) ){   
  84.             if ( *(long *)data == 'bw10' ) {   
  85.                 audioData[currAudioFrame].dwChunkOffset = filePos+4 - moviPos;   
  86.                 audioData[currAudioFrame].dwChunkLength = *(long *)&data[4];   
  87.                 currAudioFrame++;   
  88.                 lastAudioFrame++;   
  89.                 filePos+= *(long *)&data[4]+8 + *(long *)&data[4]%2 ;   
  90.             } else if ( *(long *)data == 'cd00' || *(long *)data == 'bd00' ){   
  91.                 if ( *(long *)data == 'bd00' ){   
  92.                     #ifdef _DEBUG   
  93.                     if ( !hasKeyFrames )   
  94.                         printf( "File uses '00db' frame tags. Marking those frames as key frames\n");   
  95.                     #endif   
  96.                     keyFrames[totalKeys] = currVideoFrame;   
  97.                     hasKeyFrames = true;   
  98.                     totalKeys++;   
  99.                 }   
  100.                 frameData[currVideoFrame] = filePos+4 - moviPos;   
  101.                 // In case the video buffer size is not set   
  102.                 if ( *(unsigned long *)&data[4] > VideoStreamHeader.dwSuggestedBufferSize)   
  103.                     VideoStreamHeader.dwSuggestedBufferSize = *(long *)&data[4];   
  104.                 currVideoFrame++;   
  105.                 lastVideoFrame++;   
  106.                 filePos+= *(long *)&data[4]+8 + *(long *)&data[4]%2;   
  107.             } else if( *(long *)data == '1xdi' ){   
  108.                 return;   
  109.                    
  110.             }   
  111.             SetFilePointer( file, filePos, NULL ,FILE_BEGIN);   
  112.             ReadFile( file, data, 8, &dwActual, NULL );   
  113.         }   
  114.         if ( lastVideoFrame )   
  115.             audioRate = lastAudioFrame/lastVideoFrame;   
  116.    
  117.         // sleep for a minute, then see if the file size has changed.   
  118.         // if so, more of the file is indexed.   
  119.         _sleep(30000);   
  120.         fileSize = GetFileSize( file, NULL );   
  121.     }   
  122. }   
  123.    
      // Variant of GetAudioPos for partially-indexed files: if the base class
      // maps the video position to an audio chunk that the background indexer
      // has not reached yet, scale the video position back proportionally and
      // retry until the result lands inside the indexed region.
  124. void IncompleteAviParser::GetAudioPos( long &videoFrame,long &audioFrame){
  125.
  126.     // if the desired audio frame has not been indexed yet, move back the video frame by the
  127.     // same percentage as the audio frame; this should result in a valid audio frame.
  128.     AviParser::GetAudioPos(videoFrame,audioFrame);
  129.     while( audioFrame > lastAudioFrame ){
  130.         #ifdef _DEBUG
  131.         printf("Position corrisponds to an audio frame that has not been indexed: frame %d requested, last is %d\n", audioFrame, lastAudioFrame);
  132.         #endif
  133.         float percent = lastAudioFrame/(float)audioFrame;
  134.         videoFrame*=percent;
      // the extra decrement guarantees forward progress even when the
      // proportional scaling rounds back to the same frame
  135.         videoFrame--;
  136.         audioFrame=lastAudioFrame;
  137.         AviParser::GetAudioPos(videoFrame,audioFrame);
  138.     }
  139. }
  140. //***************************************************************   
  141.    
  142. void IncompleteAviParser::CloseAvi(){
  143.     // kill the reindex thread so that it cannot attempt to
  144.     // access memory after it has been freed by CloseAvi()
      // NOTE(review): TerminateThread is a last-resort API — the indexer gets
      // no chance to close its private file handle or finish a write; a
      // cooperative shutdown flag would be safer. Confirm this is acceptable
      // given the player is exiting.
  145.     TerminateThread( indexThread, 0);
  146.     // free the resources used
  147.     AviParser::CloseAvi();
  148. }

****************************************************
using System;
using System.IO;
using System.Text;
using System.Threading;
using Utility;
using System.Collections;
using System.Collections.Generic;
using themonospot_Base_Main;

namespace themonospot_Base_Main
{

    
    
    /// <summary>
    /// AVI Parser for Themonospot
    /// </summary>
    public class clsParserAVI : IParser
    {

		
		#region CONSTANTS

        // Size, in bytes, of one RIFF DWORD and of a chunk header
        // (FourCC + size DWORD).
        public const int DWORDSIZE = 4;
        public const int TWODWORDSSIZE = 8;
        // FourCC tags recognized at the top level of a RIFF file:
        // "RIFF" (little-endian), "RIFX" (big-endian), "LIST" (sub-list).
        public static readonly string RIFF4CC = "RIFF";
        public static readonly string RIFX4CC = "RIFX";
        public static readonly string LIST4CC = "LIST";

		// Known file types
		public static readonly int ckidAVI = Utility.clsEncoding.ToFourCC("AVI ");
		public static readonly int ckidWAV = Utility.clsEncoding.ToFourCC("WAVE");
		public static readonly int ckidRMID = Utility.clsEncoding.ToFourCC("RMID");

        #endregion
		
		
		#region GENERIC OBJECTS
		
        // Video/audio info items collected during parsing, exposed read-only.
        public List<clsInfoItem> VideoItems
        {
            get {return _VideoItems;    }
        }
        
        public List<clsInfoItem> AudioItems
        {
            get {return _AudioItems;    }
        }

        private List<clsInfoItem> _VideoItems;
        private List<clsInfoItem> _AudioItems;
        
        // Parsed AVI structures: main header, one stream header per stream,
        // plus the per-stream video (BITMAPINFOHEADER) and audio
        // (WAVEFORMATEX) formats.  The arrays start empty and are grown one
        // slot at a time by the addNew_* helpers.
        private AVIMAINHEADER myAviHeader = new AVIMAINHEADER();
		private AVISTREAMHEADER[] myAviStreamHeader = new AVISTREAMHEADER[0];
		private BITMAPINFOHEADER[] myAviBitMap = new BITMAPINFOHEADER[0];
		private WAVEFORMATEX[] myAviWave = new WAVEFORMATEX[0];
		// User-data strings found in JUNK/ISFT chunks and inside the movi data.
		private string[] myUData = new string[0];
		private string[] myMUData = new string[0];
		
		private string _m_filename;
		private string _m_shortname;
		// User-data string that saveNewAvi()/processFrame() will replace.
		private string _udToChange = "";
		// File offsets of the "strh" and video "strf" payloads (for change4CC).
		private long _m_fourCC_AviStreamHeader;
		private long _m_fourCC_AviVideoHeader;
		private long _m_filesize;
		// Logical read position tracked in parallel with the FileStream.
		private long _m_posStream;
		// Size/offset of the 'movi' and 'idx1' chunks found by OpenFile().
		private long _m_MoviSize=0;
		private long _m_MoviSizeNew=0;
		private long _m_MoviStart=0;
		private long _m_IdxSize=0;
		private long _m_IdxStart=0;		
		private double sizOfAudio=0;
		
		// Streams used while parsing (read) and while saving a copy (write).
		private FileStream aviStreamReader = null;
		private FileStream outFile = null;
		
		// True when a packed bitstream (multiple VOPs per chunk) is detected.
		private bool pbDetect = false;
		private bool _rec_ix_presence = false;
		
		// Copy-buffer size used by saveNewAvi().
		private int stdBufferSize = 4096;
		private int newTotalBytes = 0;
		
		// Per-frame offset/size recorded while rewriting the movi chunk;
		// consumed by writeIdx1Chunk() to rebuild the index.
		long[] framesOffset = new long[0];
		long[] framesSize = new long[0];
		
		#endregion GENERIC OBJECTS
		
		
		#region PROPERTIES
		
		// User-data string to be replaced when saving a new AVI.
		public string udToChange
		{	
			get	{	return _udToChange;}
			set	{	_udToChange = value ;}
		}
		
		// True if any 'rec '/'ix..' chunks were seen (OpenDML-style files).
		public bool rec_ix_presence
		{	get	{	return _rec_ix_presence;}	}

		// File offset of the stream-header ("strh") payload.
		public long fourCC_AVISTREAMHEADER_offset
		{	get	{	return _m_fourCC_AviStreamHeader;}	}

		// File offset of the video-format ("strf") FourCC field.
		public long fourCC_AVIVIDEOHEADER_offset
		{	get	{	return _m_fourCC_AviVideoHeader;}	}

		// Parsed main AVI header ("avih").
		public AVIMAINHEADER headerFile
		{	get	{	return myAviHeader;}	}
		
		// Per-stream video formats ("strf" of 'vids' streams).
		public BITMAPINFOHEADER[] videoStreams
		{	get	{	return myAviBitMap;}	}

		// Per-stream audio formats ("strf" of 'auds' streams).
		public WAVEFORMATEX[] audioStreams
		{	get	{	return myAviWave;}	}		
		
		// One AVISTREAMHEADER per "strh" chunk encountered.
		public AVISTREAMHEADER[] headerStreams
		{	get {	return myAviStreamHeader;}	}

		// User-data strings from JUNK/ISFT chunks.
		public string[] userData
		{	get	{	return myUData;}	}		

		// User-data strings found inside the movi chunk.
		public string[] MOVIuserData
		{	get	{	return myMUData;}	}		

		public string m_filename
		{	get {	return _m_filename; }		}
		
		public string m_shortname
		{	get {	return _m_shortname; }		}
		
		public long m_filesize
		{	get {	return _m_filesize; }		}
		
		// Size/start of the 'movi' chunk (payload, excluding the list type).
		public long m_MoviSize
		{	
			get {	return _m_MoviSize; }		
			set {	_m_MoviSize = value; }		
		}
				
		public long m_MoviStart
		{	
			get {	return _m_MoviStart; }
			set {	_m_MoviStart = value; }		
		}

		// Size/start of the 'idx1' chunk payload.
		public long m_IdxSize
		{	
			get {	return _m_IdxSize; }		
			set {	_m_IdxSize = value; }		
		}
				
		public long m_IdxStart
		{	
			get {	return _m_IdxStart; }
			set {	_m_IdxStart = value; }
		}
		
		#endregion PROPERTIES
		
		
		#region METHODS TO ADD ITEMS AT STRUCTURES
		
		/// <summary>
		/// Grows the myAviStreamHeader array by one slot (left at its
		/// default value; the caller fills it in).
		/// </summary>
		private void addNew_AVISTREAMHEADER()
		{
			if (myAviStreamHeader == null)
			{
				// No elements yet
				myAviStreamHeader = new AVISTREAMHEADER[1];
			}
			else
			{
				// Array.Resize preserves the existing elements and appends
				// one default slot — replaces the original manual copy loop.
				Array.Resize(ref myAviStreamHeader, myAviStreamHeader.Length + 1);
			}
			
			return;
		}
		
		/// <summary>
		/// Grows the myAviBitMap array by one slot (left at its default
		/// value; the caller fills it in).
		/// </summary>
		private void addNew_BITMAPINFOHEADER()
		{
			if (myAviBitMap == null)
			{
				// No elements yet
				myAviBitMap = new BITMAPINFOHEADER[1];
			}
			else
			{
				// Array.Resize preserves the existing elements and appends
				// one default slot — replaces the original manual copy loop.
				Array.Resize(ref myAviBitMap, myAviBitMap.Length + 1);
			}
			
			return;
		}

		/// <summary>
		/// Grows the myAviWave array by one slot (left at its default
		/// value; the caller fills it in).
		/// </summary>
		private void addNew_WAVEFORMATEX()
		{
			if (myAviWave == null)
			{
				// No elements yet
				myAviWave = new WAVEFORMATEX[1];
			}
			else
			{
				// Array.Resize preserves the existing elements and appends
				// one default slot — replaces the original manual copy loop.
				Array.Resize(ref myAviWave, myAviWave.Length + 1);
			}
			
			return;
		}
		
		/// <summary>
		/// Grows the myUData user-data string array by one (null) slot;
		/// the caller assigns the new last element.
		/// </summary>
		private void addNew_STRING()
		{
			if (myUData == null)
			{
				// No elements yet
				myUData = new string[1];
			}
			else
			{
				// Array.Resize preserves the existing elements and appends
				// one null slot — replaces the original manual copy loop.
				Array.Resize(ref myUData, myUData.Length + 1);
			}
			
			return;			
		}
		
		/// <summary>
		/// Grows the myMUData movi user-data string array by one (null)
		/// slot; the caller assigns the new last element.
		/// </summary>
		private void addNew_moviSTRING()
		{
			if (myMUData == null)
			{
				// No elements yet
				myMUData = new string[1];
			}
			else
			{
				// Array.Resize preserves the existing elements and appends
				// one null slot — replaces the original manual copy loop.
				Array.Resize(ref myMUData, myMUData.Length + 1);
			}
			
			return;			
		}

		#endregion METHODS TO ADD ITEMS AT STRUCTURES
		
		
        
        
        // Default constructor; all work is deferred to OpenFile().
        public clsParserAVI()
        {
        }
        
        
        
        
        
        
        
        
        
        /// <summary>
		/// Parse the selected file: walks the RIFF chunk tree, fills the
		/// header/stream/format structures, records the location of the
		/// 'movi' and 'idx1' chunks, and collects user-data strings.
		/// </summary>
		/// <param name="FileName">Path of the AVI file to parse.</param>
		/// <exception cref="ParserException">
		/// Thrown when the file is missing, not a RIFF file, or truncated.
		/// </exception>
		public void OpenFile(string FileName)
		{	
						
			// File Not Found...
			if (File.Exists(FileName) != true)
				throw new ParserException("File (" + FileName + ") Not Found...");				
			
			_VideoItems = new List<clsInfoItem>();
			_AudioItems = new List<clsInfoItem>();
			
			
			// Read File Infos
			FileInfo fi = new FileInfo(FileName);
			_m_filename = fi.FullName;
			_m_shortname = fi.Name;
			_m_filesize = fi.Length;
			
			// DEBUG
			Console.WriteLine(""); Console.WriteLine(""); Console.WriteLine("");
			Console.WriteLine("_m_filename  = " + _m_filename);
			Console.WriteLine("_m_shortname = " + _m_shortname);
			Console.WriteLine("_m_filesize  = " + _m_filesize.ToString("#,###.##"));
			
			// Open the streamer
			aviStreamReader = new FileStream(_m_filename, FileMode.Open, FileAccess.Read, FileShare.Read);
			_m_posStream = 0;
			
			#region Verify Integrity 
			
			// Detect File Format: RIFF header is FourCC + size + file type.
			int FourCC = readDWord();
			_m_posStream +=4;
			string sFourCC;
			string hexFourCC;
			int dataSize = readDWord();
			int fileType = readDWord();
			
			
			_m_posStream +=8;
			
			// Check FourCC Header			
			sFourCC = Utility.clsEncoding.FromFourCC(FourCC);
			Console.WriteLine("FourCC ".PadRight(20,(char)46) + sFourCC + " (" + dataSize.ToString("#,###.##") + ")");
			
			
			if (sFourCC != RIFF4CC && 
			    sFourCC != RIFX4CC)
			{
				// No Riff File
				aviStreamReader.Close(); aviStreamReader = null;
				throw new ParserException("Error. Not a valid RIFF file");
			}
			
			// Check File Size
			// NOTE(review): dataSize is a signed 32-bit value read from the
			// file, so >2GB (AVIX/OpenDML) files are not handled here.
			if (_m_filesize < (dataSize + 8) )
			{
				// No Riff File
				aviStreamReader.Close(); aviStreamReader = null;
				throw new ParserException("Error. Truncated file");
			}
			
			#endregion

			
			byte[] tmpByteArray;			
			int byteToRead;
			int readBytes;
			long byteOfList;
			int byteOfINFO=0;
			int byteOfINFOReaded=0;
			string strType = "";
			
			_m_fourCC_AviVideoHeader = 0;
			_m_fourCC_AviStreamHeader = 0;
			
			// Loop until EOF: each iteration consumes one chunk header
			// (FourCC + size) and then its payload.
			while (_m_posStream < _m_filesize )
			{
				FourCC = readDWord();				
				sFourCC = Utility.clsEncoding.FromFourCC(FourCC);
				hexFourCC = FourCC.ToString("X8");
				byteToRead = readDWord();
				
				// RIFF chunks are word-aligned: round odd sizes up.
				if (byteToRead % 2 != 0)
					byteToRead++;
					
				_m_posStream +=8;
				
				Console.WriteLine("FourCC ".PadRight(20,(char)46) + sFourCC + " (" + byteToRead.ToString("#,###.##") + ") [" + hexFourCC + "]" + "    filepos=" + _m_posStream.ToString("#,###.00") );
				
				
				// Check REC? or IX?? FourCC presence
				// BUGFIX: the original tested Substring(3)=="rec" and
				// Substring(2)=="ix", i.e. the *tail* of a 4-char FourCC,
				// which can never match — the flag was never set.  The
				// evident intent is a prefix test.
				if (sFourCC.ToLower().StartsWith("rec") ||
				    sFourCC.ToLower().StartsWith("ix") )
				{
					_rec_ix_presence = true;
				}
				
				// Bail out if the declared chunk size runs past EOF.
				if (_m_posStream + byteToRead > _m_filesize)
				{
					aviStreamReader.Close(); 
					aviStreamReader = null;
					return;
				}
				
				// Parse FourCC
				if (sFourCC == LIST4CC)
				{
					// LIST section: the first DWORD of the payload is the
					// list type; child chunks follow.
					FourCC = readDWord();
					_m_posStream += 4;
					sFourCC = Utility.clsEncoding.FromFourCC(FourCC);
					hexFourCC = FourCC.ToString("X8");
					byteOfList = byteToRead;
					Console.WriteLine("LIST type ".PadRight(20,(char)46) + sFourCC + "[" + hexFourCC + "]"+ "    filepos=" + _m_posStream.ToString("#,###.00") );
					
					
					// Verify FourCC
					if (FourCC == AviRiffData.ckidAVIMovieData)
					{
						// 'movi' section: remember where it starts, then scan
						// it for user data / packed-bitstream detection.
						_m_MoviStart = aviStreamReader.Position;
						_m_posStream += byteOfList-4;
						// aviStreamReader.Seek(byteOfList-4, SeekOrigin.Current);
						parseMoviChunk(byteOfList-4);
						_m_MoviSize = byteOfList-4;						
						 
					}
					else if (FourCC == AviRiffData.ckidINFOList )
					{
						// INFO section: remember its extent so the ISFT
						// handler below can skip whatever remains.
						byteOfINFOReaded=0;
						byteOfINFO=(int)(byteOfList-4);
					}
					// Other list types (hdrl, strl, ...) are walked through
					// by simply continuing the outer loop over their children.

					
				}
				else
				{
					// Other TAGS
					if(FourCC == AviRiffData.ckidMainAVIHeader)
					{
						// "avih": fixed 56-byte main header.
						tmpByteArray = new byte[56];
						readBytes = aviStreamReader.Read(tmpByteArray,0,56);
						_m_posStream +=readBytes;
						myAviHeader.loadDataStructure(tmpByteArray);
					
					}
					else if(FourCC == AviRiffData.ckidAVIStreamHeader)
					{
						// "strh": one per stream; fccType tells us whether the
						// following "strf" is video ('vids') or audio ('auds').
						tmpByteArray = new byte[byteToRead];
						readBytes = aviStreamReader.Read(tmpByteArray,0,byteToRead);						
						_m_posStream +=byteToRead;
						
						// Update Array of Stream Headers
						addNew_AVISTREAMHEADER();
						AVISTREAMHEADER tmpSH = new AVISTREAMHEADER();
						tmpSH.loadDataStructure(tmpByteArray);						
						myAviStreamHeader[ myAviStreamHeader.Length-1 ] = tmpSH;						
						strType = clsEncoding.FromFourCC(tmpSH.fccType);
						Console.WriteLine("STREAM TYPE ".PadRight(20,(char)46) + clsEncoding.FromFourCC(tmpSH.fccType) );
						Console.WriteLine("STREAM HEARER ".PadRight(20,(char)46) + clsEncoding.FromFourCC(tmpSH.fccHandler) );
						
						// Record the offset of the first stream header's
						// fccHandler field (used later by change4CC).
						if (_m_fourCC_AviVideoHeader == 0)
							_m_fourCC_AviStreamHeader = _m_posStream - (byteToRead - 4);
						
					}					
					else if(FourCC == AviRiffData.ckidStreamFormat)
					{
						// "strf": format block of the stream announced by the
						// preceding "strh".
						if (strType == "vids")
						{
							tmpByteArray = new byte[byteToRead];
							readBytes = aviStreamReader.Read(tmpByteArray,0,byteToRead);
							_m_posStream +=readBytes;
							
							// Offset of the biCompression FourCC inside the
							// BITMAPINFOHEADER (used later by change4CC).
							_m_fourCC_AviVideoHeader = _m_posStream - 24;
						
							// Update Array of Stream Format Video
							addNew_BITMAPINFOHEADER();
							BITMAPINFOHEADER tmpBMP = new BITMAPINFOHEADER();
							tmpBMP.loadDataStructure(tmpByteArray);
							myAviBitMap[ myAviBitMap.Length-1 ] = tmpBMP;							 
							
						}
						else if (strType == "auds")
						{
							tmpByteArray = new byte[byteToRead];
							readBytes = aviStreamReader.Read(tmpByteArray,0,byteToRead);
							_m_posStream +=readBytes;
							
							// Update Array of Stream Format Audio
							addNew_WAVEFORMATEX();
							WAVEFORMATEX tmpWFR = new WAVEFORMATEX();
							tmpWFR.loadDataStructure(tmpByteArray);						
							myAviWave[ myAviWave.Length-1 ] = tmpWFR;							
							string strAudioType = tmpWFR.wFormatTag.ToString("X4");

						}
						else						
						{
							// strf of a stream type we don't decode: skip it.
							aviStreamReader.Seek(byteToRead, SeekOrigin.Current);
							_m_posStream +=byteToRead;
						}
						
					}					
					else if(FourCC == AviRiffData.ckidAVIOldIndex)
					{
						// "idx1": record position/size; the entries themselves
						// are only rewritten when saving (writeIdx1Chunk).
						// parseIdxChunk(byteToRead);
						_m_IdxStart = aviStreamReader.Position;
						_m_IdxSize = byteToRead;
						aviStreamReader.Seek(byteToRead, SeekOrigin.Current);
						_m_posStream +=byteToRead;
					}
					else if(FourCC == AviRiffData.ckidJUNKTag)
					{
						// "JUNK": padding chunk, but keep any readable text
						// found inside it as user data.
						tmpByteArray = new byte[byteToRead];
						readBytes = aviStreamReader.Read(tmpByteArray,0,byteToRead);						
						_m_posStream +=readBytes;
						clsEncoding myEnc = new clsEncoding();
						string theStrData = myEnc.getAsciiFromArray(tmpByteArray);
						if (theStrData.Trim() != "")
						{	
							addNew_STRING();
							myUData[myUData.Length-1] = theStrData;
							Console.WriteLine("JUNKDATA ".PadRight(20,(char)46) + myEnc.getAsciiFromArray(tmpByteArray));							
						}
						// aviStreamReader.Seek(byteToRead, SeekOrigin.Current);
					}
					else if(FourCC == AviRiffData.ckidAVIISFT)
					{
						// "ISFT": encoding-software string inside the INFO list.
						tmpByteArray = new byte[byteToRead];
						readBytes = aviStreamReader.Read(tmpByteArray,0,byteToRead);
						_m_posStream +=readBytes;
						byteOfINFOReaded += byteToRead+8;
						addNew_STRING();				
						clsEncoding myEnc = new clsEncoding();
						myUData[myUData.Length-1] = myEnc.getAsciiFromArray(tmpByteArray);
						Console.WriteLine("ISFTDATA ".PadRight(20,(char)46) + myEnc.getAsciiFromArray(tmpByteArray));
						
						// If less than a chunk header remains in the INFO
						// list, skip the leftover bytes.
						// BUGFIX: the original lacked braces, so the
						// _m_posStream update ran unconditionally while the
						// Seek ran only inside the if — the indentation shows
						// both were meant to be conditional.
						if ((byteOfINFO - byteOfINFOReaded) < 8 )
						{
							aviStreamReader.Seek((byteOfINFO - byteOfINFOReaded), SeekOrigin.Current);
							_m_posStream +=(byteOfINFO - byteOfINFOReaded);
						}
					}
					else
					{
						// Unrecognized chunk: skip its payload.
						aviStreamReader.Seek(byteToRead, SeekOrigin.Current);
						_m_posStream +=byteToRead;
					}
					
				}
				

			}	// end while (_m_posStream < dataSize)

			
			// Close the streamer
			aviStreamReader.Close(); aviStreamReader = null;
			
			GetAudioInformations();			
			GetVideoInformations();
			
			
			return;
		}
		
		
		// Parse MOVI Chunk to extract xxdc or xxdb subareas.
		// Walks the chunk stream inside 'movi': the first video frame is
		// scanned for user data (parseDCuserdata), subsequent frames for
		// packed-bitstream VOPs (parseDCvopdata); everything else is skipped.
		// The reader is assumed to be positioned at the first child chunk.
		private void parseMoviChunk(long MoviChunkSize)
		{
			long tmpMoviPointer = 0;       // bytes consumed inside 'movi'
			int FourCC = 0;
			int byteOfRead = 0;
			string sFourCC = "";
			int frameCount = 0;            // video ('..dc'/'..db') chunks seen
			
			pbDetect=false;
			
			while (tmpMoviPointer < MoviChunkSize)
			{
				FourCC = readDWord();
				sFourCC = Utility.clsEncoding.FromFourCC(FourCC);
				byteOfRead = readDWord();
				
				// RIFF chunks are word-aligned: round odd sizes up.
				if ((byteOfRead % 2) != 0 )
					byteOfRead++;				
				
				tmpMoviPointer += 8;
				
				// Console.WriteLine(sFourCC + " Founded, size = " + byteOfRead.ToString());
				
				// Video chunks are named ##dc (compressed) / ##db (uncompressed).
				if (sFourCC.Substring(2,2) == "dc" || sFourCC.Substring(2,2) == "db")
	    		{
					
					frameCount ++;
					
	    			if (frameCount == 1)
	    				parseDCuserdata(byteOfRead);
	    			else
	    				parseDCvopdata(byteOfRead);	    			
	    			
	    			tmpMoviPointer += byteOfRead;
	    			
		    		// scan only the first 200 xxdc or xxdb frames in the MOVI
		    		// chunk, then seek straight to its end
		    		
		    		if (frameCount >= 200)
	    			{
	    				aviStreamReader.Seek(MoviChunkSize - tmpMoviPointer, SeekOrigin.Current);
	    				tmpMoviPointer += (MoviChunkSize - tmpMoviPointer);
	    			}				

				}
	    		else
	    		{
	    			// Non-video chunk (audio, sub-lists, ...): skip payload.
	    			//Console.WriteLine(sFourCC + " Founded, size = " + byteOfRead.ToString());
	    			aviStreamReader.Seek(byteOfRead, SeekOrigin.Current);
	    			tmpMoviPointer += byteOfRead;
	    		}

	    		
			}
			
		}
		
		// Extract UserData info from a DC subarea.
		// Reads the whole frame into memory and scans for MPEG-4 user-data
		// start codes; each user-data run (terminated by the next user-data,
		// VOL or VOP start code, or by end of buffer) is stored in myMUData.
		private void parseDCuserdata(long DCsubareaSize)
		{
			//aviStreamReader.Seek(DCsubareaSize, SeekOrigin.Current);
			
			clsEncoding myEnc = new clsEncoding();
			string outValue="";
			int sPoint = 0;                // start of current user-data code
			int ePoint = 0;                // start of the next start code
			
			byte[] dcBuffer = new byte[(int)DCsubareaSize];
			aviStreamReader.Read(dcBuffer,0,(int)DCsubareaSize);
			
			// Find UserData ... START			
			sPoint =  myEnc.compareBytesArray(dcBuffer, AviRiffData.UserDataBytes, 0);
			
			while (sPoint < DCsubareaSize && sPoint >= 0)
			{
				// Find where this user-data run ends: at the next user-data,
				// VOL, or VOP start code (checked in that order).
				ePoint = myEnc.compareBytesArray(dcBuffer, AviRiffData.UserDataBytes, sPoint + 3);
				
				if (ePoint < 0)
					ePoint = myEnc.compareBytesArray(dcBuffer, AviRiffData.VOLStartBytes , sPoint + 3);
				
				if (ePoint < 0)
					ePoint = myEnc.compareBytesArray(dcBuffer, AviRiffData.VOPStartBytes , sPoint + 3);
				
				if (ePoint < 0)
				{
					// No terminator: take from sPoint to end of the buffer.
					outValue = myEnc.getHexFromBytes(dcBuffer,sPoint+4, ((int)DCsubareaSize - (sPoint+3)));
					addNew_moviSTRING();
					myMUData[myMUData.Length-1] = myEnc.getAsciiFromHex(outValue);
					Console.WriteLine("UD founded".PadRight(20,(char)46) + myEnc.getAsciiFromHex(outValue));
					break;
				}
				else
				{
					// Take from sPoint to ePoint, then look for the next
					// user-data start code after the terminator.
					outValue = myEnc.getHexFromBytes(dcBuffer,sPoint+4, (ePoint - (sPoint+4)));
					addNew_moviSTRING();
					myMUData[myMUData.Length-1] = myEnc.getAsciiFromHex(outValue);
					Console.WriteLine("UD founded".PadRight(20,(char)46) + myEnc.getAsciiFromHex(outValue));
					sPoint = myEnc.compareBytesArray(dcBuffer, AviRiffData.UserDataBytes, ePoint);
				}
			}
			
			// Find UserData ... END
			
			dcBuffer = null;
			
		}
		
		

		// Count VOP start codes inside a DC subarea (Packet Bitstream Detect).
		// More than one VOP in a single video chunk means the stream uses a
		// packed bitstream, which sets the pbDetect flag.
		private void parseDCvopdata(long DCsubareaSize)
		{
			clsEncoding enc = new clsEncoding();
			int vopHits = 0;
			
			// Pull the whole frame into memory for scanning.
			byte[] frameBuffer = new byte[(int)DCsubareaSize];
			aviStreamReader.Read(frameBuffer, 0, (int)DCsubareaSize);
			
			// Walk every VOP start-code occurrence in the buffer.
			for (int pos = enc.compareBytesArray(frameBuffer, AviRiffData.VOPStartBytes, 0);
			     pos >= 0 && pos < (DCsubareaSize - 2);
			     pos = enc.compareBytesArray(frameBuffer, AviRiffData.VOPStartBytes, pos + 3))
			{
				vopHits++;
			}
			
			// Two or more VOPs in one chunk => packed bitstream.
			if (vopHits > 1)
				pbDetect = true;
			
			frameBuffer = null;
			
		}
		
		
		
		// Parse the IDX1 chunk and count its 16-byte entries.
		// Currently unused — the call site in OpenFile() is commented out.
		// Each idx1 entry is 16 bytes: FourCC, flags, offset, size.
		// BUGFIX/cleanup: the original declared byteOfRead but never assigned
		// it, so its odd-size adjustment was dead, misleading code; removed.
		private void parseIdxChunk(long IdxChunkSize)
		{
			long tmpIdxPointer = 0;
			int FourCC = 0;
			string sFourCC = "";
			int frameCount = 0;
			
			while (tmpIdxPointer < IdxChunkSize)
			{
				// Read the entry's FourCC (advances the stream by 4 bytes).
				FourCC = readDWord();
				sFourCC = Utility.clsEncoding.FromFourCC(FourCC);
				
				tmpIdxPointer += 4;
				frameCount ++;
				// Skip the remaining three DWORDs (flags, offset, size).
				aviStreamReader.Seek(12, SeekOrigin.Current);
    			tmpIdxPointer += 12;				
			}
			
			Console.WriteLine("Total Frame Counted: " + frameCount.ToString() );
		}
		
		
		
		
		
		
		
		/// <summary>
		/// Detect audio codec
		/// </summary>
		/// <param name="audioVal">codec id (WAVE format tag as 4 hex digits)</param>
		/// <returns>codec string name</returns>
		public string parseAudioType(string audioVal)
	    {
	    	// Map the format tag to a readable codec label; unknown tags
	    	// get "(?)".  Output format: "0x<tag> (<name>)".
	    	string codecName;
	    	switch (audioVal)
	    	{
	    		case "0055": codecName = "(MP3)"; break;
	    		case "0001": codecName = "(PCM)"; break;
	    		case "2001": codecName = "(DTS)"; break;
	    		case "000A": codecName = "(WMA9)"; break;
	    		case "0030": codecName = "(Dolby AC2)"; break;
	    		case "0050": codecName = "(MPEG)"; break;
	    		case "2000": codecName = "(AC3)"; break;
	    		default:     codecName = "(?)"; break;
	    	}
	    	return "0x" + audioVal + " " + codecName;
	    }
		
		
		
		
		
		/// <summary>
		/// Write a modified copy of the parsed AVI to newFileName:
		/// header bytes are copied verbatim, the 'movi' chunk is rewritten
		/// frame-by-frame (writeMoviChunk, which may replace user data),
		/// the 'idx1' chunk is rebuilt with the new frame offsets/sizes
		/// (writeIdx1Chunk), and finally the RIFF and movi size fields are
		/// patched in place.  The ref parameters report progress to the GUI;
		/// setting _saveFlag to false from outside cancels the operation.
		/// </summary>
		public void saveNewAvi(string newFileName, 
		                   ref bool _redrawInfo, 
			               ref string _saveError, 
			               ref double _saveStatus,
			               ref bool _saveFlag, 
			               ref double _totProgressItems, 
			               ref string _saveInfo )
		{
		
			
			// Begin Write new avi file
			aviStreamReader = new FileStream(_m_filename, FileMode.Open, FileAccess.Read, FileShare.Read);
			outFile = new FileStream(newFileName, FileMode.Create,FileAccess.Write, FileShare.None);
			
			
			FileInfo fi = new FileInfo(_m_filename);
			long filmsize = fi.Length;
			long filmoffset = 0;
			long diffBytes = 0;
			byte[] tmpBytes;
			// Estimated frame count: one idx1 entry is 16 bytes; +3000 slack.
			int totMoviItems = Convert.ToInt32(_m_IdxSize / 16) + 3000;
			
			// Filled by writeMoviChunk, consumed by writeIdx1Chunk.
			framesOffset = new long[ totMoviItems ];
			framesSize = new long[ totMoviItems ];
			int bufferSize = stdBufferSize;
			
			// Write data before MOVI chunk			
			_totProgressItems = (double)_m_MoviStart;
			
			Console.WriteLine("Write Header START");			
			_saveInfo = clsLanguages.EXPMESSAGE1;
			_redrawInfo=true;
			
			Console.WriteLine("filmoffset = " + filmoffset.ToString() + "  - _m_MoviStart = " + _m_MoviStart.ToString() );
			
			// Buffered copy of everything up to the start of 'movi'.
			while (filmoffset < _m_MoviStart)
			{
				// Shrink the last buffer so we stop exactly at _m_MoviStart.
				if ((filmoffset + bufferSize) > _m_MoviStart)
					bufferSize = (int)(_m_MoviStart - filmoffset);
				
				tmpBytes = new byte[bufferSize];
				aviStreamReader.Read(tmpBytes, 0, bufferSize);
				outFile.Write(tmpBytes, 0, bufferSize);
				
				_saveStatus = (double)filmoffset;
				// myPBar.Text = Convert.ToInt32(myPBar.Fraction * 100).ToString("D3") + "%";
				
				// Cancelled from the GUI: release both streams and abort.
				if (_saveFlag == false)
				{
					outFile.Close();
					outFile.Dispose();
					outFile = null;
					aviStreamReader.Close();
					aviStreamReader.Dispose();
					aviStreamReader = null;
					
					return;
				}
				
				filmoffset += bufferSize;				
			}
			Console.WriteLine("Write Header END");
			
			
			// Write new MOVI chunk from old (non-zero return = cancelled).
			if (writeMoviChunk(ref _redrawInfo, ref _saveError, ref _saveStatus, ref _saveFlag, ref _totProgressItems, ref _saveInfo) != 0)
			{
					outFile.Close();
					outFile.Dispose();
					outFile = null;
					aviStreamReader.Close();
					aviStreamReader.Dispose();
					aviStreamReader = null;
					
					return;
			}
			
			
			
			// Write data before IDX1 chunk (bytes between 'movi' and 'idx1').
			diffBytes = (_m_IdxStart - aviStreamReader.Position);
			bufferSize = stdBufferSize;
			
			_saveStatus = 0;
			_totProgressItems = (double)diffBytes;
			filmoffset = 0;
			
			Console.WriteLine("Write CONTENT_1 START");
			_saveInfo = clsLanguages.EXPMESSAGE2;
			_redrawInfo = true;
			while (filmoffset < diffBytes)
			{
				if ((filmoffset + bufferSize) > diffBytes)
					bufferSize = (int)(diffBytes - filmoffset);
						
				tmpBytes = new byte[bufferSize];
				aviStreamReader.Read(tmpBytes, 0, bufferSize);
				outFile.Write(tmpBytes, 0, bufferSize);
				
				_saveStatus = (double)filmoffset;
				// myPBar.Text = Convert.ToInt32(myPBar.Fraction * 100).ToString("D3") + "%";
				
				if (_saveFlag == false)
				{
					outFile.Close();
					outFile.Dispose();
					outFile = null;
					aviStreamReader.Close();
					aviStreamReader.Dispose();
					aviStreamReader = null;
					return;
				}
				
				filmoffset += bufferSize;				
			}
			
			Console.WriteLine("Write CONTENT 1 END");
			
			
			
			// Write new IDX1 chunk from new MOVI created
			if (writeIdx1Chunk(ref _redrawInfo, ref _saveError, ref _saveStatus, ref _saveFlag, ref _totProgressItems, ref _saveInfo) != 0)
			{
					outFile.Close();
					outFile.Dispose();
					outFile = null;
					aviStreamReader.Close();
					aviStreamReader.Dispose();
					aviStreamReader = null;
					return;
			}
			
			
			// Write data after IDX1 chunk			
			diffBytes = (filmsize - aviStreamReader.Position);
			bufferSize = stdBufferSize;
			
			_saveStatus = 0;			
			_totProgressItems = (double)diffBytes;
			filmoffset = 0;
			
			Console.WriteLine("Write CONTENT 2 START");
			_saveInfo = clsLanguages.EXPMESSAGE2;
			_redrawInfo = true;
			while (filmoffset < diffBytes)
			{
				if ((filmoffset + bufferSize) > diffBytes)
					bufferSize = (int)(diffBytes - filmoffset);
						
				tmpBytes = new byte[bufferSize];
				aviStreamReader.Read(tmpBytes, 0, bufferSize);
				outFile.Write(tmpBytes, 0, bufferSize);
				
				_saveStatus = (double)filmoffset;
				// myPBar.Text = Convert.ToInt32(myPBar.Fraction * 100).ToString("D3") + "%";
				
				if (_saveFlag == false)
				{
					outFile.Close();
					outFile.Dispose();
					outFile = null;
					aviStreamReader.Close();
					aviStreamReader.Dispose();
					aviStreamReader = null;					
					return;
				}
				
				filmoffset += bufferSize;				
			}
			
			Console.WriteLine("Write CONTENT 2 END");
			
			// Close output
			outFile.Close();
			outFile.Dispose();
			outFile = null;
			
			// Read File Infos: RIFF size field = total file size minus the
			// 8-byte RIFF header itself.
			fi = new FileInfo(newFileName);
			newTotalBytes = (int)(fi.Length - 8);
			fi = null;
			
			// Update MOVI Size and File Size: patch the RIFF size at offset 4
			// and the 'movi' LIST size just before _m_MoviStart (+4 for the
			// 'movi' list-type DWORD).
			outFile = new FileStream(newFileName, FileMode.Open,FileAccess.Write, FileShare.None);
			outFile.Seek(4,SeekOrigin.Begin);
			outFile.Write(intToByteArray(newTotalBytes),0,4);
			outFile.Seek(_m_MoviStart - 8,SeekOrigin.Begin);
			outFile.Write(intToByteArray((int)(_m_MoviSizeNew + 4)),0,4);
						
			// Close all streams and return to Main Window
			outFile.Close();
			outFile.Dispose();
			outFile = null;
			aviStreamReader.Close();
			aviStreamReader.Dispose();
			aviStreamReader = null;
			
			_saveFlag=false;
		}
		
		
		
		
		// write MOVI Chunk frames
		// Re-emits every chunk of the source 'movi' list into outFile.
		// Video chunks (##dc/##db) are passed through processFrame(), which
		// may replace the embedded user data and change the frame length;
		// all other chunks are copied verbatim.  For every chunk the new
		// offset and size are recorded in framesOffset/framesSize so that
		// writeIdx1Chunk() can rebuild the index.  Returns 0 on success,
		// 1 when cancelled via _saveFlag.
		private int writeMoviChunk(ref bool _redrawInfo, 
			               		   ref string _saveError, 
			                       ref double _saveStatus,
			                       ref bool _saveFlag, 
			                       ref double _totProgressItems,
			                       ref string _saveInfo  )
		{
			long tmpMoviPointer = 0;     // bytes consumed from the source movi
			int FourCC = 0;
			int byteOfRead = 0;          // padded (even) chunk payload size
			int sizeOfFrame = 0;         // unpadded size written to the header
			int newByteOfRead = 0;
			string sFourCC = "";
			int frameCount = 0;		
			int lenOfFrame = 0;          // NOTE(review): unused
			string hexFourCC = "";
			byte[] tmpByteArray = new byte[0];
			int stepGuiUpdate = 1024;    // update progress every N chunks
			int stepFrame = 0;

			// Write data before MOVI chunk
			_saveStatus = 0;			
			_totProgressItems = (double)_m_MoviSize;
			_saveInfo = clsLanguages.EXPMESSAGE3;
			_redrawInfo = true;
			
			Console.WriteLine("Write MOVI START (" + _m_MoviStart.ToString() + " SIZE " + _m_MoviSize.ToString() + ")");
			_m_MoviSizeNew = 0;
			
			while (tmpMoviPointer < _m_MoviSize)
			{
				// Exit if Cancel button was pressed
				if (_saveFlag == false)
					return 1;
				
				FourCC = readDWord();
				hexFourCC = FourCC.ToString("X8");
				sFourCC = Utility.clsEncoding.FromFourCC(FourCC);
				byteOfRead = readDWord();
				
				tmpMoviPointer += 8;
				_m_MoviSizeNew += 8;
				
				// Keep the unpadded size for the chunk header; read the
				// padded (word-aligned) amount from the stream.
				sizeOfFrame = byteOfRead; 
				if ((byteOfRead % 2) != 0 )
					byteOfRead++;				
				
				tmpByteArray = new byte[byteOfRead];
				aviStreamReader.Read(tmpByteArray, 0, byteOfRead);
				
				stepFrame ++;
				
				// Verify frame type: ##dc/##db are video frames.
				if (sFourCC.Substring(2,2) == "dc" || sFourCC.Substring(2,2) == "db")
	    		{
	    			// processFrame may swap the user data and updates
	    			// sizeOfFrame (by ref) to the new unpadded length.
	    			tmpByteArray = processFrame(tmpByteArray, ref sizeOfFrame);
	    			newByteOfRead = tmpByteArray.Length;
	    			
	    			framesOffset[frameCount] = outFile.Position;
	    			framesSize[frameCount] = (long)newByteOfRead;
	    			outFile.Write(intToByteArray(FourCC),0, 4);
	    			outFile.Write(intToByteArray(sizeOfFrame),0, 4);
	    			outFile.Write(tmpByteArray, 0, newByteOfRead);
	    			
	    			tmpMoviPointer += byteOfRead;
	    			_m_MoviSizeNew += newByteOfRead;
					
				}
	    		else
	    		{
	    			// Non-video chunk: copy through unchanged.
	    			framesOffset[frameCount] = outFile.Position;	    			
	    			framesSize[frameCount] = (long)byteOfRead;
	    			outFile.Write(intToByteArray(FourCC),0, 4);
	    			outFile.Write(intToByteArray(sizeOfFrame),0, 4);
	    			outFile.Write(tmpByteArray, 0, byteOfRead);
	    			
	    			tmpMoviPointer += byteOfRead;
	    			_m_MoviSizeNew += byteOfRead;
	    		}

	    		frameCount ++;
	    		
	    		if (stepFrame >= stepGuiUpdate)
	    		{
		    		// Update progressbar
		    		_saveStatus = (double)tmpMoviPointer;
					// myPBar.Text = Convert.ToInt32(myPBar.Fraction * 100).ToString("D3") + "%";
					
					stepFrame = 0;
				}
	    		
	    		
			}
			Console.WriteLine("Tot Frames: " + frameCount.ToString());
			Console.WriteLine("Write MOVI END");
			
			return 0;
			
		}
		
		// Replace the user-data string _udToChange inside a video frame with
		// the fixed 12-byte tag "DivX999b000p".  If the string is not found,
		// the frame is returned unmodified.  frameLength (in/out) is updated
		// to the new unpadded length; the returned array may carry one extra
		// padding byte that is intentionally NOT counted in frameLength.
		private byte[] processFrame(byte[] inByteArray, ref int frameLength)
		{
			clsEncoding myEnc = new clsEncoding();			
			byte[] tmpByteArray = null;
			byte[] userdataOld = null;
			byte[] outByteArray = null;
			ASCIIEncoding TextEncoding = new ASCIIEncoding();
			
			tmpByteArray = inByteArray;
			userdataOld = TextEncoding.GetBytes(_udToChange);

			// Locate the old user-data string inside the frame (-1 if absent).
			int startPos = myEnc.compareBytesArray(tmpByteArray, userdataOld,0);
			
			// int totalFrameBytes = 0;
			int totalFrameBytes, newFrameLength;
			
			
			if (startPos >= 0)
			{
				// New length: old length minus the replaced string plus the
				// 12-byte replacement tag.
				// totalFrameBytes = tmpByteArray.Length - _udToChange.Length + 12;
				newFrameLength = (frameLength - _udToChange.Length) + 12;
				
				// Padded to an even number of bytes but make sure the padding isn't included
                // in the size written to the chunk header or index
				totalFrameBytes = newFrameLength;
				
				if ((totalFrameBytes % 2) != 0)
					totalFrameBytes ++;
				
				// frameLength = totalFrameBytes;
				// frameLength = newFrameLength;
				
				
				// Splice: bytes before the match, the replacement tag, then
				// the bytes after the old user-data string.
				outByteArray = new byte[totalFrameBytes];
				Array.Copy(tmpByteArray, outByteArray, startPos);
				Array.Copy(TextEncoding.GetBytes("DivX999b000p"), 0, outByteArray, startPos, 12);
				Array.Copy(tmpByteArray, 
				           startPos + _udToChange.Length , 
				           outByteArray, 
				           startPos + 12, 
				           frameLength - (_udToChange.Length + startPos));
				
				frameLength = newFrameLength;
			}
			else
				outByteArray = tmpByteArray;
			
			
			
			return outByteArray;			
		}
		
		
		// Write new Idx1 Chunk
		// Copies the original 16-byte idx1 entries (FourCC + flags) but
		// overwrites each entry's offset and size fields with the values
		// recorded by writeMoviChunk() in framesOffset/framesSize.
		// Returns 0 on success, 1 when cancelled via _saveFlag.
		private int writeIdx1Chunk(ref bool _redrawInfo, 
			                       ref string _saveError, 
			                       ref double _saveStatus,
			                       ref bool _saveFlag, 
			                       ref double _totProgressItems, 
			                       ref string _saveInfo )
		{
			long tmpIdxPointer = 0;      // bytes of idx1 rewritten so far
			int FourCC = 0;
			int byteOfRead = 0;
			string sFourCC = "";
			int frameCount = 0;          // index into framesOffset/framesSize
			string hexFourCC = "";
			byte[] tmpByteArray = new byte[16];
			byte[] tmpDWordArray = new byte[4];
			int stepGuiUpdate = 256;     // update progress every N entries
			int stepFrame = 0;
			
			// Write data before MOVI chunk
			_saveStatus = 0;			
			_totProgressItems = (double)_m_IdxSize;
			_saveInfo = clsLanguages.EXPMESSAGE4;
			_redrawInfo=true;
			Console.WriteLine("Write IDX START (" + _m_IdxStart.ToString() + " SIZE " + _m_IdxSize.ToString() + ")");
			
			while (tmpIdxPointer < _m_IdxSize)
			{
				// Exit if Cancel button was pressed
				if (_saveFlag == false)
					return 1;
				
				// Read the whole original entry: FourCC, flags, offset, size.
				aviStreamReader.Read(tmpByteArray,0,16);
				
				// Offsets are relative to the start of the 'movi' list type
				// (i.e. 4 bytes before _m_MoviStart).
				tmpDWordArray = intToByteArray((int)(framesOffset[frameCount] - (_m_MoviStart - 4)));
				for (int j=0; j<4; j++)
					tmpByteArray[8+j]=tmpDWordArray[j];

				// Bytes 12-15: the chunk's new size.
				tmpDWordArray = intToByteArray((int)framesSize[frameCount]);
				for (int j=0; j<4; j++)
					tmpByteArray[12+j]=tmpDWordArray[j];
				
				outFile.Write(tmpByteArray,0,16);
				
				tmpIdxPointer += 16;
				
				frameCount ++;
				stepFrame++;
	    		
	    		if (stepFrame >= stepGuiUpdate)
	    		{
		    		// Update progressbar
		    		_saveStatus = (double)tmpIdxPointer;
					// myPBar.Text = Convert.ToInt32(myPBar.Fraction * 100).ToString("D3") + "%";
				
					stepFrame = 0;
				}
				
			}
			
			
			Console.WriteLine("Tot Frames: " + frameCount.ToString());
			Console.WriteLine("Write IDX END");
			
			return 0;
		}
		
		
		
		
		/// <summary>
		/// Read 2 bytes and return the value
		/// </summary>
		/// <returns></returns>
		/// <summary>
		/// Reads 2 bytes from aviStreamReader and returns them as a
		/// little-endian unsigned 16-bit value (low byte first).
		/// </summary>
		/// <returns>The WORD value in the range 0..65535.</returns>
		private int readWord()
		{	
			byte[] tmpBuffer = new byte[2];
			// NOTE(review): the byte count returned by Read is ignored, so a
			// short read at EOF silently yields stale buffer bytes — same as
			// the original behavior; confirm callers never read past EOF.
			aviStreamReader.Read(tmpBuffer,0,2);
			return (tmpBuffer[0]) + (tmpBuffer[1]<<8);
		}
		
		// returns a byte array of length 4
		private byte[] intToByteArray(int i) 
		{
			byte[] dword = new byte[4];
			dword[0] = (byte) (i & 0x00FF);
			dword[1] = (byte) ((i >> 8) & 0x000000FF);
			dword[2] = (byte) ((i >> 16) & 0x000000FF);
			dword[3] = (byte) ((i >> 24) & 0x000000FF);
			return dword;
		}


		/// <summary>
		/// Read 4 bytes and return the value
		/// </summary>
		/// <returns></returns>
		private int readDWord()
		{	
			int retValue;
			int readBytes;
			byte[] tmpBuffer = new byte[4];
			readBytes = aviStreamReader.Read(tmpBuffer,0,4);
			retValue = tmpBuffer[0]+(tmpBuffer[1]<<8)+(tmpBuffer[2]<<16)+(tmpBuffer[3]<<24);
			return retValue;
		}

        
		
        /// <summary>
        /// Patches the audio and video stream-header FourCC codes in place in
        /// the AVI file: writes the 4 bytes of ASHval at absolute offset ASHpos
        /// and the 4 bytes of VSHval at absolute offset VSHpos.
        /// </summary>
        /// <param name="ASHval">New audio stream-header FourCC (4 chars).</param>
        /// <param name="VSHval">New video stream-header FourCC (4 chars).</param>
        /// <param name="ASHpos">Absolute file offset of the audio FourCC.</param>
        /// <param name="VSHpos">Absolute file offset of the video FourCC.</param>
        public void change4CC(string ASHval, string VSHval, long ASHpos, long VSHpos)
        {
			// BUGFIX: 'using' guarantees the file handle is released even if a
			// seek/write throws (the original leaked the FileStream on
			// exception).  Also seek from Begin for both patches: the original
			// used SeekOrigin.Current for the first one, which only worked
			// because a freshly opened stream starts at position 0.
			using (FileStream updaterSW = new FileStream(_m_filename , FileMode.Open ,FileAccess.ReadWrite))
			{
				updaterSW.Seek(ASHpos, SeekOrigin.Begin);
				byte[] tmpASHarray = Utility.clsEncoding.ToFourCCByte(ASHval);
				Console.WriteLine(tmpASHarray[0].ToString("X2") + 
				                  tmpASHarray[1].ToString("X2") + 
				                  tmpASHarray[2].ToString("X2") + 
				                  tmpASHarray[3].ToString("X2"));
				updaterSW.Write(tmpASHarray, 0, 4);

				updaterSW.Seek(VSHpos, SeekOrigin.Begin);
				byte[] tmpVSHarray = Utility.clsEncoding.ToFourCCByte(VSHval);
				Console.WriteLine(tmpVSHarray[0].ToString("X2") + 
				                  tmpVSHarray[1].ToString("X2") + 
				                  tmpVSHarray[2].ToString("X2") + 
				                  tmpVSHarray[3].ToString("X2"));
				updaterSW.Write(tmpVSHarray, 0, 4);
			}
            
			return;
        }
        
        
        
        
        
        
        
        
        
        
        
        
		
	    /// <summary>
	    /// Builds the _VideoItems info list for the 'vids' stream: codec,
	    /// frame size, average bitrate, duration, frame counts, quality, plus
	    /// any INFO/user-data strings.  Also records the DivX user-data string
	    /// to rewrite later (_udToChange).
	    /// NOTE(review): divides by dwScale, dwMicroSecPerFrame and
	    /// dwTotalFrames without zero checks, and reads the sizOfAudio field
	    /// computed by GetAudioInformations — presumably that method runs
	    /// first; confirm the call order.
	    /// </summary>
	    private void GetVideoInformations () {
			int k, l;
			int iV=0;
			int iA=0;
			//double sizOfAudio=0;
			double sizOfVideo=0;
			double sizOfHeader=0;
			double videoQuality=0;
			double WdH=0;
			int framePerSec=1;
			int AverageVideoBitRate = 0;
			string fccDesc = "";
			string Frame_Size = "";
			string Total_Time = "";
			string Frame_Rate = "";
			string Total_Frames = "";
			string Video_Data_Rate = "";
			string Packet_Bitstream = "Off";
			
			Console.WriteLine(myAviStreamHeader.Length);
			// Walk all stream headers but only process video ('vids') streams.
			for (k=0; k < headerStreams.Length; k++ ) {
				Console.WriteLine(k);
				if(clsEncoding.FromFourCC(headerStreams[k].fccType)
				!= "vids" ) 
					continue;
				long totalTime = 0;
				fccDesc = clsEncoding.FromFourCC(headerStreams[k].fccHandler);
				// Total duration in microseconds, then folded down to h/m/s.
				if (headerFile.dwMicroSecPerFrame > 0)
					totalTime =(long)((long)headerFile.dwTotalFrames *
							  (long) headerFile.dwMicroSecPerFrame);
				totalTime = (long)(totalTime / 1000000.0);
				int hours = (int)(totalTime / 3600);
				totalTime -= (long)(hours * 3600);
				int mins = (int)(totalTime / 60);
				totalTime -= (long)(mins * 60);
				framePerSec = headerStreams[k].dwRate / headerStreams[k].dwScale;
				// Width/height ratio feeds the quality heuristic below.
				WdH = videoStreams[0].biWidth;
				WdH /= videoStreams[0].biHeight;				
				Frame_Size = 
					videoStreams[0].biWidth.ToString()
					+ " x " +
					videoStreams[0].biHeight.ToString();
				Total_Time = String.Format("{0:00}:{1:00}:{2:00.00#} seconds", hours, mins, totalTime);
				Frame_Rate = String.Format("{0:N2} Frames/Sec", (1000000.0 / headerFile.dwMicroSecPerFrame));
				Total_Frames = String.Format("{0:G}", headerFile.dwTotalFrames  );
				Video_Data_Rate = String.Format("{0:N2} frames/Sec", framePerSec );
				iV++;
			}
			// Rough estimate: 8 bytes of chunk header per frame per stream.
			sizOfHeader = headerFile.dwTotalFrames * 8 * (iA+1);
			
			sizOfVideo = m_MoviSize - sizOfHeader - sizOfAudio;
			Console.WriteLine("m_MoviSize = " + m_MoviSize.ToString());
			Console.WriteLine("sizOfHeader = " + sizOfHeader.ToString());
			Console.WriteLine("sizOfAudio = " + sizOfAudio.ToString());
			
			Console.WriteLine("sizOfVideo = " + sizOfVideo.ToString());
			Console.WriteLine("dwTotalFrames = " + headerFile.dwTotalFrames.ToString());
			
			// Kb/s: video bytes * fps * 8 bits, spread over all frames.
			AverageVideoBitRate = (int)((sizOfVideo * framePerSec * 8) /  (headerFile.dwTotalFrames * 1000));
			videoQuality = (0.75 * WdH) * (AverageVideoBitRate / framePerSec);
			
			if (pbDetect == true)
				Packet_Bitstream = "On";

			_VideoItems.Add(new clsInfoItem("Video codec:", clsEncoding.FromFourCC(videoStreams[0].biCompression)) );
			_VideoItems.Add(new clsInfoItem("Codec descr:", fccDesc));
			_VideoItems.Add(new clsInfoItem("Frame Size:", Frame_Size) );
			_VideoItems.Add(new clsInfoItem("Average Video Bitrate:", AverageVideoBitRate.ToString() + " Kb/Sec"));
			_VideoItems.Add(new clsInfoItem("Avi file size:", ((m_filesize / 1024).ToString("#,### KB"))));
			_VideoItems.Add(new clsInfoItem("Total Time:", Total_Time ));
			_VideoItems.Add(new clsInfoItem("Frame Rate:", Frame_Rate)) ;
			_VideoItems.Add(new clsInfoItem("Total Frames:", Total_Frames));
			_VideoItems.Add(new clsInfoItem("Video Data Rate:", Video_Data_Rate ));
			_VideoItems.Add(new clsInfoItem("Video Quality:", videoQuality.ToString("#,###.##") ));
			_VideoItems.Add(new clsInfoItem("Packet Bitstream:", Packet_Bitstream ));

			// INFO-list strings (encoder name etc.).
			if (userData.Length >0 )
				for (l=0; l<userData.Length;l++)
			    _VideoItems.Add(new clsInfoItem("Info Data[" + l + "]:", userData[l].ToString()));

			// User data found inside the movi list; a DivX tag (other than the
			// neutral "DivX999b000p") is remembered for later rewriting.
			if (MOVIuserData.Length >0 )
			{
			    for (l=0; l<MOVIuserData.Length;l++)
			    {
			        _VideoItems.Add(new clsInfoItem("User Data[" + l + "]:", MOVIuserData[l].ToString()));
			        if (MOVIuserData[l].IndexOf("DivX") == 0 && MOVIuserData[l] != "DivX999b000p")
			            this._udToChange = MOVIuserData[l];
			    }
			}


			return;
	    }
	    
	    /// <summary>
	    /// Builds the _AudioItems info list, one entry per non-video stream,
	    /// and accumulates the estimated total audio payload size into the
	    /// sizOfAudio field (read later by GetVideoInformations).
	    /// NOTE(review): divides by blockPerSec (dwRate/dwScale) with no zero
	    /// check; confirm headers always carry non-zero scale/rate.
	    /// </summary>
	    private void GetAudioInformations () {
			int k;
			int iA=0;			
			int blockPerSec=0;			
			// Class field: reset here, consumed by GetVideoInformations.
			sizOfAudio=0;
			
			// Every stream that is not 'vids' is treated as audio.
			for (k=0; k < headerStreams.Length; k++ ) {
				if(clsEncoding.FromFourCC(headerStreams[k].fccType)
				== "vids" )
					continue;
				string aFormat = audioStreams[iA].wFormatTag.ToString("X4");
				string CVBR = "";
				double audioRate = (8.0 * audioStreams[iA].nAvgBytesPerSec) ;
				
				//if (headerStreams[k].dwSampleSize > 0 )
				//	audioRate /= (double) headerStreams[k].dwSampleSize;
				if(aFormat == "0055") {
					CVBR = "";
					// MP3 CODEC
					_AudioItems.Add(new clsInfoItem("Audio " + 
							 (iA+1).ToString() + 
							 ":",
							 parseAudioType(aFormat)
							 + " " + CVBR + " " +
							 String.Format("{0:N2} Kb/Sec",
								       audioRate / 1000.0) + " " + 
							 "- " + audioStreams[iA].nSamplesPerSec + " Hz (" +
							 audioStreams[iA].nChannels.ToString() + " Channels)"));
				} else {
					// Other codec
					_AudioItems.Add(new clsInfoItem("Audio " + (iA+1).ToString() + ":",
							 parseAudioType(aFormat) + " " +
							 String.Format("{0:N2} Kb/Sec", audioRate / 1000.0) + " " + 
							 "- " + audioStreams[iA].nSamplesPerSec + " Hz (" +
							 audioStreams[iA].nChannels.ToString() + " Channels)"));
				}
										
				// Calc Data for AVBitrate
				blockPerSec = headerStreams[k].dwRate / headerStreams[k].dwScale;
					
				// Estimated stream payload: length * bytes/sec / blocks/sec.
				double tmpAudio = headerStreams[k].dwLength;
				tmpAudio *= audioStreams[iA].nAvgBytesPerSec;
				tmpAudio /= blockPerSec;
				sizOfAudio += tmpAudio;
				
				// increment total audio streams
				iA++;
					
			}
				
			return;
		}
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
    }
    
    
    
    
    
    
    
    
}


***************************************************************************************

/*
 * @(#)AviParser.java	1.39 01/03/16
 *
 * Copyright (c) 1996-2001 Sun Microsystems, Inc. All Rights Reserved.
 *
 * Sun grants you ("Licensee") a non-exclusive, royalty free, license to use,
 * modify and redistribute this software in source and binary code form,
 * provided that i) this copyright notice and license appear on all copies of
 * the software; and ii) Licensee does not utilize the software in a manner
 * which is disparaging to Sun.
 *
 * This software is provided "AS IS," without a warranty of any kind. ALL
 * EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ANY
 * IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
 * NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE
 * LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING
 * OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS
 * LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT,
 * INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER
 * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF
 * OR INABILITY TO USE SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGES.
 *
 * This software is not designed or intended for use in on-line control of
 * aircraft, air traffic, aircraft navigation or aircraft communications; or in
 * the design, construction, operation or maintenance of any nuclear
 * facility. Licensee represents and warrants that it will not use or
 * redistribute the Software for such purposes.
 */

package com.sun.media.parser.video;

import java.io.IOException;
import javax.media.Track;
import javax.media.Time;
import javax.media.Duration;
import javax.media.IncompatibleSourceException;
import javax.media.BadHeaderException;
import javax.media.TrackListener;
import javax.media.Buffer;
import javax.media.protocol.DataSource;
import javax.media.protocol.SourceStream;
import javax.media.protocol.PullSourceStream;
import javax.media.protocol.Seekable;
import javax.media.Format;
import javax.media.protocol.ContentDescriptor;
import javax.media.protocol.CachedStream;
import javax.media.format.AudioFormat;
import com.sun.media.format.WavAudioFormat;
import javax.media.format.VideoFormat;
import com.sun.media.parser.BasicPullParser;
import com.sun.media.vfw.BitMapInfo;


public class AviParser extends BasicPullParser {

    // Only AVI ("video.x_msvideo") input is accepted.
    private static ContentDescriptor[] supportedFormat =
	new ContentDescriptor[] {new ContentDescriptor("video.x_msvideo")};
    private PullSourceStream stream = null;
    private CachedStream cacheStream;
    private Track[] tracks;
    private Seekable seekableStream;
    private int numSupportedTracks = 0;
    private int length;            // total RIFF length incl. the 8-byte header
    private int audioTrack = -1;   // index into trakList, -1 = none
    private int videoTrack = -1;   // index into trakList, -1 = none
    private int keyFrameTrack = -1; // video track that has a key-frame table
    // Fixed on-disk sizes of the AVI header structures.
    private final static int SIZE_OF_AVI_INDEX   = 16;
    private final static int AVIH_HEADER_LENGTH = 56;
    private final static int STRH_HEADER_LENGTH = 56;
    private final static int STRF_VIDEO_HEADER_LENGTH = 40;
    private final static int STRF_AUDIO_HEADER_LENGTH = 16;

    // avih dwFlags bits; AVIF_KEYFRAME is the per-entry idx1 flag.
    final static int AVIF_HASINDEX 		= 0x00000010;
    final static int AVIF_MUSTUSEINDEX		= 0x00000020;
    final static int AVIF_ISINTERLEAVED 	= 0x00000100;
    final static int AVIF_WASCAPTUREFILE	= 0x00010000;
    final static int AVIF_COPYRIGHTED		= 0x00020000;
    final static int AVIF_KEYFRAME		= 0x00000010;

    // strh fccType values.
    final static String AUDIO         = "auds";
    final static String VIDEO         = "vids";

    // Chunk-id suffixes seen in the movi list / idx1 entries.
    final static String LISTRECORDCHUNK        = "rec ";
    final static String VIDEO_MAGIC		= "dc"; // Video
    final static String VIDEO_MAGIC_JPEG	= "db"; // Video
    final static String VIDEO_MAGIC_IV32a	= "iv"; // Indeo 3.2
    final static String VIDEO_MAGIC_IV32b	= "32"; // Indeo 3.2
    final static String VIDEO_MAGIC_IV31	= "31"; // Indeo 3.1
    final static String VIDEO_MAGIC_CVID	= "id"; // Cinepak
    final static String AUDIO_MAGIC		= "wb"; // Audio

    // Fields of the main 'avih' header, filled in by parseAVIH().
    private int usecPerFrame = 0;
    private long nanoSecPerFrame = 0;
    private int maxBytesPerSecond;
    private int paddingGranularity;
    private int flags;
    private int totalFrames = 0;
    private int initialFrames;
    private int numTracks = 0;
    private int suggestedBufferSize;
    private int width;
    private int height;
    private TrakList[] trakList;   // one per stream, filled by parseSTRH/STRF
    private int idx1MinimumChunkOffset;
    private int moviOffset = 0;    // base added to idx1 offsets when they are movi-relative
    private Time duration = Duration.DURATION_UNKNOWN;

    private boolean moviChunkSeen = false;
    private boolean idx1ChunkSeen = false;
    private int maxAudioChunkIndex = 0;
    private int maxVideoChunkIndex = 0;

    // Extra bytes after the fixed WAVEFORMATEX, kept for the audio decoder.
    private int extraHeaderLength = 0;
    private byte[] codecSpecificHeader = null;


    // Used to make the seek and the subsequent readBytes call atomic
    // operations, so that the video and audio track
    // threads don't trample each other.
    private Object seekSync = new Object();

    /**
     * Avi format requires that the stream be seekable and
     * random accessible.
     *
     * NOTE(review): returns the `seekable` flag, presumably computed by
     * BasicPullParser from the streams — confirm in the superclass; the
     * streams argument itself is not examined here.
     */
    protected boolean supports(SourceStream[] streams) {
	return seekable;
    }

    /**
     * Binds this parser to the data source.  The superclass populates the
     * streams[] array; the first stream is cached here under both of the
     * interfaces the parser needs (pull reads and random seeks).
     */
    public void setSource(DataSource source)
	throws IOException, IncompatibleSourceException {

	super.setSource(source);
	SourceStream first = streams[0];
	stream = (PullSourceStream) first;
	seekableStream = (Seekable) first;
    }

    /** Returns the single supported input content type, video.x_msvideo. */
    public ContentDescriptor [] getSupportedInputContentDescriptors() {
	return supportedFormat;
    }


    /**
     * Lazily parses the file header on first call and builds one Track per
     * AVI stream: AudioTrack for 'auds', VideoTrack for 'vids'.  Subsequent
     * calls return the cached array.
     *
     * @throws BadHeaderException if the movi chunk or idx1 index is missing,
     *         or the header is otherwise malformed
     */
    public Track[] getTracks() throws IOException, BadHeaderException {
 	if (tracks != null)
 	    return tracks;
	
	if (seekableStream == null) {
	    return new Track[0];
	}

	readHeader();
 	if (!moviChunkSeen) {
 	    throw new BadHeaderException("No movi chunk");
 	}

	if (!idx1ChunkSeen) {
 	    throw new BadHeaderException("Currently files with no idx1 chunk are not supported");
	}

	if (numTracks <= 0) {
	    throw new BadHeaderException("Error parsing header");
	}

	tracks = new Track[numTracks];

	// NOTE(review): a stream whose type is neither auds nor vids leaves
	// tracks[i] null — confirm callers tolerate that.
	for (int i = 0; i < tracks.length; i++) {
	    TrakList trakInfo = trakList[i];
	    if (trakInfo.trackType.equals(AUDIO)) {
		tracks[i] = new AudioTrack(trakInfo);
	    } else if (trakInfo.trackType.equals(VIDEO)) {
		tracks[i] = new VideoTrack(trakInfo);
	    }
	}
	return tracks;

    }

    /**
     * Parses the top-level RIFF structure: verifies the RIFF/'AVI ' magic,
     * walks the top-level chunks dispatching LIST(hdrl/strl/movi) and idx1,
     * skips everything else, then derives the total duration from the avih
     * frame count and frame period.
     *
     * @throws BadHeaderException if the magic strings are wrong
     * @throws IOException on read failure
     */
    private void readHeader()
	throws IOException, BadHeaderException {
	
	String magicRIFF = readString(stream);
	if (!(magicRIFF.equals("RIFF"))) {
	    throw new BadHeaderException("AVI Parser: expected string RIFF, got "
					 + magicRIFF);
	}

	length = readInt(stream, /* bigEndian = */ false);
	length += 8; // For RIFF and AVI

	String magicAVI = readString(stream);
	if (!(magicAVI.equals("AVI "))) {
	    throw new BadHeaderException("AVI Parser: expected string AVI, got "
					 + magicAVI);
	}

	int currentTrack = 0;
	// Each iteration consumes one chunk: id(4) + size(4) + payload.
	while (getLocation(stream) <= (length-12)) {
	    String next = readString(stream);
	    int subchunkLength = readInt(stream, /* bigEndian = */ false);
	    if (next.equals("LIST")) {
		String subchunk = readString(stream);
		if (subchunk.equals("hdrl")) {
		    parseHDRL();
		} else if (subchunk.equals("strl")) {
		    parseSTRL(subchunkLength, currentTrack);
		    currentTrack++;
		} else if (subchunk.equals("movi"))
		    // -4 because the 'movi' list-type string was just read.
		    parseMOVI(subchunkLength - 4);
		else {
		    // System.err.println("Unsupported subchunk " + subchunk +
		    //  " in LIST");
		    skip(stream, subchunkLength-4);
		}
	    } else if (next.equals("idx1")) {
		parseIDX1(subchunkLength);
	    } else {
		skip(stream, subchunkLength);
		// RIFF chunks are word-aligned: odd sizes carry a pad byte.
		if ( (subchunkLength & 1) > 0)
		    skip(stream, 1);
	    }
	}
	if ( (totalFrames != 0) && (usecPerFrame != 0) ) {
	    // usec * 1000 = nanoseconds, as Time expects.
	    duration = new Time((long) usecPerFrame * totalFrames * 1000);
	}
    }

    // The inner class can use this method as they cannot use
    // the getLocation(stream) method.
    /** Current absolute read position within the source stream. */
    private long getLocation() {
	return getLocation(stream);
    }

    /**
     * Parses the LIST/hdrl list: it must open with an 'avih' chunk holding
     * the main AVI header.  After the header is read, trakList is sized to
     * the stream count it declared.
     */
    private void parseHDRL() throws BadHeaderException {

	try {
	    String chunkId = readString(stream);
	    if (!chunkId.equals("avih")) {
		throw new BadHeaderException("AVI Parser: expected string AVIH, got "
					 + chunkId);
	    }
	    int avihLength = readInt(stream, /* bigEndian = */ false);
	    parseAVIH(avihLength);
	    // parseAVIH filled in numTracks; allocate one slot per stream.
	    trakList = new TrakList[numTracks];
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing hdrl");
	}

    }


    /**
     * Parses one LIST/strl list (one per stream): dispatches its strh and
     * strf sub-chunks and skips anything else (strd, strn, ...).
     *
     * NOTE(review): each sub-chunk occupies 8 + subchunkLength bytes on disk
     * (id + size + data) but only subchunkLength + 4 is subtracted below —
     * the 'length >= 12' guard appears to tolerate the discrepancy; confirm.
     *
     * @param length       total strl list length including its 12-byte header
     * @param currentTrack index into trakList for this stream
     */
    private void parseSTRL(int length, int currentTrack) throws BadHeaderException {
 	try {
	    if (currentTrack >= trakList.length ) {
		throw new BadHeaderException("inconsistent number of strl atoms");
	    }

	    length -= 12; // for "LIST <length> strl"
	    while (length >= 12) { // TODO: check
		String subchunkid = readString(stream);
		int subchunkLength = readInt(stream, /* bigEndian = */ false);
		if (subchunkid.equals("strh")) {
		    parseSTRH(subchunkLength, currentTrack);
		} else if (subchunkid.equals("strf")) {
		    if (trakList[currentTrack] == null) {
			throw new BadHeaderException("strf doesn't have a strh atom preceding it");
		    }
		    parseSTRF(subchunkLength, currentTrack);
		} else {
		    // System.err.println("Unsupported subchunk " + subchunkid +
		    //	       " in strl. length " + subchunkLength);
		    if ( (subchunkLength & 1) > 0) {
			// Some avi files like billy.avi are don't have strn
			// chunks with incorrect odd number for the length.
			// The actual offset is 1 more. If this correction
			// is not made all the remaining chunks will be read
			// incorrectly.
			subchunkLength++;
		    }
		    skip(stream, subchunkLength);
		}
		length -= (subchunkLength + 4); // 4 is for subchunkid
	    }
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing hdrl");
	}
    }

    /**
     * Parses a 'strh' (stream header) chunk: allocates the TrakList slot for
     * this stream and fills its twelve fixed fields, which appear in file
     * order as little-endian values.
     *
     * @param length       chunk length from the chunk header; must be >= 56
     * @param currentTrack index into trakList for this stream
     */
    private void parseSTRH(int length, int currentTrack) throws BadHeaderException {
  	try {
	    if (length < STRH_HEADER_LENGTH) {
		throw new BadHeaderException("strh: header length should be atleast " +
					     STRH_HEADER_LENGTH + " but is " +
					     length);
	    }

	    // Bind the slot once and read every field through the local.
	    TrakList t = new TrakList();
	    trakList[currentTrack] = t;
	    t.trackType     = readString(stream);
	    t.streamHandler = readString(stream);
	    t.flags         = readInt(stream, /* bigEndian = */ false);
	    t.priority      = readInt(stream, /* bigEndian = */ false);
	    t.initialFrames = readInt(stream, /* bigEndian = */ false);
	    t.scale         = readInt(stream, /* bigEndian = */ false);
	    t.rate          = readInt(stream, /* bigEndian = */ false);
	    t.start         = readInt(stream, /* bigEndian = */ false);
	    t.length        = readInt(stream, /* bigEndian = */ false);
	    t.suggestedBufferSize = readInt(stream, /* bigEndian = */ false);
	    t.quality       = readInt(stream, /* bigEndian = */ false);
	    t.sampleSize    = readInt(stream, /* bigEndian = */ false);

	    skip(stream, 8); // Padding
	    // Consume any trailing bytes beyond the fixed 56-byte layout.
	    if ( (length - STRH_HEADER_LENGTH) > 0)
		skip(stream, length - STRH_HEADER_LENGTH);

 	} catch (IOException e) {
 	    throw new BadHeaderException("IOException when parsing hdrl");
 	}
    }


    /**
     * Parses a 'strf' (stream format) chunk for the track described by the
     * preceding 'strh'.  For video this is a BITMAPINFOHEADER (any trailing
     * palette/codec bytes are kept in BitMapInfo.extraBytes); for audio it is
     * a WAVEFORMATEX whose optional codec-specific tail is kept in
     * codecSpecificHeader.
     *
     * @param length       chunk length in bytes from the chunk header
     * @param currentTrack index into trakList for this stream
     * @throws BadHeaderException on inconsistent sizes or unknown stream type
     */
    private void parseSTRF(int length, int currentTrack) throws BadHeaderException {
  	try {
	    String trackType = trakList[currentTrack].trackType;
	    if (trackType.equals(VIDEO)) {
		Video video = new Video();
		video.size = readInt(stream, /* bigEndian = */ false);
		video.width = readInt(stream, /* bigEndian = */ false);
		video.height = readInt(stream, /* bigEndian = */ false);
		video.planes = readShort(stream, /* bigEndian = */ false);
		video.depth = readShort(stream, /* bigEndian = */ false);
		// Instead of readString, read the four bytes to see
		// if its a raw format.
		byte [] intArray = new byte[4];
		readBytes(stream, intArray, 4);
		if (intArray[0] > 32) {
		    // Printable first byte: treat as a FOURCC compressor name.
		    video.compressor = new String(intArray);
		} else {
		    // Numeric BI_* compression codes.
		    switch (intArray[0]) {
		    case 0:
			video.compressor = VideoFormat.RGB;
			break;
		    case 1:
			video.compressor = "rle8";
			break;
		    case 2:
			video.compressor = "rle4";
			break;
		    case 3:
			video.compressor = VideoFormat.RGB;
			break;
		    }
		}
		
		// Get the BITMAPINFO data needed by the decompressor
		BitMapInfo bmi = new BitMapInfo();
		bmi.biWidth = video.width;
		bmi.biHeight = video.height;
		bmi.biPlanes = video.planes;
		bmi.biBitCount = video.depth;
		bmi.fourcc = new String(video.compressor);
		video.bitMapInfo = bmi;
		bmi.biSizeImage = readInt(stream, false);
		bmi.biXPelsPerMeter = readInt(stream, false);
		bmi.biYPelsPerMeter = readInt(stream, false);
		bmi.biClrUsed = readInt(stream, false);
		bmi.biClrImportant = readInt(stream, false);
		

		// Anything past the fixed 40-byte header is codec/palette data.
		if ( (length - STRF_VIDEO_HEADER_LENGTH) > 0) {
		    bmi.extraSize = (length - STRF_VIDEO_HEADER_LENGTH);
		    bmi.extraBytes = new byte[bmi.extraSize];
		    readBytes(stream, bmi.extraBytes, bmi.extraSize);
		}
		
		trakList[currentTrack].media = video;
		trakList[currentTrack].media.maxSampleSize =
		    trakList[currentTrack].suggestedBufferSize;
		videoTrack = currentTrack;
	    } else if (trackType.equals(AUDIO)) {
		Audio audio = new Audio();

                audio.formatTag = readShort(stream, /* bigEndian = */ false);
                audio.channels = readShort(stream, /* bigEndian = */ false);
                audio.sampleRate = readInt(stream, /* bigEndian = */ false);
                audio.avgBytesPerSec = readInt(stream, /* bigEndian = */ false);
                audio.blockAlign = readShort(stream, /* bigEndian = */ false);
                audio.bitsPerSample = readShort(stream, /* bigEndian = */ false);

		int remainingFormatSize = length - STRF_AUDIO_HEADER_LENGTH;

		codecSpecificHeader = null;
		int extraFieldsSize = 0;
		if (remainingFormatSize >= 2) {
		    // WAVEFORMATEX cbSize field: length of the codec tail.
		    extraFieldsSize = readShort(stream, /* bigEndian = */ false);
		    remainingFormatSize -= 2;

		    if (extraFieldsSize > 0) {
			codecSpecificHeader = new byte[extraFieldsSize];
			readBytes(stream, codecSpecificHeader, codecSpecificHeader.length);
			remainingFormatSize -= extraFieldsSize;
		    }


		    // TODO: do other encodings provide samplesPerBlock?
		    // Note that this info is there in codecSpecificHeader
		    if ( (audio.formatTag == WavAudioFormat.WAVE_FORMAT_ADPCM) ||
			 (audio.formatTag == WavAudioFormat.WAVE_FORMAT_DVI_ADPCM) ||
			 (audio.formatTag == WavAudioFormat.WAVE_FORMAT_GSM610) ) {

			if (extraFieldsSize < 2) {
			    throw new
				BadHeaderException("samplesPerBlock field not available for encoding" + audio.formatTag);
							 
			}
			audio.samplesPerBlock = BasicPullParser.parseShortFromArray(codecSpecificHeader,
								/* bigEndian = */ false);
		    }
		}
		    
		if (remainingFormatSize < 0) {
		    throw new BadHeaderException("Avi Parser: incorrect headersize in the STRF");
		}

		// BUGFIX: skip only the bytes not yet consumed.  The original
		// skipped (length - STRF_AUDIO_HEADER_LENGTH), which re-counts
		// the cbSize field and the codec tail already read above and
		// would over-run into the following chunk.
 		if ( remainingFormatSize > 0)
 		    skip(stream, remainingFormatSize);

		trakList[currentTrack].media = audio;
		audioTrack = currentTrack;
	    } else {
		throw new BadHeaderException("strf: unsupported stream type " + trackType);
	    }

 	} catch (IOException e) {
 	    throw new BadHeaderException("IOException when parsing hdrl");
 	}
    }

    /**
     * Parses the main 'avih' AVI header (56 fixed bytes, little-endian):
     * frame period, frame count, stream count, suggested buffer size and
     * frame dimensions.  Trailing bytes beyond the fixed layout are skipped.
     *
     * @param length chunk length from the chunk header; must be >= 56
     * @throws BadHeaderException if the chunk is shorter than 56 bytes
     */
    private void parseAVIH(int length) throws BadHeaderException {
 	try {
	    if (length < AVIH_HEADER_LENGTH) {
		throw new BadHeaderException("avih: header size is not 56");
	    }

	    usecPerFrame = readInt(stream, /* bigEndian = */ false);
	    // BUGFIX: widen before multiplying — the original computed
	    // usecPerFrame * 1000 in 32-bit arithmetic, which overflows for
	    // frame periods over ~2.1 seconds before being stored in the long.
	    nanoSecPerFrame = (long) usecPerFrame * 1000;
	    maxBytesPerSecond = readInt(stream, /* bigEndian = */ false);
	    paddingGranularity = readInt(stream, /* bigEndian = */ false);
	    flags = readInt(stream, /* bigEndian = */ false);
	    totalFrames = readInt(stream, /* bigEndian = */ false);
	    initialFrames = readInt(stream, /* bigEndian = */ false);
	    numTracks = readInt(stream, /* bigEndian = */ false);
	    suggestedBufferSize = readInt(stream, /* bigEndian = */ false);
	    width = readInt(stream, /* bigEndian = */ false);
	    height = readInt(stream, /* bigEndian = */ false);
	    skip(stream, 4*4); // int reserved[4]
	    if ( (length - AVIH_HEADER_LENGTH) > 0)
		skip(stream, length - AVIH_HEADER_LENGTH);
 	} catch (IOException e) {
 	    throw new BadHeaderException("IOException when parsing hdrl");
 	}
    }


    /**
     * Parses the 'idx1' index chunk: one 16-byte entry per data chunk
     * (ckid, flags, offset, length).  Entries are bucketed per stream into
     * trakList[n].chunkInfo; audio entries accumulate cumulative lengths for
     * time-to-byte seeking, video key-frame entries build the
     * indexToKeyframeIndex table.  Finally decides whether idx1 offsets are
     * file-relative or movi-relative and sets moviOffset accordingly.
     *
     * @param length idx1 chunk length in bytes
     * @throws BadHeaderException if movi has not been seen yet or an entry
     *         names an out-of-range stream
     */
    private void parseIDX1(int length) throws BadHeaderException {
	try {
	    if (!moviChunkSeen) {
		throw new BadHeaderException("idx1 chunk appears before movi chunk");
	    }
	    // TODO: check for valid length value
	    int numIndices = (length / SIZE_OF_AVI_INDEX);
	    String id;
	    int flag;
	    int chunkOffset;
	    int chunkLength;

	    // Worst case: every entry belongs to the same stream.
	    for (int i = 0; i < numTracks; i++) {
		if (trakList[i] == null) {
		    throw new BadHeaderException("Bad file format");
		}
		trakList[i].chunkInfo = new AVIIndexEntry[numIndices];
		if (trakList[i].trackType.equals(VIDEO)) {
		    trakList[i].keyFrames = new int[numIndices];
		}
	    }

	    idx1MinimumChunkOffset = Integer.MAX_VALUE;

	    for (int i = 0; i < numIndices; i++) {
		id = readString(stream);
 		if (id.equals(LISTRECORDCHUNK)) {
		    // $$$ DISCARD for now
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
 		    continue;
		}
		// Chunk ids are "NNxx" where NN is the decimal stream number.
		int streamNumber;
		try {
		    streamNumber = Integer.parseInt(id.substring(0,2));
		} catch (NumberFormatException e) {
		    // DISCARD chunk at it doesn't represent a stream
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
 		    continue;
		}

		if ( (streamNumber < 0) || (streamNumber >= numTracks) ) {
		    throw new BadHeaderException("index chunk has illegal stream # " +
						 streamNumber);
		}
 		flag = readInt(stream, /* bigEndian = */ false);
 		chunkOffset = readInt(stream, /* bigEndian = */ false);
 		chunkLength = readInt(stream, /* bigEndian = */ false);
		
		AVIIndexEntry[] chunkInfo = trakList[streamNumber].chunkInfo;
		int index = trakList[streamNumber].maxChunkIndex;
		
		chunkInfo[index] = new AVIIndexEntry();
		chunkInfo[index].id = id;
		chunkInfo[index].flag = flag;
		chunkInfo[index].chunkOffset = chunkOffset;
		chunkInfo[index].chunkLength = chunkLength;
		
		// Running total of audio bytes, used by setPosition to map a
		// byte position back to a chunk.
		if (trakList[streamNumber].trackType.equals(AUDIO)) {
		    int c = trakList[streamNumber].tmpCumulativeChunkLength += chunkLength;
		    chunkInfo[index].cumulativeChunkLength = c;
		}
		
		if (trakList[streamNumber].trackType.equals(VIDEO)) {
		    if ( (flag & AVIF_KEYFRAME) > 0 ) {
			int keyFrameIndex = trakList[streamNumber].numKeyFrames;
			trakList[streamNumber].keyFrames[keyFrameIndex] = index;
			trakList[streamNumber].numKeyFrames++;
		    }
		}
		trakList[streamNumber].maxChunkIndex++;
		
		if (chunkOffset < idx1MinimumChunkOffset) {
		    idx1MinimumChunkOffset = chunkOffset;
		}
	    }

	    // For video tracks, if all the frames are not key frames,
	    // build the indexToKeyframeIndex table
	    // which maps a video frame to a key frame.
	    for (int i = 0; i < numTracks; i++) {
		if (trakList[i].trackType.equals(VIDEO)) {
		    int numKeyFrames = trakList[i].numKeyFrames;
		    if (numKeyFrames > 0)
			keyFrameTrack = i;
		    int maxChunkIndex = trakList[i].maxChunkIndex;
		    if ( (numKeyFrames > 0) && (numKeyFrames < maxChunkIndex) ) {
			trakList[i].indexToKeyframeIndex =
			    buildIndexToKeyFrameIndexTable(trakList[i].keyFrames,
							   numKeyFrames,
							   maxChunkIndex);
		    }
		    trakList[i].keyFrames = null;
		}
	    }

	    if (idx1MinimumChunkOffset >=  moviOffset) {
		// idx1 chunk offsets refer to start of the file.
		moviOffset = 0;
	    }
	    moviOffset += 8; // for chunk id and size
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing IDX1");
	}
	idx1ChunkSeen = true;
    }

    /**
     * Handles the LIST/movi chunk.  When the avih flags promise an idx1
     * index, the movi payload is skipped entirely (frames are later fetched
     * by offset via the index) and its start offset is recorded.
     *
     * @param length movi payload length, excluding the 4-byte list type
     */
    private void parseMOVI(int length) throws BadHeaderException {
	try {
	    moviChunkSeen = true;
	    if ( (flags & AVIF_HASINDEX) == 0) {
		// No index promised: leave the stream where it is.
		return;
	    }
	    // Subtract 4 so the recorded offset covers the 'movi' string too.
	    moviOffset = (int) getLocation(stream) - 4;
	    skip(stream, length);
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing movi");
	}
    }

    /**
     * Seeks all enabled tracks to (approximately) the given media time.
     * If a key-frame track exists, the target time is first snapped back to
     * the nearest preceding key frame so video decoding can restart cleanly;
     * audio tracks are positioned by byte offset within their chunks.
     *
     * @param where    requested media time
     * @param rounding rounding hint (currently unused here)
     * @return the time actually positioned to (may be earlier than requested)
     */
    public Time setPosition(Time where, int rounding) {
	int keyframeNum = -1;
	if ( (keyFrameTrack != -1) && (tracks[keyFrameTrack].isEnabled()) ) {
	    // keyframe track present and is enabled

	    TrakList trakInfo = trakList[keyFrameTrack];
	    Track track = tracks[keyFrameTrack];
	    int frameNum = track.mapTimeToFrame(where);
	    keyframeNum = frameNum;
	    // TODO: handle FRAME_UNKNOWN
	    
	    if (trakInfo.indexToKeyframeIndex.length > frameNum) {
		keyframeNum = trakInfo.indexToKeyframeIndex[frameNum];
	    }
	    
	    // Snap the requested time back to the key frame's time.
	    if (keyframeNum != frameNum) {
		where = track.mapFrameToTime(keyframeNum);
	    }
	}
	for (int i = 0; i < numTracks; i++) {
	    if (!tracks[i].isEnabled())
		continue;

	    int chunkNumber =0;
	    int offsetWithinChunk = 0;
	    // The finally block always publishes chunkNumber/offsetWithinChunk
	    // to the track, including on the 'continue' (EOM) paths.
	    try {
		if (i == keyFrameTrack) {
		    chunkNumber = keyframeNum;
		    continue;
		}

		TrakList trakInfo = trakList[i];
		if (trakInfo.trackType.equals("vids")) {
		    // Frame number = elapsed time / frame period.
		    if (usecPerFrame != 0) {
			chunkNumber = (int) (where.getNanoseconds() / nanoSecPerFrame);
			if (chunkNumber < 0)
			    chunkNumber = 0;
			else if (chunkNumber >= trakInfo.maxChunkIndex) {
			    continue; // EOM
			}
		    }
		} else if (trakInfo.trackType.equals("auds")) {
		    // Audio position = seconds * average byte rate.
		    int bytePos = (int) ( where.getSeconds() *
					  ((Audio) trakInfo.media).avgBytesPerSec);
		    if (bytePos < 0)
			bytePos = 0;

		    // Note: the else statement can also handle the if
		    // case, ie maxChunkIndex == 1, but is separated here
		    // for clarity and a slight efficiency.
		    if (trakInfo.maxChunkIndex == 1) {
			if (bytePos >= trakInfo.chunkInfo[0].chunkLength) {
			    chunkNumber = trakInfo.maxChunkIndex; // EOM
			    continue; // EOM
			}
			chunkNumber = 0;
			offsetWithinChunk = bytePos;
		    } else {
			int approx;
			chunkNumber = trakInfo.getChunkNumber(bytePos);
			if (chunkNumber >= trakInfo.maxChunkIndex)
			    continue; // EOM
			
			// Offset = bytePos minus bytes in all prior chunks.
			approx = trakInfo.chunkInfo[chunkNumber].cumulativeChunkLength -
			    trakInfo.chunkInfo[chunkNumber].chunkLength;
			offsetWithinChunk = bytePos - approx;
		    }

		    // Keep the offset word-aligned ...
		    if ( (offsetWithinChunk & 1) > 0)
			offsetWithinChunk--;
		    
		    // ... and aligned to whole audio blocks.
		    int blockAlign = ((Audio) trakInfo.media).blockAlign;
		    if (blockAlign != 0) {
			offsetWithinChunk -= (offsetWithinChunk % blockAlign);
		    }
		}
	    } finally {
		((MediaTrack)tracks[i]).setChunkNumberAndOffset(chunkNumber,
								offsetWithinChunk);
	    }
	}
	return where;
    }

    /** Not implemented — always returns null (see TODO). */
    public Time getMediaTime() {
	return null;  // TODO
    }

    /**
     * Total media duration, computed in readHeader from
     * usecPerFrame * totalFrames; DURATION_UNKNOWN until the header is read.
     */
    public Time getDuration() {
	return duration;
    }

    /**
     * Returns a descriptive name for the plug-in.
     * This is a user readable string.
     */
    public String getName() {
	return "Parser for avi file format";
    }

    /** True for the two stream types this parser can build tracks for. */
    private boolean isSupported(String trackType) {
	if (trackType.equals(VIDEO))
	    return true;
	return trackType.equals(AUDIO);
    }


    /**
     * Builds a table mapping every video frame index to the index of its
     * governing key frame (itself, if the frame is a key frame; otherwise
     * the nearest preceding one).  Frame 0 is always treated as a key frame,
     * even when a buggy sync table omits it.
     *
     * @param syncSamples     key-frame indices, ascending; first numKeyFrames used
     * @param numKeyFrames    number of valid entries in syncSamples
     * @param numberOfSamples total number of video frames
     * @return array of length numberOfSamples mapping frame -> key frame
     */
    private int[] buildIndexToKeyFrameIndexTable(int[] syncSamples,
						 int numKeyFrames,
						 int numberOfSamples) {
	
	int[] mapping = new int[numberOfSamples];
	mapping[0] = 0;
	int previous = 0;
	// If frame 0 really is a key frame, skip its table entry.
	int start = (syncSamples[0] == 0) ? 1 : 0;

	for (int k = start; k < numKeyFrames; k++) {
	    int next = syncSamples[k];
	    // Frames between two key frames map back to the earlier one.
	    for (int j = previous + 1; j < next; j++)
		mapping[j] = previous;
	    mapping[next] = next;
	    previous = next;
	}

	// Frames after the last key frame all map back to it.
	int lastSyncFrame = syncSamples[numKeyFrames - 1];
	for (int j = lastSyncFrame + 1; j < numberOfSamples; j++)
	    mapping[j] = lastSyncFrame;
	return mapping;
    }

    /**
     * Base class for the per-track media description parsed from the
     * stream-format ('strf') chunk; concrete subclasses (audio, video)
     * know how to turn it into a JMF Format object.
     */
    private abstract class Media {
        // Largest sample/chunk size for this track, in bytes -- set
        // elsewhere during parsing; TODO confirm exact semantics.
        int maxSampleSize;
	// Builds (or returns a cached) JMF Format describing this media.
	abstract Format createFormat();
    }

    /**
     * Audio media description, populated from the WAVEFORMATEX-style
     * fields of the audio stream-format chunk.
     */
    private class Audio extends Media {
	int formatTag;        // wFormatTag codec id
	int channels;
	int sampleRate;
	int avgBytesPerSec;
	int blockAlign;
	int bitsPerSample;
	int samplesPerBlock;
	AudioFormat format = null; // lazily created, then cached

	/**
	 * Lazily builds (and caches) the JMF audio format for this track.
	 * The encoding string is looked up from the format tag; unknown
	 * tags fall back to the literal "unknown".
	 */
	Format createFormat() {
	    if (format == null) {
		String encodingString = (String)
		    WavAudioFormat.formatMapper.get(new Integer(formatTag));
		if (encodingString == null)
		    encodingString = "unknown";

		// Samples wider than 8 bits are treated as signed.
		boolean signed = (bitsPerSample > 8);

		// TODO: If possible create WavAudioFormat only when
		// necessary, otherwise create AudioFormat.
		format = new WavAudioFormat(encodingString,
					    sampleRate,
					    bitsPerSample,
					    channels,
					    /*frameSizeInBits=*/blockAlign * 8,
					    avgBytesPerSec,
					    AudioFormat.LITTLE_ENDIAN,
					    signed ? AudioFormat.SIGNED : AudioFormat.UNSIGNED,
					    Format.NOT_SPECIFIED, // No FRAME_RATE specified
					    Format.byteArray,
					    codecSpecificHeader);
	    }
	    return format;
	}

	/**
	 * Debugging aid: dumps the parsed fields to stdout, then returns
	 * the default Object representation.
	 */
	public String toString() {
	    System.out.println("Audio Media: " + format);
	    System.out.println("Number of channels " + channels);
	    System.out.println("average bytes per second " + avgBytesPerSec);
	    System.out.println("sampleRate " + sampleRate);
	    System.out.println("blockAlign " + blockAlign);
	    System.out.println("bitsPerSample " + bitsPerSample);
	    System.out.println("formatTag " + formatTag);
	    return super.toString();
	}
    }

    /**
     * Video media description, populated from the BITMAPINFO-style
     * fields of the video stream-format chunk.
     */
    private class Video extends Media {
	int size;
	int width;
	int height;
	int planes;
	int depth;
	String compressor;            // fourcc of the compressor
	VideoFormat format = null;    // lazily created, then cached
	BitMapInfo bitMapInfo = null;


	/**
	 * Lazily builds (and caches) the JMF video format.  When the
	 * per-frame duration is known (usecPerFrame != 0) the frame rate
	 * is derived from it; otherwise no frame rate is specified.
	 */
	Format createFormat() {
	    if (format == null) {
		if (usecPerFrame == 0) {
		    format = bitMapInfo.createVideoFormat(Format.byteArray);
		} else {
		    // frames/sec = 1e6 / microseconds-per-frame
		    float frameRate = (float) ((1.0/usecPerFrame)*1000000);
		    format = bitMapInfo.createVideoFormat(Format.byteArray,
							  frameRate);
		}
	    }
	    return format;
	}

	/**
	 * Debugging aid: dumps the parsed fields to stdout, then returns
	 * the default Object representation.
	 */
	public String toString() {
	    System.out.println("size is " + size);
	    System.out.println("width is " + width);
	    System.out.println("height is " + height);
	    System.out.println("planes is " + planes);
	    System.out.println("depth is " + depth);
	    System.out.println("compressor is " + compressor);

	    return super.toString();
	}

    }


    /**
     * Per-track state parsed from the stream header ('strh') chunk plus
     * the portion of the idx1 index that belongs to this track.
     */
    private class TrakList {
	Time duration = Duration.DURATION_UNKNOWN;

	String trackType;               // Chunk identifier
	String streamHandler;           // Device handler identifier
	int flags;                      // Data Parameters
	int priority;                   // Set to 0
	int initialFrames;              // Number of initial audio frames
	int scale;                      // Unit used to measure time
	int rate;                       // Data rate of playback
	int start;                      // Starting time of AVI data
	int length;                     // Size of AVI data chunk
	int suggestedBufferSize;        // Minimum playback buffer size
	int quality;                    // Sample quality factor
	int sampleSize;                 // Size of the sample in bytes

	Media media; // Info specific to each track type

	// From the implementation
	// Can be used as a debugging aid to disable a track
	boolean supported = true; // Is this track type supported

	AVIIndexEntry[] chunkInfo = new AVIIndexEntry[0];
	int maxChunkIndex = 0;

	int[] indexToKeyframeIndex = new int[0];
	int[] keyFrames = new int[0];

	// boolean allKeyFrames = false;
	int numKeyFrames = 0;
	int tmpCumulativeChunkLength = 0;


	/**
	 * Maps a byte offset within this track's data to the chunk that
	 * contains it.
	 *
	 * @param offset byte offset from the start of the track's data
	 * @return the index of the first chunk whose cumulative length
	 *         exceeds offset, or maxChunkIndex when the offset lies
	 *         beyond the last chunk (EOM)
	 */
	int getChunkNumber(int offset) {
	    // Binary search (was a linear scan; see former TODO).
	    // cumulativeChunkLength is non-decreasing because chunk
	    // lengths are non-negative, so a lower-bound search returns
	    // the same index the linear first-match scan did.
	    int lo = 0;
	    int hi = maxChunkIndex;   // search the half-open range [lo, hi)
	    while (lo < hi) {
		int mid = (lo + hi) >>> 1; // >>> avoids (lo+hi) overflow
		if (offset < chunkInfo[mid].cumulativeChunkLength) {
		    hi = mid;
		} else {
		    lo = mid + 1;
		}
	    }
	    return lo; // == maxChunkIndex when past the last chunk (EOM)
	}
    }

    // TODO extend BasicTrack if possible
    private abstract class MediaTrack implements Track {
	protected TrakList trakInfo;
	private boolean enabled = true;
	private int numBuffers = 4; // TODO: check
	private Format format;
	private long sequenceNumber = 0;
	private int chunkNumber = 0;
	protected int useChunkNumber = 0;
	protected int offsetWithinChunk = -1;
	protected int useOffsetWithinChunk = 0;
	private AviParser parser = AviParser.this;
	private AVIIndexEntry indexEntry;
	private Object header = null;
	private TrackListener listener;

	MediaTrack(TrakList trakInfo) {
	    this.trakInfo = trakInfo;
	    format = trakInfo.media.createFormat();
	}


	public void setTrackListener(TrackListener l) {
	    listener = l;
	}


	public Format getFormat() {
	    return format;
	}
	
	
	public void setEnabled(boolean t) {
	    enabled = t;
	}
	
	public boolean isEnabled() {
	    return enabled;
	}
	
	public Time getDuration() {
	    return trakInfo.duration;
	}
	
	
	public Time getStartTime() {
	    return new Time(0); // TODO
	}
	
 	synchronized void setChunkNumberAndOffset(int number, int offset) {
 	    chunkNumber = number;
	    offsetWithinChunk = offset;
 	}


	public void readFrame(Buffer buffer) {
	    if (buffer == null)
		return;
	    
	    if (!enabled) {
		buffer.setDiscard(true);
		return;
	    }
	    
	    synchronized (this) {
		if (offsetWithinChunk == -1) {
		    useOffsetWithinChunk = 0;
		} else {
		    useOffsetWithinChunk = offsetWithinChunk;
		    offsetWithinChunk = -1; // Reset offsetWithinChunk
		}
		useChunkNumber = chunkNumber;
	    }

	    // TODO: handle chunkNumber < 0 case differently
 	    if ( (useChunkNumber >= trakInfo.maxChunkIndex) ||
 		 (useChunkNumber < 0 ) ) {
		buffer.setLength(0);
		buffer.setEOM(true);
 		return;
 	    }
	    buffer.setFormat(format);

	    indexEntry = trakInfo.chunkInfo[useChunkNumber];


	    int chunkLength = indexEntry.chunkLength;

	    Object obj = buffer.getData();
	    byte[] data;

	    buffer.setHeader(new Integer(indexEntry.flag));

	    if  ( (obj == null) ||
		  (! (obj instanceof byte[]) ) ||
		  ( ((byte[])obj).length < chunkLength) ) {
		data = new byte[chunkLength];
		buffer.setData(data);
	    } else {
		data = (byte[]) obj;
	    }

	    try {
		int actualBytesRead;
		synchronized (seekSync) {
		    seekableStream.seek(indexEntry.chunkOffset
					+ moviOffset +
					useOffsetWithinChunk);
		    actualBytesRead = parser.readBytes(stream, data,
                                   chunkLength - useOffsetWithinChunk);
		    offsetWithinChunk = 0;
		    buffer.setTimeStamp(getTimeStamp());
		}
		buffer.setLength(actualBytesRead);
		long frameDuration = Buffer.TIME_UNKNOWN;
		if (trakInfo.trackType.equals(VIDEO)) {
		    if (nanoSecPerFrame > 0)
			frameDuration = nanoSecPerFrame;
		    if (
			// All Frames are key frames
			(trakInfo.indexToKeyframeIndex.length == 0) ||
			// or current frame is a key frame
			 (useChunkNumber == trakInfo.indexToKeyframeIndex[useChunkNumber])
			) {
			buffer.setFlags(buffer.getFlags() | Buffer.FLAG_KEY_FRAME);
		    }
		}
		buffer.setDuration(frameDuration);
		buffer.setSequenceNumber(++sequenceNumber);
	    } catch (IOException e) {
		buffer.setLength(0);
		buffer.setEOM(true);
	    }
	    synchronized(this) {
		if (chunkNumber == useChunkNumber) // Not changed by setPosition()
		    chunkNumber++;
	    }
	}
	
	abstract void doReadFrame(Buffer buffer);

	public int mapTimeToFrame(Time t) {
	    return FRAME_UNKNOWN;
	}
	
	public Time mapFrameToTime(int frameNumber) {
	    return TIME_UNKNOWN;
	}
	
	abstract long getTimeStamp();
    }

    /**
     * Audio track: timestamps are derived from the number of bytes of
     * audio data that precede the current read position.
     */
    private class AudioTrack extends MediaTrack  {
	int channels;
	int avgBytesPerSec;
	AVIIndexEntry[] chunkInfo;

	AudioTrack(TrakList trakInfo) {
	    super(trakInfo);
	    Audio audio = (Audio) trakInfo.media;
	    channels = audio.channels;
	    avgBytesPerSec = audio.avgBytesPerSec;
	    chunkInfo = trakInfo.chunkInfo;
	}

	void doReadFrame(Buffer buffer) {
	}

	/**
	 * Nanosecond timestamp of the current position: bytes consumed
	 * so far divided by the average byte rate.  Returns 0 when the
	 * byte rate is unknown.
	 */
	long getTimeStamp() {
	    if (avgBytesPerSec <= 0)
		return 0;
	    // Bytes consumed = all complete previous chunks plus the
	    // offset into the current one.
	    long bytes = useOffsetWithinChunk;
	    if (useChunkNumber > 0)
		bytes += chunkInfo[useChunkNumber - 1].cumulativeChunkLength;
	    return (long) (((float) bytes / avgBytesPerSec) * 1E9);
	}
    }

    /**
     * Video track: timestamps and time/frame mapping are derived from
     * the fixed per-frame duration in the AVI main header.
     */
    private class VideoTrack extends MediaTrack  {
	int needBufferSize;
	boolean variableSampleSize = true;

	VideoTrack(TrakList trakInfo) {
	    super(trakInfo);
	}

	void doReadFrame(Buffer buffer) {
	}

	/**
	 * Nanosecond timestamp of the current chunk:
	 * frame number * microseconds-per-frame * 1000.
	 */
	long getTimeStamp() {
	    // Widen BEFORE multiplying: the original int product
	    // useChunkNumber * usecPerFrame wraps past 2^31 (e.g. at
	    // ~33367 usec/frame that is only ~64k frames, ~36 minutes
	    // of 30fps video) before the * 1000L promoted it to long.
	    return ((long) useChunkNumber) * usecPerFrame * 1000L;
	}

	/**
	 * Maps a media time to the frame that is playing at that time,
	 * clamped to the last frame; FRAME_UNKNOWN when the frame rate
	 * is unknown or the time is negative.
	 */
	public int mapTimeToFrame(Time t) {
	    if (nanoSecPerFrame <= 0)
		return FRAME_UNKNOWN;

	    if (t.getNanoseconds() < 0)
		return FRAME_UNKNOWN;

	    int chunkNumber = (int) (t.getNanoseconds() / nanoSecPerFrame);

	    // Times past the end map to the final frame.
	    if (chunkNumber >= trakInfo.maxChunkIndex)
		return trakInfo.maxChunkIndex - 1;
	    return chunkNumber;
	}

	/**
	 * Maps a frame number to its start time; TIME_UNKNOWN for frame
	 * numbers outside [0, maxChunkIndex).
	 */
	public Time mapFrameToTime(int frameNumber) {
	    if ( (frameNumber < 0) || (frameNumber >= trakInfo.maxChunkIndex) )
		return TIME_UNKNOWN;

	    // Widen first so the product cannot overflow int arithmetic
	    // (no-op if nanoSecPerFrame is already a long).
	    long time = ((long) frameNumber) * nanoSecPerFrame;
	    return new Time(time);
	}
    }
    
    // An Index Chunk has the identifier idx1 and must appear after hdrl and
    // movi chunks. This chunk contains a list of all chunks within the AVI
    // chunk, along with their locations, and is used for random access of audio
    // and video data.
    //
    /**
     * One entry of the idx1 index: identifies a single data chunk, its
     * location and size, plus a running byte total derived while the
     * index is read.
     */
    private class AVIIndexEntry {
	public String id;                       // Chunk identifier reference
	public int flag;                       // Type of Chunk referenced
	public int chunkOffset;                // Position of Chunk in file
	public int chunkLength;                // Length of chunk in bytes
	// Running sum of chunkLength up to and including this entry.
	// Currently only audio track uses cumulativeChunkLength
	public int cumulativeChunkLength = 0;       // Derived data
    }

}