/* ##C_FILE#
   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
   FILE               : AviFunc.c

   DESCRIPTION        : Implementation file of the AVI management module : VDO
                        Low level functions used by "AviFunc.c"
   CAUTION            : this module is in charge of working with AVIs which contain
                        sound tracks in "ADPCM" ("AVI" of type "POD")
                        Multi-threading can't be used with "Winmm", so the functions
                        "AVIFileInit( )" and "AVIFileExit( )" must be called from the same task.

   VERSION            : 1.00/Nicolas Meyer/Creation
   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

/* ##INCLUDE#----------------------------------------------------------------------------
   Include files
   ---------------------------------------------------------------------------------------*/
#include "AviFunc.h"


// Configuration of the compilation
// Management of a spy (debug) file
//#define _USE_FILE_DBG

// Selection of the decompression function (API or decompression module)
//#define _USE_DECOMPRESS

// Selection of windowed mode
// _USE_WINDOWS_DISPLAY

/* ##GLOBVAR#----------------------------------------------------------------------------
   Global variable declarations
   ---------------------------------------------------------------------------------------*/
static HINSTANCE ghInstance; // Instance of the application
static char gstrDir[ 256 ]; // String containing the working directory
static unsigned long gnType;

static unsigned long gnBankIndice;

// Variables about the "AVI" file
static PAVIFILE gpAviFile = NULL; // Interface of the AVI file
static AVIFILEINFO gAviInfo; // Information about the AVI file
static PAVISTREAM gpAviStream[ MAX_STREAM_TYPE ]; // Interface array of AVI streams
static AVISTREAMINFO gAviStreamInfo[ MAX_STREAM_TYPE ]; // Information array about each stream
static long gnNbVideoStream; // Number of video streams ( -1 = no video )
static unsigned long gnIndiceVideo[ NB_STREAM_PERTYPE ]; // Index array of the stream interfaces in charge of video
static long gnNbAudioStream; // Number of audio streams ( -1 = no audio )
static unsigned long gnIndiceAudio[ NB_STREAM_PERTYPE ]; // Index array of the stream interfaces in charge of audio
static long gnAudioTongue; // Index of the audio stream ( language )

// Variables for audio management
static LPWAVEFORMATEX gpWaveFormatexIn; // Pointer on a structure describing the incoming sound data
static WAVEFORMATEX gWaveFormatexOut; // Structure describing the sound data to be provided to the client buffers
static long gnVoice = 0; // Index of the voice returned by "SendRequestSound"
static long gnLineSound = SND_C_LINE_FANTOME;
static long gnTypeSound;
static double gdLastTime; // Time of the last refresh

// Specific variables for AVIs which contain at least one sound channel

// Management variables for audio decompression
// ( in the case where we use the sound track of an AVI in ADPCM )
static HACMDRIVER ghAcmDriver = NULL; // Handle on a decompression driver
static HACMSTREAM ghAcmStream = NULL; // Handle on an ACM "stream"
static ACMSTREAMHEADER gAcmStreamHeader[ NB_BUFFERS_CONVERT ]; // Array of decompression headers

static unsigned char* gpAcmUnConvertData = NULL; // Pointer on the first sample buffer in ADPCM
static unsigned char* gpAcmConvertData = NULL; // Pointer on the first sample buffer in PCM

// Variables containing the indexes of the buffers of data to be converted
static unsigned long gnConvertIndiceBuffer; // Index of the buffer of converted data
static unsigned long gnUnConvertIndiceBuffer; // Index of the buffer of unconverted data

static unsigned long gnSizeSampleConvertBuffer; // Size ( in samples ) of a buffer of converted data
static unsigned long gnSizeSampleUnConvertBuffer; // Size ( in samples ) of a buffer of unconverted data
static unsigned long gnSizeSampleConvertAllBuffers; // Total size for all the converted buffers
static unsigned long gnSizeSampleUnConvertAllBuffers; // Total size for all the unconverted buffers
static long gnBufferID; // Stores the ID of the client buffer

// Variables describing the positions of the various data
static long gnAviReadPosition; // CAUTION: TO OBTAIN THE POSITION IN SAMPLES YOU MUST DIVIDE BY "nBlockAlign"
static long gnAviReadPositionLast;
static long gnConvertPosition;
static long gnConvertPositionLast;

// Variables in charge of the synchronisation between sound and picture
static unsigned long gnVideoPosition;
static unsigned long gnVideoPositionLast;
static unsigned long gnVideoPositionSkew;
static unsigned long gnSoundPerImage; // Number of audio samples between 2 video frames ( video period in audio samples )

// Times in milliseconds
static unsigned long gnVideoTimeStart; // Time of the buffer beginning
static unsigned long gnVideoTimePerImage; // Period between two pictures
static unsigned long gnVideoTimeLast; // Display time of the last picture, rounded down to a multiple of the period
static unsigned long gnVideoTimeSkew; // Duration of the skew if necessary
static unsigned long gnVideoTimePauseStart; // Time of the beginning of the pause


static BITMAPINFO gBmpFormat; // Structure describing the video data with its compression format
static LPBITMAPINFOHEADER gpDecompressBHeader = NULL; // Pointer on the decompressed video frame

static PGETFRAME gpGetVideoFrame = NULL; // Pointer on a frame structure

// Variables in charge of multithreading and "callbacks"
static CRITICAL_SECTION gCriticalSection; // Critical section
static CRITICAL_SECTION gCriticalSectionPlay; // Critical section for mutual exclusion of the three functions "Play", "Stop" and "IsRunning"
static CRITICAL_SECTION gCriticalSectionPause; // Critical section for mutual exclusion of the two functions "Pause" and "Resume"
static SECURITY_ATTRIBUTES gAttributes;
static HANDLE ghEventInitDone; // Event which indicates the end of the video module initialisation
//static HANDLE ghEventSynchro; // Event for synchronising the video with the sound engine

// Variables for avoiding reentrancy and system lock-ups
static SndBool gbAudioThread; // Boolean used to request the end of the task
static BOOL gbInitDone = FALSE; // Boolean used to indicate the end of the initialisation
static BOOL gbFlagStop = FALSE; // Flag used when a "STOP" is requested
static BOOL gbPauseDone = FALSE; // Flag indicating that a pause is in progress
static HANDLE ghThread = NULL; // Handle on a background task


#ifdef _USE_WINDOWS_DISPLAY//======================= BEGIN _USE_WINDOWS_DISPLAY =================================
// Windowed display
static HWND ghVideoWnd; // Handle of the video display window ( resources )
static SIZE gDstWndSize; // Structure describing the window
static HDRAWDIB ghDrawDib; // Handle of the DrawDib context
static HDC ghDC; // Handle of the device context
#else

#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
// Glide-style display (not used for F1)
static GLD_tdhDevice ghDevice; // "Device" variable used by "GLD"
static GLD_tdhViewport ghViewPort; // "ViewPort" variable used by "GLD"
static void* ghDrawSem; // Handle of a semaphore which manages the display
#endif//============================================ END VDO_FOR_F1 =================================

#endif//============================================ END _USE_WINDOWS_DISPLAY =================================


#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
static FILE* gpFile;
static unsigned long gnCumulateTime;
static unsigned long gnNbFrame;
#endif//============================================ END _USE_FILE_DBG =================================

/* ##FUNCDEF#----------------------------------------------------------------------------
   Private function declarations
   ---------------------------------------------------------------------------------------*/
// Set of "Callback" functions used in the creation of a new type of object
SndReal AviRetObjetSonorePosVDO( long indice );
unsigned char AviRetObjetSonoreReverbVDO( long indice );
void AviRetObjetSonoreExtraCoefVDO( long indice, SND_tduRefEvt evt, SndReal* pitch, SndReal* pan, SndReal* vol );
long AviRetObjetSonoreSwitchVDO( long indice, long type_switch );
SndBool AviRetObjetSonoreMicroLinkVDO( long indice, long micro );
void AviRetObjetSonoreInfoVDO( long indice, char* texte, long size );

// Callbacks in use
BOOL CALLBACK AviAcmDriverCallback( HACMDRIVERID hDriverId, DWORD dwInstance, DWORD dwSupport );
void SND_CALL AviCompleteBufferClientCallback( long nID, unsigned long nFirstSample, unsigned long nNb, void* pvFirstSample );

// Bodies of the background tasks
#if defined( __WATCOMC__ ) && ( __WATCOMC__ < 1100 )
void AviVideoThread( void* pParam );
void AviVideoSoundThread( void* pParam );
#else
void /*__cdecl*/ AviVideoThread( void* pParam ); // Task used when we synchronise with BNM
void /*__cdecl*/ AviVideoSoundThread( void* pParam ); // Task used when we synchronise with the sound channel
#endif

/* ##F===================================================================================
   NAME               : AviGetVideoDimensions
   DESCRIPTION        : Return the video window dimensions
   INPUT              :
   OUTPUT             : size
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
SIZE AviGetVideoDimensions( )
{
	SIZE size;

	size.cx = gAviInfo.dwWidth;
	size.cy = gAviInfo.dwHeight;

	return size;
}/*AviGetVideoDimensions*/
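
/*
   Illustrative sketch (not part of the build): how a hypothetical caller might use the
   three getters of this file to size its render target. The caller name is an assumption;
   note that the values are only meaningful once AviInitModule( ) has filled "gAviInfo".
*/
#if 0
static void ExampleQueryVideoSize( void )
{
	SIZE size = AviGetVideoDimensions( );	// same values as AviGetVideoWidth( ) / AviGetVideoHeight( )
	unsigned long nW = AviGetVideoWidth( );
	unsigned long nH = AviGetVideoHeight( );

	// e.g. allocate an nW x nH back buffer here
	(void)size; (void)nW; (void)nH;
}
#endif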

/* ##F===================================================================================
   NAME               : AviGetVideoHeight
   DESCRIPTION        : Return the height of the video window
   INPUT              :
   OUTPUT             : height
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
unsigned long AviGetVideoHeight( )
{
	return gAviInfo.dwHeight;
}/*AviGetVideoHeight*/

/* ##F===================================================================================
   NAME               : AviGetVideoWidth
   DESCRIPTION        : Return the width of the video window
   INPUT              :
   OUTPUT             : width
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
unsigned long AviGetVideoWidth( )
{
	return gAviInfo.dwWidth;
}/*AviGetVideoWidth*/

/* ##F===================================================================================
   NAME               : AviDesInitModule
   DESCRIPTION        : Uninitialise the AVI handling module
   INPUT              :
   OUTPUT             :
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
void AviDesInitModule( )
{
	long nCpt;
	unsigned long nResult;

	switch( gnType ) {
	case AVI_SYNCH_OBJECT:
		//=============================================================================
		// VERY VERY VERY VERY IMPORTANT: if this function is not called we don't unload
		// the 9 MB binary bank of "ED"
		//=============================================================================
		nResult = SND_fn_bUnLoadBinaryBank( gnBankIndice );
		break;

	case AVI_SYNCH_STREAM:
	case AVI_SYNCH_NOSOUND:
		if( gpWaveFormatexIn ) {
			if( gWaveFormatexOut.wFormatTag != gpWaveFormatexIn->wFormatTag ) {
				//=============================================================================
				// Data buffer management in ADPCM
				//=============================================================================
				unsigned long nCpt;

				if( gAcmStreamHeader->cbStruct ) {
					for( nCpt = 0; nCpt < NB_BUFFERS_CONVERT; nCpt++ ) {
						nResult = acmStreamUnprepareHeader( ghAcmStream, & gAcmStreamHeader[ nCpt ], 0 );
					}
					memset( gAcmStreamHeader, 0, NB_BUFFERS_CONVERT * sizeof(ACMSTREAMHEADER) );
				}
				if( ghAcmStream ) {
					acmStreamClose( ghAcmStream, 0 );
					ghAcmStream = NULL;
				}
				if( ghAcmDriver ) {
					acmDriverClose( ghAcmDriver, 0 );
					ghAcmDriver = NULL;
				}
				if( gpAcmUnConvertData ) {
					free( gpAcmUnConvertData );
					gpAcmUnConvertData = NULL;
					gpAcmConvertData = NULL;
				}
			}
			else {
				//=============================================================================
				// Data buffer management in PCM
				//=============================================================================
				if( gpAcmConvertData ) {
					free( gpAcmConvertData );
					gpAcmConvertData = NULL;
				}
			}
		}
		break;
	}

#ifdef _USE_WINDOWS_DISPLAY//======================= BEGIN _USE_WINDOWS_DISPLAY =================================
	// Release the "HDRAWDIB" resources
	if( ghDrawDib )
		DrawDibClose( ghDrawDib );
#endif//============================================ END _USE_WINDOWS_DISPLAY =================================

#ifdef _USE_DECOMPRESS//============================ BEGIN _USE_DECOMPRESS =================================
	// Uninitialisation of the video decompression module
	DecompressDesInitModule( );
#else
	if( gpGetVideoFrame ) {
		nResult = AVIStreamGetFrameClose( gpGetVideoFrame );
		gpGetVideoFrame = NULL;
	}
#endif//============================================ END _USE_DECOMPRESS =================================

	// Uninitialisation of the video and sound channels
	for( nCpt = 0; nCpt < gnNbVideoStream + gnNbAudioStream; nCpt++ ) {
		nResult = AVIStreamRelease( gpAviStream[ nCpt ] );
		memset( & gAviStreamInfo[ nCpt ], 0, sizeof(AVISTREAMINFO) );
	}
	if( gpAviFile ) {
		nResult = AVIFileRelease( gpAviFile );
		gpAviFile = NULL;
	}
	AVIFileExit( );
	if( gpWaveFormatexIn ) {
		free( gpWaveFormatexIn );
		gpWaveFormatexIn = NULL;
	}
	gnNbVideoStream = 0;
	gnNbAudioStream = 0;
	memset( & gBmpFormat, 0, sizeof(BITMAPINFO) );
	memset( & gWaveFormatexOut, 0, sizeof(WAVEFORMATEX) );
	gbInitDone = FALSE;

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
	fprintf( gpFile, "Average decompression time %d\n", gnCumulateTime / gnNbFrame );
	fprintf( gpFile, "Number of frames processed %d\n", gnNbFrame );
	fprintf( gpFile, "End of DesInit\n" );
	fprintf( gpFile, "===================================================\n" );
	fclose( gpFile );
#endif//============================================ END _USE_FILE_DBG =================================

}/*AviDesInitModule*/


/* ##F===================================================================================
   NAME               : AviInitModule
   DESCRIPTION        : Initialise the AVI handling module from a file name
   INPUT              : szFilename (char *)
   OUTPUT             : BOOL
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
BOOL AviInitModule( char* szFilename )
{
	char strPathName[ 256 ];
	long nNbBytes;
	unsigned long nCpt;

	// Specific variables for AVIs which contain at least one sound channel
	long nNbSamples;
	long nResult;
	DWORD nSizeUnConvert;
	ACMFORMATTAGDETAILS AcmFormatTag;

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
	gpFile = fopen( "Toto.log", "a" );
#endif//============================================ END _USE_FILE_DBG =================================

	// The initialisation was already done, so we don't do it a second time
	if( gbInitDone ) {
		SetEvent( ghEventInitDone );
		return FALSE;
	}

	if( ! szFilename )
		return FALSE;

	if( strlen( gstrDir ) ) {
		strcpy( strPathName, gstrDir );
		strcat( strPathName, "\\" );
		strcat( strPathName, szFilename );
	}
	else
		strcpy( strPathName, szFilename );

	//=============================================================================
	// Get information about the "AVI" file
	//=============================================================================
	// gnBankIndice = MAKELONG( 1, 0 );

	AVIFileInit( );
	if( AVIFileOpen( & gpAviFile, strPathName, OF_READ, NULL ) ) {
		gbInitDone = FALSE;
		AVIFileExit( );
		SetEvent( ghEventInitDone );
		return FALSE;
	}
	// free( strPathName );
	if( AVIFileInfo( gpAviFile, & gAviInfo, sizeof(AVIFILEINFO) ) ) {
		gbInitDone = FALSE;
		AVIFileRelease( gpAviFile );
		AVIFileExit( );
		SetEvent( ghEventInitDone );
		return FALSE;
	}

	//=============================================================================
	// Get information about the streams of the "AVI"
	//=============================================================================
	gnNbVideoStream = 0;
	gnNbAudioStream = 0;

	// Store an interface pointer on each "stream" ( channel ),
	// get a structure describing the detected "stream",
	// and count the "streams" by category
	for( nCpt = 0; nCpt < gAviInfo.dwStreams; nCpt++ ) {
		if( ! AVIFileGetStream( gpAviFile, & gpAviStream[ nCpt ], (long)0, nCpt ) ) {
			AVIStreamInfo( gpAviStream[ nCpt ], & gAviStreamInfo[ nCpt ], sizeof(AVISTREAMINFO) );

			switch( gAviStreamInfo[ nCpt ].fccType ) {
			case streamtypeVIDEO:	// mmioFOURCC('v', 'i', 'd', 's')
				gnIndiceVideo[ gnNbVideoStream ] = nCpt;
				gnNbVideoStream++;
				break;
			case streamtypeAUDIO:	// mmioFOURCC('a', 'u', 'd', 's')
				gnIndiceAudio[ gnNbAudioStream ] = nCpt;
				gnNbAudioStream++;
				break;
			case streamtypeMIDI:	// mmioFOURCC('m', 'i', 'd', 's')
			case streamtypeTEXT:	// mmioFOURCC('t', 'x', 't', 's')
				;
			}
		}
		// else {
		// }
	}

	if( gnNbVideoStream != 0 ) {
		//=============================================================================
		// INITIALISATION of the "DIB" OBJECT
		//=============================================================================
		nNbBytes = sizeof(BITMAPINFO);
		// Get an information structure about the video stream
		AVIStreamReadFormat( gpAviStream[ gnIndiceVideo[ 0 ] ], (long)0, & gBmpFormat, & nNbBytes );

		//=============================================================================
		// Initialisation of the video decompression module
		//=============================================================================
#ifdef _USE_DECOMPRESS//============================ BEGIN _USE_DECOMPRESS =================================
		DecompressInitModule( & gBmpFormat.bmiHeader, & gAviStreamInfo[ gnIndiceVideo[ 0 ] ], gpAviStream[ gnIndiceVideo[ 0 ] ] );
		gpDecompressBHeader = DecompressGetFrame( 0 );
#else
		gpGetVideoFrame = AVIStreamGetFrameOpen( gpAviStream[ gnIndiceVideo[ 0 ] ], NULL );
		if( ! gpGetVideoFrame ) {
			for( nCpt = 0; nCpt < gAviInfo.dwStreams; nCpt++ ) {
				AVIStreamRelease( gpAviStream[ nCpt ] );
				memset( & gAviStreamInfo[ nCpt ], 0, sizeof(AVISTREAMINFO) );
			}
			AVIFileRelease( gpAviFile );
			AVIFileExit( );
			SetEvent( ghEventInitDone );
			return FALSE;
		}
		// Initialise the first video frame
		gpDecompressBHeader = (LPBITMAPINFOHEADER)AVIStreamGetFrame( gpGetVideoFrame, 0 );
#endif//============================================ END _USE_DECOMPRESS =================================

		// INITIALISATION of the windowed mode
#ifdef _USE_WINDOWS_DISPLAY//======================= _USE_WINDOWS_DISPLAY =================================
		ghDrawDib = DrawDibOpen( );
		ghDC = GetDC( ghVideoWnd );
		gDstWndSize.cx = gAviInfo.dwWidth;
		gDstWndSize.cy = gAviInfo.dwHeight;

		PostMessage( ghVideoWnd, WM_RESIZE, HIWORD( & gDstWndSize ), LOWORD( & gDstWndSize ) );

		nResult = DrawDibBegin( ghDrawDib, ghDC, - 1, - 1, gpDecompressBHeader, gAviInfo.dwWidth, gAviInfo.dwHeight, 0 );
#endif//=========================================== _USE_WINDOWS_DISPLAY =================================

		// Period in milliseconds between two pictures
		gnVideoTimePerImage = gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwScale * 1000 / gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwRate;
		gnVideoTimeSkew = 0;
	}
	gpWaveFormatexIn = NULL;

	if( ( gnNbAudioStream != 0 ) && ( gnType != AVI_SYNCH_NOSOUND ) ) {
		//=============================================================================
		// INITIALISATION OF SOUND PARAMETERS
		//=============================================================================

		// Default language index ( English in most cases ) if the parameter is incorrect
		if( gnAudioTongue >= gnNbAudioStream )
			gnAudioTongue = 0;

		// Get the size of the structure describing how the sound is stored.
		// Several formats are defined in "mmreg.h"
		nResult = AVIStreamReadFormat( gpAviStream[ gnIndiceAudio[ gnAudioTongue ] ], (long)0, NULL, & nNbBytes );
		gpWaveFormatexIn = (LPWAVEFORMATEX)malloc( nNbBytes );
		// We don't know yet which format encodes the data, so we must allow for a structure
		// larger than the plain "WAVEFORMATEX"
		nResult = AVIStreamReadFormat( gpAviStream[ gnIndiceAudio[ gnAudioTongue ] ], (long)0, gpWaveFormatexIn, & nNbBytes );
		//=============================================================================
		// DON'T DO THIS !!!!!!!!!
		// because the field "cbSize" is used by the "free" of the "malloc" when
		// "gpWaveFormatexIn" is released
		// if( gpWaveFormatexIn->wFormatTag == WAVE_FORMAT_PCM )
		//     gpWaveFormatexIn->cbSize = 0;
		//=============================================================================

		gWaveFormatexOut.wFormatTag = WAVE_FORMAT_PCM;
		gWaveFormatexOut.nChannels = gpWaveFormatexIn->nChannels;
		gWaveFormatexOut.nSamplesPerSec = gpWaveFormatexIn->nSamplesPerSec;
		gWaveFormatexOut.nAvgBytesPerSec = gpWaveFormatexIn->nSamplesPerSec * gpWaveFormatexIn->nChannels * 2;
		gWaveFormatexOut.nBlockAlign = gpWaveFormatexIn->nChannels * 2;
		gWaveFormatexOut.wBitsPerSample = 16;
		gWaveFormatexOut.cbSize = 0;
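
		/*
		   Worked example of the output format above (illustrative figures): for a stereo
		   stream at 22050 Hz, nBlockAlign = nChannels * 2 = 4 bytes per sample frame and
		   nAvgBytesPerSec = 22050 * 2 * 2 = 88200 bytes/s. This nBlockAlign is also the
		   divisor mentioned in the CAUTION on "gnAviReadPosition" to convert bytes to samples.
		*/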

		// The success code is "MMSYSERR_NOERROR" in "mmsystem.h"
		//=============================================================================
		// INITIALISATION OF DECOMPRESSION PARAMETERS
		//=============================================================================
		gnSoundPerImage = (long)( gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwScale * gWaveFormatexOut.nSamplesPerSec / gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwRate );
		gnVideoPosition = 0;
		gnVideoPositionLast = 0;
		// gnVideoPositionSkew = (long)( ( gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwInitialFrames ) * gnSoundPerImage/* + gnSizeSampleConvertAllBuffers*/ );
		gnVideoPositionSkew = 0;
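
		/*
		   Worked example (illustrative figures): for a 15 fps video stream ( dwRate = 15,
		   dwScale = 1 ) and 22050 Hz audio, gnSoundPerImage = 1 * 22050 / 15 = 1470 audio
		   samples per video frame; the playback task advances one frame every time the
		   sound position moves by that many samples.
		*/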

		if( gWaveFormatexOut.wFormatTag != gpWaveFormatexIn->wFormatTag ) {

			// Enumerate all the conversion drivers and store the handle of the matching one
			nResult = acmDriverEnum( AviAcmDriverCallback, (unsigned long)ghInstance, ACM_DRIVERENUMF_DISABLED );
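
			/*
			   The enumeration above relies on "AviAcmDriverCallback" (declared at the top of
			   this file and defined further down). A minimal sketch of such a callback, under
			   the assumption that it simply opens the first suitable codec driver into
			   "ghAcmDriver" and then stops the enumeration, could look like this:

			       BOOL CALLBACK AviAcmDriverCallback( HACMDRIVERID hDriverId, DWORD dwInstance, DWORD dwSupport )
			       {
			           if( ( dwSupport & ACMDRIVERDETAILS_SUPPORTF_CODEC ) && ( ghAcmDriver == NULL ) )
			               acmDriverOpen( & ghAcmDriver, hDriverId, 0 );
			           return ( ghAcmDriver == NULL );	// keep enumerating until a driver is open
			       }
			*/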

			// Case where an ACM driver was detected
			if( ghAcmDriver ) {

				memset( & AcmFormatTag, 0, sizeof(ACMFORMATTAGDETAILS) );
				AcmFormatTag.cbStruct = sizeof(ACMFORMATTAGDETAILS);
				AcmFormatTag.dwFormatTag = WAVE_FORMAT_ADPCM;

				nResult = acmFormatTagDetails( ghAcmDriver, & AcmFormatTag, ACM_FORMATTAGDETAILSF_FORMATTAG );

				// Open an "ADPCM" decompression channel in synchronous mode ( without any "Callback" )
				nResult = acmStreamOpen( & ghAcmStream, ghAcmDriver, gpWaveFormatexIn, & gWaveFormatexOut, NULL, (unsigned long)0, (unsigned long)0, 0 );

				// Get the size in bytes of the source buffer of unconverted data as a function of the destination one
				nResult = acmStreamSize( ghAcmStream,
					gWaveFormatexOut.nBlockAlign * SIZE_BUFFER_SAMPLE_CONVERT,	// Size in bytes of the output buffer
					& nSizeUnConvert,
					ACM_STREAMSIZEF_DESTINATION );

				// Allocation of "NB_BUFFERS_CONVERT" buffers of "SIZE_BUFFER_SAMPLE_CONVERT" samples each
				gpAcmUnConvertData = (unsigned char*)malloc( ( nSizeUnConvert + gWaveFormatexOut.nBlockAlign * SIZE_BUFFER_SAMPLE_CONVERT ) * NB_BUFFERS_CONVERT );
				gpAcmConvertData = gpAcmUnConvertData + nSizeUnConvert * NB_BUFFERS_CONVERT;

				// Fill all the unconverted sample buffers with ADPCM data
				nResult = AVIStreamRead( gpAviStream[ gnIndiceAudio[ gnAudioTongue ] ],
					gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart + 0,
					(long)SIZE_ALL_BUFFERS / gpWaveFormatexIn->nBlockAlign,
					gpAcmUnConvertData,
					(long)( nSizeUnConvert * NB_BUFFERS_CONVERT ),
					& nNbBytes,
					& nNbSamples );

				gnSizeSampleConvertAllBuffers = 0;
				gnSizeSampleUnConvertAllBuffers = 0;

				// Fill all the decompression headers, then decompress
				gAcmStreamHeader[ 0 ].cbStruct = sizeof(ACMSTREAMHEADER);
				gAcmStreamHeader[ 0 ].fdwStatus = 0;
				gAcmStreamHeader[ 0 ].pbSrc = gpAcmUnConvertData;
				gAcmStreamHeader[ 0 ].cbSrcLength = nSizeUnConvert;
				gAcmStreamHeader[ 0 ].cbSrcLengthUsed = 0;
				gAcmStreamHeader[ 0 ].pbDst = gpAcmConvertData;
				gAcmStreamHeader[ 0 ].cbDstLength = SIZE_BUFFER_SAMPLE_CONVERT * gWaveFormatexOut.nBlockAlign;
				gAcmStreamHeader[ 0 ].cbDstLengthUsed = 0;
				nResult = acmStreamPrepareHeader( ghAcmStream, & gAcmStreamHeader[ 0 ], 0 );
				nResult = acmStreamConvert( ghAcmStream, & gAcmStreamHeader[ 0 ], ACM_STREAMCONVERTF_BLOCKALIGN );
				// Store the size of the converted blocks in the user data
				gAcmStreamHeader[ 0 ].dwUser = gAcmStreamHeader[ 0 ].cbDstLengthUsed / gWaveFormatexOut.nBlockAlign;
				gAcmStreamHeader[ 0 ].dwSrcUser = gAcmStreamHeader[ 0 ].cbSrcLengthUsed;
				gAcmStreamHeader[ 0 ].dwDstUser = gAcmStreamHeader[ 0 ].cbDstLengthUsed;

				gnSizeSampleConvertAllBuffers += gAcmStreamHeader[ 0 ].dwUser;
				gnSizeSampleUnConvertAllBuffers += gAcmStreamHeader[ 0 ].dwSrcUser;	// "/ gpWaveFormatexIn->nBlockAlign"

				for( nCpt = 1; nCpt < NB_BUFFERS_CONVERT; nCpt++ ) {
					gAcmStreamHeader[ nCpt ].cbStruct = sizeof(ACMSTREAMHEADER);
					gAcmStreamHeader[ nCpt ].fdwStatus = 0;
					gAcmStreamHeader[ nCpt ].pbSrc = gAcmStreamHeader[ nCpt - 1 ].pbSrc + gAcmStreamHeader[ nCpt - 1 ].cbSrcLengthUsed;
					gAcmStreamHeader[ nCpt ].cbSrcLength = nSizeUnConvert;
					gAcmStreamHeader[ nCpt ].cbSrcLengthUsed = 0;
					gAcmStreamHeader[ nCpt ].pbDst = gAcmStreamHeader[ nCpt - 1 ].pbDst + gAcmStreamHeader[ nCpt - 1 ].cbDstLengthUsed;
					gAcmStreamHeader[ nCpt ].cbDstLength = SIZE_BUFFER_SAMPLE_CONVERT * gWaveFormatexOut.nBlockAlign;
					gAcmStreamHeader[ nCpt ].cbDstLengthUsed = 0;
					nResult = acmStreamPrepareHeader( ghAcmStream, & gAcmStreamHeader[ nCpt ], 0 );
					nResult = acmStreamConvert( ghAcmStream, & gAcmStreamHeader[ nCpt ], ACM_STREAMCONVERTF_BLOCKALIGN );
					gAcmStreamHeader[ nCpt ].dwUser = gAcmStreamHeader[ nCpt ].cbDstLengthUsed / gWaveFormatexOut.nBlockAlign;
					gAcmStreamHeader[ nCpt ].dwSrcUser = gAcmStreamHeader[ nCpt ].cbSrcLengthUsed;
					gAcmStreamHeader[ nCpt ].dwDstUser = gAcmStreamHeader[ nCpt ].cbDstLengthUsed;

					gnSizeSampleConvertAllBuffers += gAcmStreamHeader[ nCpt ].dwUser;
					gnSizeSampleUnConvertAllBuffers += gAcmStreamHeader[ nCpt ].dwSrcUser;	// "/ gpWaveFormatexIn->nBlockAlign"
				}
				gnSizeSampleConvertBuffer = gnSizeSampleConvertAllBuffers / NB_BUFFERS_CONVERT;
				gnSizeSampleUnConvertBuffer = gnSizeSampleUnConvertAllBuffers / NB_BUFFERS_CONVERT;

				// Fill the unconverted data read-ahead buffer
				gnAviReadPosition = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart + NB_BUFFERS_CONVERT * gnSizeSampleUnConvertBuffer;
				gnAviReadPositionLast = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart + NB_BUFFERS_CONVERT * gnSizeSampleUnConvertBuffer;

				nNbSamples = NB_BUFFERS_CONVERT;
				nResult = AVIStreamRead( gpAviStream[ gnIndiceAudio[ gnAudioTongue ] ],
					gnAviReadPositionLast / gpWaveFormatexIn->nBlockAlign,
					(long)( SIZE_ALL_BUFFERS / gpWaveFormatexIn->nBlockAlign ),
					gpAcmUnConvertData,
					(long)( nSizeUnConvert * NB_BUFFERS_CONVERT ),
					& nNbBytes,
					& nNbSamples );

				gnConvertPosition = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart;	// Position from which the converted data is stored
				gnConvertPositionLast = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart;

				gnConvertIndiceBuffer = 0;
				// The index of the unconverted data buffers starts at half the total number of buffers
				gnUnConvertIndiceBuffer = NB_BUFFERS_CONVERT / 2;
				gnAviReadPosition += NB_BUFFERS_CONVERT * gnSizeSampleUnConvertBuffer / 2;
				gnAviReadPositionLast += NB_BUFFERS_CONVERT * gnSizeSampleUnConvertBuffer / 2;
			}
			else
				// If no ACM driver was detected, we play the AVI with no sound
				gnType = AVI_SYNCH_NOSOUND;
		}
		// Case where the data is not in ADPCM
		else {
			// The data is loaded in the callback
			gnSizeSampleConvertBuffer = SIZE_BUFFER_SAMPLE_CONVERT;
			gnSizeSampleConvertAllBuffers = SIZE_ALL_BUFFERS;

			gpAcmConvertData = (unsigned char*)malloc( gWaveFormatexOut.nBlockAlign * gnSizeSampleConvertAllBuffers );
			// Fill all the buffers with PCM data
			nResult = AVIStreamRead( gpAviStream[ gnIndiceAudio[ gnAudioTongue ] ],
				0,
				(long)( SIZE_ALL_BUFFERS ),
				gpAcmConvertData,
				(long)( SIZE_ALL_BUFFERS * gWaveFormatexOut.nBlockAlign ),
				& nNbBytes,
				& nNbSamples );

			gnAviReadPosition = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart + SIZE_ALL_BUFFERS;
			gnAviReadPositionLast = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart + SIZE_ALL_BUFFERS;

			gnConvertIndiceBuffer = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart;
			gnConvertPosition = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart;
			gnConvertPositionLast = gAviStreamInfo[ gnIndiceAudio[ gnAudioTongue ] ].dwStart;
		}
	}
	else {
		// CAUTION: the parameters used when we synchronise with an object are initialised here
		if( gnType != AVI_SYNCH_OBJECT ) {
			// No sound track is used, so we "virtually" synchronise on a 22050 Hz sound track
			// CAUTION: use the variable gnSoundPerImage only for division results
			gnSoundPerImage = (long)( gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwScale * 22050 / gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwRate );
		}
		else {
			gnSoundPerImage = gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwRate / gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwScale;
		}
	}

	gbInitDone = TRUE;
	SetEvent( ghEventInitDone );
	return TRUE;
}/*AviInitModule*/


/* ##F===================================================================================
   NAME               : AviPlayVideo
   DESCRIPTION        : Launch the AVI playback task
   INPUT              : szFilename (char *)
                        nType
   OUTPUT             :
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
void __stdcall AviPlayVideo( char* szFilename, unsigned long nType )
{
	//=======================================================================================
	// ENTER THE CRITICAL SECTION ( avoids interference between stop and play )
	EnterCriticalSection( & gCriticalSectionPlay );

	if( ! gbInitDone ) {
		//=============================================================================
		// The sound track is contained in the sound bank:
		// we use a thread to manage the video refresh
		//=============================================================================
		gbAudioThread = TRUE;
		gbFlagStop = FALSE;
		gnType = nType;
		ghThread = NULL;

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
		gpFile = fopen( "Toto.log", "w" );
		fprintf( gpFile, "Open %s\n===============================\n", szFilename );
		fclose( gpFile );
#endif//============================================ END _USE_FILE_DBG =================================

		switch( nType ) {
		case AVI_SYNCH_OBJECT:
#if defined( __WATCOMC__ ) && ( __WATCOMC__ < 1100 )
			ghThread = (HANDLE)_beginthread( AviVideoThread, NULL, 0, (void*)szFilename );
#else
			ghThread = (HANDLE)_beginthread( AviVideoThread, 0, (void*)szFilename );
#endif
			break;

		case AVI_SYNCH_STREAM:
		case AVI_SYNCH_NOSOUND:
#if defined( __WATCOMC__ ) && ( __WATCOMC__ < 1100 )
			ghThread = (HANDLE)_beginthread( AviVideoSoundThread, NULL, 0, (void*)szFilename );
#else
			ghThread = (HANDLE)_beginthread( AviVideoSoundThread, 0, (void*)szFilename );
#endif
			break;
		}
		if( ghThread ) {
			WaitForSingleObject( ghEventInitDone, INFINITE );
		}
		//=============================================================================
		// The sound track is contained in the AVI:
		// we use a "callback" of client buffers to manage the video refresh
		//=============================================================================
	}

	LeaveCriticalSection( & gCriticalSectionPlay );
	// EXIT THE CRITICAL SECTION
	//=======================================================================================
}/*AviPlayVideo*/
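
/*
   Illustrative sketch (not part of the build): how a hypothetical caller might launch a
   cinematic through this module. "Intro.avi" and the caller name are assumptions; the
   synchronisation mode constants are the ones handled by the switch above.
*/
#if 0
static void ExamplePlayCinematic( void )
{
	// Sound track read from the AVI itself and streamed through a client buffer
	AviPlayVideo( "Intro.avi", AVI_SYNCH_STREAM );

	// Or: no sound at all, frames paced with timeGetTime( )
	// AviPlayVideo( "Intro.avi", AVI_SYNCH_NOSOUND );
}
#endif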

/* ##F===================================================================================
   NAME               : AviVideoThread
   DESCRIPTION        : Body of a background task in charge of initialising the AVI module,
                        refreshing the display according to the audio samples, and
                        uninitialising the module.
   INPUT              : void*
   OUTPUT             :
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
#if defined( __WATCOMC__ ) && ( __WATCOMC__ < 1100 )
void AviVideoThread( void* pParam )
#else
void /*__cdecl*/ AviVideoThread( void* pParam )
#endif
{
	double dTimePerFrame;
	double dSkew;
	double dTime;
	unsigned char* pchDibDatas;
	SndReal nVideoTimeCurrent;
	SND_tduRefEvt lRefEvent;

	unsigned long nIndiceNextFrame;
	unsigned long nIndiceLastFrame;
	unsigned long nIndiceCurrentFrame;
	BOOL bDisplayFrame;


	// SetThreadPriority( GetCurrentThread( ), GetThreadPriority( GetCurrentThread( ) ) + 1 );
	SetThreadPriority( GetCurrentThread( ), THREAD_PRIORITY_NORMAL );

	// If the init fails, we return
	if( ! AviInitModule( (char*)pParam ) )
		return ;

#ifndef _USE_WINDOWS_DISPLAY//====================== BEGIN _USE_WINDOWS_DISPLAY =================================
#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
	GLD_bClearDevice( 0 );	// Clear the back buffer
	GLD_bFlipDevice( 0 );
	GLD_bClearDevice( 0 );	// Clear the front buffer
#endif//============================================ END VDO_FOR_F1 =================================
#endif//============================================ END _USE_WINDOWS_DISPLAY =================================

	if( gnNbAudioStream == 0 ) {
		gdLastTime = 0;
		// If the bank loading fails, we return
		if( ! SND_fn_bLoadBinaryBank( gnBankIndice ) ) {
			AviDesInitModule( );
			return;
		}
		else {
			lRefEvent.Id = MAKELONG( 0, NB_BANK_BNM );	// Always the first element
			lRefEvent.pstPtr = SND_fn_pGetBinEvent( lRefEvent.Id );
			gnVoice = SND_fn_lSendRequestSound( SND_C_OBJET_FANTOME, gnTypeSound, lRefEvent, 0, NULL );
			// If the sound request fails, we return
			if( gnVoice == SND_C_EVT_FAILED ) {
				AviDesInitModule( );
				return;
			}
		}
	}
	nIndiceNextFrame = 0;
	nIndiceLastFrame = 0;
	nIndiceCurrentFrame = 0;
	bDisplayFrame = TRUE;
	dTimePerFrame = 1 / (double)gnSoundPerImage;
	dSkew = (double)gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwInitialFrames;
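
	/*
	   Worked example (illustrative figures): in this synchronisation mode gnSoundPerImage
	   holds dwRate / dwScale, i.e. the frame rate. For a 15 fps AVI, dTimePerFrame =
	   1 / 15 = 0.0667 s, so a sound position of 2.0 s maps to frame 2.0 / 0.0667 = 30 in
	   the loop below.
	*/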
	// GOOD !!!, let's loop
	do {
		// We are not paused
		if( ! gbPauseDone ) {

			//=============================================================================
			// DISPLAY OF THE VIDEO FRAME
			//=============================================================================
			if( bDisplayFrame ) {
				if( gpDecompressBHeader->biBitCount > 8 ) {

#ifdef _USE_WINDOWS_DISPLAY//======================= BEGIN _USE_WINDOWS_DISPLAY =================================
					bDisplayFrame = FALSE;
					pchDibDatas = (unsigned char*)gpDecompressBHeader + (long)gpDecompressBHeader->biSize + (long)gpDecompressBHeader->biClrUsed * sizeof(RGBQUAD);
					DrawDibDraw( ghDrawDib, ghDC, 0, 0, -1, -1, gpDecompressBHeader, (void*)pchDibDatas, 0, 0, gAviInfo.dwWidth, gAviInfo.dwHeight, 0 );
#else
#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
					pchDibDatas = (unsigned char*)gpDecompressBHeader + (long)gpDecompressBHeader->biSize + (long)gpDecompressBHeader->biClrUsed * sizeof(RGBQUAD);
					if( WaitForSingleObject( ghDrawSem, 0 ) == WAIT_OBJECT_0 ) {
						// If the semaphore is free we display
						bDisplayFrame = FALSE;
						// Needed in case the focus is lost
						GLD_bClearDevice( 0 );
						GLD_vBitmapConvertAndBlit24bTo16bPixelFormat( ghDevice, ghViewPort, gpDecompressBHeader->biWidth, gpDecompressBHeader->biHeight, pchDibDatas );
						ReleaseSemaphore( ghDrawSem, 1, NULL );
					}
#else
					fn_vVideoDisplayFrame( gpDecompressBHeader );
#endif//============================================ END VDO_FOR_F1 =================================
#endif//============================================ END _USE_WINDOWS_DISPLAY =================================
				}
			}

			//=============================================================================
			// DECOMPRESSION MANAGEMENT
			//=============================================================================
			nVideoTimeCurrent = SND_fn_rGetPosSound( gnVoice );
			if( ( ( nVideoTimeCurrent != SND_C_POS_ENDED ) && ( nVideoTimeCurrent != SND_C_POS_UNKNOWN ) )/* && ( ! gbFlagStop )*/ ) {
				// Add a "skew" of 0.25 second
				dTime = M_RealToDoubleSnd( nVideoTimeCurrent );
				nIndiceCurrentFrame = (unsigned long)( dTime / dTimePerFrame );
				if( ( nIndiceNextFrame < nIndiceCurrentFrame ) || ( nIndiceLastFrame == nIndiceNextFrame ) ) {
					if( nIndiceNextFrame < nIndiceCurrentFrame ) {
						SetThreadPriority( GetCurrentThread( ), THREAD_PRIORITY_ABOVE_NORMAL );
						nIndiceNextFrame = nIndiceCurrentFrame;
					}
					else {
						nIndiceNextFrame++;
						SetThreadPriority( GetCurrentThread( ), THREAD_PRIORITY_NORMAL );
					}
					if( nIndiceNextFrame < gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwLength ) {
						gpDecompressBHeader = (LPBITMAPINFOHEADER)AVIStreamGetFrame( gpGetVideoFrame, nIndiceNextFrame );
					}
					else {
						gbAudioThread = FALSE;
					}
				}
			}
			else
				gbAudioThread = FALSE;

			//=============================================================================
			// VIDEO DISPLAY
			//=============================================================================
			// Authorise the frame display
			nVideoTimeCurrent = SND_fn_rGetPosSound( gnVoice );
			if( ( ( nVideoTimeCurrent != SND_C_POS_ENDED ) && ( nVideoTimeCurrent != SND_C_POS_UNKNOWN ) )/* && ( ! gbFlagStop )*/ ) {
				// Add a "skew" of 0.25 second
				dTime = M_RealToDoubleSnd( nVideoTimeCurrent );
				if( dTime - gdLastTime > dTimePerFrame ) {
					bDisplayFrame = TRUE;
					nIndiceLastFrame = nIndiceNextFrame;
					gdLastTime = dTimePerFrame * nIndiceNextFrame;
				}
			}
		}
		else
			Sleep( 100 );
	}
	while( gbAudioThread && ( ! gbFlagStop ) );

#ifdef _USE_WINDOWS_DISPLAY//======================= BEGIN _USE_WINDOWS_DISPLAY =================================
	SND_fn_vKillObjectSound2( SND_C_OBJET_FANTOME, gnTypeSound );
	AviDesInitModule( );
#else
#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
	SND_fn_vKillObjectSound2( SND_C_OBJET_FANTOME, gnTypeSound );
	AviDesInitModule( );
	GLD_bClearDevice( 0 );
#else
	SND_fn_vKillObjectSound( SND_C_OBJET_FANTOME );
	AviDesInitModule( );
#endif//============================================ END VDO_FOR_F1 =================================
#endif//============================================ END _USE_WINDOWS_DISPLAY =================================

}/*AviVideoThread*/

/* ##F===================================================================================
   NAME               : AviVideoSoundThread
   DESCRIPTION        : Body of a background task in charge of initialising the AVI module,
                        refreshing the display according to the audio samples, and
                        uninitialising the module.
                        USED IN THE CASE WHERE THE AVI CONTAINS ITS OWN SOUND CHANNELS
   INPUT              : void*
   OUTPUT             :
   =========================================================================================
   CREATION           : Nicolas Meyer
   LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
   =======================================================================================*/
#if defined( __WATCOMC__ ) && ( __WATCOMC__ < 1100 )
void AviVideoSoundThread( void* pParam )
#else
void /*__cdecl*/ AviVideoSoundThread( void* pParam )
#endif
{
	// Copies of global values which can be modified during the task
	long nConvertPosition;
	long nAviReadPosition;
	long nNbBytesRead;
	long nNbSamplesRead;
	long nScaleConvert;
	long nResult;
	BOOL bEndReach;
	BOOL bDisplayFrame;
	TIMECAPS timeCaps;
	volatile long nCpt;	// Prevents the variable from being kept in a register
	unsigned char* pchDibDatas;
	unsigned short nTimeResolution;
	unsigned long nVideoTimeCurrent;
	// Contains the index of the last displayed video frame
	unsigned long nIndiceNextFrame;
	unsigned long nIndiceLastFrame;
	unsigned long nIndiceCurrentFrame;

	long nTemp;
	unsigned long nVideoPosition;
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
	unsigned long nTrameJump;
	unsigned long nTrameCurrent;
	unsigned long nLastTime;
	unsigned long nTempTime;
#endif//============================================ END _USE_FILE_DBG =================================


	SetThreadPriority( GetCurrentThread( ), THREAD_PRIORITY_NORMAL );
	// If the init fails, we return
	if( ! AviInitModule( (char*)pParam ) )
		return;

	timeGetDevCaps( & timeCaps, sizeof(TIMECAPS) );
	nTimeResolution = max( TIME_RESOLUTION, timeCaps.wPeriodMin );
	timeBeginPeriod( nTimeResolution );
	bEndReach = FALSE;
	bDisplayFrame = TRUE;
	nIndiceLastFrame = 0;
	nIndiceNextFrame = 0;
	nScaleConvert = 0;
	nTemp = 0;

#ifndef _USE_WINDOWS_DISPLAY//====================== BEGIN _USE_WINDOWS_DISPLAY =================================
#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
	GLD_bClearDevice( 0 );
	GLD_bFlipDevice( 0 );
	GLD_bClearDevice( 0 );
#endif//============================================ END VDO_FOR_F1 =================================
#endif//============================================ END _USE_WINDOWS_DISPLAY =================================

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
	gnCumulateTime = 0;
	nTrameCurrent = 0;
	nTrameJump = 0;
	gnNbFrame = 1;
#endif//============================================ END _USE_FILE_DBG =================================

	// To be sure that "gnVideoTimeStart" is greater than "gnVideoTimePerImage"
	if( gnType != AVI_SYNCH_NOSOUND ) {
		if( gnNbAudioStream ) {
			// Creation of a client buffer of about 1 second
			gnBufferID = SND_fn_lCreateNewBufferSound( gnSizeSampleConvertAllBuffers, gWaveFormatexOut.wBitsPerSample, gWaveFormatexOut.nChannels, gWaveFormatexOut.nSamplesPerSec, NULL, AviCompleteBufferClientCallback, 0 );
			if( gnBufferID == C_PLAY_FAILED )
				gnType = AVI_SYNCH_NOSOUND;
		}
		else
			gnType = AVI_SYNCH_NOSOUND;
	}

	// Time of the beginning of the video, which allows the decompression of the second video frame
	// gnVideoTimeStart = timeGetTime( ) - gnVideoTimePerImage;

	// Beginning of the video
	// THIS INIT MUST BE PLACED HERE AND NOWHERE ELSE
	gnVideoTimeStart = timeGetTime( );
	do {

		// Exact copy
		EnterCriticalSection( & gCriticalSection );
		nAviReadPosition = gnAviReadPosition;
		nConvertPosition = gnConvertPosition;
		LeaveCriticalSection( & gCriticalSection );

		if( gnType != AVI_SYNCH_NOSOUND ) {

			nScaleConvert = ( nConvertPosition - gnConvertPositionLast ) / gnSizeSampleConvertBuffer;
			// Increase the task priority when we are late
			if( nScaleConvert > 1 )
				SetThreadPriority( GetCurrentThread( ), THREAD_PRIORITY_ABOVE_NORMAL );
			else
				SetThreadPriority( GetCurrentThread( ), THREAD_PRIORITY_NORMAL );

			// Test if we must refresh and convert new buffers
			if( gWaveFormatexOut.wFormatTag != gpWaveFormatexIn->wFormatTag ) {

				//=============================================================================
				// MANAGEMENT OF CONVERTED DATA BUFFERS
				//=============================================================================
				if( nScaleConvert > 0 ) {

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
					// fprintf( gpFile, "A %d\n", nScaleConvert );
#endif//============================================ END _USE_FILE_DBG =================================

					if( nScaleConvert != 1 ) {
						if( nScaleConvert < NB_BUFFERS_CONVERT / 4 )
							nScaleConvert = 2;
						else if( nScaleConvert < NB_BUFFERS_CONVERT / 2 )
							nScaleConvert = NB_BUFFERS_CONVERT / 4;
						else if( nScaleConvert < 3 * NB_BUFFERS_CONVERT / 4 )
							nScaleConvert = NB_BUFFERS_CONVERT / 2;
						else
							nScaleConvert = 3 * NB_BUFFERS_CONVERT / 4;
					}
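
					/*
					   The ladder above clamps the number of buffers converted in one pass so a
					   large backlog is caught up gradually. Illustrative figures: with
					   NB_BUFFERS_CONVERT = 8, a backlog of 5 buffers falls in the
					   "< 3 * NB_BUFFERS_CONVERT / 4" branch and is clamped to 4 conversions
					   this pass; the remainder is handled on the following iterations.
					*/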
|
||
|
||
for( nCpt = 0; nCpt < nScaleConvert; nCpt++ ) {
|
||
|
||
nResult = acmStreamConvert( ghAcmStream,
|
||
& gAcmStreamHeader[ gnConvertIndiceBuffer ],
|
||
ACM_STREAMCONVERTF_BLOCKALIGN );
|
||
gnConvertPositionLast += gnSizeSampleConvertBuffer;
|
||
if( ++gnConvertIndiceBuffer == NB_BUFFERS_CONVERT )
|
||
gnConvertIndiceBuffer = 0;
|
||
}
|
||
|
||
//=====================================================================================================
|
||
// MANAGEMENT OF UNCONVERTED DATA BUFFERS
|
||
//=====================================================================================================
|
||
for( nCpt = 0; nCpt < nScaleConvert; nCpt++ ) {
|
||
|
||
nResult = AVIStreamRead( gpAviStream[ gnIndiceAudio[ gnAudioTongue ] ],
|
||
gnAviReadPositionLast / gpWaveFormatexIn->nBlockAlign,
|
||
gnSizeSampleUnConvertBuffer / gpWaveFormatexIn->nBlockAlign,
|
||
gpAcmUnConvertData + gnUnConvertIndiceBuffer * gnSizeSampleUnConvertBuffer,
|
||
gnSizeSampleUnConvertBuffer,
|
||
& nNbBytesRead,
|
||
& nNbSamplesRead );
|
||
if( nResult ) {
|
||
memset( gpAcmUnConvertData + gnUnConvertIndiceBuffer * gnSizeSampleUnConvertBuffer, 0, gnSizeSampleUnConvertBuffer );
|
||
if( ! bEndReach )
|
||
if( gnUnConvertIndiceBuffer > 0 )
|
||
memset( gpAcmUnConvertData + ( gnUnConvertIndiceBuffer - 1 ) * gnSizeSampleUnConvertBuffer, 0, gnSizeSampleUnConvertBuffer );
|
||
else
|
||
memset( gpAcmUnConvertData + NB_BUFFERS_CONVERT * gnSizeSampleUnConvertBuffer, 0, gnSizeSampleUnConvertBuffer );
|
||
bEndReach = TRUE;
|
||
}
|
||
gnAviReadPositionLast += gnSizeSampleUnConvertBuffer;
|
||
if( ++gnUnConvertIndiceBuffer == NB_BUFFERS_CONVERT )
|
||
gnUnConvertIndiceBuffer = 0;
|
||
}
|
||
}
|
||
}
|
||
else {
|
||
if( nScaleConvert > 0 ) {
|
||
|
||
//=====================================================================================================
|
||
// MANAGEMENT OF PCM DATA BUFFERS
|
||
//=====================================================================================================
|
||
for( nCpt = 0; nCpt < nScaleConvert; nCpt++ ) {
|
||
nResult = AVIStreamRead( gpAviStream[ gnIndiceAudio[ gnAudioTongue ] ],
|
||
gnAviReadPositionLast,
|
||
gnSizeSampleConvertBuffer,
|
||
gpAcmConvertData + gnConvertIndiceBuffer * gnSizeSampleConvertBuffer * gWaveFormatexOut.nBlockAlign,
|
||
gnSizeSampleConvertBuffer * gWaveFormatexOut.nBlockAlign,
|
||
& nNbBytesRead,
|
||
& nNbSamplesRead );
|
||
if( nResult ) {
|
||
memset( gpAcmConvertData + gnConvertIndiceBuffer * gnSizeSampleConvertBuffer * gWaveFormatexOut.nBlockAlign, 0, gnSizeSampleConvertBuffer * gWaveFormatexOut.nBlockAlign );
|
||
if( ! bEndReach )
|
||
// Case of short AVI, where the init buffers ara sufficient to load all the sound part
|
||
if( gnConvertIndiceBuffer > 0 )
|
||
memset( gpAcmConvertData + ( gnConvertIndiceBuffer - 1 ) * gnSizeSampleConvertBuffer * gWaveFormatexOut.nBlockAlign, 0, gnSizeSampleConvertBuffer * gWaveFormatexOut.nBlockAlign );
|
||
else
|
||
memset( gpAcmUnConvertData + NB_BUFFERS_CONVERT * gnSizeSampleUnConvertBuffer, 0, gnSizeSampleUnConvertBuffer );
|
||
bEndReach = TRUE;
|
||
}
|
||
|
||
gnConvertPositionLast += gnSizeSampleConvertBuffer;
|
||
gnAviReadPositionLast += gnSizeSampleConvertBuffer;
|
||
|
||
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
|
||
//fprintf( gpFile, "%d\n", gnConvertIndiceBuffer );
|
||
#endif//============================================ END _USE_FILE_DBG =================================
|
||
|
||
if( ++gnConvertIndiceBuffer == NB_BUFFERS_CONVERT )
|
||
gnConvertIndiceBuffer = 0;
|
||
}
|
||
}
|
||
}
|
||
} // if( gnType != AVI_SYNCH_NOSOUND )
|
||
|
||
//=====================================================================================================
|
||
// DISPLAY THE VIDEO FRAME
|
||
//=====================================================================================================
|
||
if( bDisplayFrame ) {
|
||
bDisplayFrame = FALSE;
|
||
if( gpDecompressBHeader->biBitCount > 8 ) {
|
||
|
||
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
|
||
if( nTrameCurrent != nIndiceLastFrame ) {
|
||
fprintf( gpFile, "On saute %d trame(s)\n", nIndiceLastFrame - nTrameCurrent );
|
||
nTrameJump += nIndiceLastFrame - nTrameCurrent;
|
||
nTrameCurrent = nIndiceLastFrame;
|
||
}
|
||
nTrameCurrent++;
|
||
// nLastTime = timeGetTime( );
|
||
#endif//============================================ END _USE_FILE_DBG =================================
|
||
|
||
|
||
#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
|
||
pchDibDatas = (unsigned char*)gpDecompressBHeader + (long)gpDecompressBHeader->biSize + (long)gpDecompressBHeader->biClrUsed * sizeof(RGBQUAD);
|
||
|
||
#ifdef _USE_WINDOWS_DISPLAY//======================= BEGIN _USE_WINDOWS_DISPLAY =================================
|
||
DrawDibDraw( ghDrawDib,
|
||
ghDC,
|
||
0,
|
||
0,
|
||
-1,
|
||
-1,
|
||
gpDecompressBHeader,
|
||
(void*)pchDibDatas,
|
||
0,
|
||
0,
|
||
gAviInfo.dwWidth,
|
||
gAviInfo.dwHeight,
|
||
0 );
|
||
#else
|
||
GLD_vBitmapConvertAndBlit24bTo16bPixelFormat( ghDevice,
|
||
ghViewPort,
|
||
gpDecompressBHeader->biWidth,
|
||
gpDecompressBHeader->biHeight,
|
||
pchDibDatas );
|
||
#endif//============================================ END _USE_WINDOWS_DISPLAY =================================
|
||
|
||
#else
|
||
fn_vVideoDisplayFrame( gpDecompressBHeader );
|
||
#endif//============================================ END VDO_FOR_F1=================================
|
||
|
||
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
|
||
fprintf( gpFile, "V %d**Time %d\n", nIndiceLastFrame ,timeGetTime()-gnVideoTimeStart);
|
||
#endif//============================================ END _USE_FILE_DBG =================================
|
||
}
|
||
}

        //=====================================================================================================
        // UNCOMPRESSION MANAGEMENT
        //=====================================================================================================
        if( gnType != AVI_SYNCH_NOSOUND ) {

            double dddd;
            // BY POSITION OF THE AUDIO SAMPLES
            EnterCriticalSection( & gCriticalSection );
            nVideoPosition = gnVideoPosition;
            LeaveCriticalSection( & gCriticalSection );

            dddd = M_RealToDoubleSnd( SND_fn_rGetPosBufferSound( gnBufferID ) );
            nTemp = (long)( (double)gWaveFormatexOut.nSamplesPerSec * dddd ) - ( nVideoPosition - 1 ) % gnSizeSampleConvertAllBuffers;
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
            fprintf( gpFile, "Test %d", nTemp );
#endif//============================================ END _USE_FILE_DBG =================================
            if( nTemp >= 0 ) {
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                fprintf( gpFile, " >=0**" );
#endif//============================================ END _USE_FILE_DBG =================================
                nTemp += nVideoPosition - gnSizeSampleConvertAllBuffers;
            }
            else {
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                fprintf( gpFile, " <0**" );
#endif//============================================ END _USE_FILE_DBG =================================
                nTemp += nVideoPosition;
            }
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
            fprintf( gpFile, "VidPos %d**Inc %d**Pos buff %f**Time %d\n", nTemp, nVideoPosition, 1000.0 * dddd, timeGetTime( ) - gnVideoTimeStart );
#endif//============================================ END _USE_FILE_DBG =================================
            nVideoPosition = nTemp;

            if( nVideoPosition > 0 ) {
                nIndiceCurrentFrame = nVideoPosition / gnSoundPerImage;
                // if( nVideoPosition > gnVideoPositionSkew ) {
                // nIndiceCurrentFrame = ( nVideoPosition - gnVideoPositionSkew ) / gnSoundPerImage;

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                // fprintf( gpFile, "time %d**buf %d\n", timeGetTime( ) - gnVideoTimeStart, (long)( 1000.0 * M_RealToDoubleSnd( SND_fn_rGetPosBufferSound( gnBufferID ) ) ) );
#endif//============================================ END _USE_FILE_DBG =================================

            }
            else
                nIndiceCurrentFrame = 0;
        }
        else {
            // each "timeGetTime"
            nIndiceCurrentFrame = timeGetTime( );
            if( nIndiceCurrentFrame > gnVideoTimeStart + gnVideoTimeSkew ) {
                nIndiceCurrentFrame -= gnVideoTimeStart + gnVideoTimeSkew;
                nIndiceCurrentFrame /= gnVideoTimePerImage;
            }
            else
                nIndiceCurrentFrame = 0;
        }
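
        // For illustration (hypothetical numbers): if gnSoundPerImage holds the number of audio samples
        // per video frame, then with gWaveFormatexOut.nSamplesPerSec = 22050 and a 15 images/s video it
        // would be 22050 / 15 = 1470, so an audio position of 44100 samples selects frame index
        // 44100 / 1470 = 30. In the AVI_SYNCH_NOSOUND case the same index is instead derived from
        // elapsed milliseconds: ( timeGetTime( ) - gnVideoTimeStart - gnVideoTimeSkew ) / gnVideoTimePerImage.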

        if( ( nIndiceCurrentFrame > nIndiceNextFrame ) || ( nIndiceNextFrame == nIndiceLastFrame ) ) {

            if( nIndiceCurrentFrame > nIndiceNextFrame )
                nIndiceNextFrame = nIndiceCurrentFrame;
            else
                nIndiceNextFrame++;

            if( nIndiceNextFrame < gAviStreamInfo[ gnIndiceVideo[ 0 ] ].dwLength ) {
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                nLastTime = timeGetTime( );
#endif//============================================ END _USE_FILE_DBG =================================

#ifdef _USE_DECOMPRESS//============================ BEGIN _USE_DECOMPRESS =================================
                /*gpDecompressBHeader = */DecompressGetFrame( nIndiceNextFrame );
#else
                /*gpDecompressBHeader = (LPBITMAPINFOHEADER)*/AVIStreamGetFrame( gpGetVideoFrame, nIndiceNextFrame );
#endif//============================================ END _USE_DECOMPRESS =================================

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                nTempTime = timeGetTime( );

#ifdef _USE_DECOMPRESS//============================ BEGIN _USE_DECOMPRESS =================================
                // fprintf( gpFile, "Frame %d**Perso %d\n", nIndiceNextFrame, nTempTime - nLastTime );
#else
                fprintf( gpFile, "Frame %d**Api %d**Time %d\n", nIndiceNextFrame, nTempTime - nLastTime, nTempTime - gnVideoTimeStart );
#endif//============================================ END _USE_DECOMPRESS =================================

                gnCumulateTime += nTempTime - nLastTime;
                gnNbFrame++;
#endif//============================================ END _USE_FILE_DBG =================================
            }
            else
                gbAudioThread = FALSE;
        }
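
        // Reminder of the standard VfW behaviour assumed here (not new logic): AVIStreamGetFrame( )
        // takes a PGETFRAME handle, presumably obtained elsewhere in this module with
        // AVIStreamGetFrameOpen( ), and returns a pointer to a packed DIB (BITMAPINFOHEADER followed
        // by the colour table and the pixel bits), which is exactly the layout the display code above
        // expects in gpDecompressBHeader.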

        //=====================================================================================================
        // COMMAND THE VIDEO DISPLAY
        //=====================================================================================================
        // Allow the frame displaying
        if( gnType != AVI_SYNCH_NOSOUND ) {
            // long nTemp;
            // FOR EACH POSITION OF AUDIO SAMPLES
            EnterCriticalSection( & gCriticalSection );
            nVideoPosition = gnVideoPosition;
            LeaveCriticalSection( & gCriticalSection );

            nTemp = (long)( (double)gWaveFormatexOut.nSamplesPerSec * M_RealToDoubleSnd( SND_fn_rGetPosBufferSound( gnBufferID ) ) )
                  - ( nVideoPosition - 1 ) % gnSizeSampleConvertAllBuffers;
            if( nTemp >= 0 ) {
                nTemp += nVideoPosition - gnSizeSampleConvertAllBuffers;
            }
            else {
                nTemp += nVideoPosition;
            }
            nVideoPosition = nTemp;


            if( nVideoPosition > gnVideoPositionSkew ) {
                // Compensate for the low precision of the "GetPos" function of the client buffer
                unsigned long nVideoPositionCurrent;

                nVideoPositionCurrent = nVideoPosition - gnVideoPositionSkew;
                if( nVideoPositionCurrent - gnVideoPositionLast > gnSoundPerImage ) {

                    bDisplayFrame = TRUE;
                    // gnVideoPositionLast = nVideoPositionCurrent - nVideoPositionCurrent % gnSoundPerImage;
                    gnVideoPositionLast = nIndiceNextFrame * gnSoundPerImage;
                    nIndiceLastFrame = nIndiceNextFrame;
#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                    fprintf( gpFile, "DisplayVideo\n" );
#endif//============================================ END _USE_FILE_DBG =================================
                }
            }
            // When there is nothing else to do, we sleep
            if( ( nScaleConvert == 0 ) &&
                ( nVideoPosition - gnVideoPositionLast < gnSoundPerImage ) &&
                ( ! bDisplayFrame ) ) {
                unsigned long nSleepTime;
                nSleepTime = ( gnVideoPositionLast + gnSoundPerImage - nVideoPosition ) * 1000 / gWaveFormatexOut.nSamplesPerSec;

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                fprintf( gpFile, "Test Sleep %d\n", gnVideoPositionLast + gnSoundPerImage - nVideoPosition );
#endif//============================================ END _USE_FILE_DBG =================================

                if( nSleepTime > 0 ) {

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                    fprintf( gpFile, "Sleep %d ms\n", nSleepTime );
#endif//============================================ END _USE_FILE_DBG =================================

                    Sleep( nSleepTime );
                }
            }
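
            // For illustration (hypothetical numbers): if 735 samples remain before the next frame
            // boundary and nSamplesPerSec is 22050, the thread sleeps 735 * 1000 / 22050 = 33 ms,
            // roughly one frame period of a 30 images/s video. In the AVI_SYNCH_NOSOUND branch below
            // the remainder is already expressed in milliseconds, so it is passed to Sleep( ) directly.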

        }
        else {
            // EACH "timeGetTime"
            nVideoTimeCurrent = timeGetTime( ) - gnVideoTimeStart - gnVideoTimeSkew;
            if( nVideoTimeCurrent - gnVideoTimeLast > gnVideoTimePerImage ) {
                bDisplayFrame = TRUE;
                gnVideoTimeLast = nVideoTimeCurrent - nVideoTimeCurrent % gnVideoTimePerImage;
                nIndiceLastFrame = nIndiceNextFrame;
            }
        }
        // When there is nothing else to do, we sleep
        if( ( nScaleConvert == 0 ) &&
            ( nVideoTimeCurrent - gnVideoTimeLast < gnVideoTimePerImage ) &&
            ( ! bDisplayFrame ) ) {
            unsigned long nSleepTime;
            nSleepTime = gnVideoTimeLast + gnVideoTimePerImage - nVideoTimeCurrent;
            if( nSleepTime > 0 ) {

#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
                fprintf( gpFile, "Sleep %d ms\n", nSleepTime );
#endif//============================================ END _USE_FILE_DBG =================================

                Sleep( nSleepTime );
            }
        }
    }
    while( ( gbAudioThread ) && ( ! gbFlagStop ) );

    if( gnType != AVI_SYNCH_NOSOUND )
        SND_fn_vDeleteBufferSound( gnBufferID );

    timeEndPeriod( nTimeResolution );


#ifdef _USE_FILE_DBG//============================== BEGIN _USE_FILE_DBG =================================
    fprintf( gpFile, "Number of skipped frames %d\n", nTrameJump );
#endif//============================================ END _USE_FILE_DBG =================================

    AviDesInitModule( );

#ifndef _USE_WINDOWS_DISPLAY//====================== BEGIN _USE_WINDOWS_DISPLAY =================================

#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
    GLD_bClearDevice( 0 );
#endif//============================================ END VDO_FOR_F1 =================================

#endif//============================================ END _USE_WINDOWS_DISPLAY =================================
}/*AviVideoSoundThread*/

/* ##F===================================================================================
NAME : VDO_fn_bIsRunningAVI
DESCRIPTION : Indicate whether the video task is still running (TRUE) or finished (FALSE)
INPUT :
OUTPUT : ACP_tdxBool
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
ACP_tdxBool VDO_fn_bIsRunningAVI( )
{
    ACP_tdxBool bEnd;

    bEnd = FALSE;
    // the return code "WAIT_OBJECT_0" means that the thread has terminated
    if( gbInitDone ) {
        //=======================================================================================
        // ENTRANCE IN CRITICAL SECTION (Avoid interference between stop and play)
        EnterCriticalSection( & gCriticalSectionPlay );

        if( ! ghThread )
            bEnd = FALSE;
        else if( WaitForSingleObject( ghThread, 0 ) == WAIT_OBJECT_0 )
            bEnd = FALSE;
        else
            bEnd = TRUE;
        LeaveCriticalSection( & gCriticalSectionPlay );
        // EXIT OF CRITICAL SECTION
        //=======================================================================================
    }

    return bEnd;
}/*VDO_fn_bIsRunningAVI*/
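
// Minimal usage sketch (illustrative only; ProcessPendingMessages is a hypothetical caller-side
// helper): the function is meant to be polled, e.g. from the game loop, until playback ends.
//
//     while( VDO_fn_bIsRunningAVI( ) ) {
//         ProcessPendingMessages( );      // hypothetical message pump / input handling
//         Sleep( 10 );
//     }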

/* ##F===================================================================================
NAME : VDO_fn_xDesInitAVI
DESCRIPTION : Uninitialisation function. To be called only once
              (closes the event and destroys the critical sections)
INPUT :
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xDesInitAVI( )
{
    CloseHandle( ghEventInitDone );
    // CloseHandle( ghEventSynchro );
    DeleteCriticalSection( & gCriticalSectionPlay );
    DeleteCriticalSection( & gCriticalSection );
    DeleteCriticalSection( & gCriticalSectionPause );
}/*VDO_fn_xDesInitAVI*/

/* ##F===================================================================================
NAME : VDO_fn_xInitAVI
DESCRIPTION : Initialisation function. To be called only once: it registers a single type
              of sound object and stores the instance handle (and the window handle in the
              windowed build).
INPUT : HINSTANCE hInstance
        HANDLE hWnd
        void* hDrawSem
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
#ifdef _USE_WINDOWS_DISPLAY//======================= BEGIN _USE_WINDOWS_DISPLAY =================================
void VDO_fn_xInitAVI( HINSTANCE hInstance, HANDLE hWnd )
#else
void VDO_fn_xInitAVI( HINSTANCE hInstance, void* hDrawSem )
#endif//============================================ END _USE_WINDOWS_DISPLAY =================================
{
    tdstAllRetObjectSound ObjetSound;

    memset( & ObjetSound, 0, sizeof(tdstAllRetObjectSound) );

    gnLineSound = SND_fn_lAddVolumeLineSound( );

    ObjetSound.X = AviRetObjetSonorePosVDO;
    ObjetSound.Y = AviRetObjetSonorePosVDO;
    ObjetSound.Z = AviRetObjetSonorePosVDO;
    ObjetSound.VX = AviRetObjetSonorePosVDO;
    ObjetSound.VY = AviRetObjetSonorePosVDO;
    ObjetSound.VZ = AviRetObjetSonorePosVDO;
    ObjetSound.REVERB = AviRetObjetSonoreReverbVDO;
    ObjetSound.ExtraCoef = AviRetObjetSonoreExtraCoefVDO;
    ObjetSound.Switch = AviRetObjetSonoreSwitchVDO;
    ObjetSound.MicroLink = AviRetObjetSonoreMicroLinkVDO;

    gnTypeSound = SND_fn_lAddObjectTypeSound2( & ObjetSound, gnLineSound );

#ifdef VDO_FOR_F1//================================= BEGIN VDO_FOR_F1 =================================
    SND_fn_vSetInfoForTypeSound( gnLineSound, AviRetObjetSonoreInfoVDO );
#endif//============================================ END VDO_FOR_F1 =================================

    InitializeCriticalSection( & gCriticalSection );
    InitializeCriticalSection( & gCriticalSectionPlay );
    InitializeCriticalSection( & gCriticalSectionPause );
    ghEventInitDone = NULL;
    gAttributes.nLength = sizeof(SECURITY_ATTRIBUTES);
    gAttributes.lpSecurityDescriptor = NULL;
    gAttributes.bInheritHandle = TRUE;
    // Auto-reset event, created in the non-signaled state
    ghEventInitDone = CreateEvent( & gAttributes,
                                   FALSE,
                                   FALSE,
                                   NULL );
    // The event resets itself to non-signaled when a waiter is released and starts non-signaled
    /* ghEventSynchro = CreateEvent( NULL,
                                     FALSE,
                                     FALSE,
                                     NULL );*/
    ghInstance = hInstance;
    ghDrawSem = hDrawSem;
    gnBankIndice = NB_BANK_BNM;

#ifdef _USE_WINDOWS_DISPLAY//======================= BEGIN _USE_WINDOWS_DISPLAY =================================
    ghVideoWnd = (HWND)hWnd;
#else

#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
    ghDevice = 0;
    ghViewPort = 0;
#endif//============================================ END VDO_FOR_F1 =================================

#endif//============================================ END _USE_WINDOWS_DISPLAY =================================

}/*VDO_fn_xInitAVI*/
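
// A minimal sketch of the expected call sequence from the host application (illustrative;
// the instance handle, semaphore, directory, file name and the chosen AVI_SYNCH_* mode are
// hypothetical, the exact ordering w.r.t. VDO_fn_xSetGraphicEnvironment is an assumption):
//
//     VDO_fn_xInitAVI( hInstance, pDrawSem );                   // once, at startup
//     VDO_fn_xSetDirectoryAVI( "D:\\Data\\Videos" );            // working directory, no trailing "\"
//     VDO_fn_xSetGraphicEnvironment( hDevice, hViewPort );      // non-windowed build only
//     VDO_fn_xPlayVideoAVI( "Intro.avi", AVI_SYNCH_STREAM );    // launch playback
//     while( VDO_fn_bIsRunningAVI( ) )
//         Sleep( 10 );                                          // or call VDO_fn_xStopVideoAVI( ) to abort
//     VDO_fn_xDesInitAVI( );                                    // once, at shutdown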

/* ##F===================================================================================
NAME : VDO_fn_xPlayVideoAVI
DESCRIPTION : Procedure used to launch playback (the only indirection of the module).
              nType selects the type of play / synchronisation we want.
INPUT : char* szFilename
        unsigned long nType
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xPlayVideoAVI( char* szFilename, unsigned long nType )
{
    AviPlayVideo( szFilename, nType );
}/*VDO_fn_xPlayVideoAVI*/
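
// The nType values handled elsewhere in this file are AVI_SYNCH_OBJECT, AVI_SYNCH_STREAM and
// AVI_SYNCH_NOSOUND: the first two slave the frame index to the sound buffer position, the last
// one to timeGetTime( ). Illustrative call (the file name is hypothetical):
//
//     VDO_fn_xPlayVideoAVI( "Logo.avi", AVI_SYNCH_NOSOUND );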

/* ##F===================================================================================
NAME : VDO_fn_xPauseVideoAVI
DESCRIPTION : Pause the video
INPUT :
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xPauseVideoAVI( )
{
    if( VDO_fn_bIsRunningAVI( ) ) {
        //=======================================================================================
        // ENTRANCE IN CRITICAL SECTION (Avoid interferences between stop and play)
        EnterCriticalSection( & gCriticalSectionPause );
        // Protection against reentrance
        if( ! gbPauseDone ) {
            gbPauseDone = TRUE;
            switch( gnType ) {
            case AVI_SYNCH_OBJECT:
                SND_fn_vPauseSound( );
                break;
            case AVI_SYNCH_STREAM:
                SND_fn_vPauseBufferSound( gnBufferID );
                break;
            case AVI_SYNCH_NOSOUND:
                gnVideoTimePauseStart = timeGetTime( );
                break;
            }
            // Too brutal
            // SuspendThread( ghThread );
        }
        LeaveCriticalSection( & gCriticalSectionPause );
        // EXIT OF CRITICAL SECTION
        //=======================================================================================
    }
}/*VDO_fn_xPauseVideoAVI*/

/* ##F===================================================================================
NAME : VDO_fn_xResumeVideoAVI
DESCRIPTION : Unpause the video
INPUT :
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xResumeVideoAVI( )
{
    if( VDO_fn_bIsRunningAVI( ) ) {
        //=======================================================================================
        // ENTRANCE IN CRITICAL SECTION (Avoid interferences between stop and play)
        EnterCriticalSection( & gCriticalSectionPause );
        if( gbPauseDone ) {
            ResumeThread( ghThread );
            switch( gnType ) {
            case AVI_SYNCH_OBJECT:
                SND_fn_vResumeSound( );
                break;
            case AVI_SYNCH_STREAM:
                SND_fn_vResumeBufferSound( gnBufferID );
                break;
            case AVI_SYNCH_NOSOUND:
                gnVideoTimeSkew += timeGetTime( ) - gnVideoTimePauseStart;
                break;
            }
            gbPauseDone = FALSE;
        }
        LeaveCriticalSection( & gCriticalSectionPause );
        // EXIT OF CRITICAL SECTION
        //=======================================================================================
    }
}/*VDO_fn_xResumeVideoAVI*/
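
// For illustration (hypothetical numbers): in AVI_SYNCH_NOSOUND mode a pause of 2000 ms makes
// gnVideoTimeSkew grow by timeGetTime( ) - gnVideoTimePauseStart = 2000, so the frame index
// computed in the playback thread, ( now - gnVideoTimeStart - gnVideoTimeSkew ) / gnVideoTimePerImage,
// resumes exactly where it was instead of jumping forward by the paused duration.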

/* ##F===================================================================================
NAME : VDO_fn_xSetAudioTongue
DESCRIPTION : For a multi-language AVI, set the index of the audio language (tongue) to play
INPUT : long nIndice
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xSetAudioTongue( long nIndice )
{
    gnAudioTongue = nIndice;
}/*VDO_fn_xSetAudioTongue*/

/* ##F===================================================================================
NAME : VDO_fn_xSetBankIndice
DESCRIPTION : Set the index of the sound bank
INPUT : unsigned short nIndice
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xSetBankIndice( unsigned short nIndice )
{
    gnBankIndice = (unsigned long)nIndice;
}/*VDO_fn_xSetBankIndice*/

/* ##F===================================================================================
NAME : VDO_fn_xSetDirectoryAVI
DESCRIPTION : Init the working directory of the "AVI" module
              CAUTION: the path of the AVI directory must not end with "\"
INPUT : char* szDirectory
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xSetDirectoryAVI( char* szDirectory )
{
    strcpy( gstrDir, szDirectory );
}/*VDO_fn_xSetDirectoryAVI*/
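
// Illustrative call (the path is hypothetical; note the absence of a trailing backslash, and that
// gstrDir is a fixed 256-byte buffer, so the path must stay shorter than that):
//
//     VDO_fn_xSetDirectoryAVI( "D:\\Data\\Video" );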

/* ##F===================================================================================
NAME : VDO_fn_szGetDirectoryAVI
DESCRIPTION : Get the working directory of the "AVI" module
INPUT :
OUTPUT : char*
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
char *VDO_fn_szGetDirectoryAVI(void)
{
    return( gstrDir );
}/*VDO_fn_szGetDirectoryAVI*/

/* ##F===================================================================================
NAME : VDO_fn_xSetGraphicEnvironment
DESCRIPTION : Init the graphical data (device and viewport handles)
INPUT : GLD_tdhDevice hDevice
        GLD_tdhViewport hViewPort
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xSetGraphicEnvironment( GLD_tdhDevice hDevice, GLD_tdhViewport hViewPort )
{
#ifndef _USE_WINDOWS_DISPLAY//====================== BEGIN _USE_WINDOWS_DISPLAY =================================

#ifndef VDO_FOR_F1//================================ BEGIN VDO_FOR_F1 =================================
    ghDevice = hDevice;
    ghViewPort = hViewPort;
#endif//============================================ END VDO_FOR_F1 =================================

#endif//============================================ END _USE_WINDOWS_DISPLAY =================================
}/*VDO_fn_xSetGraphicEnvironment*/

/* ##F===================================================================================
NAME : VDO_fn_xStopVideoAVI
DESCRIPTION : Stop the AVI
INPUT :
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void VDO_fn_xStopVideoAVI( )
{
    //=======================================================================================
    // ENTRANCE IN CRITICAL SECTION (Avoid interference between stop and play)
    EnterCriticalSection( & gCriticalSectionPlay );

    if( gbInitDone ) {
        // In the case where we are paused
        if( gbPauseDone ) {
            VDO_fn_xResumeVideoAVI( );
        }
        switch( gnType ) {
        case AVI_SYNCH_OBJECT:
            if( gnNbAudioStream == 0 ) {
                gbFlagStop = TRUE;
                // Wait for the end of the task
                WaitForSingleObject( ghThread, INFINITE );
            }
            break;
        case AVI_SYNCH_STREAM:
        case AVI_SYNCH_NOSOUND: // This case is handled like the stream case
            gbFlagStop = TRUE;
            // Wait for the end of the task
            WaitForSingleObject( ghThread, INFINITE );
            break;
        }
    }

    LeaveCriticalSection( & gCriticalSectionPlay );
    // EXIT OF CRITICAL SECTION
    //=======================================================================================
}/*VDO_fn_xStopVideoAVI*/
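
// The stop path relies on the classic "flag then join" pattern: gbFlagStop is tested at the bottom
// of the playback loop ( while( gbAudioThread && ! gbFlagStop ) ), so the thread exits at the next
// iteration and WaitForSingleObject( ghThread, INFINITE ) returns once it has finished.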

/* ##F===================================================================================
NAME : AviCompleteBufferClientCallback
DESCRIPTION : Callback in charge of filling the sound client buffers
INPUT : long nID
        unsigned long nFirstSample
        unsigned long nNb
        void* pvFirstSample
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void SND_CALL AviCompleteBufferClientCallback( long nID, unsigned long nFirstSample, unsigned long nNb, void* pvFirstSample )
{
    unsigned long nNbSamples;

    nNbSamples = gnSizeSampleConvertAllBuffers - gnConvertPosition % gnSizeSampleConvertAllBuffers;
    // Refresh the client buffer
    if( nNbSamples >= nNb ) {
        memcpy( pvFirstSample,
                gpAcmConvertData + ( gnConvertPosition % gnSizeSampleConvertAllBuffers ) * gWaveFormatexOut.nBlockAlign,
                nNb * gWaveFormatexOut.nBlockAlign );
    }
    else {
        memcpy( pvFirstSample,
                gpAcmConvertData + ( gnConvertPosition % gnSizeSampleConvertAllBuffers ) * gWaveFormatexOut.nBlockAlign,
                nNbSamples * gWaveFormatexOut.nBlockAlign );
        memcpy( (char*)pvFirstSample + nNbSamples * gWaveFormatexOut.nBlockAlign,
                gpAcmConvertData,
                ( nNb - nNbSamples ) * gWaveFormatexOut.nBlockAlign );
    }
    // Advance the positions of the different buffers
    EnterCriticalSection( & gCriticalSection );
    gnConvertPosition += nNb; // Position of the converted data
    gnAviReadPosition += nNb; // Position of the data inside the AVI file
    gnVideoPosition += nNb;   // Position of the video data
    LeaveCriticalSection( & gCriticalSection );
}/*AviCompleteBufferClientCallback*/
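
// gpAcmConvertData is used as a circular buffer, hence the split copy above. For illustration
// (hypothetical numbers): with gnSizeSampleConvertAllBuffers = 4096, gnConvertPosition = 4000 and
// a request of nNb = 200 samples, only 4096 - 4000 = 96 samples remain before the end of the
// buffer, so the callback copies 96 samples from offset 4000 and the remaining 104 samples from
// offset 0, each scaled by nBlockAlign bytes per sample.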

/* ##F===================================================================================
NAME : AviAcmDriverCallback
DESCRIPTION : ACM driver enumeration callback: keeps the first codec/converter driver able
              to convert the AVI sound format to the requested PCM format
INPUT : HACMDRIVERID hAcmDrvId
        DWORD dwInstance
        DWORD dwSupport
OUTPUT : BOOL (FALSE stops the enumeration)
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
BOOL CALLBACK AviAcmDriverCallback( HACMDRIVERID hAcmDrvId, DWORD dwInstance, DWORD dwSupport )
{
    unsigned long nResult;
    static HACMDRIVER hAcmDrv;
    static WAVEFORMATEX wfx;
    static ACMDRIVERDETAILS AcmDrvDetail;

    if( dwSupport & ( ACMDRIVERDETAILS_SUPPORTF_CODEC | ACMDRIVERDETAILS_SUPPORTF_CONVERTER ) ) {

        nResult = acmDriverOpen( & hAcmDrv,
                                 hAcmDrvId,
                                 0 );

        // Fill a structure to ask the driver for a suggested destination format
        memset( & wfx, 0, sizeof(WAVEFORMATEX) );
        wfx.wFormatTag = WAVE_FORMAT_PCM;
        nResult = acmFormatSuggest( hAcmDrv,
                                    gpWaveFormatexIn,
                                    & wfx,
                                    sizeof(WAVEFORMATEX),
                                    ACM_FORMATSUGGESTF_WFORMATTAG );

        if( ( wfx.nChannels == gWaveFormatexOut.nChannels ) &&
            ( wfx.nBlockAlign == gWaveFormatexOut.nBlockAlign ) &&
            ( wfx.nSamplesPerSec == gWaveFormatexOut.nSamplesPerSec ) ) {

            memset( & AcmDrvDetail, 0, sizeof(ACMDRIVERDETAILS) );
            AcmDrvDetail.cbStruct = sizeof(ACMDRIVERDETAILS);

            acmDriverDetails( hAcmDrvId,
                              & AcmDrvDetail,
                              0 );

            /* _RPT0( _CRT_WARN, "************************************\n" );
            _RPT4( _CRT_WARN, "Driver description:\n%s\n%s\n%s\n%s",
                   AcmDrvDetail.szShortName,
                   AcmDrvDetail.szLongName,
                   AcmDrvDetail.szCopyright,
                   AcmDrvDetail.szLicensing );
            _RPT1( _CRT_WARN, "%s\n",
                   AcmDrvDetail.szFeatures );*/
            ghAcmDriver = hAcmDrv;

            return FALSE;
        }
        else {
            acmDriverClose( hAcmDrv,
                            0 );
            return TRUE;
        }
    }
    return TRUE;
}/*AviAcmDriverCallback*/
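
// This callback matches the ACMDRIVERENUMCB signature, so the enumeration (performed elsewhere in
// the module) presumably looks like the sketch below; the zero instance data and flags are
// assumptions made for illustration:
//
//     acmDriverEnum( AviAcmDriverCallback, 0, 0 );   // walks the installed ACM drivers until
//                                                    // the callback returns FALSE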

/* ##F===================================================================================
NAME : AviRetObjetSonorePosVDO
DESCRIPTION : Position/velocity callback of the VDO sound object type registered by
              VDO_fn_xInitAVI (always returns 0)
INPUT : long indice
OUTPUT : SndReal
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
SndReal AviRetObjetSonorePosVDO( long indice )
{
    return 0;
}/*AviRetObjetSonorePosVDO*/

/* ##F===================================================================================
NAME : AviRetObjetSonoreReverbVDO
DESCRIPTION : Reverb callback of the VDO sound object type (always returns 0)
INPUT : long indice
OUTPUT : unsigned char
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
unsigned char AviRetObjetSonoreReverbVDO( long indice )
{
    return 0;
}/*AviRetObjetSonoreReverbVDO*/

/* ##F===================================================================================
NAME : AviRetObjetSonoreExtraCoefVDO
DESCRIPTION : Extra coefficient callback of the VDO sound object type (does nothing)
INPUT : long indice
        SND_tduRefEvt evt
        SndReal* pitch
        SndReal* pan
        SndReal* vol
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void AviRetObjetSonoreExtraCoefVDO( long indice, SND_tduRefEvt evt, SndReal* pitch, SndReal* pan, SndReal* vol )
{
}/*AviRetObjetSonoreExtraCoefVDO*/

/* ##F===================================================================================
NAME : AviRetObjetSonoreSwitchVDO
DESCRIPTION : Switch callback of the VDO sound object type (always returns 0)
INPUT : long indice
        long type_switch
OUTPUT : long
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
long AviRetObjetSonoreSwitchVDO( long indice, long type_switch )
{
    return 0;
}/*AviRetObjetSonoreSwitchVDO*/

/* ##F===================================================================================
NAME : AviRetObjetSonoreMicroLinkVDO
DESCRIPTION : Microphone link callback of the VDO sound object type (always returns TRUE)
INPUT : long indice
        long micro
OUTPUT : SndBool
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
SndBool AviRetObjetSonoreMicroLinkVDO( long indice, long micro )
{
    return TRUE;
}/*AviRetObjetSonoreMicroLinkVDO*/

/* ##F===================================================================================
NAME : AviRetObjetSonoreInfoVDO
DESCRIPTION : Info callback of the VDO sound object type: writes a short description of the
              object into the provided buffer
INPUT : long indice
        char* texte
        long size
OUTPUT :
=========================================================================================
CREATION : Nicolas Meyer
LAST MODIFICATIONS : Date/Author/Modification (5 maximum)
=======================================================================================*/
void AviRetObjetSonoreInfoVDO( long indice, char* texte, long size )
{
    char tmp[ 32 ];

    sprintf( tmp, "Obj VDO %ld", indice );
    memset( texte, 0, size );
    strncpy( texte, tmp, size - 1 );
}/*AviRetObjetSonoreInfoVDO*/