author    | llornkcor <llornkcor> | 2002-04-21 16:13:30 (UTC)
committer | llornkcor <llornkcor> | 2002-04-21 16:13:30 (UTC)
commit    | 392eb0350465565e2a849ada60e6734be025787b (patch) (side-by-side diff)
tree      | 44727c201e6ef8e4d61d6011180d41b46c8f4ea4
parent    | d7ff40e1530a1aab578a3d3c3126c4367cf3e898 (diff)
download  | opie-392eb0350465565e2a849ada60e6734be025787b.zip
          | opie-392eb0350465565e2a849ada60e6734be025787b.tar.gz
          | opie-392eb0350465565e2a849ada60e6734be025787b.tar.bz2
fix unfixed stream stop
-rw-r--r-- | core/multimedia/opieplayer/audiodevice.cpp | 1
-rw-r--r-- | core/multimedia/opieplayer/loopcontrol.cpp | 2
2 files changed, 2 insertions, 1 deletion
diff --git a/core/multimedia/opieplayer/audiodevice.cpp b/core/multimedia/opieplayer/audiodevice.cpp
index 2087c7f..ad44abb 100644
--- a/core/multimedia/opieplayer/audiodevice.cpp
+++ b/core/multimedia/opieplayer/audiodevice.cpp
@@ -59,311 +59,312 @@
[...]
 AudioDevice::~AudioDevice() {
+	qDebug("destryo audiodevice");
 #ifdef Q_OS_WIN32
 	waveOutClose( (HWAVEOUT)d->handle );
 #else
 # ifndef KEEP_DEVICE_OPEN
 	close( d->handle );	// Now it should be safe to shut the handle
 # endif
 	delete d->unwrittenBuffer;
 	delete d;
 #endif
 }
[...]
diff --git a/core/multimedia/opieplayer/loopcontrol.cpp b/core/multimedia/opieplayer/loopcontrol.cpp
index 01596a0..7005886 100644
--- a/core/multimedia/opieplayer/loopcontrol.cpp
+++ b/core/multimedia/opieplayer/loopcontrol.cpp
@@ -59,385 +59,385 @@
[...]
 			audioDevice->write( audioBuffer, samplesRead * 2 * channels );
-//			if( mediaPlayerState->isStreaming == FALSE)
+			if( mediaPlayerState->isStreaming == FALSE)
 				audioSampleCounter = currentSample + samplesRead - 1;
 			moreAudio = readOk && (audioSampleCounter <= total_audio_samples);
 		} else {
 			moreAudio = FALSE;
 		}
 	}
 	audioMutex->unlock();
 }
[...]
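The loopcontrol.cpp hunk re-enables the mediaPlayerState->isStreaming guard around the sample-counter update in LoopControl::startAudio(), so the counter, and with it the audioSampleCounter <= total_audio_samples stop test, only advances for local files. The sketch below is a minimal, self-contained illustration of that stop condition, not Opie code: FakeDecoder, stepAudio, and the sample values are made-up stand-ins, and it assumes total_audio_samples is simply unknown (zero) for a stream.

```cpp
// Standalone sketch of the stop condition touched by the loopcontrol.cpp change.
// Everything here is illustrative; only the two lines inside stepAudio() mirror
// the patched code.
#include <cstdio>

struct FakeDecoder {
    bool readOk = true;      // pretend the decoder keeps returning data
    long samplesRead = 1024; // samples delivered per call
};

// Mirrors the relevant lines of LoopControl::startAudio(): the sample counter
// is only advanced for local files; for a stream it is left alone, so the
// "played past the known total" test cannot end playback on its own.
bool stepAudio( bool isStreaming, long &audioSampleCounter,
                long currentSample, long totalAudioSamples,
                const FakeDecoder &dec )
{
    if ( !isStreaming )                                   // the re-enabled check
        audioSampleCounter = currentSample + dec.samplesRead - 1;

    // moreAudio stays true while the decoder delivers data and, for local
    // files, while the counter has not passed the known sample count.
    return dec.readOk && ( audioSampleCounter <= totalAudioSamples );
}

int main()
{
    FakeDecoder dec;
    long counter = 0;

    // Local file: the counter advances past the total, so playback stops.
    bool moreLocal = stepAudio( false, counter, /*currentSample*/ 2048,
                                /*totalAudioSamples*/ 1000, dec );
    std::printf( "local file keeps playing: %s\n", moreLocal ? "yes" : "no" );

    // Stream: the total is unknown (0 here) and the counter is not advanced,
    // so the sample-count test does not stop playback.
    counter = 0;
    bool moreStream = stepAudio( true, counter, 2048, 0, dec );
    std::printf( "stream keeps playing: %s\n", moreStream ? "yes" : "no" );
}
```

Under those assumptions, a local file stops once the counter passes the known total, while a stream is only stopped when the decoder itself stops returning data.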