-rw-r--r--   core/multimedia/opieplayer/audiodevice.cpp |  8 ++++++++
-rw-r--r--   core/multimedia/opieplayer/loopcontrol.cpp | 14 +++++++-------
2 files changed, 15 insertions(+), 7 deletions(-)
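
Note: the audiodevice.cpp hunk below brackets the AudioDevice constructor and destructor with QCop "volumeChange(bool)" broadcasts -- TRUE before /dev/dsp is opened or torn down, FALSE once the device is in its new state -- presumably so output is treated as muted while the OSS device changes state (the file itself comments on click artifacts). A minimal sketch of that bracket pattern, assuming only what the hunk shows; openDspMuted() is a hypothetical helper, not code from the patch:

    // Sketch only -- illustrates the mute/unmute bracket the patch adds
    // around device transitions; openDspMuted() is hypothetical.
    #include <fcntl.h>
    #include <qpe/qcopenvelope_qws.h>

    static int openDspMuted()
    {
        QCopEnvelope( "QPE/System", "volumeChange(bool)" ) << TRUE;   // mute while the device changes state
        int fd = ::open( "/dev/dsp", O_WRONLY );                      // the transition that may click or pop
        QCopEnvelope( "QPE/System", "volumeChange(bool)" ) << FALSE;  // back to normal volume handling
        return fd;
    }

In the patch itself the FALSE message is sent at the end of the constructor and destructor, after the OSS ioctl setup or teardown has completed.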
diff --git a/core/multimedia/opieplayer/audiodevice.cpp b/core/multimedia/opieplayer/audiodevice.cpp
index ad44abb..e0989d0 100644
--- a/core/multimedia/opieplayer/audiodevice.cpp
+++ b/core/multimedia/opieplayer/audiodevice.cpp
@@ -14,357 +14,365 @@
 ** See http://www.trolltech.com/gpl/ for GPL licensing information.
 **
 ** Contact info@trolltech.com if any conditions of this licensing are
 ** not clear to you.
 **
 **********************************************************************/
 // L.J.Potter added better error code Fri 02-15-2002 14:37:47
 
 #include <stdlib.h>
 #include <stdio.h>
 #include <qpe/qpeapplication.h>
 #include <qpe/config.h>
 #include "audiodevice.h"
 
 #if ( defined Q_WS_QWS || defined(_WS_QWS_) ) && !defined(QT_NO_COP)
 #include "qpe/qcopenvelope_qws.h"
 #endif
 
 #ifdef Q_WS_WIN
 #include <windows.h>
 #include <mmsystem.h>
 #include <mmreg.h>
 #endif
 
 #if defined(Q_WS_X11) || defined(Q_WS_QWS)
 #include <fcntl.h>
 #include <sys/ioctl.h>
 #include <sys/soundcard.h>
 #include <sys/stat.h>
 #include <sys/time.h>
 #include <sys/types.h>
 #include <unistd.h>
 #endif
 
 #if defined(Q_OS_WIN32)
 static const int expectedBytesPerMilliSecond = 2 * 2 * 44000 / 1000;
 static const int timerResolutionMilliSeconds = 30;
 static const int sound_fragment_bytes = timerResolutionMilliSeconds * expectedBytesPerMilliSecond;
 #else
 # if defined(QT_QWS_IPAQ)
 static const int sound_fragment_shift = 14;
 # else
 static const int sound_fragment_shift = 16;
 # endif
 static const int sound_fragment_bytes = (1<<sound_fragment_shift);
 #endif
 
 
 class AudioDevicePrivate {
 public:
     int handle;
     unsigned int frequency;
     unsigned int channels;
     unsigned int bytesPerSample;
     unsigned int bufferSize;
 #ifndef Q_OS_WIN32
     bool can_GETOSPACE;
     char* unwrittenBuffer;
     unsigned int unwritten;
 #endif
 
     static int dspFd;
     static bool muted;
     static unsigned int leftVolume;
     static unsigned int rightVolume;
 };
 
 
 #ifdef Q_WS_QWS
 // This is for keeping the device open in-between playing files when
 // the device makes clicks and it starts to drive you insane! :)
 // Best to have the device not open when not using it though
 //#define KEEP_DEVICE_OPEN
 #endif
 
 
 int AudioDevicePrivate::dspFd = 0;
 bool AudioDevicePrivate::muted = FALSE;
 unsigned int AudioDevicePrivate::leftVolume = 0;
 unsigned int AudioDevicePrivate::rightVolume = 0;
 
 
 void AudioDevice::getVolume( unsigned int& leftVolume, unsigned int& rightVolume, bool &muted ) {
     muted = AudioDevicePrivate::muted;
     unsigned int volume;
 #ifdef Q_OS_WIN32
     HWAVEOUT handle;
     WAVEFORMATEX formatData;
     formatData.cbSize = sizeof(WAVEFORMATEX);
     formatData.wFormatTag = WAVE_FORMAT_PCM;
     formatData.nAvgBytesPerSec = 4 * 44000;
     formatData.nBlockAlign = 4;
     formatData.nChannels = 2;
     formatData.nSamplesPerSec = 44000;
     formatData.wBitsPerSample = 16;
     waveOutOpen(&handle, WAVE_MAPPER, &formatData, 0L, 0L, CALLBACK_NULL);
     if ( waveOutGetVolume( handle, (LPDWORD)&volume ) )
         // qDebug( "get volume of audio device failed" );
     waveOutClose( handle );
     leftVolume = volume & 0xFFFF;
     rightVolume = volume >> 16;
 #else
     int mixerHandle = open( "/dev/mixer", O_RDWR );
     if ( mixerHandle >= 0 ) {
         if(ioctl( mixerHandle, MIXER_READ(0), &volume )==-1)
             perror("ioctl(\"MIXER_READ\")");
         close( mixerHandle );
     } else
         perror("open(\"/dev/mixer\")");
     leftVolume = ((volume & 0x00FF) << 16) / 101;
     rightVolume = ((volume & 0xFF00) << 8) / 101;
 #endif
 }
 
 
 void AudioDevice::setVolume( unsigned int leftVolume, unsigned int rightVolume, bool muted ) {
     AudioDevicePrivate::muted = muted;
     if ( muted ) {
         AudioDevicePrivate::leftVolume = leftVolume;
         AudioDevicePrivate::rightVolume = rightVolume;
         leftVolume = 0;
         rightVolume = 0;
     } else {
         leftVolume = ( (int) leftVolume < 0 ) ? 0 : (( leftVolume > 0xFFFF ) ? 0xFFFF : leftVolume );
         rightVolume = ( (int)rightVolume < 0 ) ? 0 : (( rightVolume > 0xFFFF ) ? 0xFFFF : rightVolume );
     }
 #ifdef Q_OS_WIN32
     HWAVEOUT handle;
     WAVEFORMATEX formatData;
     formatData.cbSize = sizeof(WAVEFORMATEX);
     formatData.wFormatTag = WAVE_FORMAT_PCM;
     formatData.nAvgBytesPerSec = 4 * 44000;
     formatData.nBlockAlign = 4;
     formatData.nChannels = 2;
     formatData.nSamplesPerSec = 44000;
     formatData.wBitsPerSample = 16;
     waveOutOpen(&handle, WAVE_MAPPER, &formatData, 0L, 0L, CALLBACK_NULL);
     unsigned int volume = (rightVolume << 16) | leftVolume;
     if ( waveOutSetVolume( handle, volume ) )
         // qDebug( "set volume of audio device failed" );
     waveOutClose( handle );
 #else
     // Volume can be from 0 to 100 which is 101 distinct values
     unsigned int rV = (rightVolume * 101) >> 16;
 
 # if 0
     unsigned int lV = (leftVolume * 101) >> 16;
     unsigned int volume = ((rV << 8) & 0xFF00) | (lV & 0x00FF);
     int mixerHandle = 0;
     if ( ( mixerHandle = open( "/dev/mixer", O_RDWR ) ) >= 0 ) {
         if(ioctl( mixerHandle, MIXER_WRITE(0), &volume ) ==-1)
             perror("ioctl(\"MIXER_WRITE\")");
         close( mixerHandle );
     } else
         perror("open(\"/dev/mixer\")");
 
 # else
     // This is the way this has to be done now I guess, doesn't allow for
     // independant right and left channel setting, or setting for different outputs
     Config cfg("Sound");
     cfg.setGroup("System");
     cfg.writeEntry("Volume",(int)rV);
 # endif
 
 #endif
     // qDebug( "setting volume to: 0x%x", volume );
 #if ( defined Q_WS_QWS || defined(_WS_QWS_) ) && !defined(QT_NO_COP)
     // Send notification that the volume has changed
     QCopEnvelope( "QPE/System", "volumeChange(bool)" ) << muted;
 #endif
 }
 
 
 
 
 AudioDevice::AudioDevice( unsigned int f, unsigned int chs, unsigned int bps ) {
     qDebug("creating new audio device");
     d = new AudioDevicePrivate;
     d->frequency = f;
     d->channels = chs;
     d->bytesPerSample = bps;
     qDebug("%d",bps);
     int format=0;
     if( bps == 8) format = AFMT_U8;
     else if( bps <= 0) format = AFMT_S16_LE;
     else format = AFMT_S16_LE;
     qDebug("AD- freq %d, channels %d, b/sample %d, bitrate %d",f,chs,bps,format);
     connect( qApp, SIGNAL( volumeChanged(bool) ), this, SLOT( volumeChanged(bool) ) );
 
     int fragments = 0x10000 * 8 + sound_fragment_shift;
     int capabilities = 0;
 
+    QCopEnvelope( "QPE/System", "volumeChange(bool)" ) << TRUE;
+
 #ifdef KEEP_DEVICE_OPEN
     if ( AudioDevicePrivate::dspFd == 0 ) {
 #endif
         if ( ( d->handle = ::open( "/dev/dsp", O_WRONLY ) ) < 0 ) {
             perror("open(\"/dev/dsp\") sending to /dev/null instead");
             d->handle = ::open( "/dev/null", O_WRONLY );
         }
 #ifdef KEEP_DEVICE_OPEN
         AudioDevicePrivate::dspFd = d->handle;
     } else {
         d->handle = AudioDevicePrivate::dspFd;
     }
 #endif
 
     if(ioctl( d->handle, SNDCTL_DSP_GETCAPS, &capabilities )==-1)
         perror("ioctl(\"SNDCTL_DSP_GETCAPS\")");
     if(ioctl( d->handle, SNDCTL_DSP_SETFRAGMENT, &fragments )==-1)
         perror("ioctl(\"SNDCTL_DSP_SETFRAGMENT\")");
     if(ioctl( d->handle, SNDCTL_DSP_SETFMT, & format )==-1)
         perror("ioctl(\"SNDCTL_DSP_SETFMT\")");
     qDebug("freq %d", d->frequency);
     if(ioctl( d->handle, SNDCTL_DSP_SPEED, &d->frequency )==-1)
         perror("ioctl(\"SNDCTL_DSP_SPEED\")");
     qDebug("channels %d",d->channels);
     if ( ioctl( d->handle, SNDCTL_DSP_CHANNELS, &d->channels ) == -1 ) {
         d->channels = ( d->channels == 1 ) ? 2 : d->channels;
         if(ioctl( d->handle, SNDCTL_DSP_CHANNELS, &d->channels )==-1)
             perror("ioctl(\"SNDCTL_DSP_CHANNELS\")");
     }
 
     d->bufferSize = sound_fragment_bytes;
     d->unwrittenBuffer = new char[d->bufferSize];
     d->unwritten = 0;
     d->can_GETOSPACE = TRUE; // until we find otherwise
 
     //if ( chs != d->channels ) qDebug( "Wanted %d, got %d channels", chs, d->channels );
     //if ( f != d->frequency ) qDebug( "wanted %dHz, got %dHz", f, d->frequency );
     //if ( capabilities & DSP_CAP_BATCH ) qDebug( "Sound card has local buffer" );
     //if ( capabilities & DSP_CAP_REALTIME )qDebug( "Sound card has realtime sync" );
     //if ( capabilities & DSP_CAP_TRIGGER ) qDebug( "Sound card has precise trigger" );
     //if ( capabilities & DSP_CAP_MMAP ) qDebug( "Sound card can mmap" );
+    QCopEnvelope( "QPE/System", "volumeChange(bool)" ) << FALSE;
+
 }
 
 
 AudioDevice::~AudioDevice() {
     qDebug("destryo audiodevice");
+    QCopEnvelope( "QPE/System", "volumeChange(bool)" ) << TRUE;
+
 #ifdef Q_OS_WIN32
     waveOutClose( (HWAVEOUT)d->handle );
 #else
 # ifndef KEEP_DEVICE_OPEN
     close( d->handle ); // Now it should be safe to shut the handle
 # endif
     delete d->unwrittenBuffer;
     delete d;
 #endif
+    QCopEnvelope( "QPE/System", "volumeChange(bool)" ) << FALSE;
+
 }
 
 
 void AudioDevice::volumeChanged( bool muted )
 {
     AudioDevicePrivate::muted = muted;
 }
 
 
 void AudioDevice::write( char *buffer, unsigned int length )
 {
 #ifdef Q_OS_WIN32
     // returns immediately and (to be implemented) emits completedIO() when finished writing
     WAVEHDR *lpWaveHdr = (WAVEHDR *)malloc( sizeof(WAVEHDR) );
     // maybe the buffer should be copied so that this fool proof, but its a performance hit
     lpWaveHdr->lpData = buffer;
     lpWaveHdr->dwBufferLength = length;
     lpWaveHdr->dwFlags = 0L;
     lpWaveHdr->dwLoops = 0L;
     waveOutPrepareHeader( (HWAVEOUT)d->handle, lpWaveHdr, sizeof(WAVEHDR) );
     // waveOutWrite returns immediately. the data is sent in the background.
     if ( waveOutWrite( (HWAVEOUT)d->handle, lpWaveHdr, sizeof(WAVEHDR) ) )
         qDebug( "failed to write block to audio device" );
     // emit completedIO();
 #else
     int t = ::write( d->handle, buffer, length );
     if ( t<0 ) t = 0;
     if ( t != (int)length) {
         qDebug("Ahhh!! memcpys 1");
         memcpy(d->unwrittenBuffer,buffer+t,length-t);
         d->unwritten = length-t;
     }
 #endif
 }
 
 
 unsigned int AudioDevice::channels() const
 {
     return d->channels;
 }
 
 
 unsigned int AudioDevice::frequency() const
 {
     return d->frequency;
 }
 
 
 unsigned int AudioDevice::bytesPerSample() const
 {
     return d->bytesPerSample;
 }
 
 
 unsigned int AudioDevice::bufferSize() const
 {
     return d->bufferSize;
 }
 
 unsigned int AudioDevice::canWrite() const
 {
 #ifdef Q_OS_WIN32
     return bufferSize(); // Any better?
 #else
     audio_buf_info info;
     if ( d->can_GETOSPACE && ioctl(d->handle,SNDCTL_DSP_GETOSPACE,&info) ) {
         d->can_GETOSPACE = FALSE;
         fcntl( d->handle, F_SETFL, O_NONBLOCK );
     }
     if ( d->can_GETOSPACE ) {
         int t = info.fragments * sound_fragment_bytes;
         return QMIN(t,(int)bufferSize());
     } else {
         if ( d->unwritten ) {
             int t = ::write( d->handle, d->unwrittenBuffer, d->unwritten );
             if ( t<0 ) t = 0;
             if ( (unsigned)t!=d->unwritten ) {
                 memcpy(d->unwrittenBuffer,d->unwrittenBuffer+t,d->unwritten-t);
                 d->unwritten -= t;
             } else {
                 d->unwritten = 0;
             }
         }
         if ( d->unwritten )
             return 0;
         else
             return d->bufferSize;
     }
 #endif
 }
 
 
 int AudioDevice::bytesWritten() {
 #ifdef Q_OS_WIN32
     MMTIME pmmt = { TIME_BYTES, 0 };
     if ( ( waveOutGetPosition( (HWAVEOUT)d->handle, &pmmt, sizeof(MMTIME) ) != MMSYSERR_NOERROR ) || ( pmmt.wType != TIME_BYTES ) ) {
         qDebug( "failed to get audio device position" );
         return -1;
     }
     return pmmt.u.cb;
 #else
     int buffered = 0;
     if ( ioctl( d->handle, SNDCTL_DSP_GETODELAY, &buffered ) ) {
         qDebug( "failed to get audio device position" );
         return -1;
     }
     return buffered;
 #endif
 }
 
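
Note: the loopcontrol.cpp hunk below re-enables the resynchronisation block in LoopControl::startAudio() that had been commented out (the old form is still visible in the removed lines). A rough, hedged sketch of that rule as a hypothetical standalone helper; resyncAudio() and its parameter names are illustrative, while the 2000/20000/-5000 sample thresholds and the sleep formula come from the hunk itself:

    // Sketch only -- the catch-up rule the patch re-enables, not new behaviour.
    #include <unistd.h>

    static long resyncAudio( long elapsedMs, long currentSample, long freq )
    {
        long sampleWeShouldBeAt = elapsedMs * freq / 1000;               // where the wall clock says playback should be
        long sampleWaitTime = currentSample - sampleWeShouldBeAt;        // positive: decoder is ahead of the clock

        if ( sampleWaitTime > 2000 && sampleWaitTime < 20000 )
            usleep( (long)((double)sampleWaitTime * 1000000.0 / freq) ); // slightly ahead: sleep until the clock catches up
        else if ( sampleWaitTime <= -5000 )
            currentSample = sampleWeShouldBeAt;                          // far behind: jump forward rather than play late samples

        return currentSample;                                            // possibly adjusted decoder position
    }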
diff --git a/core/multimedia/opieplayer/loopcontrol.cpp b/core/multimedia/opieplayer/loopcontrol.cpp
index 6f86b4a..3171c4b 100644
--- a/core/multimedia/opieplayer/loopcontrol.cpp
+++ b/core/multimedia/opieplayer/loopcontrol.cpp
@@ -48,392 +48,392 @@ extern MediaPlayerState *mediaPlayerState;
 static char *audioBuffer = NULL;
 static AudioDevice *audioDevice = NULL;
 static bool disabledSuspendScreenSaver = FALSE;
 static bool previousSuspendMode = FALSE;
 
 
 pthread_t audio_tid;
 pthread_attr_t audio_attr;
 bool threadOkToGo = FALSE;
 
 
 class Mutex {
 public:
     Mutex() {
         pthread_mutexattr_t attr;
         pthread_mutexattr_init( &attr );
         pthread_mutex_init( &mutex, &attr );
         pthread_mutexattr_destroy( &attr );
     }
 
     ~Mutex() {
         pthread_mutex_destroy( &mutex );
     }
 
     void lock() {
         pthread_mutex_lock( &mutex );
     }
 
     void unlock() {
         pthread_mutex_unlock( &mutex );
     }
 private:
     pthread_mutex_t mutex;
 };
 
 
 void *startAudioThread( void *ptr ) {
     LoopControl *mpegView = (LoopControl *)ptr;
     while ( TRUE ) {
         if ( threadOkToGo && mpegView->moreAudio )
             mpegView->startAudio();
         else
             usleep( 10000 ); // Semi-buzy-wait till we are playing again
     }
     return 0;
 }
 
 
 Mutex *audioMutex;
 
 
 LoopControl::LoopControl( QObject *parent, const char *name )
     : QObject( parent, name ) {
     isMuted = FALSE;
     connect( qApp, SIGNAL( volumeChanged(bool) ), this, SLOT( setMute(bool) ) );
     //qDebug("starting loopcontrol");
     audioMutex = new Mutex;
 
     pthread_attr_init(&audio_attr);
 #define USE_REALTIME_AUDIO_THREAD
 #ifdef USE_REALTIME_AUDIO_THREAD
     // Attempt to set it to real-time round robin
     if ( pthread_attr_setschedpolicy( &audio_attr, SCHED_RR ) == 0 ) {
         sched_param params;
         params.sched_priority = 50;
         pthread_attr_setschedparam(&audio_attr,&params);
     } else {
         qDebug( "Error setting up a realtime thread, reverting to using a normal thread." );
         pthread_attr_destroy(&audio_attr);
         pthread_attr_init(&audio_attr);
     }
 #endif
     //qDebug("create audio thread");
     pthread_create(&audio_tid, &audio_attr, (void * (*)(void *))startAudioThread, this);
 }
 
 
 LoopControl::~LoopControl() {
     stop();
 }
 
 
 static long prev_frame = 0;
 static int currentSample = 0;
 
 
 void LoopControl::timerEvent( QTimerEvent *te ) {
 
     if ( te->timerId() == videoId )
         startVideo();
 
     if ( te->timerId() == sliderId ) {
         if ( hasAudioChannel && !hasVideoChannel && moreAudio ) {
             mediaPlayerState->updatePosition( audioSampleCounter );
         } else if ( hasVideoChannel && moreVideo ) {
             mediaPlayerState->updatePosition( current_frame );
         }
     }
 
     if ( !moreVideo && !moreAudio ) {
         mediaPlayerState->setPlaying( FALSE );
         mediaPlayerState->setNext();
     }
 }
 
 
 void LoopControl::setPosition( long pos ) {
     audioMutex->lock();
 
     if ( hasVideoChannel && hasAudioChannel ) {
         playtime.restart();
         playtime = playtime.addMSecs( long((double)-pos * 1000.0 / framerate) );
         current_frame = pos + 1;
         mediaPlayerState->curDecoder()->videoSetFrame( current_frame, stream );
         prev_frame = current_frame - 1;
         currentSample = (int)( (double)current_frame * freq / framerate );
         mediaPlayerState->curDecoder()->audioSetSample( currentSample, stream );
         audioSampleCounter = currentSample - 1;
     } else if ( hasVideoChannel ) {
         playtime.restart();
         playtime = playtime.addMSecs( long((double)-pos * 1000.0 / framerate) );
         current_frame = pos + 1;
         mediaPlayerState->curDecoder()->videoSetFrame( current_frame, stream );
         prev_frame = current_frame - 1;
     } else if ( hasAudioChannel ) {
         playtime.restart();
         playtime = playtime.addMSecs( long((double)-pos * 1000.0 / freq) );
         currentSample = pos + 1;
         mediaPlayerState->curDecoder()->audioSetSample( currentSample, stream );
         audioSampleCounter = currentSample - 1;
     }
 
     audioMutex->unlock();
 }
 
 
 void LoopControl::startVideo() {
 
     if ( moreVideo ) {
 
         if ( mediaPlayerState->curDecoder() ) {
 
             if ( hasAudioChannel && !isMuted ) {
 
                 current_frame = long( playtime.elapsed() * framerate / 1000 );
 
                 if ( prev_frame != -1 && current_frame <= prev_frame )
                     return;
 
             } else {
                 // Don't skip
                 current_frame++;
             }
 
             if ( prev_frame == -1 || current_frame > prev_frame ) {
                 if ( current_frame > prev_frame + 1 ) {
                     mediaPlayerState->curDecoder()->videoSetFrame( current_frame, stream );
                 }
                 moreVideo = videoUI->playVideo();
                 prev_frame = current_frame;
             }
 
         } else {
 
             moreVideo = FALSE;
             killTimer( videoId );
 
         }
 
     }
 }
 
 
 void LoopControl::startAudio() {
 
     //qDebug("start audio");
     audioMutex->lock();
     if ( moreAudio ) {
 
         if ( !isMuted && mediaPlayerState->curDecoder() ) {
 
             currentSample = audioSampleCounter + 1;
 
             if ( currentSample != audioSampleCounter + 1 )
                 qDebug("out of sync with decoder %i %i", currentSample, audioSampleCounter);
 
             long samplesRead = 0;
             bool readOk=mediaPlayerState->curDecoder()->audioReadSamples( (short*)audioBuffer, channels, 1024, samplesRead, stream );
             long sampleWeShouldBeAt = long( playtime.elapsed() ) * freq / 1000;
             long sampleWaitTime = currentSample - sampleWeShouldBeAt;
 
             // this causes drop outs not sure why its even here
-            // if ( ( sampleWaitTime > 2000 ) && ( sampleWaitTime < 20000 ) ) {
-            //     usleep( (long)((double)sampleWaitTime * 1000000.0 / freq) );
-            // }
-            // else if ( sampleWaitTime <= -5000 ) {
-            //     qDebug("need to catch up by: %li (%i,%li)", -sampleWaitTime, currentSample, sampleWeShouldBeAt );
+            if ( ( sampleWaitTime > 2000 ) && ( sampleWaitTime < 20000 ) ) {
+                usleep( (long)((double)sampleWaitTime * 1000000.0 / freq) );
+            }
+            else if ( sampleWaitTime <= -5000 ) {
+                qDebug("need to catch up by: %li (%i,%li)", -sampleWaitTime, currentSample, sampleWeShouldBeAt );
             // //mediaPlayerState->curDecoder()->audioSetSample( sampleWeShouldBeAt, stream );
-            // currentSample = sampleWeShouldBeAt;
-            // }
+                currentSample = sampleWeShouldBeAt;
+            }
 
             audioDevice->write( audioBuffer, samplesRead * 2 * channels );
 
             if( mediaPlayerState->isStreaming == FALSE)
                 audioSampleCounter = currentSample + samplesRead - 1;
 
             moreAudio = readOk && (audioSampleCounter <= total_audio_samples);
 
         } else {
 
             moreAudio = FALSE;
 
         }
 
     }
 
     audioMutex->unlock();
 }
 
 
 void LoopControl::killTimers() {
 
     audioMutex->lock();
 
     if ( hasVideoChannel )
         killTimer( videoId );
     killTimer( sliderId );
     threadOkToGo = FALSE;
 
     audioMutex->unlock();
 }
 
 
 void LoopControl::startTimers() {
 
     audioMutex->lock();
 
     moreVideo = FALSE;
     moreAudio = FALSE;
 
     if ( hasVideoChannel ) {
         moreVideo = TRUE;
         int mSecsBetweenFrames = (int)(100 / framerate); // 10% of the real value
         videoId = startTimer( mSecsBetweenFrames );
     }
 
     if ( hasAudioChannel ) {
         moreAudio = TRUE;
         threadOkToGo = TRUE;
     }
 
     sliderId = startTimer( 300 ); // update slider every 1/3 second
 
     audioMutex->unlock();
 }
 
 
 void LoopControl::setPaused( bool pause ) {
 
     if ( !mediaPlayerState->curDecoder() || !mediaPlayerState->curDecoder()->isOpen() )
         return;
 
     if ( pause ) {
         killTimers();
     } else {
         // Force an update of the position
         mediaPlayerState->setPosition( mediaPlayerState->position() + 1 );
         mediaPlayerState->setPosition( mediaPlayerState->position() - 1 );
         // Just like we never stopped
         startTimers();
     }
 }
 
 
 void LoopControl::stop( bool willPlayAgainShortly ) {
 
 #if defined(Q_WS_QWS) && !defined(QT_NO_COP)
     if ( !willPlayAgainShortly && disabledSuspendScreenSaver ) {
         disabledSuspendScreenSaver = FALSE;
         // Re-enable the suspend mode
         QCopEnvelope("QPE/System", "setScreenSaverMode(int)" ) << QPEApplication::Enable;
     }
 #endif
 
     if ( mediaPlayerState->curDecoder() && mediaPlayerState->curDecoder()->isOpen() ) {
 
         killTimers();
 
         audioMutex->lock();
 
         mediaPlayerState->curDecoder()->close();
 
         if ( audioDevice ) {
             delete audioDevice;
             delete audioBuffer;
             audioDevice = 0;
             audioBuffer = 0;
         }
 
         audioMutex->unlock();
 
     }
 }
 
 
 bool LoopControl::init( const QString& filename ) {
     stop();
 
     audioMutex->lock();
 
     fileName = filename;
     stream = 0; // only play stream 0 for now
     current_frame = total_video_frames = total_audio_samples = 0;
 
     qDebug( "Using the %s decoder", mediaPlayerState->curDecoder()->pluginName() );
 
     // ### Hack to use libmpeg3plugin to get the number of audio samples if we are using the libmad plugin
     if ( mediaPlayerState->curDecoder()->pluginName() == QString("LibMadPlugin") ) {
         if ( mediaPlayerState->libMpeg3Decoder() && mediaPlayerState->libMpeg3Decoder()->open( filename )) {
             total_audio_samples = mediaPlayerState->libMpeg3Decoder()->audioSamples( 0 );
             mediaPlayerState->libMpeg3Decoder()->close();
         }
     }
 
     if ( !mediaPlayerState->curDecoder()|| !mediaPlayerState->curDecoder()->open( filename ) ) {
         audioMutex->unlock();
         return FALSE;
     }
 
     hasAudioChannel = mediaPlayerState->curDecoder()->audioStreams() > 0;
     hasVideoChannel = mediaPlayerState->curDecoder()->videoStreams() > 0;
 
     if ( hasAudioChannel ) {
         int astream = 0;
 
         if ( mediaPlayerState->curDecoder()->pluginName() == QString("LibMpeg3Plugin") )
             channels = 2; //dont akx me why, but it needs this hack
         else
             channels = mediaPlayerState->curDecoder()->audioChannels( astream );
 
         qDebug( "LC- channels = %d", channels );
 
         if ( !total_audio_samples )
             total_audio_samples = mediaPlayerState->curDecoder()->audioSamples( astream );
 
         total_audio_samples += 1000;
 
         mediaPlayerState->setLength( total_audio_samples );
 
         freq = mediaPlayerState->curDecoder()->audioFrequency( astream );
         qDebug( "LC- frequency = %d", freq );
 
         audioSampleCounter = 0;
         int bits_per_sample;
         if ( mediaPlayerState->curDecoder()->pluginName() == QString("LibWavPlugin") ) {
             bits_per_sample =(int) mediaPlayerState->curDecoder()->getTime();
             qDebug("using stupid hack");
         } else {
             bits_per_sample=0;
         }
 
         audioDevice = new AudioDevice( freq, channels, bits_per_sample);
         audioBuffer = new char[ audioDevice->bufferSize() ];
         channels = audioDevice->channels();
 
         //### must check which frequency is actually used.
         static const int size = 1;
         short int buf[size];
         long samplesRead = 0;
         mediaPlayerState->curDecoder()->audioReadSamples( buf, channels, size, samplesRead, stream );
     }
 
     if ( hasVideoChannel ) {
         total_video_frames = mediaPlayerState->curDecoder()->videoFrames( stream );
 
         mediaPlayerState->setLength( total_video_frames );
 
         framerate = mediaPlayerState->curDecoder()->videoFrameRate( stream );
         DecodeLoopDebug(( "Frame rate %g total %ld", framerate, total_video_frames ));
 
         if ( framerate <= 1.0 ) {
             DecodeLoopDebug(( "Crazy frame rate, resetting to sensible" ));
             framerate = 25;
         }
 
         if ( total_video_frames == 1 ) {
             DecodeLoopDebug(( "Cannot seek to frame" ));
         }
 
     }
 
     current_frame = 0;