Merge lp:~rockstar/ubuntuone-ios-music/upgrade-audiostreamer into lp:ubuntuone-ios-music
- upgrade-audiostreamer
- Merge into trunk
Proposed by
Paul Hummer
Status: | Merged |
---|---|
Approved by: | Paul Hummer |
Approved revision: | 294 |
Merged at revision: | 251 |
Proposed branch: | lp:~rockstar/ubuntuone-ios-music/upgrade-audiostreamer |
Merge into: | lp:ubuntuone-ios-music |
Prerequisite: | lp:~rockstar/ubuntuone-ios-music/flurry-analytics |
Diff against target: |
4466 lines (+2180/-2199) 5 files modified
Dependencies/AudioStreamer/AudioStreamer.h (+198/-0) Dependencies/AudioStreamer/AudioStreamer.m (+1967/-0) U1Music.xcodeproj/project.pbxproj (+15/-6) utilities/AudioStreamer.h (+0/-197) utilities/AudioStreamer.m (+0/-1996) |
To merge this branch: | bzr merge lp:~rockstar/ubuntuone-ios-music/upgrade-audiostreamer |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Mike McCracken (community) | Approve | ||
Review via email:
|
Commit message
Upgrade AudioStreamer
Description of the change
We're using Matt Gallagher's AudioStreamer library for streaming audio. I found a couple of crasher bugs that appear to be fixed with a newer version of AudioStreamer. I upgraded it.
There is no actual code change, but I did relocate where the files were located, to make it clearer that it's not actually code we "own" (which fooled me for the first few months I worked on this).
A rubberstamp review is all I need here.
To post a comment you must log in.
Revision history for this message
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
Mike McCracken (mikemc) : | # |
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added directory 'Dependencies/AudioStreamer' |
2 | === added file 'Dependencies/AudioStreamer/AudioStreamer.h' |
3 | --- Dependencies/AudioStreamer/AudioStreamer.h 1970-01-01 00:00:00 +0000 |
4 | +++ Dependencies/AudioStreamer/AudioStreamer.h 2013-02-11 04:17:22 +0000 |
5 | @@ -0,0 +1,198 @@ |
6 | +// |
7 | +// AudioStreamer.h |
8 | +// StreamingAudioPlayer |
9 | +// |
10 | +// Created by Matt Gallagher on 27/09/08. |
11 | +// Copyright 2008 Matt Gallagher. All rights reserved. |
12 | +// |
13 | +// This software is provided 'as-is', without any express or implied |
14 | +// warranty. In no event will the authors be held liable for any damages |
15 | +// arising from the use of this software. Permission is granted to anyone to |
16 | +// use this software for any purpose, including commercial applications, and to |
17 | +// alter it and redistribute it freely, subject to the following restrictions: |
18 | +// |
19 | +// 1. The origin of this software must not be misrepresented; you must not |
20 | +// claim that you wrote the original software. If you use this software |
21 | +// in a product, an acknowledgment in the product documentation would be |
22 | +// appreciated but is not required. |
23 | +// 2. Altered source versions must be plainly marked as such, and must not be |
24 | +// misrepresented as being the original software. |
25 | +// 3. This notice may not be removed or altered from any source |
26 | +// distribution. |
27 | +// |
28 | + |
29 | +#if TARGET_OS_IPHONE |
30 | +#import <UIKit/UIKit.h> |
31 | +#else |
32 | +#import <Cocoa/Cocoa.h> |
33 | +#endif // TARGET_OS_IPHONE |
34 | + |
35 | +#include <pthread.h> |
36 | +#include <AudioToolbox/AudioToolbox.h> |
37 | + |
38 | +#define LOG_QUEUED_BUFFERS 0 |
39 | + |
40 | +#define kNumAQBufs 16 // Number of audio queue buffers we allocate. |
41 | + // Needs to be big enough to keep audio pipeline |
42 | + // busy (non-zero number of queued buffers) but |
43 | + // not so big that audio takes too long to begin |
44 | + // (kNumAQBufs * kAQBufSize of data must be |
45 | + // loaded before playback will start). |
46 | + // |
47 | + // Set LOG_QUEUED_BUFFERS to 1 to log how many |
48 | + // buffers are queued at any time -- if it drops |
49 | + // to zero too often, this value may need to |
50 | + // increase. Min 3, typical 8-24. |
51 | + |
52 | +#define kAQDefaultBufSize 2048 // Number of bytes in each audio queue buffer |
53 | + // Needs to be big enough to hold a packet of |
54 | + // audio from the audio file. If number is too |
55 | + // large, queuing of audio before playback starts |
56 | + // will take too long. |
57 | + // Highly compressed files can use smaller |
58 | + // numbers (512 or less). 2048 should hold all |
59 | + // but the largest packets. A buffer size error |
60 | + // will occur if this number is too small. |
61 | + |
62 | +#define kAQMaxPacketDescs 512 // Number of packet descriptions in our array |
63 | + |
64 | +typedef enum |
65 | +{ |
66 | + AS_INITIALIZED = 0, |
67 | + AS_STARTING_FILE_THREAD, |
68 | + AS_WAITING_FOR_DATA, |
69 | + AS_FLUSHING_EOF, |
70 | + AS_WAITING_FOR_QUEUE_TO_START, |
71 | + AS_PLAYING, |
72 | + AS_BUFFERING, |
73 | + AS_STOPPING, |
74 | + AS_STOPPED, |
75 | + AS_PAUSED |
76 | +} AudioStreamerState; |
77 | + |
78 | +typedef enum |
79 | +{ |
80 | + AS_NO_STOP = 0, |
81 | + AS_STOPPING_EOF, |
82 | + AS_STOPPING_USER_ACTION, |
83 | + AS_STOPPING_ERROR, |
84 | + AS_STOPPING_TEMPORARILY |
85 | +} AudioStreamerStopReason; |
86 | + |
87 | +typedef enum |
88 | +{ |
89 | + AS_NO_ERROR = 0, |
90 | + AS_NETWORK_CONNECTION_FAILED, |
91 | + AS_FILE_STREAM_GET_PROPERTY_FAILED, |
92 | + AS_FILE_STREAM_SEEK_FAILED, |
93 | + AS_FILE_STREAM_PARSE_BYTES_FAILED, |
94 | + AS_FILE_STREAM_OPEN_FAILED, |
95 | + AS_FILE_STREAM_CLOSE_FAILED, |
96 | + AS_AUDIO_DATA_NOT_FOUND, |
97 | + AS_AUDIO_QUEUE_CREATION_FAILED, |
98 | + AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED, |
99 | + AS_AUDIO_QUEUE_ENQUEUE_FAILED, |
100 | + AS_AUDIO_QUEUE_ADD_LISTENER_FAILED, |
101 | + AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED, |
102 | + AS_AUDIO_QUEUE_START_FAILED, |
103 | + AS_AUDIO_QUEUE_PAUSE_FAILED, |
104 | + AS_AUDIO_QUEUE_BUFFER_MISMATCH, |
105 | + AS_AUDIO_QUEUE_DISPOSE_FAILED, |
106 | + AS_AUDIO_QUEUE_STOP_FAILED, |
107 | + AS_AUDIO_QUEUE_FLUSH_FAILED, |
108 | + AS_AUDIO_STREAMER_FAILED, |
109 | + AS_GET_AUDIO_TIME_FAILED, |
110 | + AS_AUDIO_BUFFER_TOO_SMALL |
111 | +} AudioStreamerErrorCode; |
112 | + |
113 | +extern NSString * const ASStatusChangedNotification; |
114 | + |
115 | +@interface AudioStreamer : NSObject |
116 | +{ |
117 | + NSURL *url; |
118 | + |
119 | + // |
120 | + // Special threading consideration: |
121 | + // The audioQueue property should only ever be accessed inside a |
122 | + // synchronized(self) block and only *after* checking that ![self isFinishing] |
123 | + // |
124 | + AudioQueueRef audioQueue; |
125 | + AudioFileStreamID audioFileStream; // the audio file stream parser |
126 | + AudioStreamBasicDescription asbd; // description of the audio |
127 | + NSThread *internalThread; // the thread where the download and |
128 | + // audio file stream parsing occurs |
129 | + |
130 | + AudioQueueBufferRef audioQueueBuffer[kNumAQBufs]; // audio queue buffers |
131 | + AudioStreamPacketDescription packetDescs[kAQMaxPacketDescs]; // packet descriptions for enqueuing audio |
132 | + unsigned int fillBufferIndex; // the index of the audioQueueBuffer that is being filled |
133 | + UInt32 packetBufferSize; |
134 | + size_t bytesFilled; // how many bytes have been filled |
135 | + size_t packetsFilled; // how many packets have been filled |
136 | + bool inuse[kNumAQBufs]; // flags to indicate that a buffer is still in use |
137 | + NSInteger buffersUsed; |
138 | + NSDictionary *httpHeaders; |
139 | + NSString *fileExtension; |
140 | + |
141 | + AudioStreamerState state; |
142 | + AudioStreamerStopReason stopReason; |
143 | + AudioStreamerErrorCode errorCode; |
144 | + OSStatus err; |
145 | + |
146 | + bool discontinuous; // flag to indicate middle of the stream |
147 | + |
148 | + pthread_mutex_t queueBuffersMutex; // a mutex to protect the inuse flags |
149 | + pthread_cond_t queueBufferReadyCondition; // a condition variable for handling the inuse flags |
150 | + |
151 | + CFReadStreamRef stream; |
152 | + NSNotificationCenter *notificationCenter; |
153 | + |
154 | + UInt32 bitRate; // Bits per second in the file |
155 | + NSInteger dataOffset; // Offset of the first audio packet in the stream |
156 | + NSInteger fileLength; // Length of the file in bytes |
157 | + NSInteger seekByteOffset; // Seek offset within the file in bytes |
158 | + UInt64 audioDataByteCount; // Used when the actual number of audio bytes in |
159 | + // the file is known (more accurate than assuming |
160 | + // the whole file is audio) |
161 | + |
162 | + UInt64 processedPacketsCount; // number of packets accumulated for bitrate estimation |
163 | + UInt64 processedPacketsSizeTotal; // byte size of accumulated estimation packets |
164 | + |
165 | + double seekTime; |
166 | + BOOL seekWasRequested; |
167 | + double requestedSeekTime; |
168 | + double sampleRate; // Sample rate of the file (used to compare with |
169 | + // samples played by the queue for current playback |
170 | + // time) |
171 | + double packetDuration; // sample rate times frames per packet |
172 | + double lastProgress; // last calculated progress point |
173 | +#if TARGET_OS_IPHONE |
174 | + BOOL pausedByInterruption; |
175 | +#endif |
176 | +} |
177 | + |
178 | +@property AudioStreamerErrorCode errorCode; |
179 | +@property (readonly) AudioStreamerState state; |
180 | +@property (readonly) double progress; |
181 | +@property (readonly) double duration; |
182 | +@property (readwrite) UInt32 bitRate; |
183 | +@property (readonly) NSDictionary *httpHeaders; |
184 | +@property (copy,readwrite) NSString *fileExtension; |
185 | + |
186 | +- (id)initWithURL:(NSURL *)aURL; |
187 | +- (void)start; |
188 | +- (void)stop; |
189 | +- (void)pause; |
190 | +- (BOOL)isPlaying; |
191 | +- (BOOL)isPaused; |
192 | +- (BOOL)isWaiting; |
193 | +- (BOOL)isIdle; |
194 | +- (void)seekToTime:(double)newSeekTime; |
195 | +- (double)calculatedBitRate; |
196 | + |
197 | +@end |
198 | + |
199 | + |
200 | + |
201 | + |
202 | + |
203 | + |
204 | |
205 | === added file 'Dependencies/AudioStreamer/AudioStreamer.m' |
206 | --- Dependencies/AudioStreamer/AudioStreamer.m 1970-01-01 00:00:00 +0000 |
207 | +++ Dependencies/AudioStreamer/AudioStreamer.m 2013-02-11 04:17:22 +0000 |
208 | @@ -0,0 +1,1967 @@ |
209 | +// |
210 | +// AudioStreamer.m |
211 | +// StreamingAudioPlayer |
212 | +// |
213 | +// Created by Matt Gallagher on 27/09/08. |
214 | +// Copyright 2008 Matt Gallagher. All rights reserved. |
215 | +// |
216 | +// This software is provided 'as-is', without any express or implied |
217 | +// warranty. In no event will the authors be held liable for any damages |
218 | +// arising from the use of this software. Permission is granted to anyone to |
219 | +// use this software for any purpose, including commercial applications, and to |
220 | +// alter it and redistribute it freely, subject to the following restrictions: |
221 | +// |
222 | +// 1. The origin of this software must not be misrepresented; you must not |
223 | +// claim that you wrote the original software. If you use this software |
224 | +// in a product, an acknowledgment in the product documentation would be |
225 | +// appreciated but is not required. |
226 | +// 2. Altered source versions must be plainly marked as such, and must not be |
227 | +// misrepresented as being the original software. |
228 | +// 3. This notice may not be removed or altered from any source |
229 | +// distribution. |
230 | +// |
231 | + |
232 | +#import "AudioStreamer.h" |
233 | +#if TARGET_OS_IPHONE |
234 | +#import <CFNetwork/CFNetwork.h> |
235 | +#endif |
236 | + |
237 | +#define BitRateEstimationMaxPackets 5000 |
238 | +#define BitRateEstimationMinPackets 50 |
239 | + |
240 | +NSString * const ASStatusChangedNotification = @"ASStatusChangedNotification"; |
241 | + |
242 | +NSString * const AS_NO_ERROR_STRING = @"No error."; |
243 | +NSString * const AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING = @"File stream get property failed."; |
244 | +NSString * const AS_FILE_STREAM_SEEK_FAILED_STRING = @"File stream seek failed."; |
245 | +NSString * const AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING = @"Parse bytes failed."; |
246 | +NSString * const AS_FILE_STREAM_OPEN_FAILED_STRING = @"Open audio file stream failed."; |
247 | +NSString * const AS_FILE_STREAM_CLOSE_FAILED_STRING = @"Close audio file stream failed."; |
248 | +NSString * const AS_AUDIO_QUEUE_CREATION_FAILED_STRING = @"Audio queue creation failed."; |
249 | +NSString * const AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING = @"Audio buffer allocation failed."; |
250 | +NSString * const AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING = @"Queueing of audio buffer failed."; |
251 | +NSString * const AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING = @"Audio queue add listener failed."; |
252 | +NSString * const AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING = @"Audio queue remove listener failed."; |
253 | +NSString * const AS_AUDIO_QUEUE_START_FAILED_STRING = @"Audio queue start failed."; |
254 | +NSString * const AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING = @"Audio queue buffers don't match."; |
255 | +NSString * const AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING = @"Audio queue dispose failed."; |
256 | +NSString * const AS_AUDIO_QUEUE_PAUSE_FAILED_STRING = @"Audio queue pause failed."; |
257 | +NSString * const AS_AUDIO_QUEUE_STOP_FAILED_STRING = @"Audio queue stop failed."; |
258 | +NSString * const AS_AUDIO_DATA_NOT_FOUND_STRING = @"No audio data found."; |
259 | +NSString * const AS_AUDIO_QUEUE_FLUSH_FAILED_STRING = @"Audio queue flush failed."; |
260 | +NSString * const AS_GET_AUDIO_TIME_FAILED_STRING = @"Audio queue get current time failed."; |
261 | +NSString * const AS_AUDIO_STREAMER_FAILED_STRING = @"Audio playback failed"; |
262 | +NSString * const AS_NETWORK_CONNECTION_FAILED_STRING = @"Network connection failed"; |
263 | +NSString * const AS_AUDIO_BUFFER_TOO_SMALL_STRING = @"Audio packets are larger than kAQDefaultBufSize."; |
264 | + |
265 | +@interface AudioStreamer () |
266 | +@property (readwrite) AudioStreamerState state; |
267 | + |
268 | +- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream |
269 | + fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID |
270 | + ioFlags:(UInt32 *)ioFlags; |
271 | +- (void)handleAudioPackets:(const void *)inInputData |
272 | + numberBytes:(UInt32)inNumberBytes |
273 | + numberPackets:(UInt32)inNumberPackets |
274 | + packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions; |
275 | +- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ |
276 | + buffer:(AudioQueueBufferRef)inBuffer; |
277 | +- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ |
278 | + propertyID:(AudioQueuePropertyID)inID; |
279 | + |
280 | +#if TARGET_OS_IPHONE |
281 | +- (void)handleInterruptionChangeToState:(AudioQueuePropertyID)inInterruptionState; |
282 | +#endif |
283 | + |
284 | +- (void)internalSeekToTime:(double)newSeekTime; |
285 | +- (void)enqueueBuffer; |
286 | +- (void)handleReadFromStream:(CFReadStreamRef)aStream |
287 | + eventType:(CFStreamEventType)eventType; |
288 | + |
289 | +@end |
290 | + |
291 | +#pragma mark Audio Callback Function Implementations |
292 | + |
293 | +// |
294 | +// ASPropertyListenerProc |
295 | +// |
296 | +// Receives notification when the AudioFileStream has audio packets to be |
297 | +// played. In response, this function creates the AudioQueue, getting it |
298 | +// ready to begin playback (playback won't begin until audio packets are |
299 | +// sent to the queue in ASEnqueueBuffer). |
300 | +// |
301 | +// This function is adapted from Apple's example in AudioFileStreamExample with |
302 | +// kAudioQueueProperty_IsRunning listening added. |
303 | +// |
304 | +static void ASPropertyListenerProc(void * inClientData, |
305 | + AudioFileStreamID inAudioFileStream, |
306 | + AudioFileStreamPropertyID inPropertyID, |
307 | + UInt32 * ioFlags) |
308 | +{ |
309 | + // this is called by audio file stream when it finds property values |
310 | + AudioStreamer* streamer = (AudioStreamer *)inClientData; |
311 | + [streamer |
312 | + handlePropertyChangeForFileStream:inAudioFileStream |
313 | + fileStreamPropertyID:inPropertyID |
314 | + ioFlags:ioFlags]; |
315 | +} |
316 | + |
317 | +// |
318 | +// ASPacketsProc |
319 | +// |
320 | +// When the AudioStream has packets to be played, this function gets an |
321 | +// idle audio buffer and copies the audio packets into it. The calls to |
322 | +// ASEnqueueBuffer won't return until there are buffers available (or the |
323 | +// playback has been stopped). |
324 | +// |
325 | +// This function is adapted from Apple's example in AudioFileStreamExample with |
326 | +// CBR functionality added. |
327 | +// |
328 | +static void ASPacketsProc( void * inClientData, |
329 | + UInt32 inNumberBytes, |
330 | + UInt32 inNumberPackets, |
331 | + const void * inInputData, |
332 | + AudioStreamPacketDescription *inPacketDescriptions) |
333 | +{ |
334 | + // this is called by audio file stream when it finds packets of audio |
335 | + AudioStreamer* streamer = (AudioStreamer *)inClientData; |
336 | + [streamer |
337 | + handleAudioPackets:inInputData |
338 | + numberBytes:inNumberBytes |
339 | + numberPackets:inNumberPackets |
340 | + packetDescriptions:inPacketDescriptions]; |
341 | +} |
342 | + |
343 | +// |
344 | +// ASAudioQueueOutputCallback |
345 | +// |
346 | +// Called from the AudioQueue when playback of specific buffers completes. This |
347 | +// function signals from the AudioQueue thread to the AudioStream thread that |
348 | +// the buffer is idle and available for copying data. |
349 | +// |
350 | +// This function is unchanged from Apple's example in AudioFileStreamExample. |
351 | +// |
352 | +static void ASAudioQueueOutputCallback(void* inClientData, |
353 | + AudioQueueRef inAQ, |
354 | + AudioQueueBufferRef inBuffer) |
355 | +{ |
356 | + // this is called by the audio queue when it has finished decoding our data. |
357 | + // The buffer is now free to be reused. |
358 | + AudioStreamer* streamer = (AudioStreamer*)inClientData; |
359 | + [streamer handleBufferCompleteForQueue:inAQ buffer:inBuffer]; |
360 | +} |
361 | + |
362 | +// |
363 | +// ASAudioQueueIsRunningCallback |
364 | +// |
365 | +// Called from the AudioQueue when playback is started or stopped. This |
366 | +// information is used to toggle the observable "isPlaying" property and |
367 | +// set the "finished" flag. |
368 | +// |
369 | +static void ASAudioQueueIsRunningCallback(void *inUserData, AudioQueueRef inAQ, AudioQueuePropertyID inID) |
370 | +{ |
371 | + AudioStreamer* streamer = (AudioStreamer *)inUserData; |
372 | + [streamer handlePropertyChangeForQueue:inAQ propertyID:inID]; |
373 | +} |
374 | + |
375 | +#if TARGET_OS_IPHONE |
376 | +// |
377 | +// ASAudioSessionInterruptionListener |
378 | +// |
379 | +// Invoked if the audio session is interrupted (like when the phone rings) |
380 | +// |
381 | +static void ASAudioSessionInterruptionListener(void *inClientData, UInt32 inInterruptionState) |
382 | +{ |
383 | + AudioStreamer* streamer = (AudioStreamer *)inClientData; |
384 | + [streamer handleInterruptionChangeToState:inInterruptionState]; |
385 | +} |
386 | +#endif |
387 | + |
388 | +#pragma mark CFReadStream Callback Function Implementations |
389 | + |
390 | +// |
391 | +// ReadStreamCallBack |
392 | +// |
393 | +// This is the callback for the CFReadStream from the network connection. This |
394 | +// is where all network data is passed to the AudioFileStream. |
395 | +// |
396 | +// Invoked when an error occurs, the stream ends or we have data to read. |
397 | +// |
398 | +static void ASReadStreamCallBack |
399 | +( |
400 | + CFReadStreamRef aStream, |
401 | + CFStreamEventType eventType, |
402 | + void* inClientInfo |
403 | +) |
404 | +{ |
405 | + AudioStreamer* streamer = (AudioStreamer *)inClientInfo; |
406 | + [streamer handleReadFromStream:aStream eventType:eventType]; |
407 | +} |
408 | + |
409 | +@implementation AudioStreamer |
410 | + |
411 | +@synthesize errorCode; |
412 | +@synthesize state; |
413 | +@synthesize bitRate; |
414 | +@synthesize httpHeaders; |
415 | +@synthesize fileExtension; |
416 | + |
417 | +// |
418 | +// initWithURL |
419 | +// |
420 | +// Init method for the object. |
421 | +// |
422 | +- (id)initWithURL:(NSURL *)aURL |
423 | +{ |
424 | + self = [super init]; |
425 | + if (self != nil) |
426 | + { |
427 | + url = [aURL retain]; |
428 | + } |
429 | + return self; |
430 | +} |
431 | + |
432 | +// |
433 | +// dealloc |
434 | +// |
435 | +// Releases instance memory. |
436 | +// |
437 | +- (void)dealloc |
438 | +{ |
439 | + [self stop]; |
440 | + [url release]; |
441 | + [fileExtension release]; |
442 | + [super dealloc]; |
443 | +} |
444 | + |
445 | +// |
446 | +// isFinishing |
447 | +// |
448 | +// returns YES if the audio has reached a stopping condition. |
449 | +// |
450 | +- (BOOL)isFinishing |
451 | +{ |
452 | + @synchronized (self) |
453 | + { |
454 | + if ((errorCode != AS_NO_ERROR && state != AS_INITIALIZED) || |
455 | + ((state == AS_STOPPING || state == AS_STOPPED) && |
456 | + stopReason != AS_STOPPING_TEMPORARILY)) |
457 | + { |
458 | + return YES; |
459 | + } |
460 | + } |
461 | + |
462 | + return NO; |
463 | +} |
464 | + |
465 | +// |
466 | +// runLoopShouldExit |
467 | +// |
468 | +// returns YES if the run loop should exit. |
469 | +// |
470 | +- (BOOL)runLoopShouldExit |
471 | +{ |
472 | + @synchronized(self) |
473 | + { |
474 | + if (errorCode != AS_NO_ERROR || |
475 | + (state == AS_STOPPED && |
476 | + stopReason != AS_STOPPING_TEMPORARILY)) |
477 | + { |
478 | + return YES; |
479 | + } |
480 | + } |
481 | + |
482 | + return NO; |
483 | +} |
484 | + |
485 | +// |
486 | +// stringForErrorCode: |
487 | +// |
488 | +// Converts an error code to a string that can be localized or presented |
489 | +// to the user. |
490 | +// |
491 | +// Parameters: |
492 | +// anErrorCode - the error code to convert |
493 | +// |
494 | +// returns the string representation of the error code |
495 | +// |
496 | ++ (NSString *)stringForErrorCode:(AudioStreamerErrorCode)anErrorCode |
497 | +{ |
498 | + switch (anErrorCode) |
499 | + { |
500 | + case AS_NO_ERROR: |
501 | + return AS_NO_ERROR_STRING; |
502 | + case AS_FILE_STREAM_GET_PROPERTY_FAILED: |
503 | + return AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING; |
504 | + case AS_FILE_STREAM_SEEK_FAILED: |
505 | + return AS_FILE_STREAM_SEEK_FAILED_STRING; |
506 | + case AS_FILE_STREAM_PARSE_BYTES_FAILED: |
507 | + return AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING; |
508 | + case AS_AUDIO_QUEUE_CREATION_FAILED: |
509 | + return AS_AUDIO_QUEUE_CREATION_FAILED_STRING; |
510 | + case AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED: |
511 | + return AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING; |
512 | + case AS_AUDIO_QUEUE_ENQUEUE_FAILED: |
513 | + return AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING; |
514 | + case AS_AUDIO_QUEUE_ADD_LISTENER_FAILED: |
515 | + return AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING; |
516 | + case AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED: |
517 | + return AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING; |
518 | + case AS_AUDIO_QUEUE_START_FAILED: |
519 | + return AS_AUDIO_QUEUE_START_FAILED_STRING; |
520 | + case AS_AUDIO_QUEUE_BUFFER_MISMATCH: |
521 | + return AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING; |
522 | + case AS_FILE_STREAM_OPEN_FAILED: |
523 | + return AS_FILE_STREAM_OPEN_FAILED_STRING; |
524 | + case AS_FILE_STREAM_CLOSE_FAILED: |
525 | + return AS_FILE_STREAM_CLOSE_FAILED_STRING; |
526 | + case AS_AUDIO_QUEUE_DISPOSE_FAILED: |
527 | + return AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING; |
528 | + case AS_AUDIO_QUEUE_PAUSE_FAILED: |
529 | + return AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING; |
530 | + case AS_AUDIO_QUEUE_FLUSH_FAILED: |
531 | + return AS_AUDIO_QUEUE_FLUSH_FAILED_STRING; |
532 | + case AS_AUDIO_DATA_NOT_FOUND: |
533 | + return AS_AUDIO_DATA_NOT_FOUND_STRING; |
534 | + case AS_GET_AUDIO_TIME_FAILED: |
535 | + return AS_GET_AUDIO_TIME_FAILED_STRING; |
536 | + case AS_NETWORK_CONNECTION_FAILED: |
537 | + return AS_NETWORK_CONNECTION_FAILED_STRING; |
538 | + case AS_AUDIO_QUEUE_STOP_FAILED: |
539 | + return AS_AUDIO_QUEUE_STOP_FAILED_STRING; |
540 | + case AS_AUDIO_STREAMER_FAILED: |
541 | + return AS_AUDIO_STREAMER_FAILED_STRING; |
542 | + case AS_AUDIO_BUFFER_TOO_SMALL: |
543 | + return AS_AUDIO_BUFFER_TOO_SMALL_STRING; |
544 | + default: |
545 | + return AS_AUDIO_STREAMER_FAILED_STRING; |
546 | + } |
547 | + |
548 | + return AS_AUDIO_STREAMER_FAILED_STRING; |
549 | +} |
550 | + |
551 | +// |
552 | +// presentAlertWithTitle:message: |
553 | +// |
554 | +// Common code for presenting error dialogs |
555 | +// |
556 | +// Parameters: |
557 | +// title - title for the dialog |
558 | +// message - main text for the dialog |
559 | +// |
560 | +- (void)presentAlertWithTitle:(NSString*)title message:(NSString*)message |
561 | +{ |
562 | +#if TARGET_OS_IPHONE |
563 | + UIAlertView *alert = [ |
564 | + [[UIAlertView alloc] |
565 | + initWithTitle:title |
566 | + message:message |
567 | + delegate:self |
568 | + cancelButtonTitle:NSLocalizedString(@"OK", @"") |
569 | + otherButtonTitles: nil] |
570 | + autorelease]; |
571 | + [alert |
572 | + performSelector:@selector(show) |
573 | + onThread:[NSThread mainThread] |
574 | + withObject:nil |
575 | + waitUntilDone:NO]; |
576 | +#else |
577 | + NSAlert *alert = |
578 | + [NSAlert |
579 | + alertWithMessageText:title |
580 | + defaultButton:NSLocalizedString(@"OK", @"") |
581 | + alternateButton:nil |
582 | + otherButton:nil |
583 | + informativeTextWithFormat:message]; |
584 | + [alert |
585 | + performSelector:@selector(runModal) |
586 | + onThread:[NSThread mainThread] |
587 | + withObject:nil |
588 | + waitUntilDone:NO]; |
589 | +#endif |
590 | +} |
591 | + |
592 | +// |
593 | +// failWithErrorCode: |
594 | +// |
595 | +// Sets the playback state to failed and logs the error. |
596 | +// |
597 | +// Parameters: |
598 | +// anErrorCode - the error condition |
599 | +// |
600 | +- (void)failWithErrorCode:(AudioStreamerErrorCode)anErrorCode |
601 | +{ |
602 | + @synchronized(self) |
603 | + { |
604 | + if (errorCode != AS_NO_ERROR) |
605 | + { |
606 | + // Only set the error once. |
607 | + return; |
608 | + } |
609 | + |
610 | + errorCode = anErrorCode; |
611 | + |
612 | + if (err) |
613 | + { |
614 | + char *errChars = (char *)&err; |
615 | + NSLog(@"%@ err: %c%c%c%c %d\n", |
616 | + [AudioStreamer stringForErrorCode:anErrorCode], |
617 | + errChars[3], errChars[2], errChars[1], errChars[0], |
618 | + (int)err); |
619 | + } |
620 | + else |
621 | + { |
622 | + NSLog(@"%@", [AudioStreamer stringForErrorCode:anErrorCode]); |
623 | + } |
624 | + |
625 | + if (state == AS_PLAYING || |
626 | + state == AS_PAUSED || |
627 | + state == AS_BUFFERING) |
628 | + { |
629 | + self.state = AS_STOPPING; |
630 | + stopReason = AS_STOPPING_ERROR; |
631 | + AudioQueueStop(audioQueue, true); |
632 | + } |
633 | + |
634 | + [self presentAlertWithTitle:NSLocalizedStringFromTable(@"File Error", @"Errors", nil) |
635 | + message:NSLocalizedStringFromTable(@"Unable to configure network read stream.", @"Errors", nil)]; |
636 | + } |
637 | +} |
638 | + |
639 | +// |
640 | +// mainThreadStateNotification |
641 | +// |
642 | +// Method invoked on main thread to send notifications to the main thread's |
643 | +// notification center. |
644 | +// |
645 | +- (void)mainThreadStateNotification |
646 | +{ |
647 | + NSNotification *notification = |
648 | + [NSNotification |
649 | + notificationWithName:ASStatusChangedNotification |
650 | + object:self]; |
651 | + [[NSNotificationCenter defaultCenter] |
652 | + postNotification:notification]; |
653 | +} |
654 | + |
655 | +// |
656 | +// state |
657 | +// |
658 | +// returns the state value. |
659 | +// |
660 | +- (AudioStreamerState)state |
661 | +{ |
662 | + @synchronized(self) |
663 | + { |
664 | + return state; |
665 | + } |
666 | +} |
667 | + |
668 | +// |
669 | +// setState: |
670 | +// |
671 | +// Sets the state and sends a notification that the state has changed. |
672 | +// |
673 | +// This method |
674 | +// |
675 | +// Parameters: |
676 | +// aStatus - the new state value |
677 | +// |
678 | +- (void)setState:(AudioStreamerState)aStatus |
679 | +{ |
680 | + @synchronized(self) |
681 | + { |
682 | + if (state != aStatus) |
683 | + { |
684 | + state = aStatus; |
685 | + |
686 | + if ([[NSThread currentThread] isEqual:[NSThread mainThread]]) |
687 | + { |
688 | + [self mainThreadStateNotification]; |
689 | + } |
690 | + else |
691 | + { |
692 | + [self |
693 | + performSelectorOnMainThread:@selector(mainThreadStateNotification) |
694 | + withObject:nil |
695 | + waitUntilDone:NO]; |
696 | + } |
697 | + } |
698 | + } |
699 | +} |
700 | + |
701 | +// |
702 | +// isPlaying |
703 | +// |
704 | +// returns YES if the audio is currently playing. |
705 | +// |
706 | +- (BOOL)isPlaying |
707 | +{ |
708 | + if (state == AS_PLAYING) |
709 | + { |
710 | + return YES; |
711 | + } |
712 | + |
713 | + return NO; |
714 | +} |
715 | + |
716 | +// |
717 | +// isPaused |
718 | +// |
719 | +// returns YES if the audio is currently paused. |
720 | +// |
721 | +- (BOOL)isPaused |
722 | +{ |
723 | + if (state == AS_PAUSED) |
724 | + { |
725 | + return YES; |
726 | + } |
727 | + |
728 | + return NO; |
729 | +} |
730 | + |
731 | +// |
732 | +// isWaiting |
733 | +// |
734 | +// returns YES if the AudioStreamer is waiting for a state transition of some |
735 | +// kind. |
736 | +// |
737 | +- (BOOL)isWaiting |
738 | +{ |
739 | + @synchronized(self) |
740 | + { |
741 | + if ([self isFinishing] || |
742 | + state == AS_STARTING_FILE_THREAD|| |
743 | + state == AS_WAITING_FOR_DATA || |
744 | + state == AS_WAITING_FOR_QUEUE_TO_START || |
745 | + state == AS_BUFFERING) |
746 | + { |
747 | + return YES; |
748 | + } |
749 | + } |
750 | + |
751 | + return NO; |
752 | +} |
753 | + |
754 | +// |
755 | +// isIdle |
756 | +// |
757 | +// returns YES if the AudioStream is in the AS_INITIALIZED state (i.e. |
758 | +// isn't doing anything). |
759 | +// |
760 | +- (BOOL)isIdle |
761 | +{ |
762 | + if (state == AS_INITIALIZED) |
763 | + { |
764 | + return YES; |
765 | + } |
766 | + |
767 | + return NO; |
768 | +} |
769 | + |
770 | +// |
771 | +// hintForFileExtension: |
772 | +// |
773 | +// Generates a first guess for the file type based on the file's extension |
774 | +// |
775 | +// Parameters: |
776 | +// fileExtension - the file extension |
777 | +// |
778 | +// returns a file type hint that can be passed to the AudioFileStream |
779 | +// |
780 | ++ (AudioFileTypeID)hintForFileExtension:(NSString *)fileExtension |
781 | +{ |
782 | + AudioFileTypeID fileTypeHint = kAudioFileAAC_ADTSType; |
783 | + if ([fileExtension isEqual:@"mp3"]) |
784 | + { |
785 | + fileTypeHint = kAudioFileMP3Type; |
786 | + } |
787 | + else if ([fileExtension isEqual:@"wav"]) |
788 | + { |
789 | + fileTypeHint = kAudioFileWAVEType; |
790 | + } |
791 | + else if ([fileExtension isEqual:@"aifc"]) |
792 | + { |
793 | + fileTypeHint = kAudioFileAIFCType; |
794 | + } |
795 | + else if ([fileExtension isEqual:@"aiff"]) |
796 | + { |
797 | + fileTypeHint = kAudioFileAIFFType; |
798 | + } |
799 | + else if ([fileExtension isEqual:@"m4a"]) |
800 | + { |
801 | + fileTypeHint = kAudioFileM4AType; |
802 | + } |
803 | + else if ([fileExtension isEqual:@"mp4"]) |
804 | + { |
805 | + fileTypeHint = kAudioFileMPEG4Type; |
806 | + } |
807 | + else if ([fileExtension isEqual:@"caf"]) |
808 | + { |
809 | + fileTypeHint = kAudioFileCAFType; |
810 | + } |
811 | + else if ([fileExtension isEqual:@"aac"]) |
812 | + { |
813 | + fileTypeHint = kAudioFileAAC_ADTSType; |
814 | + } |
815 | + return fileTypeHint; |
816 | +} |
817 | + |
//
// openReadStream
//
// Creates the CFReadStream that downloads the audio data (with a Range
// header when resuming from a seek), configures redirects/proxies/SSL,
// opens it, and schedules it on the current (internal thread's) run loop.
//
// returns YES on success. On failure an alert is presented and NO is
// returned with the `stream` ivar left nil, so the cleanup path in
// startInternal never touches a released stream.
//
- (BOOL)openReadStream
{
	@synchronized(self)
	{
		NSAssert([[NSThread currentThread] isEqual:internalThread],
			@"File stream download must be started on the internalThread");
		NSAssert(stream == nil, @"Download stream already initialized");

		//
		// Create the HTTP GET request
		//
		CFHTTPMessageRef message = CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (CFURLRef)url, kCFHTTPVersion1_1);

		//
		// If we are creating this request to seek to a location, set the
		// requested byte range in the headers.
		//
		if (fileLength > 0 && seekByteOffset > 0)
		{
			CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"),
				(CFStringRef)[NSString stringWithFormat:@"bytes=%ld-%ld", seekByteOffset, fileLength]);
			discontinuous = YES;
		}

		//
		// Create the read stream that will receive data from the HTTP request
		//
		stream = CFReadStreamCreateForHTTPRequest(NULL, message);
		CFRelease(message);

		//
		// Enable stream redirection
		//
		if (CFReadStreamSetProperty(
			stream,
			kCFStreamPropertyHTTPShouldAutoredirect,
			kCFBooleanTrue) == false)
		{
			// Fix: the original returned NO here without releasing the
			// stream it had just created (a CF leak) and left the ivar
			// pointing at it. Release and nil it before bailing out.
			CFRelease(stream);
			stream = nil;
			[self presentAlertWithTitle:NSLocalizedStringFromTable(@"File Error", @"Errors", nil)
				message:NSLocalizedStringFromTable(@"Unable to configure network read stream.", @"Errors", nil)];
			return NO;
		}

		//
		// Handle proxies (CFNetworkCopySystemProxySettings can return NULL;
		// passing NULL to CFRelease would crash)
		//
		CFDictionaryRef proxySettings = CFNetworkCopySystemProxySettings();
		if (proxySettings)
		{
			CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPProxy, proxySettings);
			CFRelease(proxySettings);
		}

		//
		// Handle SSL connections
		//
		// NOTE(review): these settings disable certificate-chain validation
		// and accept any/expired roots, which allows man-in-the-middle on
		// the audio download. Kept as-is to preserve behavior, but this
		// should be revisited.
		//
		if ([[url absoluteString] rangeOfString:@"https"].location != NSNotFound)
		{
			NSDictionary *sslSettings =
				[NSDictionary dictionaryWithObjectsAndKeys:
					(NSString *)kCFStreamSocketSecurityLevelNegotiatedSSL, kCFStreamSSLLevel,
					[NSNumber numberWithBool:YES], kCFStreamSSLAllowsExpiredCertificates,
					[NSNumber numberWithBool:YES], kCFStreamSSLAllowsExpiredRoots,
					[NSNumber numberWithBool:YES], kCFStreamSSLAllowsAnyRoot,
					[NSNumber numberWithBool:NO], kCFStreamSSLValidatesCertificateChain,
					[NSNull null], kCFStreamSSLPeerName,
					nil];

			CFReadStreamSetProperty(stream, kCFStreamPropertySSLSettings, sslSettings);
		}

		//
		// We're now ready to receive data
		//
		self.state = AS_WAITING_FOR_DATA;

		//
		// Open the stream
		//
		if (!CFReadStreamOpen(stream))
		{
			CFRelease(stream);
			// Fix: nil the ivar. The original left it dangling after the
			// CFRelease, so the cleanup block in startInternal would call
			// CFReadStreamClose/CFRelease on freed memory (crasher).
			stream = nil;
			[self presentAlertWithTitle:NSLocalizedStringFromTable(@"File Error", @"Errors", nil)
				message:NSLocalizedStringFromTable(@"Unable to configure network read stream.", @"Errors", nil)];
			return NO;
		}

		//
		// Set our callback function to receive the data
		//
		CFStreamClientContext context = {0, self, NULL, NULL, NULL};
		CFReadStreamSetClient(
			stream,
			kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered,
			ASReadStreamCallBack,
			&context);
		CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
	}

	return YES;
}
922 | + |
//
// startInternal
//
// Entry point for the dedicated AudioStreamer thread. A separate thread is
// needed because enqueueBuffer blocks on a pthread condition whenever all
// audio buffers are in use (waiting for the AudioQueue to drain one).
//
// Activity in this thread:
//	- Creation and cleanup of all AudioFileStream and AudioQueue objects
//	- Receives data from the CFReadStream
//	- AudioFileStream processing
//	- Copying of data from AudioFileStream into audio buffers
//	- Stopping of the thread because of end-of-file
//	- Stopping due to error or failure
//
// Activity *not* in this thread:
//	- AudioQueue playback and notifications (happens in AudioQueue thread)
//	- Actual download of NSURLConnection data (NSURLConnection's thread)
//	- Creation of the AudioStreamer (other, likely "main" thread)
//	- Invocation of -start method (other, likely "main" thread)
//	- User/manual invocation of -stop (other, likely "main" thread)
//
// This method contains bits of the "main" function from Apple's example in
// AudioFileStreamExample.
//
- (void)startInternal
{
	// Manual retain/release code: this thread needs its own autorelease pool.
	NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

	@synchronized(self)
	{
		if (state != AS_STARTING_FILE_THREAD)
		{
			// A -stop may legitimately race in between -start and this
			// thread being scheduled; any other state is unexpected.
			if (state != AS_STOPPING &&
				state != AS_STOPPED)
			{
				NSLog(@"### Not starting audio thread. State code is: %ld", (long)state);
			}
			self.state = AS_INITIALIZED;
			[pool release];
			return;
		}

	#if TARGET_OS_IPHONE
		//
		// Set the audio session category so that we continue to play if the
		// iPhone/iPod auto-locks.
		//
		AudioSessionInitialize (
			NULL,                          // 'NULL' to use the default (main) run loop
			NULL,                          // 'NULL' to use the default run loop mode
			ASAudioSessionInterruptionListener,  // a reference to your interruption callback
			self                       // data to pass to your interruption listener callback
		);
		UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;
		AudioSessionSetProperty (
			kAudioSessionProperty_AudioCategory,
			sizeof (sessionCategory),
			&sessionCategory
		);
		AudioSessionSetActive(true);
	#endif

		// initialize a mutex and condition so that we can block on buffers in use.
		pthread_mutex_init(&queueBuffersMutex, NULL);
		pthread_cond_init(&queueBufferReadyCondition, NULL);

		if (![self openReadStream])
		{
			// NOTE(review): this goto jumps out of the @synchronized block;
			// the compiler emits the matching unlock on the way out, but it
			// is worth knowing when reading the control flow.
			goto cleanup;
		}
	}

	//
	// Process the run loop until playback is finished or failed. The 0.25s
	// timeout bounds how long we wait between checks of the exit/seek flags.
	//
	BOOL isRunning = YES;
	do
	{
		isRunning = [[NSRunLoop currentRunLoop]
			runMode:NSDefaultRunLoopMode
			beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.25]];

		// Seeks requested from other threads (seekToTime:) are deferred to
		// here so stream teardown/reopen happens on this thread only.
		@synchronized(self) {
			if (seekWasRequested) {
				[self internalSeekToTime:requestedSeekTime];
				seekWasRequested = NO;
			}
		}

		//
		// If there are no queued buffers, we need to check here since the
		// handleBufferCompleteForQueue:buffer: should not change the state
		// (may not enter the synchronized section).
		//
		if (buffersUsed == 0 && self.state == AS_PLAYING)
		{
			err = AudioQueuePause(audioQueue);
			if (err)
			{
				// NOTE(review): returning here skips the cleanup label and
				// the [pool release] below — pre-existing behavior.
				[self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED];
				return;
			}
			self.state = AS_BUFFERING;
		}
	} while (isRunning && ![self runLoopShouldExit]);

cleanup:

	@synchronized(self)
	{
		//
		// Cleanup the read stream if it is still open
		//
		if (stream)
		{
			CFReadStreamClose(stream);
			CFRelease(stream);
			stream = nil;
		}

		//
		// Close the audio file stream
		//
		if (audioFileStream)
		{
			err = AudioFileStreamClose(audioFileStream);
			audioFileStream = nil;
			if (err)
			{
				[self failWithErrorCode:AS_FILE_STREAM_CLOSE_FAILED];
			}
		}

		//
		// Dispose of the Audio Queue (synchronously: second arg true)
		//
		if (audioQueue)
		{
			err = AudioQueueDispose(audioQueue, true);
			audioQueue = nil;
			if (err)
			{
				[self failWithErrorCode:AS_AUDIO_QUEUE_DISPOSE_FAILED];
			}
		}

		pthread_mutex_destroy(&queueBuffersMutex);
		pthread_cond_destroy(&queueBufferReadyCondition);

#if TARGET_OS_IPHONE
		AudioSessionSetActive(false);
#endif

		[httpHeaders release];
		httpHeaders = nil;

		// Reset fill-state so a subsequent -start begins from scratch.
		bytesFilled = 0;
		packetsFilled = 0;
		seekByteOffset = 0;
		packetBufferSize = 0;
		// AS_INITIALIZED is the signal -stop busy-waits on (see -stop).
		self.state = AS_INITIALIZED;

		[internalThread release];
		internalThread = nil;
	}

	[pool release];
}
1092 | + |
//
// start
//
// Begins (or resumes) playback. When the streamer is AS_PAUSED this acts
// as a resume by invoking -pause, which toggles. When AS_INITIALIZED it
// spawns the internal thread that runs startInternal. Any other state is
// ignored.
//
- (void)start
{
	@synchronized (self)
	{
		if (state == AS_PAUSED)
		{
			// -pause toggles: called while paused, it restarts the queue.
			[self pause];
		}
		else if (state == AS_INITIALIZED)
		{
			NSAssert([[NSThread currentThread] isEqual:[NSThread mainThread]],
				@"Playback can only be started from the main thread.");
			// Retained under MRC; presumably released elsewhere in the
			// class — not visible in this chunk.
			notificationCenter =
				[[NSNotificationCenter defaultCenter] retain];
			self.state = AS_STARTING_FILE_THREAD;
			internalThread =
				[[NSThread alloc]
					initWithTarget:self
					selector:@selector(startInternal)
					object:nil];
			[internalThread start];
		}
	}
}
1122 | + |

// internalSeekToTime:
//
// Called from our internal runloop to reopen the stream at a seeked
// location. Must run on the internal thread (startInternal invokes it when
// seekWasRequested is set by seekToTime:).
//
// Parameters:
//    newSeekTime - seek target, in seconds from the start of the track
//
- (void)internalSeekToTime:(double)newSeekTime
{
	// Seeking needs a bitrate (to convert time to bytes) and a known file
	// length; silently ignore the request otherwise.
	if ([self calculatedBitRate] == 0.0 || fileLength <= 0)
	{
		return;
	}

	//
	// Calculate the byte offset for seeking (proportional position within
	// the audio-data region, past the dataOffset header bytes)
	//
	seekByteOffset = dataOffset +
		(newSeekTime / self.duration) * (fileLength - dataOffset);

	//
	// Attempt to leave 1 useful packet at the end of the file (although in
	// reality, this may still seek too far if the file has a long trailer).
	//
	if (seekByteOffset > fileLength - 2 * packetBufferSize)
	{
		seekByteOffset = fileLength - 2 * packetBufferSize;
	}

	//
	// Store the old time from the audio queue and the time that we're seeking
	// to so that we'll know the correct time progress after seeking.
	//
	seekTime = newSeekTime;

	//
	// Attempt to align the seek with a packet boundary
	//
	double calculatedBitRate = [self calculatedBitRate];
	if (packetDuration > 0 &&
		calculatedBitRate > 0)
	{
		UInt32 ioFlags = 0;
		SInt64 packetAlignedByteOffset;
		SInt64 seekPacket = floor(newSeekTime / packetDuration);
		err = AudioFileStreamSeek(audioFileStream, seekPacket, &packetAlignedByteOffset, &ioFlags);
		if (!err && !(ioFlags & kAudioFileStreamSeekFlag_OffsetIsEstimated))
		{
			// Pull seekTime back to the actual packet boundary so that
			// -progress stays accurate after the seek.
			seekTime -= ((seekByteOffset - dataOffset) - packetAlignedByteOffset) * 8.0 / calculatedBitRate;
			seekByteOffset = packetAlignedByteOffset + dataOffset;
		}
	}

	//
	// Close the current read stream
	//
	if (stream)
	{
		CFReadStreamClose(stream);
		CFRelease(stream);
		stream = nil;
	}

	//
	// Stop the audio queue. AS_STOPPING_TEMPORARILY tells the rest of the
	// state machine this is a seek, not a user stop or EOF.
	//
	self.state = AS_STOPPING;
	stopReason = AS_STOPPING_TEMPORARILY;
	err = AudioQueueStop(audioQueue, true);
	if (err)
	{
		[self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED];
		return;
	}

	//
	// Re-open the file stream. It will request a byte-range starting at
	// seekByteOffset.
	//
	[self openReadStream];
}
1202 | + |
//
// seekToTime:
//
// Records a seek request. The actual seek is performed later, on the
// streaming thread, by -internalSeekToTime: (driven from the run loop in
// startInternal). Requests are ignored there while the bitrate or
// fileLength are still unknown.
//
// Parameters:
//    newSeekTime - the time to seek to, in seconds
//
- (void)seekToTime:(double)newSeekTime
{
	@synchronized(self)
	{
		requestedSeekTime = newSeekTime;
		seekWasRequested = YES;
	}
}
1220 | + |
//
// progress
//
// returns the current playback progress in seconds. Falls back to the last
// reported value (lastProgress) whenever the AudioQueue cannot be queried:
// before sampleRate is known, while finishing, in non-playing states, or
// when the queue reports it has stopped.
//
- (double)progress
{
	@synchronized(self)
	{
		if (sampleRate > 0 && (state == AS_STOPPING || ![self isFinishing]))
		{
			if (state != AS_PLAYING && state != AS_PAUSED && state != AS_BUFFERING && state != AS_STOPPING)
			{
				return lastProgress;
			}

			AudioTimeStamp queueTime;
			Boolean discontinuity;
			err = AudioQueueGetCurrentTime(audioQueue, NULL, &queueTime, &discontinuity);

			// AudioQueueGetCurrentTime returns this status once the queue
			// has stopped; treat it as "no new data" rather than an error.
			const OSStatus AudioQueueStopped = 0x73746F70; // 0x73746F70 is 'stop'
			if (err == AudioQueueStopped)
			{
				return lastProgress;
			}
			else if (err)
			{
				[self failWithErrorCode:AS_GET_AUDIO_TIME_FAILED];
			}

			// Queue time restarts at zero after a seek, so add the seek
			// base time; clamp small negative values from timeline jitter.
			double progress = seekTime + queueTime.mSampleTime / sampleRate;
			if (progress < 0.0)
			{
				progress = 0.0;
			}

			lastProgress = progress;
			return progress;
		}
	}

	return lastProgress;
}
1265 | + |
//
// calculatedBitRate
//
// returns the bit rate in bits per second, if known. A running estimate
// (average processed packet size over packet duration) is preferred once
// enough packets have been seen; otherwise the nominal bitRate is used.
// Returns zero when neither is available.
//
- (double)calculatedBitRate
{
	BOOL haveEstimate =
		packetDuration && processedPacketsCount > BitRateEstimationMinPackets;

	if (haveEstimate)
	{
		double bytesPerPacket =
			processedPacketsSizeTotal / processedPacketsCount;
		return bytesPerPacket * 8.0 / packetDuration;
	}

	return bitRate ? (double)bitRate : 0;
}
1288 | + |
//
// duration
//
// Calculates the duration of available audio from the bitRate and
// fileLength.
//
// returns the calculated duration in seconds, or 0.0 when the bitrate or
// file length are not yet known.
//
- (double)duration
{
	double rate = [self calculatedBitRate];
	if (rate == 0 || fileLength == 0)
	{
		return 0.0;
	}

	// bytes of audio / (bits-per-second / 8) == seconds
	double audioByteCount = fileLength - dataOffset;
	return audioByteCount / (rate * 0.125);
}
1307 | + |
//
// pause
//
// A togglable pause: pauses the queue when playing, restarts it when
// paused. Any other state is a no-op.
//
- (void)pause
{
	@synchronized(self)
	{
		switch (state)
		{
			case AS_PLAYING:
				err = AudioQueuePause(audioQueue);
				if (err)
				{
					[self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED];
					return;
				}
				self.state = AS_PAUSED;
				break;

			case AS_PAUSED:
				err = AudioQueueStart(audioQueue, NULL);
				if (err)
				{
					[self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED];
					return;
				}
				self.state = AS_PLAYING;
				break;

			default:
				break;
		}
	}
}
1339 | + |
//
// stop
//
// This method can be called to stop downloading/playback before it completes.
// It is automatically called when an error occurs.
//
// If playback has not started before this method is called, it will toggle the
// "isPlaying" property so that it is guaranteed to transition to true and
// back to false.
//
// Blocks the calling thread (polling at 100ms) until the internal thread
// has fully wound down and set the state back to AS_INITIALIZED.
//
- (void)stop
{
	@synchronized(self)
	{
		if (audioQueue &&
			(state == AS_PLAYING || state == AS_PAUSED ||
				state == AS_BUFFERING || state == AS_WAITING_FOR_QUEUE_TO_START))
		{
			self.state = AS_STOPPING;
			stopReason = AS_STOPPING_USER_ACTION;
			// Synchronous stop (second arg true): queue callbacks complete
			// before this returns.
			err = AudioQueueStop(audioQueue, true);
			if (err)
			{
				[self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED];
				return;
			}
		}
		else if (state != AS_INITIALIZED)
		{
			self.state = AS_STOPPED;
			stopReason = AS_STOPPING_USER_ACTION;
		}
		// Cancel any seek that was queued but not yet serviced.
		seekWasRequested = NO;
	}

	// Busy-wait for startInternal's cleanup to finish (it sets
	// AS_INITIALIZED as its last state change).
	// NOTE(review): this blocks the caller — typically the main thread —
	// for up to the full teardown time.
	while (state != AS_INITIALIZED)
	{
		[NSThread sleepForTimeInterval:0.1];
	}
}
1380 | + |
//
// handleReadFromStream:eventType:
//
// Reads data from the network file stream into the AudioFileStream. Invoked
// (via ASReadStreamCallBack) on the internal thread's run loop for error,
// end-of-stream and bytes-available events.
//
// Parameters:
//    aStream - the network file stream
//    eventType - the event which triggered this method
//
- (void)handleReadFromStream:(CFReadStreamRef)aStream
	eventType:(CFStreamEventType)eventType
{
	if (aStream != stream)
	{
		//
		// Ignore messages from old streams (a seek replaces the stream ivar
		// but a stale callback may still be in flight)
		//
		return;
	}

	if (eventType == kCFStreamEventErrorOccurred)
	{
		[self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND];
	}
	else if (eventType == kCFStreamEventEndEncountered)
	{
		@synchronized(self)
		{
			if ([self isFinishing])
			{
				return;
			}
		}

		//
		// If there is a partially filled buffer, pass it to the AudioQueue for
		// processing. Done outside the lock because enqueueBuffer can block
		// waiting for a free buffer.
		//
		if (bytesFilled)
		{
			if (self.state == AS_WAITING_FOR_DATA)
			{
				//
				// Force audio data smaller than one whole buffer to play.
				//
				self.state = AS_FLUSHING_EOF;
			}
			[self enqueueBuffer];
		}

		@synchronized(self)
		{
			// EOF with nothing ever parsed means the URL had no audio.
			if (state == AS_WAITING_FOR_DATA)
			{
				[self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND];
			}

			//
			// We left the synchronized section to enqueue the buffer so we
			// must check that we are !finished again before touching the
			// audioQueue
			//
			else if (![self isFinishing])
			{
				if (audioQueue)
				{
					//
					// Set the progress at the end of the stream
					//
					err = AudioQueueFlush(audioQueue);
					if (err)
					{
						[self failWithErrorCode:AS_AUDIO_QUEUE_FLUSH_FAILED];
						return;
					}

					self.state = AS_STOPPING;
					stopReason = AS_STOPPING_EOF;
					// Asynchronous stop (false): let queued audio play out.
					err = AudioQueueStop(audioQueue, false);
					if (err)
					{
						[self failWithErrorCode:AS_AUDIO_QUEUE_FLUSH_FAILED];
						return;
					}
				}
				else
				{
					self.state = AS_STOPPED;
					stopReason = AS_STOPPING_EOF;
				}
			}
		}
	}
	else if (eventType == kCFStreamEventHasBytesAvailable)
	{
		// Lazily capture the HTTP response headers on the first data event.
		if (!httpHeaders)
		{
			// NOTE(review): CFReadStreamCopyProperty can return NULL if the
			// response header isn't available yet — assumed non-NULL here.
			CFTypeRef message =
				CFReadStreamCopyProperty(stream, kCFStreamPropertyHTTPResponseHeader);
			httpHeaders =
				(NSDictionary *)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)message);
			CFRelease(message);

			//
			// Only read the content length if we seeked to time zero, otherwise
			// we only have a subset of the total bytes.
			//
			if (seekByteOffset == 0)
			{
				fileLength = [[httpHeaders objectForKey:@"Content-Length"] integerValue];
			}
		}

		// Lazily create the parser on the first data event.
		if (!audioFileStream)
		{
			//
			// Attempt to guess the file type from the URL. Reading the MIME type
			// from the httpHeaders might be a better approach since lots of
			// URL's don't have the right extension.
			//
			// If you have a fixed file-type, you may want to hardcode this.
			//
			if (!self.fileExtension)
			{
				self.fileExtension = [[url path] pathExtension];
			}
			AudioFileTypeID fileTypeHint =
				[AudioStreamer hintForFileExtension:self.fileExtension];

			// create an audio file stream parser
			err = AudioFileStreamOpen(self, ASPropertyListenerProc, ASPacketsProc,
				fileTypeHint, &audioFileStream);
			if (err)
			{
				[self failWithErrorCode:AS_FILE_STREAM_OPEN_FAILED];
				return;
			}
		}

		UInt8 bytes[kAQDefaultBufSize];
		CFIndex length;
		@synchronized(self)
		{
			if ([self isFinishing] || !CFReadStreamHasBytesAvailable(stream))
			{
				return;
			}

			//
			// Read the bytes from the stream
			//
			length = CFReadStreamRead(stream, bytes, kAQDefaultBufSize);

			if (length == -1)
			{
				[self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND];
				return;
			}

			if (length == 0)
			{
				return;
			}
		}

		// Parsing happens outside the lock; the discontinuity flag tells the
		// parser the data does not continue from the previous bytes (set
		// after a seek or once ReadyToProducePackets fires).
		if (discontinuous)
		{
			err = AudioFileStreamParseBytes(audioFileStream, length, bytes, kAudioFileStreamParseFlag_Discontinuity);
			if (err)
			{
				[self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED];
				return;
			}
		}
		else
		{
			err = AudioFileStreamParseBytes(audioFileStream, length, bytes, 0);
			if (err)
			{
				[self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED];
				return;
			}
		}
	}
}
1566 | + |
//
// enqueueBuffer
//
// Called from ASPacketsProc and connectionDidFinishLoading to pass filled audio
// buffers (filled by ASPacketsProc) to the AudioQueue for playback. This
// function does not return until a buffer is idle for further filling or
// the AudioQueue is stopped.
//
// This function is adapted from Apple's example in AudioFileStreamExample with
// CBR functionality added.
//
- (void)enqueueBuffer
{
	@synchronized(self)
	{
		if ([self isFinishing] || stream == 0)
		{
			return;
		}

		// Mark the current buffer busy; handleBufferCompleteForQueue: is
		// presumed to clear the flag and signal the condition — not visible
		// in this chunk.
		inuse[fillBufferIndex] = true; // set in use flag
		buffersUsed++;

		// enqueue buffer
		AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex];
		fillBuf->mAudioDataByteSize = bytesFilled;

		// VBR data carries packet descriptions; CBR passes none.
		if (packetsFilled)
		{
			err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, packetsFilled, packetDescs);
		}
		else
		{
			err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, 0, NULL);
		}

		if (err)
		{
			[self failWithErrorCode:AS_AUDIO_QUEUE_ENQUEUE_FAILED];
			return;
		}


		if (state == AS_BUFFERING ||
			state == AS_WAITING_FOR_DATA ||
			state == AS_FLUSHING_EOF ||
			(state == AS_STOPPED && stopReason == AS_STOPPING_TEMPORARILY))
		{
			//
			// Fill all the buffers before starting. This ensures that the
			// AudioFileStream stays a small amount ahead of the AudioQueue to
			// avoid an audio glitch playing streaming files on iPhone SDKs < 3.0
			//
			if (state == AS_FLUSHING_EOF || buffersUsed == kNumAQBufs - 1)
			{
				if (self.state == AS_BUFFERING)
				{
					err = AudioQueueStart(audioQueue, NULL);
					if (err)
					{
						[self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED];
						return;
					}
					self.state = AS_PLAYING;
				}
				else
				{
					// Initial start: AS_PLAYING is entered later, when the
					// queue's isRunning property flips.
					self.state = AS_WAITING_FOR_QUEUE_TO_START;

					err = AudioQueueStart(audioQueue, NULL);
					if (err)
					{
						[self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED];
						return;
					}
				}
			}
		}

		// go to next buffer (ring of kNumAQBufs buffers)
		if (++fillBufferIndex >= kNumAQBufs) fillBufferIndex = 0;
		bytesFilled = 0;		// reset bytes filled
		packetsFilled = 0;		// reset packets filled
	}

	// wait until next buffer is not in use. This intentionally happens
	// outside @synchronized: holding the ObjC lock while blocked here would
	// deadlock against the queue callback that frees buffers.
	pthread_mutex_lock(&queueBuffersMutex);
	while (inuse[fillBufferIndex])
	{
		pthread_cond_wait(&queueBufferReadyCondition, &queueBuffersMutex);
	}
	pthread_mutex_unlock(&queueBuffersMutex);
}
1660 | + |
//
// createQueue
//
// Method to create the AudioQueue from the parameters gathered by the
// AudioFileStream.
//
// Creation is deferred to the handling of the first audio packet (although
// it could be handled any time after kAudioFileStreamProperty_ReadyToProducePackets
// is true).
//
- (void)createQueue
{
	sampleRate = asbd.mSampleRate;
	packetDuration = asbd.mFramesPerPacket / sampleRate;

	// create the audio queue
	err = AudioQueueNewOutput(&asbd, ASAudioQueueOutputCallback, self, NULL, NULL, 0, &audioQueue);
	if (err)
	{
		[self failWithErrorCode:AS_AUDIO_QUEUE_CREATION_FAILED];
		return;
	}

	// start the queue if it has not been started already
	// listen to the "isRunning" property
	err = AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning, ASAudioQueueIsRunningCallback, self);
	if (err)
	{
		[self failWithErrorCode:AS_AUDIO_QUEUE_ADD_LISTENER_FAILED];
		return;
	}

	// get the packet size if it is available; fall back from the upper
	// bound to the maximum, then to the default buffer size
	UInt32 sizeOfUInt32 = sizeof(UInt32);
	err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_PacketSizeUpperBound, &sizeOfUInt32, &packetBufferSize);
	if (err || packetBufferSize == 0)
	{
		err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MaximumPacketSize, &sizeOfUInt32, &packetBufferSize);
		if (err || packetBufferSize == 0)
		{
			// No packet size available, just use the default
			packetBufferSize = kAQDefaultBufSize;
		}
	}

	// allocate audio queue buffers
	for (unsigned int i = 0; i < kNumAQBufs; ++i)
	{
		err = AudioQueueAllocateBuffer(audioQueue, packetBufferSize, &audioQueueBuffer[i]);
		if (err)
		{
			[self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED];
			return;
		}
	}

	// get the cookie size; a missing magic cookie is not an error for
	// formats that don't use one
	UInt32 cookieSize;
	Boolean writable;
	OSStatus ignorableError;
	ignorableError = AudioFileStreamGetPropertyInfo(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable);
	if (ignorableError)
	{
		return;
	}

	// get the cookie data
	void* cookieData = calloc(1, cookieSize);
	ignorableError = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData);
	if (ignorableError)
	{
		// Fix: the original returned here without freeing cookieData,
		// leaking the buffer on every failed cookie read.
		free(cookieData);
		return;
	}

	// set the cookie on the queue.
	ignorableError = AudioQueueSetProperty(audioQueue, kAudioQueueProperty_MagicCookie, cookieData, cookieSize);
	free(cookieData);
	if (ignorableError)
	{
		return;
	}
}
1743 | + |
1744 | +// |
1745 | +// handlePropertyChangeForFileStream:fileStreamPropertyID:ioFlags: |
1746 | +// |
1747 | +// Object method which handles implementation of ASPropertyListenerProc |
1748 | +// |
1749 | +// Parameters: |
1750 | +// inAudioFileStream - should be the same as self->audioFileStream |
1751 | +// inPropertyID - the property that changed |
1752 | +// ioFlags - the ioFlags passed in |
1753 | +// |
1754 | +- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream |
1755 | + fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID |
1756 | + ioFlags:(UInt32 *)ioFlags |
1757 | +{ |
1758 | + @synchronized(self) |
1759 | + { |
1760 | + if ([self isFinishing]) |
1761 | + { |
1762 | + return; |
1763 | + } |
1764 | + |
1765 | + if (inPropertyID == kAudioFileStreamProperty_ReadyToProducePackets) |
1766 | + { |
1767 | + discontinuous = true; |
1768 | + } |
1769 | + else if (inPropertyID == kAudioFileStreamProperty_DataOffset) |
1770 | + { |
1771 | + SInt64 offset; |
1772 | + UInt32 offsetSize = sizeof(offset); |
1773 | + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataOffset, &offsetSize, &offset); |
1774 | + if (err) |
1775 | + { |
1776 | + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
1777 | + return; |
1778 | + } |
1779 | + dataOffset = offset; |
1780 | + |
1781 | + if (audioDataByteCount) |
1782 | + { |
1783 | + fileLength = dataOffset + audioDataByteCount; |
1784 | + } |
1785 | + } |
1786 | + else if (inPropertyID == kAudioFileStreamProperty_AudioDataByteCount) |
1787 | + { |
1788 | + UInt32 byteCountSize = sizeof(UInt64); |
1789 | + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_AudioDataByteCount, &byteCountSize, &audioDataByteCount); |
1790 | + if (err) |
1791 | + { |
1792 | + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
1793 | + return; |
1794 | + } |
1795 | + fileLength = dataOffset + audioDataByteCount; |
1796 | + } |
1797 | + else if (inPropertyID == kAudioFileStreamProperty_DataFormat) |
1798 | + { |
1799 | + if (asbd.mSampleRate == 0) |
1800 | + { |
1801 | + UInt32 asbdSize = sizeof(asbd); |
1802 | + |
1803 | + // get the stream format. |
1804 | + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &asbd); |
1805 | + if (err) |
1806 | + { |
1807 | + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
1808 | + return; |
1809 | + } |
1810 | + } |
1811 | + } |
1812 | + else if (inPropertyID == kAudioFileStreamProperty_FormatList) |
1813 | + { |
1814 | + Boolean outWriteable; |
1815 | + UInt32 formatListSize; |
1816 | + err = AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, &outWriteable); |
1817 | + if (err) |
1818 | + { |
1819 | + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
1820 | + return; |
1821 | + } |
1822 | + |
1823 | + AudioFormatListItem *formatList = malloc(formatListSize); |
1824 | + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, formatList); |
1825 | + if (err) |
1826 | + { |
1827 | + free(formatList); |
1828 | + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
1829 | + return; |
1830 | + } |
1831 | + |
1832 | + for (int i = 0; i * sizeof(AudioFormatListItem) < formatListSize; i += sizeof(AudioFormatListItem)) |
1833 | + { |
1834 | + AudioStreamBasicDescription pasbd = formatList[i].mASBD; |
1835 | + |
1836 | + if (pasbd.mFormatID == kAudioFormatMPEG4AAC_HE || |
1837 | + pasbd.mFormatID == kAudioFormatMPEG4AAC_HE_V2) |
1838 | + { |
1839 | + // |
1840 | + // We've found HE-AAC, remember this to tell the audio queue |
1841 | + // when we construct it. |
1842 | + // |
1843 | +#if !TARGET_IPHONE_SIMULATOR |
1844 | + asbd = pasbd; |
1845 | +#endif |
1846 | + break; |
1847 | + } |
1848 | + } |
1849 | + free(formatList); |
1850 | + } |
1851 | + else |
1852 | + { |
1853 | +// NSLog(@"Property is %c%c%c%c", |
1854 | +// ((char *)&inPropertyID)[3], |
1855 | +// ((char *)&inPropertyID)[2], |
1856 | +// ((char *)&inPropertyID)[1], |
1857 | +// ((char *)&inPropertyID)[0]); |
1858 | + } |
1859 | + } |
1860 | +} |
1861 | + |
1862 | +// |
1863 | +// handleAudioPackets:numberBytes:numberPackets:packetDescriptions: |
1864 | +// |
1865 | +// Object method which handles the implementation of ASPacketsProc |
1866 | +// |
1867 | +// Parameters: |
1868 | +// inInputData - the packet data |
1869 | +// inNumberBytes - byte size of the data |
1870 | +// inNumberPackets - number of packets in the data |
1871 | +// inPacketDescriptions - packet descriptions |
1872 | +// |
1873 | +- (void)handleAudioPackets:(const void *)inInputData |
1874 | + numberBytes:(UInt32)inNumberBytes |
1875 | + numberPackets:(UInt32)inNumberPackets |
1876 | + packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions; |
1877 | +{ |
1878 | + @synchronized(self) |
1879 | + { |
1880 | + if ([self isFinishing]) |
1881 | + { |
1882 | + return; |
1883 | + } |
1884 | + |
1885 | + if (bitRate == 0) |
1886 | + { |
1887 | + // |
1888 | + // m4a and a few other formats refuse to parse the bitrate so |
1889 | + // we need to set an "unparseable" condition here. If you know |
1890 | + // the bitrate (parsed it another way) you can set it on the |
1891 | + // class if needed. |
1892 | + // |
1893 | + bitRate = ~0; |
1894 | + } |
1895 | + |
 1896 | +	// we have successfully read the first packets from the audio stream, so
1897 | + // clear the "discontinuous" flag |
1898 | + if (discontinuous) |
1899 | + { |
1900 | + discontinuous = false; |
1901 | + } |
1902 | + |
1903 | + if (!audioQueue) |
1904 | + { |
1905 | + [self createQueue]; |
1906 | + } |
1907 | + } |
1908 | + |
1909 | + // the following code assumes we're streaming VBR data. for CBR data, the second branch is used. |
1910 | + if (inPacketDescriptions) |
1911 | + { |
1912 | + for (int i = 0; i < inNumberPackets; ++i) |
1913 | + { |
1914 | + SInt64 packetOffset = inPacketDescriptions[i].mStartOffset; |
1915 | + SInt64 packetSize = inPacketDescriptions[i].mDataByteSize; |
1916 | + size_t bufSpaceRemaining; |
1917 | + |
1918 | + if (processedPacketsCount < BitRateEstimationMaxPackets) |
1919 | + { |
1920 | + processedPacketsSizeTotal += packetSize; |
1921 | + processedPacketsCount += 1; |
1922 | + } |
1923 | + |
1924 | + @synchronized(self) |
1925 | + { |
1926 | + // If the audio was terminated before this point, then |
1927 | + // exit. |
1928 | + if ([self isFinishing]) |
1929 | + { |
1930 | + return; |
1931 | + } |
1932 | + |
1933 | + if (packetSize > packetBufferSize) |
1934 | + { |
1935 | + [self failWithErrorCode:AS_AUDIO_BUFFER_TOO_SMALL]; |
1936 | + } |
1937 | + |
1938 | + bufSpaceRemaining = packetBufferSize - bytesFilled; |
1939 | + } |
1940 | + |
1941 | + // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. |
1942 | + if (bufSpaceRemaining < packetSize) |
1943 | + { |
1944 | + [self enqueueBuffer]; |
1945 | + } |
1946 | + |
1947 | + @synchronized(self) |
1948 | + { |
1949 | + // If the audio was terminated while waiting for a buffer, then |
1950 | + // exit. |
1951 | + if ([self isFinishing]) |
1952 | + { |
1953 | + return; |
1954 | + } |
1955 | + |
1956 | + // |
1957 | + // If there was some kind of issue with enqueueBuffer and we didn't |
1958 | + // make space for the new audio data then back out |
1959 | + // |
1960 | + if (bytesFilled + packetSize > packetBufferSize) |
1961 | + { |
1962 | + return; |
1963 | + } |
1964 | + |
1965 | + // copy data to the audio queue buffer |
1966 | + AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; |
1967 | + memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)inInputData + packetOffset, packetSize); |
1968 | + |
1969 | + // fill out packet description |
1970 | + packetDescs[packetsFilled] = inPacketDescriptions[i]; |
1971 | + packetDescs[packetsFilled].mStartOffset = bytesFilled; |
1972 | + // keep track of bytes filled and packets filled |
1973 | + bytesFilled += packetSize; |
1974 | + packetsFilled += 1; |
1975 | + } |
1976 | + |
1977 | + // if that was the last free packet description, then enqueue the buffer. |
1978 | + size_t packetsDescsRemaining = kAQMaxPacketDescs - packetsFilled; |
1979 | + if (packetsDescsRemaining == 0) { |
1980 | + [self enqueueBuffer]; |
1981 | + } |
1982 | + } |
1983 | + } |
1984 | + else |
1985 | + { |
1986 | + size_t offset = 0; |
1987 | + while (inNumberBytes) |
1988 | + { |
1989 | + // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. |
1990 | + size_t bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; |
1991 | + if (bufSpaceRemaining < inNumberBytes) |
1992 | + { |
1993 | + [self enqueueBuffer]; |
1994 | + } |
1995 | + |
1996 | + @synchronized(self) |
1997 | + { |
1998 | + // If the audio was terminated while waiting for a buffer, then |
1999 | + // exit. |
2000 | + if ([self isFinishing]) |
2001 | + { |
2002 | + return; |
2003 | + } |
2004 | + |
2005 | + bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; |
2006 | + size_t copySize; |
2007 | + if (bufSpaceRemaining < inNumberBytes) |
2008 | + { |
2009 | + copySize = bufSpaceRemaining; |
2010 | + } |
2011 | + else |
2012 | + { |
2013 | + copySize = inNumberBytes; |
2014 | + } |
2015 | + |
2016 | + // |
2017 | + // If there was some kind of issue with enqueueBuffer and we didn't |
2018 | + // make space for the new audio data then back out |
2019 | + // |
2020 | + if (bytesFilled > packetBufferSize) |
2021 | + { |
2022 | + return; |
2023 | + } |
2024 | + |
2025 | + // copy data to the audio queue buffer |
2026 | + AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; |
2027 | + memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)(inInputData + offset), copySize); |
2028 | + |
2029 | + |
2030 | + // keep track of bytes filled and packets filled |
2031 | + bytesFilled += copySize; |
2032 | + packetsFilled = 0; |
2033 | + inNumberBytes -= copySize; |
2034 | + offset += copySize; |
2035 | + } |
2036 | + } |
2037 | + } |
2038 | +} |
2039 | + |
2040 | +// |
2041 | +// handleBufferCompleteForQueue:buffer: |
2042 | +// |
 2043 | +// Handles the buffer completion notification from the audio queue
2044 | +// |
2045 | +// Parameters: |
2046 | +// inAQ - the queue |
2047 | +// inBuffer - the buffer |
2048 | +// |
2049 | +- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ |
2050 | + buffer:(AudioQueueBufferRef)inBuffer |
2051 | +{ |
2052 | + unsigned int bufIndex = -1; |
2053 | + for (unsigned int i = 0; i < kNumAQBufs; ++i) |
2054 | + { |
2055 | + if (inBuffer == audioQueueBuffer[i]) |
2056 | + { |
2057 | + bufIndex = i; |
2058 | + break; |
2059 | + } |
2060 | + } |
2061 | + |
2062 | + if (bufIndex == -1) |
2063 | + { |
2064 | + [self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_MISMATCH]; |
2065 | + pthread_mutex_lock(&queueBuffersMutex); |
2066 | + pthread_cond_signal(&queueBufferReadyCondition); |
2067 | + pthread_mutex_unlock(&queueBuffersMutex); |
2068 | + return; |
2069 | + } |
2070 | + |
2071 | + // signal waiting thread that the buffer is free. |
2072 | + pthread_mutex_lock(&queueBuffersMutex); |
2073 | + inuse[bufIndex] = false; |
2074 | + buffersUsed--; |
2075 | + |
2076 | +// |
2077 | +// Enable this logging to measure how many buffers are queued at any time. |
2078 | +// |
2079 | +#if LOG_QUEUED_BUFFERS |
2080 | + NSLog(@"Queued buffers: %ld", buffersUsed); |
2081 | +#endif |
2082 | + |
2083 | + pthread_cond_signal(&queueBufferReadyCondition); |
2084 | + pthread_mutex_unlock(&queueBuffersMutex); |
2085 | +} |
2086 | + |
2087 | +// |
2088 | +// handlePropertyChangeForQueue:propertyID: |
2089 | +// |
2090 | +// Implementation for ASAudioQueueIsRunningCallback |
2091 | +// |
2092 | +// Parameters: |
2093 | +// inAQ - the audio queue |
2094 | +// inID - the property ID |
2095 | +// |
2096 | +- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ |
2097 | + propertyID:(AudioQueuePropertyID)inID |
2098 | +{ |
2099 | + NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; |
2100 | + |
2101 | + @synchronized(self) |
2102 | + { |
2103 | + if (inID == kAudioQueueProperty_IsRunning) |
2104 | + { |
2105 | + if (state == AS_STOPPING) |
2106 | + { |
2107 | + self.state = AS_STOPPED; |
2108 | + } |
2109 | + else if (state == AS_WAITING_FOR_QUEUE_TO_START) |
2110 | + { |
2111 | + // |
2112 | + // Note about this bug avoidance quirk: |
2113 | + // |
2114 | + // On cleanup of the AudioQueue thread, on rare occasions, there would |
2115 | + // be a crash in CFSetContainsValue as a CFRunLoopObserver was getting |
2116 | + // removed from the CFRunLoop. |
2117 | + // |
2118 | + // After lots of testing, it appeared that the audio thread was |
2119 | + // attempting to remove CFRunLoop observers from the CFRunLoop after the |
2120 | + // thread had already deallocated the run loop. |
2121 | + // |
2122 | + // By creating an NSRunLoop for the AudioQueue thread, it changes the |
2123 | + // thread destruction order and seems to avoid this crash bug -- or |
2124 | + // at least I haven't had it since (nasty hard to reproduce error!) |
2125 | + // |
2126 | + [NSRunLoop currentRunLoop]; |
2127 | + |
2128 | + self.state = AS_PLAYING; |
2129 | + } |
2130 | + else |
2131 | + { |
2132 | + NSLog(@"AudioQueue changed state in unexpected way."); |
2133 | + } |
2134 | + } |
2135 | + } |
2136 | + |
2137 | + [pool release]; |
2138 | +} |
2139 | + |
2140 | +#if TARGET_OS_IPHONE |
2141 | +// |
2142 | +// handleInterruptionChangeForQueue:propertyID: |
2143 | +// |
2144 | +// Implementation for ASAudioQueueInterruptionListener |
2145 | +// |
2146 | +// Parameters: |
2147 | +// inAQ - the audio queue |
2148 | +// inID - the property ID |
2149 | +// |
2150 | +- (void)handleInterruptionChangeToState:(AudioQueuePropertyID)inInterruptionState |
2151 | +{ |
2152 | + if (inInterruptionState == kAudioSessionBeginInterruption) |
2153 | + { |
2154 | + if ([self isPlaying]) { |
2155 | + [self pause]; |
2156 | + |
2157 | + pausedByInterruption = YES; |
2158 | + } |
2159 | + } |
2160 | + else if (inInterruptionState == kAudioSessionEndInterruption) |
2161 | + { |
2162 | + AudioSessionSetActive( true ); |
2163 | + |
2164 | + if ([self isPaused] && pausedByInterruption) { |
2165 | + [self pause]; // this is actually resume |
2166 | + |
2167 | + pausedByInterruption = NO; // this is redundant |
2168 | + } |
2169 | + } |
2170 | +} |
2171 | +#endif |
2172 | + |
2173 | +@end |
2174 | + |
2175 | + |
2176 | |
2177 | === modified file 'U1Music.xcodeproj/project.pbxproj' |
2178 | --- U1Music.xcodeproj/project.pbxproj 2013-02-11 04:17:22 +0000 |
2179 | +++ U1Music.xcodeproj/project.pbxproj 2013-02-11 04:17:22 +0000 |
2180 | @@ -11,6 +11,7 @@ |
2181 | 1DF5F4E00D08C38300B7A737 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1DF5F4DF0D08C38300B7A737 /* UIKit.framework */; }; |
2182 | 2892E4100DC94CBA00A64D0F /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2892E40F0DC94CBA00A64D0F /* CoreGraphics.framework */; }; |
2183 | 5207A4C216C89B140006A4E6 /* libFlurry.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 5207A4C116C89B140006A4E6 /* libFlurry.a */; }; |
2184 | + 5207A4CE16C8A6620006A4E6 /* AudioStreamer.m in Sources */ = {isa = PBXBuildFile; fileRef = 5207A4CD16C8A6620006A4E6 /* AudioStreamer.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc"; }; }; |
2185 | 520BBF2416B51F2A00307F32 /* UODownloader.m in Sources */ = {isa = PBXBuildFile; fileRef = 520BBF2316B51F2A00307F32 /* UODownloader.m */; }; |
2186 | 52169C9815D95DD100ED366D /* cancel-grey.png in Resources */ = {isa = PBXBuildFile; fileRef = 52169C9415D95DD100ED366D /* cancel-grey.png */; }; |
2187 | 52169C9915D95DD100ED366D /* cancel-grey@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 52169C9515D95DD100ED366D /* cancel-grey@2x.png */; }; |
2188 | @@ -169,7 +170,6 @@ |
2189 | 93F334691247FB02006C6707 /* MainWindow.xib in Resources */ = {isa = PBXBuildFile; fileRef = 93F334651247FB02006C6707 /* MainWindow.xib */; }; |
2190 | 93F3346A1247FB02006C6707 /* SearchableTableViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 93F334661247FB02006C6707 /* SearchableTableViewController.xib */; }; |
2191 | 93F334721247FB78006C6707 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 93F334701247FB78006C6707 /* main.m */; }; |
2192 | - 93F334751247FB9F006C6707 /* AudioStreamer.m in Sources */ = {isa = PBXBuildFile; fileRef = 93F334741247FB9F006C6707 /* AudioStreamer.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc"; }; }; |
2193 | 93FA42AE124DC1350080DF62 /* 03-loopback.png in Resources */ = {isa = PBXBuildFile; fileRef = 93FA42A7124DC1350080DF62 /* 03-loopback.png */; }; |
2194 | 93FA42AF124DC1350080DF62 /* 03-loopback@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 93FA42A8124DC1350080DF62 /* 03-loopback@2x.png */; }; |
2195 | 93FA42B0124DC1350080DF62 /* 05-shuffle.png in Resources */ = {isa = PBXBuildFile; fileRef = 93FA42A9124DC1350080DF62 /* 05-shuffle.png */; }; |
2196 | @@ -259,6 +259,8 @@ |
2197 | 2892E40F0DC94CBA00A64D0F /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; }; |
2198 | 5207A4C016C89B140006A4E6 /* Flurry.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Flurry.h; sourceTree = "<group>"; }; |
2199 | 5207A4C116C89B140006A4E6 /* libFlurry.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libFlurry.a; sourceTree = "<group>"; }; |
2200 | + 5207A4CC16C8A6620006A4E6 /* AudioStreamer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioStreamer.h; sourceTree = "<group>"; }; |
2201 | + 5207A4CD16C8A6620006A4E6 /* AudioStreamer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioStreamer.m; sourceTree = "<group>"; }; |
2202 | 520BBF2216B51F2A00307F32 /* UODownloader.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = UODownloader.h; sourceTree = "<group>"; }; |
2203 | 520BBF2316B51F2A00307F32 /* UODownloader.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = UODownloader.m; sourceTree = "<group>"; }; |
2204 | 52169C9415D95DD100ED366D /* cancel-grey.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "cancel-grey.png"; sourceTree = "<group>"; }; |
2205 | @@ -558,8 +560,6 @@ |
2206 | 93F3346E1247FB78006C6707 /* U1MusicAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = U1MusicAppDelegate.h; path = "Other Sources/U1MusicAppDelegate.h"; sourceTree = "<group>"; }; |
2207 | 93F3346F1247FB78006C6707 /* U1MusicAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = U1MusicAppDelegate.m; path = "Other Sources/U1MusicAppDelegate.m"; sourceTree = "<group>"; }; |
2208 | 93F334701247FB78006C6707 /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; }; |
2209 | - 93F334731247FB9F006C6707 /* AudioStreamer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioStreamer.h; sourceTree = "<group>"; }; |
2210 | - 93F334741247FB9F006C6707 /* AudioStreamer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioStreamer.m; sourceTree = "<group>"; }; |
2211 | 93F334841247FC15006C6707 /* U1Music_Prefix.pch */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = U1Music_Prefix.pch; path = ../U1Music_Prefix.pch; sourceTree = "<group>"; }; |
2212 | 93F3348D1247FCDC006C6707 /* ArtistViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ArtistViewController.h; path = view_controllers/ArtistViewController.h; sourceTree = "<group>"; }; |
2213 | 93F3348E1247FCDC006C6707 /* ArtistViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; lineEnding = 0; name = ArtistViewController.m; path = view_controllers/ArtistViewController.m; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.objc; }; |
2214 | @@ -721,6 +721,16 @@ |
2215 | path = Dependencies/Flurry; |
2216 | sourceTree = "<group>"; |
2217 | }; |
2218 | + 5207A4CB16C8A6620006A4E6 /* AudioStreamer */ = { |
2219 | + isa = PBXGroup; |
2220 | + children = ( |
2221 | + 5207A4CC16C8A6620006A4E6 /* AudioStreamer.h */, |
2222 | + 5207A4CD16C8A6620006A4E6 /* AudioStreamer.m */, |
2223 | + ); |
2224 | + name = AudioStreamer; |
2225 | + path = Dependencies/AudioStreamer; |
2226 | + sourceTree = "<group>"; |
2227 | + }; |
2228 | 522B24E716B4BBC30084B023 /* Controls */ = { |
2229 | isa = PBXGroup; |
2230 | children = ( |
2231 | @@ -1115,8 +1125,6 @@ |
2232 | 91C493CF14E4593C00BC1815 /* U1ByteSizeValueTransformer.m */, |
2233 | 93BC5207124C181600B7587C /* Subsonic.h */, |
2234 | 93BC5208124C181600B7587C /* Subsonic.m */, |
2235 | - 93F334731247FB9F006C6707 /* AudioStreamer.h */, |
2236 | - 93F334741247FB9F006C6707 /* AudioStreamer.m */, |
2237 | 93F3344B1247FA0B006C6707 /* Reachability.h */, |
2238 | 93F3344C1247FA0B006C6707 /* Reachability.m */, |
2239 | 93BC52A5124C1E6900B7587C /* StreamingPlayer.h */, |
2240 | @@ -1313,6 +1321,7 @@ |
2241 | 964FA39013CA5BE60018A65B /* Dependencies */ = { |
2242 | isa = PBXGroup; |
2243 | children = ( |
2244 | + 5207A4CB16C8A6620006A4E6 /* AudioStreamer */, |
2245 | 5207A4BF16C89B140006A4E6 /* Flurry */, |
2246 | 5257414C16C37E1A00530CCC /* SSPullToRefresh */, |
2247 | 93BC520A124C187700B7587C /* SynthesizeSingleton.h */, |
2248 | @@ -1598,7 +1607,6 @@ |
2249 | 93F3345C1247FA85006C6707 /* AlbumUITableViewCell.m in Sources */, |
2250 | 93F3345F1247FA97006C6707 /* SongUITableViewCell.m in Sources */, |
2251 | 93F334721247FB78006C6707 /* main.m in Sources */, |
2252 | - 93F334751247FB9F006C6707 /* AudioStreamer.m in Sources */, |
2253 | 9354D0AB1248267B00733067 /* NSDictionary+Extras.m in Sources */, |
2254 | 93BC52A7124C1E6900B7587C /* StreamingPlayer.m in Sources */, |
2255 | 93D6B0FF124ED061007880B0 /* SubsonicViewController.m in Sources */, |
2256 | @@ -1683,6 +1691,7 @@ |
2257 | 5257415616C37E1A00530CCC /* SSPullToRefreshView.m in Sources */, |
2258 | 5257417716C5CC5D00530CCC /* NSString+UbuntuOne.m in Sources */, |
2259 | 5257417A16C5CDA900530CCC /* UIImageView+UbuntuOne.m in Sources */, |
2260 | + 5207A4CE16C8A6620006A4E6 /* AudioStreamer.m in Sources */, |
2261 | ); |
2262 | runOnlyForDeploymentPostprocessing = 0; |
2263 | }; |
2264 | |
2265 | === removed file 'utilities/AudioStreamer.h' |
2266 | --- utilities/AudioStreamer.h 2012-08-16 17:14:27 +0000 |
2267 | +++ utilities/AudioStreamer.h 1970-01-01 00:00:00 +0000 |
2268 | @@ -1,197 +0,0 @@ |
2269 | -// |
2270 | -// AudioStreamer.h |
2271 | -// StreamingAudioPlayer |
2272 | -// |
2273 | -// Created by Matt Gallagher on 27/09/08. |
2274 | -// Copyright 2008 Matt Gallagher. All rights reserved. |
2275 | -// |
2276 | -// This software is provided 'as-is', without any express or implied |
2277 | -// warranty. In no event will the authors be held liable for any damages |
2278 | -// arising from the use of this software. Permission is granted to anyone to |
2279 | -// use this software for any purpose, including commercial applications, and to |
2280 | -// alter it and redistribute it freely, subject to the following restrictions: |
2281 | -// |
2282 | -// 1. The origin of this software must not be misrepresented; you must not |
2283 | -// claim that you wrote the original software. If you use this software |
2284 | -// in a product, an acknowledgment in the product documentation would be |
2285 | -// appreciated but is not required. |
2286 | -// 2. Altered source versions must be plainly marked as such, and must not be |
2287 | -// misrepresented as being the original software. |
2288 | -// 3. This notice may not be removed or altered from any source |
2289 | -// distribution. |
2290 | -// |
2291 | - |
2292 | -#if TARGET_OS_IPHONE |
2293 | -#import <UIKit/UIKit.h> |
2294 | -#else |
2295 | -#import <Cocoa/Cocoa.h> |
2296 | -#endif // TARGET_OS_IPHONE |
2297 | - |
2298 | -#include <pthread.h> |
2299 | -#include <AudioToolbox/AudioToolbox.h> |
2300 | - |
2301 | -#define LOG_QUEUED_BUFFERS 0 |
2302 | - |
2303 | -#define kNumAQBufs 16 // Number of audio queue buffers we allocate. |
2304 | - // Needs to be big enough to keep audio pipeline |
2305 | - // busy (non-zero number of queued buffers) but |
2306 | - // not so big that audio takes too long to begin |
2307 | - // (kNumAQBufs * kAQBufSize of data must be |
2308 | - // loaded before playback will start). |
2309 | - // |
2310 | - // Set LOG_QUEUED_BUFFERS to 1 to log how many |
2311 | - // buffers are queued at any time -- if it drops |
2312 | - // to zero too often, this value may need to |
2313 | - // increase. Min 3, typical 8-24. |
2314 | - |
2315 | -#define kAQDefaultBufSize 2048 // Number of bytes in each audio queue buffer |
2316 | - // Needs to be big enough to hold a packet of |
2317 | - // audio from the audio file. If number is too |
2318 | - // large, queuing of audio before playback starts |
2319 | - // will take too long. |
2320 | - // Highly compressed files can use smaller |
2321 | - // numbers (512 or less). 2048 should hold all |
2322 | - // but the largest packets. A buffer size error |
2323 | - // will occur if this number is too small. |
2324 | - |
2325 | -#define kAQMaxPacketDescs 512 // Number of packet descriptions in our array |
2326 | - |
2327 | -typedef enum |
2328 | -{ |
2329 | - AS_INITIALIZED = 0, |
2330 | - AS_STARTING_FILE_THREAD, |
2331 | - AS_WAITING_FOR_DATA, |
2332 | - AS_FLUSHING_EOF, |
2333 | - AS_WAITING_FOR_QUEUE_TO_START, |
2334 | - AS_PLAYING, |
2335 | - AS_BUFFERING, |
2336 | - AS_STOPPING, |
2337 | - AS_STOPPED, |
2338 | - AS_PAUSED |
2339 | -} AudioStreamerState; |
2340 | - |
2341 | -typedef enum |
2342 | -{ |
2343 | - AS_NO_STOP = 0, |
2344 | - AS_STOPPING_EOF, |
2345 | - AS_STOPPING_USER_ACTION, |
2346 | - AS_STOPPING_ERROR, |
2347 | - AS_STOPPING_TEMPORARILY |
2348 | -} AudioStreamerStopReason; |
2349 | - |
2350 | -typedef enum |
2351 | -{ |
2352 | - AS_NO_ERROR = 0, |
2353 | - AS_NETWORK_CONNECTION_FAILED, |
2354 | - AS_FILE_STREAM_GET_PROPERTY_FAILED, |
2355 | - AS_FILE_STREAM_SEEK_FAILED, |
2356 | - AS_FILE_STREAM_PARSE_BYTES_FAILED, |
2357 | - AS_FILE_STREAM_OPEN_FAILED, |
2358 | - AS_FILE_STREAM_CLOSE_FAILED, |
2359 | - AS_AUDIO_DATA_NOT_FOUND, |
2360 | - AS_AUDIO_QUEUE_CREATION_FAILED, |
2361 | - AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED, |
2362 | - AS_AUDIO_QUEUE_ENQUEUE_FAILED, |
2363 | - AS_AUDIO_QUEUE_ADD_LISTENER_FAILED, |
2364 | - AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED, |
2365 | - AS_AUDIO_QUEUE_START_FAILED, |
2366 | - AS_AUDIO_QUEUE_PAUSE_FAILED, |
2367 | - AS_AUDIO_QUEUE_BUFFER_MISMATCH, |
2368 | - AS_AUDIO_QUEUE_DISPOSE_FAILED, |
2369 | - AS_AUDIO_QUEUE_STOP_FAILED, |
2370 | - AS_AUDIO_QUEUE_FLUSH_FAILED, |
2371 | - AS_AUDIO_STREAMER_FAILED, |
2372 | - AS_GET_AUDIO_TIME_FAILED, |
2373 | - AS_AUDIO_BUFFER_TOO_SMALL |
2374 | -} AudioStreamerErrorCode; |
2375 | - |
2376 | -extern NSString * const ASStatusChangedNotification; |
2377 | - |
2378 | -@interface AudioStreamer : NSObject |
2379 | -{ |
2380 | - NSURL *url; |
2381 | - |
2382 | - // |
2383 | - // Special threading consideration: |
2384 | - // The audioQueue property should only ever be accessed inside a |
2385 | - // synchronized(self) block and only *after* checking that ![self isFinishing] |
2386 | - // |
2387 | - AudioQueueRef audioQueue; |
2388 | - AudioFileStreamID audioFileStream; // the audio file stream parser |
2389 | - AudioStreamBasicDescription asbd; // description of the audio |
2390 | - NSThread *internalThread; // the thread where the download and |
2391 | - // audio file stream parsing occurs |
2392 | - |
2393 | - AudioQueueBufferRef audioQueueBuffer[kNumAQBufs]; // audio queue buffers |
2394 | - AudioStreamPacketDescription packetDescs[kAQMaxPacketDescs]; // packet descriptions for enqueuing audio |
2395 | - unsigned int fillBufferIndex; // the index of the audioQueueBuffer that is being filled |
2396 | - UInt32 packetBufferSize; |
2397 | - size_t bytesFilled; // how many bytes have been filled |
2398 | - size_t packetsFilled; // how many packets have been filled |
2399 | - bool inuse[kNumAQBufs]; // flags to indicate that a buffer is still in use |
2400 | - NSInteger buffersUsed; |
2401 | - NSDictionary *httpHeaders; |
2402 | - |
2403 | - AudioStreamerState state; |
2404 | - AudioStreamerStopReason stopReason; |
2405 | - AudioStreamerErrorCode errorCode; |
2406 | - OSStatus err; |
2407 | - |
2408 | - bool discontinuous; // flag to indicate middle of the stream |
2409 | - |
2410 | - pthread_mutex_t queueBuffersMutex; // a mutex to protect the inuse flags |
2411 | - pthread_cond_t queueBufferReadyCondition; // a condition varable for handling the inuse flags |
2412 | - |
2413 | - CFReadStreamRef stream; |
2414 | - NSNotificationCenter *notificationCenter; |
2415 | - |
2416 | - UInt32 bitRate; // Bits per second in the file |
2417 | - NSInteger dataOffset; // Offset of the first audio packet in the stream |
2418 | - NSInteger fileLength; // Length of the file in bytes |
2419 | - NSInteger seekByteOffset; // Seek offset within the file in bytes |
2420 | - UInt64 audioDataByteCount; // Used when the actual number of audio bytes in |
2421 | - // the file is known (more accurate than assuming |
2422 | - // the whole file is audio) |
2423 | - |
2424 | - UInt64 processedPacketsCount; // number of packets accumulated for bitrate estimation |
2425 | - UInt64 processedPacketsSizeTotal; // byte size of accumulated estimation packets |
2426 | - |
2427 | - double seekTime; |
2428 | - BOOL seekWasRequested; |
2429 | - double requestedSeekTime; |
2430 | - double sampleRate; // Sample rate of the file (used to compare with |
2431 | - // samples played by the queue for current playback |
2432 | - // time) |
2433 | - double packetDuration; // sample rate times frames per packet |
2434 | - double lastProgress; // last calculated progress point |
2435 | -#if TARGET_OS_IPHONE |
2436 | - BOOL pausedByInterruption; |
2437 | -#endif |
2438 | -} |
2439 | - |
2440 | -@property AudioStreamerErrorCode errorCode; |
2441 | -@property (nonatomic, readonly) AudioStreamerState state; |
2442 | -@property (readonly) AudioStreamerStopReason stopReason; |
2443 | -@property (readonly) double progress; |
2444 | -@property (readonly) double duration; |
2445 | -@property (readwrite) UInt32 bitRate; |
2446 | -@property (readonly) NSDictionary *httpHeaders; |
2447 | - |
2448 | -- (id)initWithURL:(NSURL *)aURL; |
2449 | -- (void)start; |
2450 | -- (void)stop; |
2451 | -- (void)pause; |
2452 | -- (BOOL)isPlaying; |
2453 | -- (BOOL)isPaused; |
2454 | -- (BOOL)isWaiting; |
2455 | -- (BOOL)isIdle; |
2456 | -- (void)seekToTime:(double)newSeekTime; |
2457 | -- (double)calculatedBitRate; |
2458 | - |
2459 | -@end |
2460 | - |
2461 | - |
2462 | - |
2463 | - |
2464 | - |
2465 | - |
2466 | |
2467 | === removed file 'utilities/AudioStreamer.m' |
2468 | --- utilities/AudioStreamer.m 2012-09-15 16:33:02 +0000 |
2469 | +++ utilities/AudioStreamer.m 1970-01-01 00:00:00 +0000 |
2470 | @@ -1,1996 +0,0 @@ |
2471 | -// |
2472 | -// AudioStreamer.m |
2473 | -// StreamingAudioPlayer |
2474 | -// |
2475 | -// Created by Matt Gallagher on 27/09/08. |
2476 | -// Copyright 2008 Matt Gallagher. All rights reserved. |
2477 | -// |
2478 | -// This software is provided 'as-is', without any express or implied |
2479 | -// warranty. In no event will the authors be held liable for any damages |
2480 | -// arising from the use of this software. Permission is granted to anyone to |
2481 | -// use this software for any purpose, including commercial applications, and to |
2482 | -// alter it and redistribute it freely, subject to the following restrictions: |
2483 | -// |
2484 | -// 1. The origin of this software must not be misrepresented; you must not |
2485 | -// claim that you wrote the original software. If you use this software |
2486 | -// in a product, an acknowledgment in the product documentation would be |
2487 | -// appreciated but is not required. |
2488 | -// 2. Altered source versions must be plainly marked as such, and must not be |
2489 | -// misrepresented as being the original software. |
2490 | -// 3. This notice may not be removed or altered from any source |
2491 | -// distribution. |
2492 | -// |
2493 | - |
2494 | -#import "AudioStreamer.h" |
2495 | -#if TARGET_OS_IPHONE |
2496 | -#import <CFNetwork/CFNetwork.h> |
2497 | -#import <UIKit/UIKit.h> |
2498 | -#endif |
2499 | - |
// Bounds on the number of audio packets sampled when estimating the
// stream's bit rate (too few packets gives an unstable estimate).
#define BitRateEstimationMaxPackets 5000
#define BitRateEstimationMinPackets 50

// Posted (always on the main thread) whenever self.state changes.
NSString * const ASStatusChangedNotification = @"ASStatusChangedNotification";

// Human-readable descriptions for each AudioStreamerErrorCode value;
// mapped by +stringForErrorCode:.
NSString * const AS_NO_ERROR_STRING = @"No error.";
NSString * const AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING = @"File stream get property failed.";
NSString * const AS_FILE_STREAM_SEEK_FAILED_STRING = @"File stream seek failed.";
NSString * const AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING = @"Parse bytes failed.";
NSString * const AS_FILE_STREAM_OPEN_FAILED_STRING = @"Open audio file stream failed.";
NSString * const AS_FILE_STREAM_CLOSE_FAILED_STRING = @"Close audio file stream failed.";
NSString * const AS_AUDIO_QUEUE_CREATION_FAILED_STRING = @"Audio queue creation failed.";
NSString * const AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING = @"Audio buffer allocation failed.";
NSString * const AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING = @"Queueing of audio buffer failed.";
NSString * const AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING = @"Audio queue add listener failed.";
NSString * const AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING = @"Audio queue remove listener failed.";
NSString * const AS_AUDIO_QUEUE_START_FAILED_STRING = @"Audio queue start failed.";
NSString * const AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING = @"Audio queue buffers don't match.";
NSString * const AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING = @"Audio queue dispose failed.";
NSString * const AS_AUDIO_QUEUE_PAUSE_FAILED_STRING = @"Audio queue pause failed.";
NSString * const AS_AUDIO_QUEUE_STOP_FAILED_STRING = @"Audio queue stop failed.";
NSString * const AS_AUDIO_DATA_NOT_FOUND_STRING = @"No audio data found.";
NSString * const AS_AUDIO_QUEUE_FLUSH_FAILED_STRING = @"Audio queue flush failed.";
NSString * const AS_GET_AUDIO_TIME_FAILED_STRING = @"Audio queue get current time failed.";
NSString * const AS_AUDIO_STREAMER_FAILED_STRING = @"Audio playback failed";
NSString * const AS_NETWORK_CONNECTION_FAILED_STRING = @"Network connection failed";
NSString * const AS_AUDIO_BUFFER_TOO_SMALL_STRING = @"Audio packets are larger than kAQDefaultBufSize.";
2527 | - |
// Class extension: redeclares `state` readwrite (readonly in the public
// header) and keeps the background-task identifier and the callback
// handlers private to this file.
@interface AudioStreamer () <UIAlertViewDelegate>
@property (nonatomic, readwrite) AudioStreamerState state;
// iOS background-task token so streaming can continue when backgrounded.
@property UIBackgroundTaskIdentifier bgTaskId;

// Handlers invoked (via the C trampoline functions below) from the
// AudioFileStream, AudioQueue and CFReadStream callback threads.
- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream
	fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID
	ioFlags:(UInt32 *)ioFlags;
- (void)handleAudioPackets:(const void *)inInputData
	numberBytes:(UInt32)inNumberBytes
	numberPackets:(UInt32)inNumberPackets
	packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions;
- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ
	buffer:(AudioQueueBufferRef)inBuffer;
- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ
	propertyID:(AudioQueuePropertyID)inID;

#if TARGET_OS_IPHONE
// Invoked when the audio session is interrupted (e.g. incoming call).
- (void)handleInterruptionChangeToState:(AudioQueuePropertyID)inInterruptionState;
#endif

// Run-loop-thread helpers for seeking and buffer management.
- (void)internalSeekToTime:(double)newSeekTime;
- (void)enqueueBuffer;
- (void)handleReadFromStream:(CFReadStreamRef)aStream
	eventType:(CFStreamEventType)eventType;

@end
2554 | - |
#pragma mark Audio Callback Function Prototypes

// Plain-C trampolines registered with the AudioQueue / AudioFileStream /
// AudioSession C APIs; each forwards into the AudioStreamer instance
// passed as the client-data pointer.
void MyAudioQueueOutputCallback(void* inClientData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer);
void MyAudioQueueIsRunningCallback(void *inUserData, AudioQueueRef inAQ, AudioQueuePropertyID inID);
void MyPropertyListenerProc(	void *						inClientData,
								AudioFileStreamID			inAudioFileStream,
								AudioFileStreamPropertyID	inPropertyID,
								UInt32 *					ioFlags);
void MyPacketsProc(				void *						inClientData,
								UInt32						inNumberBytes,
								UInt32						inNumberPackets,
								const void *				inInputData,
								AudioStreamPacketDescription	*inPacketDescriptions);
OSStatus MyEnqueueBuffer(AudioStreamer* myData);

#if TARGET_OS_IPHONE
void MyAudioSessionInterruptionListener(void *inClientData, UInt32 inInterruptionState);
#endif
2573 | - |
2574 | -#pragma mark Audio Callback Function Implementations |
2575 | - |
//
// MyPropertyListenerProc
//
// C trampoline invoked by the AudioFileStream API whenever it discovers a
// property value in the stream (used, among other things, to create the
// AudioQueue before playback starts; playback itself does not begin until
// packets are enqueued by MyEnqueueBuffer).
//
// Adapted from Apple's AudioFileStreamExample, with
// kAudioQueueProperty_IsRunning listening added.
//
void MyPropertyListenerProc(void *inClientData,
                            AudioFileStreamID inAudioFileStream,
                            AudioFileStreamPropertyID inPropertyID,
                            UInt32 *ioFlags)
{
    // Recover the streamer instance and forward to its handler method.
    AudioStreamer *streamer = (AudioStreamer *)inClientData;
    [streamer handlePropertyChangeForFileStream:inAudioFileStream
                           fileStreamPropertyID:inPropertyID
                                        ioFlags:ioFlags];
}
2599 | - |
//
// MyPacketsProc
//
// C trampoline invoked by the AudioFileStream API when parsed audio
// packets are ready. The forwarded handler copies them into an idle
// audio buffer; enqueueing blocks until a buffer is free (or playback
// has been stopped).
//
// Adapted from Apple's AudioFileStreamExample, with CBR functionality
// added.
//
void MyPacketsProc(void *inClientData,
                   UInt32 inNumberBytes,
                   UInt32 inNumberPackets,
                   const void *inInputData,
                   AudioStreamPacketDescription *inPacketDescriptions)
{
    // Recover the streamer instance and hand the packets over.
    AudioStreamer *streamer = (AudioStreamer *)inClientData;
    [streamer handleAudioPackets:inInputData
                     numberBytes:inNumberBytes
                   numberPackets:inNumberPackets
              packetDescriptions:inPacketDescriptions];
}
2625 | - |
//
// MyAudioQueueOutputCallback
//
// Invoked on the AudioQueue's thread when a buffer has finished playing
// and is free for reuse; signals the AudioStream thread that it may copy
// new data into the buffer.
//
// Unchanged (behaviorally) from Apple's AudioFileStreamExample.
//
void MyAudioQueueOutputCallback(void *inClientData,
                                AudioQueueRef inAQ,
                                AudioQueueBufferRef inBuffer)
{
    AudioStreamer *streamer = (AudioStreamer *)inClientData;
    [streamer handleBufferCompleteForQueue:inAQ buffer:inBuffer];
}
2644 | - |
//
// MyAudioQueueIsRunningCallback
//
// Invoked by the AudioQueue when playback starts or stops; forwarded to
// the streamer, which uses it to toggle the observable "isPlaying"
// property and set the "finished" flag.
//
void MyAudioQueueIsRunningCallback(void *inUserData, AudioQueueRef inAQ, AudioQueuePropertyID inID)
{
    AudioStreamer *streamer = (AudioStreamer *)inUserData;
    [streamer handlePropertyChangeForQueue:inAQ propertyID:inID];
}
2657 | - |
#if TARGET_OS_IPHONE
//
// MyAudioSessionInterruptionListener
//
// Invoked when the audio session is interrupted (e.g. an incoming phone
// call); forwarded to the streamer's interruption handler.
//
void MyAudioSessionInterruptionListener(void *inClientData, UInt32 inInterruptionState)
{
    AudioStreamer *streamer = (AudioStreamer *)inClientData;
    [streamer handleInterruptionChangeToState:inInterruptionState];
}
#endif
2670 | - |
#pragma mark CFReadStream Callback Function Implementations

//
// ASReadStreamCallBack
//
// Callback for the CFReadStream carrying the network connection — the
// point where all downloaded data enters the AudioFileStream. Invoked on
// error, end-of-stream, or when bytes are available to read.
//
void ASReadStreamCallBack(CFReadStreamRef aStream,
                          CFStreamEventType eventType,
                          void *inClientInfo)
{
    AudioStreamer *streamer = (AudioStreamer *)inClientInfo;
    [streamer handleReadFromStream:aStream eventType:eventType];
}
2691 | - |
@implementation AudioStreamer

// Auto-generated accessors for the properties declared in
// AudioStreamer.h (note: -setState: below overrides the synthesized
// setter so state changes can post notifications).
@synthesize errorCode;
@synthesize state;
@synthesize stopReason;
@synthesize bitRate;
@synthesize httpHeaders;
@synthesize bgTaskId;
2700 | - |
//
// initWithURL
//
// Designated initializer. Retains the URL to stream from; no network or
// audio resources are touched until -start.
//
- (id)initWithURL:(NSURL *)aURL
{
    if ((self = [super init]))
    {
        url = [aURL retain];
    }
    return self;
}
2715 | - |
//
// dealloc
//
// Releases instance memory. -stop is called first so the streaming
// thread, audio queue and read stream are torn down before the object
// goes away (manual retain/release — this file predates ARC).
//
- (void)dealloc
{
	[self stop];
	[url release];
	[super dealloc];
}
2727 | - |
//
// isFinishing
//
// returns YES if the audio has reached a stopping condition: either an
// error occurred after initialization, or the streamer is stopping /
// stopped for a reason other than a temporary (seek-related) stop.
//
- (BOOL)isFinishing
{
    BOOL finishing = NO;
    @synchronized (self)
    {
        BOOL failed = (errorCode != AS_NO_ERROR && state != AS_INITIALIZED);
        BOOL stoppedForGood = (state == AS_STOPPING || state == AS_STOPPED) &&
            stopReason != AS_STOPPING_TEMPORARILY;
        finishing = failed || stoppedForGood;
    }
    return finishing;
}
2747 | - |
//
// runLoopShouldExit
//
// returns YES if the internal thread's run loop should exit: any error,
// or a full (non-temporary) stop has completed.
//
- (BOOL)runLoopShouldExit
{
    BOOL shouldExit = NO;
    @synchronized(self)
    {
        BOOL fullyStopped = (state == AS_STOPPED &&
                             stopReason != AS_STOPPING_TEMPORARILY);
        shouldExit = (errorCode != AS_NO_ERROR) || fullyStopped;
    }
    return shouldExit;
}
2767 | - |
//
// stringForErrorCode:
//
// Converts an error code to a string that can be localized or presented
// to the user.
//
// Parameters:
//    anErrorCode - the error code to convert
//
// returns the string representation of the error code (falls back to the
// generic playback-failure string for unknown codes)
//
+ (NSString *)stringForErrorCode:(AudioStreamerErrorCode)anErrorCode
{
	switch (anErrorCode)
	{
		case AS_NO_ERROR:
			return AS_NO_ERROR_STRING;
		case AS_FILE_STREAM_GET_PROPERTY_FAILED:
			return AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING;
		case AS_FILE_STREAM_SEEK_FAILED:
			return AS_FILE_STREAM_SEEK_FAILED_STRING;
		case AS_FILE_STREAM_PARSE_BYTES_FAILED:
			return AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING;
		case AS_AUDIO_QUEUE_CREATION_FAILED:
			return AS_AUDIO_QUEUE_CREATION_FAILED_STRING;
		case AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED:
			return AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING;
		case AS_AUDIO_QUEUE_ENQUEUE_FAILED:
			return AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING;
		case AS_AUDIO_QUEUE_ADD_LISTENER_FAILED:
			return AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING;
		case AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED:
			return AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING;
		case AS_AUDIO_QUEUE_START_FAILED:
			return AS_AUDIO_QUEUE_START_FAILED_STRING;
		case AS_AUDIO_QUEUE_BUFFER_MISMATCH:
			return AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING;
		case AS_FILE_STREAM_OPEN_FAILED:
			return AS_FILE_STREAM_OPEN_FAILED_STRING;
		case AS_FILE_STREAM_CLOSE_FAILED:
			return AS_FILE_STREAM_CLOSE_FAILED_STRING;
		case AS_AUDIO_QUEUE_DISPOSE_FAILED:
			return AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING;
		case AS_AUDIO_QUEUE_PAUSE_FAILED:
			// Fixed: previously returned the DISPOSE string (copy-paste bug).
			return AS_AUDIO_QUEUE_PAUSE_FAILED_STRING;
		case AS_AUDIO_QUEUE_FLUSH_FAILED:
			return AS_AUDIO_QUEUE_FLUSH_FAILED_STRING;
		case AS_AUDIO_DATA_NOT_FOUND:
			return AS_AUDIO_DATA_NOT_FOUND_STRING;
		case AS_GET_AUDIO_TIME_FAILED:
			return AS_GET_AUDIO_TIME_FAILED_STRING;
		case AS_NETWORK_CONNECTION_FAILED:
			return AS_NETWORK_CONNECTION_FAILED_STRING;
		case AS_AUDIO_QUEUE_STOP_FAILED:
			return AS_AUDIO_QUEUE_STOP_FAILED_STRING;
		case AS_AUDIO_STREAMER_FAILED:
			return AS_AUDIO_STREAMER_FAILED_STRING;
		case AS_AUDIO_BUFFER_TOO_SMALL:
			return AS_AUDIO_BUFFER_TOO_SMALL_STRING;
		default:
			return AS_AUDIO_STREAMER_FAILED_STRING;
	}
}
2833 | - |
//
// presentAlertWithTitle:message:
//
// Common code for presenting error dialogs on the main thread.
//
// Parameters:
//    title - title for the dialog
//    message - main text for the dialog
//
// On iOS the UIAlertView is balanced by a release in
// -alertView:didDismissWithButtonIndex:.
//
- (void)presentAlertWithTitle:(NSString*)title message:(NSString*)message
{
#if TARGET_OS_IPHONE
	dispatch_async(dispatch_get_main_queue(), ^{
		// Delegate is set once via the initializer; the previous redundant
		// -setDelegate: call has been removed.
		UIAlertView *alert = [[UIAlertView alloc] initWithTitle:title
														message:message
													   delegate:self
											  cancelButtonTitle:NSLocalizedString(@"OK", @"")
											  otherButtonTitles:nil];
		[alert show];
	});
#else
	// Pass the message through "%@" so that any '%' characters in it are
	// not misinterpreted as format specifiers (format-string bug fix).
	NSAlert *alert =
		[NSAlert
			alertWithMessageText:title
			defaultButton:NSLocalizedString(@"OK", @"")
			alternateButton:nil
			otherButton:nil
			informativeTextWithFormat:@"%@", message];
	[alert
		performSelector:@selector(runModal)
		onThread:[NSThread mainThread]
		withObject:nil
		waitUntilDone:NO];
#endif
}
2871 | - |
// UIAlertViewDelegate: balances the alloc performed in
// -presentAlertWithTitle:message: once the user dismisses the alert.
- (void)alertView:(UIAlertView *)alertView didDismissWithButtonIndex:(NSInteger)buttonIndex;
{
	[alertView release];
}
2876 | - |
//
// failWithErrorCode:
//
// Records the first error encountered, logs it, and — if playback is
// active — transitions to AS_STOPPING with reason AS_STOPPING_ERROR and
// stops the audio queue. Subsequent calls are ignored.
//
// Parameters:
//    anErrorCode - the error condition
//
- (void)failWithErrorCode:(AudioStreamerErrorCode)anErrorCode
{
    @synchronized(self)
    {
        // First error wins; never overwrite an already-recorded failure.
        if (errorCode != AS_NO_ERROR)
        {
            return;
        }

        errorCode = anErrorCode;

        if (err)
        {
            // Render the OSStatus as a four-character code for the log.
            char *errChars = (char *)&err;
            NSLog(@"%@ err: %c%c%c%c %d\n",
                [AudioStreamer stringForErrorCode:anErrorCode],
                errChars[3], errChars[2], errChars[1], errChars[0],
                (int)err);
        }
        else
        {
            NSLog(@"%@", [AudioStreamer stringForErrorCode:anErrorCode]);
        }

        BOOL wasActive = (state == AS_PLAYING ||
                          state == AS_PAUSED ||
                          state == AS_BUFFERING);
        if (wasActive)
        {
            self.state = AS_STOPPING;
            stopReason = AS_STOPPING_ERROR;
            AudioQueueStop(audioQueue, true);
        }
    }
}
2923 | - |
//
// mainThreadStateNotification
//
// Invoked on the main thread to post the state-changed notification to
// the main thread's notification center.
//
- (void)mainThreadStateNotification
{
    [[NSNotificationCenter defaultCenter]
        postNotificationName:ASStatusChangedNotification
        object:self];
}
2939 | - |
//
// setState:
//
// Custom setter: records the new state and posts
// ASStatusChangedNotification, always from the main thread (dispatched
// there asynchronously when called from the streaming thread).
//
// Parameters:
//    aStatus - the new state
//
- (void)setState:(AudioStreamerState)aStatus
{
    @synchronized(self)
    {
        if (state == aStatus)
        {
            return;
        }
        state = aStatus;

        if ([NSThread isMainThread])
        {
            [self mainThreadStateNotification];
        }
        else
        {
            [self
                performSelectorOnMainThread:@selector(mainThreadStateNotification)
                withObject:nil
                waitUntilDone:NO];
        }
    }
}
2972 | - |
//
// isPlaying
//
// returns YES if audio is currently playing.
//
- (BOOL)isPlaying
{
    return state == AS_PLAYING;
}
2987 | - |
//
// isPaused
//
// returns YES if playback is currently paused.
//
- (BOOL)isPaused
{
    return state == AS_PAUSED;
}
3002 | - |
//
// isWaiting
//
// returns YES if the AudioStreamer is waiting for a state transition of
// some kind (finishing, starting its thread, waiting for data or for the
// queue to start, or buffering).
//
- (BOOL)isWaiting
{
    BOOL waiting = NO;
    @synchronized(self)
    {
        waiting = [self isFinishing] ||
            state == AS_STARTING_FILE_THREAD ||
            state == AS_WAITING_FOR_DATA ||
            state == AS_WAITING_FOR_QUEUE_TO_START ||
            state == AS_BUFFERING;
    }
    return waiting;
}
3025 | - |
//
// isIdle
//
// returns YES if the AudioStreamer is in the AS_INITIALIZED state (i.e.
// isn't doing anything).
//
- (BOOL)isIdle
{
    return state == AS_INITIALIZED;
}
3041 | - |
//
// hintForFileExtension:
//
// Generates a first guess for the file type based on the file's extension.
//
// Parameters:
//    fileExtension - the file extension
//
// returns a file type hint that can be passed to the AudioFileStream
// (defaults to AAC/ADTS for unknown extensions)
//
+ (AudioFileTypeID)hintForFileExtension:(NSString *)fileExtension
{
	// Compare case-insensitively so "MP3", "Mp3" and "mp3" all resolve
	// to the same hint (previously only lowercase extensions matched).
	NSString *extension = [fileExtension lowercaseString];

	AudioFileTypeID fileTypeHint = kAudioFileAAC_ADTSType;
	if ([extension isEqual:@"mp3"])
	{
		fileTypeHint = kAudioFileMP3Type;
	}
	else if ([extension isEqual:@"wav"])
	{
		fileTypeHint = kAudioFileWAVEType;
	}
	else if ([extension isEqual:@"aifc"])
	{
		fileTypeHint = kAudioFileAIFCType;
	}
	else if ([extension isEqual:@"aiff"])
	{
		fileTypeHint = kAudioFileAIFFType;
	}
	else if ([extension isEqual:@"m4a"])
	{
		fileTypeHint = kAudioFileM4AType;
	}
	else if ([extension isEqual:@"mp4"])
	{
		fileTypeHint = kAudioFileMPEG4Type;
	}
	else if ([extension isEqual:@"caf"])
	{
		fileTypeHint = kAudioFileCAFType;
	}
	else if ([extension isEqual:@"aac"])
	{
		fileTypeHint = kAudioFileAAC_ADTSType;
	}
	return fileTypeHint;
}
3089 | - |
//
// openReadStream
//
// Opens the CFReadStream that feeds the audio parser. For file:// URLs a
// file stream is created; otherwise an HTTP GET request is issued (with a
// Range header when resuming from a seek), with redirect, proxy and SSL
// options applied. Must be called on the internalThread; the stream's
// callbacks are scheduled on that thread's run loop.
//
// returns YES on success, NO if the stream could not be configured or
// opened (an alert is presented and the `stream` ivar is left nil).
//
- (BOOL)openReadStream
{
	@synchronized(self)
	{
		NSAssert([[NSThread currentThread] isEqual:internalThread],
			@"File stream download must be started on the internalThread");
		NSAssert(stream == nil, @"Download stream already initialized");

		if ([url isFileURL])
		{
			stream = CFReadStreamCreateWithFile(kCFAllocatorDefault, (CFURLRef)url);
		}
		else
		{
			//
			// Create the HTTP GET request
			//
			CFHTTPMessageRef message = CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (CFURLRef)url, kCFHTTPVersion1_1);

			//
			// If we are creating this request to seek to a location, set the
			// requested byte range in the headers.
			//
			if (fileLength > 0 && seekByteOffset > 0)
			{
				CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"),
					(CFStringRef)[NSString stringWithFormat:@"bytes=%u-%u", seekByteOffset, fileLength]);
				discontinuous = YES;
			}

			//
			// Create the read stream that will receive data from the HTTP request
			//
			stream = CFReadStreamCreateForHTTPRequest(NULL, message);
			CFRelease(message);

			//
			// Enable stream redirection
			//
			if (CFReadStreamSetProperty(
				stream,
				kCFStreamPropertyHTTPShouldAutoredirect,
				kCFBooleanTrue) == false)
			{
				// Fix: release and clear the half-configured stream.
				// Previously it leaked and the stale ivar tripped the
				// stream == nil assertion on a subsequent open attempt.
				CFRelease(stream);
				stream = nil;
				[self presentAlertWithTitle:NSLocalizedStringFromTable(@"File Error", @"Errors", nil)
									message:NSLocalizedStringFromTable(@"Unable to configure network read stream.", @"Errors", nil)];
				return NO;
			}

			//
			// Handle proxies
			//
			CFDictionaryRef proxySettings = CFNetworkCopySystemProxySettings();
			CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPProxy, proxySettings);
			CFRelease(proxySettings);

			//
			// Handle SSL connections (permissive settings: the certificate
			// chain is deliberately not validated here).
			//
			if ([[url absoluteString] rangeOfString:@"https"].location != NSNotFound)
			{
				NSDictionary *sslSettings =
					[NSDictionary dictionaryWithObjectsAndKeys:
						(NSString *)kCFStreamSocketSecurityLevelNegotiatedSSL, kCFStreamSSLLevel,
						[NSNumber numberWithBool:YES], kCFStreamSSLAllowsExpiredCertificates,
						[NSNumber numberWithBool:YES], kCFStreamSSLAllowsExpiredRoots,
						[NSNumber numberWithBool:YES], kCFStreamSSLAllowsAnyRoot,
						[NSNumber numberWithBool:NO], kCFStreamSSLValidatesCertificateChain,
						[NSNull null], kCFStreamSSLPeerName,
						nil];

				CFReadStreamSetProperty(stream, kCFStreamPropertySSLSettings, sslSettings);
			}
		}

		//
		// We're now ready to receive data
		//
		self.state = AS_WAITING_FOR_DATA;

		//
		// Open the stream
		//
		if (!CFReadStreamOpen(stream))
		{
			CFRelease(stream);
			// Fix: nil the ivar after releasing — leaving a dangling
			// pointer here would crash the cleanup path in startInternal.
			stream = nil;
			[self presentAlertWithTitle:NSLocalizedStringFromTable(@"File Error", @"Errors", nil)
								message:NSLocalizedStringFromTable(@"Unable to configure network read stream.", @"Errors", nil)];
			return NO;
		}

		//
		// Set our callback function to receive the data
		//
		CFStreamClientContext context = {0, self, NULL, NULL, NULL};
		CFReadStreamSetClient(
			stream,
			kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered,
			ASReadStreamCallBack,
			&context);
		CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
	}

	return YES;
}
3201 | - |
//
// startInternal
//
// This is the start method for the AudioStream thread. This thread is created
// because it will be blocked when there are no audio buffers idle (and ready
// to receive audio data).
//
// Activity in this thread:
//	- Creation and cleanup of all AudioFileStream and AudioQueue objects
//	- Receives data from the CFReadStream
//	- AudioFileStream processing
//	- Copying of data from AudioFileStream into audio buffers
//	- Stopping of the thread because of end-of-file
//	- Stopping due to error or failure
//
// Activity *not* in this thread:
//	- AudioQueue playback and notifications (happens in AudioQueue thread)
//	- Actual download of NSURLConnection data (NSURLConnection's thread)
//	- Creation of the AudioStreamer (other, likely "main" thread)
//	- Invocation of -start method (other, likely "main" thread)
//	- User/manual invocation of -stop (other, likely "main" thread)
//
// This method contains bits of the "main" function from Apple's example in
// AudioFileStreamExample.
//
- (void)startInternal
{
	// Thread entry point: needs its own autorelease pool (pre-ARC).
	NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

	@synchronized(self)
	{
		// Abort unless -start put us in the expected transitional state.
		if (state != AS_STARTING_FILE_THREAD)
		{
			if (state != AS_STOPPING &&
				state != AS_STOPPED)
			{
				NSLog(@"### Not starting audio thread. State code is: %u", state);
			}
			self.state = AS_INITIALIZED;
			[pool release];
			return;
		}

	#if TARGET_OS_IPHONE
		//
		// Set the audio session category so that we continue to play if the
		// iPhone/iPod auto-locks.
		//
		AudioSessionInitialize (
			NULL,                               // 'NULL' to use the default (main) run loop
			NULL,                               // 'NULL' to use the default run loop mode
			MyAudioSessionInterruptionListener, // a reference to your interruption callback
			self                                // data to pass to your interruption listener callback
		);
		UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;
		AudioSessionSetProperty (
			kAudioSessionProperty_AudioCategory,
			sizeof (sessionCategory),
			&sessionCategory
		);
		AudioSessionSetActive(true);
	#endif

		// initialize a mutex and condition so that we can block on buffers in use.
		pthread_mutex_init(&queueBuffersMutex, NULL);
		pthread_cond_init(&queueBufferReadyCondition, NULL);

		// NOTE(review): this goto jumps out of the @synchronized block into
		// the cleanup section below (which re-synchronizes) — intentional
		// in the upstream code, but fragile; confirm before restructuring.
		if (![self openReadStream])
		{
			goto cleanup;
		}
	}

	//
	// Process the run loop until playback is finished or failed.
	//
	BOOL isRunning = YES;
	do
	{
		isRunning = [[NSRunLoop currentRunLoop]
			runMode:NSDefaultRunLoopMode
			beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.25]];

		// Service any seek requested from another thread (see -seekToTime:).
		@synchronized(self) {
			if (seekWasRequested) {
				[self internalSeekToTime:requestedSeekTime];
				seekWasRequested = NO;
			}
		}

		//
		// If there are no queued buffers, we need to check here since the
		// handleBufferCompleteForQueue:buffer: should not change the state
		// (may not enter the synchronized section).
		//
		if (buffersUsed == 0 && self.state == AS_PLAYING)
		{
			err = AudioQueuePause(audioQueue);
			if (err)
			{
				// NOTE(review): this early return skips the cleanup block
				// below and leaks `pool` — upstream behavior, left as-is.
				[self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED];
				return;
			}
			self.state = AS_BUFFERING;
		}
	} while (isRunning && ![self runLoopShouldExit]);

cleanup:

	@synchronized(self)
	{
		//
		// Cleanup the read stream if it is still open
		//
		if (stream)
		{
			CFReadStreamClose(stream);
			CFRelease(stream);
			stream = nil;
		}

		//
		// Close the audio file stream
		//
		if (audioFileStream)
		{
			err = AudioFileStreamClose(audioFileStream);
			audioFileStream = nil;
			if (err)
			{
				[self failWithErrorCode:AS_FILE_STREAM_CLOSE_FAILED];
			}
		}

		//
		// Dispose of the Audio Queue
		//
		if (audioQueue)
		{
			err = AudioQueueDispose(audioQueue, true);
			audioQueue = nil;
			if (err)
			{
				[self failWithErrorCode:AS_AUDIO_QUEUE_DISPOSE_FAILED];
			}
		}

		pthread_mutex_destroy(&queueBuffersMutex);
		pthread_cond_destroy(&queueBufferReadyCondition);

#if TARGET_OS_IPHONE
		AudioSessionSetActive(false);
#endif

		[httpHeaders release];
		httpHeaders = nil;

		// Reset progress counters so the streamer can be started again.
		bytesFilled = 0;
		packetsFilled = 0;
		seekByteOffset = 0;
		packetBufferSize = 0;
		self.state = AS_INITIALIZED;

		[internalThread release];
		internalThread = nil;
	}

	[pool release];
}
3371 | - |
//
// start
//
// Begins (or resumes) playback. When paused, -pause is called again to
// toggle back to playing; when idle (AS_INITIALIZED), the streaming
// thread is spawned to run startInternal. Any other state is a no-op.
// Must be invoked from the main thread.
//
- (void)start
{
    @synchronized (self)
    {
        if (state == AS_PAUSED)
        {
            // -pause toggles: calling it while paused resumes playback.
            [self pause];
        }
        else if (state == AS_INITIALIZED)
        {
            NSAssert([NSThread isMainThread],
                @"Playback can only be started from the main thread.");
            notificationCenter = [[NSNotificationCenter defaultCenter] retain];
            self.state = AS_STARTING_FILE_THREAD;
            internalThread = [[NSThread alloc] initWithTarget:self
                                                     selector:@selector(startInternal)
                                                       object:nil];
            [internalThread start];
        }
    }
}
3401 | - |
3402 | - |
3403 | -// internalSeekToTime: |
3404 | -// |
3405 | -// Called from our internal runloop to reopen the stream at a seeked location |
3406 | -// |
3407 | -- (void)internalSeekToTime:(double)newSeekTime |
3408 | -{ |
3409 | - if ([self calculatedBitRate] == 0.0 || fileLength <= 0) |
3410 | - { |
3411 | - return; |
3412 | - } |
3413 | - |
3414 | - // |
3415 | - // Calculate the byte offset for seeking |
3416 | - // |
3417 | - seekByteOffset = dataOffset + |
3418 | - (newSeekTime / self.duration) * (fileLength - dataOffset); |
3419 | - |
3420 | - // |
3421 | - // Attempt to leave 1 useful packet at the end of the file (although in |
3422 | - // reality, this may still seek too far if the file has a long trailer). |
3423 | - // |
3424 | - if (seekByteOffset > fileLength - 2 * packetBufferSize) |
3425 | - { |
3426 | - seekByteOffset = fileLength - 2 * packetBufferSize; |
3427 | - } |
3428 | - |
3429 | - // |
3430 | - // Store the old time from the audio queue and the time that we're seeking |
3431 | - // to so that we'll know the correct time progress after seeking. |
3432 | - // |
3433 | - seekTime = newSeekTime; |
3434 | - |
3435 | - // |
3436 | - // Attempt to align the seek with a packet boundary |
3437 | - // |
3438 | - double calculatedBitRate = [self calculatedBitRate]; |
3439 | - if (packetDuration > 0 && |
3440 | - calculatedBitRate > 0) |
3441 | - { |
3442 | - UInt32 ioFlags = 0; |
3443 | - SInt64 packetAlignedByteOffset; |
3444 | - SInt64 seekPacket = floor(newSeekTime / packetDuration); |
3445 | - err = AudioFileStreamSeek(audioFileStream, seekPacket, &packetAlignedByteOffset, &ioFlags); |
3446 | - if (!err && !(ioFlags & kAudioFileStreamSeekFlag_OffsetIsEstimated)) |
3447 | - { |
3448 | - seekTime -= ((seekByteOffset - dataOffset) - packetAlignedByteOffset) * 8.0 / calculatedBitRate; |
3449 | - seekByteOffset = packetAlignedByteOffset + dataOffset; |
3450 | - } |
3451 | - } |
3452 | - |
3453 | - // |
3454 | - // Close the current read stream
3455 | - // |
3456 | - if (stream) |
3457 | - { |
3458 | - CFReadStreamClose(stream); |
3459 | - CFRelease(stream); |
3460 | - stream = nil; |
3461 | - } |
3462 | - |
3463 | - // |
3464 | - // Stop the audio queue |
3465 | - // |
3466 | - self.state = AS_STOPPING; |
3467 | - stopReason = AS_STOPPING_TEMPORARILY; |
3468 | - err = AudioQueueStop(audioQueue, true); |
3469 | - if (err) |
3470 | - { |
3471 | - [self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED]; |
3472 | - return; |
3473 | - } |
3474 | - |
3475 | - // |
3476 | - // Re-open the file stream. It will request a byte-range starting at |
3477 | - // seekByteOffset. |
3478 | - // |
3479 | - [self openReadStream]; |
3480 | -} |
3481 | - |
3482 | -// |
3483 | -// seekToTime: |
3484 | -// |
3485 | -// Attempts to seek to the new time. Will be ignored if the bitrate or fileLength |
3486 | -// are unknown. |
3487 | -// |
3488 | -// Parameters: |
3489 | -// newTime - the time to seek to |
3490 | -// |
3491 | -- (void)seekToTime:(double)newSeekTime |
3492 | -{ |
3493 | - @synchronized(self) |
3494 | - { |
3495 | - seekWasRequested = YES; |
3496 | - requestedSeekTime = newSeekTime; |
3497 | - } |
3498 | -} |
3499 | - |
3500 | -// |
3501 | -// progress |
3502 | -// |
3503 | -// returns the current playback progress. Will return zero if sampleRate has |
3504 | -// not yet been detected. |
3505 | -// |
3506 | -- (double)progress |
3507 | -{ |
3508 | - @synchronized(self) |
3509 | - { |
3510 | - if (sampleRate > 0 && ![self isFinishing]) |
3511 | - { |
3512 | - if (state != AS_PLAYING && state != AS_PAUSED && state != AS_BUFFERING) |
3513 | - { |
3514 | - return lastProgress; |
3515 | - } |
3516 | - |
3517 | - AudioTimeStamp queueTime; |
3518 | - Boolean discontinuity; |
3519 | - err = AudioQueueGetCurrentTime(audioQueue, NULL, &queueTime, &discontinuity); |
3520 | - |
3521 | - const OSStatus AudioQueueStopped = 0x73746F70; // 0x73746F70 is 'stop' |
3522 | - if (err == AudioQueueStopped) |
3523 | - { |
3524 | - return lastProgress; |
3525 | - } |
3526 | - else if (err) |
3527 | - { |
3528 | - [self failWithErrorCode:AS_GET_AUDIO_TIME_FAILED]; |
3529 | - } |
3530 | - |
3531 | - double progress = seekTime + queueTime.mSampleTime / sampleRate; |
3532 | - if (progress < 0.0) |
3533 | - { |
3534 | - progress = 0.0; |
3535 | - } |
3536 | - |
3537 | - lastProgress = progress; |
3538 | - return progress; |
3539 | - } |
3540 | - } |
3541 | - |
3542 | - return lastProgress; |
3543 | -} |
3544 | - |
3545 | -// |
3546 | -// calculatedBitRate |
3547 | -// |
3548 | -// returns the bit rate, if known. Uses packet duration times running bits per |
3549 | -// packet if available, otherwise it returns the nominal bitrate. Will return |
3550 | -// zero if no useful option available. |
3551 | -// |
3552 | -- (double)calculatedBitRate |
3553 | -{ |
3554 | - if (packetDuration && processedPacketsCount > BitRateEstimationMinPackets) |
3555 | - { |
3556 | - double averagePacketByteSize = processedPacketsSizeTotal / processedPacketsCount; |
3557 | - return 8.0 * averagePacketByteSize / packetDuration; |
3558 | - } |
3559 | - |
3560 | - if (bitRate) |
3561 | - { |
3562 | - return (double)bitRate; |
3563 | - } |
3564 | - |
3565 | - return 0; |
3566 | -} |
3567 | - |
3568 | -// |
3569 | -// duration |
3570 | -// |
3571 | -// Calculates the duration of available audio from the bitRate and fileLength. |
3572 | -// |
3573 | -// returns the calculated duration in seconds. |
3574 | -// |
3575 | -- (double)duration |
3576 | -{ |
3577 | - double calculatedBitRate = [self calculatedBitRate]; |
3578 | - |
3579 | - if (calculatedBitRate == 0 || fileLength == 0) |
3580 | - { |
3581 | - return 0.0; |
3582 | - } |
3583 | - |
3584 | - return (fileLength - dataOffset) / (calculatedBitRate * 0.125); |
3585 | -} |
3586 | - |
3587 | -// |
3588 | -// pause |
3589 | -// |
3590 | -// A togglable pause function. |
3591 | -// |
3592 | -- (void)pause |
3593 | -{ |
3594 | - @synchronized(self) |
3595 | - { |
3596 | - if (state == AS_PLAYING) |
3597 | - { |
3598 | - err = AudioQueuePause(audioQueue); |
3599 | - if (err) |
3600 | - { |
3601 | - [self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED]; |
3602 | - return; |
3603 | - } |
3604 | - self.state = AS_PAUSED; |
3605 | - } |
3606 | - else if (state == AS_PAUSED) |
3607 | - { |
3608 | - err = AudioQueueStart(audioQueue, NULL); |
3609 | - self.bgTaskId = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:NULL]; |
3610 | - if (err) |
3611 | - { |
3612 | - [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; |
3613 | - return; |
3614 | - } |
3615 | - self.state = AS_PLAYING; |
3616 | - } |
3617 | - } |
3618 | -} |
3619 | - |
3620 | -// |
3621 | -// stop |
3622 | -// |
3623 | -// This method can be called to stop downloading/playback before it completes. |
3624 | -// It is automatically called when an error occurs. |
3625 | -// |
3626 | -// If playback has not started before this method is called, it will toggle the |
3627 | -// "isPlaying" property so that it is guaranteed to transition to true and |
3628 | -// back to false |
3629 | -// |
3630 | -- (void)stop |
3631 | -{ |
3632 | - @synchronized(self) |
3633 | - { |
3634 | - if (audioQueue && |
3635 | - (state == AS_PLAYING || state == AS_PAUSED || |
3636 | - state == AS_BUFFERING || state == AS_WAITING_FOR_QUEUE_TO_START)) |
3637 | - { |
3638 | - self.state = AS_STOPPING; |
3639 | - stopReason = AS_STOPPING_USER_ACTION; |
3640 | - err = AudioQueueStop(audioQueue, true); |
3641 | - if (err) |
3642 | - { |
3643 | - [self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED]; |
3644 | - return; |
3645 | - } |
3646 | - } |
3647 | - else if (state != AS_INITIALIZED) |
3648 | - { |
3649 | - self.state = AS_STOPPED; |
3650 | - stopReason = AS_STOPPING_USER_ACTION; |
3651 | - } |
3652 | - seekWasRequested = NO; |
3653 | - } |
3654 | - |
3655 | - while (state != AS_INITIALIZED) |
3656 | - { |
3657 | - [NSThread sleepForTimeInterval:0.1]; |
3658 | - } |
3659 | -} |
3660 | - |
3661 | -// |
3662 | -// handleReadFromStream:eventType: |
3663 | -// |
3664 | -// Reads data from the network file stream into the AudioFileStream |
3665 | -// |
3666 | -// Parameters: |
3667 | -// aStream - the network file stream |
3668 | -// eventType - the event which triggered this method |
3669 | -// |
3670 | -- (void)handleReadFromStream:(CFReadStreamRef)aStream |
3671 | - eventType:(CFStreamEventType)eventType |
3672 | -{ |
3673 | - if (aStream != stream) |
3674 | - { |
3675 | - // |
3676 | - // Ignore messages from old streams |
3677 | - // |
3678 | - return; |
3679 | - } |
3680 | - |
3681 | - if (eventType == kCFStreamEventErrorOccurred) |
3682 | - { |
3683 | - [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; |
3684 | - } |
3685 | - else if (eventType == kCFStreamEventEndEncountered) |
3686 | - { |
3687 | - @synchronized(self) |
3688 | - { |
3689 | - if ([self isFinishing]) |
3690 | - { |
3691 | - return; |
3692 | - } |
3693 | - } |
3694 | - |
3695 | - // |
3696 | - // If there is a partially filled buffer, pass it to the AudioQueue for |
3697 | - // processing |
3698 | - // |
3699 | - if (bytesFilled) |
3700 | - { |
3701 | - if (self.state == AS_WAITING_FOR_DATA) |
3702 | - { |
3703 | - // |
3704 | - // Force audio data smaller than one whole buffer to play. |
3705 | - // |
3706 | - self.state = AS_FLUSHING_EOF; |
3707 | - } |
3708 | - [self enqueueBuffer]; |
3709 | - } |
3710 | - |
3711 | - @synchronized(self) |
3712 | - { |
3713 | - if (state == AS_WAITING_FOR_DATA) |
3714 | - { |
3715 | - [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; |
3716 | - } |
3717 | - |
3718 | - // |
3719 | - // We left the synchronized section to enqueue the buffer so we |
3720 | - // must check that we are !finished again before touching the |
3721 | - // audioQueue |
3722 | - // |
3723 | - else if (![self isFinishing]) |
3724 | - { |
3725 | - if (audioQueue) |
3726 | - { |
3727 | - // |
3728 | - // Set the progress at the end of the stream |
3729 | - // |
3730 | - err = AudioQueueFlush(audioQueue); |
3731 | - if (err) |
3732 | - { |
3733 | - [self failWithErrorCode:AS_AUDIO_QUEUE_FLUSH_FAILED]; |
3734 | - return; |
3735 | - } |
3736 | - |
3737 | - self.state = AS_STOPPING; |
3738 | - stopReason = AS_STOPPING_EOF; |
3739 | - err = AudioQueueStop(audioQueue, false); |
3740 | - if (err) |
3741 | - { |
3742 | - [self failWithErrorCode:AS_AUDIO_QUEUE_FLUSH_FAILED]; |
3743 | - return; |
3744 | - } |
3745 | - } |
3746 | - else |
3747 | - { |
3748 | - self.state = AS_STOPPED; |
3749 | - stopReason = AS_STOPPING_EOF; |
3750 | - } |
3751 | - } |
3752 | - } |
3753 | - } |
3754 | - else if (eventType == kCFStreamEventHasBytesAvailable) |
3755 | - { |
3756 | - if (!httpHeaders) |
3757 | - { |
3758 | - CFTypeRef message = |
3759 | - CFReadStreamCopyProperty(stream, kCFStreamPropertyHTTPResponseHeader); |
3760 | - if (message) |
3761 | - { |
3762 | - httpHeaders = |
3763 | - (NSDictionary *)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)message); |
3764 | - CFRelease(message); |
3765 | - |
3766 | - // |
3767 | - // Only read the content length if we seeked to time zero, otherwise |
3768 | - // we only have a subset of the total bytes. |
3769 | - // |
3770 | - if (seekByteOffset == 0) |
3771 | - { |
3772 | - fileLength = [[httpHeaders objectForKey:@"Content-Length"] integerValue]; |
3773 | - } |
3774 | - } |
3775 | - } |
3776 | - |
3777 | - if (!audioFileStream) |
3778 | - { |
3779 | - // |
3780 | - // Attempt to guess the file type from the URL. Reading the MIME type |
3781 | - // from the httpHeaders might be a better approach since lots of |
3782 | - // URL's don't have the right extension. |
3783 | - // |
3784 | - // If you have a fixed file-type, you may want to hardcode this. |
3785 | - // |
3786 | - AudioFileTypeID fileTypeHint = |
3787 | - [AudioStreamer hintForFileExtension:[[url path] pathExtension]]; |
3788 | - |
3789 | - // create an audio file stream parser |
3790 | - err = AudioFileStreamOpen(self, MyPropertyListenerProc, MyPacketsProc, |
3791 | - fileTypeHint, &audioFileStream); |
3792 | - if (err) |
3793 | - { |
3794 | - [self failWithErrorCode:AS_FILE_STREAM_OPEN_FAILED]; |
3795 | - return; |
3796 | - } |
3797 | - } |
3798 | - |
3799 | - UInt8 bytes[kAQDefaultBufSize]; |
3800 | - CFIndex length; |
3801 | - @synchronized(self) |
3802 | - { |
3803 | - if ([self isFinishing] || !CFReadStreamHasBytesAvailable(stream)) |
3804 | - { |
3805 | - return; |
3806 | - } |
3807 | - |
3808 | - // |
3809 | - // Read the bytes from the stream |
3810 | - // |
3811 | - length = CFReadStreamRead(stream, bytes, kAQDefaultBufSize); |
3812 | - |
3813 | - if (length == -1) |
3814 | - { |
3815 | - [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; |
3816 | - return; |
3817 | - } |
3818 | - |
3819 | - if (length == 0) |
3820 | - { |
3821 | - return; |
3822 | - } |
3823 | - } |
3824 | - |
3825 | - if (discontinuous) |
3826 | - { |
3827 | - err = AudioFileStreamParseBytes(audioFileStream, length, bytes, kAudioFileStreamParseFlag_Discontinuity); |
3828 | - if (err) |
3829 | - { |
3830 | - [self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED]; |
3831 | - return; |
3832 | - } |
3833 | - } |
3834 | - else |
3835 | - { |
3836 | - err = AudioFileStreamParseBytes(audioFileStream, length, bytes, 0); |
3837 | - if (err) |
3838 | - { |
3839 | - [self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED]; |
3840 | - return; |
3841 | - } |
3842 | - } |
3843 | - } |
3844 | -} |
3845 | - |
3846 | -// |
3847 | -// enqueueBuffer |
3848 | -// |
3849 | -// Called from MyPacketsProc and connectionDidFinishLoading to pass filled audio |
3850 | -// buffers (filled by MyPacketsProc) to the AudioQueue for playback. This
3851 | -// function does not return until a buffer is idle for further filling or |
3852 | -// the AudioQueue is stopped. |
3853 | -// |
3854 | -// This function is adapted from Apple's example in AudioFileStreamExample with |
3855 | -// CBR functionality added. |
3856 | -// |
3857 | -- (void)enqueueBuffer |
3858 | -{ |
3859 | - @synchronized(self) |
3860 | - { |
3861 | - if ([self isFinishing] || stream == 0) |
3862 | - { |
3863 | - return; |
3864 | - } |
3865 | - |
3866 | - inuse[fillBufferIndex] = true; // set in use flag |
3867 | - buffersUsed++; |
3868 | - |
3869 | - // enqueue buffer |
3870 | - AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; |
3871 | - fillBuf->mAudioDataByteSize = bytesFilled; |
3872 | - |
3873 | - if (packetsFilled) |
3874 | - { |
3875 | - err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, packetsFilled, packetDescs); |
3876 | - } |
3877 | - else |
3878 | - { |
3879 | - err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, 0, NULL); |
3880 | - } |
3881 | - |
3882 | - if (err) |
3883 | - { |
3884 | - [self failWithErrorCode:AS_AUDIO_QUEUE_ENQUEUE_FAILED]; |
3885 | - return; |
3886 | - } |
3887 | - |
3888 | - |
3889 | - if (state == AS_BUFFERING || |
3890 | - state == AS_WAITING_FOR_DATA || |
3891 | - state == AS_FLUSHING_EOF || |
3892 | - (state == AS_STOPPED && stopReason == AS_STOPPING_TEMPORARILY)) |
3893 | - { |
3894 | - // |
3895 | - // Fill all the buffers before starting. This ensures that the |
3896 | - // AudioFileStream stays a small amount ahead of the AudioQueue to |
3897 | - // avoid an audio glitch playing streaming files on iPhone SDKs < 3.0 |
3898 | - // |
3899 | - if (state == AS_FLUSHING_EOF || buffersUsed == kNumAQBufs - 1) |
3900 | - { |
3901 | - if (self.state == AS_BUFFERING) |
3902 | - { |
3903 | - err = AudioQueueStart(audioQueue, NULL); |
3904 | - self.bgTaskId = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:NULL]; |
3905 | - if (err) |
3906 | - { |
3907 | - [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; |
3908 | - return; |
3909 | - } |
3910 | - self.state = AS_PLAYING; |
3911 | - } |
3912 | - else |
3913 | - { |
3914 | - self.state = AS_WAITING_FOR_QUEUE_TO_START; |
3915 | - self.bgTaskId = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:NULL]; |
3916 | - err = AudioQueueStart(audioQueue, NULL); |
3917 | - if (err) |
3918 | - { |
3919 | - [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; |
3920 | - return; |
3921 | - } |
3922 | - } |
3923 | - } |
3924 | - } |
3925 | - |
3926 | - // go to next buffer |
3927 | - if (++fillBufferIndex >= kNumAQBufs) fillBufferIndex = 0; |
3928 | - bytesFilled = 0; // reset bytes filled |
3929 | - packetsFilled = 0; // reset packets filled |
3930 | - } |
3931 | - |
3932 | - // wait until next buffer is not in use |
3933 | - pthread_mutex_lock(&queueBuffersMutex); |
3934 | - while (inuse[fillBufferIndex]) |
3935 | - { |
3936 | - pthread_cond_wait(&queueBufferReadyCondition, &queueBuffersMutex); |
3937 | - } |
3938 | - pthread_mutex_unlock(&queueBuffersMutex); |
3939 | -} |
3940 | - |
3941 | -// |
3942 | -// createQueue |
3943 | -// |
3944 | -// Method to create the AudioQueue from the parameters gathered by the |
3945 | -// AudioFileStream. |
3946 | -// |
3947 | -// Creation is deferred to the handling of the first audio packet (although |
3948 | -// it could be handled any time after kAudioFileStreamProperty_ReadyToProducePackets |
3949 | -// is true). |
3950 | -// |
3951 | -- (void)createQueue |
3952 | -{ |
3953 | - sampleRate = asbd.mSampleRate; |
3954 | - packetDuration = asbd.mFramesPerPacket / sampleRate; |
3955 | - |
3956 | - // create the audio queue |
3957 | - err = AudioQueueNewOutput(&asbd, MyAudioQueueOutputCallback, self, NULL, NULL, 0, &audioQueue); |
3958 | - if (err) |
3959 | - { |
3960 | - [self failWithErrorCode:AS_AUDIO_QUEUE_CREATION_FAILED]; |
3961 | - return; |
3962 | - } |
3963 | - |
3964 | - // start the queue if it has not been started already |
3965 | - // listen to the "isRunning" property |
3966 | - err = AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning, MyAudioQueueIsRunningCallback, self); |
3967 | - if (err) |
3968 | - { |
3969 | - [self failWithErrorCode:AS_AUDIO_QUEUE_ADD_LISTENER_FAILED]; |
3970 | - return; |
3971 | - } |
3972 | - |
3973 | - // get the packet size if it is available |
3974 | - UInt32 sizeOfUInt32 = sizeof(UInt32); |
3975 | - err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_PacketSizeUpperBound, &sizeOfUInt32, &packetBufferSize); |
3976 | - if (err || packetBufferSize == 0) |
3977 | - { |
3978 | - err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MaximumPacketSize, &sizeOfUInt32, &packetBufferSize); |
3979 | - if (err || packetBufferSize == 0) |
3980 | - { |
3981 | - // No packet size available, just use the default |
3982 | - packetBufferSize = kAQDefaultBufSize; |
3983 | - } |
3984 | - } |
3985 | - |
3986 | - // allocate audio queue buffers |
3987 | - for (unsigned int i = 0; i < kNumAQBufs; ++i) |
3988 | - { |
3989 | - err = AudioQueueAllocateBuffer(audioQueue, packetBufferSize, &audioQueueBuffer[i]); |
3990 | - if (err) |
3991 | - { |
3992 | - [self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED]; |
3993 | - return; |
3994 | - } |
3995 | - } |
3996 | - |
3997 | - // get the cookie size |
3998 | - UInt32 cookieSize; |
3999 | - Boolean writable; |
4000 | - OSStatus ignorableError; |
4001 | - ignorableError = AudioFileStreamGetPropertyInfo(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable); |
4002 | - if (ignorableError) |
4003 | - { |
4004 | - return; |
4005 | - } |
4006 | - |
4007 | - // get the cookie data |
4008 | - void* cookieData = calloc(1, cookieSize); |
4009 | - ignorableError = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData); |
4010 | - if (ignorableError) |
4011 | - { |
4012 | - return; |
4013 | - } |
4014 | - |
4015 | - // set the cookie on the queue. |
4016 | - ignorableError = AudioQueueSetProperty(audioQueue, kAudioQueueProperty_MagicCookie, cookieData, cookieSize); |
4017 | - free(cookieData); |
4018 | - if (ignorableError) |
4019 | - { |
4020 | - return; |
4021 | - } |
4022 | -} |
4023 | - |
4024 | -// |
4025 | -// handlePropertyChangeForFileStream:fileStreamPropertyID:ioFlags: |
4026 | -// |
4027 | -// Object method which handles implementation of MyPropertyListenerProc |
4028 | -// |
4029 | -// Parameters: |
4030 | -// inAudioFileStream - should be the same as self->audioFileStream |
4031 | -// inPropertyID - the property that changed |
4032 | -// ioFlags - the ioFlags passed in |
4033 | -// |
4034 | -- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream |
4035 | - fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID |
4036 | - ioFlags:(UInt32 *)ioFlags |
4037 | -{ |
4038 | - @synchronized(self) |
4039 | - { |
4040 | - if ([self isFinishing]) |
4041 | - { |
4042 | - return; |
4043 | - } |
4044 | - |
4045 | - if (inPropertyID == kAudioFileStreamProperty_ReadyToProducePackets) |
4046 | - { |
4047 | - discontinuous = true; |
4048 | - } |
4049 | - else if (inPropertyID == kAudioFileStreamProperty_DataOffset) |
4050 | - { |
4051 | - SInt64 offset; |
4052 | - UInt32 offsetSize = sizeof(offset); |
4053 | - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataOffset, &offsetSize, &offset); |
4054 | - if (err) |
4055 | - { |
4056 | - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
4057 | - return; |
4058 | - } |
4059 | - dataOffset = offset; |
4060 | - |
4061 | - if (audioDataByteCount) |
4062 | - { |
4063 | - fileLength = dataOffset + audioDataByteCount; |
4064 | - } |
4065 | - } |
4066 | - else if (inPropertyID == kAudioFileStreamProperty_AudioDataByteCount) |
4067 | - { |
4068 | - UInt32 byteCountSize = sizeof(UInt64); |
4069 | - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_AudioDataByteCount, &byteCountSize, &audioDataByteCount); |
4070 | - if (err) |
4071 | - { |
4072 | - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
4073 | - return; |
4074 | - } |
4075 | - fileLength = dataOffset + audioDataByteCount; |
4076 | - } |
4077 | - else if (inPropertyID == kAudioFileStreamProperty_DataFormat) |
4078 | - { |
4079 | - if (asbd.mSampleRate == 0) |
4080 | - { |
4081 | - UInt32 asbdSize = sizeof(asbd); |
4082 | - |
4083 | - // get the stream format. |
4084 | - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &asbd); |
4085 | - if (err) |
4086 | - { |
4087 | - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
4088 | - return; |
4089 | - } |
4090 | - } |
4091 | - } |
4092 | - else if (inPropertyID == kAudioFileStreamProperty_FormatList) |
4093 | - { |
4094 | - Boolean outWriteable; |
4095 | - UInt32 formatListSize; |
4096 | - err = AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, &outWriteable); |
4097 | - if (err) |
4098 | - { |
4099 | - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
4100 | - return; |
4101 | - } |
4102 | - |
4103 | - AudioFormatListItem *formatList = malloc(formatListSize); |
4104 | - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, formatList); |
4105 | - [(id)formatList release]; |
4106 | - if (err) |
4107 | - { |
4108 | - free(formatList); |
4109 | - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; |
4110 | - return; |
4111 | - } |
4112 | - |
4113 | - for (int i = 0; i * sizeof(AudioFormatListItem) < formatListSize; i += sizeof(AudioFormatListItem)) |
4114 | - { |
4115 | - AudioStreamBasicDescription pasbd = formatList[i].mASBD; |
4116 | - |
4117 | - if (pasbd.mFormatID == kAudioFormatMPEG4AAC_HE) |
4118 | - { |
4119 | - // |
4120 | - // We've found HE-AAC, remember this to tell the audio queue |
4121 | - // when we construct it. |
4122 | - // |
4123 | -#if !TARGET_IPHONE_SIMULATOR |
4124 | - asbd = pasbd; |
4125 | -#endif |
4126 | - break; |
4127 | - } |
4128 | - } |
4129 | - free(formatList); |
4130 | - } |
4131 | - else |
4132 | - { |
4133 | -// NSLog(@"Property is %c%c%c%c", |
4134 | -// ((char *)&inPropertyID)[3], |
4135 | -// ((char *)&inPropertyID)[2], |
4136 | -// ((char *)&inPropertyID)[1], |
4137 | -// ((char *)&inPropertyID)[0]); |
4138 | - } |
4139 | - } |
4140 | -} |
4141 | - |
4142 | -// |
4143 | -// handleAudioPackets:numberBytes:numberPackets:packetDescriptions: |
4144 | -// |
4145 | -// Object method which handles the implementation of MyPacketsProc |
4146 | -// |
4147 | -// Parameters: |
4148 | -// inInputData - the packet data |
4149 | -// inNumberBytes - byte size of the data |
4150 | -// inNumberPackets - number of packets in the data |
4151 | -// inPacketDescriptions - packet descriptions |
4152 | -// |
4153 | -- (void)handleAudioPackets:(const void *)inInputData |
4154 | - numberBytes:(UInt32)inNumberBytes |
4155 | - numberPackets:(UInt32)inNumberPackets |
4156 | - packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions; |
4157 | -{ |
4158 | - @synchronized(self) |
4159 | - { |
4160 | - if ([self isFinishing]) |
4161 | - { |
4162 | - return; |
4163 | - } |
4164 | - |
4165 | - if (bitRate == 0) |
4166 | - { |
4167 | - // |
4168 | - // m4a and a few other formats refuse to parse the bitrate so |
4169 | - // we need to set an "unparseable" condition here. If you know |
4170 | - // the bitrate (parsed it another way) you can set it on the |
4171 | - // class if needed. |
4172 | - // |
4173 | - bitRate = ~0; |
4174 | - } |
4175 | - |
4176 | - // we have successfully read the first packets from the audio stream, so
4177 | - // clear the "discontinuous" flag |
4178 | - if (discontinuous) |
4179 | - { |
4180 | - discontinuous = false; |
4181 | - } |
4182 | - |
4183 | - if (!audioQueue) |
4184 | - { |
4185 | - [self createQueue]; |
4186 | - } |
4187 | - } |
4188 | - |
4189 | - // the following code assumes we're streaming VBR data. for CBR data, the second branch is used. |
4190 | - if (inPacketDescriptions) |
4191 | - { |
4192 | - for (int i = 0; i < inNumberPackets; ++i) |
4193 | - { |
4194 | - SInt64 packetOffset = inPacketDescriptions[i].mStartOffset; |
4195 | - SInt64 packetSize = inPacketDescriptions[i].mDataByteSize; |
4196 | - size_t bufSpaceRemaining; |
4197 | - |
4198 | - if (processedPacketsCount < BitRateEstimationMaxPackets) |
4199 | - { |
4200 | - processedPacketsSizeTotal += packetSize; |
4201 | - processedPacketsCount += 1; |
4202 | - } |
4203 | - |
4204 | - @synchronized(self) |
4205 | - { |
4206 | - // If the audio was terminated before this point, then |
4207 | - // exit. |
4208 | - if ([self isFinishing]) |
4209 | - { |
4210 | - return; |
4211 | - } |
4212 | - |
4213 | - if (packetSize > packetBufferSize) |
4214 | - { |
4215 | - [self failWithErrorCode:AS_AUDIO_BUFFER_TOO_SMALL]; |
4216 | - } |
4217 | - |
4218 | - bufSpaceRemaining = packetBufferSize - bytesFilled; |
4219 | - } |
4220 | - |
4221 | - // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. |
4222 | - if (bufSpaceRemaining < packetSize) |
4223 | - { |
4224 | - [self enqueueBuffer]; |
4225 | - } |
4226 | - |
4227 | - @synchronized(self) |
4228 | - { |
4229 | - // If the audio was terminated while waiting for a buffer, then |
4230 | - // exit. |
4231 | - if ([self isFinishing]) |
4232 | - { |
4233 | - return; |
4234 | - } |
4235 | - |
4236 | - // |
4237 | - // If there was some kind of issue with enqueueBuffer and we didn't |
4238 | - // make space for the new audio data then back out |
4239 | - // |
4240 | - if (bytesFilled + packetSize > packetBufferSize) |
4241 | - { |
4242 | - return; |
4243 | - } |
4244 | - |
4245 | - // copy data to the audio queue buffer |
4246 | - AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; |
4247 | - memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)inInputData + packetOffset, packetSize); |
4248 | - |
4249 | - // fill out packet description |
4250 | - packetDescs[packetsFilled] = inPacketDescriptions[i]; |
4251 | - packetDescs[packetsFilled].mStartOffset = bytesFilled; |
4252 | - // keep track of bytes filled and packets filled |
4253 | - bytesFilled += packetSize; |
4254 | - packetsFilled += 1; |
4255 | - } |
4256 | - |
4257 | - // if that was the last free packet description, then enqueue the buffer. |
4258 | - size_t packetsDescsRemaining = kAQMaxPacketDescs - packetsFilled; |
4259 | - if (packetsDescsRemaining == 0) { |
4260 | - [self enqueueBuffer]; |
4261 | - } |
4262 | - } |
4263 | - } |
4264 | - else |
4265 | - { |
4266 | - size_t offset = 0; |
4267 | - while (inNumberBytes) |
4268 | - { |
4269 | - // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. |
4270 | - size_t bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; |
4271 | - if (bufSpaceRemaining < inNumberBytes) |
4272 | - { |
4273 | - [self enqueueBuffer]; |
4274 | - } |
4275 | - |
4276 | - @synchronized(self) |
4277 | - { |
4278 | - // If the audio was terminated while waiting for a buffer, then |
4279 | - // exit. |
4280 | - if ([self isFinishing]) |
4281 | - { |
4282 | - return; |
4283 | - } |
4284 | - |
4285 | - bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; |
4286 | - size_t copySize; |
4287 | - if (bufSpaceRemaining < inNumberBytes) |
4288 | - { |
4289 | - copySize = bufSpaceRemaining; |
4290 | - } |
4291 | - else |
4292 | - { |
4293 | - copySize = inNumberBytes; |
4294 | - } |
4295 | - |
4296 | - // |
4297 | - // If there was some kind of issue with enqueueBuffer and we didn't |
4298 | - // make space for the new audio data then back out |
4299 | - // |
4300 | - if (bytesFilled > packetBufferSize) |
4301 | - { |
4302 | - return; |
4303 | - } |
4304 | - |
4305 | - // copy data to the audio queue buffer |
4306 | - AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; |
4307 | - memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)(inInputData + offset), copySize); |
4308 | - |
4309 | - |
4310 | - // keep track of bytes filled and packets filled |
4311 | - bytesFilled += copySize; |
4312 | - packetsFilled = 0; |
4313 | - inNumberBytes -= copySize; |
4314 | - offset += copySize; |
4315 | - } |
4316 | - } |
4317 | - } |
4318 | -} |
4319 | - |
4320 | -// |
4321 | -// handleBufferCompleteForQueue:buffer: |
4322 | -// |
4323 | -// Handles the buffer completion notification from the audio queue
4324 | -// |
4325 | -// Parameters: |
4326 | -// inAQ - the queue |
4327 | -// inBuffer - the buffer |
4328 | -// |
4329 | -- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ |
4330 | - buffer:(AudioQueueBufferRef)inBuffer |
4331 | -{ |
4332 | - unsigned int bufIndex = -1; |
4333 | - for (unsigned int i = 0; i < kNumAQBufs; ++i) |
4334 | - { |
4335 | - if (inBuffer == audioQueueBuffer[i]) |
4336 | - { |
4337 | - bufIndex = i; |
4338 | - break; |
4339 | - } |
4340 | - } |
4341 | - |
4342 | - if (bufIndex == -1) |
4343 | - { |
4344 | - [self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_MISMATCH]; |
4345 | - pthread_mutex_lock(&queueBuffersMutex); |
4346 | - pthread_cond_signal(&queueBufferReadyCondition); |
4347 | - pthread_mutex_unlock(&queueBuffersMutex); |
4348 | - return; |
4349 | - } |
4350 | - |
4351 | - // signal waiting thread that the buffer is free. |
4352 | - pthread_mutex_lock(&queueBuffersMutex); |
4353 | - inuse[bufIndex] = false; |
4354 | - buffersUsed--; |
4355 | - |
4356 | -// |
4357 | -// Enable this logging to measure how many buffers are queued at any time. |
4358 | -// |
4359 | -#if LOG_QUEUED_BUFFERS |
4360 | - NSLog(@"Queued buffers: %ld", buffersUsed); |
4361 | -#endif |
4362 | - |
4363 | - pthread_cond_signal(&queueBufferReadyCondition); |
4364 | - pthread_mutex_unlock(&queueBuffersMutex); |
4365 | -} |
4366 | - |
4367 | -// |
4368 | -// handlePropertyChangeForQueue:propertyID: |
4369 | -// |
4370 | -// Implementation for MyAudioQueueIsRunningCallback |
4371 | -// |
4372 | -// Parameters: |
4373 | -// inAQ - the audio queue |
4374 | -// inID - the property ID |
4375 | -// |
4376 | -- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ |
4377 | - propertyID:(AudioQueuePropertyID)inID |
4378 | -{ |
4379 | - NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; |
4380 | - UIBackgroundTaskIdentifier newTaskId = UIBackgroundTaskInvalid; |
4381 | - |
4382 | - @synchronized(self) |
4383 | - { |
4384 | - if (inID == kAudioQueueProperty_IsRunning) |
4385 | - { |
4386 | - if (state == AS_STOPPING) |
4387 | - { |
4388 | - self.state = AS_STOPPED; |
4389 | - } |
4390 | - else if (state == AS_WAITING_FOR_QUEUE_TO_START) |
4391 | - { |
4392 | - // |
4393 | - // Note about this bug avoidance quirk: |
4394 | - // |
4395 | - // On cleanup of the AudioQueue thread, on rare occasions, there would |
4396 | - // be a crash in CFSetContainsValue as a CFRunLoopObserver was getting |
4397 | - // removed from the CFRunLoop. |
4398 | - // |
4399 | - // After lots of testing, it appeared that the audio thread was |
4400 | - // attempting to remove CFRunLoop observers from the CFRunLoop after the |
4401 | - // thread had already deallocated the run loop. |
4402 | - // |
4403 | - // By creating an NSRunLoop for the AudioQueue thread, it changes the |
4404 | - // thread destruction order and seems to avoid this crash bug -- or |
4405 | - // at least I haven't had it since (nasty hard to reproduce error!) |
4406 | - // |
4407 | - |
4408 | - newTaskId = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:NULL]; |
4409 | - |
4410 | - [NSRunLoop currentRunLoop]; |
4411 | - |
4412 | - self.state = AS_PLAYING; |
4413 | - |
4414 | - if (self.bgTaskId != UIBackgroundTaskInvalid) |
4415 | - { |
4416 | - [[UIApplication sharedApplication] endBackgroundTask: bgTaskId]; |
4417 | - } |
4418 | - self.bgTaskId = newTaskId; |
4419 | - |
4420 | - } |
4421 | - else |
4422 | - { |
4423 | - NSLog(@"AudioQueue changed state in unexpected way."); |
4424 | - } |
4425 | - } |
4426 | - } |
4427 | - |
4428 | - [pool release]; |
4429 | -} |
4430 | - |
4431 | -#if TARGET_OS_IPHONE |
4432 | -// |
4433 | -// handleInterruptionChangeForQueue:propertyID: |
4434 | -// |
4435 | -// Implementation for MyAudioQueueInterruptionListener |
4436 | -// |
4437 | -// Parameters: |
4438 | -// inAQ - the audio queue |
4439 | -// inID - the property ID |
4440 | -// |
4441 | -- (void)handleInterruptionChangeToState:(AudioQueuePropertyID)inInterruptionState |
4442 | -{ |
4443 | - if (inInterruptionState == kAudioSessionBeginInterruption) |
4444 | - { |
4445 | - if ([self isPlaying]) { |
4446 | - [self pause]; |
4447 | - |
4448 | - pausedByInterruption = YES; |
4449 | - } |
4450 | - } |
4451 | - else if (inInterruptionState == kAudioSessionEndInterruption) |
4452 | - { |
4453 | - AudioSessionSetActive( true ); |
4454 | - |
4455 | - if ([self isPaused] && pausedByInterruption) { |
4456 | - [self pause]; // this is actually resume |
4457 | - |
4458 | - pausedByInterruption = NO; // this is redundant |
4459 | - } |
4460 | - } |
4461 | -} |
4462 | -#endif |
4463 | - |
4464 | -@end |
4465 | - |
4466 | - |