
/io/audio.d

http://github.com/wilkie/djehuty
/*
 * audio.d
 *
 * This file implements the Audio class. This class facilitates low-level access
 * to the audio device. The Sound class, however, is a higher-level accessor.
 *
 * Author: Dave Wilkinson
 *
 */

module io.audio;

import core.stream;
import core.time;
import core.event;

import synch.semaphore;

import platform.vars.wave;

import scaffold.wave;

import io.console;

// Section: Types

// Description: This structure contains information about an audio file and its uncompressed format. The Audio class uses this to know how to send buffers given by the audio codec to the audio device.
struct AudioFormat {
	uint compressionType;

	// Description: The number of channels.
	uint numChannels;

	// Description: The number of samples per second.
	uint samplesPerSecond;

	// Description: The average number of bytes per second.
	uint averageBytesPerSecond;

	// Description: The block alignment.
	uint blockAlign;

	// Description: The number of bits per sample.
	uint bitsPerSample;
}
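
// Illustrative sketch, not part of the original file: how these fields
// typically relate for uncompressed 16-bit stereo PCM at 44.1 kHz. The
// compressionType value is an assumption; the codec layer defines the
// actual constants it passes in.
unittest {
	AudioFormat fmt;
	fmt.compressionType = 1;                      // assumed: 1 denotes uncompressed PCM
	fmt.numChannels = 2;                          // stereo
	fmt.samplesPerSecond = 44100;                 // 44.1 kHz sample rate
	fmt.bitsPerSample = 16;                       // 16-bit samples
	fmt.blockAlign = fmt.numChannels * (fmt.bitsPerSample / 8);        // 4 bytes per sample frame
	fmt.averageBytesPerSecond = fmt.samplesPerSecond * fmt.blockAlign; // 176400 bytes per second
	assert(fmt.averageBytesPerSecond == 176400);
}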

struct AudioInfo {
	// File Information

	long totalTime;

	// ID3 Information?

	// --- //
}

// Section: Core

// Description: This class provides a low-level interface to an audio device.
class Audio : Dispatcher {

	enum Signal {
		BufferPlayed,
	}

	this() {
	}

	~this() {
		closeDevice();
	}

	// Description: Opens an audio device with the format given. The format describes the representation of the audio stream.
	// format: The format of the audio stream that will indicate the representation of any audio buffers passed to the device.
	void openDevice(AudioFormat format) {
		if (_opened) {
			// reopen
			synchronized(this) {
				// Only reopen when the requested format actually differs
				// from the one the device is currently using.
				if (_format != format) {
					_format = format;
					WaveOpenDevice(this, _pfvars, format);
				}
			}
			return;
		}

		synchronized(this) {
			_opened = true;
			_format = format;
			WaveOpenDevice(this, _pfvars, format);
		}
	}

	// Description: Closes an already opened device, stops playback, and frees any pending buffers.
	void closeDevice() {
		synchronized(this) {
			if (_opened) {
				WaveCloseDevice(this, _pfvars);
				_opened = false;
			}
		}
	}

	// --- //

	// Description: Sends an audio buffer to the device. These can be queued, and any number may be sent.
	void sendBuffer(Stream waveBuffer, bool isLast = false) {
		synchronized(this) {
			if (_opened) {
				WaveSendBuffer(this, _pfvars, waveBuffer, isLast);
			}
		}
	}

	// Description: Resumes a paused device.
	void resume() {
		synchronized(this) {
			if (_opened) {
				WaveResume(this, _pfvars);
			}
		}
	}

	// Description: Pauses playback of a device.
	void pause() {
		synchronized(this) {
			if (_opened) {
				WavePause(this, _pfvars);
			}
		}
	}

	// Description: Returns the current playback position, or Time.init when the device is not open.
	Time position() {
		synchronized(this) {
			if (!WaveIsOpen(this, _pfvars)) {
				Time myTime = Time.init;
				return myTime;
			}
		}
		return WaveGetPosition(this, _pfvars);
	}

protected:

	WavePlatformVars _pfvars;
	AudioFormat _format;

	bool _opened;
}

void WaveFireCallback(ref Audio w) {
	if (w.responder !is null) {
		w.raiseSignal(Audio.Signal.BufferPlayed);
	}
}
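
// Illustrative sketch, not part of the original file: one way a caller might
// drive this class. The decodeNextBuffer() helper and the exact Stream usage
// are hypothetical; in practice the audio codec (see the Sound class) produces
// the buffers, and Signal.BufferPlayed is raised via WaveFireCallback each
// time a queued buffer finishes playing.
/*
	auto device = new Audio();

	AudioFormat fmt;
	fmt.numChannels = 2;
	fmt.samplesPerSecond = 44100;
	fmt.bitsPerSample = 16;
	fmt.blockAlign = fmt.numChannels * (fmt.bitsPerSample / 8);
	fmt.averageBytesPerSecond = fmt.samplesPerSecond * fmt.blockAlign;

	device.openDevice(fmt);

	// Queue decoded PCM data; pass isLast = true with the final buffer so the
	// device knows playback is complete.
	Stream pcm = decodeNextBuffer();              // hypothetical codec call
	device.sendBuffer(pcm, false);

	// Playback can be paused and resumed while buffers remain queued.
	device.pause();
	device.resume();

	device.closeDevice();
*/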