/
audio.d
155 lines (121 loc) · 2.99 KB
/
audio.d
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
/*
* audio.d
*
 * This file implements the Audio class. This class facilitates low-level access
* to the audio device. The Sound class, however, is a higher-level accessor.
*
* Author: Dave Wilkinson
*
*/
module io.audio;
import core.stream;
import core.time;
import core.event;
import synch.semaphore;
import platform.vars.wave;
import scaffold.wave;
import io.console;
// Section: Types
// Description: This structure contains information about an audio file and its uncompressed format. The Audio class uses this to know how to send buffers given by the audio codec to the audio device.
struct AudioFormat {
	// Description: The compression type of the stream. NOTE(review): the
	// set of valid values is not visible here — presumably codec/platform
	// identifiers consumed by the scaffold.wave layer; confirm there.
	uint compressionType;
	// Description: The number of channels.
	uint numChannels;
	// Description: The number of samples per second.
	uint samplesPerSecond;
	// Description: The average number of bytes per second.
	uint averageBytesPerSecond;
	// Description: The block alignment.
	uint blockAlign;
	// Description: The number of bits per sample.
	uint bitsPerSample;
}
// Description: This structure contains general information about an audio file.
struct AudioInfo {
	// File Information
	// Description: The total length of the stream. NOTE(review): units are
	// not evident from this file (milliseconds? microseconds?) — confirm
	// against the codec implementations that populate this field.
	long totalTime;
	// ID3 Information?
	// --- //
}
// Section: Core
// Description: This class provides a low-level interface to an audio device.
// Description: This class provides a low-level interface to an audio device.
// All device state (_pfvars, _format, _opened) is guarded by synchronized(this).
class Audio : Dispatcher {

	// Signals raised through the Dispatcher interface.
	enum Signal {
		// Raised (via WaveFireCallback) when a queued buffer finishes playing.
		BufferPlayed,
	}

	this() {
	}

	~this() {
		// Ensure the platform device is released when the object dies.
		closeDevice();
	}

	// Description: Opens an audio device with the format given. The format
	// describes the representation of the audio stream. If the device is
	// already open, it is reopened only when the requested format differs
	// from the current one.
	// format: The format of the audio stream that will indicate the
	// representation of any audio buffers passed to the device.
	void openDevice(AudioFormat format) {
		if (_opened) {
			// reopen
			synchronized(this) {
				// BUGFIX: was `_format == format`, which reopened the
				// device only when the format was unchanged (a no-op
				// request) and ignored genuine format changes. Reopen
				// when the format actually differs.
				if (_format != format) {
					_format = format;
					WaveOpenDevice(this, _pfvars, format);
				}
			}
			return;
		}

		synchronized(this) {
			_opened = true;
			_format = format;
			WaveOpenDevice(this, _pfvars, format);
		}
	}

	// Description: Closes an already opened device, stops playback, and
	// frees any pending buffers. Safe to call when the device is not open.
	void closeDevice() {
		synchronized(this) {
			if (_opened) {
				WaveCloseDevice(this, _pfvars);
				_opened = false;
			}
		}
	}

	// --- //

	// Description: Sends an audio buffer to the device. These can be queued,
	// and any number may be sent. Ignored when the device is not open.
	// waveBuffer: The stream containing audio data in the opened format.
	// isLast: Pass true for the final buffer of the stream.
	void sendBuffer(Stream waveBuffer, bool isLast = false) {
		synchronized(this) {
			if (_opened) {
				WaveSendBuffer(this, _pfvars, waveBuffer, isLast);
			}
		}
	}

	// Description: Resumes a paused device. Ignored when the device is not open.
	void resume() {
		synchronized(this) {
			if (_opened) {
				WaveResume(this, _pfvars);
			}
		}
	}

	// Description: Pauses playback of a device. Ignored when the device is not open.
	void pause() {
		synchronized(this) {
			if (_opened) {
				WavePause(this, _pfvars);
			}
		}
	}

	// Description: Returns the current playback position, or Time.init when
	// the device is not open.
	Time position() {
		synchronized(this) {
			if (!WaveIsOpen(this, _pfvars)) {
				Time myTime = Time.init;
				return myTime;
			}
			// BUGFIX: query while still holding the lock — previously the
			// lock was released before WaveGetPosition, so closeDevice()
			// on another thread could tear down the device in between.
			return WaveGetPosition(this, _pfvars);
		}
	}

protected:

	WavePlatformVars _pfvars;
	AudioFormat _format;
	bool _opened;
}
// Description: Callback hook invoked by the platform wave layer; raises the
// BufferPlayed signal on the given Audio object when it has a responder.
void WaveFireCallback(ref Audio w) {
	// Nothing to notify when no responder is attached.
	if (w.responder is null) {
		return;
	}
	w.raiseSignal(Audio.Signal.BufferPlayed);
}