--- /dev/null
+
+#if NET_2_0
+using System;
+using System.IO;
+using System.Runtime.InteropServices;
+
+namespace Mono.Audio {
+
+// Base class for parsed audio stream data (WAV, AU, ...). Subclasses expose
+// the stream's format parameters and a Play loop that feeds decoded bytes
+// to an AudioDevice.
+#if PUBLIC_API
+	public
+#else
+	internal
+#endif
+	abstract class AudioData {
+		// Chunk size (bytes) used by the Play loops in subclasses.
+		protected const int buffer_size = 4096;
+		// Cooperative stop flag; Play implementations poll it between chunks.
+		bool stopped = false;
+
+		// Number of interleaved channels (1 = mono, 2 = stereo).
+		public abstract int Channels {
+			get;
+		}
+
+		// Sample rate in frames per second.
+		public abstract int Rate {
+			get;
+		}
+
+		// Sample encoding of the stream (values mirror alsa, see AudioFormat).
+		public abstract AudioFormat Format {
+			get;
+		}
+
+		// Configures the output device to match this stream's format.
+		public virtual void Setup (AudioDevice dev) {
+			dev.SetFormat (Format, Channels, Rate);
+		}
+
+		// Streams the audio payload to the device until EOF or IsStopped.
+		public abstract void Play (AudioDevice dev);
+
+		// Set to true to make a running Play loop exit after the current chunk.
+		public virtual bool IsStopped {
+			get {
+				return stopped;
+			}
+			set {
+				stopped = value;
+			}
+		}
+	}
+
+ /*public enum WavCmpCodes {
+ Unknown,
+ PCM,
+ ADPCM,
+ }*/
+
+// Parser/player for RIFF/WAVE PCM streams.
+// NOTE(review): assumes the canonical 44-byte header layout — a 16-byte
+// "fmt " chunk immediately followed by the "data" chunk. Files with a
+// larger fmt chunk or extra chunks (LIST, fact, ...) are rejected even
+// though they are valid WAV; the parsed extra_size is never used to skip
+// ahead — confirm whether that is acceptable for the intended inputs.
+#if PUBLIC_API
+	public
+#else
+	internal
+#endif
+	class WavData : AudioData {
+		Stream stream;
+		short channels;
+		// Bytes per sample per channel (1 for U8, 2 for S16_LE); used to
+		// convert a byte count into the frame count passed to PlaySample.
+		ushort frame_divider;
+		int sample_rate;
+		// Byte length of the "data" chunk payload.
+		int data_len = 0;
+		AudioFormat format;
+
+		// Parses the WAV header from 'data', leaving the stream positioned
+		// at the first sample. Throws Exception on any format mismatch.
+		public WavData (Stream data) {
+			stream = data;
+			byte[] buffer = new byte [12+32];
+			int c = stream.Read (buffer, 0, 12 + 32);
+			// RIFF container magic: "RIFF" at 0, "WAVE" at 8.
+			if (c != (12 + 32) ||
+					buffer [0] != 'R' || buffer [1] != 'I' || buffer [2] != 'F' || buffer [3] != 'F' ||
+					buffer [8] != 'W' || buffer [9] != 'A' || buffer [10] != 'V' || buffer [11] != 'E') {
+				throw new Exception ("incorrect format" + c);
+			}
+			// "fmt " sub-chunk must follow immediately.
+			if (buffer [12] != 'f' || buffer [13] != 'm' || buffer [14] != 't' || buffer [15] != ' ') {
+				throw new Exception ("incorrect format (fmt)");
+			}
+			// fmt chunk size, little-endian; parsed but not used to skip
+			// past a non-16-byte fmt chunk (see class NOTE above).
+			int extra_size = buffer [16];
+			extra_size |= buffer [17] << 8;
+			extra_size |= buffer [18] << 16;
+			extra_size |= buffer [19] << 24;
+			// Compression code 1 == uncompressed PCM; anything else is rejected.
+			int compression = buffer [20] | (buffer [21] << 8);
+			if (compression != 1)
+				throw new Exception ("incorrect format (not PCM)");
+			channels = (short)(buffer [22] | (buffer [23] << 8));
+			// All multi-byte header fields below are little-endian.
+			sample_rate = buffer [24];
+			sample_rate |= buffer [25] << 8;
+			sample_rate |= buffer [26] << 16;
+			sample_rate |= buffer [27] << 24;
+			int avg_bytes = buffer [28];
+			avg_bytes |= buffer [29] << 8;
+			avg_bytes |= buffer [30] << 16;
+			avg_bytes |= buffer [31] << 24;
+			int block_align = buffer [32] | (buffer [33] << 8);
+			int sign_bits = buffer [34] | (buffer [35] << 8);
+			/*Console.WriteLine (extra_size);
+			Console.WriteLine (compression);
+			Console.WriteLine (channels);
+			Console.WriteLine (sample_rate);
+			Console.WriteLine (avg_bytes);
+			Console.WriteLine (block_align);
+			Console.WriteLine (sign_bits);*/
+			// "data" sub-chunk expected right after the 16-byte fmt body.
+			if (buffer [36] != 'd' || buffer [37] != 'a' || buffer [38] != 't' || buffer [39] != 'a') {
+				throw new Exception ("incorrect format (data)");
+			}
+			// data chunk payload size, little-endian.
+			int sample_size = buffer [40];
+			sample_size |= buffer [41] << 8;
+			sample_size |= buffer [42] << 16;
+			sample_size |= buffer [43] << 24;
+			data_len = sample_size;
+			//Console.WriteLine (sample_size);
+			switch (sign_bits) {
+			case 8:
+				frame_divider = 1;
+				format = AudioFormat.U8; break;
+			case 16:
+				frame_divider = 2;
+				format = AudioFormat.S16_LE; break;
+			default:
+				throw new Exception ("bits per sample");
+			}
+		}
+
+		// Streams up to data_len bytes to the device in buffer_size chunks,
+		// stopping early if IsStopped is set.
+		// NOTE(review): frame_divider does not account for the channel count,
+		// so the frame count passed to PlaySample looks wrong for multi-channel
+		// data (a stereo S16 frame is 4 bytes, not 2) — confirm against alsa.
+		public override void Play (AudioDevice dev) {
+			int read;
+			int count = data_len;
+			byte[] buffer = new byte [buffer_size];
+			while (!IsStopped && count >= 0 && (read = stream.Read (buffer, 0, System.Math.Min (buffer.Length, count))) > 0) {
+				// FIXME: account for leftover bytes
+				dev.PlaySample (buffer, read/frame_divider);
+				count -= read;
+			}
+		}
+
+		public override int Channels {
+			get {return channels;}
+		}
+		public override int Rate {
+			get {return sample_rate;}
+		}
+		public override AudioFormat Format {
+			get {return format;}
+		}
+	}
+
+	// http://en.wikipedia.org/wiki/Au_file_format
+	// Parser/player for Sun/NeXT .au (".snd") streams. The 24-byte header
+	// fields are big-endian, hence the reversed byte order below compared
+	// to WavData. Only mu-law encoding (1) is accepted.
+#if PUBLIC_API
+	public
+#else
+	internal
+#endif
+	class AuData : AudioData {
+		Stream stream;
+		short channels;
+		// Bytes per sample per channel (1 for MU_LAW); see WavData.
+		ushort frame_divider;
+		int sample_rate;
+		// Byte length of the audio payload; -1 in the header means "unknown".
+		int data_len = 0;
+		AudioFormat format;
+
+		// Parses the .au header from 'data' and skips any padding so the
+		// stream is positioned at the first sample. Throws on bad format.
+		public AuData (Stream data) {
+			stream = data;
+			byte[] buffer = new byte [24];
+			int c = stream.Read (buffer, 0, 24);
+			// Magic: ".snd".
+			if (c != 24 ||
+					buffer [0] != '.' || buffer [1] != 's' || buffer [2] != 'n' || buffer [3] != 'd') {
+				throw new Exception ("incorrect format" + c);
+			}
+			// Offset of the audio data from the start of the file (big-endian).
+			int data_offset = buffer [7];
+			data_offset |= buffer [6] << 8;
+			data_offset |= buffer [5] << 16;
+			data_offset |= buffer [4] << 24;
+			// Payload length; 0xffffffff folds to -1 (handled below).
+			data_len = buffer [11];
+			data_len |= buffer [10] << 8;
+			data_len |= buffer [9] << 16;
+			data_len |= buffer [8] << 24;
+			int encoding = buffer [15];
+			encoding |= buffer [14] << 8;
+			encoding |= buffer [13] << 16;
+			encoding |= buffer [12] << 24;
+			sample_rate = buffer [19];
+			sample_rate |= buffer [18] << 8;
+			sample_rate |= buffer [17] << 16;
+			sample_rate |= buffer [16] << 24;
+			int chans = buffer [23];
+			chans |= buffer [22] << 8;
+			chans |= buffer [21] << 16;
+			chans |= buffer [20] << 24;
+			channels = (short)chans;
+			// Header is 24 bytes, so a smaller offset is invalid; only
+			// mono/stereo are supported.
+			if (data_offset < 24 || (chans != 1 && chans != 2)) {
+				throw new Exception ("incorrect format offset" + data_offset);
+			}
+			// Skip optional annotation bytes between header and data.
+			if (data_offset != 24) {
+				for (int l = 24; l < data_offset; ++l)
+					stream.ReadByte ();
+			}
+			switch (encoding) {
+			case 1:
+				frame_divider = 1;
+				format = AudioFormat.MU_LAW; break;
+			default:
+				throw new Exception ("incorrect format encoding" + encoding);
+			}
+			// Unknown length: play to end of the (seekable) stream.
+			if (data_len == -1) {
+				data_len = (int)stream.Length - data_offset;
+			}
+			// Console.WriteLine ("format: {0}, rate: {1}", format, sample_rate);
+		}
+
+		// Streams up to data_len bytes to the device in buffer_size chunks,
+		// stopping early if IsStopped is set (same loop as WavData.Play).
+		public override void Play (AudioDevice dev) {
+			int read;
+			int count = data_len;
+			byte[] buffer = new byte [buffer_size];
+			while (!IsStopped && count >= 0 && (read = stream.Read (buffer, 0, System.Math.Min (buffer.Length, count))) > 0) {
+				// FIXME: account for leftover bytes
+				dev.PlaySample (buffer, read/frame_divider);
+				count -= read;
+			}
+		}
+
+		public override int Channels {
+			get {return channels;}
+		}
+		public override int Rate {
+			get {return sample_rate;}
+		}
+		public override AudioFormat Format {
+			get {return format;}
+		}
+	}
+
+}
+
+#endif
+
--- /dev/null
+
+#if NET_2_0
+using System;
+using System.IO;
+using System.Runtime.InteropServices;
+
+namespace Mono.Audio {
+
+	/* these are the values used by alsa */
+	// IMPORTANT: the declaration order defines the ordinal values, which
+	// AlsaDevice.SetFormat casts directly to int and hands to
+	// snd_pcm_set_params — do not reorder or insert members.
+#if PUBLIC_API
+	public
+#else
+	internal
+#endif
+	enum AudioFormat {
+		S8,
+		U8,
+		S16_LE,
+		S16_BE,
+		U16_LE,
+		U16_BE,
+		S24_LE,
+		S24_BE,
+		U24_LE,
+		U24_BE,
+		S32_LE,
+		S32_BE,
+		U32_LE,
+		U32_BE,
+		FLOAT_LE,
+		FLOAT_BE,
+		FLOAT64_LE,
+		FLOAT64_BE,
+		IEC958_SUBFRAME_LE,
+		IEC958_SUBFRAME_BE,
+		MU_LAW,
+		A_LAW,
+		IMA_ADPCM,
+		MPEG,
+		GSM
+	}
+
+// Output device abstraction. The base class itself is a "silent" device:
+// every operation succeeds without doing anything, so playback code can
+// run unchanged on systems where no real backend is available.
+#if PUBLIC_API
+	public
+#else
+	internal
+#endif
+	class AudioDevice {
+
+		// Attempts to open an ALSA device; returns null on any failure
+		// (missing libasound, no such device, ...) instead of throwing.
+		static AudioDevice TryAlsa (string name) {
+			AudioDevice dev;
+			try {
+				dev = new AlsaDevice (name);
+				return dev;
+			} catch {
+				return null;
+			}
+		}
+
+		// Factory: returns a working backend for 'name' (may be null for
+		// the default device), falling back to a silent no-op device.
+		public static AudioDevice CreateDevice (string name) {
+			AudioDevice dev;
+
+			dev = TryAlsa (name);
+			/* if no option is found, return a silent device */
+			if (dev == null)
+				dev = new AudioDevice ();
+			return dev;
+		}
+
+		// Configures sample format/channels/rate; returns true on success.
+		public virtual bool SetFormat (AudioFormat format, int channels, int rate) {
+			return true;
+		}
+
+		// Writes num_frames frames from 'buffer'; returns frames written.
+		public virtual int PlaySample (byte[] buffer, int num_frames) {
+			return num_frames;
+		}
+
+		// Blocks until all queued audio has been played.
+		public virtual void Wait () {
+		}
+	}
+
+	// ALSA playback backend via P/Invoke into libasound. Owns a native
+	// snd_pcm handle, released through the standard Dispose pattern.
+	class AlsaDevice: AudioDevice, IDisposable {
+		IntPtr handle;
+
+		[DllImport ("libasound.so.2")]
+		static extern int snd_pcm_open (ref IntPtr handle, string pcm_name, int stream, int mode);
+
+		[DllImport ("libasound.so.2")]
+		static extern int snd_pcm_close (IntPtr handle);
+
+		[DllImport ("libasound.so.2")]
+		static extern int snd_pcm_drain (IntPtr handle);
+
+		[DllImport ("libasound.so.2")]
+		static extern int snd_pcm_writei (IntPtr handle, byte[] buf, int size);
+
+		[DllImport ("libasound.so.2")]
+		static extern int snd_pcm_set_params (IntPtr handle, int format, int access, int channels, int rate, int soft_resample, int latency);
+
+		// Opens the named PCM (null -> "default") for blocking playback
+		// (stream 0 = SND_PCM_STREAM_PLAYBACK, mode 0 = blocking).
+		// Throws Exception when snd_pcm_open fails, which CreateDevice
+		// catches to fall back to the silent device.
+		public AlsaDevice (string name) {
+			if (name == null)
+				name = "default";
+			int err = snd_pcm_open (ref handle, name, 0, 0);
+			if (err < 0)
+				throw new Exception ("no open " + err);
+		}
+
+		// Finalizer safety net: releases the native handle if Dispose was
+		// never called.
+		~AlsaDevice () {
+			Dispose (false);
+		}
+
+		public void Dispose () {
+			Dispose (true);
+			GC.SuppressFinalize (this);
+		}
+
+		// Closes the PCM handle exactly once; safe to call repeatedly.
+		protected virtual void Dispose (bool disposing) {
+			if (disposing) {
+
+			}
+			if (handle != IntPtr.Zero)
+				snd_pcm_close (handle);
+			handle = IntPtr.Zero;
+		}
+
+		// Applies the hardware parameters. The AudioFormat ordinals match
+		// alsa's snd_pcm_format_t, so the enum is cast straight to int.
+		// access 3 = SND_PCM_ACCESS_RW_INTERLEAVED, soft_resample enabled,
+		// latency 500000 us.
+		public override bool SetFormat (AudioFormat format, int channels, int rate) {
+			int err = snd_pcm_set_params (handle, (int)format, 3, channels, rate, 1, 500000);
+			return err == 0;
+		}
+
+		// Writes interleaved frames; returns frames written, or a negative
+		// alsa error code (not handled here — callers see it as-is).
+		public override int PlaySample (byte[] buffer, int num_frames) {
+			int frames = snd_pcm_writei (handle, buffer, num_frames);
+			return frames;
+		}
+
+		// Blocks until the playback queue drains.
+		public override void Wait () {
+			snd_pcm_drain (handle);
+		}
+	}
+
+}
+#endif
+
+Wed Dec 20 18:26:36 CET 2006 Paolo Molaro <lupus@ximian.com>
+
+ * AudioData.cs, AudioDevice.cs: small (alsa) audio device and
+ Wav file parsing implementation.
+ * SoundPlayer.cs, SystemSound.cs: implemented most of the methods.
+
Tue Dec 19 19:17:06 CET 2006 Paolo Molaro <lupus@ximian.com>
* SoundPlayer.cs, SystemSound.cs, SystemSounds.cs: first stubs
#if NET_2_0
using System;
using System.IO;
+using System.Threading;
using System.Runtime.Serialization;
using System.ComponentModel;
+using Mono.Audio;
namespace System.Media {
string sound_location;
Stream audiostream;
object tag = String.Empty;
+ MemoryStream mstream;
+ AudioDevice adev;
+ AudioData adata;
bool load_completed;
+ bool stopped;
int load_timeout = 10000;
public SoundPlayer ()
{
+ sound_location = String.Empty;
}
- public SoundPlayer (Stream stream)
+ public SoundPlayer (Stream stream): this ()
{
- sound_location = String.Empty;
+ if (stream == null)
+ throw new ArgumentNullException ("stream");
audiostream = stream;
}
- public SoundPlayer (string soundLocation)
+ public SoundPlayer (string soundLocation): this ()
{
+ if (soundLocation == null)
+ throw new ArgumentNullException ("soundLocation");
sound_location = soundLocation;
}
- protected SoundPlayer (SerializationInfo serializationInfo, StreamingContext context)
+ protected SoundPlayer (SerializationInfo serializationInfo, StreamingContext context): this ()
{
throw new NotImplementedException ();
}
+ void LoadFromStream (Stream s)
+ {
+ mstream = new MemoryStream ();
+ byte[] buf = new byte [4096];
+ int count;
+ while ((count = s.Read (buf, 0, 4096)) > 0) {
+ mstream.Write (buf, 0, count);
+ }
+ mstream.Position = 0;
+ }
+
public void Load ()
{
+ // can this be reused to load the same file again without re-setting the location?
+ if (load_completed)
+ return;
+ if (audiostream != null) {
+ LoadFromStream (audiostream);
+ } else {
+ throw new NotImplementedException ("from uri");
+ }
+ load_completed = true;
+ AsyncCompletedEventArgs e = new AsyncCompletedEventArgs (null, false, this);
+ OnLoadCompleted (e);
+ if (LoadCompleted != null)
+ LoadCompleted (this, e);
+ }
+
+ void AsyncFinished (IAsyncResult ar)
+ {
+ ThreadStart async = ar.AsyncState as ThreadStart;
+ async.EndInvoke (ar);
}
public void LoadAsync ()
{
+ if (load_completed)
+ return;
+ ThreadStart async = new ThreadStart (Load);
+ IAsyncResult a = async.BeginInvoke (AsyncFinished, async);
}
protected virtual void OnLoadCompleted (AsyncCompletedEventArgs e)
{
}
+ void Start ()
+ {
+ stopped = false;
+ if (adata != null)
+ adata.IsStopped = false;
+ if (!load_completed)
+ Load ();
+ }
+
public void Play ()
{
+ ThreadStart async = new ThreadStart (PlaySync);
+ IAsyncResult a = async.BeginInvoke (AsyncFinished, async);
}
+ public void PlayLoop ()
+ {
+ Start ();
+ while (!stopped) {
+ PlaySync ();
+ }
+ }
public void PlayLooping ()
{
+ ThreadStart async = new ThreadStart (PlayLoop);
+ IAsyncResult a = async.BeginInvoke (AsyncFinished, async);
}
public void PlaySync ()
{
+ Start ();
+ try {
+ adata = new WavData (mstream);
+ if (adev == null)
+ adev = AudioDevice.CreateDevice (null);
+ if (adata != null) {
+ adata.Setup (adev);
+ adata.Play (adev);
+ }
+ } catch {
+ }
}
public void Stop ()
{
+ stopped = true;
+ if (adata != null)
+ adata.IsStopped = true;
}
void ISerializable.GetObjectData (SerializationInfo info, StreamingContext context)
return load_timeout;
}
set {
+ if (value < 0)
+ throw new ArgumentException ("timeout must be >= 0");
load_timeout = value;
}
}
return sound_location;
}
set {
+ if (value == null)
+ throw new ArgumentNullException ("value");
sound_location = value;
+ load_completed = false;
+ OnSoundLocationChanged (EventArgs.Empty);
+ if (SoundLocationChanged != null)
+ SoundLocationChanged (this, EventArgs.Empty);
}
}
return audiostream;
}
set {
+ if (value == null)
+ throw new ArgumentNullException ("value");
audiostream = value;
+ load_completed = false;
+ OnStreamChanged (EventArgs.Empty);
+ if (StreamChanged != null)
+ StreamChanged (this, EventArgs.Empty);
}
}
public class SystemSound {
+ Stream resource;
+
internal SystemSound (string tag)
{
+ resource = typeof (SystemSound).Assembly.GetManifestResourceStream (tag + ".wav");
}
// plays async
public void Play ()
{
- throw new NotImplementedException ();
+ SoundPlayer sp = new SoundPlayer (resource);
+ sp.Play ();
}
}
}